import argparse
import chainer
from chainer import iterators
from chainercv.datasets import coco_bbox_label_names
from chainercv.datasets import COCOBboxDataset
from chainercv.datasets import voc_bbox_label_names
from chainercv.datasets import VOCBboxDataset
from chainercv.evaluations import eval_detection_coco
from chainercv.evaluations import eval_detection_voc
from chainercv.experimental.links import YOLOv2Tiny
from chainercv.links import FasterRCNNFPNResNet101
from chainercv.links import FasterRCNNFPNResNet50
from chainercv.links import FasterRCNNVGG16
from chainercv.links import LightHeadRCNNResNet101
from chainercv.links import SSD300
from chainercv.links import SSD512
from chainercv.links import YOLOv2
from chainercv.links import YOLOv3
from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook
models = {
# model: (class, dataset -> pretrained_model, default batchsize)
'faster_rcnn': (FasterRCNNVGG16, {'voc': 'voc07'}, 32),
'faster_rcnn_fpn_resnet50': (FasterRCNNFPNResNet50, {}, 1),
'faster_rcnn_fpn_resnet101': (FasterRCNNFPNResNet101, {}, 1),
'light_head_rcnn_resnet101': (LightHeadRCNNResNet101, {}, 1),
'ssd300': (SSD300, {'voc': 'voc0712'}, 32),
'ssd512': (SSD512, {'voc': 'voc0712'}, 16),
'yolo_v2': (YOLOv2, {'voc': 'voc0712'}, 32),
'yolo_v2_tiny': (YOLOv2Tiny, {'voc': 'voc0712'}, 32),
'yolo_v3': (YOLOv3, {'voc': 'voc0712'}, 16),
}
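# For example (illustrative): models['ssd300'] unpacks to (SSD300, {'voc': 'voc0712'}, 32),
# so running with --model ssd300 --dataset voc and no --pretrained-model loads the
# 'voc0712' weights and evaluates with a default batch size of 32.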
def setup(dataset, model, pretrained_model, batchsize):
dataset_name = dataset
if dataset_name == 'voc':
dataset = VOCBboxDataset(
year='2007', split='test',
use_difficult=True, return_difficult=True)
label_names = voc_bbox_label_names
def eval_(out_values, rest_values):
pred_bboxes, pred_labels, pred_scores = out_values
gt_bboxes, gt_labels, gt_difficults = rest_values
result = eval_detection_voc(
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_difficults,
use_07_metric=True)
print()
print('mAP: {:f}'.format(result['map']))
for l, name in enumerate(voc_bbox_label_names):
if result['ap'][l]:
print('{:s}: {:f}'.format(name, result['ap'][l]))
else:
print('{:s}: -'.format(name))
elif dataset_name == 'coco':
dataset = COCOBboxDataset(
year='2017', split='val',
use_crowded=True, return_area=True, return_crowded=True)
label_names = coco_bbox_label_names
def eval_(out_values, rest_values):
pred_bboxes, pred_labels, pred_scores = out_values
gt_bboxes, gt_labels, gt_area, gt_crowded = rest_values
result = eval_detection_coco(
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_area, gt_crowded)
print()
for area in ('all', 'large', 'medium', 'small'):
print('mmAP ({}):'.format(area),
result['map/iou=0.50:0.95/area={}/max_dets=100'.format(
area)])
cls, pretrained_models, default_batchsize = models[model]
if pretrained_model is None:
pretrained_model = pretrained_models.get(dataset_name, dataset_name)
model = cls(n_fg_class=len(label_names), pretrained_model=pretrained_model)
if batchsize is None:
batchsize = default_batchsize
return dataset, eval_, model, batchsize
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', choices=('voc', 'coco'))
parser.add_argument('--model', choices=sorted(models.keys()))
parser.add_argument('--pretrained-model')
parser.add_argument('--batchsize', type=int)
parser.add_argument('--gpu', type=int, default=-1)
args = parser.parse_args()
dataset, eval_, model, batchsize = setup(
args.dataset, args.model, args.pretrained_model, args.batchsize)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
model.use_preset('evaluate')
iterator = iterators.MultithreadIterator(
dataset, batchsize, repeat=False, shuffle=False)
in_values, out_values, rest_values = apply_to_iterator(
model.predict, iterator, hook=ProgressHook(len(dataset)))
# delete the unused iterator explicitly
del in_values
eval_(out_values, rest_values)
if __name__ == '__main__':
main()
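# Illustrative invocation (the script file name is assumed):
#   python eval_detection.py --dataset voc --model ssd300 --gpu 0
# This evaluates SSD300 with its voc0712 weights on the VOC2007 test set and
# prints the per-class AP and the mAP computed with the VOC 2007 metric.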
|
import asyncio
import logging
from aiopvapi.helpers.constants import ATTR_POSITION1, ATTR_POSITION_DATA
from aiopvapi.resources.shade import (
ATTR_POSKIND1,
MAX_POSITION,
MIN_POSITION,
factory as PvShade,
)
import async_timeout
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_SHADE,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_call_later
from .const import (
COORDINATOR,
DEVICE_INFO,
DEVICE_MODEL,
DOMAIN,
LEGACY_DEVICE_MODEL,
PV_API,
PV_ROOM_DATA,
PV_SHADE_DATA,
ROOM_ID_IN_SHADE,
ROOM_NAME_UNICODE,
SHADE_RESPONSE,
STATE_ATTRIBUTE_ROOM_NAME,
)
from .entity import ShadeEntity
_LOGGER = logging.getLogger(__name__)
# Estimated time it takes to complete a transition
# from one state to another
TRANSITION_COMPLETE_DURATION = 30
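# The estimate below scales with the size of the move (see
# _async_schedule_update_for_transition): a full 100-step open-to-close move is
# assumed to take about 1 + int(30 * 100 / 100) = 31 s, a 50-step move about 16 s.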
PARALLEL_UPDATES = 1
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the hunter douglas shades."""
pv_data = hass.data[DOMAIN][entry.entry_id]
room_data = pv_data[PV_ROOM_DATA]
shade_data = pv_data[PV_SHADE_DATA]
pv_request = pv_data[PV_API]
coordinator = pv_data[COORDINATOR]
device_info = pv_data[DEVICE_INFO]
entities = []
for raw_shade in shade_data.values():
# The shade may be out of sync with the hub
# so we force a refresh when we add it if
# possible
shade = PvShade(raw_shade, pv_request)
name_before_refresh = shade.name
try:
async with async_timeout.timeout(1):
await shade.refresh()
except asyncio.TimeoutError:
# Forced refresh is not required for setup
pass
if ATTR_POSITION_DATA not in shade.raw_data:
_LOGGER.info(
"The %s shade was skipped because it is missing position data",
name_before_refresh,
)
continue
entities.append(
PowerViewShade(
shade, name_before_refresh, room_data, coordinator, device_info
)
)
async_add_entities(entities)
def hd_position_to_hass(hd_position):
"""Convert hunter douglas position to hass position."""
return round((hd_position / MAX_POSITION) * 100)
def hass_position_to_hd(hass_position):
"""Convert hass position to Hunter Douglas position."""
return int(hass_position / 100 * MAX_POSITION)
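# Illustrative round trip for the two converters above: a hub position of
# MAX_POSITION maps to 100 in hass and back to MAX_POSITION, while intermediate
# hub positions are rounded to the nearest whole percent on the way in.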
class PowerViewShade(ShadeEntity, CoverEntity):
"""Representation of a powerview shade."""
def __init__(self, shade, name, room_data, coordinator, device_info):
"""Initialize the shade."""
room_id = shade.raw_data.get(ROOM_ID_IN_SHADE)
super().__init__(coordinator, device_info, shade, name)
self._shade = shade
self._device_info = device_info
self._is_opening = False
self._is_closing = False
self._last_action_timestamp = 0
self._scheduled_transition_update = None
self._room_name = room_data.get(room_id, {}).get(ROOM_NAME_UNICODE, "")
self._current_cover_position = MIN_POSITION
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {STATE_ATTRIBUTE_ROOM_NAME: self._room_name}
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
if self._device_info[DEVICE_MODEL] != LEGACY_DEVICE_MODEL:
supported_features |= SUPPORT_STOP
return supported_features
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._current_cover_position == MIN_POSITION
@property
def is_opening(self):
"""Return if the cover is opening."""
return self._is_opening
@property
def is_closing(self):
"""Return if the cover is closing."""
return self._is_closing
@property
def current_cover_position(self):
"""Return the current position of cover."""
return hd_position_to_hass(self._current_cover_position)
@property
def device_class(self):
"""Return device class."""
return DEVICE_CLASS_SHADE
@property
def name(self):
"""Return the name of the shade."""
return self._shade_name
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self._async_move(0)
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self._async_move(100)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
# Cancel any previous updates
self._async_cancel_scheduled_transition_update()
self._async_update_from_command(await self._shade.stop())
await self._async_force_refresh_state()
async def async_set_cover_position(self, **kwargs):
"""Move the shade to a specific position."""
if ATTR_POSITION not in kwargs:
return
await self._async_move(kwargs[ATTR_POSITION])
async def _async_move(self, target_hass_position):
"""Move the shade to a position."""
current_hass_position = hd_position_to_hass(self._current_cover_position)
steps_to_move = abs(current_hass_position - target_hass_position)
if not steps_to_move:
return
self._async_schedule_update_for_transition(steps_to_move)
self._async_update_from_command(
await self._shade.move(
{
ATTR_POSITION1: hass_position_to_hd(target_hass_position),
ATTR_POSKIND1: 1,
}
)
)
self._is_opening = False
self._is_closing = False
if target_hass_position > current_hass_position:
self._is_opening = True
elif target_hass_position < current_hass_position:
self._is_closing = True
self.async_write_ha_state()
@callback
def _async_update_from_command(self, raw_data):
"""Update the shade state after a command."""
if not raw_data or SHADE_RESPONSE not in raw_data:
return
self._async_process_new_shade_data(raw_data[SHADE_RESPONSE])
@callback
def _async_process_new_shade_data(self, data):
"""Process new data from an update."""
self._shade.raw_data = data
self._async_update_current_cover_position()
@callback
def _async_update_current_cover_position(self):
"""Update the current cover position from the data."""
_LOGGER.debug("Raw data update: %s", self._shade.raw_data)
position_data = self._shade.raw_data.get(ATTR_POSITION_DATA, {})
if ATTR_POSITION1 in position_data:
self._current_cover_position = int(position_data[ATTR_POSITION1])
self._is_opening = False
self._is_closing = False
@callback
def _async_cancel_scheduled_transition_update(self):
"""Cancel any previous updates."""
if not self._scheduled_transition_update:
return
self._scheduled_transition_update()
self._scheduled_transition_update = None
@callback
def _async_schedule_update_for_transition(self, steps):
self.async_write_ha_state()
# Cancel any previous updates
self._async_cancel_scheduled_transition_update()
est_time_to_complete_transition = 1 + int(
TRANSITION_COMPLETE_DURATION * (steps / 100)
)
_LOGGER.debug(
"Estimated time to complete transition of %s steps for %s: %s",
steps,
self.name,
est_time_to_complete_transition,
)
# Schedule an update for when we expect the transition
# to be completed.
self._scheduled_transition_update = async_call_later(
self.hass,
est_time_to_complete_transition,
self._async_complete_schedule_update,
)
async def _async_complete_schedule_update(self, _):
"""Update status of the cover."""
_LOGGER.debug("Processing scheduled update for %s", self.name)
self._scheduled_transition_update = None
await self._async_force_refresh_state()
async def _async_force_refresh_state(self):
"""Refresh the cover state and force the device cache to be bypassed."""
await self._shade.refresh()
self._async_update_current_cover_position()
self.async_write_ha_state()
async def async_added_to_hass(self):
"""When entity is added to hass."""
self._async_update_current_cover_position()
self.async_on_remove(
self.coordinator.async_add_listener(self._async_update_shade_from_group)
)
@callback
def _async_update_shade_from_group(self):
"""Update with new data from the coordinator."""
if self._scheduled_transition_update:
# If a transition is in progress
# the data will be wrong
return
self._async_process_new_shade_data(self.coordinator.data[self._shade.id])
self.async_write_ha_state()
|
import logging
import voluptuous as vol
from homeassistant.components.image_processing import (
ATTR_CONFIDENCE,
CONF_CONFIDENCE,
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingFaceEntity,
)
from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE
from homeassistant.const import ATTR_NAME
from homeassistant.core import split_entity_id
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_GROUP = "group"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_GROUP): cv.slugify})
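# A minimal YAML sketch for this platform (the platform key is assumed from the
# module name; the group and entity ids are illustrative):
#   image_processing:
#     - platform: microsoft_face_identify
#       group: family
#       source:
#         - entity_id: camera.front_door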
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Microsoft Face identify platform."""
api = hass.data[DATA_MICROSOFT_FACE]
face_group = config[CONF_GROUP]
confidence = config[CONF_CONFIDENCE]
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
MicrosoftFaceIdentifyEntity(
camera[CONF_ENTITY_ID],
api,
face_group,
confidence,
camera.get(CONF_NAME),
)
)
async_add_entities(entities)
class MicrosoftFaceIdentifyEntity(ImageProcessingFaceEntity):
"""Representation of the Microsoft Face API entity for identify."""
def __init__(self, camera_entity, api, face_group, confidence, name=None):
"""Initialize the Microsoft Face API."""
super().__init__()
self._api = api
self._camera = camera_entity
self._confidence = confidence
self._face_group = face_group
if name:
self._name = name
else:
self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}"
@property
def confidence(self):
"""Return minimum confidence for send events."""
return self._confidence
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_process_image(self, image):
"""Process image.
This method is a coroutine.
"""
detect = []
try:
face_data = await self._api.call_api("post", "detect", image, binary=True)
if face_data:
face_ids = [data["faceId"] for data in face_data]
detect = await self._api.call_api(
"post",
"identify",
{"faceIds": face_ids, "personGroupId": self._face_group},
)
except HomeAssistantError as err:
_LOGGER.error("Can't process image on Microsoft face: %s", err)
return
# Parse data
known_faces = []
total = 0
for face in detect:
total += 1
if not face["candidates"]:
continue
data = face["candidates"][0]
name = ""
for s_name, s_id in self._api.store[self._face_group].items():
if data["personId"] == s_id:
name = s_name
break
known_faces.append(
{ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100}
)
self.async_process_faces(known_faces, total)
|
from canary.api import LOCATION_MODE_AWAY, LOCATION_MODE_HOME, LOCATION_MODE_NIGHT
from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN
from homeassistant.components.canary import DOMAIN
from homeassistant.const import (
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from . import mock_device, mock_location, mock_mode
from tests.async_mock import PropertyMock, patch
from tests.common import mock_registry
async def test_alarm_control_panel(hass, canary) -> None:
"""Test the creation and values of the alarm_control_panel for Canary."""
await async_setup_component(hass, "persistent_notification", {})
registry = mock_registry(hass)
online_device_at_home = mock_device(20, "Dining Room", True, "Canary Pro")
mocked_location = mock_location(
location_id=100,
name="Home",
is_celsius=True,
is_private=False,
mode=mock_mode(7, "standby"),
devices=[online_device_at_home],
)
instance = canary.return_value
instance.get_locations.return_value = [mocked_location]
config = {DOMAIN: {"username": "test-username", "password": "test-password"}}
with patch("homeassistant.components.canary.PLATFORMS", ["alarm_control_panel"]):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
entity_id = "alarm_control_panel.home"
entity_entry = registry.async_get(entity_id)
assert entity_entry
assert entity_entry.unique_id == "100"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_UNKNOWN
assert not state.attributes["private"]
# test private system
type(mocked_location).is_private = PropertyMock(return_value=True)
await hass.helpers.entity_component.async_update_entity(entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ALARM_DISARMED
assert state.attributes["private"]
type(mocked_location).is_private = PropertyMock(return_value=False)
# test armed home
type(mocked_location).mode = PropertyMock(
return_value=mock_mode(4, LOCATION_MODE_HOME)
)
await hass.helpers.entity_component.async_update_entity(entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ALARM_ARMED_HOME
# test armed away
type(mocked_location).mode = PropertyMock(
return_value=mock_mode(5, LOCATION_MODE_AWAY)
)
await hass.helpers.entity_component.async_update_entity(entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ALARM_ARMED_AWAY
# test armed night
type(mocked_location).mode = PropertyMock(
return_value=mock_mode(6, LOCATION_MODE_NIGHT)
)
await hass.helpers.entity_component.async_update_entity(entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ALARM_ARMED_NIGHT
async def test_alarm_control_panel_services(hass, canary) -> None:
"""Test the services of the alarm_control_panel for Canary."""
await async_setup_component(hass, "persistent_notification", {})
online_device_at_home = mock_device(20, "Dining Room", True, "Canary Pro")
mocked_location = mock_location(
location_id=100,
name="Home",
is_celsius=True,
mode=mock_mode(1, "disarmed"),
devices=[online_device_at_home],
)
instance = canary.return_value
instance.get_locations.return_value = [mocked_location]
config = {DOMAIN: {"username": "test-username", "password": "test-password"}}
with patch("homeassistant.components.canary.PLATFORMS", ["alarm_control_panel"]):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
entity_id = "alarm_control_panel.home"
# test arm away
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_ARM_AWAY,
service_data={"entity_id": entity_id},
blocking=True,
)
instance.set_location_mode.assert_called_with(100, LOCATION_MODE_AWAY)
# test arm home
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_ARM_HOME,
service_data={"entity_id": entity_id},
blocking=True,
)
instance.set_location_mode.assert_called_with(100, LOCATION_MODE_HOME)
# test arm night
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_ARM_NIGHT,
service_data={"entity_id": entity_id},
blocking=True,
)
instance.set_location_mode.assert_called_with(100, LOCATION_MODE_NIGHT)
# test disarm
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_DISARM,
service_data={"entity_id": entity_id},
blocking=True,
)
instance.set_location_mode.assert_called_with(100, "disarmed", True)
|
import typing
import keras
import keras.backend as K
import tensorflow as tf
from matchzoo import preprocessors
from matchzoo.contrib.layers import DecayingDropoutLayer
from matchzoo.contrib.layers import EncodingLayer
from matchzoo.engine import hyper_spaces
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
class DIIN(BaseModel):
"""
DIIN model.
Examples:
>>> model = DIIN()
>>> model.guess_and_fill_missing_params()
>>> model.params['embedding_input_dim'] = 10000
>>> model.params['embedding_output_dim'] = 300
>>> model.params['embedding_trainable'] = True
>>> model.params['optimizer'] = 'adam'
>>> model.params['dropout_initial_keep_rate'] = 1.0
>>> model.params['dropout_decay_interval'] = 10000
>>> model.params['dropout_decay_rate'] = 0.977
>>> model.params['char_embedding_input_dim'] = 100
>>> model.params['char_embedding_output_dim'] = 8
>>> model.params['char_conv_filters'] = 100
>>> model.params['char_conv_kernel_size'] = 5
>>> model.params['first_scale_down_ratio'] = 0.3
>>> model.params['nb_dense_blocks'] = 3
>>> model.params['layers_per_dense_block'] = 8
>>> model.params['growth_rate'] = 20
>>> model.params['transition_scale_down_ratio'] = 0.5
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_embedding=True)
params['optimizer'] = 'adam'
params.add(Param(name='dropout_decay_interval', value=10000,
desc="The decay interval of decaying_dropout."))
params.add(Param(name='char_embedding_input_dim', value=100,
desc="The input dimension of character embedding "
"layer."))
params.add(Param(name='char_embedding_output_dim', value=2,
desc="The output dimension of character embedding "
"layer."))
params.add(Param(name='char_conv_filters', value=8,
desc="The filter size of character convolution "
"layer."))
params.add(Param(name='char_conv_kernel_size', value=2,
desc="The kernel size of character convolution "
"layer."))
params.add(Param(name='first_scale_down_ratio', value=0.3,
desc="The channel scale down ratio of the "
"convolution layer before densenet."))
params.add(Param(name='nb_dense_blocks', value=1,
desc="The number of blocks in densenet."))
params.add(Param(name='layers_per_dense_block', value=2,
desc="The number of convolution layers in dense "
"block."))
params.add(Param(name='growth_rate', value=2,
desc="The filter size of each convolution layer in "
"dense block."))
params.add(Param(name='transition_scale_down_ratio', value=0.5,
desc="The channel scale down ratio of the "
"convolution layer in transition block."))
params.add(Param(
name='dropout_initial_keep_rate', value=1.0,
hyper_space=hyper_spaces.quniform(
low=0.8, high=1.0, q=0.02),
desc="The initial keep rate of decaying_dropout."
))
params.add(Param(
name='dropout_decay_rate', value=0.97,
hyper_space=hyper_spaces.quniform(
low=0.90, high=0.99, q=0.01),
desc="The decay rate of decaying_dropout."
))
return params
@classmethod
def get_default_preprocessor(cls):
""":return: Default preprocessor."""
return preprocessors.DIINPreprocessor()
def guess_and_fill_missing_params(self, verbose: int = 1):
"""
Guess and fill missing parameters in :attr:`params`.
Use this method to automatically fill in hyper parameters.
This involves some guessing, so the parameters it fills could be
wrong. For example, the default task is 'Ranking', and if we do not
set it to 'Classification' manually for data packs prepared for
classification, then the shape of the model output and the data will
not match.
:param verbose: Verbosity.
"""
self._params.get('input_shapes').set_default([(32,),
(32,),
(32, 16),
(32, 16),
(32,),
(32,)], verbose)
super().guess_and_fill_missing_params(verbose)
def _make_inputs(self) -> list:
text_left = keras.layers.Input(
name='text_left',
shape=self._params['input_shapes'][0]
)
text_right = keras.layers.Input(
name='text_right',
shape=self._params['input_shapes'][1]
)
char_left = keras.layers.Input(
name='char_left',
shape=self._params['input_shapes'][2]
)
char_right = keras.layers.Input(
name='char_right',
shape=self._params['input_shapes'][3]
)
match_left = keras.layers.Input(
name='match_left',
shape=self._params['input_shapes'][4]
)
match_right = keras.layers.Input(
name='match_right',
shape=self._params['input_shapes'][5]
)
return [text_left, text_right,
char_left, char_right,
match_left, match_right]
def build(self):
"""Build model structure."""
# Scalar dimensions referenced here:
# B = batch size (number of sequences)
# D = word embedding size
# L = 'input_left' sequence length
# R = 'input_right' sequence length
# C = fixed word length
inputs = self._make_inputs()
# Left text and right text.
# shape = [B, L]
# shape = [B, R]
text_left, text_right = inputs[0:2]
# Left character and right character.
# shape = [B, L, C]
# shape = [B, R, C]
char_left, char_right = inputs[2:4]
# Left exact match and right exact match.
# shape = [B, L]
# shape = [B, R]
match_left, match_right = inputs[4:6]
# Embedding module
left_embeddings = []
right_embeddings = []
# Word embedding feature
word_embedding = self._make_embedding_layer()
# shape = [B, L, D]
left_word_embedding = word_embedding(text_left)
# shape = [B, R, D]
right_word_embedding = word_embedding(text_right)
left_word_embedding = DecayingDropoutLayer(
initial_keep_rate=self._params['dropout_initial_keep_rate'],
decay_interval=self._params['dropout_decay_interval'],
decay_rate=self._params['dropout_decay_rate']
)(left_word_embedding)
right_word_embedding = DecayingDropoutLayer(
initial_keep_rate=self._params['dropout_initial_keep_rate'],
decay_interval=self._params['dropout_decay_interval'],
decay_rate=self._params['dropout_decay_rate']
)(right_word_embedding)
left_embeddings.append(left_word_embedding)
right_embeddings.append(right_word_embedding)
# Exact match feature
# shape = [B, L, 1]
left_exact_match = keras.layers.Reshape(
target_shape=(K.int_shape(match_left)[1], 1,)
)(match_left)
# shape = [B, R, 1]
right_exact_match = keras.layers.Reshape(
target_shape=(K.int_shape(match_left)[1], 1,)
)(match_right)
left_embeddings.append(left_exact_match)
right_embeddings.append(right_exact_match)
# Char embedding feature
char_embedding = self._make_char_embedding_layer()
char_embedding.build(
input_shape=(None, None, K.int_shape(char_left)[-1]))
left_char_embedding = char_embedding(char_left)
right_char_embedding = char_embedding(char_right)
left_embeddings.append(left_char_embedding)
right_embeddings.append(right_char_embedding)
# Concatenate
left_embedding = keras.layers.Concatenate()(left_embeddings)
right_embedding = keras.layers.Concatenate()(right_embeddings)
d = K.int_shape(left_embedding)[-1]
# Encoding module
left_encoding = EncodingLayer(
initial_keep_rate=self._params['dropout_initial_keep_rate'],
decay_interval=self._params['dropout_decay_interval'],
decay_rate=self._params['dropout_decay_rate']
)(left_embedding)
right_encoding = EncodingLayer(
initial_keep_rate=self._params['dropout_initial_keep_rate'],
decay_interval=self._params['dropout_decay_interval'],
decay_rate=self._params['dropout_decay_rate']
)(right_embedding)
# Interaction module
interaction = keras.layers.Lambda(self._make_interaction)(
[left_encoding, right_encoding])
# Feature extraction module
feature_extractor_input = keras.layers.Conv2D(
filters=int(d * self._params['first_scale_down_ratio']),
kernel_size=(1, 1),
activation=None)(interaction)
feature_extractor = self._create_densenet()
features = feature_extractor(feature_extractor_input)
# Output module
features = DecayingDropoutLayer(
initial_keep_rate=self._params['dropout_initial_keep_rate'],
decay_interval=self._params['dropout_decay_interval'],
decay_rate=self._params['dropout_decay_rate'])(features)
out = self._make_output_layer()(features)
self._backend = keras.Model(inputs=inputs, outputs=out)
def _make_char_embedding_layer(self) -> keras.layers.Layer:
"""
Apply embedding, conv and maxpooling operation over time dimension
for each token to obtain a vector.
:return: Wrapper Keras 'Layer' as character embedding feature
extractor.
"""
return keras.layers.TimeDistributed(keras.Sequential([
keras.layers.Embedding(
input_dim=self._params['char_embedding_input_dim'],
output_dim=self._params['char_embedding_output_dim'],
input_length=self._params['input_shapes'][2][-1]),
keras.layers.Conv1D(
filters=self._params['char_conv_filters'],
kernel_size=self._params['char_conv_kernel_size']),
keras.layers.GlobalMaxPooling1D()]))
def _make_interaction(self, inputs_) -> typing.Any:
left_encoding = inputs_[0]
right_encoding = inputs_[1]
left_encoding = tf.expand_dims(left_encoding, axis=2)
right_encoding = tf.expand_dims(right_encoding, axis=1)
interaction = left_encoding * right_encoding
return interaction
def _create_densenet(self) -> typing.Callable:
"""
DenseNet consists of 'nb_dense_blocks' pairs of a Dense block
followed by a Transition block.
:return: Wrapper Keras 'Layer' as DenseNet, tensor in tensor out.
"""
def _wrapper(x):
for _ in range(self._params['nb_dense_blocks']):
# Dense block
# Apply 'layers_per_dense_block' convolution layers.
for _ in range(self._params['layers_per_dense_block']):
out_conv = keras.layers.Conv2D(
filters=self._params['growth_rate'],
kernel_size=(3, 3),
padding='same',
activation='relu')(x)
x = keras.layers.Concatenate(axis=-1)([x, out_conv])
# Transition block
# Apply a convolution layer and a maxpooling layer.
scale_down_ratio = self._params['transition_scale_down_ratio']
nb_filter = int(K.int_shape(x)[-1] * scale_down_ratio)
x = keras.layers.Conv2D(
filters=nb_filter,
kernel_size=(1, 1),
padding='same',
activation=None)(x)
x = keras.layers.MaxPool2D(strides=(2, 2))(x)
out_densenet = keras.layers.Flatten()(x)
return out_densenet
return _wrapper
|
import os
import re
import sys
import setuptools
import setuptools.command.test
from distutils.command.install import INSTALL_SCHEMES
try:
from setuptools import setup
except ImportError:
from distutils.core import setup # noqa
# -- Parse meta
re_meta = re.compile(r'__(\w+?)__\s*=\s*(.*)')
re_doc = re.compile(r'^"""(.+?)"""')
def add_default(m):
attr_name, attr_value = m.groups()
return ((attr_name, attr_value.strip("\"'")),)
def add_doc(m):
return (('doc', m.groups()[0]),)
pats = {re_meta: add_default, re_doc: add_doc}
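# Illustrative matches for the patterns above (values are made up): a line such as
#   __version__ = '5.0.0'
# yields ('version', '5.0.0') via add_default, while a one-line module docstring
# like """Messaging library for Python.""" yields ('doc', 'Messaging library for Python.')
# via add_doc.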
here = os.path.abspath(os.path.dirname(__file__))
meta_fh = open(os.path.join(here, 'kombu/__init__.py'))
try:
meta = {}
for line in meta_fh:
if line.strip() == '# -eof meta-':
break
for pattern, handler in pats.items():
m = pattern.match(line.strip())
if m:
meta.update(handler(m))
finally:
meta_fh.close()
# --
def fullsplit(path, result=None):
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
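# For example, fullsplit('kombu/transport/virtual') returns
# ['kombu', 'transport', 'virtual'] (illustrative path, POSIX separators assumed).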
for scheme in list(INSTALL_SCHEMES.values()):
scheme['data'] = scheme['purelib']
# if os.path.exists('README.rst'):
# long_description = codecs.open('README.rst', 'r', 'utf-8').read()
# else:
# long_description = 'See https://pypi.org/project/kombu/'
# -*- Installation Requires -*-
py_version = sys.version_info
is_pypy = hasattr(sys, 'pypy_version_info')
def strip_comments(l):
return l.split('#', 1)[0].strip()
def reqs(*f):
return [
r for r in (
strip_comments(l) for l in open(
os.path.join(os.getcwd(), 'requirements', *f)).readlines()
) if r]
def extras(*p):
return reqs('extras', *p)
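# The helpers above read pinned requirements relative to the working directory:
# reqs('default.txt') parses requirements/default.txt and extras('redis.txt')
# parses requirements/extras/redis.txt, dropping '#' comments and blank lines.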
class pytest(setuptools.command.test.test):
user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')]
def initialize_options(self):
setuptools.command.test.test.initialize_options(self)
self.pytest_args = []
def run_tests(self):
import pytest
sys.exit(pytest.main(self.pytest_args))
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='kombu',
packages=setuptools.find_packages(exclude=['t', 't.*']),
version=meta['version'],
description=meta['doc'],
keywords='messaging message amqp rabbitmq redis actor producer consumer',
author=meta['author'],
author_email=meta['contact'],
url=meta['homepage'],
platforms=['any'],
zip_safe=False,
license='BSD',
cmdclass={'test': pytest},
python_requires=">=3.6",
install_requires=reqs('default.txt'),
tests_require=reqs('test.txt'),
extras_require={
'msgpack': extras('msgpack.txt'),
'yaml': extras('yaml.txt'),
'redis': extras('redis.txt'),
'mongodb': extras('mongodb.txt'),
'sqs': extras('sqs.txt'),
'zookeeper': extras('zookeeper.txt'),
'sqlalchemy': extras('sqlalchemy.txt'),
'librabbitmq': extras('librabbitmq.txt'),
'pyro': extras('pyro.txt'),
'slmq': extras('slmq.txt'),
'azurestoragequeues': extras('azurestoragequeues.txt'),
'azureservicebus': extras('azureservicebus.txt'),
'qpid': extras('qpid.txt'),
'consul': extras('consul.txt'),
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Intended Audience :: Developers',
'Topic :: Communications',
'Topic :: System :: Distributed Computing',
'Topic :: System :: Networking',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import json
import pathlib
from pprint import pprint
import re
from .const import CORE_PROJECT_ID, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .lokalise import get_api
FRONTEND_REPO = pathlib.Path("../frontend/")
def create_lookup(results):
"""Create a lookup table by key name."""
return {key["key_name"]["web"]: key for key in results}
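# e.g. create_lookup(results)["component::hue::title"] returns the full key object
# for that "web" key name (the key name shown is illustrative).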
def rename_keys(project_id, to_migrate):
"""Rename keys.
to_migrate is Dict[from_key] = to_key.
"""
updates = []
lokalise = get_api(project_id)
from_key_data = lokalise.keys_list({"filter_keys": ",".join(to_migrate)})
if len(from_key_data) != len(to_migrate):
print(
f"Lookin up keys in Lokalise returns {len(from_key_data)} results, expected {len(to_migrate)}"
)
return
from_key_lookup = create_lookup(from_key_data)
print("Gathering IDs")
for from_key, to_key in to_migrate.items():
updates.append(
{"key_id": from_key_lookup[from_key]["key_id"], "key_name": to_key}
)
pprint(updates)
print()
while input("Type YES to confirm: ") != "YES":
pass
print()
print("Updating keys")
pprint(lokalise.keys_bulk_update(updates))
def list_keys_helper(lokalise, keys, params={}, *, validate=True):
"""List keys in chunks so it doesn't exceed max URL length."""
results = []
for i in range(0, len(keys), 100):
filter_keys = keys[i : i + 100]
from_key_data = lokalise.keys_list(
{
**params,
"filter_keys": ",".join(filter_keys),
"limit": len(filter_keys) + 1,
}
)
if len(from_key_data) == len(filter_keys) or not validate:
results.extend(from_key_data)
continue
print(
f"Lookin up keys in Lokalise returns {len(from_key_data)} results, expected {len(keys)}"
)
searched = set(filter_keys)
returned = set(create_lookup(from_key_data))
print("Not found:", ", ".join(searched - returned))
raise ValueError
return results
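# e.g. 250 keys are fetched in chunks of 100, 100 and 50; each request sets the
# limit to the chunk size plus one so a mismatch between requested and returned
# keys can be detected when validate is True.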
def migrate_project_keys_translations(from_project_id, to_project_id, to_migrate):
"""Migrate keys and translations from one project to another.
to_migrate is Dict[from_key] = to_key.
"""
from_lokalise = get_api(from_project_id)
to_lokalise = get_api(to_project_id)
# Fetch keys in target
# We are going to skip migrating existing keys
print("Checking which target keys exist..")
try:
to_key_data = list_keys_helper(
to_lokalise, list(to_migrate.values()), validate=False
)
except ValueError:
return
existing = set(create_lookup(to_key_data))
missing = [key for key in to_migrate.values() if key not in existing]
if not missing:
print("All keys to migrate exist already, nothing to do")
return
# Fetch keys whose translations we're importing
print("Fetch translations that we're importing..")
try:
from_key_data = list_keys_helper(
from_lokalise,
[key for key, value in to_migrate.items() if value not in existing],
{"include_translations": 1},
)
except ValueError:
return
from_key_lookup = create_lookup(from_key_data)
print("Creating", ", ".join(missing))
to_key_lookup = create_lookup(
to_lokalise.keys_create(
[{"key_name": key, "platforms": ["web"]} for key in missing]
)
)
updates = []
for from_key, to_key in to_migrate.items():
# If it is not in the lookup, it already existed; skip it.
if to_key not in to_key_lookup:
continue
updates.append(
{
"key_id": to_key_lookup[to_key]["key_id"],
"translations": [
{
"language_iso": from_translation["language_iso"],
"translation": from_translation["translation"],
"is_reviewed": from_translation["is_reviewed"],
"is_fuzzy": from_translation["is_fuzzy"],
}
for from_translation in from_key_lookup[from_key]["translations"]
],
}
)
print("Updating")
pprint(updates)
print()
print()
pprint(to_lokalise.keys_bulk_update(updates))
def find_and_rename_keys():
"""Find and rename keys in core."""
to_migrate = {}
for integration in INTEGRATIONS_DIR.iterdir():
strings_file = integration / "strings.json"
if not strings_file.is_file():
continue
strings = json.loads(strings_file.read_text())
if "title" in strings.get("config", {}):
from_key = f"component::{integration.name}::config::title"
to_key = f"component::{integration.name}::title"
to_migrate[from_key] = to_key
rename_keys(CORE_PROJECT_ID, to_migrate)
def find_different_languages():
"""Find different supported languages."""
core_api = get_api(CORE_PROJECT_ID)
frontend_api = get_api(FRONTEND_PROJECT_ID)
core_languages = {lang["lang_iso"] for lang in core_api.languages_list()}
frontend_languages = {lang["lang_iso"] for lang in frontend_api.languages_list()}
print("Core minus frontend", core_languages - frontend_languages)
print("Frontend minus core", frontend_languages - core_languages)
def interactive_update():
"""Interactive update integration strings."""
for integration in INTEGRATIONS_DIR.iterdir():
strings_file = integration / "strings.json"
if not strings_file.is_file():
continue
strings = json.loads(strings_file.read_text())
if "title" not in strings:
continue
manifest = json.loads((integration / "manifest.json").read_text())
print("Processing", manifest["name"])
print("Translation title", strings["title"])
if input("Drop title? (1=yes, 2=no) ") == "1":
strings.pop("title")
strings_file.write_text(json.dumps(strings))
print()
STATE_REWRITE = {
"Off": "[%key:common::state::off%]",
"On": "[%key:common::state::on%]",
"Unknown": "[%key:common::state::unknown%]",
"Unavailable": "[%key:common::state::unavailable%]",
"Open": "[%key:common::state::open%]",
"Closed": "[%key:common::state::closed%]",
"Connected": "[%key:common::state::connected%]",
"Disconnected": "[%key:common::state::disconnected%]",
"Locked": "[%key:common::state::locked%]",
"Unlocked": "[%key:common::state::unlocked%]",
"Active": "[%key:common::state::active%]",
"active": "[%key:common::state::active%]",
"Standby": "[%key:common::state::standby%]",
"Idle": "[%key:common::state::idle%]",
"idle": "[%key:common::state::idle%]",
"Paused": "[%key:common::state::paused%]",
"paused": "[%key:common::state::paused%]",
"Home": "[%key:common::state::home%]",
"Away": "[%key:common::state::not_home%]",
"[%key:state::default::off%]": "[%key:common::state::off%]",
"[%key:state::default::on%]": "[%key:common::state::on%]",
"[%key:state::cover::open%]": "[%key:common::state::open%]",
"[%key:state::cover::closed%]": "[%key:common::state::closed%]",
"[%key:state::lock::locked%]": "[%key:common::state::locked%]",
"[%key:state::lock::unlocked%]": "[%key:common::state::unlocked%]",
}
SKIP_DOMAIN = {"default", "scene"}
STATES_WITH_DEV_CLASS = {"binary_sensor", "zwave"}
GROUP_DELETE = {"opening", "closing", "stopped"} # They don't exist
def find_frontend_states():
"""Find frontend states.
Source key -> target key
Add key to integrations strings.json
"""
frontend_states = json.loads(
(FRONTEND_REPO / "src/translations/en.json").read_text()
)["state"]
# domain => state object
to_write = {}
to_migrate = {}
for domain, states in frontend_states.items():
if domain in SKIP_DOMAIN:
continue
to_key_base = f"component::{domain}::state"
from_key_base = f"state::{domain}"
if domain in STATES_WITH_DEV_CLASS:
domain_to_write = dict(states)
for device_class, dev_class_states in domain_to_write.items():
to_device_class = "_" if device_class == "default" else device_class
for key in dev_class_states:
to_migrate[
f"{from_key_base}::{device_class}::{key}"
] = f"{to_key_base}::{to_device_class}::{key}"
# Rewrite "default" device class to _
if "default" in domain_to_write:
domain_to_write["_"] = domain_to_write.pop("default")
else:
if domain == "group":
for key in GROUP_DELETE:
states.pop(key)
domain_to_write = {"_": states}
for key in states:
to_migrate[f"{from_key_base}::{key}"] = f"{to_key_base}::_::{key}"
# Map common values to shared state key references
for dev_class_states in domain_to_write.values():
for key, value in dev_class_states.copy().items():
if value in STATE_REWRITE:
dev_class_states[key] = STATE_REWRITE[value]
continue
match = re.match(r"\[\%key:state::(\w+)::(.+)\%\]", value)
if not match:
continue
dev_class_states[key] = "[%key:component::{}::state::{}%]".format(
*match.groups()
)
to_write[domain] = domain_to_write
for domain, state in to_write.items():
strings = INTEGRATIONS_DIR / domain / "strings.json"
if strings.is_file():
content = json.loads(strings.read_text())
else:
content = {}
content["state"] = state
strings.write_text(json.dumps(content, indent=2) + "\n")
pprint(to_migrate)
print()
while input("Type YES to confirm: ") != "YES":
pass
migrate_project_keys_translations(FRONTEND_PROJECT_ID, CORE_PROJECT_ID, to_migrate)
def apply_data_references(to_migrate):
"""Apply references."""
for strings_file in INTEGRATIONS_DIR.glob("*/strings.json"):
strings = json.loads(strings_file.read_text())
steps = strings.get("config", {}).get("step")
if not steps:
continue
changed = False
for step_data in steps.values():
step_data = step_data.get("data", {})
for key, value in step_data.items():
if key in to_migrate and value != to_migrate[key]:
if key.split("_")[0].lower() in value.lower():
step_data[key] = to_migrate[key]
changed = True
elif value.startswith("[%key"):
pass
else:
print(
f"{strings_file}: Skipped swapping '{key}': '{value}' does not contain '{key}'"
)
if not changed:
continue
strings_file.write_text(json.dumps(strings, indent=2))
def run():
"""Migrate translations."""
apply_data_references(
{
"host": "[%key:common::config_flow::data::host%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"port": "[%key:common::config_flow::data::port%]",
"usb_path": "[%key:common::config_flow::data::usb_path%]",
"access_token": "[%key:common::config_flow::data::access_token%]",
"api_key": "[%key:common::config_flow::data::api_key%]",
}
)
# Rename existing keys to common keys,
# Old keys have been updated with reference to the common key
# rename_keys(
# CORE_PROJECT_ID,
# {
# "component::blebox::config::step::user::data::host": "common::config_flow::data::ip",
# },
# )
# find_frontend_states()
# find_different_languages()
return 0
|
import asyncio
import logging
from pyopenuv import Client
from pyopenuv.errors import OpenUvError
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_BINARY_SENSORS,
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_SENSORS,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.service import verify_domain_control
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
DATA_OPENUV_CLIENT = "data_client"
DATA_OPENUV_LISTENER = "data_listener"
DATA_PROTECTION_WINDOW = "protection_window"
DATA_UV = "uv"
DEFAULT_ATTRIBUTION = "Data provided by OpenUV"
NOTIFICATION_ID = "openuv_notification"
NOTIFICATION_TITLE = "OpenUV Component Setup"
TOPIC_UPDATE = f"{DOMAIN}_data_update"
TYPE_CURRENT_OZONE_LEVEL = "current_ozone_level"
TYPE_CURRENT_UV_INDEX = "current_uv_index"
TYPE_CURRENT_UV_LEVEL = "current_uv_level"
TYPE_MAX_UV_INDEX = "max_uv_index"
TYPE_PROTECTION_WINDOW = "uv_protection_window"
TYPE_SAFE_EXPOSURE_TIME_1 = "safe_exposure_time_type_1"
TYPE_SAFE_EXPOSURE_TIME_2 = "safe_exposure_time_type_2"
TYPE_SAFE_EXPOSURE_TIME_3 = "safe_exposure_time_type_3"
TYPE_SAFE_EXPOSURE_TIME_4 = "safe_exposure_time_type_4"
TYPE_SAFE_EXPOSURE_TIME_5 = "safe_exposure_time_type_5"
TYPE_SAFE_EXPOSURE_TIME_6 = "safe_exposure_time_type_6"
PLATFORMS = ["binary_sensor", "sensor"]
async def async_setup(hass, config):
"""Set up the OpenUV component."""
hass.data[DOMAIN] = {DATA_OPENUV_CLIENT: {}, DATA_OPENUV_LISTENER: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up OpenUV as config entry."""
_verify_domain_control = verify_domain_control(hass, DOMAIN)
try:
websession = aiohttp_client.async_get_clientsession(hass)
openuv = OpenUV(
Client(
config_entry.data[CONF_API_KEY],
config_entry.data.get(CONF_LATITUDE, hass.config.latitude),
config_entry.data.get(CONF_LONGITUDE, hass.config.longitude),
websession,
altitude=config_entry.data.get(CONF_ELEVATION, hass.config.elevation),
)
)
await openuv.async_update()
hass.data[DOMAIN][DATA_OPENUV_CLIENT][config_entry.entry_id] = openuv
except OpenUvError as err:
_LOGGER.error("Config entry failed: %s", err)
raise ConfigEntryNotReady from err
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
@_verify_domain_control
async def update_data(service):
"""Refresh all OpenUV data."""
_LOGGER.debug("Refreshing all OpenUV data")
await openuv.async_update()
async_dispatcher_send(hass, TOPIC_UPDATE)
@_verify_domain_control
async def update_uv_index_data(service):
"""Refresh OpenUV UV index data."""
_LOGGER.debug("Refreshing OpenUV UV index data")
await openuv.async_update_uv_index_data()
async_dispatcher_send(hass, TOPIC_UPDATE)
@_verify_domain_control
async def update_protection_data(service):
"""Refresh OpenUV protection window data."""
_LOGGER.debug("Refreshing OpenUV protection window data")
await openuv.async_update_protection_data()
async_dispatcher_send(hass, TOPIC_UPDATE)
for service, method in [
("update_data", update_data),
("update_uv_index_data", update_uv_index_data),
("update_protection_data", update_protection_data),
]:
hass.services.async_register(DOMAIN, service, method)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][DATA_OPENUV_CLIENT].pop(config_entry.entry_id)
return unload_ok
async def async_migrate_entry(hass, config_entry):
"""Migrate the config entry upon new versions."""
version = config_entry.version
data = {**config_entry.data}
_LOGGER.debug("Migrating from version %s", version)
# 1 -> 2: Remove unused condition data:
if version == 1:
data.pop(CONF_BINARY_SENSORS, None)
data.pop(CONF_SENSORS, None)
version = config_entry.version = 2
hass.config_entries.async_update_entry(config_entry, data=data)
_LOGGER.debug("Migration to version %s successful", version)
return True
class OpenUV:
"""Define a generic OpenUV object."""
def __init__(self, client):
"""Initialize."""
self.client = client
self.data = {}
async def async_update_protection_data(self):
"""Update binary sensor (protection window) data."""
try:
resp = await self.client.uv_protection_window()
self.data[DATA_PROTECTION_WINDOW] = resp["result"]
except OpenUvError as err:
_LOGGER.error("Error during protection data update: %s", err)
self.data[DATA_PROTECTION_WINDOW] = {}
async def async_update_uv_index_data(self):
"""Update sensor (uv index, etc) data."""
try:
data = await self.client.uv_index()
self.data[DATA_UV] = data
except OpenUvError as err:
_LOGGER.error("Error during uv index data update: %s", err)
self.data[DATA_UV] = {}
async def async_update(self):
"""Update sensor/binary sensor data."""
tasks = [self.async_update_protection_data(), self.async_update_uv_index_data()]
await asyncio.gather(*tasks)
class OpenUvEntity(Entity):
"""Define a generic OpenUV entity."""
def __init__(self, openuv):
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._available = True
self._name = None
self.openuv = openuv
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
self.async_on_remove(async_dispatcher_connect(self.hass, TOPIC_UPDATE, update))
self.update_from_latest_data()
def update_from_latest_data(self):
"""Update the sensor using the latest data."""
raise NotImplementedError
|
import asyncio
import binascii
from collections import OrderedDict
import copy
import logging
import RFXtrx as rfxtrxmod
import async_timeout
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_DEVICE,
CONF_DEVICE_CLASS,
CONF_DEVICE_ID,
CONF_DEVICES,
CONF_HOST,
CONF_PORT,
DEGREE,
ELECTRICAL_CURRENT_AMPERE,
ENERGY_KILO_WATT_HOUR,
EVENT_HOMEASSISTANT_STOP,
LENGTH_MILLIMETERS,
PERCENTAGE,
POWER_WATT,
PRESSURE_HPA,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
SPEED_METERS_PER_SECOND,
TEMP_CELSIUS,
TIME_HOURS,
UV_INDEX,
VOLT,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from .const import (
ATTR_EVENT,
CONF_AUTOMATIC_ADD,
CONF_DATA_BITS,
CONF_DEBUG,
CONF_FIRE_EVENT,
CONF_OFF_DELAY,
CONF_REMOVE_DEVICE,
CONF_SIGNAL_REPETITIONS,
DEVICE_PACKET_TYPE_LIGHTING4,
EVENT_RFXTRX_EVENT,
SERVICE_SEND,
)
DOMAIN = "rfxtrx"
DEFAULT_SIGNAL_REPETITIONS = 1
SIGNAL_EVENT = f"{DOMAIN}_event"
DATA_TYPES = OrderedDict(
[
("Temperature", TEMP_CELSIUS),
("Temperature2", TEMP_CELSIUS),
("Humidity", PERCENTAGE),
("Barometer", PRESSURE_HPA),
("Wind direction", DEGREE),
("Rain rate", f"{LENGTH_MILLIMETERS}/{TIME_HOURS}"),
("Energy usage", POWER_WATT),
("Total usage", ENERGY_KILO_WATT_HOUR),
("Sound", None),
("Sensor Status", None),
("Counter value", "count"),
("UV", UV_INDEX),
("Humidity status", None),
("Forecast", None),
("Forecast numeric", None),
("Rain total", LENGTH_MILLIMETERS),
("Wind average speed", SPEED_METERS_PER_SECOND),
("Wind gust", SPEED_METERS_PER_SECOND),
("Chill", TEMP_CELSIUS),
("Count", "count"),
("Current Ch. 1", ELECTRICAL_CURRENT_AMPERE),
("Current Ch. 2", ELECTRICAL_CURRENT_AMPERE),
("Current Ch. 3", ELECTRICAL_CURRENT_AMPERE),
("Voltage", VOLT),
("Current", ELECTRICAL_CURRENT_AMPERE),
("Battery numeric", PERCENTAGE),
("Rssi numeric", SIGNAL_STRENGTH_DECIBELS_MILLIWATT),
]
)
_LOGGER = logging.getLogger(__name__)
DATA_RFXOBJECT = "rfxobject"
DATA_LISTENER = "ha_stop"
def _bytearray_string(data):
val = cv.string(data)
try:
return bytearray.fromhex(val)
except ValueError as err:
raise vol.Invalid(
"Data must be a hex string with multiple of two characters"
) from err
def _ensure_device(value):
if value is None:
return DEVICE_DATA_SCHEMA({})
return DEVICE_DATA_SCHEMA(value)
SERVICE_SEND_SCHEMA = vol.Schema({ATTR_EVENT: _bytearray_string})
DEVICE_DATA_SCHEMA = vol.Schema(
{
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
vol.Optional(CONF_OFF_DELAY): vol.All(
cv.time_period, cv.positive_timedelta, lambda value: value.total_seconds()
),
vol.Optional(CONF_DATA_BITS): cv.positive_int,
vol.Optional(CONF_COMMAND_ON): cv.byte,
vol.Optional(CONF_COMMAND_OFF): cv.byte,
vol.Optional(CONF_SIGNAL_REPETITIONS, default=1): cv.positive_int,
}
)
BASE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_DEBUG): cv.boolean,
vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean,
vol.Optional(CONF_DEVICES, default={}): {cv.string: _ensure_device},
},
)
DEVICE_SCHEMA = BASE_SCHEMA.extend({vol.Required(CONF_DEVICE): cv.string})
PORT_SCHEMA = BASE_SCHEMA.extend(
{vol.Required(CONF_PORT): cv.port, vol.Optional(CONF_HOST): cv.string}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.deprecated(CONF_DEBUG), vol.Any(DEVICE_SCHEMA, PORT_SCHEMA))},
extra=vol.ALLOW_EXTRA,
)
DOMAINS = ["switch", "sensor", "light", "binary_sensor", "cover"]
async def async_setup(hass, config):
"""Set up the RFXtrx component."""
if DOMAIN not in config:
return True
data = {
CONF_HOST: config[DOMAIN].get(CONF_HOST),
CONF_PORT: config[DOMAIN].get(CONF_PORT),
CONF_DEVICE: config[DOMAIN].get(CONF_DEVICE),
CONF_AUTOMATIC_ADD: config[DOMAIN].get(CONF_AUTOMATIC_ADD),
CONF_DEVICES: config[DOMAIN][CONF_DEVICES],
}
# Read the device_id from the event code and add it to the data that will end up in the ConfigEntry
for event_code, event_config in data[CONF_DEVICES].items():
event = get_rfx_object(event_code)
if event is None:
continue
device_id = get_device_id(
event.device, data_bits=event_config.get(CONF_DATA_BITS)
)
event_config[CONF_DEVICE_ID] = device_id
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=data,
)
)
return True
async def async_setup_entry(hass, entry: config_entries.ConfigEntry):
"""Set up the RFXtrx component."""
hass.data.setdefault(DOMAIN, {})
await async_setup_internal(hass, entry)
for domain in DOMAINS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, domain)
)
return True
async def async_unload_entry(hass, entry: config_entries.ConfigEntry):
"""Unload RFXtrx component."""
if not all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in DOMAINS
]
)
):
return False
hass.services.async_remove(DOMAIN, SERVICE_SEND)
listener = hass.data[DOMAIN][DATA_LISTENER]
listener()
rfx_object = hass.data[DOMAIN][DATA_RFXOBJECT]
await hass.async_add_executor_job(rfx_object.close_connection)
return True
def _create_rfx(config):
"""Construct a rfx object based on config."""
if config[CONF_PORT] is not None:
# If port is set then we create a TCP connection
rfx = rfxtrxmod.Connect(
(config[CONF_HOST], config[CONF_PORT]),
None,
transport_protocol=rfxtrxmod.PyNetworkTransport,
)
else:
rfx = rfxtrxmod.Connect(config[CONF_DEVICE], None)
return rfx
def _get_device_lookup(devices):
"""Get a lookup structure for devices."""
lookup = {}
for event_code, event_config in devices.items():
event = get_rfx_object(event_code)
if event is None:
continue
device_id = get_device_id(
event.device, data_bits=event_config.get(CONF_DATA_BITS)
)
lookup[device_id] = event_config
return lookup
async def async_setup_internal(hass, entry: config_entries.ConfigEntry):
"""Set up the RFXtrx component."""
config = entry.data
# Initialize library
try:
async with async_timeout.timeout(5):
rfx_object = await hass.async_add_executor_job(_create_rfx, config)
except asyncio.TimeoutError as err:
raise ConfigEntryNotReady from err
# Setup some per device config
devices = _get_device_lookup(config[CONF_DEVICES])
# Declare the event handler
@callback
def async_handle_receive(event):
"""Handle received messages from RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
event_data = {
"packet_type": event.device.packettype,
"sub_type": event.device.subtype,
"type_string": event.device.type_string,
"id_string": event.device.id_string,
"data": binascii.hexlify(event.data).decode("ASCII"),
"values": getattr(event, "values", None),
}
_LOGGER.debug("Receive RFXCOM event: %s", event_data)
data_bits = get_device_data_bits(event.device, devices)
device_id = get_device_id(event.device, data_bits=data_bits)
# Register new devices
if config[CONF_AUTOMATIC_ADD] and device_id not in devices:
_add_device(event, device_id)
# Callback to HA registered components.
hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_EVENT, event, device_id)
# Signal event to any other listeners
fire_event = devices.get(device_id, {}).get(CONF_FIRE_EVENT)
if fire_event:
hass.bus.async_fire(EVENT_RFXTRX_EVENT, event_data)
@callback
def _add_device(event, device_id):
"""Add a device to config entry."""
config = DEVICE_DATA_SCHEMA({})
config[CONF_DEVICE_ID] = device_id
data = entry.data.copy()
data[CONF_DEVICES] = copy.deepcopy(entry.data[CONF_DEVICES])
event_code = binascii.hexlify(event.data).decode("ASCII")
data[CONF_DEVICES][event_code] = config
hass.config_entries.async_update_entry(entry=entry, data=data)
devices[device_id] = config
def _shutdown_rfxtrx(event):
"""Close connection with RFXtrx."""
rfx_object.close_connection()
listener = hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_rfxtrx)
hass.data[DOMAIN][DATA_LISTENER] = listener
hass.data[DOMAIN][DATA_RFXOBJECT] = rfx_object
rfx_object.event_callback = lambda event: hass.add_job(async_handle_receive, event)
def send(call):
event = call.data[ATTR_EVENT]
rfx_object.transport.send(event)
hass.services.async_register(DOMAIN, SERVICE_SEND, send, schema=SERVICE_SEND_SCHEMA)
def get_rfx_object(packetid):
"""Return the RFXObject with the packetid."""
try:
binarypacket = bytearray.fromhex(packetid)
except ValueError:
return None
pkt = rfxtrxmod.lowlevel.parse(binarypacket)
if pkt is None:
return None
if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket):
obj = rfxtrxmod.SensorEvent(pkt)
elif isinstance(pkt, rfxtrxmod.lowlevel.Status):
obj = rfxtrxmod.StatusEvent(pkt)
else:
obj = rfxtrxmod.ControlEvent(pkt)
obj.data = binarypacket
return obj
def get_pt2262_deviceid(device_id, nb_data_bits):
"""Extract and return the address bits from a Lighting4/PT2262 packet."""
if nb_data_bits is None:
return
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ~((1 << nb_data_bits) - 1)
data[len(data) - 1] &= mask
return binascii.hexlify(data)
def get_pt2262_cmd(device_id, data_bits):
"""Extract and return the data bits from a Lighting4/PT2262 packet."""
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ((1 << data_bits) - 1)
return hex(data[-1] & mask)
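# Illustrative usage of the two PT2262 helpers above (the hex string is made
# up): get_pt2262_deviceid masks out the data bits to recover the address
# part, while get_pt2262_cmd keeps only the data bits as the command part.
#   get_pt2262_deviceid("22dead", 4)  # -> b'22dea0'
#   get_pt2262_cmd("22dead", 4)       # -> '0xd'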
def get_device_data_bits(device, devices):
"""Deduce data bits for device based on a cache of device bits."""
data_bits = None
if device.packettype == DEVICE_PACKET_TYPE_LIGHTING4:
for device_id, entity_config in devices.items():
bits = entity_config.get(CONF_DATA_BITS)
if get_device_id(device, bits) == device_id:
data_bits = bits
break
return data_bits
def find_possible_pt2262_device(device_ids, device_id):
"""Look for the device which id matches the given device_id parameter."""
for dev_id in device_ids:
if len(dev_id) == len(device_id):
size = None
for i, (char1, char2) in enumerate(zip(dev_id, device_id)):
if char1 != char2:
break
size = i
if size is not None:
size = len(dev_id) - size - 1
_LOGGER.info(
"rfxtrx: found possible device %s for %s "
"with the following configuration:\n"
"data_bits=%d\n"
"command_on=0x%s\n"
"command_off=0x%s\n",
device_id,
dev_id,
size * 4,
dev_id[-size:],
device_id[-size:],
)
return dev_id
return None
def get_device_id(device, data_bits=None):
"""Calculate a device id for device."""
id_string = device.id_string
if data_bits and device.packettype == DEVICE_PACKET_TYPE_LIGHTING4:
masked_id = get_pt2262_deviceid(id_string, data_bits)
if masked_id:
id_string = masked_id.decode("ASCII")
return (f"{device.packettype:x}", f"{device.subtype:x}", id_string)
class RfxtrxEntity(RestoreEntity):
"""Represents a Rfxtrx device.
Contains the common logic for Rfxtrx lights and switches.
"""
def __init__(self, device, device_id, event=None):
"""Initialize the device."""
self._name = f"{device.type_string} {device.id_string}"
self._device = device
self._event = event
self._device_id = device_id
self._unique_id = "_".join(x for x in self._device_id)
async def async_added_to_hass(self):
"""Restore RFXtrx device state (ON/OFF)."""
if self._event:
self._apply_event(self._event)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_EVENT, self._handle_event
)
)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
f"{DOMAIN}_{CONF_REMOVE_DEVICE}_{self._device_id}", self.async_remove
)
)
@property
def should_poll(self):
"""No polling needed for a RFXtrx switch."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if not self._event:
return None
return {ATTR_EVENT: "".join(f"{x:02x}" for x in self._event.data)}
@property
def assumed_state(self):
"""Return true if unable to access real state of entity."""
return True
@property
def unique_id(self):
"""Return unique identifier of remote device."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, *self._device_id)},
"name": f"{self._device.type_string} {self._device.id_string}",
"model": self._device.type_string,
}
def _apply_event(self, event):
"""Apply a received event."""
self._event = event
@callback
def _handle_event(self, event, device_id):
"""Handle a reception of data, overridden by other classes."""
class RfxtrxCommandEntity(RfxtrxEntity):
"""Represents a Rfxtrx device.
Contains the common logic for Rfxtrx lights and switches.
"""
def __init__(self, device, device_id, signal_repetitions=1, event=None):
"""Initialzie a switch or light device."""
super().__init__(device, device_id, event=event)
self.signal_repetitions = signal_repetitions
self._state = None
async def _async_send(self, fun, *args):
rfx_object = self.hass.data[DOMAIN][DATA_RFXOBJECT]
for _ in range(self.signal_repetitions):
await self.hass.async_add_executor_job(fun, rfx_object.transport, *args)
|
from django.core.exceptions import ValidationError
from django.template import Context, Engine, Template, TemplateSyntaxError
from django.urls import reverse
from django.utils.translation import gettext as _
from django.utils.translation import override
from weblate.utils.site import get_site_url
# List of schemes not allowed in editor URL
# This list is not intended to be complete; it just blocks
# the possibly dangerous ones.
FORBIDDEN_URL_SCHEMES = {
"javascript",
"data",
"vbscript",
"mailto",
"ftp",
"sms",
"tel",
}
class InvalidString(str):
def __mod__(self, other):
raise TemplateSyntaxError(_('Undefined variable: "%s"') % other)
class RestrictedEngine(Engine):
default_builtins = [
"django.template.defaultfilters",
"weblate.utils.templatetags.safe_render",
]
def __init__(self, *args, **kwargs):
kwargs["autoescape"] = False
kwargs["string_if_invalid"] = InvalidString("%s")
super().__init__(*args, **kwargs)
def render_template(template, **kwargs):
"""Helper class to render string template with context."""
from weblate.trans.models import Component, Project, Translation
translation = kwargs.get("translation")
component = kwargs.get("component")
project = kwargs.get("project")
    # Compatibility with older templates
if "addon_name" in kwargs:
kwargs["hook_name"] = kwargs["addon_name"]
if isinstance(translation, Translation):
translation.stats.ensure_basic()
kwargs["language_code"] = translation.language_code
kwargs["language_name"] = translation.language.get_name()
kwargs["stats"] = translation.stats.get_data()
kwargs["url"] = get_site_url(translation.get_absolute_url())
kwargs["filename"] = translation.filename
component = translation.component
kwargs.pop("translation", None)
if isinstance(component, Component):
kwargs["component_name"] = component.name
kwargs["component_slug"] = component.slug
kwargs[
"component_remote_branch"
] = component.repository.get_remote_branch_name()
if "url" not in kwargs:
kwargs["url"] = get_site_url(component.get_absolute_url())
kwargs["widget_url"] = get_site_url(
reverse(
"widget-image",
kwargs={
"project": component.project.slug,
"component": component.slug,
"widget": "horizontal",
"color": "auto",
"extension": "svg",
},
)
)
project = component.project
kwargs.pop("component", None)
if isinstance(project, Project):
kwargs["project_name"] = project.name
kwargs["project_slug"] = project.slug
if "url" not in kwargs:
kwargs["url"] = get_site_url(project.get_absolute_url())
kwargs.pop("project", None)
with override("en"):
return Template(template, engine=RestrictedEngine()).render(
Context(kwargs, autoescape=False)
)
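# Minimal illustration of render_template (assumes a configured Django
# environment; the template string is made up): keyword arguments become the
# template context, and "addon_name" is aliased to "hook_name" for
# backwards compatibility.
#   render_template("{{ hook_name }}", addon_name="addon")  # -> "addon"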
def validate_render(value, **kwargs):
"""Validates rendered template."""
try:
return render_template(value, **kwargs)
except Exception as err:
raise ValidationError(_("Failed to render template: {}").format(err))
def validate_render_component(value, translation=None, **kwargs):
from weblate.lang.models import Language
from weblate.trans.models import Component, Project, Translation
component = Component(
project=Project(name="project", slug="project", id=-1),
name="component",
slug="component",
branch="master",
vcs="git",
id=-1,
)
if translation:
kwargs["translation"] = Translation(
id=-1,
component=component,
language_code="xx",
language=Language(name="xxx", code="xx"),
)
else:
kwargs["component"] = component
validate_render(value, **kwargs)
def validate_render_addon(value):
validate_render_component(value, hook_name="addon", addon_name="addon")
def validate_render_commit(value):
validate_render_component(value, translation=True, author="author")
def validate_repoweb(val):
"""Validate whether URL for repository browser is valid.
It checks whether it can be filled in using format string.
"""
if "%(file)s" in val or "%(line)s" in val:
raise ValidationError(
_(
"The format strings are no longer supported, "
"please use the template language instead."
)
)
validate_render(val, filename="file.po", line=9, branch="master")
def validate_editor(val):
"""Validate URL for custom editor link.
- Check whether it correctly uses format strings.
- Check whether scheme is sane.
"""
if not val:
return
validate_repoweb(val)
if ":" not in val:
raise ValidationError(_("The editor link lacks URL scheme!"))
scheme = val.split(":", 1)[0]
# Block forbidden schemes as well as format strings
if scheme.strip().lower() in FORBIDDEN_URL_SCHEMES or "%" in scheme:
raise ValidationError(_("Forbidden URL scheme!"))
def migrate_repoweb(val):
return val % {
"file": "{{filename}}",
"../file": "{{filename|parentdir}}",
"../../file": "{{filename|parentdir|parentdir}}",
"../../../file": "{{filename|parentdir|parentdir}}",
"line": "{{line}}",
"branch": "{{branch}}",
}
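# Illustrative migration (the URL is made up): old-style %-format
# placeholders are rewritten to their template-language equivalents.
#   migrate_repoweb("https://example.com/blob/%(branch)s/%(file)s#L%(line)s")
#   # -> "https://example.com/blob/{{branch}}/{{filename}}#L{{line}}"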
|
from unittest import mock
from nx584 import client as nx584_client
import pytest
import requests
from homeassistant.components.nx584 import binary_sensor as nx584
from homeassistant.setup import async_setup_component
class StopMe(Exception):
"""Stop helper."""
@pytest.fixture
def fake_zones():
"""Fixture for fake zones.
Returns:
list: List of fake zones
"""
return [
{"name": "front", "number": 1},
{"name": "back", "number": 2},
{"name": "inside", "number": 3},
]
@pytest.fixture
def client(fake_zones):
"""Fixture for client.
Args:
fake_zones (list): Fixture of fake zones
Yields:
MagicMock: Client Mock
"""
with mock.patch.object(nx584_client, "Client") as _mock_client:
client = nx584_client.Client.return_value
client.list_zones.return_value = fake_zones
client.get_version.return_value = "1.1"
yield _mock_client
@pytest.mark.usefixtures("client")
class TestNX584SensorSetup:
"""Test the NX584 sensor platform."""
@staticmethod
@mock.patch("homeassistant.components.nx584.binary_sensor.NX584Watcher")
@mock.patch("homeassistant.components.nx584.binary_sensor.NX584ZoneSensor")
def test_setup_defaults(mock_nx, mock_watcher, hass, fake_zones):
"""Test the setup with no configuration."""
add_entities = mock.MagicMock()
config = {
"host": nx584.DEFAULT_HOST,
"port": nx584.DEFAULT_PORT,
"exclude_zones": [],
"zone_types": {},
}
assert nx584.setup_platform(hass, config, add_entities)
mock_nx.assert_has_calls([mock.call(zone, "opening") for zone in fake_zones])
assert add_entities.called
assert nx584_client.Client.call_count == 1
assert nx584_client.Client.call_args == mock.call("http://localhost:5007")
@staticmethod
@mock.patch("homeassistant.components.nx584.binary_sensor.NX584Watcher")
@mock.patch("homeassistant.components.nx584.binary_sensor.NX584ZoneSensor")
def test_setup_full_config(mock_nx, mock_watcher, hass, fake_zones):
"""Test the setup with full configuration."""
config = {
"host": "foo",
"port": 123,
"exclude_zones": [2],
"zone_types": {3: "motion"},
}
add_entities = mock.MagicMock()
assert nx584.setup_platform(hass, config, add_entities)
mock_nx.assert_has_calls(
[
mock.call(fake_zones[0], "opening"),
mock.call(fake_zones[2], "motion"),
]
)
assert add_entities.called
assert nx584_client.Client.call_count == 1
assert nx584_client.Client.call_args == mock.call("http://foo:123")
assert mock_watcher.called
@staticmethod
async def _test_assert_graceful_fail(hass, config):
"""Test the failing."""
assert not await async_setup_component(hass, "nx584", config)
@pytest.mark.parametrize(
"config",
[
({"exclude_zones": ["a"]}),
({"zone_types": {"a": "b"}}),
({"zone_types": {1: "notatype"}}),
({"zone_types": {"notazone": "motion"}}),
],
)
async def test_setup_bad_config(self, hass, config):
"""Test the setup with bad configuration."""
await self._test_assert_graceful_fail(hass, config)
@pytest.mark.parametrize(
"exception_type",
[
pytest.param(requests.exceptions.ConnectionError, id="connect_failed"),
pytest.param(IndexError, id="no_partitions"),
],
)
async def test_setup_with_exceptions(self, hass, exception_type):
"""Test the setup handles exceptions."""
nx584_client.Client.return_value.list_zones.side_effect = exception_type
await self._test_assert_graceful_fail(hass, {})
async def test_setup_version_too_old(self, hass):
"""Test if version is too old."""
nx584_client.Client.return_value.get_version.return_value = "1.0"
await self._test_assert_graceful_fail(hass, {})
@staticmethod
def test_setup_no_zones(hass):
"""Test the setup with no zones."""
nx584_client.Client.return_value.list_zones.return_value = []
add_entities = mock.MagicMock()
assert nx584.setup_platform(hass, {}, add_entities)
assert not add_entities.called
def test_nx584_zone_sensor_normal():
"""Test for the NX584 zone sensor."""
zone = {"number": 1, "name": "foo", "state": True}
sensor = nx584.NX584ZoneSensor(zone, "motion")
assert "foo" == sensor.name
assert not sensor.should_poll
assert sensor.is_on
assert sensor.device_state_attributes["zone_number"] == 1
zone["state"] = False
assert not sensor.is_on
class TestNX584Watcher:
"""Test the NX584 watcher."""
@staticmethod
@mock.patch.object(nx584.NX584ZoneSensor, "schedule_update_ha_state")
def test_process_zone_event(mock_update):
"""Test the processing of zone events."""
zone1 = {"number": 1, "name": "foo", "state": True}
zone2 = {"number": 2, "name": "bar", "state": True}
zones = {
1: nx584.NX584ZoneSensor(zone1, "motion"),
2: nx584.NX584ZoneSensor(zone2, "motion"),
}
watcher = nx584.NX584Watcher(None, zones)
watcher._process_zone_event({"zone": 1, "zone_state": False})
assert not zone1["state"]
assert mock_update.call_count == 1
@staticmethod
@mock.patch.object(nx584.NX584ZoneSensor, "schedule_update_ha_state")
def test_process_zone_event_missing_zone(mock_update):
"""Test the processing of zone events with missing zones."""
watcher = nx584.NX584Watcher(None, {})
watcher._process_zone_event({"zone": 1, "zone_state": False})
assert not mock_update.called
@staticmethod
def test_run_with_zone_events():
"""Test the zone events."""
empty_me = [1, 2]
def fake_get_events():
"""Return nothing twice, then some events."""
if empty_me:
empty_me.pop()
else:
return fake_events
client = mock.MagicMock()
fake_events = [
{"zone": 1, "zone_state": True, "type": "zone_status"},
{"zone": 2, "foo": False},
]
client.get_events.side_effect = fake_get_events
watcher = nx584.NX584Watcher(client, {})
@mock.patch.object(watcher, "_process_zone_event")
def run(fake_process):
"""Run a fake process."""
fake_process.side_effect = StopMe
with pytest.raises(StopMe):
watcher._run()
assert fake_process.call_count == 1
assert fake_process.call_args == mock.call(fake_events[0])
run()
assert 3 == client.get_events.call_count
@staticmethod
@mock.patch("time.sleep")
def test_run_retries_failures(mock_sleep):
"""Test the retries with failures."""
empty_me = [1, 2]
def fake_run():
"""Fake runner."""
if empty_me:
empty_me.pop()
raise requests.exceptions.ConnectionError()
raise StopMe()
watcher = nx584.NX584Watcher(None, {})
with mock.patch.object(watcher, "_run") as mock_inner:
mock_inner.side_effect = fake_run
with pytest.raises(StopMe):
watcher.run()
assert 3 == mock_inner.call_count
mock_sleep.assert_has_calls([mock.call(10), mock.call(10)])
|
from bson.objectid import ObjectId
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import BooleanField
from mongoengine.fields import EmbeddedDocumentField
from mongoengine.fields import EmbeddedDocumentListField
from mongoengine.fields import ListField
from mongoengine.fields import ObjectIdField
from mongoengine.fields import StringField
class LabeledSentences(EmbeddedDocument):
id = ObjectIdField(required=True, default=lambda: ObjectId())
data = ListField(required=True)
class Parameter(EmbeddedDocument):
id = ObjectIdField(default=lambda: ObjectId())
name = StringField(required=True)
required = BooleanField(default=False)
type = StringField(required=False)
prompt = StringField()
class ApiDetails(EmbeddedDocument):
url = StringField(required=True)
requestType = StringField(
choices=[
"POST",
"GET",
"DELETE",
"PUT"],
required=True)
headers = ListField(default=[])
isJson = BooleanField(default=False)
jsonData = StringField(default="{}")
def get_headers(self):
headers = {}
for header in self.headers:
headers[header["headerKey"]] = header["headerValue"]
return headers
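# Illustrative shape of ApiDetails.headers as consumed by get_headers() above
# (values are made up):
#   headers = [{"headerKey": "Authorization", "headerValue": "Bearer <token>"}]
#   get_headers() -> {"Authorization": "Bearer <token>"}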
class Intent(Document):
name = StringField(max_length=100, required=True, unique=True)
userDefined = BooleanField(default=True)
intentId = StringField(required=True, unique=True)
apiTrigger = BooleanField(required=True)
apiDetails = EmbeddedDocumentField(ApiDetails)
speechResponse = StringField(required=True)
parameters = ListField(EmbeddedDocumentField(Parameter))
labeledSentences = EmbeddedDocumentListField(LabeledSentences)
trainingData = ListField(required=False)
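# Illustrative construction of an Intent document (all values are made up;
# the prompt and name strings are arbitrary examples, not a fixed vocabulary):
#   intent = Intent(
#       name="greet",
#       intentId="greet",
#       apiTrigger=False,
#       speechResponse="Hello {username}!",
#       parameters=[Parameter(name="username", required=True, prompt="Your name?")],
#   )
#   intent.save()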
|
import unittest
import optuna
class TestOptuna(unittest.TestCase):
def test_study(self):
def objective(trial):
x = trial.suggest_uniform('x', -1., 1.)
return x ** 2
n_trials = 20
study = optuna.create_study()
study.optimize(objective, n_trials=n_trials)
self.assertEqual(len(study.trials), n_trials)
|
import numpy as np
from numpy.testing import assert_array_equal, assert_equal
import pytest
from mne.utils import requires_sklearn
from mne.fixes import _get_args
from mne.decoding.search_light import SlidingEstimator, GeneralizingEstimator
from mne.decoding.transformer import Vectorizer
def make_data():
"""Make data."""
n_epochs, n_chan, n_time = 50, 32, 10
X = np.random.rand(n_epochs, n_chan, n_time)
y = np.arange(n_epochs) % 2
for ii in range(n_time):
coef = np.random.randn(n_chan)
X[y == 0, :, ii] += coef
X[y == 1, :, ii] -= coef
return X, y
@requires_sklearn
def test_search_light():
"""Test SlidingEstimator."""
from sklearn.linear_model import Ridge, LogisticRegression
from sklearn.pipeline import make_pipeline
from sklearn.metrics import roc_auc_score, make_scorer
with pytest.warns(None): # NumPy module import
from sklearn.ensemble import BaggingClassifier
from sklearn.base import is_classifier
logreg = LogisticRegression(solver='liblinear', multi_class='ovr',
random_state=0)
X, y = make_data()
n_epochs, _, n_time = X.shape
# init
pytest.raises(ValueError, SlidingEstimator, 'foo')
sl = SlidingEstimator(Ridge())
assert (not is_classifier(sl))
sl = SlidingEstimator(LogisticRegression(solver='liblinear'))
assert (is_classifier(sl))
# fit
assert_equal(sl.__repr__()[:18], '<SlidingEstimator(')
sl.fit(X, y)
assert_equal(sl.__repr__()[-28:], ', fitted with 10 estimators>')
pytest.raises(ValueError, sl.fit, X[1:], y)
pytest.raises(ValueError, sl.fit, X[:, :, 0], y)
sl.fit(X, y, sample_weight=np.ones_like(y))
# transforms
pytest.raises(ValueError, sl.predict, X[:, :, :2])
y_pred = sl.predict(X)
assert (y_pred.dtype == int)
assert_array_equal(y_pred.shape, [n_epochs, n_time])
y_proba = sl.predict_proba(X)
assert (y_proba.dtype == float)
assert_array_equal(y_proba.shape, [n_epochs, n_time, 2])
# score
score = sl.score(X, y)
assert_array_equal(score.shape, [n_time])
assert (np.sum(np.abs(score)) != 0)
assert (score.dtype == float)
sl = SlidingEstimator(logreg)
assert_equal(sl.scoring, None)
# Scoring method
for scoring in ['foo', 999]:
sl = SlidingEstimator(logreg, scoring=scoring)
sl.fit(X, y)
pytest.raises((ValueError, TypeError), sl.score, X, y)
# Check sklearn's roc_auc fix: scikit-learn/scikit-learn#6874
# -- 3 class problem
sl = SlidingEstimator(logreg, scoring='roc_auc')
y = np.arange(len(X)) % 3
sl.fit(X, y)
with pytest.raises(ValueError, match='for two-class'):
sl.score(X, y)
# But check that valid ones should work with new enough sklearn
if 'multi_class' in _get_args(roc_auc_score):
scoring = make_scorer(
roc_auc_score, needs_proba=True, multi_class='ovo')
sl = SlidingEstimator(logreg, scoring=scoring)
sl.fit(X, y)
sl.score(X, y) # smoke test
# -- 2 class problem not in [0, 1]
y = np.arange(len(X)) % 2 + 1
sl.fit(X, y)
score = sl.score(X, y)
assert_array_equal(score, [roc_auc_score(y - 1, _y_pred - 1)
for _y_pred in sl.decision_function(X).T])
y = np.arange(len(X)) % 2
# Cannot pass a metric as a scoring parameter
sl1 = SlidingEstimator(logreg, scoring=roc_auc_score)
sl1.fit(X, y)
pytest.raises(ValueError, sl1.score, X, y)
# Now use string as scoring
sl1 = SlidingEstimator(logreg, scoring='roc_auc')
sl1.fit(X, y)
rng = np.random.RandomState(0)
X = rng.randn(*X.shape) # randomize X to avoid AUCs in [0, 1]
score_sl = sl1.score(X, y)
assert_array_equal(score_sl.shape, [n_time])
assert (score_sl.dtype == float)
# Check that scoring was applied adequately
scoring = make_scorer(roc_auc_score, needs_threshold=True)
score_manual = [scoring(est, x, y) for est, x in zip(
sl1.estimators_, X.transpose(2, 0, 1))]
assert_array_equal(score_manual, score_sl)
# n_jobs
sl = SlidingEstimator(logreg, n_jobs=1, scoring='roc_auc')
score_1job = sl.fit(X, y).score(X, y)
sl.n_jobs = 2
score_njobs = sl.fit(X, y).score(X, y)
assert_array_equal(score_1job, score_njobs)
sl.predict(X)
# n_jobs > n_estimators
sl.fit(X[..., [0]], y)
sl.predict(X[..., [0]])
# pipeline
class _LogRegTransformer(LogisticRegression):
# XXX needs transformer in pipeline to get first proba only
def __init__(self):
super(_LogRegTransformer, self).__init__()
self.multi_class = 'ovr'
self.random_state = 0
self.solver = 'liblinear'
def transform(self, X):
return super(_LogRegTransformer, self).predict_proba(X)[..., 1]
pipe = make_pipeline(SlidingEstimator(_LogRegTransformer()),
logreg)
pipe.fit(X, y)
pipe.predict(X)
# n-dimensional feature space
X = np.random.rand(10, 3, 4, 2)
y = np.arange(10) % 2
y_preds = list()
for n_jobs in [1, 2]:
pipe = SlidingEstimator(
make_pipeline(Vectorizer(), logreg), n_jobs=n_jobs)
y_preds.append(pipe.fit(X, y).predict(X))
features_shape = pipe.estimators_[0].steps[0][1].features_shape_
assert_array_equal(features_shape, [3, 4])
assert_array_equal(y_preds[0], y_preds[1])
# Bagging classifiers
X = np.random.rand(10, 3, 4)
for n_jobs in (1, 2):
pipe = SlidingEstimator(BaggingClassifier(None, 2), n_jobs=n_jobs)
pipe.fit(X, y)
pipe.score(X, y)
assert (isinstance(pipe.estimators_[0], BaggingClassifier))
@requires_sklearn
def test_generalization_light():
"""Test GeneralizingEstimator."""
from sklearn.pipeline import make_pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
logreg = LogisticRegression(solver='liblinear', multi_class='ovr',
random_state=0)
X, y = make_data()
n_epochs, _, n_time = X.shape
# fit
gl = GeneralizingEstimator(logreg)
assert_equal(repr(gl)[:23], '<GeneralizingEstimator(')
gl.fit(X, y)
gl.fit(X, y, sample_weight=np.ones_like(y))
assert_equal(gl.__repr__()[-28:], ', fitted with 10 estimators>')
# transforms
y_pred = gl.predict(X)
assert_array_equal(y_pred.shape, [n_epochs, n_time, n_time])
assert (y_pred.dtype == int)
y_proba = gl.predict_proba(X)
assert (y_proba.dtype == float)
assert_array_equal(y_proba.shape, [n_epochs, n_time, n_time, 2])
# transform to different datasize
y_pred = gl.predict(X[:, :, :2])
assert_array_equal(y_pred.shape, [n_epochs, n_time, 2])
# score
score = gl.score(X[:, :, :3], y)
assert_array_equal(score.shape, [n_time, 3])
assert (np.sum(np.abs(score)) != 0)
assert (score.dtype == float)
gl = GeneralizingEstimator(logreg, scoring='roc_auc')
gl.fit(X, y)
score = gl.score(X, y)
auc = roc_auc_score(y, gl.estimators_[0].predict_proba(X[..., 0])[..., 1])
assert_equal(score[0, 0], auc)
for scoring in ['foo', 999]:
gl = GeneralizingEstimator(logreg, scoring=scoring)
gl.fit(X, y)
pytest.raises((ValueError, TypeError), gl.score, X, y)
# Check sklearn's roc_auc fix: scikit-learn/scikit-learn#6874
# -- 3 class problem
gl = GeneralizingEstimator(logreg, scoring='roc_auc')
y = np.arange(len(X)) % 3
gl.fit(X, y)
pytest.raises(ValueError, gl.score, X, y)
# -- 2 class problem not in [0, 1]
y = np.arange(len(X)) % 2 + 1
gl.fit(X, y)
score = gl.score(X, y)
manual_score = [[roc_auc_score(y - 1, _y_pred) for _y_pred in _y_preds]
for _y_preds in gl.decision_function(X).transpose(1, 2, 0)]
assert_array_equal(score, manual_score)
# n_jobs
gl = GeneralizingEstimator(logreg, n_jobs=2)
gl.fit(X, y)
y_pred = gl.predict(X)
assert_array_equal(y_pred.shape, [n_epochs, n_time, n_time])
score = gl.score(X, y)
assert_array_equal(score.shape, [n_time, n_time])
# n_jobs > n_estimators
gl.fit(X[..., [0]], y)
gl.predict(X[..., [0]])
# n-dimensional feature space
X = np.random.rand(10, 3, 4, 2)
y = np.arange(10) % 2
y_preds = list()
for n_jobs in [1, 2]:
pipe = GeneralizingEstimator(
make_pipeline(Vectorizer(), logreg), n_jobs=n_jobs)
y_preds.append(pipe.fit(X, y).predict(X))
features_shape = pipe.estimators_[0].steps[0][1].features_shape_
assert_array_equal(features_shape, [3, 4])
assert_array_equal(y_preds[0], y_preds[1])
@requires_sklearn
def test_cross_val_predict():
"""Test cross_val_predict with predict_proba."""
from sklearn.linear_model import LinearRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.base import BaseEstimator, clone
from sklearn.model_selection import cross_val_predict
rng = np.random.RandomState(42)
X = rng.randn(10, 1, 3)
y = rng.randint(0, 2, 10)
estimator = SlidingEstimator(LinearRegression())
cross_val_predict(estimator, X, y, cv=2)
class Classifier(BaseEstimator):
"""Moch class that does not have classes_ attribute."""
def __init__(self):
self.base_estimator = LinearDiscriminantAnalysis()
def fit(self, X, y):
self.estimator_ = clone(self.base_estimator).fit(X, y)
return self
def predict_proba(self, X):
return self.estimator_.predict_proba(X)
with pytest.raises(AttributeError, match="classes_ attribute"):
estimator = SlidingEstimator(Classifier())
cross_val_predict(estimator, X, y, method='predict_proba', cv=2)
estimator = SlidingEstimator(LinearDiscriminantAnalysis())
cross_val_predict(estimator, X, y, method='predict_proba', cv=2)
|
import copy
import unittest
from absl import flags
from perfkitbenchmarker import edw_service
from perfkitbenchmarker.configs import benchmark_config_spec
from tests import pkb_common_test_case
_CLUSTER_PARAMETER_GROUP = 'fake_redshift_cluster_parameter_group'
_CLUSTER_SUBNET_GROUP = 'fake_redshift_cluster_subnet_group'
_PKB_CLUSTER = 'pkb-cluster'
_PKB_CLUSTER_DATABASE = 'pkb-database'
_REDSHIFT_NODE_TYPE = 'dc2.large'
_USERNAME = 'pkb-username'
_PASSWORD = 'pkb-password'
_TEST_RUN_URI = 'fakeru'
_AWS_ZONE_US_EAST_1A = 'us-east-1a'
_BASE_REDSHIFT_SPEC = {
'cluster_identifier': _PKB_CLUSTER,
'db': _PKB_CLUSTER_DATABASE,
'user': _USERNAME,
'password': _PASSWORD,
'node_type': _REDSHIFT_NODE_TYPE,
'node_count': 1
}
FLAGS = flags.FLAGS
class ClientVm(object):
"""A fake VM class that can proxies a remote command to execute query."""
def RemoteCommand(self, command):
"""Returns sample output for executing a query."""
pass
class PreparedClientVm(object):
def Install(self, package_name):
if package_name != 'pip':
raise RuntimeError
def RemoteCommand(self, command):
pass
class FakeEdwService(edw_service.EdwService):
"""A fake Edw Service class."""
def _Create(self):
pass
def _Delete(self):
pass
class EdwServiceTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(EdwServiceTest, self).setUp()
FLAGS.run_uri = _TEST_RUN_URI
FLAGS.zones = [_AWS_ZONE_US_EAST_1A]
def testIsUserManaged(self):
kwargs = copy.copy({
'cluster_identifier': _PKB_CLUSTER,
'db': _PKB_CLUSTER_DATABASE
})
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
edw_local = FakeEdwService(spec)
self.assertTrue(edw_local.IsUserManaged(spec))
def testIsPkbManaged(self):
kwargs = copy.copy({'db': _PKB_CLUSTER_DATABASE})
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
edw_local = FakeEdwService(spec)
self.assertFalse(edw_local.IsUserManaged(spec))
def testUserManagedGetClusterIdentifier(self):
kwargs = copy.copy({
'cluster_identifier': _PKB_CLUSTER,
'db': _PKB_CLUSTER_DATABASE
})
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
edw_local = FakeEdwService(spec)
self.assertEqual(_PKB_CLUSTER, edw_local.GetClusterIdentifier(spec))
self.assertEqual(_PKB_CLUSTER, edw_local.cluster_identifier)
def testPkbManagedGetClusterIdentifier(self):
kwargs = copy.copy({'db': _PKB_CLUSTER_DATABASE})
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
edw_local = FakeEdwService(spec)
self.assertEqual('pkb-' + FLAGS.run_uri,
edw_local.GetClusterIdentifier(spec))
self.assertEqual('pkb-' + FLAGS.run_uri, edw_local.cluster_identifier)
if __name__ == '__main__':
unittest.main()
|
from hatasmota.discovery import get_status_sensor_entities
import pytest
from homeassistant import config_entries
from homeassistant.components.tasmota.const import (
CONF_DISCOVERY_PREFIX,
DEFAULT_PREFIX,
DOMAIN,
)
from tests.async_mock import patch
from tests.common import (
MockConfigEntry,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.light.conftest import mock_light_profiles # noqa
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
@pytest.fixture(autouse=True)
def disable_debounce():
"""Set MQTT debounce timer to zero."""
with patch("hatasmota.mqtt.DEBOUNCE_TIMEOUT", 0):
yield
@pytest.fixture
def status_sensor_disabled():
"""Fixture to allow overriding MQTT config."""
return True
@pytest.fixture(autouse=True)
def disable_status_sensor(status_sensor_disabled):
"""Disable Tasmota status sensor."""
wraps = None if status_sensor_disabled else get_status_sensor_entities
with patch("hatasmota.discovery.get_status_sensor_entities", wraps=wraps):
yield
async def setup_tasmota_helper(hass):
"""Set up Tasmota."""
hass.config.components.add("tasmota")
entry = MockConfigEntry(
connection_class=config_entries.CONN_CLASS_LOCAL_PUSH,
data={CONF_DISCOVERY_PREFIX: DEFAULT_PREFIX},
domain=DOMAIN,
title="Tasmota",
)
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert "tasmota" in hass.config.components
@pytest.fixture
async def setup_tasmota(hass):
"""Set up Tasmota."""
await setup_tasmota_helper(hass)
|
import contextlib
import functools
import inspect
import itertools
import os
import pprint
import sys
try:
import _thread
except ImportError:
import thread as _thread
from coverage.backward import reprlib, StringIO
from coverage.misc import isolate_module
os = isolate_module(os)
# When debugging, it can be helpful to force some options, especially when
# debugging the configuration mechanisms you usually use to control debugging!
# This is a list of forced debugging options.
FORCED_DEBUG = []
FORCED_DEBUG_FILE = None
class DebugControl(object):
"""Control and output for debugging."""
show_repr_attr = False # For SimpleReprMixin
def __init__(self, options, output):
"""Configure the options and output file for debugging."""
self.options = list(options) + FORCED_DEBUG
self.suppress_callers = False
filters = []
if self.should('pid'):
filters.append(add_pid_and_tid)
self.output = DebugOutputFile.get_one(
output,
show_process=self.should('process'),
filters=filters,
)
self.raw_output = self.output.outfile
def __repr__(self):
return "<DebugControl options=%r raw_output=%r>" % (self.options, self.raw_output)
def should(self, option):
"""Decide whether to output debug information in category `option`."""
if option == "callers" and self.suppress_callers:
return False
return (option in self.options)
@contextlib.contextmanager
def without_callers(self):
"""A context manager to prevent call stacks from being logged."""
old = self.suppress_callers
self.suppress_callers = True
try:
yield
finally:
self.suppress_callers = old
def write(self, msg):
"""Write a line of debug output.
`msg` is the line to write. A newline will be appended.
"""
self.output.write(msg+"\n")
if self.should('self'):
caller_self = inspect.stack()[1][0].f_locals.get('self')
if caller_self is not None:
self.output.write("self: {!r}\n".format(caller_self))
if self.should('callers'):
dump_stack_frames(out=self.output, skip=1)
self.output.flush()
class DebugControlString(DebugControl):
"""A `DebugControl` that writes to a StringIO, for testing."""
def __init__(self, options):
super(DebugControlString, self).__init__(options, StringIO())
def get_output(self):
"""Get the output text from the `DebugControl`."""
return self.raw_output.getvalue()
class NoDebugging(object):
"""A replacement for DebugControl that will never try to do anything."""
def should(self, option): # pylint: disable=unused-argument
"""Should we write debug messages? Never."""
return False
def info_header(label):
"""Make a nice header string."""
return "--{:-<60s}".format(" "+label+" ")
def info_formatter(info):
"""Produce a sequence of formatted lines from info.
`info` is a sequence of pairs (label, data). The produced lines are
nicely formatted, ready to print.
"""
info = list(info)
if not info:
return
label_len = 30
assert all(len(l) < label_len for l, _ in info)
for label, data in info:
if data == []:
data = "-none-"
if isinstance(data, (list, set, tuple)):
prefix = "%*s:" % (label_len, label)
for e in data:
yield "%*s %s" % (label_len+1, prefix, e)
prefix = ""
else:
yield "%*s: %s" % (label_len, label, data)
def write_formatted_info(writer, header, info):
"""Write a sequence of (label,data) pairs nicely."""
writer.write(info_header(header))
for line in info_formatter(info):
writer.write(" %s" % line)
def short_stack(limit=None, skip=0):
"""Return a string summarizing the call stack.
The string is multi-line, with one line per stack frame. Each line shows
the function name, the file name, and the line number:
...
start_import_stop : /Users/ned/coverage/trunk/tests/coveragetest.py @95
import_local_file : /Users/ned/coverage/trunk/tests/coveragetest.py @81
import_local_file : /Users/ned/coverage/trunk/coverage/backward.py @159
...
`limit` is the number of frames to include, defaulting to all of them.
`skip` is the number of frames to skip, so that debugging functions can
call this and not be included in the result.
"""
stack = inspect.stack()[limit:skip:-1]
return "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in stack)
def dump_stack_frames(limit=None, out=None, skip=0):
"""Print a summary of the stack to stdout, or someplace else."""
out = out or sys.stdout
out.write(short_stack(limit=limit, skip=skip+1))
out.write("\n")
def clipped_repr(text, numchars=50):
"""`repr(text)`, but limited to `numchars`."""
r = reprlib.Repr()
r.maxstring = numchars
return r.repr(text)
def short_id(id64):
"""Given a 64-bit id, make a shorter 16-bit one."""
id16 = 0
for offset in range(0, 64, 16):
id16 ^= id64 >> offset
return id16 & 0xFFFF
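# Illustrative example (the id value is made up): the four 16-bit chunks of
# the 64-bit id are XORed together, e.g. short_id(0x0001000200030004) == 0x0004.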
def add_pid_and_tid(text):
"""A filter to add pid and tid to debug messages."""
# Thread ids are useful, but too long. Make a shorter one.
tid = "{:04x}".format(short_id(_thread.get_ident()))
text = "{:5d}.{}: {}".format(os.getpid(), tid, text)
return text
class SimpleReprMixin(object):
"""A mixin implementing a simple __repr__."""
simple_repr_ignore = ['simple_repr_ignore', '$coverage.object_id']
def __repr__(self):
show_attrs = (
(k, v) for k, v in self.__dict__.items()
if getattr(v, "show_repr_attr", True)
and not callable(v)
and k not in self.simple_repr_ignore
)
return "<{klass} @0x{id:x} {attrs}>".format(
klass=self.__class__.__name__,
id=id(self),
attrs=" ".join("{}={!r}".format(k, v) for k, v in show_attrs),
)
def simplify(v): # pragma: debugging
"""Turn things which are nearly dict/list/etc into dict/list/etc."""
if isinstance(v, dict):
return {k:simplify(vv) for k, vv in v.items()}
elif isinstance(v, (list, tuple)):
return type(v)(simplify(vv) for vv in v)
elif hasattr(v, "__dict__"):
return simplify({'.'+k: v for k, v in v.__dict__.items()})
else:
return v
def pp(v): # pragma: debugging
"""Debug helper to pretty-print data, including SimpleNamespace objects."""
# Might not be needed in 3.9+
pprint.pprint(simplify(v))
def filter_text(text, filters):
"""Run `text` through a series of filters.
`filters` is a list of functions. Each takes a string and returns a
string. Each is run in turn.
Returns: the final string that results after all of the filters have
run.
"""
clean_text = text.rstrip()
ending = text[len(clean_text):]
text = clean_text
for fn in filters:
lines = []
for line in text.splitlines():
lines.extend(fn(line).splitlines())
text = "\n".join(lines)
return text + ending
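# Illustrative example (the filter is made up): each filter is applied line
# by line and the original trailing newline is preserved.
#   filter_text("spam\nham\n", [str.upper])  # -> "SPAM\nHAM\n"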
class CwdTracker(object): # pragma: debugging
"""A class to add cwd info to debug messages."""
def __init__(self):
self.cwd = None
def filter(self, text):
"""Add a cwd message for each new cwd."""
cwd = os.getcwd()
if cwd != self.cwd:
text = "cwd is now {!r}\n".format(cwd) + text
self.cwd = cwd
return text
class DebugOutputFile(object): # pragma: debugging
"""A file-like object that includes pid and cwd information."""
def __init__(self, outfile, show_process, filters):
self.outfile = outfile
self.show_process = show_process
self.filters = list(filters)
if self.show_process:
self.filters.insert(0, CwdTracker().filter)
self.write("New process: executable: %r\n" % (sys.executable,))
self.write("New process: cmd: %r\n" % (getattr(sys, 'argv', None),))
if hasattr(os, 'getppid'):
self.write("New process: pid: %r, parent pid: %r\n" % (os.getpid(), os.getppid()))
SYS_MOD_NAME = '$coverage.debug.DebugOutputFile.the_one'
@classmethod
def get_one(cls, fileobj=None, show_process=True, filters=(), interim=False):
"""Get a DebugOutputFile.
If `fileobj` is provided, then a new DebugOutputFile is made with it.
If `fileobj` isn't provided, then a file is chosen
(COVERAGE_DEBUG_FILE, or stderr), and a process-wide singleton
DebugOutputFile is made.
        `show_process` controls whether the debug file adds process-level
        information. `filters` is a list of text filters to apply to the
        stream, to annotate with pids, etc.
If `interim` is true, then a future `get_one` can replace this one.
"""
if fileobj is not None:
# Make DebugOutputFile around the fileobj passed.
return cls(fileobj, show_process, filters)
# Because of the way igor.py deletes and re-imports modules,
# this class can be defined more than once. But we really want
# a process-wide singleton. So stash it in sys.modules instead of
# on a class attribute. Yes, this is aggressively gross.
the_one, is_interim = sys.modules.get(cls.SYS_MOD_NAME, (None, True))
if the_one is None or is_interim:
if fileobj is None:
debug_file_name = os.environ.get("COVERAGE_DEBUG_FILE", FORCED_DEBUG_FILE)
if debug_file_name:
fileobj = open(debug_file_name, "a")
else:
fileobj = sys.stderr
the_one = cls(fileobj, show_process, filters)
sys.modules[cls.SYS_MOD_NAME] = (the_one, interim)
return the_one
def write(self, text):
"""Just like file.write, but filter through all our filters."""
self.outfile.write(filter_text(text, self.filters))
self.outfile.flush()
def flush(self):
"""Flush our file."""
self.outfile.flush()
def log(msg, stack=False): # pragma: debugging
"""Write a log message as forcefully as possible."""
out = DebugOutputFile.get_one(interim=True)
out.write(msg+"\n")
if stack:
dump_stack_frames(out=out, skip=1)
def decorate_methods(decorator, butnot=(), private=False): # pragma: debugging
"""A class decorator to apply a decorator to methods."""
def _decorator(cls):
for name, meth in inspect.getmembers(cls, inspect.isroutine):
if name not in cls.__dict__:
continue
if name != "__init__":
if not private and name.startswith("_"):
continue
if name in butnot:
continue
setattr(cls, name, decorator(meth))
return cls
return _decorator
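# Illustrative usage (the class name is made up): log every public method
# call except flush(), using the show_calls decorator defined further below.
#   @decorate_methods(show_calls(show_return=True), butnot=['flush'])
#   class SomeWriter(object):
#       ...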
def break_in_pudb(func): # pragma: debugging
"""A function decorator to stop in the debugger for each call."""
@functools.wraps(func)
def _wrapper(*args, **kwargs):
import pudb
sys.stdout = sys.__stdout__
pudb.set_trace()
return func(*args, **kwargs)
return _wrapper
OBJ_IDS = itertools.count()
CALLS = itertools.count()
OBJ_ID_ATTR = "$coverage.object_id"
def show_calls(show_args=True, show_stack=False, show_return=False): # pragma: debugging
"""A method decorator to debug-log each call to the function."""
def _decorator(func):
@functools.wraps(func)
def _wrapper(self, *args, **kwargs):
oid = getattr(self, OBJ_ID_ATTR, None)
if oid is None:
oid = "{:08d} {:04d}".format(os.getpid(), next(OBJ_IDS))
setattr(self, OBJ_ID_ATTR, oid)
extra = ""
if show_args:
eargs = ", ".join(map(repr, args))
ekwargs = ", ".join("{}={!r}".format(*item) for item in kwargs.items())
extra += "("
extra += eargs
if eargs and ekwargs:
extra += ", "
extra += ekwargs
extra += ")"
if show_stack:
extra += " @ "
extra += "; ".join(_clean_stack_line(l) for l in short_stack().splitlines())
callid = next(CALLS)
msg = "{} {:04d} {}{}\n".format(oid, callid, func.__name__, extra)
DebugOutputFile.get_one(interim=True).write(msg)
ret = func(self, *args, **kwargs)
if show_return:
msg = "{} {:04d} {} return {!r}\n".format(oid, callid, func.__name__, ret)
DebugOutputFile.get_one(interim=True).write(msg)
return ret
return _wrapper
return _decorator
def _clean_stack_line(s): # pragma: debugging
"""Simplify some paths in a stack trace, for compactness."""
s = s.strip()
s = s.replace(os.path.dirname(__file__) + '/', '')
s = s.replace(os.path.dirname(os.__file__) + '/', '')
s = s.replace(sys.prefix + '/', '')
return s
|
import arrow
def test_build_certificate_authority():
from lemur.plugins.lemur_cryptography.plugin import build_certificate_authority
options = {
"key_type": "RSA2048",
"country": "US",
"state": "CA",
"location": "Example place",
"organization": "Example, Inc.",
"organizational_unit": "Example Unit",
"common_name": "Example ROOT",
"validity_start": arrow.get("2016-12-01").datetime,
"validity_end": arrow.get("2016-12-02").datetime,
"first_serial": 1,
"serial_number": 1,
"owner": "[email protected]",
}
cert_pem, private_key_pem, chain_cert_pem = build_certificate_authority(options)
assert cert_pem
assert private_key_pem
assert chain_cert_pem == ""
def test_build_intermediate_certificate_authority(authority):
from lemur.plugins.lemur_cryptography.plugin import build_certificate_authority
options = {
"key_type": "RSA2048",
"country": "US",
"state": "CA",
"location": "Example place",
"organization": "Example, Inc.",
"organizational_unit": "Example Unit",
"common_name": "Example INTERMEDIATE",
"validity_start": arrow.get("2016-12-01").datetime,
"validity_end": arrow.get("2016-12-02").datetime,
"first_serial": 1,
"serial_number": 1,
"owner": "[email protected]",
"parent": authority
}
cert_pem, private_key_pem, chain_cert_pem = build_certificate_authority(options)
assert cert_pem
assert private_key_pem
assert chain_cert_pem == authority.authority_certificate.body
def test_issue_certificate(authority):
from lemur.tests.vectors import CSR_STR
from lemur.plugins.lemur_cryptography.plugin import issue_certificate
options = {
"common_name": "Example.com",
"authority": authority,
"validity_start": arrow.get("2016-12-01").datetime,
"validity_end": arrow.get("2016-12-02").datetime,
}
cert_pem, chain_cert_pem = issue_certificate(CSR_STR, options)
assert cert_pem
assert chain_cert_pem
|
from homeassistant.components.cover import (
ATTR_POSITION,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPENING,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from . import BleBoxEntity, create_blebox_entities
from .const import BLEBOX_TO_HASS_COVER_STATES, BLEBOX_TO_HASS_DEVICE_CLASSES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox entry."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxCoverEntity, "covers"
)
class BleBoxCoverEntity(BleBoxEntity, CoverEntity):
"""Representation of a BleBox cover feature."""
@property
def state(self):
"""Return the equivalent HA cover state."""
return BLEBOX_TO_HASS_COVER_STATES[self._feature.state]
@property
def device_class(self):
"""Return the device class."""
return BLEBOX_TO_HASS_DEVICE_CLASSES[self._feature.device_class]
@property
def supported_features(self):
"""Return the supported cover features."""
position = SUPPORT_SET_POSITION if self._feature.is_slider else 0
stop = SUPPORT_STOP if self._feature.has_stop else 0
return position | stop | SUPPORT_OPEN | SUPPORT_CLOSE
@property
def current_cover_position(self):
"""Return the current cover position."""
position = self._feature.current
if position == -1: # possible for shutterBox
return None
return None if position is None else 100 - position
@property
def is_opening(self):
"""Return whether cover is opening."""
return self._is_state(STATE_OPENING)
@property
def is_closing(self):
"""Return whether cover is closing."""
return self._is_state(STATE_CLOSING)
@property
def is_closed(self):
"""Return whether cover is closed."""
return self._is_state(STATE_CLOSED)
async def async_open_cover(self, **kwargs):
"""Open the cover position."""
await self._feature.async_open()
async def async_close_cover(self, **kwargs):
"""Close the cover position."""
await self._feature.async_close()
async def async_set_cover_position(self, **kwargs):
"""Set the cover position."""
position = kwargs[ATTR_POSITION]
await self._feature.async_set_position(100 - position)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self._feature.async_stop()
def _is_state(self, state_name):
value = self.state
return None if value is None else value == state_name
|
from __future__ import division
import chainer
import numpy as np
from PIL import Image
try:
import cv2
_cv2_available = True
except ImportError:
_cv2_available = False
def _handle_four_channel_image(img, alpha):
if alpha is None:
raise ValueError(
'An RGBA image is read by chainercv.utils.read_image, '
'but the `alpha` option is not set. Please set the option so that '
'the function knows how to handle RGBA images.'
)
elif alpha == 'ignore':
img = img[:, :, :3]
elif alpha == 'blend_with_white':
color_channel = img[:, :, :3]
alpha_channel = img[:, :, 3:] / 255
img = (color_channel * alpha_channel +
255 * np.ones_like(color_channel) * (1 - alpha_channel))
elif alpha == 'blend_with_black':
color_channel = img[:, :, :3]
alpha_channel = img[:, :, 3:] / 255
img = color_channel * alpha_channel
return img
def _read_image_cv2(file, dtype, color, alpha):
if color:
if alpha is None:
color_option = cv2.IMREAD_COLOR
else:
# Images with alpha channel are read as (H, W, 4) by cv2.imread.
# Images without alpha channel are read as (H, W, 3).
color_option = cv2.IMREAD_UNCHANGED
else:
color_option = cv2.IMREAD_GRAYSCALE
if hasattr(file, 'read'):
b = np.array(bytearray(file.read()))
img = cv2.imdecode(b, color_option)
else:
img = cv2.imread(file, color_option)
if img.ndim == 2:
# reshape (H, W) -> (1, H, W)
return img[np.newaxis].astype(dtype)
else:
# alpha channel is included
if img.shape[-1] == 4:
img = _handle_four_channel_image(img, alpha)
img = img[:, :, ::-1] # BGR -> RGB
img = img.transpose((2, 0, 1)) # HWC -> CHW
return img.astype(dtype)
def _read_image_pil(file, dtype, color, alpha):
f = Image.open(file)
try:
if color:
if f.mode == 'RGBA':
img = f.convert('RGBA')
else:
img = f.convert('RGB')
else:
img = f.convert('L')
img = np.array(img, dtype=dtype)
if img.shape[-1] == 4:
img = _handle_four_channel_image(
img, alpha).astype(dtype, copy=False)
finally:
if hasattr(f, 'close'):
f.close()
if img.ndim == 2:
# reshape (H, W) -> (1, H, W)
return img[np.newaxis]
else:
# transpose (H, W, C) -> (C, H, W)
return img.transpose((2, 0, 1))
def read_image(file, dtype=np.float32, color=True, alpha=None):
"""Read an image from a file.
    This function reads an image from a given file. The image is in CHW format and
the range of its value is :math:`[0, 255]`. If :obj:`color = True`, the
order of the channels is RGB.
The backend used by :func:`read_image` is configured by
:obj:`chainer.global_config.cv_read_image_backend`.
Two backends are supported: "cv2" and "PIL".
If this is :obj:`None`, "cv2" is used whenever "cv2" is installed,
and "PIL" is used when "cv2" is not installed.
Args:
file (string or file-like object): A path of image file or
a file-like object of image.
dtype: The type of array. The default value is :obj:`~numpy.float32`.
color (bool): This option determines the number of channels.
If :obj:`True`, the number of channels is three. In this case,
the order of the channels is RGB. This is the default behaviour.
If :obj:`False`, this function returns a grayscale image.
alpha (None or {'ignore', 'blend_with_white', 'blend_with_black'}): \
Choose how RGBA images are handled. By default, an error is raised.
Here are the other possible behaviors:
* `'ignore'`: Ignore alpha channel.
* `'blend_with_white'`: Blend RGB image multiplied by alpha on \
a white image.
* `'blend_with_black'`: Blend RGB image multiplied by alpha on \
a black image.
Returns:
~numpy.ndarray: An image.
"""
if chainer.config.cv_read_image_backend is None:
if _cv2_available:
return _read_image_cv2(file, dtype, color, alpha)
else:
return _read_image_pil(file, dtype, color, alpha)
elif chainer.config.cv_read_image_backend == 'cv2':
if not _cv2_available:
raise ValueError('cv2 is not installed even though '
'chainer.config.cv_read_image_backend == \'cv2\'')
return _read_image_cv2(file, dtype, color, alpha)
elif chainer.config.cv_read_image_backend == 'PIL':
return _read_image_pil(file, dtype, color, alpha)
else:
raise ValueError('chainer.config.cv_read_image_backend should be '
'either "cv2" or "PIL".')
|
from __future__ import absolute_import
from __future__ import unicode_literals
import textwrap
import unittest
import warnings
import fs
import requests
from instalooter.cli import main
from instalooter.batch import BatchRunner
from instalooter.looters import InstaLooter
try:
CONNECTION_FAILURE = not requests.get("https://instagr.am/instagram").ok
except requests.exceptions.ConnectionError:
CONNECTION_FAILURE = True
class TestBatchRunner(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.session = requests.Session()
@classmethod
def tearDownClass(cls):
cls.session.close()
def setUp(self):
self.destfs = fs.open_fs("temp://")
self.tmpdir = self.destfs.getsyspath("/")
def tearDown(self):
self.destfs.close()
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_cli(self):
cfg = textwrap.dedent(
"""
[my job]
num-to-dl = 3
quiet = true
users:
therock: {self.tmpdir}
nintendo: {self.tmpdir}
"""
).format(self=self)
with self.destfs.open('batch.ini', 'w') as batch_file:
batch_file.write(cfg)
retcode = main(["batch", self.destfs.getsyspath('batch.ini')])
self.assertEqual(retcode, 0)
self.assertGreaterEqual(
len(list(self.destfs.filterdir("/", ["*.jpg"]))), 6)
def setUpModule():
warnings.simplefilter('ignore')
def tearDownModule():
warnings.simplefilter(warnings.defaultaction)
|
import numpy as np
import pytest
import xarray as xr
from xarray.core.npcompat import IS_NEP18_ACTIVE
from . import has_dask
try:
from dask.array import from_array as dask_from_array
except ImportError:
dask_from_array = lambda x: x
try:
import pint
unit_registry = pint.UnitRegistry(force_ndarray_like=True)
def quantity(x):
return unit_registry.Quantity(x, "m")
has_pint = True
except ImportError:
def quantity(x):
return x
has_pint = False
def test_allclose_regression():
x = xr.DataArray(1.01)
y = xr.DataArray(1.02)
xr.testing.assert_allclose(x, y, atol=0.01)
@pytest.mark.parametrize(
"obj1,obj2",
(
pytest.param(
xr.Variable("x", [1e-17, 2]), xr.Variable("x", [0, 3]), id="Variable"
),
pytest.param(
xr.DataArray([1e-17, 2], dims="x"),
xr.DataArray([0, 3], dims="x"),
id="DataArray",
),
pytest.param(
xr.Dataset({"a": ("x", [1e-17, 2]), "b": ("y", [-2e-18, 2])}),
xr.Dataset({"a": ("x", [0, 2]), "b": ("y", [0, 1])}),
id="Dataset",
),
),
)
def test_assert_allclose(obj1, obj2):
with pytest.raises(AssertionError):
xr.testing.assert_allclose(obj1, obj2)
@pytest.mark.filterwarnings("error")
@pytest.mark.parametrize(
"duckarray",
(
pytest.param(np.array, id="numpy"),
pytest.param(
dask_from_array,
id="dask",
marks=pytest.mark.skipif(not has_dask, reason="requires dask"),
),
pytest.param(
quantity,
id="pint",
marks=pytest.mark.skipif(not has_pint, reason="requires pint"),
),
),
)
@pytest.mark.parametrize(
["obj1", "obj2"],
(
pytest.param([1e-10, 2], [0.0, 2.0], id="both arrays"),
pytest.param([1e-17, 2], 0.0, id="second scalar"),
pytest.param(0.0, [1e-17, 2], id="first scalar"),
),
)
def test_assert_duckarray_equal_failing(duckarray, obj1, obj2):
# TODO: actually check the repr
a = duckarray(obj1)
b = duckarray(obj2)
with pytest.raises(AssertionError):
xr.testing.assert_duckarray_equal(a, b)
@pytest.mark.filterwarnings("error")
@pytest.mark.parametrize(
"duckarray",
(
pytest.param(
np.array,
id="numpy",
marks=pytest.mark.skipif(
not IS_NEP18_ACTIVE,
reason="NUMPY_EXPERIMENTAL_ARRAY_FUNCTION is not enabled",
),
),
pytest.param(
dask_from_array,
id="dask",
marks=pytest.mark.skipif(not has_dask, reason="requires dask"),
),
pytest.param(
quantity,
id="pint",
marks=pytest.mark.skipif(not has_pint, reason="requires pint"),
),
),
)
@pytest.mark.parametrize(
["obj1", "obj2"],
(
pytest.param([0, 2], [0.0, 2.0], id="both arrays"),
pytest.param([0, 0], 0.0, id="second scalar"),
pytest.param(0.0, [0, 0], id="first scalar"),
),
)
def test_assert_duckarray_equal(duckarray, obj1, obj2):
a = duckarray(obj1)
b = duckarray(obj2)
xr.testing.assert_duckarray_equal(a, b)
|
import voluptuous as vol
from homeassistant.components.light import LightEntity
from . import DOMAIN, PLATFORM_SCHEMA, XBeeDigitalOut, XBeeDigitalOutConfig
CONF_ON_STATE = "on_state"
DEFAULT_ON_STATE = "high"
STATES = ["high", "low"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_ON_STATE, default=DEFAULT_ON_STATE): vol.In(STATES)}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create and add an entity based on the configuration."""
zigbee_device = hass.data[DOMAIN]
add_entities([XBeeLight(XBeeDigitalOutConfig(config), zigbee_device)])
class XBeeLight(XBeeDigitalOut, LightEntity):
"""Use XBeeDigitalOut as light."""
|
def get_project_stats(project):
"""Return stats for project."""
return [
{
"language": str(tup.language),
"code": tup.language.code,
"total": tup.all,
"translated": tup.translated,
"translated_percent": tup.translated_percent,
"total_words": tup.all_words,
"translated_words": tup.translated_words,
"translated_words_percent": tup.translated_words_percent,
"total_chars": tup.all_chars,
"translated_chars": tup.translated_chars,
"translated_chars_percent": tup.translated_chars_percent,
}
for tup in project.stats.get_language_stats()
]
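# Illustrative shape of one entry returned above (numbers are made up):
#   {"language": "Czech", "code": "cs", "total": 100, "translated": 80,
#    "translated_percent": 80.0, "total_words": 500, ...}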
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from nginx import NginxCollector
##########################################################################
class TestNginxCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NginxCollector', {})
self.collector = NginxCollector(config, None)
def test_import(self):
self.assertTrue(NginxCollector)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_should_work_with_real_data(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockMimeMessage = Mock(**{'gettype.return_value': 'text/html'})
mockResponse = Mock(**{
'readlines.return_value': self.getFixture('status').readlines(),
'info.return_value': mockMimeMessage,
}
)
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=mockResponse))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'active_connections': 3,
'conn_accepted': 396396,
'conn_handled': 396396,
'req_handled': 396396,
'act_reads': 2,
'act_writes': 1,
'act_waits': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany([publish_mock,
publish_gauge_mock,
publish_counter_mock
], metrics)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_plus_should_work_with_real_data(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockMimeMessage = Mock(**{'gettype.return_value': 'application/json'})
mockResponse = Mock(**{
'readlines.return_value':
self.getFixture('plus_status').readlines(),
'info.return_value': mockMimeMessage,
'read.return_value': self.getFixture('plus_status').read(),
}
)
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=mockResponse))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'conn.active': 11,
'conn.accepted': 25512010,
'conn.dropped': 0,
'conn.idle': 30225,
'req.current': 11,
'req.total': 1061989107,
'ssl.handshakes': 0,
'ssl.session_reuses': 0,
'ssl.handshakes_failed': 0,
'servers.www.processing': 10,
'servers.www.received': 1869206012545,
'servers.www.discarded': 2433140,
'servers.www.requests': 1061980757,
'servers.www.sent': 169151943651,
'servers.app_com.processing': 3,
'servers.app_com.received': 100,
'servers.app_com.discarded': 5,
'servers.app_com.requests': 25,
'servers.app_com.sent': 293752,
'servers.www.responses.1xx': 0,
'servers.www.responses.2xx': 1058969631,
'servers.www.responses.3xx': 363,
'servers.www.responses.4xx': 396193,
'servers.www.responses.5xx': 181420,
'servers.www.responses.total': 1059547607,
'servers.app_com.responses.1xx': 0,
'servers.app_com.responses.2xx': 100,
'servers.app_com.responses.3xx': 3,
'servers.app_com.responses.4xx': 4,
'servers.app_com.responses.5xx': 0,
'servers.app_com.responses.total': 107,
'upstreams.www-upstream.keepalive': 225,
'upstreams.www-upstream.peers.1_1_1_94-8080.active': 1,
'upstreams.www-upstream.peers.1_1_1_94-8080.downtime': 0,
'upstreams.www-upstream.peers.1_1_1_94-8080.fails': 1534,
'upstreams.www-upstream.peers.1_1_1_94-8080.max_conns': 540,
'upstreams.www-upstream.peers.1_1_1_94-8080.received': 1301376667,
'upstreams.www-upstream.peers.1_1_1_94-8080.requests': 106379240,
'upstreams.www-upstream.peers.1_1_1_94-8080.sent': 188216479779,
'upstreams.www-upstream.peers.1_1_1_94-8080.unavail': 0,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.1xx': 0,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.2xx':
106277550,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.3xx': 33,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.4xx': 39694,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.5xx': 0,
'upstreams.www-upstream.peers.1_1_1_94-8080.responses.total':
106317277,
'upstreams.app_upstream.keepalive': 0,
'upstreams.app_upstream.peers.1_2_5_3-8080.active': 0,
'upstreams.app_upstream.peers.1_2_5_3-8080.downtime': 0,
'upstreams.app_upstream.peers.1_2_5_3-8080.fails': 0,
'upstreams.app_upstream.peers.1_2_5_3-8080.received': 792,
'upstreams.app_upstream.peers.1_2_5_3-8080.requests': 4,
'upstreams.app_upstream.peers.1_2_5_3-8080.sent': 571,
'upstreams.app_upstream.peers.1_2_5_3-8080.unavail': 0,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.1xx': 0,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.2xx': 2,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.3xx': 0,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.4xx': 1,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.5xx': 0,
'upstreams.app_upstream.peers.1_2_5_8-8080.responses.total': 3,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany([publish_mock,
publish_gauge_mock,
publish_counter_mock
], metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
mockMimeMessage = Mock(**{'gettype.return_value': 'text/html'})
mockResponse = Mock(**{
'readlines.return_value':
self.getFixture('status_blank').readlines(),
'info.return_value': mockMimeMessage,
}
)
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=mockResponse))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import numpy as np
from .utils import logger, verbose, _check_option
def _log_rescale(baseline, mode='mean'):
"""Log the rescaling method."""
if baseline is not None:
_check_option('mode', mode, ['logratio', 'ratio', 'zscore', 'mean',
'percent', 'zlogratio'])
msg = 'Applying baseline correction (mode: %s)' % mode
else:
msg = 'No baseline correction applied'
return msg
@verbose
def rescale(data, times, baseline, mode='mean', copy=True, picks=None,
verbose=None):
"""Rescale (baseline correct) data.
Parameters
----------
data : array
It can be of any shape. The only constraint is that the last
dimension should be time.
times : 1D array
Time instants in seconds.
%(rescale_baseline)s
mode : 'mean' | 'ratio' | 'logratio' | 'percent' | 'zscore' | 'zlogratio'
Perform baseline correction by
- subtracting the mean of baseline values ('mean')
- dividing by the mean of baseline values ('ratio')
- dividing by the mean of baseline values and taking the log
('logratio')
- subtracting the mean of baseline values followed by dividing by
the mean of baseline values ('percent')
- subtracting the mean of baseline values and dividing by the
standard deviation of baseline values ('zscore')
- dividing by the mean of baseline values, taking the log, and
dividing by the standard deviation of log baseline values
('zlogratio')
copy : bool
Whether to return a new instance or modify in place.
picks : list of int | None
Data to process along the axis=-2 (None, default, processes all).
%(verbose)s
Returns
-------
data_scaled : array
Array of same shape as data after rescaling.
"""
data = data.copy() if copy else data
msg = _log_rescale(baseline, mode)
logger.info(msg)
if baseline is None or data.shape[-1] == 0:
return data
bmin, bmax = baseline
if bmin is None:
imin = 0
else:
imin = np.where(times >= bmin)[0]
if len(imin) == 0:
raise ValueError('bmin is too large (%s), it exceeds the largest '
'time value' % (bmin,))
imin = int(imin[0])
if bmax is None:
imax = len(times)
else:
imax = np.where(times <= bmax)[0]
if len(imax) == 0:
raise ValueError('bmax is too small (%s), it is smaller than the '
'smallest time value' % (bmax,))
imax = int(imax[-1]) + 1
if imin >= imax:
raise ValueError('Bad rescaling slice (%s:%s) from time values %s, %s'
% (imin, imax, bmin, bmax))
# technically this is inefficient when `picks` is given, but assuming
# that we generally pick most channels for rescaling, it's not so bad
mean = np.mean(data[..., imin:imax], axis=-1, keepdims=True)
if mode == 'mean':
def fun(d, m):
d -= m
elif mode == 'ratio':
def fun(d, m):
d /= m
elif mode == 'logratio':
def fun(d, m):
d /= m
np.log10(d, out=d)
elif mode == 'percent':
def fun(d, m):
d -= m
d /= m
elif mode == 'zscore':
def fun(d, m):
d -= m
d /= np.std(d[..., imin:imax], axis=-1, keepdims=True)
elif mode == 'zlogratio':
def fun(d, m):
d /= m
np.log10(d, out=d)
d /= np.std(d[..., imin:imax], axis=-1, keepdims=True)
if picks is None:
fun(data, mean)
else:
for pi in picks:
fun(data[..., pi, :], mean[..., pi, :])
return data
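# --- Usage sketch (illustrative only, not part of this module's API) ---------
# Z-score baseline correction of synthetic data shaped (epochs, channels, times)
# with a baseline window of -0.2 s to 0 s; the shapes and values are made up.
def _rescale_usage_example():
    times = np.linspace(-0.2, 0.5, 71)                 # 71 samples, 10 ms apart
    data = np.random.RandomState(0).randn(2, 3, 71)    # (epochs, channels, times)
    corrected = rescale(data, times, baseline=(-0.2, 0.), mode='zscore')
    return corrected.shape                             # same shape as the input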
|
import re # noqa: F401
import sys # noqa: F401
from paasta_tools.paastaapi.api_client import ApiClient, Endpoint
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from paasta_tools.paastaapi.model.resource import Resource
class ResourcesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __resources(
self,
**kwargs
):
"""Get resources in the cluster # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.resources(async_req=True)
>>> result = thread.get()
Keyword Args:
groupings ([str]): comma separated list of keys to group by. [optional]
filter ([str]): List of slave filters in format 'filter=attr_name:value1,value2&filter=attr2:value3,value4'. Matches attr_name=(value1 OR value2) AND attr2=(value3 OR value4). [optional]
_return_http_data_only (bool): return the response data only, without
the HTTP status code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Resource
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.resources = Endpoint(
settings={
'response_type': (Resource,),
'auth': [],
'endpoint_path': '/resources/utilization',
'operation_id': 'resources',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'groupings',
'filter',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'groupings':
([str],),
'filter':
([str],),
},
'attribute_map': {
'groupings': 'groupings',
'filter': 'filter',
},
'location_map': {
'groupings': 'query',
'filter': 'query',
},
'collection_format_map': {
'groupings': 'csv',
'filter': 'multi',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__resources
)
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DOMAIN as WIRELESSTAG_DOMAIN, SIGNAL_TAG_UPDATE, WirelessTagBaseSensor
_LOGGER = logging.getLogger(__name__)
SENSOR_TEMPERATURE = "temperature"
SENSOR_HUMIDITY = "humidity"
SENSOR_MOISTURE = "moisture"
SENSOR_LIGHT = "light"
SENSOR_TYPES = [SENSOR_TEMPERATURE, SENSOR_HUMIDITY, SENSOR_MOISTURE, SENSOR_LIGHT]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
platform = hass.data.get(WIRELESSTAG_DOMAIN)
sensors = []
tags = platform.tags
for tag in tags.values():
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
if sensor_type in tag.allowed_sensor_types:
sensors.append(
WirelessTagSensor(platform, tag, sensor_type, hass.config)
)
add_entities(sensors, True)
class WirelessTagSensor(WirelessTagBaseSensor):
"""Representation of a Sensor."""
def __init__(self, api, tag, sensor_type, config):
"""Initialize a WirelessTag sensor."""
super().__init__(api, tag)
self._sensor_type = sensor_type
self._name = self._tag.name
# I want to see entity_id as:
# sensor.wirelesstag_bedroom_temperature
# and not as sensor.bedroom for temperature and
# sensor.bedroom_2 for humidity
self._entity_id = (
f"sensor.{WIRELESSTAG_DOMAIN}_{self.underscored_name}_{self._sensor_type}"
)
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TAG_UPDATE.format(self.tag_id, self.tag_manager_mac),
self._update_tag_info_callback,
)
)
@property
def entity_id(self):
"""Overridden version."""
return self._entity_id
@property
def underscored_name(self):
"""Provide name savvy to be used in entity_id name of self."""
return self.name.lower().replace(" ", "_")
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the class of the sensor."""
return self._sensor_type
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._sensor.unit
@property
def principal_value(self):
"""Return sensor current value."""
return self._sensor.value
@property
def _sensor(self):
"""Return tag sensor entity."""
return self._tag.sensor[self._sensor_type]
@callback
def _update_tag_info_callback(self, event):
"""Handle push notification sent by tag manager."""
_LOGGER.debug("Entity to update state: %s event data: %s", self, event.data)
new_value = self._sensor.value_from_update_event(event.data)
self._state = self.decorate_value(new_value)
self.async_write_ha_state()
|
from django.contrib.auth.views import LoginView
class LoginMixin(object):
"""
Mixin implementing a login view
configured for Zinnia.
"""
def login(self):
"""
Return the login view.
"""
return LoginView.as_view(
template_name='zinnia/login.html'
)(self.request)
class PasswordMixin(object):
"""
Mixin implementing a password view
configured for Zinnia.
"""
error = False
def password(self):
"""
Return the password view.
"""
return self.response_class(request=self.request,
template='zinnia/password.html',
context={'error': self.error})
class EntryProtectionMixin(LoginMixin, PasswordMixin):
"""
Mixin returning a login view if the current
entry needs authentication and a password view
if the entry is protected by a password.
"""
session_key = 'zinnia_entry_%s_password'
def get(self, request, *args, **kwargs):
"""
Do the login and password protection.
"""
response = super(EntryProtectionMixin, self).get(
request, *args, **kwargs)
if self.object.login_required and not request.user.is_authenticated:
return self.login()
if (self.object.password and self.object.password !=
self.request.session.get(self.session_key % self.object.pk)):
return self.password()
return response
def post(self, request, *args, **kwargs):
"""
Do the login and password protection.
"""
self.object = self.get_object()
self.login()
if self.object.password:
entry_password = self.request.POST.get('entry_password')
if entry_password:
if entry_password == self.object.password:
self.request.session[self.session_key %
self.object.pk] = self.object.password
return self.get(request, *args, **kwargs)
else:
self.error = True
return self.password()
return self.get(request, *args, **kwargs)
|
import logging
import logging.config
import sys
import os
class DebugFormatter(logging.Formatter):
def __init__(self, fmt=None):
if fmt is None:
fmt = ('%(created)s\t' +
'[%(processName)s:%(process)d:%(levelname)s]\t' +
'%(message)s')
self.fmt_default = fmt
self.fmt_prefix = fmt.replace('%(message)s', '')
logging.Formatter.__init__(self, fmt)
def format(self, record):
self._fmt = self.fmt_default
if record.levelno in [logging.ERROR, logging.CRITICAL]:
self._fmt = ''
self._fmt += self.fmt_prefix
self._fmt += '%(message)s'
self._fmt += '\n'
self._fmt += self.fmt_prefix
self._fmt += '%(pathname)s:%(lineno)d'
return logging.Formatter.format(self, record)
def setup_logging(configfile, stdout=False):
log = logging.getLogger('diamond')
try:
logging.config.fileConfig(configfile, disable_existing_loggers=False)
# if the stdout flag is set, we use the log level of the root logger
# for logging to stdout, and keep all loggers defined in the conf file
if stdout:
rootLogLevel = logging.getLogger().getEffectiveLevel()
log.setLevel(rootLogLevel)
streamHandler = logging.StreamHandler(sys.stdout)
streamHandler.setFormatter(DebugFormatter())
streamHandler.setLevel(rootLogLevel)
log.addHandler(streamHandler)
except Exception as e:
sys.stderr.write("Error occurs when initialize logging: ")
sys.stderr.write(str(e))
sys.stderr.write(os.linesep)
return log
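# --- Usage sketch (illustrative only, not part of diamond) -------------------
# Attach the DebugFormatter above to a stdout handler directly, without a
# config file; the 'diamond' logger name mirrors setup_logging().
def _example_stdout_logger(level=logging.INFO):
    log = logging.getLogger('diamond')
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(DebugFormatter())
    handler.setLevel(level)
    log.addHandler(handler)
    log.setLevel(level)
    return log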
|
import numpy as np
import pytest
import matplotlib.pyplot as plt
from mne.viz import plot_connectivity_circle, circular_layout
def test_plot_connectivity_circle():
"""Test plotting connectivity circle."""
node_order = ['frontalpole-lh', 'parsorbitalis-lh',
'lateralorbitofrontal-lh', 'rostralmiddlefrontal-lh',
'medialorbitofrontal-lh', 'parstriangularis-lh',
'rostralanteriorcingulate-lh', 'temporalpole-lh',
'parsopercularis-lh', 'caudalanteriorcingulate-lh',
'entorhinal-lh', 'superiorfrontal-lh', 'insula-lh',
'caudalmiddlefrontal-lh', 'superiortemporal-lh',
'parahippocampal-lh', 'middletemporal-lh',
'inferiortemporal-lh', 'precentral-lh',
'transversetemporal-lh', 'posteriorcingulate-lh',
'fusiform-lh', 'postcentral-lh', 'bankssts-lh',
'supramarginal-lh', 'isthmuscingulate-lh', 'paracentral-lh',
'lingual-lh', 'precuneus-lh', 'inferiorparietal-lh',
'superiorparietal-lh', 'pericalcarine-lh',
'lateraloccipital-lh', 'cuneus-lh', 'cuneus-rh',
'lateraloccipital-rh', 'pericalcarine-rh',
'superiorparietal-rh', 'inferiorparietal-rh', 'precuneus-rh',
'lingual-rh', 'paracentral-rh', 'isthmuscingulate-rh',
'supramarginal-rh', 'bankssts-rh', 'postcentral-rh',
'fusiform-rh', 'posteriorcingulate-rh',
'transversetemporal-rh', 'precentral-rh',
'inferiortemporal-rh', 'middletemporal-rh',
'parahippocampal-rh', 'superiortemporal-rh',
'caudalmiddlefrontal-rh', 'insula-rh', 'superiorfrontal-rh',
'entorhinal-rh', 'caudalanteriorcingulate-rh',
'parsopercularis-rh', 'temporalpole-rh',
'rostralanteriorcingulate-rh', 'parstriangularis-rh',
'medialorbitofrontal-rh', 'rostralmiddlefrontal-rh',
'lateralorbitofrontal-rh', 'parsorbitalis-rh',
'frontalpole-rh']
label_names = ['bankssts-lh', 'bankssts-rh', 'caudalanteriorcingulate-lh',
'caudalanteriorcingulate-rh', 'caudalmiddlefrontal-lh',
'caudalmiddlefrontal-rh', 'cuneus-lh', 'cuneus-rh',
'entorhinal-lh', 'entorhinal-rh', 'frontalpole-lh',
'frontalpole-rh', 'fusiform-lh', 'fusiform-rh',
'inferiorparietal-lh', 'inferiorparietal-rh',
'inferiortemporal-lh', 'inferiortemporal-rh', 'insula-lh',
'insula-rh', 'isthmuscingulate-lh', 'isthmuscingulate-rh',
'lateraloccipital-lh', 'lateraloccipital-rh',
'lateralorbitofrontal-lh', 'lateralorbitofrontal-rh',
'lingual-lh', 'lingual-rh', 'medialorbitofrontal-lh',
'medialorbitofrontal-rh', 'middletemporal-lh',
'middletemporal-rh', 'paracentral-lh', 'paracentral-rh',
'parahippocampal-lh', 'parahippocampal-rh',
'parsopercularis-lh', 'parsopercularis-rh',
'parsorbitalis-lh', 'parsorbitalis-rh',
'parstriangularis-lh', 'parstriangularis-rh',
'pericalcarine-lh', 'pericalcarine-rh', 'postcentral-lh',
'postcentral-rh', 'posteriorcingulate-lh',
'posteriorcingulate-rh', 'precentral-lh', 'precentral-rh',
'precuneus-lh', 'precuneus-rh',
'rostralanteriorcingulate-lh',
'rostralanteriorcingulate-rh', 'rostralmiddlefrontal-lh',
'rostralmiddlefrontal-rh', 'superiorfrontal-lh',
'superiorfrontal-rh', 'superiorparietal-lh',
'superiorparietal-rh', 'superiortemporal-lh',
'superiortemporal-rh', 'supramarginal-lh',
'supramarginal-rh', 'temporalpole-lh', 'temporalpole-rh',
'transversetemporal-lh', 'transversetemporal-rh']
group_boundaries = [0, len(label_names) / 2]
node_angles = circular_layout(label_names, node_order, start_pos=90,
group_boundaries=group_boundaries)
con = np.random.RandomState(0).randn(68, 68)
plot_connectivity_circle(con, label_names, n_lines=300,
node_angles=node_angles, title='test',
)
pytest.raises(ValueError, circular_layout, label_names, node_order,
group_boundaries=[-1])
pytest.raises(ValueError, circular_layout, label_names, node_order,
group_boundaries=[20, 0])
plt.close('all')
|
from typing import Optional
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_DEFAULT_ENABLED,
ATTR_DEVICE_CLASS,
ATTR_ICON,
ATTR_INVERTED,
ATTR_MEASUREMENT,
ATTR_NAME,
ATTR_SECTION,
BINARY_SENSOR_ENTITIES,
DOMAIN,
)
from .coordinator import ToonDataUpdateCoordinator
from .models import (
ToonBoilerDeviceEntity,
ToonBoilerModuleDeviceEntity,
ToonDisplayDeviceEntity,
ToonEntity,
)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up a Toon binary sensor based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
sensors = [
ToonBoilerModuleBinarySensor(
coordinator, key="thermostat_info_boiler_connected_None"
),
ToonDisplayBinarySensor(coordinator, key="thermostat_program_overridden"),
]
if coordinator.data.thermostat.have_opentherm_boiler:
sensors.extend(
[
ToonBoilerBinarySensor(coordinator, key=key)
for key in [
"thermostat_info_ot_communication_error_0",
"thermostat_info_error_found_255",
"thermostat_info_burner_info_None",
"thermostat_info_burner_info_1",
"thermostat_info_burner_info_2",
"thermostat_info_burner_info_3",
]
]
)
async_add_entities(sensors, True)
class ToonBinarySensor(ToonEntity, BinarySensorEntity):
"""Defines an Toon binary sensor."""
def __init__(self, coordinator: ToonDataUpdateCoordinator, *, key: str) -> None:
"""Initialize the Toon sensor."""
self.key = key
super().__init__(
coordinator,
enabled_default=BINARY_SENSOR_ENTITIES[key][ATTR_DEFAULT_ENABLED],
icon=BINARY_SENSOR_ENTITIES[key][ATTR_ICON],
name=BINARY_SENSOR_ENTITIES[key][ATTR_NAME],
)
@property
def unique_id(self) -> str:
"""Return the unique ID for this binary sensor."""
agreement_id = self.coordinator.data.agreement.agreement_id
# This unique ID is a bit ugly and contains unneeded information.
# It is here for legacy / backward compatible reasons.
return f"{DOMAIN}_{agreement_id}_binary_sensor_{self.key}"
@property
def device_class(self) -> str:
"""Return the device class."""
return BINARY_SENSOR_ENTITIES[self.key][ATTR_DEVICE_CLASS]
@property
def is_on(self) -> Optional[bool]:
"""Return the status of the binary sensor."""
section = getattr(
self.coordinator.data, BINARY_SENSOR_ENTITIES[self.key][ATTR_SECTION]
)
value = getattr(section, BINARY_SENSOR_ENTITIES[self.key][ATTR_MEASUREMENT])
if value is None:
return None
if BINARY_SENSOR_ENTITIES[self.key][ATTR_INVERTED]:
return not value
return value
class ToonBoilerBinarySensor(ToonBinarySensor, ToonBoilerDeviceEntity):
"""Defines a Boiler binary sensor."""
class ToonDisplayBinarySensor(ToonBinarySensor, ToonDisplayDeviceEntity):
"""Defines a Toon Display binary sensor."""
class ToonBoilerModuleBinarySensor(ToonBinarySensor, ToonBoilerModuleDeviceEntity):
"""Defines a Boiler module binary sensor."""
|
from __future__ import unicode_literals
import os
import time
import string
import locale
import traceback
from functools import reduce
from lib.fun.color import Colored
from lib.fun.osjudger import py_ver_egt_3
from lib.data.data import pystrs, paths, pyoptions
cool = Colored()
# order preserving
def unique(seq, idfun=None):
if idfun is None:
def idfun(x): return x
seen = {}
results = []
for item in seq:
marker = idfun(item)
if marker in seen:
continue
seen[marker] = 1
results.append(item)
return results
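# --- Usage sketch (illustrative only) -----------------------------------------
# unique() keeps the first occurrence of each item and preserves input order,
# unlike set(); the idfun argument normalizes items before comparison.
def _unique_example():
    assert unique(['b', 'a', 'b', 'c', 'a']) == ['b', 'a', 'c']
    assert unique(['B', 'a', 'b'], idfun=lambda s: s.lower()) == ['B', 'a']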
def rreplace(self, old, new, *max):
count = len(self)
if max and str(max[0]).isdigit():
count = max[0]
return new.join(self.rsplit(old, count))
def charanger(confstr):
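# Parse a chars configuration string into a flat list of items: each
# chars_split-separated token is either a file path (one entry per non-empty
# line), a char_range_split range expanded over string.printable, or a
# literal string.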
ranges = []
for i in range(len(confstr.split(pyoptions.chars_split))):
if os.path.isfile(confstr.split(pyoptions.chars_split)[i]):
with open(confstr.split(pyoptions.chars_split)[i], 'r') as f:
for line in f.readlines():
ranges.append(line.strip())
elif pyoptions.char_range_split in confstr.split(pyoptions.chars_split)[i] and \
len(confstr.split(pyoptions.chars_split)[i].split(pyoptions.char_range_split)) == 2:
start = confstr.split(pyoptions.chars_split)[i].split(pyoptions.char_range_split)[0]
end = confstr.split(pyoptions.chars_split)[i].split(pyoptions.char_range_split)[1]
for c in string.printable:
if start <= c <= end:
ranges.append(c.strip())
else:
ranges.append(str(confstr.split(pyoptions.chars_split)[i]).strip())
return ranges
def walks_all_files(directory):
contents = []
for _ in get_subdir_files_path(directory):
with open(_, 'r') as f:
for line in f.readlines():
if line.strip() != '' and line.strip()[0] != pyoptions.annotator:
contents.append(line.strip())
return unique(contents)
def walk_pure_file(filepath, pure=True):
results = []
with open(filepath, 'r') as f:
for line in f.readlines():
if pure:
if line.strip() != '':
results.append(line.strip())
else:
if line.strip() != '' and line.strip()[0] != pyoptions.annotator:
results.append(line.strip())
return unique(results)
def get_subdir_files_path(directory, only_file_path=True):
dirpaths = []
filepaths = []
for rootpath, subdirsname, filenames in os.walk(directory):
dirpaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in subdirsname])
filepaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in filenames])
if only_file_path:
return filepaths
else:
return dirpaths, filepaths
def range_compatible(minlength, maxlength_plus_one):
if py_ver_egt_3():
return range(minlength, maxlength_plus_one)
else:
return xrange(minlength, maxlength_plus_one)
def lengthchecker(minlen, maxlen, sedb=False):
if str(minlen).isdigit() and str(maxlen).isdigit():
if int(minlen) <= int(maxlen):
if int(maxlen) > pyoptions.maxlen_switcher:
if not sedb:
exit(pyoptions.CRLF + cool.red("[-] Ensure maxlen <= %s" % pyoptions.maxlen_switcher))
else:
print(cool.fuchsia("[!] Ensure maxlen <= %s%s"
"[!] Modify /lib/data/data.py maxlen_switcher to adjust it" %
(pyoptions.maxlen_switcher, pyoptions.CRLF) + pyoptions.CRLF))
return False
else:
return True
else:
if not sedb:
exit(pyoptions.CRLF + cool.red("[-] Ensure minlen <= maxlen"))
else:
print(cool.fuchsia("[!] Ensure minlen <= maxlen") + pyoptions.CRLF)
return False
else:
if not sedb:
exit(pyoptions.CRLF + cool.red("[-] Make sure minlen and maxlen is digit"))
else:
print(cool.fuchsia("[!] Make sure minlen and maxlen is digit") + pyoptions.CRLF)
return False
def countchecker(charslength, *args):
count_check = 0
exit_msg = pyoptions.CRLF + cool.fuchsia("[!] Build items more than pyoptions.count_switcher: %s%s"
"[!] Modify /lib/data/data.py count_switcher to adjust it" %
(str(pyoptions.count_switcher), pyoptions.CRLF))
# chunk
if len(args) == 0:
if reduce(lambda a, b: a*b, range_compatible(1, charslength + 1)) > pyoptions.count_switcher:
exit(exit_msg)
# conf
elif len(args) == 1 and charslength == -1:
if args[0] > pyoptions.count_switcher:
exit(exit_msg)
# conf
elif len(args) == 2 and charslength == -1:
if args[0] * args[1] > pyoptions.count_switcher:
exit(exit_msg)
# base
elif len(args) == 2 and charslength != -1:
for _ in range_compatible(args[0], args[1] + 1):
count_check += pow(charslength, _)
if count_check > pyoptions.count_switcher:
exit(exit_msg)
# conf
elif len(args) >= 3 and charslength == -1:
allitems = 1
for x in range_compatible(0, len(args)):
allitems *= args[x]
if allitems > pyoptions.count_switcher:
exit(exit_msg)
def fun_name(isfun=False):
stack = traceback.extract_stack()
script_path, code_line, func_name, text = stack[-2]
script_name = os.path.split(script_path)[1][:-3]
if isfun:
return func_name
else:
return script_name
def is_en():
return "en" in locale.getdefaultlocale()[0].lower()
def mybuildtime():
return str(time.strftime("%H%M%S", time.localtime(time.time())))
def finishcounter(storepath):
line_count = 0
with open(storepath, 'r') as files:
for _ in files:
line_count += 1
return line_count
def finishprinter(storepath):
count = finishcounter(storepath)
print("[+] A total of :{0:} lines{1}"
"[+] Store in :{2} {1}"
"[+] Cost :{3} seconds".format(cool.orange(count), pyoptions.CRLF, cool.orange(storepath),
cool.orange(str(time.time() - pystrs.startime)[:6])))
def finalsavepath(prefix):
directory = paths.results_path
timestamp = mybuildtime()
ext = pyoptions.filextension
customname = paths.results_file_name
filename = "%s_%s%s" % (prefix.lower(), timestamp, ext)
filename = filename if not customname else customname
storepath = os.path.join(directory, filename)
return storepath
|
import bisect
import logging
import re
from collections import namedtuple
from datetime import datetime as dt, date, time, timedelta
import itertools
import pandas as pd
import pymongo
from ..date import mktz, DateRange, OPEN_OPEN, CLOSED_CLOSED, to_dt
from ..decorators import mongo_retry
from ..exceptions import (NoDataFoundException, UnhandledDtypeException, OverlappingDataException,
LibraryNotFoundException)
logger = logging.getLogger(__name__)
TickStoreLibrary = namedtuple("TickStoreLibrary", ["library", "date_range"])
TICK_STORE_TYPE = 'TopLevelTickStore'
PATTERN = r"^%s_\d{4}.%s"
YEAR_REGEX = re.compile(r"\d{4}")
end_time_min = (dt.combine(date.today(), time.min) - timedelta(milliseconds=1)).time()
class DictList(object):
def __init__(self, lst, key):
self.lst = lst
self.key = key
def __len__(self):
return len(self.lst)
def __getitem__(self, idx):
return self.lst[idx][self.key]
class TopLevelTickStore(object):
@classmethod
def initialize_library(cls, arctic_lib, **kwargs):
tl = TopLevelTickStore(arctic_lib)
tl._add_libraries()
tl._ensure_index()
def _ensure_index(self):
collection = self._collection
collection.create_index([('start', pymongo.ASCENDING)], background=True)
def _add_libraries(self):
name = self.get_name()
db_name, tick_type = name.split('.', 1)
regex = re.compile(PATTERN % (db_name, tick_type))
libraries = [lib for lib in self._arctic_lib.arctic.list_libraries() if regex.search(lib)]
for lib in libraries:
year = int(YEAR_REGEX.search(lib).group())
date_range = DateRange(dt(year, 1, 1), dt(year + 1, 1, 1) - timedelta(milliseconds=1))
self.add(date_range, lib)
def __init__(self, arctic_lib):
self._arctic_lib = arctic_lib
self._reset()
@mongo_retry
def _reset(self):
# The default collections
self._collection = self._arctic_lib.get_top_level_collection()
def add(self, date_range, library_name):
"""
Adds the library with the given date range to the underlying collection of libraries used by this store.
The underlying libraries should not overlap as the date ranges are assumed to be CLOSED_CLOSED by this function
and the rest of the class.
Arguments:
date_range: A date range provided on the assumption that it is CLOSED_CLOSED. If for example the underlying
libraries were split by year, the start of the date range would be datetime.datetime(year, 1, 1) and the end
would be datetime.datetime(year, 12, 31, 23, 59, 59, 999000). The date range must fall on UTC day boundaries,
that is, the start must be at midnight and the end must be 1 millisecond before midnight.
library_name: The name of the underlying library. This must be the name of a valid Arctic library
"""
# check that the library is valid
try:
self._arctic_lib.arctic[library_name]
except Exception as e:
logger.error("Could not load library")
raise e
assert date_range.start and date_range.end, "Date range should have start and end properties {}".format(date_range)
start = date_range.start.astimezone(mktz('UTC')) if date_range.start.tzinfo is not None else date_range.start.replace(tzinfo=mktz('UTC'))
end = date_range.end.astimezone(mktz('UTC')) if date_range.end.tzinfo is not None else date_range.end.replace(tzinfo=mktz('UTC'))
assert start.time() == time.min and end.time() == end_time_min, "Date range should fall on UTC day boundaries {}".format(date_range)
# check that the date range does not overlap
library_metadata = self._get_library_metadata(date_range)
if len(library_metadata) > 1 or (len(library_metadata) == 1 and library_metadata[0] != library_name):
raise OverlappingDataException("""There are libraries that overlap with the date range:
library: {}
overlapping libraries: {}""".format(library_name, [lib.library for lib in library_metadata]))
self._collection.update_one({'library_name': library_name},
{'$set': {'start': start, 'end': end}}, upsert=True)
def read(self, symbol, date_range, columns=None, include_images=False):
libraries = self._get_libraries(date_range)
dfs = []
for lib in libraries:
try:
df = lib.library.read(symbol, lib.date_range.intersection(date_range), columns,
include_images=include_images)
dfs.append(df)
except NoDataFoundException as e:
continue
if len(dfs) == 0:
raise NoDataFoundException("No Data found for {} in range: {}".format(symbol, date_range))
return pd.concat(dfs)
def write(self, symbol, data):
"""
Split the tick data to the underlying collections and write the data to each low
level library.
Args:
symbol (str): the symbol for the timeseries data
data (list of dicts or pandas dataframe): Tick data to write
If a list of dicts is given, the list must be in time order and the time must be stored in
an element named 'index', the value of which must be a timezone-aware datetime.
For a pandas DataFrame the index must be a datetime.
"""
# get the full set of date ranges that we have
cursor = self._collection.find()
for res in cursor:
library = self._arctic_lib.arctic[res['library_name']]
dslice = self._slice(data, to_dt(res['start'], mktz('UTC')), to_dt(res['end'], mktz('UTC')))
if len(dslice) != 0:
library.write(symbol, dslice)
def list_symbols(self, date_range):
libraries = self._get_libraries(date_range)
return sorted(list(set(itertools.chain(*[lib.library.list_symbols() for lib in libraries]))))
def get_name(self):
name = self._arctic_lib.get_name()
if name.startswith(self._arctic_lib.DB_PREFIX + '_'):
name = name[len(self._arctic_lib.DB_PREFIX) + 1:]
return name
def _get_libraries(self, date_range):
libraries = self._get_library_metadata(date_range)
rtn = [TickStoreLibrary(self._arctic_lib.arctic[library.library], library.date_range)
for library in libraries]
if rtn:
current_start = rtn[-1].date_range.end
elif date_range.end.tzinfo:
current_start = dt(1970, 1, 1, 0, 0, tzinfo=date_range.end.tzinfo)
else:
current_start = dt(1970, 1, 1, 0, 0)
if date_range.end is None or current_start < date_range.end:
name = self.get_name()
db_name, tick_type = name.split('.', 1)
current_lib = "{}_current.{}".format(db_name, tick_type)
try:
rtn.append(TickStoreLibrary(self._arctic_lib.arctic[current_lib],
DateRange(current_start, None, OPEN_OPEN)))
except LibraryNotFoundException:
pass # No '_current', move on.
if not rtn:
raise NoDataFoundException("No underlying libraries exist for the given date range")
return rtn
def _slice(self, data, start, end):
if isinstance(data, list):
dictlist = DictList(data, 'index')
slice_start = bisect.bisect_left(dictlist, to_dt(start, mktz('UTC')))
slice_end = bisect.bisect_right(dictlist, to_dt(end, mktz('UTC')))
return data[slice_start:slice_end]
elif isinstance(data, pd.DataFrame):
return data[start:end]
else:
raise UnhandledDtypeException("Can't persist type %s to tickstore" % type(data))
def _get_library_metadata(self, date_range):
"""
Retrieve the libraries for the given date range, the assumption is that the date ranges do not overlap and
they are CLOSED_CLOSED.
At the moment the date range is mandatory
"""
if date_range is None:
raise Exception("A date range must be provided")
if not (date_range.start and date_range.end):
raise Exception("The date range {0} must contain a start and end date".format(date_range))
start = date_range.start if date_range.start.tzinfo is not None else date_range.start.replace(tzinfo=mktz())
end = date_range.end if date_range.end.tzinfo is not None else date_range.end.replace(tzinfo=mktz())
query = {'$or': [{'start': {'$lte': start}, 'end': {'$gte': start}},
{'start': {'$gte': start}, 'end': {'$lte': end}},
{'start': {'$lte': end}, 'end': {'$gte': end}}]}
cursor = self._collection.find(query,
projection={'library_name': 1, 'start': 1, 'end': 1},
sort=[('start', pymongo.ASCENDING)])
results = []
for res in cursor:
start = res['start']
if date_range.start.tzinfo is not None and start.tzinfo is None:
start = start.replace(tzinfo=mktz("UTC")).astimezone(tz=date_range.start.tzinfo)
end = res['end']
if date_range.end.tzinfo is not None and end.tzinfo is None:
end = end.replace(tzinfo=mktz("UTC")).astimezone(tz=date_range.end.tzinfo)
results.append(TickStoreLibrary(res['library_name'], DateRange(start, end, CLOSED_CLOSED)))
return results
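# --- Illustrative sketch (not part of arctic) ---------------------------------
# Building the kind of CLOSED_CLOSED, UTC day-aligned date range that
# TopLevelTickStore.add() above expects for a single year's library.
def _example_year_range(year=2016):
    start = dt(year, 1, 1, tzinfo=mktz('UTC'))
    end = dt(year + 1, 1, 1, tzinfo=mktz('UTC')) - timedelta(milliseconds=1)
    return DateRange(start, end, CLOSED_CLOSED)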
|
import functools
import hashlib
import logging
import pickle
import numpy as np
import pandas as pd
import pymongo
import six
from bson import Binary
from pandas.compat import pickle_compat
from pymongo.errors import OperationFailure
from arctic._config import FW_POINTERS_REFS_KEY, FW_POINTERS_CONFIG_KEY, FwPointersCfg
from arctic._util import mongo_count, get_fwptr_config
def _split_arrs(array_2d, slices):
"""
Equivalent to numpy.split(array_2d, slices),
but avoids fancy indexing
"""
if len(array_2d) == 0:
return np.empty(0, dtype=np.object)
rtn = np.empty(len(slices) + 1, dtype=np.object)
start = 0
for i, s in enumerate(slices):
rtn[i] = array_2d[start:s]
start = s
rtn[-1] = array_2d[start:]
return rtn
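# --- Illustrative sketch (not part of arctic) ---------------------------------
# _split_arrs(a, [2, 5]) mirrors np.split(a, [2, 5]) row-wise, but returns an
# object array of row blocks instead of a list.
def _split_arrs_example():
    a = np.arange(12).reshape(6, 2)
    parts = _split_arrs(a, [2, 5])          # rows 0:2, 2:5 and 5:6
    expected = np.split(a, [2, 5])
    return all((p == e).all() for p, e in zip(parts, expected))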
def checksum(symbol, doc):
"""
Checksum the passed in dictionary
"""
sha = hashlib.sha1()
sha.update(symbol.encode('ascii'))
for k in sorted(iter(doc.keys()), reverse=True):
v = doc[k]
if isinstance(v, six.binary_type):
sha.update(doc[k])
else:
sha.update(str(doc[k]).encode('ascii'))
return Binary(sha.digest())
def get_symbol_alive_shas(symbol, versions_coll):
return set(Binary(x) for x in versions_coll.distinct(FW_POINTERS_REFS_KEY, {'symbol': symbol}))
def _cleanup_fw_pointers(collection, symbol, version_ids, versions_coll, shas_to_delete, do_clean=True):
shas_to_delete = set(shas_to_delete) if shas_to_delete else set()
if not version_ids or not shas_to_delete:
return shas_to_delete
symbol_alive_shas = get_symbol_alive_shas(symbol, versions_coll)
# This is the set of shas which are not referenced by any FW pointers
shas_safe_to_delete = shas_to_delete - symbol_alive_shas
if do_clean and shas_safe_to_delete:
collection.delete_many({'symbol': symbol, 'sha': {'$in': list(shas_safe_to_delete)}})
return shas_safe_to_delete
def _cleanup_parent_pointers(collection, symbol, version_ids):
for v in version_ids:
# Remove all documents which only contain the parent
collection.delete_many({'symbol': symbol,
'parent': [v]})
# Pull the parent from the parents field
collection.update_many({'symbol': symbol,
'parent': v},
{'$pull': {'parent': v}})
# Now remove all chunks which aren't parented - this is unlikely, as they will
# have been removed by the above
collection.delete_one({'symbol': symbol, 'parent': []})
def _cleanup_mixed(symbol, collection, version_ids, versions_coll):
# Pull the deleted version IDs from the parents field
collection.update_many({'symbol': symbol, 'parent': {'$in': version_ids}}, {'$pullAll': {'parent': version_ids}})
# All-inclusive set of segments which are pointed by at least one version (SHA fw pointers)
symbol_alive_shas = get_symbol_alive_shas(symbol, versions_coll)
spec = {'symbol': symbol, 'parent': []}
if symbol_alive_shas:
# Unfortunately, while this query hits the (symbol, sha) index to find the documents, filtering
# them by "parent: []" happens server side after fetching, which pollutes the WiredTiger cache
# TODO: add a new index for segments collection: (symbol, sha, parent)
spec['sha'] = {'$nin': list(symbol_alive_shas)}
collection.delete_many(spec)
def _get_symbol_pointer_cfgs(symbol, versions_coll):
return set(get_fwptr_config(v)
for v in versions_coll.find({'symbol': symbol}, projection={FW_POINTERS_CONFIG_KEY: 1}))
def cleanup(arctic_lib, symbol, version_ids, versions_coll, shas_to_delete=None, pointers_cfgs=None):
"""
Helper method for cleaning up chunks from a version store
"""
pointers_cfgs = set(pointers_cfgs) if pointers_cfgs else set()
collection = arctic_lib.get_top_level_collection()
version_ids = list(version_ids)
# Iterate versions to check if they are created only with fw pointers, parent pointers (old), or mixed
# Keep in mind that the version is not yet inserted.
all_symbol_pointers_cfgs = _get_symbol_pointer_cfgs(symbol, versions_coll)
all_symbol_pointers_cfgs.update(pointers_cfgs)
# All the versions of the symbol have been created with old arctic or with disabled forward pointers.
# Preserves backwards compatibility with the old parent-pointers implementation.
if all_symbol_pointers_cfgs == {FwPointersCfg.DISABLED} or not all_symbol_pointers_cfgs:
_cleanup_parent_pointers(collection, symbol, version_ids)
return
# All the versions of the symbol we wish to delete have been created with forward pointers
if FwPointersCfg.DISABLED not in all_symbol_pointers_cfgs:
_cleanup_fw_pointers(collection, symbol, version_ids, versions_coll,
shas_to_delete=shas_to_delete, do_clean=True)
return
# Reaching here means the symbol has versions with mixed forward pointers and legacy/parent pointer configurations
_cleanup_mixed(symbol, collection, version_ids, versions_coll)
def version_base_or_id(version):
return version.get('base_version_id', version['_id'])
def _define_compat_pickle_load():
"""Factory function to initialise the correct Pickle load function based on
the Pandas version.
"""
if pd.__version__.startswith("0.14"):
return pickle.load
return pickle_compat.load
def analyze_symbol(instance, sym, from_ver, to_ver, do_reads=False):
"""
This is a utility function to produce text output with details about the versions of a given symbol.
It is useful for debugging corruption issues and to mark corrupted versions.
Parameters
----------
instance : `arctic.store.version_store.VersionStore`
The VersionStore instance against which the analysis will be run.
sym : `str`
The symbol to analyze
from_ver : `int` or `None`
The lower bound for the version number we wish to analyze. If None then start from the earliest version.
to_ver : `int` or `None`
The upper bound for the version number we wish to analyze. If None then stop at the latest version.
do_reads : `bool`
If this flag is set to true, then the corruption check will actually try to read the symbol (slower).
"""
logging.info('Analyzing symbol {}. Versions range is [v{}, v{}]'.format(sym, from_ver, to_ver))
prev_rows = 0
prev_n = 0
prev_v = None
logging.info('\nVersions for {}:'.format(sym))
for v in instance._versions.find({'symbol': sym, 'version': {'$gte': from_ver, '$lte': to_ver}},
sort=[('version', pymongo.ASCENDING)]):
n = v.get('version')
is_deleted = v.get('metadata').get('deleted', False) if v.get('metadata') else False
if is_deleted:
matching = 0
else:
spec = {'symbol': sym, 'parent': v.get('base_version_id', v['_id']), 'segment': {'$lt': v.get('up_to', 0)}}
matching = mongo_count(instance._collection, filter=spec) if not is_deleted else 0
base_id = v.get('base_version_id')
snaps = ['/'.join((str(x), str(x.generation_time))) for x in v.get('parent')] if v.get('parent') else None
added_rows = v.get('up_to', 0) - prev_rows
meta_match_with_prev = v.get('metadata') == prev_v.get('metadata') if prev_v else False
delta_snap_creation = (min([x.generation_time for x in v.get('parent')]) - v['_id'].generation_time).total_seconds() / 60.0 if v.get('parent') else 0.0
prev_v_diff = 0 if not prev_v else v['version'] - prev_v['version']
corrupted = not is_deleted and (is_corrupted(instance, sym, v) if do_reads else fast_is_corrupted(instance, sym, v))
logging.info(
"v{: <6} "
"{: <6} "
"{: <5} "
"({: <20}): "
"expected={: <6} "
"found={: <6} "
"last_row={: <10} "
"new_rows={: <10} "
"append count={: <10} "
"append_size={: <10} "
"type={: <14} {: <14} "
"base={: <24}/{: <28} "
"snap={: <30}[{:.1f} mins delayed] "
"{: <20} "
"{: <20}".format(
n,
prev_v_diff,
'DEL' if is_deleted else 'ALIVE',
str(v['_id'].generation_time),
v.get('segment_count', 0),
matching,
v.get('up_to', 0),
added_rows,
v.get('append_count'),
v.get('append_size'),
v.get('type'),
'meta-same' if meta_match_with_prev else 'meta-changed',
str(base_id),
str(base_id.generation_time) if base_id else '',
str(snaps),
delta_snap_creation,
'PREV_MISSING' if prev_n < n - 1 else '',
'CORRUPTED VERSION' if corrupted else '')
)
prev_rows = v.get('up_to', 0)
prev_n = n
prev_v = v
logging.info('\nSegments for {}:'.format(sym))
for seg in instance._collection.find({'symbol': sym}, sort=[('_id', pymongo.ASCENDING)]):
logging.info("{: <32} {: <7} {: <10} {: <30}".format(
hashlib.sha1(seg['sha']).hexdigest(),
seg.get('segment'),
'compressed' if seg.get('compressed', False) else 'raw',
str([str(p) for p in seg.get('parent', [])])
))
def _fast_check_corruption(collection, sym, v, check_count, check_last_segment, check_append_safe):
if v is None:
logging.warning("Symbol {} with version {} not found, so can't be corrupted.".format(sym, v))
return False
if not check_count and not check_last_segment:
raise ValueError("_fast_check_corruption must be called with either of "
"check_count and check_last_segment set to True")
# If version marked symbol as deleted, it will force writes/appends to start from a new base: non corrupted.
if isinstance(v.get('metadata'), dict) and v['metadata'].get('deleted'):
return False
if check_append_safe:
# Check whether appending to the symbol version can potentially corrupt the data (history branch).
# Inspect all segments, don't limit to v['up_to']. No newer append segments after v should exist.
spec = {'symbol': sym, 'parent': v.get('base_version_id', v['_id'])}
else:
# Only verify segment count for current symbol version, don't check corruptability of future appends.
spec = {'symbol': sym, 'parent': v.get('base_version_id', v['_id']), 'segment': {'$lt': v['up_to']}}
try:
# Note that command sequence (a) is slower than (b)
# (a) curs = collection.find(spec, {'segment': 1}, sort=[('segment', pymongo.DESCENDING)])
# curs.count()
# curs.next()
# (b) collection.find(spec, {'segment': 1}).count()
# collection.find_one(spec, {'segment': 1}, sort=[('segment', pymongo.DESCENDING)])
if check_count:
total_segments = mongo_count(collection, filter=spec)
# Quick check: compare segment count
if total_segments != v.get('segment_count', 0):
return True # corrupted, don't proceed with fetching from mongo the first hit
# Quick check: Segment counts agree and size is zero
if total_segments == 0:
return False
if check_last_segment:
# Quick check: compare the maximum segment's up_to number. It has to verify the version's up_to.
max_seg = collection.find_one(spec, {'segment': 1}, sort=[('segment', pymongo.DESCENDING)])
max_seg = max_seg['segment'] + 1 if max_seg else 0
if max_seg != v.get('up_to'):
return True # corrupted, last segment and version's up_to don't agree
except OperationFailure as e:
logging.warning("Corruption checks are skipped (sym={}, version={}): {}".format(sym, v['version'], str(e)))
return False
def is_safe_to_append(instance, sym, input_v):
"""
This method hints whether the symbol/version are safe for appending in two ways:
1. It verifies whether the symbol is already corrupted (fast, doesn't read the data)
2. It verifies that the symbol is safe to append, i.e. there are no subsequent appends,
or dangling segments from a failed append.
Parameters
----------
instance : `arctic.store.version_store.VersionStore`
The VersionStore instance against which the analysis will be run.
sym : `str`
The symbol to test if is corrupted.
input_v : `int` or `arctic.store.version_store.VersionedItem`
The specific version we wish to test if is appendable. This argument is mandatory.
Returns
-------
`bool`
True if the symbol is safe to append, False otherwise.
"""
input_v = instance._versions.find_one({'symbol': sym, 'version': input_v}) if isinstance(input_v, int) else input_v
return not _fast_check_corruption(instance._collection, sym, input_v,
check_count=True, check_last_segment=True, check_append_safe=True)
def fast_is_corrupted(instance, sym, input_v):
"""
This method can be used for a fast check (not involving a read) for a corrupted version.
Users can't fully trust a negative result as it may give false negatives, but if this returns True, then the symbol is certainly broken (no false positives).
Parameters
----------
instance : `arctic.store.version_store.VersionStore`
The VersionStore instance against which the analysis will be run.
sym : `str`
The symbol to test if is corrupted.
input_v : `int` or `arctic.store.version_store.VersionedItem`
The specific version we wish to test if is corrupted. This argument is mandatory.
Returns
-------
`bool`
True if the symbol is found corrupted, False otherwise.
"""
input_v = instance._versions.find_one({'symbol': sym, 'version': input_v}) if isinstance(input_v, int) else input_v
return _fast_check_corruption(instance._collection, sym, input_v,
check_count=True, check_last_segment=True, check_append_safe=False)
def is_corrupted(instance, sym, input_v):
"""
This method can be used to check for a corrupted version.
Will continue to a full read (slower) if the internally invoked fast-detection does not locate a corruption.
Parameters
----------
instance : `arctic.store.version_store.VersionStore`
The VersionStore instance against which the analysis will be run.
sym : `str`
The symbol to test if is corrupted.
input_v : `int` or `arctic.store.version_store.VersionedItem`
The specific version we wish to test if is corrupted. This argument is mandatory.
Returns
-------
`bool`
True if the symbol is found corrupted, False otherwise.
"""
# If version is just a number, read the version document
input_v = instance._versions.find_one({'symbol': sym, 'version': input_v}) if isinstance(input_v, int) else input_v
if not _fast_check_corruption(instance._collection, sym, input_v,
check_count=True, check_last_segment=True, check_append_safe=False):
try:
# Done with the fast checks, proceed to a full read if instructed
instance.read(sym, as_of=input_v['version'])
return False
except Exception:
pass
return True
# Initialise the pickle load function and delete the factory function.
pickle_compat_load = _define_compat_pickle_load()
del _define_compat_pickle_load
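# --- Illustrative sketch (not part of arctic) ---------------------------------
# Guarding an append with the fast safety check above. `vstore` is assumed to
# be an arctic.store.version_store.VersionStore exposing append(symbol, data),
# and `new_rows` whatever that store accepts; both are hypothetical here.
def _append_if_safe(vstore, sym, new_rows):
    latest = vstore._versions.find_one({'symbol': sym},
                                       sort=[('version', pymongo.DESCENDING)])
    if latest is not None and is_safe_to_append(vstore, sym, latest):
        vstore.append(sym, new_rows)
        return True
    return False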
|
import os.path as op
import numpy as np
from numpy.testing import assert_allclose
import pytest
import matplotlib.pyplot as plt
from matplotlib import gridspec
from matplotlib.cm import get_cmap
import mne
from mne import (read_events, Epochs, read_cov, compute_covariance,
make_fixed_length_events, compute_proj_evoked)
from mne.io import read_raw_fif
from mne.utils import run_tests_if_main, catch_logging, requires_version
from mne.viz import plot_compare_evokeds, plot_evoked_white
from mne.viz.utils import _fake_click
from mne.datasets import testing
from mne.io.constants import FIFF
from mne.stats.parametric import _parametric_ci
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
evoked_fname = op.join(base_dir, 'test-ave.fif')
raw_fname = op.join(base_dir, 'test_raw.fif')
raw_sss_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
cov_fname = op.join(base_dir, 'test-cov.fif')
event_name = op.join(base_dir, 'test-eve.fif')
event_id, tmin, tmax = 1, -0.1, 0.1
# Use a subset of channels for plotting speed
# make sure we have a magnetometer and a pair of grad pairs for topomap.
default_picks = (0, 1, 2, 3, 4, 6, 7, 61, 122, 183, 244, 305)
sel = (0, 7)
def _get_epochs(picks=default_picks):
"""Get epochs."""
raw = read_raw_fif(raw_fname)
raw.add_proj([], remove_existing=True)
events = read_events(event_name)
epochs = Epochs(raw, events[:5], event_id, tmin, tmax, picks=picks,
decim=10, verbose='error')
epochs.info['bads'] = [epochs.ch_names[-1]]
epochs.info.normalize_proj()
return epochs
def _get_epochs_delayed_ssp():
"""Get epochs with delayed SSP."""
raw = read_raw_fif(raw_fname)
events = read_events(event_name)
reject = dict(mag=4e-12)
epochs_delayed_ssp = Epochs(raw, events[:10], event_id, tmin, tmax,
picks=default_picks, proj='delayed',
reject=reject, verbose='error')
epochs_delayed_ssp.info.normalize_proj()
return epochs_delayed_ssp
def test_plot_evoked_cov():
"""Test plot_evoked with noise_cov."""
evoked = _get_epochs().average()
cov = read_cov(cov_fname)
cov['projs'] = [] # avoid warnings
evoked.plot(noise_cov=cov, time_unit='s')
with pytest.raises(TypeError, match='Covariance'):
evoked.plot(noise_cov=1., time_unit='s')
with pytest.raises(IOError, match='No such file'):
evoked.plot(noise_cov='nonexistent-cov.fif', time_unit='s')
raw = read_raw_fif(raw_sss_fname)
events = make_fixed_length_events(raw)
epochs = Epochs(raw, events, picks=default_picks)
cov = compute_covariance(epochs)
evoked_sss = epochs.average()
with pytest.warns(RuntimeWarning, match='relative scaling'):
evoked_sss.plot(noise_cov=cov, time_unit='s')
plt.close('all')
@pytest.mark.slowtest
def test_plot_evoked():
"""Test evoked.plot."""
epochs = _get_epochs()
evoked = epochs.average()
assert evoked.proj is False
fig = evoked.plot(proj=True, hline=[1], exclude=[], window_title='foo',
time_unit='s')
amplitudes = _get_amplitudes(fig)
assert len(amplitudes) == 12
assert evoked.proj is False
# Test a click
ax = fig.get_axes()[0]
line = ax.lines[0]
_fake_click(fig, ax,
[line.get_xdata()[0], line.get_ydata()[0]], 'data')
_fake_click(fig, ax,
[ax.get_xlim()[0], ax.get_ylim()[1]], 'data')
# plot with bad channels excluded & spatial_colors & zorder
evoked.plot(exclude='bads', time_unit='s')
# test selective updating of dict keys is working.
evoked.plot(hline=[1], units=dict(mag='femto foo'), time_unit='s')
evoked_delayed_ssp = _get_epochs_delayed_ssp().average()
evoked_delayed_ssp.plot(proj='interactive', time_unit='s')
evoked_delayed_ssp.apply_proj()
pytest.raises(RuntimeError, evoked_delayed_ssp.plot,
proj='interactive', time_unit='s')
evoked_delayed_ssp.info['projs'] = []
pytest.raises(RuntimeError, evoked_delayed_ssp.plot,
proj='interactive', time_unit='s')
pytest.raises(RuntimeError, evoked_delayed_ssp.plot,
proj='interactive', axes='foo', time_unit='s')
plt.close('all')
# test GFP only
evoked.plot(gfp='only', time_unit='s')
pytest.raises(ValueError, evoked.plot, gfp='foo', time_unit='s')
# plot with bad channels excluded, spatial_colors, zorder & pos. layout
evoked.rename_channels({'MEG 0133': 'MEG 0000'})
evoked.plot(exclude=evoked.info['bads'], spatial_colors=True, gfp=True,
zorder='std', time_unit='s')
evoked.plot(exclude=[], spatial_colors=True, zorder='unsorted',
time_unit='s')
pytest.raises(TypeError, evoked.plot, zorder='asdf', time_unit='s')
plt.close('all')
evoked.plot_sensors() # Test plot_sensors
plt.close('all')
evoked.pick_channels(evoked.ch_names[:4])
with catch_logging() as log_file:
evoked.plot(verbose=True, time_unit='s')
assert 'Need more than one' in log_file.getvalue()
@requires_version('matplotlib', '2.2')
def test_constrained_layout():
"""Test that we handle constrained layouts correctly."""
fig, ax = plt.subplots(1, 1, constrained_layout=True)
assert fig.get_constrained_layout()
evoked = mne.read_evokeds(evoked_fname)[0]
evoked.pick(evoked.ch_names[:2])
evoked.plot(axes=ax) # smoke test that it does not break things
assert fig.get_constrained_layout()
plt.close('all')
def _get_amplitudes(fig):
amplitudes = [line.get_ydata() for ax in fig.axes
for line in ax.get_lines()]
amplitudes = np.array(
[line for line in amplitudes if isinstance(line, np.ndarray)])
return amplitudes
@pytest.mark.parametrize('picks, rlims, avg_proj', [
(default_picks, (0.59, 0.61), False), # MEG
(np.arange(340, 360), (0.49, 0.51), True), # EEG
(np.arange(340, 360), (0.78, 0.80), False), # EEG
])
def test_plot_evoked_reconstruct(picks, rlims, avg_proj):
"""Test proj="reconstruct"."""
evoked = _get_epochs(picks=picks).average()
if avg_proj:
evoked.set_eeg_reference(projection=True).apply_proj()
assert len(evoked.info['projs']) == 1
assert evoked.proj is True
else:
assert len(evoked.info['projs']) == 0
assert evoked.proj is False
fig = evoked.plot(proj=True, hline=[1], exclude=[], window_title='foo',
time_unit='s')
amplitudes = _get_amplitudes(fig)
assert len(amplitudes) == len(picks)
assert evoked.proj is avg_proj
fig = evoked.plot(proj='reconstruct', exclude=[])
amplitudes_recon = _get_amplitudes(fig)
if avg_proj is False:
assert_allclose(amplitudes, amplitudes_recon)
proj = compute_proj_evoked(evoked.copy().crop(None, 0).apply_proj())
evoked.add_proj(proj)
assert len(evoked.info['projs']) == 2 if len(picks) == 3 else 4
fig = evoked.plot(proj=True, exclude=[])
amplitudes_proj = _get_amplitudes(fig)
fig = evoked.plot(proj='reconstruct', exclude=[])
amplitudes_recon = _get_amplitudes(fig)
assert len(amplitudes_recon) == len(picks)
norm = np.linalg.norm(amplitudes)
norm_proj = np.linalg.norm(amplitudes_proj)
norm_recon = np.linalg.norm(amplitudes_recon)
r = np.dot(amplitudes_recon.ravel(), amplitudes.ravel()) / (
norm_recon * norm)
assert rlims[0] < r < rlims[1]
assert 1.05 * norm_proj < norm_recon
if not avg_proj:
assert norm_proj < norm * 0.9
cov = read_cov(cov_fname)
with pytest.raises(ValueError, match='Cannot use proj="reconstruct"'):
evoked.plot(noise_cov=cov, proj='reconstruct')
plt.close('all')
def test_plot_evoked_image():
"""Test plot_evoked_image."""
evoked = _get_epochs().average()
evoked.plot_image(proj=True, time_unit='ms')
# fail nicely on NaN
evoked_nan = evoked.copy()
evoked_nan.data[:, 0] = np.nan
pytest.raises(ValueError, evoked_nan.plot)
with np.errstate(invalid='ignore'):
pytest.raises(ValueError, evoked_nan.plot_image)
pytest.raises(ValueError, evoked_nan.plot_joint)
# test mask
evoked.plot_image(picks=[1, 2], mask=evoked.data > 0, time_unit='s')
evoked.plot_image(picks=[1, 2], mask_cmap=None, colorbar=False,
mask=np.ones(evoked.data.shape).astype(bool),
time_unit='s')
with pytest.warns(RuntimeWarning, match='not adding contour'):
evoked.plot_image(picks=[1, 2], mask=None, mask_style="both",
time_unit='s')
with pytest.raises(ValueError, match='must have the same shape'):
evoked.plot_image(mask=evoked.data[1:, 1:] > 0, time_unit='s')
# plot with bad channels excluded
evoked.plot_image(exclude='bads', cmap='interactive', time_unit='s')
plt.close('all')
with pytest.raises(ValueError, match='not unique'):
evoked.plot_image(picks=[0, 0], time_unit='s') # duplicates
ch_names = evoked.ch_names[3:5]
picks = [evoked.ch_names.index(ch) for ch in ch_names]
evoked.plot_image(show_names="all", time_unit='s', picks=picks)
yticklabels = plt.gca().get_yticklabels()
for tick_target, tick_observed in zip(ch_names, yticklabels):
assert tick_target in str(tick_observed)
evoked.plot_image(show_names=True, time_unit='s')
# test groupby
evoked.plot_image(group_by=dict(sel=sel), axes=dict(sel=plt.axes()))
plt.close('all')
for group_by, axes in (("something", dict()), (dict(), "something")):
pytest.raises(ValueError, evoked.plot_image, group_by=group_by,
axes=axes)
def test_plot_white():
"""Test plot_white."""
cov = read_cov(cov_fname)
cov['method'] = 'empirical'
cov['projs'] = [] # avoid warnings
evoked = _get_epochs().average()
# test rank param.
evoked.plot_white(cov, rank={'mag': 101, 'grad': 201}, time_unit='s')
fig = evoked.plot_white(cov, rank={'mag': 101}, time_unit='s') # test rank
evoked.plot_white(cov, rank={'grad': 201}, time_unit='s', axes=fig.axes)
with pytest.raises(ValueError, match=r'must have shape \(3,\), got \(2,'):
evoked.plot_white(cov, axes=fig.axes[:2])
with pytest.raises(ValueError, match='When not using SSS'):
evoked.plot_white(cov, rank={'meg': 306})
evoked.plot_white([cov, cov], time_unit='s')
plt.close('all')
assert 'eeg' not in evoked
fig = plot_evoked_white(evoked, [cov, cov])
assert len(fig.axes) == 2 * 2
axes = np.array(fig.axes).reshape(2, 2)
plot_evoked_white(evoked, [cov, cov], axes=axes)
with pytest.raises(ValueError, match=r'have shape \(2, 2\), got'):
plot_evoked_white(evoked, [cov, cov], axes=axes[:, :1])
# Hack to test plotting of maxfiltered data
evoked_sss = evoked.copy()
sss = dict(sss_info=dict(in_order=80, components=np.arange(80)))
evoked_sss.info['proc_history'] = [dict(max_info=sss)]
evoked_sss.plot_white(cov, rank={'meg': 64})
with pytest.raises(ValueError, match='When using SSS'):
evoked_sss.plot_white(cov, rank={'grad': 201})
evoked_sss.plot_white(cov, time_unit='s')
plt.close('all')
def test_plot_compare_evokeds():
"""Test plot_compare_evokeds."""
evoked = _get_epochs().average()
# test defaults
figs = plot_compare_evokeds(evoked)
assert len(figs) == 2
# test picks, combine, and vlines (1-channel pick also shows sensor inset)
picks = ['MEG 0113', 'mag'] + 2 * [['MEG 0113', 'MEG 0112']] + [[0, 1]]
vlines = [[0.1, 0.2], []] + 3 * ['auto']
combine = [None, 'mean', 'std', None, lambda x: np.min(x, axis=1)]
title = ['MEG 0113', '(mean)', '(std. dev.)', '(GFP)', 'MEG 0112']
for _p, _v, _c, _t in zip(picks, vlines, combine, title):
fig = plot_compare_evokeds(evoked, picks=_p, vlines=_v, combine=_c)
assert fig[0].axes[0].get_title().endswith(_t)
# test passing more than one evoked
red, blue = evoked.copy(), evoked.copy()
red.data *= 1.5
blue.data /= 1.5
evoked_dict = {'aud/l': blue, 'aud/r': red, 'vis': evoked}
huge_dict = {'cond{}'.format(i): ev for i, ev in enumerate([evoked] * 11)}
plot_compare_evokeds(evoked_dict) # dict
plot_compare_evokeds([[red, evoked], [blue, evoked]]) # list of lists
figs = plot_compare_evokeds({'cond': [blue, red, evoked]}) # dict of list
# test that confidence bands are plausible
for fig in figs:
extents = fig.axes[0].collections[0].get_paths()[0].get_extents()
xlim, ylim = extents.get_points().T
assert np.allclose(xlim, evoked.times[[0, -1]])
line = fig.axes[0].lines[0]
xvals = line.get_xdata()
assert np.allclose(xvals, evoked.times)
yvals = line.get_ydata()
assert (yvals < ylim[1]).all()
assert (yvals > ylim[0]).all()
plt.close('all')
# test other CI args
for _ci in (None, False, 0.5,
lambda x: np.stack([x.mean(axis=0) + 1, x.mean(axis=0) - 1])):
plot_compare_evokeds({'cond': [blue, red, evoked]}, ci=_ci)
with pytest.raises(TypeError, match='"ci" must be None, bool, float or'):
plot_compare_evokeds(evoked, ci='foo')
# test sensor inset, legend location, and axis inversion & truncation
plot_compare_evokeds(evoked_dict, invert_y=True, legend='upper left',
show_sensors='center', truncate_xaxis=False,
truncate_yaxis=False)
plot_compare_evokeds(evoked, ylim=dict(mag=(-50, 50)), truncate_yaxis=True)
plt.close('all')
# test styles
plot_compare_evokeds(evoked_dict, colors=['b', 'r', 'g'],
linestyles=[':', '-', '--'], split_legend=True)
style_dict = dict(aud=dict(alpha=0.3), vis=dict(linewidth=3, c='k'))
plot_compare_evokeds(evoked_dict, styles=style_dict, colors={'aud/r': 'r'},
linestyles=dict(vis='dotted'), ci=False)
plot_compare_evokeds(evoked_dict, colors=list(range(3)))
plt.close('all')
# test colormap
cmap = get_cmap('viridis')
plot_compare_evokeds(evoked_dict, cmap=cmap, colors=dict(aud=0.4, vis=0.9))
plot_compare_evokeds(evoked_dict, cmap=cmap, colors=dict(aud=1, vis=2))
plot_compare_evokeds(evoked_dict, cmap=('cmap title', 'inferno'),
linestyles=['-', ':', '--'])
plt.close('all')
# test warnings
with pytest.warns(RuntimeWarning, match='in "picks"; cannot combine'):
plot_compare_evokeds(evoked, picks=[0], combine='median')
plt.close('all')
# test errors
with pytest.raises(TypeError, match='"evokeds" must be a dict, list'):
plot_compare_evokeds('foo')
with pytest.raises(ValueError, match=r'keys in "styles" \(.*\) must '):
plot_compare_evokeds(evoked_dict, styles=dict(foo='foo', bar='bar'))
with pytest.raises(ValueError, match='colors in the default color cycle'):
plot_compare_evokeds(huge_dict, colors=None)
with pytest.raises(TypeError, match='"cmap" is specified, then "colors"'):
plot_compare_evokeds(evoked_dict, cmap='Reds', colors={'aud/l': 'foo',
'aud/r': 'bar',
'vis': 'baz'})
plt.close('all')
for kwargs in [dict(colors=[0, 1]), dict(linestyles=['-', ':'])]:
match = r'but there are only \d* (colors|linestyles). Please specify'
with pytest.raises(ValueError, match=match):
plot_compare_evokeds(evoked_dict, **kwargs)
for kwargs in [dict(colors='foo'), dict(linestyles='foo')]:
match = r'"(colors|linestyles)" must be a dict, list, or None; got '
with pytest.raises(TypeError, match=match):
plot_compare_evokeds(evoked_dict, **kwargs)
for kwargs in [dict(colors=dict(foo='f')), dict(linestyles=dict(foo='f'))]:
match = r'If "(colors|linestyles)" is a dict its keys \(.*\) must '
with pytest.raises(ValueError, match=match):
plot_compare_evokeds(evoked_dict, **kwargs)
for kwargs in [dict(legend='foo'), dict(show_sensors='foo')]:
with pytest.raises(ValueError, match='not a legal MPL loc, please'):
plot_compare_evokeds(evoked_dict, **kwargs)
with pytest.raises(TypeError, match='an instance of list or tuple'):
plot_compare_evokeds(evoked_dict, vlines='foo')
with pytest.raises(ValueError, match='"truncate_yaxis" must be bool or '):
plot_compare_evokeds(evoked_dict, truncate_yaxis='foo')
plt.close('all')
# test axes='topo'
figs = plot_compare_evokeds(evoked_dict, axes='topo', legend=True)
for fig in figs:
assert len(fig.axes[0].lines) == len(evoked_dict)
# test with (fake) CSD data
csd = _get_epochs(picks=np.arange(315, 320)).average() # 5 EEG chs
for entry in csd.info['chs']:
entry['coil_type'] = FIFF.FIFFV_COIL_EEG_CSD
entry['unit'] = FIFF.FIFF_UNIT_V_M2
plot_compare_evokeds(csd, picks='csd', axes='topo')
# old tests
red.info['chs'][0]['loc'][:2] = 0 # test plotting channel at zero
plot_compare_evokeds([red, blue], picks=[0],
ci=lambda x: [x.std(axis=0), -x.std(axis=0)])
plot_compare_evokeds([list(evoked_dict.values())], picks=[0],
ci=_parametric_ci)
# smoke test for tmin >= 0 (from mailing list)
red.crop(0.01, None)
assert len(red.times) > 2
plot_compare_evokeds(red)
# plot a flat channel
red.data = np.zeros_like(red.data)
plot_compare_evokeds(red)
# smoke test for one time point (not useful but should not fail)
red.crop(0.02, 0.02)
assert len(red.times) == 1
plot_compare_evokeds(red)
# now that we've cropped `red`:
with pytest.raises(ValueError, match='not contain the same time instants'):
plot_compare_evokeds(evoked_dict)
plt.close('all')
def test_plot_compare_evokeds_neuromag122():
"""Test topomap plotting."""
evoked = mne.read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
evoked.pick_types(meg='grad')
evoked.pick_channels(evoked.ch_names[:122])
ch_names = ['MEG %03d' % k for k in range(1, 123)]
for c in evoked.info['chs']:
c['coil_type'] = FIFF.FIFFV_COIL_NM_122
evoked.rename_channels({c_old: c_new for (c_old, c_new) in
zip(evoked.ch_names, ch_names)})
mne.viz.plot_compare_evokeds([evoked, evoked])
@testing.requires_testing_data
def test_plot_ctf():
"""Test plotting of CTF evoked."""
ctf_dir = op.join(testing.data_path(download=False), 'CTF')
raw_fname = op.join(ctf_dir, 'testdata_ctf.ds')
raw = mne.io.read_raw_ctf(raw_fname, preload=True)
events = np.array([[200, 0, 1]])
event_id = 1
tmin, tmax = -0.1, 0.5 # start and end of an epoch in sec.
picks = mne.pick_types(raw.info, meg=True, stim=True, eog=True,
ref_meg=True, exclude='bads')[::20]
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, preload=True, decim=10, verbose='error')
evoked = epochs.average()
evoked.plot_joint(times=[0.1])
mne.viz.plot_compare_evokeds([evoked, evoked])
# make sure axes position is "almost" unchanged
# when axes were passed to plot_joint by the user
times = [0.1, 0.2, 0.3]
fig = plt.figure()
# create custom axes for topomaps, colorbar and the timeseries
gs = gridspec.GridSpec(3, 7, hspace=0.5, top=0.8)
topo_axes = [fig.add_subplot(gs[0, idx * 2:(idx + 1) * 2])
for idx in range(len(times))]
topo_axes.append(fig.add_subplot(gs[0, -1]))
ts_axis = fig.add_subplot(gs[1:, 1:-1])
def get_axes_midpoints(axes):
midpoints = list()
for ax in axes[:-1]:
pos = ax.get_position()
midpoints.append([pos.x0 + (pos.width * 0.5),
pos.y0 + (pos.height * 0.5)])
return np.array(midpoints)
midpoints_before = get_axes_midpoints(topo_axes)
evoked.plot_joint(times=times, ts_args={'axes': ts_axis},
topomap_args={'axes': topo_axes}, title=None)
midpoints_after = get_axes_midpoints(topo_axes)
    assert np.linalg.norm(midpoints_before - midpoints_after) < 0.1
run_tests_if_main()
|
import unittest
import numpy as np
import pandas as pd
from fbprophet import Prophet
class TestFbProphet(unittest.TestCase):
def test_fit(self):
train = pd.DataFrame({
'ds': np.array(['2012-05-18', '2012-05-20']),
'y': np.array([38.23, 21.25])
})
forecaster = Prophet(mcmc_samples=1)
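        # adapt_engaged=False is passed through to Stan's sampler to skip
        # warm-up adaptation, keeping this single-sample MCMC fit fast.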
forecaster.fit(train, control={'adapt_engaged': False})
|
import logging
import re
import time
import textwrap
import pytest
from PyQt5.QtCore import pyqtSignal, Qt, QEvent, QObject, QTimer
from PyQt5.QtWidgets import QStyle, QFrame, QSpinBox
from qutebrowser.utils import debug
@debug.log_events
class EventObject(QObject):
pass
def test_log_events(qapp, caplog):
obj = EventObject()
qapp.sendEvent(obj, QEvent(QEvent.User))
qapp.processEvents()
assert caplog.messages == ['Event in test_debug.EventObject: User']
class SignalObject(QObject):
signal1 = pyqtSignal()
signal2 = pyqtSignal(str, str)
def __repr__(self):
"""This is not a nice thing to do, but it makes our tests easier."""
return '<repr>'
@debug.log_signals
class DecoratedSignalObject(SignalObject):
pass
@pytest.fixture(params=[(SignalObject, True), (DecoratedSignalObject, False)])
def signal_obj(request):
klass, wrap = request.param
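    # SignalObject is undecorated, so wrap it at runtime; DecoratedSignalObject
    # already carries the @debug.log_signals class decorator.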
obj = klass()
if wrap:
debug.log_signals(obj)
return obj
def test_log_signals(caplog, signal_obj):
signal_obj.signal1.emit()
signal_obj.signal2.emit('foo', 'bar')
assert caplog.messages == ['Signal in <repr>: signal1()',
"Signal in <repr>: signal2('foo', 'bar')"]
class TestLogTime:
def test_duration(self, caplog):
logger_name = 'qt-tests'
with caplog.at_level(logging.DEBUG, logger_name):
with debug.log_time(logger_name, action='foobar'):
time.sleep(0.1)
assert len(caplog.records) == 1
pattern = re.compile(r'Foobar took ([\d.]*) seconds\.')
match = pattern.fullmatch(caplog.messages[0])
assert match
duration = float(match.group(1))
assert 0 < duration < 30
def test_logger(self, caplog):
"""Test with an explicit logger instead of a name."""
logger_name = 'qt-tests'
with caplog.at_level(logging.DEBUG, logger_name):
with debug.log_time(logging.getLogger(logger_name)):
pass
assert len(caplog.records) == 1
def test_decorator(self, caplog):
logger_name = 'qt-tests'
@debug.log_time(logger_name, action='foo')
def func(arg, *, kwarg):
assert arg == 1
assert kwarg == 2
with caplog.at_level(logging.DEBUG, logger_name):
func(1, kwarg=2)
assert len(caplog.records) == 1
assert caplog.messages[0].startswith('Foo took')
class TestQEnumKey:
def test_metaobj(self):
"""Make sure the classes we use in the tests have a metaobj or not.
        If Qt/PyQt ever changes so that our tests no longer exercise the full
        functionality of qenum_key, this test will tell us.
"""
assert not hasattr(QStyle.PrimitiveElement, 'staticMetaObject')
assert hasattr(QFrame, 'staticMetaObject')
@pytest.mark.parametrize('base, value, klass, expected', [
(QStyle, QStyle.PE_PanelButtonCommand, None, 'PE_PanelButtonCommand'),
(QFrame, QFrame.Sunken, None, 'Sunken'),
(QFrame, 0x0030, QFrame.Shadow, 'Sunken'),
(QFrame, 0x1337, QFrame.Shadow, '0x1337'),
(Qt, Qt.AnchorLeft, None, 'AnchorLeft'),
])
def test_qenum_key(self, base, value, klass, expected):
key = debug.qenum_key(base, value, klass=klass)
assert key == expected
def test_add_base(self):
key = debug.qenum_key(QFrame, QFrame.Sunken, add_base=True)
assert key == 'QFrame.Sunken'
def test_int_noklass(self):
"""Test passing an int without explicit klass given."""
with pytest.raises(TypeError):
debug.qenum_key(QFrame, 42)
class TestQFlagsKey:
"""Tests for qutebrowser.utils.debug.qflags_key.
https://github.com/qutebrowser/qutebrowser/issues/42
"""
fixme = pytest.mark.xfail(reason="See issue #42", raises=AssertionError)
@pytest.mark.parametrize('base, value, klass, expected', [
(Qt, Qt.AlignTop, None, 'AlignTop'),
pytest.param(Qt, Qt.AlignLeft | Qt.AlignTop, None,
'AlignLeft|AlignTop', marks=fixme),
(Qt, Qt.AlignCenter, None, 'AlignHCenter|AlignVCenter'),
pytest.param(Qt, 0x0021, Qt.Alignment, 'AlignLeft|AlignTop',
marks=fixme),
(Qt, 0x1100, Qt.Alignment, '0x0100|0x1000'),
(Qt, Qt.DockWidgetAreas(0), Qt.DockWidgetArea, 'NoDockWidgetArea'),
(Qt, Qt.DockWidgetAreas(0), None, '0x0000'),
])
def test_qflags_key(self, base, value, klass, expected):
flags = debug.qflags_key(base, value, klass=klass)
assert flags == expected
def test_find_flags(self):
"""Test a weird TypeError we get from PyQt.
In exactly this constellation (happening via the "Searching with
--reverse" BDD test), calling QMetaEnum::valueToKey without wrapping
the flags in int() causes a TypeError.
No idea what's happening here exactly...
"""
qwebpage = pytest.importorskip("PyQt5.QtWebKitWidgets").QWebPage
flags = qwebpage.FindWrapsAroundDocument
flags |= qwebpage.FindBackward
flags &= ~qwebpage.FindBackward
flags &= ~qwebpage.FindWrapsAroundDocument
debug.qflags_key(qwebpage,
flags,
klass=qwebpage.FindFlag)
def test_add_base(self):
"""Test with add_base=True."""
flags = debug.qflags_key(Qt, Qt.AlignTop, add_base=True)
assert flags == 'Qt.AlignTop'
def test_int_noklass(self):
"""Test passing an int without explicit klass given."""
with pytest.raises(TypeError):
debug.qflags_key(Qt, 42)
@pytest.mark.parametrize('cls, signal', [
(SignalObject, 'signal1'),
(SignalObject, 'signal2'),
(QTimer, 'timeout'),
(QSpinBox, 'valueChanged'), # Overloaded signal
])
@pytest.mark.parametrize('bound', [True, False])
def test_signal_name(cls, signal, bound):
base = cls() if bound else cls
sig = getattr(base, signal)
assert debug.signal_name(sig) == signal
@pytest.mark.parametrize('args, kwargs, expected', [
([], {}, ''),
(None, None, ''),
(['foo'], None, "'foo'"),
(['foo', 'bar'], None, "'foo', 'bar'"),
(None, {'foo': 'bar'}, "foo='bar'"),
(['foo', 'bar'], {'baz': 'fish'}, "'foo', 'bar', baz='fish'"),
(['x' * 300], None, "'{}".format('x' * 198 + '…')),
], ids=lambda val: str(val)[:20])
def test_format_args(args, kwargs, expected):
assert debug.format_args(args, kwargs) == expected
def func():
pass
@pytest.mark.parametrize('func, args, kwargs, full, expected', [
(func, None, None, False, 'func()'),
(func, [1, 2], None, False, 'func(1, 2)'),
(func, [1, 2], None, True, 'test_debug.func(1, 2)'),
(func, [1, 2], {'foo': 3}, False, 'func(1, 2, foo=3)'),
])
def test_format_call(func, args, kwargs, full, expected):
assert debug.format_call(func, args, kwargs, full) == expected
@pytest.mark.parametrize('args, expected', [
([23, 42], 'fake(23, 42)'),
(['x' * 201], "fake('{}\u2026)".format('x' * 198)),
(['foo\nbar'], r"fake('foo\nbar')"),
], ids=lambda val: str(val)[:20])
def test_dbg_signal(stubs, args, expected):
assert debug.dbg_signal(stubs.FakeSignal(), args) == expected
class TestGetAllObjects:
class Object(QObject):
def __init__(self, name, parent=None):
self._name = name
super().__init__(parent)
def __repr__(self):
return '<{}>'.format(self._name)
def test_get_all_objects(self, stubs, monkeypatch):
# pylint: disable=unused-variable
widgets = [self.Object('Widget 1'), self.Object('Widget 2')]
app = stubs.FakeQApplication(all_widgets=widgets)
monkeypatch.setattr(debug, 'QApplication', app)
root = QObject()
o1 = self.Object('Object 1', root)
o2 = self.Object('Object 2', o1) # noqa: F841
o3 = self.Object('Object 3', root) # noqa: F841
expected = textwrap.dedent("""
Qt widgets - 2 objects:
<Widget 1>
<Widget 2>
Qt objects - 3 objects:
<Object 1>
<Object 2>
<Object 3>
global object registry - 0 objects:
""").rstrip('\n')
assert debug.get_all_objects(start_obj=root) == expected
@pytest.mark.usefixtures('qapp')
def test_get_all_objects_qapp(self):
objects = debug.get_all_objects()
event_dispatcher = '<PyQt5.QtCore.QAbstractEventDispatcher object at'
session_manager = '<PyQt5.QtGui.QSessionManager object at'
assert event_dispatcher in objects or session_manager in objects
|
import asyncio
import logging
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
ATTR_PRESET_MODE,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_NONE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_TEMPERATURE,
CONF_NAME,
EVENT_HOMEASSISTANT_START,
PRECISION_HALVES,
PRECISION_TENTHS,
PRECISION_WHOLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import DOMAIN as HA_DOMAIN, callback
from homeassistant.helpers import condition
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_time_interval,
)
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from . import DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
DEFAULT_TOLERANCE = 0.3
DEFAULT_NAME = "Generic Thermostat"
CONF_HEATER = "heater"
CONF_SENSOR = "target_sensor"
CONF_MIN_TEMP = "min_temp"
CONF_MAX_TEMP = "max_temp"
CONF_TARGET_TEMP = "target_temp"
CONF_AC_MODE = "ac_mode"
CONF_MIN_DUR = "min_cycle_duration"
CONF_COLD_TOLERANCE = "cold_tolerance"
CONF_HOT_TOLERANCE = "hot_tolerance"
CONF_KEEP_ALIVE = "keep_alive"
CONF_INITIAL_HVAC_MODE = "initial_hvac_mode"
CONF_AWAY_TEMP = "away_temp"
CONF_PRECISION = "precision"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HEATER): cv.entity_id,
vol.Required(CONF_SENSOR): cv.entity_id,
vol.Optional(CONF_AC_MODE): cv.boolean,
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
vol.Optional(CONF_MIN_DUR): cv.positive_time_period,
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_COLD_TOLERANCE, default=DEFAULT_TOLERANCE): vol.Coerce(float),
vol.Optional(CONF_HOT_TOLERANCE, default=DEFAULT_TOLERANCE): vol.Coerce(float),
vol.Optional(CONF_TARGET_TEMP): vol.Coerce(float),
vol.Optional(CONF_KEEP_ALIVE): cv.positive_time_period,
vol.Optional(CONF_INITIAL_HVAC_MODE): vol.In(
[HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF]
),
vol.Optional(CONF_AWAY_TEMP): vol.Coerce(float),
vol.Optional(CONF_PRECISION): vol.In(
[PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE]
),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the generic thermostat platform."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
name = config.get(CONF_NAME)
heater_entity_id = config.get(CONF_HEATER)
sensor_entity_id = config.get(CONF_SENSOR)
min_temp = config.get(CONF_MIN_TEMP)
max_temp = config.get(CONF_MAX_TEMP)
target_temp = config.get(CONF_TARGET_TEMP)
ac_mode = config.get(CONF_AC_MODE)
min_cycle_duration = config.get(CONF_MIN_DUR)
cold_tolerance = config.get(CONF_COLD_TOLERANCE)
hot_tolerance = config.get(CONF_HOT_TOLERANCE)
keep_alive = config.get(CONF_KEEP_ALIVE)
initial_hvac_mode = config.get(CONF_INITIAL_HVAC_MODE)
away_temp = config.get(CONF_AWAY_TEMP)
precision = config.get(CONF_PRECISION)
unit = hass.config.units.temperature_unit
async_add_entities(
[
GenericThermostat(
name,
heater_entity_id,
sensor_entity_id,
min_temp,
max_temp,
target_temp,
ac_mode,
min_cycle_duration,
cold_tolerance,
hot_tolerance,
keep_alive,
initial_hvac_mode,
away_temp,
precision,
unit,
)
]
)
class GenericThermostat(ClimateEntity, RestoreEntity):
"""Representation of a Generic Thermostat device."""
def __init__(
self,
name,
heater_entity_id,
sensor_entity_id,
min_temp,
max_temp,
target_temp,
ac_mode,
min_cycle_duration,
cold_tolerance,
hot_tolerance,
keep_alive,
initial_hvac_mode,
away_temp,
precision,
unit,
):
"""Initialize the thermostat."""
self._name = name
self.heater_entity_id = heater_entity_id
self.sensor_entity_id = sensor_entity_id
self.ac_mode = ac_mode
self.min_cycle_duration = min_cycle_duration
self._cold_tolerance = cold_tolerance
self._hot_tolerance = hot_tolerance
self._keep_alive = keep_alive
self._hvac_mode = initial_hvac_mode
self._saved_target_temp = target_temp or away_temp
self._temp_precision = precision
if self.ac_mode:
self._hvac_list = [HVAC_MODE_COOL, HVAC_MODE_OFF]
else:
self._hvac_list = [HVAC_MODE_HEAT, HVAC_MODE_OFF]
self._active = False
self._cur_temp = None
self._temp_lock = asyncio.Lock()
self._min_temp = min_temp
self._max_temp = max_temp
self._target_temp = target_temp
self._unit = unit
self._support_flags = SUPPORT_FLAGS
if away_temp:
self._support_flags = SUPPORT_FLAGS | SUPPORT_PRESET_MODE
self._away_temp = away_temp
self._is_away = False
async def async_added_to_hass(self):
"""Run when entity about to be added."""
await super().async_added_to_hass()
# Add listener
self.async_on_remove(
async_track_state_change_event(
self.hass, [self.sensor_entity_id], self._async_sensor_changed
)
)
self.async_on_remove(
async_track_state_change_event(
self.hass, [self.heater_entity_id], self._async_switch_changed
)
)
if self._keep_alive:
self.async_on_remove(
async_track_time_interval(
self.hass, self._async_control_heating, self._keep_alive
)
)
@callback
def _async_startup(event):
"""Init on startup."""
sensor_state = self.hass.states.get(self.sensor_entity_id)
if sensor_state and sensor_state.state not in (
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
self._async_update_temp(sensor_state)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_startup)
        # Check if we have an old state
old_state = await self.async_get_last_state()
if old_state is not None:
# If we have no initial temperature, restore
if self._target_temp is None:
                # If we have no previously saved temperature, fall back to min/max
if old_state.attributes.get(ATTR_TEMPERATURE) is None:
if self.ac_mode:
self._target_temp = self.max_temp
else:
self._target_temp = self.min_temp
_LOGGER.warning(
"Undefined target temperature, falling back to %s",
self._target_temp,
)
else:
self._target_temp = float(old_state.attributes[ATTR_TEMPERATURE])
if old_state.attributes.get(ATTR_PRESET_MODE) == PRESET_AWAY:
self._is_away = True
if not self._hvac_mode and old_state.state:
self._hvac_mode = old_state.state
else:
# No previous state, try and restore defaults
if self._target_temp is None:
if self.ac_mode:
self._target_temp = self.max_temp
else:
self._target_temp = self.min_temp
_LOGGER.warning(
"No previously saved temperature, setting to %s", self._target_temp
)
# Set default state to off
if not self._hvac_mode:
self._hvac_mode = HVAC_MODE_OFF
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the thermostat."""
return self._name
@property
def precision(self):
"""Return the precision of the system."""
if self._temp_precision is not None:
return self._temp_precision
return super().precision
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._unit
@property
def current_temperature(self):
"""Return the sensor temperature."""
return self._cur_temp
@property
def hvac_mode(self):
"""Return current operation."""
return self._hvac_mode
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
        Needs to be one of CURRENT_HVAC_*.
"""
if self._hvac_mode == HVAC_MODE_OFF:
return CURRENT_HVAC_OFF
if not self._is_device_active:
return CURRENT_HVAC_IDLE
if self.ac_mode:
return CURRENT_HVAC_COOL
return CURRENT_HVAC_HEAT
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temp
@property
def hvac_modes(self):
"""List of available operation modes."""
return self._hvac_list
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
return PRESET_AWAY if self._is_away else PRESET_NONE
@property
def preset_modes(self):
"""Return a list of available preset modes or PRESET_NONE if _away_temp is undefined."""
return [PRESET_NONE, PRESET_AWAY] if self._away_temp else PRESET_NONE
async def async_set_hvac_mode(self, hvac_mode):
"""Set hvac mode."""
if hvac_mode == HVAC_MODE_HEAT:
self._hvac_mode = HVAC_MODE_HEAT
await self._async_control_heating(force=True)
elif hvac_mode == HVAC_MODE_COOL:
self._hvac_mode = HVAC_MODE_COOL
await self._async_control_heating(force=True)
elif hvac_mode == HVAC_MODE_OFF:
self._hvac_mode = HVAC_MODE_OFF
if self._is_device_active:
await self._async_heater_turn_off()
else:
_LOGGER.error("Unrecognized hvac mode: %s", hvac_mode)
return
# Ensure we update the current operation after changing the mode
self.async_write_ha_state()
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
self._target_temp = temperature
await self._async_control_heating(force=True)
self.async_write_ha_state()
@property
def min_temp(self):
"""Return the minimum temperature."""
if self._min_temp is not None:
return self._min_temp
# get default temp from super class
return super().min_temp
@property
def max_temp(self):
"""Return the maximum temperature."""
if self._max_temp is not None:
return self._max_temp
# Get default temp from super class
return super().max_temp
async def _async_sensor_changed(self, event):
"""Handle temperature changes."""
new_state = event.data.get("new_state")
if new_state is None or new_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
return
self._async_update_temp(new_state)
await self._async_control_heating()
self.async_write_ha_state()
@callback
def _async_switch_changed(self, event):
"""Handle heater switch state changes."""
new_state = event.data.get("new_state")
if new_state is None:
return
self.async_write_ha_state()
@callback
def _async_update_temp(self, state):
"""Update thermostat with latest state from sensor."""
try:
self._cur_temp = float(state.state)
except ValueError as ex:
_LOGGER.error("Unable to update from sensor: %s", ex)
async def _async_control_heating(self, time=None, force=False):
"""Check if we need to turn heating on or off."""
async with self._temp_lock:
if not self._active and None not in (self._cur_temp, self._target_temp):
self._active = True
_LOGGER.info(
"Obtained current and target temperature. "
"Generic thermostat active. %s, %s",
self._cur_temp,
self._target_temp,
)
if not self._active or self._hvac_mode == HVAC_MODE_OFF:
return
if not force and time is None:
# If the `force` argument is True, we
# ignore `min_cycle_duration`.
# If the `time` argument is not none, we were invoked for
# keep-alive purposes, and `min_cycle_duration` is irrelevant.
if self.min_cycle_duration:
if self._is_device_active:
current_state = STATE_ON
else:
current_state = HVAC_MODE_OFF
long_enough = condition.state(
self.hass,
self.heater_entity_id,
current_state,
self.min_cycle_duration,
)
if not long_enough:
return
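            # Hysteresis: only switch the device when the measured temperature
            # deviates from the target by at least the configured tolerance.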
too_cold = self._target_temp >= self._cur_temp + self._cold_tolerance
too_hot = self._cur_temp >= self._target_temp + self._hot_tolerance
if self._is_device_active:
if (self.ac_mode and too_cold) or (not self.ac_mode and too_hot):
_LOGGER.info("Turning off heater %s", self.heater_entity_id)
await self._async_heater_turn_off()
elif time is not None:
# The time argument is passed only in keep-alive case
_LOGGER.info(
"Keep-alive - Turning on heater heater %s",
self.heater_entity_id,
)
await self._async_heater_turn_on()
else:
if (self.ac_mode and too_hot) or (not self.ac_mode and too_cold):
_LOGGER.info("Turning on heater %s", self.heater_entity_id)
await self._async_heater_turn_on()
elif time is not None:
# The time argument is passed only in keep-alive case
_LOGGER.info(
"Keep-alive - Turning off heater %s", self.heater_entity_id
)
await self._async_heater_turn_off()
@property
def _is_device_active(self):
"""If the toggleable device is currently active."""
return self.hass.states.is_state(self.heater_entity_id, STATE_ON)
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
async def _async_heater_turn_on(self):
"""Turn heater toggleable device on."""
data = {ATTR_ENTITY_ID: self.heater_entity_id}
await self.hass.services.async_call(
HA_DOMAIN, SERVICE_TURN_ON, data, context=self._context
)
async def _async_heater_turn_off(self):
"""Turn heater toggleable device off."""
data = {ATTR_ENTITY_ID: self.heater_entity_id}
await self.hass.services.async_call(
HA_DOMAIN, SERVICE_TURN_OFF, data, context=self._context
)
async def async_set_preset_mode(self, preset_mode: str):
"""Set new preset mode."""
if preset_mode == PRESET_AWAY and not self._is_away:
self._is_away = True
self._saved_target_temp = self._target_temp
self._target_temp = self._away_temp
await self._async_control_heating(force=True)
elif preset_mode == PRESET_NONE and self._is_away:
self._is_away = False
self._target_temp = self._saved_target_temp
await self._async_control_heating(force=True)
self.async_write_ha_state()
|
import pytest
from . import MOCK_GATEWAY_ID
from tests.async_mock import Mock, patch
from tests.components.light.conftest import mock_light_profiles # noqa
# pylint: disable=protected-access
@pytest.fixture
def mock_gateway_info():
"""Mock get_gateway_info."""
with patch(
"homeassistant.components.tradfri.config_flow.get_gateway_info"
) as gateway_info:
yield gateway_info
@pytest.fixture
def mock_entry_setup():
"""Mock entry setup."""
with patch("homeassistant.components.tradfri.async_setup_entry") as mock_setup:
mock_setup.return_value = True
yield mock_setup
@pytest.fixture(name="gateway_id")
def mock_gateway_id_fixture():
"""Return mock gateway_id."""
return MOCK_GATEWAY_ID
@pytest.fixture(name="mock_gateway")
def mock_gateway_fixture(gateway_id):
"""Mock a Tradfri gateway."""
def get_devices():
"""Return mock devices."""
return gateway.mock_devices
def get_groups():
"""Return mock groups."""
return gateway.mock_groups
gateway_info = Mock(id=gateway_id, firmware_version="1.2.1234")
def get_gateway_info():
"""Return mock gateway info."""
return gateway_info
gateway = Mock(
get_devices=get_devices,
get_groups=get_groups,
get_gateway_info=get_gateway_info,
mock_devices=[],
mock_groups=[],
mock_responses=[],
)
with patch("homeassistant.components.tradfri.Gateway", return_value=gateway), patch(
"homeassistant.components.tradfri.config_flow.Gateway", return_value=gateway
):
yield gateway
@pytest.fixture(name="mock_api")
def mock_api_fixture(mock_gateway):
"""Mock api."""
async def api(command):
"""Mock api function."""
# Store the data for "real" command objects.
if hasattr(command, "_data") and not isinstance(command, Mock):
mock_gateway.mock_responses.append(command._data)
return command
return api
@pytest.fixture(name="api_factory")
def mock_api_factory_fixture(mock_api):
"""Mock pytradfri api factory."""
with patch("homeassistant.components.tradfri.APIFactory", autospec=True) as factory:
factory.init.return_value = factory.return_value
factory.return_value.request = mock_api
yield factory.return_value
|
from __future__ import unicode_literals
import os
import traceback
import mimetypes
from lib.data.data import pyoptions
from lib.fun.decorator import magic
from lib.fun.fun import cool, finalsavepath, finishcounter
def uniqbiner_magic(*args):
"""[dir]"""
args = list(args[0])
if len(args) == 2:
directory = os.path.abspath(args[1])
else:
exit(pyoptions.CRLF + cool.fuchsia("[!] Usage: {} {}".format(args[0], pyoptions.tools_info.get(args[0]))))
filepaths = []
combine_list = []
for rootpath, subdirsname, filenames in os.walk(directory):
filepaths.extend([os.path.abspath(os.path.join(rootpath, _)) for _ in filenames])
if len(filepaths) > 0:
for _ in filepaths:
if mimetypes.guess_type(_)[0] == 'text/plain':
combine_list.append(_)
tempath = finalsavepath("combiner")
try:
with open(tempath, "a") as f:
for onefile in combine_list:
with open(onefile, 'r') as tf:
for line in tf.readlines():
f.write(line.strip() + pyoptions.CRLF)
except Exception as ex:
print(pyoptions.CRLF + cool.red("[-] Combine file failed, Looking: "))
exit(pyoptions.CRLF + traceback.print_exc())
@magic
def uniqbiner():
with open(tempath) as o_f:
for item in o_f.readlines():
yield item.strip()
print("[+] Source of :{0} lines".format(cool.orange(finishcounter(tempath))))
|
import unittest
from pgmpy.base import DAG, PDAG
import pgmpy.tests.help_functions as hf
import networkx as nx
class TestDAGCreation(unittest.TestCase):
def setUp(self):
self.graph = DAG()
def test_class_init_without_data(self):
self.assertIsInstance(self.graph, DAG)
def test_class_init_with_data_string(self):
self.graph = DAG([("a", "b"), ("b", "c")])
self.assertListEqual(sorted(self.graph.nodes()), ["a", "b", "c"])
self.assertListEqual(
hf.recursive_sorted(self.graph.edges()), [["a", "b"], ["b", "c"]]
)
def test_add_node_string(self):
self.graph.add_node("a")
self.assertListEqual(list(self.graph.nodes()), ["a"])
def test_add_node_nonstring(self):
self.graph.add_node(1)
def test_add_nodes_from_string(self):
self.graph.add_nodes_from(["a", "b", "c", "d"])
self.assertListEqual(sorted(self.graph.nodes()), ["a", "b", "c", "d"])
def test_add_nodes_from_non_string(self):
self.graph.add_nodes_from([1, 2, 3, 4])
def test_add_node_weight(self):
self.graph.add_node("weighted_a", 0.3)
self.assertEqual(self.graph.nodes["weighted_a"]["weight"], 0.3)
def test_add_nodes_from_weight(self):
self.graph.add_nodes_from(["weighted_b", "weighted_c"], [0.5, 0.6])
self.assertEqual(self.graph.nodes["weighted_b"]["weight"], 0.5)
self.assertEqual(self.graph.nodes["weighted_c"]["weight"], 0.6)
self.graph.add_nodes_from(["e", "f"])
self.assertEqual(self.graph.nodes["e"]["weight"], None)
self.assertEqual(self.graph.nodes["f"]["weight"], None)
def test_add_edge_string(self):
self.graph.add_edge("d", "e")
self.assertListEqual(sorted(self.graph.nodes()), ["d", "e"])
self.assertListEqual(list(self.graph.edges()), [("d", "e")])
self.graph.add_nodes_from(["a", "b", "c"])
self.graph.add_edge("a", "b")
self.assertListEqual(
hf.recursive_sorted(self.graph.edges()), [["a", "b"], ["d", "e"]]
)
def test_add_edge_nonstring(self):
self.graph.add_edge(1, 2)
def test_add_edges_from_string(self):
self.graph.add_edges_from([("a", "b"), ("b", "c")])
self.assertListEqual(sorted(self.graph.nodes()), ["a", "b", "c"])
self.assertListEqual(
hf.recursive_sorted(self.graph.edges()), [["a", "b"], ["b", "c"]]
)
self.graph.add_nodes_from(["d", "e", "f"])
self.graph.add_edges_from([("d", "e"), ("e", "f")])
self.assertListEqual(sorted(self.graph.nodes()), ["a", "b", "c", "d", "e", "f"])
self.assertListEqual(
hf.recursive_sorted(self.graph.edges()),
hf.recursive_sorted([("a", "b"), ("b", "c"), ("d", "e"), ("e", "f")]),
)
def test_add_edges_from_nonstring(self):
self.graph.add_edges_from([(1, 2), (2, 3)])
def test_add_edge_weight(self):
self.graph.add_edge("a", "b", weight=0.3)
if nx.__version__.startswith("1"):
self.assertEqual(self.graph.edge["a"]["b"]["weight"], 0.3)
else:
self.assertEqual(self.graph.adj["a"]["b"]["weight"], 0.3)
def test_add_edges_from_weight(self):
self.graph.add_edges_from([("b", "c"), ("c", "d")], weights=[0.5, 0.6])
if nx.__version__.startswith("1"):
self.assertEqual(self.graph.edge["b"]["c"]["weight"], 0.5)
self.assertEqual(self.graph.edge["c"]["d"]["weight"], 0.6)
self.graph.add_edges_from([("e", "f")])
self.assertEqual(self.graph.edge["e"]["f"]["weight"], None)
else:
self.assertEqual(self.graph.adj["b"]["c"]["weight"], 0.5)
self.assertEqual(self.graph.adj["c"]["d"]["weight"], 0.6)
self.graph.add_edges_from([("e", "f")])
self.assertEqual(self.graph.adj["e"]["f"]["weight"], None)
def test_update_node_parents_bm_constructor(self):
self.graph = DAG([("a", "b"), ("b", "c")])
self.assertListEqual(list(self.graph.predecessors("a")), [])
self.assertListEqual(list(self.graph.predecessors("b")), ["a"])
self.assertListEqual(list(self.graph.predecessors("c")), ["b"])
def test_update_node_parents(self):
self.graph.add_nodes_from(["a", "b", "c"])
self.graph.add_edges_from([("a", "b"), ("b", "c")])
self.assertListEqual(list(self.graph.predecessors("a")), [])
self.assertListEqual(list(self.graph.predecessors("b")), ["a"])
self.assertListEqual(list(self.graph.predecessors("c")), ["b"])
def test_get_leaves(self):
self.graph.add_edges_from(
[("A", "B"), ("B", "C"), ("B", "D"), ("D", "E"), ("D", "F"), ("A", "G")]
)
self.assertEqual(sorted(self.graph.get_leaves()), sorted(["C", "G", "E", "F"]))
def test_get_roots(self):
self.graph.add_edges_from(
[("A", "B"), ("B", "C"), ("B", "D"), ("D", "E"), ("D", "F"), ("A", "G")]
)
self.assertEqual(["A"], self.graph.get_roots())
self.graph.add_edge("H", "G")
self.assertEqual(sorted(["A", "H"]), sorted(self.graph.get_roots()))
def test_init_with_cycle(self):
self.assertRaises(ValueError, DAG, [("a", "a")])
self.assertRaises(ValueError, DAG, [("a", "b"), ("b", "a")])
self.assertRaises(ValueError, DAG, [("a", "b"), ("b", "c"), ("c", "a")])
def test_get_ancestral_graph(self):
dag = DAG([("A", "C"), ("B", "C"), ("D", "A"), ("D", "B")])
anc_dag = dag.get_ancestral_graph(["A", "B"])
self.assertEqual(set(anc_dag.edges()), set([("D", "A"), ("D", "B")]))
self.assertRaises(ValueError, dag.get_ancestral_graph, ["A", "gibber"])
def tearDown(self):
del self.graph
class TestDAGMoralization(unittest.TestCase):
def setUp(self):
self.graph = DAG()
self.graph.add_edges_from([("diff", "grade"), ("intel", "grade")])
def test_get_parents(self):
self.assertListEqual(sorted(self.graph.get_parents("grade")), ["diff", "intel"])
def test_moralize(self):
moral_graph = self.graph.moralize()
self.assertListEqual(
hf.recursive_sorted(moral_graph.edges()),
[["diff", "grade"], ["diff", "intel"], ["grade", "intel"]],
)
def test_moralize_disconnected(self):
graph_copy = self.graph.copy()
graph_copy.add_node("disconnected")
moral_graph = graph_copy.moralize()
self.assertListEqual(
hf.recursive_sorted(moral_graph.edges()),
[["diff", "grade"], ["diff", "intel"], ["grade", "intel"]],
)
self.assertEqual(
sorted(moral_graph.nodes()), ["diff", "disconnected", "grade", "intel"]
)
def test_get_children(self):
self.assertListEqual(sorted(self.graph.get_children("diff")), ["grade"])
def tearDown(self):
del self.graph
class TestDoOperator(unittest.TestCase):
def setUp(self):
self.graph = DAG()
self.graph.add_edges_from([("X", "A"), ("A", "Y"), ("A", "B")])
def test_do(self):
dag_do_x = self.graph.do("A")
self.assertEqual(set(dag_do_x.nodes()), set(self.graph.nodes()))
self.assertEqual(sorted(list(dag_do_x.edges())), [("A", "B"), ("A", "Y")])
class TestPDAG(unittest.TestCase):
def setUp(self):
self.pdag_mix = PDAG(
directed_ebunch=[("A", "C"), ("D", "C")],
undirected_ebunch=[("B", "A"), ("B", "D")],
)
self.pdag_dir = PDAG(
directed_ebunch=[("A", "B"), ("D", "B"), ("A", "C"), ("D", "C")]
)
self.pdag_undir = PDAG(
undirected_ebunch=[("A", "C"), ("D", "C"), ("B", "A"), ("B", "D")]
)
def test_init_normal(self):
# Mix directed and undirected
directed_edges = [("A", "C"), ("D", "C")]
undirected_edges = [("B", "A"), ("B", "D")]
pdag = PDAG(directed_ebunch=directed_edges, undirected_ebunch=undirected_edges)
expected_edges = {
("A", "C"),
("D", "C"),
("A", "B"),
("B", "A"),
("B", "D"),
("D", "B"),
}
self.assertEqual(set(pdag.edges()), expected_edges)
self.assertEqual(set(pdag.nodes()), {"A", "B", "C", "D"})
self.assertEqual(pdag.directed_edges, set(directed_edges))
self.assertEqual(pdag.undirected_edges, set(undirected_edges))
# Only undirected
undirected_edges = [("A", "C"), ("D", "C"), ("B", "A"), ("B", "D")]
pdag = PDAG(undirected_ebunch=undirected_edges)
expected_edges = {
("A", "C"),
("C", "A"),
("D", "C"),
("C", "D"),
("B", "A"),
("A", "B"),
("B", "D"),
("D", "B"),
}
self.assertEqual(set(pdag.edges()), expected_edges)
self.assertEqual(set(pdag.nodes()), {"A", "B", "C", "D"})
self.assertEqual(pdag.directed_edges, set())
self.assertEqual(pdag.undirected_edges, set(undirected_edges))
# Only directed
directed_edges = [("A", "B"), ("D", "B"), ("A", "C"), ("D", "C")]
pdag = PDAG(directed_ebunch=directed_edges)
self.assertEqual(set(pdag.edges()), set(directed_edges))
self.assertEqual(set(pdag.nodes()), {"A", "B", "C", "D"})
self.assertEqual(pdag.directed_edges, set(directed_edges))
self.assertEqual(pdag.undirected_edges, set())
# TODO: Fix the cycle issue.
# Test cycle
# directed_edges = [('A', 'C')]
# undirected_edges = [('A', 'B'), ('B', 'D'), ('D', 'C')]
# self.assertRaises(ValueError, PDAG, directed_ebunch=directed_edges, undirected_ebunch=undirected_edges)
def test_copy(self):
pdag_copy = self.pdag_mix.copy()
expected_edges = {
("A", "C"),
("D", "C"),
("A", "B"),
("B", "A"),
("B", "D"),
("D", "B"),
}
expected_dir = [("A", "C"), ("D", "C")]
expected_undir = [("B", "A"), ("B", "D")]
self.assertEqual(set(pdag_copy.edges()), expected_edges)
self.assertEqual(set(pdag_copy.nodes()), {"A", "B", "C", "D"})
self.assertEqual(pdag_copy.directed_edges, set([("A", "C"), ("D", "C")]))
self.assertEqual(pdag_copy.undirected_edges, set([("B", "A"), ("B", "D")]))
def test_pdag_to_dag(self):
# PDAG no: 1 Possibility of creating a v-structure
pdag = PDAG(
directed_ebunch=[("A", "B"), ("C", "B")],
undirected_ebunch=[("C", "D"), ("D", "A")],
)
dag = pdag.to_dag()
self.assertTrue(("A", "B") in dag.edges())
self.assertTrue(("C", "B") in dag.edges())
self.assertFalse((("A", "D") in dag.edges()) and (("C", "D") in dag.edges()))
self.assertTrue(len(dag.edges()) == 4)
# PDAG no: 2 No possibility of creation of v-structure.
pdag = PDAG(
directed_ebunch=[("B", "C"), ("A", "C")], undirected_ebunch=[("A", "D")]
)
dag = pdag.to_dag()
self.assertTrue(("B", "C") in dag.edges())
self.assertTrue(("A", "C") in dag.edges())
self.assertTrue((("A", "D") in dag.edges()) or (("D", "A") in dag.edges()))
        # PDAG no: 3 Already existing v-structure, possibility to add another
pdag = PDAG(
directed_ebunch=[("B", "C"), ("A", "C")], undirected_ebunch=[("C", "D")]
)
dag = pdag.to_dag()
expected_edges = {("B", "C"), ("C", "D"), ("A", "C")}
self.assertEqual(expected_edges, set(dag.edges()))
|
from homeassistant.components.bsblan.const import (
CONF_DEVICE_IDENT,
CONF_PASSKEY,
DOMAIN,
)
from homeassistant.const import CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
async def init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
skip_setup: bool = False,
) -> MockConfigEntry:
"""Set up the BSBLan integration in Home Assistant."""
aioclient_mock.post(
"http://example.local:80/1234/JQ?Parameter=6224,6225,6226",
params={"Parameter": "6224,6225,6226"},
text=load_fixture("bsblan/info.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
entry = MockConfigEntry(
domain=DOMAIN,
unique_id="RVS21.831F/127",
data={
CONF_HOST: "example.local",
CONF_PASSKEY: "1234",
CONF_PORT: 80,
CONF_DEVICE_IDENT: "RVS21.831F/127",
},
)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import socket
import telnetlib
import re
import diamond.collector
from distutils.version import LooseVersion
class AerospikeCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(AerospikeCollector, self).get_default_config_help()
config_help.update({
'req_host': 'Hostname',
'req_port': 'Port',
'statistics': 'Collect statistics',
'latency': 'Collect latency metrics',
'throughput': 'Collect throughput metrics',
'namespaces': 'Collect per-namespace metrics',
'namespaces_whitelist':
'List of namespaces to collect metrics' +
' from (default is to collect from all)',
'statistics_whitelist':
'List of global statistics values to collect',
'namespace_statistics_whitelist':
'List of per-namespace statistics values to collect',
'path': 'Metric path',
})
return config_help
def get_default_config(self):
default_config = super(AerospikeCollector, self).get_default_config()
default_config['req_host'] = 'localhost'
default_config['req_port'] = 3003
default_config['statistics'] = True
default_config['latency'] = True
default_config['throughput'] = True
default_config['namespaces'] = True
default_config['namespaces_whitelist'] = False
default_config['statistics_whitelist'] = [
# 2.7 Stats
'total-bytes-memory',
'total-bytes-disk',
'used-bytes-memory',
'used-bytes-disk',
'free-pct-memory',
'free-pct-disk',
'data-used-bytes-memory',
'cluster_size',
'objects',
'client_connections',
'index-used-bytes-memory',
# 3.9 Stats
'objects',
'cluster_size',
'system_free_mem_pct',
'client_connections',
'scans_active',
]
default_config['namespace_statistics_whitelist'] = [
# 2.7 Stats
'objects',
'evicted-objects',
'expired-objects',
'used-bytes-memory',
'data-used-bytes-memory',
'index-used-bytes-memory',
'used-bytes-disk',
'memory-size',
'total-bytes-memory',
'total-bytes-disk',
'migrate-tx-partitions-initial',
'migrate-tx-partitions-remaining',
'migrate-rx-partitions-initial',
'migrate-rx-partitions-remaining',
'available_pct',
# 3.9 Stats
'client_delete_error',
'client_delete_success',
'client_read_error',
'client_read_success',
'client_write_error',
'client_write_success',
'device_available_pct',
'device_free_pct',
'device_total_bytes',
'device_used_bytes',
'expired_objects',
'evicted_objects',
'memory-size',
'memory_free_pct',
'memory_used_bytes',
'memory_used_data_bytes',
'memory_used_index_bytes',
'memory_used_sindex_bytes',
'migrate_rx_partitions_active',
'migrate_rx_partitions_initial',
'migrate_rx_partitions_remaining',
'migrate_tx_partitions_active',
'migrate_tx_partitions_initial',
'migrate_tx_partitions_remaining',
'objects',
]
default_config['path'] = 'aerospike'
return default_config
def collect_latency(self, data):
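        # Each latency histogram row reports operations per second followed by
        # the percentage of operations slower than 1 ms, 8 ms and 64 ms.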
fields = ['ops', '1ms', '8ms', '64ms']
if self.config['dialect'] >= 39:
# Get "header" section of each histogram
labels = data.split(';')[::2]
# Get contents of histogram
datasets = data.split(';')[1::2]
for i, label in enumerate(labels):
# Extract namespace and histogram type from header label
                match = re.match(r'\{(\w+)\}-(\w+)', label)
if match:
namespace = match.group(1)
histogram = match.group(2)
# Create metrics dict for the namespace/histogram pair
dataset = datasets[i].split(',')[1:]
metrics = dict(zip(fields, dataset))
# Publish a metric for each field in the histogram
for field in fields:
self.publish_gauge('latency.%s.%s.%s' %
(namespace, histogram, field),
metrics[field])
elif self.config['dialect'] < 39:
# Get individual data lines (every other output line is data)
raw_lines = {}
(
raw_lines['reads'],
raw_lines['writes_master'],
raw_lines['proxy'],
raw_lines['udf'],
raw_lines['query'],
) = data.split(';')[1::2]
# Collapse each type of data line into a dict of metrics
for op_type in raw_lines.keys():
metrics = dict(zip(fields, raw_lines[op_type].split(',')[1:]))
# publish each metric
for metric in metrics.keys():
self.publish_gauge('latency.%s.%s' %
(op_type, metric), metrics[metric])
def collect_statistics(self, data):
# Only gather whitelisted metrics
gather_stats = self.config['statistics_whitelist']
# Break data into k/v pairs
for statline in data.split(';'):
(stat, value) = statline.split('=')
if stat in gather_stats:
self.publish_gauge('statistics.%s' % stat, value)
def collect_throughput(self, data):
if self.config['dialect'] >= 39:
# Get "header" section of each histogram
labels = data.split(';')[::2]
# Get contents of histogram
datasets = data.split(';')[1::2]
for i, label in enumerate(labels):
# Extract namespace and histogram type from header label
                match = re.match(r'\{(\w+)\}-(\w+)', label)
if match:
namespace = match.group(1)
histogram = match.group(2)
                    # Extract metric from dataset
metric = datasets[i].split(',')[1]
self.publish_gauge('throughput.%s.%s' %
(namespace, histogram), metric)
elif self.config['dialect'] < 39:
# Get individual data lines (every other output line is data)
raw_lines = {}
(
raw_lines['reads'],
raw_lines['writes_master'],
raw_lines['proxy'],
raw_lines['udf'],
raw_lines['query'],
) = data.split(';')[1::2]
for op_type in raw_lines.keys():
metric = raw_lines[op_type].split(',')[1]
self.publish_gauge('throughput.%s' % op_type, metric)
def collect_namespace(self, namespace, data):
# Only gather whitelisted metrics
gather_stats = self.config['namespace_statistics_whitelist']
# Break data into k/v pairs
for statline in data.split(';'):
(stat, value) = statline.split('=')
if stat in gather_stats:
self.publish_gauge('namespace.%s.%s' % (namespace, stat), value)
def collect(self):
self.log.debug('Connecting to %s:%s' %
(self.config['req_host'], self.config['req_port']))
t = telnetlib.Telnet(self.config['req_host'], self.config['req_port'])
try:
# Detect the version of aerospike for later
self.log.debug('Checking aerospike version')
t.write('version\n')
version = t.read_until('\n', 1)
if LooseVersion(version) >= LooseVersion("3.9"):
self.config['dialect'] = 39
else:
self.config['dialect'] = 27
self.log.debug('Got version %s and selecting dialect %s' %
(version, self.config['dialect']))
# Only collect metrics we're asked for
if (self.config['latency']):
self.log.debug('Polling for latency')
t.write('latency:\n')
latency = t.read_until('\n', 1)
self.collect_latency(latency)
if (self.config['statistics']):
self.log.debug('Polling for statistics')
t.write('statistics\n')
statistics = t.read_until('\n', 1)
self.collect_statistics(statistics)
if (self.config['throughput']):
self.log.debug('Polling for throughput')
t.write('throughput:\n')
throughput = t.read_until('\n', 1)
self.collect_throughput(throughput)
if (self.config['namespaces']):
self.log.debug('Polling for namespaces')
t.write('namespaces\n')
namespaces = t.read_until('\n', 1).strip()
for namespace in namespaces.split(';'):
self.log.debug('Polling namespace: %s' % namespace)
# Skip namespaces not whitelisted if there is a whitelist
if (self.config['namespaces_whitelist'] and
namespace not in self.config['namespaces_whitelist']):
self.log.debug('Skipping non-whitelisted namespace: %s'
% namespace)
continue
t.write('namespace/%s\n' % namespace)
namespace_data = t.read_until('\n', 1)
self.collect_namespace(namespace, namespace_data)
t.close()
except (socket.error, EOFError) as e:
self.log.error("Unable to retrieve aerospike data: %s" % e)
except Exception as e:
self.log.error("Unknown failure in aerospike collection: %s" % e)
|
import os
from django.db.models.signals import m2m_changed, post_delete, post_save
from django.dispatch import receiver
from weblate.trans.models._conf import WeblateConf
from weblate.trans.models.agreement import ContributorAgreement
from weblate.trans.models.alert import Alert
from weblate.trans.models.announcement import Announcement
from weblate.trans.models.change import Change
from weblate.trans.models.comment import Comment
from weblate.trans.models.component import Component
from weblate.trans.models.componentlist import AutoComponentList, ComponentList
from weblate.trans.models.label import Label
from weblate.trans.models.project import Project
from weblate.trans.models.suggestion import Suggestion, Vote
from weblate.trans.models.translation import Translation
from weblate.trans.models.unit import Unit
from weblate.trans.models.variant import Variant
from weblate.trans.signals import user_pre_delete
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.files import remove_tree
__all__ = [
"Project",
"Component",
"Translation",
"Unit",
"Suggestion",
"Comment",
"Vote",
"Change",
"Announcement",
"ComponentList",
"WeblateConf",
"ContributorAgreement",
"Alert",
"Variant",
"Label",
]
def delete_object_dir(instance):
"""Remove path if it exists."""
project_path = instance.full_path
if os.path.exists(project_path):
remove_tree(project_path)
@receiver(post_delete, sender=Project)
def project_post_delete(sender, instance, **kwargs):
"""Handler to delete (sub)project directory on project deletion."""
# Invalidate stats
instance.stats.invalidate()
# Remove directory
delete_object_dir(instance)
@receiver(post_delete, sender=Component)
def component_post_delete(sender, instance, **kwargs):
"""Handler to delete (sub)project directory on project deletion."""
# Invalidate stats
instance.stats.invalidate()
# Do not delete linked components
if not instance.is_repo_link:
delete_object_dir(instance)
@receiver(post_save, sender=Unit)
@disable_for_loaddata
def update_source(sender, instance, **kwargs):
"""Update unit priority or checks based on source change."""
if not instance.is_source:
return
# Run checks, update state and priority if flags changed or running bulk edit
if (
instance.old_unit.extra_flags != instance.extra_flags
or instance.state != instance.old_unit.state
):
# We can not exclude current unit here as we need to trigger the updates below
for unit in instance.unit_set.prefetch_full():
unit.update_state()
unit.update_priority()
unit.run_checks()
if not instance.is_bulk_edit and not instance.is_batch_update:
instance.translation.component.invalidate_stats_deep()
@receiver(m2m_changed, sender=Unit.labels.through)
@disable_for_loaddata
def change_labels(sender, instance, action, pk_set, **kwargs):
"""Update unit labels."""
if (
action not in ("post_add", "post_remove", "post_clear")
or (action != "post_clear" and not pk_set)
or not instance.is_source
):
return
if not instance.is_bulk_edit:
instance.translation.component.invalidate_stats_deep()
@receiver(user_pre_delete)
def user_commit_pending(sender, instance, **kwargs):
"""Commit pending changes for user on account removal."""
# All user changes
all_changes = Change.objects.last_changes(instance).filter(user=instance)
# Filter where project is active
user_translation_ids = all_changes.values_list("translation", flat=True).distinct()
# Commit changes where user is last author
for translation in Translation.objects.filter(pk__in=user_translation_ids):
try:
last_author = translation.change_set.content()[0].author
except IndexError:
# Non content changes
continue
if last_author == instance:
translation.commit_pending("user delete", None)
@receiver(m2m_changed, sender=ComponentList.components.through)
@disable_for_loaddata
def change_componentlist(sender, instance, action, **kwargs):
if not action.startswith("post_"):
return
instance.stats.invalidate()
@receiver(post_save, sender=AutoComponentList)
@disable_for_loaddata
def auto_componentlist(sender, instance, **kwargs):
for component in Component.objects.iterator():
instance.check_match(component)
@receiver(post_save, sender=Project)
@disable_for_loaddata
def auto_project_componentlist(sender, instance, **kwargs):
for component in instance.component_set.iterator():
auto_component_list(sender, component)
@receiver(post_save, sender=Component)
@disable_for_loaddata
def auto_component_list(sender, instance, **kwargs):
for auto in AutoComponentList.objects.iterator():
auto.check_match(instance)
@receiver(post_save, sender=Component)
@disable_for_loaddata
def post_save_update_checks(sender, instance, **kwargs):
from weblate.trans.tasks import update_checks
if instance.old_component.check_flags == instance.check_flags:
return
update_checks.delay(instance.pk)
@receiver(post_delete, sender=Component)
@disable_for_loaddata
def post_delete_linked(sender, instance, **kwargs):
# When removing project, the linked component might be already deleted now
try:
if instance.linked_component:
instance.linked_component.update_link_alerts(noupdate=True)
except Component.DoesNotExist:
pass
@receiver(post_save, sender=Comment)
@receiver(post_save, sender=Suggestion)
@receiver(post_delete, sender=Suggestion)
@disable_for_loaddata
def stats_invalidate(sender, instance, **kwargs):
"""Invalidate stats on new comment or suggestion."""
# Invalidate stats counts
instance.unit.translation.invalidate_cache()
# Invalidate unit cached properties
for key in ["all_comments", "suggestions"]:
if key in instance.__dict__:
del instance.__dict__[key]
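# Note: `disable_for_loaddata`, used on the receivers above, is imported from
# elsewhere in the project. A hypothetical sketch of what such a guard
# typically does (Django passes raw=True to post_save during fixture loading),
# shown only for illustration:
#
#   from functools import wraps
#
#   def disable_for_loaddata(signal_handler):
#       @wraps(signal_handler)
#       def wrapper(*args, **kwargs):
#           if kwargs.get("raw"):
#               return  # loaddata insert; skip expensive handlers
#           return signal_handler(*args, **kwargs)
#       return wrapper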
|
from django.template.loader import select_template
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from django.utils.module_loading import import_string
from entangled.forms import EntangledModelFormMixin
from cms.plugin_pool import plugin_pool
from cmsplugin_cascade.link.cms_plugins import LinkElementMixin, LinkPluginBase
from cmsplugin_cascade.link.forms import LinkForm
from djng.forms.fields import ChoiceField
from shop.conf import app_settings
AUTH_FORM_TYPES = [
('login', _("Login Form")),
('login-reset-request', _("Login & Reset Request"), 'login'),
('logout', _("Logout Form")),
('login-logout', _("Shared Login/Logout Form"), 'login'),
('password-reset-request', _("Request Password Reset")),
('password-reset-confirm', _("Confirm Password Reset")),
('password-change', _("Change Password Form")),
('register-user', _("Register User"), app_settings.SHOP_CASCADE_FORMS['RegisterUserForm']),
('continue-as-guest', _("Continue as guest")),
]
class ShopAuthFormMixin(EntangledModelFormMixin):
form_type = ChoiceField(
label=_("Rendered Form"),
choices=[ft[:2] for ft in AUTH_FORM_TYPES],
help_text=_("Select the appropriate form for various authentication purposes."),
)
class Meta:
entangled_fields = {'glossary': ['form_type']}
class ShopAuthForm(LinkForm, ShopAuthFormMixin):
LINK_TYPE_CHOICES = [
('RELOAD_PAGE', _("Reload Page")),
('cmspage', _("CMS Page")),
('DO_NOTHING', _("Do Nothing")),
]
class ShopAuthenticationPlugin(LinkPluginBase):
"""
A placeholder plugin which provides various authentication forms, such as login-, logout-,
    register-, and other forms. They can be added to any placeholder using the Cascade framework.
"""
name = _("Authentication Forms")
module = "Shop"
parent_classes = ['BootstrapColumnPlugin']
model_mixins = (LinkElementMixin,)
form = ShopAuthForm
cache = False
@classmethod
def get_identifier(cls, instance):
identifier = super().get_identifier(instance)
content = dict(ft[:2] for ft in AUTH_FORM_TYPES).get(instance.glossary.get('form_type'), _("unknown"))
return format_html('{0}{1}', identifier, content)
def get_render_template(self, context, instance, placeholder):
form_type = instance.glossary.get('form_type')
template_names = [
'{}/auth/{}.html'.format(app_settings.APP_LABEL, form_type),
'shop/auth/{}.html'.format(form_type),
'shop/auth/form-not-found.html',
]
return select_template(template_names)
def render(self, context, instance, placeholder):
"""
Return the context to render a DialogFormPlugin
"""
form_type = instance.glossary.get('form_type')
if form_type:
try:
                # prevent a malicious database entry from importing an ineligible file
form_type = AUTH_FORM_TYPES[[ft[0] for ft in AUTH_FORM_TYPES].index(form_type)]
FormClass = import_string(form_type[2])
except ValueError:
context['form_name'] = 'not_found_form'
except IndexError:
form_name = form_type[0].replace('-', '_')
context['form_name'] = '{0}_form'.format(form_name)
except ImportError:
form_name = form_type[2]
context['form_name'] = '{0}_form'.format(form_name)
else:
context['form_name'] = FormClass.form_name
context[FormClass.form_name] = FormClass()
context['proceed_with'] = instance.link
        return super().render(context, instance, placeholder)
plugin_pool.register_plugin(ShopAuthenticationPlugin)
|
from datetime import timedelta
import logging
from homeassistant.const import DEVICE_CLASS_TIMESTAMP
import homeassistant.util.dt as dt_util
from .const import DOMAIN
from .entity import HomeConnectEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Home Connect sensor."""
def get_entities():
"""Get a list of entities."""
entities = []
hc_api = hass.data[DOMAIN][config_entry.entry_id]
for device_dict in hc_api.devices:
entity_dicts = device_dict.get("entities", {}).get("sensor", [])
entities += [HomeConnectSensor(**d) for d in entity_dicts]
return entities
async_add_entities(await hass.async_add_executor_job(get_entities), True)
class HomeConnectSensor(HomeConnectEntity):
"""Sensor class for Home Connect."""
def __init__(self, device, desc, key, unit, icon, device_class, sign=1):
"""Initialize the entity."""
super().__init__(device, desc)
self._state = None
self._key = key
self._unit = unit
self._icon = icon
self._device_class = device_class
self._sign = sign
@property
def state(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def available(self):
"""Return true if the sensor is available."""
return self._state is not None
async def async_update(self):
"""Update the sensos status."""
status = self.device.appliance.status
if self._key not in status:
self._state = None
else:
if self.device_class == DEVICE_CLASS_TIMESTAMP:
if "value" not in status[self._key]:
self._state = None
elif (
self._state is not None
and self._sign == 1
and dt_util.parse_datetime(self._state) < dt_util.utcnow()
):
# if the date is supposed to be in the future but we're
# already past it, set state to None.
self._state = None
else:
seconds = self._sign * float(status[self._key]["value"])
self._state = (
dt_util.utcnow() + timedelta(seconds=seconds)
).isoformat()
else:
self._state = status[self._key].get("value")
_LOGGER.debug("Updated, new state: %s", self._state)
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def device_class(self):
"""Return the device class."""
return self._device_class
|
from . import toolkit
from .app import app
from .lib import config
from .lib import index
from .lib import mirroring
import flask
cfg = config.load()
# Enable the search index
INDEX = index.load(cfg.search_backend.lower())
@app.route('/v1/search', methods=['GET'])
@mirroring.source_lookup(index_route=True, merge_results=True)
def get_search():
search_term = flask.request.args.get('q', '')
results = INDEX.results(search_term=search_term)
return toolkit.response({
'query': search_term,
'num_results': len(results),
'results': results,
})
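# Illustrative response shape of the endpoint above (sketch, not a doctest):
#
#   GET /v1/search?q=ubuntu  ->
#   {"query": "ubuntu", "num_results": 2, "results": [...]}
#
# A minimal check with Flask's test client, assuming `app` is the application
# imported above and a search backend is configured:
#
#   with app.test_client() as client:
#       payload = client.get('/v1/search?q=ubuntu').get_json()
#       assert set(payload) == {'query', 'num_results', 'results'}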
|
import asyncio
import datetime
import fcntl
import logging
import math
import os
import random
import time
import typing
from collections import Counter
from contextlib import contextmanager
from typing import Any
from typing import Callable
from typing import Collection
from typing import Dict
from typing import Iterator
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Set
from typing import TypeVar
import a_sync
from kazoo.client import KazooClient
from kazoo.exceptions import LockTimeout
from marathon.models import MarathonApp
from marathon.models import MarathonTask
from mypy_extensions import Arg
from mypy_extensions import DefaultArg
from mypy_extensions import TypedDict
from requests.exceptions import ConnectionError
from requests.exceptions import RequestException
from paasta_tools import marathon_tools
from paasta_tools.long_running_service_tools import BounceMethodConfigDict
from paasta_tools.smartstack_tools import get_registered_marathon_tasks
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import SystemPaastaConfig
from paasta_tools.utils import timeout
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
logging.getLogger("requests").setLevel(logging.WARNING)
ZK_LOCK_CONNECT_TIMEOUT_S = 10.0 # seconds to wait to connect to zookeeper
ZK_LOCK_PATH = "/bounce"
WAIT_CREATE_S = 3
WAIT_DELETE_S = 5
BounceMethodResult = TypedDict(
"BounceMethodResult", {"create_app": bool, "tasks_to_drain": Set}
)
BounceMethod = Callable[
[
Arg(BounceMethodConfigDict, "new_config"),
Arg(bool, "new_app_running"),
Arg(Collection, "happy_new_tasks"),
Arg(Sequence, "old_non_draining_tasks"),
DefaultArg(float, "margin_factor"),
],
BounceMethodResult,
]
_bounce_method_funcs: Dict[str, BounceMethod] = {}
def register_bounce_method(name: str) -> Callable[[BounceMethod], BounceMethod]:
"""Returns a decorator that registers that bounce function at a given name
so get_bounce_method_func can find it."""
def outer(bounce_func: BounceMethod):
_bounce_method_funcs[name] = bounce_func
return bounce_func
return outer
def get_bounce_method_func(name) -> BounceMethod:
return _bounce_method_funcs[name]
def list_bounce_methods() -> Collection[str]:
return _bounce_method_funcs.keys()
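# Illustrative use of the registry above (sketch only, not part of the module):
# a bounce method is registered under a name and later looked up by that name.
#
#   @register_bounce_method("noop")
#   def noop_bounce(new_config, new_app_running, happy_new_tasks,
#                   old_non_draining_tasks, margin_factor=1.0):
#       return {"create_app": False, "tasks_to_drain": set()}
#
#   assert "noop" in list_bounce_methods()
#   assert get_bounce_method_func("noop") is noop_bounce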
class LockHeldException(Exception):
pass
@contextmanager
def bounce_lock(name):
"""Acquire a bounce lockfile for the name given. The name should generally
be the service namespace being bounced.
This is a contextmanager. Please use it via 'with bounce_lock(name):'.
:param name: The lock name to acquire"""
lockfile = "/var/lock/%s.lock" % name
with open(lockfile, "w") as fd:
remove = False
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
remove = True
yield
except IOError:
raise LockHeldException("Service %s is already being bounced!" % name)
finally:
if remove:
os.remove(lockfile)
@contextmanager
def bounce_lock_zookeeper(
name: str, system_paasta_config: Optional[SystemPaastaConfig] = None
) -> Iterator:
"""Acquire a bounce lock in zookeeper for the name given. The name should
generally be the service namespace being bounced.
This is a contextmanager. Please use it via 'with bounce_lock(name):'.
:param name: The lock name to acquire"""
if system_paasta_config is None:
system_paasta_config = load_system_paasta_config()
zk = KazooClient(
hosts=system_paasta_config.get_zk_hosts(), timeout=ZK_LOCK_CONNECT_TIMEOUT_S,
)
zk.start()
lock = zk.Lock(f"{ZK_LOCK_PATH}/{name}")
try:
lock.acquire(timeout=1) # timeout=0 throws some other strange exception
yield
except LockTimeout:
raise LockHeldException("Service %s is already being bounced!" % name)
else:
lock.release()
finally:
zk.stop()
zk.close()
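# Illustrative usage of the lock helpers above (sketch only): both are context
# managers and raise LockHeldException when another bounce already holds the lock.
#
#   try:
#       with bounce_lock_zookeeper("myservice.main"):
#           ...  # perform the bounce while holding the lock
#   except LockHeldException:
#       log.warning("Another bounce for this service is in progress")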
def wait_for_create(app_id, client):
"""Wait for the specified app_id to be listed in marathon.
Waits WAIT_CREATE_S seconds between calls to list_apps.
:param app_id: The app_id to ensure creation for
:param client: A MarathonClient object"""
while marathon_tools.is_app_id_running(app_id, client) is False:
log.info("Waiting for %s to be created in marathon..", app_id)
time.sleep(WAIT_CREATE_S)
@timeout(60, use_signals=False)
def create_marathon_app(app_id, config, client):
"""Create a new marathon application with a given
config and marathon client object.
:param config: The marathon configuration to be deployed
:param client: A MarathonClient object"""
client.create_app(app_id, MarathonApp(**config))
wait_for_create(app_id, client)
def wait_for_delete(app_id, client):
"""Wait for the specified app_id to not be listed in marathon
    anymore. Waits WAIT_DELETE_S seconds between checks.
:param app_id: The app_id to check for deletion
:param client: A MarathonClient object"""
while marathon_tools.is_app_id_running(app_id, client) is True:
log.info("Waiting for %s to be deleted from marathon...", app_id)
time.sleep(WAIT_DELETE_S)
@timeout(60, use_signals=False)
def delete_marathon_app(app_id, client):
"""Delete a new marathon application with a given
app_id and marathon client object.
:param app_id: The marathon app id to be deleted
:param client: A MarathonClient object"""
# Scale app to 0 first to work around
# https://github.com/mesosphere/marathon/issues/725
client.scale_app(app_id, instances=0, force=True)
time.sleep(1)
client.delete_app(app_id, force=True)
wait_for_delete(app_id, client)
def kill_old_ids(old_ids, client):
"""Kill old marathon job ids. Skips anything that doesn't exist or
otherwise raises an exception. If this doesn't kill something due
    to an exception, that's okay; it'll get cleaned up later.
:param old_ids: A list of old job/app ids to kill
:param client: A marathon.MarathonClient object"""
for app in old_ids:
log.info("Killing %s", app)
delete_marathon_app(app, client)
def filter_tasks_in_smartstack(
tasks: Collection[MarathonTask],
service: str,
nerve_ns: str,
system_paasta_config: SystemPaastaConfig,
max_hosts_to_query: int = 20,
haproxy_min_fraction_up: float = 1.0,
) -> List[MarathonTask]:
all_hosts = list({t.host for t in tasks})
random.shuffle(all_hosts)
    # We select 20 random hosts here. This should be enough most of the time: for services discovered at the habitat
# level, in clusters with 2 habitats, there's about a 2 * (1/2) ** 20 ~= 2-per-million chance of not picking at
# least one host in each habitat. For clusters with 3 habitats, the odds are about 3 * (2/3) ** 20 ~= 1-in-1000.
# The only real effect would be that the bounce would decide to kill fewer old tasks, causing us to take another
# round. If this becomes a problem, we can try to select tasks more intelligently.
selected_hosts = all_hosts[:max_hosts_to_query]
registered_task_count: typing.Counter[MarathonTask] = Counter()
async def get_registered_tasks_on_host(host):
try:
registered_task_count.update(
set(
await a_sync.to_async(get_registered_marathon_tasks)(
synapse_host=host,
synapse_port=system_paasta_config.get_synapse_port(),
synapse_haproxy_url_format=system_paasta_config.get_synapse_haproxy_url_format(),
service=compose_job_id(service, nerve_ns),
marathon_tasks=tasks,
)
)
)
except (ConnectionError, RequestException):
log.warning(
f"Failed to connect to smartstack on {host}; this may cause us to consider tasks unhealthy."
)
if selected_hosts:
a_sync.block(
asyncio.wait,
[
asyncio.ensure_future(get_registered_tasks_on_host(host))
for host in selected_hosts
],
timeout=30,
)
threshold = len(selected_hosts) * haproxy_min_fraction_up
return [t for t in tasks if registered_task_count[t] >= threshold]
def get_happy_tasks(
app: MarathonApp,
service: str,
nerve_ns: str,
system_paasta_config: SystemPaastaConfig,
min_task_uptime: Optional[float] = None,
check_haproxy: bool = False,
haproxy_min_fraction_up: float = 1.0,
) -> List[MarathonTask]:
"""Given a MarathonApp object, return the subset of tasks which are considered healthy.
With the default options, this returns tasks where at least one of the defined Marathon healthchecks passes.
For it to do anything interesting, set min_task_uptime or check_haproxy.
:param app: A MarathonApp object.
:param service: The name of the service.
:param nerve_ns: The nerve namespace
:param min_task_uptime: Minimum number of seconds that a task must be running before we consider it healthy. Useful
if tasks take a while to start up.
:param check_haproxy: Whether to check the local haproxy to make sure this task has been registered and discovered.
"""
tasks = app.tasks
happy = []
now = datetime.datetime.now(datetime.timezone.utc)
for task in tasks:
if task.started_at is None:
# Can't be healthy if it hasn't started
continue
if min_task_uptime is not None:
if (now - task.started_at).total_seconds() < min_task_uptime:
continue
# if there are healthchecks defined for the app but none have executed yet, then task is unhappy
# BUT if the task is "old" and Marathon forgot about its healthcheck due to a leader election,
# treat it as happy
if (
len(app.health_checks) > 0
and len(task.health_check_results) == 0
and not marathon_tools.is_old_task_missing_healthchecks(task, app)
):
continue
# if there are health check results, check if at least one healthcheck is passing
if not marathon_tools.is_task_healthy(
task, require_all=False, default_healthy=True
):
continue
happy.append(task)
if check_haproxy:
return filter_tasks_in_smartstack(
happy,
service,
nerve_ns,
system_paasta_config,
haproxy_min_fraction_up=haproxy_min_fraction_up,
)
else:
return happy
_Flatten_Tasks_T = TypeVar("_Flatten_Tasks_T")
def flatten_tasks(
tasks_by_app_id: Mapping[Any, Collection[_Flatten_Tasks_T]]
) -> Set[_Flatten_Tasks_T]:
"""Takes a dictionary of app_id -> set([task, task, ...]) and returns the union of all the task sets.
:param tasks_by_app_id: A dictionary of app_id -> set(Tasks), such as the old_app_live_happy_tasks or
old_app_live_unhappy_tasks parameters passed to bounce methods.
:return: A set of Tasks which is the union of all the values of the dictionary.
"""
return set.union(set(), *(tasks_by_app_id.values()))
@register_bounce_method("brutal")
def brutal_bounce(
new_config: BounceMethodConfigDict,
new_app_running: bool,
happy_new_tasks: Collection,
old_non_draining_tasks: Sequence,
margin_factor=1.0,
) -> BounceMethodResult:
"""Pays no regard to safety. Starts the new app if necessary, and kills any
old ones. Mostly meant as an example of the simplest working bounce method,
but might be tolerable for some services.
:param new_config: The configuration dictionary representing the desired new app.
:param new_app_running: Whether there is an app in Marathon with the same ID as the new config.
:param happy_new_tasks: Set of MarathonTasks belonging to the new application that are considered healthy and up.
:param old_non_draining_tasks: A sequence of tasks not belonging to the new version. Tasks should be ordered from
most desirable to least desirable.
:param margin_factor: the multiplication factor used to calculate the number of instances to be drained
when the crossover method is used.
:return: A dictionary representing the desired bounce actions and containing the following keys:
- create_app: True if we should start the new Marathon app, False otherwise.
- tasks_to_drain: a set of task objects which should be drained and killed. May be empty.
"""
return {
"create_app": not new_app_running,
"tasks_to_drain": set(old_non_draining_tasks),
}
@register_bounce_method("upthendown")
def upthendown_bounce(
new_config: BounceMethodConfigDict,
new_app_running: bool,
happy_new_tasks: Collection,
old_non_draining_tasks: Sequence,
margin_factor=1.0,
) -> BounceMethodResult:
"""Starts a new app if necessary; only kills old apps once all the requested tasks for the new version are running.
See the docstring for brutal_bounce() for parameters and return value.
"""
if new_app_running and len(happy_new_tasks) == new_config["instances"]:
return {"create_app": False, "tasks_to_drain": set(old_non_draining_tasks)}
else:
return {"create_app": not new_app_running, "tasks_to_drain": set()}
@register_bounce_method("crossover")
def crossover_bounce(
new_config: BounceMethodConfigDict,
new_app_running: bool,
happy_new_tasks: Collection,
old_non_draining_tasks: Sequence,
margin_factor=1.0,
) -> BounceMethodResult:
"""Starts a new app if necessary; slowly kills old apps as instances of the new app become happy.
See the docstring for brutal_bounce() for parameters and return value.
"""
assert margin_factor > 0
assert margin_factor <= 1
needed_count = max(
int(math.ceil(new_config["instances"] * margin_factor)) - len(happy_new_tasks),
0,
)
return {
"create_app": not new_app_running,
"tasks_to_drain": set(old_non_draining_tasks[needed_count:]),
}
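# Worked example for the crossover arithmetic above (illustrative numbers): with
# new_config["instances"] == 10, margin_factor == 1.0 and 4 happy new tasks,
# needed_count == max(ceil(10 * 1.0) - 4, 0) == 6, so the 6 most desirable old
# tasks are kept and old_non_draining_tasks[6:] is returned for draining.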
@register_bounce_method("downthenup")
def downthenup_bounce(
new_config: BounceMethodConfigDict,
new_app_running: bool,
happy_new_tasks: Collection,
old_non_draining_tasks: Sequence,
margin_factor=1.0,
) -> BounceMethodResult:
"""Stops any old apps and waits for them to die before starting a new one.
See the docstring for brutal_bounce() for parameters and return value.
"""
return {
"create_app": not old_non_draining_tasks and not new_app_running,
"tasks_to_drain": set(old_non_draining_tasks),
}
@register_bounce_method("down")
def down_bounce(
new_config: BounceMethodConfigDict,
new_app_running: bool,
happy_new_tasks: Collection,
old_non_draining_tasks: Sequence,
margin_factor=1.0,
) -> BounceMethodResult:
"""
Stops old apps, doesn't start any new apps.
Used for the graceful_app_drain script.
"""
return {"create_app": False, "tasks_to_drain": set(old_non_draining_tasks)}
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
|
import logging
import unittest
from gensim import matutils
from scipy.sparse import csr_matrix
import numpy as np
import math
from gensim.corpora.mmcorpus import MmCorpus
from gensim.models import ldamodel
from gensim.test.utils import datapath, common_dictionary, common_corpus
class TestIsBow(unittest.TestCase):
def test_None(self):
# test None
result = matutils.isbow(None)
expected = False
self.assertEqual(expected, result)
def test_bow(self):
# test list words
# one bag of words
potentialbow = [(0, 0.4)]
result = matutils.isbow(potentialbow)
expected = True
self.assertEqual(expected, result)
# multiple bags
potentialbow = [(0, 4.), (1, 2.), (2, 5.), (3, 8.)]
result = matutils.isbow(potentialbow)
expected = True
self.assertEqual(expected, result)
# checking empty input
potentialbow = []
result = matutils.isbow(potentialbow)
expected = True
self.assertEqual(expected, result)
# checking corpus; should return false
potentialbow = [[(2, 1), (3, 1), (4, 1), (5, 1), (1, 1), (7, 1)]]
result = matutils.isbow(potentialbow)
expected = False
self.assertEqual(expected, result)
# not a bag of words, should return false
potentialbow = [(1, 3, 6)]
result = matutils.isbow(potentialbow)
expected = False
self.assertEqual(expected, result)
# checking sparse matrix format bag of words
potentialbow = csr_matrix([[1, 0.4], [0, 0.3], [2, 0.1]])
result = matutils.isbow(potentialbow)
expected = True
self.assertEqual(expected, result)
# checking np array format bag of words
potentialbow = np.array([[1, 0.4], [0, 0.2], [2, 0.2]])
result = matutils.isbow(potentialbow)
expected = True
self.assertEqual(expected, result)
class TestHellinger(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
self.class_ = ldamodel.LdaModel
self.model = self.class_(common_corpus, id2word=common_dictionary, num_topics=2, passes=100)
def test_inputs(self):
# checking empty inputs
vec_1 = []
vec_2 = []
result = matutils.hellinger(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
# checking np array and list input
vec_1 = np.array([])
vec_2 = []
result = matutils.hellinger(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
# checking scipy csr matrix and list input
vec_1 = csr_matrix([])
vec_2 = []
result = matutils.hellinger(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
def test_distributions(self):
# checking different length bag of words as inputs
vec_1 = [(2, 0.1), (3, 0.4), (4, 0.1), (5, 0.1), (1, 0.1), (7, 0.2)]
vec_2 = [(1, 0.1), (3, 0.8), (4, 0.1)]
result = matutils.hellinger(vec_1, vec_2)
expected = 0.484060507634
self.assertAlmostEqual(expected, result)
# checking symmetrical bag of words inputs return same distance
vec_1 = [(2, 0.1), (3, 0.4), (4, 0.1), (5, 0.1), (1, 0.1), (7, 0.2)]
vec_2 = [(1, 0.1), (3, 0.8), (4, 0.1), (8, 0.1), (10, 0.8), (9, 0.1)]
result = matutils.hellinger(vec_1, vec_2)
result_symmetric = matutils.hellinger(vec_2, vec_1)
expected = 0.856921568786
self.assertAlmostEqual(expected, result)
self.assertAlmostEqual(expected, result_symmetric)
# checking ndarray, csr_matrix as inputs
vec_1 = np.array([[1, 0.3], [0, 0.4], [2, 0.3]])
vec_2 = csr_matrix([[1, 0.4], [0, 0.2], [2, 0.2]])
result = matutils.hellinger(vec_1, vec_2)
expected = 0.160618030536
self.assertAlmostEqual(expected, result)
# checking ndarray, list as inputs
vec_1 = np.array([0.6, 0.1, 0.1, 0.2])
vec_2 = [0.2, 0.2, 0.1, 0.5]
result = matutils.hellinger(vec_1, vec_2)
expected = 0.309742984153
self.assertAlmostEqual(expected, result)
# testing LDA distribution vectors
np.random.seed(0)
model = self.class_(self.corpus, id2word=common_dictionary, num_topics=2, passes=100)
lda_vec1 = model[[(1, 2), (2, 3)]]
lda_vec2 = model[[(2, 2), (1, 3)]]
result = matutils.hellinger(lda_vec1, lda_vec2)
expected = 1.0406845281146034e-06
self.assertAlmostEqual(expected, result)
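# Sanity check for the dense-vector case above (sketch; values are the ones used
# in test_distributions): Hellinger distance is
# sqrt(sum((sqrt(p_i) - sqrt(q_i)) ** 2)) / sqrt(2).
#
#   p = np.array([0.6, 0.1, 0.1, 0.2])
#   q = np.array([0.2, 0.2, 0.1, 0.5])
#   np.sqrt(np.sum((np.sqrt(p) - np.sqrt(q)) ** 2)) / np.sqrt(2)  # ~= 0.30974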
class TestKL(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
self.class_ = ldamodel.LdaModel
self.model = self.class_(common_corpus, id2word=common_dictionary, num_topics=2, passes=100)
def test_inputs(self):
# checking empty inputs
vec_1 = []
vec_2 = []
result = matutils.kullback_leibler(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
# checking np array and list input
vec_1 = np.array([])
vec_2 = []
result = matutils.kullback_leibler(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
# checking scipy csr matrix and list input
vec_1 = csr_matrix([])
vec_2 = []
result = matutils.kullback_leibler(vec_1, vec_2)
expected = 0.0
self.assertEqual(expected, result)
def test_distributions(self):
# checking bag of words as inputs
vec_1 = [(2, 0.1), (3, 0.4), (4, 0.1), (5, 0.1), (1, 0.1), (7, 0.2)]
vec_2 = [(1, 0.1), (3, 0.8), (4, 0.1)]
result = matutils.kullback_leibler(vec_2, vec_1, 8)
expected = 0.55451775
self.assertAlmostEqual(expected, result, places=5)
        # KL is not symmetric; vec_1 compared with vec_2 will contain log of zeros and return infinity
vec_1 = [(2, 0.1), (3, 0.4), (4, 0.1), (5, 0.1), (1, 0.1), (7, 0.2)]
vec_2 = [(1, 0.1), (3, 0.8), (4, 0.1)]
result = matutils.kullback_leibler(vec_1, vec_2, 8)
self.assertTrue(math.isinf(result))
# checking ndarray, csr_matrix as inputs
vec_1 = np.array([[1, 0.3], [0, 0.4], [2, 0.3]])
vec_2 = csr_matrix([[1, 0.4], [0, 0.2], [2, 0.2]])
result = matutils.kullback_leibler(vec_1, vec_2, 3)
expected = 0.0894502
self.assertAlmostEqual(expected, result, places=5)
# checking ndarray, list as inputs
vec_1 = np.array([0.6, 0.1, 0.1, 0.2])
vec_2 = [0.2, 0.2, 0.1, 0.5]
result = matutils.kullback_leibler(vec_1, vec_2)
expected = 0.40659450877
self.assertAlmostEqual(expected, result, places=5)
# testing LDA distribution vectors
np.random.seed(0)
model = self.class_(self.corpus, id2word=common_dictionary, num_topics=2, passes=100)
lda_vec1 = model[[(1, 2), (2, 3)]]
lda_vec2 = model[[(2, 2), (1, 3)]]
result = matutils.kullback_leibler(lda_vec1, lda_vec2)
expected = 4.283407e-12
self.assertAlmostEqual(expected, result, places=5)
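# Sanity check for the dense-vector KL case above (sketch): with natural logs,
# KL(p || q) = sum(p_i * log(p_i / q_i)), so for p = [0.6, 0.1, 0.1, 0.2] and
# q = [0.2, 0.2, 0.1, 0.5]:
# 0.6*ln(3) + 0.1*ln(0.5) + 0.1*ln(1) + 0.2*ln(0.4) ~= 0.40659, matching the
# expected value asserted in test_distributions above.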
class TestJaccard(unittest.TestCase):
def test_inputs(self):
# all empty inputs will give a divide by zero exception
vec_1 = []
vec_2 = []
self.assertRaises(ZeroDivisionError, matutils.jaccard, vec_1, vec_2)
def test_distributions(self):
# checking bag of words as inputs
vec_1 = [(2, 1), (3, 4), (4, 1), (5, 1), (1, 1), (7, 2)]
vec_2 = [(1, 1), (3, 8), (4, 1)]
result = matutils.jaccard(vec_2, vec_1)
expected = 1 - 0.3
self.assertAlmostEqual(expected, result)
# checking ndarray, csr_matrix as inputs
vec_1 = np.array([[1, 3], [0, 4], [2, 3]])
vec_2 = csr_matrix([[1, 4], [0, 2], [2, 2]])
result = matutils.jaccard(vec_1, vec_2)
expected = 1 - 0.388888888889
self.assertAlmostEqual(expected, result)
# checking ndarray, list as inputs
vec_1 = np.array([6, 1, 2, 3])
vec_2 = [4, 3, 2, 5]
result = matutils.jaccard(vec_1, vec_2)
expected = 1 - 0.333333333333
self.assertAlmostEqual(expected, result)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
"""Utility functions for parsing and preprocessing strings.

Examples:

>>> from gensim.parsing.preprocessing import remove_stopwords, preprocess_string
>>> remove_stopwords("Better late than never, but better never late.")
u'Better late never, better late.'
>>>
>>> preprocess_string("<i>Hel 9lo</i> <b>Wo9 rld</b>! Th3 weather_is really g00d today, isn't it?")
[u'hel', u'rld', u'weather', u'todai', u'isn']
"""
import re
import string
import glob
from gensim import utils
from gensim.parsing.porter import PorterStemmer
STOPWORDS = frozenset([
'all', 'six', 'just', 'less', 'being', 'indeed', 'over', 'move', 'anyway', 'four', 'not', 'own', 'through',
'using', 'fifty', 'where', 'mill', 'only', 'find', 'before', 'one', 'whose', 'system', 'how', 'somewhere',
'much', 'thick', 'show', 'had', 'enough', 'should', 'to', 'must', 'whom', 'seeming', 'yourselves', 'under',
'ours', 'two', 'has', 'might', 'thereafter', 'latterly', 'do', 'them', 'his', 'around', 'than', 'get', 'very',
'de', 'none', 'cannot', 'every', 'un', 'they', 'front', 'during', 'thus', 'now', 'him', 'nor', 'name', 'regarding',
'several', 'hereafter', 'did', 'always', 'who', 'didn', 'whither', 'this', 'someone', 'either', 'each', 'become',
'thereupon', 'sometime', 'side', 'towards', 'therein', 'twelve', 'because', 'often', 'ten', 'our', 'doing', 'km',
'eg', 'some', 'back', 'used', 'up', 'go', 'namely', 'computer', 'are', 'further', 'beyond', 'ourselves', 'yet',
'out', 'even', 'will', 'what', 'still', 'for', 'bottom', 'mine', 'since', 'please', 'forty', 'per', 'its',
'everything', 'behind', 'does', 'various', 'above', 'between', 'it', 'neither', 'seemed', 'ever', 'across', 'she',
'somehow', 'be', 'we', 'full', 'never', 'sixty', 'however', 'here', 'otherwise', 'were', 'whereupon', 'nowhere',
'although', 'found', 'alone', 're', 'along', 'quite', 'fifteen', 'by', 'both', 'about', 'last', 'would',
'anything', 'via', 'many', 'could', 'thence', 'put', 'against', 'keep', 'etc', 'amount', 'became', 'ltd', 'hence',
'onto', 'or', 'con', 'among', 'already', 'co', 'afterwards', 'formerly', 'within', 'seems', 'into', 'others',
'while', 'whatever', 'except', 'down', 'hers', 'everyone', 'done', 'least', 'another', 'whoever', 'moreover',
'couldnt', 'throughout', 'anyhow', 'yourself', 'three', 'from', 'her', 'few', 'together', 'top', 'there', 'due',
'been', 'next', 'anyone', 'eleven', 'cry', 'call', 'therefore', 'interest', 'then', 'thru', 'themselves',
'hundred', 'really', 'sincere', 'empty', 'more', 'himself', 'elsewhere', 'mostly', 'on', 'fire', 'am', 'becoming',
'hereby', 'amongst', 'else', 'part', 'everywhere', 'too', 'kg', 'herself', 'former', 'those', 'he', 'me', 'myself',
'made', 'twenty', 'these', 'was', 'bill', 'cant', 'us', 'until', 'besides', 'nevertheless', 'below', 'anywhere',
'nine', 'can', 'whether', 'of', 'your', 'toward', 'my', 'say', 'something', 'and', 'whereafter', 'whenever',
'give', 'almost', 'wherever', 'is', 'describe', 'beforehand', 'herein', 'doesn', 'an', 'as', 'itself', 'at',
'have', 'in', 'seem', 'whence', 'ie', 'any', 'fill', 'again', 'hasnt', 'inc', 'thereby', 'thin', 'no', 'perhaps',
'latter', 'meanwhile', 'when', 'detail', 'same', 'wherein', 'beside', 'also', 'that', 'other', 'take', 'which',
'becomes', 'you', 'if', 'nobody', 'unless', 'whereas', 'see', 'though', 'may', 'after', 'upon', 'most', 'hereupon',
'eight', 'but', 'serious', 'nothing', 'such', 'why', 'off', 'a', 'don', 'whereby', 'third', 'i', 'whole', 'noone',
'sometimes', 'well', 'amoungst', 'yours', 'their', 'rather', 'without', 'so', 'five', 'the', 'first', 'with',
'make', 'once'
])
RE_PUNCT = re.compile(r'([%s])+' % re.escape(string.punctuation), re.UNICODE)
RE_TAGS = re.compile(r"<([^>]+)>", re.UNICODE)
RE_NUMERIC = re.compile(r"[0-9]+", re.UNICODE)
RE_NONALPHA = re.compile(r"\W", re.UNICODE)
RE_AL_NUM = re.compile(r"([a-z]+)([0-9]+)", flags=re.UNICODE)
RE_NUM_AL = re.compile(r"([0-9]+)([a-z]+)", flags=re.UNICODE)
RE_WHITESPACE = re.compile(r"(\s)+", re.UNICODE)
def remove_stopwords(s):
"""Remove :const:`~gensim.parsing.preprocessing.STOPWORDS` from `s`.
Parameters
----------
s : str
Returns
-------
str
Unicode string without :const:`~gensim.parsing.preprocessing.STOPWORDS`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import remove_stopwords
>>> remove_stopwords("Better late than never, but better never late.")
u'Better late never, better late.'
"""
s = utils.to_unicode(s)
return " ".join(w for w in s.split() if w not in STOPWORDS)
def strip_punctuation(s):
"""Replace punctuation characters with spaces in `s` using :const:`~gensim.parsing.preprocessing.RE_PUNCT`.
Parameters
----------
s : str
Returns
-------
str
Unicode string without punctuation characters.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_punctuation
>>> strip_punctuation("A semicolon is a stronger break than a comma, but not as much as a full stop!")
u'A semicolon is a stronger break than a comma but not as much as a full stop '
"""
s = utils.to_unicode(s)
return RE_PUNCT.sub(" ", s)
strip_punctuation2 = strip_punctuation
def strip_tags(s):
"""Remove tags from `s` using :const:`~gensim.parsing.preprocessing.RE_TAGS`.
Parameters
----------
s : str
Returns
-------
str
Unicode string without tags.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_tags
>>> strip_tags("<i>Hello</i> <b>World</b>!")
u'Hello World!'
"""
s = utils.to_unicode(s)
return RE_TAGS.sub("", s)
def strip_short(s, minsize=3):
"""Remove words with length lesser than `minsize` from `s`.
Parameters
----------
s : str
minsize : int, optional
Returns
-------
str
Unicode string without short words.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_short
>>> strip_short("salut les amis du 59")
u'salut les amis'
>>>
>>> strip_short("one two three four five six seven eight nine ten", minsize=5)
u'three seven eight'
"""
s = utils.to_unicode(s)
return " ".join(e for e in s.split() if len(e) >= minsize)
def strip_numeric(s):
"""Remove digits from `s` using :const:`~gensim.parsing.preprocessing.RE_NUMERIC`.
Parameters
----------
s : str
Returns
-------
str
Unicode string without digits.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_numeric
>>> strip_numeric("0text24gensim365test")
u'textgensimtest'
"""
s = utils.to_unicode(s)
return RE_NUMERIC.sub("", s)
def strip_non_alphanum(s):
"""Remove non-alphabetic characters from `s` using :const:`~gensim.parsing.preprocessing.RE_NONALPHA`.
Parameters
----------
s : str
Returns
-------
str
Unicode string with alphabetic characters only.
Notes
-----
Word characters - alphanumeric & underscore.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_non_alphanum
>>> strip_non_alphanum("if-you#can%read$this&then@this#method^works")
u'if you can read this then this method works'
"""
s = utils.to_unicode(s)
return RE_NONALPHA.sub(" ", s)
def strip_multiple_whitespaces(s):
r"""Remove repeating whitespace characters (spaces, tabs, line breaks) from `s`
and turns tabs & line breaks into spaces using :const:`~gensim.parsing.preprocessing.RE_WHITESPACE`.
Parameters
----------
s : str
Returns
-------
str
Unicode string without repeating in a row whitespace characters.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import strip_multiple_whitespaces
>>> strip_multiple_whitespaces("salut" + '\r' + " les" + '\n' + " loulous!")
u'salut les loulous!'
"""
s = utils.to_unicode(s)
return RE_WHITESPACE.sub(" ", s)
def split_alphanum(s):
"""Add spaces between digits & letters in `s` using :const:`~gensim.parsing.preprocessing.RE_AL_NUM`.
Parameters
----------
s : str
Returns
-------
str
Unicode string with spaces between digits & letters.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import split_alphanum
>>> split_alphanum("24.0hours7 days365 a1b2c3")
u'24.0 hours 7 days 365 a 1 b 2 c 3'
"""
s = utils.to_unicode(s)
s = RE_AL_NUM.sub(r"\1 \2", s)
return RE_NUM_AL.sub(r"\1 \2", s)
def stem_text(text):
"""Transform `s` into lowercase and stem it.
Parameters
----------
text : str
Returns
-------
str
Unicode lowercased and porter-stemmed version of string `text`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import stem_text
>>> stem_text("While it is quite useful to be able to search a large collection of documents almost instantly.")
u'while it is quit us to be abl to search a larg collect of document almost instantly.'
"""
text = utils.to_unicode(text)
p = PorterStemmer()
return ' '.join(p.stem(word) for word in text.split())
stem = stem_text
DEFAULT_FILTERS = [
lambda x: x.lower(), strip_tags, strip_punctuation,
strip_multiple_whitespaces, strip_numeric,
remove_stopwords, strip_short, stem_text
]
def preprocess_string(s, filters=DEFAULT_FILTERS):
"""Apply list of chosen filters to `s`.
Default list of filters:
* :func:`~gensim.parsing.preprocessing.strip_tags`,
* :func:`~gensim.parsing.preprocessing.strip_punctuation`,
* :func:`~gensim.parsing.preprocessing.strip_multiple_whitespaces`,
* :func:`~gensim.parsing.preprocessing.strip_numeric`,
* :func:`~gensim.parsing.preprocessing.remove_stopwords`,
* :func:`~gensim.parsing.preprocessing.strip_short`,
* :func:`~gensim.parsing.preprocessing.stem_text`.
Parameters
----------
s : str
filters: list of functions, optional
Returns
-------
list of str
Processed strings (cleaned).
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import preprocess_string
>>> preprocess_string("<i>Hel 9lo</i> <b>Wo9 rld</b>! Th3 weather_is really g00d today, isn't it?")
[u'hel', u'rld', u'weather', u'todai', u'isn']
>>>
>>> s = "<i>Hel 9lo</i> <b>Wo9 rld</b>! Th3 weather_is really g00d today, isn't it?"
>>> CUSTOM_FILTERS = [lambda x: x.lower(), strip_tags, strip_punctuation]
>>> preprocess_string(s, CUSTOM_FILTERS)
[u'hel', u'9lo', u'wo9', u'rld', u'th3', u'weather', u'is', u'really', u'g00d', u'today', u'isn', u't', u'it']
"""
s = utils.to_unicode(s)
for f in filters:
s = f(s)
return s.split()
def preprocess_documents(docs):
"""Apply :const:`~gensim.parsing.preprocessing.DEFAULT_FILTERS` to the documents strings.
Parameters
----------
docs : list of str
Returns
-------
list of list of str
Processed documents split by whitespace.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.preprocessing import preprocess_documents
>>> preprocess_documents(["<i>Hel 9lo</i> <b>Wo9 rld</b>!", "Th3 weather_is really g00d today, isn't it?"])
[[u'hel', u'rld'], [u'weather', u'todai', u'isn']]
"""
return [preprocess_string(d) for d in docs]
def read_file(path):
with utils.open(path, 'rb') as fin:
return fin.read()
def read_files(pattern):
return [read_file(fname) for fname in glob.glob(pattern)]
|
import posixpath
from http import client
from urllib.parse import quote
from radicale import app, httputils, pathutils, storage, xmlutils
from radicale.log import logger
def propose_filename(collection):
"""Propose a filename for a collection."""
tag = collection.get_meta("tag")
if tag == "VADDRESSBOOK":
fallback_title = "Address book"
suffix = ".vcf"
elif tag == "VCALENDAR":
fallback_title = "Calendar"
suffix = ".ics"
else:
fallback_title = posixpath.basename(collection.path)
suffix = ""
title = collection.get_meta("D:displayname") or fallback_title
if title and not title.lower().endswith(suffix.lower()):
title += suffix
return title
class ApplicationGetMixin:
    def _content_disposition_attachment(self, filename):
        value = "attachment"
try:
encoded_filename = quote(filename, encoding=self._encoding)
except UnicodeEncodeError:
logger.warning("Failed to encode filename: %r", filename,
exc_info=True)
encoded_filename = ""
if encoded_filename:
value += "; filename*=%s''%s" % (self._encoding, encoded_filename)
return value
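    # Illustrative outcome (sketch): for a VCALENDAR collection whose
    # displayname is "My Calendar", propose_filename() yields "My Calendar.ics"
    # and, assuming self._encoding is "utf-8", the value built above becomes
    #   attachment; filename*=utf-8''My%20Calendar.ics
    # following the RFC 6266 / RFC 8187 extended parameter syntax.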
def do_GET(self, environ, base_prefix, path, user):
"""Manage GET request."""
# Redirect to .web if the root URL is requested
if not pathutils.strip_path(path):
web_path = ".web"
if not environ.get("PATH_INFO"):
web_path = posixpath.join(posixpath.basename(base_prefix),
web_path)
return (client.FOUND,
{"Location": web_path, "Content-Type": "text/plain"},
"Redirected to %s" % web_path)
# Dispatch .web URL to web module
if path == "/.web" or path.startswith("/.web/"):
return self._web.get(environ, base_prefix, path, user)
access = app.Access(self._rights, user, path)
if not access.check("r") and "i" not in access.permissions:
return httputils.NOT_ALLOWED
with self._storage.acquire_lock("r", user):
item = next(self._storage.discover(path), None)
if not item:
return httputils.NOT_FOUND
if access.check("r", item):
limited_access = False
elif "i" in access.permissions:
limited_access = True
else:
return httputils.NOT_ALLOWED
if isinstance(item, storage.BaseCollection):
tag = item.get_meta("tag")
if not tag:
return (httputils.NOT_ALLOWED if limited_access else
httputils.DIRECTORY_LISTING)
content_type = xmlutils.MIMETYPES[tag]
                content_disposition = self._content_disposition_attachment(
                    propose_filename(item))
elif limited_access:
return httputils.NOT_ALLOWED
else:
content_type = xmlutils.OBJECT_MIMETYPES[item.name]
content_disposition = ""
headers = {
"Content-Type": content_type,
"Last-Modified": item.last_modified,
"ETag": item.etag}
if content_disposition:
headers["Content-Disposition"] = content_disposition
answer = item.serialize()
return client.OK, headers, answer
|
import errno
import fcntl
import os.path
import random
MAC_ADDRESS_PREFIX = ("02", "52")
class MacAddressException(Exception):
pass
def reserve_unique_mac_address(lock_directory):
""" Pick and reserve a unique mac address for a container
returns (mac_address, lockfile)
where the mac address is a string in the form of 00:00:00:00:00:00
and lockfile is a file object that holds an exclusive lock
"""
for x in range(100):
random_hex = "{:08x}".format(random.getrandbits(32))
mac_address = ":".join(
MAC_ADDRESS_PREFIX
+ (random_hex[0:2], random_hex[2:4], random_hex[4:6], random_hex[6:8])
)
lock_filepath = os.path.join(lock_directory, mac_address)
lock_file = obtain_lock(lock_filepath)
if lock_file is not None:
return (mac_address, lock_file)
raise MacAddressException("Unable to pick unique MAC address")
def obtain_lock(lock_filepath):
""" Open and obtain a flock on the parameter. Returns a file if successful, None if not
"""
lock_file = open(lock_filepath, "w")
try:
fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
return lock_file
except IOError as err:
if err.errno != errno.EAGAIN:
raise
lock_file.close()
return None
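# Illustrative outcome (sketch; the lock directory below is hypothetical):
#
#   mac, lock_file = reserve_unique_mac_address("/var/lib/paasta/mac-lock")
#   # mac looks like "02:52:3f:9a:01:c4": the first two octets come from
#   # MAC_ADDRESS_PREFIX, the remaining four from 32 random bits.
#   # Keep lock_file open to hold the flock; close it to release the address.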
|
import asyncio
import functools
import time
import weakref
from collections import defaultdict
from typing import AsyncIterable
from typing import Awaitable
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import TypeVar
T = TypeVar("T")
# NOTE: this method is not thread-safe due to lack of locking while checking
# and updating the cache
def async_ttl_cache(
ttl: Optional[float] = 300,
cleanup_self: bool = False,
*,
cache: Optional[Dict] = None,
) -> Callable[
[Callable[..., Awaitable[T]]], Callable[..., Awaitable[T]] # wrapped # inner
]:
async def call_or_get_from_cache(cache, async_func, args_for_key, args, kwargs):
# Please note that anything which is put into `key` will be in the
# cache forever, potentially causing memory leaks. The most common
# case is the `self` arg pointing to a huge object. To mitigate that
        # we're using `args_for_key`, which is not supposed to contain any huge
# objects.
key = functools._make_key(args_for_key, kwargs, typed=False)
try:
future, last_update = cache[key]
if ttl is not None and time.time() - last_update > ttl:
raise KeyError
except KeyError:
future = asyncio.ensure_future(async_func(*args, **kwargs))
# set the timestamp to +infinity so that we always wait on the in-flight request.
cache[key] = (future, float("Inf"))
try:
value = await future
except Exception:
# Only update the cache if it's the same future we awaited and
# it hasn't already been updated by another coroutine
# Note also that we use get() in case the key was deleted from the
# cache by another coroutine
if cache.get(key) == (future, float("Inf")):
del cache[key]
raise
else:
if cache.get(key) == (future, float("Inf")):
cache[key] = (future, time.time())
return value
if cleanup_self:
instance_caches: Dict = cache if cache is not None else defaultdict(dict)
def on_delete(w):
del instance_caches[w]
def outer(wrapped):
@functools.wraps(wrapped)
async def inner(self, *args, **kwargs):
w = weakref.ref(self, on_delete)
self_cache = instance_caches[w]
return await call_or_get_from_cache(
self_cache, wrapped, args, (self,) + args, kwargs
)
return inner
else:
cache2: Dict = cache if cache is not None else {} # Should be Dict[Any, T] but that doesn't work.
def outer(wrapped):
@functools.wraps(wrapped)
async def inner(*args, **kwargs):
return await call_or_get_from_cache(cache2, wrapped, args, args, kwargs)
return inner
return outer
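# Illustrative usage of async_ttl_cache (sketch only): results of an expensive
# coroutine are reused for `ttl` seconds per distinct argument tuple, and
# concurrent callers await the same in-flight future.
#
#   @async_ttl_cache(ttl=60)
#   async def fetch_status(service: str) -> dict:
#       ...  # expensive network call (hypothetical)
#
#   status = await fetch_status("myservice")  # cached for 60s per `service`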
async def aiter_to_list(aiter: AsyncIterable[T],) -> List[T]:
return [x async for x in aiter]
def async_timeout(
seconds: int = 10,
) -> Callable[
[Callable[..., Awaitable[T]]], Callable[..., Awaitable[T]] # wrapped # inner
]:
def outer(wrapped):
@functools.wraps(wrapped)
async def inner(*args, **kwargs):
return await asyncio.wait_for(wrapped(*args, **kwargs), timeout=seconds)
return inner
return outer
|
from speak2mary import MaryTTS
import voluptuous as vol
from homeassistant.components.tts import CONF_LANG, PLATFORM_SCHEMA, Provider
from homeassistant.const import CONF_EFFECT, CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
CONF_VOICE = "voice"
CONF_CODEC = "codec"
SUPPORT_LANGUAGES = MaryTTS.supported_locales()
SUPPORT_CODEC = MaryTTS.supported_codecs()
SUPPORT_OPTIONS = [CONF_EFFECT]
SUPPORT_EFFECTS = MaryTTS.supported_effects().keys()
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 59125
DEFAULT_LANG = "en_US"
DEFAULT_VOICE = "cmu-slt-hsmm"
DEFAULT_CODEC = "WAVE_FILE"
DEFAULT_EFFECTS = {}
MAP_MARYTTS_CODEC = {"WAVE_FILE": "wav", "AIFF_FILE": "aiff", "AU_FILE": "au"}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES),
vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): cv.string,
vol.Optional(CONF_CODEC, default=DEFAULT_CODEC): vol.In(SUPPORT_CODEC),
vol.Optional(CONF_EFFECT, default=DEFAULT_EFFECTS): {
vol.All(cv.string, vol.In(SUPPORT_EFFECTS)): cv.string
},
}
)
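# Example configuration.yaml entry accepted by the schema above (illustrative;
# effect names must be among MaryTTS.supported_effects()):
#
#   tts:
#     - platform: marytts
#       host: localhost
#       port: 59125
#       language: en_US
#       voice: cmu-slt-hsmm
#       codec: WAVE_FILE
#       effect:
#         Volume: "amount:2.0"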
def get_engine(hass, config, discovery_info=None):
"""Set up MaryTTS speech component."""
return MaryTTSProvider(hass, config)
class MaryTTSProvider(Provider):
"""MaryTTS speech api provider."""
def __init__(self, hass, conf):
"""Init MaryTTS TTS service."""
self.hass = hass
self._mary = MaryTTS(
conf.get(CONF_HOST),
conf.get(CONF_PORT),
conf.get(CONF_CODEC),
conf.get(CONF_LANG),
conf.get(CONF_VOICE),
)
self._effects = conf.get(CONF_EFFECT)
self.name = "MaryTTS"
@property
def default_language(self):
"""Return the default language."""
return self._mary.locale
@property
def supported_languages(self):
"""Return list of supported languages."""
return SUPPORT_LANGUAGES
@property
def default_options(self):
"""Return dict include default options."""
return {CONF_EFFECT: self._effects}
@property
def supported_options(self):
"""Return a list of supported options."""
return SUPPORT_OPTIONS
def get_tts_audio(self, message, language, options=None):
"""Load TTS from MaryTTS."""
effects = options[CONF_EFFECT]
data = self._mary.speak(message, effects)
audiotype = MAP_MARYTTS_CODEC[self._mary.codec]
return audiotype, data
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_ID,
CONF_SCAN_INTERVAL,
CONF_UNIT_OF_MEASUREMENT,
CONF_URL,
CONF_VALUE_TEMPLATE,
HTTP_OK,
POWER_WATT,
STATE_UNKNOWN,
)
from homeassistant.helpers import template
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_FEEDID = "FeedId"
ATTR_FEEDNAME = "FeedName"
ATTR_LASTUPDATETIME = "LastUpdated"
ATTR_LASTUPDATETIMESTR = "LastUpdatedStr"
ATTR_SIZE = "Size"
ATTR_TAG = "Tag"
ATTR_USERID = "UserId"
CONF_EXCLUDE_FEEDID = "exclude_feed_id"
CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
CONF_SENSOR_NAMES = "sensor_names"
DECIMALS = 2
DEFAULT_UNIT = POWER_WATT
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
ONLY_INCL_EXCL_NONE = "only_include_exclude_or_none"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_URL): cv.string,
vol.Required(CONF_ID): cv.positive_int,
vol.Exclusive(CONF_ONLY_INCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Exclusive(CONF_EXCLUDE_FEEDID, ONLY_INCL_EXCL_NONE): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_SENSOR_NAMES): vol.All(
{cv.positive_int: vol.All(cv.string, vol.Length(min=1))}
),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=DEFAULT_UNIT): cv.string,
}
)
def get_id(sensorid, feedtag, feedname, feedid, feeduserid):
"""Return unique identifier for feed / sensor."""
return f"emoncms{sensorid}_{feedtag}_{feedname}_{feedid}_{feeduserid}"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Emoncms sensor."""
apikey = config.get(CONF_API_KEY)
url = config.get(CONF_URL)
sensorid = config.get(CONF_ID)
value_template = config.get(CONF_VALUE_TEMPLATE)
config_unit = config.get(CONF_UNIT_OF_MEASUREMENT)
exclude_feeds = config.get(CONF_EXCLUDE_FEEDID)
include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID)
sensor_names = config.get(CONF_SENSOR_NAMES)
interval = config.get(CONF_SCAN_INTERVAL)
if value_template is not None:
value_template.hass = hass
data = EmonCmsData(hass, url, apikey, interval)
data.update()
if data.data is None:
return False
sensors = []
for elem in data.data:
if exclude_feeds is not None:
if int(elem["id"]) in exclude_feeds:
continue
if include_only_feeds is not None:
if int(elem["id"]) not in include_only_feeds:
continue
name = None
if sensor_names is not None:
name = sensor_names.get(int(elem["id"]), None)
unit = elem.get("unit")
if unit:
unit_of_measurement = unit
else:
unit_of_measurement = config_unit
sensors.append(
EmonCmsSensor(
hass,
data,
name,
value_template,
unit_of_measurement,
str(sensorid),
elem,
)
)
add_entities(sensors)
class EmonCmsSensor(Entity):
"""Implementation of an Emoncms sensor."""
def __init__(
self, hass, data, name, value_template, unit_of_measurement, sensorid, elem
):
"""Initialize the sensor."""
if name is None:
# Suppress ID in sensor name if it's 1, since most people won't
# have more than one EmonCMS source and it's redundant to show the
# ID if there's only one.
id_for_name = "" if str(sensorid) == "1" else sensorid
# Use the feed name assigned in EmonCMS or fall back to the feed ID
feed_name = elem.get("name") or f"Feed {elem['id']}"
self._name = f"EmonCMS{id_for_name} {feed_name}"
else:
self._name = name
self._identifier = get_id(
sensorid, elem["tag"], elem["name"], elem["id"], elem["userid"]
)
self._hass = hass
self._data = data
self._value_template = value_template
self._unit_of_measurement = unit_of_measurement
self._sensorid = sensorid
self._elem = elem
if self._value_template is not None:
self._state = self._value_template.render_with_possible_json_value(
elem["value"], STATE_UNKNOWN
)
else:
self._state = round(float(elem["value"]), DECIMALS)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def device_state_attributes(self):
"""Return the attributes of the sensor."""
return {
ATTR_FEEDID: self._elem["id"],
ATTR_TAG: self._elem["tag"],
ATTR_FEEDNAME: self._elem["name"],
ATTR_SIZE: self._elem["size"],
ATTR_USERID: self._elem["userid"],
ATTR_LASTUPDATETIME: self._elem["time"],
ATTR_LASTUPDATETIMESTR: template.timestamp_local(float(self._elem["time"])),
}
def update(self):
"""Get the latest data and updates the state."""
self._data.update()
if self._data.data is None:
return
elem = next(
(
elem
for elem in self._data.data
if get_id(
self._sensorid,
elem["tag"],
elem["name"],
elem["id"],
elem["userid"],
)
== self._identifier
),
None,
)
if elem is None:
return
self._elem = elem
if self._value_template is not None:
self._state = self._value_template.render_with_possible_json_value(
elem["value"], STATE_UNKNOWN
)
else:
self._state = round(float(elem["value"]), DECIMALS)
class EmonCmsData:
"""The class for handling the data retrieval."""
def __init__(self, hass, url, apikey, interval):
"""Initialize the data object."""
self._apikey = apikey
self._url = f"{url}/feed/list.json"
self._interval = interval
self._hass = hass
self.data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Emoncms."""
try:
parameters = {"apikey": self._apikey}
req = requests.get(
self._url, params=parameters, allow_redirects=True, timeout=5
)
except requests.exceptions.RequestException as exception:
_LOGGER.error(exception)
return
else:
if req.status_code == HTTP_OK:
self.data = req.json()
else:
_LOGGER.error(
"Please verify if the specified configuration value "
"'%s' is correct! (HTTP Status_code = %d)",
CONF_URL,
req.status_code,
)
|
import sys
import os
import os.path as op
import mne
from mne.utils import run_subprocess, get_subjects_dir
def freeview_bem_surfaces(subject, subjects_dir, method):
"""View 3-Layers BEM model with Freeview.
Parameters
----------
subject : string
Subject name
subjects_dir : string
Directory containing subjects data (Freesurfer SUBJECTS_DIR)
method : string
Can be 'flash' or 'watershed'.
"""
subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
if subject is None:
raise ValueError("subject argument is None.")
subject_dir = op.join(subjects_dir, subject)
if not op.isdir(subject_dir):
raise ValueError("Wrong path: '{}'. Check subjects-dir or"
"subject argument.".format(subject_dir))
env = os.environ.copy()
env['SUBJECT'] = subject
env['SUBJECTS_DIR'] = subjects_dir
if 'FREESURFER_HOME' not in env:
raise RuntimeError('The FreeSurfer environment needs to be set up.')
mri_dir = op.join(subject_dir, 'mri')
bem_dir = op.join(subject_dir, 'bem')
mri = op.join(mri_dir, 'T1.mgz')
if method == 'watershed':
bem_dir = op.join(bem_dir, 'watershed')
outer_skin = op.join(bem_dir, '%s_outer_skin_surface' % subject)
outer_skull = op.join(bem_dir, '%s_outer_skull_surface' % subject)
inner_skull = op.join(bem_dir, '%s_inner_skull_surface' % subject)
else:
if method == 'flash':
bem_dir = op.join(bem_dir, 'flash')
outer_skin = op.join(bem_dir, 'outer_skin.surf')
outer_skull = op.join(bem_dir, 'outer_skull.surf')
inner_skull = op.join(bem_dir, 'inner_skull.surf')
# put together the command
cmd = ['freeview']
cmd += ["--volume", mri]
cmd += ["--surface", "%s:color=red:edgecolor=red" % inner_skull]
cmd += ["--surface", "%s:color=yellow:edgecolor=yellow" % outer_skull]
cmd += ["--surface",
"%s:color=255,170,127:edgecolor=255,170,127" % outer_skin]
run_subprocess(cmd, env=env, stdout=sys.stdout)
print("[done]")
def run():
"""Run command."""
from mne.commands.utils import get_optparser
parser = get_optparser(__file__)
subject = os.environ.get('SUBJECT')
subjects_dir = get_subjects_dir()
parser.add_option("-s", "--subject", dest="subject",
help="Subject name", default=subject)
parser.add_option("-d", "--subjects-dir", dest="subjects_dir",
help="Subjects directory", default=subjects_dir)
parser.add_option("-m", "--method", dest="method",
help=("Method used to generate the BEM model. "
"Can be flash or watershed."))
options, args = parser.parse_args()
subject = options.subject
subjects_dir = options.subjects_dir
method = options.method
freeview_bem_surfaces(subject, subjects_dir, method)
mne.utils.run_command_if_main()
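# Illustrative invocation (sketch, assuming this module is exposed as the
# `mne freeview_bem_surfaces` sub-command and FreeSurfer is configured):
#
#   mne freeview_bem_surfaces -s sample -d $SUBJECTS_DIR -m flash
#
# This opens Freeview with T1.mgz and the three flash BEM surfaces colored as
# assembled above (inner skull red, outer skull yellow, outer skin skin-tone).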
|
from gitless import core
from . import helpers, pprint
def parser(subparsers, repo):
desc = 'fuse the divergent changes of a branch onto the current branch'
fuse_parser = subparsers.add_parser(
'fuse', help=desc, description=(
desc.capitalize() + '. ' +
'By default all divergent changes from the given source branch are '
'fused. To customize the set of commits to fuse use the only and '
'exclude flags'), aliases=['fs'])
fuse_parser.add_argument(
'src', nargs='?',
help=(
'the source branch to read changes from. If none is given the upstream '
'branch of the current branch is used as the source'))
fuse_parser.add_argument(
'-o', '--only', nargs='+',
help=(
'fuse only the commits given (commits must belong to the set of '
'divergent commits from the given src branch)'),
metavar='commit_id', action=helpers.CommitIdProcessor, repo=repo)
fuse_parser.add_argument(
'-e', '--exclude', nargs='+',
help=(
'exclude from the fuse the commits given (commits must belong to the '
'set of divergent commits from the given src branch)'),
metavar='commit_id', action=helpers.CommitIdProcessor, repo=repo)
fuse_parser.add_argument(
'-ip', '--insertion-point', nargs='?',
help=(
'the divergent changes will be inserted after the commit given, dp for '
'divergent point is the default'), metavar='commit_id')
fuse_parser.add_argument(
'-a', '--abort', help='abort the fuse in progress', action='store_true')
fuse_parser.set_defaults(func=main)
def main(args, repo):
current_b = repo.current_branch
if args.abort:
current_b.abort_fuse(op_cb=pprint.OP_CB)
pprint.ok('Fuse aborted successfully')
return True
src_branch = helpers.get_branch_or_use_upstream(args.src, 'src', repo)
mb = repo.merge_base(current_b, src_branch)
if mb == src_branch.target: # the current branch is ahead or both branches are equal
pprint.err('No commits to fuse')
return False
if (not args.insertion_point or args.insertion_point == 'dp' or
args.insertion_point == 'divergent-point'):
insertion_point = mb
else:
insertion_point = repo.revparse_single(args.insertion_point).id
def valid_input(inp):
walker = src_branch.history()
walker.hide(insertion_point)
divergent_ids = frozenset(ci.id for ci in walker)
errors_found = False
for ci in inp - divergent_ids:
pprint.err(
'Commit with id {0} is not among the divergent commits of branch '
'{1}'.format(ci, src_branch))
errors_found = True
return not errors_found
only = None
exclude = None
if args.only:
only = frozenset(args.only)
if not valid_input(only):
return False
elif args.exclude:
exclude = frozenset(args.exclude)
if not valid_input(exclude):
return False
try:
current_b.fuse(
src_branch, insertion_point, only=only, exclude=exclude,
op_cb=pprint.OP_CB)
pprint.ok('Fuse succeeded')
except core.ApplyFailedError as e:
    pprint.ok('Fuse succeeded, but there are conflicts you need to resolve')
raise e
return True
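# Illustrative invocations handled by this command (assuming the standard
# 'gl' entry point):
#
#   gl fuse other-branch                 # fuse all divergent commits
#   gl fuse other-branch -o <commit_id>  # fuse only selected divergent commits
#   gl fuse -a                           # abort a fuse stopped by conflicts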
|
import pytest
import voluptuous as vol
import homeassistant.util.color as color_util
GAMUT = color_util.GamutType(
color_util.XYPoint(0.704, 0.296),
color_util.XYPoint(0.2151, 0.7106),
color_util.XYPoint(0.138, 0.08),
)
GAMUT_INVALID_1 = color_util.GamutType(
color_util.XYPoint(0.704, 0.296),
color_util.XYPoint(-0.201, 0.7106),
color_util.XYPoint(0.138, 0.08),
)
GAMUT_INVALID_2 = color_util.GamutType(
color_util.XYPoint(0.704, 1.296),
color_util.XYPoint(0.2151, 0.7106),
color_util.XYPoint(0.138, 0.08),
)
GAMUT_INVALID_3 = color_util.GamutType(
color_util.XYPoint(0.0, 0.0),
color_util.XYPoint(0.0, 0.0),
color_util.XYPoint(0.0, 0.0),
)
GAMUT_INVALID_4 = color_util.GamutType(
color_util.XYPoint(0.1, 0.1),
color_util.XYPoint(0.3, 0.3),
color_util.XYPoint(0.7, 0.7),
)
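# Why the gamuts above are invalid (exercised in test_gamut below):
# GAMUT_INVALID_1 has a negative x coordinate, GAMUT_INVALID_2 has a y
# coordinate above 1, and GAMUT_INVALID_3/GAMUT_INVALID_4 are degenerate
# triangles (identical or collinear points) that enclose no area.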
# pylint: disable=invalid-name
def test_color_RGB_to_xy_brightness():
"""Test color_RGB_to_xy_brightness."""
assert color_util.color_RGB_to_xy_brightness(0, 0, 0) == (0, 0, 0)
assert color_util.color_RGB_to_xy_brightness(255, 255, 255) == (0.323, 0.329, 255)
assert color_util.color_RGB_to_xy_brightness(0, 0, 255) == (0.136, 0.04, 12)
assert color_util.color_RGB_to_xy_brightness(0, 255, 0) == (0.172, 0.747, 170)
assert color_util.color_RGB_to_xy_brightness(255, 0, 0) == (0.701, 0.299, 72)
assert color_util.color_RGB_to_xy_brightness(128, 0, 0) == (0.701, 0.299, 16)
assert color_util.color_RGB_to_xy_brightness(255, 0, 0, GAMUT) == (0.7, 0.299, 72)
assert color_util.color_RGB_to_xy_brightness(0, 255, 0, GAMUT) == (
0.215,
0.711,
170,
)
assert color_util.color_RGB_to_xy_brightness(0, 0, 255, GAMUT) == (0.138, 0.08, 12)
def test_color_RGB_to_xy():
"""Test color_RGB_to_xy."""
assert color_util.color_RGB_to_xy(0, 0, 0) == (0, 0)
assert color_util.color_RGB_to_xy(255, 255, 255) == (0.323, 0.329)
assert color_util.color_RGB_to_xy(0, 0, 255) == (0.136, 0.04)
assert color_util.color_RGB_to_xy(0, 255, 0) == (0.172, 0.747)
assert color_util.color_RGB_to_xy(255, 0, 0) == (0.701, 0.299)
assert color_util.color_RGB_to_xy(128, 0, 0) == (0.701, 0.299)
assert color_util.color_RGB_to_xy(0, 0, 255, GAMUT) == (0.138, 0.08)
assert color_util.color_RGB_to_xy(0, 255, 0, GAMUT) == (0.215, 0.711)
assert color_util.color_RGB_to_xy(255, 0, 0, GAMUT) == (0.7, 0.299)
def test_color_xy_brightness_to_RGB():
"""Test color_xy_brightness_to_RGB."""
assert color_util.color_xy_brightness_to_RGB(1, 1, 0) == (0, 0, 0)
assert color_util.color_xy_brightness_to_RGB(0.35, 0.35, 128) == (194, 186, 169)
assert color_util.color_xy_brightness_to_RGB(0.35, 0.35, 255) == (255, 243, 222)
assert color_util.color_xy_brightness_to_RGB(1, 0, 255) == (255, 0, 60)
assert color_util.color_xy_brightness_to_RGB(0, 1, 255) == (0, 255, 0)
assert color_util.color_xy_brightness_to_RGB(0, 0, 255) == (0, 63, 255)
assert color_util.color_xy_brightness_to_RGB(1, 0, 255, GAMUT) == (255, 0, 3)
assert color_util.color_xy_brightness_to_RGB(0, 1, 255, GAMUT) == (82, 255, 0)
assert color_util.color_xy_brightness_to_RGB(0, 0, 255, GAMUT) == (9, 85, 255)
def test_color_xy_to_RGB():
"""Test color_xy_to_RGB."""
assert color_util.color_xy_to_RGB(0.35, 0.35) == (255, 243, 222)
assert color_util.color_xy_to_RGB(1, 0) == (255, 0, 60)
assert color_util.color_xy_to_RGB(0, 1) == (0, 255, 0)
assert color_util.color_xy_to_RGB(0, 0) == (0, 63, 255)
assert color_util.color_xy_to_RGB(1, 0, GAMUT) == (255, 0, 3)
assert color_util.color_xy_to_RGB(0, 1, GAMUT) == (82, 255, 0)
assert color_util.color_xy_to_RGB(0, 0, GAMUT) == (9, 85, 255)
def test_color_RGB_to_hsv():
"""Test color_RGB_to_hsv."""
assert color_util.color_RGB_to_hsv(0, 0, 0) == (0, 0, 0)
assert color_util.color_RGB_to_hsv(255, 255, 255) == (0, 0, 100)
assert color_util.color_RGB_to_hsv(0, 0, 255) == (240, 100, 100)
assert color_util.color_RGB_to_hsv(0, 255, 0) == (120, 100, 100)
assert color_util.color_RGB_to_hsv(255, 0, 0) == (0, 100, 100)
def test_color_hsv_to_RGB():
"""Test color_hsv_to_RGB."""
assert color_util.color_hsv_to_RGB(0, 0, 0) == (0, 0, 0)
assert color_util.color_hsv_to_RGB(0, 0, 100) == (255, 255, 255)
assert color_util.color_hsv_to_RGB(240, 100, 100) == (0, 0, 255)
assert color_util.color_hsv_to_RGB(120, 100, 100) == (0, 255, 0)
assert color_util.color_hsv_to_RGB(0, 100, 100) == (255, 0, 0)
def test_color_hsb_to_RGB():
"""Test color_hsb_to_RGB."""
assert color_util.color_hsb_to_RGB(0, 0, 0) == (0, 0, 0)
assert color_util.color_hsb_to_RGB(0, 0, 1.0) == (255, 255, 255)
assert color_util.color_hsb_to_RGB(240, 1.0, 1.0) == (0, 0, 255)
assert color_util.color_hsb_to_RGB(120, 1.0, 1.0) == (0, 255, 0)
assert color_util.color_hsb_to_RGB(0, 1.0, 1.0) == (255, 0, 0)
def test_color_xy_to_hs():
"""Test color_xy_to_hs."""
assert color_util.color_xy_to_hs(1, 1) == (47.294, 100)
assert color_util.color_xy_to_hs(0.35, 0.35) == (38.182, 12.941)
assert color_util.color_xy_to_hs(1, 0) == (345.882, 100)
assert color_util.color_xy_to_hs(0, 1) == (120, 100)
assert color_util.color_xy_to_hs(0, 0) == (225.176, 100)
assert color_util.color_xy_to_hs(1, 0, GAMUT) == (359.294, 100)
assert color_util.color_xy_to_hs(0, 1, GAMUT) == (100.706, 100)
assert color_util.color_xy_to_hs(0, 0, GAMUT) == (221.463, 96.471)
def test_color_hs_to_xy():
"""Test color_hs_to_xy."""
assert color_util.color_hs_to_xy(180, 100) == (0.151, 0.343)
assert color_util.color_hs_to_xy(350, 12.5) == (0.356, 0.321)
assert color_util.color_hs_to_xy(140, 50) == (0.229, 0.474)
assert color_util.color_hs_to_xy(0, 40) == (0.474, 0.317)
assert color_util.color_hs_to_xy(360, 0) == (0.323, 0.329)
assert color_util.color_hs_to_xy(0, 100, GAMUT) == (0.7, 0.299)
assert color_util.color_hs_to_xy(120, 100, GAMUT) == (0.215, 0.711)
assert color_util.color_hs_to_xy(180, 100, GAMUT) == (0.17, 0.34)
assert color_util.color_hs_to_xy(240, 100, GAMUT) == (0.138, 0.08)
assert color_util.color_hs_to_xy(360, 100, GAMUT) == (0.7, 0.299)
def test_rgb_hex_to_rgb_list():
"""Test rgb_hex_to_rgb_list."""
assert [255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffff")
assert [0, 0, 0] == color_util.rgb_hex_to_rgb_list("000000")
assert [255, 255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffffff")
assert [0, 0, 0, 0] == color_util.rgb_hex_to_rgb_list("00000000")
assert [51, 153, 255] == color_util.rgb_hex_to_rgb_list("3399ff")
assert [51, 153, 255, 0] == color_util.rgb_hex_to_rgb_list("3399ff00")
def test_color_name_to_rgb_valid_name():
"""Test color_name_to_rgb."""
assert color_util.color_name_to_rgb("red") == (255, 0, 0)
assert color_util.color_name_to_rgb("blue") == (0, 0, 255)
assert color_util.color_name_to_rgb("green") == (0, 128, 0)
# spaces in the name
assert color_util.color_name_to_rgb("dark slate blue") == (72, 61, 139)
# spaces removed from name
assert color_util.color_name_to_rgb("darkslateblue") == (72, 61, 139)
assert color_util.color_name_to_rgb("dark slateblue") == (72, 61, 139)
assert color_util.color_name_to_rgb("darkslate blue") == (72, 61, 139)
def test_color_name_to_rgb_unknown_name_raises_value_error():
"""Test color_name_to_rgb."""
with pytest.raises(ValueError):
color_util.color_name_to_rgb("not a color")
def test_color_rgb_to_rgbw():
"""Test color_rgb_to_rgbw."""
assert color_util.color_rgb_to_rgbw(0, 0, 0) == (0, 0, 0, 0)
assert color_util.color_rgb_to_rgbw(255, 255, 255) == (0, 0, 0, 255)
assert color_util.color_rgb_to_rgbw(255, 0, 0) == (255, 0, 0, 0)
assert color_util.color_rgb_to_rgbw(0, 255, 0) == (0, 255, 0, 0)
assert color_util.color_rgb_to_rgbw(0, 0, 255) == (0, 0, 255, 0)
assert color_util.color_rgb_to_rgbw(255, 127, 0) == (255, 127, 0, 0)
assert color_util.color_rgb_to_rgbw(255, 127, 127) == (255, 0, 0, 253)
assert color_util.color_rgb_to_rgbw(127, 127, 127) == (0, 0, 0, 127)
def test_color_rgbw_to_rgb():
"""Test color_rgbw_to_rgb."""
assert color_util.color_rgbw_to_rgb(0, 0, 0, 0) == (0, 0, 0)
assert color_util.color_rgbw_to_rgb(0, 0, 0, 255) == (255, 255, 255)
assert color_util.color_rgbw_to_rgb(255, 0, 0, 0) == (255, 0, 0)
assert color_util.color_rgbw_to_rgb(0, 255, 0, 0) == (0, 255, 0)
assert color_util.color_rgbw_to_rgb(0, 0, 255, 0) == (0, 0, 255)
assert color_util.color_rgbw_to_rgb(255, 127, 0, 0) == (255, 127, 0)
assert color_util.color_rgbw_to_rgb(255, 0, 0, 253) == (255, 127, 127)
assert color_util.color_rgbw_to_rgb(0, 0, 0, 127) == (127, 127, 127)
def test_color_rgb_to_hex():
"""Test color_rgb_to_hex."""
assert color_util.color_rgb_to_hex(255, 255, 255) == "ffffff"
assert color_util.color_rgb_to_hex(0, 0, 0) == "000000"
assert color_util.color_rgb_to_hex(51, 153, 255) == "3399ff"
assert color_util.color_rgb_to_hex(255, 67.9204190, 0) == "ff4400"
def test_gamut():
"""Test gamut functions."""
assert color_util.check_valid_gamut(GAMUT)
assert not color_util.check_valid_gamut(GAMUT_INVALID_1)
assert not color_util.check_valid_gamut(GAMUT_INVALID_2)
assert not color_util.check_valid_gamut(GAMUT_INVALID_3)
assert not color_util.check_valid_gamut(GAMUT_INVALID_4)
def test_color_temperature_mired_to_kelvin():
"""Test color_temperature_mired_to_kelvin."""
assert color_util.color_temperature_mired_to_kelvin(40) == 25000
assert color_util.color_temperature_mired_to_kelvin(200) == 5000
with pytest.raises(ZeroDivisionError):
assert color_util.color_temperature_mired_to_kelvin(0)
def test_color_temperature_kelvin_to_mired():
"""Test color_temperature_kelvin_to_mired."""
assert color_util.color_temperature_kelvin_to_mired(25000) == 40
assert color_util.color_temperature_kelvin_to_mired(5000) == 200
with pytest.raises(ZeroDivisionError):
assert color_util.color_temperature_kelvin_to_mired(0)
def test_returns_same_value_for_any_two_temperatures_below_1000():
"""Function should return same value for 999 Kelvin and 0 Kelvin."""
rgb_1 = color_util.color_temperature_to_rgb(999)
rgb_2 = color_util.color_temperature_to_rgb(0)
assert rgb_1 == rgb_2
def test_returns_same_value_for_any_two_temperatures_above_40000():
"""Function should return same value for 40001K and 999999K."""
rgb_1 = color_util.color_temperature_to_rgb(40001)
rgb_2 = color_util.color_temperature_to_rgb(999999)
assert rgb_1 == rgb_2
def test_should_return_pure_white_at_6600():
"""
Function should return red=255, blue=255, green=255 when given 6600K.
6600K is considered "pure white" light.
This is just a rough estimate because the formula itself is a "best
guess" approach.
"""
rgb = color_util.color_temperature_to_rgb(6600)
assert (255, 255, 255) == rgb
def test_color_above_6600_should_have_more_blue_than_red_or_green():
"""Function should return a higher blue value for blue-ish light."""
rgb = color_util.color_temperature_to_rgb(6700)
assert rgb[2] > rgb[1]
assert rgb[2] > rgb[0]
def test_color_below_6600_should_have_more_red_than_blue_or_green():
"""Function should return a higher red value for red-ish light."""
rgb = color_util.color_temperature_to_rgb(6500)
assert rgb[0] > rgb[1]
assert rgb[0] > rgb[2]
def test_get_color_in_voluptuous():
"""Test using the get method in color validation."""
schema = vol.Schema(color_util.color_name_to_rgb)
with pytest.raises(vol.Invalid):
schema("not a color")
assert schema("red") == (255, 0, 0)
|
import unittest
from absl import flags
from absl.testing import flagsaver
from perfkitbenchmarker.providers.gcp import gcp_spanner
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
class SpannerTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super().setUp()
@flagsaver.flagsaver
def testInitFromSpec(self):
FLAGS.zone = ['us-east1-a']
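    # The regional instance config asserted below ('regional-us-east1') is
    # presumably derived from this zone flag by dropping the zone letter.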
spec_args = {
'service_type': gcp_spanner.DEFAULT_SPANNER_TYPE,
'name': 'test_instance',
'description': 'test_description',
'database': 'test_database',
'ddl': 'test_schema',
'nodes': 2,
'project': 'test_project',
}
test_spec = gcp_spanner.SpannerSpec('test_component', None, **spec_args)
spanner = gcp_spanner.GcpSpannerInstance.FromSpec(test_spec)
self.assertEqual(spanner.name, 'test_instance')
self.assertEqual(spanner._description, 'test_description')
self.assertEqual(spanner.database, 'test_database')
self.assertEqual(spanner._ddl, 'test_schema')
self.assertEqual(spanner._nodes, 2)
self.assertEqual(spanner.project, 'test_project')
self.assertEqual(spanner._config, 'regional-us-east1')
if __name__ == '__main__':
unittest.main()
|
import os
import pytest
from mock import patch, mock_open
from decouple import AutoConfig, UndefinedValueError, RepositoryEmpty, DEFAULT_ENCODING, PY3
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_ini_in_subdir():
"""
When `AutoConfig._find_file()` gets a relative path from
`AutoConfig._caller_path()`, it will not properly search back to parent
dirs.
This is a regression test to make sure that when
`AutoConfig._caller_path()` finds something like `./config.py` it will look
for settings.ini in parent directories.
"""
config = AutoConfig()
subdir = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini',
'project', 'subdir')
os.chdir(subdir)
path = os.path.join(os.path.curdir, 'empty.py')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'none')
with patch('os.path.isfile', return_value=False):
assert True is config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_exception():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.isfile', side_effect=Exception('PermissionDenied')):
assert True is config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
def test_autoconfig_is_not_a_file():
os.environ['KeyFallback'] = 'On'
config = AutoConfig()
with patch('os.path.isfile', return_value=False):
assert True is config('KeyFallback', cast=bool)
del os.environ['KeyFallback']
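# The three fallback tests above rely on python-decouple consulting os.environ
# before the settings repository, so the exported KeyFallback variable is
# found even when no settings file can be located.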
def test_autoconfig_search_path():
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'custom-path')
config = AutoConfig(path)
assert 'CUSTOMPATH' == config('KEY')
def test_autoconfig_empty_repository():
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'custom-path')
config = AutoConfig(path)
with pytest.raises(UndefinedValueError):
config('KeyNotInEnvAndNotInRepository')
assert isinstance(config.config.repository, RepositoryEmpty)
def test_autoconfig_ini_default_encoding():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
filename = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project', 'settings.ini')
with patch.object(config, '_caller_path', return_value=path):
with patch('decouple.open', mock_open(read_data='')) as mopen:
assert config.encoding == DEFAULT_ENCODING
assert 'ENV' == config('KEY', default='ENV')
mopen.assert_called_once_with(filename, encoding=DEFAULT_ENCODING)
def test_autoconfig_env_default_encoding():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
filename = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', '.env')
with patch.object(config, '_caller_path', return_value=path):
with patch('decouple.open', mock_open(read_data='')) as mopen:
assert config.encoding == DEFAULT_ENCODING
assert 'ENV' == config('KEY', default='ENV')
mopen.assert_called_once_with(filename, encoding=DEFAULT_ENCODING)
|
from weblate.formats.base import EmptyFormat
from weblate.formats.exporters import (
AndroidResourceExporter,
CSVExporter,
JSONExporter,
MoExporter,
PoExporter,
PoXliffExporter,
StringsExporter,
TBXExporter,
XliffExporter,
XlsxExporter,
)
from weblate.formats.helpers import BytesIOMode
from weblate.glossary.models import Term
from weblate.lang.models import Language, Plural
from weblate.trans.models import (
Comment,
Component,
Project,
Suggestion,
Translation,
Unit,
)
from weblate.trans.tests.test_models import BaseTestCase
from weblate.utils.state import STATE_EMPTY, STATE_TRANSLATED
class PoExporterTest(BaseTestCase):
_class = PoExporter
_has_context = True
_has_comments = True
def get_exporter(self, lang=None, **kwargs):
if lang is None:
lang, created = Language.objects.get_or_create(code="xx")
if created:
Plural.objects.create(language=lang)
return self._class(
language=lang,
source_language=Language.objects.get(code="en"),
project=Project(slug="test", name="TEST"),
**kwargs
)
def check_export(self, exporter):
output = exporter.serialize()
self.assertIsNotNone(output)
return output
def check_plurals(self, result):
self.assertIn(b"msgid_plural", result)
self.assertIn(b"msgstr[2]", result)
def check_glossary(self, word):
exporter = self.get_exporter()
exporter.add_glossary_term(word)
self.check_export(exporter)
def test_glossary(self):
self.check_glossary(Term(source="foo", target="bar"))
def test_glossary_markup(self):
self.check_glossary(Term(source="<b>foo</b>", target="<b>bar</b>"))
def test_glossary_special(self):
self.check_glossary(Term(source="bar\x1e\x1efoo", target="br\x1eff"))
def check_unit(self, nplurals=3, template=None, source_info=None, **kwargs):
if nplurals == 3:
formula = "n==0 ? 0 : n==1 ? 1 : 2"
else:
formula = "0"
lang = Language.objects.create(code="zz")
plural = Plural.objects.create(language=lang, number=nplurals, formula=formula)
project = Project(slug="test")
component = Component(
slug="comp",
project=project,
file_format="xliff",
template=template,
source_language=Language.objects.get(code="en"),
)
translation = Translation(language=lang, component=component, plural=plural)
# Fake file format to avoid need for actual files
translation.store = EmptyFormat(BytesIOMode("", b""))
unit = Unit(translation=translation, id_hash=-1, **kwargs)
if source_info:
for key, value in source_info.items():
setattr(unit, key, value)
# The dashes need special handling in XML based formats
unit.__dict__["unresolved_comments"] = [
Comment(comment="Weblate translator comment ---- ")
]
unit.__dict__["suggestions"] = [
Suggestion(target="Weblate translator suggestion")
]
else:
unit.__dict__["unresolved_comments"] = []
unit.source_unit = unit
exporter = self.get_exporter(lang, translation=translation)
exporter.add_unit(unit)
return self.check_export(exporter)
def test_unit(self):
self.check_unit(source="xxx", target="yyy")
def test_unit_mono(self):
self.check_unit(source="xxx", target="yyy", template="template")
def _encode(self, string):
return string.encode("utf-8")
def test_unit_plural(self):
result = self.check_unit(
source="xxx\x1e\x1efff",
target="yyy\x1e\x1efff\x1e\x1ewww",
state=STATE_TRANSLATED,
)
self.check_plurals(result)
def test_unit_plural_one(self):
self.check_unit(
nplurals=1, source="xxx\x1e\x1efff", target="yyy", state=STATE_TRANSLATED
)
def test_unit_not_translated(self):
self.check_unit(
nplurals=1, source="xxx\x1e\x1efff", target="yyy", state=STATE_EMPTY
)
def test_context(self):
result = self.check_unit(
source="foo", target="bar", context="context", state=STATE_TRANSLATED
)
if self._has_context:
self.assertIn(self._encode("context"), result)
elif self._has_context is not None:
self.assertNotIn(self._encode("context"), result)
def test_extra_info(self):
result = self.check_unit(
source="foo",
target="bar",
context="context",
state=STATE_TRANSLATED,
source_info={
"extra_flags": "max-length:200",
# The dashes need special handling in XML based formats
"explanation": "Context in Weblate\n------------------\n",
},
)
if self._has_context:
self.assertIn(self._encode("context"), result)
elif self._has_context is not None:
self.assertNotIn(self._encode("context"), result)
if self._has_comments:
self.assertIn(self._encode("Context in Weblate"), result)
self.assertIn(self._encode("Weblate translator comment"), result)
self.assertIn(self._encode("Suggested in Weblate"), result)
self.assertIn(self._encode("Weblate translator suggestion"), result)
def setUp(self):
self.exporter = self.get_exporter()
def test_has_get_storage(self):
self.assertTrue(hasattr(self.exporter, "get_storage"))
def test_has_setsourcelanguage(self):
self.assertTrue(hasattr(self.exporter.storage, "setsourcelanguage"))
def test_has_settargetlanguage(self):
self.assertTrue(hasattr(self.exporter.storage, "settargetlanguage"))
def test_has_unitclass(self):
self.assertTrue(hasattr(self.exporter.storage, "UnitClass"))
def test_has_addunit(self):
self.assertTrue(hasattr(self.exporter.storage, "addunit"))
class PoXliffExporterTest(PoExporterTest):
_class = PoXliffExporter
_has_context = True
def check_plurals(self, result):
self.assertIn(b"[2]", result)
def test_xml_nodes(self):
xml = """<xliff:g
xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2"
example="Launcher3"
id="app_name">
%1$s
</xliff:g>"""
result = self.check_unit(source="x " + xml, target="y " + xml).decode()
self.assertIn("<g", result)
def test_php_code(self):
text = """<?php
if (!defined("FILENAME")){
define("FILENAME",0);
/*
* @author AUTHOR
*/
class CLASSNAME extends BASECLASS {
//constructor
function CLASSNAME(){
BASECLASS::BASECLASS();
}
}
}
?>"""
result = self.check_unit(source="x " + text, target="y " + text).decode()
self.assertIn("<?php", result)
class XliffExporterTest(PoXliffExporterTest):
_class = XliffExporter
def check_plurals(self, result):
# Doesn't support plurals
return
class TBXExporterTest(PoExporterTest):
_class = TBXExporter
_has_context = False
def check_plurals(self, result):
# Doesn't support plurals
return
class MoExporterTest(PoExporterTest):
_class = MoExporter
_has_context = True
_has_comments = False
def check_plurals(self, result):
self.assertIn(b"www", result)
class CSVExporterTest(PoExporterTest):
_class = CSVExporter
_has_context = True
def check_plurals(self, result):
# Doesn't support plurals
pass
def test_escaping(self):
output = self.check_unit(
source='=HYPERLINK("https://weblate.org/"&A1, "Weblate")', target="yyy"
)
self.assertIn(b"\"'=HYPERLINK", output)
class XlsxExporterTest(PoExporterTest):
_class = XlsxExporter
_has_context = False
_has_comments = False
def check_plurals(self, result):
# Doesn't support plurals
pass
class AndroidResourceExporterTest(PoExporterTest):
_class = AndroidResourceExporter
_has_comments = False
def check_plurals(self, result):
self.assertIn(b"<plural", result)
class JSONExporterTest(PoExporterTest):
_class = JSONExporter
_has_comments = False
def check_plurals(self, result):
# Doesn't support plurals
pass
class StringsExporterTest(PoExporterTest):
_class = StringsExporter
_has_comments = False
def _encode(self, string):
# Skip BOM
return string.encode("utf-16")[2:]
def check_plurals(self, result):
# Doesn't support plurals
pass
|
from pyvlx import OpeningDevice, Position
from pyvlx.opening_device import Awning, Blind, GarageDoor, Gate, RollerShutter, Window
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_AWNING,
DEVICE_CLASS_BLIND,
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_GATE,
DEVICE_CLASS_SHUTTER,
DEVICE_CLASS_WINDOW,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.core import callback
from . import DATA_VELUX
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up cover(s) for Velux platform."""
entities = []
for node in hass.data[DATA_VELUX].pyvlx.nodes:
if isinstance(node, OpeningDevice):
entities.append(VeluxCover(node))
async_add_entities(entities)
class VeluxCover(CoverEntity):
"""Representation of a Velux cover."""
def __init__(self, node):
"""Initialize the cover."""
self.node = node
@callback
def async_register_callbacks(self):
"""Register callbacks to update hass after device was changed."""
async def after_update_callback(device):
"""Call after device was updated."""
self.async_write_ha_state()
self.node.register_device_updated_cb(after_update_callback)
async def async_added_to_hass(self):
"""Store register state change callback."""
self.async_register_callbacks()
@property
def unique_id(self):
"""Return the unique ID of this cover."""
return self.node.serial_number
@property
def name(self):
"""Return the name of the Velux device."""
return self.node.name
@property
def should_poll(self):
"""No polling needed within Velux."""
return False
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION | SUPPORT_STOP
@property
def current_cover_position(self):
"""Return the current position of the cover."""
return 100 - self.node.position.position_percent
@property
def device_class(self):
"""Define this cover as either awning, blind, garage, gate, shutter or window."""
if isinstance(self.node, Awning):
return DEVICE_CLASS_AWNING
if isinstance(self.node, Blind):
return DEVICE_CLASS_BLIND
if isinstance(self.node, GarageDoor):
return DEVICE_CLASS_GARAGE
if isinstance(self.node, Gate):
return DEVICE_CLASS_GATE
if isinstance(self.node, RollerShutter):
return DEVICE_CLASS_SHUTTER
if isinstance(self.node, Window):
return DEVICE_CLASS_WINDOW
return DEVICE_CLASS_WINDOW
@property
def is_closed(self):
"""Return if the cover is closed."""
return self.node.position.closed
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self.node.close(wait_for_completion=False)
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self.node.open(wait_for_completion=False)
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
if ATTR_POSITION in kwargs:
position_percent = 100 - kwargs[ATTR_POSITION]
await self.node.set_position(
Position(position_percent=position_percent), wait_for_completion=False
)
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self.node.stop(wait_for_completion=False)
|
from asyncio import TimeoutError as AsyncIOTimeoutError
from datetime import timedelta
import logging
from typing import Callable, List
from aiohttp import ClientError
from py_nightscout import Api as NightscoutAPI
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .const import ATTR_DATE, ATTR_DELTA, ATTR_DEVICE, ATTR_DIRECTION, DOMAIN
SCAN_INTERVAL = timedelta(minutes=1)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Blood Glucose"
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the Glucose Sensor."""
api = hass.data[DOMAIN][entry.entry_id]
async_add_entities([NightscoutSensor(api, "Blood Sugar", entry.unique_id)], True)
class NightscoutSensor(Entity):
"""Implementation of a Nightscout sensor."""
def __init__(self, api: NightscoutAPI, name, unique_id):
"""Initialize the Nightscout sensor."""
self.api = api
self._unique_id = unique_id
self._name = name
self._state = None
self._attributes = None
self._unit_of_measurement = "mg/dL"
self._icon = "mdi:cloud-question"
self._available = False
@property
def unique_id(self):
"""Return the unique ID of the sensor."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def available(self):
"""Return if the sensor data are available."""
return self._available
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
async def async_update(self):
"""Fetch the latest data from Nightscout REST API and update the state."""
try:
values = await self.api.get_sgvs()
except (ClientError, AsyncIOTimeoutError, OSError) as error:
_LOGGER.error("Error fetching data. Failed with %s", error)
self._available = False
return
self._available = True
self._attributes = {}
self._state = None
if values:
value = values[0]
self._attributes = {
ATTR_DEVICE: value.device,
ATTR_DATE: value.date,
ATTR_DELTA: value.delta,
ATTR_DIRECTION: value.direction,
}
self._state = value.sgv
self._icon = self._parse_icon()
else:
self._available = False
_LOGGER.warning("Empty reply found when expecting JSON data")
def _parse_icon(self) -> str:
"""Update the icon based on the direction attribute."""
switcher = {
"Flat": "mdi:arrow-right",
"SingleDown": "mdi:arrow-down",
"FortyFiveDown": "mdi:arrow-bottom-right",
"DoubleDown": "mdi:chevron-triple-down",
"SingleUp": "mdi:arrow-up",
"FortyFiveUp": "mdi:arrow-top-right",
"DoubleUp": "mdi:chevron-triple-up",
}
return switcher.get(self._attributes[ATTR_DIRECTION], "mdi:cloud-question")
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
|
import itertools
import numpy as np
import networkx as nx
try:
from lxml import etree
except ImportError:
try:
import xml.etree.ElementTree as etree
except ImportError:
        # xml.etree.cElementTree is deliberately not attempted here: it caused
        # problems with dictionary attributes.
print("Failed to import ElementTree from any known place")
from pgmpy.models import BayesianModel
from pgmpy.factors.discrete import TabularCPD
class XBNReader(object):
"""
Base class for reading XML Belief Network File Format.
"""
def __init__(self, path=None, string=None):
"""
Initializer for XBNReader class.
Parameters
----------
path: str or file
Path of the file containing XBN data.
string: str
String of XBN data
Examples
--------
reader = XBNReader('test_XBN.xml')
Reference
---------
http://xml.coverpages.org/xbn-MSdefault19990414.html
"""
if path:
self.network = etree.parse(path).getroot()
elif string:
self.network = etree.fromstring(string)
else:
raise ValueError("Must specify either path or string")
self.bnmodel = self.network.find("BNMODEL")
self.analysisnotebook = self.get_analysisnotebook_values()
self.model_name = self.get_bnmodel_name()
self.static_properties = self.get_static_properties()
self.variables = self.get_variables()
self.edges = self.get_edges()
self.variable_CPD = self.get_distributions()
def get_analysisnotebook_values(self):
"""
Returns a dictionary of the attributes of ANALYSISNOTEBOOK tag
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_analysisnotebook_values()
{'NAME': "Notebook.Cancer Example From Neapolitan",
'ROOT': "Cancer"}
"""
return {key: value for key, value in self.network.items()}
def get_bnmodel_name(self):
"""
Returns the name of the BNMODEL.
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_bnmodel_name()
'Cancer'
"""
return self.network.find("BNMODEL").get("NAME")
def get_static_properties(self):
"""
Returns a dictionary of STATICPROPERTIES
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_static_properties()
{'FORMAT': 'MSR DTAS XML', 'VERSION': '0.2', 'CREATOR': 'Microsoft Research DTAS'}
"""
return {
tags.tag: tags.get("VALUE")
for tags in self.bnmodel.find("STATICPROPERTIES")
}
def get_variables(self):
"""
Returns a list of variables.
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_variables()
{'a': {'TYPE': 'discrete', 'XPOS': '13495',
'YPOS': '10465', 'DESCRIPTION': '(a) Metastatic Cancer',
'STATES': ['Present', 'Absent']}
'b': {'TYPE': 'discrete', 'XPOS': '11290',
'YPOS': '11965', 'DESCRIPTION': '(b) Serum Calcium Increase',
'STATES': ['Present', 'Absent']},
'c': {....},
'd': {....},
'e': {....}
}
"""
variables = {}
for variable in self.bnmodel.find("VARIABLES"):
variables[variable.get("NAME")] = {
"TYPE": variable.get("TYPE"),
"XPOS": variable.get("XPOS"),
"YPOS": variable.get("YPOS"),
"DESCRIPTION": variable.find("DESCRIPTION").text,
"STATES": [state.text for state in variable.findall("STATENAME")],
}
return variables
def get_edges(self):
"""
Returns a list of tuples. Each tuple contains two elements (parent, child) for each edge.
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_edges()
[('a', 'b'), ('a', 'c'), ('b', 'd'), ('c', 'd'), ('c', 'e')]
"""
return [
(arc.get("PARENT"), arc.get("CHILD"))
for arc in self.bnmodel.find("STRUCTURE")
]
def get_distributions(self):
"""
Returns a dictionary of name and its distribution. Distribution is a ndarray.
The ndarray is stored in the standard way such that the rightmost variable
changes most often. Consider a CPD of variable 'd' which has parents 'b' and
'c' (distribution['CONDSET'] = ['b', 'c'])
| d_0 d_1
---------------------------
b_0, c_0 | 0.8 0.2
b_0, c_1 | 0.9 0.1
b_1, c_0 | 0.7 0.3
b_1, c_1 | 0.05 0.95
The value of distribution['d']['DPIS'] for the above example will be:
array([[ 0.8 , 0.2 ], [ 0.9 , 0.1 ], [ 0.7 , 0.3 ], [ 0.05, 0.95]])
Examples
--------
>>> reader = XBNReader('xbn_test.xml')
>>> reader.get_distributions()
{'a': {'TYPE': 'discrete', 'DPIS': array([[ 0.2, 0.8]])},
'e': {'TYPE': 'discrete', 'DPIS': array([[ 0.8, 0.2],
[ 0.6, 0.4]]), 'CONDSET': ['c'], 'CARDINALITY': [2]},
'b': {'TYPE': 'discrete', 'DPIS': array([[ 0.8, 0.2],
[ 0.2, 0.8]]), 'CONDSET': ['a'], 'CARDINALITY': [2]},
'c': {'TYPE': 'discrete', 'DPIS': array([[ 0.2 , 0.8 ],
[ 0.05, 0.95]]), 'CONDSET': ['a'], 'CARDINALITY': [2]},
'd': {'TYPE': 'discrete', 'DPIS': array([[ 0.8 , 0.2 ],
[ 0.9 , 0.1 ],
[ 0.7 , 0.3 ],
             [ 0.05, 0.95]]), 'CONDSET': ['b', 'c'], 'CARDINALITY': [2, 2]}}
"""
distribution = {}
for dist in self.bnmodel.find("DISTRIBUTIONS"):
variable_name = dist.find("PRIVATE").get("NAME")
distribution[variable_name] = {"TYPE": dist.get("TYPE")}
if dist.find("CONDSET") is not None:
distribution[variable_name]["CONDSET"] = [
var.get("NAME") for var in dist.find("CONDSET").findall("CONDELEM")
]
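                # Each parent's cardinality is inferred from the DPI INDEXES
                # attributes: count the distinct index values that appear in
                # that parent's column across all DPI rows.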
distribution[variable_name]["CARDINALITY"] = np.array(
[
len(
set(
np.array(
[
list(map(int, dpi.get("INDEXES").split()))
for dpi in dist.find("DPIS")
]
)[:, i]
)
)
for i in range(len(distribution[variable_name]["CONDSET"]))
]
)
distribution[variable_name]["DPIS"] = np.array(
[list(map(float, dpi.text.split())) for dpi in dist.find("DPIS")]
).transpose()
return distribution
def get_model(self):
"""
Returns an instance of Bayesian Model.
"""
model = BayesianModel()
model.add_nodes_from(self.variables)
model.add_edges_from(self.edges)
model.name = self.model_name
tabular_cpds = []
for var, values in self.variable_CPD.items():
evidence = values["CONDSET"] if "CONDSET" in values else []
cpd = values["DPIS"]
evidence_card = values["CARDINALITY"] if "CARDINALITY" in values else []
states = self.variables[var]["STATES"]
cpd = TabularCPD(
var, len(states), cpd, evidence=evidence, evidence_card=evidence_card
)
tabular_cpds.append(cpd)
model.add_cpds(*tabular_cpds)
for var, properties in self.variables.items():
model._node[var] = properties
return model
class XBNWriter(object):
"""
Base class for writing XML Belief Network file format.
"""
def __init__(self, model, encoding="utf-8", prettyprint=True):
"""
Initializer for XBNWriter class
Parameters
----------
model: BayesianModel Instance
Model to write
encoding: str(optional)
Encoding for test data
prettyprint: Bool(optional)
Indentation in output XML if true
Reference
---------
http://xml.coverpages.org/xbn-MSdefault19990414.html
Examples
--------
>>> writer = XBNWriter(model)
"""
if not isinstance(model, BayesianModel):
raise TypeError("Model must be an instance of Bayesian Model.")
self.model = model
self.encoding = encoding
self.prettyprint = prettyprint
self.network = etree.Element("ANALYSISNOTEBOOK")
self.bnmodel = etree.SubElement(self.network, "BNMODEL")
if self.model.name:
etree.SubElement(self.bnmodel, "NAME").text = self.model.name
self.variables = self.set_variables(self.model.nodes)
self.structure = self.set_edges(sorted(self.model.edges()))
self.distribution = self.set_distributions()
def __str__(self):
"""
Return the XML as string.
"""
if self.prettyprint:
self.indent(self.network)
return etree.tostring(self.network, encoding=self.encoding)
def indent(self, elem, level=0):
"""
Inplace prettyprint formatter.
"""
i = "\n" + level * " "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
self.indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def set_analysisnotebook(self, **data):
"""
Set attributes for ANALYSISNOTEBOOK tag
Parameters
----------
**data: dict
{name: value} for the attributes to be set.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_analysisnotebook(NAME="Notebook.Cancer Example From Neapolitan",
... ROOT='Cancer')
"""
for key, value in data.items():
self.network.set(str(key), str(value))
def set_bnmodel_name(self, name):
"""
Set the name of the BNMODEL.
Parameters
----------
name: str
Name of the BNModel.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_bnmodel_name("Cancer")
"""
self.bnmodel.set("NAME", str(name))
def set_static_properties(self, **data):
"""
Set STATICPROPERTIES tag for the network
Parameters
----------
**data: dict
{name: value} for name and value of the property.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_static_properties(FORMAT="MSR DTAS XML", VERSION="0.2", CREATOR="Microsoft Research DTAS")
"""
static_prop = etree.SubElement(self.bnmodel, "STATICPROPERTIES")
for key, value in data.items():
etree.SubElement(static_prop, key, attrib={"VALUE": value})
def set_variables(self, data):
"""
Set variables for the network.
Parameters
----------
data: dict
dict for variable in the form of example as shown.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_variables({'a': {'TYPE': 'discrete', 'XPOS': '13495',
... 'YPOS': '10465', 'DESCRIPTION': '(a) Metastatic Cancer',
... 'STATES': ['Present', 'Absent']},
... 'b': {'TYPE': 'discrete', 'XPOS': '11290',
... 'YPOS': '11965', 'DESCRIPTION': '(b) Serum Calcium Increase',
... 'STATES': ['Present', 'Absent']}})
"""
variables = etree.SubElement(self.bnmodel, "VARIABLES")
for var in sorted(data):
variable = etree.SubElement(
variables,
"VAR",
attrib={
"NAME": var,
"TYPE": data[var]["TYPE"],
"XPOS": data[var]["XPOS"],
"YPOS": data[var]["YPOS"],
},
)
etree.SubElement(
variable,
"DESCRIPTION",
attrib={"DESCRIPTION": data[var]["DESCRIPTION"]},
)
for state in data[var]["STATES"]:
etree.SubElement(variable, "STATENAME").text = state
def set_edges(self, edge_list):
"""
Set edges/arc in the network.
Parameters
----------
edge_list: array_like
list, tuple, dict or set whose each elements has two values (parent, child).
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
>>> writer = XBNWriter()
>>> writer.set_edges([('a', 'b'), ('a', 'c'), ('b', 'd'), ('c', 'd'), ('c', 'e')])
"""
structure = etree.SubElement(self.bnmodel, "STRUCTURE")
for edge in edge_list:
etree.SubElement(
structure, "ARC", attrib={"PARENT": edge[0], "CHILD": edge[1]}
)
def set_distributions(self):
"""
Set distributions in the network.
Examples
--------
>>> from pgmpy.readwrite.XMLBeliefNetwork import XBNWriter
        >>> writer = XBNWriter()
>>> writer.set_distributions()
"""
distributions = etree.SubElement(self.bnmodel, "DISTRIBUTIONS")
cpds = self.model.get_cpds()
cpds.sort(key=lambda x: x.variable)
for cpd in cpds:
cpd_values = cpd.get_values().transpose()
var = cpd.variable
dist = etree.SubElement(
distributions, "DIST", attrib={"TYPE": self.model.nodes[var]["TYPE"]}
)
etree.SubElement(dist, "PRIVATE", attrib={"NAME": var})
dpis = etree.SubElement(dist, "DPIS")
evidence = cpd.variables[1:]
evidence_card = cpd.cardinality[1:]
if evidence:
condset = etree.SubElement(dist, "CONDSET")
for condelem in evidence:
etree.SubElement(condset, "CONDELEM", attrib={"NAME": condelem})
indexes_iter = itertools.product(
*[range(card) for card in evidence_card]
)
for val in range(cpd_values.shape[0]):
index_value = " " + " ".join(map(str, next(indexes_iter))) + " "
etree.SubElement(
dpis, "DPI", attrib={"INDEXES": index_value}
).text = (" " + " ".join(map(str, cpd_values[val])) + " ")
else:
etree.SubElement(dpis, "DPI").text = (
" " + " ".join(map(str, cpd_values[0])) + " "
)
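# A rough round-trip sketch (assumes an 'xbn_test.xml' file exists and that
# the model nodes carry the TYPE/XPOS/YPOS/DESCRIPTION/STATES attributes the
# writer reads):
#
#     reader = XBNReader('xbn_test.xml')
#     model = reader.get_model()
#     writer = XBNWriter(model)      # builds VARIABLES/STRUCTURE/DISTRIBUTIONS
#     xml_bytes = writer.__str__()   # serialized via etree.tostring (bytes)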
|
from flask import Flask, jsonify
from flasgger import APISpec, Schema, Swagger, fields
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec_webframeworks.flask import FlaskPlugin
# Create an APISpec
spec = APISpec(
    title='Flasgger Petstore',
version='1.0.10',
openapi_version='2.0',
plugins=[
FlaskPlugin(),
MarshmallowPlugin(),
],
)
app = Flask(__name__)
# Optional marshmallow support
class CategorySchema(Schema):
id = fields.Int()
name = fields.Str(required=True)
class PetSchema(Schema):
category = fields.Nested(CategorySchema, many=True)
name = fields.Str()
@app.route('/random')
def random_pet():
"""
A cute furry animal endpoint.
Get a random pet
---
description: Get a random pet
responses:
200:
description: A pet to be returned
schema:
$ref: '#/definitions/Pet'
"""
pet = {'category': [{'id': 1, 'name': 'rodent'}], 'name': 'Mickey'}
return jsonify(PetSchema().dump(pet).data)
template = spec.to_flasgger(
app,
definitions=[CategorySchema, PetSchema],
paths=[random_pet]
)
"""
Optionally, if using apispec.APISpec from the original module, you can do:
from flasgger.utils import apispec_to_template
template = apispec_to_template(
app=app,
spec=spec,
definitions=[CategorySchema, PetSchema],
paths=[random_pet]
)
"""
# start Flasgger using a template from apispec
swag = Swagger(app, template=template)
if __name__ == '__main__':
app.run(debug=True)
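# With the app running, flasgger serves the generated spec and UI at its
# default endpoints (typically /apispec_1.json and /apidocs/) unless
# configured otherwise.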
|
import numpy as np
from elephas.ml import adapter
import pytest
pytestmark = pytest.mark.usefixtures("spark_context")
def test_to_data_frame(spark_context):
features = np.ones((2, 10))
labels = np.asarray([[2.0], [1.0]])
data_frame = adapter.to_data_frame(
spark_context, features, labels, categorical=False)
assert data_frame.count() == 2
def test_to_data_frame_cat(spark_context):
features = np.ones((2, 10))
labels = np.asarray([[0, 0, 1.0], [0, 1.0, 0]])
data_frame = adapter.to_data_frame(
spark_context, features, labels, categorical=True)
assert data_frame.count() == 2
def test_from_data_frame(spark_context):
features = np.ones((2, 10))
labels = np.asarray([[2.0], [1.0]]).reshape((2,))
data_frame = adapter.to_data_frame(
spark_context, features, labels, categorical=False)
x, y = adapter.from_data_frame(data_frame, categorical=False)
assert features.shape == x.shape
assert labels.shape == y.shape
def test_from_data_frame_cat(spark_context):
features = np.ones((2, 10))
labels = np.asarray([[0, 0, 1.0], [0, 1.0, 0]])
data_frame = adapter.to_data_frame(
spark_context, features, labels, categorical=True)
x, y = adapter.from_data_frame(data_frame, categorical=True, nb_classes=3)
assert features.shape == x.shape
assert labels.shape == y.shape
def test_df_to_simple_rdd(spark_context):
features = np.ones((2, 10))
labels = np.asarray([[2.0], [1.0]]).reshape((2,))
data_frame = adapter.to_data_frame(
spark_context, features, labels, categorical=False)
rdd = adapter.df_to_simple_rdd(data_frame, False)
assert rdd.count() == 2
|
from datetime import timedelta
import json
import logging
import dweepy
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Dweet.io Sensor"
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DEVICE): cv.string,
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dweet sensor."""
name = config.get(CONF_NAME)
device = config.get(CONF_DEVICE)
value_template = config.get(CONF_VALUE_TEMPLATE)
unit = config.get(CONF_UNIT_OF_MEASUREMENT)
if value_template is not None:
value_template.hass = hass
try:
content = json.dumps(dweepy.get_latest_dweet_for(device)[0]["content"])
except dweepy.DweepyError:
_LOGGER.error("Device/thing %s could not be found", device)
return
if value_template.render_with_possible_json_value(content) == "":
_LOGGER.error("%s was not found", value_template)
return
dweet = DweetData(device)
add_entities([DweetSensor(hass, dweet, name, value_template, unit)], True)
class DweetSensor(Entity):
"""Representation of a Dweet sensor."""
def __init__(self, hass, dweet, name, value_template, unit_of_measurement):
"""Initialize the sensor."""
self.hass = hass
self.dweet = dweet
self._name = name
self._value_template = value_template
self._state = None
self._unit_of_measurement = unit_of_measurement
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state."""
return self._state
def update(self):
"""Get the latest data from REST API."""
self.dweet.update()
if self.dweet.data is None:
self._state = None
else:
values = json.dumps(self.dweet.data[0]["content"])
self._state = self._value_template.render_with_possible_json_value(
values, None
)
class DweetData:
"""The class for handling the data retrieval."""
def __init__(self, device):
"""Initialize the sensor."""
self._device = device
self.data = None
def update(self):
"""Get the latest data from Dweet.io."""
try:
self.data = dweepy.get_latest_dweet_for(self._device)
except dweepy.DweepyError:
_LOGGER.warning("Device %s doesn't contain any data", self._device)
self.data = None
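# A minimal (hypothetical) configuration.yaml entry for this platform:
#
#     sensor:
#       - platform: dweet
#         device: my-dweet-thing
#         value_template: "{{ value_json.temperature }}"
#         unit_of_measurement: "°C"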
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes
from homeassistant.components.homekit_controller.const import KNOWN_DEVICES
from tests.components.homekit_controller.common import setup_test_component
LIGHT_BULB_NAME = "Light Bulb"
LIGHT_BULB_ENTITY_ID = "light.testdevice"
LIGHT_ON = ("lightbulb", "on")
LIGHT_BRIGHTNESS = ("lightbulb", "brightness")
LIGHT_HUE = ("lightbulb", "hue")
LIGHT_SATURATION = ("lightbulb", "saturation")
LIGHT_COLOR_TEMP = ("lightbulb", "color-temperature")
def create_lightbulb_service(accessory):
"""Define lightbulb characteristics."""
service = accessory.add_service(ServicesTypes.LIGHTBULB, name=LIGHT_BULB_NAME)
on_char = service.add_char(CharacteristicsTypes.ON)
on_char.value = 0
brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS)
brightness.value = 0
return service
def create_lightbulb_service_with_hs(accessory):
"""Define a lightbulb service with hue + saturation."""
service = create_lightbulb_service(accessory)
hue = service.add_char(CharacteristicsTypes.HUE)
hue.value = 0
saturation = service.add_char(CharacteristicsTypes.SATURATION)
saturation.value = 0
return service
def create_lightbulb_service_with_color_temp(accessory):
"""Define a lightbulb service with color temp."""
service = create_lightbulb_service(accessory)
color_temp = service.add_char(CharacteristicsTypes.COLOR_TEMPERATURE)
color_temp.value = 0
return service
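# The tests below assume Home Assistant's 0-255 brightness scale maps onto
# HomeKit's 0-100 brightness characteristic, hence the 255 <-> 100 pairs in
# the assertions.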
async def test_switch_change_light_state(hass, utcnow):
"""Test that we can turn a HomeKit light on and off again."""
helper = await setup_test_component(hass, create_lightbulb_service_with_hs)
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.testdevice", "brightness": 255, "hs_color": [4, 5]},
blocking=True,
)
assert helper.characteristics[LIGHT_ON].value == 1
assert helper.characteristics[LIGHT_BRIGHTNESS].value == 100
assert helper.characteristics[LIGHT_HUE].value == 4
assert helper.characteristics[LIGHT_SATURATION].value == 5
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.testdevice"}, blocking=True
)
assert helper.characteristics[LIGHT_ON].value == 0
async def test_switch_change_light_state_color_temp(hass, utcnow):
"""Test that we can turn change color_temp."""
helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp)
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.testdevice", "brightness": 255, "color_temp": 400},
blocking=True,
)
assert helper.characteristics[LIGHT_ON].value == 1
assert helper.characteristics[LIGHT_BRIGHTNESS].value == 100
assert helper.characteristics[LIGHT_COLOR_TEMP].value == 400
async def test_switch_read_light_state(hass, utcnow):
"""Test that we can read the state of a HomeKit light accessory."""
helper = await setup_test_component(hass, create_lightbulb_service_with_hs)
# Initial state is that the light is off
state = await helper.poll_and_get_state()
assert state.state == "off"
# Simulate that someone switched on the device in the real world not via HA
helper.characteristics[LIGHT_ON].set_value(True)
helper.characteristics[LIGHT_BRIGHTNESS].value = 100
helper.characteristics[LIGHT_HUE].value = 4
helper.characteristics[LIGHT_SATURATION].value = 5
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["brightness"] == 255
assert state.attributes["hs_color"] == (4, 5)
# Simulate that device switched off in the real world not via HA
helper.characteristics[LIGHT_ON].set_value(False)
state = await helper.poll_and_get_state()
assert state.state == "off"
async def test_switch_push_light_state(hass, utcnow):
"""Test that we can read the state of a HomeKit light accessory."""
helper = await setup_test_component(hass, create_lightbulb_service_with_hs)
# Initial state is that the light is off
state = hass.states.get(LIGHT_BULB_ENTITY_ID)
assert state.state == "off"
await helper.update_named_service(
LIGHT_BULB_NAME,
{
CharacteristicsTypes.ON: True,
CharacteristicsTypes.BRIGHTNESS: 100,
CharacteristicsTypes.HUE: 4,
CharacteristicsTypes.SATURATION: 5,
},
)
state = hass.states.get(LIGHT_BULB_ENTITY_ID)
assert state.state == "on"
assert state.attributes["brightness"] == 255
assert state.attributes["hs_color"] == (4, 5)
# Simulate that device switched off in the real world not via HA
await helper.update_named_service(LIGHT_BULB_NAME, {CharacteristicsTypes.ON: False})
state = hass.states.get(LIGHT_BULB_ENTITY_ID)
assert state.state == "off"
async def test_switch_read_light_state_color_temp(hass, utcnow):
"""Test that we can read the color_temp of a light accessory."""
helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp)
# Initial state is that the light is off
state = await helper.poll_and_get_state()
assert state.state == "off"
# Simulate that someone switched on the device in the real world not via HA
helper.characteristics[LIGHT_ON].set_value(True)
helper.characteristics[LIGHT_BRIGHTNESS].value = 100
helper.characteristics[LIGHT_COLOR_TEMP].value = 400
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["brightness"] == 255
assert state.attributes["color_temp"] == 400
async def test_switch_push_light_state_color_temp(hass, utcnow):
"""Test that we can read the state of a HomeKit light accessory."""
helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp)
# Initial state is that the light is off
state = hass.states.get(LIGHT_BULB_ENTITY_ID)
assert state.state == "off"
await helper.update_named_service(
LIGHT_BULB_NAME,
{
CharacteristicsTypes.ON: True,
CharacteristicsTypes.BRIGHTNESS: 100,
CharacteristicsTypes.COLOR_TEMPERATURE: 400,
},
)
state = hass.states.get(LIGHT_BULB_ENTITY_ID)
assert state.state == "on"
assert state.attributes["brightness"] == 255
assert state.attributes["color_temp"] == 400
async def test_light_becomes_unavailable_but_recovers(hass, utcnow):
"""Test transition to and from unavailable state."""
helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp)
# Initial state is that the light is off
state = await helper.poll_and_get_state()
assert state.state == "off"
# Test device goes offline
helper.pairing.available = False
state = await helper.poll_and_get_state()
assert state.state == "unavailable"
# Simulate that someone switched on the device in the real world not via HA
helper.characteristics[LIGHT_ON].set_value(True)
helper.characteristics[LIGHT_BRIGHTNESS].value = 100
helper.characteristics[LIGHT_COLOR_TEMP].value = 400
helper.pairing.available = True
state = await helper.poll_and_get_state()
assert state.state == "on"
assert state.attributes["brightness"] == 255
assert state.attributes["color_temp"] == 400
async def test_light_unloaded(hass, utcnow):
"""Test entity and HKDevice are correctly unloaded."""
helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp)
# Initial state is that the light is off
state = await helper.poll_and_get_state()
assert state.state == "off"
unload_result = await helper.config_entry.async_unload(hass)
assert unload_result is True
# Make sure entity is unloaded
assert hass.states.get(helper.entity_id) is None
# Make sure HKDevice is no longer set to poll this accessory
conn = hass.data[KNOWN_DEVICES]["00:00:00:00:00:00"]
assert not conn.pollable_characteristics
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_WINDOW,
BinarySensorEntity,
)
from . import DATA_KEY
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Iterate through all MAX! Devices and add window shutters."""
devices = []
for handler in hass.data[DATA_KEY].values():
cube = handler.cube
for device in cube.devices:
name = f"{cube.room_by_id(device.room_id).name} {device.name}"
# Only add Window Shutters
if cube.is_windowshutter(device):
devices.append(MaxCubeShutter(handler, name, device.rf_address))
if devices:
add_entities(devices)
class MaxCubeShutter(BinarySensorEntity):
"""Representation of a MAX! Cube Binary Sensor device."""
def __init__(self, handler, name, rf_address):
"""Initialize MAX! Cube BinarySensorEntity."""
self._name = name
self._sensor_type = DEVICE_CLASS_WINDOW
self._rf_address = rf_address
self._cubehandle = handler
self._state = None
@property
def name(self):
"""Return the name of the BinarySensorEntity."""
return self._name
@property
def device_class(self):
"""Return the class of this sensor."""
return self._sensor_type
@property
def is_on(self):
"""Return true if the binary sensor is on/open."""
return self._state
def update(self):
"""Get latest data from MAX! Cube."""
self._cubehandle.update()
device = self._cubehandle.cube.device_by_rf(self._rf_address)
self._state = device.is_open
|
import mock
from flask import redirect, session
from app import github
def test_app(client):
assert client.get("/").status_code == 200
def test_login(client):
def mock_post(*args, **kwargs):
response = mock.Mock()
response.content = b'access_token=mocktoken&token_type=bearer'
return response
def mock_get_user(*args, **kwargs):
return {'login': 'mock_userid'}
@mock.patch.object(github, 'get', new=mock_get_user)
@mock.patch.object(github.session, 'post', new=mock_post)
def mock_authorize(*args, **kwargs):
client.get('/github/callback?code=mockcode')
return redirect('/')
@mock.patch.object(github, 'authorize', new=mock_authorize)
def test():
client.get('/login')
assert session['oauth_token'] == 'mocktoken'
assert session['u_id']['id'] == 'mock_userid'
test()
def test_logout(client):
client.get('/logout')
assert session.get('u_id') is None
|
import json
from typing import Any
from typing import List
from typing import Mapping
from typing import Optional
import requests
import service_configuration_lib
from mypy_extensions import TypedDict
from paasta_tools.kubernetes_tools import sanitised_cr_name
from paasta_tools.long_running_service_tools import LongRunningServiceConfig
from paasta_tools.long_running_service_tools import LongRunningServiceConfigDict
from paasta_tools.utils import BranchDictV2
from paasta_tools.utils import deep_merge_dictionaries
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import load_service_instance_config
from paasta_tools.utils import load_v2_deployments_json
FLINK_INGRESS_PORT = 31080
FLINK_DASHBOARD_TIMEOUT_SECONDS = 5
class TaskManagerConfig(TypedDict, total=False):
instances: int
class FlinkDeploymentConfigDict(LongRunningServiceConfigDict, total=False):
taskmanager: TaskManagerConfig
class FlinkDeploymentConfig(LongRunningServiceConfig):
config_dict: FlinkDeploymentConfigDict
config_filename_prefix = "flink"
def __init__(
self,
service: str,
cluster: str,
instance: str,
config_dict: FlinkDeploymentConfigDict,
branch_dict: Optional[BranchDictV2],
soa_dir: str = DEFAULT_SOA_DIR,
) -> None:
super().__init__(
cluster=cluster,
instance=instance,
service=service,
soa_dir=soa_dir,
config_dict=config_dict,
branch_dict=branch_dict,
)
def validate(
self,
params: List[str] = [
"cpus",
"mem",
"security",
"dependencies_reference",
"deploy_group",
],
) -> List[str]:
# Use InstanceConfig to validate shared config keys like cpus and mem
error_msgs = super().validate(params=params)
if error_msgs:
name = self.get_instance()
return [f"{name}: {msg}" for msg in error_msgs]
else:
return []
# Since Flink services are stateful, losing capacity is not transparent to the users
def get_replication_crit_percentage(self) -> int:
return self.config_dict.get("replication_threshold", 100)
def load_flink_instance_config(
service: str,
instance: str,
cluster: str,
load_deployments: bool = True,
soa_dir: str = DEFAULT_SOA_DIR,
) -> FlinkDeploymentConfig:
"""Read a service instance's configuration for Flink.
If a branch isn't specified for a config, the 'branch' key defaults to
paasta-${cluster}.${instance}.
:param service: The service name
:param instance: The instance of the service to retrieve
:param cluster: The cluster to read the configuration for
:param load_deployments: A boolean indicating if the corresponding deployments.json for this service
should also be loaded
:param soa_dir: The SOA configuration directory to read from
:returns: A FlinkDeploymentConfig built from the config for the service instance"""
general_config = service_configuration_lib.read_service_configuration(
service, soa_dir=soa_dir
)
instance_config = load_service_instance_config(
service, instance, "flink", cluster, soa_dir=soa_dir
)
general_config = deep_merge_dictionaries(
overrides=instance_config, defaults=general_config
)
branch_dict: Optional[BranchDictV2] = None
if load_deployments:
deployments_json = load_v2_deployments_json(service, soa_dir=soa_dir)
temp_instance_config = FlinkDeploymentConfig(
service=service,
cluster=cluster,
instance=instance,
config_dict=general_config,
branch_dict=None,
soa_dir=soa_dir,
)
branch = temp_instance_config.get_branch()
deploy_group = temp_instance_config.get_deploy_group()
branch_dict = deployments_json.get_branch_dict(service, branch, deploy_group)
return FlinkDeploymentConfig(
service=service,
cluster=cluster,
instance=instance,
config_dict=general_config,
branch_dict=branch_dict,
soa_dir=soa_dir,
)
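# Illustrative usage sketch (not part of this module; the service/instance/cluster
# names below are made up for the example):
#
#     flink_config = load_flink_instance_config(
#         service="example_service", instance="main", cluster="example_cluster"
#     )
#     flink_config.get_replication_crit_percentage()  # 100 unless 'replication_threshold' is set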
# TODO: read this from CRD in service configs
def cr_id(service: str, instance: str) -> Mapping[str, str]:
return dict(
group="yelp.com",
version="v1alpha1",
namespace="paasta-flinks",
plural="flinks",
name=sanitised_cr_name(service, instance),
)
def get_flink_ingress_url_root(cluster: str) -> str:
return f"http://flink.k8s.paasta-{cluster}.yelp:{FLINK_INGRESS_PORT}/"
def _dashboard_get(cr_name: str, cluster: str, path: str) -> str:
root = get_flink_ingress_url_root(cluster)
url = f"{root}{cr_name}/{path}"
response = requests.get(url, timeout=FLINK_DASHBOARD_TIMEOUT_SECONDS)
response.raise_for_status()
return response.text
def get_flink_jobmanager_overview(cr_name: str, cluster: str) -> Mapping[str, Any]:
try:
response = _dashboard_get(cr_name, cluster, "overview")
return json.loads(response)
except requests.RequestException as e:
url = e.request.url
err = e.response or str(e)
raise ValueError(f"failed HTTP request to Jobmanager dashboard {url}: {err}")
except json.JSONDecodeError as e:
raise ValueError(f"JSON decoding error from Jobmanager dashboard: {e}")
except ConnectionError as e:
raise ValueError(f"failed HTTP request to Jobmanager dashboard: {e}")
|
import os
import sys
import webbrowser
version = sys.argv[1]
curr_dir = os.path.dirname(__file__)
def copy_to_clipboard(text):
try:
import pyperclip
except ImportError:
print('pyperclip <https://pypi.org/project/pyperclip/> is missing.', file=sys.stderr)
print('copy-paste the contents of CHANGELOG.md manually', file=sys.stderr)
else:
pyperclip.copy(text)
with open(os.path.join(curr_dir, 'CHANGELOG.md')) as fin:
copy_to_clipboard(fin.read())
url = "https://github.com/RaRe-Technologies/smart_open/releases/tag/%s" % version
webbrowser.open(url)
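# Illustrative invocation (a sketch; the script filename is an assumption, not taken
# from the repository):
#
#     python prepare_release.py 2.1.0
#
# This copies CHANGELOG.md to the clipboard (when pyperclip is installed) and opens
# the GitHub release page for the given tag in a browser.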
|
import contextlib
import functools
import io
import itertools
import os.path
import re
import warnings
from enum import Enum
from typing import (
AbstractSet,
Any,
Callable,
Collection,
Container,
Dict,
Hashable,
Iterable,
Iterator,
Mapping,
MutableMapping,
MutableSet,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
cast,
)
import numpy as np
import pandas as pd
K = TypeVar("K")
V = TypeVar("V")
T = TypeVar("T")
def alias_message(old_name: str, new_name: str) -> str:
return f"{old_name} has been deprecated. Use {new_name} instead."
def alias_warning(old_name: str, new_name: str, stacklevel: int = 3) -> None:
warnings.warn(
alias_message(old_name, new_name), FutureWarning, stacklevel=stacklevel
)
def alias(obj: Callable[..., T], old_name: str) -> Callable[..., T]:
assert isinstance(old_name, str)
@functools.wraps(obj)
def wrapper(*args, **kwargs):
alias_warning(old_name, obj.__name__)
return obj(*args, **kwargs)
wrapper.__doc__ = alias_message(old_name, obj.__name__)
return wrapper
def _maybe_cast_to_cftimeindex(index: pd.Index) -> pd.Index:
from ..coding.cftimeindex import CFTimeIndex
if len(index) > 0 and index.dtype == "O":
try:
return CFTimeIndex(index)
except (ImportError, TypeError):
return index
else:
return index
def maybe_cast_to_coords_dtype(label, coords_dtype):
if coords_dtype.kind == "f" and not isinstance(label, slice):
label = np.asarray(label, dtype=coords_dtype)
return label
def safe_cast_to_index(array: Any) -> pd.Index:
"""Given an array, safely cast it to a pandas.Index.
If it is already a pandas.Index, return it unchanged.
Unlike pandas.Index, if the array has dtype=object or dtype=timedelta64,
this function will not attempt to do automatic type conversion but will
always return an index with dtype=object.
"""
if isinstance(array, pd.Index):
index = array
elif hasattr(array, "to_index"):
index = array.to_index()
else:
kwargs = {}
if hasattr(array, "dtype") and array.dtype.kind == "O":
kwargs["dtype"] = object
index = pd.Index(np.asarray(array), **kwargs)
return _maybe_cast_to_cftimeindex(index)
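# Illustrative usage (a sketch, not part of the original module):
#
#     >>> safe_cast_to_index(np.array([1, 2, 3]))      # plain ndarray -> pd.Index
#     >>> safe_cast_to_index(pd.Index(["a", "b"]))     # already an Index, returned as-is
#
# Object-dtype input is kept as dtype=object instead of being coerced, and object-dtype
# date-like values may be wrapped in a CFTimeIndex when cftime is available.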
def multiindex_from_product_levels(
levels: Sequence[pd.Index], names: Sequence[str] = None
) -> pd.MultiIndex:
"""Creating a MultiIndex from a product without refactorizing levels.
Keeping levels the same gives back the original labels when we unstack.
Parameters
----------
levels : sequence of pd.Index
Values for each MultiIndex level.
names : sequence of str, optional
Names for each level.
Returns
-------
pandas.MultiIndex
"""
if any(not isinstance(lev, pd.Index) for lev in levels):
raise TypeError("levels must be a list of pd.Index objects")
split_labels, levels = zip(*[lev.factorize() for lev in levels])
labels_mesh = np.meshgrid(*split_labels, indexing="ij")
labels = [x.ravel() for x in labels_mesh]
return pd.MultiIndex(levels, labels, sortorder=0, names=names)
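# Illustrative usage (a sketch, not part of the original module):
#
#     >>> midx = multiindex_from_product_levels(
#     ...     [pd.Index(["a", "b"]), pd.Index([1, 2])], names=["letters", "numbers"]
#     ... )
#
# The result enumerates the full product ('a', 1), ('a', 2), ('b', 1), ('b', 2)
# while reusing the original level values, so unstacking gives back the same labels.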
def maybe_wrap_array(original, new_array):
"""Wrap a transformed array with __array_wrap__ if it can be done safely.
This lets us treat arbitrary functions that take and return ndarray objects
like ufuncs, as long as they return an array with the same shape.
"""
# in case func lost array's metadata
if isinstance(new_array, np.ndarray) and new_array.shape == original.shape:
return original.__array_wrap__(new_array)
else:
return new_array
def equivalent(first: T, second: T) -> bool:
"""Compare two objects for equivalence (identity or equality), using
array_equiv if either object is an ndarray. If both objects are lists,
equivalent is sequentially called on all the elements.
"""
# TODO: refactor to avoid circular import
from . import duck_array_ops
if isinstance(first, np.ndarray) or isinstance(second, np.ndarray):
return duck_array_ops.array_equiv(first, second)
elif isinstance(first, list) or isinstance(second, list):
return list_equiv(first, second)
else:
return (
(first is second)
or (first == second)
or (pd.isnull(first) and pd.isnull(second))
)
def list_equiv(first, second):
equiv = True
if len(first) != len(second):
return False
else:
for f, s in zip(first, second):
equiv = equiv and equivalent(f, s)
return equiv
def peek_at(iterable: Iterable[T]) -> Tuple[T, Iterator[T]]:
"""Returns the first value from iterable, as well as a new iterator with
the same content as the original iterable
"""
gen = iter(iterable)
peek = next(gen)
return peek, itertools.chain([peek], gen)
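# Illustrative usage (a sketch, not part of the original module):
#
#     >>> first, rest = peek_at(x for x in range(3))
#     >>> first
#     0
#     >>> list(rest)          # the peeked value is not lost
#     [0, 1, 2]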
def update_safety_check(
first_dict: Mapping[K, V],
second_dict: Mapping[K, V],
compat: Callable[[V, V], bool] = equivalent,
) -> None:
"""Check the safety of updating one dictionary with another.
Raises ValueError if dictionaries have non-compatible values for any key,
where compatibility is determined by identity (they are the same item) or
the `compat` function.
Parameters
----------
first_dict, second_dict : dict-like
All items in the second dictionary are checked for conflicts
against items in the first dictionary.
compat : function, optional
Binary operator to determine if two values are compatible. By default,
checks for equivalence.
"""
for k, v in second_dict.items():
if k in first_dict and not compat(v, first_dict[k]):
raise ValueError(
"unsafe to merge dictionaries without "
"overriding values; conflicting key %r" % k
)
def remove_incompatible_items(
first_dict: MutableMapping[K, V],
second_dict: Mapping[K, V],
compat: Callable[[V, V], bool] = equivalent,
) -> None:
"""Remove incompatible items from the first dictionary in-place.
Items are retained if their keys are found in both dictionaries and the
values are compatible.
Parameters
----------
first_dict, second_dict : dict-like
Mappings to merge.
compat : function, optional
Binary operator to determine if two values are compatible. By default,
checks for equivalence.
"""
for k in list(first_dict):
if k not in second_dict or not compat(first_dict[k], second_dict[k]):
del first_dict[k]
def is_dict_like(value: Any) -> bool:
return hasattr(value, "keys") and hasattr(value, "__getitem__")
def is_full_slice(value: Any) -> bool:
return isinstance(value, slice) and value == slice(None)
def is_list_like(value: Any) -> bool:
return isinstance(value, list) or isinstance(value, tuple)
def is_duck_array(value: Any) -> bool:
if isinstance(value, np.ndarray):
return True
return (
hasattr(value, "ndim")
and hasattr(value, "shape")
and hasattr(value, "dtype")
and hasattr(value, "__array_function__")
and hasattr(value, "__array_ufunc__")
)
def either_dict_or_kwargs(
pos_kwargs: Optional[Mapping[Hashable, T]],
kw_kwargs: Mapping[str, T],
func_name: str,
) -> Mapping[Hashable, T]:
if pos_kwargs is not None:
if not is_dict_like(pos_kwargs):
raise ValueError(
"the first argument to .%s must be a dictionary" % func_name
)
if kw_kwargs:
raise ValueError(
"cannot specify both keyword and positional "
"arguments to .%s" % func_name
)
return pos_kwargs
else:
# Need an explicit cast to appease mypy due to invariance; see
# https://github.com/python/mypy/issues/6228
return cast(Mapping[Hashable, T], kw_kwargs)
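# Illustrative usage (a sketch; the method name "isel" is only an example):
#
#     >>> either_dict_or_kwargs({"x": 0}, {}, "isel")       # positional dict wins
#     {'x': 0}
#     >>> either_dict_or_kwargs(None, {"x": 0}, "isel")     # falls back to **kwargs
#     {'x': 0}
#
# Supplying both a positional dict and keyword arguments raises ValueError.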
def is_scalar(value: Any, include_0d: bool = True) -> bool:
"""Whether to treat a value as a scalar.
Any non-iterable, string, or 0-D array
"""
from .variable import NON_NUMPY_SUPPORTED_ARRAY_TYPES
if include_0d:
include_0d = getattr(value, "ndim", None) == 0
return (
include_0d
or isinstance(value, (str, bytes))
or not (
isinstance(value, (Iterable,) + NON_NUMPY_SUPPORTED_ARRAY_TYPES)
or hasattr(value, "__array_function__")
)
)
def is_valid_numpy_dtype(dtype: Any) -> bool:
try:
np.dtype(dtype)
except (TypeError, ValueError):
return False
else:
return True
def to_0d_object_array(value: Any) -> np.ndarray:
"""Given a value, wrap it in a 0-D numpy.ndarray with dtype=object."""
result = np.empty((), dtype=object)
result[()] = value
return result
def to_0d_array(value: Any) -> np.ndarray:
"""Given a value, wrap it in a 0-D numpy.ndarray."""
if np.isscalar(value) or (isinstance(value, np.ndarray) and value.ndim == 0):
return np.array(value)
else:
return to_0d_object_array(value)
def dict_equiv(
first: Mapping[K, V],
second: Mapping[K, V],
compat: Callable[[V, V], bool] = equivalent,
) -> bool:
"""Test equivalence of two dict-like objects. If any of the values are
numpy arrays, compare them correctly.
Parameters
----------
first, second : dict-like
Dictionaries to compare for equality
compat : function, optional
Binary operator to determine if two values are compatible. By default,
checks for equivalence.
Returns
-------
equals : bool
True if the dictionaries are equal
"""
for k in first:
if k not in second or not compat(first[k], second[k]):
return False
for k in second:
if k not in first:
return False
return True
def compat_dict_intersection(
first_dict: Mapping[K, V],
second_dict: Mapping[K, V],
compat: Callable[[V, V], bool] = equivalent,
) -> MutableMapping[K, V]:
"""Return the intersection of two dictionaries as a new dictionary.
Items are retained if their keys are found in both dictionaries and the
values are compatible.
Parameters
----------
first_dict, second_dict : dict-like
Mappings to merge.
compat : function, optional
Binary operator to determine if two values are compatible. By default,
checks for equivalence.
Returns
-------
intersection : dict
Intersection of the contents.
"""
new_dict = dict(first_dict)
remove_incompatible_items(new_dict, second_dict, compat)
return new_dict
def compat_dict_union(
first_dict: Mapping[K, V],
second_dict: Mapping[K, V],
compat: Callable[[V, V], bool] = equivalent,
) -> MutableMapping[K, V]:
"""Return the union of two dictionaries as a new dictionary.
An exception is raised if any keys are found in both dictionaries and the
values are not compatible.
Parameters
----------
first_dict, second_dict : dict-like
Mappings to merge.
compat : function, optional
Binary operator to determine if two values are compatible. By default,
checks for equivalence.
Returns
-------
union : dict
Union of the contents.
"""
new_dict = dict(first_dict)
update_safety_check(first_dict, second_dict, compat)
new_dict.update(second_dict)
return new_dict
class Frozen(Mapping[K, V]):
"""Wrapper around an object implementing the mapping interface to make it
immutable. If you really want to modify the mapping, the mutable version is
saved under the `mapping` attribute.
"""
__slots__ = ("mapping",)
def __init__(self, mapping: Mapping[K, V]):
self.mapping = mapping
def __getitem__(self, key: K) -> V:
return self.mapping[key]
def __iter__(self) -> Iterator[K]:
return iter(self.mapping)
def __len__(self) -> int:
return len(self.mapping)
def __contains__(self, key: object) -> bool:
return key in self.mapping
def __repr__(self) -> str:
return "{}({!r})".format(type(self).__name__, self.mapping)
def FrozenDict(*args, **kwargs) -> Frozen:
return Frozen(dict(*args, **kwargs))
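# Illustrative usage (a sketch, not part of the original module):
#
#     >>> attrs = FrozenDict(units="m")
#     >>> attrs["units"]
#     'm'
#     >>> attrs["units"] = "cm"     # Frozen defines no __setitem__ -> TypeError
#
# The wrapped dict stays reachable (and mutable) through ``attrs.mapping``.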
class SortedKeysDict(MutableMapping[K, V]):
"""A wrapper for dictionary-like objects that always iterates over its
items in sorted order by key but is otherwise equivalent to the underlying
mapping.
"""
__slots__ = ("mapping",)
def __init__(self, mapping: MutableMapping[K, V] = None):
self.mapping = {} if mapping is None else mapping
def __getitem__(self, key: K) -> V:
return self.mapping[key]
def __setitem__(self, key: K, value: V) -> None:
self.mapping[key] = value
def __delitem__(self, key: K) -> None:
del self.mapping[key]
def __iter__(self) -> Iterator[K]:
# see #4571 for the reason of the type ignore
return iter(sorted(self.mapping)) # type: ignore
def __len__(self) -> int:
return len(self.mapping)
def __contains__(self, key: object) -> bool:
return key in self.mapping
def __repr__(self) -> str:
return "{}({!r})".format(type(self).__name__, self.mapping)
class OrderedSet(MutableSet[T]):
"""A simple ordered set.
The API matches the builtin set, but it preserves insertion order of elements, like
a dict. Note that, unlike in an OrderedDict, equality tests are not order-sensitive.
"""
_d: Dict[T, None]
__slots__ = ("_d",)
def __init__(self, values: AbstractSet[T] = None):
self._d = {}
if values is not None:
# Disable type checking - both mypy and PyCharm believe that
# we're altering the type of self in place (see signature of
# MutableSet.__ior__)
self |= values # type: ignore
# Required methods for MutableSet
def __contains__(self, value: object) -> bool:
return value in self._d
def __iter__(self) -> Iterator[T]:
return iter(self._d)
def __len__(self) -> int:
return len(self._d)
def add(self, value: T) -> None:
self._d[value] = None
def discard(self, value: T) -> None:
del self._d[value]
# Additional methods
def update(self, values: AbstractSet[T]) -> None:
# See comment on __init__ re. type checking
self |= values # type: ignore
def __repr__(self) -> str:
return "{}({!r})".format(type(self).__name__, list(self))
class NdimSizeLenMixin:
"""Mixin class that extends a class that defines a ``shape`` property to
one that also defines ``ndim``, ``size`` and ``__len__``.
"""
__slots__ = ()
@property
def ndim(self: Any) -> int:
return len(self.shape)
@property
def size(self: Any) -> int:
# cast to int so that shape = () gives size = 1
return int(np.prod(self.shape))
def __len__(self: Any) -> int:
try:
return self.shape[0]
except IndexError:
raise TypeError("len() of unsized object")
class NDArrayMixin(NdimSizeLenMixin):
"""Mixin class for making wrappers of N-dimensional arrays that conform to
the ndarray interface required for the data argument to Variable objects.
A subclass should set the `array` property and override one or more of
`dtype`, `shape` and `__getitem__`.
"""
__slots__ = ()
@property
def dtype(self: Any) -> np.dtype:
return self.array.dtype
@property
def shape(self: Any) -> Tuple[int]:
return self.array.shape
def __getitem__(self: Any, key):
return self.array[key]
def __repr__(self: Any) -> str:
return "{}(array={!r})".format(type(self).__name__, self.array)
class ReprObject:
"""Object that prints as the given value, for use with sentinel values."""
__slots__ = ("_value",)
def __init__(self, value: str):
self._value = value
def __repr__(self) -> str:
return self._value
def __eq__(self, other) -> bool:
if isinstance(other, ReprObject):
return self._value == other._value
return False
def __hash__(self) -> int:
return hash((type(self), self._value))
def __dask_tokenize__(self):
from dask.base import normalize_token
return normalize_token((type(self), self._value))
@contextlib.contextmanager
def close_on_error(f):
"""Context manager to ensure that a file opened by xarray is closed if an
exception is raised before the user sees the file object.
"""
try:
yield
except Exception:
f.close()
raise
def is_remote_uri(path: str) -> bool:
return bool(re.search(r"^https?://", path))
def read_magic_number(filename_or_obj, count=8):
# check byte header to determine file type
if isinstance(filename_or_obj, bytes):
magic_number = filename_or_obj[:count]
elif isinstance(filename_or_obj, io.IOBase):
if filename_or_obj.tell() != 0:
raise ValueError(
"file-like object read/write pointer not at the start of the file, "
"please close and reopen, or use a context manager"
)
magic_number = filename_or_obj.read(count)
filename_or_obj.seek(0)
else:
raise TypeError(f"cannot read the magic number from {type(filename_or_obj)}")
return magic_number
def is_grib_path(path: str) -> bool:
_, ext = os.path.splitext(path)
return ext in [".grib", ".grb", ".grib2", ".grb2"]
def is_uniform_spaced(arr, **kwargs) -> bool:
"""Return True if values of an array are uniformly spaced and sorted.
>>> is_uniform_spaced(range(5))
True
>>> is_uniform_spaced([-4, 0, 100])
False
kwargs are additional arguments to ``np.isclose``
"""
arr = np.array(arr, dtype=float)
diffs = np.diff(arr)
return bool(np.isclose(diffs.min(), diffs.max(), **kwargs))
def hashable(v: Any) -> bool:
"""Determine whether `v` can be hashed."""
try:
hash(v)
except TypeError:
return False
return True
def not_implemented(*args, **kwargs):
return NotImplemented
def decode_numpy_dict_values(attrs: Mapping[K, V]) -> Dict[K, V]:
"""Convert attribute values from numpy objects to native Python objects,
for use in to_dict
"""
attrs = dict(attrs)
for k, v in attrs.items():
if isinstance(v, np.ndarray):
attrs[k] = v.tolist()
elif isinstance(v, np.generic):
attrs[k] = v.item()
return attrs
def ensure_us_time_resolution(val):
"""Convert val out of numpy time, for use in to_dict.
Needed because of numpy bug GH#7619"""
if np.issubdtype(val.dtype, np.datetime64):
val = val.astype("datetime64[us]")
elif np.issubdtype(val.dtype, np.timedelta64):
val = val.astype("timedelta64[us]")
return val
class HiddenKeyDict(MutableMapping[K, V]):
"""Acts like a normal dictionary, but hides certain keys."""
__slots__ = ("_data", "_hidden_keys")
# ``__init__`` method required to create instance from class.
def __init__(self, data: MutableMapping[K, V], hidden_keys: Iterable[K]):
self._data = data
self._hidden_keys = frozenset(hidden_keys)
def _raise_if_hidden(self, key: K) -> None:
if key in self._hidden_keys:
raise KeyError("Key `%r` is hidden." % key)
# The next five methods are requirements of the ABC.
def __setitem__(self, key: K, value: V) -> None:
self._raise_if_hidden(key)
self._data[key] = value
def __getitem__(self, key: K) -> V:
self._raise_if_hidden(key)
return self._data[key]
def __delitem__(self, key: K) -> None:
self._raise_if_hidden(key)
del self._data[key]
def __iter__(self) -> Iterator[K]:
for k in self._data:
if k not in self._hidden_keys:
yield k
def __len__(self) -> int:
num_hidden = len(self._hidden_keys & self._data.keys())
return len(self._data) - num_hidden
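# Illustrative usage (a sketch, not part of the original module):
#
#     >>> d = HiddenKeyDict({"a": 1, "b": 2}, hidden_keys=["b"])
#     >>> list(d), len(d)
#     (['a'], 1)
#     >>> d["b"]                    # hidden keys raise KeyError on any access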
def infix_dims(dims_supplied: Collection, dims_all: Collection) -> Iterator:
"""
Resolves a supplied list containing an ellipsis representing other items, into
a generator with the 'realized' list of all items.
"""
if ... in dims_supplied:
if len(set(dims_all)) != len(dims_all):
raise ValueError("Cannot use ellipsis with repeated dims")
if len([d for d in dims_supplied if d == ...]) > 1:
raise ValueError("More than one ellipsis supplied")
other_dims = [d for d in dims_all if d not in dims_supplied]
for d in dims_supplied:
if d == ...:
yield from other_dims
else:
yield d
else:
if set(dims_supplied) ^ set(dims_all):
raise ValueError(
f"{dims_supplied} must be a permuted list of {dims_all}, unless `...` is included"
)
yield from dims_supplied
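# Illustrative usage (a sketch, not part of the original module):
#
#     >>> list(infix_dims(["a", ..., "c"], ["a", "b", "c", "d"]))
#     ['a', 'b', 'd', 'c']
#
# Without an ellipsis, the supplied dims must be a permutation of all dims.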
def get_temp_dimname(dims: Container[Hashable], new_dim: Hashable) -> Hashable:
"""Get a new dimension name based on new_dim that is not used in dims.
If the same name already exists, underscores are prepended until it is unique.
Example1:
dims: ['a', 'b', 'c']
new_dim: ['_rolling']
-> ['_rolling']
Example2:
dims: ['a', 'b', 'c', '_rolling']
new_dim: ['_rolling']
-> ['__rolling']
"""
while new_dim in dims:
new_dim = "_" + str(new_dim)
return new_dim
def drop_dims_from_indexers(
indexers: Mapping[Hashable, Any],
dims: Union[list, Mapping[Hashable, int]],
missing_dims: str,
) -> Mapping[Hashable, Any]:
"""Depending on the setting of missing_dims, drop any dimensions from indexers that
are not present in dims.
Parameters
----------
indexers : dict
dims : sequence
missing_dims : {"raise", "warn", "ignore"}
"""
if missing_dims == "raise":
invalid = indexers.keys() - set(dims)
if invalid:
raise ValueError(
f"dimensions {invalid} do not exist. Expected one or more of {dims}"
)
return indexers
elif missing_dims == "warn":
# don't modify input
indexers = dict(indexers)
invalid = indexers.keys() - set(dims)
if invalid:
warnings.warn(
f"dimensions {invalid} do not exist. Expected one or more of {dims}"
)
for key in invalid:
indexers.pop(key)
return indexers
elif missing_dims == "ignore":
return {key: val for key, val in indexers.items() if key in dims}
else:
raise ValueError(
f"Unrecognised option {missing_dims} for missing_dims argument"
)
class UncachedAccessor:
"""Acts like a property, but on both classes and class instances
This class is necessary because some tools (e.g. pydoc and sphinx)
inspect classes, and accessing a property on a class returns the property
object itself rather than the accessor.
"""
def __init__(self, accessor):
self._accessor = accessor
def __get__(self, obj, cls):
if obj is None:
return self._accessor
return self._accessor(obj)
# Singleton type, as per https://github.com/python/typing/pull/240
class Default(Enum):
token = 0
_default = Default.token
|
from . import core as html5
@html5.tag
class Label(html5.Label):
_parserTagName = "ignite-label"
def __init__(self, *args, **kwargs):
super(Label, self).__init__(style="label ignt-label", *args, **kwargs)
@html5.tag
class Input(html5.Input):
_parserTagName = "ignite-input"
def __init__(self, *args, **kwargs):
super(Input, self).__init__(style="input ignt-input", *args, **kwargs)
@html5.tag
class Switch(html5.Div):
_parserTagName = "ignite-switch"
def __init__(self, *args, **kwargs):
super(Switch, self).__init__(style="switch ignt-switch", *args, **kwargs)
self.input = html5.Input(style="switch-input")
self.appendChild(self.input)
self.input["type"] = "checkbox"
switchLabel = html5.Label(forElem=self.input)
switchLabel.addClass("switch-label")
self.appendChild(switchLabel)
def _setChecked(self, value):
self.input["checked"] = bool(value)
def _getChecked(self):
return self.input["checked"]
@html5.tag
class Check(html5.Input):
_parserTagName = "ignite-check"
def __init__(self, *args, **kwargs):
super(Check, self).__init__(style="check ignt-check", *args, **kwargs)
checkInput = html5.Input()
checkInput.addClass("check-input")
checkInput["type"] = "checkbox"
self.appendChild(checkInput)
checkLabel = html5.Label(forElem=checkInput)
checkLabel.addClass("check-label")
self.appendChild(checkLabel)
@html5.tag
class Radio(html5.Div):
_parserTagName = "ignite-radio"
def __init__(self, *args, **kwargs):
super(Radio, self).__init__(style="radio ignt-radio", *args, **kwargs)
radioInput = html5.Input()
radioInput.addClass("radio-input")
radioInput["type"] = "radio"
self.appendChild(radioInput)
radioLabel = html5.Label(forElem=radioInput)
radioLabel.addClass("radio-label")
self.appendChild(radioLabel)
@html5.tag
class Select(html5.Select):
_parserTagName = "ignite-select"
def __init__(self, *args, **kwargs):
super(Select, self).__init__(style="select ignt-select", *args, **kwargs)
defaultOpt = html5.Option()
defaultOpt["selected"] = True
defaultOpt["disabled"] = True
defaultOpt.element.innerHTML = ""
self.appendChild(defaultOpt)
@html5.tag
class Textarea(html5.Textarea):
_parserTagName = "ignite-textarea"
def __init__(self, *args, **kwargs):
super(Textarea, self).__init__(style="textarea ignt-textarea", *args, **kwargs)
@html5.tag
class Progress(html5.Progress):
_parserTagName = "ignite-progress"
def __init__(self, *args, **kwargs):
super(Progress, self).__init__(style="progress ignt-progress", *args, **kwargs)
@html5.tag
class Item(html5.Div):
_parserTagName = "ignite-item"
def __init__(self, title=None, descr=None, className=None, *args, **kwargs):
super(Item, self).__init__(style="item ignt-item", *args, **kwargs)
if className:
self.addClass(className)
self.fromHTML("""
<div class="item-image ignt-item-image" [name]="itemImage">
</div>
<div class="item-content ignt-item-content" [name]="itemContent">
<div class="item-headline ignt-item-headline" [name]="itemHeadline">
</div>
</div>
""")
if title:
self.itemHeadline.appendChild(html5.TextNode(title))
if descr:
self.itemSubline = html5.Div()
self.itemSubline.addClass("item-subline ignt-item-subline")
self.itemSubline.appendChild(html5.TextNode(descr))
self.appendChild(self.itemSubline)
@html5.tag
class Table(html5.Table):
_parserTagName = "ignite-table"
def __init__(self, *args, **kwargs):
super(Table, self).__init__(*args, **kwargs)
self.head.addClass("ignt-table-head")
self.body.addClass("ignt-table-body")
def prepareRow(self, row):
assert row >= 0, "Cannot create rows with negative index"
for child in self.body._children:
row -= child["rowspan"]
if row < 0:
return
while row >= 0:
tableRow = html5.Tr()
tableRow.addClass("ignt-table-body-row")
self.body.appendChild(tableRow)
row -= 1
def prepareCol(self, row, col):
assert col >= 0, "Cannot create cols with negative index"
self.prepareRow(row)
for rowChild in self.body._children:
row -= rowChild["rowspan"]
if row < 0:
for colChild in rowChild._children:
col -= colChild["colspan"]
if col < 0:
return
while col >= 0:
tableCell = html5.Td()
tableCell.addClass("ignt-table-body-cell")
rowChild.appendChild(tableCell)
col -= 1
return
def fastGrid(self, rows, cols, createHidden=False):
colsstr = "".join(['<td class="ignt-table-body-cell"></td>' for i in range(0, cols)])
tblstr = '<tbody [name]="body" class="ignt-table-body" >'
for r in range(0, rows):
tblstr += '<tr class="ignt-table-body-row %s">%s</tr>' % ("is-hidden" if createHidden else "", colsstr)
tblstr += "</tbody>"
self.fromHTML(tblstr)
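# Illustrative usage sketch (not part of the original module):
#
#     table = Table()
#     table.fastGrid(rows=3, cols=4)     # builds a 3x4 body from one HTML string
#     table.prepareCol(0, 5)             # grows row 0 until it has a cell at index 5
#
# fastGrid() assembles the whole <tbody> as markup and parses it in one call, which
# avoids the per-row/per-cell appendChild calls made by prepareRow()/prepareCol().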
|
import pytest
import sh
from molecule import config
from molecule.lint import yamllint
@pytest.fixture
def _patched_get_files(mocker):
m = mocker.patch('molecule.lint.yamllint.Yamllint._get_files')
m.return_value = [
'foo.yml',
'bar.yaml',
]
return m
@pytest.fixture
def _lint_section_data():
return {
'lint': {
'name': 'yamllint',
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'bar',
}
}
}
# NOTE(retr0h): The use of the `patched_config_validate` fixture disables
# config.Config._validate from executing, preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(_lint_section_data, patched_config_validate, config_instance):
return yamllint.Yamllint(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_files_private_member(_patched_get_files, _instance):
x = [
'foo.yml',
'bar.yaml',
]
assert x == _instance._files
def test_default_options_property(_instance):
x = {
's': True,
}
assert x == _instance.default_options
def test_default_env_property(_instance):
assert 'MOLECULE_FILE' in _instance.default_env
assert 'MOLECULE_INVENTORY_FILE' in _instance.default_env
assert 'MOLECULE_SCENARIO_DIRECTORY' in _instance.default_env
assert 'MOLECULE_INSTANCE_CONFIG' in _instance.default_env
def test_name_property(_instance):
assert 'yamllint' == _instance.name
def test_enabled_property(_instance):
assert _instance.enabled
@pytest.mark.parametrize(
'config_instance', ['_lint_section_data'], indirect=True)
def test_options_property(_instance):
x = {
's': True,
'foo': 'bar',
}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_lint_section_data'], indirect=True)
def test_options_property_handles_cli_args(_instance):
_instance._config.args = {'debug': True}
x = {
's': True,
'foo': 'bar',
}
# Does nothing. The `yamllint` command does not support
# a `debug` flag.
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_lint_section_data'], indirect=True)
def test_bake(_patched_get_files, _instance):
_instance.bake()
x = [
str(sh.Command('yamllint')),
'-s',
'--foo=bar',
'foo.yml',
'bar.yaml',
]
result = str(_instance._yamllint_command).split()
assert sorted(x) == sorted(result)
def test_execute(_patched_get_files, patched_logger_info,
patched_logger_success, patched_run_command, _instance):
_instance._yamllint_command = 'patched-yamllint-command'
_instance.execute()
patched_run_command.assert_called_once_with(
'patched-yamllint-command', debug=False)
msg = 'Executing Yamllint on files found in {}/...'.format(
_instance._config.project_directory)
patched_logger_info.assert_called_once_with(msg)
msg = 'Lint completed successfully.'
patched_logger_success.assert_called_once_with(msg)
def test_execute_does_not_execute(_patched_get_files, patched_logger_warn,
patched_logger_success, patched_run_command,
_instance):
_instance._config.config['lint']['enabled'] = False
_instance.execute()
assert not patched_run_command.called
msg = 'Skipping, lint is disabled.'
patched_logger_warn.assert_called_once_with(msg)
@pytest.mark.parametrize(
'config_instance', ['_lint_section_data'], indirect=True)
def test_execute_bakes(_patched_get_files, patched_run_command, _instance):
_instance.execute()
assert _instance._yamllint_command is not None
x = [
str(sh.Command('yamllint')),
'-s',
'--foo=bar',
'foo.yml',
'bar.yaml',
]
result = str(patched_run_command.mock_calls[0][1][0]).split()
assert sorted(x) == sorted(result)
def test_executes_catches_and_exits_return_code(patched_run_command,
_instance):
patched_run_command.side_effect = sh.ErrorReturnCode_1(
sh.yamllint, b'', b'')
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
|
import logging
import numpy as np
from scattertext import ScatterChart, TermCategoryFrequencies, ParsedCorpus, CorpusDF
from scattertext.Scalers import percentile_alphabetical
from scattertext.Corpus import Corpus
from scattertext.DocsAndLabelsFromCorpus import DocsAndLabelsFromCorpus, DocsAndLabelsFromCorpusSample
class ScatterChartExplorer(ScatterChart):
def __init__(self,
corpus,
verbose=False,
**kwargs):
'''See ScatterChart. This lets you click on terms to see what contexts they tend to appear in.
Running the `to_dict` function outputs the JSON-serializable structure described in its docstring.
'''
#if not (isinstance(corpus, (Corpus, ParsedCorpus, CorpusDF, TermCategoryFrequencies))
# or (issubclass(type(corpus), (Corpus, ParsedCorpus, CorpusDF, TermCategoryFrequencies)))):
# raise AssertionError(corpus, 'of type', type(corpus),
# 'must be a subclass of Corpus or TermCategoryFrequencies.')
ScatterChart.__init__(self, corpus, verbose, **kwargs)
self._term_metadata = None
def to_dict(self,
category,
category_name=None,
not_category_name=None,
scores=None,
metadata=None,
max_docs_per_category=None,
transform=percentile_alphabetical,
alternative_text_field=None,
title_case_names=False,
not_categories=None,
neutral_categories=None,
extra_categories=None,
neutral_category_name=None,
extra_category_name=None,
background_scorer=None,
include_term_category_counts=False,
**kwargs):
'''
Parameters
----------
category : str
Category to annotate. Exact value of category.
category_name : str, optional
Name of category which will appear on web site. Default None is same as category.
not_category_name : str, optional
Name of ~category which will appear on web site. Default None is same as "not " + category.
scores : np.array, optional
Scores to use for coloring. Defaults to None, or RankDifference scores
metadata : None or array-like, optional
List of metadata for each document. Defaults to a list of blank strings.
max_docs_per_category : None or int, optional
Maximum number of documents to store per category. Default None stores all documents.
transform : function, optional
Function for ranking terms. Defaults to scattertext.Scalers.percentile_alphabetical.
alternative_text_field : str or None, optional
Field from the dataframe used to make the corpus, displayed in place of the parsed text.
Can only be used if corpus is a ParsedCorpus instance.
title_case_names : bool, default False
Should the program title-case the category and not-category names?
not_categories : list, optional
List of categories to use as "not category". Defaults to all others.
neutral_categories : list, optional
List of categories to use as neutral. Defaults to [].
extra_categories : list, optional
List of categories to use as extra. Defaults to [].
neutral_category_name : str
"Neutral" by default. Only active if show_neutral is True. Name of the neutral
column.
extra_category_name : str
"Extra" by default. Only active if show_neutral and show_extra are true. Name of the
extra column.
background_scorer : CharacteristicScorer, optional
Used for bg scores
include_term_category_counts : bool, default False
Includes term-category counts keyed off 'term-category-count'. If use_non_text_features is set,
metadata counts are used instead.
Returns
-------
dictionary {info: {'category_name': full category name, ...},
docs: {'texts': [doc1text, ...],
'labels': [1, 0, ...],
'meta': ['<b>blah</b>', '<b>blah</b>']},
// if include_term_category_counts
termCounts: [term num -> [total occurrences, total documents, variance], ... for the number of categories]
data: {term:term,
x:frequency [0-1],
y:frequency [0-1],
s: score,
bg: background score,
as: association score,
cat25k: freq per 25k in category,
cat: count in category,
ncat: count in non-category,
catdocs: [docnum, ...],
ncatdocs: [docnum, ...]
ncat25k: freq per 25k in non-category}
etc: term specific dictionary (if inject_term_metadata is called and contains terms)}
'''
if kwargs and self.verbose:
logging.info("Excessive arguments passed to ScatterChartExplorer.to_dict: " + str(kwargs))
json_data = ScatterChart.to_dict(self,
category,
category_name=category_name,
not_category_name=not_category_name,
scores=scores,
transform=transform,
title_case_names=title_case_names,
not_categories=not_categories,
neutral_categories=neutral_categories,
extra_categories=extra_categories,
background_scorer=background_scorer)
docs_getter = self._make_docs_getter(max_docs_per_category, alternative_text_field)
if neutral_category_name is None:
neutral_category_name = 'Neutral'
if extra_category_name is None:
extra_category_name = 'Extra'
json_data['docs'] = self._get_docs_structure(docs_getter, metadata)
json_data['info']['neutral_category_name'] = neutral_category_name
json_data['info']['extra_category_name'] = extra_category_name
if include_term_category_counts:
terms = np.array([term_struct['term'] for term_struct in json_data['data']])
json_data['termCounts'] = self._get_term_doc_counts(terms)
return json_data
def _get_term_doc_counts(self, terms):
term_counts = []
if self.scatterchartdata.use_non_text_features:
term_doc_counts = self.term_doc_matrix.get_metadata_doc_count_df('').loc[terms]
term_doc_freq = self.term_doc_matrix.get_metadata_freq_df('').loc[terms]
else:
term_doc_counts = self.term_doc_matrix.get_term_doc_count_df('').loc[terms]
term_doc_freq = self.term_doc_matrix.get_term_freq_df('').loc[terms]
# this can possibly be vectorized
for category_i, category in enumerate(term_doc_freq.columns):
category_counts = {}
for term_i, val in enumerate(term_doc_freq[category].values):
if val > 0:
category_counts[term_i] = [val, term_doc_counts.iloc[term_i, category_i]]
term_counts.append(category_counts)
return term_counts
def _make_docs_getter(self, max_docs_per_category, alternative_text_field):
if max_docs_per_category is None:
docs_getter = DocsAndLabelsFromCorpus(self.term_doc_matrix,
alternative_text_field=alternative_text_field)
else:
docs_getter = DocsAndLabelsFromCorpusSample(self.term_doc_matrix,
max_docs_per_category,
alternative_text_field=alternative_text_field)
if self.scatterchartdata.use_non_text_features:
docs_getter = docs_getter.use_non_text_features()
return docs_getter
def _get_docs_structure(self, docs_getter, metadata):
if metadata is not None:
return docs_getter.get_labels_and_texts_and_meta(np.array(metadata))
else:
return docs_getter.get_labels_and_texts()
def _add_term_freq_to_json_df(self, json_df, term_freq_df, category):
ScatterChart._add_term_freq_to_json_df(self, json_df, term_freq_df, category)
json_df['cat'] = term_freq_df[category + ' freq'].astype(int)
json_df['ncat'] = term_freq_df['not cat freq'].astype(int)
if self._term_metadata is not None:
json_df['etc'] = term_freq_df['term'].apply(lambda term: self._term_metadata.get(term, {}))
def inject_term_metadata(self, metadata):
'''
:param metadata: dict, maps terms to a dictionary which will be added to term's json structure
:return: ScatterChartExplorer
'''
self._term_metadata = metadata
return self
def inject_term_metadata_df(self, metadata_df):
'''
:param metadata_df: pd.DataFrame, indexed on terms with columns as structure
:return: ScatterChartExplorer
'''
term_metadata_dict = metadata_df.T.to_dict()
return self.inject_term_metadata(term_metadata_dict)
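# Illustrative usage sketch (not part of the original module; the corpus, category
# names, and term metadata below are assumptions made up for the example):
#
#     explorer = ScatterChartExplorer(corpus)
#     explorer.inject_term_metadata({'some term': {'docfreq': 12}})
#     chart_dict = explorer.to_dict(category='democrat',
#                                   category_name='Democratic',
#                                   not_category_name='Republican',
#                                   include_term_category_counts=True)
#
# Each entry of chart_dict['data'] then carries an 'etc' field with any injected
# metadata for that term, and chart_dict['termCounts'] holds the per-category counts.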
|
import os
from stash.tests.stashtest import StashTestCase
class ExpanderTests(StashTestCase):
def setUp(self):
StashTestCase.setUp(self)
self.expand = self.stash.runtime.expander.expand
def _get_pipe_sequence(self, line):
expanded = self.expand(line)
next(expanded)
return next(expanded)
def test_envars(self):
pipe_sequence = self._get_pipe_sequence(r'ls $SELFUPDATE_TARGET')
assert pipe_sequence.lst[0].args[0] == 'master'
def test_tilda(self):
pipe_sequence = self._get_pipe_sequence(r'ls ~/')
assert pipe_sequence.lst[0].args[0] == os.path.expanduser('~/')
def test_wildcards(self):
pipe_sequence = self._get_pipe_sequence(r'ls *')
assert 'README.md' in pipe_sequence.lst[0].args
assert 'CHANGES.md' in pipe_sequence.lst[0].args
pipe_sequence = self._get_pipe_sequence(r'ls README.?d')
assert pipe_sequence.lst[0].args[0] == 'README.md'
pipe_sequence = self._get_pipe_sequence(r'ls *stash*')
# assert 'stash.py' in pipe_sequence.lst[0].args
assert 'getstash.py' in pipe_sequence.lst[0].args
assert 'launch_stash.py' in pipe_sequence.lst[0].args
pipe_sequence = self._get_pipe_sequence(r'ls stash*')
assert 'getstash.py' not in pipe_sequence.lst[0].args
def test_escapes(self):
pipe_sequence = self._get_pipe_sequence(r'ls \n')
assert pipe_sequence.lst[0].args[0] == '\n'
pipe_sequence = self._get_pipe_sequence(r'ls \033[32m')
assert pipe_sequence.lst[0].args[0] == '\033[32m'
pipe_sequence = self._get_pipe_sequence(r'ls \x1b[32m')
assert pipe_sequence.lst[0].args[0] == '\x1b[32m'
def test_double_quotes(self):
pipe_sequence = self._get_pipe_sequence(r'ls "$SELFUPDATE_TARGET"')
assert pipe_sequence.lst[0].args[0] == 'master'
pipe_sequence = self._get_pipe_sequence(r'ls "~/"')
assert pipe_sequence.lst[0].args[0] == '~/'
pipe_sequence = self._get_pipe_sequence(r'ls "*"')
assert pipe_sequence.lst[0].args[0] == '*'
pipe_sequence = self._get_pipe_sequence(r'ls "\033[32m"')
assert pipe_sequence.lst[0].args[0] == '\033[32m'
def test_single_quotes(self):
pipe_sequence = self._get_pipe_sequence(r"ls '$SELFUPDATE_TARGET'")
assert pipe_sequence.lst[0].args[0] == '$SELFUPDATE_TARGET'
pipe_sequence = self._get_pipe_sequence(r"ls '~/'")
assert pipe_sequence.lst[0].args[0] == '~/'
pipe_sequence = self._get_pipe_sequence(r"ls '*'")
assert pipe_sequence.lst[0].args[0] == '*'
pipe_sequence = self._get_pipe_sequence(r"ls '\033[32m'")
assert pipe_sequence.lst[0].args[0] == '\\033[32m'
def test_redirect_file(self):
pipe_sequence = self._get_pipe_sequence(r'ls >> somefile')
cmd = pipe_sequence.lst[0]
assert cmd.cmd_word == 'ls'
assert cmd.args == []
assert cmd.io_redirect.operator == '>>'
assert cmd.io_redirect.filename == 'somefile'
def test_redirect_file_descriptor(self):
pipe_sequence = self._get_pipe_sequence(r'ls -1 > &3')
cmd = pipe_sequence.lst[0]
assert cmd.cmd_word == 'ls'
assert cmd.args[0] == '-1' and len(cmd.args) == 1
assert cmd.io_redirect.operator == '>'
assert cmd.io_redirect.filename == '&3'
|
import json
import logging
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.components.mqtt import (
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from homeassistant.components.vacuum import (
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_FAN_SPEED,
SUPPORT_LOCATE,
SUPPORT_PAUSE,
SUPPORT_RETURN_HOME,
SUPPORT_SEND_COMMAND,
SUPPORT_STATUS,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
VacuumEntity,
)
from homeassistant.const import (
ATTR_SUPPORTED_FEATURES,
CONF_DEVICE,
CONF_NAME,
CONF_UNIQUE_ID,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.icon import icon_for_battery_level
from ..debug_info import log_messages
from .schema import MQTT_VACUUM_SCHEMA, services_to_strings, strings_to_services
_LOGGER = logging.getLogger(__name__)
SERVICE_TO_STRING = {
SUPPORT_TURN_ON: "turn_on",
SUPPORT_TURN_OFF: "turn_off",
SUPPORT_PAUSE: "pause",
SUPPORT_STOP: "stop",
SUPPORT_RETURN_HOME: "return_home",
SUPPORT_FAN_SPEED: "fan_speed",
SUPPORT_BATTERY: "battery",
SUPPORT_STATUS: "status",
SUPPORT_SEND_COMMAND: "send_command",
SUPPORT_LOCATE: "locate",
SUPPORT_CLEAN_SPOT: "clean_spot",
}
STRING_TO_SERVICE = {v: k for k, v in SERVICE_TO_STRING.items()}
DEFAULT_SERVICES = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_STOP
| SUPPORT_RETURN_HOME
| SUPPORT_STATUS
| SUPPORT_BATTERY
| SUPPORT_CLEAN_SPOT
)
ALL_SERVICES = (
DEFAULT_SERVICES
| SUPPORT_PAUSE
| SUPPORT_LOCATE
| SUPPORT_FAN_SPEED
| SUPPORT_SEND_COMMAND
)
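# Illustrative note (not part of the original module): the supported features form a
# bitmask, so a capability check follows this pattern, which the async_* service
# handlers below rely on:
#
#     if supported_features & SUPPORT_FAN_SPEED == 0:
#         ...  # fan speed not supported; the command is silently ignored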
CONF_SUPPORTED_FEATURES = ATTR_SUPPORTED_FEATURES
CONF_BATTERY_LEVEL_TEMPLATE = "battery_level_template"
CONF_BATTERY_LEVEL_TOPIC = "battery_level_topic"
CONF_CHARGING_TEMPLATE = "charging_template"
CONF_CHARGING_TOPIC = "charging_topic"
CONF_CLEANING_TEMPLATE = "cleaning_template"
CONF_CLEANING_TOPIC = "cleaning_topic"
CONF_DOCKED_TEMPLATE = "docked_template"
CONF_DOCKED_TOPIC = "docked_topic"
CONF_ERROR_TEMPLATE = "error_template"
CONF_ERROR_TOPIC = "error_topic"
CONF_FAN_SPEED_LIST = "fan_speed_list"
CONF_FAN_SPEED_TEMPLATE = "fan_speed_template"
CONF_FAN_SPEED_TOPIC = "fan_speed_topic"
CONF_PAYLOAD_CLEAN_SPOT = "payload_clean_spot"
CONF_PAYLOAD_LOCATE = "payload_locate"
CONF_PAYLOAD_RETURN_TO_BASE = "payload_return_to_base"
CONF_PAYLOAD_START_PAUSE = "payload_start_pause"
CONF_PAYLOAD_STOP = "payload_stop"
CONF_PAYLOAD_TURN_OFF = "payload_turn_off"
CONF_PAYLOAD_TURN_ON = "payload_turn_on"
CONF_SEND_COMMAND_TOPIC = "send_command_topic"
CONF_SET_FAN_SPEED_TOPIC = "set_fan_speed_topic"
DEFAULT_NAME = "MQTT Vacuum"
DEFAULT_PAYLOAD_CLEAN_SPOT = "clean_spot"
DEFAULT_PAYLOAD_LOCATE = "locate"
DEFAULT_PAYLOAD_RETURN_TO_BASE = "return_to_base"
DEFAULT_PAYLOAD_START_PAUSE = "start_pause"
DEFAULT_PAYLOAD_STOP = "stop"
DEFAULT_PAYLOAD_TURN_OFF = "turn_off"
DEFAULT_PAYLOAD_TURN_ON = "turn_on"
DEFAULT_RETAIN = False
DEFAULT_SERVICE_STRINGS = services_to_strings(DEFAULT_SERVICES, SERVICE_TO_STRING)
PLATFORM_SCHEMA_LEGACY = (
mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
{
vol.Inclusive(CONF_BATTERY_LEVEL_TEMPLATE, "battery"): cv.template,
vol.Inclusive(
CONF_BATTERY_LEVEL_TOPIC, "battery"
): mqtt.valid_publish_topic,
vol.Inclusive(CONF_CHARGING_TEMPLATE, "charging"): cv.template,
vol.Inclusive(CONF_CHARGING_TOPIC, "charging"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_CLEANING_TEMPLATE, "cleaning"): cv.template,
vol.Inclusive(CONF_CLEANING_TOPIC, "cleaning"): mqtt.valid_publish_topic,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Inclusive(CONF_DOCKED_TEMPLATE, "docked"): cv.template,
vol.Inclusive(CONF_DOCKED_TOPIC, "docked"): mqtt.valid_publish_topic,
vol.Inclusive(CONF_ERROR_TEMPLATE, "error"): cv.template,
vol.Inclusive(CONF_ERROR_TOPIC, "error"): mqtt.valid_publish_topic,
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Inclusive(CONF_FAN_SPEED_TEMPLATE, "fan_speed"): cv.template,
vol.Inclusive(CONF_FAN_SPEED_TOPIC, "fan_speed"): mqtt.valid_publish_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_PAYLOAD_CLEAN_SPOT, default=DEFAULT_PAYLOAD_CLEAN_SPOT
): cv.string,
vol.Optional(
CONF_PAYLOAD_LOCATE, default=DEFAULT_PAYLOAD_LOCATE
): cv.string,
vol.Optional(
CONF_PAYLOAD_RETURN_TO_BASE, default=DEFAULT_PAYLOAD_RETURN_TO_BASE
): cv.string,
vol.Optional(
CONF_PAYLOAD_START_PAUSE, default=DEFAULT_PAYLOAD_START_PAUSE
): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): cv.string,
vol.Optional(
CONF_PAYLOAD_TURN_OFF, default=DEFAULT_PAYLOAD_TURN_OFF
): cv.string,
vol.Optional(
CONF_PAYLOAD_TURN_ON, default=DEFAULT_PAYLOAD_TURN_ON
): cv.string,
vol.Optional(CONF_SEND_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_SET_FAN_SPEED_TOPIC): mqtt.valid_publish_topic,
vol.Optional(
CONF_SUPPORTED_FEATURES, default=DEFAULT_SERVICE_STRINGS
): vol.All(cv.ensure_list, [vol.In(STRING_TO_SERVICE.keys())]),
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(mqtt.CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(mqtt.CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
.extend(MQTT_VACUUM_SCHEMA.schema)
)
async def async_setup_entity_legacy(
config, async_add_entities, config_entry, discovery_data
):
"""Set up a MQTT Vacuum Legacy."""
async_add_entities([MqttVacuum(config, config_entry, discovery_data)])
class MqttVacuum(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
VacuumEntity,
):
"""Representation of a MQTT-controlled legacy vacuum."""
def __init__(self, config, config_entry, discovery_info):
"""Initialize the vacuum."""
self._cleaning = False
self._charging = False
self._docked = False
self._error = None
self._status = "Unknown"
self._battery_level = 0
self._fan_speed = "unknown"
self._fan_speed_list = []
self._sub_state = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_info, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
def _setup_from_config(self, config):
self._name = config[CONF_NAME]
supported_feature_strings = config[CONF_SUPPORTED_FEATURES]
self._supported_features = strings_to_services(
supported_feature_strings, STRING_TO_SERVICE
)
self._fan_speed_list = config[CONF_FAN_SPEED_LIST]
self._qos = config[mqtt.CONF_QOS]
self._retain = config[mqtt.CONF_RETAIN]
self._command_topic = config.get(mqtt.CONF_COMMAND_TOPIC)
self._set_fan_speed_topic = config.get(CONF_SET_FAN_SPEED_TOPIC)
self._send_command_topic = config.get(CONF_SEND_COMMAND_TOPIC)
self._payloads = {
key: config.get(key)
for key in (
CONF_PAYLOAD_TURN_ON,
CONF_PAYLOAD_TURN_OFF,
CONF_PAYLOAD_RETURN_TO_BASE,
CONF_PAYLOAD_STOP,
CONF_PAYLOAD_CLEAN_SPOT,
CONF_PAYLOAD_LOCATE,
CONF_PAYLOAD_START_PAUSE,
)
}
self._state_topics = {
key: config.get(key)
for key in (
CONF_BATTERY_LEVEL_TOPIC,
CONF_CHARGING_TOPIC,
CONF_CLEANING_TOPIC,
CONF_DOCKED_TOPIC,
CONF_ERROR_TOPIC,
CONF_FAN_SPEED_TOPIC,
)
}
self._templates = {
key: config.get(key)
for key in (
CONF_BATTERY_LEVEL_TEMPLATE,
CONF_CHARGING_TEMPLATE,
CONF_CLEANING_TEMPLATE,
CONF_DOCKED_TEMPLATE,
CONF_ERROR_TEMPLATE,
CONF_FAN_SPEED_TEMPLATE,
)
}
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_LEGACY(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
for tpl in self._templates.values():
if tpl is not None:
tpl.hass = self.hass
@callback
@log_messages(self.hass, self.entity_id)
def message_received(msg):
"""Handle new MQTT message."""
if (
msg.topic == self._state_topics[CONF_BATTERY_LEVEL_TOPIC]
and self._templates[CONF_BATTERY_LEVEL_TEMPLATE]
):
battery_level = self._templates[
CONF_BATTERY_LEVEL_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if battery_level:
self._battery_level = int(battery_level)
if (
msg.topic == self._state_topics[CONF_CHARGING_TOPIC]
and self._templates[CONF_CHARGING_TEMPLATE]
):
charging = self._templates[
CONF_CHARGING_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if charging:
self._charging = cv.boolean(charging)
if (
msg.topic == self._state_topics[CONF_CLEANING_TOPIC]
and self._templates[CONF_CLEANING_TEMPLATE]
):
cleaning = self._templates[
CONF_CLEANING_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if cleaning:
self._cleaning = cv.boolean(cleaning)
if (
msg.topic == self._state_topics[CONF_DOCKED_TOPIC]
and self._templates[CONF_DOCKED_TEMPLATE]
):
docked = self._templates[
CONF_DOCKED_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if docked:
self._docked = cv.boolean(docked)
if (
msg.topic == self._state_topics[CONF_ERROR_TOPIC]
and self._templates[CONF_ERROR_TEMPLATE]
):
error = self._templates[
CONF_ERROR_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if error is not None:
self._error = cv.string(error)
if self._docked:
if self._charging:
self._status = "Docked & Charging"
else:
self._status = "Docked"
elif self._cleaning:
self._status = "Cleaning"
elif self._error:
self._status = f"Error: {self._error}"
else:
self._status = "Stopped"
if (
msg.topic == self._state_topics[CONF_FAN_SPEED_TOPIC]
and self._templates[CONF_FAN_SPEED_TEMPLATE]
):
fan_speed = self._templates[
CONF_FAN_SPEED_TEMPLATE
].async_render_with_possible_json_value(msg.payload, error_value=None)
if fan_speed:
self._fan_speed = fan_speed
self.async_write_ha_state()
topics_list = {topic for topic in self._state_topics.values() if topic}
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
f"topic{i}": {
"topic": topic,
"msg_callback": message_received,
"qos": self._qos,
}
for i, topic in enumerate(topics_list)
},
)
@property
def name(self):
"""Return the name of the vacuum."""
return self._name
@property
def should_poll(self):
"""No polling needed for an MQTT vacuum."""
return False
@property
def is_on(self):
"""Return true if vacuum is on."""
return self._cleaning
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def status(self):
"""Return a status string for the vacuum."""
return self._status
@property
def fan_speed(self):
"""Return the fan speed of the vacuum."""
return self._fan_speed
@property
def fan_speed_list(self):
"""Return the list of supported fan speeds."""
return self._fan_speed_list
@property
def battery_level(self):
"""Return the battery level of the vacuum."""
return max(0, min(100, self._battery_level))
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner.
No need to check SUPPORT_BATTERY, this won't be called if battery_level is None.
"""
return icon_for_battery_level(
battery_level=self.battery_level, charging=self._charging
)
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
async def async_turn_on(self, **kwargs):
"""Turn the vacuum on."""
if self.supported_features & SUPPORT_TURN_ON == 0:
return
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_TURN_ON],
self._qos,
self._retain,
)
self._status = "Cleaning"
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the vacuum off."""
if self.supported_features & SUPPORT_TURN_OFF == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_TURN_OFF],
self._qos,
self._retain,
)
self._status = "Turning Off"
self.async_write_ha_state()
async def async_stop(self, **kwargs):
"""Stop the vacuum."""
if self.supported_features & SUPPORT_STOP == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_STOP],
self._qos,
self._retain,
)
self._status = "Stopping the current task"
self.async_write_ha_state()
async def async_clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
if self.supported_features & SUPPORT_CLEAN_SPOT == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_CLEAN_SPOT],
self._qos,
self._retain,
)
self._status = "Cleaning spot"
self.async_write_ha_state()
async def async_locate(self, **kwargs):
"""Locate the vacuum (usually by playing a song)."""
if self.supported_features & SUPPORT_LOCATE == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_LOCATE],
self._qos,
self._retain,
)
self._status = "Hi, I'm over here!"
self.async_write_ha_state()
async def async_start_pause(self, **kwargs):
"""Start, pause or resume the cleaning task."""
if self.supported_features & SUPPORT_PAUSE == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_START_PAUSE],
self._qos,
self._retain,
)
self._status = "Pausing/Resuming cleaning..."
self.async_write_ha_state()
async def async_return_to_base(self, **kwargs):
"""Tell the vacuum to return to its dock."""
if self.supported_features & SUPPORT_RETURN_HOME == 0:
return None
mqtt.async_publish(
self.hass,
self._command_topic,
self._payloads[CONF_PAYLOAD_RETURN_TO_BASE],
self._qos,
self._retain,
)
self._status = "Returning home..."
self.async_write_ha_state()
async def async_set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
if (
self.supported_features & SUPPORT_FAN_SPEED == 0
) or fan_speed not in self._fan_speed_list:
return None
mqtt.async_publish(
self.hass, self._set_fan_speed_topic, fan_speed, self._qos, self._retain
)
self._status = f"Setting fan to {fan_speed}..."
self.async_write_ha_state()
async def async_send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner."""
if self.supported_features & SUPPORT_SEND_COMMAND == 0:
return
if params:
message = {"command": command}
message.update(params)
message = json.dumps(message)
else:
message = command
mqtt.async_publish(
self.hass, self._send_command_topic, message, self._qos, self._retain
)
self._status = f"Sending command {message}..."
self.async_write_ha_state()
|
import datetime
from homeassistant.components import gdacs
from homeassistant.components.gdacs import DEFAULT_SCAN_INTERVAL, DOMAIN, FEED
from homeassistant.components.gdacs.geo_location import (
ATTR_ALERT_LEVEL,
ATTR_COUNTRY,
ATTR_DESCRIPTION,
ATTR_DURATION_IN_WEEK,
ATTR_EVENT_TYPE,
ATTR_EXTERNAL_ID,
ATTR_FROM_DATE,
ATTR_POPULATION,
ATTR_SEVERITY,
ATTR_TO_DATE,
ATTR_VULNERABILITY,
)
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.gdacs import _generate_mock_feed_entry
CONFIG = {gdacs.DOMAIN: {CONF_RADIUS: 200}}
async def test_setup(hass, legacy_patchable_time):
"""Test the general setup of the integration."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Description 1",
15.5,
(38.0, -3.0),
event_name="Name 1",
event_type_short="DR",
event_type="Drought",
alert_level="Alert Level 1",
country="Country 1",
attribution="Attribution 1",
from_date=datetime.datetime(2020, 1, 10, 8, 0, tzinfo=datetime.timezone.utc),
to_date=datetime.datetime(2020, 1, 20, 8, 0, tzinfo=datetime.timezone.utc),
duration_in_week=1,
population="Population 1",
severity="Severity 1",
vulnerability="Vulnerability 1",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345",
"Description 2",
20.5,
(38.1, -3.1),
event_name="Name 2",
event_type_short="TC",
event_type="Tropical Cyclone",
)
mock_entry_3 = _generate_mock_feed_entry(
"3456",
"Description 3",
25.5,
(38.2, -3.2),
event_name="Name 3",
event_type_short="TC",
event_type="Tropical Cyclone",
country="Country 2",
)
mock_entry_4 = _generate_mock_feed_entry(
"4567", "Description 4", 12.5, (38.3, -3.3)
)
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_georss_client.feed.GeoRssFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
assert await async_setup_component(hass, gdacs.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
# 3 geolocation and 1 sensor entities
assert len(all_states) == 4
entity_registry = await async_get_registry(hass)
assert len(entity_registry.entities) == 4
state = hass.states.get("geo_location.drought_name_1")
assert state is not None
assert state.name == "Drought: Name 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Drought: Name 1",
ATTR_DESCRIPTION: "Description 1",
ATTR_COUNTRY: "Country 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_FROM_DATE: datetime.datetime(
2020, 1, 10, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_TO_DATE: datetime.datetime(
2020, 1, 20, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_DURATION_IN_WEEK: 1,
ATTR_ALERT_LEVEL: "Alert Level 1",
ATTR_POPULATION: "Population 1",
ATTR_EVENT_TYPE: "Drought",
ATTR_SEVERITY: "Severity 1",
ATTR_VULNERABILITY: "Vulnerability 1",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:water-off",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.tropical_cyclone_name_2")
assert state is not None
assert state.name == "Tropical Cyclone: Name 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "Tropical Cyclone: Name 2",
ATTR_DESCRIPTION: "Description 2",
ATTR_EVENT_TYPE: "Tropical Cyclone",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:weather-hurricane",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.tropical_cyclone_name_3")
assert state is not None
assert state.name == "Tropical Cyclone: Name 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Tropical Cyclone: Name 3",
ATTR_DESCRIPTION: "Description 3",
ATTR_EVENT_TYPE: "Tropical Cyclone",
ATTR_COUNTRY: "Country 2",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:weather-hurricane",
}
assert float(state.state) == 25.5
# Simulate an update - two existing, one new entry, one outdated entry
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_4, mock_entry_3]
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
        # Simulate a failed update - this removes all entities
mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(entity_registry.entities) == 1
async def test_setup_imperial(hass, legacy_patchable_time):
"""Test the setup of the integration using imperial unit system."""
hass.config.units = IMPERIAL_SYSTEM
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Description 1",
15.5,
(38.0, -3.0),
event_name="Name 1",
event_type_short="DR",
event_type="Drought",
)
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_georss_client.feed.GeoRssFeed.update"
) as mock_feed_update, patch(
"aio_georss_client.feed.GeoRssFeed.last_timestamp", create=True
):
mock_feed_update.return_value = "OK", [mock_entry_1]
assert await async_setup_component(hass, gdacs.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 2
# Test conversion of 200 miles to kilometers.
feeds = hass.data[DOMAIN][FEED]
assert feeds is not None
assert len(feeds) == 1
manager = list(feeds.values())[0]
# Ensure that the filter value in km is correctly set.
assert manager._feed_manager._feed._filter_radius == 321.8688
state = hass.states.get("geo_location.drought_name_1")
assert state is not None
assert state.name == "Drought: Name 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Drought: Name 1",
ATTR_DESCRIPTION: "Description 1",
ATTR_EVENT_TYPE: "Drought",
ATTR_UNIT_OF_MEASUREMENT: "mi",
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:water-off",
}
# 15.5km (as defined in mock entry) has been converted to 9.6mi.
assert float(state.state) == 9.6
|
import re
import argparse
import functools
from typing import Any, Callable, Dict, Optional
import attr
from PyQt5.QtCore import QUrl, pyqtSlot, qVersion
from PyQt5.QtGui import QFont
import qutebrowser
from qutebrowser.config import config
from qutebrowser.utils import usertypes, urlmatch, qtutils, utils
from qutebrowser.misc import objects, debugcachestats
UNSET = object()
@attr.s
class UserAgent:
"""A parsed user agent."""
os_info: str = attr.ib()
webkit_version: str = attr.ib()
upstream_browser_key: str = attr.ib()
upstream_browser_version: str = attr.ib()
qt_key: str = attr.ib()
@classmethod
def parse(cls, ua: str) -> 'UserAgent':
"""Parse a user agent string into its components."""
comment_matches = re.finditer(r'\(([^)]*)\)', ua)
os_info = list(comment_matches)[0].group(1)
version_matches = re.finditer(r'(\S+)/(\S+)', ua)
versions = {}
for match in version_matches:
versions[match.group(1)] = match.group(2)
webkit_version = versions['AppleWebKit']
if 'Chrome' in versions:
upstream_browser_key = 'Chrome'
qt_key = 'QtWebEngine'
elif 'Version' in versions:
upstream_browser_key = 'Version'
qt_key = 'Qt'
else:
raise ValueError("Invalid upstream browser key: {}".format(ua))
upstream_browser_version = versions[upstream_browser_key]
return cls(os_info=os_info,
webkit_version=webkit_version,
upstream_browser_key=upstream_browser_key,
upstream_browser_version=upstream_browser_version,
qt_key=qt_key)
class AttributeInfo:
"""Info about a settings attribute."""
    def __init__(self, *attributes: Any, converter: Optional[Callable] = None) -> None:
self.attributes = attributes
if converter is None:
self.converter = lambda val: val
else:
self.converter = converter
class AbstractSettings:
"""Abstract base class for settings set via QWeb(Engine)Settings."""
_ATTRIBUTES: Dict[str, AttributeInfo] = {}
_FONT_SIZES: Dict[str, Any] = {}
_FONT_FAMILIES: Dict[str, Any] = {}
_FONT_TO_QFONT: Dict[Any, QFont.StyleHint] = {}
def __init__(self, settings: Any) -> None:
self._settings = settings
def _assert_not_unset(self, value: Any) -> None:
assert value is not usertypes.UNSET
def set_attribute(self, name: str, value: Any) -> None:
"""Set the given QWebSettings/QWebEngineSettings attribute.
If the value is usertypes.UNSET, the value is reset instead.
"""
info = self._ATTRIBUTES[name]
for attribute in info.attributes:
if value is usertypes.UNSET:
self._settings.resetAttribute(attribute)
else:
self._settings.setAttribute(attribute, info.converter(value))
def test_attribute(self, name: str) -> bool:
"""Get the value for the given attribute.
If the setting resolves to a list of attributes, only the first
attribute is tested.
"""
info = self._ATTRIBUTES[name]
return self._settings.testAttribute(info.attributes[0])
def set_font_size(self, name: str, value: int) -> None:
"""Set the given QWebSettings/QWebEngineSettings font size."""
self._assert_not_unset(value)
family = self._FONT_SIZES[name]
self._settings.setFontSize(family, value)
def set_font_family(self, name: str, value: Optional[str]) -> None:
"""Set the given QWebSettings/QWebEngineSettings font family.
With None (the default), QFont is used to get the default font for the
family.
"""
self._assert_not_unset(value)
family = self._FONT_FAMILIES[name]
if value is None:
font = QFont()
font.setStyleHint(self._FONT_TO_QFONT[family])
value = font.defaultFamily()
self._settings.setFontFamily(family, value)
def set_default_text_encoding(self, encoding: str) -> None:
"""Set the default text encoding to use."""
self._assert_not_unset(encoding)
self._settings.setDefaultTextEncoding(encoding)
def _update_setting(self, setting: str, value: Any) -> bool:
"""Update the given setting/value.
Unknown settings are ignored.
Return:
True if there was a change, False otherwise.
"""
if setting in self._ATTRIBUTES:
self.set_attribute(setting, value)
elif setting in self._FONT_SIZES:
self.set_font_size(setting, value)
elif setting in self._FONT_FAMILIES:
self.set_font_family(setting, value)
elif setting == 'content.default_encoding':
self.set_default_text_encoding(value)
return False
def update_setting(self, setting: str) -> None:
"""Update the given setting."""
value = config.instance.get(setting)
self._update_setting(setting, value)
def update_for_url(self, url: QUrl) -> None:
"""Update settings customized for the given tab."""
qtutils.ensure_valid(url)
for values in config.instance:
if not values.opt.supports_pattern:
continue
value = values.get_for_url(url, fallback=False)
self._update_setting(values.opt.name, value)
def init_settings(self) -> None:
"""Set all supported settings correctly."""
for setting in (list(self._ATTRIBUTES) + list(self._FONT_SIZES) +
list(self._FONT_FAMILIES)):
self.update_setting(setting)
@debugcachestats.register(name='user agent cache')
@functools.lru_cache()
def _format_user_agent(template: str, backend: usertypes.Backend) -> str:
if backend == usertypes.Backend.QtWebEngine:
from qutebrowser.browser.webengine import webenginesettings
parsed = webenginesettings.parsed_user_agent
else:
from qutebrowser.browser.webkit import webkitsettings
parsed = webkitsettings.parsed_user_agent
assert parsed is not None
return template.format(
os_info=parsed.os_info,
webkit_version=parsed.webkit_version,
qt_key=parsed.qt_key,
qt_version=qVersion(),
upstream_browser_key=parsed.upstream_browser_key,
upstream_browser_version=parsed.upstream_browser_version,
qutebrowser_version=qutebrowser.__version__,
)
def user_agent(url: Optional[QUrl] = None) -> str:
"""Get the user agent for the given URL, or the global one if URL is None.
Note that the given URL should always be valid.
"""
template = config.instance.get('content.headers.user_agent', url=url)
return _format_user_agent(template=template, backend=objects.backend)
def init(args: argparse.Namespace) -> None:
"""Initialize all QWeb(Engine)Settings."""
if objects.backend == usertypes.Backend.QtWebEngine:
from qutebrowser.browser.webengine import webenginesettings
webenginesettings.init()
elif objects.backend == usertypes.Backend.QtWebKit:
from qutebrowser.browser.webkit import webkitsettings
webkitsettings.init()
else:
raise utils.Unreachable(objects.backend)
# Make sure special URLs always get JS support
for pattern in ['chrome://*/*', 'qute://*/*']:
config.instance.set_obj('content.javascript.enabled', True,
pattern=urlmatch.UrlPattern(pattern),
hide_userconfig=True)
def clear_private_data() -> None:
"""Clear cookies, cache and related data for private browsing sessions."""
if objects.backend == usertypes.Backend.QtWebEngine:
from qutebrowser.browser.webengine import webenginesettings
webenginesettings.init_private_profile()
elif objects.backend == usertypes.Backend.QtWebKit:
from qutebrowser.browser.webkit import cookies
assert cookies.ram_cookie_jar is not None
cookies.ram_cookie_jar.setAllCookies([])
else:
raise utils.Unreachable(objects.backend)
@pyqtSlot()
def shutdown() -> None:
"""Shut down QWeb(Engine)Settings."""
if objects.backend == usertypes.Backend.QtWebEngine:
from qutebrowser.browser.webengine import webenginesettings
webenginesettings.shutdown()
elif objects.backend == usertypes.Backend.QtWebKit:
from qutebrowser.browser.webkit import webkitsettings
webkitsettings.shutdown()
else:
raise utils.Unreachable(objects.backend)
|
from weblate.machinery.base import MachineTranslation
class GlosbeTranslation(MachineTranslation):
"""Glosbe machine translation support."""
name = "Glosbe"
max_score = 90
do_cleanup = False
def map_code_code(self, code):
"""Convert language to service specific code."""
return code.replace("_", "-").split("-")[0].lower()
def is_supported(self, source, language):
"""Any language is supported."""
return True
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
params = {"from": source, "dest": language, "format": "json", "phrase": text}
response = self.request(
"get", "https://glosbe.com/gapi/translate", params=params
)
payload = response.json()
if "tuc" not in payload:
return
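        # Shape assumed by the loop below (illustrative, not an exhaustive
        # schema): {"tuc": [{"phrase": {"text": "..."}}, ...]}; entries
        # without a usable "phrase" are skipped.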
for match in payload["tuc"]:
if "phrase" not in match or match["phrase"] is None:
continue
yield {
"text": match["phrase"]["text"],
"quality": self.max_score,
"service": self.name,
"source": text,
}
|
from __future__ import print_function
import sys
import time
import re
import unicodedata
from diamond.metric import Metric
import diamond.collector
import diamond.convertor
class NetAppCollector(diamond.collector.Collector):
# This is the list of metrics to collect.
# This is a dict of lists with tuples, which is parsed as such:
# The dict name is the object name in the NetApp API.
# For each object we have a list of metrics to retrieve.
    # Each tuple is built like this:
# ("metric name in netapp api", "output name of metric", multiplier)
# The purpose of the output name is to enable replacement of reported
    # metric names, since some of the names in the API can be confusing.
# The purpose of the multiplier is to scale all metrics to a common
# scale, which is latencies in milliseconds, and data in bytes per sec.
# This is needed since the API will return a mixture of percentages,
# nanoseconds, milliseconds, bytes and kilobytes.
METRICS = {
'aggregate': [
("user_reads", "user_read_iops", 1),
("user_writes", "user_write_iops", 1)
],
'disk': [
("disk_busy", "disk_busy_pct", 100),
("base_for_disk_busy", "base_for_disk_busy", 1),
("user_read_blocks", "user_read_blocks_per_sec", 1),
("user_write_blocks", "user_write_blocks_per_sec", 1),
("user_read_latency", "user_read_latency", 0.001),
("user_write_latency", "user_write_latency", 0.001)
],
'ifnet': [
("send_data", "tx_bytes_per_sec", 1),
("recv_data", "rx_bytes_per_sec", 1)
],
'lun': [
("total_ops", "total_iops", 1),
("read_ops", "read_iops", 1),
("write_ops", "write_iops", 1),
("avg_latency", "avg_latency", 1)
],
'processor': [
("processor_busy", "processor_busy_pct", 100),
("processor_elapsed_time", "processor_elapsed_time", 1)
],
'system': [
("nfs_ops", "nfs_iops", 1),
("cifs_ops", "cifs_iops", 1),
("http_ops", "http_iops", 1),
("fcp_ops", "fcp_iops", 1),
("http_ops", "http_iops", 1),
("iscsi_ops", "iscsi_iops", 1),
("read_ops", "read_iops", 1),
("write_ops", "write_iops", 1),
("total_ops", "total_iops", 1),
("cpu_elapsed_time", "cpu_elapsed_time", 1),
("total_processor_busy", "total_processor_busy_pct", 100),
("avg_processor_busy", "avg_processor_busy_pct", 100),
("net_data_recv", "total_rx_bytes_per_sec", 1000),
("net_data_sent", "total_tx_bytes_per_sec", 1000),
("disk_data_read", "total_read_bytes_per_sec", 1000),
("disk_data_written", "total_write_bytes_per_sec", 1000),
("sys_read_latency", "sys_read_latency", 1),
("sys_write_latency", "sys_write_latency", 1),
("sys_avg_latency", "sys_avg_latency", 1)
],
'vfiler': [
("vfiler_cpu_busy", "cpu_busy_pct", 100),
("vfiler_cpu_busy_base", "cpu_busy_base", 1),
("vfiler_net_data_recv", "rx_bytes_per_sec", 1000),
("vfiler_net_data_sent", "tx_bytes_per_sec", 1000),
("vfiler_read_ops", "read_iops", 1),
("vfiler_write_ops", "write_iops", 1),
("vfiler_read_bytes", "read_bytes_per_sec", 1000),
("vfiler_write_bytes", "write_bytes_per_sec", 1000),
],
'volume': [
("total_ops", "total_iops", 1),
("avg_latency", "avg_latency", 0.001),
("read_ops", "read_iops", 1),
("write_ops", "write_iops", 1),
("read_latency", "read_latency", 0.001),
("write_latency", "write_latency", 0.001),
("read_data", "read_bytes_per_sec", 1),
("write_data", "write_bytes_per_sec", 1),
("cifs_read_data", "cifs_read_bytes_per_sec", 1),
("cifs_write_data", "cifs_write_bytes_per_sec", 1),
("cifs_read_latency", "cifs_read_latency", 0.001),
("cifs_write_latency", "cifs_write_latency", 0.001),
("cifs_read_ops", "cifs_read_iops", 1),
("cifs_write_ops", "cifs_write_iops", 1),
("fcp_read_data", "fcp_read_bytes_per_sec", 1),
("fcp_write_data", "fcp_write_bytes_per_sec", 1),
("fcp_read_latency", "fcp_read_latency", 0.001),
("fcp_write_latency", "fcp_write_latency", 0.001),
("fcp_read_ops", "fcp_read_iops", 1),
("fcp_write_ops", "fcp_write_iops", 1),
("iscsi_read_data", "iscsi_read_bytes_per_sec", 1),
("iscsi_write_data", "iscsi_write_bytes_per_sec", 1),
("iscsi_read_latency", "iscsi_read_latency", 0.001),
("iscsi_write_latency", "iscsi_write_latency", 0.001),
("iscsi_read_ops", "iscsi_read_iops", 1),
("iscsi_write_ops", "iscsi_write_iops", 1),
("nfs_read_data", "nfs_read_bytes_per_sec", 1),
("nfs_write_data", "nfs_write_bytes_per_sec", 1),
("nfs_read_latency", "nfs_read_latency", 0.001),
("nfs_write_latency", "nfs_write_latency", 0.001),
("nfs_read_ops", "nfs_read_iops", 1),
("nfs_write_ops", "nfs_write_iops", 1)
],
}
# For some metrics we need to divide one value from the API with another.
# This is a key-value list of the connected values.
DIVIDERS = {
"avg_latency": "total_ops",
"read_latency": "read_ops",
"write_latency": "write_ops",
"sys_avg_latency": "total_ops",
"sys_read_latency": "read_ops",
"sys_write_latency": "write_ops",
"cifs_read_latency": "cifs_read_ops",
"cifs_write_latency": "cifs_write_ops",
"fcp_read_latency": "fcp_read_ops",
"fcp_write_latency": "fcp_write_ops",
"iscsi_read_latency": "iscsi_read_ops",
"iscsi_write_latency": "iscsi_write_ops",
"nfs_read_latency": "nfs_read_ops",
"nfs_write_latency": "nfs_write_ops",
"user_read_latency": "user_read_blocks",
"user_write_latency": "user_write_blocks",
"total_processor_busy": "cpu_elapsed_time",
"avg_processor_busy": "cpu_elapsed_time",
"processor_busy": "processor_elapsed_time",
"disk_busy": "base_for_disk_busy",
"vfiler_cpu_busy": "vfiler_cpu_busy_base",
}
# Some metrics are collected simply to calculate other metrics.
# These should not be reported.
DROPMETRICS = [
"cpu_elapsed_time",
"processor_elapsed_time",
"base_for_disk_busy",
"vfiler_cpu_busy_base",
]
# Since we might have large collections collected often,
# we need a pretty good time_delta.
# We'll use a dict for this, keeping time_delta for each object.
LastCollectTime = {}
def get_default_config_help(self):
config_help = super(NetAppCollector, self).get_default_config_help()
return config_help
def get_default_config(self):
default_config = super(NetAppCollector, self).get_default_config()
default_config['path_prefix'] = "netapp"
default_config['netappsdkpath'] = "/opt/netapp/lib/python/NetApp"
return default_config
def _replace_and_publish(self, path, prettyname, value, device):
"""
Inputs a complete path for a metric and a value.
Replace the metric name and publish.
"""
if value is None:
return
newpath = path
# Change metric name before publish if needed.
newpath = ".".join([".".join(path.split(".")[:-1]), prettyname])
metric = Metric(newpath, value, precision=4, host=device)
self.publish_metric(metric)
def _gen_delta_depend(self, path, derivative, multiplier, prettyname,
device):
"""
For some metrics we need to divide the delta for one metric
with the delta of another.
        Publishes a metric if the conversion goes well.
"""
primary_delta = derivative[path]
shortpath = ".".join(path.split(".")[:-1])
basename = path.split(".")[-1]
secondary_delta = None
if basename in self.DIVIDERS.keys():
mateKey = ".".join([shortpath, self.DIVIDERS[basename]])
else:
return
if mateKey in derivative.keys():
secondary_delta = derivative[mateKey]
else:
return
# If we find a corresponding secondary_delta, publish a metric
if primary_delta > 0 and secondary_delta > 0:
value = (float(primary_delta) / secondary_delta) * multiplier
self._replace_and_publish(path, prettyname, value, device)
def _gen_delta_per_sec(self, path, value_delta, time_delta, multiplier,
prettyname, device):
"""
        Calculates the difference between two points, and scales it to per second.
"""
        if time_delta is None or time_delta < 0:
return
value = (value_delta / time_delta) * multiplier
# Only publish if there is any data.
# This helps keep unused metrics out of Graphite
if value > 0.0:
self._replace_and_publish(path, prettyname, value, device)
def collect(self, device, ip, user, password):
"""
This function collects the metrics for one filer.
"""
sys.path.append(self.config['netappsdkpath'])
try:
import NaServer
except ImportError:
self.log.error("Unable to load NetApp SDK from %s" % (
self.config['netappsdkpath']))
return
# Set up the parameters
server = NaServer.NaServer(ip, 1, 3)
server.set_transport_type('HTTPS')
server.set_style('LOGIN')
server.set_admin_user(user, password)
# We're only able to query a single object at a time,
# so we'll loop over the objects.
for na_object in self.METRICS.keys():
# For easy reference later, generate a new dict for this object
LOCALMETRICS = {}
for metric in self.METRICS[na_object]:
metricname, prettyname, multiplier = metric
LOCALMETRICS[metricname] = {}
LOCALMETRICS[metricname]["prettyname"] = prettyname
LOCALMETRICS[metricname]["multiplier"] = multiplier
# Keep track of how long has passed since we checked last
CollectTime = time.time()
time_delta = None
if na_object in self.LastCollectTime.keys():
time_delta = CollectTime - self.LastCollectTime[na_object]
self.LastCollectTime[na_object] = CollectTime
self.log.debug("Collecting metric of object %s" % na_object)
query = NaServer.NaElement("perf-object-get-instances-iter-start")
query.child_add_string("objectname", na_object)
counters = NaServer.NaElement("counters")
for metric in LOCALMETRICS.keys():
counters.child_add_string("counter", metric)
query.child_add(counters)
res = server.invoke_elem(query)
if(res.results_status() == "failed"):
self.log.error("Connection to filer %s failed; %s" % (
device, res.results_reason()))
return
iter_tag = res.child_get_string("tag")
num_records = 1
max_records = 100
# For some metrics there are dependencies between metrics for
# a single object, so we'll need to collect all, so we can do
# calculations later.
raw = {}
while(num_records != 0):
query = NaServer.NaElement(
"perf-object-get-instances-iter-next")
query.child_add_string("tag", iter_tag)
query.child_add_string("maximum", max_records)
res = server.invoke_elem(query)
if(res.results_status() == "failed"):
print("Connection to filer %s failed; %s" % (
device, res.results_reason()))
return
num_records = res.child_get_int("records")
if(num_records > 0):
instances_list = res.child_get("instances")
instances = instances_list.children_get()
for instance in instances:
raw_name = unicodedata.normalize(
'NFKD',
instance.child_get_string("name")).encode(
'ascii', 'ignore')
# Shorten the name for disks as they are very long and
# padded with zeroes, eg:
# 5000C500:3A236B0B:00000000:00000000:00000000:...
if na_object is "disk":
non_zero_blocks = [
block for block in raw_name.split(":")
if block != "00000000"
]
raw_name = "".join(non_zero_blocks)
instance_name = re.sub(r'\W', '_', raw_name)
counters_list = instance.child_get("counters")
counters = counters_list.children_get()
for counter in counters:
metricname = unicodedata.normalize(
'NFKD',
counter.child_get_string("name")).encode(
'ascii', 'ignore')
metricvalue = counter.child_get_string("value")
# We'll need a long complete pathname to not
# confuse self.derivative
pathname = ".".join([self.config["path_prefix"],
device, na_object,
instance_name, metricname])
raw[pathname] = int(metricvalue)
# Do the math
self.log.debug("Processing %i metrics for object %s" % (len(raw),
na_object))
# Since the derivative function both returns the derivative
# and saves a new point, we'll need to store all derivatives
# for local reference.
derivative = {}
for key in raw.keys():
derivative[key] = self.derivative(key, raw[key])
for key in raw.keys():
metricname = key.split(".")[-1]
prettyname = LOCALMETRICS[metricname]["prettyname"]
multiplier = LOCALMETRICS[metricname]["multiplier"]
if metricname in self.DROPMETRICS:
continue
elif metricname in self.DIVIDERS.keys():
self._gen_delta_depend(key, derivative, multiplier,
prettyname, device)
else:
self._gen_delta_per_sec(key, derivative[key], time_delta,
multiplier, prettyname, device)
|
import os
from typing import Dict, List, Optional
def get_env_list(name: str, default: Optional[List[str]] = None) -> List[str]:
"""Helper to get list from environment."""
if name not in os.environ:
return default or []
return os.environ[name].split(",")
def get_env_map(name: str, default: Optional[Dict[str, str]] = None) -> Dict[str, str]:
"""Helper to get mapping from environment.
parses 'full_name:name,email:mail' into {'email': 'mail', 'full_name': 'name'}
"""
if os.environ.get(name):
return dict(e.split(":") for e in os.environ[name].split(","))
return default or {}
def get_env_int(name: str, default: int = 0) -> int:
"""Helper to get integer value from environment."""
if name not in os.environ:
return default
return int(os.environ[name])
def get_env_bool(name: str, default: bool = False) -> bool:
"""Helper to get boolean value from environment."""
if name not in os.environ:
return default
true_values = {"true", "yes", "1"}
return os.environ[name].lower() in true_values
def modify_env_list(current: List[str], name: str) -> List[str]:
"""Helper to modify list (for example checks)."""
for item in reversed(get_env_list(f"WEBLATE_ADD_{name}")):
current.insert(0, item)
for item in get_env_list(f"WEBLATE_REMOVE_{name}"):
current.remove(item)
return current
|
import signal
import contextlib
import time
import pytest
from qutebrowser.api import cmdutils
from qutebrowser.utils import utils
from qutebrowser.components import misccommands
@contextlib.contextmanager
def _trapped_segv(handler):
"""Temporarily install given signal handler for SIGSEGV."""
old_handler = signal.signal(signal.SIGSEGV, handler)
yield
if old_handler is not None:
signal.signal(signal.SIGSEGV, old_handler)
def test_debug_crash_exception():
"""Verify that debug_crash crashes as intended."""
with pytest.raises(Exception, match="Forced crash"):
misccommands.debug_crash(typ='exception')
@pytest.mark.skipif(utils.is_windows,
reason="current CPython/win can't recover from SIGSEGV")
def test_debug_crash_segfault():
"""Verify that debug_crash crashes as intended."""
caught = False
def _handler(num, frame):
"""Temporary handler for segfault."""
nonlocal caught
caught = num == signal.SIGSEGV
with _trapped_segv(_handler):
# since we handle the segfault, execution will continue and run into
# the "Segfault failed (wat.)" Exception
with pytest.raises(Exception, match="Segfault failed"):
misccommands.debug_crash(typ='segfault')
time.sleep(0.001)
assert caught
def test_debug_trace(mocker):
"""Check if hunter.trace is properly called."""
# but only if hunter is available
pytest.importorskip('hunter')
hunter_mock = mocker.patch.object(misccommands, 'hunter')
misccommands.debug_trace(1)
hunter_mock.trace.assert_called_with(1)
def test_debug_trace_exception(mocker):
"""Check that exceptions thrown by hunter.trace are handled."""
def _mock_exception():
"""Side effect for testing debug_trace's reraise."""
raise Exception('message')
hunter_mock = mocker.patch.object(misccommands, 'hunter')
hunter_mock.trace.side_effect = _mock_exception
with pytest.raises(cmdutils.CommandError, match='Exception: message'):
misccommands.debug_trace()
def test_debug_trace_no_hunter(monkeypatch):
"""Test that an error is shown if debug_trace is called without hunter."""
monkeypatch.setattr(misccommands, 'hunter', None)
with pytest.raises(cmdutils.CommandError, match="You need to install "
"'hunter' to use this command!"):
misccommands.debug_trace()
|
import struct
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.dpi', 'setDPI', in_sig='qq')
def set_dpi_xy_byte(self, dpi_x, dpi_y):
"""
    Set the DPI on the mouse. Takes the X and Y DPI values and scales them to single-byte values for the driver.
:param dpi_x: X DPI
:type dpi_x: int
:param dpi_y: Y DPI
    :type dpi_y: int
"""
self.logger.debug("DBus call set_dpi_xy_byte")
driver_path = self.get_driver_path('dpi')
if dpi_x > 6750:
dpi_x = 6750
elif dpi_x < 100:
dpi_x = 100
if dpi_y > 6750:
dpi_y = 6750
elif dpi_y < 100:
dpi_y = 100
dpi_x_scaled = int(round(dpi_x / 6750 * 255, 2))
dpi_y_scaled = int(round(dpi_y / 6750 * 255, 2))
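    # Worked example of this scaling (illustrative): a requested DPI of 800
    # maps to int(round(800 / 6750 * 255, 2)) == 30; get_dpi_xy_byte() below
    # reverses it as int(round(30 / 255 * 6750, 2)) == 794, so the byte-level
    # round trip loses a little precision.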
self.dpi[0] = dpi_x
self.dpi[1] = dpi_y
self.set_persistence(None, "dpi_x", dpi_x_scaled)
self.set_persistence(None, "dpi_y", dpi_y_scaled)
if self._testing:
with open(driver_path, 'w') as driver_file:
driver_file.write("{}:{}".format(dpi_x_scaled, dpi_y_scaled))
return
dpi_bytes = struct.pack('>BB', dpi_x_scaled, dpi_y_scaled)
with open(driver_path, 'wb') as driver_file:
driver_file.write(dpi_bytes)
@endpoint('razer.device.dpi', 'getDPI', out_sig='ai')
def get_dpi_xy_byte(self):
"""
get the DPI on the mouse
:return: List of X, Y DPI
:rtype: list of int
"""
self.logger.debug("DBus call get_dpi_xy_byte")
driver_path = self.get_driver_path('dpi')
# try retrieving DPI from the hardware.
# if we can't (e.g. because the mouse has been disconnected)
# return the value in local storage.
try:
with open(driver_path, 'r') as driver_file:
result = driver_file.read()
dpi_x, dpi_y = [int(dpi) for dpi in result.strip().split(':')]
dpi_x = int(round(dpi_x / 255 * 6750, 2))
dpi_y = int(round(dpi_y / 255 * 6750, 2))
except FileNotFoundError:
dpi_x, dpi_y = self.dpi
return [dpi_x, dpi_y]
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import aws_dpb_emr
from perfkitbenchmarker.providers.aws import s3
from perfkitbenchmarker.providers.aws import util
from tests import pkb_common_test_case
TEST_RUN_URI = 'fakeru'
AWS_ZONE_US_EAST_1A = 'us-east-1a'
FLAGS = flags.FLAGS
class LocalAwsDpbEmr(aws_dpb_emr.AwsDpbEmr):
def __init__(self):
self.storage_service = s3.S3Service()
self.storage_service.PrepareService(
util.GetRegionFromZone(FLAGS.dpb_service_zone))
class AwsDpbEmrTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AwsDpbEmrTestCase, self).setUp()
FLAGS.run_uri = TEST_RUN_URI
FLAGS.dpb_service_zone = AWS_ZONE_US_EAST_1A
FLAGS.zones = [AWS_ZONE_US_EAST_1A]
def testCreateLogBucket(self):
local_emr = LocalAwsDpbEmr()
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=('out_', 'err_', 0)) as mock_issue:
local_emr._CreateLogBucket()
self.assertEqual(mock_issue.call_count, 2)
call_arg_list, _ = mock_issue.call_args
self.assertListEqual([
'aws', 's3api', 'put-bucket-tagging', '--bucket',
'pkb-{0}-emr'.format(
FLAGS.run_uri), '--tagging', 'TagSet=[]', '--region=us-east-1'
], call_arg_list[0])
def testDeleteLogBucket(self):
local_emr = LocalAwsDpbEmr()
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=('out_', 'err_', 0)) as mock_issue:
local_emr._DeleteLogBucket()
self.assertEqual(mock_issue.call_count, 1)
call_arg_list, _ = mock_issue.call_args
self.assertListEqual([
'aws', 's3', 'rb',
's3://%s' % 'pkb-{0}-emr'.format(FLAGS.run_uri), '--region',
util.GetRegionFromZone(FLAGS.dpb_service_zone), '--force'
], call_arg_list[0])
def testCreateBucket(self):
local_emr = LocalAwsDpbEmr()
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=('out_', 'err_', 0)) as mock_issue:
local_emr.CreateBucket('foo')
self.assertEqual(mock_issue.call_count, 2)
call_arg_list, _ = mock_issue.call_args
self.assertListEqual([
'aws', 's3api', 'put-bucket-tagging', '--bucket', 'foo', '--tagging',
'TagSet=[]', '--region=us-east-1'
], call_arg_list[0])
def testDeleteBucket(self):
local_emr = LocalAwsDpbEmr()
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=('out_', 'err_', 0)) as mock_issue:
local_emr.DeleteBucket('foo')
self.assertEqual(mock_issue.call_count, 1)
call_arg_list, _ = mock_issue.call_args
self.assertListEqual([
'aws', 's3', 'rb', 's3://{}'.format('foo'), '--region',
util.GetRegionFromZone(FLAGS.dpb_service_zone), '--force'
], call_arg_list[0])
if __name__ == '__main__':
unittest.main()
|
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
MOCK_VERSION = "10.0"
async def test_version_sensor(hass):
"""Test the Version sensor."""
config = {"sensor": {"platform": "version"}}
assert await async_setup_component(hass, "sensor", config)
async def test_version(hass):
"""Test the Version sensor."""
config = {"sensor": {"platform": "version", "name": "test"}}
with patch("homeassistant.const.__version__", MOCK_VERSION):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.test")
assert state.state == "10.0"
|
import logging
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
DOMAIN as LIGHT_DOMAIN,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
LightEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.util.color as color_util
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
_LOGGER = logging.getLogger(__name__)
ATTR_VALUE = "Value"
COLOR_CHANNEL_WARM_WHITE = 0x01
COLOR_CHANNEL_COLD_WHITE = 0x02
COLOR_CHANNEL_RED = 0x04
COLOR_CHANNEL_GREEN = 0x08
COLOR_CHANNEL_BLUE = 0x10
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Light from Config Entry."""
@callback
def async_add_light(values):
"""Add Z-Wave Light."""
light = ZwaveLight(values)
async_add_entities([light])
hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
async_dispatcher_connect(hass, f"{DOMAIN}_new_{LIGHT_DOMAIN}", async_add_light)
)
def byte_to_zwave_brightness(value):
"""Convert brightness in 0-255 scale to 0-99 scale.
`value` -- (int) Brightness byte value from 0-255.
"""
if value > 0:
return max(1, round((value / 255) * 99))
return 0
class ZwaveLight(ZWaveDeviceEntity, LightEntity):
"""Representation of a Z-Wave light."""
def __init__(self, values):
"""Initialize the light."""
super().__init__(values)
self._color_channels = None
self._hs = None
self._white = None
self._ct = None
self._supported_features = SUPPORT_BRIGHTNESS
self._min_mireds = 153 # 6500K as a safe default
self._max_mireds = 370 # 2700K as a safe default
# make sure that supported features is correctly set
self.on_value_update()
@callback
def on_value_update(self):
"""Call when the underlying value(s) is added or updated."""
if self.values.dimming_duration is not None:
self._supported_features |= SUPPORT_TRANSITION
if self.values.color is not None:
self._supported_features |= SUPPORT_COLOR
if self.values.color_channels is not None:
# Support Color Temp if both white channels
if (self.values.color_channels.value & COLOR_CHANNEL_WARM_WHITE) and (
self.values.color_channels.value & COLOR_CHANNEL_COLD_WHITE
):
self._supported_features |= SUPPORT_COLOR_TEMP
# Support White value if only a single white channel
if ((self.values.color_channels.value & COLOR_CHANNEL_WARM_WHITE) != 0) ^ (
(self.values.color_channels.value & COLOR_CHANNEL_COLD_WHITE) != 0
):
self._supported_features |= SUPPORT_WHITE_VALUE
if self.values.color is not None:
self._calculate_color_values()
@property
def brightness(self):
"""Return the brightness of this light between 0..255.
Zwave multilevel switches use a range of [0, 99] to control brightness.
"""
if "target" in self.values:
return round((self.values.target.value / 99) * 255)
return round((self.values.primary.value / 99) * 255)
@property
def is_on(self):
"""Return true if device is on (brightness above 0)."""
if "target" in self.values:
return self.values.target.value > 0
return self.values.primary.value > 0
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
@property
def hs_color(self):
"""Return the hs color."""
return self._hs
@property
def white_value(self):
"""Return the white value of this light between 0..255."""
return self._white
@property
def color_temp(self):
"""Return the color temperature."""
return self._ct
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._max_mireds
@callback
def async_set_duration(self, **kwargs):
"""Set the transition time for the brightness value.
Zwave Dimming Duration values now use seconds as an
integer (max: 7620 seconds or 127 mins)
Build 1205 https://github.com/OpenZWave/open-zwave/commit/f81bc04
"""
if self.values.dimming_duration is None:
return
ozw_version = tuple(
int(x)
for x in self.values.primary.ozw_instance.get_status().openzwave_version.split(
"."
)
)
if ATTR_TRANSITION not in kwargs:
# no transition specified by user, use defaults
new_value = 7621 # anything over 7620 uses the factory default
if ozw_version < (1, 6, 1205):
new_value = 255 # default for older version
else:
# transition specified by user
new_value = int(max(0, min(7620, kwargs[ATTR_TRANSITION])))
if ozw_version < (1, 6, 1205):
transition = kwargs[ATTR_TRANSITION]
if transition <= 127:
new_value = int(transition)
else:
minutes = int(transition / 60)
_LOGGER.debug(
"Transition rounded to %d minutes for %s",
minutes,
self.entity_id,
)
new_value = minutes + 128
# only send value if it differs from current
# this prevents a command for nothing
if self.values.dimming_duration.value != new_value:
self.values.dimming_duration.send_value(new_value)
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
self.async_set_duration(**kwargs)
rgbw = None
white = kwargs.get(ATTR_WHITE_VALUE)
hs_color = kwargs.get(ATTR_HS_COLOR)
color_temp = kwargs.get(ATTR_COLOR_TEMP)
if hs_color is not None:
rgbw = "#"
for colorval in color_util.color_hs_to_RGB(*hs_color):
rgbw += f"{colorval:02x}"
if self._color_channels and self._color_channels & COLOR_CHANNEL_COLD_WHITE:
rgbw += "0000"
else:
# trim the CW value or it will not work correctly
rgbw += "00"
# white LED must be off in order for color to work
elif white is not None:
if self._color_channels & COLOR_CHANNEL_WARM_WHITE:
# trim the CW value or it will not work correctly
rgbw = f"#000000{white:02x}"
else:
rgbw = f"#00000000{white:02x}"
elif color_temp is not None:
# Limit color temp to min/max values
cold = max(
0,
min(
255,
round(
(self._max_mireds - color_temp)
/ (self._max_mireds - self._min_mireds)
* 255
),
),
)
warm = 255 - cold
rgbw = f"#000000{warm:02x}{cold:02x}"
if rgbw and self.values.color:
self.values.color.send_value(rgbw)
# Zwave multilevel switches use a range of [0, 99] to control
# brightness. Level 255 means to set it to previous value.
if ATTR_BRIGHTNESS in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
brightness = byte_to_zwave_brightness(brightness)
else:
brightness = 255
self.values.primary.send_value(brightness)
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
self.async_set_duration(**kwargs)
self.values.primary.send_value(0)
def _calculate_color_values(self):
"""Parse color rgb and color temperature data."""
# Color Data String
data = self.values.color.data[ATTR_VALUE]
# RGB is always present in the OpenZWave color data string.
rgb = [int(data[1:3], 16), int(data[3:5], 16), int(data[5:7], 16)]
self._hs = color_util.color_RGB_to_hs(*rgb)
if self.values.color_channels is None:
return
# Color Channels
self._color_channels = self.values.color_channels.data[ATTR_VALUE]
# Parse remaining color channels. OpenZWave appends white channels
# that are present.
index = 7
temp_warm = 0
temp_cold = 0
# Update color temp limits.
if self.values.min_kelvin:
self._max_mireds = color_util.color_temperature_kelvin_to_mired(
self.values.min_kelvin.data[ATTR_VALUE]
)
if self.values.max_kelvin:
self._min_mireds = color_util.color_temperature_kelvin_to_mired(
self.values.max_kelvin.data[ATTR_VALUE]
)
# Warm white
if self._color_channels & COLOR_CHANNEL_WARM_WHITE:
self._white = int(data[index : index + 2], 16)
temp_warm = self._white
index += 2
# Cold white
if self._color_channels & COLOR_CHANNEL_COLD_WHITE:
self._white = int(data[index : index + 2], 16)
temp_cold = self._white
# Calculate color temps based on white LED status
if temp_cold or temp_warm:
self._ct = round(
self._max_mireds
- ((temp_cold / 255) * (self._max_mireds - self._min_mireds))
)
if not (
self._color_channels & COLOR_CHANNEL_RED
or self._color_channels & COLOR_CHANNEL_GREEN
or self._color_channels & COLOR_CHANNEL_BLUE
):
self._hs = None
|
from homeassistant.components.fan import SUPPORT_DIRECTION, SUPPORT_SET_SPEED
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_simpleconnect_fan_setup(hass):
"""Test that a SIMPLEconnect fan can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "simpleconnect_fan.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Check that the fan is correctly found and set up
fan_id = "fan.simpleconnect_fan_06f674"
fan = entity_registry.async_get(fan_id)
assert fan.unique_id == "homekit-1234567890abcd-8"
fan_helper = Helper(
hass,
"fan.simpleconnect_fan_06f674",
pairing,
accessories[0],
config_entry,
)
fan_state = await fan_helper.poll_and_get_state()
assert fan_state.attributes["friendly_name"] == "SIMPLEconnect Fan-06F674"
assert fan_state.state == "off"
assert fan_state.attributes["supported_features"] == (
SUPPORT_DIRECTION | SUPPORT_SET_SPEED
)
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(fan.device_id)
assert device.manufacturer == "Hunter Fan"
assert device.name == "SIMPLEconnect Fan-06F674"
assert device.model == "SIMPLEconnect"
assert device.sw_version == ""
assert device.via_device_id is None
|
import logging
import attr
import pytest
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject
from qutebrowser.browser import signalfilter
class Signaller(QObject):
signal = pyqtSignal(str)
link_hovered = pyqtSignal(str)
filtered_signal = pyqtSignal(str)
cur_link_hovered = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.filtered_signal_arg = None
self.filtered_signal.connect(self.filtered_signal_slot)
@pyqtSlot(str)
def filtered_signal_slot(self, s):
self.filtered_signal_arg = s
@attr.s
class Objects:
signal_filter = attr.ib()
signaller = attr.ib()
@pytest.fixture
def objects():
signal_filter = signalfilter.SignalFilter(0)
tab = None
signaller = Signaller()
signaller.signal.connect(
signal_filter.create(signaller.filtered_signal, tab))
signaller.link_hovered.connect(
signal_filter.create(signaller.cur_link_hovered, tab))
return Objects(signal_filter=signal_filter, signaller=signaller)
@pytest.mark.parametrize('index_of, emitted', [(0, True), (1, False)])
def test_filtering(objects, tabbed_browser_stubs, index_of, emitted):
browser = tabbed_browser_stubs[0]
browser.widget.current_index = 0
browser.widget.index_of = index_of
objects.signaller.signal.emit('foo')
if emitted:
assert objects.signaller.filtered_signal_arg == 'foo'
else:
assert objects.signaller.filtered_signal_arg is None
@pytest.mark.parametrize('index_of, verb', [(0, 'emitting'), (1, 'ignoring')])
def test_logging(caplog, objects, tabbed_browser_stubs, index_of, verb):
browser = tabbed_browser_stubs[0]
browser.widget.current_index = 0
browser.widget.index_of = index_of
with caplog.at_level(logging.DEBUG, logger='signals'):
objects.signaller.signal.emit('foo')
expected_msg = "{}: filtered_signal('foo') (tab {})".format(verb, index_of)
assert caplog.messages == [expected_msg]
@pytest.mark.parametrize('index_of', [0, 1])
def test_no_logging(caplog, objects, tabbed_browser_stubs, index_of):
browser = tabbed_browser_stubs[0]
browser.widget.current_index = 0
browser.widget.index_of = index_of
with caplog.at_level(logging.DEBUG, logger='signals'):
objects.signaller.link_hovered.emit('foo')
assert not caplog.records
def test_runtime_error(objects, tabbed_browser_stubs):
"""Test that there's no crash if indexOf() raises RuntimeError."""
browser = tabbed_browser_stubs[0]
browser.widget.current_index = 0
browser.widget.index_of = RuntimeError
objects.signaller.signal.emit('foo')
assert objects.signaller.filtered_signal_arg is None
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging as std_logging
import os
import sys
import threading
import time
import timeit
from absl import app
from absl import flags
from absl import logging
import mock
from six.moves import xrange # pylint: disable=redefined-builtin
FLAGS = flags.FLAGS
class VerboseDel(object):
"""Dummy class to test __del__ running."""
def __init__(self, msg):
self._msg = msg
def __del__(self):
sys.stderr.write(self._msg)
sys.stderr.flush()
def _test_do_logging():
"""Do some log operations."""
logging.vlog(3, 'This line is VLOG level 3')
logging.vlog(2, 'This line is VLOG level 2')
logging.log(2, 'This line is log level 2')
if logging.vlog_is_on(2):
logging.log(1, 'VLOG level 1, but only if VLOG level 2 is active')
logging.vlog(1, 'This line is VLOG level 1')
logging.log(1, 'This line is log level 1')
logging.debug('This line is DEBUG')
logging.vlog(0, 'This line is VLOG level 0')
logging.log(0, 'This line is log level 0')
logging.info('Interesting Stuff\0')
logging.info('Interesting Stuff with Arguments: %d', 42)
logging.info('%(a)s Stuff with %(b)s',
{'a': 'Interesting', 'b': 'Dictionary'})
with mock.patch.object(timeit, 'default_timer') as mock_timer:
mock_timer.return_value = 0
while timeit.default_timer() < 9:
logging.log_every_n_seconds(logging.INFO, 'This should appear 5 times.',
2)
mock_timer.return_value = mock_timer() + .2
for i in xrange(1, 5):
logging.log_first_n(logging.INFO, 'Info first %d of %d', 2, i, 2)
logging.log_every_n(logging.INFO, 'Info %d (every %d)', 3, i, 3)
logging.vlog(-1, 'This line is VLOG level -1')
logging.log(-1, 'This line is log level -1')
logging.warning('Worrying Stuff')
for i in xrange(1, 5):
logging.log_first_n(logging.WARNING, 'Warn first %d of %d', 2, i, 2)
logging.log_every_n(logging.WARNING, 'Warn %d (every %d)', 3, i, 3)
logging.vlog(-2, 'This line is VLOG level -2')
logging.log(-2, 'This line is log level -2')
try:
raise OSError('Fake Error')
except OSError:
saved_exc_info = sys.exc_info()
logging.exception('An Exception %s')
logging.exception('Once more, %(reason)s', {'reason': 'just because'})
logging.error('Exception 2 %s', exc_info=True)
logging.error('Non-exception', exc_info=False)
try:
sys.exc_clear()
except AttributeError:
# No sys.exc_clear() in Python 3, but this will clear sys.exc_info() too.
pass
logging.error('Exception %s', '3', exc_info=saved_exc_info)
logging.error('No traceback', exc_info=saved_exc_info[:2] + (None,))
logging.error('Alarming Stuff')
for i in xrange(1, 5):
logging.log_first_n(logging.ERROR, 'Error first %d of %d', 2, i, 2)
logging.log_every_n(logging.ERROR, 'Error %d (every %d)', 3, i, 3)
logging.flush()
def _test_fatal_main_thread_only():
"""Test logging.fatal from main thread, no other threads running."""
v = VerboseDel('fatal_main_thread_only main del called\n')
try:
logging.fatal('fatal_main_thread_only message')
finally:
del v
def _test_fatal_with_other_threads():
"""Test logging.fatal from main thread, other threads running."""
lock = threading.Lock()
lock.acquire()
def sleep_forever(lock=lock):
v = VerboseDel('fatal_with_other_threads non-main del called\n')
try:
lock.release()
while True:
time.sleep(10000)
finally:
del v
v = VerboseDel('fatal_with_other_threads main del called\n')
try:
# Start new thread
t = threading.Thread(target=sleep_forever)
t.start()
# Wait for other thread
lock.acquire()
lock.release()
# Die
logging.fatal('fatal_with_other_threads message')
while True:
time.sleep(10000)
finally:
del v
def _test_fatal_non_main_thread():
"""Test logging.fatal from non main thread."""
lock = threading.Lock()
lock.acquire()
def die_soon(lock=lock):
v = VerboseDel('fatal_non_main_thread non-main del called\n')
try:
# Wait for signal from other thread
lock.acquire()
lock.release()
logging.fatal('fatal_non_main_thread message')
while True:
time.sleep(10000)
finally:
del v
v = VerboseDel('fatal_non_main_thread main del called\n')
try:
# Start new thread
t = threading.Thread(target=die_soon)
t.start()
# Signal other thread
lock.release()
# Wait for it to die
while True:
time.sleep(10000)
finally:
del v
def _test_critical_from_non_absl_logger():
"""Test CRITICAL logs from non-absl loggers."""
std_logging.critical('A critical message')
def _test_register_frame_to_skip():
"""Test skipping frames for line number reporting."""
def _getline():
def _getline_inner():
return logging.get_absl_logger().findCaller()[1]
return _getline_inner()
# Check register_frame_to_skip function to see if log frame skipping works.
line1 = _getline()
line2 = _getline()
logging.get_absl_logger().register_frame_to_skip(__file__, '_getline')
line3 = _getline()
# Both should be line number of the _getline_inner() call.
assert (line1 == line2), (line1, line2)
# line3 should be a line number in this function.
assert (line2 != line3), (line2, line3)
def _test_flush():
"""Test flush in various difficult cases."""
# Flush, but one of the logfiles is closed
log_filename = os.path.join(FLAGS.log_dir, 'a_thread_with_logfile.txt')
with open(log_filename, 'w') as log_file:
logging.get_absl_handler().python_handler.stream = log_file
logging.flush()
def _test_stderrthreshold():
"""Tests modifying --stderrthreshold after flag parsing will work."""
def log_things():
logging.debug('FLAGS.stderrthreshold=%s, debug log', FLAGS.stderrthreshold)
logging.info('FLAGS.stderrthreshold=%s, info log', FLAGS.stderrthreshold)
logging.warning('FLAGS.stderrthreshold=%s, warning log',
FLAGS.stderrthreshold)
logging.error('FLAGS.stderrthreshold=%s, error log', FLAGS.stderrthreshold)
FLAGS.stderrthreshold = 'debug'
log_things()
FLAGS.stderrthreshold = 'info'
log_things()
FLAGS.stderrthreshold = 'warning'
log_things()
FLAGS.stderrthreshold = 'error'
log_things()
def _test_std_logging():
"""Tests logs from std logging."""
std_logging.debug('std debug log')
std_logging.info('std info log')
std_logging.warning('std warning log')
std_logging.error('std error log')
def _test_bad_exc_info():
"""Tests when a bad exc_info valud is provided."""
logging.info('Bad exc_info', exc_info=(None, None))
def _test_none_exc_info():
"""Tests when exc_info is requested but not available."""
# Clear exc_info first.
try:
sys.exc_clear()
except AttributeError:
# No sys.exc_clear() in Python 3, but this will clear sys.exc_info() too.
pass
logging.info('None exc_info', exc_info=True)
def _test_unicode():
"""Tests unicode handling."""
test_names = []
def log(name, msg, *args):
"""Logs the message, and ensures the same name is not logged again."""
assert name not in test_names, ('test_unicode expects unique names to work,'
' found existing name {}').format(name)
test_names.append(name)
    # Add line separators so that tests can verify the output for each log
# message.
sys.stderr.write('-- begin {} --\n'.format(name))
logging.info(msg, *args)
sys.stderr.write('-- end {} --\n'.format(name))
log('unicode', u'G\u00eete: Ch\u00e2tonnaye')
log('unicode % unicode', u'G\u00eete: %s', u'Ch\u00e2tonnaye')
log('bytes % bytes', u'G\u00eete: %s'.encode('utf-8'),
u'Ch\u00e2tonnaye'.encode('utf-8'))
log('unicode % bytes', u'G\u00eete: %s', u'Ch\u00e2tonnaye'.encode('utf-8'))
log('bytes % unicode', u'G\u00eete: %s'.encode('utf-8'), u'Ch\u00e2tonnaye')
log('unicode % iso8859-15', u'G\u00eete: %s',
u'Ch\u00e2tonnaye'.encode('iso-8859-15'))
log('str % exception', 'exception: %s', Exception(u'Ch\u00e2tonnaye'))
def main(argv):
del argv # Unused.
test_name = os.environ.get('TEST_NAME', None)
test_fn = globals().get('_test_%s' % test_name)
if test_fn is None:
raise AssertionError('TEST_NAME must be set to a valid value')
# Flush so previous messages are written to file before we switch to a new
# file with use_absl_log_file.
logging.flush()
if os.environ.get('USE_ABSL_LOG_FILE') == '1':
logging.get_absl_handler().use_absl_log_file('absl_log_file', FLAGS.log_dir)
test_fn()
if __name__ == '__main__':
sys.argv[0] = 'py_argv_0'
app.run(main)
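# A minimal sketch (not part of the helper above) of how a test driver might
# invoke it: the TEST_NAME and USE_ABSL_LOG_FILE environment variables come
# from main() above, --log_dir is absl's standard flag, and the helper file
# name used here is a hypothetical placeholder.
import subprocess

def run_logging_helper(test_name, log_dir, use_absl_log_file=False):
    """Run the helper in a subprocess so main() dispatches to _test_<name>."""
    env = dict(os.environ)
    env['TEST_NAME'] = test_name
    if use_absl_log_file:
        env['USE_ABSL_LOG_FILE'] = '1'
    return subprocess.run(
        [sys.executable, 'logging_functional_test_helper.py',  # hypothetical path
         '--log_dir', log_dir],
        env=env, capture_output=True, text=True)

# Example: run_logging_helper('std_logging', '/tmp/absl_logs')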
|
import datetime
from homematicip.base.enums import AbsenceType
from homematicip.functionalHomes import IndoorClimateHome
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_HVAC_ACTION,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
)
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.homematicip_cloud.climate import (
ATTR_PRESET_END_TIME,
PERMANENT_END_TIME,
)
from homeassistant.setup import async_setup_component
from .helper import HAPID, async_manipulate_test_data, get_and_check_entity_basics
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, CLIMATE_DOMAIN, {CLIMATE_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_heating_group_heat(hass, default_mock_hap_factory):
"""Test HomematicipHeatingGroup."""
entity_id = "climate.badezimmer"
entity_name = "Badezimmer"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wandthermostat", "Heizkörperthermostat3"],
test_groups=[entity_name],
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes["current_temperature"] == 23.8
assert ha_state.attributes["min_temp"] == 5.0
assert ha_state.attributes["max_temp"] == 30.0
assert ha_state.attributes["temperature"] == 5.0
assert ha_state.attributes["current_humidity"] == 47
assert ha_state.attributes[ATTR_PRESET_MODE] == "STD"
assert ha_state.attributes[ATTR_PRESET_MODES] == [PRESET_BOOST, "STD", "Winter"]
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"climate",
"set_temperature",
{"entity_id": entity_id, "temperature": 22.5},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "set_point_temperature"
assert hmip_device.mock_calls[-1][1] == (22.5,)
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 22.5)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_CURRENT_TEMPERATURE] == 22.5
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": HVAC_MODE_HEAT},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "set_control_mode"
assert hmip_device.mock_calls[-1][1] == ("MANUAL",)
await async_manipulate_test_data(hass, hmip_device, "controlMode", "MANUAL")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_HEAT
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": HVAC_MODE_AUTO},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 5
assert hmip_device.mock_calls[-1][0] == "set_control_mode"
assert hmip_device.mock_calls[-1][1] == ("AUTOMATIC",)
await async_manipulate_test_data(hass, hmip_device, "controlMode", "AUTO")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": PRESET_BOOST},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 7
assert hmip_device.mock_calls[-1][0] == "set_boost"
assert hmip_device.mock_calls[-1][1] == ()
await async_manipulate_test_data(hass, hmip_device, "boostMode", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "STD"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 11
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (0,)
await async_manipulate_test_data(hass, hmip_device, "boostMode", False)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_PRESET_MODE] == "STD"
# Not required for hmip, but a possibility to send no temperature.
await hass.services.async_call(
"climate",
"set_temperature",
{"entity_id": entity_id, "target_temp_low": 10, "target_temp_high": 10},
blocking=True,
)
# No new service call should be in mock_calls.
assert len(hmip_device.mock_calls) == service_call_counter + 12
# Only the fire_update_event from the last async_manipulate_test_data is present.
assert hmip_device.mock_calls[-1][0] == "fire_update_event"
await async_manipulate_test_data(hass, hmip_device, "controlMode", "ECO")
await async_manipulate_test_data(
hass,
mock_hap.home.get_functionalHome(IndoorClimateHome),
"absenceType",
AbsenceType.VACATION,
fire_device=hmip_device,
)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY
await async_manipulate_test_data(hass, hmip_device, "controlMode", "ECO")
await async_manipulate_test_data(
hass,
mock_hap.home.get_functionalHome(IndoorClimateHome),
"absenceType",
AbsenceType.PERIOD,
fire_device=hmip_device,
)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_PRESET_MODE] == PRESET_ECO
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "Winter"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 18
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (1,)
mock_hap.home.get_functionalHome(
IndoorClimateHome
).absenceType = AbsenceType.PERMANENT
await async_manipulate_test_data(hass, hmip_device, "controlMode", "ECO")
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_PRESET_END_TIME] == PERMANENT_END_TIME
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": HVAC_MODE_HEAT},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 20
assert hmip_device.mock_calls[-1][0] == "set_control_mode"
assert hmip_device.mock_calls[-1][1] == ("MANUAL",)
await async_manipulate_test_data(hass, hmip_device, "controlMode", "MANUAL")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_HEAT
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "Winter"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 23
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (1,)
hmip_device.activeProfile = hmip_device.profiles[0]
await async_manipulate_test_data(hass, hmip_device, "controlMode", "AUTOMATIC")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": "dry"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 24
# Only the fire_update_event from the last async_manipulate_test_data is present.
assert hmip_device.mock_calls[-1][0] == "fire_update_event"
await async_manipulate_test_data(hass, hmip_device, "floorHeatingMode", "RADIATOR")
await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.1)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_HEAT
await async_manipulate_test_data(hass, hmip_device, "floorHeatingMode", "RADIATOR")
await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.0)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
async def test_hmip_heating_group_cool(hass, default_mock_hap_factory):
"""Test HomematicipHeatingGroup."""
entity_id = "climate.badezimmer"
entity_name = "Badezimmer"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_groups=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
hmip_device.activeProfile = hmip_device.profiles[3]
await async_manipulate_test_data(hass, hmip_device, "cooling", True)
await async_manipulate_test_data(hass, hmip_device, "coolingAllowed", True)
await async_manipulate_test_data(hass, hmip_device, "coolingIgnored", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes["current_temperature"] == 23.8
assert ha_state.attributes["min_temp"] == 5.0
assert ha_state.attributes["max_temp"] == 30.0
assert ha_state.attributes["temperature"] == 5.0
assert ha_state.attributes["current_humidity"] == 47
assert ha_state.attributes[ATTR_PRESET_MODE] == "Cool1"
assert ha_state.attributes[ATTR_PRESET_MODES] == ["Cool1", "Cool2"]
service_call_counter = len(hmip_device.mock_calls)
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": HVAC_MODE_COOL},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 1
assert hmip_device.mock_calls[-1][0] == "set_control_mode"
assert hmip_device.mock_calls[-1][1] == ("MANUAL",)
await async_manipulate_test_data(hass, hmip_device, "controlMode", "MANUAL")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_COOL
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": entity_id, "hvac_mode": HVAC_MODE_AUTO},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 3
assert hmip_device.mock_calls[-1][0] == "set_control_mode"
assert hmip_device.mock_calls[-1][1] == ("AUTOMATIC",)
await async_manipulate_test_data(hass, hmip_device, "controlMode", "AUTO")
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "Cool2"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 6
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (4,)
hmip_device.activeProfile = hmip_device.profiles[4]
await async_manipulate_test_data(hass, hmip_device, "cooling", True)
await async_manipulate_test_data(hass, hmip_device, "coolingAllowed", False)
await async_manipulate_test_data(hass, hmip_device, "coolingIgnored", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_OFF
assert ha_state.attributes[ATTR_PRESET_MODE] == "none"
assert ha_state.attributes[ATTR_PRESET_MODES] == []
hmip_device.activeProfile = hmip_device.profiles[4]
await async_manipulate_test_data(hass, hmip_device, "cooling", True)
await async_manipulate_test_data(hass, hmip_device, "coolingAllowed", True)
await async_manipulate_test_data(hass, hmip_device, "coolingIgnored", True)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_OFF
assert ha_state.attributes[ATTR_PRESET_MODE] == "none"
assert ha_state.attributes[ATTR_PRESET_MODES] == []
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "Cool2"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 12
# fire_update_event shows that set_active_profile has not been called.
assert hmip_device.mock_calls[-1][0] == "fire_update_event"
hmip_device.activeProfile = hmip_device.profiles[4]
await async_manipulate_test_data(hass, hmip_device, "cooling", True)
await async_manipulate_test_data(hass, hmip_device, "coolingAllowed", True)
await async_manipulate_test_data(hass, hmip_device, "coolingIgnored", False)
ha_state = hass.states.get(entity_id)
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes[ATTR_PRESET_MODE] == "Cool2"
assert ha_state.attributes[ATTR_PRESET_MODES] == ["Cool1", "Cool2"]
await hass.services.async_call(
"climate",
"set_preset_mode",
{"entity_id": entity_id, "preset_mode": "Cool2"},
blocking=True,
)
assert len(hmip_device.mock_calls) == service_call_counter + 17
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (4,)
async def test_hmip_heating_group_heat_with_switch(hass, default_mock_hap_factory):
"""Test HomematicipHeatingGroup."""
entity_id = "climate.schlafzimmer"
entity_name = "Schlafzimmer"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wandthermostat", "Heizkörperthermostat", "Pc"],
test_groups=[entity_name],
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes["current_temperature"] == 24.7
assert ha_state.attributes["min_temp"] == 5.0
assert ha_state.attributes["max_temp"] == 30.0
assert ha_state.attributes["temperature"] == 5.0
assert ha_state.attributes["current_humidity"] == 43
assert ha_state.attributes[ATTR_PRESET_MODE] == "STD"
assert ha_state.attributes[ATTR_PRESET_MODES] == [PRESET_BOOST, "STD", "P2"]
async def test_hmip_heating_group_heat_with_radiator(hass, default_mock_hap_factory):
"""Test HomematicipHeatingGroup."""
entity_id = "climate.vorzimmer"
entity_name = "Vorzimmer"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Heizkörperthermostat2"],
test_groups=[entity_name],
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == HVAC_MODE_AUTO
assert ha_state.attributes["current_temperature"] == 20
assert ha_state.attributes["min_temp"] == 5.0
assert ha_state.attributes["max_temp"] == 30.0
assert ha_state.attributes["temperature"] == 5.0
assert ha_state.attributes[ATTR_PRESET_MODE] is None
assert ha_state.attributes[ATTR_PRESET_MODES] == [PRESET_NONE, PRESET_BOOST]
async def test_hmip_climate_services(hass, mock_hap_with_service):
"""Test HomematicipHeatingGroup."""
home = mock_hap_with_service.home
await hass.services.async_call(
"homematicip_cloud",
"activate_eco_mode_with_duration",
{"duration": 60, "accesspoint_id": HAPID},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_absence_with_duration"
assert home.mock_calls[-1][1] == (60,)
assert len(home._connection.mock_calls) == 1 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"activate_eco_mode_with_duration",
{"duration": 60},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_absence_with_duration"
assert home.mock_calls[-1][1] == (60,)
assert len(home._connection.mock_calls) == 2 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"activate_eco_mode_with_period",
{"endtime": "2019-02-17 14:00", "accesspoint_id": HAPID},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_absence_with_period"
assert home.mock_calls[-1][1] == (datetime.datetime(2019, 2, 17, 14, 0),)
assert len(home._connection.mock_calls) == 3 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"activate_eco_mode_with_period",
{"endtime": "2019-02-17 14:00"},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_absence_with_period"
assert home.mock_calls[-1][1] == (datetime.datetime(2019, 2, 17, 14, 0),)
assert len(home._connection.mock_calls) == 4 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"activate_vacation",
{"endtime": "2019-02-17 14:00", "temperature": 18.5, "accesspoint_id": HAPID},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_vacation"
assert home.mock_calls[-1][1] == (datetime.datetime(2019, 2, 17, 14, 0), 18.5)
assert len(home._connection.mock_calls) == 5 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"activate_vacation",
{"endtime": "2019-02-17 14:00", "temperature": 18.5},
blocking=True,
)
assert home.mock_calls[-1][0] == "activate_vacation"
assert home.mock_calls[-1][1] == (datetime.datetime(2019, 2, 17, 14, 0), 18.5)
assert len(home._connection.mock_calls) == 6 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"deactivate_eco_mode",
{"accesspoint_id": HAPID},
blocking=True,
)
assert home.mock_calls[-1][0] == "deactivate_absence"
assert home.mock_calls[-1][1] == ()
assert len(home._connection.mock_calls) == 7 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud", "deactivate_eco_mode", blocking=True
)
assert home.mock_calls[-1][0] == "deactivate_absence"
assert home.mock_calls[-1][1] == ()
assert len(home._connection.mock_calls) == 8 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud",
"deactivate_vacation",
{"accesspoint_id": HAPID},
blocking=True,
)
assert home.mock_calls[-1][0] == "deactivate_vacation"
assert home.mock_calls[-1][1] == ()
assert len(home._connection.mock_calls) == 9 # pylint: disable=protected-access
await hass.services.async_call(
"homematicip_cloud", "deactivate_vacation", blocking=True
)
assert home.mock_calls[-1][0] == "deactivate_vacation"
assert home.mock_calls[-1][1] == ()
assert len(home._connection.mock_calls) == 10 # pylint: disable=protected-access
not_existing_hap_id = "5555F7110000000000000001"
await hass.services.async_call(
"homematicip_cloud",
"deactivate_vacation",
{"accesspoint_id": not_existing_hap_id},
blocking=True,
)
assert home.mock_calls[-1][0] == "deactivate_vacation"
assert home.mock_calls[-1][1] == ()
# There is no further call on connection.
assert len(home._connection.mock_calls) == 10 # pylint: disable=protected-access
async def test_hmip_heating_group_services(hass, default_mock_hap_factory):
"""Test HomematicipHeatingGroup services."""
entity_id = "climate.badezimmer"
entity_name = "Badezimmer"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_groups=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state
await hass.services.async_call(
"homematicip_cloud",
"set_active_climate_profile",
{"climate_profile_index": 2, "entity_id": "climate.badezimmer"},
blocking=True,
)
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (1,)
assert (
len(hmip_device._connection.mock_calls) == 2 # pylint: disable=protected-access
)
await hass.services.async_call(
"homematicip_cloud",
"set_active_climate_profile",
{"climate_profile_index": 2, "entity_id": "all"},
blocking=True,
)
assert hmip_device.mock_calls[-1][0] == "set_active_profile"
assert hmip_device.mock_calls[-1][1] == (1,)
assert (
len(hmip_device._connection.mock_calls) == 4 # pylint: disable=protected-access
)
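# The assertions above lean on unittest.mock call records: mock_calls[-1][0]
# is the name of the most recent call and mock_calls[-1][1] its positional
# arguments. A self-contained illustration of that pattern (a sketch only, not
# part of the Home Assistant test suite):
def _demo_mock_call_records():
    from unittest import mock
    device = mock.MagicMock()
    device.set_point_temperature(22.5)
    # The last recorded call exposes its name and positional arguments.
    assert device.mock_calls[-1][0] == "set_point_temperature"
    assert device.mock_calls[-1][1] == (22.5,)
    assert len(device.mock_calls) == 1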
|
import voluptuous as vol
from homeassistant.const import CONF_EVENT, CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HassJob, callback
# mypy: allow-untyped-defs
EVENT_START = "start"
EVENT_SHUTDOWN = "shutdown"
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "homeassistant",
vol.Required(CONF_EVENT): vol.Any(EVENT_START, EVENT_SHUTDOWN),
}
)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for events based on configuration."""
event = config.get(CONF_EVENT)
job = HassJob(action)
if event == EVENT_SHUTDOWN:
@callback
def hass_shutdown(shutdown_event):
"""Execute when Home Assistant is shutting down."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "homeassistant",
"event": event,
"description": "Home Assistant stopping",
}
},
shutdown_event.context,
)
return hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, hass_shutdown)
# Automations are enabled while hass is starting up, fire right away
# Check state because a config reload shouldn't trigger it.
if automation_info["home_assistant_start"]:
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "homeassistant",
"event": event,
"description": "Home Assistant starting",
}
},
)
return lambda: None
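# A small sketch of how TRIGGER_SCHEMA behaves, assuming it lives in this
# module: voluptuous returns the validated config unchanged for the two
# allowed events and raises vol.Invalid for anything else.
def _demo_trigger_schema():
    validated = TRIGGER_SCHEMA({"platform": "homeassistant", "event": EVENT_START})
    assert validated[CONF_EVENT] == EVENT_START
    try:
        TRIGGER_SCHEMA({"platform": "homeassistant", "event": "reload"})
    except vol.Invalid:
        pass  # "reload" is neither EVENT_START nor EVENT_SHUTDOWN
    else:
        raise AssertionError("expected vol.Invalid for an unsupported event")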
|
from __future__ import division
import argparse
import numpy as np
import chainer
from chainer.datasets import ConcatenatedDataset
from chainer.datasets import TransformDataset
from chainer import training
from chainer.training import extensions
from chainer.training.triggers import ManualScheduleTrigger
from chainercv.datasets import voc_bbox_label_names
from chainercv.datasets import VOCBboxDataset
from chainercv.extensions import DetectionVOCEvaluator
from chainercv.links import FasterRCNNVGG16
from chainercv.links.model.faster_rcnn import FasterRCNNTrainChain
from chainercv import transforms
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
class Transform(object):
def __init__(self, faster_rcnn):
self.faster_rcnn = faster_rcnn
def __call__(self, in_data):
img, bbox, label = in_data
_, H, W = img.shape
img = self.faster_rcnn.prepare(img)
_, o_H, o_W = img.shape
scale = o_H / H
bbox = transforms.resize_bbox(bbox, (H, W), (o_H, o_W))
# horizontally flip
img, params = transforms.random_flip(
img, x_random=True, return_param=True)
bbox = transforms.flip_bbox(
bbox, (o_H, o_W), x_flip=params['x_flip'])
return img, bbox, label, scale
def main():
parser = argparse.ArgumentParser(
description='ChainerCV training example: Faster R-CNN')
parser.add_argument('--dataset', choices=('voc07', 'voc0712'),
help='The dataset to use: VOC07, VOC07+12',
default='voc07')
parser.add_argument('--gpu', '-g', type=int, default=-1)
parser.add_argument('--lr', '-l', type=float, default=1e-3)
parser.add_argument('--out', '-o', default='result',
help='Output directory')
parser.add_argument('--seed', '-s', type=int, default=0)
parser.add_argument('--step-size', '-ss', type=int, default=50000)
parser.add_argument('--iteration', '-i', type=int, default=70000)
args = parser.parse_args()
np.random.seed(args.seed)
if args.dataset == 'voc07':
train_data = VOCBboxDataset(split='trainval', year='2007')
elif args.dataset == 'voc0712':
train_data = ConcatenatedDataset(
VOCBboxDataset(year='2007', split='trainval'),
VOCBboxDataset(year='2012', split='trainval'))
test_data = VOCBboxDataset(split='test', year='2007',
use_difficult=True, return_difficult=True)
faster_rcnn = FasterRCNNVGG16(n_fg_class=len(voc_bbox_label_names),
pretrained_model='imagenet')
faster_rcnn.use_preset('evaluate')
model = FasterRCNNTrainChain(faster_rcnn)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
optimizer = chainer.optimizers.MomentumSGD(lr=args.lr, momentum=0.9)
optimizer.setup(model)
optimizer.add_hook(chainer.optimizer_hooks.WeightDecay(rate=0.0005))
train_data = TransformDataset(train_data, Transform(faster_rcnn))
train_iter = chainer.iterators.MultiprocessIterator(
train_data, batch_size=1, n_processes=None, shared_mem=100000000)
test_iter = chainer.iterators.SerialIterator(
test_data, batch_size=1, repeat=False, shuffle=False)
updater = chainer.training.updaters.StandardUpdater(
train_iter, optimizer, device=args.gpu)
trainer = training.Trainer(
updater, (args.iteration, 'iteration'), out=args.out)
trainer.extend(
extensions.snapshot_object(model.faster_rcnn, 'snapshot_model.npz'),
trigger=(args.iteration, 'iteration'))
trainer.extend(extensions.ExponentialShift('lr', 0.1),
trigger=(args.step_size, 'iteration'))
log_interval = 20, 'iteration'
plot_interval = 3000, 'iteration'
print_interval = 20, 'iteration'
trainer.extend(chainer.training.extensions.observe_lr(),
trigger=log_interval)
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.PrintReport(
['iteration', 'epoch', 'elapsed_time', 'lr',
'main/loss',
'main/roi_loc_loss',
'main/roi_cls_loss',
'main/rpn_loc_loss',
'main/rpn_cls_loss',
'validation/main/map',
]), trigger=print_interval)
trainer.extend(extensions.ProgressBar(update_interval=10))
if extensions.PlotReport.available():
trainer.extend(
extensions.PlotReport(
['main/loss'],
file_name='loss.png', trigger=plot_interval
),
trigger=plot_interval
)
trainer.extend(
DetectionVOCEvaluator(
test_iter, model.faster_rcnn, use_07_metric=True,
label_names=voc_bbox_label_names),
trigger=ManualScheduleTrigger(
[args.step_size, args.iteration], 'iteration'))
trainer.extend(extensions.dump_graph('main/loss'))
trainer.run()
if __name__ == '__main__':
main()
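# A minimal sketch (separate from the training script) of what Transform does
# to bounding boxes, using chainercv.transforms on dummy data. ChainerCV
# bboxes are (R, 4) arrays of (y_min, x_min, y_max, x_max).
def _demo_bbox_transforms():
    import numpy as np
    from chainercv import transforms as T
    bbox = np.array([[10., 20., 50., 80.]], dtype=np.float32)
    # Rescale boxes from a 100x100 input to a 200x200 prepared image.
    bbox = T.resize_bbox(bbox, (100, 100), (200, 200))
    # Mirror the boxes horizontally, as random_flip with x_flip=True implies.
    bbox = T.flip_bbox(bbox, (200, 200), x_flip=True)
    return bbox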
|
from typing import Callable
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_WINDOW,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from . import NotionEntity
from .const import (
DATA_COORDINATOR,
DOMAIN,
SENSOR_BATTERY,
SENSOR_DOOR,
SENSOR_GARAGE_DOOR,
SENSOR_LEAK,
SENSOR_MISSING,
SENSOR_SAFE,
SENSOR_SLIDING,
SENSOR_SMOKE_CO,
SENSOR_WINDOW_HINGED_HORIZONTAL,
SENSOR_WINDOW_HINGED_VERTICAL,
)
BINARY_SENSOR_TYPES = {
SENSOR_BATTERY: ("Low Battery", "battery"),
SENSOR_DOOR: ("Door", DEVICE_CLASS_DOOR),
SENSOR_GARAGE_DOOR: ("Garage Door", "garage_door"),
SENSOR_LEAK: ("Leak Detector", DEVICE_CLASS_MOISTURE),
SENSOR_MISSING: ("Missing", DEVICE_CLASS_CONNECTIVITY),
SENSOR_SAFE: ("Safe", DEVICE_CLASS_DOOR),
SENSOR_SLIDING: ("Sliding Door/Window", DEVICE_CLASS_DOOR),
SENSOR_SMOKE_CO: ("Smoke/Carbon Monoxide Detector", DEVICE_CLASS_SMOKE),
SENSOR_WINDOW_HINGED_HORIZONTAL: ("Hinged Window", DEVICE_CLASS_WINDOW),
SENSOR_WINDOW_HINGED_VERTICAL: ("Hinged Window", DEVICE_CLASS_WINDOW),
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: Callable
):
"""Set up Notion sensors based on a config entry."""
coordinator = hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id]
sensor_list = []
for task_id, task in coordinator.data["tasks"].items():
if task["task_type"] not in BINARY_SENSOR_TYPES:
continue
name, device_class = BINARY_SENSOR_TYPES[task["task_type"]]
sensor = coordinator.data["sensors"][task["sensor_id"]]
sensor_list.append(
NotionBinarySensor(
coordinator,
task_id,
sensor["id"],
sensor["bridge"]["id"],
sensor["system_id"],
name,
device_class,
)
)
async_add_entities(sensor_list)
class NotionBinarySensor(NotionEntity, BinarySensorEntity):
"""Define a Notion sensor."""
@callback
def _async_update_from_latest_data(self) -> None:
"""Fetch new state data for the sensor."""
self._state = self.coordinator.data["tasks"][self._task_id]["status"]["value"]
@property
def is_on(self) -> bool:
"""Return whether the sensor is on or off."""
task = self.coordinator.data["tasks"][self._task_id]
if task["task_type"] == SENSOR_BATTERY:
return self._state != "battery_good"
if task["task_type"] in (
SENSOR_DOOR,
SENSOR_GARAGE_DOOR,
SENSOR_SAFE,
SENSOR_SLIDING,
SENSOR_WINDOW_HINGED_HORIZONTAL,
SENSOR_WINDOW_HINGED_VERTICAL,
):
return self._state != "closed"
if task["task_type"] == SENSOR_LEAK:
return self._state != "no_leak"
if task["task_type"] == SENSOR_MISSING:
return self._state == "not_missing"
if task["task_type"] == SENSOR_SMOKE_CO:
return self._state != "no_alarm"
|
import logging
from pyhap.const import CATEGORY_DOOR_LOCK
from homeassistant.components.lock import DOMAIN, STATE_LOCKED, STATE_UNLOCKED
from homeassistant.const import ATTR_CODE, ATTR_ENTITY_ID, STATE_UNKNOWN
from homeassistant.core import callback
from .accessories import TYPES, HomeAccessory
from .const import CHAR_LOCK_CURRENT_STATE, CHAR_LOCK_TARGET_STATE, SERV_LOCK
_LOGGER = logging.getLogger(__name__)
HASS_TO_HOMEKIT = {
STATE_UNLOCKED: 0,
STATE_LOCKED: 1,
# Value 2 is Jammed which hass doesn't have a state for
STATE_UNKNOWN: 3,
}
HOMEKIT_TO_HASS = {c: s for s, c in HASS_TO_HOMEKIT.items()}
STATE_TO_SERVICE = {STATE_LOCKED: "lock", STATE_UNLOCKED: "unlock"}
@TYPES.register("Lock")
class Lock(HomeAccessory):
"""Generate a Lock accessory for a lock entity.
The lock entity must support: unlock and lock.
"""
def __init__(self, *args):
"""Initialize a Lock accessory object."""
super().__init__(*args, category=CATEGORY_DOOR_LOCK)
self._code = self.config.get(ATTR_CODE)
state = self.hass.states.get(self.entity_id)
serv_lock_mechanism = self.add_preload_service(SERV_LOCK)
self.char_current_state = serv_lock_mechanism.configure_char(
CHAR_LOCK_CURRENT_STATE, value=HASS_TO_HOMEKIT[STATE_UNKNOWN]
)
self.char_target_state = serv_lock_mechanism.configure_char(
CHAR_LOCK_TARGET_STATE,
value=HASS_TO_HOMEKIT[STATE_LOCKED],
setter_callback=self.set_state,
)
self.async_update_state(state)
def set_state(self, value):
"""Set lock state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set state to %d", self.entity_id, value)
hass_value = HOMEKIT_TO_HASS.get(value)
service = STATE_TO_SERVICE[hass_value]
if self.char_current_state.value != value:
self.char_current_state.set_value(value)
params = {ATTR_ENTITY_ID: self.entity_id}
if self._code:
params[ATTR_CODE] = self._code
self.call_service(DOMAIN, service, params)
@callback
def async_update_state(self, new_state):
"""Update lock after state changed."""
hass_state = new_state.state
if hass_state in HASS_TO_HOMEKIT:
current_lock_state = HASS_TO_HOMEKIT[hass_state]
_LOGGER.debug(
"%s: Updated current state to %s (%d)",
self.entity_id,
hass_state,
current_lock_state,
)
# LockTargetState only supports locked and unlocked
# Must set lock target state before current state
# or there will be no notification
if hass_state in (STATE_LOCKED, STATE_UNLOCKED):
if self.char_target_state.value != current_lock_state:
self.char_target_state.set_value(current_lock_state)
# Set lock current state ONLY after ensuring that
# target state is correct or there will be no
# notification
if self.char_current_state.value != current_lock_state:
self.char_current_state.set_value(current_lock_state)
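# A short illustration of the state maps above, assuming it sits in this
# module: HOMEKIT_TO_HASS is the inverse of HASS_TO_HOMEKIT, so known states
# round-trip, while HomeKit's "jammed" value (2) has no hass equivalent.
def _demo_lock_state_maps():
    for hass_state, homekit_value in HASS_TO_HOMEKIT.items():
        assert HOMEKIT_TO_HASS[homekit_value] == hass_state
    assert HOMEKIT_TO_HASS.get(2) is None  # jammed: no hass state
    assert STATE_TO_SERVICE[HOMEKIT_TO_HASS[1]] == "lock"
    assert STATE_TO_SERVICE[HOMEKIT_TO_HASS[0]] == "unlock"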
|
from datetime import timedelta
from unittest import mock
from aiohomekit.exceptions import AccessoryDisconnectedError, EncryptionError
from aiohomekit.testing import FakePairing
import pytest
from homeassistant.components.light import SUPPORT_BRIGHTNESS, SUPPORT_COLOR
import homeassistant.util.dt as dt_util
from tests.common import async_fire_time_changed
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
LIGHT_ON = ("lightbulb", "on")
async def test_koogeek_ls1_setup(hass):
"""Test that a Koogeek LS1 can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "koogeek_ls1.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Assert that the entity is correctly added to the entity registry
entry = entity_registry.async_get("light.koogeek_ls1_20833f")
assert entry.unique_id == "homekit-AAAA011111111111-7"
helper = Helper(
hass, "light.koogeek_ls1_20833f", pairing, accessories[0], config_entry
)
state = await helper.poll_and_get_state()
# Assert that the friendly name is detected correctly
assert state.attributes["friendly_name"] == "Koogeek-LS1-20833F"
# Assert that all optional features the LS1 supports are detected
assert state.attributes["supported_features"] == (
SUPPORT_BRIGHTNESS | SUPPORT_COLOR
)
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.manufacturer == "Koogeek"
assert device.name == "Koogeek-LS1-20833F"
assert device.model == "LS1"
assert device.sw_version == "2.2.15"
assert device.via_device_id is None
@pytest.mark.parametrize("failure_cls", [AccessoryDisconnectedError, EncryptionError])
async def test_recover_from_failure(hass, utcnow, failure_cls):
"""
Test that entity actually recovers from a network connection drop.
See https://github.com/home-assistant/core/issues/18949
"""
accessories = await setup_accessories_from_file(hass, "koogeek_ls1.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
helper = Helper(
hass, "light.koogeek_ls1_20833f", pairing, accessories[0], config_entry
)
# Set light state on fake device to off
helper.characteristics[LIGHT_ON].set_value(False)
# Test that entity starts off in a known state
state = await helper.poll_and_get_state()
assert state.state == "off"
# Set light state on fake device to on
helper.characteristics[LIGHT_ON].set_value(True)
# Test that entity remains in the same state if there is a network error
next_update = dt_util.utcnow() + timedelta(seconds=60)
with mock.patch.object(FakePairing, "get_characteristics") as get_char:
get_char.side_effect = failure_cls("Disconnected")
state = await helper.poll_and_get_state()
assert state.state == "off"
chars = get_char.call_args[0][0]
assert set(chars) == {(1, 8), (1, 9), (1, 10), (1, 11)}
# Test that entity changes state when network error goes away
next_update += timedelta(seconds=60)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = await helper.poll_and_get_state()
assert state.state == "on"
|
import argparse
import glob
import os
import time
import random
COLOURS = (b'\xFF\x00\x00', b'\x00\xFF\x00', b'\x00\x00\xFF', b'\xFF\xFF\x00', b'\xFF\x00\xFF', b'\x00\xFF\xFF')
def write_binary(driver_path, device_file, payload):
with open(os.path.join(driver_path, device_file), 'wb') as open_file:
open_file.write(payload)
def read_string(driver_path, device_file):
with open(os.path.join(driver_path, device_file), 'r') as open_file:
return open_file.read().rstrip('\n')
def write_string(driver_path, device_file, payload):
with open(os.path.join(driver_path, device_file), 'w') as open_file:
open_file.write(payload)
def find_devices(vid, pid):
driver_paths = glob.glob(os.path.join('/sys/bus/hid/drivers/razerkbd', '*:{0:04X}:{1:04X}.*'.format(vid, pid)))
for driver_path in driver_paths:
device_type_path = os.path.join(driver_path, 'device_type')
if os.path.exists(device_type_path):
yield driver_path
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--skip-standard', action='store_true')
parser.add_argument('--skip-custom', action='store_true')
parser.add_argument('--skip-game-led', action='store_true')
parser.add_argument('--skip-macro-led', action='store_true')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
found_chroma = False
for index, driver_path in enumerate(find_devices(0x1532, 0x021E), start=1):
found_chroma = True
print("Ornata Chroma {0}\n".format(index))
print("Driver version: {0}".format(read_string(driver_path, 'version')))
print("Driver firmware version: {0}".format(read_string(driver_path, 'firmware_version')))
print("Device serial: {0}".format(read_string(driver_path, 'device_serial')))
print("Device type: {0}".format(read_string(driver_path, 'device_type')))
print("Device mode: {0}".format(read_string(driver_path, 'device_mode')))
# Set to static red so that we have something standard
write_binary(driver_path, 'matrix_effect_static', b'\xFF\x00\x00')
if not args.skip_standard:
print("Starting brightness test. Press enter to begin.")
input()
print("Max brightness...", end='')
write_string(driver_path, 'matrix_brightness', '255')
time.sleep(1)
print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
time.sleep(1)
print("Half brightness...", end='')
write_string(driver_path, 'matrix_brightness', '128')
time.sleep(1)
print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
time.sleep(1)
print("Zero brightness...", end='')
write_string(driver_path, 'matrix_brightness', '0')
time.sleep(1)
print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
time.sleep(1)
write_string(driver_path, 'matrix_brightness', '255')
print("Starting other colour effect tests. Press enter to begin.")
input()
print("Green Static")
write_binary(driver_path, 'matrix_effect_static', b'\x00\xFF\x00')
time.sleep(5)
print("Cyan Static")
write_binary(driver_path, 'matrix_effect_static', b'\x00\xFF\xFF')
time.sleep(5)
print("Spectrum")
write_binary(driver_path, 'matrix_effect_spectrum', b'\x00')
time.sleep(10)
print("None")
write_binary(driver_path, 'matrix_effect_none', b'\x00')
time.sleep(5)
print("Wave Left")
write_string(driver_path, 'matrix_effect_wave', '0')
time.sleep(5)
print("Wave Right")
write_string(driver_path, 'matrix_effect_wave', '1')
time.sleep(5)
print("Breathing random")
write_binary(driver_path, 'matrix_effect_breath', b'\x00')
time.sleep(10)
print("Breathing red")
write_binary(driver_path, 'matrix_effect_breath', b'\xFF\x00\x00')
time.sleep(10)
print("Breathing blue-green")
write_binary(driver_path, 'matrix_effect_breath', b'\x00\xFF\x00\x00\x00\xFF')
time.sleep(10)
print("Starlight random fast")
write_binary(driver_path, 'matrix_effect_starlight', b'\x01')
time.sleep(5)
print("Starlight random slow")
write_binary(driver_path, 'matrix_effect_starlight', b'\x03')
time.sleep(10)
print("Starlight red fast")
write_binary(driver_path, 'matrix_effect_starlight', b'\x01\xFF\x00\x00')
time.sleep(10)
print("Starlight blue-green fast")
write_binary(driver_path, 'matrix_effect_starlight', b'\x01\x00\xFF\x00\x00\x00\xFF')
time.sleep(10)
if not args.skip_custom:
# Custom LEDs all rows
payload_all = b''
for row in range(0, 6): # 0x15 is 21. 0->21 inclusive
payload_all += row.to_bytes(1, byteorder='big') + b'\x00\x15'
for i in range(0, 22):
payload_all += random.choice(COLOURS)
# Custom LEDs M1-5
payload_m1_5 = b''
for row in range(0, 6): # start column 1, stop column 1 (one column per row)
payload_m1_5 += row.to_bytes(1, byteorder='big') + b'\x01\x01' + b'\xFF\xFF\xFF'
print("Custom LED matrix colours test. Press enter to begin.")
input()
write_binary(driver_path, 'matrix_custom_frame', payload_all)
write_binary(driver_path, 'matrix_effect_custom', b'\x00')
print("Custom LED matrix partial colours test. Setting left hand keys to white. Press enter to begin.")
input()
write_binary(driver_path, 'matrix_custom_frame', payload_m1_5)
write_binary(driver_path, 'matrix_effect_custom', b'\x00')
time.sleep(0.5)
if not args.skip_game_led:
# Game mode test
print("Starting game mode LED tests. Press enter to begin.")
input()
print("Enabling game mode LED")
write_string(driver_path, 'game_led_state', '1')
time.sleep(5)
print("Disabling game mode LED")
write_string(driver_path, 'game_led_state', '0')
time.sleep(5)
if not args.skip_macro_led:
print("Putting keyboard into driver mode as that is required for macro LED. Device mode: ", end='')
write_binary(driver_path, 'device_mode', b'\x03\x00')
time.sleep(5)
print(read_string(driver_path, 'device_mode'))
time.sleep(1)
print("Starting marco LED tests. Press enter to begin.")
input()
print("Enabling macro mode LED")
write_string(driver_path, 'macro_led_state', '1')
time.sleep(5)
print("Enabling macro mode LED static effect")
write_string(driver_path, 'macro_led_effect', '0')
time.sleep(5)
print("Enabling macro mode LED blinking effect")
write_string(driver_path, 'macro_led_effect', '1')
time.sleep(5)
write_string(driver_path, 'macro_led_effect', '0')
print("Disabling macro mode LED")
write_string(driver_path, 'macro_led_state', '0')
time.sleep(5)
print("Finished")
if not found_chroma:
print("No Blackwidow Chromas found")
|
from pprint import pprint
from unittest import TestCase
import pandas as pd
from scattertext.WhitespaceNLP import whitespace_nlp
from scattertext import CorpusFromPandas, SemioticSquareFromAxes
from scattertext.test.test_corpusFromPandas import get_docs_categories
class TestSemioticSquareFromAxes(TestCase):
@classmethod
def setUp(cls):
categories, documents = get_docs_categories()
cls.df = pd.DataFrame({'category': categories,
'text': documents})
cls.corpus = CorpusFromPandas(cls.df,
'category',
'text',
nlp=whitespace_nlp).build()
def test_main(self):
terms = self.corpus.get_terms()
axes = pd.DataFrame({'x': [len(x) for x in terms],
'y': [sum([ord(c) for c in x]) * 1. / len(x) for x in terms]}, index=terms)
axes['x'] = axes['x'] - axes['x'].median()
axes['y'] = axes['y'] - axes['y'].median()
x_axis_label = 'len'
y_axis_label = 'alpha'
with self.assertRaises(AssertionError):
SemioticSquareFromAxes(self.corpus, axes.iloc[:3], x_axis_label, y_axis_label)
with self.assertRaises(AssertionError):
axes2 = axes.copy()
axes2.loc['asdjfksafjd'] = pd.Series({'x': 3, 'y': 3})
SemioticSquareFromAxes(self.corpus, axes2, x_axis_label, y_axis_label)
with self.assertRaises(AssertionError):
SemioticSquareFromAxes(self.corpus, axes2[['x']], x_axis_label, y_axis_label)
with self.assertRaises(AssertionError):
axes2 = axes.copy()
axes2['a'] = 1
SemioticSquareFromAxes(self.corpus, axes2, x_axis_label, y_axis_label)
semsq = SemioticSquareFromAxes(self.corpus, axes, x_axis_label, y_axis_label)
self.assertEqual(semsq.get_labels(), {'a_and_b_label': 'alpha',
'a_and_not_b_label': 'not-len',
'a_label': 'not-len; alpha',
'b_and_not_a_label': 'len',
'b_label': 'len; alpha',
'not_a_and_not_b_label': 'not-alpha',
'not_a_label': 'len; not-alpha',
'not_b_label': 'not-len; not-alpha'})
self.assertEqual(semsq.get_axes().to_csv(), axes.to_csv())
self.assertEqual(semsq.get_lexicons(3), {'a': ['st', 'up', 'usurp'],
'a_and_b': ['usurp', 'worlds', 'thou'],
'a_and_not_b': ['and', 'did', 'i'],
'b': ['sometimes', 'brooklyn', 'returned'],
'b_and_not_a': ['sometimes march', 'together with', 'did sometimes'],
'not_a': ['i charge', 'fair and', 'charge thee'],
'not_a_and_not_b': ['is a', 'is i', 'i charge'],
'not_b': ['is a', 'is i', 'it is']})
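# A small sketch of what the centered axes DataFrame encodes: every term falls
# into a quadrant given the signs of its x and y scores. The mapping from
# quadrants to the a/b/not_a/not_b labels is scattertext's concern; this only
# shows the sign-based split with pandas.
def _demo_axes_quadrants():
    import pandas as pd
    axes = pd.DataFrame({'x': [-2., 1., 3., -1.],
                         'y': [1., 2., -1., -3.]},
                        index=['alpha', 'beta', 'gamma', 'delta'])
    axes = axes - axes.median()  # center both axes, as test_main does above
    return axes.apply(
        lambda row: ('+x' if row['x'] > 0 else '-x',
                     '+y' if row['y'] > 0 else '-y'),
        axis=1).to_dict()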
|