import numpy as np
from ..meas_info import create_info
from ...transforms import rotation3d_align_z_axis
from ...channels import make_dig_montage
from ..constants import FIFF
from ...utils import warn, _check_pandas_installed
from ..pick import pick_info
_supported_megs = ['neuromag306']
_unit_dict = {'m': 1,
'cm': 1e-2,
'mm': 1e-3,
'V': 1,
'mV': 1e-3,
'uV': 1e-6,
'T': 1,
'T/m': 1,
'T/cm': 1e2}
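# Illustrative note (added, not part of the original module): positions are
# scaled to SI metres with these factors, and channel units are stored as a
# power-of-ten multiplier via log10, e.g.
#   25 * _unit_dict['cm']        -> 0.25   (metres)
#   np.log10(_unit_dict['uV'])   -> -6.0   (FIFF unit_mul for microvolts)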
NOINFO_WARNING = 'Importing FieldTrip data without an info dict from the ' \
'original file. Channel locations, orientations and types ' \
'will be incorrect. The imported data cannot be used for ' \
'source analysis, channel interpolation etc.'
def _validate_ft_struct(ft_struct):
"""Run validation checks on the ft_structure."""
if isinstance(ft_struct, list):
raise RuntimeError('Loading of data in cell arrays is not supported')
def _create_info(ft_struct, raw_info):
"""Create MNE info structure from a FieldTrip structure."""
if raw_info is None:
warn(NOINFO_WARNING)
sfreq = _set_sfreq(ft_struct)
ch_names = ft_struct['label']
if raw_info:
info = raw_info.copy()
missing_channels = set(ch_names) - set(info['ch_names'])
if missing_channels:
warn('The following channels are present in the FieldTrip data '
'but cannot be found in the provided info: %s.\n'
'These channels will be removed from the resulting data!'
% (str(missing_channels), ))
missing_chan_idx = [ch_names.index(ch) for ch in missing_channels]
new_chs = [ch for ch in ch_names if ch not in missing_channels]
ch_names = new_chs
ft_struct['label'] = ch_names
if 'trial' in ft_struct:
ft_struct['trial'] = _remove_missing_channels_from_trial(
ft_struct['trial'],
missing_chan_idx
)
if 'avg' in ft_struct:
if ft_struct['avg'].ndim == 2:
ft_struct['avg'] = np.delete(ft_struct['avg'],
missing_chan_idx,
axis=0)
info['sfreq'] = sfreq
ch_idx = [info['ch_names'].index(ch) for ch in ch_names]
pick_info(info, ch_idx, copy=False)
else:
montage = _create_montage(ft_struct)
info = create_info(ch_names, sfreq)
info.set_montage(montage)
chs = _create_info_chs(ft_struct)
info['chs'] = chs
info._update_redundant()
return info
def _remove_missing_channels_from_trial(trial, missing_chan_idx):
if isinstance(trial, list):
for idx_trial in range(len(trial)):
trial[idx_trial] = _remove_missing_channels_from_trial(
trial[idx_trial], missing_chan_idx
)
elif isinstance(trial, np.ndarray):
if trial.ndim == 2:
trial = np.delete(trial,
missing_chan_idx,
axis=0)
else:
raise ValueError('"trial" field of the FieldTrip structure '
'has an unknown format.')
return trial
def _create_info_chs(ft_struct):
"""Create the chs info field from the FieldTrip structure."""
all_channels = ft_struct['label']
ch_defaults = dict(coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
cal=1.0,
range=1.0,
unit_mul=FIFF.FIFF_UNITM_NONE,
loc=np.array([0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]),
unit=FIFF.FIFF_UNIT_V)
try:
elec = ft_struct['elec']
except KeyError:
elec = None
try:
grad = ft_struct['grad']
except KeyError:
grad = None
if elec is None and grad is None:
        warn('The supplied FieldTrip structure does not have an elec or grad '
             'field. No channel locations will be extracted and the kind of '
             'channel might be inaccurate.')
chs = list()
for idx_chan, cur_channel_label in enumerate(all_channels):
cur_ch = ch_defaults.copy()
cur_ch['ch_name'] = cur_channel_label
cur_ch['logno'] = idx_chan + 1
cur_ch['scanno'] = idx_chan + 1
if elec and cur_channel_label in elec['label']:
cur_ch = _process_channel_eeg(cur_ch, elec)
elif grad and cur_channel_label in grad['label']:
cur_ch = _process_channel_meg(cur_ch, grad)
else:
if cur_channel_label.startswith('EOG'):
cur_ch['kind'] = FIFF.FIFFV_EOG_CH
cur_ch['coil_type'] = FIFF.FIFFV_COIL_EEG
elif cur_channel_label.startswith('ECG'):
cur_ch['kind'] = FIFF.FIFFV_ECG_CH
cur_ch['coil_type'] = FIFF.FIFFV_COIL_EEG_BIPOLAR
elif cur_channel_label.startswith('STI'):
cur_ch['kind'] = FIFF.FIFFV_STIM_CH
cur_ch['coil_type'] = FIFF.FIFFV_COIL_NONE
else:
warn('Cannot guess the correct type of channel %s. Making '
'it a MISC channel.' % (cur_channel_label,))
cur_ch['kind'] = FIFF.FIFFV_MISC_CH
cur_ch['coil_type'] = FIFF.FIFFV_COIL_NONE
chs.append(cur_ch)
return chs
def _create_montage(ft_struct):
"""Create a montage from the FieldTrip data."""
# try to create a montage
montage_pos, montage_ch_names = list(), list()
for cur_ch_type in ('grad', 'elec'):
if cur_ch_type in ft_struct:
cur_ch_struct = ft_struct[cur_ch_type]
available_channels = np.where(np.in1d(cur_ch_struct['label'],
ft_struct['label']))[0]
tmp_labels = cur_ch_struct['label']
if not isinstance(tmp_labels, list):
tmp_labels = [tmp_labels]
cur_labels = np.asanyarray(tmp_labels)
montage_ch_names.extend(
cur_labels[available_channels])
try:
montage_pos.extend(
cur_ch_struct['chanpos'][available_channels])
except KeyError:
raise RuntimeError('This file was created with an old version '
'of FieldTrip. You can convert the data to '
'the new version by loading it into '
'FieldTrip and applying ft_selectdata with '
'an empty cfg structure on it. '
'Otherwise you can supply the Info field.')
montage = None
if (len(montage_ch_names) > 0 and len(montage_pos) > 0 and
len(montage_ch_names) == len(montage_pos)):
montage = make_dig_montage(
ch_pos=dict(zip(montage_ch_names, montage_pos)),
# XXX: who grants 'head'?? this is BACKCOMPAT but seems a BUG
coord_frame='head',
)
return montage
def _set_sfreq(ft_struct):
"""Set the sample frequency."""
try:
sfreq = ft_struct['fsample']
except KeyError:
try:
time = ft_struct['time']
except KeyError:
            raise ValueError('No source for sfreq found')
else:
t1, t2 = float(time[0]), float(time[1])
sfreq = 1 / (t2 - t1)
try:
sfreq = float(sfreq)
except TypeError:
warn('FieldTrip structure contained multiple sample rates, trying the '
f'first of:\n{sfreq} Hz')
sfreq = float(sfreq.ravel()[0])
return sfreq
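# Illustrative sketch (hypothetical input): when 'fsample' is absent, the rate
# is estimated from the spacing of the first two time samples, e.g.
#   _set_sfreq({'time': np.array([0.0, 0.001, 0.002])})  -> 1000.0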
def _set_tmin(ft_struct):
"""Set the start time before the event in evoked data if possible."""
times = ft_struct['time']
time_check = all(times[i][0] == times[i - 1][0]
for i, x in enumerate(times))
if time_check:
tmin = times[0][0]
else:
raise RuntimeError('Loading data with non-uniform '
'times per epoch is not supported')
return tmin
def _create_events(ft_struct, trialinfo_column):
"""Create an event matrix from the FieldTrip structure."""
if 'trialinfo' not in ft_struct:
return None
event_type = ft_struct['trialinfo']
event_number = range(len(event_type))
    if trialinfo_column < 0:
        raise ValueError('trialinfo_column must be non-negative')
available_ti_cols = 1
if event_type.ndim == 2:
available_ti_cols = event_type.shape[1]
if trialinfo_column > (available_ti_cols - 1):
        raise ValueError('trialinfo_column is higher than the number of '
                         'columns in trialinfo.')
event_trans_val = np.zeros(len(event_type))
if event_type.ndim == 2:
event_type = event_type[:, trialinfo_column]
events = np.vstack([np.array(event_number), event_trans_val,
event_type]).astype('int').T
return events
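# Illustrative note (hypothetical values): the result follows MNE's
# (n_events, 3) convention of [trial index, previous value, event id], e.g.
#   trialinfo = [1, 2, 1]  ->  [[0, 0, 1], [1, 0, 2], [2, 0, 1]]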
def _create_event_metadata(ft_struct):
"""Create event metadata from trialinfo."""
pandas = _check_pandas_installed(strict=False)
if not pandas:
warn('The Pandas library is not installed. Not returning the original '
'trialinfo matrix as metadata.')
return None
metadata = pandas.DataFrame(ft_struct['trialinfo'])
return metadata
def _process_channel_eeg(cur_ch, elec):
"""Convert EEG channel from FieldTrip to MNE.
Parameters
----------
cur_ch: dict
Channel specific dictionary to populate.
elec: dict
elec dict as loaded from the FieldTrip structure
Returns
-------
cur_ch: dict
The original dict (cur_ch) with the added information
"""
all_labels = np.asanyarray(elec['label'])
chan_idx_in_elec = np.where(all_labels == cur_ch['ch_name'])[0][0]
position = np.squeeze(elec['chanpos'][chan_idx_in_elec, :])
chanunit = elec['chanunit'][chan_idx_in_elec]
position_unit = elec['unit']
position = position * _unit_dict[position_unit]
cur_ch['loc'] = np.hstack((position, np.zeros((9,))))
cur_ch['loc'][-1] = 1
cur_ch['unit'] = FIFF.FIFF_UNIT_V
cur_ch['unit_mul'] = np.log10(_unit_dict[chanunit[0]])
cur_ch['kind'] = FIFF.FIFFV_EEG_CH
cur_ch['coil_type'] = FIFF.FIFFV_COIL_EEG
cur_ch['coord_frame'] = FIFF.FIFFV_COORD_HEAD
return cur_ch
def _process_channel_meg(cur_ch, grad):
"""Convert MEG channel from FieldTrip to MNE.
Parameters
----------
cur_ch: dict
Channel specific dictionary to populate.
grad: dict
grad dict as loaded from the FieldTrip structure
Returns
-------
dict: The original dict (cur_ch) with the added information
"""
all_labels = np.asanyarray(grad['label'])
chan_idx_in_grad = np.where(all_labels == cur_ch['ch_name'])[0][0]
gradtype = grad['type']
chantype = grad['chantype'][chan_idx_in_grad]
position_unit = grad['unit']
position = np.squeeze(grad['chanpos'][chan_idx_in_grad, :])
position = position * _unit_dict[position_unit]
if gradtype == 'neuromag306' and 'tra' in grad and 'coilpos' in grad:
# Try to regenerate original channel pos.
idx_in_coilpos = np.where(grad['tra'][chan_idx_in_grad, :] != 0)[0]
cur_coilpos = grad['coilpos'][idx_in_coilpos, :]
cur_coilpos = cur_coilpos * _unit_dict[position_unit]
cur_coilori = grad['coilori'][idx_in_coilpos, :]
if chantype == 'megmag':
position = cur_coilpos[0] - 0.0003 * cur_coilori[0]
if chantype == 'megplanar':
tmp_pos = cur_coilpos - 0.0003 * cur_coilori
position = np.average(tmp_pos, axis=0)
original_orientation = np.squeeze(grad['chanori'][chan_idx_in_grad, :])
try:
orientation = rotation3d_align_z_axis(original_orientation).T
except AssertionError:
orientation = np.eye(3)
assert orientation.shape == (3, 3)
orientation = orientation.flatten()
chanunit = grad['chanunit'][chan_idx_in_grad]
cur_ch['loc'] = np.hstack((position, orientation))
cur_ch['kind'] = FIFF.FIFFV_MEG_CH
if chantype == 'megmag':
cur_ch['coil_type'] = FIFF.FIFFV_COIL_POINT_MAGNETOMETER
cur_ch['unit'] = FIFF.FIFF_UNIT_T
elif chantype == 'megplanar':
cur_ch['coil_type'] = FIFF.FIFFV_COIL_VV_PLANAR_T1
cur_ch['unit'] = FIFF.FIFF_UNIT_T_M
elif chantype == 'refmag':
cur_ch['coil_type'] = FIFF.FIFFV_COIL_MAGNES_REF_MAG
cur_ch['unit'] = FIFF.FIFF_UNIT_T
elif chantype == 'refgrad':
cur_ch['coil_type'] = FIFF.FIFFV_COIL_MAGNES_REF_GRAD
cur_ch['unit'] = FIFF.FIFF_UNIT_T
elif chantype == 'meggrad':
cur_ch['coil_type'] = FIFF.FIFFV_COIL_AXIAL_GRAD_5CM
cur_ch['unit'] = FIFF.FIFF_UNIT_T
else:
raise RuntimeError('Unexpected coil type: %s.' % (
chantype,))
cur_ch['unit_mul'] = np.log10(_unit_dict[chanunit[0]])
cur_ch['coord_frame'] = FIFF.FIFFV_COORD_HEAD
return cur_ch
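# Hypothetical end-to-end sketch (loader name and .mat key are assumptions,
# not part of this module):
#   ft_struct = read_mat('epoched.mat')['data']    # e.g. via pymatreader
#   _validate_ft_struct(ft_struct)
#   info = _create_info(ft_struct, raw_info=None)  # emits NOINFO_WARNING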
|
import logging
import unittest
import os
import sys
import numpy
import gensim
from gensim.utils import check_output
from gensim.test.utils import datapath, get_tmpfile
class TestGlove2Word2Vec(unittest.TestCase):
def setUp(self):
self.datapath = datapath('test_glove.txt')
self.output_file = get_tmpfile('glove2word2vec.test')
def testConversion(self):
check_output(args=[
sys.executable, '-m', 'gensim.scripts.glove2word2vec',
'--input', self.datapath, '--output', self.output_file
])
# test that the converted model loads successfully
try:
self.test_model = gensim.models.KeyedVectors.load_word2vec_format(self.output_file)
self.assertTrue(numpy.allclose(self.test_model.n_similarity(['the', 'and'], ['and', 'the']), 1.0))
except Exception:
            if os.path.isfile(self.output_file):
self.fail('model file %s was created but could not be loaded.' % self.output_file)
else:
self.fail(
'model file %s creation failed, check the parameters and input file format.' % self.output_file
)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
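# Illustrative shell invocation of the same conversion (paths assumed):
#   python -m gensim.scripts.glove2word2vec --input glove.txt --output w2v.txt
# The output gains a "<vocab_size> <dim>" header line so that
# KeyedVectors.load_word2vec_format() can read it.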
|
from asyncio import run_coroutine_threadsafe
from typing import Dict, Union
from pymfy.api import somfy_api
from homeassistant import config_entries, core
from homeassistant.helpers import config_entry_oauth2_flow
class ConfigEntrySomfyApi(somfy_api.SomfyApi):
"""Provide a Somfy API tied into an OAuth2 based config entry."""
def __init__(
self,
hass: core.HomeAssistant,
config_entry: config_entries.ConfigEntry,
implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
):
"""Initialize the Config Entry Somfy API."""
self.hass = hass
self.config_entry = config_entry
self.session = config_entry_oauth2_flow.OAuth2Session(
hass, config_entry, implementation
)
super().__init__(None, None, token=self.session.token)
def refresh_tokens(
self,
) -> Dict[str, Union[str, int]]:
"""Refresh and return new Somfy tokens using Home Assistant OAuth2 session."""
run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self.hass.loop
).result()
return self.session.token
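# Illustrative note (hypothetical wiring): refresh_tokens() lets the
# synchronous pymfy client refresh OAuth2 tokens on Home Assistant's event
# loop, e.g.
#   api = ConfigEntrySomfyApi(hass, entry, implementation)
#   token = api.refresh_tokens()   # blocks until the session token is valid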
|
from homeassistant.components.atag import DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
USER_INPUT = {
CONF_HOST: "127.0.0.1",
CONF_EMAIL: "[email protected]",
CONF_PORT: 10000,
}
UID = "xxxx-xxxx-xxxx_xx-xx-xxx-xxx"
PAIR_REPLY = {"pair_reply": {"status": {"device_id": UID}, "acc_status": 2}}
UPDATE_REPLY = {"update_reply": {"status": {"device_id": UID}, "acc_status": 2}}
RECEIVE_REPLY = {
"retrieve_reply": {
"status": {"device_id": UID},
"report": {
"burning_hours": 1000,
"room_temp": 20,
"outside_temp": 15,
"dhw_water_temp": 30,
"ch_water_temp": 40,
"ch_water_pres": 1.8,
"ch_return_temp": 35,
"boiler_status": 0,
"tout_avg": 12,
"details": {"rel_mod_level": 0},
},
"control": {
"ch_control_mode": 0,
"ch_mode": 1,
"ch_mode_duration": 0,
"ch_mode_temp": 12,
"dhw_temp_setp": 40,
"dhw_mode": 1,
"dhw_mode_temp": 150,
"weather_status": 8,
},
"configuration": {
"download_url": "http://firmware.atag-one.com:80/R58",
"temp_unit": 0,
"dhw_max_set": 65,
"dhw_min_set": 40,
},
"acc_status": 2,
}
}
async def init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
rgbw: bool = False,
skip_setup: bool = False,
) -> MockConfigEntry:
"""Set up the Atag integration in Home Assistant."""
aioclient_mock.post(
"http://127.0.0.1:10000/retrieve",
json=RECEIVE_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.post(
"http://127.0.0.1:10000/update",
json=UPDATE_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.post(
"http://127.0.0.1:10000/pair",
json=PAIR_REPLY,
headers={"Content-Type": CONTENT_TYPE_JSON},
)
entry = MockConfigEntry(domain=DOMAIN, data=USER_INPUT)
entry.add_to_hass(hass)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
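# Hypothetical test usage (assumed to run inside a pytest coroutine):
#   entry = await init_integration(hass, aioclient_mock)
#   assert entry.data[CONF_HOST] == "127.0.0.1"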
|
from concurrent.futures import TimeoutError as FutTimeoutError
import logging
import mimetypes
import pathlib
import random
import string
import requests
import slixmpp
from slixmpp.exceptions import IqError, IqTimeout, XMPPError
from slixmpp.plugins.xep_0363.http_upload import (
FileTooBig,
FileUploadError,
UploadServiceNotFound,
)
from slixmpp.xmlstream.xmlstream import NotConnectedError
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_RECIPIENT,
CONF_RESOURCE,
CONF_ROOM,
CONF_SENDER,
HTTP_BAD_REQUEST,
)
import homeassistant.helpers.config_validation as cv
import homeassistant.helpers.template as template_helper
_LOGGER = logging.getLogger(__name__)
ATTR_DATA = "data"
ATTR_PATH = "path"
ATTR_PATH_TEMPLATE = "path_template"
ATTR_TIMEOUT = "timeout"
ATTR_URL = "url"
ATTR_URL_TEMPLATE = "url_template"
ATTR_VERIFY = "verify"
CONF_TLS = "tls"
CONF_VERIFY = "verify"
DEFAULT_CONTENT_TYPE = "application/octet-stream"
DEFAULT_RESOURCE = "home-assistant"
XEP_0363_TIMEOUT = 10
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SENDER): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_RECIPIENT): cv.string,
vol.Optional(CONF_RESOURCE, default=DEFAULT_RESOURCE): cv.string,
vol.Optional(CONF_ROOM, default=""): cv.string,
vol.Optional(CONF_TLS, default=True): cv.boolean,
vol.Optional(CONF_VERIFY, default=True): cv.boolean,
}
)
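# Illustrative configuration.yaml entry matching PLATFORM_SCHEMA (values are
# placeholders):
#   notify:
#     - platform: xmpp
#       sender: homeassistant@example.com
#       password: !secret xmpp_password
#       recipient: owner@example.com
#       tls: true
#       verify: true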
async def async_get_service(hass, config, discovery_info=None):
"""Get the Jabber (XMPP) notification service."""
return XmppNotificationService(
config.get(CONF_SENDER),
config.get(CONF_RESOURCE),
config.get(CONF_PASSWORD),
config.get(CONF_RECIPIENT),
config.get(CONF_TLS),
config.get(CONF_VERIFY),
config.get(CONF_ROOM),
hass,
)
class XmppNotificationService(BaseNotificationService):
"""Implement the notification service for Jabber (XMPP)."""
def __init__(self, sender, resource, password, recipient, tls, verify, room, hass):
"""Initialize the service."""
self._hass = hass
self._sender = sender
self._resource = resource
self._password = password
self._recipient = recipient
self._tls = tls
self._verify = verify
self._room = room
async def async_send_message(self, message="", **kwargs):
"""Send a message to a user."""
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
text = f"{title}: {message}" if title else message
data = kwargs.get(ATTR_DATA)
timeout = data.get(ATTR_TIMEOUT, XEP_0363_TIMEOUT) if data else None
await async_send_message(
f"{self._sender}/{self._resource}",
self._password,
self._recipient,
self._tls,
self._verify,
self._room,
self._hass,
text,
timeout,
data,
)
async def async_send_message(
sender,
password,
recipient,
use_tls,
verify_certificate,
room,
hass,
message,
timeout=None,
data=None,
):
"""Send a message over XMPP."""
class SendNotificationBot(slixmpp.ClientXMPP):
"""Service for sending Jabber (XMPP) messages."""
def __init__(self):
"""Initialize the Jabber Bot."""
super().__init__(sender, password)
self.loop = hass.loop
self.force_starttls = use_tls
self.use_ipv6 = False
self.add_event_handler("failed_auth", self.disconnect_on_login_fail)
self.add_event_handler("session_start", self.start)
if room:
self.register_plugin("xep_0045") # MUC
if not verify_certificate:
self.add_event_handler(
"ssl_invalid_cert", self.discard_ssl_invalid_cert
)
if data:
# Init XEPs for image sending
self.register_plugin("xep_0030") # OOB dep
self.register_plugin("xep_0066") # Out of Band Data
self.register_plugin("xep_0071") # XHTML IM
self.register_plugin("xep_0128") # Service Discovery
self.register_plugin("xep_0363") # HTTP upload
self.connect(force_starttls=self.force_starttls, use_ssl=False)
async def start(self, event):
"""Start the communication and sends the message."""
# Sending image and message independently from each other
if data:
await self.send_file(timeout=timeout)
if message:
self.send_text_message()
self.disconnect(wait=True)
async def send_file(self, timeout=None):
"""Send file via XMPP.
Send XMPP file message using OOB (XEP_0066) and
HTTP Upload (XEP_0363)
"""
if room:
self.plugin["xep_0045"].join_muc(room, sender, wait=True)
try:
# Uploading with XEP_0363
_LOGGER.debug("Timeout set to %ss", timeout)
url = await self.upload_file(timeout=timeout)
_LOGGER.info("Upload success")
if room:
_LOGGER.info("Sending file to %s", room)
message = self.Message(sto=room, stype="groupchat")
else:
_LOGGER.info("Sending file to %s", recipient)
message = self.Message(sto=recipient, stype="chat")
message["body"] = url
message["oob"]["url"] = url
try:
message.send()
except (IqError, IqTimeout, XMPPError) as ex:
_LOGGER.error("Could not send image message %s", ex)
except (IqError, IqTimeout, XMPPError) as ex:
_LOGGER.error("Upload error, could not send message %s", ex)
except NotConnectedError as ex:
_LOGGER.error("Connection error %s", ex)
except FileTooBig as ex:
_LOGGER.error("File too big for server, could not upload file %s", ex)
except UploadServiceNotFound as ex:
_LOGGER.error("UploadServiceNotFound, could not upload file %s", ex)
except FileUploadError as ex:
_LOGGER.error("FileUploadError, could not upload file %s", ex)
except requests.exceptions.SSLError as ex:
_LOGGER.error("Cannot establish SSL connection %s", ex)
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Cannot connect to server %s", ex)
except (
FileNotFoundError,
PermissionError,
IsADirectoryError,
TimeoutError,
) as ex:
_LOGGER.error("Error reading file %s", ex)
except FutTimeoutError as ex:
_LOGGER.error("The server did not respond in time, %s", ex)
async def upload_file(self, timeout=None):
"""Upload file to Jabber server and return new URL.
upload a file with Jabber XEP_0363 from a remote URL or a local
file path and return a URL of that file.
"""
if data.get(ATTR_URL_TEMPLATE):
_LOGGER.debug("Got url template: %s", data[ATTR_URL_TEMPLATE])
templ = template_helper.Template(data[ATTR_URL_TEMPLATE], hass)
get_url = template_helper.render_complex(templ, None)
url = await self.upload_file_from_url(get_url, timeout=timeout)
elif data.get(ATTR_URL):
url = await self.upload_file_from_url(data[ATTR_URL], timeout=timeout)
elif data.get(ATTR_PATH_TEMPLATE):
_LOGGER.debug("Got path template: %s", data[ATTR_PATH_TEMPLATE])
templ = template_helper.Template(data[ATTR_PATH_TEMPLATE], hass)
get_path = template_helper.render_complex(templ, None)
url = await self.upload_file_from_path(get_path, timeout=timeout)
elif data.get(ATTR_PATH):
url = await self.upload_file_from_path(data[ATTR_PATH], timeout=timeout)
else:
url = None
if url is None:
_LOGGER.error("No path or URL found for file")
raise FileUploadError("Could not upload file")
return url
async def upload_file_from_url(self, url, timeout=None):
"""Upload a file from a URL. Returns a URL.
uploaded via XEP_0363 and HTTP and returns the resulting URL
"""
_LOGGER.info("Getting file from %s", url)
def get_url(url):
"""Return result for GET request to url."""
return requests.get(
url, verify=data.get(ATTR_VERIFY, True), timeout=timeout
)
result = await hass.async_add_executor_job(get_url, url)
if result.status_code >= HTTP_BAD_REQUEST:
_LOGGER.error("Could not load file from %s", url)
return None
filesize = len(result.content)
            # The upload server needs a filename with an extension. If no path
            # is provided, guess the extension from the Content-Type header.
            # A random filename is used for privacy.
if data.get(ATTR_PATH):
# using given path as base for new filename. Don't guess type
filename = self.get_random_filename(data.get(ATTR_PATH))
else:
extension = (
mimetypes.guess_extension(result.headers["Content-Type"])
or ".unknown"
)
_LOGGER.debug("Got %s extension", extension)
filename = self.get_random_filename(None, extension=extension)
_LOGGER.info("Uploading file from URL, %s", filename)
url = await self["xep_0363"].upload_file(
filename,
size=filesize,
input_file=result.content,
content_type=result.headers["Content-Type"],
timeout=timeout,
)
return url
async def upload_file_from_path(self, path, timeout=None):
"""Upload a file from a local file path via XEP_0363."""
_LOGGER.info("Uploading file from path, %s", path)
if not hass.config.is_allowed_path(path):
raise PermissionError("Could not access file. Path not allowed")
with open(path, "rb") as upfile:
_LOGGER.debug("Reading file %s", path)
input_file = upfile.read()
filesize = len(input_file)
_LOGGER.debug("Filesize is %s bytes", filesize)
content_type = mimetypes.guess_type(path)[0]
if content_type is None:
content_type = DEFAULT_CONTENT_TYPE
_LOGGER.debug("Content type is %s", content_type)
# set random filename for privacy
filename = self.get_random_filename(data.get(ATTR_PATH))
_LOGGER.debug("Uploading file with random filename %s", filename)
url = await self["xep_0363"].upload_file(
filename,
size=filesize,
input_file=input_file,
content_type=content_type,
timeout=timeout,
)
return url
def send_text_message(self):
"""Send a text only message to a room or a recipient."""
try:
if room:
_LOGGER.debug("Joining room %s", room)
self.plugin["xep_0045"].join_muc(room, sender, wait=True)
self.send_message(mto=room, mbody=message, mtype="groupchat")
else:
_LOGGER.debug("Sending message to %s", recipient)
self.send_message(mto=recipient, mbody=message, mtype="chat")
except (IqError, IqTimeout, XMPPError) as ex:
_LOGGER.error("Could not send text message %s", ex)
except NotConnectedError as ex:
_LOGGER.error("Connection error %s", ex)
# pylint: disable=no-self-use
def get_random_filename(self, filename, extension=None):
"""Return a random filename, leaving the extension intact."""
if extension is None:
path = pathlib.Path(filename)
if path.suffix:
extension = "".join(path.suffixes)
else:
extension = ".txt"
return (
"".join(random.choice(string.ascii_letters) for i in range(10))
+ extension
)
def disconnect_on_login_fail(self, event):
"""Disconnect from the server if credentials are invalid."""
_LOGGER.warning("Login failed")
self.disconnect()
@staticmethod
def discard_ssl_invalid_cert(event):
"""Do nothing if ssl certificate is invalid."""
_LOGGER.info("Ignoring invalid SSL certificate as requested")
SendNotificationBot()
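# Illustrative notify service call payload (keys taken from the ATTR_*
# constants above, values are placeholders):
#   message: "Front door snapshot"
#   data:
#     url: "http://camera.local/snapshot.jpg"
#     timeout: 30
#     verify: false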
|
import logging
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
ENERGY_KILO_WATT_HOUR,
ENERGY_WATT_HOUR,
PERCENTAGE,
POWER_WATT,
PRESSURE_BAR,
TEMP_CELSIUS,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from .const import (
COOL_ICON,
COORDINATOR,
DEVICE_STATE,
DOMAIN,
FLAME_ICON,
IDLE_ICON,
SENSOR_MAP_DEVICE_CLASS,
SENSOR_MAP_MODEL,
SENSOR_MAP_UOM,
UNIT_LUMEN,
)
from .gateway import SmileGateway
_LOGGER = logging.getLogger(__name__)
ATTR_TEMPERATURE = [
"Temperature",
TEMP_CELSIUS,
DEVICE_CLASS_TEMPERATURE,
]
ATTR_BATTERY_LEVEL = [
"Charge",
PERCENTAGE,
DEVICE_CLASS_BATTERY,
]
ATTR_ILLUMINANCE = [
"Illuminance",
UNIT_LUMEN,
DEVICE_CLASS_ILLUMINANCE,
]
ATTR_PRESSURE = ["Pressure", PRESSURE_BAR, DEVICE_CLASS_PRESSURE]
TEMP_SENSOR_MAP = {
"setpoint": ATTR_TEMPERATURE,
"temperature": ATTR_TEMPERATURE,
"intended_boiler_temperature": ATTR_TEMPERATURE,
"temperature_difference": ATTR_TEMPERATURE,
"outdoor_temperature": ATTR_TEMPERATURE,
"water_temperature": ATTR_TEMPERATURE,
"return_temperature": ATTR_TEMPERATURE,
}
ENERGY_SENSOR_MAP = {
"electricity_consumed": ["Current Consumed Power", POWER_WATT, DEVICE_CLASS_POWER],
"electricity_produced": ["Current Produced Power", POWER_WATT, DEVICE_CLASS_POWER],
"electricity_consumed_interval": [
"Consumed Power Interval",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_consumed_peak_interval": [
"Consumed Power Interval",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_consumed_off_peak_interval": [
"Consumed Power Interval (off peak)",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_produced_interval": [
"Produced Power Interval",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_produced_peak_interval": [
"Produced Power Interval",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_produced_off_peak_interval": [
"Produced Power Interval (off peak)",
ENERGY_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_consumed_off_peak_point": [
"Current Consumed Power (off peak)",
POWER_WATT,
DEVICE_CLASS_POWER,
],
"electricity_consumed_peak_point": [
"Current Consumed Power",
POWER_WATT,
DEVICE_CLASS_POWER,
],
"electricity_consumed_off_peak_cumulative": [
"Cumulative Consumed Power (off peak)",
ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_consumed_peak_cumulative": [
"Cumulative Consumed Power",
ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_produced_off_peak_point": [
"Current Consumed Power (off peak)",
POWER_WATT,
DEVICE_CLASS_POWER,
],
"electricity_produced_peak_point": [
"Current Consumed Power",
POWER_WATT,
DEVICE_CLASS_POWER,
],
"electricity_produced_off_peak_cumulative": [
"Cumulative Consumed Power (off peak)",
ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"electricity_produced_peak_cumulative": [
"Cumulative Consumed Power",
ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER,
],
"gas_consumed_interval": ["Current Consumed Gas", VOLUME_CUBIC_METERS, None],
"gas_consumed_cumulative": ["Cumulative Consumed Gas", VOLUME_CUBIC_METERS, None],
"net_electricity_point": ["Current net Power", POWER_WATT, DEVICE_CLASS_POWER],
"net_electricity_cumulative": [
"Cumulative net Power",
ENERGY_KILO_WATT_HOUR,
DEVICE_CLASS_POWER,
],
}
MISC_SENSOR_MAP = {
"battery": ATTR_BATTERY_LEVEL,
"illuminance": ATTR_ILLUMINANCE,
"modulation_level": ["Heater Modulation Level", PERCENTAGE, None],
"valve_position": ["Valve Position", PERCENTAGE, None],
"water_pressure": ATTR_PRESSURE,
}
INDICATE_ACTIVE_LOCAL_DEVICE = [
"cooling_state",
"flame_state",
]
CUSTOM_ICONS = {
"gas_consumed_interval": "mdi:fire",
"gas_consumed_cumulative": "mdi:fire",
"modulation_level": "mdi:percent",
"valve_position": "mdi:valve",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Smile sensors from a config entry."""
api = hass.data[DOMAIN][config_entry.entry_id]["api"]
coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
entities = []
all_devices = api.get_all_devices()
single_thermostat = api.single_master_thermostat()
for dev_id, device_properties in all_devices.items():
data = api.get_device_data(dev_id)
for sensor, sensor_type in {
**TEMP_SENSOR_MAP,
**ENERGY_SENSOR_MAP,
**MISC_SENSOR_MAP,
}.items():
if data.get(sensor) is None:
continue
if "power" in device_properties["types"]:
model = None
if "plug" in device_properties["types"]:
model = "Metered Switch"
entities.append(
PwPowerSensor(
api,
coordinator,
device_properties["name"],
dev_id,
sensor,
sensor_type,
model,
)
)
else:
entities.append(
PwThermostatSensor(
api,
coordinator,
device_properties["name"],
dev_id,
sensor,
sensor_type,
)
)
if single_thermostat is False:
for state in INDICATE_ACTIVE_LOCAL_DEVICE:
if state not in data:
continue
entities.append(
PwAuxDeviceSensor(
api,
coordinator,
device_properties["name"],
dev_id,
DEVICE_STATE,
)
)
break
async_add_entities(entities, True)
class SmileSensor(SmileGateway):
"""Represent Smile Sensors."""
def __init__(self, api, coordinator, name, dev_id, sensor):
"""Initialise the sensor."""
super().__init__(api, coordinator, name, dev_id)
self._sensor = sensor
self._dev_class = None
self._state = None
self._unit_of_measurement = None
if dev_id == self._api.heater_id:
self._entity_name = "Auxiliary"
sensorname = sensor.replace("_", " ").title()
self._name = f"{self._entity_name} {sensorname}"
if dev_id == self._api.gateway_id:
self._entity_name = f"Smile {self._entity_name}"
self._unique_id = f"{dev_id}-{sensor}"
@property
def device_class(self):
"""Device class of this entity."""
return self._dev_class
@property
def state(self):
"""Device class of this entity."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
class PwThermostatSensor(SmileSensor, Entity):
"""Thermostat and climate sensor entities."""
def __init__(self, api, coordinator, name, dev_id, sensor, sensor_type):
"""Set up the Plugwise API."""
super().__init__(api, coordinator, name, dev_id, sensor)
self._icon = None
self._model = sensor_type[SENSOR_MAP_MODEL]
self._unit_of_measurement = sensor_type[SENSOR_MAP_UOM]
self._dev_class = sensor_type[SENSOR_MAP_DEVICE_CLASS]
@callback
def _async_process_data(self):
"""Update the entity."""
data = self._api.get_device_data(self._dev_id)
if not data:
_LOGGER.error("Received no data for device %s", self._entity_name)
self.async_write_ha_state()
return
if data.get(self._sensor) is not None:
measurement = data[self._sensor]
if self._sensor == "battery" or self._sensor == "valve_position":
measurement = measurement * 100
if self._unit_of_measurement == PERCENTAGE:
measurement = int(measurement)
self._state = measurement
self._icon = CUSTOM_ICONS.get(self._sensor, self._icon)
self.async_write_ha_state()
class PwAuxDeviceSensor(SmileSensor, Entity):
"""Auxiliary sensor entities for the heating/cooling device."""
def __init__(self, api, coordinator, name, dev_id, sensor):
"""Set up the Plugwise API."""
super().__init__(api, coordinator, name, dev_id, sensor)
self._cooling_state = False
self._heating_state = False
self._icon = None
@property
def icon(self):
"""Return the icon to use in the frontend."""
return self._icon
@callback
def _async_process_data(self):
"""Update the entity."""
data = self._api.get_device_data(self._dev_id)
if not data:
_LOGGER.error("Received no data for device %s", self._entity_name)
self.async_write_ha_state()
return
if data.get("heating_state") is not None:
self._heating_state = data["heating_state"]
if data.get("cooling_state") is not None:
self._cooling_state = data["cooling_state"]
self._state = "idle"
self._icon = IDLE_ICON
if self._heating_state:
self._state = "heating"
self._icon = FLAME_ICON
if self._cooling_state:
self._state = "cooling"
self._icon = COOL_ICON
self.async_write_ha_state()
class PwPowerSensor(SmileSensor, Entity):
"""Power sensor entities."""
def __init__(self, api, coordinator, name, dev_id, sensor, sensor_type, model):
"""Set up the Plugwise API."""
super().__init__(api, coordinator, name, dev_id, sensor)
self._icon = None
self._model = model
if model is None:
self._model = sensor_type[SENSOR_MAP_MODEL]
self._unit_of_measurement = sensor_type[SENSOR_MAP_UOM]
self._dev_class = sensor_type[SENSOR_MAP_DEVICE_CLASS]
if dev_id == self._api.gateway_id:
self._model = "P1 DSMR"
@callback
def _async_process_data(self):
"""Update the entity."""
data = self._api.get_device_data(self._dev_id)
if not data:
_LOGGER.error("Received no data for device %s", self._entity_name)
self.async_write_ha_state()
return
if data.get(self._sensor) is not None:
measurement = data[self._sensor]
if self._unit_of_measurement == ENERGY_KILO_WATT_HOUR:
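                # Note (added comment): the API appears to deliver Wh here;
                # dividing by 1000 and truncating with int() reports whole kWh.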
measurement = int(measurement / 1000)
self._state = measurement
self._icon = CUSTOM_ICONS.get(self._sensor, self._icon)
self.async_write_ha_state()
|
__docformat__ = "restructuredtext en"
import sys
import types
import pkg_resources
__version__ = pkg_resources.get_distribution('logilab-common').version
# deprecated, but keep compatibility with pylint < 1.4.4
__pkginfo__ = types.ModuleType('__pkginfo__')
__pkginfo__.__package__ = __name__
__pkginfo__.version = __version__
sys.modules['logilab.common.__pkginfo__'] = __pkginfo__
STD_BLACKLIST = ('CVS', '.svn', '.hg', '.git', '.tox', 'debian', 'dist', 'build')
IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc', '~', '.swp', '.orig')
# set this to False if you have mx DateTime installed but you don't want your
# db adapter to use it (should be set before you get a connection)
USE_MX_DATETIME = True
class attrdict(dict):
"""A dictionary for which keys are also accessible as attributes."""
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError(attr)
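# Illustrative usage: attrdict(x=1).x == attrdict(x=1)['x'] == 1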
class dictattr(dict):
def __init__(self, proxy):
self.__proxy = proxy
def __getitem__(self, attr):
try:
return getattr(self.__proxy, attr)
except AttributeError:
raise KeyError(attr)
class nullobject(object):
def __repr__(self):
return '<nullobject>'
def __bool__(self):
return False
__nonzero__ = __bool__
class tempattr(object):
def __init__(self, obj, attr, value):
self.obj = obj
self.attr = attr
self.value = value
def __enter__(self):
self.oldvalue = getattr(self.obj, self.attr)
setattr(self.obj, self.attr, self.value)
return self.obj
def __exit__(self, exctype, value, traceback):
setattr(self.obj, self.attr, self.oldvalue)
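# Illustrative usage (hypothetical object):
#   with tempattr(config, 'debug', True):
#       ...  # config.debug is True here, restored to its old value on exit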
# flatten -----
# XXX move in a specific module and use yield instead
# do not mix flatten and translate
#
# def iterable(obj):
# try: iter(obj)
# except: return False
# return True
#
# def is_string_like(obj):
# try: obj +''
# except (TypeError, ValueError): return False
# return True
#
#def is_scalar(obj):
# return is_string_like(obj) or not iterable(obj)
#
#def flatten(seq):
# for item in seq:
# if is_scalar(item):
# yield item
# else:
# for subitem in flatten(item):
# yield subitem
def flatten(iterable, tr_func=None, results=None):
"""Flatten a list of list with any level.
If tr_func is not None, it should be a one argument function that'll be called
on each final element.
:rtype: list
>>> flatten([1, [2, 3]])
[1, 2, 3]
"""
if results is None:
results = []
for val in iterable:
if isinstance(val, (list, tuple)):
flatten(val, tr_func, results)
elif tr_func is None:
results.append(val)
else:
results.append(tr_func(val))
return results
# XXX is function below still used ?
def make_domains(lists):
"""
Given a list of lists, return a list of domain for each list to produce all
combinations of possibles values.
:rtype: list
Example:
>>> make_domains(['a', 'b'], ['c','d', 'e'])
[['a', 'b', 'a', 'b', 'a', 'b'], ['c', 'c', 'd', 'd', 'e', 'e']]
"""
from six.moves import range
domains = []
for iterable in lists:
new_domain = iterable[:]
for i in range(len(domains)):
domains[i] = domains[i]*len(iterable)
if domains:
            missing = (len(domains[0]) - len(iterable)) // len(iterable)
i = 0
for j in range(len(iterable)):
value = iterable[j]
for dummy in range(missing):
new_domain.insert(i, value)
i += 1
i += 1
domains.append(new_domain)
return domains
# private stuff ################################################################
def _handle_blacklist(blacklist, dirnames, filenames):
"""remove files/directories in the black list
dirnames/filenames are usually from os.walk
"""
for norecurs in blacklist:
if norecurs in dirnames:
dirnames.remove(norecurs)
elif norecurs in filenames:
filenames.remove(norecurs)
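# Illustrative use with os.walk (path assumed):
#   for dirpath, dirnames, filenames in os.walk('.'):
#       _handle_blacklist(STD_BLACKLIST, dirnames, filenames)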
|
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent
from . import DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER
INTENT_OPEN_COVER = "HassOpenCover"
INTENT_CLOSE_COVER = "HassCloseCover"
async def async_setup_intents(hass: HomeAssistant) -> None:
"""Set up the cover intents."""
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
INTENT_OPEN_COVER, DOMAIN, SERVICE_OPEN_COVER, "Opened {}"
)
)
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
INTENT_CLOSE_COVER, DOMAIN, SERVICE_CLOSE_COVER, "Closed {}"
)
)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock, call
from mock import patch
from diamond.collector import Collector
from aerospike import AerospikeCollector
##########################################################################
class TestAerospike39Collector(CollectorTestCase):
def bootStrap(self, custom_config={}):
config = get_collector_config('AerospikeCollector', custom_config)
self.collector = AerospikeCollector(config, None)
def test_import(self):
self.assertTrue(AerospikeCollector)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_latency(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockTelnet = Mock(**{
'read_until.side_effect':
[
"3.9",
self.getFixture('v3.9/latency').getvalue(),
]
})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.bootStrap(custom_config={
'latency': True,
'statistics': False,
'throughput': False,
'namespaces': False,
})
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('\n', 1)
metrics = {
'latency.foo.read.1ms': 1.00,
'latency.foo.read.8ms': 3.00,
'latency.foo.read.64ms': 5.00,
'latency.foo.read.ops': 2206.8,
'latency.bar.read.1ms': 2.00,
'latency.bar.read.8ms': 4.00,
'latency.bar.read.64ms': 6.00,
'latency.bar.read.ops': 4206.8,
'latency.foo.write.1ms': 1.28,
'latency.foo.write.8ms': 3.01,
'latency.foo.write.64ms': 5.01,
'latency.foo.write.ops': 1480.4,
'latency.bar.write.1ms': 2.28,
'latency.bar.write.8ms': 4.01,
'latency.bar.write.64ms': 6.01,
'latency.bar.write.ops': 2480.4,
}
self.assertPublishedMany(
[publish_mock,
publish_gauge_mock,
publish_counter_mock,
],
metrics,
)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_statistics(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockTelnet = Mock(**{
'read_until.side_effect':
[
"3.9",
self.getFixture('v3.9/statistics').getvalue(),
]
})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.bootStrap(custom_config={
'latency': False,
'statistics': True,
'throughput': False,
'namespaces': False,
})
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('\n', 1)
metrics = {
'statistics.objects': 6816672,
'statistics.cluster_size': 3,
'statistics.system_free_mem_pct': 87,
'statistics.client_connections': 51,
'statistics.scans_active': 0,
}
self.assertPublishedMany(
[publish_mock,
publish_gauge_mock,
publish_counter_mock,
],
metrics,
)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_throughput(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockTelnet = Mock(**{
'read_until.side_effect':
[
"3.9",
self.getFixture('v3.9/throughput').getvalue(),
]
})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.bootStrap(custom_config={
'latency': False,
'statistics': False,
'throughput': True,
'namespaces': False,
})
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('\n', 1)
metrics = {
'throughput.foo.read': 2089.3,
'throughput.foo.write': 1478.6,
'throughput.bar.read': 3089.3,
'throughput.bar.write': 3478.6,
}
self.assertPublishedMany(
[publish_mock,
publish_gauge_mock,
publish_counter_mock,
],
metrics,
)
@patch.object(Collector, 'publish')
@patch.object(Collector, 'publish_gauge')
@patch.object(Collector, 'publish_counter')
def test_namespaces(self, publish_counter_mock,
publish_gauge_mock, publish_mock):
mockTelnet = Mock(**{
'read_until.side_effect':
[
"3.9",
self.getFixture('v3.9/namespaces').getvalue(),
self.getFixture('v3.9/namespace_foo').getvalue(),
self.getFixture('v3.9/namespace_bar').getvalue(),
],
})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.bootStrap(custom_config={
'latency': False,
'statistics': False,
'throughput': False,
'namespaces': True,
})
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('\n', 1)
mockTelnet.write.assert_has_calls(
[
call('version\n'),
call('namespaces\n'),
call('namespace/foo\n'),
call('namespace/bar\n'),
],
)
metrics = {
'namespace.foo.objects': 6831009,
'namespace.foo.memory_free_pct': 86,
'namespace.foo.memory-size': 21474836480,
'namespace.foo.client_read_error': 0,
'namespace.foo.device_free_pct': 88,
'namespace.bar.objects': 5831009,
'namespace.bar.memory_free_pct': 76,
'namespace.bar.memory-size': 31474836480,
'namespace.bar.client_read_error': 0,
'namespace.bar.device_free_pct': 88,
}
self.assertPublishedMany(
[publish_mock,
publish_gauge_mock,
publish_counter_mock,
],
metrics,
)
def test_namespace_whitelist(self):
mockTelnet = Mock(**{
'read_until.side_effect':
[
"3.9",
self.getFixture('v3.9/namespaces').getvalue(),
self.getFixture('v3.9/namespace_bar').getvalue(),
],
})
patch_Telnet = patch('telnetlib.Telnet', Mock(return_value=mockTelnet))
patch_Telnet.start()
self.bootStrap(custom_config={
'latency': False,
'statistics': False,
'throughput': False,
'namespaces': True,
'namespaces_whitelist': ['bar'],
})
self.collector.collect()
patch_Telnet.stop()
mockTelnet.read_until.assert_any_call('\n', 1)
mockTelnet.write.assert_has_calls(
[
call('version\n'),
call('namespaces\n'),
call('namespace/bar\n'),
],
)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from perfkitbenchmarker import os_types
from perfkitbenchmarker import vm_util
import six
BACKGROUND_WORKLOADS = []
BACKGROUND_IPERF_PORT = 20001
BACKGROUND_IPERF_SECONDS = 2147483647
class AutoRegisterBackgroundWorkloadMeta(type):
"""Metaclass which allows BackgroundWorkloads to be auto-registered."""
def __init__(cls, name, bases, dct):
super(AutoRegisterBackgroundWorkloadMeta, cls).__init__(name, bases, dct)
BACKGROUND_WORKLOADS.append(cls)
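# Note (added): merely defining a subclass registers it; CpuWorkload and
# NetworkWorkload below are appended to BACKGROUND_WORKLOADS automatically.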
class BaseBackgroundWorkload(
six.with_metaclass(AutoRegisterBackgroundWorkloadMeta, object)):
"""Baseclass for background workloads."""
EXCLUDED_OS_TYPES = []
@staticmethod
def IsEnabled(vm):
"""Returns true if this background workload is enabled on this VM."""
del vm # Unused
return False
@staticmethod
def Prepare(vm):
"""Prepares the background workload on this VM."""
pass
@staticmethod
def Start(vm):
"""Starts the background workload on this VM."""
pass
@staticmethod
def Stop(vm):
"""Stops the background workload on this VM."""
pass
class CpuWorkload(BaseBackgroundWorkload):
"""Workload that runs sysbench in the background."""
EXCLUDED_OS_TYPES = os_types.WINDOWS_OS_TYPES
@staticmethod
def IsEnabled(vm):
"""Returns true if this background workload is enabled on this VM."""
return bool(vm.background_cpu_threads)
@staticmethod
def Prepare(vm):
"""Prepares the background workload on this VM."""
vm.Install('sysbench')
@staticmethod
def Start(vm):
"""Starts the background workload on this VM."""
vm.RemoteCommand(
'nohup sysbench --num-threads=%s --test=cpu --cpu-max-prime=10000000 '
'run 1> /dev/null 2> /dev/null &' % vm.background_cpu_threads)
@staticmethod
def Stop(vm):
"""Stops the background workload on this VM."""
vm.RemoteCommand('pkill -9 sysbench')
class NetworkWorkload(BaseBackgroundWorkload):
"""Workload that runs iperf in the background."""
EXCLUDED_OS_TYPES = os_types.WINDOWS_OS_TYPES
@staticmethod
def IsEnabled(vm):
"""Returns true if this background workload is enabled on this VM."""
return bool(vm.background_network_mbits_per_sec)
@staticmethod
def Prepare(vm):
"""Prepares the background workload on this VM."""
vm.Install('iperf')
@staticmethod
def Start(vm):
"""Starts the background workload on this VM."""
vm.AllowPort(BACKGROUND_IPERF_PORT)
vm.RemoteCommand('nohup iperf --server --port %s &> /dev/null &' %
BACKGROUND_IPERF_PORT)
stdout, _ = vm.RemoteCommand('pgrep iperf -n')
vm.server_pid = stdout.strip()
if vm.background_network_ip_type == vm_util.IpAddressSubset.EXTERNAL:
ip_address = vm.ip_address
else:
ip_address = vm.internal_ip
iperf_cmd = ('nohup iperf --client %s --port %s --time %s -u -b %sM '
'&> /dev/null &' % (ip_address, BACKGROUND_IPERF_PORT,
BACKGROUND_IPERF_SECONDS,
vm.background_network_mbits_per_sec))
vm.RemoteCommand(iperf_cmd)
stdout, _ = vm.RemoteCommand('pgrep iperf -n')
vm.client_pid = stdout.strip()
@staticmethod
def Stop(vm):
"""Stops the background workload on this VM."""
vm.RemoteCommand('kill -9 ' + vm.client_pid)
vm.RemoteCommand('kill -9 ' + vm.server_pid)
|
import asyncio
from datetime import timedelta
from functools import wraps
import logging
from aiopylgtv import PyLGTVCmdException, PyLGTVPairException, WebOsClient
from websockets.exceptions import ConnectionClosed
from homeassistant import util
from homeassistant.components.media_player import DEVICE_CLASS_TV, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.components.webostv.const import (
ATTR_PAYLOAD,
ATTR_SOUND_OUTPUT,
CONF_ON_ACTION,
CONF_SOURCES,
DOMAIN,
LIVE_TV_APP_ID,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CUSTOMIZE,
CONF_HOST,
CONF_NAME,
ENTITY_MATCH_ALL,
ENTITY_MATCH_NONE,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
SUPPORT_WEBOSTV = (
SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY_MEDIA
| SUPPORT_PLAY
)
SUPPORT_WEBOSTV_VOLUME = SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
SCAN_INTERVAL = timedelta(seconds=10)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the LG webOS Smart TV platform."""
if discovery_info is None:
return
host = discovery_info[CONF_HOST]
name = discovery_info[CONF_NAME]
customize = discovery_info[CONF_CUSTOMIZE]
turn_on_action = discovery_info.get(CONF_ON_ACTION)
client = hass.data[DOMAIN][host]["client"]
on_script = Script(hass, turn_on_action, name, DOMAIN) if turn_on_action else None
entity = LgWebOSMediaPlayerEntity(client, name, customize, on_script)
async_add_entities([entity], update_before_add=False)
def cmd(func):
"""Catch command exceptions."""
@wraps(func)
async def wrapper(obj, *args, **kwargs):
"""Wrap all command methods."""
try:
await func(obj, *args, **kwargs)
except (
asyncio.TimeoutError,
asyncio.CancelledError,
PyLGTVCmdException,
) as exc:
# If TV is off, we expect calls to fail.
if obj.state == STATE_OFF:
level = logging.INFO
else:
level = logging.ERROR
_LOGGER.log(
level,
"Error calling %s on entity %s: %r",
func.__name__,
obj.entity_id,
exc,
)
return wrapper
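# Note (added): cmd is applied as @cmd on the entity methods below; command
# failures are logged at INFO when the TV is off (expected) and ERROR otherwise.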
class LgWebOSMediaPlayerEntity(MediaPlayerEntity):
"""Representation of a LG webOS Smart TV."""
def __init__(self, client: WebOsClient, name: str, customize, on_script=None):
"""Initialize the webos device."""
self._client = client
self._name = name
self._unique_id = client.client_key
self._customize = customize
self._on_script = on_script
# Assume that the TV is not paused
self._paused = False
self._current_source = None
self._source_list = {}
async def async_added_to_hass(self):
"""Connect and subscribe to dispatcher signals and state updates."""
async_dispatcher_connect(self.hass, DOMAIN, self.async_signal_handler)
await self._client.register_state_update_callback(
self.async_handle_state_update
)
async def async_will_remove_from_hass(self):
"""Call disconnect on removal."""
self._client.unregister_state_update_callback(self.async_handle_state_update)
async def async_signal_handler(self, data):
"""Handle domain-specific signal by calling appropriate method."""
entity_ids = data[ATTR_ENTITY_ID]
if entity_ids == ENTITY_MATCH_NONE:
return
if entity_ids == ENTITY_MATCH_ALL or self.entity_id in entity_ids:
params = {
key: value
for key, value in data.items()
if key not in ["entity_id", "method"]
}
await getattr(self, data["method"])(**params)
async def async_handle_state_update(self):
"""Update state from WebOsClient."""
self.update_sources()
self.async_write_ha_state()
def update_sources(self):
"""Update list of sources from current source, apps, inputs and configured list."""
source_list = self._source_list
self._source_list = {}
conf_sources = self._customize[CONF_SOURCES]
found_live_tv = False
for app in self._client.apps.values():
if app["id"] == LIVE_TV_APP_ID:
found_live_tv = True
if app["id"] == self._client.current_appId:
self._current_source = app["title"]
self._source_list[app["title"]] = app
elif (
not conf_sources
or app["id"] in conf_sources
or any(word in app["title"] for word in conf_sources)
or any(word in app["id"] for word in conf_sources)
):
self._source_list[app["title"]] = app
for source in self._client.inputs.values():
if source["appId"] == LIVE_TV_APP_ID:
found_live_tv = True
if source["appId"] == self._client.current_appId:
self._current_source = source["label"]
self._source_list[source["label"]] = source
elif (
not conf_sources
or source["label"] in conf_sources
or any(source["label"].find(word) != -1 for word in conf_sources)
):
self._source_list[source["label"]] = source
# special handling of live tv since this might not appear in the app or input lists in some cases
if not found_live_tv:
app = {"id": LIVE_TV_APP_ID, "title": "Live TV"}
if LIVE_TV_APP_ID == self._client.current_appId:
self._current_source = app["title"]
self._source_list["Live TV"] = app
elif (
not conf_sources
or app["id"] in conf_sources
or any(word in app["title"] for word in conf_sources)
or any(word in app["id"] for word in conf_sources)
):
self._source_list["Live TV"] = app
if not self._source_list and source_list:
self._source_list = source_list
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
async def async_update(self):
"""Connect."""
if not self._client.is_connected():
try:
await self._client.connect()
except (
OSError,
ConnectionClosed,
ConnectionRefusedError,
asyncio.TimeoutError,
asyncio.CancelledError,
PyLGTVPairException,
PyLGTVCmdException,
):
pass
@property
def unique_id(self):
"""Return the unique id of the device."""
return self._unique_id
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def device_class(self):
"""Return the device class of the device."""
return DEVICE_CLASS_TV
@property
def state(self):
"""Return the state of the device."""
if self._client.is_on:
return STATE_ON
return STATE_OFF
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._client.muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
if self._client.volume is not None:
return self._client.volume / 100.0
return None
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""List of available input sources."""
return sorted(list(self._source_list))
@property
def media_content_type(self):
"""Content type of current playing media."""
if self._client.current_appId == LIVE_TV_APP_ID:
return MEDIA_TYPE_CHANNEL
return None
@property
def media_title(self):
"""Title of current playing media."""
if (self._client.current_appId == LIVE_TV_APP_ID) and (
self._client.current_channel is not None
):
return self._client.current_channel.get("channelName")
return None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._client.current_appId in self._client.apps:
icon = self._client.apps[self._client.current_appId]["largeIcon"]
if not icon.startswith("http"):
icon = self._client.apps[self._client.current_appId]["icon"]
return icon
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
supported = SUPPORT_WEBOSTV
if (self._client.sound_output == "external_arc") or (
self._client.sound_output == "external_speaker"
):
supported = supported | SUPPORT_WEBOSTV_VOLUME
elif self._client.sound_output != "lineout":
supported = supported | SUPPORT_WEBOSTV_VOLUME | SUPPORT_VOLUME_SET
if self._on_script:
supported = supported | SUPPORT_TURN_ON
return supported
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
if self._client.sound_output is None and self.state == STATE_OFF:
return {}
return {ATTR_SOUND_OUTPUT: self._client.sound_output}
@cmd
async def async_turn_off(self):
"""Turn off media player."""
await self._client.power_off()
async def async_turn_on(self):
"""Turn on the media player."""
if self._on_script:
await self._on_script.async_run(context=self._context)
@cmd
async def async_volume_up(self):
"""Volume up the media player."""
await self._client.volume_up()
@cmd
async def async_volume_down(self):
"""Volume down media player."""
await self._client.volume_down()
@cmd
async def async_set_volume_level(self, volume):
"""Set volume level, range 0..1."""
tv_volume = int(round(volume * 100))
await self._client.set_volume(tv_volume)
@cmd
async def async_mute_volume(self, mute):
"""Send mute command."""
await self._client.set_mute(mute)
@cmd
async def async_select_sound_output(self, sound_output):
"""Select the sound output."""
await self._client.change_sound_output(sound_output)
@cmd
async def async_media_play_pause(self):
"""Simulate play pause media player."""
if self._paused:
await self.async_media_play()
else:
await self.async_media_pause()
@cmd
async def async_select_source(self, source):
"""Select input source."""
source_dict = self._source_list.get(source)
if source_dict is None:
_LOGGER.warning("Source %s not found for %s", source, self.name)
return
if source_dict.get("title"):
await self._client.launch_app(source_dict["id"])
elif source_dict.get("label"):
await self._client.set_input(source_dict["id"])
@cmd
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
_LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
if media_type == MEDIA_TYPE_CHANNEL:
_LOGGER.debug("Searching channel...")
partial_match_channel_id = None
perfect_match_channel_id = None
for channel in self._client.channels:
if media_id == channel["channelNumber"]:
perfect_match_channel_id = channel["channelId"]
continue
if media_id.lower() == channel["channelName"].lower():
perfect_match_channel_id = channel["channelId"]
continue
if media_id.lower() in channel["channelName"].lower():
partial_match_channel_id = channel["channelId"]
if perfect_match_channel_id is not None:
_LOGGER.info(
"Switching to channel <%s> with perfect match",
perfect_match_channel_id,
)
await self._client.set_channel(perfect_match_channel_id)
elif partial_match_channel_id is not None:
_LOGGER.info(
"Switching to channel <%s> with partial match",
partial_match_channel_id,
)
await self._client.set_channel(partial_match_channel_id)
@cmd
async def async_media_play(self):
"""Send play command."""
self._paused = False
await self._client.play()
@cmd
async def async_media_pause(self):
"""Send media pause command to media player."""
self._paused = True
await self._client.pause()
@cmd
async def async_media_stop(self):
"""Send stop command to media player."""
await self._client.stop()
@cmd
async def async_media_next_track(self):
"""Send next track command."""
current_input = self._client.get_input()
if current_input == LIVE_TV_APP_ID:
await self._client.channel_up()
else:
await self._client.fast_forward()
@cmd
async def async_media_previous_track(self):
"""Send the previous track command."""
current_input = self._client.get_input()
if current_input == LIVE_TV_APP_ID:
await self._client.channel_down()
else:
await self._client.rewind()
@cmd
async def async_button(self, button):
"""Send a button press."""
await self._client.button(button)
@cmd
async def async_command(self, command, **kwargs):
"""Send a command."""
await self._client.request(command, payload=kwargs.get(ATTR_PAYLOAD))
|
import re
import unicodedata
from cryptography import x509
from cryptography.hazmat.primitives.serialization import Encoding
from flask import current_app
from lemur.common.utils import is_selfsigned
from lemur.extensions import sentry
from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE
def text_to_slug(value, joiner="-"):
"""
Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters.
A series of non-alphanumeric characters is replaced with the joiner character.
"""
# Strip all character accents: decompose Unicode characters and then drop combining chars.
value = "".join(
c for c in unicodedata.normalize("NFKD", value) if not unicodedata.combining(c)
)
# Replace all remaining non-alphanumeric characters with joiner string. Multiple characters get collapsed into a
# single joiner. Except, keep 'xn--' used in IDNA domain names as is.
value = re.sub(r"[^A-Za-z0-9.]+(?<!xn--)", joiner, value)
# '-' in the beginning or end of string looks ugly.
return value.strip(joiner)
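# Illustrative example (not from the original source): accents are stripped and
# runs of non-alphanumeric characters collapse into the joiner, e.g.
#   text_to_slug("Ačme Corp. Intermediate CA!")  ->  "Acme-Corp.-Intermediate-CA"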
def certificate_name(common_name, issuer, not_before, not_after, san):
"""
Create a name for our certificate. A naming standard
is based on a series of templates. The name includes
useful information such as Common Name, Validation dates,
and Issuer.
:param san:
:param common_name:
:param not_after:
:param issuer:
:param not_before:
:rtype: str
:return:
"""
if san:
t = SAN_NAMING_TEMPLATE
else:
t = DEFAULT_NAMING_TEMPLATE
temp = t.format(
subject=common_name,
issuer=issuer.replace(" ", ""),
not_before=not_before.strftime("%Y%m%d"),
not_after=not_after.strftime("%Y%m%d"),
)
temp = temp.replace("*", "WILDCARD")
return text_to_slug(temp)
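# Illustrative only -- the exact output depends on SAN_NAMING_TEMPLATE /
# DEFAULT_NAMING_TEMPLATE in lemur.constants. With a hypothetical template such as
# "{subject}-{issuer}-{not_before}-{not_after}", a wildcard certificate could yield:
#   certificate_name("*.example.com", "Example CA", before, after, san=False)
#   -> "WILDCARD.example.com-ExampleCA-20210101-20220101"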
def signing_algorithm(cert):
return cert.signature_hash_algorithm.name
def common_name(cert):
"""
Attempts to get a sane common name from a given certificate.
:param cert:
:return: Common name or None
"""
try:
subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)
if len(subject_oid) > 0:
return subject_oid[0].value.strip()
return None
except Exception as e:
sentry.captureException()
current_app.logger.error(
{
"message": "Unable to get common name",
"error": e,
"public_key": cert.public_bytes(Encoding.PEM).decode("utf-8")
},
exc_info=True
)
def organization(cert):
"""
Attempt to get the organization name from a given certificate.
:param cert:
:return:
"""
try:
o = cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)
if not o:
return None
return o[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get organization! {0}".format(e))
def organizational_unit(cert):
"""
Attempt to get the organization unit from a given certificate.
:param cert:
:return:
"""
try:
ou = cert.subject.get_attributes_for_oid(x509.OID_ORGANIZATIONAL_UNIT_NAME)
if not ou:
return None
return ou[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get organizational unit! {0}".format(e))
def country(cert):
"""
Attempt to get the country from a given certificate.
:param cert:
:return:
"""
try:
c = cert.subject.get_attributes_for_oid(x509.OID_COUNTRY_NAME)
if not c:
return None
return c[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get country! {0}".format(e))
def state(cert):
"""
    Attempt to get the state or province from a given certificate.
:param cert:
:return:
"""
try:
s = cert.subject.get_attributes_for_oid(x509.OID_STATE_OR_PROVINCE_NAME)
if not s:
return None
return s[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get state! {0}".format(e))
def location(cert):
"""
Attempt to get the location name from a given certificate.
:param cert:
:return:
"""
try:
loc = cert.subject.get_attributes_for_oid(x509.OID_LOCALITY_NAME)
if not loc:
return None
return loc[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get location! {0}".format(e))
def domains(cert):
"""
    Attempts to get any domains listed in a certificate.
If 'subjectAltName' extension is not available we simply
return the common name.
:param cert:
:return: List of domains
"""
domains = []
try:
ext = cert.extensions.get_extension_for_oid(x509.OID_SUBJECT_ALTERNATIVE_NAME)
entries = ext.value.get_values_for_type(x509.DNSName)
for entry in entries:
domains.append(entry)
except x509.ExtensionNotFound:
if current_app.config.get("LOG_SSL_SUBJ_ALT_NAME_ERRORS", True):
sentry.captureException()
except Exception as e:
sentry.captureException()
return domains
def serial(cert):
"""
Fetch the serial number from the certificate.
:param cert:
:return: serial number
"""
return cert.serial_number
def san(cert):
"""
Determines if a given certificate is a SAN certificate.
SAN certificates are simply certificates that cover multiple domains.
:param cert:
:return: Bool
"""
if len(domains(cert)) > 1:
return True
def is_wildcard(cert):
"""
Determines if certificate is a wildcard certificate.
:param cert:
:return: Bool
"""
d = domains(cert)
if len(d) == 1 and d[0][0:1] == "*":
return True
if cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[0].value[0:1] == "*":
return True
def bitstrength(cert):
"""
Calculates a certificates public key bit length.
:param cert:
:return: Integer
"""
try:
return cert.public_key().key_size
except AttributeError:
sentry.captureException()
current_app.logger.debug("Unable to get bitstrength.")
def issuer(cert):
"""
Gets a sane issuer slug from a given certificate, stripping non-alphanumeric characters.
For self-signed certificates, the special value '<selfsigned>' is returned.
If issuer cannot be determined, '<unknown>' is returned.
:param cert: Parsed certificate object
:return: Issuer slug
"""
# If certificate is self-signed, we return a special value -- there really is no distinct "issuer" for it
if is_selfsigned(cert):
return "<selfsigned>"
# Try Common Name or fall back to Organization name
attrs = cert.issuer.get_attributes_for_oid(
x509.OID_COMMON_NAME
) or cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)
if not attrs:
current_app.logger.error(
"Unable to get issuer! Cert serial {:x}".format(cert.serial_number)
)
return "<unknown>"
return text_to_slug(attrs[0].value, "")
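# Illustrative examples (assumed, not taken from the original source):
#   - a self-signed certificate            -> "<selfsigned>"
#   - issuer CN "Example CA Intermediate"  -> "ExampleCAIntermediate"  (slug, no joiner)
#   - neither CN nor O present             -> "<unknown>"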
def not_before(cert):
"""
    Gets the naive datetime of the certificate's 'not_before' field.
This field denotes the first date in time which the given certificate
is valid.
:param cert:
:return: Datetime
"""
return cert.not_valid_before
def not_after(cert):
"""
    Gets the naive datetime of the certificate's 'not_after' field.
This field denotes the last date in time which the given certificate
is valid.
:return: Datetime
"""
return cert.not_valid_after
|
from typing import MutableSequence
from PyQt5.QtCore import pyqtSlot, pyqtSignal, QTimer, Qt
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QLabel, QSizePolicy
from qutebrowser.config import config, stylesheet
from qutebrowser.utils import usertypes
class Message(QLabel):
"""A single error/warning/info message."""
def __init__(self, level, text, replace, parent=None):
super().__init__(text, parent)
self.replace = replace
self.setAttribute(Qt.WA_StyledBackground, True)
self.setWordWrap(True)
qss = """
padding-top: 2px;
padding-bottom: 2px;
"""
if level == usertypes.MessageLevel.error:
qss += """
background-color: {{ conf.colors.messages.error.bg }};
color: {{ conf.colors.messages.error.fg }};
font: {{ conf.fonts.messages.error }};
border-bottom: 1px solid {{ conf.colors.messages.error.border }};
"""
elif level == usertypes.MessageLevel.warning:
qss += """
background-color: {{ conf.colors.messages.warning.bg }};
color: {{ conf.colors.messages.warning.fg }};
font: {{ conf.fonts.messages.warning }};
border-bottom:
1px solid {{ conf.colors.messages.warning.border }};
"""
elif level == usertypes.MessageLevel.info:
qss += """
background-color: {{ conf.colors.messages.info.bg }};
color: {{ conf.colors.messages.info.fg }};
font: {{ conf.fonts.messages.info }};
border-bottom: 1px solid {{ conf.colors.messages.info.border }}
"""
else: # pragma: no cover
raise ValueError("Invalid level {!r}".format(level))
stylesheet.set_register(self, qss, update=False)
class MessageView(QWidget):
"""Widget which stacks error/warning/info messages."""
update_geometry = pyqtSignal()
def __init__(self, parent=None):
super().__init__(parent)
self._messages: MutableSequence[Message] = []
self._vbox = QVBoxLayout(self)
self._vbox.setContentsMargins(0, 0, 0, 0)
self._vbox.setSpacing(0)
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
self._clear_timer = QTimer()
self._clear_timer.timeout.connect(self.clear_messages)
config.instance.changed.connect(self._set_clear_timer_interval)
self._last_text = None
@config.change_filter('messages.timeout')
def _set_clear_timer_interval(self):
"""Configure self._clear_timer according to the config."""
interval = config.val.messages.timeout
if interval > 0:
interval *= min(5, len(self._messages))
self._clear_timer.setInterval(interval)
def _remove_message(self, widget):
"""Fully remove and destroy widget from this object."""
self._vbox.removeWidget(widget)
widget.hide()
widget.deleteLater()
@pyqtSlot()
def clear_messages(self):
"""Hide and delete all messages."""
for widget in self._messages:
self._remove_message(widget)
self._messages = []
self._last_text = None
self.hide()
self._clear_timer.stop()
@pyqtSlot(usertypes.MessageLevel, str, bool)
def show_message(self, level, text, replace=False):
"""Show the given message with the given MessageLevel."""
if text == self._last_text:
return
if replace and self._messages and self._messages[-1].replace:
self._remove_message(self._messages.pop())
widget = Message(level, text, replace=replace, parent=self)
self._vbox.addWidget(widget)
widget.show()
self._messages.append(widget)
self._last_text = text
self.show()
self.update_geometry.emit()
if config.val.messages.timeout != 0:
self._set_clear_timer_interval()
self._clear_timer.start()
def mousePressEvent(self, e):
"""Clear messages when they are clicked on."""
if e.button() in [Qt.LeftButton, Qt.MiddleButton, Qt.RightButton]:
self.clear_messages()
|
import asyncio
from contextvars import ContextVar
from datetime import datetime, timedelta
from logging import Logger
from types import ModuleType
from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Iterable, List, Optional
from homeassistant import config_entries
from homeassistant.const import ATTR_RESTORED, DEVICE_DEFAULT_NAME
from homeassistant.core import (
CALLBACK_TYPE,
ServiceCall,
callback,
split_entity_id,
valid_entity_id,
)
from homeassistant.exceptions import HomeAssistantError, PlatformNotReady
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util.async_ import run_callback_threadsafe
from .entity_registry import DISABLED_INTEGRATION
from .event import async_call_later, async_track_time_interval
if TYPE_CHECKING:
from .entity import Entity
# mypy: allow-untyped-defs
SLOW_SETUP_WARNING = 10
SLOW_SETUP_MAX_WAIT = 60
SLOW_ADD_ENTITY_MAX_WAIT = 15 # Per Entity
SLOW_ADD_MIN_TIMEOUT = 500
PLATFORM_NOT_READY_RETRIES = 10
DATA_ENTITY_PLATFORM = "entity_platform"
PLATFORM_NOT_READY_BASE_WAIT_TIME = 30 # seconds
class EntityPlatform:
"""Manage the entities for a single platform."""
def __init__(
self,
*,
hass: HomeAssistantType,
logger: Logger,
domain: str,
platform_name: str,
platform: Optional[ModuleType],
scan_interval: timedelta,
entity_namespace: Optional[str],
):
"""Initialize the entity platform."""
self.hass = hass
self.logger = logger
self.domain = domain
self.platform_name = platform_name
self.platform = platform
self.scan_interval = scan_interval
self.entity_namespace = entity_namespace
self.config_entry: Optional[config_entries.ConfigEntry] = None
self.entities: Dict[str, Entity] = {} # pylint: disable=used-before-assignment
self._tasks: List[asyncio.Future] = []
# Method to cancel the state change listener
self._async_unsub_polling: Optional[CALLBACK_TYPE] = None
# Method to cancel the retry of setup
self._async_cancel_retry_setup: Optional[CALLBACK_TYPE] = None
self._process_updates: Optional[asyncio.Lock] = None
self.parallel_updates: Optional[asyncio.Semaphore] = None
# Platform is None for the EntityComponent "catch-all" EntityPlatform
# which powers entity_component.add_entities
self.parallel_updates_created = platform is None
hass.data.setdefault(DATA_ENTITY_PLATFORM, {}).setdefault(
self.platform_name, []
).append(self)
def __repr__(self):
"""Represent an EntityPlatform."""
return f"<EntityPlatform domain={self.domain} platform_name={self.platform_name} config_entry={self.config_entry}>"
@callback
def _get_parallel_updates_semaphore(
self, entity_has_async_update: bool
) -> Optional[asyncio.Semaphore]:
"""Get or create a semaphore for parallel updates.
        Semaphore will be created on demand because we base it on whether the update method is async or not.
If parallel updates is set to 0, we skip the semaphore.
If parallel updates is set to a number, we initialize the semaphore to that number.
The default value for parallel requests is decided based on the first entity that is added to Home Assistant.
It's 0 if the entity defines the async_update method, else it's 1.
"""
if self.parallel_updates_created:
return self.parallel_updates
self.parallel_updates_created = True
parallel_updates = getattr(self.platform, "PARALLEL_UPDATES", None)
if parallel_updates is None and not entity_has_async_update:
parallel_updates = 1
if parallel_updates == 0:
parallel_updates = None
if parallel_updates is not None:
self.parallel_updates = asyncio.Semaphore(parallel_updates)
return self.parallel_updates
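    # Behaviour of the logic above, summarized (derived from the code, illustrative):
    #   PARALLEL_UPDATES = 0 on the platform             -> no semaphore (unlimited)
    #   PARALLEL_UPDATES = N (N > 0)                     -> asyncio.Semaphore(N)
    #   PARALLEL_UPDATES unset, entity has async_update  -> no semaphore
    #   PARALLEL_UPDATES unset, sync update only         -> asyncio.Semaphore(1)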
async def async_setup(self, platform_config, discovery_info=None):
"""Set up the platform from a config file."""
platform = self.platform
hass = self.hass
if not hasattr(platform, "async_setup_platform") and not hasattr(
platform, "setup_platform"
):
self.logger.error(
"The %s platform for the %s integration does not support platform setup. Please remove it from your config.",
self.platform_name,
self.domain,
)
return
@callback
def async_create_setup_task() -> Coroutine:
"""Get task to set up platform."""
if getattr(platform, "async_setup_platform", None):
return platform.async_setup_platform( # type: ignore
hass,
platform_config,
self._async_schedule_add_entities,
discovery_info,
)
# This should not be replaced with hass.async_add_job because
# we don't want to track this task in case it blocks startup.
return hass.loop.run_in_executor( # type: ignore[return-value]
None,
platform.setup_platform, # type: ignore
hass,
platform_config,
self._schedule_add_entities,
discovery_info,
)
await self._async_setup_platform(async_create_setup_task)
async def async_setup_entry(self, config_entry: config_entries.ConfigEntry) -> bool:
"""Set up the platform from a config entry."""
# Store it so that we can save config entry ID in entity registry
self.config_entry = config_entry
platform = self.platform
@callback
def async_create_setup_task():
"""Get task to set up platform."""
return platform.async_setup_entry( # type: ignore
self.hass, config_entry, self._async_schedule_add_entities
)
return await self._async_setup_platform(async_create_setup_task)
async def _async_setup_platform(
self, async_create_setup_task: Callable[[], Coroutine], tries: int = 0
) -> bool:
"""Set up a platform via config file or config entry.
async_create_setup_task creates a coroutine that sets up platform.
"""
current_platform.set(self)
logger = self.logger
hass = self.hass
full_name = f"{self.domain}.{self.platform_name}"
logger.info("Setting up %s", full_name)
warn_task = hass.loop.call_later(
SLOW_SETUP_WARNING,
logger.warning,
"Setup of %s platform %s is taking over %s seconds.",
self.domain,
self.platform_name,
SLOW_SETUP_WARNING,
)
try:
task = async_create_setup_task()
async with hass.timeout.async_timeout(SLOW_SETUP_MAX_WAIT, self.domain):
await asyncio.shield(task)
# Block till all entities are done
if self._tasks:
pending = [task for task in self._tasks if not task.done()]
self._tasks.clear()
if pending:
await asyncio.gather(*pending)
hass.config.components.add(full_name)
return True
except PlatformNotReady:
tries += 1
wait_time = min(tries, 6) * PLATFORM_NOT_READY_BASE_WAIT_TIME
logger.warning(
"Platform %s not ready yet. Retrying in %d seconds.",
self.platform_name,
wait_time,
)
async def setup_again(now):
"""Run setup again."""
self._async_cancel_retry_setup = None
await self._async_setup_platform(async_create_setup_task, tries)
self._async_cancel_retry_setup = async_call_later(
hass, wait_time, setup_again
)
return False
except asyncio.TimeoutError:
logger.error(
"Setup of platform %s is taking longer than %s seconds."
" Startup will proceed without waiting any longer.",
self.platform_name,
SLOW_SETUP_MAX_WAIT,
)
return False
except Exception: # pylint: disable=broad-except
logger.exception(
"Error while setting up %s platform for %s",
self.platform_name,
self.domain,
)
return False
finally:
warn_task.cancel()
def _schedule_add_entities(
self, new_entities: Iterable["Entity"], update_before_add: bool = False
) -> None:
"""Schedule adding entities for a single platform, synchronously."""
run_callback_threadsafe(
self.hass.loop,
self._async_schedule_add_entities,
list(new_entities),
update_before_add,
).result()
@callback
def _async_schedule_add_entities(
self, new_entities: Iterable["Entity"], update_before_add: bool = False
) -> None:
"""Schedule adding entities for a single platform async."""
self._tasks.append(
self.hass.async_create_task(
self.async_add_entities(
new_entities, update_before_add=update_before_add
),
)
)
def add_entities(
self, new_entities: Iterable["Entity"], update_before_add: bool = False
) -> None:
"""Add entities for a single platform."""
        # update_before_add=True outside of tests can deadlock, hence the warning below.
if update_before_add:
self.logger.warning(
"Call 'add_entities' with update_before_add=True "
"only inside tests or you can run into a deadlock!"
)
asyncio.run_coroutine_threadsafe(
self.async_add_entities(list(new_entities), update_before_add),
self.hass.loop,
).result()
async def async_add_entities(
self, new_entities: Iterable["Entity"], update_before_add: bool = False
) -> None:
"""Add entities for a single platform async.
This method must be run in the event loop.
"""
# handle empty list from component/platform
if not new_entities:
return
hass = self.hass
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
tasks = [
self._async_add_entity( # type: ignore
entity, update_before_add, entity_registry, device_registry
)
for entity in new_entities
]
# No entities for processing
if not tasks:
return
timeout = max(SLOW_ADD_ENTITY_MAX_WAIT * len(tasks), SLOW_ADD_MIN_TIMEOUT)
try:
async with self.hass.timeout.async_timeout(timeout, self.domain):
await asyncio.gather(*tasks)
except asyncio.TimeoutError:
self.logger.warning(
"Timed out adding entities for domain %s with platform %s after %ds",
self.domain,
self.platform_name,
timeout,
)
except Exception:
self.logger.exception(
"Error adding entities for domain %s with platform %s",
self.domain,
self.platform_name,
)
raise
if self._async_unsub_polling is not None or not any(
entity.should_poll for entity in self.entities.values()
):
return
self._async_unsub_polling = async_track_time_interval(
self.hass,
self._update_entity_states,
self.scan_interval,
)
async def _async_add_entity(
self, entity, update_before_add, entity_registry, device_registry
):
"""Add an entity to the platform."""
if entity is None:
raise ValueError("Entity cannot be None")
entity.add_to_platform_start(
self.hass,
self,
self._get_parallel_updates_semaphore(hasattr(entity, "async_update")),
)
# Update properties before we generate the entity_id
if update_before_add:
try:
await entity.async_device_update(warning=False)
except Exception: # pylint: disable=broad-except
self.logger.exception("%s: Error on device update!", self.platform_name)
entity.add_to_platform_abort()
return
requested_entity_id = None
suggested_object_id: Optional[str] = None
# Get entity_id from unique ID registration
if entity.unique_id is not None:
if entity.entity_id is not None:
requested_entity_id = entity.entity_id
suggested_object_id = split_entity_id(entity.entity_id)[1]
else:
suggested_object_id = entity.name
if self.entity_namespace is not None:
suggested_object_id = f"{self.entity_namespace} {suggested_object_id}"
if self.config_entry is not None:
config_entry_id: Optional[str] = self.config_entry.entry_id
else:
config_entry_id = None
device_info = entity.device_info
device_id = None
if config_entry_id is not None and device_info is not None:
processed_dev_info = {"config_entry_id": config_entry_id}
for key in (
"connections",
"identifiers",
"manufacturer",
"model",
"name",
"default_manufacturer",
"default_model",
"default_name",
"sw_version",
"entry_type",
"via_device",
):
if key in device_info:
processed_dev_info[key] = device_info[key]
device = device_registry.async_get_or_create(**processed_dev_info)
if device:
device_id = device.id
disabled_by: Optional[str] = None
if not entity.entity_registry_enabled_default:
disabled_by = DISABLED_INTEGRATION
entry = entity_registry.async_get_or_create(
self.domain,
self.platform_name,
entity.unique_id,
suggested_object_id=suggested_object_id,
config_entry=self.config_entry,
device_id=device_id,
known_object_ids=self.entities.keys(),
disabled_by=disabled_by,
capabilities=entity.capability_attributes,
supported_features=entity.supported_features,
device_class=entity.device_class,
unit_of_measurement=entity.unit_of_measurement,
original_name=entity.name,
original_icon=entity.icon,
)
entity.registry_entry = entry
entity.entity_id = entry.entity_id
if entry.disabled:
self.logger.info(
"Not adding entity %s because it's disabled",
entry.name
or entity.name
or f'"{self.platform_name} {entity.unique_id}"',
)
entity.add_to_platform_abort()
return
# We won't generate an entity ID if the platform has already set one
# We will however make sure that platform cannot pick a registered ID
elif entity.entity_id is not None and entity_registry.async_is_registered(
entity.entity_id
):
# If entity already registered, convert entity id to suggestion
suggested_object_id = split_entity_id(entity.entity_id)[1]
entity.entity_id = None
# Generate entity ID
if entity.entity_id is None:
suggested_object_id = (
suggested_object_id or entity.name or DEVICE_DEFAULT_NAME
)
if self.entity_namespace is not None:
suggested_object_id = f"{self.entity_namespace} {suggested_object_id}"
entity.entity_id = entity_registry.async_generate_entity_id(
self.domain, suggested_object_id, self.entities.keys()
)
# Make sure it is valid in case an entity set the value themselves
if not valid_entity_id(entity.entity_id):
entity.add_to_platform_abort()
raise HomeAssistantError(f"Invalid entity id: {entity.entity_id}")
already_exists = entity.entity_id in self.entities
restored = False
if not already_exists and not self.hass.states.async_available(
entity.entity_id
):
existing = self.hass.states.get(entity.entity_id)
if existing is not None and ATTR_RESTORED in existing.attributes:
restored = True
else:
already_exists = True
if already_exists:
if entity.unique_id is not None:
msg = f"Platform {self.platform_name} does not generate unique IDs. "
if requested_entity_id:
msg += f"ID {entity.unique_id} is already used by {entity.entity_id} - ignoring {requested_entity_id}"
else:
msg += f"ID {entity.unique_id} already exists - ignoring {entity.entity_id}"
else:
msg = f"Entity id already exists - ignoring: {entity.entity_id}"
self.logger.error(msg)
entity.add_to_platform_abort()
return
entity_id = entity.entity_id
self.entities[entity_id] = entity
if not restored:
# Reserve the state in the state machine
# because as soon as we return control to the event
# loop below, another entity could be added
# with the same id before `entity.add_to_platform_finish()`
# has a chance to finish.
self.hass.states.async_reserve(entity.entity_id)
entity.async_on_remove(lambda: self.entities.pop(entity_id))
await entity.add_to_platform_finish()
async def async_reset(self) -> None:
"""Remove all entities and reset data.
This method must be run in the event loop.
"""
if self._async_cancel_retry_setup is not None:
self._async_cancel_retry_setup()
self._async_cancel_retry_setup = None
if not self.entities:
return
tasks = [self.async_remove_entity(entity_id) for entity_id in self.entities]
await asyncio.gather(*tasks)
if self._async_unsub_polling is not None:
self._async_unsub_polling()
self._async_unsub_polling = None
async def async_destroy(self) -> None:
"""Destroy an entity platform.
Call before discarding the object.
"""
await self.async_reset()
self.hass.data[DATA_ENTITY_PLATFORM][self.platform_name].remove(self)
async def async_remove_entity(self, entity_id: str) -> None:
"""Remove entity id from platform."""
await self.entities[entity_id].async_remove()
# Clean up polling job if no longer needed
if self._async_unsub_polling is not None and not any(
entity.should_poll for entity in self.entities.values()
):
self._async_unsub_polling()
self._async_unsub_polling = None
async def async_extract_from_service(
self, service_call: ServiceCall, expand_group: bool = True
) -> List["Entity"]:
"""Extract all known and available entities from a service call.
Will return an empty list if entities specified but unknown.
This method must be run in the event loop.
"""
return await service.async_extract_entities(
self.hass, self.entities.values(), service_call, expand_group
)
@callback
def async_register_entity_service(self, name, schema, func, required_features=None):
"""Register an entity service.
Services will automatically be shared by all platforms of the same domain.
"""
if self.hass.services.has_service(self.platform_name, name):
return
if isinstance(schema, dict):
schema = cv.make_entity_service_schema(schema)
async def handle_service(call: ServiceCall) -> None:
"""Handle the service."""
await service.entity_service_call(
self.hass,
[
plf
for plf in self.hass.data[DATA_ENTITY_PLATFORM][self.platform_name]
if plf.domain == self.domain
],
func,
call,
required_features,
)
self.hass.services.async_register(
self.platform_name, name, handle_service, schema
)
async def _update_entity_states(self, now: datetime) -> None:
"""Update the states of all the polling entities.
To protect from flooding the executor, we will update async entities
        in parallel and other entities sequentially.
This method must be run in the event loop.
"""
if self._process_updates is None:
self._process_updates = asyncio.Lock()
if self._process_updates.locked():
self.logger.warning(
"Updating %s %s took longer than the scheduled update interval %s",
self.platform_name,
self.domain,
self.scan_interval,
)
return
async with self._process_updates:
tasks = []
for entity in self.entities.values():
if not entity.should_poll:
continue
tasks.append(entity.async_update_ha_state(True))
if tasks:
await asyncio.gather(*tasks)
current_platform: ContextVar[Optional[EntityPlatform]] = ContextVar(
"current_platform", default=None
)
@callback
def async_get_platforms(
hass: HomeAssistantType, integration_name: str
) -> List[EntityPlatform]:
"""Find existing platforms."""
if (
DATA_ENTITY_PLATFORM not in hass.data
or integration_name not in hass.data[DATA_ENTITY_PLATFORM]
):
return []
platforms: List[EntityPlatform] = hass.data[DATA_ENTITY_PLATFORM][integration_name]
return platforms
|
from typing import List, Optional
from homeassistant.components.climate import (
ATTR_TEMPERATURE,
HVAC_MODE_HEAT,
SUPPORT_TARGET_TEMPERATURE,
TEMP_CELSIUS,
ClimateEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PRECISION_HALVES, PRECISION_TENTHS
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN
from .devolo_multi_level_switch import DevoloMultiLevelSwitchDeviceEntity
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Get all cover devices and setup them via config entry."""
entities = []
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
for device in gateway.multi_level_switch_devices:
for multi_level_switch in device.multi_level_switch_property:
if device.device_model_uid in [
"devolo.model.Thermostat:Valve",
"devolo.model.Room:Thermostat",
"devolo.model.Eurotronic:Spirit:Device",
]:
entities.append(
DevoloClimateDeviceEntity(
homecontrol=gateway,
device_instance=device,
element_uid=multi_level_switch,
)
)
async_add_entities(entities, False)
class DevoloClimateDeviceEntity(DevoloMultiLevelSwitchDeviceEntity, ClimateEntity):
"""Representation of a climate/thermostat device within devolo Home Control."""
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
if hasattr(self._device_instance, "multi_level_sensor_property"):
return next(
(
multi_level_sensor.value
for multi_level_sensor in self._device_instance.multi_level_sensor_property.values()
if multi_level_sensor.sensor_type == "temperature"
),
None,
)
return None
@property
def target_temperature(self) -> Optional[float]:
"""Return the target temperature."""
return self._value
@property
def target_temperature_step(self) -> float:
"""Return the precision of the target temperature."""
return PRECISION_HALVES
@property
def hvac_mode(self) -> str:
"""Return the supported HVAC mode."""
return HVAC_MODE_HEAT
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes."""
return [HVAC_MODE_HEAT]
@property
def min_temp(self) -> float:
"""Return the minimum set temperature value."""
return self._multi_level_switch_property.min
@property
def max_temp(self) -> float:
"""Return the maximum set temperature value."""
return self._multi_level_switch_property.max
@property
def precision(self) -> float:
"""Return the precision of the set temperature."""
return PRECISION_TENTHS
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_TARGET_TEMPERATURE
@property
def temperature_unit(self) -> str:
"""Return the supported unit of temperature."""
return TEMP_CELSIUS
def set_hvac_mode(self, hvac_mode: str) -> None:
"""Do nothing as devolo devices do not support changing the hvac mode."""
def set_temperature(self, **kwargs):
"""Set new target temperature."""
self._multi_level_switch_property.set(kwargs[ATTR_TEMPERATURE])
|
from django.core import checks
from django.db import models
from django.utils.translation import gettext_lazy as _
from shop import deferred
from shop.models.order import BaseOrder, BaseOrderItem, OrderItemModel
from shop.modifiers.pool import cart_modifiers_pool
class BaseDelivery(models.Model, metaclass=deferred.ForeignKeyBuilder):
"""
    Shipping provider to keep track of each delivery.
"""
order = deferred.ForeignKey(
BaseOrder,
on_delete=models.CASCADE,
)
shipping_id = models.CharField(
_("Shipping ID"),
max_length=255,
null=True,
blank=True,
help_text=_("The transaction processor's reference"),
)
fulfilled_at = models.DateTimeField(
_("Fulfilled at"),
null=True,
blank=True,
help_text=_("Timestamp of delivery fulfillment"),
)
shipped_at = models.DateTimeField(
_("Shipped at"),
null=True,
blank=True,
help_text=_("Timestamp of delivery shipment"),
)
shipping_method = models.CharField(
_("Shipping method"),
max_length=50,
help_text=_("The shipping backend used to deliver items of this order"),
)
class Meta:
abstract = True
unique_together = ['shipping_method', 'shipping_id']
get_latest_by = 'shipped_at'
def __str__(self):
return _("Delivery ID: {}").format(self.id)
@classmethod
def check(cls, **kwargs):
errors = super().check(**kwargs)
for field in OrderItemModel._meta.fields:
if field.attname == 'canceled' and field.get_internal_type() == 'BooleanField':
break
else:
msg = "Class `{}` must implement a `BooleanField` named `canceled`, if used in combination with a Delivery model."
errors.append(checks.Error(msg.format(OrderItemModel.__name__)))
return errors
def clean(self):
if self.order._fsm_requested_transition == ('status', 'ship_goods') and not self.shipped_at:
shipping_modifier = cart_modifiers_pool.get_active_shipping_modifier(self.shipping_method)
shipping_modifier.ship_the_goods(self)
def get_number(self):
"""
Hook to get the delivery number.
A class inheriting from Order may transform this into a string which is better readable.
"""
if self.order.allow_partial_delivery:
for part, delivery in enumerate(self.order.delivery_set.all(), 1):
if delivery.pk == self.pk:
return "{} / {}".format(self.order.get_number(), part)
return self.order.get_number()
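    # Illustrative example (assumed order number): with partial delivery allowed,
    # an order numbered "2021-00017" shipped in two deliveries yields
    # "2021-00017 / 1" and "2021-00017 / 2"; otherwise the plain order number is returned.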
DeliveryModel = deferred.MaterializedModel(BaseDelivery)
class BaseDeliveryItem(models.Model, metaclass=deferred.ForeignKeyBuilder):
"""
    Abstract base class to keep track of the delivered quantity for each ordered item. Since the
quantity can be any numerical value, it has to be defined by the class implementing this model.
"""
delivery = deferred.ForeignKey(
BaseDelivery,
verbose_name=_("Delivery"),
on_delete=models.CASCADE,
related_name='items',
help_text=_("Refer to the shipping provider used to ship this item"),
)
item = deferred.ForeignKey(
BaseOrderItem,
on_delete=models.CASCADE,
related_name='deliver_item',
verbose_name=_("Ordered item"),
)
class Meta:
abstract = True
verbose_name = _("Deliver item")
verbose_name_plural = _("Deliver items")
@classmethod
def check(cls, **kwargs):
errors = super().check(**kwargs)
for order_field in OrderItemModel._meta.fields:
if order_field.attname == 'quantity':
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(OrderItemModel.__name__)))
        for deliver_field in cls._meta.fields:
if deliver_field.attname == 'quantity':
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(cls.__name__)))
if order_field.get_internal_type() != deliver_field.get_internal_type():
msg = "Field `{}.quantity` must be of one same type `{}.quantity`."
errors.append(checks.Error(msg.format(cls.__name__, OrderItemModel.__name__)))
return errors
DeliveryItemModel = deferred.MaterializedModel(BaseDeliveryItem)
|
import contextlib
import fnmatch
import glob
import inspect
import os
import platform
import sys
import textwrap
import warnings
import zipfile
import pytest
@contextlib.contextmanager
def ignore_warnings():
"""Context manager to ignore warning within the with statement."""
with warnings.catch_warnings():
warnings.simplefilter("ignore")
yield
# Functions named do_* are executable from the command line: do_blah is run
# by "python igor.py blah".
def do_show_env():
"""Show the environment variables."""
print("Environment:")
for env in sorted(os.environ):
print(" %s = %r" % (env, os.environ[env]))
def do_remove_extension():
"""Remove the compiled C extension, no matter what its name."""
so_patterns = """
tracer.so
tracer.*.so
tracer.pyd
tracer.*.pyd
""".split()
for pattern in so_patterns:
pattern = os.path.join("coverage", pattern)
for filename in glob.glob(pattern):
try:
os.remove(filename)
except OSError:
pass
def label_for_tracer(tracer):
"""Get the label for these tests."""
if tracer == "py":
label = "with Python tracer"
else:
label = "with C tracer"
return label
def should_skip(tracer):
"""Is there a reason to skip these tests?"""
if tracer == "py":
# $set_env.py: COVERAGE_NO_PYTRACER - Don't run the tests under the Python tracer.
skipper = os.environ.get("COVERAGE_NO_PYTRACER")
else:
# $set_env.py: COVERAGE_NO_CTRACER - Don't run the tests under the C tracer.
skipper = os.environ.get("COVERAGE_NO_CTRACER")
if skipper:
msg = "Skipping tests " + label_for_tracer(tracer)
if len(skipper) > 1:
msg += ": " + skipper
else:
msg = ""
return msg
def make_env_id(tracer):
"""An environment id that will keep all the test runs distinct."""
impl = platform.python_implementation().lower()
version = "%s%s" % sys.version_info[:2]
if '__pypy__' in sys.builtin_module_names:
version += "_%s%s" % sys.pypy_version_info[:2]
env_id = "%s%s_%s" % (impl, version, tracer)
return env_id
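# Illustrative examples (not from the original source): CPython 3.8 with the C
# tracer produces an id like "cpython38_c", while PyPy also appends its own
# version, e.g. "pypy37_73_py".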
def run_tests(tracer, *runner_args):
"""The actual running of tests."""
if 'COVERAGE_TESTING' not in os.environ:
os.environ['COVERAGE_TESTING'] = "True"
# $set_env.py: COVERAGE_ENV_ID - Use environment-specific test directories.
if 'COVERAGE_ENV_ID' in os.environ:
os.environ['COVERAGE_ENV_ID'] = make_env_id(tracer)
print_banner(label_for_tracer(tracer))
return pytest.main(list(runner_args))
def run_tests_with_coverage(tracer, *runner_args):
"""Run tests, but with coverage."""
# Need to define this early enough that the first import of env.py sees it.
os.environ['COVERAGE_TESTING'] = "True"
os.environ['COVERAGE_PROCESS_START'] = os.path.abspath('metacov.ini')
os.environ['COVERAGE_HOME'] = os.getcwd()
# Create the .pth file that will let us measure coverage in sub-processes.
# The .pth file seems to have to be alphabetically after easy-install.pth
# or the sys.path entries aren't created right?
# There's an entry in "make clean" to get rid of this file.
pth_dir = os.path.dirname(pytest.__file__)
pth_path = os.path.join(pth_dir, "zzz_metacov.pth")
with open(pth_path, "w") as pth_file:
pth_file.write("import coverage; coverage.process_startup()\n")
suffix = "%s_%s" % (make_env_id(tracer), platform.platform())
os.environ['COVERAGE_METAFILE'] = os.path.abspath(".metacov."+suffix)
import coverage
cov = coverage.Coverage(config_file="metacov.ini")
cov._warn_unimported_source = False
cov._warn_preimported_source = False
cov.start()
try:
# Re-import coverage to get it coverage tested! I don't understand all
# the mechanics here, but if I don't carry over the imported modules
# (in covmods), then things go haywire (os == None, eventually).
covmods = {}
covdir = os.path.split(coverage.__file__)[0]
# We have to make a list since we'll be deleting in the loop.
modules = list(sys.modules.items())
for name, mod in modules:
if name.startswith('coverage'):
if getattr(mod, '__file__', "??").startswith(covdir):
covmods[name] = mod
del sys.modules[name]
import coverage # pylint: disable=reimported
sys.modules.update(covmods)
# Run tests, with the arguments from our command line.
status = run_tests(tracer, *runner_args)
finally:
cov.stop()
os.remove(pth_path)
cov.combine()
cov.save()
return status
def do_combine_html():
"""Combine data from a meta-coverage run, and make the HTML and XML reports."""
import coverage
os.environ['COVERAGE_HOME'] = os.getcwd()
os.environ['COVERAGE_METAFILE'] = os.path.abspath(".metacov")
cov = coverage.Coverage(config_file="metacov.ini")
cov.load()
cov.combine()
cov.save()
show_contexts = bool(os.environ.get('COVERAGE_CONTEXT'))
cov.html_report(show_contexts=show_contexts)
cov.xml_report()
def do_test_with_tracer(tracer, *runner_args):
"""Run tests with a particular tracer."""
# If we should skip these tests, skip them.
skip_msg = should_skip(tracer)
if skip_msg:
print(skip_msg)
return None
os.environ["COVERAGE_TEST_TRACER"] = tracer
if os.environ.get("COVERAGE_COVERAGE", "no") == "yes":
return run_tests_with_coverage(tracer, *runner_args)
else:
return run_tests(tracer, *runner_args)
def do_zip_mods():
"""Build the zipmods.zip file."""
zf = zipfile.ZipFile("tests/zipmods.zip", "w")
# Take one file from disk.
zf.write("tests/covmodzip1.py", "covmodzip1.py")
# The others will be various encodings.
source = textwrap.dedent(u"""\
# coding: {encoding}
text = u"{text}"
ords = {ords}
assert [ord(c) for c in text] == ords
print(u"All OK with {encoding}")
""")
# These encodings should match the list in tests/test_python.py
details = [
(u'utf8', u'ⓗⓔⓛⓛⓞ, ⓦⓞⓡⓛⓓ'),
(u'gb2312', u'你好,世界'),
(u'hebrew', u'שלום, עולם'),
(u'shift_jis', u'こんにちは世界'),
(u'cp1252', u'“hi”'),
]
for encoding, text in details:
filename = 'encoded_{}.py'.format(encoding)
ords = [ord(c) for c in text]
source_text = source.format(encoding=encoding, text=text, ords=ords)
zf.writestr(filename, source_text.encode(encoding))
zf.close()
zf = zipfile.ZipFile("tests/covmain.zip", "w")
zf.write("coverage/__main__.py", "__main__.py")
zf.close()
def do_install_egg():
"""Install the egg1 egg for tests."""
# I am pretty certain there are easier ways to install eggs...
cur_dir = os.getcwd()
os.chdir("tests/eggsrc")
with ignore_warnings():
import distutils.core
distutils.core.run_setup("setup.py", ["--quiet", "bdist_egg"])
egg = glob.glob("dist/*.egg")[0]
distutils.core.run_setup(
"setup.py", ["--quiet", "easy_install", "--no-deps", "--zip-ok", egg]
)
os.chdir(cur_dir)
def do_check_eol():
"""Check files for incorrect newlines and trailing whitespace."""
ignore_dirs = [
'.svn', '.hg', '.git',
'.tox*',
'*.egg-info',
'_build',
'_spell',
'tmp',
'help',
]
checked = set()
def check_file(fname, crlf=True, trail_white=True):
"""Check a single file for whitespace abuse."""
fname = os.path.relpath(fname)
if fname in checked:
return
checked.add(fname)
line = None
with open(fname, "rb") as f:
for n, line in enumerate(f, start=1):
if crlf:
if b"\r" in line:
print("%s@%d: CR found" % (fname, n))
return
if trail_white:
line = line[:-1]
if not crlf:
line = line.rstrip(b'\r')
if line.rstrip() != line:
print("%s@%d: trailing whitespace found" % (fname, n))
return
if line is not None and not line.strip():
print("%s: final blank line" % (fname,))
def check_files(root, patterns, **kwargs):
"""Check a number of files for whitespace abuse."""
for where, dirs, files in os.walk(root):
for f in files:
fname = os.path.join(where, f)
for p in patterns:
if fnmatch.fnmatch(fname, p):
check_file(fname, **kwargs)
break
for ignore_dir in ignore_dirs:
ignored = []
for dir_name in dirs:
if fnmatch.fnmatch(dir_name, ignore_dir):
ignored.append(dir_name)
for dir_name in ignored:
dirs.remove(dir_name)
check_files("coverage", ["*.py"])
check_files("coverage/ctracer", ["*.c", "*.h"])
check_files("coverage/htmlfiles", ["*.html", "*.scss", "*.css", "*.js"])
check_files("tests", ["*.py"])
check_files("tests", ["*,cover"], trail_white=False)
check_files("tests/js", ["*.js", "*.html"])
check_file("setup.py")
check_file("igor.py")
check_file("Makefile")
check_files(".", ["*.rst", "*.txt"])
check_files(".", ["*.pip"])
check_files(".github", ["*"])
check_files("ci", ["*"])
def print_banner(label):
"""Print the version of Python."""
try:
impl = platform.python_implementation()
except AttributeError:
impl = "Python"
version = platform.python_version()
if '__pypy__' in sys.builtin_module_names:
version += " (pypy %s)" % ".".join(str(v) for v in sys.pypy_version_info)
try:
which_python = os.path.relpath(sys.executable)
except ValueError:
# On Windows having a python executable on a different drive
# than the sources cannot be relative.
which_python = sys.executable
print('=== %s %s %s (%s) ===' % (impl, version, label, which_python))
sys.stdout.flush()
def do_help():
"""List the available commands"""
items = list(globals().items())
items.sort()
for name, value in items:
if name.startswith('do_'):
print("%-20s%s" % (name[3:], value.__doc__))
def analyze_args(function):
"""What kind of args does `function` expect?
Returns:
        star, num_args:
star(boolean): Does `function` accept *args?
num_args(int): How many positional arguments does `function` have?
"""
try:
getargspec = inspect.getfullargspec
except AttributeError:
getargspec = inspect.getargspec
with ignore_warnings():
# DeprecationWarning: Use inspect.signature() instead of inspect.getfullargspec()
argspec = getargspec(function)
return bool(argspec[1]), len(argspec[0])
def main(args):
"""Main command-line execution for igor.
Verbs are taken from the command line, and extra words taken as directed
by the arguments needed by the handler.
"""
while args:
verb = args.pop(0)
handler = globals().get('do_'+verb)
if handler is None:
print("*** No handler for %r" % verb)
return 1
star, num_args = analyze_args(handler)
if star:
# Handler has *args, give it all the rest of the command line.
handler_args = args
args = []
else:
# Handler has specific arguments, give it only what it needs.
handler_args = args[:num_args]
args = args[num_args:]
ret = handler(*handler_args)
# If a handler returns a failure-like value, stop.
if ret:
return ret
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
import collections
import math
from functools import reduce
from itertools import chain, count, islice, takewhile
from multiprocessing import Pool, cpu_count
import dill as serializer
PROTOCOL = serializer.HIGHEST_PROTOCOL
CPU_COUNT = cpu_count()
def is_primitive(val):
"""
Checks if the passed value is a primitive type.
>>> is_primitive(1)
True
>>> is_primitive("abc")
True
>>> is_primitive(True)
True
>>> is_primitive({})
False
>>> is_primitive([])
False
    >>> is_primitive(set([]))
    False
:param val: value to check
:return: True if value is a primitive, else False
"""
return isinstance(val, (str, bool, float, complex, bytes, int))
def is_namedtuple(val):
"""
Use Duck Typing to check if val is a named tuple. Checks that val is of type tuple and contains
the attribute _fields which is defined for named tuples.
:param val: value to check type of
:return: True if val is a namedtuple
"""
val_type = type(val)
bases = val_type.__bases__
if len(bases) != 1 or bases[0] != tuple:
return False
    fields = getattr(val_type, "_fields", None)
    if not isinstance(fields, tuple):
        return False
    return all(isinstance(n, str) for n in fields)
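# Doctest-style illustration (assumed, mirrors the docstring above):
#   >>> from collections import namedtuple
#   >>> Point = namedtuple("Point", "x y")
#   >>> is_namedtuple(Point(1, 2))
#   True
#   >>> is_namedtuple((1, 2))
#   False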
def identity(arg):
"""
Function which returns the argument. Used as a default lambda function.
>>> obj = object()
>>> obj is identity(obj)
True
:param arg: object to take identity of
:return: return arg
"""
return arg
def is_iterable(val):
"""
    Check if val is not a list, but is a collections.abc.Iterable type. This is used to determine
    when list() should be called on val.
>>> l = [1, 2]
>>> is_iterable(l)
False
>>> is_iterable(iter(l))
True
:param val: value to check
    :return: True if it is not a list, but is a collections.abc.Iterable
"""
if isinstance(val, list):
return False
return isinstance(val, collections.abc.Iterable)
def is_tabulatable(val):
if is_primitive(val):
return False
if is_iterable(val) or is_namedtuple(val) or isinstance(val, list):
return True
return False
def split_every(parts, iterable):
"""
Split an iterable into parts of length parts
>>> l = iter([1, 2, 3, 4])
    >>> list(split_every(2, l))
[[1, 2], [3, 4]]
:param iterable: iterable to split
:param parts: number of chunks
:return: return the iterable split in parts
"""
return takewhile(bool, (list(islice(iterable, parts)) for _ in count()))
def unpack(packed):
"""
Unpack the function and args then apply the function to the arguments and return result
:param packed: input packed tuple of (func, args)
:return: result of applying packed function on packed args
"""
func, args = serializer.loads(packed)
result = func(*args)
if isinstance(result, collections.abc.Iterable):
return list(result)
return None
def pack(func, args):
"""
Pack a function and the args it should be applied to
:param func: Function to apply
:param args: Args to evaluate with
:return: Packed (func, args) tuple
"""
return serializer.dumps((func, args), PROTOCOL)
def parallelize(func, result, processes=None, partition_size=None):
"""
Creates an iterable which is lazily computed in parallel from applying func on result
:param func: Function to apply
:param result: Data to apply to
:param processes: Number of processes to use in parallel
:param partition_size: Size of partitions for each parallel process
:return: Iterable of applying func on result
"""
parallel_iter = lazy_parallelize(
func, result, processes=processes, partition_size=partition_size
)
return chain.from_iterable(parallel_iter)
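# Minimal usage sketch (illustrative): the supplied function receives a whole
# partition (a list) at a time, so it should map over its chunk. On spawn-based
# platforms this must run under an `if __name__ == "__main__":` guard.
#   list(parallelize(lambda chunk: [x * 2 for x in chunk], range(10)))
#   -> [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]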
def lazy_parallelize(func, result, processes=None, partition_size=None):
"""
Lazily computes an iterable in parallel, and returns them in pool chunks
:param func: Function to apply
:param result: Data to apply to
:param processes: Number of processes to use in parallel
:param partition_size: Size of partitions for each parallel process
    :return: Iterable of chunks where each chunk has func applied to it
"""
if processes is None or processes < 1:
processes = CPU_COUNT
else:
processes = min(processes, CPU_COUNT)
partition_size = partition_size or compute_partition_size(result, processes)
pool = Pool(processes=processes)
partitions = split_every(partition_size, iter(result))
packed_partitions = (pack(func, (partition,)) for partition in partitions)
for pool_result in pool.imap(unpack, packed_partitions):
yield pool_result
pool.terminate()
def compute_partition_size(result, processes):
"""
Attempts to compute the partition size to evenly distribute work across processes. Defaults to
1 if the length of result cannot be determined.
:param result: Result to compute on
:param processes: Number of processes to use
:return: Best partition size
"""
try:
return max(math.ceil(len(result) / processes), 1)
except TypeError:
return 1
def compose(*functions):
"""
Compose all the function arguments together
:param functions: Functions to compose
:return: Single composed function
"""
# pylint: disable=undefined-variable
return reduce(lambda f, g: lambda x: f(g(x)), functions, lambda x: x)
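# Doctest-style illustration (assumed): functions are applied right-to-left.
#   >>> compose(str, abs)(-5)
#   '5'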
|
import os
import random
import yaml
def _get_smartstack_proxy_ports_from_file(root, file):
"""Given a root and file (as from os.walk), attempt to return the highest
smartstack proxy port number (int) from that file. Returns 0 if there is no
smartstack proxy_port.
"""
ports = set()
with open(os.path.join(root, file)) as f:
data = yaml.safe_load(f)
if file.endswith("service.yaml") and "smartstack" in data:
# Specifying this in service.yaml is old and deprecated and doesn't
# support multiple namespaces.
ports = {int(data["smartstack"].get("proxy_port", 0))}
elif file.endswith("smartstack.yaml"):
for namespace in data.keys():
ports.add(data[namespace].get("proxy_port", 0))
return ports
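# Illustrative example (assumed file contents): a smartstack.yaml such as
#   main:
#     proxy_port: 20173
#   canary:
#     proxy_port: 20174
# would yield {20173, 20174}; a namespace without a proxy_port contributes 0.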
def read_etc_services():
with open("/etc/services") as fd:
return fd.readlines()
def get_inuse_ports_from_etc_services():
ports = set()
for line in read_etc_services():
if line.startswith("#"):
continue
try:
p = line.split()[1]
port = int(p.split("/")[0])
ports.add(port)
except Exception:
pass
return ports
def suggest_smartstack_proxy_port(
yelpsoa_config_root, range_min=19000, range_max=21000
):
"""Pick a random available port in the 19000-21000 block"""
available_proxy_ports = set(range(range_min, range_max + 1))
for root, dirs, files in os.walk(yelpsoa_config_root):
for f in files:
if f.endswith("smartstack.yaml"):
try:
used_ports = _get_smartstack_proxy_ports_from_file(root, f)
for used_port in used_ports:
available_proxy_ports.discard(used_port)
except Exception:
pass
available_proxy_ports.difference_update(get_inuse_ports_from_etc_services())
try:
return random.choice(list(available_proxy_ports))
except IndexError:
raise Exception(
f"There are no more ports available in the range [{range_min}, {range_max}]"
)
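# Minimal usage sketch (illustrative; the path is hypothetical):
#   port = suggest_smartstack_proxy_port("/nail/etc/services")
# picks a random port in [19000, 21000] that is not already claimed by any
# smartstack.yaml under that tree nor listed in /etc/services.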
# vim: expandtab tabstop=4 sts=4 shiftwidth=4:
|
import contextlib
import logging
import os
import sys
import threading
LOGGER_NAME = "radicale"
LOGGER_FORMAT = "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s"
DATE_FORMAT = "%Y-%m-%d %H:%M:%S %z"
logger = logging.getLogger(LOGGER_NAME)
class RemoveTracebackFilter(logging.Filter):
def filter(self, record):
record.exc_info = None
return True
REMOVE_TRACEBACK_FILTER = RemoveTracebackFilter()
class IdentLogRecordFactory:
"""LogRecordFactory that adds ``ident`` attribute."""
def __init__(self, upstream_factory):
self.upstream_factory = upstream_factory
def __call__(self, *args, **kwargs):
record = self.upstream_factory(*args, **kwargs)
ident = "%d" % os.getpid()
main_thread = threading.main_thread()
current_thread = threading.current_thread()
if current_thread.name and main_thread != current_thread:
ident += "/%s" % current_thread.name
record.ident = ident
return record
class ThreadedStreamHandler(logging.Handler):
"""Sends logging output to the stream registered for the current thread or
``sys.stderr`` when no stream was registered."""
terminator = "\n"
def __init__(self):
super().__init__()
self._streams = {}
def emit(self, record):
try:
stream = self._streams.get(threading.get_ident(), sys.stderr)
msg = self.format(record)
stream.write(msg)
stream.write(self.terminator)
if hasattr(stream, "flush"):
stream.flush()
except Exception:
self.handleError(record)
@contextlib.contextmanager
def register_stream(self, stream):
"""Register stream for logging output of the current thread."""
key = threading.get_ident()
self._streams[key] = stream
try:
yield
finally:
del self._streams[key]
@contextlib.contextmanager
def register_stream(stream):
"""Register stream for logging output of the current thread."""
yield
def setup():
"""Set global logging up."""
global register_stream
handler = ThreadedStreamHandler()
logging.basicConfig(format=LOGGER_FORMAT, datefmt=DATE_FORMAT,
handlers=[handler])
register_stream = handler.register_stream
log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
logging.setLogRecordFactory(log_record_factory)
set_level(logging.WARNING)
def set_level(level):
"""Set logging level for global logger."""
if isinstance(level, str):
level = getattr(logging, level.upper())
logger.setLevel(level)
if level == logging.DEBUG:
logger.removeFilter(REMOVE_TRACEBACK_FILTER)
else:
logger.addFilter(REMOVE_TRACEBACK_FILTER)
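# Minimal usage sketch (illustrative): call setup() once at startup, then adjust
# verbosity from the configured logging level.
#   setup()
#   set_level("debug")  # or logging.DEBUG; keeps tracebacks in the log output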
|
from flask import Blueprint, Flask, jsonify
from flasgger import Swagger
from flasgger.utils import swag_from
app = Flask(__name__)
example_blueprint = Blueprint("example_blueprint", __name__)
@example_blueprint.route('/usernames/<username>', methods=['GET', 'POST'])
@swag_from('username_specs.yml', methods=['GET'])
@swag_from('username_specs.yml', methods=['POST'])
def usernames(username):
return jsonify({'username': username})
@example_blueprint.route('/usernames2/<username>', methods=['GET', 'POST'])
def usernames2(username):
"""
This is the summary defined in yaml file
First line is the summary
    All following lines until the hyphens are added to the description.
    The format of the first lines before the 3 hyphens does not need to be
    YAML compliant, but everything below the 3 hyphens should be.
---
tags:
- users
parameters:
- in: path
name: username
type: string
required: true
responses:
200:
description: A single user item
schema:
id: rec_username
properties:
username:
type: string
description: The name of the user
default: 'steve-harris'
"""
return jsonify({'username': username})
@example_blueprint.route('/users', endpoint='user-without-id', methods=['GET'])
@example_blueprint.route('/users/<user_id>', endpoint='user-with-id', methods=['GET'])
@swag_from('user_with_id_specs.yml', endpoint='example_blueprint.user-with-id', methods=['GET'])
@swag_from('user_without_id_specs.yml', endpoint='example_blueprint.user-without-id', methods=['GET'])
def users(user_id=None):
if user_id:
return jsonify({'user_id': user_id})
else:
return jsonify([])
app.register_blueprint(example_blueprint)
swag = Swagger(app)
if __name__ == "__main__":
app.run(debug=True)
|
import re
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import TargetCheckParametrized
from weblate.checks.parser import multi_value_flag, single_value_flag
def parse_regex(val):
if isinstance(val, str):
return re.compile(val)
return val
class PlaceholderCheck(TargetCheckParametrized):
check_id = "placeholders"
default_disabled = True
name = _("Placeholders")
description = _("Translation is missing some placeholders")
@property
def param_type(self):
return multi_value_flag(lambda x: x)
def get_value(self, unit):
return re.compile(
"|".join(
re.escape(param) if isinstance(param, str) else param.pattern
for param in super().get_value(unit)
)
)
def check_target_params(self, sources, targets, unit, value):
expected = set(value.findall(unit.source_string))
missing = set()
extra = set()
for target in targets:
found = set(value.findall(target))
missing.update(expected - found)
extra.update(found - expected)
if missing or extra:
return {"missing": missing, "extra": extra}
return False
def check_highlight(self, source, unit):
if self.should_skip(unit):
return []
ret = []
regexp = self.get_value(unit)
for match in regexp.finditer(source):
ret.append((match.start(), match.end(), match.group()))
return ret
def get_description(self, check_obj):
unit = check_obj.unit
result = self.check_target_unit(
unit.get_source_plurals(), unit.get_target_plurals(), unit
)
if not result:
return super().get_description(check_obj)
errors = []
if result["missing"]:
errors.append(
gettext("Following format strings are missing: %s")
% ", ".join(sorted(result["missing"]))
)
if result["extra"]:
errors.append(
gettext("Following format strings are extra: %s")
% ", ".join(sorted(result["extra"]))
)
return mark_safe("<br />".join(escape(error) for error in errors))
class RegexCheck(TargetCheckParametrized):
check_id = "regex"
default_disabled = True
name = _("Regular expression")
description = _("Translation does not match regular expression:")
@property
def param_type(self):
return single_value_flag(parse_regex)
def check_target_params(self, sources, targets, unit, value):
return any(not value.findall(target) for target in targets)
def should_skip(self, unit):
if super().should_skip(unit):
return True
return not self.get_value(unit).pattern
def check_highlight(self, source, unit):
if self.should_skip(unit):
return []
ret = []
regex = self.get_value(unit)
for match in regex.finditer(source):
ret.append((match.start(), match.end(), match.group()))
return ret
def get_description(self, check_obj):
unit = check_obj.unit
if not self.has_value(unit):
return super().get_description(check_obj)
regex = self.get_value(unit)
return mark_safe(
"{} <code>{}</code>".format(escape(self.description), escape(regex.pattern))
)
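# Illustrative example (hypothetical flag): a unit flagged with `regex:^[A-Z]`
# makes check_target_params() report the unit whenever any target plural fails
# to match the expression, while check_highlight() marks the matching spans.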
|
from homeassistant.core import State
from homeassistant.setup import async_setup_component
VALID_NUMBER1 = "19.0"
VALID_NUMBER2 = "99.9"
async def test_reproducing_states(hass, caplog):
"""Test reproducing Input number states."""
assert await async_setup_component(
hass,
"input_number",
{
"input_number": {
"test_number": {"min": "5", "max": "100", "initial": VALID_NUMBER1}
}
},
)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("input_number.test_number", VALID_NUMBER1),
# Should not raise
State("input_number.non_existing", "234"),
],
)
assert hass.states.get("input_number.test_number").state == VALID_NUMBER1
# Test reproducing with different state
await hass.helpers.state.async_reproduce_state(
[
State("input_number.test_number", VALID_NUMBER2),
# Should not raise
State("input_number.non_existing", "234"),
],
)
assert hass.states.get("input_number.test_number").state == VALID_NUMBER2
# Test setting state to number out of range
await hass.helpers.state.async_reproduce_state(
[State("input_number.test_number", "150")]
)
# The entity states should be unchanged after trying to set them to out-of-range number
assert hass.states.get("input_number.test_number").state == VALID_NUMBER2
await hass.helpers.state.async_reproduce_state(
[
# Test invalid state
State("input_number.test_number", "invalid_state"),
# Set to state it already is.
State("input_number.test_number", VALID_NUMBER2),
],
)
|
import time
import re
import os
import sys
# Fix Path for locating the SNMPCollector
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
'../',
'snmp',
)))
from diamond.metric import Metric
from snmp import SNMPCollector as parent_SNMPCollector
class ServerTechPDUCollector(parent_SNMPCollector):
"""
SNMPCollector for ServerTech PDUs
"""
PDU_SYSTEM_GAUGES = {
"systemTotalWatts": "1.3.6.1.4.1.1718.3.1.6"
}
PDU_INFEED_NAMES = "1.3.6.1.4.1.1718.3.2.2.1.3"
PDU_INFEED_GAUGES = {
"infeedCapacityAmps": "1.3.6.1.4.1.1718.3.2.2.1.10",
"infeedVolts": "1.3.6.1.4.1.1718.3.2.2.1.11",
"infeedAmps": "1.3.6.1.4.1.1718.3.2.2.1.7",
"infeedWatts": "1.3.6.1.4.1.1718.3.2.2.1.12"
}
def get_default_config_help(self):
config_help = super(ServerTechPDUCollector,
self).get_default_config_help()
config_help.update({
'host': 'PDU dns address',
'port': 'PDU port to collect snmp data',
'community': 'SNMP community'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ServerTechPDUCollector, self).get_default_config()
config.update({
'path': 'pdu',
'timeout': 15,
'retries': 3,
})
return config
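    # Illustrative collector configuration (values are examples only); Diamond
    # collectors are typically configured through an ini-style file named after
    # the class:
    #
    #   enabled = True
    #   host = pdu1.example.com
    #   port = 161
    #   community = public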
def collect_snmp(self, device, host, port, community):
"""
Collect stats from device
"""
# Log
self.log.info("Collecting ServerTech PDU statistics from: %s" % device)
# Set timestamp
timestamp = time.time()
inputFeeds = {}
        # Collect PDU system gauge values
for gaugeName, gaugeOid in self.PDU_SYSTEM_GAUGES.items():
systemGauges = self.walk(gaugeOid, host, port, community)
for o, gaugeValue in systemGauges.items():
# Get Metric Name
metricName = gaugeName
# Get Metric Value
metricValue = float(gaugeValue)
# Get Metric Path
metricPath = '.'.join(
['devices', device, 'system', metricName])
# Create Metric
metric = Metric(metricPath, metricValue, timestamp, 2)
# Publish Metric
self.publish_metric(metric)
# Collect PDU input feed names
inputFeedNames = self.walk(
self.PDU_INFEED_NAMES, host, port, community)
for o, inputFeedName in inputFeedNames.items():
# Extract input feed name
inputFeed = ".".join(o.split(".")[-2:])
inputFeeds[inputFeed] = inputFeedName
# Collect PDU input gauge values
for gaugeName, gaugeOid in self.PDU_INFEED_GAUGES.items():
inputFeedGauges = self.walk(gaugeOid, host, port, community)
for o, gaugeValue in inputFeedGauges.items():
# Extract input feed name
inputFeed = ".".join(o.split(".")[-2:])
# Get Metric Name
metricName = '.'.join([re.sub(r'\.|\\', '_',
inputFeeds[inputFeed]),
gaugeName])
# Get Metric Value
if gaugeName == "infeedVolts":
# Note: Voltage is in "tenth volts", so divide by 10
metricValue = float(gaugeValue) / 10.0
elif gaugeName == "infeedAmps":
# Note: Amps is in "hundredth amps", so divide by 100
metricValue = float(gaugeValue) / 100.0
else:
metricValue = float(gaugeValue)
# Get Metric Path
metricPath = '.'.join(['devices', device, 'input', metricName])
# Create Metric
metric = Metric(metricPath, metricValue, timestamp, 2)
# Publish Metric
self.publish_metric(metric)
|
from gi.repository import Gtk, Pango
from meld.conf import _
def layout_text_and_icon(stockid, primary_text, secondary_text=None):
image = Gtk.Image.new_from_icon_name(stockid, Gtk.IconSize.DIALOG)
image.set_alignment(0.5, 0.5)
vbox = Gtk.VBox(homogeneous=False, spacing=6)
primary_label = Gtk.Label(
label="<b>{}</b>".format(primary_text),
wrap=True,
wrap_mode=Pango.WrapMode.WORD_CHAR,
use_markup=True,
xalign=0,
can_focus=True,
selectable=True,
)
vbox.pack_start(primary_label, True, True, 0)
if secondary_text:
secondary_label = Gtk.Label(
"<small>{}</small>".format(secondary_text),
wrap=True,
wrap_mode=Pango.WrapMode.WORD_CHAR,
use_markup=True,
xalign=0,
can_focus=True,
selectable=True,
)
vbox.pack_start(secondary_label, True, True, 0)
hbox_content = Gtk.HBox(homogeneous=False, spacing=8)
hbox_content.pack_start(image, False, False, 0)
hbox_content.pack_start(vbox, True, True, 0)
hbox_content.show_all()
return hbox_content
class MsgAreaController(Gtk.HBox):
__gtype_name__ = "MsgAreaController"
def __init__(self):
super().__init__()
self.__msgarea = None
self.__msgid = None
def has_message(self):
return self.__msgarea is not None
def get_msg_id(self):
return self.__msgid
def set_msg_id(self, msgid):
self.__msgid = msgid
def clear(self):
if self.__msgarea is not None:
self.remove(self.__msgarea)
self.__msgarea.destroy()
self.__msgarea = None
self.__msgid = None
def new_from_text_and_icon(
self, stockid, primary, secondary=None, buttons=None):
self.clear()
msgarea = self.__msgarea = Gtk.InfoBar()
if buttons:
for (text, respid) in buttons:
                msgarea.add_button(text, respid)
content = layout_text_and_icon(stockid, primary, secondary)
content_area = msgarea.get_content_area()
content_area.foreach(content_area.remove, None)
content_area.add(content)
self.pack_start(msgarea, True, True, 0)
return msgarea
def add_dismissable_msg(self, icon, primary, secondary):
msgarea = self.new_from_text_and_icon(icon, primary, secondary)
msgarea.add_button(_("Hi_de"), Gtk.ResponseType.CLOSE)
msgarea.connect("response", lambda *args: self.clear())
msgarea.show_all()
return msgarea
def add_action_msg(self, icon, primary, secondary, action_label, callback):
def on_response(msgarea, response_id, *args):
self.clear()
if response_id == Gtk.ResponseType.ACCEPT:
callback()
msgarea = self.new_from_text_and_icon(icon, primary, secondary)
msgarea.add_button(action_label, Gtk.ResponseType.ACCEPT)
msgarea.add_button(_("Hi_de"), Gtk.ResponseType.CLOSE)
msgarea.connect("response", on_response)
msgarea.show_all()
return msgarea
|
from datetime import date
from django.conf import settings
from django.db import models
from weblate.accounts.models import AuditLog
from weblate.utils.request import get_ip_address, get_user_agent
# Current TOS date
TOS_DATE = date(2017, 7, 2)
class Agreement(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL, unique=True, on_delete=models.deletion.CASCADE
)
tos = models.DateField(default=date(1970, 1, 1))
address = models.GenericIPAddressField(null=True)
user_agent = models.CharField(max_length=200, default="")
timestamp = models.DateTimeField(auto_now=True)
def __str__(self):
return f"{self.user.username}:{self.tos}"
def is_current(self):
return self.tos == TOS_DATE
def make_current(self, request):
if not self.is_current():
AuditLog.objects.create(
self.user, request, "tos", date=TOS_DATE.isoformat()
)
self.tos = TOS_DATE
self.address = get_ip_address(request)
self.user_agent = get_user_agent(request)
self.save()
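# Typical flow (sketch): a consent view calls agreement.make_current(request)
# once the user accepts the terms; since is_current() compares the stored date
# against TOS_DATE, bumping TOS_DATE above invalidates all earlier agreements
# and forces users to accept the terms again.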
|
from django.conf import settings
from django.apps import apps
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist, ValidationError
from django.forms import fields, widgets, ModelChoiceField
from django.template import TemplateDoesNotExist
from django.template.loader import select_template
from django.utils.html import format_html
from django.utils.module_loading import import_string
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from entangled.forms import EntangledModelFormMixin, get_related_object
if 'cmsplugin_cascade' not in settings.INSTALLED_APPS:
raise ImproperlyConfigured("Please add 'cmsplugin_cascade' to your INSTALLED_APPS")
from cms.plugin_pool import plugin_pool
from cmsplugin_cascade.plugin_base import CascadePluginBase
from cmsplugin_cascade.link.forms import LinkForm
from cmsplugin_cascade.link.plugin_base import LinkPluginBase
from django_select2.forms import HeavySelect2Widget
from shop.conf import app_settings
from shop.forms.base import DialogFormMixin
from shop.models.cart import CartModel
from shop.models.product import ProductModel
class ShopPluginBase(CascadePluginBase):
module = "Shop"
require_parent = False
allow_children = False
class ProductSelectField(ModelChoiceField):
widget = HeavySelect2Widget(data_view='shop:select-product')
def __init__(self, *args, **kwargs):
kwargs.setdefault('queryset', ProductModel.objects.all())
super().__init__(*args, **kwargs)
class CatalogLinkForm(LinkForm):
"""
    Alternative implementation of `cmsplugin_cascade.link.forms.LinkForm`, which allows linking to
    the Product model through its ``get_absolute_url`` method.
Note: In this form class the field ``product`` is missing. It is added later, when the shop's
Product knows about its materialized model.
"""
LINK_TYPE_CHOICES = [
('cmspage', _("CMS Page")),
('product', _("Product")),
('download', _("Download File")),
('exturl', _("External URL")),
('email', _("Mail To")),
]
product = ProductSelectField(
label=_("Product"),
required=False,
help_text=_("An internal link onto a product from the catalog"),
)
class Meta:
entangled_fields = {'glossary': ['product']}
def clean(self):
cleaned_data = super().clean()
link_type = cleaned_data.get('link_type')
error = None
if link_type == 'product':
if cleaned_data['product'] is None:
error = ValidationError(_("Product to link to is missing."))
self.add_error('product', error)
if error:
raise error
return cleaned_data
class CatalogLinkPluginBase(LinkPluginBase):
"""
    Alternative implementation of ``cmsplugin_cascade.link.DefaultLinkPluginBase``, which adds
    another link type, "Product", for linking to arbitrary products of this shop.
"""
ring_plugin = 'ShopLinkPlugin'
class Media:
js = ['admin/js/jquery.init.js', 'shop/js/admin/shoplinkplugin.js']
@classmethod
def get_link(cls, obj):
link_type = obj.glossary.get('link_type')
if link_type == 'product':
relobj = get_related_object(obj.glossary, 'product')
if relobj:
return relobj.get_absolute_url()
else:
return super().get_link(obj) or link_type
class DialogPluginBaseForm(EntangledModelFormMixin):
RENDER_CHOICES = [
('form', _("Form dialog")),
('summary', _("Static summary")),
]
render_type = fields.ChoiceField(
choices=RENDER_CHOICES,
widget=widgets.RadioSelect,
label=_("Render as"),
initial='form',
help_text=_("A dialog can also be rendered as a box containing a read-only summary."),
)
headline_legend = fields.BooleanField(
label=_("Headline Legend"),
initial=True,
required=False,
help_text=_("Render a legend inside the dialog's headline."),
)
class Meta:
entangled_fields = {'glossary': ['render_type', 'headline_legend']}
class DialogFormPluginBase(ShopPluginBase):
"""
Base class for all plugins adding a dialog form to a placeholder field.
"""
require_parent = True
parent_classes = ['BootstrapColumnPlugin', 'ProcessStepPlugin', 'BootstrapPanelPlugin',
'SegmentPlugin', 'SimpleWrapperPlugin', 'ValidateSetOfFormsPlugin']
form = DialogPluginBaseForm
@classmethod
def register_plugin(cls, plugin):
"""
Register plugins derived from this class with this function instead of
`plugin_pool.register_plugin`, so that dialog plugins without a corresponding
form class are not registered.
"""
if not issubclass(plugin, cls):
msg = "Can not register plugin class `{}`, since is does not inherit from `{}`."
raise ImproperlyConfigured(msg.format(plugin.__name__, cls.__name__))
plugin_pool.register_plugin(plugin)
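    # Illustrative registration (hypothetical plugin name and form path):
    #
    #   class GuestFormPlugin(DialogFormPluginBase):
    #       name = "Guest Form"
    #       form_class = 'myshop.forms.checkout.GuestForm'
    #
    #   DialogFormPluginBase.register_plugin(GuestFormPlugin)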
def get_form_class(self, instance):
try:
return import_string(self.form_class)
except AttributeError:
msg = "Can not register plugin class '{}', since it neither defines 'form_class' " \
"nor overrides 'get_form_class()'."
raise ImproperlyConfigured(msg.format(self.__name__))
@classmethod
def get_identifier(cls, instance):
render_type = instance.glossary.get('render_type')
render_type = dict(cls.form.RENDER_CHOICES).get(render_type, '')
return format_html(pgettext_lazy('get_identifier', "as {}"), render_type)
def get_form_data(self, context, instance, placeholder):
"""
Returns data to initialize the corresponding dialog form.
This method must return a dictionary containing
* either `instance` - a Python object to initialize the form class for this plugin,
* or `initial` - a dictionary containing initial form data, or if both are set, values
from `initial` override those of `instance`.
"""
if issubclass(self.get_form_class(instance), DialogFormMixin):
try:
cart = CartModel.objects.get_from_request(context['request'])
cart.update(context['request'])
except CartModel.DoesNotExist:
cart = None
return {'cart': cart}
return {}
def get_render_template(self, context, instance, placeholder):
render_type = instance.glossary.get('render_type')
if render_type not in ('form', 'summary',):
render_type = 'form'
try:
template_names = [
'{0}/checkout/{1}'.format(app_settings.APP_LABEL, self.template_leaf_name).format(render_type),
'shop/checkout/{}'.format(self.template_leaf_name).format(render_type),
]
return select_template(template_names)
except (AttributeError, TemplateDoesNotExist):
return self.render_template
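    # Illustrative resolution (hypothetical names): with APP_LABEL == 'myshop'
    # and template_leaf_name == 'customer-{}.html', a render_type of 'summary'
    # first tries 'myshop/checkout/customer-summary.html', then
    # 'shop/checkout/customer-summary.html', and finally falls back to
    # self.render_template.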
def render(self, context, instance, placeholder):
"""
Return the context to render a DialogFormPlugin
"""
request = context['request']
form_data = self.get_form_data(context, instance, placeholder)
request._plugin_order = getattr(request, '_plugin_order', 0) + 1
if not isinstance(form_data.get('initial'), dict):
form_data['initial'] = {}
form_data['initial'].update(plugin_id=instance.pk, plugin_order=request._plugin_order)
bound_form = self.get_form_class(instance)(**form_data)
context[bound_form.form_name] = bound_form
context['headline_legend'] = bool(instance.glossary.get('headline_legend', True))
        return super().render(context, instance, placeholder)
|
from unittest import TestCase, skipIf
import responses
from django.http import HttpRequest
from django.test.utils import override_settings
from weblate.utils.antispam import is_spam, report_spam
try:
# pylint: disable=unused-import
import akismet # noqa: F401
HAS_AKISMET = True
except ImportError:
HAS_AKISMET = False
class SpamTest(TestCase):
@override_settings(AKISMET_API_KEY=None)
def test_disabled(self):
self.assertFalse(is_spam("text", HttpRequest()))
def mock_akismet(self, body):
responses.add(
responses.POST, "https://key.rest.akismet.com/1.1/comment-check", body=body
)
responses.add(
responses.POST, "https://key.rest.akismet.com/1.1/submit-spam", body=body
)
responses.add(
responses.POST, "https://rest.akismet.com/1.1/verify-key", body="valid"
)
@skipIf(not HAS_AKISMET, "akismet module not installed")
@responses.activate
@override_settings(AKISMET_API_KEY="key")
def test_akismet_spam(self):
self.mock_akismet("true")
self.assertTrue(is_spam("text", HttpRequest()))
@skipIf(not HAS_AKISMET, "akismet module not installed")
@responses.activate
@override_settings(AKISMET_API_KEY="key")
def test_akismet_nospam(self):
self.mock_akismet("false")
self.assertFalse(is_spam("text", HttpRequest()))
@skipIf(not HAS_AKISMET, "akismet module not installed")
@responses.activate
@override_settings(AKISMET_API_KEY="key")
def test_akismet_submit_spam(self):
self.mock_akismet("Thanks for making the web a better place.")
self.assertIsNone(report_spam("1.2.3.4", "Agent", "text"))
@skipIf(not HAS_AKISMET, "akismet module not installed")
@responses.activate
@override_settings(AKISMET_API_KEY="key")
def test_akismet_submit_spam_error(self):
self.mock_akismet("false")
self.assertIsNone(report_spam("1.2.3.4", "Agent", "text"))
|
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .common import get_data_update_coordinator
from .const import DEVICE_TYPE_GOGOGATE2
async def async_setup(hass: HomeAssistant, base_config: dict) -> bool:
"""Set up for Gogogate2 controllers."""
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Do setup of Gogogate2."""
# Update the config entry.
config_updates = {}
if CONF_DEVICE not in config_entry.data:
config_updates["data"] = {
**config_entry.data,
**{CONF_DEVICE: DEVICE_TYPE_GOGOGATE2},
}
if config_updates:
hass.config_entries.async_update_entry(config_entry, **config_updates)
data_update_coordinator = get_data_update_coordinator(hass, config_entry)
await data_update_coordinator.async_refresh()
if not data_update_coordinator.last_update_success:
raise ConfigEntryNotReady()
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, COVER_DOMAIN)
)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload Gogogate2 config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_unload(config_entry, COVER_DOMAIN)
)
return True
|
import voluptuous as vol
from homeassistant.components import http
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import SERVICE_TOGGLE, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant
from homeassistant.helpers import config_validation as cv, integration_platform, intent
from .const import DOMAIN
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Intent component."""
hass.http.register_view(IntentHandleView())
await integration_platform.async_process_integration_platforms(
hass, DOMAIN, _async_process_intent
)
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
intent.INTENT_TURN_ON, HA_DOMAIN, SERVICE_TURN_ON, "Turned {} on"
)
)
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
intent.INTENT_TURN_OFF, HA_DOMAIN, SERVICE_TURN_OFF, "Turned {} off"
)
)
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
intent.INTENT_TOGGLE, HA_DOMAIN, SERVICE_TOGGLE, "Toggled {}"
)
)
return True
async def _async_process_intent(hass: HomeAssistant, domain: str, platform):
"""Process the intents of an integration."""
await platform.async_setup_intents(hass)
class IntentHandleView(http.HomeAssistantView):
"""View to handle intents from JSON."""
url = "/api/intent/handle"
name = "api:intent:handle"
@RequestDataValidator(
vol.Schema(
{
vol.Required("name"): cv.string,
vol.Optional("data"): vol.Schema({cv.string: object}),
}
)
)
async def post(self, request, data):
"""Handle intent with name/data."""
hass = request.app["hass"]
try:
intent_name = data["name"]
slots = {
key: {"value": value} for key, value in data.get("data", {}).items()
}
intent_result = await intent.async_handle(
hass, DOMAIN, intent_name, slots, "", self.context(request)
)
except intent.IntentHandleError as err:
intent_result = intent.IntentResponse()
intent_result.async_set_speech(str(err))
if intent_result is None:
intent_result = intent.IntentResponse()
intent_result.async_set_speech("Sorry, I couldn't handle that")
return self.json(intent_result)
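# Illustrative request handled by the view above (intent name taken from the
# handlers registered in async_setup):
#
#   POST /api/intent/handle
#   {"name": "HassTurnOn", "data": {"name": "kitchen light"}}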
|
from datetime import timedelta
from ipaddress import ip_network
from aiohttp import BasicAuth, web
from aiohttp.web_exceptions import HTTPUnauthorized
import pytest
from homeassistant.auth.providers import trusted_networks
from homeassistant.components.http.auth import async_sign_path, setup_auth
from homeassistant.components.http.const import KEY_AUTHENTICATED
from homeassistant.components.http.forwarded import async_setup_forwarded
from homeassistant.setup import async_setup_component
from . import HTTP_HEADER_HA_AUTH, mock_real_ip
from tests.async_mock import patch
API_PASSWORD = "test-password"
# Don't add 127.0.0.1/::1 as trusted, as it may interfere with other test cases
TRUSTED_NETWORKS = [
ip_network("192.0.2.0/24"),
ip_network("2001:DB8:ABCD::/48"),
ip_network("100.64.0.1"),
ip_network("FD01:DB8::1"),
]
TRUSTED_ADDRESSES = ["100.64.0.1", "192.0.2.100", "FD01:DB8::1", "2001:DB8:ABCD::1"]
UNTRUSTED_ADDRESSES = ["198.51.100.1", "2001:DB8:FA1::1", "127.0.0.1", "::1"]
async def mock_handler(request):
"""Return if request was authenticated."""
if not request[KEY_AUTHENTICATED]:
raise HTTPUnauthorized
user = request.get("hass_user")
user_id = user.id if user else None
return web.json_response(status=200, data={"user_id": user_id})
async def get_legacy_user(auth):
"""Get the user in legacy_api_password auth provider."""
provider = auth.get_auth_provider("legacy_api_password", None)
return await auth.async_get_or_create_user(
await provider.async_get_or_create_credentials({})
)
@pytest.fixture
def app(hass):
"""Fixture to set up a web.Application."""
app = web.Application()
app["hass"] = hass
app.router.add_get("/", mock_handler)
async_setup_forwarded(app, [])
return app
@pytest.fixture
def app2(hass):
"""Fixture to set up a web.Application without real_ip middleware."""
app = web.Application()
app["hass"] = hass
app.router.add_get("/", mock_handler)
return app
@pytest.fixture
def trusted_networks_auth(hass):
"""Load trusted networks auth provider."""
prv = trusted_networks.TrustedNetworksAuthProvider(
hass,
hass.auth._store,
{"type": "trusted_networks", "trusted_networks": TRUSTED_NETWORKS},
)
hass.auth._providers[(prv.type, prv.id)] = prv
return prv
async def test_auth_middleware_loaded_by_default(hass):
"""Test accessing to server from banned IP when feature is off."""
with patch("homeassistant.components.http.setup_auth") as mock_setup:
await async_setup_component(hass, "http", {"http": {}})
assert len(mock_setup.mock_calls) == 1
async def test_cant_access_with_password_in_header(
app, aiohttp_client, legacy_auth, hass
):
"""Test access with password in header."""
setup_auth(hass, app)
client = await aiohttp_client(app)
req = await client.get("/", headers={HTTP_HEADER_HA_AUTH: API_PASSWORD})
assert req.status == 401
req = await client.get("/", headers={HTTP_HEADER_HA_AUTH: "wrong-pass"})
assert req.status == 401
async def test_cant_access_with_password_in_query(
app, aiohttp_client, legacy_auth, hass
):
"""Test access with password in URL."""
setup_auth(hass, app)
client = await aiohttp_client(app)
resp = await client.get("/", params={"api_password": API_PASSWORD})
assert resp.status == 401
resp = await client.get("/")
assert resp.status == 401
resp = await client.get("/", params={"api_password": "wrong-password"})
assert resp.status == 401
async def test_basic_auth_does_not_work(app, aiohttp_client, hass, legacy_auth):
"""Test access with basic authentication."""
setup_auth(hass, app)
client = await aiohttp_client(app)
req = await client.get("/", auth=BasicAuth("homeassistant", API_PASSWORD))
assert req.status == 401
req = await client.get("/", auth=BasicAuth("wrong_username", API_PASSWORD))
assert req.status == 401
req = await client.get("/", auth=BasicAuth("homeassistant", "wrong password"))
assert req.status == 401
req = await client.get("/", headers={"authorization": "NotBasic abcdefg"})
assert req.status == 401
async def test_cannot_access_with_trusted_ip(
hass, app2, trusted_networks_auth, aiohttp_client, hass_owner_user
):
"""Test access with an untrusted ip address."""
setup_auth(hass, app2)
set_mock_ip = mock_real_ip(app2)
client = await aiohttp_client(app2)
for remote_addr in UNTRUSTED_ADDRESSES:
set_mock_ip(remote_addr)
resp = await client.get("/")
assert resp.status == 401, f"{remote_addr} shouldn't be trusted"
for remote_addr in TRUSTED_ADDRESSES:
set_mock_ip(remote_addr)
resp = await client.get("/")
assert resp.status == 401, f"{remote_addr} shouldn't be trusted"
async def test_auth_active_access_with_access_token_in_header(
hass, app, aiohttp_client, hass_access_token
):
"""Test access with access token in header."""
token = hass_access_token
setup_auth(hass, app)
client = await aiohttp_client(app)
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
req = await client.get("/", headers={"Authorization": f"Bearer {token}"})
assert req.status == 200
assert await req.json() == {"user_id": refresh_token.user.id}
req = await client.get("/", headers={"AUTHORIZATION": f"Bearer {token}"})
assert req.status == 200
assert await req.json() == {"user_id": refresh_token.user.id}
req = await client.get("/", headers={"authorization": f"Bearer {token}"})
assert req.status == 200
assert await req.json() == {"user_id": refresh_token.user.id}
req = await client.get("/", headers={"Authorization": token})
assert req.status == 401
req = await client.get("/", headers={"Authorization": f"BEARER {token}"})
assert req.status == 401
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
refresh_token.user.is_active = False
req = await client.get("/", headers={"Authorization": f"Bearer {token}"})
assert req.status == 401
async def test_auth_active_access_with_trusted_ip(
hass, app2, trusted_networks_auth, aiohttp_client, hass_owner_user
):
"""Test access with an untrusted ip address."""
setup_auth(hass, app2)
set_mock_ip = mock_real_ip(app2)
client = await aiohttp_client(app2)
for remote_addr in UNTRUSTED_ADDRESSES:
set_mock_ip(remote_addr)
resp = await client.get("/")
assert resp.status == 401, f"{remote_addr} shouldn't be trusted"
for remote_addr in TRUSTED_ADDRESSES:
set_mock_ip(remote_addr)
resp = await client.get("/")
assert resp.status == 401, f"{remote_addr} shouldn't be trusted"
async def test_auth_legacy_support_api_password_cannot_access(
app, aiohttp_client, legacy_auth, hass
):
"""Test access using api_password if auth.support_legacy."""
setup_auth(hass, app)
client = await aiohttp_client(app)
req = await client.get("/", headers={HTTP_HEADER_HA_AUTH: API_PASSWORD})
assert req.status == 401
resp = await client.get("/", params={"api_password": API_PASSWORD})
assert resp.status == 401
req = await client.get("/", auth=BasicAuth("homeassistant", API_PASSWORD))
assert req.status == 401
async def test_auth_access_signed_path(hass, app, aiohttp_client, hass_access_token):
"""Test access with signed url."""
app.router.add_post("/", mock_handler)
app.router.add_get("/another_path", mock_handler)
setup_auth(hass, app)
client = await aiohttp_client(app)
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
signed_path = async_sign_path(hass, refresh_token.id, "/", timedelta(seconds=5))
req = await client.get(signed_path)
assert req.status == 200
data = await req.json()
assert data["user_id"] == refresh_token.user.id
# Use signature on other path
req = await client.get("/another_path?{}".format(signed_path.split("?")[1]))
assert req.status == 401
# We only allow GET
req = await client.post(signed_path)
assert req.status == 401
# Never valid as expired in the past.
expired_signed_path = async_sign_path(
hass, refresh_token.id, "/", timedelta(seconds=-5)
)
req = await client.get(expired_signed_path)
assert req.status == 401
# refresh token gone should also invalidate signature
await hass.auth.async_remove_refresh_token(refresh_token)
req = await client.get(signed_path)
assert req.status == 401
|
import pytest
import voluptuous as vol
from homeassistant.components.homeassistant import scene as ha_scene
from homeassistant.components.homeassistant.scene import EVENT_SCENE_RELOADED
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import async_mock_service
async def test_reload_config_service(hass):
"""Test the reload config service."""
assert await async_setup_component(hass, "scene", {})
test_reloaded_event = []
hass.bus.async_listen(
EVENT_SCENE_RELOADED, lambda event: test_reloaded_event.append(event)
)
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={"scene": {"name": "Hallo", "entities": {"light.kitchen": "on"}}},
):
await hass.services.async_call("scene", "reload", blocking=True)
await hass.async_block_till_done()
assert hass.states.get("scene.hallo") is not None
assert len(test_reloaded_event) == 1
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={"scene": {"name": "Bye", "entities": {"light.kitchen": "on"}}},
):
await hass.services.async_call("scene", "reload", blocking=True)
await hass.async_block_till_done()
assert len(test_reloaded_event) == 2
assert hass.states.get("scene.hallo") is None
assert hass.states.get("scene.bye") is not None
async def test_apply_service(hass):
"""Test the apply service."""
assert await async_setup_component(hass, "scene", {})
assert await async_setup_component(hass, "light", {"light": {"platform": "demo"}})
await hass.async_block_till_done()
assert await hass.services.async_call(
"scene", "apply", {"entities": {"light.bed_light": "off"}}, blocking=True
)
assert hass.states.get("light.bed_light").state == "off"
assert await hass.services.async_call(
"scene",
"apply",
{"entities": {"light.bed_light": {"state": "on", "brightness": 50}}},
blocking=True,
)
state = hass.states.get("light.bed_light")
assert state.state == "on"
assert state.attributes["brightness"] == 50
turn_on_calls = async_mock_service(hass, "light", "turn_on")
assert await hass.services.async_call(
"scene",
"apply",
{
"transition": 42,
"entities": {"light.bed_light": {"state": "on", "brightness": 50}},
},
blocking=True,
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "light"
assert turn_on_calls[0].service == "turn_on"
assert turn_on_calls[0].data.get("transition") == 42
assert turn_on_calls[0].data.get("entity_id") == "light.bed_light"
assert turn_on_calls[0].data.get("brightness") == 50
async def test_create_service(hass, caplog):
"""Test the create service."""
assert await async_setup_component(
hass,
"scene",
{"scene": {"name": "hallo_2", "entities": {"light.kitchen": "on"}}},
)
await hass.async_block_till_done()
assert hass.states.get("scene.hallo") is None
assert hass.states.get("scene.hallo_2") is not None
assert await hass.services.async_call(
"scene",
"create",
{"scene_id": "hallo", "entities": {}, "snapshot_entities": []},
blocking=True,
)
await hass.async_block_till_done()
assert "Empty scenes are not allowed" in caplog.text
assert hass.states.get("scene.hallo") is None
assert await hass.services.async_call(
"scene",
"create",
{
"scene_id": "hallo",
"entities": {"light.bed_light": {"state": "on", "brightness": 50}},
},
blocking=True,
)
await hass.async_block_till_done()
scene = hass.states.get("scene.hallo")
assert scene is not None
assert scene.domain == "scene"
assert scene.name == "hallo"
assert scene.state == "scening"
assert scene.attributes.get("entity_id") == ["light.bed_light"]
assert await hass.services.async_call(
"scene",
"create",
{
"scene_id": "hallo",
"entities": {"light.kitchen_light": {"state": "on", "brightness": 100}},
},
blocking=True,
)
await hass.async_block_till_done()
scene = hass.states.get("scene.hallo")
assert scene is not None
assert scene.domain == "scene"
assert scene.name == "hallo"
assert scene.state == "scening"
assert scene.attributes.get("entity_id") == ["light.kitchen_light"]
assert await hass.services.async_call(
"scene",
"create",
{
"scene_id": "hallo_2",
"entities": {"light.bed_light": {"state": "on", "brightness": 50}},
},
blocking=True,
)
await hass.async_block_till_done()
assert "The scene scene.hallo_2 already exists" in caplog.text
scene = hass.states.get("scene.hallo_2")
assert scene is not None
assert scene.domain == "scene"
assert scene.name == "hallo_2"
assert scene.state == "scening"
assert scene.attributes.get("entity_id") == ["light.kitchen"]
async def test_snapshot_service(hass, caplog):
"""Test the snapshot option."""
assert await async_setup_component(hass, "scene", {"scene": {}})
await hass.async_block_till_done()
hass.states.async_set("light.my_light", "on", {"hs_color": (345, 75)})
assert hass.states.get("scene.hallo") is None
assert await hass.services.async_call(
"scene",
"create",
{"scene_id": "hallo", "snapshot_entities": ["light.my_light"]},
blocking=True,
)
await hass.async_block_till_done()
scene = hass.states.get("scene.hallo")
assert scene is not None
assert scene.attributes.get("entity_id") == ["light.my_light"]
hass.states.async_set("light.my_light", "off", {"hs_color": (123, 45)})
turn_on_calls = async_mock_service(hass, "light", "turn_on")
assert await hass.services.async_call(
"scene", "turn_on", {"entity_id": "scene.hallo"}, blocking=True
)
await hass.async_block_till_done()
assert len(turn_on_calls) == 1
assert turn_on_calls[0].data.get("entity_id") == "light.my_light"
assert turn_on_calls[0].data.get("hs_color") == (345, 75)
assert await hass.services.async_call(
"scene",
"create",
{"scene_id": "hallo_2", "snapshot_entities": ["light.not_existent"]},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("scene.hallo_2") is None
assert (
"Entity light.not_existent does not exist and therefore cannot be snapshotted"
in caplog.text
)
assert await hass.services.async_call(
"scene",
"create",
{
"scene_id": "hallo_3",
"entities": {"light.bed_light": {"state": "on", "brightness": 50}},
"snapshot_entities": ["light.my_light"],
},
blocking=True,
)
await hass.async_block_till_done()
scene = hass.states.get("scene.hallo_3")
assert scene is not None
assert "light.my_light" in scene.attributes.get("entity_id")
assert "light.bed_light" in scene.attributes.get("entity_id")
async def test_ensure_no_intersection(hass):
"""Test that entities and snapshot_entities do not overlap."""
assert await async_setup_component(hass, "scene", {"scene": {}})
await hass.async_block_till_done()
with pytest.raises(vol.MultipleInvalid) as ex:
assert await hass.services.async_call(
"scene",
"create",
{
"scene_id": "hallo",
"entities": {"light.my_light": {"state": "on", "brightness": 50}},
"snapshot_entities": ["light.my_light"],
},
blocking=True,
)
await hass.async_block_till_done()
assert "entities and snapshot_entities must not overlap" in str(ex.value)
assert hass.states.get("scene.hallo") is None
async def test_scenes_with_entity(hass):
"""Test finding scenes with a specific entity."""
assert await async_setup_component(
hass,
"scene",
{
"scene": [
{"name": "scene_1", "entities": {"light.kitchen": "on"}},
{"name": "scene_2", "entities": {"light.living_room": "off"}},
{
"name": "scene_3",
"entities": {"light.kitchen": "on", "light.living_room": "off"},
},
]
},
)
await hass.async_block_till_done()
assert sorted(ha_scene.scenes_with_entity(hass, "light.kitchen")) == [
"scene.scene_1",
"scene.scene_3",
]
async def test_entities_in_scene(hass):
"""Test finding entities in a scene."""
assert await async_setup_component(
hass,
"scene",
{
"scene": [
{"name": "scene_1", "entities": {"light.kitchen": "on"}},
{"name": "scene_2", "entities": {"light.living_room": "off"}},
{
"name": "scene_3",
"entities": {"light.kitchen": "on", "light.living_room": "off"},
},
]
},
)
await hass.async_block_till_done()
for scene_id, entities in (
("scene.scene_1", ["light.kitchen"]),
("scene.scene_2", ["light.living_room"]),
("scene.scene_3", ["light.kitchen", "light.living_room"]),
):
assert ha_scene.entities_in_scene(hass, scene_id) == entities
async def test_config(hass):
"""Test passing config in YAML."""
assert await async_setup_component(
hass,
"scene",
{
"scene": [
{
"id": "scene_id",
"name": "Scene Icon",
"icon": "mdi:party",
"entities": {"light.kitchen": "on"},
},
{
"name": "Scene No Icon",
"entities": {"light.kitchen": {"state": "on"}},
},
]
},
)
await hass.async_block_till_done()
icon = hass.states.get("scene.scene_icon")
assert icon is not None
assert icon.attributes["icon"] == "mdi:party"
no_icon = hass.states.get("scene.scene_no_icon")
assert no_icon is not None
assert "icon" not in no_icon.attributes
def test_validator():
"""Test validators."""
parsed = ha_scene.STATES_SCHEMA({"light.Test": {"state": "on"}})
assert len(parsed) == 1
assert "light.test" in parsed
assert parsed["light.test"].entity_id == "light.test"
assert parsed["light.test"].state == "on"
|
from django.utils.cache import add_never_cache_headers
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from shop.models.cart import CartModel, CartItemModel
from shop.serializers.cart import CartSerializer, CartItemSerializer, WatchSerializer, WatchItemSerializer, CartItems
class BaseViewSet(viewsets.ModelViewSet):
pagination_class = None
with_items = CartItems.arranged
def get_queryset(self):
try:
cart = CartModel.objects.get_from_request(self.request)
if self.kwargs.get(self.lookup_field):
                # we're only interested in a certain cart item
return CartItemModel.objects.filter(cart=cart)
return cart
except CartModel.DoesNotExist:
return CartModel()
def list(self, request, *args, **kwargs):
cart = self.get_queryset()
context = self.get_serializer_context()
serializer = self.serializer_class(cart, context=context, label=self.serializer_label,
with_items=self.with_items)
return Response(serializer.data)
def create(self, request, *args, **kwargs):
"""
Create a new item in the cart.
"""
context = self.get_serializer_context()
item_serializer = self.item_serializer_class(context=context, data=request.data, label=self.serializer_label)
item_serializer.is_valid(raise_exception=True)
self.perform_create(item_serializer)
headers = self.get_success_headers(item_serializer.data)
return Response(item_serializer.data, status=status.HTTP_201_CREATED, headers=headers)
def update(self, request, *args, **kwargs):
"""
Handle changing the amount of the cart item referred by its primary key.
"""
cart_item = self.get_object()
context = self.get_serializer_context()
item_serializer = self.item_serializer_class(
cart_item,
context=context,
data=request.data,
label=self.serializer_label,
)
item_serializer.is_valid(raise_exception=True)
self.perform_update(item_serializer)
cart_serializer = CartSerializer(cart_item.cart, context=context, label='cart')
response_data = {
'cart': cart_serializer.data,
'cart_item': item_serializer.data,
}
return Response(data=response_data)
def destroy(self, request, *args, **kwargs):
"""
Delete a cart item referred by its primary key.
"""
cart_item = self.get_object()
context = self.get_serializer_context()
cart_serializer = CartSerializer(cart_item.cart, context=context, label=self.serializer_label)
self.perform_destroy(cart_item)
response_data = {
'cart_item': None,
'cart': cart_serializer.data,
}
return Response(data=response_data)
def finalize_response(self, request, response, *args, **kwargs):
"""Set HTTP headers to not cache this view"""
if self.action != 'render_product_summary':
add_never_cache_headers(response)
return super().finalize_response(request, response, *args, **kwargs)
class CartViewSet(BaseViewSet):
serializer_label = 'cart'
serializer_class = CartSerializer
item_serializer_class = CartItemSerializer
@action(detail=True, methods=['get'])
def fetch(self, request):
cart = self.get_queryset()
context = self.get_serializer_context()
serializer = self.serializer_class(cart, context=context, with_items=CartItems.without)
return Response(serializer.data)
@action(detail=False, methods=['get'], url_path='fetch-dropdown')
def fetch_dropdown(self, request):
cart = self.get_queryset()
context = self.get_serializer_context()
serializer = self.serializer_class(cart, context=context, label='dropdown', with_items=CartItems.unsorted)
return Response(serializer.data)
class WatchViewSet(BaseViewSet):
serializer_label = 'watch'
serializer_class = WatchSerializer
item_serializer_class = WatchItemSerializer
|
import socket
from urllib.parse import urlparse
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_MANUFACTURER,
ATTR_UPNP_MODEL_NAME,
ATTR_UPNP_UDN,
)
from homeassistant.const import (
CONF_HOST,
CONF_ID,
CONF_IP_ADDRESS,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_TOKEN,
)
# pylint:disable=unused-import
from .bridge import SamsungTVBridge
from .const import (
CONF_MANUFACTURER,
CONF_MODEL,
DOMAIN,
LOGGER,
METHOD_LEGACY,
METHOD_WEBSOCKET,
RESULT_AUTH_MISSING,
RESULT_CANNOT_CONNECT,
RESULT_SUCCESS,
)
DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str, vol.Required(CONF_NAME): str})
SUPPORTED_METHODS = [METHOD_LEGACY, METHOD_WEBSOCKET]
def _get_ip(host):
if host is None:
return None
return socket.gethostbyname(host)
class SamsungTVConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Samsung TV config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
def __init__(self):
"""Initialize flow."""
self._host = None
self._ip = None
self._manufacturer = None
self._model = None
self._name = None
self._title = None
self._id = None
self._bridge = None
def _get_entry(self):
data = {
CONF_HOST: self._host,
CONF_ID: self._id,
CONF_IP_ADDRESS: self._ip,
CONF_MANUFACTURER: self._manufacturer,
CONF_METHOD: self._bridge.method,
CONF_MODEL: self._model,
CONF_NAME: self._name,
CONF_PORT: self._bridge.port,
}
if self._bridge.token:
data[CONF_TOKEN] = self._bridge.token
return self.async_create_entry(
title=self._title,
data=data,
)
def _try_connect(self):
"""Try to connect and check auth."""
for method in SUPPORTED_METHODS:
self._bridge = SamsungTVBridge.get_bridge(method, self._host)
result = self._bridge.try_connect()
if result != RESULT_CANNOT_CONNECT:
return result
LOGGER.debug("No working config found")
return RESULT_CANNOT_CONNECT
async def async_step_import(self, user_input=None):
"""Handle configuration by yaml file."""
return await self.async_step_user(user_input)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
if user_input is not None:
ip_address = await self.hass.async_add_executor_job(
_get_ip, user_input[CONF_HOST]
)
await self.async_set_unique_id(ip_address)
self._abort_if_unique_id_configured()
self._host = user_input.get(CONF_HOST)
self._ip = self.context[CONF_IP_ADDRESS] = ip_address
self._name = user_input.get(CONF_NAME)
self._title = self._name
result = await self.hass.async_add_executor_job(self._try_connect)
if result != RESULT_SUCCESS:
return self.async_abort(reason=result)
return self._get_entry()
return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
async def async_step_ssdp(self, discovery_info):
"""Handle a flow initialized by discovery."""
host = urlparse(discovery_info[ATTR_SSDP_LOCATION]).hostname
ip_address = await self.hass.async_add_executor_job(_get_ip, host)
self._host = host
self._ip = self.context[CONF_IP_ADDRESS] = ip_address
self._manufacturer = discovery_info.get(ATTR_UPNP_MANUFACTURER)
self._model = discovery_info.get(ATTR_UPNP_MODEL_NAME)
self._name = f"Samsung {self._model}"
self._id = discovery_info.get(ATTR_UPNP_UDN)
self._title = self._model
# probably access denied
if self._id is None:
return self.async_abort(reason=RESULT_AUTH_MISSING)
if self._id.startswith("uuid:"):
self._id = self._id[5:]
await self.async_set_unique_id(ip_address)
self._abort_if_unique_id_configured(
{
CONF_ID: self._id,
CONF_MANUFACTURER: self._manufacturer,
CONF_MODEL: self._model,
}
)
self.context["title_placeholders"] = {"model": self._model}
return await self.async_step_confirm()
async def async_step_confirm(self, user_input=None):
"""Handle user-confirmation of discovered node."""
if user_input is not None:
result = await self.hass.async_add_executor_job(self._try_connect)
if result != RESULT_SUCCESS:
return self.async_abort(reason=result)
return self._get_entry()
return self.async_show_form(
step_id="confirm", description_placeholders={"model": self._model}
)
async def async_step_reauth(self, user_input=None):
"""Handle configuration by re-auth."""
self._host = user_input[CONF_HOST]
self._id = user_input.get(CONF_ID)
self._ip = user_input[CONF_IP_ADDRESS]
self._manufacturer = user_input.get(CONF_MANUFACTURER)
self._model = user_input.get(CONF_MODEL)
self._name = user_input.get(CONF_NAME)
self._title = self._model or self._name
await self.async_set_unique_id(self._ip)
self.context["title_placeholders"] = {"model": self._title}
return await self.async_step_confirm()
|
import numpy as np
import xarray as xr
class Combine:
"""Benchmark concatenating and merging large datasets"""
def setup(self):
"""Create 4 datasets with two different variables"""
t_size, x_size, y_size = 100, 900, 800
t = np.arange(t_size)
data = np.random.randn(t_size, x_size, y_size)
self.dsA0 = xr.Dataset(
{"A": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))}
)
self.dsA1 = xr.Dataset(
{"A": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))}
)
self.dsB0 = xr.Dataset(
{"B": xr.DataArray(data, coords={"T": t}, dims=("T", "X", "Y"))}
)
self.dsB1 = xr.Dataset(
{"B": xr.DataArray(data, coords={"T": t + t_size}, dims=("T", "X", "Y"))}
)
def time_combine_manual(self):
datasets = [[self.dsA0, self.dsA1], [self.dsB0, self.dsB1]]
        xr.combine_manual(datasets, concat_dim=[None, "T"])
def time_auto_combine(self):
"""Also has to load and arrange t coordinate"""
datasets = [self.dsA0, self.dsA1, self.dsB0, self.dsB1]
xr.combine_auto(datasets)
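# Note: in released xarray versions these entry points are exposed as
# xr.combine_nested and xr.combine_by_coords; combine_manual/combine_auto
# reflect an older naming of the same functions.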
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl import app
from absl import flags
from absl import logging
FLAGS = flags.FLAGS
flags.DEFINE_string('echo', None, 'Text to echo.')
def main(argv):
del argv # Unused.
print('Running under Python {0[0]}.{0[1]}.{0[2]}'.format(sys.version_info),
file=sys.stderr)
logging.info('echo is %s.', FLAGS.echo)
if __name__ == '__main__':
app.run(main)
|
from __future__ import division
from math import sin
def quadratic_interpolate(x, y, precision=250, **kwargs):
"""
Interpolate x, y using a quadratic algorithm
https://en.wikipedia.org/wiki/Spline_(mathematics)
"""
n = len(x) - 1
delta_x = [x2 - x1 for x1, x2 in zip(x, x[1:])]
delta_y = [y2 - y1 for y1, y2 in zip(y, y[1:])]
slope = [delta_y[i] / delta_x[i] if delta_x[i] else 1 for i in range(n)]
# Quadratic spline: a + bx + cx²
a = y
b = [0] * (n + 1)
c = [0] * (n + 1)
for i in range(1, n):
b[i] = 2 * slope[i - 1] - b[i - 1]
c = [(slope[i] - b[i]) / delta_x[i] if delta_x[i] else 0 for i in range(n)]
for i in range(n + 1):
yield x[i], a[i]
if i == n or delta_x[i] == 0:
continue
for s in range(1, precision):
X = s * delta_x[i] / precision
X2 = X * X
yield x[i] + X, a[i] + b[i] * X + c[i] * X2
def cubic_interpolate(x, y, precision=250, **kwargs):
"""
Interpolate x, y using a cubic algorithm
https://en.wikipedia.org/wiki/Spline_interpolation
"""
n = len(x) - 1
# Spline equation is a + bx + cx² + dx³
# ie: Spline part i equation is a[i] + b[i]x + c[i]x² + d[i]x³
a = y
b = [0] * (n + 1)
c = [0] * (n + 1)
d = [0] * (n + 1)
m = [0] * (n + 1)
z = [0] * (n + 1)
h = [x2 - x1 for x1, x2 in zip(x, x[1:])]
k = [a2 - a1 for a1, a2 in zip(a, a[1:])]
g = [k[i] / h[i] if h[i] else 1 for i in range(n)]
for i in range(1, n):
j = i - 1
l = 1 / (2 * (x[i + 1] - x[j]) - h[j] * m[j]) if x[i + 1] - x[j] else 0
m[i] = h[i] * l
z[i] = (3 * (g[i] - g[j]) - h[j] * z[j]) * l
for j in reversed(range(n)):
if h[j] == 0:
continue
c[j] = z[j] - (m[j] * c[j + 1])
b[j] = g[j] - (h[j] * (c[j + 1] + 2 * c[j])) / 3
d[j] = (c[j + 1] - c[j]) / (3 * h[j])
for i in range(n + 1):
yield x[i], a[i]
if i == n or h[i] == 0:
continue
for s in range(1, precision):
X = s * h[i] / precision
X2 = X * X
X3 = X2 * X
yield x[i] + X, a[i] + b[i] * X + c[i] * X2 + d[i] * X3
def hermite_interpolate(
x, y, precision=250, type='cardinal', c=None, b=None, t=None
):
"""
Interpolate x, y using the hermite method.
See https://en.wikipedia.org/wiki/Cubic_Hermite_spline
    This interpolation is configurable and contains 4 subtypes:
* Catmull Rom
* Finite Difference
* Cardinal
* Kochanek Bartels
The cardinal subtype is customizable with a parameter:
* c: tension (0, 1)
This last type is also customizable using 3 parameters:
* c: continuity (-1, 1)
* b: bias (-1, 1)
* t: tension (-1, 1)
"""
n = len(x) - 1
m = [1] * (n + 1)
w = [1] * (n + 1)
delta_x = [x2 - x1 for x1, x2 in zip(x, x[1:])]
if type == 'catmull_rom':
type = 'cardinal'
c = 0
if type == 'finite_difference':
for i in range(1, n):
m[i] = w[i] = .5 * ((y[i + 1] - y[i]) / (x[i + 1] - x[i]) +
(y[i] - y[i - 1]) / (x[i] - x[i - 1])
) if x[i + 1] - x[i] and x[i] - x[i - 1] else 0
elif type == 'kochanek_bartels':
c = c or 0
b = b or 0
t = t or 0
for i in range(1, n):
m[i] = .5 * ((1 - t) * (1 + b) * (1 + c) * (y[i] - y[i - 1]) +
(1 - t) * (1 - b) * (1 - c) * (y[i + 1] - y[i]))
w[i] = .5 * ((1 - t) * (1 + b) * (1 - c) * (y[i] - y[i - 1]) +
(1 - t) * (1 - b) * (1 + c) * (y[i + 1] - y[i]))
if type == 'cardinal':
c = c or 0
for i in range(1, n):
m[i] = w[i] = (1 - c) * (y[i + 1] - y[i - 1]) / (
x[i + 1] - x[i - 1]
) if x[i + 1] - x[i - 1] else 0
def p(i, x_):
t = (x_ - x[i]) / delta_x[i]
t2 = t * t
t3 = t2 * t
h00 = 2 * t3 - 3 * t2 + 1
h10 = t3 - 2 * t2 + t
h01 = -2 * t3 + 3 * t2
h11 = t3 - t2
return (
h00 * y[i] + h10 * m[i] * delta_x[i] + h01 * y[i + 1] +
h11 * w[i + 1] * delta_x[i]
)
for i in range(n + 1):
yield x[i], y[i]
if i == n or delta_x[i] == 0:
continue
for s in range(1, precision):
X = x[i] + s * delta_x[i] / precision
yield X, p(i, X)
def lagrange_interpolate(x, y, precision=250, **kwargs):
"""
Interpolate x, y using Lagrange polynomials
https://en.wikipedia.org/wiki/Lagrange_polynomial
"""
n = len(x) - 1
delta_x = [x2 - x1 for x1, x2 in zip(x, x[1:])]
for i in range(n + 1):
yield x[i], y[i]
if i == n or delta_x[i] == 0:
continue
for s in range(1, precision):
X = x[i] + s * delta_x[i] / precision
s = 0
for k in range(n + 1):
p = 1
for m in range(n + 1):
if m == k:
continue
if x[k] - x[m]:
p *= (X - x[m]) / (x[k] - x[m])
s += y[k] * p
yield X, s
def trigonometric_interpolate(x, y, precision=250, **kwargs):
"""
    Interpolate x, y using trigonometric interpolation
As per http://en.wikipedia.org/wiki/Trigonometric_interpolation
"""
n = len(x) - 1
delta_x = [x2 - x1 for x1, x2 in zip(x, x[1:])]
for i in range(n + 1):
yield x[i], y[i]
if i == n or delta_x[i] == 0:
continue
for s in range(1, precision):
X = x[i] + s * delta_x[i] / precision
s = 0
for k in range(n + 1):
p = 1
for m in range(n + 1):
if m == k:
continue
if sin(0.5 * (x[k] - x[m])):
p *= sin(0.5 * (X - x[m])) / sin(0.5 * (x[k] - x[m]))
s += y[k] * p
yield X, s
INTERPOLATIONS = {
'quadratic': quadratic_interpolate,
'cubic': cubic_interpolate,
'hermite': hermite_interpolate,
'lagrange': lagrange_interpolate,
'trigonometric': trigonometric_interpolate
}
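# In pygal these functions are selected through chart configuration, e.g.
# pygal.Line(interpolate='cubic', interpolation_precision=250); extra keyword
# arguments such as the hermite subtype are passed via interpolation_parameters.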
if __name__ == '__main__':
from pygal import XY
points = [(.1, 7), (.3, -4), (.6, 10), (.9, 8), (1.4, 3), (1.7, 1)]
xy = XY(show_dots=False)
xy.add('normal', points)
xy.add('quadratic', quadratic_interpolate(*zip(*points)))
xy.add('cubic', cubic_interpolate(*zip(*points)))
xy.add('lagrange', lagrange_interpolate(*zip(*points)))
xy.add('trigonometric', trigonometric_interpolate(*zip(*points)))
xy.add(
        'hermite catmull_rom',
        hermite_interpolate(*zip(*points), type='catmull_rom')
)
xy.add(
'hermite finite_difference',
hermite_interpolate(*zip(*points), type='finite_difference')
)
xy.add(
'hermite cardinal -.5',
hermite_interpolate(*zip(*points), type='cardinal', c=-.5)
)
xy.add(
'hermite cardinal .5',
hermite_interpolate(*zip(*points), type='cardinal', c=.5)
)
xy.add(
'hermite kochanek_bartels .5 .75 -.25',
hermite_interpolate(
*zip(*points), type='kochanek_bartels', c=.5, b=.75, t=-.25
)
)
xy.add(
'hermite kochanek_bartels .25 -.75 .5',
hermite_interpolate(
*zip(*points), type='kochanek_bartels', c=.25, b=-.75, t=.5
)
)
xy.render_in_browser()
|
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_USERNAME, STATE_OFF
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from . import CONF_SERVERS, DATA_UPCLOUD, SIGNAL_UPDATE_UPCLOUD, UpCloudServerEntity
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SERVERS): vol.All(cv.ensure_list, [cv.string])}
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the UpCloud server switch."""
coordinator = hass.data[DATA_UPCLOUD].coordinators[config_entry.data[CONF_USERNAME]]
entities = [UpCloudSwitch(coordinator, uuid) for uuid in coordinator.data]
async_add_entities(entities, True)
class UpCloudSwitch(UpCloudServerEntity, SwitchEntity):
"""Representation of an UpCloud server switch."""
def turn_on(self, **kwargs):
"""Start the server."""
if self.state == STATE_OFF:
self._server.start()
dispatcher_send(self.hass, SIGNAL_UPDATE_UPCLOUD)
def turn_off(self, **kwargs):
"""Stop the server."""
if self.is_on:
self._server.stop()
|
import pypck
from homeassistant.const import CONF_ADDRESS, CONF_UNIT_OF_MEASUREMENT
from . import LcnDevice
from .const import (
CONF_CONNECTIONS,
CONF_SOURCE,
DATA_LCN,
LED_PORTS,
S0_INPUTS,
SETPOINTS,
THRESHOLDS,
VARIABLES,
)
from .helpers import get_connection
async def async_setup_platform(
hass, hass_config, async_add_entities, discovery_info=None
):
"""Set up the LCN sensor platform."""
if discovery_info is None:
return
devices = []
for config in discovery_info:
address, connection_id = config[CONF_ADDRESS]
addr = pypck.lcn_addr.LcnAddr(*address)
connections = hass.data[DATA_LCN][CONF_CONNECTIONS]
connection = get_connection(connections, connection_id)
address_connection = connection.get_address_conn(addr)
if config[CONF_SOURCE] in VARIABLES + SETPOINTS + THRESHOLDS + S0_INPUTS:
device = LcnVariableSensor(config, address_connection)
else: # in LED_PORTS + LOGICOP_PORTS
device = LcnLedLogicSensor(config, address_connection)
devices.append(device)
async_add_entities(devices)
class LcnVariableSensor(LcnDevice):
"""Representation of a LCN sensor for variables."""
def __init__(self, config, address_connection):
"""Initialize the LCN sensor."""
super().__init__(config, address_connection)
self.variable = pypck.lcn_defs.Var[config[CONF_SOURCE]]
self.unit = pypck.lcn_defs.VarUnit.parse(config[CONF_UNIT_OF_MEASUREMENT])
self._value = None
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
await self.address_connection.activate_status_request_handler(self.variable)
@property
def state(self):
"""Return the state of the entity."""
return self._value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self.unit.value
def input_received(self, input_obj):
"""Set sensor value when LCN input object (command) is received."""
if (
not isinstance(input_obj, pypck.inputs.ModStatusVar)
or input_obj.get_var() != self.variable
):
return
self._value = input_obj.get_value().to_var_unit(self.unit)
self.async_write_ha_state()
class LcnLedLogicSensor(LcnDevice):
"""Representation of a LCN sensor for leds and logicops."""
def __init__(self, config, address_connection):
"""Initialize the LCN sensor."""
super().__init__(config, address_connection)
if config[CONF_SOURCE] in LED_PORTS:
self.source = pypck.lcn_defs.LedPort[config[CONF_SOURCE]]
else:
self.source = pypck.lcn_defs.LogicOpPort[config[CONF_SOURCE]]
self._value = None
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
await self.address_connection.activate_status_request_handler(self.source)
@property
def state(self):
"""Return the state of the entity."""
return self._value
def input_received(self, input_obj):
"""Set sensor value when LCN input object (command) is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusLedsAndLogicOps):
return
if self.source in pypck.lcn_defs.LedPort:
self._value = input_obj.get_led_state(self.source.value).name.lower()
elif self.source in pypck.lcn_defs.LogicOpPort:
self._value = input_obj.get_logic_op_state(self.source.value).name.lower()
self.async_write_ha_state()
|
from functools import partial
from ...utils import verbose
from ..utils import (has_dataset, _data_path, _get_version, _version_doc,
_data_path_doc_accept)
has_brainstorm_data = partial(has_dataset,
name='brainstorm.bst_phantom_elekta')
_description = u"""
URL: http://neuroimage.usc.edu/brainstorm/Tutorials/PhantomElekta
"""
@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
*, accept=False, verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='brainstorm',
download=download,
archive_name='bst_phantom_elekta.tar.gz',
accept=accept)
_data_path_doc = _data_path_doc_accept.format(
name='brainstorm', conf='MNE_DATASETS_BRAINSTORM_DATA_PATH')
_data_path_doc = _data_path_doc.replace('brainstorm dataset',
'brainstorm (bst_phantom_elekta) '
'dataset')
data_path.__doc__ = _data_path_doc
def get_version(): # noqa: D103
return _get_version('brainstorm.bst_phantom_elekta')
get_version.__doc__ = _version_doc.format(name='brainstorm')
def description():
"""Get description of brainstorm (bst_phantom_elekta) dataset."""
for desc in _description.splitlines():
print(desc)
|
from flexx import flx
from flexxamples.demos.drawing import Drawing
from flexxamples.demos.chatroom import ChatRoom
import tornado.web
# Serve some web apps, just for fun
flx.serve(Drawing)
flx.serve(ChatRoom)
class MyAboutHandler(tornado.web.RequestHandler):
def get(self):
self.write('<html>This is just an <i>example</i>.</html>')
class MyAPIHandler(tornado.web.RequestHandler):
def get(self, path):
# self.request.path -> full path
# path -> the regexp group specified in add_handlers
self.write('echo ' + path)
# Get a ref to the tornado.web.Application object
tornado_app = flx.current_server().app
# Add our handler
tornado_app.add_handlers(r".*", [(r"/about", MyAboutHandler),
(r"/api/(.*)", MyAPIHandler)])
# Note: Tornado tries to match handlers in order, but the handlers
# specified in the constructor come last. Therefore we can easily add
# specific handlers here even though Flexx's main handler is very
# generic.
flx.start()
|
from datetime import datetime, time, timedelta
from unittest import mock
from miio import DeviceException
import pytest
from pytz import utc
from homeassistant.components.vacuum import (
ATTR_BATTERY_ICON,
ATTR_FAN_SPEED,
ATTR_FAN_SPEED_LIST,
DOMAIN,
SERVICE_CLEAN_SPOT,
SERVICE_LOCATE,
SERVICE_RETURN_TO_BASE,
SERVICE_SEND_COMMAND,
SERVICE_SET_FAN_SPEED,
SERVICE_START,
SERVICE_STOP,
STATE_CLEANING,
STATE_ERROR,
)
from homeassistant.components.xiaomi_miio.const import DOMAIN as XIAOMI_DOMAIN
from homeassistant.components.xiaomi_miio.vacuum import (
ATTR_CLEANED_AREA,
ATTR_CLEANED_TOTAL_AREA,
ATTR_CLEANING_COUNT,
ATTR_CLEANING_TIME,
ATTR_CLEANING_TOTAL_TIME,
ATTR_DO_NOT_DISTURB,
ATTR_DO_NOT_DISTURB_END,
ATTR_DO_NOT_DISTURB_START,
ATTR_ERROR,
ATTR_FILTER_LEFT,
ATTR_MAIN_BRUSH_LEFT,
ATTR_SIDE_BRUSH_LEFT,
ATTR_TIMERS,
CONF_HOST,
CONF_NAME,
CONF_TOKEN,
SERVICE_CLEAN_SEGMENT,
SERVICE_CLEAN_ZONE,
SERVICE_GOTO,
SERVICE_MOVE_REMOTE_CONTROL,
SERVICE_MOVE_REMOTE_CONTROL_STEP,
SERVICE_START_REMOTE_CONTROL,
SERVICE_STOP_REMOTE_CONTROL,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_PLATFORM,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
PLATFORM = "xiaomi_miio"
# calls made when device status is requested
STATUS_CALLS = [
mock.call.status(),
mock.call.consumable_status(),
mock.call.clean_history(),
mock.call.dnd_status(),
mock.call.timer(),
]
@pytest.fixture(name="mock_mirobo_is_got_error")
def mirobo_is_got_error_fixture():
"""Mock mock_mirobo."""
mock_vacuum = MagicMock()
mock_vacuum.status().data = {"test": "raw"}
mock_vacuum.status().is_on = False
mock_vacuum.status().fanspeed = 38
mock_vacuum.status().got_error = True
mock_vacuum.status().error = "Error message"
mock_vacuum.status().battery = 82
mock_vacuum.status().clean_area = 123.43218
mock_vacuum.status().clean_time = timedelta(hours=2, minutes=35, seconds=34)
mock_vacuum.consumable_status().main_brush_left = timedelta(
hours=12, minutes=35, seconds=34
)
mock_vacuum.consumable_status().side_brush_left = timedelta(
hours=12, minutes=35, seconds=34
)
mock_vacuum.consumable_status().filter_left = timedelta(
hours=12, minutes=35, seconds=34
)
mock_vacuum.clean_history().count = "35"
mock_vacuum.clean_history().total_area = 123.43218
mock_vacuum.clean_history().total_duration = timedelta(
hours=11, minutes=35, seconds=34
)
mock_vacuum.status().state = "Test Xiaomi Charging"
mock_vacuum.dnd_status().enabled = True
mock_vacuum.dnd_status().start = time(hour=22, minute=0)
mock_vacuum.dnd_status().end = time(hour=6, minute=0)
mock_timer_1 = MagicMock()
mock_timer_1.enabled = True
mock_timer_1.cron = "5 5 1 8 1"
mock_timer_1.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc)
mock_timer_2 = MagicMock()
mock_timer_2.enabled = False
mock_timer_2.cron = "5 5 1 8 2"
mock_timer_2.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc)
mock_vacuum.timer.return_value = [mock_timer_1, mock_timer_2]
with patch("homeassistant.components.xiaomi_miio.vacuum.Vacuum") as mock_vaccum_cls:
mock_vaccum_cls.return_value = mock_vacuum
yield mock_vacuum
old_fanspeeds = {
"Silent": 38,
"Standard": 60,
"Medium": 77,
"Turbo": 90,
}
new_fanspeeds = {
"Silent": 101,
"Standard": 102,
"Medium": 103,
"Turbo": 104,
"Gentle": 105,
}
@pytest.fixture(name="mock_mirobo_fanspeeds", params=[old_fanspeeds, new_fanspeeds])
def mirobo_old_speeds_fixture(request):
"""Fixture for testing both types of fanspeeds."""
mock_vacuum = MagicMock()
mock_vacuum.status().battery = 32
mock_vacuum.fan_speed_presets.return_value = request.param
mock_vacuum.status().fanspeed = list(request.param.values())[0]
with patch("homeassistant.components.xiaomi_miio.vacuum.Vacuum") as mock_vaccum_cls:
mock_vaccum_cls.return_value = mock_vacuum
yield mock_vacuum
@pytest.fixture(name="mock_mirobo_is_on")
def mirobo_is_on_fixture():
"""Mock mock_mirobo."""
mock_vacuum = MagicMock()
mock_vacuum.status().data = {"test": "raw"}
mock_vacuum.status().is_on = True
mock_vacuum.status().fanspeed = 99
mock_vacuum.status().got_error = False
mock_vacuum.status().battery = 32
mock_vacuum.status().clean_area = 133.43218
mock_vacuum.status().clean_time = timedelta(hours=2, minutes=55, seconds=34)
mock_vacuum.consumable_status().main_brush_left = timedelta(
hours=11, minutes=35, seconds=34
)
mock_vacuum.consumable_status().side_brush_left = timedelta(
hours=11, minutes=35, seconds=34
)
mock_vacuum.consumable_status().filter_left = timedelta(
hours=11, minutes=35, seconds=34
)
mock_vacuum.clean_history().count = "41"
mock_vacuum.clean_history().total_area = 323.43218
mock_vacuum.clean_history().total_duration = timedelta(
hours=11, minutes=15, seconds=34
)
mock_vacuum.status().state = "Test Xiaomi Cleaning"
mock_vacuum.status().state_code = 5
mock_vacuum.dnd_status().enabled = False
mock_timer_1 = MagicMock()
mock_timer_1.enabled = True
mock_timer_1.cron = "5 5 1 8 1"
mock_timer_1.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc)
mock_timer_2 = MagicMock()
mock_timer_2.enabled = False
mock_timer_2.cron = "5 5 1 8 2"
mock_timer_2.next_schedule = datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc)
mock_vacuum.timer.return_value = [mock_timer_1, mock_timer_2]
with patch("homeassistant.components.xiaomi_miio.vacuum.Vacuum") as mock_vaccum_cls:
mock_vaccum_cls.return_value = mock_vacuum
yield mock_vacuum
async def test_xiaomi_exceptions(hass, caplog, mock_mirobo_is_on):
"""Test error logging on exceptions."""
entity_name = "test_vacuum_cleaner_error"
entity_id = await setup_component(hass, entity_name)
def is_available():
state = hass.states.get(entity_id)
return state.state != STATE_UNAVAILABLE
# The initial setup has to be done successfully
assert "Initializing with host 192.168.1.100 (token 12345...)" in caplog.text
assert "WARNING" not in caplog.text
assert is_available()
# Second update causes an exception, which should be logged
mock_mirobo_is_on.status.side_effect = DeviceException("dummy exception")
await hass.helpers.entity_component.async_update_entity(entity_id)
assert "WARNING" in caplog.text
assert "Got exception while fetching the state" in caplog.text
assert not is_available()
# Third update does not get logged as the device is already unavailable,
# so we clear the log and reset the status to test that
caplog.clear()
mock_mirobo_is_on.status.reset_mock()
await hass.helpers.entity_component.async_update_entity(entity_id)
assert "Got exception while fetching the state" not in caplog.text
assert not is_available()
assert mock_mirobo_is_on.status.call_count == 1
async def test_xiaomi_vacuum_services(hass, caplog, mock_mirobo_is_got_error):
"""Test vacuum supported features."""
entity_name = "test_vacuum_cleaner_1"
entity_id = await setup_component(hass, entity_name)
assert "Initializing with host 192.168.1.100 (token 12345...)" in caplog.text
# Check state attributes
state = hass.states.get(entity_id)
assert state.state == STATE_ERROR
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204
assert state.attributes.get(ATTR_DO_NOT_DISTURB) == STATE_ON
assert state.attributes.get(ATTR_DO_NOT_DISTURB_START) == "22:00:00"
assert state.attributes.get(ATTR_DO_NOT_DISTURB_END) == "06:00:00"
assert state.attributes.get(ATTR_ERROR) == "Error message"
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-80"
assert state.attributes.get(ATTR_CLEANING_TIME) == 155
assert state.attributes.get(ATTR_CLEANED_AREA) == 123
assert state.attributes.get(ATTR_MAIN_BRUSH_LEFT) == 12
assert state.attributes.get(ATTR_SIDE_BRUSH_LEFT) == 12
assert state.attributes.get(ATTR_FILTER_LEFT) == 12
assert state.attributes.get(ATTR_CLEANING_COUNT) == 35
assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 123
assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 695
assert state.attributes.get(ATTR_TIMERS) == [
{
"enabled": True,
"cron": "5 5 1 8 1",
"next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc),
},
{
"enabled": False,
"cron": "5 5 1 8 2",
"next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc),
},
]
# Call services
await hass.services.async_call(
DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True
)
mock_mirobo_is_got_error.assert_has_calls(
[mock.call.resume_or_start()], any_order=True
)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True
)
mock_mirobo_is_got_error.assert_has_calls([mock.call.stop()], any_order=True)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True
)
mock_mirobo_is_got_error.assert_has_calls([mock.call.home()], any_order=True)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True
)
mock_mirobo_is_got_error.assert_has_calls([mock.call.find()], any_order=True)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True
)
mock_mirobo_is_got_error.assert_has_calls([mock.call.spot()], any_order=True)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN,
SERVICE_SEND_COMMAND,
{"entity_id": entity_id, "command": "raw"},
blocking=True,
)
mock_mirobo_is_got_error.assert_has_calls(
[mock.call.raw_command("raw", None)], any_order=True
)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
await hass.services.async_call(
DOMAIN,
SERVICE_SEND_COMMAND,
{"entity_id": entity_id, "command": "raw", "params": {"k1": 2}},
blocking=True,
)
mock_mirobo_is_got_error.assert_has_calls(
[mock.call.raw_command("raw", {"k1": 2})], any_order=True
)
mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_got_error.reset_mock()
async def test_xiaomi_specific_services(hass, caplog, mock_mirobo_is_on):
"""Test vacuum supported features."""
entity_name = "test_vacuum_cleaner_2"
entity_id = await setup_component(hass, entity_name)
assert "Initializing with host 192.168.1.100 (token 12345" in caplog.text
# Check state attributes
state = hass.states.get(entity_id)
assert state.state == STATE_CLEANING
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 14204
assert state.attributes.get(ATTR_DO_NOT_DISTURB) == STATE_OFF
assert state.attributes.get(ATTR_ERROR) is None
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-30"
assert state.attributes.get(ATTR_CLEANING_TIME) == 175
assert state.attributes.get(ATTR_CLEANED_AREA) == 133
assert state.attributes.get(ATTR_MAIN_BRUSH_LEFT) == 11
assert state.attributes.get(ATTR_SIDE_BRUSH_LEFT) == 11
assert state.attributes.get(ATTR_FILTER_LEFT) == 11
assert state.attributes.get(ATTR_CLEANING_COUNT) == 41
assert state.attributes.get(ATTR_CLEANED_TOTAL_AREA) == 323
assert state.attributes.get(ATTR_CLEANING_TOTAL_TIME) == 675
assert state.attributes.get(ATTR_TIMERS) == [
{
"enabled": True,
"cron": "5 5 1 8 1",
"next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc),
},
{
"enabled": False,
"cron": "5 5 1 8 2",
"next_schedule": datetime(2020, 5, 23, 13, 21, 10, tzinfo=utc),
},
]
# Xiaomi vacuum specific services:
await hass.services.async_call(
XIAOMI_DOMAIN,
SERVICE_START_REMOTE_CONTROL,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_mirobo_is_on.assert_has_calls([mock.call.manual_start()], any_order=True)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_on.reset_mock()
control = {"duration": 1000, "rotation": -40, "velocity": -0.1}
await hass.services.async_call(
XIAOMI_DOMAIN,
SERVICE_MOVE_REMOTE_CONTROL,
{**control, ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_mirobo_is_on.manual_control.assert_has_calls(
[mock.call(**control)], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_on.reset_mock()
await hass.services.async_call(
XIAOMI_DOMAIN,
SERVICE_STOP_REMOTE_CONTROL,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_mirobo_is_on.assert_has_calls([mock.call.manual_stop()], any_order=True)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_on.reset_mock()
control_once = {"duration": 2000, "rotation": 120, "velocity": 0.1}
await hass.services.async_call(
XIAOMI_DOMAIN,
SERVICE_MOVE_REMOTE_CONTROL_STEP,
{**control_once, ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_mirobo_is_on.manual_control_once.assert_has_calls(
[mock.call(**control_once)], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_on.reset_mock()
control = {"zone": [[123, 123, 123, 123]], "repeats": 2}
await hass.services.async_call(
XIAOMI_DOMAIN,
SERVICE_CLEAN_ZONE,
{**control, ATTR_ENTITY_ID: entity_id},
blocking=True,
)
mock_mirobo_is_on.zoned_clean.assert_has_calls(
[mock.call([[123, 123, 123, 123, 2]])], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_is_on.reset_mock()
async def test_xiaomi_vacuum_fanspeeds(hass, caplog, mock_mirobo_fanspeeds):
"""Test Xiaomi vacuum fanspeeds."""
entity_name = "test_vacuum_cleaner_2"
entity_id = await setup_component(hass, entity_name)
assert "Initializing with host 192.168.1.100 (token 12345" in caplog.text
state = hass.states.get(entity_id)
assert state.attributes.get(ATTR_FAN_SPEED) == "Silent"
fanspeeds = state.attributes.get(ATTR_FAN_SPEED_LIST)
for speed in ["Silent", "Standard", "Medium", "Turbo"]:
assert speed in fanspeeds
# Set speed service:
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_SPEED,
{"entity_id": entity_id, "fan_speed": 60},
blocking=True,
)
mock_mirobo_fanspeeds.assert_has_calls(
[mock.call.set_fan_speed(60)], any_order=True
)
mock_mirobo_fanspeeds.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_fanspeeds.reset_mock()
fan_speed_dict = mock_mirobo_fanspeeds.fan_speed_presets()
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_SPEED,
{"entity_id": entity_id, "fan_speed": "Medium"},
blocking=True,
)
mock_mirobo_fanspeeds.assert_has_calls(
[mock.call.set_fan_speed(fan_speed_dict["Medium"])], any_order=True
)
mock_mirobo_fanspeeds.assert_has_calls(STATUS_CALLS, any_order=True)
mock_mirobo_fanspeeds.reset_mock()
assert "ERROR" not in caplog.text
await hass.services.async_call(
DOMAIN,
SERVICE_SET_FAN_SPEED,
{"entity_id": entity_id, "fan_speed": "invent"},
blocking=True,
)
assert "Fan speed step not recognized" in caplog.text
async def test_xiaomi_vacuum_goto_service(hass, caplog, mock_mirobo_is_on):
"""Test vacuum supported features."""
entity_name = "test_vacuum_cleaner_2"
entity_id = await setup_component(hass, entity_name)
data = {"entity_id": entity_id, "x_coord": 25500, "y_coord": 25500}
await hass.services.async_call(XIAOMI_DOMAIN, SERVICE_GOTO, data, blocking=True)
mock_mirobo_is_on.goto.assert_has_calls(
[mock.call(x_coord=data["x_coord"], y_coord=data["y_coord"])], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
async def test_xiaomi_vacuum_clean_segment_service(hass, caplog, mock_mirobo_is_on):
"""Test vacuum supported features."""
entity_name = "test_vacuum_cleaner_2"
entity_id = await setup_component(hass, entity_name)
data = {"entity_id": entity_id, "segments": ["1", "2"]}
await hass.services.async_call(
XIAOMI_DOMAIN, SERVICE_CLEAN_SEGMENT, data, blocking=True
)
mock_mirobo_is_on.segment_clean.assert_has_calls(
[mock.call(segments=[int(i) for i in data["segments"]])], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
async def test_xiaomi_vacuum_clean_segment_service_single_segment(
hass, caplog, mock_mirobo_is_on
):
"""Test vacuum supported features."""
entity_name = "test_vacuum_cleaner_2"
entity_id = await setup_component(hass, entity_name)
data = {"entity_id": entity_id, "segments": 1}
await hass.services.async_call(
XIAOMI_DOMAIN, SERVICE_CLEAN_SEGMENT, data, blocking=True
)
mock_mirobo_is_on.segment_clean.assert_has_calls(
[mock.call(segments=[data["segments"]])], any_order=True
)
mock_mirobo_is_on.assert_has_calls(STATUS_CALLS, any_order=True)
async def setup_component(hass, entity_name):
"""Set up vacuum component."""
entity_id = f"{DOMAIN}.{entity_name}"
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
CONF_PLATFORM: PLATFORM,
CONF_HOST: "192.168.1.100",
CONF_NAME: entity_name,
CONF_TOKEN: "12345678901234567890123456789012",
}
},
)
await hass.async_block_till_done()
return entity_id
|
__docformat__ = "restructuredtext en"
def no_filter(_):
return 1
# Iterators ###################################################################
class FilteredIterator(object):
def __init__(self, node, list_func, filter_func=None):
self._next = [(node, 0)]
if filter_func is None:
filter_func = no_filter
self._list = list_func(node, filter_func)
def __next__(self):
try:
return self._list.pop(0)
        except IndexError:
return None
next = __next__
# Base Visitor ################################################################
class Visitor(object):
def __init__(self, iterator_class, filter_func=None):
self._iter_class = iterator_class
self.filter = filter_func
def visit(self, node, *args, **kargs):
"""
launch the visit on a given node
call 'open_visit' before the beginning of the visit, with extra args
given
when all nodes have been visited, call the 'close_visit' method
"""
self.open_visit(node, *args, **kargs)
return self.close_visit(self._visit(node))
def _visit(self, node):
iterator = self._get_iterator(node)
n = next(iterator)
while n:
result = n.accept(self)
n = next(iterator)
return result
def _get_iterator(self, node):
return self._iter_class(node, self.filter)
def open_visit(self, *args, **kargs):
"""
method called at the beginning of the visit
"""
pass
def close_visit(self, result):
"""
method called at the end of the visit
"""
return result
# standard visited mixin ######################################################
class VisitedMixIn(object):
"""
    Visited interface that allows node visitors to use the node
"""
def get_visit_name(self):
"""
        Return the visit name for the mixed class. When calling 'accept', the
        method <'visit_' + name returned by this method> will be called on
        the visitor.
"""
try:
return self.TYPE.replace('-', '_')
        except AttributeError:
return self.__class__.__name__.lower()
def accept(self, visitor, *args, **kwargs):
func = getattr(visitor, 'visit_%s' % self.get_visit_name())
return func(self, *args, **kwargs)
def leave(self, visitor, *args, **kwargs):
func = getattr(visitor, 'leave_%s' % self.get_visit_name())
return func(self, *args, **kwargs)
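# Illustrative usage sketch (not part of the original module): it assumes a
# hypothetical node type with a ``children`` attribute and a flattening
# ``list_func``; the names DemoNode, flatten, FlattenIterator and PrintVisitor
# are invented for demonstration only.
#
#   class DemoNode(VisitedMixIn):
#       TYPE = 'demo-node'
#       def __init__(self, children=()):
#           self.children = list(children)
#
#   def flatten(node, filter_func):
#       nodes = [node] if filter_func(node) else []
#       for child in node.children:
#           nodes.extend(flatten(child, filter_func))
#       return nodes
#
#   class FlattenIterator(FilteredIterator):
#       def __init__(self, node, filter_func=None):
#           super(FlattenIterator, self).__init__(node, flatten, filter_func)
#
#   class PrintVisitor(Visitor):
#       def visit_demo_node(self, node):
#           print('visiting', node)
#           return node
#
#   PrintVisitor(FlattenIterator).visit(DemoNode([DemoNode(), DemoNode()]))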
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.cover import DOMAIN
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a cover."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[0]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_actions = [
{
"domain": DOMAIN,
"type": "open",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "close",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "stop",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_actions_tilt(hass, device_reg, entity_reg):
"""Test we get the expected actions from a cover."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[3]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_actions = [
{
"domain": DOMAIN,
"type": "open",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "close",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "stop",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "open_tilt",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "close_tilt",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_actions_set_pos(hass, device_reg, entity_reg):
"""Test we get the expected actions from a cover."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[1]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_actions = [
{
"domain": DOMAIN,
"type": "set_position",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_actions_set_tilt_pos(hass, device_reg, entity_reg):
"""Test we get the expected actions from a cover."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[2]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_actions = [
{
"domain": DOMAIN,
"type": "open",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "close",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "stop",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
{
"domain": DOMAIN,
"type": "set_tilt_position",
"device_id": device_entry.id,
"entity_id": ent.entity_id,
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_action_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a cover action."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[0]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert len(actions) == 3 # open, close, stop
for action in actions:
capabilities = await async_get_device_automation_capabilities(
hass, "action", action
)
assert capabilities == {"extra_fields": []}
async def test_get_action_capabilities_set_pos(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a cover action."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[1]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"extra_fields": [
{
"name": "position",
"optional": True,
"type": "integer",
"default": 0,
"valueMax": 100,
"valueMin": 0,
}
]
}
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert len(actions) == 1 # set_position
for action in actions:
capabilities = await async_get_device_automation_capabilities(
hass, "action", action
)
if action["type"] == "set_position":
assert capabilities == expected_capabilities
else:
assert capabilities == {"extra_fields": []}
async def test_get_action_capabilities_set_tilt_pos(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a cover action."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
ent = platform.ENTITIES[2]
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN, "test", ent.unique_id, device_id=device_entry.id
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_capabilities = {
"extra_fields": [
{
"name": "position",
"optional": True,
"type": "integer",
"default": 0,
"valueMax": 100,
"valueMin": 0,
}
]
}
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert len(actions) == 4 # open, close, stop, set_tilt_position
for action in actions:
capabilities = await async_get_device_automation_capabilities(
hass, "action", action
)
if action["type"] == "set_tilt_position":
assert capabilities == expected_capabilities
else:
assert capabilities == {"extra_fields": []}
async def test_action(hass):
"""Test for cover actions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event_open"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "open",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_close"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "close",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_stop"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "stop",
},
},
]
},
)
await hass.async_block_till_done()
open_calls = async_mock_service(hass, "cover", "open_cover")
close_calls = async_mock_service(hass, "cover", "close_cover")
stop_calls = async_mock_service(hass, "cover", "stop_cover")
hass.bus.async_fire("test_event_open")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 0
assert len(stop_calls) == 0
hass.bus.async_fire("test_event_close")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 1
assert len(stop_calls) == 0
hass.bus.async_fire("test_event_stop")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 1
assert len(stop_calls) == 1
async def test_action_tilt(hass):
"""Test for cover tilt actions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event_open"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "open_tilt",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_close"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "close_tilt",
},
},
]
},
)
await hass.async_block_till_done()
open_calls = async_mock_service(hass, "cover", "open_cover_tilt")
close_calls = async_mock_service(hass, "cover", "close_cover_tilt")
hass.bus.async_fire("test_event_open")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 0
hass.bus.async_fire("test_event_close")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 1
hass.bus.async_fire("test_event_stop")
await hass.async_block_till_done()
assert len(open_calls) == 1
assert len(close_calls) == 1
async def test_action_set_position(hass):
"""Test for cover set position actions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "event",
"event_type": "test_event_set_pos",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "set_position",
"position": 25,
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_set_tilt_pos",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "cover.entity",
"type": "set_tilt_position",
"position": 75,
},
},
]
},
)
await hass.async_block_till_done()
cover_pos_calls = async_mock_service(hass, "cover", "set_cover_position")
tilt_pos_calls = async_mock_service(hass, "cover", "set_cover_tilt_position")
hass.bus.async_fire("test_event_set_pos")
await hass.async_block_till_done()
assert len(cover_pos_calls) == 1
assert cover_pos_calls[0].data["position"] == 25
assert len(tilt_pos_calls) == 0
hass.bus.async_fire("test_event_set_tilt_pos")
await hass.async_block_till_done()
assert len(cover_pos_calls) == 1
assert len(tilt_pos_calls) == 1
assert tilt_pos_calls[0].data["tilt_position"] == 75
|
import unittest
import docker_registry.run as run
import mock
class TestRunGunicorn(unittest.TestCase):
@mock.patch('argparse.ArgumentParser.parse_args')
@mock.patch('os.execl')
def test_exec_gunicorn(self, mock_execl, mock_parse_args):
run.run_gunicorn()
self.assertEqual(mock_execl.call_count, 1)
# ensure that the executable's path ends with 'gunicorn', so we have
# some confidence that it called the correct executable
self.assertTrue(mock_execl.call_args[0][0].endswith('gunicorn'))
@mock.patch('argparse.ArgumentParser.parse_args')
@mock.patch('os.execl')
def test_parses_args(self, mock_execl, mock_parse_args):
run.run_gunicorn()
# ensure that argument parsing is happening
mock_parse_args.assert_called_once_with()
@mock.patch('sys.exit')
@mock.patch('distutils.spawn.find_executable', autospec=True)
@mock.patch('argparse.ArgumentParser.parse_args')
@mock.patch('os.execl')
def test_gunicorn_not_found(self, mock_execl, mock_parse_args,
mock_find_exec, mock_exit):
mock_find_exec.return_value = None
run.run_gunicorn()
# ensure that sys.exit was called
mock_exit.assert_called_once_with(1)
|
import voluptuous as vol
from homeassistant.components.device_automation import (
TRIGGER_BASE_SCHEMA,
async_get_device_automation_platform,
)
from homeassistant.const import CONF_DOMAIN
# mypy: allow-untyped-defs, no-check-untyped-defs
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA)
async def async_validate_trigger_config(hass, config):
"""Validate config."""
platform = await async_get_device_automation_platform(
hass, config[CONF_DOMAIN], "trigger"
)
if hasattr(platform, "async_validate_trigger_config"):
return await getattr(platform, "async_validate_trigger_config")(hass, config)
return platform.TRIGGER_SCHEMA(config)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for trigger."""
platform = await async_get_device_automation_platform(
hass, config[CONF_DOMAIN], "trigger"
)
return await platform.async_attach_trigger(hass, config, action, automation_info)
|
from homeassistant.components.scene import DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_ON
from homeassistant.loader import bind_hass
@bind_hass
def activate(hass, entity_id=ENTITY_MATCH_ALL):
"""Activate a scene."""
data = {}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_TURN_ON, data)
|
import os
from typing import Any
from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from service_configuration_lib import read_service_configuration
class BaseSecretProvider:
def __init__(
self,
soa_dir: Optional[str],
service_name: Optional[str],
cluster_names: List[str],
**kwargs: Any,
) -> None:
self.soa_dir = soa_dir
self.cluster_names = cluster_names
self.service_name = service_name
if service_name:
self.secret_dir = os.path.join(self.soa_dir, self.service_name, "secrets")
service_config = read_service_configuration(self.service_name, self.soa_dir)
self.encryption_key = service_config.get("encryption_key", "paasta")
def decrypt_environment(
self, environment: Dict[str, str], **kwargs: Any
) -> Dict[str, str]:
raise NotImplementedError
def write_secret(
self,
action: str,
secret_name: str,
plaintext: bytes,
cross_environment_motivation: Optional[str] = None,
) -> None:
raise NotImplementedError
def decrypt_secret(self, secret_name: str) -> str:
raise NotImplementedError
def decrypt_secret_raw(self, secret_name: str) -> bytes:
raise NotImplementedError
def get_secret_signature_from_data(self, data: Mapping[str, Any]) -> Optional[str]:
raise NotImplementedError
def renew_issue_cert(self, pki_backend: str, ttl: str) -> None:
raise NotImplementedError
class SecretProvider(BaseSecretProvider):
pass
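# Illustrative sketch (not part of the original module): a trivial provider
# that treats secrets as plaintext, showing which hooks a concrete provider
# overrides. The directory and cluster name below are placeholders.
#
#   class PlaintextSecretProvider(BaseSecretProvider):
#       def decrypt_environment(self, environment, **kwargs):
#           # No real decryption; values are passed through unchanged.
#           return dict(environment)
#
#       def decrypt_secret(self, secret_name):
#           return secret_name
#
#   provider = PlaintextSecretProvider(
#       soa_dir="/nail/etc/services", service_name=None, cluster_names=["demo"]
#   )
#   provider.decrypt_environment({"SECRET_FOO": "bar"})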
|
from typing import Dict, Optional
from absl import flags
from perfkitbenchmarker import resource
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.configs import spec
# List of nonrelational database types
DYNAMODB = 'dynamodb'
FLAGS = flags.FLAGS
class BaseNonRelationalDbSpec(spec.BaseSpec):
"""Configurable options of a nonrelational database service."""
# Needed for registering the spec class and its subclasses. See BaseSpec.
SPEC_TYPE = 'BaseNonRelationalDbSpec'
SPEC_ATTRS = ['SERVICE_TYPE']
def __init__(self,
component_full_name: str,
               flag_values: Optional[Dict[str, flags.FlagValues]] = None,
**kwargs):
super().__init__(component_full_name, flag_values=flag_values, **kwargs)
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword arguments
to construct in order to decode the named option.
"""
result = super()._GetOptionDecoderConstructions()
result.update({
'service_type': (
option_decoders.EnumDecoder,
{
'default':
None,
'valid_values': [
DYNAMODB,
],
}),
})
return result
class BaseNonRelationalDb(resource.BaseResource):
"""Object representing a nonrelational database."""
REQUIRED_ATTRS = ['SERVICE_TYPE']
RESOURCE_TYPE = 'BaseNonRelationalDb'
SERVICE_TYPE = 'Base'
def GetNonRelationalDbSpecClass(
service_type: str) -> Optional[spec.BaseSpecMetaClass]:
"""Gets the non-relational db spec class corresponding to 'service_type'."""
return spec.GetSpecClass(BaseNonRelationalDbSpec, SERVICE_TYPE=service_type)
def GetNonRelationalDbClass(
service_type: str) -> Optional[resource.AutoRegisterResourceMeta]:
"""Gets the non-relational database class corresponding to 'service_type'."""
return resource.GetResourceClass(BaseNonRelationalDb,
SERVICE_TYPE=service_type)
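# Illustrative sketch (not part of the original module): a provider-specific
# spec/resource pair would typically plug into the lookups above by declaring
# a matching SERVICE_TYPE. The DynamoDB class bodies here are placeholders.
#
#   class _DynamoDbSpec(BaseNonRelationalDbSpec):
#       SERVICE_TYPE = DYNAMODB
#
#   class _DynamoDb(BaseNonRelationalDb):
#       SERVICE_TYPE = DYNAMODB
#
#   GetNonRelationalDbSpecClass(DYNAMODB)  # -> _DynamoDbSpec
#   GetNonRelationalDbClass(DYNAMODB)      # -> _DynamoDb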
|
revision = "ce547319f7be"
down_revision = "5bc47fa7cac4"
import sqlalchemy as sa
from alembic import op
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
TABLE = "certificate_notification_associations"
def upgrade():
print("Adding id column")
op.add_column(
TABLE, sa.Column("id", sa.Integer, primary_key=True, autoincrement=True)
)
db.session.commit()
db.session.flush()
def downgrade():
op.drop_column(TABLE, "id")
db.session.commit()
db.session.flush()
|
from __future__ import print_function
import hashlib
import sys
import simplejson as json
from docker_registry.core import exceptions
import docker_registry.storage as storage
store = storage.load()
images_cache = {}
ancestry_cache = {}
dry_run = True
def warning(msg):
print('# Warning: ' + msg, file=sys.stderr)
def get_image_parent(image_id):
if image_id in images_cache:
return images_cache[image_id]
image_json = store.image_json_path(image_id)
parent_id = None
try:
# Note(dmp): unicode patch
info = store.get_json(image_json)
if info['id'] != image_id:
warning('image_id != json image_id for image_id: ' + image_id)
parent_id = info.get('parent')
except exceptions.FileNotFoundError:
warning('graph is broken for image_id: {0}'.format(image_id))
images_cache[image_id] = parent_id
return parent_id
def create_image_ancestry(image_id):
global ancestry_cache
if image_id in ancestry_cache:
# We already generated the ancestry for that one
return
ancestry = [image_id]
parent_id = image_id
while True:
parent_id = get_image_parent(parent_id)
if not parent_id:
break
ancestry.append(parent_id)
create_image_ancestry(parent_id)
ancestry_path = store.image_ancestry_path(image_id)
if dry_run is False:
if not store.exists(ancestry_path):
store.put_content(ancestry_path, json.dumps(ancestry))
ancestry_cache[image_id] = True
print('Generated ancestry (size: {0}) '
'for image_id: {1}'.format(len(ancestry), image_id))
def resolve_all_tags():
for namespace in store.list_directory(store.repositories):
for repos in store.list_directory(namespace):
try:
for tag in store.list_directory(repos):
fname = tag.split('/').pop()
if not fname.startswith('tag_'):
continue
yield store.get_content(tag)
except exceptions.FileNotFoundError:
pass
def compute_image_checksum(image_id, json_data):
layer_path = store.image_layer_path(image_id)
if not store.exists(layer_path):
warning('{0} is broken (no layer)'.format(image_id))
return
print('Writing checksum for {0}'.format(image_id))
if dry_run:
return
h = hashlib.sha256(json_data + '\n')
for buf in store.stream_read(layer_path):
h.update(buf)
checksum = 'sha256:{0}'.format(h.hexdigest())
checksum_path = store.image_checksum_path(image_id)
store.put_content(checksum_path, checksum)
def load_image_json(image_id):
try:
json_path = store.image_json_path(image_id)
json_data = store.get_content(json_path)
# Note(dmp): unicode patch
info = json.loads(json_data.decode('utf8'))
if image_id != info['id']:
warning('{0} is broken (json\'s id mismatch)'.format(image_id))
return
return json_data
except (IOError, exceptions.FileNotFoundError, json.JSONDecodeError):
warning('{0} is broken (invalid json)'.format(image_id))
def compute_missing_checksums():
for image in store.list_directory(store.images):
image_id = image.split('/').pop()
if image_id not in ancestry_cache:
warning('{0} is orphan'.format(image_id))
json_data = load_image_json(image_id)
if not json_data:
continue
checksum_path = store.image_checksum_path(image_id)
if store.exists(checksum_path):
# Checksum already there, skipping
continue
compute_image_checksum(image_id, json_data)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '--seriously':
dry_run = False
for image_id in resolve_all_tags():
create_image_ancestry(image_id)
compute_missing_checksums()
if dry_run:
print('-------')
        print('/!\\ No modification has been made (dry-run)')
        print('/!\\ In order to apply the changes, re-run with:')
print('$ {0} --seriously'.format(sys.argv[0]))
else:
print('# Changes applied.')
|
from datetime import timedelta
import json
import logging
from netdisco.discovery import NetworkDiscovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_discover, async_load_platform
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.dt as dt_util
DOMAIN = "discovery"
SCAN_INTERVAL = timedelta(seconds=300)
SERVICE_APPLE_TV = "apple_tv"
SERVICE_DAIKIN = "daikin"
SERVICE_DLNA_DMR = "dlna_dmr"
SERVICE_ENIGMA2 = "enigma2"
SERVICE_FREEBOX = "freebox"
SERVICE_HASS_IOS_APP = "hass_ios"
SERVICE_HASSIO = "hassio"
SERVICE_HEOS = "heos"
SERVICE_KONNECTED = "konnected"
SERVICE_MOBILE_APP = "hass_mobile_app"
SERVICE_NETGEAR = "netgear_router"
SERVICE_OCTOPRINT = "octoprint"
SERVICE_SABNZBD = "sabnzbd"
SERVICE_SAMSUNG_PRINTER = "samsung_printer"
SERVICE_TELLDUSLIVE = "tellstick"
SERVICE_YEELIGHT = "yeelight"
SERVICE_WEMO = "belkin_wemo"
SERVICE_WINK = "wink"
SERVICE_XIAOMI_GW = "xiaomi_gw"
CONFIG_ENTRY_HANDLERS = {
SERVICE_DAIKIN: "daikin",
SERVICE_TELLDUSLIVE: "tellduslive",
"logitech_mediaserver": "squeezebox",
}
SERVICE_HANDLERS = {
SERVICE_MOBILE_APP: ("mobile_app", None),
SERVICE_HASS_IOS_APP: ("ios", None),
SERVICE_NETGEAR: ("device_tracker", None),
SERVICE_HASSIO: ("hassio", None),
SERVICE_APPLE_TV: ("apple_tv", None),
SERVICE_ENIGMA2: ("media_player", "enigma2"),
SERVICE_WINK: ("wink", None),
SERVICE_SABNZBD: ("sabnzbd", None),
SERVICE_SAMSUNG_PRINTER: ("sensor", None),
SERVICE_KONNECTED: ("konnected", None),
SERVICE_OCTOPRINT: ("octoprint", None),
SERVICE_FREEBOX: ("freebox", None),
"yamaha": ("media_player", "yamaha"),
"frontier_silicon": ("media_player", "frontier_silicon"),
"openhome": ("media_player", "openhome"),
"bose_soundtouch": ("media_player", "soundtouch"),
"bluesound": ("media_player", "bluesound"),
"lg_smart_device": ("media_player", "lg_soundbar"),
"nanoleaf_aurora": ("light", "nanoleaf"),
}
OPTIONAL_SERVICE_HANDLERS = {SERVICE_DLNA_DMR: ("media_player", "dlna_dmr")}
MIGRATED_SERVICE_HANDLERS = [
"axis",
"deconz",
"denonavr",
"esphome",
"google_cast",
SERVICE_HEOS,
"harmony",
"homekit",
"ikea_tradfri",
"kodi",
"philips_hue",
"sonos",
"songpal",
SERVICE_WEMO,
SERVICE_XIAOMI_GW,
"volumio",
SERVICE_YEELIGHT,
]
DEFAULT_ENABLED = (
list(CONFIG_ENTRY_HANDLERS) + list(SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
)
DEFAULT_DISABLED = list(OPTIONAL_SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
CONF_IGNORE = "ignore"
CONF_ENABLE = "enable"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_IGNORE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_ENABLED)]
),
vol.Optional(CONF_ENABLE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_DISABLED + DEFAULT_ENABLED)]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
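# Illustrative configuration sketch (not part of the original module): the
# schema above accepts an optional "discovery" section such as
#
#   discovery:
#     ignore:
#       - sabnzbd      # must be one of DEFAULT_ENABLED
#     enable:
#       - dlna_dmr     # must be one of DEFAULT_DISABLED + DEFAULT_ENABLED
#
# The service names used here are examples taken from the tables above.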
async def async_setup(hass, config):
"""Start a discovery service."""
logger = logging.getLogger(__name__)
netdisco = NetworkDiscovery()
already_discovered = set()
if DOMAIN in config:
        # Platforms ignored by config
ignored_platforms = config[DOMAIN][CONF_IGNORE]
# Optional platforms enabled by config
enabled_platforms = config[DOMAIN][CONF_ENABLE]
else:
ignored_platforms = []
enabled_platforms = []
for platform in enabled_platforms:
if platform in DEFAULT_ENABLED:
logger.warning(
"Please remove %s from your discovery.enable configuration "
"as it is now enabled by default",
platform,
)
zeroconf_instance = await zeroconf.async_get_instance(hass)
async def new_service_found(service, info):
"""Handle a new service if one is found."""
if service in MIGRATED_SERVICE_HANDLERS:
return
if service in ignored_platforms:
logger.info("Ignoring service: %s %s", service, info)
return
discovery_hash = json.dumps([service, info], sort_keys=True)
if discovery_hash in already_discovered:
logger.debug("Already discovered service %s %s.", service, info)
return
already_discovered.add(discovery_hash)
if service in CONFIG_ENTRY_HANDLERS:
await hass.config_entries.flow.async_init(
CONFIG_ENTRY_HANDLERS[service],
context={"source": config_entries.SOURCE_DISCOVERY},
data=info,
)
return
comp_plat = SERVICE_HANDLERS.get(service)
if not comp_plat and service in enabled_platforms:
comp_plat = OPTIONAL_SERVICE_HANDLERS[service]
# We do not know how to handle this service.
if not comp_plat:
logger.debug("Unknown service discovered: %s %s", service, info)
return
logger.info("Found new service: %s %s", service, info)
component, platform = comp_plat
if platform is None:
await async_discover(hass, service, info, component, config)
else:
await async_load_platform(hass, component, platform, info, config)
async def scan_devices(now):
"""Scan for devices."""
try:
results = await hass.async_add_executor_job(
_discover, netdisco, zeroconf_instance
)
for result in results:
hass.async_create_task(new_service_found(*result))
except OSError:
logger.error("Network is unreachable")
async_track_point_in_utc_time(
hass, scan_devices, dt_util.utcnow() + SCAN_INTERVAL
)
@callback
def schedule_first(event):
"""Schedule the first discovery when Home Assistant starts up."""
async_track_point_in_utc_time(hass, scan_devices, dt_util.utcnow())
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, schedule_first)
return True
def _discover(netdisco, zeroconf_instance):
"""Discover devices."""
results = []
try:
netdisco.scan(zeroconf_instance=zeroconf_instance)
for disc in netdisco.discover():
for service in netdisco.get_info(disc):
results.append((disc, service))
finally:
netdisco.stop()
return results
|
import os
import re
import sys
from setuptools import setup, find_packages
from distutils.version import StrictVersion
import acdcli
def read(fname: str) -> str:
return open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8').read()
# replace GitHub external links
repl = (r'`([^`]*?) <(docs/)?(.*?)\.rst>`_',
        r'`\g<1> <https://acd-cli.readthedocs.org/en/latest/\g<3>.html>`_')
version = acdcli.__version__
StrictVersion(version)
requests_py32 = ',<2.11.0' if sys.version_info[0:2] == (3, 2) else ''
dependencies = ['appdirs', 'colorama', 'fusepy', 'python_dateutil',
'requests>=2.1.0,!=2.9.0,!=2.12.0%s' % requests_py32, 'requests_toolbelt!=0.5.0']
doc_dependencies = ['sphinx_paramlinks']
test_dependencies = ['httpretty<0.8.11', 'mock']
if os.environ.get('READTHEDOCS') == 'True':
dependencies = doc_dependencies
setup(
name='acdcli',
version=version,
description='a command line interface and FUSE filesystem for Amazon Cloud Drive',
long_description=re.sub(repl[0], repl[1], read('README.rst')),
license='GPLv2+',
author='yadayada',
author_email='[email protected]',
keywords=['amazon cloud drive', 'clouddrive', 'FUSE'],
url='https://github.com/yadayada/acd_cli',
download_url='https://github.com/yadayada/acd_cli/tarball/' + version,
zip_safe=False,
packages=find_packages(exclude=['tests']),
test_suite='tests.get_suite',
scripts=['acd_cli.py'],
entry_points={'console_scripts': ['acd_cli = acd_cli:main', 'acdcli = acd_cli:main'],
# 'acd_cli.plugins': ['stream = plugins.stream',
# 'template = plugins.template']
},
install_requires=dependencies,
tests_require=test_dependencies,
extras_require={'docs': doc_dependencies},
classifiers=[
'Environment :: Console',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3 :: Only',
'Development Status :: 4 - Beta',
'Topic :: System :: Archiving :: Backup',
'Topic :: System :: Filesystems'
]
)
|
revision = "7f71c0cea31a"
down_revision = "29d8c8455c86"
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
for name in conn.execute(
text("select name from certificates group by name having count(*) > 1")
):
for idx, id in enumerate(
conn.execute(
text(
"select id from certificates where certificates.name like :name order by id ASC"
).bindparams(name=name[0])
)
):
if not idx:
continue
new_name = name[0] + "-" + str(idx)
stmt = text("update certificates set name=:name where id=:id")
stmt = stmt.bindparams(name=new_name, id=id[0])
op.execute(stmt)
op.create_unique_constraint(None, "certificates", ["name"])
def downgrade():
op.drop_constraint(None, "certificates", type_="unique")
|
import warnings
import chainer
from chainercv.utils import download_model
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
def prepare_pretrained_model(param, pretrained_model, models, default={}):
"""Select parameters based on the existence of pretrained model.
Args:
param (dict): Map from the name of the parameter to values.
pretrained_model (string): Name of the pretrained weight,
path to the pretrained weight or :obj:`None`.
models (dict): Map from the name of the pretrained weight
to :obj:`model`, which is a dictionary containing the
configuration used by the selected weight.
:obj:`model` has four keys: :obj:`param`, :obj:`overwritable`,
:obj:`url` and :obj:`cv2`.
* **param** (*dict*): Parameters assigned to the pretrained \
weight.
* **overwritable** (*set*): Names of parameters that are \
overwritable (i.e., :obj:`param[key] != model['param'][key]` \
is accepted).
* **url** (*string*): Location of the pretrained weight.
* **cv2** (*bool*): If :obj:`True`, a warning is raised \
if :obj:`cv2` is not installed.
"""
if pretrained_model in models:
model = models[pretrained_model]
model_param = model.get('param', {})
overwritable = model.get('overwritable', set())
for key in param.keys():
if key not in model_param:
continue
if param[key] is None:
param[key] = model_param[key]
else:
if key not in overwritable \
and not param[key] == model_param[key]:
raise ValueError(
'{} must be {}'.format(key, model_param[key]))
path = download_model(model['url'])
if model.get('cv2', False):
if not _cv2_available:
warnings.warn(
'cv2 is not installed on your environment. '
'The pretrained model is trained with cv2. '
                    'The performance may change with the Pillow backend.',
RuntimeWarning)
if chainer.config.cv_resize_backend != 'cv2':
warnings.warn(
'Although the pretrained model is trained using cv2 as '
'the backend of resize function, the current '
'setting does not use cv2 as the backend of resize '
'function. The performance may change due to using '
'different backends. To suppress this warning, set '
                    '`chainer.config.cv_resize_backend = "cv2"`.',
RuntimeWarning)
elif pretrained_model:
path = pretrained_model
else:
path = None
for key in param.keys():
if param[key] is None:
if key in default:
param[key] = default[key]
else:
raise ValueError('{} must be specified'.format(key))
return param, path
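# Illustrative usage sketch (not part of the original module). The preset
# name, URL and parameter values below are placeholders, not real ChainerCV
# assets.
#
#   _models = {
#       'demo_preset': {
#           'param': {'n_fg_class': 20},
#           'overwritable': set(),
#           'url': 'https://example.com/demo_pretrained_model.npz',
#           'cv2': False,
#       },
#   }
#   # Fills in n_fg_class from the preset and downloads the weight file:
#   param, path = prepare_pretrained_model(
#       {'n_fg_class': None}, 'demo_preset', _models)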
|
import aprslib
import homeassistant.components.aprs.device_tracker as device_tracker
from homeassistant.const import EVENT_HOMEASSISTANT_START
from tests.async_mock import Mock, patch
from tests.common import get_test_home_assistant
DEFAULT_PORT = 14580
TEST_CALLSIGN = "testcall"
TEST_COORDS_NULL_ISLAND = (0, 0)
TEST_FILTER = "testfilter"
TEST_HOST = "testhost"
TEST_PASSWORD = "testpass"
def test_make_filter():
"""Test filter."""
callsigns = ["CALLSIGN1", "callsign2"]
res = device_tracker.make_filter(callsigns)
assert res == "b/CALLSIGN1 b/CALLSIGN2"
def test_gps_accuracy_0():
"""Test GPS accuracy level 0."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 0)
assert acc == 0
def test_gps_accuracy_1():
"""Test GPS accuracy level 1."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 1)
assert acc == 186
def test_gps_accuracy_2():
"""Test GPS accuracy level 2."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 2)
assert acc == 1855
def test_gps_accuracy_3():
"""Test GPS accuracy level 3."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 3)
assert acc == 18553
def test_gps_accuracy_4():
"""Test GPS accuracy level 4."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 4)
assert acc == 111319
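# The expected values above appear to follow the APRS position ambiguity
# scheme: each level blanks one more digit of the reported minutes, so the
# accuracy radius grows roughly tenfold per level, from about 186 m
# (0.1 minute of latitude) up to about 111 km (one degree at the equator).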
def test_gps_accuracy_invalid_int():
"""Test GPS accuracy with invalid input."""
level = 5
try:
device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
assert False, "No exception."
except ValueError:
pass
def test_gps_accuracy_invalid_string():
"""Test GPS accuracy with invalid input."""
level = "not an int"
try:
device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
assert False, "No exception."
except ValueError:
pass
def test_gps_accuracy_invalid_float():
"""Test GPS accuracy with invalid input."""
level = 1.2
try:
device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
assert False, "No exception."
except ValueError:
pass
def test_aprs_listener():
"""Test listener thread."""
with patch("aprslib.IS") as mock_ais:
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
port = DEFAULT_PORT
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
mock_ais.assert_called_with(callsign, passwd=password, host=host, port=port)
def test_aprs_listener_start_fail():
"""Test listener thread start failure."""
with patch(
"aprslib.IS.connect", side_effect=aprslib.ConnectionError("Unable to connect.")
):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert not listener.start_success
assert listener.start_message == "Unable to connect."
def test_aprs_listener_stop():
"""Test listener thread stop."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.ais.close = Mock()
listener.run()
listener.stop()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_message == "Connected to testhost with callsign testcall."
assert listener.start_success
listener.ais.close.assert_called_with()
def test_aprs_listener_rx_msg():
"""Test rx_msg."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_ALTITUDE: 0,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
gps=(0.0, 0.0),
attributes={"altitude": 0},
)
def test_aprs_listener_rx_msg_ambiguity():
"""Test rx_msg with posambiguity."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_POS_AMBIGUITY: 1,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
gps=(0.0, 0.0),
attributes={device_tracker.ATTR_GPS_ACCURACY: 186},
)
def test_aprs_listener_rx_msg_ambiguity_invalid():
"""Test rx_msg with invalid posambiguity."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_POS_AMBIGUITY: 5,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"), gps=(0.0, 0.0), attributes={}
)
def test_aprs_listener_rx_msg_no_position():
"""Test rx_msg with non-position report."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {device_tracker.ATTR_FORMAT: "invalid"}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_not_called()
def test_setup_scanner():
"""Test setup_scanner."""
with patch(
"homeassistant.components.aprs.device_tracker.AprsListenerThread"
) as listener:
hass = get_test_home_assistant()
hass.start()
config = {
"username": TEST_CALLSIGN,
"password": TEST_PASSWORD,
"host": TEST_HOST,
"callsigns": ["XX0FOO*", "YY0BAR-1"],
}
see = Mock()
res = device_tracker.setup_scanner(hass, config, see)
hass.bus.fire(EVENT_HOMEASSISTANT_START)
hass.stop()
assert res
listener.assert_called_with(
TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, "b/XX0FOO* b/YY0BAR-1", see
)
def test_setup_scanner_timeout():
"""Test setup_scanner failure from timeout."""
hass = get_test_home_assistant()
hass.start()
config = {
"username": TEST_CALLSIGN,
"password": TEST_PASSWORD,
"host": "localhost",
"timeout": 0.01,
"callsigns": ["XX0FOO*", "YY0BAR-1"],
}
see = Mock()
try:
assert not device_tracker.setup_scanner(hass, config, see)
finally:
hass.stop()
|
import itertools
import numpy as np
import pandas as pd
import pytest
import xarray as xr
from xarray.core.missing import (
NumpyInterpolator,
ScipyInterpolator,
SplineInterpolator,
_get_nan_block_lengths,
get_clean_interp_index,
)
from xarray.core.pycompat import dask_array_type
from xarray.tests import (
assert_allclose,
assert_array_equal,
assert_equal,
raises_regex,
requires_bottleneck,
requires_cftime,
requires_dask,
requires_scipy,
)
from xarray.tests.test_cftime_offsets import _CFTIME_CALENDARS
@pytest.fixture
def da():
return xr.DataArray([0, np.nan, 1, 2, np.nan, 3, 4, 5, np.nan, 6, 7], dims="time")
@pytest.fixture
def cf_da():
def _cf_da(calendar, freq="1D"):
times = xr.cftime_range(
start="1970-01-01", freq=freq, periods=10, calendar=calendar
)
values = np.arange(10)
return xr.DataArray(values, dims=("time",), coords={"time": times})
return _cf_da
@pytest.fixture
def ds():
ds = xr.Dataset()
ds["var1"] = xr.DataArray(
[0, np.nan, 1, 2, np.nan, 3, 4, 5, np.nan, 6, 7], dims="time"
)
ds["var2"] = xr.DataArray(
[10, np.nan, 11, 12, np.nan, 13, 14, 15, np.nan, 16, 17], dims="x"
)
return ds
def make_interpolate_example_data(shape, frac_nan, seed=12345, non_uniform=False):
rs = np.random.RandomState(seed)
vals = rs.normal(size=shape)
if frac_nan == 1:
vals[:] = np.nan
elif frac_nan == 0:
pass
else:
n_missing = int(vals.size * frac_nan)
ys = np.arange(shape[0])
xs = np.arange(shape[1])
if n_missing:
np.random.shuffle(ys)
ys = ys[:n_missing]
np.random.shuffle(xs)
xs = xs[:n_missing]
vals[ys, xs] = np.nan
if non_uniform:
# construct a datetime index that has irregular spacing
deltas = pd.TimedeltaIndex(unit="d", data=rs.normal(size=shape[0], scale=10))
coords = {"time": (pd.Timestamp("2000-01-01") + deltas).sort_values()}
else:
coords = {"time": pd.date_range("2000-01-01", freq="D", periods=shape[0])}
da = xr.DataArray(vals, dims=("time", "x"), coords=coords)
df = da.to_pandas()
return da, df
@requires_scipy
def test_interpolate_pd_compat():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
frac_nans = [0, 0.5, 1]
methods = ["linear", "nearest", "zero", "slinear", "quadratic", "cubic"]
for (shape, frac_nan, method) in itertools.product(shapes, frac_nans, methods):
da, df = make_interpolate_example_data(shape, frac_nan)
for dim in ["time", "x"]:
actual = da.interpolate_na(method=method, dim=dim, fill_value=np.nan)
expected = df.interpolate(
method=method, axis=da.get_axis_num(dim), fill_value=(np.nan, np.nan)
)
# Note, Pandas does some odd things with the left/right fill_value
            # for the linear methods. This next line enforces the xarray
# fill_value convention on the pandas output. Therefore, this test
# only checks that interpolated values are the same (not nans)
expected.values[pd.isnull(actual.values)] = np.nan
np.testing.assert_allclose(actual.values, expected.values)
@requires_scipy
@pytest.mark.parametrize("method", ["barycentric", "krog", "pchip", "spline", "akima"])
def test_scipy_methods_function(method):
# Note: Pandas does some wacky things with these methods and the full
    # integration tests won't work.
da, _ = make_interpolate_example_data((25, 25), 0.4, non_uniform=True)
actual = da.interpolate_na(method=method, dim="time")
assert (da.count("time") <= actual.count("time")).all()
@requires_scipy
def test_interpolate_pd_compat_non_uniform_index():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
frac_nans = [0, 0.5, 1]
methods = ["time", "index", "values"]
for (shape, frac_nan, method) in itertools.product(shapes, frac_nans, methods):
da, df = make_interpolate_example_data(shape, frac_nan, non_uniform=True)
for dim in ["time", "x"]:
if method == "time" and dim != "time":
continue
actual = da.interpolate_na(
method="linear", dim=dim, use_coordinate=True, fill_value=np.nan
)
expected = df.interpolate(
method=method, axis=da.get_axis_num(dim), fill_value=np.nan
)
# Note, Pandas does some odd things with the left/right fill_value
            # for the linear methods. This next line enforces the xarray
# fill_value convention on the pandas output. Therefore, this test
# only checks that interpolated values are the same (not nans)
expected.values[pd.isnull(actual.values)] = np.nan
np.testing.assert_allclose(actual.values, expected.values)
@requires_scipy
def test_interpolate_pd_compat_polynomial():
shapes = [(8, 8), (1, 20), (20, 1), (100, 100)]
frac_nans = [0, 0.5, 1]
orders = [1, 2, 3]
for (shape, frac_nan, order) in itertools.product(shapes, frac_nans, orders):
da, df = make_interpolate_example_data(shape, frac_nan)
for dim in ["time", "x"]:
actual = da.interpolate_na(
method="polynomial", order=order, dim=dim, use_coordinate=False
)
expected = df.interpolate(
method="polynomial", order=order, axis=da.get_axis_num(dim)
)
np.testing.assert_allclose(actual.values, expected.values)
@requires_scipy
def test_interpolate_unsorted_index_raises():
vals = np.array([1, 2, 3], dtype=np.float64)
expected = xr.DataArray(vals, dims="x", coords={"x": [2, 1, 3]})
with raises_regex(ValueError, "Index 'x' must be monotonically increasing"):
expected.interpolate_na(dim="x", method="index")
def test_interpolate_no_dim_raises():
da = xr.DataArray(np.array([1, 2, np.nan, 5], dtype=np.float64), dims="x")
with raises_regex(NotImplementedError, "dim is a required argument"):
da.interpolate_na(method="linear")
def test_interpolate_invalid_interpolator_raises():
da = xr.DataArray(np.array([1, 2, np.nan, 5], dtype=np.float64), dims="x")
with raises_regex(ValueError, "not a valid"):
da.interpolate_na(dim="x", method="foo")
def test_interpolate_duplicate_values_raises():
data = np.random.randn(2, 3)
da = xr.DataArray(data, coords=[("x", ["a", "a"]), ("y", [0, 1, 2])])
with raises_regex(ValueError, "Index 'x' has duplicate values"):
da.interpolate_na(dim="x", method="foo")
def test_interpolate_multiindex_raises():
data = np.random.randn(2, 3)
data[1, 1] = np.nan
da = xr.DataArray(data, coords=[("x", ["a", "b"]), ("y", [0, 1, 2])])
das = da.stack(z=("x", "y"))
with raises_regex(TypeError, "Index 'z' must be castable to float64"):
das.interpolate_na(dim="z")
def test_interpolate_2d_coord_raises():
coords = {
"x": xr.Variable(("a", "b"), np.arange(6).reshape(2, 3)),
"y": xr.Variable(("a", "b"), np.arange(6).reshape(2, 3)) * 2,
}
data = np.random.randn(2, 3)
data[1, 1] = np.nan
da = xr.DataArray(data, dims=("a", "b"), coords=coords)
with raises_regex(ValueError, "interpolation must be 1D"):
da.interpolate_na(dim="a", use_coordinate="x")
@requires_scipy
def test_interpolate_kwargs():
da = xr.DataArray(np.array([4, 5, np.nan], dtype=np.float64), dims="x")
expected = xr.DataArray(np.array([4, 5, 6], dtype=np.float64), dims="x")
actual = da.interpolate_na(dim="x", fill_value="extrapolate")
assert_equal(actual, expected)
expected = xr.DataArray(np.array([4, 5, -999], dtype=np.float64), dims="x")
actual = da.interpolate_na(dim="x", fill_value=-999)
assert_equal(actual, expected)
def test_interpolate_keep_attrs():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
mvals = vals.copy()
mvals[2] = np.nan
missing = xr.DataArray(mvals, dims="x")
missing.attrs = {"test": "value"}
actual = missing.interpolate_na(dim="x", keep_attrs=True)
assert actual.attrs == {"test": "value"}
def test_interpolate():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
expected = xr.DataArray(vals, dims="x")
mvals = vals.copy()
mvals[2] = np.nan
missing = xr.DataArray(mvals, dims="x")
actual = missing.interpolate_na(dim="x")
assert_equal(actual, expected)
def test_interpolate_nonans():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
expected = xr.DataArray(vals, dims="x")
actual = expected.interpolate_na(dim="x")
assert_equal(actual, expected)
@requires_scipy
def test_interpolate_allnans():
vals = np.full(6, np.nan, dtype=np.float64)
expected = xr.DataArray(vals, dims="x")
actual = expected.interpolate_na(dim="x")
assert_equal(actual, expected)
@requires_bottleneck
def test_interpolate_limits():
da = xr.DataArray(
np.array([1, 2, np.nan, np.nan, np.nan, 6], dtype=np.float64), dims="x"
)
actual = da.interpolate_na(dim="x", limit=None)
assert actual.isnull().sum() == 0
actual = da.interpolate_na(dim="x", limit=2)
expected = xr.DataArray(
np.array([1, 2, 3, 4, np.nan, 6], dtype=np.float64), dims="x"
)
assert_equal(actual, expected)
@requires_scipy
def test_interpolate_methods():
for method in ["linear", "nearest", "zero", "slinear", "quadratic", "cubic"]:
kwargs = {}
da = xr.DataArray(
np.array([0, 1, 2, np.nan, np.nan, np.nan, 6, 7, 8], dtype=np.float64),
dims="x",
)
actual = da.interpolate_na("x", method=method, **kwargs)
assert actual.isnull().sum() == 0
actual = da.interpolate_na("x", method=method, limit=2, **kwargs)
assert actual.isnull().sum() == 1
@requires_scipy
def test_interpolators():
for method, interpolator in [
("linear", NumpyInterpolator),
("linear", ScipyInterpolator),
("spline", SplineInterpolator),
]:
xi = np.array([-1, 0, 1, 2, 5], dtype=np.float64)
yi = np.array([-10, 0, 10, 20, 50], dtype=np.float64)
x = np.array([3, 4], dtype=np.float64)
f = interpolator(xi, yi, method=method)
out = f(x)
assert pd.isnull(out).sum() == 0
def test_interpolate_use_coordinate():
xc = xr.Variable("x", [100, 200, 300, 400, 500, 600])
da = xr.DataArray(
np.array([1, 2, np.nan, np.nan, np.nan, 6], dtype=np.float64),
dims="x",
coords={"xc": xc},
)
# use_coordinate == False is same as using the default index
actual = da.interpolate_na(dim="x", use_coordinate=False)
expected = da.interpolate_na(dim="x")
assert_equal(actual, expected)
# possible to specify non index coordinate
actual = da.interpolate_na(dim="x", use_coordinate="xc")
expected = da.interpolate_na(dim="x")
assert_equal(actual, expected)
# possible to specify index coordinate by name
actual = da.interpolate_na(dim="x", use_coordinate="x")
expected = da.interpolate_na(dim="x")
assert_equal(actual, expected)
@requires_dask
def test_interpolate_dask():
da, _ = make_interpolate_example_data((40, 40), 0.5)
da = da.chunk({"x": 5})
actual = da.interpolate_na("time")
expected = da.load().interpolate_na("time")
assert isinstance(actual.data, dask_array_type)
assert_equal(actual.compute(), expected)
# with limit
da = da.chunk({"x": 5})
actual = da.interpolate_na("time", limit=3)
expected = da.load().interpolate_na("time", limit=3)
assert isinstance(actual.data, dask_array_type)
assert_equal(actual, expected)
@requires_dask
def test_interpolate_dask_raises_for_invalid_chunk_dim():
da, _ = make_interpolate_example_data((40, 40), 0.5)
da = da.chunk({"time": 5})
# this checks for ValueError in dask.array.apply_gufunc
with raises_regex(ValueError, "consists of multiple chunks"):
da.interpolate_na("time")
@requires_bottleneck
def test_ffill():
da = xr.DataArray(np.array([4, 5, np.nan], dtype=np.float64), dims="x")
expected = xr.DataArray(np.array([4, 5, 5], dtype=np.float64), dims="x")
actual = da.ffill("x")
assert_equal(actual, expected)
@requires_bottleneck
@requires_dask
def test_ffill_dask():
da, _ = make_interpolate_example_data((40, 40), 0.5)
da = da.chunk({"x": 5})
actual = da.ffill("time")
expected = da.load().ffill("time")
assert isinstance(actual.data, dask_array_type)
assert_equal(actual, expected)
# with limit
da = da.chunk({"x": 5})
actual = da.ffill("time", limit=3)
expected = da.load().ffill("time", limit=3)
assert isinstance(actual.data, dask_array_type)
assert_equal(actual, expected)
@requires_bottleneck
@requires_dask
def test_bfill_dask():
da, _ = make_interpolate_example_data((40, 40), 0.5)
da = da.chunk({"x": 5})
actual = da.bfill("time")
expected = da.load().bfill("time")
assert isinstance(actual.data, dask_array_type)
assert_equal(actual, expected)
# with limit
da = da.chunk({"x": 5})
actual = da.bfill("time", limit=3)
expected = da.load().bfill("time", limit=3)
assert isinstance(actual.data, dask_array_type)
assert_equal(actual, expected)
@requires_bottleneck
def test_ffill_bfill_nonans():
vals = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
expected = xr.DataArray(vals, dims="x")
actual = expected.ffill(dim="x")
assert_equal(actual, expected)
actual = expected.bfill(dim="x")
assert_equal(actual, expected)
@requires_bottleneck
def test_ffill_bfill_allnans():
vals = np.full(6, np.nan, dtype=np.float64)
expected = xr.DataArray(vals, dims="x")
actual = expected.ffill(dim="x")
assert_equal(actual, expected)
actual = expected.bfill(dim="x")
assert_equal(actual, expected)
@requires_bottleneck
def test_ffill_functions(da):
result = da.ffill("time")
assert result.isnull().sum() == 0
@requires_bottleneck
def test_ffill_limit():
da = xr.DataArray(
[0, np.nan, np.nan, np.nan, np.nan, 3, 4, 5, np.nan, 6, 7], dims="time"
)
result = da.ffill("time")
expected = xr.DataArray([0, 0, 0, 0, 0, 3, 4, 5, 5, 6, 7], dims="time")
assert_array_equal(result, expected)
result = da.ffill("time", limit=1)
expected = xr.DataArray(
[0, 0, np.nan, np.nan, np.nan, 3, 4, 5, 5, 6, 7], dims="time"
)
assert_array_equal(result, expected)
def test_interpolate_dataset(ds):
actual = ds.interpolate_na(dim="time")
# no missing values in var1
assert actual["var1"].count("time") == actual.dims["time"]
# var2 should be the same as it was
assert_array_equal(actual["var2"], ds["var2"])
@requires_bottleneck
def test_ffill_dataset(ds):
ds.ffill(dim="time")
@requires_bottleneck
def test_bfill_dataset(ds):
ds.ffill(dim="time")
@requires_bottleneck
@pytest.mark.parametrize(
"y, lengths",
[
[np.arange(9), [[3, 3, 3, 0, 3, 3, 0, 2, 2]]],
[np.arange(9) * 3, [[9, 9, 9, 0, 9, 9, 0, 6, 6]]],
[[0, 2, 5, 6, 7, 8, 10, 12, 14], [[6, 6, 6, 0, 4, 4, 0, 4, 4]]],
],
)
def test_interpolate_na_nan_block_lengths(y, lengths):
arr = [[np.nan, np.nan, np.nan, 1, np.nan, np.nan, 4, np.nan, np.nan]]
da = xr.DataArray(arr * 2, dims=["x", "y"], coords={"x": [0, 1], "y": y})
index = get_clean_interp_index(da, dim="y", use_coordinate=True)
actual = _get_nan_block_lengths(da, dim="y", index=index)
expected = da.copy(data=lengths * 2)
assert_equal(actual, expected)
@requires_cftime
@pytest.mark.parametrize("calendar", _CFTIME_CALENDARS)
def test_get_clean_interp_index_cf_calendar(cf_da, calendar):
"""The index for CFTimeIndex is in units of days. This means that if two series using a 360 and 365 days
calendar each have a trend of .01C/year, the linear regression coefficients will be different because they
have different number of days.
Another option would be to have an index in units of years, but this would likely create other difficulties.
"""
i = get_clean_interp_index(cf_da(calendar), dim="time")
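    # The index is expected in nanoseconds (the datetime64 base unit), so one
    # day of spacing shows up as 86400 * 1e9 in the assertion below.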
np.testing.assert_array_equal(i, np.arange(10) * 1e9 * 86400)
@requires_cftime
@pytest.mark.parametrize(
("calendar", "freq"), zip(["gregorian", "proleptic_gregorian"], ["1D", "1M", "1Y"])
)
def test_get_clean_interp_index_dt(cf_da, calendar, freq):
"""In the gregorian case, the index should be proportional to normal datetimes."""
g = cf_da(calendar, freq=freq)
g["stime"] = xr.Variable(data=g.time.to_index().to_datetimeindex(), dims=("time",))
gi = get_clean_interp_index(g, "time")
si = get_clean_interp_index(g, "time", use_coordinate="stime")
np.testing.assert_array_equal(gi, si)
def test_get_clean_interp_index_potential_overflow():
da = xr.DataArray(
[0, 1, 2],
dims=("time",),
coords={"time": xr.cftime_range("0000-01-01", periods=3, calendar="360_day")},
)
get_clean_interp_index(da, "time")
@pytest.mark.parametrize("index", ([0, 2, 1], [0, 1, 1]))
def test_get_clean_interp_index_strict(index):
da = xr.DataArray([0, 1, 2], dims=("x",), coords={"x": index})
with pytest.raises(ValueError):
get_clean_interp_index(da, "x")
clean = get_clean_interp_index(da, "x", strict=False)
np.testing.assert_array_equal(index, clean)
assert clean.dtype == np.float64
@pytest.fixture
def da_time():
return xr.DataArray(
[np.nan, 1, 2, np.nan, np.nan, 5, np.nan, np.nan, np.nan, np.nan, 10],
dims=["t"],
)
def test_interpolate_na_max_gap_errors(da_time):
with raises_regex(
NotImplementedError, "max_gap not implemented for unlabeled coordinates"
):
da_time.interpolate_na("t", max_gap=1)
with raises_regex(ValueError, "max_gap must be a scalar."):
da_time.interpolate_na("t", max_gap=(1,))
da_time["t"] = pd.date_range("2001-01-01", freq="H", periods=11)
with raises_regex(TypeError, "Expected value of type str"):
da_time.interpolate_na("t", max_gap=1)
with raises_regex(TypeError, "Expected integer or floating point"):
da_time.interpolate_na("t", max_gap="1H", use_coordinate=False)
with raises_regex(ValueError, "Could not convert 'huh' to timedelta64"):
da_time.interpolate_na("t", max_gap="huh")
@requires_bottleneck
@pytest.mark.parametrize("time_range_func", [pd.date_range, xr.cftime_range])
@pytest.mark.parametrize("transform", [lambda x: x, lambda x: x.to_dataset(name="a")])
@pytest.mark.parametrize(
"max_gap", ["3H", np.timedelta64(3, "h"), pd.to_timedelta("3H")]
)
def test_interpolate_na_max_gap_time_specifier(
da_time, max_gap, transform, time_range_func
):
da_time["t"] = time_range_func("2001-01-01", freq="H", periods=11)
expected = transform(
da_time.copy(data=[np.nan, 1, 2, 3, 4, 5, np.nan, np.nan, np.nan, np.nan, 10])
)
actual = transform(da_time).interpolate_na("t", max_gap=max_gap)
assert_allclose(actual, expected)
@requires_bottleneck
@pytest.mark.parametrize(
"coords",
[
pytest.param(None, marks=pytest.mark.xfail()),
{"x": np.arange(4), "y": np.arange(11)},
],
)
def test_interpolate_na_2d(coords):
da = xr.DataArray(
[
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, np.nan, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, np.nan, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
],
dims=["x", "y"],
coords=coords,
)
actual = da.interpolate_na("y", max_gap=2)
expected_y = da.copy(
data=[
[1, 2, 3, 4, 5, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, np.nan, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, np.nan, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, 4, 5, 6, 7, np.nan, np.nan, np.nan, 11],
]
)
assert_equal(actual, expected_y)
actual = da.interpolate_na("x", max_gap=3)
expected_x = xr.DataArray(
[
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
[1, 2, 3, 4, np.nan, 6, 7, np.nan, np.nan, np.nan, 11],
],
dims=["x", "y"],
coords=coords,
)
assert_equal(actual, expected_x)
|
from io import StringIO
from urllib.error import URLError
from urllib.response import addinfourl
from django.test import TestCase
from zinnia.models.entry import Entry
from zinnia.ping import DirectoryPinger
from zinnia.ping import ExternalUrlsPinger
from zinnia.ping import URLRessources
from zinnia.signals import disconnect_entry_signals
class FakeThread(object):
def start(self):
pass
class DirectoryPingerTestCase(TestCase):
"""Test cases for DirectoryPinger"""
def setUp(self):
disconnect_entry_signals()
params = {'title': 'My entry',
'content': 'My content',
'tags': 'zinnia, test',
'slug': 'my-entry'}
self.entry = Entry.objects.create(**params)
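        # Swap the Thread base class for FakeThread so the pinger does not
        # spawn a real thread and run() can be called synchronously in tests.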
self.original_thread = DirectoryPinger.__bases__
DirectoryPinger.__bases__ = (FakeThread,)
def tearDown(self):
DirectoryPinger.__bases__ = self.original_thread
def test_ping_entry(self):
pinger = DirectoryPinger('http://localhost', [self.entry])
self.assertEqual(
pinger.ping_entry(self.entry),
{'message': 'http://localhost is an invalid directory.',
'flerror': True})
self.assertEqual(pinger.results, [])
def test_run(self):
pinger = DirectoryPinger('http://localhost', [self.entry])
pinger.run()
self.assertEqual(
pinger.results,
[{'flerror': True,
'message': 'http://localhost is an invalid directory.'}])
class ExternalUrlsPingerTestCase(TestCase):
"""Test cases for ExternalUrlsPinger"""
def setUp(self):
disconnect_entry_signals()
params = {'title': 'My entry',
'content': 'My content',
'tags': 'zinnia, test',
'slug': 'my-entry'}
self.entry = Entry.objects.create(**params)
self.original_thread = ExternalUrlsPinger.__bases__
ExternalUrlsPinger.__bases__ = (FakeThread,)
def tearDown(self):
ExternalUrlsPinger.__bases__ = self.original_thread
def test_is_external_url(self):
r = URLRessources()
pinger = ExternalUrlsPinger(self.entry)
self.assertEqual(pinger.is_external_url(
'http://example.com/', 'http://google.com/'), True)
self.assertEqual(pinger.is_external_url(
'http://example.com/toto/', 'http://google.com/titi/'), True)
self.assertEqual(pinger.is_external_url(
'http://example.com/blog/', 'http://example.com/page/'), False)
self.assertEqual(pinger.is_external_url(
'%s/blog/' % r.site_url, r.site_url), False)
self.assertEqual(pinger.is_external_url(
'http://google.com/', r.site_url), True)
self.assertEqual(pinger.is_external_url(
'/blog/', r.site_url), False)
def test_find_external_urls(self):
r = URLRessources()
pinger = ExternalUrlsPinger(self.entry)
external_urls = pinger.find_external_urls(self.entry)
self.assertEqual(external_urls, [])
self.entry.content = """
<p>This is a <a href="http://fantomas.willbreak.it/">link</a>
to a site.</p>
<p>This is a <a href="%s/blog/">link</a> within my site.</p>
<p>This is a <a href="/blog/">relative link</a> within my site.</p>
""" % r.site_url
self.entry.save()
external_urls = pinger.find_external_urls(self.entry)
self.assertEqual(external_urls, ['http://fantomas.willbreak.it/'])
def test_find_pingback_href(self):
pinger = ExternalUrlsPinger(self.entry)
result = pinger.find_pingback_href('')
self.assertEqual(result, None)
result = pinger.find_pingback_href("""
<html><head><link rel="pingback" href="/xmlrpc/" /></head>
<body></body></html>
""")
self.assertEqual(result, '/xmlrpc/')
result = pinger.find_pingback_href("""
<html><head><LINK hrEF="/xmlrpc/" REL="PingBack" /></head>
<body></body></html>
""")
self.assertEqual(result, '/xmlrpc/')
result = pinger.find_pingback_href("""
<html><head><LINK REL="PingBack" /></head><body></body></html>
""")
self.assertEqual(result, None)
def fake_urlopen(self, url):
"""Fake urlopen using test client"""
if 'example' in url:
response = StringIO('')
return addinfourl(response, {'X-Pingback': '/xmlrpc.php',
'Content-Type':
'text/html; charset=utf-8'}, url)
elif 'localhost' in url:
response = StringIO(
'<link rel="pingback" href="/xmlrpc/">')
return addinfourl(response, {'Content-Type':
'application/xhtml+xml'}, url)
elif 'google' in url:
response = StringIO('PNG CONTENT')
return addinfourl(response, {'content-type': 'image/png'}, url)
elif 'error' in url:
            raise URLError('Invalid resource')
def test_pingback_url(self):
pinger = ExternalUrlsPinger(self.entry)
self.assertEqual(
pinger.pingback_url('http://localhost',
'http://error.com'),
'http://error.com cannot be pinged.')
def test_find_pingback_urls(self):
# Set up a stub around urlopen
import zinnia.ping
self.original_urlopen = zinnia.ping.urlopen
zinnia.ping.urlopen = self.fake_urlopen
pinger = ExternalUrlsPinger(self.entry)
urls = ['http://localhost/', 'http://example.com/', 'http://error',
'http://www.google.co.uk/images/nav_logo72.png']
self.assertEqual(
pinger.find_pingback_urls(urls),
{'http://localhost/': 'http://localhost/xmlrpc/',
'http://example.com/': 'http://example.com/xmlrpc.php'})
# Remove stub
zinnia.ping.urlopen = self.original_urlopen
def test_run(self):
import zinnia.ping
self.original_urlopen = zinnia.ping.urlopen
zinnia.ping.urlopen = self.fake_urlopen
self.entry.content = """
<a href="http://localhost/">Localhost</a>
<a href="http://example.com/">Example</a>
<a href="http://error">Error</a>
<a href="http://www.google.co.uk/images/nav_logo72.png">Img</a>
"""
pinger = ExternalUrlsPinger(self.entry)
pinger.run()
self.assertEqual(pinger.results, [
'http://localhost/ cannot be pinged.'])
zinnia.ping.urlopen = self.original_urlopen
|
from os import path
from homeassistant import config as hass_config
import homeassistant.components.demo.notify as demo
from homeassistant.components.group import SERVICE_RELOAD
import homeassistant.components.group.notify as group
import homeassistant.components.notify as notify
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
async def test_send_message_with_data(hass):
"""Test sending a message with to a notify group."""
service1 = demo.DemoNotificationService(hass)
service2 = demo.DemoNotificationService(hass)
service1.send_message = MagicMock(autospec=True)
service2.send_message = MagicMock(autospec=True)
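    # Route each configured notify platform name to its own mocked service so
    # the calls to demo1 and demo2 can be asserted independently.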
def mock_get_service(hass, config, discovery_info=None):
if config["name"] == "demo1":
return service1
return service2
assert await async_setup_component(
hass,
"group",
{},
)
await hass.async_block_till_done()
with patch.object(demo, "get_service", mock_get_service):
await async_setup_component(
hass,
notify.DOMAIN,
{
"notify": [
{"name": "demo1", "platform": "demo"},
{"name": "demo2", "platform": "demo"},
]
},
)
await hass.async_block_till_done()
service = await group.async_get_service(
hass,
{
"services": [
{"service": "demo1"},
{
"service": "demo2",
"data": {
"target": "unnamed device",
"data": {"test": "message"},
},
},
]
},
)
"""Test sending a message with to a notify group."""
await service.async_send_message(
"Hello", title="Test notification", data={"hello": "world"}
)
await hass.async_block_till_done()
assert service1.send_message.mock_calls[0][1][0] == "Hello"
assert service1.send_message.mock_calls[0][2] == {
"title": "Test notification",
"data": {"hello": "world"},
}
assert service2.send_message.mock_calls[0][1][0] == "Hello"
assert service2.send_message.mock_calls[0][2] == {
"target": ["unnamed device"],
"title": "Test notification",
"data": {"hello": "world", "test": "message"},
}
async def test_reload_notify(hass):
"""Verify we can reload the notify service."""
assert await async_setup_component(
hass,
"group",
{},
)
await hass.async_block_till_done()
assert await async_setup_component(
hass,
notify.DOMAIN,
{
notify.DOMAIN: [
{"name": "demo1", "platform": "demo"},
{"name": "demo2", "platform": "demo"},
{
"name": "group_notify",
"platform": "group",
"services": [{"service": "demo1"}],
},
]
},
)
await hass.async_block_till_done()
assert hass.services.has_service(notify.DOMAIN, "demo1")
assert hass.services.has_service(notify.DOMAIN, "demo2")
assert hass.services.has_service(notify.DOMAIN, "group_notify")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"group/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
"group",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.services.has_service(notify.DOMAIN, "demo1")
assert hass.services.has_service(notify.DOMAIN, "demo2")
assert not hass.services.has_service(notify.DOMAIN, "group_notify")
assert hass.services.has_service(notify.DOMAIN, "new_group_notify")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
from collections import deque
import datetime
import email
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from homeassistant.components.imap_email_content import sensor as imap_email_content
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.template import Template
class FakeEMailReader:
"""A test class for sending test emails."""
def __init__(self, messages):
"""Set up the fake email reader."""
self._messages = messages
def connect(self):
"""Stay always Connected."""
return True
def read_next(self):
"""Get the next email."""
if len(self._messages) == 0:
return None
return self._messages.popleft()
async def test_allowed_sender(hass):
"""Test emails from allowed sender."""
test_message = email.message.Message()
test_message["From"] = "[email protected]"
test_message["Subject"] = "Test"
test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
test_message.set_payload("Test Message")
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([test_message])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Test" == sensor.state
assert "Test Message" == sensor.device_state_attributes["body"]
assert "[email protected]" == sensor.device_state_attributes["from"]
assert "Test" == sensor.device_state_attributes["subject"]
assert (
datetime.datetime(2016, 1, 1, 12, 44, 57)
== sensor.device_state_attributes["date"]
)
async def test_multi_part_with_text(hass):
"""Test multi part emails."""
msg = MIMEMultipart("alternative")
msg["Subject"] = "Link"
msg["From"] = "[email protected]"
text = "Test Message"
html = "<html><head></head><body>Test Message</body></html>"
textPart = MIMEText(text, "plain")
htmlPart = MIMEText(html, "html")
msg.attach(textPart)
msg.attach(htmlPart)
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([msg])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Link" == sensor.state
assert "Test Message" == sensor.device_state_attributes["body"]
async def test_multi_part_only_html(hass):
"""Test multi part emails with only HTML."""
msg = MIMEMultipart("alternative")
msg["Subject"] = "Link"
msg["From"] = "[email protected]"
html = "<html><head></head><body>Test Message</body></html>"
htmlPart = MIMEText(html, "html")
msg.attach(htmlPart)
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([msg])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Link" == sensor.state
assert (
"<html><head></head><body>Test Message</body></html>"
== sensor.device_state_attributes["body"]
)
async def test_multi_part_only_other_text(hass):
"""Test multi part emails with only other text."""
msg = MIMEMultipart("alternative")
msg["Subject"] = "Link"
msg["From"] = "[email protected]"
other = "Test Message"
htmlPart = MIMEText(other, "other")
msg.attach(htmlPart)
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([msg])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Link" == sensor.state
assert "Test Message" == sensor.device_state_attributes["body"]
async def test_multiple_emails(hass):
"""Test multiple emails."""
states = []
test_message1 = email.message.Message()
test_message1["From"] = "[email protected]"
test_message1["Subject"] = "Test"
test_message1["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
test_message1.set_payload("Test Message")
test_message2 = email.message.Message()
test_message2["From"] = "[email protected]"
test_message2["Subject"] = "Test 2"
test_message2["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
test_message2.set_payload("Test Message 2")
def state_changed_listener(entity_id, from_s, to_s):
states.append(to_s)
async_track_state_change(hass, ["sensor.emailtest"], state_changed_listener)
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([test_message1, test_message2])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Test" == states[0].state
assert "Test 2" == states[1].state
assert "Test Message 2" == sensor.device_state_attributes["body"]
async def test_sender_not_allowed(hass):
"""Test not whitelisted emails."""
test_message = email.message.Message()
test_message["From"] = "[email protected]"
test_message["Subject"] = "Test"
test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
test_message.set_payload("Test Message")
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([test_message])),
"test_emails_sensor",
["[email protected]"],
None,
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert sensor.state is None
async def test_template(hass):
"""Test value template."""
test_message = email.message.Message()
test_message["From"] = "[email protected]"
test_message["Subject"] = "Test"
test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
test_message.set_payload("Test Message")
sensor = imap_email_content.EmailContentSensor(
hass,
FakeEMailReader(deque([test_message])),
"test_emails_sensor",
["[email protected]"],
Template("{{ subject }} from {{ from }} with message {{ body }}", hass),
)
sensor.entity_id = "sensor.emailtest"
sensor.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert "Test from [email protected] with message Test Message" == sensor.state
|
from __future__ import division
import io
import json
import os
from datetime import date, datetime
from math import pi
from numbers import Number
from urllib.parse import quote_plus
from pygal import __version__
from pygal._compat import to_str, u
from pygal.etree import etree
from pygal.util import (
coord_abs_project, coord_diff, coord_dual, coord_format, coord_project,
minify_css, template
)
nearly_2pi = 2 * pi - .00001
class Svg(object):
"""Svg related methods"""
ns = 'http://www.w3.org/2000/svg'
xlink_ns = 'http://www.w3.org/1999/xlink'
def __init__(self, graph):
"""Create the svg helper with the chart instance"""
self.graph = graph
if not graph.no_prefix:
self.id = '#chart-%s ' % graph.uuid
else:
self.id = ''
self.processing_instructions = []
if etree.lxml:
attrs = {'nsmap': {None: self.ns, 'xlink': self.xlink_ns}}
else:
attrs = {'xmlns': self.ns}
if hasattr(etree, 'register_namespace'):
etree.register_namespace('xlink', self.xlink_ns)
else:
etree._namespace_map[self.xlink_ns] = 'xlink'
self.root = etree.Element('svg', **attrs)
self.root.attrib['id'] = self.id.lstrip('#').rstrip()
if graph.classes:
self.root.attrib['class'] = ' '.join(graph.classes)
self.root.append(
etree.Comment(
u(
'Generated with pygal %s (%s) ©Kozea 2012-2016 on %s' % (
__version__, 'lxml' if etree.lxml else 'etree',
date.today().isoformat()
)
)
)
)
self.root.append(etree.Comment(u('http://pygal.org')))
self.root.append(etree.Comment(u('http://github.com/Kozea/pygal')))
self.defs = self.node(tag='defs')
self.title = self.node(tag='title')
self.title.text = graph.title or 'Pygal'
for def_ in self.graph.defs:
self.defs.append(etree.fromstring(def_))
def add_styles(self):
"""Add the css to the svg"""
colors = self.graph.style.get_colors(self.id, self.graph._order)
strokes = self.get_strokes()
all_css = []
auto_css = ['file://base.css']
if self.graph.style._google_fonts:
auto_css.append(
'//fonts.googleapis.com/css?family=%s' %
quote_plus('|'.join(self.graph.style._google_fonts))
)
for css in auto_css + list(self.graph.css):
css_text = None
if css.startswith('inline:'):
css_text = css[len('inline:'):]
elif css.startswith('file://'):
css = css[len('file://'):]
if not os.path.exists(css):
css = os.path.join(os.path.dirname(__file__), 'css', css)
with io.open(css, encoding='utf-8') as f:
css_text = template(
f.read(),
style=self.graph.style,
colors=colors,
strokes=strokes,
id=self.id
)
if css_text is not None:
if not self.graph.pretty_print:
css_text = minify_css(css_text)
all_css.append(css_text)
else:
if css.startswith('//') and self.graph.force_uri_protocol:
css = '%s:%s' % (self.graph.force_uri_protocol, css)
self.processing_instructions.append(
etree.PI(u('xml-stylesheet'), u('href="%s"' % css))
)
self.node(
self.defs, 'style', type='text/css'
).text = '\n'.join(all_css)
def add_scripts(self):
"""Add the js to the svg"""
common_script = self.node(self.defs, 'script', type='text/javascript')
def get_js_dict():
return dict(
(k, getattr(self.graph.state, k))
for k in dir(self.graph.config)
if not k.startswith('_') and hasattr(self.graph.state, k)
and not hasattr(getattr(self.graph.state, k), '__call__')
)
def json_default(o):
if isinstance(o, (datetime, date)):
return o.isoformat()
if hasattr(o, 'to_dict'):
return o.to_dict()
return json.JSONEncoder().default(o)
dct = get_js_dict()
# Config adds
dct['legends'] = [
l.get('title') if isinstance(l, dict) else l
for l in self.graph._legends + self.graph._secondary_legends
]
common_js = 'window.pygal = window.pygal || {};'
common_js += 'window.pygal.config = window.pygal.config || {};'
if self.graph.no_prefix:
common_js += 'window.pygal.config = '
else:
common_js += 'window.pygal.config[%r] = ' % self.graph.uuid
common_script.text = common_js + json.dumps(dct, default=json_default)
for js in self.graph.js:
if js.startswith('file://'):
script = self.node(self.defs, 'script', type='text/javascript')
with io.open(js[len('file://'):], encoding='utf-8') as f:
script.text = f.read()
else:
if js.startswith('//') and self.graph.force_uri_protocol:
js = '%s:%s' % (self.graph.force_uri_protocol, js)
self.node(self.defs, 'script', type='text/javascript', href=js)
def node(self, parent=None, tag='g', attrib=None, **extras):
"""Make a new svg node"""
if parent is None:
parent = self.root
attrib = attrib or {}
attrib.update(extras)
def in_attrib_and_number(key):
return key in attrib and isinstance(attrib[key], Number)
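        # SVG does not allow negative width/height: flip the sign and shift
        # the corresponding x/y so the element keeps the same extent.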
for pos, dim in (('x', 'width'), ('y', 'height')):
if in_attrib_and_number(dim) and attrib[dim] < 0:
attrib[dim] = -attrib[dim]
if in_attrib_and_number(pos):
attrib[pos] = attrib[pos] - attrib[dim]
for key, value in dict(attrib).items():
if value is None:
del attrib[key]
attrib[key] = to_str(value)
if key.endswith('_'):
attrib[key.rstrip('_')] = attrib[key]
del attrib[key]
elif key == 'href':
attrib[etree.QName('http://www.w3.org/1999/xlink',
key)] = attrib[key]
del attrib[key]
return etree.SubElement(parent, tag, attrib)
def transposable_node(self, parent=None, tag='g', attrib=None, **extras):
"""Make a new svg node which can be transposed if horizontal"""
if self.graph.horizontal:
for key1, key2 in (('x', 'y'), ('width', 'height'), ('cx', 'cy')):
attr1 = extras.get(key1, None)
attr2 = extras.get(key2, None)
if attr2:
extras[key1] = attr2
elif attr1:
del extras[key1]
if attr1:
extras[key2] = attr1
elif attr2:
del extras[key2]
return self.node(parent, tag, attrib, **extras)
def serie(self, serie):
"""Make serie node"""
return dict(
plot=self.node(
self.graph.nodes['plot'],
class_='series serie-%d color-%d' % (serie.index, serie.index)
),
overlay=self.node(
self.graph.nodes['overlay'],
class_='series serie-%d color-%d' % (serie.index, serie.index)
),
text_overlay=self.node(
self.graph.nodes['text_overlay'],
class_='series serie-%d color-%d' % (serie.index, serie.index)
)
)
def line(self, node, coords, close=False, **kwargs):
"""Draw a svg line"""
line_len = len(coords)
if len([c for c in coords if c[1] is not None]) < 2:
return
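        # Build an SVG path: 'M' moves to the first complete point, 'L' draws
        # through the remaining ones and 'Z' closes the path when requested.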
root = 'M%s L%s Z' if close else 'M%s L%s'
origin_index = 0
while origin_index < line_len and None in coords[origin_index]:
origin_index += 1
if origin_index == line_len:
return
if self.graph.horizontal:
coord_format = lambda xy: '%f %f' % (xy[1], xy[0])
else:
coord_format = lambda xy: '%f %f' % xy
origin = coord_format(coords[origin_index])
line = ' '.join([
coord_format(c) for c in coords[origin_index + 1:] if None not in c
])
return self.node(node, 'path', d=root % (origin, line), **kwargs)
def slice(
self, serie_node, node, radius, small_radius, angle, start_angle,
center, val, i, metadata
):
"""Draw a pie slice"""
if angle == 2 * pi:
angle = nearly_2pi
if angle > 0:
to = [
coord_abs_project(center, radius, start_angle),
coord_abs_project(center, radius, start_angle + angle),
coord_abs_project(center, small_radius, start_angle + angle),
coord_abs_project(center, small_radius, start_angle)
]
rv = self.node(
node,
'path',
d='M%s A%s 0 %d 1 %s L%s A%s 0 %d 0 %s z' % (
to[0], coord_dual(radius), int(angle > pi), to[1], to[2],
coord_dual(small_radius), int(angle > pi), to[3]
),
class_='slice reactive tooltip-trigger'
)
else:
rv = None
x, y = coord_diff(
center,
coord_project((radius + small_radius) / 2, start_angle + angle / 2)
)
self.graph._tooltip_data(
node, val, x, y, "centered", self.graph._x_labels
and self.graph._x_labels[i][0]
)
if angle >= 0.3: # 0.3 radians is about 17 degrees
self.graph._static_value(serie_node, val, x, y, metadata)
return rv
def gauge_background(
self, serie_node, start_angle, center, radius, small_radius,
end_angle, half_pie, max_value
):
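        """Draw the gauge background ring and its min/max value labels"""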
if end_angle == 2 * pi:
end_angle = nearly_2pi
to_shade = [
coord_abs_project(center, radius, start_angle),
coord_abs_project(center, radius, end_angle),
coord_abs_project(center, small_radius, end_angle),
coord_abs_project(center, small_radius, start_angle)
]
self.node(
serie_node['plot'],
'path',
d='M%s A%s 0 1 1 %s L%s A%s 0 1 0 %s z' % (
to_shade[0], coord_dual(radius), to_shade[1], to_shade[2],
coord_dual(small_radius), to_shade[3]
),
class_='gauge-background reactive'
)
if half_pie:
begin_end = [
coord_diff(
center,
coord_project(
radius - (radius - small_radius) / 2, start_angle
)
),
coord_diff(
center,
coord_project(
radius - (radius - small_radius) / 2, end_angle
)
)
]
pos = 0
for i in begin_end:
self.node(
serie_node['plot'],
'text',
class_='y-{} bound reactive'.format(pos),
x=i[0],
y=i[1] + 10,
attrib={
'text-anchor': 'middle'
}
).text = '{}'.format(0 if pos == 0 else max_value)
pos += 1
else:
middle_radius = .5 * (radius + small_radius)
# Correct text vertical alignment
middle_radius -= .1 * (radius - small_radius)
to_labels = [
coord_abs_project(center, middle_radius, 0),
coord_abs_project(center, middle_radius, nearly_2pi)
]
self.node(
self.defs,
'path',
id='valuePath-%s%s' % center,
d='M%s A%s 0 1 1 %s' %
(to_labels[0], coord_dual(middle_radius), to_labels[1])
)
text_ = self.node(serie_node['text_overlay'], 'text')
self.node(
text_,
'textPath',
class_='max-value reactive',
attrib={
'href': '#valuePath-%s%s' % center,
'startOffset': '99%',
'text-anchor': 'end'
}
).text = max_value
def solid_gauge(
self, serie_node, node, radius, small_radius, angle, start_angle,
center, val, i, metadata, half_pie, end_angle, max_value
):
"""Draw a solid gauge slice and background slice"""
if angle == 2 * pi:
angle = nearly_2pi
if angle > 0:
to = [
coord_abs_project(center, radius, start_angle),
coord_abs_project(center, radius, start_angle + angle),
coord_abs_project(center, small_radius, start_angle + angle),
coord_abs_project(center, small_radius, start_angle)
]
self.node(
node,
'path',
d='M%s A%s 0 %d 1 %s L%s A%s 0 %d 0 %s z' % (
to[0], coord_dual(radius), int(angle > pi), to[1], to[2],
coord_dual(small_radius), int(angle > pi), to[3]
),
class_='slice reactive tooltip-trigger'
)
else:
return
x, y = coord_diff(
center,
coord_project((radius + small_radius) / 2, start_angle + angle / 2)
)
self.graph._static_value(serie_node, val, x, y, metadata, 'middle')
self.graph._tooltip_data(
node, val, x, y, "centered", self.graph._x_labels
and self.graph._x_labels[i][0]
)
def confidence_interval(self, node, x, low, high, width=7):
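        """Draw a confidence interval marker (capped line) from low to high at x"""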
if self.graph.horizontal:
fmt = lambda xy: '%f %f' % (xy[1], xy[0])
else:
fmt = coord_format
shr = lambda xy: (xy[0] + width, xy[1])
shl = lambda xy: (xy[0] - width, xy[1])
top = (x, high)
bottom = (x, low)
ci = self.node(node, class_="ci")
self.node(
ci,
'path',
d="M%s L%s M%s L%s M%s L%s L%s M%s L%s" % tuple(
map(
fmt, (
top, shr(top), top, shl(top), top, bottom, shr(bottom),
bottom, shl(bottom)
)
)
),
class_='nofill reactive'
)
def pre_render(self):
"""Last things to do before rendering"""
self.add_styles()
self.add_scripts()
self.root.set(
'viewBox', '0 0 %d %d' % (self.graph.width, self.graph.height)
)
if self.graph.explicit_size:
self.root.set('width', str(self.graph.width))
self.root.set('height', str(self.graph.height))
def draw_no_data(self):
"""Write the no data text to the svg"""
no_data = self.node(
self.graph.nodes['text_overlay'],
'text',
x=self.graph.view.width / 2,
y=self.graph.view.height / 2,
class_='no_data'
)
no_data.text = self.graph.no_data_text
def render(self, is_unicode=False, pretty_print=False):
"""Last thing to do before rendering"""
for f in self.graph.xml_filters:
self.root = f(self.root)
args = {'encoding': 'utf-8'}
svg = b''
if etree.lxml:
args['pretty_print'] = pretty_print
if not self.graph.disable_xml_declaration:
svg = b"<?xml version='1.0' encoding='utf-8'?>\n"
if not self.graph.disable_xml_declaration:
svg += b'\n'.join([
etree.tostring(pi, **args)
for pi in self.processing_instructions
])
svg += etree.tostring(self.root, **args)
if self.graph.disable_xml_declaration or is_unicode:
svg = svg.decode('utf-8')
return svg
def get_strokes(self):
"""Return a css snippet containing all stroke style options"""
def stroke_dict_to_css(stroke, i=None):
"""Return a css style for the given option"""
css = [
'%s.series%s {\n' %
(self.id, '.serie-%d' % i if i is not None else '')
]
for key in ('width', 'linejoin', 'linecap', 'dasharray',
'dashoffset'):
if stroke.get(key):
css.append(' stroke-%s: %s;\n' % (key, stroke[key]))
css.append('}')
return '\n'.join(css)
css = []
if self.graph.stroke_style is not None:
css.append(stroke_dict_to_css(self.graph.stroke_style))
for serie in self.graph.series:
if serie.stroke_style is not None:
css.append(stroke_dict_to_css(serie.stroke_style, serie.index))
for secondary_serie in self.graph.secondary_series:
if secondary_serie.stroke_style is not None:
css.append(
stroke_dict_to_css(
secondary_serie.stroke_style, secondary_serie.index
)
)
return '\n'.join(css)
|
revision = "449c3d5c7299"
down_revision = "5770674184de"
from alembic import op
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
CONSTRAINT_NAME = "uq_dest_not_ids"
TABLE = "certificate_notification_associations"
COLUMNS = ["notification_id", "certificate_id"]
def upgrade():
connection = op.get_bind()
# Delete duplicate entries
connection.execute(
"""\
DELETE FROM certificate_notification_associations WHERE ctid NOT IN (
-- Select the first tuple ID for each (notification_id, certificate_id) combination and keep that
SELECT min(ctid) FROM certificate_notification_associations GROUP BY notification_id, certificate_id
)
"""
)
op.create_unique_constraint(CONSTRAINT_NAME, TABLE, COLUMNS)
def downgrade():
op.drop_constraint(CONSTRAINT_NAME, TABLE)
|
import random
import unittest
from chainer import testing
from chainercv.utils import unzip
class TestUnzip(unittest.TestCase):
def setUp(self):
self.ints = list(range(10))
self.strs = list('abcdefghij')
self.iterable = zip(self.ints, self.strs)
def test_sequential(self):
i_iter, s_iter = unzip(self.iterable)
ints = list(i_iter)
self.assertEqual(ints, self.ints)
strs = list(s_iter)
self.assertEqual(strs, self.strs)
def test_parallel(self):
i_iter, s_iter = unzip(self.iterable)
ints, strs = [], []
for i, s in zip(i_iter, s_iter):
ints.append(i)
strs.append(s)
self.assertEqual(ints, self.ints)
self.assertEqual(strs, self.strs)
def test_random(self):
i_iter, s_iter = unzip(self.iterable)
ints, strs = [], []
while True:
try:
if random.randrange(2):
ints.append(next(i_iter))
else:
strs.append(next(s_iter))
except StopIteration:
break
ints.extend(i_iter)
strs.extend(s_iter)
self.assertEqual(ints, self.ints)
self.assertEqual(strs, self.strs)
class TestUnzipWithInfiniteIterator(unittest.TestCase):
def setUp(self):
def _iterator():
i = 0
while True:
yield i, i + 1, i * i
i += 1
self.iterable = _iterator()
def test_sequential(self):
iters = unzip(self.iterable)
self.assertEqual(len(iters), 3)
for i in range(10):
self.assertEqual(next(iters[0]), i)
for i in range(10):
self.assertEqual(next(iters[1]), i + 1)
for i in range(10):
self.assertEqual(next(iters[2]), i * i)
def test_parallel(self):
iters = unzip(self.iterable)
self.assertEqual(len(iters), 3)
for i in range(10):
self.assertEqual(next(iters[0]), i)
self.assertEqual(next(iters[1]), i + 1)
self.assertEqual(next(iters[2]), i * i)
class DummyObject(object):
def __init__(self, released, id_):
self.released = released
self.id_ = id_
def __del__(self):
# register id when it is released
self.released.add(self.id_)
class TestUnzipRelease(unittest.TestCase):
def setUp(self):
self.released = set()
def _iterator():
id_ = 0
while True:
yield id_, DummyObject(self.released, id_)
id_ += 1
self.iterable = _iterator()
def test_released(self):
iter_0, iter_1 = unzip(self.iterable)
del iter_1
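        # Once iter_1 is garbage collected, unzip stops buffering the second
        # elements, so each DummyObject is released as iter_0 advances past it.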
for i in range(20):
next(iter_0)
self.assertEqual(self.released, set(range(20)))
def test_unreleased(self):
iter_0, iter_1 = unzip(self.iterable)
for i in range(20):
next(iter_0)
self.assertEqual(self.released, set())
testing.run_module(__name__, __file__)
|
import copy
import os
import os.path as op
import sys
import mne
from mne.utils import (run_subprocess, verbose, logger, ETSContext,
get_subjects_dir)
def _check_file(fname, overwrite):
"""Prevent overwrites."""
if op.isfile(fname) and not overwrite:
raise IOError('File %s exists, use --overwrite to overwrite it'
% fname)
def run():
"""Run command."""
from mne.commands.utils import get_optparser, _add_verbose_flag
parser = get_optparser(__file__)
subjects_dir = mne.get_config('SUBJECTS_DIR')
parser.add_option('-o', '--overwrite', dest='overwrite',
action='store_true',
help='Overwrite previously computed surface')
parser.add_option('-s', '--subject', dest='subject',
help='The name of the subject', type='str')
parser.add_option('-f', '--force', dest='force', action='store_true',
help='Force transformation of surface into bem.')
parser.add_option("-d", "--subjects-dir", dest="subjects_dir",
help="Subjects directory", default=subjects_dir)
parser.add_option("-n", "--no-decimate", dest="no_decimate",
help="Disable medium and sparse decimations "
"(dense only)", action='store_true')
_add_verbose_flag(parser)
options, args = parser.parse_args()
subject = vars(options).get('subject', os.getenv('SUBJECT'))
subjects_dir = options.subjects_dir
if subject is None or subjects_dir is None:
parser.print_help()
sys.exit(1)
_run(subjects_dir, subject, options.force, options.overwrite,
options.no_decimate, options.verbose)
@verbose
def _run(subjects_dir, subject, force, overwrite, no_decimate, verbose=None):
this_env = copy.deepcopy(os.environ)
subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
this_env['SUBJECTS_DIR'] = subjects_dir
this_env['SUBJECT'] = subject
if 'FREESURFER_HOME' not in this_env:
raise RuntimeError('The FreeSurfer environment needs to be set up '
'for this script')
incomplete = 'warn' if force else 'raise'
subj_path = op.join(subjects_dir, subject)
if not op.exists(subj_path):
raise RuntimeError('%s does not exist. Please check your subject '
'directory path.' % subj_path)
mri = 'T1.mgz' if op.exists(op.join(subj_path, 'mri', 'T1.mgz')) else 'T1'
logger.info('1. Creating a dense scalp tessellation with mkheadsurf...')
def check_seghead(surf_path=op.join(subj_path, 'surf')):
surf = None
for k in ['lh.seghead', 'lh.smseghead']:
this_surf = op.join(surf_path, k)
if op.exists(this_surf):
surf = this_surf
break
return surf
my_seghead = check_seghead()
if my_seghead is None:
run_subprocess(['mkheadsurf', '-subjid', subject, '-srcvol', mri],
env=this_env)
surf = check_seghead()
if surf is None:
raise RuntimeError('mkheadsurf did not produce the standard output '
'file.')
bem_dir = op.join(subjects_dir, subject, 'bem')
if not op.isdir(bem_dir):
os.mkdir(bem_dir)
dense_fname = op.join(bem_dir, '%s-head-dense.fif' % subject)
logger.info('2. Creating %s ...' % dense_fname)
_check_file(dense_fname, overwrite)
# Helpful message if we get a topology error
msg = '\n\nConsider using --force as an additional input parameter.'
surf = mne.bem._surfaces_to_bem(
[surf], [mne.io.constants.FIFF.FIFFV_BEM_SURF_ID_HEAD], [1],
incomplete=incomplete, extra=msg)[0]
mne.write_bem_surfaces(dense_fname, surf)
levels = 'medium', 'sparse'
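    # Target triangle counts for the decimated (medium, sparse) tessellations.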
tris = [] if no_decimate else [30000, 2500]
if os.getenv('_MNE_TESTING_SCALP', 'false') == 'true':
tris = [len(surf['tris'])] # don't actually decimate
for ii, (n_tri, level) in enumerate(zip(tris, levels), 3):
logger.info('%i. Creating %s tessellation...' % (ii, level))
logger.info('%i.1 Decimating the dense tessellation...' % ii)
with ETSContext():
points, tris = mne.decimate_surface(points=surf['rr'],
triangles=surf['tris'],
n_triangles=n_tri)
dec_fname = dense_fname.replace('dense', level)
logger.info('%i.2 Creating %s' % (ii, dec_fname))
_check_file(dec_fname, overwrite)
dec_surf = mne.bem._surfaces_to_bem(
[dict(rr=points, tris=tris)],
[mne.io.constants.FIFF.FIFFV_BEM_SURF_ID_HEAD], [1], rescale=False,
incomplete=incomplete, extra=msg)
mne.write_bem_surfaces(dec_fname, dec_surf)
mne.utils.run_command_if_main()
|
from aioazuredevops.client import DevOpsClient
import aiohttp
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.azure_devops.const import ( # pylint:disable=unused-import
CONF_ORG,
CONF_PAT,
CONF_PROJECT,
DOMAIN,
)
from homeassistant.config_entries import ConfigFlow
class AzureDevOpsFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle an Azure DevOps config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize config flow."""
self._organization = None
self._project = None
self._pat = None
async def _show_setup_form(self, errors=None):
"""Show the setup form to the user."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_ORG, default=self._organization): str,
vol.Required(CONF_PROJECT, default=self._project): str,
vol.Optional(CONF_PAT): str,
}
),
errors=errors or {},
)
async def _show_reauth_form(self, errors=None):
"""Show the reauth form to the user."""
return self.async_show_form(
step_id="reauth",
description_placeholders={
"project_url": f"{self._organization}/{self._project}"
},
data_schema=vol.Schema({vol.Required(CONF_PAT): str}),
errors=errors or {},
)
async def _check_setup(self):
"""Check the setup of the flow."""
errors = {}
client = DevOpsClient()
try:
if self._pat is not None:
await client.authorize(self._pat, self._organization)
if not client.authorized:
errors["base"] = "invalid_auth"
return errors
project_info = await client.get_project(self._organization, self._project)
if project_info is None:
errors["base"] = "project_error"
return errors
except aiohttp.ClientError:
errors["base"] = "cannot_connect"
return errors
return None
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
if user_input is None:
return await self._show_setup_form(user_input)
self._organization = user_input[CONF_ORG]
self._project = user_input[CONF_PROJECT]
self._pat = user_input.get(CONF_PAT)
await self.async_set_unique_id(f"{self._organization}_{self._project}")
self._abort_if_unique_id_configured()
errors = await self._check_setup()
if errors is not None:
return await self._show_setup_form(errors)
return self._async_create_entry()
async def async_step_reauth(self, user_input):
"""Handle configuration by re-auth."""
if user_input.get(CONF_ORG) and user_input.get(CONF_PROJECT):
self._organization = user_input[CONF_ORG]
self._project = user_input[CONF_PROJECT]
self._pat = user_input[CONF_PAT]
# pylint: disable=no-member
self.context["title_placeholders"] = {
"project_url": f"{self._organization}/{self._project}",
}
await self.async_set_unique_id(f"{self._organization}_{self._project}")
errors = await self._check_setup()
if errors is not None:
return await self._show_reauth_form(errors)
for entry in self._async_current_entries():
if entry.unique_id == self.unique_id:
self.hass.config_entries.async_update_entry(
entry,
data={
CONF_ORG: self._organization,
CONF_PROJECT: self._project,
CONF_PAT: self._pat,
},
)
return self.async_abort(reason="reauth_successful")
def _async_create_entry(self):
"""Handle create entry."""
return self.async_create_entry(
title=f"{self._organization}/{self._project}",
data={
CONF_ORG: self._organization,
CONF_PROJECT: self._project,
CONF_PAT: self._pat,
},
)
|
import redis
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from sidekiq import SidekiqCollector
##########################################################################
def run_only_if_redis_is_available(func):
"""Decorator for checking if python-redis is available.
Note: this test will be silently skipped if python-redis is missing.
"""
try:
import redis
except ImportError:
redis = None
return run_only(func, lambda: redis is not None)
class TestSidekiqCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SidekiqWebCollector', {
'password': 'TEST_PASSWORD'
})
self.collector = SidekiqCollector(config, None)
def test_import(self):
self.assertTrue(SidekiqCollector)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_sidekiq_queue(self, publish_mock):
self.collector.config.update({
'ports': '6379'
})
patch_collector = patch.object(
redis.Redis, 'smembers', Mock(return_value=['queue_1'])
)
length_collector = patch.object(
redis.Redis, 'llen', Mock(return_value=123)
)
zcard_collector = patch.object(
redis.Redis, 'zcard', Mock(return_value=100)
)
patch_collector.start()
length_collector.start()
zcard_collector.start()
self.collector.collect()
patch_collector.stop()
length_collector.stop()
zcard_collector.stop()
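        # Expected metric names follow queue.<port>.<db index>.<queue name>;
        # the retry and schedule lengths come from the mocked zcard value.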
metrics = {
'queue.6379.0.queue_1': 123,
'queue.6379.0.retry': 100,
'queue.6379.0.schedule': 100
}
self.assertPublishedMany(publish_mock, metrics)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_sidekiq_queue_with_databases(self, publish_mock):
self.collector.config.update({
'ports': ['6379', '6380'],
'sentinel_ports': ['26379', '26380'],
'databases': 2
})
patch_collector = patch.object(
redis.Redis, 'smembers', Mock(return_value=['queue_1'])
)
length_collector = patch.object(
redis.Redis, 'llen', Mock(return_value=123)
)
zcard_collector = patch.object(
redis.Redis, 'zcard', Mock(return_value=100)
)
patch_collector.start()
length_collector.start()
zcard_collector.start()
self.collector.collect()
patch_collector.stop()
length_collector.stop()
zcard_collector.stop()
metrics = {
'queue.6379.0.queue_1': 123,
'queue.6379.0.retry': 100,
'queue.6379.0.schedule': 100,
'queue.6380.0.queue_1': 123,
'queue.6380.0.retry': 100,
'queue.6380.0.schedule': 100,
'queue.6379.1.queue_1': 123,
'queue.6379.1.retry': 100,
'queue.6379.1.schedule': 100,
'queue.6380.1.queue_1': 123,
'queue.6380.1.retry': 100,
'queue.6380.1.schedule': 100
}
self.assertPublishedMany(publish_mock, metrics)
@run_only_if_redis_is_available
@patch.object(Collector, 'publish')
def test_sidekiq_queue_with_cluster_prefix(self, publish_mock):
self.collector.config.update({
'cluster_prefix': 'test-sidekiq',
'sentinel_ports': '63790'
})
patch_collector = patch.object(
redis.Redis, 'smembers', Mock(return_value=['queue_1', 'queue_2'])
)
length_collector = patch.object(
redis.Redis, 'llen', Mock(return_value=123)
)
zcard_collector = patch.object(
redis.Redis, 'zcard', Mock(return_value=100)
)
patch_collector.start()
length_collector.start()
zcard_collector.start()
self.collector.collect()
patch_collector.stop()
length_collector.stop()
zcard_collector.stop()
metrics = {
'queue.test-sidekiq.6379.0.queue_1': 123,
'queue.test-sidekiq.6379.0.schedule': 100,
'queue.test-sidekiq.6379.0.retry': 100
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
DOMAIN as DEVICE_TRACKER,
)
from homeassistant.components.device_tracker.const import (
SCAN_INTERVAL as DEFAULT_SCAN_INTERVAL,
)
from homeassistant.const import (
CONF_EXCLUDE,
CONF_INCLUDE,
CONF_PASSWORD,
CONF_PREFIX,
CONF_USERNAME,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_AUTHORIZATION,
CONF_CIRCLES,
CONF_DRIVING_SPEED,
CONF_ERROR_THRESHOLD,
CONF_MAX_GPS_ACCURACY,
CONF_MAX_UPDATE_WAIT,
CONF_MEMBERS,
CONF_SHOW_AS_STATE,
CONF_WARNING_THRESHOLD,
DOMAIN,
SHOW_DRIVING,
SHOW_MOVING,
)
from .helpers import get_api
DEFAULT_PREFIX = DOMAIN
CONF_ACCOUNTS = "accounts"
SHOW_AS_STATE_OPTS = [SHOW_DRIVING, SHOW_MOVING]
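# Translate an include/exclude config block into a simple filter dict of the
# form {"include": <bool>, "list": [<items>]}.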
def _excl_incl_list_to_filter_dict(value):
return {
"include": CONF_INCLUDE in value,
"list": value.get(CONF_EXCLUDE) or value.get(CONF_INCLUDE),
}
def _prefix(value):
if not value:
return ""
if not value.endswith("_"):
return f"{value}_"
return value
def _thresholds(config):
error_threshold = config.get(CONF_ERROR_THRESHOLD)
warning_threshold = config.get(CONF_WARNING_THRESHOLD)
if error_threshold and warning_threshold:
if error_threshold <= warning_threshold:
raise vol.Invalid(
f"{CONF_ERROR_THRESHOLD} must be larger than {CONF_WARNING_THRESHOLD}"
)
elif not error_threshold and warning_threshold:
config[CONF_ERROR_THRESHOLD] = warning_threshold + 1
elif error_threshold and not warning_threshold:
# Make them the same which effectively prevents warnings.
config[CONF_WARNING_THRESHOLD] = error_threshold
else:
# Log all errors as errors.
config[CONF_ERROR_THRESHOLD] = 1
config[CONF_WARNING_THRESHOLD] = 1
return config
ACCOUNT_SCHEMA = vol.Schema(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
_SLUG_LIST = vol.All(
cv.ensure_list, [cv.slugify], vol.Length(min=1, msg="List cannot be empty")
)
_LOWER_STRING_LIST = vol.All(
cv.ensure_list,
[vol.All(cv.string, vol.Lower)],
vol.Length(min=1, msg="List cannot be empty"),
)
_EXCL_INCL_SLUG_LIST = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_EXCLUDE, "incl_excl"): _SLUG_LIST,
vol.Exclusive(CONF_INCLUDE, "incl_excl"): _SLUG_LIST,
}
),
cv.has_at_least_one_key(CONF_EXCLUDE, CONF_INCLUDE),
_excl_incl_list_to_filter_dict,
)
_EXCL_INCL_LOWER_STRING_LIST = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_EXCLUDE, "incl_excl"): _LOWER_STRING_LIST,
vol.Exclusive(CONF_INCLUDE, "incl_excl"): _LOWER_STRING_LIST,
}
),
cv.has_at_least_one_key(CONF_EXCLUDE, CONF_INCLUDE),
_excl_incl_list_to_filter_dict,
)
_THRESHOLD = vol.All(vol.Coerce(int), vol.Range(min=1))
LIFE360_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_ACCOUNTS): vol.All(
cv.ensure_list, [ACCOUNT_SCHEMA], vol.Length(min=1)
),
vol.Optional(CONF_CIRCLES): _EXCL_INCL_LOWER_STRING_LIST,
vol.Optional(CONF_DRIVING_SPEED): vol.Coerce(float),
vol.Optional(CONF_ERROR_THRESHOLD): _THRESHOLD,
vol.Optional(CONF_MAX_GPS_ACCURACY): vol.Coerce(float),
vol.Optional(CONF_MAX_UPDATE_WAIT): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_MEMBERS): _EXCL_INCL_SLUG_LIST,
vol.Optional(CONF_PREFIX, default=DEFAULT_PREFIX): vol.All(
vol.Any(None, cv.string), _prefix
),
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
vol.Optional(CONF_SHOW_AS_STATE, default=[]): vol.All(
cv.ensure_list, [vol.In(SHOW_AS_STATE_OPTS)]
),
vol.Optional(CONF_WARNING_THRESHOLD): _THRESHOLD,
}
),
_thresholds,
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: LIFE360_SCHEMA}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up integration."""
conf = config.get(DOMAIN, LIFE360_SCHEMA({}))
hass.data[DOMAIN] = {"config": conf, "apis": {}}
discovery.load_platform(hass, DEVICE_TRACKER, DOMAIN, None, config)
if CONF_ACCOUNTS not in conf:
return True
# Check existing config entries. For any that correspond to an entry in
# configuration.yaml, and whose password has not changed, nothing needs to
# be done with that config entry or that account from configuration.yaml.
# But if the config entry was created by import and the account no longer
# exists in configuration.yaml, or if the password has changed, then delete
# that out-of-date config entry.
already_configured = []
for entry in hass.config_entries.async_entries(DOMAIN):
# Find corresponding configuration.yaml entry and its password.
password = None
for account in conf[CONF_ACCOUNTS]:
if account[CONF_USERNAME] == entry.data[CONF_USERNAME]:
password = account[CONF_PASSWORD]
if password == entry.data[CONF_PASSWORD]:
already_configured.append(entry.data[CONF_USERNAME])
continue
if (
not password
and entry.source == config_entries.SOURCE_IMPORT
or password
and password != entry.data[CONF_PASSWORD]
):
hass.async_create_task(hass.config_entries.async_remove(entry.entry_id))
# Create config entries for accounts listed in configuration.
for account in conf[CONF_ACCOUNTS]:
if account[CONF_USERNAME] not in already_configured:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=account,
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up config entry."""
hass.data[DOMAIN]["apis"][entry.data[CONF_USERNAME]] = get_api(
entry.data[CONF_AUTHORIZATION]
)
return True
async def async_unload_entry(hass, entry):
"""Unload config entry."""
try:
hass.data[DOMAIN]["apis"].pop(entry.data[CONF_USERNAME])
return True
except KeyError:
return False
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from mysqlstat import MySQLCollector
##########################################################################
def run_only_if_MySQLdb_is_available(func):
    """Decorator for checking if MySQLdb is available.
    Note: this test will be silently skipped if MySQLdb is missing.
    """
    try:
        import MySQLdb
    except ImportError:
        MySQLdb = None
    return run_only(func, lambda: MySQLdb is not None)
class TestMySQLCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MySQLCollector', {
'slave': 'True',
'master': 'True',
'innodb': 'True',
'hosts': ['root:@localhost:3306/mysql'],
'interval': '1',
})
self.collector = MySQLCollector(config, None)
def test_import(self):
self.assertTrue(MySQLCollector)
@run_only_if_MySQLdb_is_available
@patch.object(MySQLCollector, 'connect', Mock(return_value=True))
@patch.object(MySQLCollector, 'disconnect', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_real_data(self, publish_mock):
p_global_status = patch.object(
MySQLCollector,
'get_db_global_status',
Mock(return_value=self.getPickledResults(
'mysql_get_db_global_status_1.pkl')))
p_master_status = patch.object(
MySQLCollector,
'get_db_master_status',
Mock(return_value=self.getPickledResults(
'get_db_master_status_1.pkl')))
p_slave_status = patch.object(
MySQLCollector,
'get_db_slave_status',
Mock(return_value=self.getPickledResults(
'get_db_slave_status_1.pkl')))
p_innodb_status = patch.object(
MySQLCollector,
'get_db_innodb_status',
Mock(return_value=[{}]))
p_global_status.start()
p_master_status.start()
p_slave_status.start()
p_innodb_status.start()
self.collector.collect()
p_global_status.stop()
p_master_status.stop()
p_slave_status.stop()
p_innodb_status.stop()
self.assertPublishedMany(publish_mock, {})
p_global_status = patch.object(
MySQLCollector,
'get_db_global_status',
Mock(return_value=self.getPickledResults(
'mysql_get_db_global_status_2.pkl')))
p_master_status = patch.object(
MySQLCollector,
'get_db_master_status',
Mock(return_value=self.getPickledResults(
'get_db_master_status_2.pkl')))
p_slave_status = patch.object(
MySQLCollector,
'get_db_slave_status',
Mock(return_value=self.getPickledResults(
'get_db_slave_status_2.pkl')))
p_innodb_status = patch.object(
MySQLCollector,
'get_db_innodb_status',
Mock(return_value=[{}]))
p_global_status.start()
p_master_status.start()
p_slave_status.start()
p_innodb_status.start()
self.collector.collect()
p_global_status.stop()
p_master_status.stop()
p_slave_status.stop()
p_innodb_status.stop()
metrics = {}
metrics.update(self.getPickledResults(
'mysql_get_db_global_status_expected.pkl'))
metrics.update(self.getPickledResults(
'get_db_master_status_expected.pkl'))
metrics.update(self.getPickledResults(
'get_db_slave_status_expected.pkl'))
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import asyncio
from datetime import timedelta
import logging
import aiohttp
import attr
import eternalegypt
import voluptuous as vol
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PASSWORD,
CONF_RECIPIENT,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from . import sensor_types
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=10)
DISPATCHER_NETGEAR_LTE = "netgear_lte_update"
DOMAIN = "netgear_lte"
DATA_KEY = "netgear_lte"
EVENT_SMS = "netgear_lte_sms"
SERVICE_DELETE_SMS = "delete_sms"
SERVICE_SET_OPTION = "set_option"
SERVICE_CONNECT_LTE = "connect_lte"
SERVICE_DISCONNECT_LTE = "disconnect_lte"
ATTR_HOST = "host"
ATTR_SMS_ID = "sms_id"
ATTR_FROM = "from"
ATTR_MESSAGE = "message"
ATTR_FAILOVER = "failover"
ATTR_AUTOCONNECT = "autoconnect"
FAILOVER_MODES = ["auto", "wire", "mobile"]
AUTOCONNECT_MODES = ["never", "home", "always"]
NOTIFY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DOMAIN): cv.string,
vol.Optional(CONF_RECIPIENT, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(
CONF_MONITORED_CONDITIONS, default=sensor_types.DEFAULT_SENSORS
): vol.All(cv.ensure_list, [vol.In(sensor_types.ALL_SENSORS)])
}
)
BINARY_SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(
CONF_MONITORED_CONDITIONS, default=sensor_types.DEFAULT_BINARY_SENSORS
): vol.All(cv.ensure_list, [vol.In(sensor_types.ALL_BINARY_SENSORS)])
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(NOTIFY_DOMAIN, default={}): vol.All(
cv.ensure_list, [NOTIFY_SCHEMA]
),
vol.Optional(SENSOR_DOMAIN, default={}): SENSOR_SCHEMA,
vol.Optional(
BINARY_SENSOR_DOMAIN, default={}
): BINARY_SENSOR_SCHEMA,
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
DELETE_SMS_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_HOST): cv.string,
vol.Required(ATTR_SMS_ID): vol.All(cv.ensure_list, [cv.positive_int]),
}
)
SET_OPTION_SCHEMA = vol.Schema(
vol.All(
cv.has_at_least_one_key(ATTR_FAILOVER, ATTR_AUTOCONNECT),
{
vol.Optional(ATTR_HOST): cv.string,
vol.Optional(ATTR_FAILOVER): vol.In(FAILOVER_MODES),
vol.Optional(ATTR_AUTOCONNECT): vol.In(AUTOCONNECT_MODES),
},
)
)
CONNECT_LTE_SCHEMA = vol.Schema({vol.Optional(ATTR_HOST): cv.string})
DISCONNECT_LTE_SCHEMA = vol.Schema({vol.Optional(ATTR_HOST): cv.string})
@attr.s
class ModemData:
"""Class for modem state."""
hass = attr.ib()
host = attr.ib()
modem = attr.ib()
data = attr.ib(init=False, default=None)
connected = attr.ib(init=False, default=True)
async def async_update(self):
"""Call the API to update the data."""
try:
self.data = await self.modem.information()
if not self.connected:
_LOGGER.warning("Connected to %s", self.host)
self.connected = True
except eternalegypt.Error:
if self.connected:
_LOGGER.warning("Lost connection to %s", self.host)
self.connected = False
self.data = None
async_dispatcher_send(self.hass, DISPATCHER_NETGEAR_LTE)
@attr.s
class LTEData:
"""Shared state."""
websession = attr.ib()
modem_data = attr.ib(init=False, factory=dict)
def get_modem_data(self, config):
"""Get modem_data for the host in config."""
if config[CONF_HOST] is not None:
return self.modem_data.get(config[CONF_HOST])
if len(self.modem_data) != 1:
return None
return next(iter(self.modem_data.values()))
async def async_setup(hass, config):
"""Set up Netgear LTE component."""
if DATA_KEY not in hass.data:
websession = async_create_clientsession(
hass, cookie_jar=aiohttp.CookieJar(unsafe=True)
)
hass.data[DATA_KEY] = LTEData(websession)
async def service_handler(service):
"""Apply a service."""
host = service.data.get(ATTR_HOST)
conf = {CONF_HOST: host}
modem_data = hass.data[DATA_KEY].get_modem_data(conf)
if not modem_data:
_LOGGER.error("%s: host %s unavailable", service.service, host)
return
if service.service == SERVICE_DELETE_SMS:
for sms_id in service.data[ATTR_SMS_ID]:
await modem_data.modem.delete_sms(sms_id)
elif service.service == SERVICE_SET_OPTION:
failover = service.data.get(ATTR_FAILOVER)
if failover:
await modem_data.modem.set_failover_mode(failover)
autoconnect = service.data.get(ATTR_AUTOCONNECT)
if autoconnect:
await modem_data.modem.set_autoconnect_mode(autoconnect)
elif service.service == SERVICE_CONNECT_LTE:
await modem_data.modem.connect_lte()
elif service.service == SERVICE_DISCONNECT_LTE:
await modem_data.modem.disconnect_lte()
service_schemas = {
SERVICE_DELETE_SMS: DELETE_SMS_SCHEMA,
SERVICE_SET_OPTION: SET_OPTION_SCHEMA,
SERVICE_CONNECT_LTE: CONNECT_LTE_SCHEMA,
SERVICE_DISCONNECT_LTE: DISCONNECT_LTE_SCHEMA,
}
for service, schema in service_schemas.items():
hass.services.async_register(
DOMAIN, service, service_handler, schema=schema
)
netgear_lte_config = config[DOMAIN]
# Set up each modem
tasks = [_setup_lte(hass, lte_conf) for lte_conf in netgear_lte_config]
await asyncio.wait(tasks)
# Load platforms for each modem
for lte_conf in netgear_lte_config:
# Notify
for notify_conf in lte_conf[NOTIFY_DOMAIN]:
discovery_info = {
CONF_HOST: lte_conf[CONF_HOST],
CONF_NAME: notify_conf.get(CONF_NAME),
NOTIFY_DOMAIN: notify_conf,
}
hass.async_create_task(
discovery.async_load_platform(
hass, NOTIFY_DOMAIN, DOMAIN, discovery_info, config
)
)
# Sensor
sensor_conf = lte_conf.get(SENSOR_DOMAIN)
discovery_info = {CONF_HOST: lte_conf[CONF_HOST], SENSOR_DOMAIN: sensor_conf}
hass.async_create_task(
discovery.async_load_platform(
hass, SENSOR_DOMAIN, DOMAIN, discovery_info, config
)
)
# Binary Sensor
binary_sensor_conf = lte_conf.get(BINARY_SENSOR_DOMAIN)
discovery_info = {
CONF_HOST: lte_conf[CONF_HOST],
BINARY_SENSOR_DOMAIN: binary_sensor_conf,
}
hass.async_create_task(
discovery.async_load_platform(
hass, BINARY_SENSOR_DOMAIN, DOMAIN, discovery_info, config
)
)
return True
async def _setup_lte(hass, lte_config):
"""Set up a Netgear LTE modem."""
host = lte_config[CONF_HOST]
password = lte_config[CONF_PASSWORD]
websession = hass.data[DATA_KEY].websession
modem = eternalegypt.Modem(hostname=host, websession=websession)
modem_data = ModemData(hass, host, modem)
try:
await _login(hass, modem_data, password)
except eternalegypt.Error:
retry_task = hass.loop.create_task(_retry_login(hass, modem_data, password))
@callback
def cleanup_retry(event):
"""Clean up retry task resources."""
if not retry_task.done():
retry_task.cancel()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_retry)
async def _login(hass, modem_data, password):
"""Log in and complete setup."""
await modem_data.modem.login(password=password)
def fire_sms_event(sms):
"""Send an SMS event."""
data = {
ATTR_HOST: modem_data.host,
ATTR_SMS_ID: sms.id,
ATTR_FROM: sms.sender,
ATTR_MESSAGE: sms.message,
}
hass.bus.async_fire(EVENT_SMS, data)
await modem_data.modem.add_sms_listener(fire_sms_event)
await modem_data.async_update()
hass.data[DATA_KEY].modem_data[modem_data.host] = modem_data
async def _update(now):
"""Periodic update."""
await modem_data.async_update()
update_unsub = async_track_time_interval(hass, _update, SCAN_INTERVAL)
async def cleanup(event):
"""Clean up resources."""
update_unsub()
await modem_data.modem.logout()
del hass.data[DATA_KEY].modem_data[modem_data.host]
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)
async def _retry_login(hass, modem_data, password):
"""Sleep and retry setup."""
_LOGGER.warning("Could not connect to %s. Will keep trying", modem_data.host)
modem_data.connected = False
delay = 15
while not modem_data.connected:
await asyncio.sleep(delay)
try:
await _login(hass, modem_data, password)
except eternalegypt.Error:
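            # Exponential backoff between login attempts, capped at 5 minutes.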
delay = min(2 * delay, 300)
@attr.s
class LTEEntity(Entity):
"""Base LTE entity."""
modem_data = attr.ib()
sensor_type = attr.ib()
_unique_id = attr.ib(init=False)
@_unique_id.default
def _init_unique_id(self):
"""Register unique_id while we know data is valid."""
return f"{self.sensor_type}_{self.modem_data.data.serial_number}"
async def async_added_to_hass(self):
"""Register callback."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, DISPATCHER_NETGEAR_LTE, self.async_write_ha_state
)
)
async def async_update(self):
"""Force update of state."""
await self.modem_data.async_update()
@property
def should_poll(self):
"""Return that the sensor should not be polled."""
return False
@property
def available(self):
"""Return the availability of the sensor."""
return self.modem_data.data is not None
@property
def unique_id(self):
"""Return a unique ID like 'usage_5TG365AB0078V'."""
return self._unique_id
@property
def name(self):
"""Return the name of the sensor."""
return f"Netgear LTE {self.sensor_type}"
|
from trashcli.fs import has_sticky_bit
import os, shutil
def make_empty_file(path):
make_file(path, '')
def make_file(filename, contents=''):
make_parent_for(filename)
with open(filename, 'w') as f:
f.write(contents)
def read_file(path):
with open(path) as f:
return f.read()
def require_empty_dir(path):
if os.path.exists(path): shutil.rmtree(path)
make_dirs(path)
assert os.path.isdir(path)
assert [] == list(os.listdir(path))
def make_dirs(path):
if not os.path.isdir(path):
os.makedirs(path)
assert os.path.isdir(path)
def make_parent_for(path):
parent = os.path.dirname(os.path.realpath(path))
make_dirs(parent)
def make_sticky_dir(path):
os.mkdir(path)
set_sticky_bit(path)
def make_unsticky_dir(path):
os.mkdir(path)
unset_sticky_bit(path)
def make_dir_unsticky(path):
assert_is_dir(path)
unset_sticky_bit(path)
def assert_is_dir(path):
assert os.path.isdir(path)
def set_sticky_bit(path):
import stat
os.chmod(path, os.stat(path).st_mode | stat.S_ISVTX)
def unset_sticky_bit(path):
import stat
os.chmod(path, os.stat(path).st_mode & ~ stat.S_ISVTX)
assert not has_sticky_bit(path)
def touch(path):
    open(path, 'a+').close()
def ensure_non_sticky_dir(path):
    assert os.path.isdir(path)
    assert not has_sticky_bit(path)
def make_unreadable_file(path):
    make_file(path, '')
    os.chmod(path, 0)
|
import base64
import binascii
import json
import os
import requests
from requests.exceptions import RequestException
from requests.exceptions import SSLError
from paasta_tools.cli.utils import get_jenkins_build_output_url
from paasta_tools.cli.utils import validate_full_git_sha
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.generate_deployments_for_service import build_docker_image_name
from paasta_tools.utils import _log
from paasta_tools.utils import _log_audit
from paasta_tools.utils import _run
from paasta_tools.utils import build_docker_tag
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_service_docker_registry
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"push-to-registry",
help="Uploads a docker image to a registry",
description=(
"'paasta push-to-registry' is a tool to upload a local docker image "
"to the configured PaaSTA docker registry with a predictable and "
"well-constructed image name. The image name must be predictable because "
"the other PaaSTA components are expecting a particular format for the docker "
"image name."
),
epilog=(
"Note: Uploading to a docker registry often requires access to the local "
"docker socket as well as credentials to the remote registry"
),
)
list_parser.add_argument(
"-s",
"--service",
help='Name of service for which you wish to upload a docker image. Leading "services-", '
"as included in a Jenkins job name, will be stripped.",
required=True,
)
list_parser.add_argument(
"-c",
"--commit",
help="Git sha after which to name the remote image",
required=True,
type=validate_full_git_sha,
)
list_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
list_parser.add_argument(
"-f",
"--force",
help=(
"Do not check if the image is already in the PaaSTA docker registry. "
"Push it anyway."
),
action="store_true",
)
list_parser.set_defaults(command=paasta_push_to_registry)
def build_command(upstream_job_name, upstream_git_commit):
# This is kinda dumb since we just cleaned the 'services-' off of the
# service so we could validate it, but the Docker image will have the full
# name with 'services-' so add it back.
tag = build_docker_tag(upstream_job_name, upstream_git_commit)
cmd = f"docker push {tag}"
return cmd
def paasta_push_to_registry(args):
"""Upload a docker image to a registry"""
service = args.service
if service and service.startswith("services-"):
service = service.split("services-", 1)[1]
validate_service_name(service, args.soa_dir)
if not args.force:
try:
if is_docker_image_already_in_registry(service, args.soa_dir, args.commit):
print(
"The docker image is already in the PaaSTA docker registry. "
"I'm NOT overriding the existing image. "
"Add --force to override the image in the registry if you are sure what you are doing."
)
return 0
except RequestException as e:
registry_uri = get_service_docker_registry(service, args.soa_dir)
print(
"Can not connect to the PaaSTA docker registry '%s' to verify if this image exists.\n"
"%s" % (registry_uri, str(e))
)
return 1
cmd = build_command(service, args.commit)
loglines = []
returncode, output = _run(
cmd,
timeout=3600,
log=True,
component="build",
service=service,
loglevel="debug",
)
if returncode != 0:
loglines.append("ERROR: Failed to promote image for %s." % args.commit)
output = get_jenkins_build_output_url()
if output:
loglines.append("See output: %s" % output)
else:
loglines.append("Successfully pushed image for %s to registry" % args.commit)
_log_audit(
action="push-to-registry",
action_details={"commit": args.commit},
service=service,
)
for logline in loglines:
_log(service=service, line=logline, component="build", level="event")
return returncode
def read_docker_registry_creds(registry_uri):
dockercfg_path = os.path.expanduser("~/.dockercfg")
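    # The legacy ~/.dockercfg maps each registry URI to an entry whose "auth"
    # value is base64("username:password"); decode it and split on the first
    # colon to recover the credentials.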
try:
with open(dockercfg_path) as f:
dockercfg = json.load(f)
auth = base64.b64decode(dockercfg[registry_uri]["auth"]).decode("utf-8")
first_colon = auth.find(":")
if first_colon != -1:
return (auth[:first_colon], auth[first_colon + 1 : -2])
except IOError: # Can't open ~/.dockercfg
pass
    except json.JSONDecodeError:  # JSON decoder error
pass
except binascii.Error: # base64 decode error
pass
return (None, None)
def is_docker_image_already_in_registry(service, soa_dir, sha):
"""Verifies that docker image exists in the paasta registry.
:param service: name of the service
:param sha: git sha
:returns: True, False or raises requests.exceptions.RequestException
"""
registry_uri = get_service_docker_registry(service, soa_dir)
repository, tag = build_docker_image_name(service, sha).split(":")
creds = read_docker_registry_creds(registry_uri)
uri = f"{registry_uri}/v2/{repository}/manifests/paasta-{sha}"
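    # A HEAD request against the registry's v2 manifests endpoint returns 200
    # if the tag exists and 404 if it does not, without pulling any layers.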
with requests.Session() as s:
try:
url = "https://" + uri
r = (
s.head(url, timeout=30)
if creds[0] is None
else s.head(url, auth=creds, timeout=30)
)
except SSLError:
# If no auth creds, fallback to trying http
if creds[0] is not None:
raise
url = "http://" + uri
r = s.head(url, timeout=30)
if r.status_code == 200:
return True
elif r.status_code == 404:
return False # No Such Repository Error
r.raise_for_status()
|
import mock
import pytest
from asynctest import CoroutineMock
from paasta_tools.monitoring.check_mesos_active_frameworks import (
check_mesos_active_frameworks,
)
def test_check_mesos_active_frameworks_fails(capfd):
with mock.patch(
"paasta_tools.monitoring.check_mesos_active_frameworks.parse_args",
autospec=True,
) as mock_parse_args, mock.patch(
"paasta_tools.monitoring.check_mesos_active_frameworks.get_mesos_master",
autospec=True,
) as mock_get_mesos_master:
mock_opts = mock.MagicMock()
mock_opts.expected = "foo,bar"
mock_parse_args.return_value = mock_opts
mock_master = mock.MagicMock()
mock_master.state = CoroutineMock(
func=CoroutineMock(), # https://github.com/notion/a_sync/pull/40
return_value={"frameworks": [{"name": "foo"}]},
)
mock_get_mesos_master.return_value = mock_master
with pytest.raises(SystemExit) as error:
check_mesos_active_frameworks()
out, err = capfd.readouterr()
assert "CRITICAL" in out
assert "bar" in out
assert "foo" not in out
assert error.value.code == 2
def test_check_mesos_active_frameworks_succeeds(capfd):
with mock.patch(
"paasta_tools.monitoring.check_mesos_active_frameworks.parse_args",
autospec=True,
) as mock_parse_args, mock.patch(
"paasta_tools.monitoring.check_mesos_active_frameworks.get_mesos_master",
autospec=True,
) as mock_get_mesos_master:
mock_opts = mock.MagicMock()
mock_opts.expected = "foo,bar"
mock_parse_args.return_value = mock_opts
mock_master = mock.MagicMock()
mock_master.state = CoroutineMock(
func=CoroutineMock(), # https://github.com/notion/a_sync/pull/40
return_value={"frameworks": [{"name": "foo"}, {"name": "bar"}]},
)
mock_get_mesos_master.return_value = mock_master
with pytest.raises(SystemExit) as error:
check_mesos_active_frameworks()
out, err = capfd.readouterr()
assert "OK" in out
assert error.value.code == 0
|
import logging
from PyQt5.QtCore import Qt, QSize
from PyQt5.QtWidgets import QWidget
import pytest
from qutebrowser.misc import miscwidgets
from qutebrowser.browser import inspector
class TestCommandLineEdit:
"""Tests for CommandLineEdit widget."""
@pytest.fixture
def cmd_edit(self, qtbot):
"""Fixture to initialize a CommandLineEdit."""
cmd_edit = miscwidgets.CommandLineEdit()
cmd_edit.set_prompt(':')
qtbot.add_widget(cmd_edit)
assert cmd_edit.text() == ''
yield cmd_edit
def test_position(self, qtbot, cmd_edit):
"""Test cursor position based on the prompt."""
qtbot.keyClicks(cmd_edit, ':hello')
assert cmd_edit.text() == ':hello'
assert cmd_edit.cursorPosition() == len(':hello')
cmd_edit.home(True)
assert cmd_edit.cursorPosition() == len(':')
qtbot.keyClick(cmd_edit, Qt.Key_Delete)
assert cmd_edit.text() == ':'
qtbot.keyClick(cmd_edit, Qt.Key_Backspace)
assert cmd_edit.text() == ':'
qtbot.keyClicks(cmd_edit, 'hey again')
assert cmd_edit.text() == ':hey again'
def test_invalid_prompt(self, qtbot, cmd_edit):
"""Test preventing of an invalid prompt being entered."""
qtbot.keyClicks(cmd_edit, '$hello')
assert cmd_edit.text() == ''
def test_selection_home(self, qtbot, cmd_edit):
"""Test selection persisting when pressing home."""
qtbot.keyClicks(cmd_edit, ':hello')
assert cmd_edit.text() == ':hello'
assert cmd_edit.cursorPosition() == len(':hello')
cmd_edit.home(True)
assert cmd_edit.cursorPosition() == len(':')
assert cmd_edit.selectionStart() == len(':')
def test_selection_cursor_left(self, qtbot, cmd_edit):
"""Test selection persisting when moving to the first char."""
qtbot.keyClicks(cmd_edit, ':hello')
assert cmd_edit.text() == ':hello'
assert cmd_edit.cursorPosition() == len(':hello')
for _ in ':hello':
qtbot.keyClick(cmd_edit, Qt.Key_Left, modifier=Qt.ShiftModifier)
assert cmd_edit.cursorPosition() == len(':')
assert cmd_edit.selectionStart() == len(':')
class WrappedWidget(QWidget):
def sizeHint(self):
return QSize(23, 42)
class TestWrapperLayout:
@pytest.fixture
def container(self, qtbot):
wrapped = WrappedWidget()
parent = QWidget()
qtbot.add_widget(wrapped)
qtbot.add_widget(parent)
layout = miscwidgets.WrapperLayout(parent)
layout.wrap(parent, wrapped)
parent.wrapped = wrapped
return parent
def test_size_hint(self, container):
assert container.sizeHint() == QSize(23, 42)
def test_wrapped(self, container):
assert container.wrapped.parent() is container
assert container.focusProxy() is container.wrapped
class TestFullscreenNotification:
@pytest.mark.parametrize('bindings, text', [
({'<escape>': 'fullscreen --leave'},
"Press <Escape> to exit fullscreen."),
({'<escape>': 'fullscreen'}, "Page is now fullscreen."),
({'a': 'fullscreen --leave'}, "Press a to exit fullscreen."),
({}, "Page is now fullscreen."),
])
def test_text(self, qtbot, config_stub, key_config_stub, bindings, text):
config_stub.val.bindings.default = {}
config_stub.val.bindings.commands = {'normal': bindings}
w = miscwidgets.FullscreenNotification()
qtbot.add_widget(w)
assert w.text() == text
def test_timeout(self, qtbot, key_config_stub):
w = miscwidgets.FullscreenNotification()
qtbot.add_widget(w)
with qtbot.waitSignal(w.destroyed):
w.set_timeout(1)
@pytest.mark.usefixtures('state_config')
class TestInspectorSplitter:
@pytest.fixture
def fake_webview(self, blue_widget):
return blue_widget
@pytest.fixture
def fake_inspector(self, red_widget):
return red_widget
@pytest.fixture
def splitter(self, qtbot, fake_webview):
inspector_splitter = miscwidgets.InspectorSplitter(
win_id=0, main_webview=fake_webview)
qtbot.add_widget(inspector_splitter)
return inspector_splitter
def test_no_inspector(self, splitter, fake_webview):
assert splitter.count() == 1
assert splitter.widget(0) is fake_webview
assert splitter.focusProxy() is fake_webview
def test_no_inspector_resize(self, splitter):
splitter.show()
splitter.resize(800, 600)
def test_cycle_focus_no_inspector(self, splitter):
with pytest.raises(inspector.Error,
match='No inspector inside main window'):
splitter.cycle_focus()
@pytest.mark.parametrize(
'position, orientation, inspector_idx, webview_idx', [
(inspector.Position.left, Qt.Horizontal, 0, 1),
(inspector.Position.right, Qt.Horizontal, 1, 0),
(inspector.Position.top, Qt.Vertical, 0, 1),
(inspector.Position.bottom, Qt.Vertical, 1, 0),
]
)
def test_set_inspector(self, position, orientation,
inspector_idx, webview_idx,
splitter, fake_inspector, fake_webview):
splitter.set_inspector(fake_inspector, position)
assert splitter.indexOf(fake_inspector) == inspector_idx
assert splitter._inspector_idx == inspector_idx
assert splitter.indexOf(fake_webview) == webview_idx
assert splitter._main_idx == webview_idx
assert splitter.orientation() == orientation
def test_cycle_focus_hidden_inspector(self, splitter, fake_inspector):
splitter.set_inspector(fake_inspector, inspector.Position.right)
splitter.show()
fake_inspector.hide()
with pytest.raises(inspector.Error,
match='No inspector inside main window'):
splitter.cycle_focus()
@pytest.mark.parametrize(
'config, width, height, position, expected_size', [
# No config but enough big window
(None, 1024, 768, inspector.Position.left, 512),
(None, 1024, 768, inspector.Position.top, 384),
# No config and small window
(None, 320, 240, inspector.Position.left, 300),
(None, 320, 240, inspector.Position.top, 300),
# Invalid config
('verybig', 1024, 768, inspector.Position.left, 512),
# Value from config
('666', 1024, 768, inspector.Position.left, 666),
]
)
def test_read_size(self, config, width, height, position, expected_size,
state_config, splitter, fake_inspector, caplog):
if config is not None:
state_config['inspector'] = {position.name: config}
splitter.resize(width, height)
assert splitter.size() == QSize(width, height)
with caplog.at_level(logging.ERROR):
splitter.set_inspector(fake_inspector, position)
assert splitter._preferred_size == expected_size
        if config == 'verybig':
assert caplog.messages == ["Could not read inspector size: "
"invalid literal for int() with "
"base 10: 'verybig'"]
@pytest.mark.parametrize('position', [
inspector.Position.left,
inspector.Position.right,
inspector.Position.top,
inspector.Position.bottom,
])
def test_save_size(self, position, state_config, splitter, fake_inspector):
splitter.set_inspector(fake_inspector, position)
splitter._preferred_size = 1337
splitter._save_preferred_size()
assert state_config['inspector'][position.name] == '1337'
@pytest.mark.parametrize(
'old_window_size, preferred_size, new_window_size, '
'exp_inspector_size', [
# Plenty of space -> Keep inspector at configured absolute size
(600, 300, # 1/2 of window
500, 300), # 300px of 600px -> 300px of 500px
# Slowly running out of space -> Reserve space for website
(600, 450, # 3/4 of window
500, 350), # 450px of 600px -> 350px of 500px
# (so website has 150px)
# Very small window -> Keep ratio distribution
(600, 300, # 1/2 of window
200, 100), # 300px of 600px -> 100px of 200px (1/2)
]
)
@pytest.mark.parametrize('position', [
inspector.Position.left, inspector.Position.right,
inspector.Position.top, inspector.Position.bottom])
def test_adjust_size(self, old_window_size, preferred_size,
new_window_size, exp_inspector_size,
position, splitter, fake_inspector, qtbot):
def resize(dim):
size = (QSize(dim, 666) if splitter.orientation() == Qt.Horizontal
else QSize(666, dim))
splitter.resize(size)
if splitter.size() != size:
pytest.skip("Resizing window failed")
splitter.set_inspector(fake_inspector, position)
splitter.show()
resize(old_window_size)
handle_width = 4
splitter.setHandleWidth(handle_width)
splitter_idx = 1
if position in [inspector.Position.left, inspector.Position.top]:
splitter_pos = preferred_size - handle_width//2
else:
splitter_pos = old_window_size - preferred_size - handle_width//2
splitter.moveSplitter(splitter_pos, splitter_idx)
resize(new_window_size)
sizes = splitter.sizes()
inspector_size = sizes[splitter._inspector_idx]
main_size = sizes[splitter._main_idx]
exp_main_size = new_window_size - exp_inspector_size
exp_main_size -= handle_width // 2
exp_inspector_size -= handle_width // 2
assert (inspector_size, main_size) == (exp_inspector_size,
exp_main_size)
|
import numpy as np
from numpy.testing import assert_allclose
from scipy.interpolate import interp1d
import pytest
from mne import create_info, find_events, Epochs
from mne.io import RawArray
from mne.preprocessing import realign_raw
@pytest.mark.parametrize('ratio_other', (1., 0.999, 1.001)) # drifts
@pytest.mark.parametrize('start_raw, start_other', [(0, 0), (0, 3), (3, 0)])
@pytest.mark.parametrize('stop_raw, stop_other', [(0, 0), (0, 3), (3, 0)])
def test_realign(ratio_other, start_raw, start_other, stop_raw, stop_other):
"""Test realigning raw."""
# construct a true signal
sfreq = 100.
duration = 50
stop_raw = duration - stop_raw
stop_other = duration - stop_other
signal = np.zeros(int(round((duration + 1) * sfreq)))
orig_events = np.round(
np.arange(max(start_raw, start_other) + 2,
min(stop_raw, stop_other) - 2) * sfreq).astype(int)
signal[orig_events] = 1.
n_events = len(orig_events)
times = np.arange(len(signal)) / sfreq
stim = np.convolve(signal, np.ones(int(round(0.02 * sfreq))))[:len(times)]
signal = np.convolve(
signal, np.hanning(int(round(0.2 * sfreq))))[:len(times)]
# construct our sampled versions of these signals (linear interp is fine)
sfreq_raw = sfreq
sfreq_other = ratio_other * sfreq
raw_times = np.arange(start_raw, stop_raw, 1. / sfreq_raw)
other_times = np.arange(start_other, stop_other, 1. / sfreq_other)
assert raw_times[0] >= times[0]
assert raw_times[-1] <= times[-1]
assert other_times[0] >= times[0]
assert other_times[-1] <= times[-1]
data_raw = np.array(
[interp1d(times, d, kind)(raw_times)
for d, kind in ((signal, 'linear'), (stim, 'nearest'))])
data_other = np.array(
[interp1d(times, d, kind)(other_times)
for d, kind in ((signal, 'linear'), (stim, 'nearest'))])
info_raw = create_info(
['raw_data', 'raw_stim'], sfreq, ['eeg', 'stim'])
info_other = create_info(
['other_data', 'other_stim'], sfreq, ['eeg', 'stim'])
raw = RawArray(data_raw, info_raw, first_samp=111)
other = RawArray(data_other, info_other, first_samp=222)
# naive processing
evoked_raw, events_raw, _, events_other = _assert_similarity(
raw, other, n_events)
if start_raw == start_other: # can just naively crop
a, b = data_raw[0], data_other[0]
n = min(len(a), len(b))
corr = np.corrcoef(a[:n], b[:n])[0, 1]
min_, max_ = (0.99999, 1.) if sfreq_raw == sfreq_other else (0.8, 0.9)
assert min_ <= corr <= max_
# realign
t_raw = (events_raw[:, 0] - raw.first_samp) / other.info['sfreq']
t_other = (events_other[:, 0] - other.first_samp) / other.info['sfreq']
assert duration - 10 <= len(events_raw) < duration
raw_orig, other_orig = raw.copy(), other.copy()
realign_raw(raw, other, t_raw, t_other)
# old events should still work for raw and produce the same result
evoked_raw_2, _, _, _ = _assert_similarity(
raw, other, n_events, events_raw=events_raw)
assert_allclose(evoked_raw.data, evoked_raw_2.data)
assert_allclose(raw.times, other.times)
# raw data now aligned
corr = np.corrcoef(raw.get_data([0])[0], other.get_data([0])[0])[0, 1]
assert 0.99 < corr <= 1.
# Degenerate conditions -- only test in one run
test_degenerate = (start_raw == start_other and
stop_raw == stop_other and
ratio_other == 1)
if not test_degenerate:
return
# these alignments will not be correct but it shouldn't matter
with pytest.warns(RuntimeWarning, match='^Fewer.*may be unreliable.*'):
realign_raw(raw, other, raw_times[:5], other_times[:5])
with pytest.raises(ValueError, match='same shape'):
realign_raw(raw_orig, other_orig, raw_times[:5], other_times)
rand_times = np.random.RandomState(0).randn(len(other_times))
with pytest.raises(ValueError, match='cannot resample safely'):
realign_raw(raw_orig, other_orig, rand_times, other_times)
with pytest.warns(RuntimeWarning, match='.*computed as R=.*unreliable'):
realign_raw(
raw_orig, other_orig, raw_times + rand_times * 1000, other_times)
def _assert_similarity(raw, other, n_events, events_raw=None):
if events_raw is None:
events_raw = find_events(raw)
events_other = find_events(other)
assert len(events_raw) == n_events
assert len(events_other) == n_events
kwargs = dict(baseline=None, tmin=0, tmax=0.2)
evoked_raw = Epochs(raw, events_raw, **kwargs).average()
evoked_other = Epochs(other, events_other, **kwargs).average()
assert evoked_raw.nave == evoked_other.nave == len(events_raw)
assert len(evoked_raw.data) == len(evoked_other.data) == 1 # just EEG
corr = np.corrcoef(evoked_raw.data[0], evoked_other.data[0])[0, 1]
assert 0.9 <= corr <= 1.
return evoked_raw, events_raw, evoked_other, events_other
|
import logging
import re
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import scimark2
BENCHMARK_NAME = 'scimark2'
BENCHMARK_CONFIG = """
scimark2:
description: Runs SciMark2
vm_groups:
default:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(benchmark_config):
pass
def Prepare(benchmark_spec):
"""Install SciMark2 on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
vm = vms[0]
logging.info('Preparing SciMark2 on %s', vm)
vm.Install('scimark2')
def Run(benchmark_spec):
"""Run SciMark2 on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
vm = vms[0]
logging.info('Running SciMark2 on %s', vm)
samples = []
# Run the Java and C benchmarks twice each, once with defaults and
# once with the "-large" flag to use a larger working set size.
#
# Since the default output is not very parsing-friendly, print an
# extra header to identify the tests. This must match
# RESULT_START_REGEX as used below.
cmds = [
'(echo ";;; Java small"; cd {0} && java -cp {1} {2})'.format(
scimark2.PATH, scimark2.JAVA_JAR, scimark2.JAVA_MAIN),
'(echo ";;; C small"; cd {0} && ./scimark2)'.format(
scimark2.C_SRC),
'(echo ";;; Java large"; cd {0} && java -cp {1} {2} -large)'.format(
scimark2.PATH, scimark2.JAVA_JAR, scimark2.JAVA_MAIN),
'(echo ";;; C large"; cd {0} && ./scimark2 -large)'.format(
scimark2.C_SRC),
]
for cmd in cmds:
stdout, _ = vm.RemoteCommand(cmd, should_log=True)
samples.extend(ParseResults(stdout))
return samples
def Cleanup(unused_benchmark_spec):
pass
def ParseResults(results):
"""Result parser for SciMark2.
Sample Results (C version):
** **
** SciMark2 Numeric Benchmark, see http://math.nist.gov/scimark **
** for details. (Results can be submitted to [email protected]) **
** **
Using 2.00 seconds min time per kenel.
Composite Score: 1596.04
FFT Mflops: 1568.64 (N=1024)
SOR Mflops: 1039.98 (100 x 100)
MonteCarlo: Mflops: 497.64
Sparse matmult Mflops: 1974.39 (N=1000, nz=5000)
LU Mflops: 2899.56 (M=100, N=100)
(Yes, "kenel" is part of the original output.)
Sample Results (Java version):
SciMark 2.0a
Composite Score: 1731.4467627163242
FFT (1024): 996.9938397943672
SOR (100x100): 1333.5328291027124
Monte Carlo : 724.5221517116782
Sparse matmult (N=1000, nz=5000): 1488.18620413327
LU (100x100): 4113.998788839592
java.vendor: Oracle Corporation
java.version: 1.7.0_75
os.arch: amd64
os.name: Linux
os.version: 3.16.0-25-generic
Args:
results: SciMark2 result.
Returns:
A list of sample.Sample objects.
"""
result_start_regex = re.compile(r"""
^
;;; \s+ (\S+) #1: Language ("C" or "Java")
\s+ (\S+) #2: Size ("small" or "large")
""", re.VERBOSE | re.MULTILINE)
score_regex = re.compile(r"""
^ (Composite \s+ Score) : \s+ (\d+ \. \d+)
""", re.VERBOSE | re.MULTILINE)
result_regex_c = re.compile(r"""
^
( .+? ) \s+ #1: Test name
Mflops: \s+
( \d+ \. \d+ ) #2: Test score
( \s+ \( .+? \) )? #3: Optional test details
""", re.VERBOSE | re.MULTILINE)
result_regex_java = re.compile(r"""
^
( .+? ) #1: Test name
: \s+
( \d+ \. \d+ ) #2: Test score
""", re.VERBOSE | re.MULTILINE)
platform_regex = re.compile(r"""
^
( \w+ \. \w+ ) #1: Property name
: \s+
( .* ) #2: Property value
""", re.VERBOSE | re.MULTILINE)
def FindBenchStart(results, start_index=0):
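    # Run() prefixes every benchmark invocation with an ';;; <language> <size>'
    # header; locate the next such marker and return its offset and fields.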
m = result_start_regex.search(results, start_index)
if m is None:
return -1, None, None
return m.start(), m.group(1), m.group(2)
def ExtractPlatform(result, benchmark_language):
"""Retrieves platform data from the result string."""
metadata = {}
meta_start = None
if benchmark_language == 'C':
pass
elif benchmark_language == 'Java':
for m in platform_regex.finditer(result):
if meta_start is None:
meta_start = m.start()
metadata[m.group(1)] = m.group(2)
return metadata, meta_start
def ExtractScore(result):
m = score_regex.search(result)
if m is None:
raise errors.Benchmarks.RunError('scimark2: Cannot find score in output.')
label = m.group(1)
score = float(m.group(2))
return score, label, m.end()
def ExtractResults(result, benchmark_language):
"""Retrieves data points from the result string."""
datapoints = []
if benchmark_language == 'C':
for groups in regex_util.ExtractAllMatches(result_regex_c, result):
metric = '{0} {1}'.format(groups[0].strip(), groups[2].strip())
metric = metric.strip().strip(':') # Extra ':' in 'MonteCarlo:'.
value = float(groups[1])
datapoints.append((metric, value))
elif benchmark_language == 'Java':
for groups in regex_util.ExtractAllMatches(result_regex_java, result):
datapoints.append((groups[0].strip(), float(groups[1])))
return datapoints
# Find start positions for all the test results.
tests = []
test_start_pos = 0
while True:
start_index, benchmark_language, benchmark_size = FindBenchStart(
results, test_start_pos)
if start_index == -1:
break
tests.append((start_index, benchmark_language, benchmark_size))
test_start_pos = start_index + 1
# Now loop over individual tests collecting samples.
samples = []
for test_num, test_details in enumerate(tests):
start_index, benchmark_language, benchmark_size = test_details
# Get end index - either start of next test, or None for the last test.
end_index = None
if test_num + 1 < len(tests):
end_index = tests[test_num + 1][0]
result = results[start_index:end_index]
metadata = {'benchmark_language': benchmark_language,
'benchmark_size': benchmark_size}
# Assume that the result consists of overall score followed by
# specific scores and then platform metadata.
# Get the metadata first since we need that to annotate samples.
platform_metadata, meta_start = ExtractPlatform(result, benchmark_language)
metadata.update(platform_metadata)
# Get the overall score.
score, label, score_end = ExtractScore(result)
samples.append(sample.Sample(label, score, 'Mflops', metadata))
# For the specific scores, only look at the part of the string
# bounded by score_end and meta_start to avoid adding extraneous
# items. The overall score and platform data would match the
# result regex.
datapoints = ExtractResults(
result[score_end:meta_start], benchmark_language)
for metric, value in datapoints:
samples.append(sample.Sample(metric, value, 'Mflops', metadata))
return samples
|
import logging
import openevsewifi
from requests import RequestException
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_VARIABLES,
ENERGY_KILO_WATT_HOUR,
TEMP_CELSIUS,
TIME_MINUTES,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
"status": ["Charging Status", None],
"charge_time": ["Charge Time Elapsed", TIME_MINUTES],
"ambient_temp": ["Ambient Temperature", TEMP_CELSIUS],
"ir_temp": ["IR Temperature", TEMP_CELSIUS],
"rtc_temp": ["RTC Temperature", TEMP_CELSIUS],
"usage_session": ["Usage this Session", ENERGY_KILO_WATT_HOUR],
"usage_total": ["Total Usage", ENERGY_KILO_WATT_HOUR],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_MONITORED_VARIABLES, default=["status"]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the OpenEVSE sensor."""
host = config.get(CONF_HOST)
monitored_variables = config.get(CONF_MONITORED_VARIABLES)
charger = openevsewifi.Charger(host)
dev = []
for variable in monitored_variables:
dev.append(OpenEVSESensor(variable, charger))
add_entities(dev, True)
class OpenEVSESensor(Entity):
"""Implementation of an OpenEVSE sensor."""
def __init__(self, sensor_type, charger):
"""Initialize the sensor."""
self._name = SENSOR_TYPES[sensor_type][0]
self.type = sensor_type
self._state = None
self.charger = charger
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this sensor."""
return self._unit_of_measurement
def update(self):
"""Get the monitored data from the charger."""
try:
if self.type == "status":
self._state = self.charger.getStatus()
elif self.type == "charge_time":
self._state = self.charger.getChargeTimeElapsed() / 60
elif self.type == "ambient_temp":
self._state = self.charger.getAmbientTemperature()
elif self.type == "ir_temp":
self._state = self.charger.getIRTemperature()
elif self.type == "rtc_temp":
self._state = self.charger.getRTCTemperature()
elif self.type == "usage_session":
self._state = float(self.charger.getUsageSession()) / 1000
elif self.type == "usage_total":
self._state = float(self.charger.getUsageTotal()) / 1000
else:
self._state = "Unknown"
except (RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update status for %s", self.name)
|
import pytest
from unittest.mock import Mock, patch
pytest.importorskip('librabbitmq')
from kombu.transport import librabbitmq # noqa
class test_Message:
def test_init(self):
chan = Mock(name='channel')
message = librabbitmq.Message(
chan, {'prop': 42}, {'delivery_tag': 337}, 'body',
)
assert message.body == 'body'
assert message.delivery_tag == 337
assert message.properties['prop'] == 42
class test_Channel:
def test_prepare_message(self):
conn = Mock(name='connection')
chan = librabbitmq.Channel(conn, 1)
assert chan
body = 'the quick brown fox...'
properties = {'name': 'Elaine M.'}
body2, props2 = chan.prepare_message(
body, properties=properties,
priority=999,
content_type='ctype',
content_encoding='cenc',
headers={'H': 2},
)
assert props2['name'] == 'Elaine M.'
assert props2['priority'] == 999
assert props2['content_type'] == 'ctype'
assert props2['content_encoding'] == 'cenc'
assert props2['headers'] == {'H': 2}
assert body2 == body
body3, props3 = chan.prepare_message(body, priority=777)
assert props3['priority'] == 777
assert body3 == body
class test_Transport:
def setup(self):
self.client = Mock(name='client')
self.T = librabbitmq.Transport(self.client)
def test_driver_version(self):
assert self.T.driver_version()
def test_create_channel(self):
conn = Mock(name='connection')
chan = self.T.create_channel(conn)
assert chan
conn.channel.assert_called_with()
def test_drain_events(self):
conn = Mock(name='connection')
self.T.drain_events(conn, timeout=1.33)
conn.drain_events.assert_called_with(timeout=1.33)
def test_establish_connection_SSL_not_supported(self):
self.client.ssl = True
with pytest.raises(NotImplementedError):
self.T.establish_connection()
def test_establish_connection(self):
self.T.Connection = Mock(name='Connection')
self.T.client.ssl = False
self.T.client.port = None
self.T.client.transport_options = {}
conn = self.T.establish_connection()
assert self.T.client.port == self.T.default_connection_params['port']
assert conn.client == self.T.client
assert self.T.client.drain_events == conn.drain_events
def test_collect__no_conn(self):
self.T.client.drain_events = 1234
self.T._collect(None)
assert self.client.drain_events is None
assert self.T.client is None
def test_collect__with_conn(self):
self.T.client.drain_events = 1234
conn = Mock(name='connection')
chans = conn.channels = {1: Mock(name='chan1'), 2: Mock(name='chan2')}
conn.callbacks = {'foo': Mock(name='cb1'), 'bar': Mock(name='cb2')}
for i, chan in enumerate(conn.channels.values()):
chan.connection = i
with patch('os.close') as close:
self.T._collect(conn)
close.assert_called_with(conn.fileno())
assert not conn.channels
assert not conn.callbacks
for chan in chans.values():
assert chan.connection is None
assert self.client.drain_events is None
assert self.T.client is None
with patch('os.close') as close:
self.T.client = self.client
close.side_effect = OSError()
self.T._collect(conn)
close.assert_called_with(conn.fileno())
def test_collect__with_fileno_raising_value_error(self):
conn = Mock(name='connection')
conn.channels = {1: Mock(name='chan1'), 2: Mock(name='chan2')}
with patch('os.close') as close:
self.T.client = self.client
conn.fileno.side_effect = ValueError("Socket not connected")
self.T._collect(conn)
close.assert_not_called()
conn.fileno.assert_called_with()
assert self.client.drain_events is None
assert self.T.client is None
def test_register_with_event_loop(self):
conn = Mock(name='conn')
loop = Mock(name='loop')
self.T.register_with_event_loop(conn, loop)
loop.add_reader.assert_called_with(
conn.fileno(), self.T.on_readable, conn, loop,
)
def test_verify_connection(self):
conn = Mock(name='connection')
conn.connected = True
assert self.T.verify_connection(conn)
def test_close_connection(self):
conn = Mock(name='connection')
self.client.drain_events = 1234
self.T.close_connection(conn)
assert self.client.drain_events is None
conn.close.assert_called_with()
|
import threading
from threading import Thread
import logging
from kalliope import Utils, SettingLoader
from kalliope.stt import SpeechRecognizer
import speech_recognition as sr
logging.basicConfig()
logger = logging.getLogger("kalliope")
class SpeechRecognition(Thread):
def __init__(self, audio_file=None):
"""
        Thread used to process an audio file and pass it to a callback method
"""
super(SpeechRecognition, self).__init__()
self.callback = None
self.audio_stream = None
# get global configuration
sl = SettingLoader()
self.settings = sl.settings
self.recognizer = SpeechRecognizer.ResponsiveRecognizer(multiplier=self.settings.options.recognizer_multiplier,
energy_ratio=self.settings.options.recognizer_energy_ratio,
recording_timeout=self.settings.options.recognizer_recording_timeout,
recording_timeout_with_silence=self.settings.options.recognizer_recording_timeout_with_silence)
if audio_file is None:
# audio file not set, we need to capture a sample from the microphone
self.microphone = SpeechRecognizer.MutableMicrophone()
else:
# audio file provided
with sr.AudioFile(audio_file) as source:
self.audio_stream = self.recognizer.record(source) # read the entire audio file
def run(self):
"""
        Start the thread that listens to the microphone and then gives the audio to the callback method
"""
if self.audio_stream is None:
Utils.print_success("Say something!")
with self.microphone as source:
self.audio_stream = self.recognizer.listen(source)
self.callback(self.recognizer, self.audio_stream)
def start_processing(self):
"""
A method to start the thread
"""
self.start()
def set_callback(self, callback):
"""
        Set the callback method that will receive the captured audio stream
:param callback: callback method
:return:
"""
self.callback = callback
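if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module. The WAV path is
    # hypothetical and a loadable Kalliope settings file is assumed.
    def _print_audio_size(recognizer, audio):
        # The callback receives the recognizer and the captured AudioData.
        print("received %d bytes of raw audio" % len(audio.get_raw_data()))

    recognition = SpeechRecognition(audio_file="/tmp/example.wav")
    recognition.set_callback(_print_audio_size)
    recognition.start_processing()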
|
from abc import ABC, abstractmethod
from collections import defaultdict
from copy import copy, deepcopy
from pprint import pformat
from typing import (
TYPE_CHECKING,
cast,
Any,
MutableMapping,
Optional,
Dict,
Union,
Iterator,
Iterable,
)
from typing import DefaultDict
from cerberus.typing import DocumentPath, FieldName
from cerberus.utils import compare_paths_lt, quote_string
if TYPE_CHECKING:
from cerberus.base import UnconcernedValidator # noqa: F401
class ErrorDefinition:
"""
This class is used to define possible errors. Each distinguishable error is
defined by a *unique* error ``code`` as integer and the ``rule`` that can
cause it as string.
The instances' names do not contain a common prefix as they are supposed to be
referenced within the module namespace, e.g. ``errors.CUSTOM``.
"""
__slots__ = ('code', 'rule')
def __init__(self, code: int, rule: Optional[str]) -> None:
self.code = code
self.rule = rule
# custom
CUSTOM = ErrorDefinition(0x00, None)
# existence
DOCUMENT_MISSING = ErrorDefinition(0x01, None) # issues/141
DOCUMENT_MISSING = "document is missing" # type: ignore
REQUIRED_FIELD = ErrorDefinition(0x02, 'required')
UNKNOWN_FIELD = ErrorDefinition(0x03, None)
DEPENDENCIES_FIELD = ErrorDefinition(0x04, 'dependencies')
DEPENDENCIES_FIELD_VALUE = ErrorDefinition(0x05, 'dependencies')
EXCLUDES_FIELD = ErrorDefinition(0x06, 'excludes')
# shape
DOCUMENT_FORMAT = ErrorDefinition(0x21, None) # issues/141
DOCUMENT_FORMAT = "'{0}' is not a document, must be a dict" # type: ignore
EMPTY = ErrorDefinition(0x22, 'empty')
NULLABLE = ErrorDefinition(0x23, 'nullable')
TYPE = ErrorDefinition(0x24, 'type')
ITEMS_LENGTH = ErrorDefinition(0x26, 'items')
MIN_LENGTH = ErrorDefinition(0x27, 'minlength')
MAX_LENGTH = ErrorDefinition(0x28, 'maxlength')
# color
REGEX_MISMATCH = ErrorDefinition(0x41, 'regex')
MIN_VALUE = ErrorDefinition(0x42, 'min')
MAX_VALUE = ErrorDefinition(0x43, 'max')
UNALLOWED_VALUE = ErrorDefinition(0x44, 'allowed')
UNALLOWED_VALUES = ErrorDefinition(0x45, 'allowed')
FORBIDDEN_VALUE = ErrorDefinition(0x46, 'forbidden')
FORBIDDEN_VALUES = ErrorDefinition(0x47, 'forbidden')
MISSING_MEMBERS = ErrorDefinition(0x48, 'contains')
# other
NORMALIZATION = ErrorDefinition(0x60, None)
COERCION_FAILED = ErrorDefinition(0x61, 'coerce')
RENAMING_FAILED = ErrorDefinition(0x62, 'rename_handler')
READONLY_FIELD = ErrorDefinition(0x63, 'readonly')
SETTING_DEFAULT_FAILED = ErrorDefinition(0x64, 'default_setter')
# groups
ERROR_GROUP = ErrorDefinition(0x80, None)
SCHEMA = ErrorDefinition(0x81, 'schema')
ITEMSRULES = ErrorDefinition(0x82, 'itemsrules')
KEYSRULES = ErrorDefinition(0x83, 'keysrules')
VALUESRULES = ErrorDefinition(0x84, 'valuesrules')
ITEMS = ErrorDefinition(0x8F, 'items')
LOGICAL = ErrorDefinition(0x90, None)
NONEOF = ErrorDefinition(0x91, 'noneof')
ONEOF = ErrorDefinition(0x92, 'oneof')
ANYOF = ErrorDefinition(0x93, 'anyof')
ALLOF = ErrorDefinition(0x94, 'allof')
""" SchemaError messages """
MISSING_SCHEMA = "validation schema missing"
SCHEMA_TYPE = "schema definition for field '{0}' must be a dict"
""" Error representations """
class ValidationError:
""" A simple class to store and query basic error information. """
def __init__(
self,
document_path: DocumentPath,
schema_path: DocumentPath,
code: int,
rule: str,
constraint: Any,
value: Any,
info: Any,
) -> None:
self.document_path = document_path
""" The path to the field within the document that caused the error.
Type: :class:`tuple` """
self.schema_path = schema_path
""" The path to the rule within the schema that caused the error.
Type: :class:`tuple` """
self.code = code
""" The error's identifier code. Type: :class:`int` """
self.rule = rule
""" The rule that failed. Type: `string` """
self.constraint = constraint
""" The constraint that failed. """
self.value = value
""" The value that failed. """
self.info = info
""" May hold additional information about the error.
Type: :class:`tuple` """
def __eq__(self, other):
""" Assumes the errors relate to the same document and schema. """
return hash(self) == hash(other)
def __hash__(self):
""" Expects that all other properties are transitively determined. """
return hash(self.document_path) ^ hash(self.schema_path) ^ hash(self.code)
def __lt__(self, other):
if self.document_path != other.document_path:
return compare_paths_lt(self.document_path, other.document_path)
else:
return compare_paths_lt(self.schema_path, other.schema_path)
def __repr__(self):
return (
"{class_name} @ {memptr} ( "
"document_path={document_path},"
"schema_path={schema_path},"
"code={code},"
"constraint={constraint},"
"value={value},"
"info={info} )".format(
class_name=self.__class__.__name__,
memptr=hex(id(self)), # noqa: E501
document_path=self.document_path,
schema_path=self.schema_path,
code=hex(self.code),
constraint=quote_string(self.constraint),
value=quote_string(self.value),
info=self.info,
)
)
@property
def child_errors(self) -> Optional["ErrorList"]:
"""
A list that contains the individual errors of a bulk validation error.
"""
return self.info[0] if self.is_group_error else None
@property
def definitions_errors(self) -> Optional[DefaultDict[int, "ErrorList"]]:
r"""
Dictionary with errors of an \*of-rule mapped to the index of the definition it
occurred in. Returns :obj:`None` if not applicable.
"""
if not self.is_logic_error:
return None
result = defaultdict(ErrorList) # type: DefaultDict[int, ErrorList]
for error in self.child_errors: # type: ignore
i = error.schema_path[len(self.schema_path)]
result[i].append(error)
return result
@property
def field(self) -> Optional[FieldName]:
""" Field of the contextual mapping, possibly :obj:`None`. """
if self.document_path:
return self.document_path[-1]
else:
return None
@property
def is_group_error(self) -> bool:
""" ``True`` for errors of bulk validations. """
return bool(self.code & ERROR_GROUP.code)
@property
def is_logic_error(self) -> bool:
r"""
``True`` for validation errors against different schemas with \*of-rules.
"""
return bool(self.code & LOGICAL.code - ERROR_GROUP.code)
@property
def is_normalization_error(self) -> bool:
""" ``True`` for normalization errors. """
return bool(self.code & NORMALIZATION.code)
class ErrorList(list):
"""
A list for :class:`~cerberus.errors.ValidationError` instances that can be queried
with the ``in`` keyword for a particular :class:`~cerberus.errors.ErrorDefinition`.
"""
def __contains__(self, error_definition):
if not isinstance(error_definition, ErrorDefinition):
raise TypeError
wanted_code = error_definition.code
return any(x.code == wanted_code for x in self)
class ErrorTreeNode(MutableMapping):
__slots__ = ('descendants', 'errors', 'parent_node', 'path', 'tree_root')
def __init__(self, path: DocumentPath, parent_node: 'ErrorTreeNode') -> None:
self.parent_node = parent_node # type: Optional[ErrorTreeNode]
self.tree_root = self.parent_node.tree_root # type: ErrorTree
self.path = path[: self.parent_node.depth + 1]
self.errors = ErrorList()
self.descendants = {} # type: Dict[FieldName, ErrorTreeNode]
def __contains__(self, item):
if isinstance(item, ErrorDefinition):
return item in self.errors
else:
return item in self.descendants
def __delitem__(self, key):
del self.descendants[key]
def __iter__(self) -> Iterator[ValidationError]:
return iter(self.errors)
def __getitem__(
self, item: Union[ErrorDefinition, FieldName]
) -> Union[Optional[ValidationError], Optional['ErrorTreeNode']]:
if isinstance(item, ErrorDefinition):
for error in self.errors:
if item.code == error.code:
return error
return None
else:
return self.descendants.get(item)
def __len__(self):
return len(self.errors)
def __repr__(self):
return self.__str__()
def __setitem__(self, key: FieldName, value: "ErrorTreeNode") -> None:
self.descendants[key] = value
def __str__(self):
return str(self.errors) + ',' + str(self.descendants)
@property
def depth(self) -> int:
return len(self.path)
@property
def tree_type(self) -> str:
return self.tree_root.tree_type
def add(self, error: ValidationError) -> None:
error_path = self._path_of_(error)
key = error_path[self.depth]
if key not in self.descendants:
self[key] = ErrorTreeNode(error_path, self)
node = cast(ErrorTreeNode, self[key])
if len(error_path) == self.depth + 1:
node.errors.append(error)
node.errors.sort()
if error.is_group_error:
for child_error in error.child_errors: # type: ignore
self.tree_root.add(child_error)
else:
node.add(error)
def _path_of_(self, error):
return getattr(error, self.tree_type + '_path')
class ErrorTree(ErrorTreeNode):
"""
Base class for :class:`~cerberus.errors.DocumentErrorTree` and
:class:`~cerberus.errors.SchemaErrorTree`.
"""
depth = 0
parent = None
path = ()
def __init__(self, errors: Iterable[ValidationError] = ()) -> None:
self.tree_root = self
self.errors = ErrorList()
self.descendants = {}
for error in errors:
self.add(error)
def add(self, error: ValidationError) -> None:
""" Add an error to the tree. """
if not self._path_of_(error):
self.errors.append(error)
self.errors.sort()
else:
super().add(error)
def fetch_errors_from(self, path: DocumentPath) -> ErrorList:
""" Returns all errors for a particular path. """
node = self.fetch_node_from(path)
if node is None:
return ErrorList()
else:
return node.errors
def fetch_node_from(self, path: DocumentPath) -> ErrorTreeNode:
""" Returns a node for a path. """
context = self
for key in path:
context = context.get(key, None)
if context is None:
break
return context
class DocumentErrorTree(ErrorTree):
"""
Implements a dict-like class to query errors by indexes following the structure of a
validated document.
"""
tree_type = 'document'
class SchemaErrorTree(ErrorTree):
"""
Implements a dict-like class to query errors by indexes following the structure of
the used schema.
"""
tree_type = 'schema'
class BaseErrorHandler(ABC):
""" Base class for all error handlers. """
def __init__(self, *args, **kwargs):
""" Optionally initialize a new instance. """
pass
@abstractmethod
def __call__(self, errors: Iterable[ValidationError]) -> Any:
""" Returns errors in a handler-specific format. """
raise NotImplementedError
def __iter__(self) -> Iterator[Any]:
""" Be a superhero and implement an iterator over errors. """
raise NotImplementedError
@abstractmethod
def add(self, error: ValidationError) -> None:
"""
Add an error to the errors' container object of a handler.
:param error: The error to add.
"""
pass
def emit(self, error: ValidationError) -> None:
"""
Optionally emits an error in the handler's format to a stream. Or light a LED,
or even shut down a power plant.
:param error: The error to emit.
"""
pass
def end(self, validator: "UnconcernedValidator") -> None:
"""
Gets called when a validation ends.
:param validator: The calling validator.
"""
pass
def extend(self, errors: Iterable[ValidationError]) -> None:
""" Adds all errors to the handler's container object. """
for error in errors:
self.add(error)
def start(self, validator: "UnconcernedValidator") -> None:
"""
Gets called when a validation starts.
:param validator: The calling validator.
"""
pass
class ToyErrorHandler(BaseErrorHandler):
def __call__(self, *args, **kwargs):
raise RuntimeError('This is not supposed to happen.')
add = __call__
class BasicErrorHandler(BaseErrorHandler):
"""
Models cerberus' legacy. Returns a :class:`dict`. When mangled through :class:`str`
a pretty-formatted representation of that tree is returned.
"""
messages = {
0x00: "{0}",
0x01: "document is missing",
0x02: "required field",
0x03: "unknown field",
0x04: "field '{0}' is required",
0x05: "depends on these values: {constraint}",
0x06: "{0} must not be present with '{field}'",
0x21: "'{0}' is not a document, must be a dict",
0x22: "empty values not allowed",
0x23: "null value not allowed",
0x24: "must be one of these types: {constraint}",
0x26: "length of list should be {0}, it is {1}",
0x27: "min length is {constraint}",
0x28: "max length is {constraint}",
0x41: "value does not match regex '{constraint}'",
0x42: "min value is {constraint}",
0x43: "max value is {constraint}",
0x44: "unallowed value {value}",
0x45: "unallowed values {0}",
0x46: "unallowed value {value}",
0x47: "unallowed values {0}",
0x48: "missing members {0}",
0x61: "field '{field}' cannot be coerced: {0}",
0x62: "field '{field}' cannot be renamed: {0}",
0x63: "field is read-only",
0x64: "default value for '{field}' cannot be set: {0}",
0x81: "mapping doesn't validate subschema: {0}",
0x82: "one or more sequence-items don't validate: {0}",
0x83: "one or more keys of a mapping don't validate: {0}",
0x84: "one or more values in a mapping don't validate: {0}",
0x85: "one or more sequence-items don't validate: {0}",
0x91: "one or more definitions validate",
0x92: "none or more than one rule validate",
0x93: "no definitions validate",
0x94: "one or more definitions don't validate",
}
def __init__(self, tree: Dict = None) -> None:
self.tree = {} if tree is None else tree
def __call__(self, errors):
self.clear()
self.extend(errors)
return self.pretty_tree
def __str__(self):
return pformat(self.pretty_tree)
@property
def pretty_tree(self) -> Dict:
pretty = deepcopy(self.tree)
for field in pretty:
self._purge_empty_dicts(pretty[field])
return pretty
def add(self, error):
# Make sure the original error is not altered with
# error paths specific to the handler.
error = deepcopy(error)
self._rewrite_error_path(error)
if error.is_logic_error:
self._insert_logic_error(error)
elif error.is_group_error:
self._insert_group_error(error)
elif error.code in self.messages:
self._insert_error(
error.document_path, self._format_message(error.field, error)
)
def clear(self):
self.tree = {}
def start(self, validator):
self.clear()
def _format_message(self, field, error):
return self.messages[error.code].format(
*error.info, constraint=error.constraint, field=field, value=error.value
)
def _insert_error(self, path, node):
"""
        Adds an error or sub-tree to :attr:`tree`.
:param path: Path to the error.
:type path: Tuple of strings and integers.
:param node: An error message or a sub-tree.
:type node: String or dictionary.
"""
field = path[0]
if len(path) == 1:
if field in self.tree:
subtree = self.tree[field].pop()
self.tree[field] += [node, subtree]
else:
self.tree[field] = [node, {}]
elif len(path) >= 1:
if field not in self.tree:
self.tree[field] = [{}]
subtree = self.tree[field][-1]
if subtree:
new = self.__class__(tree=copy(subtree))
else:
new = self.__class__()
new._insert_error(path[1:], node)
subtree.update(new.tree)
def _insert_group_error(self, error):
for child_error in error.child_errors:
if child_error.is_logic_error:
self._insert_logic_error(child_error)
elif child_error.is_group_error:
self._insert_group_error(child_error)
else:
self._insert_error(
child_error.document_path,
self._format_message(child_error.field, child_error),
)
def _insert_logic_error(self, error):
field = error.field
self._insert_error(error.document_path, self._format_message(field, error))
for definition_errors in error.definitions_errors.values():
for child_error in definition_errors:
if child_error.is_logic_error:
self._insert_logic_error(child_error)
elif child_error.is_group_error:
self._insert_group_error(child_error)
else:
self._insert_error(
child_error.document_path,
self._format_message(field, child_error),
)
def _purge_empty_dicts(self, error_list):
subtree = error_list[-1]
if not error_list[-1]:
error_list.pop()
else:
for key in subtree:
self._purge_empty_dicts(subtree[key])
def _rewrite_error_path(self, error, offset=0):
"""
Recursively rewrites the error path to correctly represent logic errors
"""
if error.is_logic_error:
self._rewrite_logic_error_path(error, offset)
elif error.is_group_error:
self._rewrite_group_error_path(error, offset)
def _rewrite_group_error_path(self, error, offset=0):
child_start = len(error.document_path) - offset
for child_error in error.child_errors:
relative_path = child_error.document_path[child_start:]
child_error.document_path = error.document_path + relative_path
self._rewrite_error_path(child_error, offset)
def _rewrite_logic_error_path(self, error, offset=0):
child_start = len(error.document_path) - offset
for i, definition_errors in error.definitions_errors.items():
if not definition_errors:
continue
nodename = '%s definition %s' % (error.rule, i)
path = error.document_path + (nodename,)
for child_error in definition_errors:
rel_path = child_error.document_path[child_start:]
child_error.document_path = path + rel_path
self._rewrite_error_path(child_error, offset + 1)
class SchemaErrorHandler(BasicErrorHandler):
messages = BasicErrorHandler.messages.copy()
messages[0x03] = "unknown rule"
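if __name__ == "__main__":
    # Minimal sketch, not part of the original module: build a ValidationError
    # by hand (roughly as a Validator would) and feed it to the error handler
    # and the document error tree.
    _error = ValidationError(
        document_path=('name',), schema_path=('name', 'maxlength'),
        code=MAX_LENGTH.code, rule='maxlength',
        constraint=3, value='abcdef', info=(),
    )
    _handler = BasicErrorHandler()
    print(_handler([_error]))            # -> {'name': ['max length is 3']}
    _tree = DocumentErrorTree([_error])
    print(MAX_LENGTH in _tree['name'])   # -> True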
|
import argparse
import sys
from paasta_tools import kubernetes_tools
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_services_for_cluster
def parse_args():
parser = argparse.ArgumentParser(
description="Lists kubernetes instances for a service."
)
parser.add_argument(
"-c",
"--cluster",
dest="cluster",
metavar="CLUSTER",
default=None,
help="define a specific cluster to read from",
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"--sanitise",
action="store_true",
help=(
"Whether or not to sanitise service instance names before displaying "
"them. Kubernets apps created by PaaSTA use sanitised names."
),
)
args = parser.parse_args()
return args
def main():
args = parse_args()
soa_dir = args.soa_dir
cluster = args.cluster
instances = get_services_for_cluster(
cluster=cluster, instance_type="kubernetes", soa_dir=soa_dir
)
service_instances = []
for name, instance in instances:
if args.sanitise:
app_name = kubernetes_tools.get_kubernetes_app_name(name, instance)
else:
app_name = compose_job_id(name, instance)
service_instances.append(app_name)
print("\n".join(service_instances))
sys.exit(0)
if __name__ == "__main__":
main()
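# Rough usage sketch (cluster name and script path are illustrative):
#   python list_kubernetes_service_instances.py --cluster example-cluster --sanitise
# prints one app name per line for every kubernetes instance configured in the cluster.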
|
import logging
import subprocess
import piglow
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
SUPPORT_PIGLOW = SUPPORT_BRIGHTNESS | SUPPORT_COLOR
DEFAULT_NAME = "Piglow"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
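# A hypothetical configuration.yaml snippet matching the schema above:
#   light:
#     - platform: piglow
#       name: Desk Piglow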
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Piglow Light platform."""
if subprocess.getoutput("i2cdetect -q -y 1 | grep -o 54") != "54":
_LOGGER.error("A Piglow device was not found")
return False
name = config.get(CONF_NAME)
add_entities([PiglowLight(name)])
class PiglowLight(LightEntity):
"""Representation of an Piglow Light."""
def __init__(self, name):
"""Initialize an PiglowLight."""
self._name = name
self._is_on = False
self._brightness = 255
self._hs_color = [0, 0]
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def brightness(self):
"""Return the brightness of the light."""
return self._brightness
@property
def hs_color(self):
"""Read back the color of the light."""
return self._hs_color
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_PIGLOW
@property
def should_poll(self):
"""Return if we should poll this device."""
return False
@property
def assumed_state(self) -> bool:
"""Return True if unable to access real state of the entity."""
return True
@property
def is_on(self):
"""Return true if light is on."""
return self._is_on
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
piglow.clear()
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
if ATTR_HS_COLOR in kwargs:
self._hs_color = kwargs[ATTR_HS_COLOR]
rgb = color_util.color_hsv_to_RGB(
self._hs_color[0], self._hs_color[1], self._brightness / 255 * 100
)
piglow.red(rgb[0])
piglow.green(rgb[1])
piglow.blue(rgb[2])
piglow.show()
self._is_on = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
piglow.clear()
piglow.show()
self._is_on = False
self.schedule_update_ha_state()
|
from typing import List
from homeassistant.components.water_heater import (
SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE,
WaterHeaterEntity,
)
from homeassistant.const import STATE_OFF
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import DOMAIN, GeniusHeatingZone
STATE_AUTO = "auto"
STATE_MANUAL = "manual"
# Genius Hub HW zones support only Off, Override/Boost & Timer modes
HA_OPMODE_TO_GH = {STATE_OFF: "off", STATE_AUTO: "timer", STATE_MANUAL: "override"}
GH_STATE_TO_HA = {
"off": STATE_OFF,
"timer": STATE_AUTO,
"footprint": None,
"away": None,
"override": STATE_MANUAL,
"early": None,
"test": None,
"linked": None,
"other": None,
}
GH_HEATERS = ["hot water temperature"]
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Set up the Genius Hub water_heater entities."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
async_add_entities(
[
GeniusWaterHeater(broker, z)
for z in broker.client.zone_objs
if z.data["type"] in GH_HEATERS
]
)
class GeniusWaterHeater(GeniusHeatingZone, WaterHeaterEntity):
"""Representation of a Genius Hub water_heater device."""
def __init__(self, broker, zone) -> None:
"""Initialize the water_heater device."""
super().__init__(broker, zone)
self._max_temp = 80.0
self._min_temp = 30.0
self._supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE
@property
def operation_list(self) -> List[str]:
"""Return the list of available operation modes."""
return list(HA_OPMODE_TO_GH)
@property
def current_operation(self) -> str:
"""Return the current operation mode."""
return GH_STATE_TO_HA[self._zone.data["mode"]]
async def async_set_operation_mode(self, operation_mode) -> None:
"""Set a new operation mode for this boiler."""
await self._zone.set_mode(HA_OPMODE_TO_GH[operation_mode])
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, EVENT_TURN_OFF, EVENT_TURN_ON
TRIGGER_TYPES = {"turn_on", "turn_off"}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
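# A sketch of a trigger config this schema accepts (device and entity ids are
# hypothetical):
#   {
#       "platform": "device",
#       "domain": DOMAIN,
#       "device_id": "abc123",
#       "entity_id": "media_player.kodi_living_room",
#       "type": "turn_on",
#   }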
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Kodi devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
    # Get all the integration's entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain == "media_player":
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_on",
}
)
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_off",
}
)
return triggers
@callback
def _attach_trigger(
hass: HomeAssistant, config: ConfigType, action: AutomationActionType, event_type
):
job = HassJob(action)
@callback
def _handle_event(event: Event):
if event.data[ATTR_ENTITY_ID] == config[CONF_ENTITY_ID]:
hass.async_run_hass_job(
job,
{"trigger": {**config, "description": event_type}},
event.context,
)
return hass.bus.async_listen(event_type, _handle_event)
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
if config[CONF_TYPE] == "turn_on":
return _attach_trigger(hass, config, action, EVENT_TURN_ON)
if config[CONF_TYPE] == "turn_off":
return _attach_trigger(hass, config, action, EVENT_TURN_OFF)
return lambda: None
|
from plexapi.exceptions import NotFound
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
MEDIA_TYPE_MUSIC,
)
from homeassistant.components.plex.const import (
CONF_SERVER,
DOMAIN,
SERVERS,
SERVICE_PLAY_ON_SONOS,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.exceptions import HomeAssistantError
from .const import DEFAULT_OPTIONS, SECONDARY_DATA
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_sonos_playback(hass, mock_plex_server):
"""Test playing media on a Sonos speaker."""
server_id = mock_plex_server.machineIdentifier
loaded_server = hass.data[DOMAIN][SERVERS][server_id]
# Test Sonos integration lookup failure
with patch.object(
hass.components.sonos, "get_coordinator_name", side_effect=HomeAssistantError
):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: '{"library_name": "Music", "artist_name": "Artist", "album_name": "Album"}',
},
True,
)
# Test success with plex_key
with patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
), patch("plexapi.playqueue.PlayQueue.create"):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: "2",
},
True,
)
# Test success with dict
with patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
), patch("plexapi.playqueue.PlayQueue.create"):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: '{"library_name": "Music", "artist_name": "Artist", "album_name": "Album"}',
},
True,
)
# Test media lookup failure
with patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
), patch.object(mock_plex_server, "fetchItem", side_effect=NotFound):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: "999",
},
True,
)
# Test invalid Plex server requested
with patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: '{"plex_server": "unknown_plex_server", "library_name": "Music", "artist_name": "Artist", "album_name": "Album"}',
},
True,
)
# Test no speakers available
with patch.object(
loaded_server.account, "sonos_speaker", return_value=None
), patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
), patch(
"plexapi.playqueue.PlayQueue.create"
):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: '{"library_name": "Music", "artist_name": "Artist", "album_name": "Album"}',
},
True,
)
async def test_playback_multiple_servers(hass, mock_websocket, setup_plex_server):
"""Test playing media when multiple servers available."""
secondary_entry = MockConfigEntry(
domain=DOMAIN,
data=SECONDARY_DATA,
options=DEFAULT_OPTIONS,
unique_id=SECONDARY_DATA["server_id"],
)
await setup_plex_server()
await setup_plex_server(config_entry=secondary_entry)
with patch.object(
hass.components.sonos,
"get_coordinator_name",
return_value="media_player.sonos_kitchen",
), patch("plexapi.playqueue.PlayQueue.create"):
assert await hass.services.async_call(
DOMAIN,
SERVICE_PLAY_ON_SONOS,
{
ATTR_ENTITY_ID: "media_player.sonos_kitchen",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: f'{{"plex_server": "{SECONDARY_DATA[CONF_SERVER]}", "library_name": "Music", "artist_name": "Artist", "album_name": "Album"}}',
},
True,
)
|
import unittest
from subprocess import PIPE, Popen
import sys
class TestScriptsSmoke(unittest.TestCase):
def test_trash_rm_works(self):
self.run_script('trash-rm')
assert "Usage:" in self.stderr.splitlines()
def test_trash_put_works(self):
self.run_script('trash-put')
assert ("Usage: trash-put [OPTION]... FILE..." in
self.stderr.splitlines())
def test_trash_put_touch_filesystem(self):
self.run_script('trash-put', 'non-existent')
assert ("trash-put: cannot trash non existent 'non-existent'\n" ==
self.stderr)
def run_script(self, script, *args):
process = Popen([sys.executable, script] + list(args),
env={'PYTHONPATH':'.'},
stdin=None,
stdout=PIPE,
stderr=PIPE)
(self.stdout, self.stderr) = process.communicate()
self.stderr = self.stderr.decode('utf-8')
process.wait()
self.returncode = process.returncode
|
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
import scipy
import itertools
from sklearn.feature_selection import GenericUnivariateSelect, RFECV, SelectFromModel
def get_feature_selection_model_from_name(type_of_estimator, model_name):
model_map = {
'classifier': {
'SelectFromModel': SelectFromModel(RandomForestClassifier(n_jobs=-1, max_depth=10, n_estimators=15), threshold='20*mean'),
'RFECV': RFECV(estimator=RandomForestClassifier(n_jobs=-1), step=0.1),
'GenericUnivariateSelect': GenericUnivariateSelect(),
'KeepAll': 'KeepAll'
},
'regressor': {
'SelectFromModel': SelectFromModel(RandomForestRegressor(n_jobs=-1, max_depth=10, n_estimators=15), threshold='0.7*mean'),
'RFECV': RFECV(estimator=RandomForestRegressor(n_jobs=-1), step=0.1),
'GenericUnivariateSelect': GenericUnivariateSelect(),
'KeepAll': 'KeepAll'
}
}
return model_map[type_of_estimator][model_name]
class FeatureSelectionTransformer(BaseEstimator, TransformerMixin):
def __init__(self, type_of_estimator, column_descriptions, feature_selection_model='SelectFromModel'):
self.column_descriptions = column_descriptions
self.type_of_estimator = type_of_estimator
self.feature_selection_model = feature_selection_model
def get(self, prop_name, default=None):
try:
return getattr(self, prop_name)
except AttributeError:
return default
def fit(self, X, y=None):
print('Performing feature selection')
self.selector = get_feature_selection_model_from_name(self.type_of_estimator, self.feature_selection_model)
if self.selector == 'KeepAll':
if scipy.sparse.issparse(X):
                # A sparse matrix's column count is shape[1] (shape[0] is the row count).
                num_cols = X.shape[1]
else:
num_cols = len(X[0])
self.support_mask = [True for col_idx in range(num_cols) ]
else:
if self.feature_selection_model == 'SelectFromModel':
num_cols = X.shape[1]
num_rows = X.shape[0]
if self.type_of_estimator == 'regressor':
self.estimator = RandomForestRegressor(n_jobs=-1, max_depth=10, n_estimators=15)
else:
self.estimator = RandomForestClassifier(n_jobs=-1, max_depth=10, n_estimators=15)
self.estimator.fit(X, y)
feature_importances = self.estimator.feature_importances_
# Two ways of doing feature selection
# 1. Any feature with a feature importance of at least 1/100th of our max feature
max_feature_importance = max(feature_importances)
threshold_by_relative_importance = 0.01 * max_feature_importance
# 2. 1/4 the number of rows (so 100 rows means 25 columns)
sorted_importances = sorted(feature_importances, reverse=True)
max_cols = int(num_rows * 0.25)
try:
threshold_by_max_cols = sorted_importances[max_cols]
except IndexError:
threshold_by_max_cols = sorted_importances[-1]
threshold = max(threshold_by_relative_importance, threshold_by_max_cols)
self.support_mask = [True if x > threshold else False for x in feature_importances]
else:
self.selector.fit(X, y)
self.support_mask = self.selector.get_support()
        # Get the indices of the columns we want to keep
        self.index_mask = [idx for idx, val in enumerate(self.support_mask) if val]
return self
def transform(self, X, y=None):
if self.selector == 'KeepAll':
return X
if scipy.sparse.issparse(X):
if X.getformat() == 'csr':
# convert to a csc (column) matrix, rather than a csr (row) matrix
X = X.tocsc()
# Slice that column matrix to only get the relevant columns that we already calculated in fit:
X = X[:, self.index_mask]
# convert back to a csr matrix
return X.tocsr()
# If this is a dense matrix:
else:
X = X[:, self.index_mask]
return X
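if __name__ == '__main__':
    # Minimal sketch, not part of the original module: run the transformer on
    # a small random dense dataset. column_descriptions is only stored by
    # __init__, so an empty dict is enough here.
    import numpy as np

    X_demo = np.random.rand(100, 8)
    y_demo = (X_demo[:, 0] > 0.5).astype(int)
    selector = FeatureSelectionTransformer('classifier', column_descriptions={},
                                           feature_selection_model='SelectFromModel')
    X_reduced = selector.fit(X_demo, y_demo).transform(X_demo)
    print(X_reduced.shape)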
|
import io
import os
import numpy as np
from ..core.indexing import NumpyIndexingAdapter
from ..core.utils import Frozen, FrozenDict, close_on_error, read_magic_number
from ..core.variable import Variable
from .common import BackendArray, BackendEntrypoint, WritableCFDataStore
from .file_manager import CachingFileManager, DummyFileManager
from .locks import ensure_lock, get_write_lock
from .netcdf3 import encode_nc3_attr_value, encode_nc3_variable, is_valid_nc3_name
from .store import open_backend_dataset_store
def _decode_string(s):
if isinstance(s, bytes):
return s.decode("utf-8", "replace")
return s
def _decode_attrs(d):
# don't decode _FillValue from bytes -> unicode, because we want to ensure
# that its type matches the data exactly
return {k: v if k == "_FillValue" else _decode_string(v) for (k, v) in d.items()}
class ScipyArrayWrapper(BackendArray):
def __init__(self, variable_name, datastore):
self.datastore = datastore
self.variable_name = variable_name
array = self.get_variable().data
self.shape = array.shape
self.dtype = np.dtype(array.dtype.kind + str(array.dtype.itemsize))
def get_variable(self, needs_lock=True):
ds = self.datastore._manager.acquire(needs_lock)
return ds.variables[self.variable_name]
def __getitem__(self, key):
data = NumpyIndexingAdapter(self.get_variable().data)[key]
# Copy data if the source file is mmapped. This makes things consistent
# with the netCDF4 library by ensuring we can safely read arrays even
# after closing associated files.
copy = self.datastore.ds.use_mmap
return np.array(data, dtype=self.dtype, copy=copy)
def __setitem__(self, key, value):
with self.datastore.lock:
data = self.get_variable(needs_lock=False)
try:
data[key] = value
except TypeError:
if key is Ellipsis:
# workaround for GH: scipy/scipy#6880
data[:] = value
else:
raise
def _open_scipy_netcdf(filename, mode, mmap, version):
import gzip
import scipy.io
# if the string ends with .gz, then gunzip and open as netcdf file
if isinstance(filename, str) and filename.endswith(".gz"):
try:
return scipy.io.netcdf_file(
gzip.open(filename), mode=mode, mmap=mmap, version=version
)
except TypeError as e:
# TODO: gzipped loading only works with NetCDF3 files.
if "is not a valid NetCDF 3 file" in e.message:
raise ValueError("gzipped file loading only supports NetCDF 3 files.")
else:
raise
if isinstance(filename, bytes) and filename.startswith(b"CDF"):
# it's a NetCDF3 bytestring
filename = io.BytesIO(filename)
try:
return scipy.io.netcdf_file(filename, mode=mode, mmap=mmap, version=version)
except TypeError as e: # netcdf3 message is obscure in this case
errmsg = e.args[0]
if "is not a valid NetCDF 3 file" in errmsg:
msg = """
If this is a NetCDF4 file, you may need to install the
netcdf4 library, e.g.,
$ pip install netcdf4
"""
errmsg += msg
raise TypeError(errmsg)
else:
raise
class ScipyDataStore(WritableCFDataStore):
"""Store for reading and writing data via scipy.io.netcdf.
This store has the advantage of being able to be initialized with a
    StringIO object, allowing for serialization without writing to disk.
It only supports the NetCDF3 file-format.
"""
def __init__(
self, filename_or_obj, mode="r", format=None, group=None, mmap=None, lock=None
):
if group is not None:
raise ValueError("cannot save to a group with the scipy.io.netcdf backend")
if format is None or format == "NETCDF3_64BIT":
version = 2
elif format == "NETCDF3_CLASSIC":
version = 1
else:
raise ValueError("invalid format for scipy.io.netcdf backend: %r" % format)
if lock is None and mode != "r" and isinstance(filename_or_obj, str):
lock = get_write_lock(filename_or_obj)
self.lock = ensure_lock(lock)
if isinstance(filename_or_obj, str):
manager = CachingFileManager(
_open_scipy_netcdf,
filename_or_obj,
mode=mode,
lock=lock,
kwargs=dict(mmap=mmap, version=version),
)
else:
scipy_dataset = _open_scipy_netcdf(
filename_or_obj, mode=mode, mmap=mmap, version=version
)
manager = DummyFileManager(scipy_dataset)
self._manager = manager
@property
def ds(self):
return self._manager.acquire()
def open_store_variable(self, name, var):
return Variable(
var.dimensions,
ScipyArrayWrapper(name, self),
_decode_attrs(var._attributes),
)
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(k, v)) for k, v in self.ds.variables.items()
)
def get_attrs(self):
return Frozen(_decode_attrs(self.ds._attributes))
def get_dimensions(self):
return Frozen(self.ds.dimensions)
def get_encoding(self):
encoding = {}
encoding["unlimited_dims"] = {
k for k, v in self.ds.dimensions.items() if v is None
}
return encoding
def set_dimension(self, name, length, is_unlimited=False):
if name in self.ds.dimensions:
raise ValueError(
"%s does not support modifying dimensions" % type(self).__name__
)
dim_length = length if not is_unlimited else None
self.ds.createDimension(name, dim_length)
def _validate_attr_key(self, key):
if not is_valid_nc3_name(key):
raise ValueError("Not a valid attribute name")
def set_attribute(self, key, value):
self._validate_attr_key(key)
value = encode_nc3_attr_value(value)
setattr(self.ds, key, value)
def encode_variable(self, variable):
variable = encode_nc3_variable(variable)
return variable
def prepare_variable(
self, name, variable, check_encoding=False, unlimited_dims=None
):
if check_encoding and variable.encoding:
if variable.encoding != {"_FillValue": None}:
raise ValueError(
"unexpected encoding for scipy backend: %r"
% list(variable.encoding)
)
data = variable.data
# nb. this still creates a numpy array in all memory, even though we
        # don't write the data yet; scipy.io.netcdf does not support
# incremental writes.
if name not in self.ds.variables:
self.ds.createVariable(name, data.dtype, variable.dims)
scipy_var = self.ds.variables[name]
for k, v in variable.attrs.items():
self._validate_attr_key(k)
setattr(scipy_var, k, v)
target = ScipyArrayWrapper(name, self)
return target, data
def sync(self):
self.ds.sync()
def close(self):
self._manager.close()
def guess_can_open_scipy(store_spec):
try:
return read_magic_number(store_spec).startswith(b"CDF")
except TypeError:
pass
try:
_, ext = os.path.splitext(store_spec)
except TypeError:
return False
return ext in {".nc", ".nc4", ".cdf", ".gz"}
def open_backend_dataset_scipy(
filename_or_obj,
mask_and_scale=True,
decode_times=None,
concat_characters=None,
decode_coords=None,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
mode="r",
format=None,
group=None,
mmap=None,
lock=None,
):
store = ScipyDataStore(
filename_or_obj, mode=mode, format=format, group=group, mmap=mmap, lock=lock
)
with close_on_error(store):
ds = open_backend_dataset_store(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
return ds
scipy_backend = BackendEntrypoint(
open_dataset=open_backend_dataset_scipy, guess_can_open=guess_can_open_scipy
)
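if __name__ == "__main__":
    # Rough usage sketch, not part of the original module; run it with
    # ``python -m xarray.backends.scipy_`` so the relative imports resolve.
    # It writes a tiny NetCDF3 file into memory with scipy and reads it back
    # through ScipyDataStore.
    import scipy.io

    buffer = io.BytesIO()
    nc = scipy.io.netcdf_file(buffer, mode="w")
    nc.createDimension("dim", 3)
    nc_var = nc.createVariable("x", "i", ("dim",))
    nc_var[:] = np.arange(3, dtype="int32")
    nc.flush()

    store = ScipyDataStore(io.BytesIO(buffer.getvalue()))
    print(dict(store.get_dimensions()))  # the single "dim" dimension of size 3
    print(dict(store.get_variables()))   # the lazily wrapped "x" variable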
|
import functools
import logging
import os
import posixpath
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import hadoop
from perfkitbenchmarker.linux_packages import hbase
from perfkitbenchmarker.linux_packages import ycsb
FLAGS = flags.FLAGS
flags.DEFINE_integer('hbase_zookeeper_nodes', 1, 'Number of Zookeeper nodes.')
flags.DEFINE_boolean('hbase_use_snappy', True,
'Whether to use snappy compression.')
BENCHMARK_NAME = 'hbase_ycsb'
BENCHMARK_CONFIG = """
hbase_ycsb:
description: >
Run YCSB against HBase. Specify the HBase
cluster size with --num_vms. Specify the number of YCSB VMs
with --ycsb_client_vms.
vm_groups:
clients:
vm_spec: *default_single_core
master:
vm_spec: *default_single_core
disk_spec: *default_500_gb
workers:
vm_spec: *default_single_core
disk_spec: *default_500_gb
"""
HBASE_SITE = 'hbase-site.xml'
CREATE_TABLE_SCRIPT = 'hbase/create-ycsb-table.hbaseshell.j2'
TABLE_NAME = 'usertable'
COLUMN_FAMILY = 'cf'
TABLE_SPLIT_COUNT = 200
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
num_vms = max(FLAGS.num_vms, 2)
if FLAGS['num_vms'].present and FLAGS.num_vms < 2:
raise ValueError('hbase_ycsb requires at least 2 HBase VMs.')
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['clients']['vm_count'] = FLAGS.ycsb_client_vms
if FLAGS['num_vms'].present:
config['vm_groups']['workers']['vm_count'] = num_vms - 1
return config
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
hbase.CheckPrerequisites()
hadoop.CheckPrerequisites()
ycsb.CheckPrerequisites()
def CreateYCSBTable(vm, table_name=TABLE_NAME, family=COLUMN_FAMILY,
n_splits=TABLE_SPLIT_COUNT, limit_filesize=True,
use_snappy=True):
"""Create a table for use with YCSB.
Args:
vm: Virtual machine from which to create the table.
table_name: Name for the table.
family: Column family name.
    n_splits: Initial number of regions for the table. Default follows
        HBASE-4163.
    limit_filesize: boolean. Should the file size be limited to 4GB?
    use_snappy: boolean. Should Snappy compression be used for the column
        family?
"""
# See: https://issues.apache.org/jira/browse/HBASE-4163
template_path = data.ResourcePath(CREATE_TABLE_SCRIPT)
remote = posixpath.join(hbase.HBASE_DIR,
os.path.basename(os.path.splitext(template_path)[0]))
vm.RenderTemplate(template_path, remote,
context={'table_name': table_name,
'family': family,
'limit_filesize': limit_filesize,
'n_splits': n_splits,
'use_snappy': use_snappy})
# TODO(connormccoy): on HBase update, add '-n' flag.
command = "{0}/hbase shell {1}".format(hbase.HBASE_BIN, remote)
vm.RemoteCommand(command, should_log=True)
def _GetVMsByRole(vm_groups):
"""Partition "vms" by role in the benchmark.
* The first VM is the master.
* The first FLAGS.hbase_zookeeper_nodes form the Zookeeper quorum.
* The last FLAGS.ycsb_client_vms are loader nodes.
* The nodes which are neither the master nor loaders are HBase region servers.
Args:
vm_groups: The benchmark_spec's vm_groups dict.
Returns:
A dictionary with keys 'vms', 'hbase_vms', 'master', 'zk_quorum', 'workers',
and 'clients'.
"""
hbase_vms = vm_groups['master'] + vm_groups['workers']
vms = hbase_vms + vm_groups['clients']
return {'vms': vms,
'hbase_vms': hbase_vms,
'master': vm_groups['master'][0],
'zk_quorum': hbase_vms[:FLAGS.hbase_zookeeper_nodes],
'workers': vm_groups['workers'],
'clients': vm_groups['clients']}
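# Illustrative (hypothetical) layout: with --hbase_zookeeper_nodes=1 and
# vm_groups {'master': [vm0], 'workers': [vm1, vm2], 'clients': [vm3]},
# _GetVMsByRole returns master=vm0, zk_quorum=[vm0], workers=[vm1, vm2],
# clients=[vm3] and hbase_vms=[vm0, vm1, vm2].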
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run hadoop.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
by_role = _GetVMsByRole(benchmark_spec.vm_groups)
loaders = by_role['clients']
assert loaders, 'No loader VMs: {0}'.format(by_role)
# HBase cluster
hbase_vms = by_role['hbase_vms']
assert hbase_vms, 'No HBase VMs: {0}'.format(by_role)
master = by_role['master']
zk_quorum = by_role['zk_quorum']
assert zk_quorum, 'No zookeeper quorum: {0}'.format(by_role)
workers = by_role['workers']
assert workers, 'No workers: {0}'.format(by_role)
hbase_install_fns = [functools.partial(vm.Install, 'hbase')
for vm in hbase_vms]
ycsb_install_fns = [functools.partial(vm.Install, 'ycsb')
for vm in loaders]
vm_util.RunThreaded(lambda f: f(), hbase_install_fns + ycsb_install_fns)
hadoop.ConfigureAndStart(master, workers, start_yarn=False)
hbase.ConfigureAndStart(master, workers, zk_quorum)
CreateYCSBTable(master, use_snappy=FLAGS.hbase_use_snappy)
# Populate hbase-site.xml on the loaders.
master.PullFile(
vm_util.GetTempDir(),
posixpath.join(hbase.HBASE_CONF_DIR, HBASE_SITE))
def PushHBaseSite(vm):
conf_dir = posixpath.join(ycsb.YCSB_DIR, 'hbase10-binding', 'conf')
vm.RemoteCommand('mkdir -p {}'.format(conf_dir))
vm.PushFile(
os.path.join(vm_util.GetTempDir(), HBASE_SITE),
posixpath.join(conf_dir, HBASE_SITE))
vm_util.RunThreaded(PushHBaseSite, loaders)
benchmark_spec.executor = ycsb.YCSBExecutor('hbase10')
def Run(benchmark_spec):
"""Spawn YCSB and gather the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
by_role = _GetVMsByRole(benchmark_spec.vm_groups)
loaders = by_role['clients']
logging.info('Loaders: %s', loaders)
metadata = {
'ycsb_client_vms': len(loaders),
'hbase_cluster_size': len(by_role['hbase_vms']),
'hbase_zookeeper_nodes': FLAGS.hbase_zookeeper_nodes,
'hbase_version': hbase.GetHBaseVersion(by_role['hbase_vms'][0]),
}
# By default YCSB uses a BufferedMutator for Puts / Deletes.
  # This leads to incorrect update latencies, since the call returns
# before the request is acked by the server.
# Disable this behavior during the benchmark run.
run_kwargs = {'columnfamily': COLUMN_FAMILY,
'clientbuffering': 'false'}
load_kwargs = run_kwargs.copy()
# During the load stage, use a buffered mutator with a single thread.
# The BufferedMutator will handle multiplexing RPCs.
load_kwargs['clientbuffering'] = 'true'
if not FLAGS['ycsb_preload_threads'].present:
load_kwargs['threads'] = 1
samples = list(benchmark_spec.executor.LoadAndRun(
loaders, load_kwargs=load_kwargs, run_kwargs=run_kwargs))
for sample in samples:
sample.metadata.update(metadata)
return samples
def Cleanup(benchmark_spec):
"""Cleanup.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
by_role = _GetVMsByRole(benchmark_spec.vm_groups)
hbase.Stop(by_role['master'])
hadoop.StopHDFS(by_role['master'])
vm_util.RunThreaded(hadoop.CleanDatanode, by_role['workers'])
|
import mock
from paasta_tools.cli.cmds.autoscale import paasta_autoscale
@mock.patch("paasta_tools.cli.cmds.autoscale.figure_out_service_name", autospec=True)
@mock.patch(
"paasta_tools.cli.cmds.autoscale.client.get_paasta_oapi_client", autospec=True
)
@mock.patch("paasta_tools.cli.cmds.autoscale._log_audit", autospec=True)
def test_paasta_autoscale(
mock__log_audit, mock_get_paasta_oapi_client, mock_figure_out_service_name
):
service = "fake_service"
instance = "fake_instance"
cluster = "fake_cluster"
mock_figure_out_service_name.return_value = service
mock_api = mock.Mock()
mock_get_paasta_oapi_client.return_value = mock.Mock(autoscaler=mock_api)
args = mock.MagicMock()
args.service = service
args.clusters = cluster
args.instances = instance
args.set = 14
mock_api.update_autoscaler_count.return_value = (
mock.Mock(desired_instances=14),
200,
None,
)
mock__log_audit.return_value = None
paasta_autoscale(args)
assert mock_api.update_autoscaler_count.call_count == 1
|
import arrow
from sqlalchemy.orm import relationship
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql.expression import case
from sqlalchemy_utils import ArrowType
from lemur.database import db
from lemur.models import policies_ciphers
BAD_CIPHERS = ["Protocol-SSLv3", "Protocol-SSLv2", "Protocol-TLSv1"]
class Cipher(db.Model):
__tablename__ = "ciphers"
id = Column(Integer, primary_key=True)
name = Column(String(128), nullable=False)
@hybrid_property
def deprecated(self):
return self.name in BAD_CIPHERS
@deprecated.expression
def deprecated(cls):
        # Plain ``in`` on a Column does not build a SQL expression; use ``in_``.
        return case([(cls.name.in_(BAD_CIPHERS), True)], else_=False)
class Policy(db.Model):
___tablename__ = "policies"
id = Column(Integer, primary_key=True)
name = Column(String(128), nullable=True)
ciphers = relationship("Cipher", secondary=policies_ciphers, backref="policy")
class Endpoint(db.Model):
__tablename__ = "endpoints"
id = Column(Integer, primary_key=True)
owner = Column(String(128))
name = Column(String(128))
dnsname = Column(String(256))
type = Column(String(128))
active = Column(Boolean, default=True)
port = Column(Integer)
policy_id = Column(Integer, ForeignKey("policy.id"))
policy = relationship("Policy", backref="endpoint")
certificate_id = Column(Integer, ForeignKey("certificates.id"))
source_id = Column(Integer, ForeignKey("sources.id"))
sensitive = Column(Boolean, default=False)
source = relationship("Source", back_populates="endpoints")
last_updated = Column(ArrowType, default=arrow.utcnow, nullable=False)
date_created = Column(
ArrowType, default=arrow.utcnow, onupdate=arrow.utcnow, nullable=False
)
replaced = association_proxy("certificate", "replaced")
@property
def issues(self):
issues = []
for cipher in self.policy.ciphers:
if cipher.deprecated:
issues.append(
{
"name": "deprecated cipher",
"value": "{0} has been deprecated consider removing it.".format(
cipher.name
),
}
)
if self.certificate.expired:
issues.append(
{
"name": "expired certificate",
"value": "There is an expired certificate attached to this endpoint consider replacing it.",
}
)
if self.certificate.revoked:
issues.append(
{
"name": "revoked",
"value": "There is a revoked certificate attached to this endpoint consider replacing it.",
}
)
return issues
def __repr__(self):
return "Endpoint(name={name})".format(name=self.name)
|
from google_nest_sdm.device import Device
from google_nest_sdm.device_traits import InfoTrait
from .const import DOMAIN
DEVICE_TYPE_MAP = {
"sdm.devices.types.CAMERA": "Camera",
"sdm.devices.types.DISPLAY": "Display",
"sdm.devices.types.DOORBELL": "Doorbell",
"sdm.devices.types.THERMOSTAT": "Thermostat",
}
class DeviceInfo:
"""Provide device info from the SDM device, shared across platforms."""
device_brand = "Google Nest"
def __init__(self, device: Device):
"""Initialize the DeviceInfo."""
self._device = device
@property
def device_info(self):
"""Return device specific attributes."""
return {
# The API "name" field is a unique device identifier.
"identifiers": {(DOMAIN, self._device.name)},
"name": self.device_name,
"manufacturer": self.device_brand,
"model": self.device_model,
}
@property
def device_name(self):
"""Return the name of the physical device that includes the sensor."""
if InfoTrait.NAME in self._device.traits:
trait = self._device.traits[InfoTrait.NAME]
if trait.custom_name:
return trait.custom_name
# Build a name from the room/structure. Note: This room/structure name
# is not associated with a home assistant Area.
parent_relations = self._device.parent_relations
if parent_relations:
items = sorted(parent_relations.items())
            names = [name for _, name in items]
return " ".join(names)
return self.device_model
@property
def device_model(self):
"""Return device model information."""
# The API intentionally returns minimal information about specific
# devices, instead relying on traits, but we can infer a generic model
# name based on the type
return DEVICE_TYPE_MAP.get(self._device.type)
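# A minimal sketch of the resulting mapping for a hypothetical doorbell device:
#   DeviceInfo(device).device_info ->
#   {"identifiers": {(DOMAIN, "enterprises/abc/devices/123")}, "name": "Front Door",
#    "manufacturer": "Google Nest", "model": "Doorbell"}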
|
import pytest
from molecule.model import schema_v2
@pytest.fixture
def _model_verifier_section_data():
return {
'verifier': {
'name': 'testinfra',
'enabled': True,
'directory': 'foo',
'options': {
'foo': 'bar'
},
'env': {
'FOO': 'foo',
'FOO_BAR': 'foo_bar',
},
'additional_files_or_dirs': [
'foo',
],
'lint': {
'name': 'flake8',
'enabled': True,
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'foo',
'FOO_BAR': 'foo_bar',
},
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_verifier_section_data'], indirect=True)
def test_verifier(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_verifier_errors_section_data():
return {
'verifier': {
'name': int(),
'enabled': str(),
'directory': int(),
'options': [],
'env': {
'foo': 'foo',
'foo-bar': 'foo-bar',
},
'additional_files_or_dirs': [
int(),
],
'lint': {
'name': int(),
'enabled': str(),
'options': [],
'env': {
'foo': 'foo',
'foo-bar': 'foo-bar',
},
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_verifier_errors_section_data'], indirect=True)
def test_verifier_has_errors(_config):
x = {
'verifier': [{
'name': ['must be of string type'],
'lint': [{
'enabled': ['must be of boolean type'],
'name': ['must be of string type'],
'env': [{
'foo': ["value does not match regex '^[A-Z0-9_-]+$'"],
'foo-bar': ["value does not match regex '^[A-Z0-9_-]+$'"],
}],
'options': ['must be of dict type'],
}],
'enabled': ['must be of boolean type'],
'env': [{
'foo': ["value does not match regex '^[A-Z0-9_-]+$'"],
'foo-bar': ["value does not match regex '^[A-Z0-9_-]+$'"],
}],
'directory': ['must be of string type'],
'additional_files_or_dirs': [{
0: ['must be of string type'],
}],
'options': ['must be of dict type'],
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_verifier_allows_testinfra_section_data():
return {
'verifier': {
'name': 'testinfra',
'lint': {
'name': 'flake8',
},
}
}
@pytest.fixture
def _model_verifier_allows_inspec_section_data():
return {
'verifier': {
'name': 'inspec',
'lint': {
'name': 'rubocop',
},
}
}
@pytest.fixture
def _model_verifier_allows_goss_section_data():
return {
'verifier': {
'name': 'goss',
'lint': {
'name': 'yamllint',
},
}
}
@pytest.mark.parametrize(
'_config', [
('_model_verifier_allows_testinfra_section_data'),
('_model_verifier_allows_inspec_section_data'),
('_model_verifier_allows_goss_section_data'),
],
indirect=True)
def test_verifier_allows_name(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_verifier_errors_inspec_readonly_options_section_data():
return {
'verifier': {
'name': 'inspec',
'options': {
'foo': 'bar',
},
'lint': {
'name': 'rubocop',
},
}
}
@pytest.fixture
def _model_verifier_errors_goss_readonly_options_section_data():
return {
'verifier': {
'name': 'goss',
'options': {
'foo': 'bar',
},
'lint': {
'name': 'yamllint',
},
}
}
@pytest.mark.parametrize(
'_config', [
('_model_verifier_errors_inspec_readonly_options_section_data'),
('_model_verifier_errors_goss_readonly_options_section_data'),
],
indirect=True)
def test_verifier_errors_readonly_options_section_data(_config):
x = {'verifier': [{'options': [{'foo': ['field is read-only']}]}]}
assert x == schema_v2.validate(_config)
|
import fnmatch
import itertools
import json
import logging
import os
import re
from typing import List
from urllib.parse import urljoin
from urllib.parse import urlparse
import aiohttp
from kazoo.handlers.threading import KazooTimeoutError
from kazoo.retry import KazooRetry
from mypy_extensions import TypedDict
from retry import retry
from . import exceptions
from . import framework
from . import log
from . import mesos_file
from . import slave
from . import task
from . import util
from . import zookeeper
from paasta_tools.async_utils import async_ttl_cache
from paasta_tools.utils import get_user_agent
ZOOKEEPER_TIMEOUT = 1
INVALID_PATH = "{0} does not have a valid path. Did you forget /mesos?"
MISSING_MASTER = """unable to connect to a master at {0}.
Try running `mesos config master zk://localhost:2181/mesos`. See the README for
more examples."""
MULTIPLE_SLAVES = "There are multiple slaves with that id. Please choose one: "
logger = logging.getLogger(__name__)
class MesosState(TypedDict):
slaves: List
frameworks: List
orphan_tasks: List
MesosMetrics = TypedDict(
"MesosMetrics",
{
"master/cpus_total": int,
"master/cpus_used": int,
"master/disk_total": int,
"master/disk_used": int,
"master/gpus_total": int,
"master/gpus_used": int,
"master/mem_total": int,
"master/mem_used": int,
"master/tasks_running": int,
"master/tasks_staging": int,
"master/tasks_starting": int,
"master/slaves_active": int,
"master/slaves_inactive": int,
},
)
class MesosMaster:
def __init__(self, config):
self.config = config
def __str__(self):
return "<master: {}>".format(self.key())
def key(self):
return self.config["master"]
@util.CachedProperty(ttl=5)
def host(self):
return "{}://{}".format(
self.config["scheme"], self.resolve(self.config["master"])
)
@util.CachedProperty(ttl=5)
def cache_host(self):
host_url = urlparse(self.host)
replaced = host_url._replace(netloc=host_url.hostname + ":5055")
return replaced.geturl()
async def _request(
self, url: str, method: str = "GET", cached: bool = False, **kwargs
) -> aiohttp.ClientResponse:
headers = {"User-Agent": get_user_agent()}
if cached and self.config.get("use_mesos_cache", False):
# TODO: fall back to original host if this fails?
host = self.cache_host
else:
host = self.host
try:
async with aiohttp.ClientSession(
conn_timeout=self.config["response_timeout"],
read_timeout=self.config["response_timeout"],
) as session:
async with session.request(
method=method, url=urljoin(host, url), headers=headers, **kwargs
) as resp:
# if nobody awaits resp.text() or resp.json() before we exit the session context manager, then the
# http connection gets closed before we read the response; then later calls to resp.text/json will
# fail.
await resp.text()
return resp
except aiohttp.client_exceptions.ClientConnectionError:
raise exceptions.MasterNotAvailableException(MISSING_MASTER.format(host))
except aiohttp.client_exceptions.TooManyRedirects:
raise exceptions.MasterTemporarilyNotAvailableException(
(
"Unable to connect to master at %s, likely due to "
"an ongoing leader election"
)
% host
)
async def fetch(self, url, **kwargs):
return await self._request(url, **kwargs)
async def post(self, url, **kwargs):
return await self._request(url, method="POST", **kwargs)
def _file_resolver(self, cfg):
return self.resolve(open(cfg[6:], "r+").read().strip())
@retry(KazooTimeoutError, tries=5, delay=0.5, logger=logger)
def _zookeeper_resolver(self, cfg):
hosts, path = cfg[5:].split("/", 1)
path = "/" + path
retry = KazooRetry(max_tries=10)
with zookeeper.client(
hosts=hosts, read_only=True, connection_retry=retry, command_retry=retry
) as zk:
def master_id(key):
return int(key.split("_")[-1])
def get_masters():
return [x for x in zk.get_children(path) if re.search(r"\d+", x)]
leader = sorted(get_masters(), key=lambda x: master_id(x))
if len(leader) == 0:
raise exceptions.MasterNotAvailableException(
f"cannot find any masters at {cfg}"
)
data, stat = zk.get(os.path.join(path, leader[0]))
            if not data:
                raise exceptions.MasterNotAvailableException(
                    "Cannot retrieve valid MasterInfo data from ZooKeeper"
                )
else:
data = data.decode("utf8")
try:
parsed = json.loads(data)
if parsed and "address" in parsed:
ip = parsed["address"].get("ip")
port = parsed["address"].get("port")
if ip and port:
return f"{ip}:{port}"
except ValueError as parse_error:
log.debug(
"[WARN] No JSON content, probably connecting to older "
"Mesos version. Reason: {}".format(parse_error)
)
raise exceptions.MasterNotAvailableException(
"Failed to parse mesos master ip from ZK"
)
@log.duration
def resolve(self, cfg):
"""Resolve the URL to the mesos master.
The value of cfg should be one of:
- host:port
- zk://host1:port1,host2:port2/path
- zk://username:password@host1:port1/path
- file:///path/to/file (where file contains one of the above)
"""
if cfg.startswith("zk:"):
return self._zookeeper_resolver(cfg)
elif cfg.startswith("file:"):
return self._file_resolver(cfg)
else:
return cfg
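    # A minimal sketch of the accepted forms (hypothetical values):
    #   master.resolve("10.0.0.1:5050")                -> "10.0.0.1:5050"
    #   master.resolve("zk://zk1:2181,zk2:2181/mesos") -> "ip:port" of the elected leader
    #   master.resolve("file:///etc/mesos_master")     -> resolves the file's contents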
@async_ttl_cache(ttl=15, cleanup_self=True)
async def state(self) -> MesosState:
return await (await self.fetch("/master/state.json", cached=True)).json()
async def state_summary(self) -> MesosState:
return await (await self.fetch("/master/state-summary")).json()
@async_ttl_cache(ttl=None, cleanup_self=True)
async def slave(self, fltr):
lst = await self.slaves(fltr)
log.debug(f"master.slave({fltr})")
if len(lst) == 0:
raise exceptions.SlaveDoesNotExist(f"Slave {fltr} no longer exists.")
elif len(lst) > 1:
raise exceptions.MultipleSlavesForIDError(
"Multiple slaves matching filter {}. {}".format(
fltr, ",".join([slave.id for slave in lst])
)
)
return lst[0]
async def slaves(self, fltr=""):
return [
slave.MesosSlave(self.config, x)
for x in (await self.state())["slaves"]
if fltr == x["id"]
]
async def _task_list(self, active_only=False):
keys = ["tasks"]
if not active_only:
keys.append("completed_tasks")
return itertools.chain(
*[util.merge(x, *keys) for x in await self._framework_list(active_only)]
)
async def task(self, fltr):
lst = await self.tasks(fltr)
if len(lst) == 0:
raise exceptions.TaskNotFoundException(
"Cannot find a task with filter %s" % fltr
)
elif len(lst) > 1:
raise exceptions.MultipleTasksForIDError(
"Multiple tasks matching filter {}. {}".format(
fltr, ",".join([task.id for task in lst])
)
)
return lst[0]
async def orphan_tasks(self):
return (await self.state())["orphan_tasks"]
# XXX - need to filter on task state as well as id
async def tasks(self, fltr="", active_only=False):
return [
task.Task(self, x)
for x in await self._task_list(active_only)
if fltr in x["id"] or fnmatch.fnmatch(x["id"], fltr)
]
async def framework(self, fwid):
return list(filter(lambda x: x.id == fwid, await self.frameworks()))[0]
async def _framework_list(self, active_only=False):
keys = ["frameworks"]
if not active_only:
keys.append("completed_frameworks")
return util.merge(await self._frameworks(), *keys)
@async_ttl_cache(ttl=15, cleanup_self=True)
async def _frameworks(self):
return await (await self.fetch("/master/frameworks", cached=True)).json()
async def frameworks(self, active_only=False):
return [framework.Framework(f) for f in await self._framework_list(active_only)]
async def teardown(self, framework_id):
return await self.post("/master/teardown", data="frameworkId=%s" % framework_id)
async def metrics_snapshot(self) -> MesosMetrics:
return await (await self.fetch("/metrics/snapshot")).json()
@property # type: ignore
@util.memoize
def log(self):
return mesos_file.File(self, path="/master/log")
|
import asyncio
from io import BytesIO
from typing import Any, Dict, Optional, Tuple
import voluptuous as vol
from homeassistant.auth.models import User
from homeassistant.core import HomeAssistant
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
MULTI_FACTOR_AUTH_MODULES,
MultiFactorAuthModule,
SetupFlow,
)
REQUIREMENTS = ["pyotp==2.3.0", "PyQRCode==1.2.1"]
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
STORAGE_VERSION = 1
STORAGE_KEY = "auth_module.totp"
STORAGE_USERS = "users"
STORAGE_USER_ID = "user_id"
STORAGE_OTA_SECRET = "ota_secret"
INPUT_FIELD_CODE = "code"
DUMMY_SECRET = "FPPTH34D4E3MI2HG"
def _generate_qr_code(data: str) -> str:
"""Generate a base64 PNG string represent QR Code image of data."""
import pyqrcode # pylint: disable=import-outside-toplevel
qr_code = pyqrcode.create(data)
with BytesIO() as buffer:
qr_code.svg(file=buffer, scale=4)
return str(
buffer.getvalue()
.decode("ascii")
.replace("\n", "")
.replace(
'<?xml version="1.0" encoding="UTF-8"?>'
'<svg xmlns="http://www.w3.org/2000/svg"',
"<svg",
)
)
def _generate_secret_and_qr_code(username: str) -> Tuple[str, str, str]:
"""Generate a secret, url, and QR code."""
import pyotp # pylint: disable=import-outside-toplevel
ota_secret = pyotp.random_base32()
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
username, issuer_name="Home Assistant"
)
image = _generate_qr_code(url)
return ota_secret, url, image
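# A minimal standalone sketch of the pyotp calls used above (outside Home Assistant):
#   import pyotp
#   secret = pyotp.random_base32()
#   uri = pyotp.totp.TOTP(secret).provisioning_uri("user@example.com", issuer_name="Home Assistant")
#   pyotp.TOTP(secret).verify(pyotp.TOTP(secret).now(), valid_window=1)  # -> True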
@MULTI_FACTOR_AUTH_MODULES.register("totp")
class TotpAuthModule(MultiFactorAuthModule):
"""Auth module validate time-based one time password."""
DEFAULT_TITLE = "Time-based One Time Password"
MAX_RETRY_TIME = 5
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
"""Initialize the user data store."""
super().__init__(hass, config)
self._users: Optional[Dict[str, str]] = None
self._user_store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._init_lock = asyncio.Lock()
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({INPUT_FIELD_CODE: str})
async def _async_load(self) -> None:
"""Load stored data."""
async with self._init_lock:
if self._users is not None:
return
data = await self._user_store.async_load()
if data is None:
data = {STORAGE_USERS: {}}
self._users = data.get(STORAGE_USERS, {})
async def _async_save(self) -> None:
"""Save data."""
await self._user_store.async_save({STORAGE_USERS: self._users})
def _add_ota_secret(self, user_id: str, secret: Optional[str] = None) -> str:
"""Create a ota_secret for user."""
import pyotp # pylint: disable=import-outside-toplevel
ota_secret: str = secret or pyotp.random_base32()
self._users[user_id] = ota_secret # type: ignore
return ota_secret
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow
"""
user = await self.hass.auth.async_get_user(user_id)
assert user is not None
return TotpSetupFlow(self, self.input_schema, user)
async def async_setup_user(self, user_id: str, setup_data: Any) -> str:
"""Set up auth module for user."""
if self._users is None:
await self._async_load()
result = await self.hass.async_add_executor_job(
self._add_ota_secret, user_id, setup_data.get("secret")
)
await self._async_save()
return result
async def async_depose_user(self, user_id: str) -> None:
"""Depose auth module for user."""
if self._users is None:
await self._async_load()
if self._users.pop(user_id, None): # type: ignore
await self._async_save()
async def async_is_user_setup(self, user_id: str) -> bool:
"""Return whether user is setup."""
if self._users is None:
await self._async_load()
return user_id in self._users # type: ignore
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
"""Return True if validation passed."""
if self._users is None:
await self._async_load()
        # user_input has already been validated by the caller;
        # making INPUT_FIELD_CODE vol.Required would not be user friendly
return await self.hass.async_add_executor_job(
self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, "")
)
def _validate_2fa(self, user_id: str, code: str) -> bool:
"""Validate two factor authentication code."""
import pyotp # pylint: disable=import-outside-toplevel
ota_secret = self._users.get(user_id) # type: ignore
if ota_secret is None:
            # Even if we cannot find the user, we still run the verification
            # so the timing matches the case where the user exists.
pyotp.TOTP(DUMMY_SECRET).verify(code, valid_window=1)
return False
return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))
class TotpSetupFlow(SetupFlow):
"""Handler for the setup flow."""
def __init__(
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
) -> None:
"""Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user.id)
# to fix typing complaint
self._auth_module: TotpAuthModule = auth_module
self._user = user
self._ota_secret: Optional[str] = None
        self._url = None  # type: Optional[str]
        self._image = None  # type: Optional[str]
async def async_step_init(
self, user_input: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
"""Handle the first step of setup flow.
Return self.async_show_form(step_id='init') if user_input is None.
Return self.async_create_entry(data={'result': result}) if finish.
"""
import pyotp # pylint: disable=import-outside-toplevel
errors: Dict[str, str] = {}
if user_input:
verified = await self.hass.async_add_executor_job( # type: ignore
pyotp.TOTP(self._ota_secret).verify, user_input["code"]
)
if verified:
result = await self._auth_module.async_setup_user(
self._user_id, {"secret": self._ota_secret}
)
return self.async_create_entry(
title=self._auth_module.name, data={"result": result}
)
errors["base"] = "invalid_code"
else:
hass = self._auth_module.hass
(
self._ota_secret,
self._url,
self._image,
) = await hass.async_add_executor_job(
_generate_secret_and_qr_code, # type: ignore
str(self._user.name),
)
return self.async_show_form(
step_id="init",
data_schema=self._setup_schema,
description_placeholders={
"code": self._ota_secret,
"url": self._url,
"qr_code": self._image,
},
errors=errors,
)
|
import logging
from homeassistant.components.lock import LockEntity
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
entities = [
TeslaLock(
device,
hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"],
)
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"]["lock"]
]
async_add_entities(entities, True)
class TeslaLock(TeslaDevice, LockEntity):
"""Representation of a Tesla door lock."""
async def async_lock(self, **kwargs):
"""Send the lock command."""
_LOGGER.debug("Locking doors for: %s", self.name)
await self.tesla_device.lock()
async def async_unlock(self, **kwargs):
"""Send the unlock command."""
_LOGGER.debug("Unlocking doors for: %s", self.name)
await self.tesla_device.unlock()
@property
def is_locked(self):
"""Get whether the lock is in locked state."""
if self.tesla_device.is_locked() is None:
return None
return self.tesla_device.is_locked()
|
import gc
import pickle
import threading
from unittest import mock
import pytest
from xarray.backends.file_manager import CachingFileManager
from xarray.backends.lru_cache import LRUCache
from xarray.core.options import set_options
@pytest.fixture(params=[1, 2, 3, None])
def file_cache(request):
maxsize = request.param
if maxsize is None:
yield {}
else:
yield LRUCache(maxsize)
def test_file_manager_mock_write(file_cache):
mock_file = mock.Mock()
opener = mock.Mock(spec=open, return_value=mock_file)
lock = mock.MagicMock(spec=threading.Lock())
manager = CachingFileManager(opener, "filename", lock=lock, cache=file_cache)
f = manager.acquire()
f.write("contents")
manager.close()
assert not file_cache
opener.assert_called_once_with("filename")
mock_file.write.assert_called_once_with("contents")
mock_file.close.assert_called_once_with()
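    # The lock is entered twice: once when acquiring the file and once when closing it.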
lock.__enter__.assert_has_calls([mock.call(), mock.call()])
@pytest.mark.parametrize("expected_warning", [None, RuntimeWarning])
def test_file_manager_autoclose(expected_warning):
mock_file = mock.Mock()
opener = mock.Mock(return_value=mock_file)
cache = {}
manager = CachingFileManager(opener, "filename", cache=cache)
manager.acquire()
assert cache
with set_options(warn_for_unclosed_files=expected_warning is not None):
with pytest.warns(expected_warning):
del manager
gc.collect()
assert not cache
mock_file.close.assert_called_once_with()
def test_file_manager_autoclose_while_locked():
opener = mock.Mock()
lock = threading.Lock()
cache = {}
manager = CachingFileManager(opener, "filename", lock=lock, cache=cache)
manager.acquire()
assert cache
lock.acquire()
with set_options(warn_for_unclosed_files=False):
del manager
gc.collect()
# can't clear the cache while locked, but also don't block in __del__
assert cache
def test_file_manager_repr():
opener = mock.Mock()
manager = CachingFileManager(opener, "my-file")
assert "my-file" in repr(manager)
def test_file_manager_refcounts():
mock_file = mock.Mock()
opener = mock.Mock(spec=open, return_value=mock_file)
cache = {}
ref_counts = {}
manager = CachingFileManager(opener, "filename", cache=cache, ref_counts=ref_counts)
assert ref_counts[manager._key] == 1
manager.acquire()
assert cache
manager2 = CachingFileManager(
opener, "filename", cache=cache, ref_counts=ref_counts
)
assert cache
assert manager._key == manager2._key
assert ref_counts[manager._key] == 2
with set_options(warn_for_unclosed_files=False):
del manager
gc.collect()
assert cache
assert ref_counts[manager2._key] == 1
mock_file.close.assert_not_called()
with set_options(warn_for_unclosed_files=False):
del manager2
gc.collect()
assert not ref_counts
assert not cache
def test_file_manager_replace_object():
opener = mock.Mock()
cache = {}
ref_counts = {}
manager = CachingFileManager(opener, "filename", cache=cache, ref_counts=ref_counts)
manager.acquire()
assert ref_counts[manager._key] == 1
assert cache
manager = CachingFileManager(opener, "filename", cache=cache, ref_counts=ref_counts)
assert ref_counts[manager._key] == 1
assert cache
manager.close()
def test_file_manager_write_consecutive(tmpdir, file_cache):
path1 = str(tmpdir.join("testing1.txt"))
path2 = str(tmpdir.join("testing2.txt"))
manager1 = CachingFileManager(open, path1, mode="w", cache=file_cache)
manager2 = CachingFileManager(open, path2, mode="w", cache=file_cache)
f1a = manager1.acquire()
f1a.write("foo")
f1a.flush()
f2 = manager2.acquire()
f2.write("bar")
f2.flush()
f1b = manager1.acquire()
f1b.write("baz")
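    # With a cache of maxsize 1, acquiring manager2's file evicts manager1's entry,
    # so re-acquiring from manager1 opens a new file object; with a larger cache the
    # original file object is still cached and f1a is f1b.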
assert (getattr(file_cache, "maxsize", float("inf")) > 1) == (f1a is f1b)
manager1.close()
manager2.close()
with open(path1) as f:
assert f.read() == "foobaz"
with open(path2) as f:
assert f.read() == "bar"
def test_file_manager_write_concurrent(tmpdir, file_cache):
path = str(tmpdir.join("testing.txt"))
manager = CachingFileManager(open, path, mode="w", cache=file_cache)
f1 = manager.acquire()
f2 = manager.acquire()
f3 = manager.acquire()
assert f1 is f2
assert f2 is f3
f1.write("foo")
f1.flush()
f2.write("bar")
f2.flush()
f3.write("baz")
f3.flush()
manager.close()
with open(path) as f:
assert f.read() == "foobarbaz"
def test_file_manager_write_pickle(tmpdir, file_cache):
path = str(tmpdir.join("testing.txt"))
manager = CachingFileManager(open, path, mode="w", cache=file_cache)
f = manager.acquire()
f.write("foo")
f.flush()
manager2 = pickle.loads(pickle.dumps(manager))
f2 = manager2.acquire()
f2.write("bar")
manager2.close()
manager.close()
with open(path) as f:
assert f.read() == "foobar"
def test_file_manager_read(tmpdir, file_cache):
path = str(tmpdir.join("testing.txt"))
with open(path, "w") as f:
f.write("foobar")
manager = CachingFileManager(open, path, cache=file_cache)
f = manager.acquire()
assert f.read() == "foobar"
manager.close()
def test_file_manager_invalid_kwargs():
with pytest.raises(TypeError):
CachingFileManager(open, "dummy", mode="w", invalid=True)
def test_file_manager_acquire_context(tmpdir, file_cache):
path = str(tmpdir.join("testing.txt"))
with open(path, "w") as f:
f.write("foobar")
class AcquisitionError(Exception):
pass
manager = CachingFileManager(open, path, cache=file_cache)
with pytest.raises(AcquisitionError):
with manager.acquire_context() as f:
assert f.read() == "foobar"
raise AcquisitionError
assert not file_cache # file was *not* already open
with manager.acquire_context() as f:
assert f.read() == "foobar"
with pytest.raises(AcquisitionError):
with manager.acquire_context() as f:
f.seek(0)
assert f.read() == "foobar"
raise AcquisitionError
assert file_cache # file *was* already open
manager.close()
|
import sys
import markups
import markups.common
from os.path import dirname, exists, join, expanduser
from PyQt5.QtCore import QByteArray, QLocale, QSettings, QStandardPaths
from PyQt5.QtGui import QFont
app_version = "7.1.0"
settings = QSettings('ReText project', 'ReText')
if not str(settings.fileName()).endswith('.conf'):
# We are on Windows probably
settings = QSettings(QSettings.IniFormat, QSettings.UserScope,
'ReText project', 'ReText')
datadirs = []
def initializeDataDirs():
assert not datadirs
try:
datadirs.append(dirname(dirname(__file__)))
except NameError:
pass
dataLocations = QStandardPaths.standardLocations(QStandardPaths.GenericDataLocation)
datadirs.extend(join(d, 'retext') for d in dataLocations)
if sys.platform == "win32":
# Windows compatibility: Add "PythonXXX\share\" path
datadirs.append(join(dirname(sys.executable), 'share', 'retext'))
# For virtualenvs
datadirs.append(join(dirname(dirname(sys.executable)), 'share', 'retext'))
_iconPath = None
def getBundledIcon(iconName):
global _iconPath
if _iconPath is None:
for dir in ['icons'] + datadirs:
_iconPath = join(dir, 'icons')
if exists(_iconPath):
break
return join(_iconPath, iconName + '.png')
configOptions = {
'appStyleSheet': '',
'autoSave': False,
'defaultCodec': '',
'defaultMarkup': markups.MarkdownMarkup.name,
'defaultPreviewState': 'editor',
'detectEncoding': True,
'directoryPath': expanduser("~"),
'documentStatsEnabled': False,
'editorFont': QFont(),
'font': QFont(),
'handleWebLinks': False,
'hideToolBar': False,
'highlightCurrentLine': 'disabled',
'iconTheme': '',
'lastTabIndex': 0,
'lineNumbersEnabled': False,
'markdownDefaultFileExtension': '.mkd',
'openFilesInExistingWindow': True,
'openLastFilesOnStartup': False,
'orderedListMode': 'increment',
'paperSize': '',
'pygmentsStyle': 'default',
'recentDocumentsCount': 10,
'relativeLineNumbers': False,
'restDefaultFileExtension': '.rst',
'rightMargin': 0,
'rightMarginWrap': False,
'saveWindowGeometry': False,
'showDirectoryTree': False,
'spellCheck': False,
'spellCheckLocale': '',
'styleSheet': '',
'syncScroll': True,
'tabBarAutoHide': False,
'tabInsertsSpaces': True,
'tabWidth': 4,
'uiLanguage': QLocale.system().name(),
'useFakeVim': False,
'useWebEngine': False,
'useWebKit': False,
'wideCursor': False,
'windowGeometry': QByteArray(),
'windowTitleFullPath': False,
}
def readFromSettings(key, keytype, settings=settings, default=None):
if isinstance(default, QFont):
family = readFromSettings(key, str, settings, default.family())
size = readFromSettings(key + 'Size', int, settings, 0)
return QFont(family, size)
if not settings.contains(key):
return default
try:
value = settings.value(key, type=keytype)
if isinstance(value, keytype):
return value
return keytype(value)
except TypeError as error:
# Type mismatch
print('Warning: '+str(error))
# Return an instance of keytype
return default if (default is not None) else keytype()
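# A minimal usage sketch (keys taken from configOptions below):
#   tabWidth = readFromSettings('tabWidth', int, default=4)
#   editorFont = readFromSettings('editorFont', QFont, default=QFont())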
def readListFromSettings(key, settings=settings):
if not settings.contains(key):
return []
value = settings.value(key)
if isinstance(value, str):
return [value]
else:
return value
def writeToSettings(key, value, default, settings=settings):
if isinstance(value, QFont):
writeToSettings(key, value.family(), '', settings)
writeToSettings(key + 'Size', max(value.pointSize(), 0), 0, settings)
elif value == default:
settings.remove(key)
else:
settings.setValue(key, value)
def writeListToSettings(key, value, settings=settings):
if len(value) > 1:
settings.setValue(key, value)
elif len(value) == 1:
settings.setValue(key, value[0])
else:
settings.remove(key)
def getSettingsFilePath(settings=settings):
return settings.fileName()
def chooseMonospaceFont():
font = QFont('monospace')
font.setStyleHint(QFont.TypeWriter)
return font
class ReTextSettings(object):
def __init__(self):
for option in configOptions:
value = configOptions[option]
object.__setattr__(self, option, readFromSettings(
option, type(value), default=value))
def __setattr__(self, option, value):
        if option not in configOptions:
raise AttributeError('Unknown attribute')
object.__setattr__(self, option, value)
writeToSettings(option, value, configOptions[option])
def __getattribute__(self, option):
value = object.__getattribute__(self, option)
        # Choose a font just-in-time, because when the settings are first
        # loaded it is too early for font lookup to work.
if option == 'font' and not value.family():
value = QFont()
if option == 'editorFont' and not value.family():
value = chooseMonospaceFont()
return value
globalSettings = ReTextSettings()
markups.common.PYGMENTS_STYLE = globalSettings.pygmentsStyle
|
from plexapi.exceptions import NotFound
from homeassistant.components.plex.const import DOMAIN, SERVERS
from tests.async_mock import patch
async def test_plex_tv_clients(hass, entry, mock_plex_account, setup_plex_server):
"""Test getting Plex clients from plex.tv."""
mock_plex_server = await setup_plex_server()
server_id = mock_plex_server.machineIdentifier
plex_server = hass.data[DOMAIN][SERVERS][server_id]
resource = next(
x
for x in mock_plex_account.resources()
if x.name.startswith("plex.tv Resource Player")
)
with patch.object(resource, "connect", side_effect=NotFound):
await plex_server._async_update_platforms()
await hass.async_block_till_done()
media_players_before = len(hass.states.async_entity_ids("media_player"))
# Ensure one more client is discovered
await hass.config_entries.async_unload(entry.entry_id)
mock_plex_server = await setup_plex_server()
plex_server = hass.data[DOMAIN][SERVERS][server_id]
await plex_server._async_update_platforms()
await hass.async_block_till_done()
media_players_after = len(hass.states.async_entity_ids("media_player"))
assert media_players_after == media_players_before + 1
# Ensure only plex.tv resource client is found
await hass.config_entries.async_unload(entry.entry_id)
mock_plex_server = await setup_plex_server()
mock_plex_server.clear_clients()
mock_plex_server.clear_sessions()
plex_server = hass.data[DOMAIN][SERVERS][server_id]
await plex_server._async_update_platforms()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("media_player")) == 1
# Ensure cache gets called
await plex_server._async_update_platforms()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("media_player")) == 1
|
import pytest
from homeassistant import setup
from homeassistant.components import zone
from homeassistant.components.zone import DOMAIN
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_NAME,
SERVICE_RELOAD,
)
from homeassistant.core import Context
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import entity_registry
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
"id": "from_storage",
"name": "from storage",
"latitude": 1,
"longitude": 2,
"radius": 3,
"passive": False,
"icon": "mdi:from-storage",
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {}
return await setup.async_setup_component(hass, DOMAIN, config)
return _storage
async def test_setup_no_zones_still_adds_home_zone(hass):
"""Test if no config is passed in we still get the home zone."""
assert await setup.async_setup_component(hass, zone.DOMAIN, {"zone": None})
assert len(hass.states.async_entity_ids("zone")) == 1
state = hass.states.get("zone.home")
assert hass.config.location_name == state.name
assert hass.config.latitude == state.attributes["latitude"]
assert hass.config.longitude == state.attributes["longitude"]
assert not state.attributes.get("passive", False)
async def test_setup(hass):
"""Test a successful setup."""
info = {
"name": "Test Zone",
"latitude": 32.880837,
"longitude": -117.237561,
"radius": 250,
"passive": True,
}
assert await setup.async_setup_component(hass, zone.DOMAIN, {"zone": info})
assert len(hass.states.async_entity_ids("zone")) == 2
state = hass.states.get("zone.test_zone")
assert info["name"] == state.name
assert info["latitude"] == state.attributes["latitude"]
assert info["longitude"] == state.attributes["longitude"]
assert info["radius"] == state.attributes["radius"]
assert info["passive"] == state.attributes["passive"]
async def test_setup_zone_skips_home_zone(hass):
"""Test that zone named Home should override hass home zone."""
info = {"name": "Home", "latitude": 1.1, "longitude": -2.2}
assert await setup.async_setup_component(hass, zone.DOMAIN, {"zone": info})
assert len(hass.states.async_entity_ids("zone")) == 1
state = hass.states.get("zone.home")
assert info["name"] == state.name
async def test_setup_name_can_be_same_on_multiple_zones(hass):
"""Test that zone named Home should override hass home zone."""
info = {"name": "Test Zone", "latitude": 1.1, "longitude": -2.2}
assert await setup.async_setup_component(hass, zone.DOMAIN, {"zone": [info, info]})
assert len(hass.states.async_entity_ids("zone")) == 3
async def test_active_zone_skips_passive_zones(hass):
"""Test active and passive zones."""
assert await setup.async_setup_component(
hass,
zone.DOMAIN,
{
"zone": [
{
"name": "Passive Zone",
"latitude": 32.880600,
"longitude": -117.237561,
"radius": 250,
"passive": True,
}
]
},
)
await hass.async_block_till_done()
active = zone.async_active_zone(hass, 32.880600, -117.237561)
assert active is None
async def test_active_zone_skips_passive_zones_2(hass):
"""Test active and passive zones."""
assert await setup.async_setup_component(
hass,
zone.DOMAIN,
{
"zone": [
{
"name": "Active Zone",
"latitude": 32.880800,
"longitude": -117.237561,
"radius": 500,
}
]
},
)
await hass.async_block_till_done()
active = zone.async_active_zone(hass, 32.880700, -117.237561)
assert "zone.active_zone" == active.entity_id
async def test_active_zone_prefers_smaller_zone_if_same_distance(hass):
"""Test zone size preferences."""
latitude = 32.880600
longitude = -117.237561
assert await setup.async_setup_component(
hass,
zone.DOMAIN,
{
"zone": [
{
"name": "Small Zone",
"latitude": latitude,
"longitude": longitude,
"radius": 250,
},
{
"name": "Big Zone",
"latitude": latitude,
"longitude": longitude,
"radius": 500,
},
]
},
)
active = zone.async_active_zone(hass, latitude, longitude)
assert "zone.small_zone" == active.entity_id
async def test_active_zone_prefers_smaller_zone_if_same_distance_2(hass):
"""Test zone size preferences."""
latitude = 32.880600
longitude = -117.237561
assert await setup.async_setup_component(
hass,
zone.DOMAIN,
{
"zone": [
{
"name": "Smallest Zone",
"latitude": latitude,
"longitude": longitude,
"radius": 50,
}
]
},
)
active = zone.async_active_zone(hass, latitude, longitude)
assert "zone.smallest_zone" == active.entity_id
async def test_in_zone_works_for_passive_zones(hass):
"""Test working in passive zones."""
latitude = 32.880600
longitude = -117.237561
assert await setup.async_setup_component(
hass,
zone.DOMAIN,
{
"zone": [
{
"name": "Passive Zone",
"latitude": latitude,
"longitude": longitude,
"radius": 250,
"passive": True,
}
]
},
)
assert zone.in_zone(hass.states.get("zone.passive_zone"), latitude, longitude)
async def test_core_config_update(hass):
"""Test updating core config will update home zone."""
assert await setup.async_setup_component(hass, "zone", {})
home = hass.states.get("zone.home")
await hass.config.async_update(
location_name="Updated Name", latitude=10, longitude=20
)
await hass.async_block_till_done()
home_updated = hass.states.get("zone.home")
assert home is not home_updated
assert home_updated.name == "Updated Name"
assert home_updated.attributes["latitude"] == 10
assert home_updated.attributes["longitude"] == 20
async def test_reload(hass, hass_admin_user, hass_read_only_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
ent_reg = await entity_registry.async_get_registry(hass)
assert await setup.async_setup_component(
hass,
DOMAIN,
{
DOMAIN: [
{"name": "yaml 1", "latitude": 1, "longitude": 2},
{"name": "yaml 2", "latitude": 3, "longitude": 4},
],
},
)
assert count_start + 3 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("zone.yaml_1")
state_2 = hass.states.get("zone.yaml_2")
state_3 = hass.states.get("zone.yaml_3")
assert state_1 is not None
assert state_1.attributes["latitude"] == 1
assert state_1.attributes["longitude"] == 2
assert state_2 is not None
assert state_2.attributes["latitude"] == 3
assert state_2.attributes["longitude"] == 4
assert state_3 is None
assert len(ent_reg.entities) == 0
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: [
{"name": "yaml 2", "latitude": 3, "longitude": 4},
{"name": "yaml 3", "latitude": 5, "longitude": 6},
]
},
):
with pytest.raises(Unauthorized):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_read_only_user.id),
)
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 3 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("zone.yaml_1")
state_2 = hass.states.get("zone.yaml_2")
state_3 = hass.states.get("zone.yaml_3")
assert state_1 is None
assert state_2 is not None
assert state_2.attributes["latitude"] == 3
assert state_2.attributes["longitude"] == 4
assert state_3 is not None
assert state_3.attributes["latitude"] == 5
assert state_3.attributes["longitude"] == 6
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == "zoning"
assert state.name == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(
config={DOMAIN: [{"name": "yaml option", "latitude": 3, "longitude": 4}]}
)
state = hass.states.get(f"{DOMAIN}.from_storage")
assert state.state == "zoning"
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
state = hass.states.get(f"{DOMAIN}.yaml_option")
assert state.state == "zoning"
assert not state.attributes.get(ATTR_EDITABLE)
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(
config={DOMAIN: [{"name": "yaml option", "latitude": 3, "longitude": 4}]}
)
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_update(hass, hass_ws_client, storage_setup):
"""Test updating min/max updates the state."""
items = [
{
"id": "from_storage",
"name": "from storage",
"latitude": 1,
"longitude": 2,
"radius": 3,
"passive": False,
}
]
assert await storage_setup(items)
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state.attributes["latitude"] == 1
assert state.attributes["longitude"] == 2
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
"latitude": 3,
"longitude": 4,
"passive": True,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.attributes["latitude"] == 3
assert state.attributes["longitude"] == 4
assert state.attributes["passive"] is True
async def test_ws_create(hass, hass_ws_client, storage_setup):
"""Test create WS."""
assert await storage_setup(items=[])
input_id = "new_input"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/create",
"name": "New Input",
"latitude": 3,
"longitude": 4,
"passive": True,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state.state == "zoning"
assert state.attributes["latitude"] == 3
assert state.attributes["longitude"] == 4
assert state.attributes["passive"] is True
async def test_import_config_entry(hass):
"""Test we import config entry and then delete it."""
entry = MockConfigEntry(
domain="zone",
data={
"name": "from config entry",
"latitude": 1,
"longitude": 2,
"radius": 3,
"passive": False,
"icon": "mdi:from-config-entry",
},
)
entry.add_to_hass(hass)
assert await setup.async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries()) == 0
state = hass.states.get("zone.from_config_entry")
assert state is not None
assert state.attributes[zone.ATTR_LATITUDE] == 1
assert state.attributes[zone.ATTR_LONGITUDE] == 2
assert state.attributes[zone.ATTR_RADIUS] == 3
assert state.attributes[zone.ATTR_PASSIVE] is False
assert state.attributes[ATTR_ICON] == "mdi:from-config-entry"
async def test_zone_empty_setup(hass):
"""Set up zone with empty config."""
assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})
async def test_unavailable_zone(hass):
"""Test active zone with unavailable zones."""
assert await setup.async_setup_component(hass, DOMAIN, {"zone": {}})
hass.states.async_set("zone.bla", "unavailable", {"restored": True})
assert zone.async_active_zone(hass, 0.0, 0.01) is None
assert zone.in_zone(hass.states.get("zone.bla"), 0, 0) is False
|
import asyncio
from collections import namedtuple
import os
import re
import shlex
import sys
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError(f"Invalid color {the_color}")
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append(f"\\\n {shlex.quote(arg)}")
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(FAIL, f"Could not execute {args[0]}. Did you install test requirements?")
raise err
if not display:
        # Read stdout into the log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
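# A minimal usage sketch (inside a coroutine, hypothetical commands):
#   code, log = await async_exec("git", "status", "--short")
#   code, _ = await async_exec("pytest", "-q", display=True)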
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("pre-commit", "run", "flake8", "--files", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = f"{err.file} {err.line}:{err.col} {err.msg}"
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = f"test_{parts[-1]}"
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
|