from homeassistant.const import (
ATTR_BATTERY_LEVEL,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from . import DOMAIN
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo sensors."""
async_add_entities(
[
DemoSensor(
"sensor_1",
"Outside Temperature",
15.6,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
12,
),
DemoSensor(
"sensor_2",
"Outside Humidity",
54,
DEVICE_CLASS_HUMIDITY,
PERCENTAGE,
None,
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSensor(Entity):
"""Representation of a Demo sensor."""
def __init__(
self, unique_id, name, state, device_class, unit_of_measurement, battery
):
"""Initialize the sensor."""
self._unique_id = unique_id
self._name = name
self._state = state
self._device_class = device_class
self._unit_of_measurement = unit_of_measurement
self._battery = battery
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def should_poll(self):
"""No polling needed for a demo sensor."""
return False
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._battery:
return {ATTR_BATTERY_LEVEL: self._battery}
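# A minimal usage sketch (not part of the demo integration): DemoSensor only stores
# plain attributes, so its properties can be inspected directly, e.g. from a test.
# The values below are illustrative; running this still requires the homeassistant
# package and the component's package context for the imports at the top of the file.
if __name__ == "__main__":
    sensor = DemoSensor(
        "sensor_3", "Office Temperature", 21.5, DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, 87
    )
    print(sensor.name, sensor.state, sensor.unit_of_measurement)  # Office Temperature 21.5 °C
    print(sensor.device_state_attributes)  # {'battery_level': 87}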
|
import logging
from pyrisco import CannotConnectError, RiscoAPI, UnauthorizedError
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.const import (
CONF_PASSWORD,
CONF_PIN,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
CONF_CODE_ARM_REQUIRED,
CONF_CODE_DISARM_REQUIRED,
CONF_HA_STATES_TO_RISCO,
CONF_RISCO_STATES_TO_HA,
DEFAULT_OPTIONS,
RISCO_STATES,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({CONF_USERNAME: str, CONF_PASSWORD: str, CONF_PIN: str})
HA_STATES = [
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
]
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
risco = RiscoAPI(data[CONF_USERNAME], data[CONF_PASSWORD], data[CONF_PIN])
try:
await risco.login(async_get_clientsession(hass))
finally:
await risco.close()
return {"title": risco.site_name}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Risco."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@core.callback
def async_get_options_flow(config_entry):
"""Define the config flow to handle options."""
return RiscoOptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
try:
info = await validate_input(self.hass, user_input)
except CannotConnectError:
errors["base"] = "cannot_connect"
except UnauthorizedError:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class RiscoOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a Risco options flow."""
def __init__(self, config_entry):
"""Initialize."""
self.config_entry = config_entry
self._data = {**DEFAULT_OPTIONS, **config_entry.options}
def _options_schema(self):
return vol.Schema(
{
vol.Required(
CONF_SCAN_INTERVAL, default=self._data[CONF_SCAN_INTERVAL]
): int,
vol.Required(
CONF_CODE_ARM_REQUIRED, default=self._data[CONF_CODE_ARM_REQUIRED]
): bool,
vol.Required(
CONF_CODE_DISARM_REQUIRED,
default=self._data[CONF_CODE_DISARM_REQUIRED],
): bool,
}
)
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
self._data = {**self._data, **user_input}
return await self.async_step_risco_to_ha()
return self.async_show_form(step_id="init", data_schema=self._options_schema())
async def async_step_risco_to_ha(self, user_input=None):
"""Map Risco states to HA states."""
if user_input is not None:
self._data[CONF_RISCO_STATES_TO_HA] = user_input
return await self.async_step_ha_to_risco()
risco_to_ha = self._data[CONF_RISCO_STATES_TO_HA]
options = vol.Schema(
{
vol.Required(risco_state, default=risco_to_ha[risco_state]): vol.In(
HA_STATES
)
for risco_state in RISCO_STATES
}
)
return self.async_show_form(step_id="risco_to_ha", data_schema=options)
async def async_step_ha_to_risco(self, user_input=None):
"""Map HA states to Risco states."""
if user_input is not None:
self._data[CONF_HA_STATES_TO_RISCO] = user_input
return self.async_create_entry(title="", data=self._data)
options = {}
risco_to_ha = self._data[CONF_RISCO_STATES_TO_HA]
# we iterate over HA_STATES, instead of set(risco_to_ha.values()),
# to ensure a consistent order
for ha_state in HA_STATES:
if ha_state not in risco_to_ha.values():
continue
values = [
risco_state
for risco_state in RISCO_STATES
if risco_to_ha[risco_state] == ha_state
]
current = self._data[CONF_HA_STATES_TO_RISCO].get(ha_state)
if current not in values:
current = values[0]
options[vol.Required(ha_state, default=current)] = vol.In(values)
return self.async_show_form(
step_id="ha_to_risco", data_schema=vol.Schema(options)
)
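# A minimal, self-contained sketch (not part of this module's flow) of the reverse
# mapping built in async_step_ha_to_risco: every HA state that at least one Risco
# state maps to gets the matching Risco states as choices, the first one as default.
# The state names below are illustrative placeholders, not the real RISCO_STATES.
if __name__ == "__main__":
    example_ha_states = ["armed_away", "armed_home", "armed_night", "armed_custom_bypass"]
    example_risco_to_ha = {"arm": "armed_away", "partial_arm": "armed_home", "group_arm": "armed_home"}
    defaults = {}
    for ha_state in example_ha_states:  # iterate HA states for a stable order
        candidates = [r for r, h in example_risco_to_ha.items() if h == ha_state]
        if candidates:
            defaults[ha_state] = candidates[0]
    print(defaults)  # {'armed_away': 'arm', 'armed_home': 'partial_arm'}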
|
import logging
from libpurecool.const import FanMode, FanSpeed, NightMode, Oscillation
from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_cool_link import DysonPureCoolLink
from libpurecool.dyson_pure_state import DysonPureCoolState
from libpurecool.dyson_pure_state_v2 import DysonPureCoolV2State
import voluptuous as vol
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.const import ATTR_ENTITY_ID
import homeassistant.helpers.config_validation as cv
from . import DYSON_DEVICES
_LOGGER = logging.getLogger(__name__)
ATTR_NIGHT_MODE = "night_mode"
ATTR_AUTO_MODE = "auto_mode"
ATTR_ANGLE_LOW = "angle_low"
ATTR_ANGLE_HIGH = "angle_high"
ATTR_FLOW_DIRECTION_FRONT = "flow_direction_front"
ATTR_TIMER = "timer"
ATTR_HEPA_FILTER = "hepa_filter"
ATTR_CARBON_FILTER = "carbon_filter"
ATTR_DYSON_SPEED = "dyson_speed"
ATTR_DYSON_SPEED_LIST = "dyson_speed_list"
DYSON_DOMAIN = "dyson"
DYSON_FAN_DEVICES = "dyson_fan_devices"
SERVICE_SET_NIGHT_MODE = "set_night_mode"
SERVICE_SET_AUTO_MODE = "set_auto_mode"
SERVICE_SET_ANGLE = "set_angle"
SERVICE_SET_FLOW_DIRECTION_FRONT = "set_flow_direction_front"
SERVICE_SET_TIMER = "set_timer"
SERVICE_SET_DYSON_SPEED = "set_speed"
DYSON_SET_NIGHT_MODE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_NIGHT_MODE): cv.boolean,
}
)
SET_AUTO_MODE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_AUTO_MODE): cv.boolean,
}
)
SET_ANGLE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_ANGLE_LOW): cv.positive_int,
vol.Required(ATTR_ANGLE_HIGH): cv.positive_int,
}
)
SET_FLOW_DIRECTION_FRONT_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_FLOW_DIRECTION_FRONT): cv.boolean,
}
)
SET_TIMER_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_TIMER): cv.positive_int,
}
)
SET_DYSON_SPEED_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_DYSON_SPEED): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dyson fan components."""
if discovery_info is None:
return
_LOGGER.debug("Creating new Dyson fans")
if DYSON_FAN_DEVICES not in hass.data:
hass.data[DYSON_FAN_DEVICES] = []
# Get Dyson Devices from parent component
has_purecool_devices = False
device_serials = [device.serial for device in hass.data[DYSON_FAN_DEVICES]]
for device in hass.data[DYSON_DEVICES]:
if device.serial not in device_serials:
if isinstance(device, DysonPureCool):
has_purecool_devices = True
dyson_entity = DysonPureCoolDevice(device)
hass.data[DYSON_FAN_DEVICES].append(dyson_entity)
elif isinstance(device, DysonPureCoolLink):
dyson_entity = DysonPureCoolLinkDevice(hass, device)
hass.data[DYSON_FAN_DEVICES].append(dyson_entity)
add_entities(hass.data[DYSON_FAN_DEVICES])
def service_handle(service):
"""Handle the Dyson services."""
entity_id = service.data[ATTR_ENTITY_ID]
fan_device = next(
(fan for fan in hass.data[DYSON_FAN_DEVICES] if fan.entity_id == entity_id),
None,
)
if fan_device is None:
_LOGGER.warning("Unable to find Dyson fan device %s", str(entity_id))
return
if service.service == SERVICE_SET_NIGHT_MODE:
fan_device.set_night_mode(service.data[ATTR_NIGHT_MODE])
if service.service == SERVICE_SET_AUTO_MODE:
fan_device.set_auto_mode(service.data[ATTR_AUTO_MODE])
if service.service == SERVICE_SET_ANGLE:
fan_device.set_angle(
service.data[ATTR_ANGLE_LOW], service.data[ATTR_ANGLE_HIGH]
)
if service.service == SERVICE_SET_FLOW_DIRECTION_FRONT:
fan_device.set_flow_direction_front(service.data[ATTR_FLOW_DIRECTION_FRONT])
if service.service == SERVICE_SET_TIMER:
fan_device.set_timer(service.data[ATTR_TIMER])
if service.service == SERVICE_SET_DYSON_SPEED:
fan_device.set_dyson_speed(service.data[ATTR_DYSON_SPEED])
# Register dyson service(s)
hass.services.register(
DYSON_DOMAIN,
SERVICE_SET_NIGHT_MODE,
service_handle,
schema=DYSON_SET_NIGHT_MODE_SCHEMA,
)
hass.services.register(
DYSON_DOMAIN, SERVICE_SET_AUTO_MODE, service_handle, schema=SET_AUTO_MODE_SCHEMA
)
if has_purecool_devices:
hass.services.register(
DYSON_DOMAIN, SERVICE_SET_ANGLE, service_handle, schema=SET_ANGLE_SCHEMA
)
hass.services.register(
DYSON_DOMAIN,
SERVICE_SET_FLOW_DIRECTION_FRONT,
service_handle,
schema=SET_FLOW_DIRECTION_FRONT_SCHEMA,
)
hass.services.register(
DYSON_DOMAIN, SERVICE_SET_TIMER, service_handle, schema=SET_TIMER_SCHEMA
)
hass.services.register(
DYSON_DOMAIN,
SERVICE_SET_DYSON_SPEED,
service_handle,
schema=SET_DYSON_SPEED_SCHEMA,
)
class DysonPureCoolLinkDevice(FanEntity):
"""Representation of a Dyson fan."""
def __init__(self, hass, device):
"""Initialize the fan."""
_LOGGER.debug("Creating device %s", device.name)
self.hass = hass
self._device = device
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._device.add_message_listener(self.on_message)
def on_message(self, message):
"""Call when new messages received from the fan."""
if isinstance(message, DysonPureCoolState):
_LOGGER.debug("Message received for fan device %s: %s", self.name, message)
self.schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the display name of this fan."""
return self._device.name
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan. Never called ??."""
_LOGGER.debug("Set fan speed to: %s", speed)
if speed == FanSpeed.FAN_SPEED_AUTO.value:
self._device.set_configuration(fan_mode=FanMode.AUTO)
else:
fan_speed = FanSpeed(f"{int(speed):04d}")
self._device.set_configuration(fan_mode=FanMode.FAN, fan_speed=fan_speed)
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the fan."""
_LOGGER.debug("Turn on fan %s with speed %s", self.name, speed)
if speed:
if speed == FanSpeed.FAN_SPEED_AUTO.value:
self._device.set_configuration(fan_mode=FanMode.AUTO)
else:
fan_speed = FanSpeed(f"{int(speed):04d}")
self._device.set_configuration(
fan_mode=FanMode.FAN, fan_speed=fan_speed
)
else:
# Speed not set, just turn on
self._device.set_configuration(fan_mode=FanMode.FAN)
def turn_off(self, **kwargs) -> None:
"""Turn off the fan."""
_LOGGER.debug("Turn off fan %s", self.name)
self._device.set_configuration(fan_mode=FanMode.OFF)
def oscillate(self, oscillating: bool) -> None:
"""Turn on/off oscillating."""
_LOGGER.debug("Turn oscillation %s for device %s", oscillating, self.name)
if oscillating:
self._device.set_configuration(oscillation=Oscillation.OSCILLATION_ON)
else:
self._device.set_configuration(oscillation=Oscillation.OSCILLATION_OFF)
@property
def oscillating(self):
"""Return the oscillation state."""
return self._device.state and self._device.state.oscillation == "ON"
@property
def is_on(self):
"""Return true if the entity is on."""
if self._device.state:
return self._device.state.fan_mode == "FAN"
return False
@property
def speed(self) -> str:
"""Return the current speed."""
if self._device.state:
if self._device.state.speed == FanSpeed.FAN_SPEED_AUTO.value:
return self._device.state.speed
return int(self._device.state.speed)
return None
@property
def current_direction(self):
"""Return direction of the fan [forward, reverse]."""
return None
@property
def night_mode(self):
"""Return Night mode."""
return self._device.state.night_mode == "ON"
def set_night_mode(self, night_mode: bool) -> None:
"""Turn fan in night mode."""
_LOGGER.debug("Set %s night mode %s", self.name, night_mode)
if night_mode:
self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_ON)
else:
self._device.set_configuration(night_mode=NightMode.NIGHT_MODE_OFF)
@property
def auto_mode(self):
"""Return auto mode."""
return self._device.state.fan_mode == "AUTO"
def set_auto_mode(self, auto_mode: bool) -> None:
"""Turn fan in auto mode."""
_LOGGER.debug("Set %s auto mode %s", self.name, auto_mode)
if auto_mode:
self._device.set_configuration(fan_mode=FanMode.AUTO)
else:
self._device.set_configuration(fan_mode=FanMode.FAN)
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
supported_speeds = [
FanSpeed.FAN_SPEED_AUTO.value,
int(FanSpeed.FAN_SPEED_1.value),
int(FanSpeed.FAN_SPEED_2.value),
int(FanSpeed.FAN_SPEED_3.value),
int(FanSpeed.FAN_SPEED_4.value),
int(FanSpeed.FAN_SPEED_5.value),
int(FanSpeed.FAN_SPEED_6.value),
int(FanSpeed.FAN_SPEED_7.value),
int(FanSpeed.FAN_SPEED_8.value),
int(FanSpeed.FAN_SPEED_9.value),
int(FanSpeed.FAN_SPEED_10.value),
]
return supported_speeds
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED
@property
def device_state_attributes(self) -> dict:
"""Return optional state attributes."""
return {ATTR_NIGHT_MODE: self.night_mode, ATTR_AUTO_MODE: self.auto_mode}
class DysonPureCoolDevice(FanEntity):
"""Representation of a Dyson Purecool (TP04/DP04) fan."""
def __init__(self, device):
"""Initialize the fan."""
self._device = device
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._device.add_message_listener(self.on_message)
def on_message(self, message):
"""Call when new messages received from the fan."""
if isinstance(message, DysonPureCoolV2State):
_LOGGER.debug("Message received for fan device %s: %s", self.name, message)
self.schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the display name of this fan."""
return self._device.name
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the fan."""
_LOGGER.debug("Turn on fan %s", self.name)
if speed is not None:
self.set_speed(speed)
else:
self._device.turn_on()
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if speed == SPEED_LOW:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_4)
elif speed == SPEED_MEDIUM:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_7)
elif speed == SPEED_HIGH:
self._device.set_fan_speed(FanSpeed.FAN_SPEED_10)
def turn_off(self, **kwargs):
"""Turn off the fan."""
_LOGGER.debug("Turn off fan %s", self.name)
self._device.turn_off()
def set_dyson_speed(self, speed: str = None) -> None:
"""Set the exact speed of the purecool fan."""
_LOGGER.debug("Set exact speed for fan %s", self.name)
fan_speed = FanSpeed(f"{int(speed):04d}")
self._device.set_fan_speed(fan_speed)
def oscillate(self, oscillating: bool) -> None:
"""Turn on/off oscillating."""
_LOGGER.debug("Turn oscillation %s for device %s", oscillating, self.name)
if oscillating:
self._device.enable_oscillation()
else:
self._device.disable_oscillation()
def set_night_mode(self, night_mode: bool) -> None:
"""Turn on/off night mode."""
_LOGGER.debug("Turn night mode %s for device %s", night_mode, self.name)
if night_mode:
self._device.enable_night_mode()
else:
self._device.disable_night_mode()
def set_auto_mode(self, auto_mode: bool) -> None:
"""Turn auto mode on/off."""
_LOGGER.debug("Turn auto mode %s for device %s", auto_mode, self.name)
if auto_mode:
self._device.enable_auto_mode()
else:
self._device.disable_auto_mode()
def set_angle(self, angle_low: int, angle_high: int) -> None:
"""Set device angle."""
_LOGGER.debug(
"set low %s and high angle %s for device %s",
angle_low,
angle_high,
self.name,
)
self._device.enable_oscillation(angle_low, angle_high)
def set_flow_direction_front(self, flow_direction_front: bool) -> None:
"""Set frontal airflow direction."""
_LOGGER.debug(
"Set frontal flow direction to %s for device %s",
flow_direction_front,
self.name,
)
if flow_direction_front:
self._device.enable_frontal_direction()
else:
self._device.disable_frontal_direction()
def set_timer(self, timer) -> None:
"""Set timer."""
_LOGGER.debug("Set timer to %s for device %s", timer, self.name)
if timer == 0:
self._device.disable_sleep_timer()
else:
self._device.enable_sleep_timer(timer)
@property
def oscillating(self):
"""Return the oscillation state."""
return self._device.state and self._device.state.oscillation == "OION"
@property
def is_on(self):
"""Return true if the entity is on."""
if self._device.state:
return self._device.state.fan_power == "ON"
@property
def speed(self):
"""Return the current speed."""
speed_map = {
FanSpeed.FAN_SPEED_1.value: SPEED_LOW,
FanSpeed.FAN_SPEED_2.value: SPEED_LOW,
FanSpeed.FAN_SPEED_3.value: SPEED_LOW,
FanSpeed.FAN_SPEED_4.value: SPEED_LOW,
FanSpeed.FAN_SPEED_AUTO.value: SPEED_MEDIUM,
FanSpeed.FAN_SPEED_5.value: SPEED_MEDIUM,
FanSpeed.FAN_SPEED_6.value: SPEED_MEDIUM,
FanSpeed.FAN_SPEED_7.value: SPEED_MEDIUM,
FanSpeed.FAN_SPEED_8.value: SPEED_HIGH,
FanSpeed.FAN_SPEED_9.value: SPEED_HIGH,
FanSpeed.FAN_SPEED_10.value: SPEED_HIGH,
}
return speed_map[self._device.state.speed]
@property
def dyson_speed(self):
"""Return the current speed."""
if self._device.state:
if self._device.state.speed == FanSpeed.FAN_SPEED_AUTO.value:
return self._device.state.speed
return int(self._device.state.speed)
@property
def night_mode(self):
"""Return Night mode."""
return self._device.state.night_mode == "ON"
@property
def auto_mode(self):
"""Return Auto mode."""
return self._device.state.auto_mode == "ON"
@property
def angle_low(self):
"""Return angle high."""
return int(self._device.state.oscillation_angle_low)
@property
def angle_high(self):
"""Return angle low."""
return int(self._device.state.oscillation_angle_high)
@property
def flow_direction_front(self):
"""Return frontal flow direction."""
return self._device.state.front_direction == "ON"
@property
def timer(self):
"""Return timer."""
return self._device.state.sleep_timer
@property
def hepa_filter(self):
"""Return the HEPA filter state."""
return int(self._device.state.hepa_filter_state)
@property
def carbon_filter(self):
"""Return the carbon filter state."""
if self._device.state.carbon_filter_state == "INV":
return self._device.state.carbon_filter_state
return int(self._device.state.carbon_filter_state)
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return [SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
@property
def dyson_speed_list(self) -> list:
"""Get the list of available dyson speeds."""
return [
int(FanSpeed.FAN_SPEED_1.value),
int(FanSpeed.FAN_SPEED_2.value),
int(FanSpeed.FAN_SPEED_3.value),
int(FanSpeed.FAN_SPEED_4.value),
int(FanSpeed.FAN_SPEED_5.value),
int(FanSpeed.FAN_SPEED_6.value),
int(FanSpeed.FAN_SPEED_7.value),
int(FanSpeed.FAN_SPEED_8.value),
int(FanSpeed.FAN_SPEED_9.value),
int(FanSpeed.FAN_SPEED_10.value),
]
@property
def device_serial(self):
"""Return fan's serial number."""
return self._device.serial
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_OSCILLATE | SUPPORT_SET_SPEED
@property
def device_state_attributes(self) -> dict:
"""Return optional state attributes."""
return {
ATTR_NIGHT_MODE: self.night_mode,
ATTR_AUTO_MODE: self.auto_mode,
ATTR_ANGLE_LOW: self.angle_low,
ATTR_ANGLE_HIGH: self.angle_high,
ATTR_FLOW_DIRECTION_FRONT: self.flow_direction_front,
ATTR_TIMER: self.timer,
ATTR_HEPA_FILTER: self.hepa_filter,
ATTR_CARBON_FILTER: self.carbon_filter,
ATTR_DYSON_SPEED: self.dyson_speed,
ATTR_DYSON_SPEED_LIST: self.dyson_speed_list,
}
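# A minimal sketch (illustration only, no device I/O) of the speed translation used
# by DysonPureCoolDevice: Home Assistant's low/medium/high buckets map to fixed
# Dyson speeds in set_speed(), mirroring the mapping below. Running it requires the
# module's libpurecool and homeassistant imports to resolve.
if __name__ == "__main__":
    ha_to_dyson = {
        SPEED_LOW: FanSpeed.FAN_SPEED_4,
        SPEED_MEDIUM: FanSpeed.FAN_SPEED_7,
        SPEED_HIGH: FanSpeed.FAN_SPEED_10,
    }
    for ha_speed, dyson_speed in ha_to_dyson.items():
        print(f"{ha_speed} -> {dyson_speed}")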
|
from elephas.ml.params import *
def test_has_keras_model_config():
param = HasKerasModelConfig()
conf = {"foo": "bar"}
param.set_keras_model_config(conf)
assert conf == param.get_keras_model_config()
def test_has_optimizer_config():
param = HasKerasOptimizerConfig()
conf = {"foo": "bar"}
param.set_optimizer_config(conf)
assert conf == param.get_optimizer_config()
def test_has_mode():
param = HasMode()
assert param.get_mode() == "asynchronous"
mode = "foobar"
param.set_mode(mode)
assert param.get_mode() == mode
def test_has_frequency():
param = HasFrequency()
assert param.get_frequency() == "epoch"
freq = "foobar"
param.set_frequency(freq)
assert param.get_frequency() == freq
def test_has_number_of_classes():
param = HasNumberOfClasses()
assert param.get_nb_classes() == 10
classes = 42
param.set_nb_classes(classes)
assert param.get_nb_classes() == classes
def test_has_categorical_labels():
param = HasCategoricalLabels()
assert param.get_categorical_labels()
has_labels = False
param.set_categorical_labels(has_labels)
assert param.get_categorical_labels() == has_labels
def test_has_epochs():
param = HasEpochs()
assert param.get_epochs() == 10
epochs = 42
param.set_epochs(epochs)
assert param.get_epochs() == epochs
def test_has_batch_size():
param = HasBatchSize()
assert param.get_batch_size() == 32
bs = 42
param.set_batch_size(bs)
assert param.get_batch_size() == bs
def test_has_verbosity():
param = HasVerbosity()
assert param.get_verbosity() == 0
verbosity = 2
param.set_verbosity(verbosity)
assert param.get_verbosity() == verbosity
def test_has_validation_split():
param = HasValidationSplit()
assert param.get_validation_split() == 0.1
split = 0.5
param.set_validation_split(split)
assert param.get_validation_split() == split
def test_has_number_of_workers():
param = HasNumberOfWorkers()
assert param.get_num_workers() == 8
workers = 12
param.set_num_workers(workers)
assert param.get_num_workers() == workers
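# A minimal stand-in (not part of elephas) illustrating the getter/setter pattern
# these tests exercise: each Has* mixin wraps a single parameter with a default.
# The real mixins are built on Spark ML Params; this sketch only mirrors the
# default-plus-override behaviour asserted in test_has_batch_size above.
class _HasBatchSizeSketch(object):
    def __init__(self):
        self._batch_size = 32  # default expected by the test

    def set_batch_size(self, batch_size):
        self._batch_size = batch_size
        return self

    def get_batch_size(self):
        return self._batch_size


if __name__ == "__main__":
    param = _HasBatchSizeSketch()
    assert param.get_batch_size() == 32
    assert param.set_batch_size(42).get_batch_size() == 42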
|
import os
import sys
import unittest
import roslib.names
class NamesTest(unittest.TestCase):
def test_get_ros_namespace(self):
if 'ROS_NAMESPACE' in os.environ:
rosns = os.environ['ROS_NAMESPACE']
del os.environ['ROS_NAMESPACE']
else:
rosns = None
sysargv = sys.argv
try:
sys.argv = []
self.assertEquals('/', roslib.names.get_ros_namespace())
self.assertEquals('/', roslib.names.get_ros_namespace(argv=[]))
self.assertEquals('/', roslib.names.get_ros_namespace(env={}))
self.assertEquals('/', roslib.names.get_ros_namespace(env={}, argv=[]))
os.environ['ROS_NAMESPACE'] = 'unresolved'
self.assertEquals('/unresolved/', roslib.names.get_ros_namespace())
self.assertEquals('/unresolved/', roslib.names.get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'}))
sys.argv = ['foo', '__ns:=unresolved_override']
self.assertEquals('/unresolved_override/', roslib.names.get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'}))
self.assertEquals('/override2/', roslib.names.get_ros_namespace(env={'ROS_NAMESPACE': 'unresolved'}, argv=['foo', '__ns:=override2']))
sys.argv = []
os.environ['ROS_NAMESPACE'] = '/resolved/'
self.assertEquals('/resolved/', roslib.names.get_ros_namespace())
self.assertEquals('/resolved/', roslib.names.get_ros_namespace(env={'ROS_NAMESPACE': '/resolved'}))
del os.environ['ROS_NAMESPACE']
sys.argv = ['foo', '__ns:=unresolved_ns']
self.assertEquals('/unresolved_ns/', roslib.names.get_ros_namespace())
self.assertEquals('/unresolved_ns2/', roslib.names.get_ros_namespace(argv=['foo', '__ns:=unresolved_ns2']))
sys.argv = ['foo', '__ns:=/resolved_ns/']
self.assertEquals('/resolved_ns/', roslib.names.get_ros_namespace())
self.assertEquals('/resolved_ns2/', roslib.names.get_ros_namespace(argv=['foo', '__ns:=resolved_ns2']))
finally:
sys.argv = sysargv
# restore
if rosns:
os.environ['ROS_NAMESPACE'] = rosns
def test_make_global_ns(self):
from roslib.names import make_global_ns
for n in ['~foo']:
try:
make_global_ns(n)
self.fail('make_global_ns should fail on %s' % n)
except ValueError:
pass
self.assertEquals('/foo/', make_global_ns('foo'))
self.assertEquals('/', make_global_ns(''))
self.assertEquals('/foo/', make_global_ns('/foo'))
self.assertEquals('/foo/', make_global_ns('/foo/'))
self.assertEquals('/foo/bar/', make_global_ns('/foo/bar'))
self.assertEquals('/foo/bar/', make_global_ns('/foo/bar/'))
def test_is_global(self):
try:
roslib.names.is_global(None)
self.fail('is_global should raise exception on invalid param')
except Exception:
pass
tests = ['/', '/global', '/global2']
for t in tests:
self.assert_(roslib.names.is_global(t))
fails = ['', 'not_global', 'not/global']
for t in fails:
self.failIf(roslib.names.is_global(t))
def test_is_private(self):
try:
roslib.names.is_private(None)
self.fail('is_private should raise exception on invalid param')
except Exception:
pass
tests = ['~name', '~name/sub']
for t in tests:
self.assert_(roslib.names.is_private(t))
fails = ['', 'not_private', 'not/private', 'not/~private', '/not/~private']
for t in fails:
self.failIf(roslib.names.is_private(t))
def test_namespace(self):
from roslib.names import namespace
try:
namespace(1)
self.fail('1')
except TypeError:
pass
try:
namespace(None)
self.fail('None')
except ValueError:
pass
self.assertEquals('/', namespace(''))
self.assertEquals('/', namespace('/'))
self.assertEquals('/', namespace('/foo'))
self.assertEquals('/', namespace('/foo/'))
self.assertEquals('/foo/', namespace('/foo/bar'))
self.assertEquals('/foo/', namespace('/foo/bar/'))
self.assertEquals('/foo/bar/', namespace('/foo/bar/baz'))
self.assertEquals('/foo/bar/', namespace('/foo/bar/baz/'))
# unicode tests
self.assertEquals(u'/', namespace(u''))
self.assertEquals(u'/', namespace(u'/'))
self.assertEquals(u'/foo/bar/', namespace(u'/foo/bar/baz/'))
def test_nsjoin(self):
from roslib.names import ns_join
# private and global names cannot be joined
self.assertEquals('~name', ns_join('/foo', '~name'))
self.assertEquals('/name', ns_join('/foo', '/name'))
self.assertEquals('~name', ns_join('~', '~name'))
self.assertEquals('/name', ns_join('/', '/name'))
# ns can be '~' or '/'
self.assertEquals('~name', ns_join('~', 'name'))
self.assertEquals('/name', ns_join('/', 'name'))
self.assertEquals('/ns/name', ns_join('/ns', 'name'))
self.assertEquals('/ns/name', ns_join('/ns/', 'name'))
self.assertEquals('/ns/ns2/name', ns_join('/ns', 'ns2/name'))
self.assertEquals('/ns/ns2/name', ns_join('/ns/', 'ns2/name'))
# allow ns to be empty
self.assertEquals('name', ns_join('', 'name'))
def test_load_mappings(self):
from roslib.names import load_mappings
self.assertEquals({}, load_mappings([]))
self.assertEquals({}, load_mappings(['foo']))
self.assertEquals({}, load_mappings([':=']))
self.assertEquals({}, load_mappings([':=:=']))
self.assertEquals({}, load_mappings(['f:=']))
self.assertEquals({}, load_mappings([':=b']))
self.assertEquals({}, load_mappings(['foo:=bar:=baz']))
# should ignore node param assignments
self.assertEquals({}, load_mappings(['_foo:=bar']))
self.assertEquals({'foo': 'bar'}, load_mappings(['foo:=bar']))
# should allow double-underscore names
self.assertEquals({'__foo': 'bar'}, load_mappings(['__foo:=bar']))
self.assertEquals({'foo': 'bar'}, load_mappings(['./f', '-x', '--blah', 'foo:=bar']))
self.assertEquals({'a': '1', 'b': '2', 'c': '3'}, load_mappings(['c:=3', 'c:=', ':=3', 'a:=1', 'b:=2']))
def test_resource_name(self):
from roslib.names import resource_name
self.assertEquals('foo/bar', resource_name('foo', 'bar'))
self.assertEquals('bar', resource_name('foo', 'bar', my_pkg='foo'))
self.assertEquals('foo/bar', resource_name('foo', 'bar', my_pkg='bar'))
self.assertEquals('foo/bar', resource_name('foo', 'bar', my_pkg=''))
self.assertEquals('foo/bar', resource_name('foo', 'bar', my_pkg=None))
def test_resource_name_base(self):
from roslib.names import resource_name_base
self.assertEquals('', resource_name_base(''))
self.assertEquals('bar', resource_name_base('bar'))
self.assertEquals('bar', resource_name_base('foo/bar'))
self.assertEquals('bar', resource_name_base('/bar'))
self.assertEquals('', resource_name_base('foo/'))
def test_resource_name_package(self):
from roslib.names import resource_name_package
self.assertEquals(None, resource_name_package(''))
self.assertEquals(None, resource_name_package('foo'))
self.assertEquals('foo', resource_name_package('foo/'))
self.assertEquals('foo', resource_name_package('foo/bar'))
def test_package_resource_name(self):
from roslib.names import package_resource_name
self.assertEquals(('', ''), package_resource_name(''))
self.assertEquals(('', 'foo'), package_resource_name('foo'))
self.assertEquals(('foo', 'bar'), package_resource_name('foo/bar'))
self.assertEquals(('foo', ''), package_resource_name('foo/'))
try:
# only allowed single separator
package_resource_name('foo/bar/baz')
self.fail('should have raised ValueError')
except ValueError:
pass
def test_is_legal_resource_name(self):
from roslib.names import is_legal_resource_name
failures = [None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
' name', 'name ',
'~name', '/name',
'1name', 'foo\\']
for f in failures:
self.failIf(is_legal_resource_name(f), f)
tests = ['f', 'f1', 'f_', 'foo', 'foo_bar', 'foo/bar', 'roslib/Log']
for t in tests:
self.assert_(is_legal_resource_name(t), t)
def test_is_legal_name(self):
from roslib.names import is_legal_name
failures = [None,
'foo++', 'foo-bar', '#foo',
'hello\n', '\t', ' name', 'name ',
'f//b',
'1name', 'foo\\']
for f in failures:
self.failIf(is_legal_name(f), f)
tests = ['',
'f', 'f1', 'f_', 'f/', 'foo', 'foo_bar', 'foo/bar', 'foo/bar/baz',
'~f', '~a/b/c',
'~/f',
'/a/b/c/d', '/']
for t in tests:
self.assert_(is_legal_name(t), '[%s]' % t)
def test_is_legal_base_name(self):
from roslib.names import is_legal_base_name
failures = [None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
'f/', 'foo/bar', '/', '/a',
'f//b',
'~f', '~a/b/c',
' name', 'name ',
'1name', 'foo\\']
for f in failures:
self.failIf(is_legal_base_name(f), f)
tests = ['f', 'f1', 'f_', 'foo', 'foo_bar']
for t in tests:
self.assert_(is_legal_base_name(t), '[%s]' % t)
def test_is_legal_resource_base_name(self):
from roslib.names import is_legal_resource_base_name
failures = [None, '', 'hello\n', '\t', 'foo++', 'foo-bar', '#foo',
'f/', 'foo/bar', '/', '/a',
'f//b',
'~f', '~a/b/c',
'~/f',
' name', 'name ',
'1name', 'foo\\']
for f in failures:
self.failIf(is_legal_resource_base_name(f), f)
tests = ['f', 'f1', 'f_', 'foo', 'foo_bar']
for t in tests:
self.assert_(is_legal_resource_base_name(t), '[%s]' % t)
def test_resolve_name(self):
from roslib.names import resolve_name
# TODO: test with remappings
tests = [
('', '/', '/'),
('', '/node', '/'),
('', '/ns1/node', '/ns1/'),
('foo', '', '/foo'),
('foo/', '', '/foo'),
('/foo', '', '/foo'),
('/foo/', '', '/foo'),
('/foo', '/', '/foo'),
('/foo/', '/', '/foo'),
('/foo', '/bar', '/foo'),
('/foo/', '/bar', '/foo'),
('foo', '/ns1/ns2', '/ns1/foo'),
('foo', '/ns1/ns2/', '/ns1/foo'),
('foo', '/ns1/ns2/ns3/', '/ns1/ns2/foo'),
('foo/', '/ns1/ns2', '/ns1/foo'),
('/foo', '/ns1/ns2', '/foo'),
('foo/bar', '/ns1/ns2', '/ns1/foo/bar'),
('foo//bar', '/ns1/ns2', '/ns1/foo/bar'),
('foo/bar', '/ns1/ns2/ns3', '/ns1/ns2/foo/bar'),
('foo//bar//', '/ns1/ns2/ns3', '/ns1/ns2/foo/bar'),
('~foo', '/', '/foo'),
('~foo', '/node', '/node/foo'),
('~foo', '/ns1/ns2', '/ns1/ns2/foo'),
('~foo/', '/ns1/ns2', '/ns1/ns2/foo'),
('~foo/bar', '/ns1/ns2', '/ns1/ns2/foo/bar'),
# #3044
('~/foo', '/', '/foo'),
('~/foo', '/node', '/node/foo'),
('~/foo', '/ns1/ns2', '/ns1/ns2/foo'),
('~/foo/', '/ns1/ns2', '/ns1/ns2/foo'),
('~/foo/bar', '/ns1/ns2', '/ns1/ns2/foo/bar'),
]
for name, node_name, v in tests:
self.assertEquals(v, resolve_name(name, node_name))
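# A minimal, self-contained stand-in (not roslib's implementation) for the namespace()
# semantics exercised in test_namespace above: strip any trailing '/' and keep
# everything up to and including the last remaining '/', with '' and '/' both
# mapping to '/'.
def _namespace_sketch(name):
    if name is None:
        raise ValueError('name')
    if not isinstance(name, str):
        raise TypeError('name must be a string')
    stripped = name.rstrip('/')
    if not stripped:
        return '/'
    return stripped[:stripped.rfind('/') + 1] or '/'


if __name__ == '__main__':
    assert _namespace_sketch('') == '/'
    assert _namespace_sketch('/foo') == '/'
    assert _namespace_sketch('/foo/bar') == '/foo/'
    assert _namespace_sketch('/foo/bar/baz/') == '/foo/bar/'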
|
import abc
import json
from absl import flags
from perfkitbenchmarker import placement_group
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import util
FLAGS = flags.FLAGS
class AzurePlacementGroupSpec(placement_group.BasePlacementGroupSpec):
"""Object containing the information needed to create an AzurePlacementGroup.
Attributes:
zone: The Azure zone the Placement Group is in.
"""
CLOUD = azure.CLOUD
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword
arguments to construct in order to decode the named option.
"""
result = super(AzurePlacementGroupSpec,
cls)._GetOptionDecoderConstructions()
result.update({
'resource_group': (option_decoders.StringDecoder, {'none_ok': False}),
'placement_group_style': (option_decoders.EnumDecoder, {
'valid_values': placement_group.PLACEMENT_GROUP_OPTIONS,
'default': placement_group.PLACEMENT_GROUP_NONE,
})
})
return result
class AzurePlacementGroup(placement_group.BasePlacementGroup):
"""Object representing an Azure Placement Group."""
CLOUD = azure.CLOUD
def __init__(self, azure_placement_group_spec):
"""Init method for AzurePlacementGroup.
Args:
azure_placement_group_spec: Object containing the
information needed to create an AzurePlacementGroup.
"""
super(AzurePlacementGroup, self).__init__(azure_placement_group_spec)
self.resource_group = azure_placement_group_spec.resource_group
self.name = '%s-%s' % (self.resource_group, self.zone)
self.location = util.GetLocationFromZone(self.zone)
self.strategy = azure_placement_group_spec.placement_group_style
@abc.abstractmethod
def AddVmArgs(self):
"""List of arguments to add to vm creation."""
raise NotImplementedError()
class AzureAvailSet(AzurePlacementGroup):
"""Object representing an Azure Availability Set."""
def _Create(self):
"""Create the availability set."""
create_cmd = [
azure.AZURE_PATH, 'vm', 'availability-set', 'create',
'--resource-group', self.resource_group, '--name', self.name
]
if self.location:
create_cmd.extend(['--location', self.location])
vm_util.IssueCommand(create_cmd)
def _Delete(self):
pass
@vm_util.Retry()
def _Exists(self):
"""Returns True if the availability set exists."""
show_cmd = [
azure.AZURE_PATH, 'vm', 'availability-set', 'show', '--output', 'json',
'--resource-group', self.resource_group, '--name', self.name
]
stdout, _, _ = vm_util.IssueCommand(show_cmd, raise_on_failure=False)
return bool(json.loads(stdout))
def AddVmArgs(self):
"""Returns Azure command to add VM to availability set."""
return ['--availability-set', self.name]
class AzureProximityGroup(AzurePlacementGroup):
"""Object representing an Azure Proximity Placement Group."""
def _Create(self):
"""Create the Proximity Placement Group."""
create_cmd = [
azure.AZURE_PATH, 'ppg', 'create',
'--resource-group', self.resource_group, '--name', self.name
]
if self.location:
create_cmd.extend(['--location', self.location])
vm_util.IssueCommand(create_cmd)
def _Delete(self):
pass
@vm_util.Retry()
def _Exists(self):
"""Returns True if the Proximity Placement Group exists."""
show_cmd = [
azure.AZURE_PATH, 'ppg', 'show', '--output', 'json',
'--resource-group', self.resource_group, '--name', self.name
]
stdout, _, _ = vm_util.IssueCommand(show_cmd, raise_on_failure=False)
return bool(json.loads(stdout))
def AddVmArgs(self):
"""Returns Azure command to add VM to placement group."""
return ['--ppg', self.name]
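# A minimal sketch (not how PerfKitBenchmarker wires this up internally) of the intent
# behind AddVmArgs(): the returned flags are meant to be appended to the `az vm create`
# invocation for a VM placed in the group. The resource-group and VM names below are
# illustrative placeholders.
if __name__ == '__main__':
    base_cmd = [
        azure.AZURE_PATH, 'vm', 'create',
        '--resource-group', 'example-rg', '--name', 'example-vm'
    ]
    # An availability set contributes ['--availability-set', <name>];
    # a proximity placement group contributes ['--ppg', <name>].
    print(base_cmd + ['--availability-set', 'example-rg-eastus-1'])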
|
import logging
import os
import sys
import textwrap
from itertools import chain
import click
from twtxt.cache import Cache
from twtxt.config import Config
from twtxt.helper import run_pre_tweet_hook, run_post_tweet_hook
from twtxt.helper import sort_and_truncate_tweets
from twtxt.helper import style_timeline, style_source, style_source_with_status
from twtxt.helper import validate_created_at, validate_text, validate_config_key
from twtxt.log import init_logging
from twtxt.mentions import expand_mentions
from twtxt.models import Tweet, Source
from twtxt.twfile import get_local_tweets, add_local_tweet
from twtxt.twhttp import get_remote_tweets, get_remote_status
logger = logging.getLogger(__name__)
@click.group()
@click.option("--config", "-c",
type=click.Path(exists=True, file_okay=True, readable=True, writable=True, resolve_path=True),
help="Specify a custom config file location.")
@click.option("--verbose", "-v",
is_flag=True, default=False,
help="Enable verbose output for debugging purposes.")
@click.version_option()
@click.pass_context
def cli(ctx, config, verbose):
"""Decentralised, minimalist microblogging service for hackers."""
init_logging(debug=verbose)
if ctx.invoked_subcommand == "quickstart":
return # Skip initializing config file
try:
if config:
conf = Config.from_file(config)
else:
conf = Config.discover()
except ValueError as e:
if "Error in config file." in str(e):
click.echo("✗ Please correct the errors mentioned above an run twtxt again.")
else:
click.echo("✗ Config file not found or not readable. You may want to run twtxt quickstart.")
sys.exit()
ctx.default_map = conf.build_default_map()
ctx.obj = {'conf': conf}
@cli.command()
@click.option("--created-at",
callback=validate_created_at,
help="ISO 8601 formatted datetime string to use in Tweet, instead of current time.")
@click.option("--twtfile", "-f",
type=click.Path(file_okay=True, writable=True, resolve_path=True),
help="Location of your twtxt file. (Default: twtxt.txt)")
@click.argument("text", callback=validate_text, nargs=-1)
@click.pass_context
def tweet(ctx, created_at, twtfile, text):
"""Append a new tweet to your twtxt file."""
text = expand_mentions(text)
tweet = Tweet(text, created_at) if created_at else Tweet(text)
pre_tweet_hook = ctx.obj["conf"].pre_tweet_hook
if pre_tweet_hook:
run_pre_tweet_hook(pre_tweet_hook, ctx.obj["conf"].options)
if not add_local_tweet(tweet, twtfile):
click.echo("✗ Couldn’t write to file.")
else:
post_tweet_hook = ctx.obj["conf"].post_tweet_hook
if post_tweet_hook:
run_post_tweet_hook(post_tweet_hook, ctx.obj["conf"].options)
@cli.command()
@click.option("--pager/--no-pager",
is_flag=True,
help="Use a pager to display content. (Default: False)")
@click.option("--limit", "-l",
type=click.INT,
help="Limit total number of shown tweets. (Default: 20)")
@click.option("--twtfile", "-f",
type=click.Path(exists=True, file_okay=True, readable=True, resolve_path=True),
help="Location of your twtxt file. (Default: twtxt.txt")
@click.option("--ascending", "sorting",
flag_value="ascending",
help="Sort timeline in ascending order.")
@click.option("--descending", "sorting",
flag_value="descending",
help="Sort timeline in descending order. (Default)")
@click.option("--timeout",
type=click.FLOAT,
help="Maximum time requests are allowed to take. (Default: 5.0)")
@click.option("--porcelain",
is_flag=True,
help="Style output in an easy-to-parse format. (Default: False)")
@click.option("--source", "-s",
help="Only show feed of the given source. (Can be nick or URL)")
@click.option("--cache/--no-cache",
is_flag=True,
help="Cache remote twtxt files locally. (Default: True)")
@click.option("--force-update",
is_flag=True,
help="Force update even if cache is up-to-date. (Default: False)")
@click.pass_context
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
"""Retrieve your personal timeline."""
if source:
source_obj = ctx.obj["conf"].get_source_by_nick(source)
if not source_obj:
logger.debug("Not following {0}, trying as URL".format(source))
source_obj = Source(source, source)
sources = [source_obj]
else:
sources = ctx.obj["conf"].following
tweets = []
if cache:
try:
with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache:
force_update = force_update or not cache.is_valid
if force_update:
tweets = get_remote_tweets(sources, limit, timeout, cache)
else:
logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format(
cache.update_interval))
# Behold, almighty list comprehensions! (I might have gone overboard here…)
tweets = list(chain.from_iterable([cache.get_tweets(source.url) for source in sources]))
except OSError as e:
logger.debug(e)
tweets = get_remote_tweets(sources, limit, timeout)
else:
tweets = get_remote_tweets(sources, limit, timeout)
if twtfile and not source:
source = Source(ctx.obj["conf"].nick, ctx.obj["conf"].twturl, file=twtfile)
tweets.extend(get_local_tweets(source, limit))
if not tweets:
return
tweets = sort_and_truncate_tweets(tweets, sorting, limit)
if pager:
click.echo_via_pager(style_timeline(tweets, porcelain))
else:
click.echo(style_timeline(tweets, porcelain))
@cli.command()
@click.option("--pager/--no-pager",
is_flag=True,
help="Use a pager to display content. (Default: False)")
@click.option("--limit", "-l",
type=click.INT,
help="Limit total number of shown tweets. (Default: 20)")
@click.option("--ascending", "sorting",
flag_value="ascending",
help="Sort timeline in ascending order.")
@click.option("--descending", "sorting",
flag_value="descending",
help="Sort timeline in descending order. (Default)")
@click.option("--timeout",
type=click.FLOAT,
help="Maximum time requests are allowed to take. (Default: 5.0)")
@click.option("--porcelain",
is_flag=True,
help="Style output in an easy-to-parse format. (Default: False)")
@click.option("--cache/--no-cache",
is_flag=True,
help="Cache remote twtxt files locally. (Default: True)")
@click.option("--force-update",
is_flag=True,
help="Force update even if cache is up-to-date. (Default: False)")
@click.argument("source")
@click.pass_context
def view(ctx, **kwargs):
"""Show feed of given source."""
ctx.forward(timeline)
@cli.command()
@click.option("--check/--no-check",
is_flag=True,
help="Check if source URL is valid and readable. (Default: True)")
@click.option("--timeout",
type=click.FLOAT,
help="Maximum time requests are allowed to take. (Default: 5.0)")
@click.option("--porcelain",
is_flag=True,
help="Style output in an easy-to-parse format. (Default: False)")
@click.pass_context
def following(ctx, check, timeout, porcelain):
"""Return the list of sources you’re following."""
sources = ctx.obj['conf'].following
if check:
sources = get_remote_status(sources, timeout)
for (source, status) in sources:
click.echo(style_source_with_status(source, status, porcelain))
else:
sources = sorted(sources, key=lambda source: source.nick)
for source in sources:
click.echo(style_source(source, porcelain))
@cli.command()
@click.argument("nick")
@click.argument("url")
@click.option("--force", "-f",
flag_value=True,
help="Force adding and overwriting nick")
@click.pass_context
def follow(ctx, nick, url, force):
"""Add a new source to your followings."""
source = Source(nick, url)
sources = ctx.obj['conf'].following
if not force:
if source.nick in (source.nick for source in sources):
click.confirm("➤ You’re already following {0}. Overwrite?".format(
click.style(source.nick, bold=True)), default=False, abort=True)
_, status = get_remote_status([source])[0]
if not status or status.status_code != 200:
click.confirm("➤ The feed of {0} at {1} is not available. Follow anyway?".format(
click.style(source.nick, bold=True),
click.style(source.url, bold=True)), default=False, abort=True)
ctx.obj['conf'].add_source(source)
click.echo("✓ You’re now following {0}.".format(
click.style(source.nick, bold=True)))
@cli.command()
@click.argument("nick")
@click.pass_context
def unfollow(ctx, nick):
"""Remove an existing source from your followings."""
source = ctx.obj['conf'].get_source_by_nick(nick)
try:
with Cache.discover() as cache:
cache.remove_tweets(source.url)
except OSError as e:
logger.debug(e)
ret_val = ctx.obj['conf'].remove_source_by_nick(nick)
if ret_val:
click.echo("✓ You’ve unfollowed {0}.".format(
click.style(source.nick, bold=True)))
else:
click.echo("✗ You’re not following {0}.".format(
click.style(nick, bold=True)))
@cli.command()
def quickstart():
"""Quickstart wizard for setting up twtxt."""
width = click.get_terminal_size()[0]
width = width if width <= 79 else 79
click.secho("twtxt - quickstart", fg="cyan")
click.secho("==================", fg="cyan")
click.echo()
help_text = "This wizard will generate a basic configuration file for twtxt with all mandatory options set. " \
"You can change all of these later with either twtxt itself or by editing the config file manually. " \
"Have a look at the docs to get information about the other available options and their meaning."
click.echo(textwrap.fill(help_text, width))
click.echo()
nick = click.prompt("➤ Please enter your desired nick", default=os.environ.get("USER", ""))
def overwrite_check(path):
if os.path.isfile(path):
click.confirm("➤ '{0}' already exists. Overwrite?".format(path), abort=True)
cfgfile = click.prompt("➤ Please enter the desired location for your config file",
os.path.join(Config.config_dir, Config.config_name),
type=click.Path(readable=True, writable=True, file_okay=True))
cfgfile = os.path.expanduser(cfgfile)
overwrite_check(cfgfile)
twtfile = click.prompt("➤ Please enter the desired location for your twtxt file",
os.path.expanduser("~/twtxt.txt"),
type=click.Path(readable=True, writable=True, file_okay=True))
twtfile = os.path.expanduser(twtfile)
overwrite_check(twtfile)
twturl = click.prompt("➤ Please enter the URL your twtxt file will be accessible from",
default="https://example.org/twtxt.txt")
disclose_identity = click.confirm("➤ Do you want to disclose your identity? Your nick and URL will be shared when "
"making HTTP requests", default=False)
click.echo()
add_news = click.confirm("➤ Do you want to follow the twtxt news feed?", default=True)
conf = Config.create_config(cfgfile, nick, twtfile, twturl, disclose_identity, add_news)
twtfile_dir = os.path.dirname(twtfile)
if not os.path.exists(twtfile_dir):
os.makedirs(twtfile_dir)
open(twtfile, "a").close()
click.echo()
click.echo("✓ Created config file at '{0}'.".format(click.format_filename(conf.config_file)))
click.echo("✓ Created twtxt file at '{0}'.".format(click.format_filename(twtfile)))
@cli.command()
@click.argument("key", required=False, callback=validate_config_key)
@click.argument("value", required=False)
@click.option("--remove",
flag_value=True,
help="Remove given item")
@click.option("--edit", "-e",
flag_value=True,
help="Open config file in editor")
@click.pass_context
def config(ctx, key, value, remove, edit):
"""Get or set config item."""
conf = ctx.obj["conf"]
if not edit and not key:
raise click.BadArgumentUsage("You have to specify either a key or use --edit.")
if edit:
return click.edit(filename=conf.config_file)
if remove:
try:
conf.cfg.remove_option(key[0], key[1])
except Exception as e:
logger.debug(e)
else:
conf.write_config()
return
if not value:
try:
click.echo(conf.cfg.get(key[0], key[1]))
except Exception as e:
logger.debug(e)
return
if not conf.cfg.has_section(key[0]):
conf.cfg.add_section(key[0])
conf.cfg.set(key[0], key[1], value)
conf.write_config()
main = cli
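# A minimal sketch of exercising the command group above with click's test runner;
# it only renders the top-level help text and touches no config or twtxt file.
if __name__ == "__main__":
    from click.testing import CliRunner

    runner = CliRunner()
    result = runner.invoke(cli, ["--help"])
    print(result.output)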
|
import itertools
class Independencies(object):
"""
Base class for independencies.
independencies class represents a set of Conditional Independence
assertions (eg: "X is independent of Y given Z" where X, Y and Z
are random variables) or Independence assertions (eg: "X is
independent of Y" where X and Y are random variables).
Initialize the independencies Class with Conditional Independence
assertions or Independence assertions.
Parameters
----------
assertions: Lists or Tuples
Each assertion is a list or tuple of the form: [event1,
event2 and event3]
eg: assertion ['X', 'Y', 'Z'] would be X is independent
of Y given Z.
Examples
--------
Creating an independencies object with one independence assertion:
Random Variable X is independent of Y
>>> independencies = Independencies(['X', 'Y'])
Creating an independencies object with three conditional
independence assertions:
First assertion is Random Variable X is independent of Y given Z.
>>> independencies = Independencies(['X', 'Y', 'Z'],
... ['a', ['b', 'c'], 'd'],
... ['l', ['m', 'n'], 'o'])
Public Methods
--------------
add_assertions
get_assertions
get_factorized_product
closure
entails
is_equivalent
"""
def __init__(self, *assertions):
self.independencies = []
self.add_assertions(*assertions)
def __str__(self):
string = "\n".join([str(assertion) for assertion in self.independencies])
return string
__repr__ = __str__
def __eq__(self, other):
if not isinstance(other, Independencies):
return False
return all(
independency in other.get_assertions()
for independency in self.get_assertions()
) and all(
independency in self.get_assertions()
for independency in other.get_assertions()
)
def __ne__(self, other):
return not self.__eq__(other)
def contains(self, assertion):
"""
Returns `True` if `assertion` is contained in this `Independencies`-object,
otherwise `False`.
Parameters
----------
assertion: IndependenceAssertion()-object
Examples
--------
>>> from pgmpy.independencies import Independencies, IndependenceAssertion
>>> ind = Independencies(['A', 'B', ['C', 'D']])
>>> IndependenceAssertion('A', 'B', ['C', 'D']) in ind
True
>>> # does not depend on variable order:
>>> IndependenceAssertion('B', 'A', ['D', 'C']) in ind
True
>>> # but does not check entailment:
>>> IndependenceAssertion('X', 'Y', 'Z') in Independencies(['X', 'Y'])
False
"""
if not isinstance(assertion, IndependenceAssertion):
raise TypeError(
f"' in <Independencies()>' requires IndependenceAssertion as left operand, not {type(assertion)}"
)
return assertion in self.get_assertions()
__contains__ = contains
def get_all_variables(self):
"""
Returns a set of all the variables in all the independence assertions.
"""
return frozenset().union(*[ind.all_vars for ind in self.independencies])
def get_assertions(self):
"""
Returns the independencies object which is a set of IndependenceAssertion objects.
Examples
--------
>>> from pgmpy.independencies import Independencies
>>> independencies = Independencies(['X', 'Y', 'Z'])
>>> independencies.get_assertions()
"""
return self.independencies
def add_assertions(self, *assertions):
"""
Adds assertions to independencies.
Parameters
----------
assertions: Lists or Tuples
Each assertion is a list or tuple of variable, independent_of and given.
Examples
--------
>>> from pgmpy.independencies import Independencies
>>> independencies = Independencies()
>>> independencies.add_assertions(['X', 'Y', 'Z'])
>>> independencies.add_assertions(['a', ['b', 'c'], 'd'])
"""
for assertion in assertions:
if isinstance(assertion, IndependenceAssertion):
self.independencies.append(assertion)
else:
try:
self.independencies.append(
IndependenceAssertion(assertion[0], assertion[1], assertion[2])
)
except IndexError:
self.independencies.append(
IndependenceAssertion(assertion[0], assertion[1])
)
def closure(self):
"""
Returns a new `Independencies()`-object that additionally contains those `IndependenceAssertions`
that are implied by the current independencies (using the `semi-graphoid axioms
<https://en.wikipedia.org/w/index.php?title=Conditional_independence&oldid=708760689#Rules_of_conditional_independence>`_;
see (Pearl, 1989, `Conditional Independence and its representations
<http://www.cs.technion.ac.il/~dang/journal_papers/pearl1989conditional.pdf>`_)).
Might be very slow if more than six variables are involved.
Examples
--------
>>> from pgmpy.independencies import Independencies
>>> ind1 = Independencies(('A', ['B', 'C'], 'D'))
>>> ind1.closure()
(A _|_ B | D, C)
(A _|_ B, C | D)
(A _|_ B | D)
(A _|_ C | D, B)
(A _|_ C | D)
>>> ind2 = Independencies(('W', ['X', 'Y', 'Z']))
>>> ind2.closure()
(W _|_ Y)
(W _|_ Y | X)
(W _|_ Z | Y)
(W _|_ Z, X, Y)
(W _|_ Z)
(W _|_ Z, X)
(W _|_ X, Y)
(W _|_ Z | X)
(W _|_ Z, Y | X)
[..]
"""
def single_var(var):
"Checks if var represents a single variable"
if not hasattr(var, "__iter__"):
return True
else:
return len(var) == 1
def sg0(ind):
"Symmetry rule: 'X ⟂ Y | Z' -> 'Y ⟂ X | Z'"
return IndependenceAssertion(ind.event2, ind.event1, ind.event3)
# since X⟂Y|Z == Y⟂X|Z in pgmpy, sg0 (symmetry) is not used as an axiom/rule.
# instead we use a decorator for the other axioms to apply them on both sides
def apply_left_and_right(func):
def symmetric_func(*args):
if len(args) == 1:
return func(args[0]) + func(sg0(args[0]))
if len(args) == 2:
return (
func(*args)
+ func(args[0], sg0(args[1]))
+ func(sg0(args[0]), args[1])
+ func(sg0(args[0]), sg0(args[1]))
)
return symmetric_func
@apply_left_and_right
def sg1(ind):
"Decomposition rule: 'X ⟂ Y,W | Z' -> 'X ⟂ Y | Z', 'X ⟂ W | Z'"
if single_var(ind.event2):
return []
else:
return [
IndependenceAssertion(ind.event1, ind.event2 - {elem}, ind.event3)
for elem in ind.event2
]
@apply_left_and_right
def sg2(ind):
"Weak Union rule: 'X ⟂ Y,W | Z' -> 'X ⟂ Y | W,Z', 'X ⟂ W | Y,Z' "
if single_var(ind.event2):
return []
else:
return [
IndependenceAssertion(
ind.event1, ind.event2 - {elem}, {elem} | ind.event3
)
for elem in ind.event2
]
@apply_left_and_right
def sg3(ind1, ind2):
"Contraction rule: 'X ⟂ W | Y,Z' & 'X ⟂ Y | Z' -> 'X ⟂ W,Y | Z'"
if ind1.event1 != ind2.event1:
return []
Y = ind2.event2
Z = ind2.event3
Y_Z = ind1.event3
if Y < Y_Z and Z < Y_Z and Y.isdisjoint(Z):
return [IndependenceAssertion(ind1.event1, ind1.event2 | Y, Z)]
else:
return []
# apply semi-graphoid axioms as long as new independencies are found.
all_independencies = set()
new_inds = set(self.independencies)
while new_inds:
new_pairs = (
set(itertools.permutations(new_inds, 2))
| set(itertools.product(new_inds, all_independencies))
| set(itertools.product(all_independencies, new_inds))
)
all_independencies |= new_inds
new_inds = set(
sum(
[sg1(ind) for ind in new_inds]
+ [sg2(ind) for ind in new_inds]
+ [sg3(*inds) for inds in new_pairs],
[],
)
)
new_inds -= all_independencies
return Independencies(*list(all_independencies))
def entails(self, entailed_independencies):
"""
Returns `True` if the `entailed_independencies` are implied by this `Independencies`-object, otherwise `False`.
Entailment is checked using the semi-graphoid axioms.
Might be very slow if more than six variables are involved.
Parameters
----------
entailed_independencies: Independencies()-object
Examples
--------
>>> from pgmpy.independencies import Independencies
>>> ind1 = Independencies([['A', 'B'], ['C', 'D'], 'E'])
>>> ind2 = Independencies(['A', 'C', 'E'])
>>> ind1.entails(ind2)
True
>>> ind2.entails(ind1)
False
"""
if not isinstance(entailed_independencies, Independencies):
return False
implications = self.closure().get_assertions()
return all(
ind in implications for ind in entailed_independencies.get_assertions()
)
def is_equivalent(self, other):
"""
Returns True if the two Independencies-objects are equivalent, otherwise False.
(i.e. any Bayesian Network that satisfies one set
of conditional independencies also satisfies the other).
Might be very slow if more than six variables are involved.
Parameters
----------
other: Independencies()-object
Examples
--------
>>> from pgmpy.independencies import Independencies
>>> ind1 = Independencies(['X', ['Y', 'W'], 'Z'])
>>> ind2 = Independencies(['X', 'Y', 'Z'], ['X', 'W', 'Z'])
>>> ind3 = Independencies(['X', 'Y', 'Z'], ['X', 'W', 'Z'], ['X', 'Y', ['W','Z']])
>>> ind1.is_equivalent(ind2)
False
>>> ind1.is_equivalent(ind3)
True
"""
return self.entails(other) and other.entails(self)
# TODO: write reduce function.
def reduce(self):
"""
Add function to remove duplicate Independence Assertions
"""
pass
def latex_string(self):
"""
Returns a list of string.
Each string represents the IndependenceAssertion in latex.
"""
return [assertion.latex_string() for assertion in self.get_assertions()]
def get_factorized_product(self, random_variables=None, latex=False):
# TODO: Write this whole function
#
# The problem right now is that the factorized product for all
# P(A, B, C), P(B, A, C) etc should be same but on solving normally
# we get different results which have to be simplified to a simpler
# form. How to do that ??? and also how to decide which is the most
# simplified form???
#
pass
class IndependenceAssertion(object):
r"""
Represents Conditional Independence or Independence assertion.
Each assertion has 3 attributes: event1, event2, event3.
    For the assertion
    .. math:: U \perp X, Y | Z
    (read as: Random Variable U is independent of X and Y given Z), the attributes would be:
event1 = {U}
event2 = {X, Y}
event3 = {Z}
Parameters
----------
event1: String or List of strings
Random Variable which is independent.
event2: String or list of strings.
Random Variables from which event1 is independent
event3: String or list of strings.
Random Variables given which event1 is independent of event2.
Examples
--------
>>> from pgmpy.independencies import IndependenceAssertion
>>> assertion = IndependenceAssertion('U', 'X')
>>> assertion = IndependenceAssertion('U', ['X', 'Y'])
>>> assertion = IndependenceAssertion('U', ['X', 'Y'], 'Z')
>>> assertion = IndependenceAssertion(['U', 'V'], ['X', 'Y'], ['Z', 'A'])
Public Methods
--------------
get_assertion
"""
def __init__(self, event1=[], event2=[], event3=[]):
r"""
Initialize an IndependenceAssertion object with event1, event2 and event3 attributes.
        event1      event2      event3
           ^           ^           ^
           |           |           |
          (U    ||    X, Y    |    Z)    read as: Random variable U is independent of X and Y given Z.
"""
if event1 and not event2:
raise ValueError("event2 needs to be specified")
if any([event2, event3]) and not event1:
raise ValueError("event1 needs to be specified")
if event3 and not all([event1, event2]):
raise ValueError(
"event1" if not event1 else "event2" + " needs to be specified"
)
self.event1 = frozenset(self._return_list_if_str(event1))
self.event2 = frozenset(self._return_list_if_str(event2))
self.event3 = frozenset(self._return_list_if_str(event3))
self.all_vars = frozenset().union(self.event1, self.event2, self.event3)
def __str__(self):
if self.event3:
return "({event1} _|_ {event2} | {event3})".format(
event1=", ".join(self.event1),
event2=", ".join(self.event2),
event3=", ".join(self.event3),
)
else:
return "({event1} _|_ {event2})".format(
event1=", ".join(self.event1), event2=", ".join(self.event2)
)
__repr__ = __str__
def __eq__(self, other):
if not isinstance(other, IndependenceAssertion):
return False
return (self.event1, self.event2, self.event3) == other.get_assertion() or (
self.event2,
self.event1,
self.event3,
) == other.get_assertion()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((frozenset((self.event1, self.event2)), self.event3))
@staticmethod
def _return_list_if_str(event):
"""
If variable is a string returns a list containing variable.
Else returns variable itself.
"""
if isinstance(event, str):
return [event]
else:
return event
def get_assertion(self):
"""
        Returns a tuple of the attributes: event1, event2, event3.
Examples
--------
>>> from pgmpy.independencies import IndependenceAssertion
>>> asser = IndependenceAssertion('X', 'Y', 'Z')
>>> asser.get_assertion()
"""
return self.event1, self.event2, self.event3
    def latex_string(self):
        """Return the assertion as a LaTeX string."""
        return r"%s \perp %s \mid %s" % (
", ".join(self.event1),
", ".join(self.event2),
", ".join(self.event3),
)
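# A minimal usage sketch (not part of the original module). It mirrors the
# doctest examples in the docstrings above and only runs when the module is
# executed directly: closure() applies the semi-graphoid axioms (sg1-sg3) until
# a fixed point is reached, and entails() checks implication against that closure.
if __name__ == "__main__":
    ind = Independencies(["A", ["B", "C"], "D"])  # A _|_ B, C | D
    print(ind.closure())
    # Decomposition implies A _|_ B | D, so this prints True.
    print(ind.entails(Independencies(["A", "B", "D"])))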
|
import json
import mock
from Tests.test_api.base import RestAPITestBase
class TestSettingsView(RestAPITestBase):
def test_get_mute(self):
"""
Test for api get mute.
"""
url = self.get_server_url() + "/settings/mute"
headers = {"Content-Type": "application/json"}
self.settings.options.mute = False
result = self.client.get(url, headers=headers)
expected_content = {
"mute": False
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_mute(self):
"""
Test for api set mute.
"""
url = self.get_server_url() + "/settings/mute"
data = {"mute": "True"}
headers = {"Content-Type": "application/json"}
self.settings.options.mute = False
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"mute": True
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_deaf(self):
"""
Test for api get deaf.
"""
url = self.get_server_url() + "/settings/deaf"
headers = {"Content-Type": "application/json"}
self.settings.options.deaf = False
result = self.client.get(url, headers=headers)
expected_content = {
"deaf": False
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_deaf(self):
"""
Test for api set deaf.
"""
url = self.get_server_url() + "/settings/deaf"
data = {"deaf": "True"}
headers = {"Content-Type": "application/json"}
self.settings.options.deaf = False
with mock.patch("kalliope.core.SignalLauncher.SignalLauncher.get_order_instance") as mock_order:
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"deaf": True
}
mock_order.assert_called_once()
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_recognizer_multiplier(self):
"""
Test for api get recognizer_multiplier.
"""
url = self.get_server_url() + "/settings/recognizer_multiplier"
headers = {"Content-Type": "application/json"}
self.settings.options.recognizer_multiplier = 3000
result = self.client.get(url, headers=headers)
expected_content = {
"recognizer_multiplier": 3000
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_recognizer_multiplier(self):
"""
Test for api set recognizer_multiplier.
"""
url = self.get_server_url() + "/settings/recognizer_multiplier"
data = {"recognizer_multiplier": "6000"}
headers = {"Content-Type": "application/json"}
        self.settings.options.recognizer_multiplier = 4000
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"recognizer_multiplier": "6000"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_recognizer_energy_ratio(self):
"""
        Test for api get recognizer_energy_ratio.
"""
url = self.get_server_url() + "/settings/recognizer_energy_ratio"
headers = {"Content-Type": "application/json"}
self.settings.options.recognizer_energy_ratio = 30
result = self.client.get(url, headers=headers)
expected_content = {
"recognizer_energy_ratio": 30
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_recognizer_energy_ratio(self):
"""
Test for api set recognizer_energy_ratio.
"""
url = self.get_server_url() + "/settings/recognizer_energy_ratio"
data = {"recognizer_energy_ratio": "60"}
headers = {"Content-Type": "application/json"}
        self.settings.options.recognizer_energy_ratio = 40
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"recognizer_energy_ratio": "60"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_recognizer_recording_timeout(self):
"""
Test for api get recognizer_recording_timeout.
"""
url = self.get_server_url() + "/settings/recognizer_recording_timeout"
headers = {"Content-Type": "application/json"}
self.settings.options.recognizer_recording_timeout = 3000
result = self.client.get(url, headers=headers)
expected_content = {
"recognizer_recording_timeout": 3000
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_recognizer_recording_timeout(self):
"""
Test for api set recognizer_recording_timeout.
"""
url = self.get_server_url() + "/settings/recognizer_recording_timeout"
data = {"recognizer_recording_timeout": "6000"}
headers = {"Content-Type": "application/json"}
        self.settings.options.recognizer_recording_timeout = 4000
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"recognizer_recording_timeout": "6000"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_recognizer_recording_timeout_with_silence(self):
"""
        Test for api get recognizer_recording_timeout_with_silence.
"""
url = self.get_server_url() + "/settings/recognizer_recording_timeout_with_silence"
headers = {"Content-Type": "application/json"}
self.settings.options.recognizer_recording_timeout_with_silence = 30
result = self.client.get(url, headers=headers)
expected_content = {
"recognizer_recording_timeout_with_silence": 30
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_recognizer_recording_timeout_with_silence(self):
"""
Test for api set recognizer_recording_timeout_with_silence.
"""
url = self.get_server_url() + "/settings/recognizer_recording_timeout_with_silence"
data = {"recognizer_recording_timeout_with_silence": "60"}
headers = {"Content-Type": "application/json"}
        self.settings.options.recognizer_recording_timeout_with_silence = 40
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"recognizer_recording_timeout_with_silence": "60"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_default_tts(self):
"""
Test for api get default tts name.
"""
url = self.get_server_url() + "/settings/default_tts"
headers = {"Content-Type": "application/json"}
self.settings.default_tts_name = "test"
result = self.client.get(url, headers=headers)
expected_content = {
"default_tts": "test"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_default_tts(self):
"""
Test for api set default_tts.
"""
url = self.get_server_url() + "/settings/default_tts"
data = {"default_tts": "pico2wave"}
headers = {"Content-Type": "application/json"}
self.settings.default_tts_name = "test"
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"default_tts": "pico2wave"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_default_stt(self):
"""
Test for api get default stt name.
"""
url = self.get_server_url() + "/settings/default_stt"
headers = {"Content-Type": "application/json"}
self.settings.default_stt_name = "test"
result = self.client.get(url, headers=headers)
expected_content = {
"default_stt": "test"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_default_stt(self):
"""
Test for api set default_stt.
"""
url = self.get_server_url() + "/settings/default_stt"
data = {"default_stt": "google"}
headers = {"Content-Type": "application/json"}
self.settings.default_stt_name = "test"
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"default_stt": "google"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_default_player(self):
"""
Test for api get default player.
"""
url = self.get_server_url() + "/settings/default_player"
headers = {"Content-Type": "application/json"}
self.settings.default_player_name = "test"
result = self.client.get(url, headers=headers)
expected_content = {
"default_player": "test"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_default_player(self):
"""
Test for api set default_player.
"""
url = self.get_server_url() + "/settings/default_player"
data = {"default_player": "mplayer"}
headers = {"Content-Type": "application/json"}
self.settings.default_player_name = "test"
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"default_player": "mplayer"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_default_trigger(self):
"""
Test for api get default trigger.
"""
url = self.get_server_url() + "/settings/default_trigger"
headers = {"Content-Type": "application/json"}
self.settings.default_trigger_name = "test"
result = self.client.get(url, headers=headers)
expected_content = {
"default_trigger": "test"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_default_trigger(self):
"""
Test for api set default_trigger.
"""
url = self.get_server_url() + "/settings/default_trigger"
data = {"default_trigger": "snowboy"}
headers = {"Content-Type": "application/json"}
self.settings.default_trigger_name = "test"
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"default_trigger": "snowboy"
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_hooks(self):
"""
Test for api get hooks.
"""
url = self.get_server_url() + "/settings/hooks"
headers = {"Content-Type": "application/json"}
self.settings.hooks = {
"on_start": "on-start-synapse",
"on_waiting_for_trigger": {},
"on_triggered": "on-triggered-synapse",
"on_start_listening": {},
"on_stop_listening": {},
"on_order_found": {},
"on_order_not_found": "order-not-found-synapse",
"on_processed_synapses": {},
"on_deaf": {},
"on_undeaf": {},
"on_start_speaking": {},
"on_stop_speaking": {},
"on_stt_error": {}
}
result = self.client.get(url, headers=headers)
expected_content = {
"hooks": {
"on_start": "on-start-synapse",
"on_waiting_for_trigger": {},
"on_triggered": "on-triggered-synapse",
"on_start_listening": {},
"on_stop_listening": {},
"on_order_found": {},
"on_order_not_found": "order-not-found-synapse",
"on_processed_synapses": {},
"on_deaf": {},
"on_undeaf": {},
"on_start_speaking": {},
"on_stop_speaking": {},
"on_stt_error": {}
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_hooks(self):
"""
Test for api set hooks.
"""
url = self.get_server_url() + "/settings/hooks"
data = {"on_waiting_for_trigger": "synapse"}
headers = {"Content-Type": "application/json"}
self.settings.hooks = {
"on_start": "on-start-synapse",
"on_waiting_for_trigger": {},
"on_triggered": "on-triggered-synapse",
"on_start_listening": {},
"on_stop_listening": {},
"on_order_found": {},
"on_order_not_found": "order-not-found-synapse",
"on_processed_synapses": {},
"on_deaf": {},
"on_undeaf": {},
"on_start_speaking": {},
"on_stop_speaking": {},
"on_stt_error": {}
}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"hooks": {
"on_start": "on-start-synapse",
"on_waiting_for_trigger": "synapse",
"on_triggered": "on-triggered-synapse",
"on_start_listening": {},
"on_stop_listening": {},
"on_order_found": {},
"on_order_not_found": "order-not-found-synapse",
"on_processed_synapses": {},
"on_deaf": {},
"on_undeaf": {},
"on_start_speaking": {},
"on_stop_speaking": {},
"on_stt_error": {}
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_get_variables(self):
"""
Test for api get variables.
"""
url = self.get_server_url() + "/settings/variables"
headers = {"Content-Type": "application/json"}
self.settings.variables = {
"test": "tust",
"tost": "tist"
}
result = self.client.get(url, headers=headers)
expected_content = {
"variables": {
"test": "tust",
"tost": "tist"
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
def test_set_variables(self):
"""
Test for api set variables.
"""
url = self.get_server_url() + "/settings/variables"
data = {"toto": "titi"}
headers = {"Content-Type": "application/json"}
self.settings.variables = {
"tt": "aa",
"uu": "ii"
}
result = self.client.post(url,
headers=headers,
data=json.dumps(data))
expected_content = {
"variables": {
"tt": "aa",
"uu": "ii",
"toto": "titi"
}
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(result.get_data().decode('utf-8')), sort_keys=True))
self.assertEqual(result.status_code, 200)
|
from homeassistant.components.lock import LockEntity
from .account import StarlineAccount, StarlineDevice
from .const import DOMAIN
from .entity import StarlineEntity
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the StarLine lock."""
account: StarlineAccount = hass.data[DOMAIN][entry.entry_id]
entities = []
for device in account.api.devices.values():
if device.support_state:
lock = StarlineLock(account, device)
if lock.is_locked is not None:
entities.append(lock)
async_add_entities(entities)
class StarlineLock(StarlineEntity, LockEntity):
"""Representation of a StarLine lock."""
def __init__(self, account: StarlineAccount, device: StarlineDevice):
"""Initialize the lock."""
super().__init__(account, device, "lock", "Security")
@property
def available(self):
"""Return True if entity is available."""
return super().available and self._device.online
@property
def device_state_attributes(self):
"""Return the state attributes of the lock.
Possible dictionary keys:
add_h - Additional sensor alarm status (high level)
add_l - Additional channel alarm status (low level)
door - Doors alarm status
hbrake - Hand brake alarm status
hijack - Hijack mode status
hood - Hood alarm status
ign - Ignition alarm status
pbrake - Brake pedal alarm status
shock_h - Shock sensor alarm status (high level)
shock_l - Shock sensor alarm status (low level)
tilt - Tilt sensor alarm status
trunk - Trunk alarm status
Documentation: https://developer.starline.ru/#api-Device-DeviceState
"""
return self._device.alarm_state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return (
"mdi:shield-check-outline" if self.is_locked else "mdi:shield-alert-outline"
)
@property
def is_locked(self):
"""Return true if lock is locked."""
return self._device.car_state.get("arm")
def lock(self, **kwargs):
"""Lock the car."""
self._account.api.set_car_state(self._device.device_id, "arm", True)
def unlock(self, **kwargs):
"""Unlock the car."""
self._account.api.set_car_state(self._device.device_id, "arm", False)
|
from __future__ import print_function
import argparse
import os
import platform
import subprocess
import sys
from distutils.spawn import find_executable
import rospkg
__version__ = '1.7.0'
class CleanupException(Exception):
pass
def _ask_and_call(cmds, cwd=None):
"""
Pretty print cmds, ask if they should be run, and if so, runs
them using _call().
:param cmds: a list of commands executed one after another, ``list``
:param cwd: (optional) set cwd of command that is executed, ``str``
:returns: ``True`` if cmds were run.
"""
# Pretty-print a string version of the commands
def quote(s):
return '"%s"' % s if ' ' in s else s
accepted = _ask('\n'.join([' '.join([quote(s) for s in c]) for c in cmds]))
if accepted:
_call(cmds, cwd)
return accepted
def _ask(comment):
"""
ask user with provided comment. If user responds with y, return True
:param comment: comment, ``str``
:return: ``True`` if user responds with y
"""
    sys.stdout.write('Okay to perform:\n\n%s\n(y/n)?\n' % comment)
    while True:
        response = sys.stdin.readline().strip().lower()
        if response in ['y', 'n']:
            break
    return response == 'y'
def _call(cmds, cwd=None):
"""
Runs cmds using subprocess.check_call.
:param cmds: a list of commands executed one after another, ``list``
:param cwd: (optional) set cwd of command that is executed, ``str``
"""
for c in cmds:
if cwd:
subprocess.check_call(c, cwd=cwd)
else:
subprocess.check_call(c)
def _usage():
print("""Usage: rosclean <command>
Commands:
\trosclean check\tCheck usage of log files
\trosclean purge\tRemove log files
""")
sys.exit(getattr(os, 'EX_USAGE', 1))
def _get_check_dirs():
home_dir = rospkg.get_ros_home()
log_dir = rospkg.get_log_dir()
dirs = [(log_dir, 'ROS node logs'),
(os.path.join(home_dir, 'rosmake'), 'rosmake logs')]
return [x for x in dirs if os.path.isdir(x[0])]
def _rosclean_cmd_check(args):
dirs = _get_check_dirs()
for d, label in dirs:
desc = get_human_readable_disk_usage(d)
print('%s %s' % (desc, label))
def _get_disk_usage_by_walking_tree(d):
total_size = 0
for dirpath, dirnames, filenames in os.walk(d):
for f in filenames:
fp = os.path.join(dirpath, f)
total_size += os.path.getsize(fp)
return total_size
def get_human_readable_disk_usage(d):
"""
Get human-readable disk usage for directory
    :param d: directory path, ``str``
:returns: human-readable disk usage (du -h), ``str``
"""
# only implemented on Linux and FreeBSD for now. Should work on OS X but need to verify first (du is not identical)
if platform.system() in ['Linux', 'FreeBSD']:
try:
return subprocess.Popen(['du', '-sh', d], stdout=subprocess.PIPE).communicate()[0].split()[0].decode()
except Exception:
raise CleanupException('rosclean is not supported on this platform')
elif platform.system() == 'Windows':
total_size = _get_disk_usage_by_walking_tree(d)
return 'Total Size: ' + str(total_size) + ' ' + d
else:
raise CleanupException('rosclean is not supported on this platform')
def get_disk_usage(d):
"""
Get disk usage in bytes for directory
:param d: directory path, ``str``
:returns: disk usage in bytes (du -b) or (du -A) * 1024, ``int``
:raises: :exc:`CleanupException` If get_disk_usage() cannot be used on this platform
"""
if platform.system() == 'Windows':
return _get_disk_usage_by_walking_tree(d)
# only implemented on Linux and FreeBSD for now. Should work on OS X but need to verify first (du is not identical)
cmd = None
unit = 1
du = find_executable('du')
if du is not None:
if platform.system() == 'Linux':
cmd = [du, '-sb', d]
elif platform.system() == 'FreeBSD':
cmd = [du, '-skA', d]
unit = 1024
try:
# detect BusyBox du command by following symlink
if os.path.basename(os.readlink(du)) == 'busybox':
cmd = [du, '-sk', d]
unit = 1024
except OSError:
# readlink raises OSError if the target is not symlink
pass
if cmd is None:
raise CleanupException('rosclean is not supported on this platform')
try:
return int(subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0].split()[0]) * unit
except Exception:
raise CleanupException('rosclean is not supported on this platform')
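# Worked example of the unit handling above (illustrative only): on Linux,
# ``du -sb <dir>`` already reports bytes, so ``unit`` stays 1; on FreeBSD or with
# a BusyBox ``du``, the output is in 1024-byte blocks, so a reported value of 12
# becomes 12 * 1024 = 12288 bytes.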
def _sort_file_by_oldest(d):
"""
Get files and directories in specified path sorted by last modified time
    :param d: directory path, ``str``
    :return: a list of files and directories sorted by last modified time (old first), ``list``
"""
files = os.listdir(d)
files.sort(key=lambda f: os.path.getmtime(os.path.join(d, f)))
return files
def _rosclean_cmd_purge(args):
dirs = _get_check_dirs()
for d, label in dirs:
if not args.size:
print('Purging %s.' % label)
if platform.system() == 'Windows':
cmds = [['cmd', '/c', 'rd', '/s', '/q', d]]
else:
cmds = [['rm', '-rf', d]]
try:
if args.y:
_call(cmds)
else:
print('PLEASE BE CAREFUL TO VERIFY THE COMMAND BELOW!')
_ask_and_call(cmds)
except Exception:
print('FAILED to execute command', file=sys.stderr)
else:
files = _sort_file_by_oldest(d)
log_size = get_disk_usage(d)
if log_size <= args.size * 1024 * 1024:
print('Directory size of %s is %d MB which is already below the requested threshold of %d MB.' % (label, log_size / 1024 / 1024, args.size))
continue
print('Purging %s until directory size is at most %d MB (currently %d MB).' % (label, args.size, log_size / 1024 / 1024))
if not args.y:
print('PLEASE BE CAREFUL TO VERIFY THE COMMAND BELOW!')
if not _ask('Purge some of old logs in %s' % d):
return
for f in files:
if log_size <= args.size * 1024 * 1024:
break
path = os.path.join(d, f)
log_size -= get_disk_usage(path)
if platform.system() == 'Windows':
cmds = [['cmd', '/c', 'rd', '/s', '/q', path]]
else:
cmds = [['rm', '-rf', path]]
try:
_call(cmds)
except Exception:
print('FAILED to execute command', file=sys.stderr)
def rosclean_main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(prog='rosclean')
subparsers = parser.add_subparsers(required=True, dest='{check,purge}') # help='sub-command help')
parser_check = subparsers.add_parser('check', help='Check usage of log files')
parser_check.set_defaults(func=_rosclean_cmd_check)
parser_purge = subparsers.add_parser('purge', help='Remove log files')
parser_purge.set_defaults(func=_rosclean_cmd_purge)
parser_purge.add_argument('-y', action='store_true', default=False, help='CAUTION: automatically confirms all questions to delete files')
parser_purge.add_argument('--size', action='store', default=None, type=int, help='Maximum total size in MB to keep when deleting old files')
args = parser.parse_args(argv[1:])
args.func(args)
if __name__ == '__main__':
rosclean_main()
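# Illustrative invocations of the entry point above (a sketch, assuming this
# module is installed as the ``rosclean`` script):
#
#   rosclean check              # print disk usage of the ROS log directories
#   rosclean purge              # ask for confirmation, then delete the log directories
#   rosclean purge -y           # same, but skip the confirmation prompt
#   rosclean purge --size 1024  # delete oldest logs until at most 1024 MB remain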
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.metrics import ms_ssim_score
import tensorflow as tf
class MsSsimScoreTest(tf.test.TestCase):
def test_on_one_vs_07_vs_zero_images(self):
"""Computes the SSIM value for 3 simple images."""
with tf.Graph().as_default():
generated_images = tf.stack([
tf.ones([64, 64, 3]),
tf.ones([64, 64, 3]) * 0.7,
tf.zeros([64, 64, 3]),
])
metric = ms_ssim_score.compute_msssim(generated_images, 1)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
result = metric(sess)
self.assertNear(result, 0.989989, 0.001)
if __name__ == '__main__':
tf.test.main()
|
import logging
from typing import Optional, Sequence
import voluptuous as vol
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import State
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import location as loc_util
_LOGGER = logging.getLogger(__name__)
def has_location(state: State) -> bool:
"""Test if state contains a valid location.
Async friendly.
"""
# type ignore: https://github.com/python/mypy/issues/7207
return (
isinstance(state, State) # type: ignore
and isinstance(state.attributes.get(ATTR_LATITUDE), float)
and isinstance(state.attributes.get(ATTR_LONGITUDE), float)
)
def closest(
latitude: float, longitude: float, states: Sequence[State]
) -> Optional[State]:
"""Return closest state to point.
Async friendly.
"""
with_location = [state for state in states if has_location(state)]
if not with_location:
return None
return min(
with_location,
key=lambda state: loc_util.distance(
state.attributes.get(ATTR_LATITUDE),
state.attributes.get(ATTR_LONGITUDE),
latitude,
longitude,
)
or 0,
)
def find_coordinates(
hass: HomeAssistantType, entity_id: str, recursion_history: Optional[list] = None
) -> Optional[str]:
"""Find the gps coordinates of the entity in the form of '90.000,180.000'."""
entity_state = hass.states.get(entity_id)
if entity_state is None:
_LOGGER.error("Unable to find entity %s", entity_id)
return None
# Check if the entity has location attributes
if has_location(entity_state):
return _get_location_from_attributes(entity_state)
# Check if device is in a zone
zone_entity = hass.states.get(f"zone.{entity_state.state}")
if has_location(zone_entity): # type: ignore
_LOGGER.debug(
"%s is in %s, getting zone location", entity_id, zone_entity.entity_id # type: ignore
)
return _get_location_from_attributes(zone_entity) # type: ignore
# Resolve nested entity
if recursion_history is None:
recursion_history = []
recursion_history.append(entity_id)
if entity_state.state in recursion_history:
_LOGGER.error(
"Circular reference detected while trying to find coordinates of an entity. The state of %s has already been checked",
entity_state.state,
)
return None
_LOGGER.debug("Getting nested entity for state: %s", entity_state.state)
nested_entity = hass.states.get(entity_state.state)
if nested_entity is not None:
_LOGGER.debug("Resolving nested entity_id: %s", entity_state.state)
return find_coordinates(hass, entity_state.state, recursion_history)
# Check if state is valid coordinate set
try:
cv.gps(entity_state.state.split(","))
except vol.Invalid:
_LOGGER.error(
"Entity %s does not contain a location and does not point at an entity that does: %s",
entity_id,
entity_state.state,
)
return None
else:
return entity_state.state
def _get_location_from_attributes(entity_state: State) -> str:
"""Get the lat/long string from an entities attributes."""
attr = entity_state.attributes
return "{},{}".format(attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE))
|
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from paasta_tools.paastaapi.model.float_and_error import FloatAndError
from paasta_tools.paastaapi.model.integer_and_error import IntegerAndError
from paasta_tools.paastaapi.model.task_tail_lines import TaskTailLines
globals()['FloatAndError'] = FloatAndError
globals()['IntegerAndError'] = IntegerAndError
globals()['TaskTailLines'] = TaskTailLines
class MarathonMesosRunningTask(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'cpu_shares': (FloatAndError,), # noqa: E501
'cpu_used_seconds': (FloatAndError,), # noqa: E501
'deployed_timestamp': (float,), # noqa: E501
'duration_seconds': (int,), # noqa: E501
'hostname': (str,), # noqa: E501
'id': (str,), # noqa: E501
'mem_limit': (IntegerAndError,), # noqa: E501
'rss': (IntegerAndError,), # noqa: E501
'tail_lines': (TaskTailLines,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'cpu_shares': 'cpu_shares', # noqa: E501
'cpu_used_seconds': 'cpu_used_seconds', # noqa: E501
'deployed_timestamp': 'deployed_timestamp', # noqa: E501
'duration_seconds': 'duration_seconds', # noqa: E501
'hostname': 'hostname', # noqa: E501
'id': 'id', # noqa: E501
'mem_limit': 'mem_limit', # noqa: E501
'rss': 'rss', # noqa: E501
'tail_lines': 'tail_lines', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""MarathonMesosRunningTask - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
cpu_shares (FloatAndError): [optional] # noqa: E501
cpu_used_seconds (FloatAndError): [optional] # noqa: E501
deployed_timestamp (float): The unix timestamp at which the task was deployed. [optional] # noqa: E501
duration_seconds (int): The duration over which the task has been running in seconds. [optional] # noqa: E501
hostname (str): Name of the Mesos agent on which this task is running. [optional] # noqa: E501
id (str): The ID of the task in Mesos. [optional] # noqa: E501
mem_limit (IntegerAndError): [optional] # noqa: E501
rss (IntegerAndError): [optional] # noqa: E501
tail_lines (TaskTailLines): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
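# A construction sketch (hypothetical values; not part of the generated code).
# It assumes the rest of the paastaapi package is importable so that
# lazy_import() can resolve FloatAndError/IntegerAndError/TaskTailLines:
#
#   task = MarathonMesosRunningTask(
#       hostname="mesos-agent-1.example.invalid",
#       id="service.instance.task-id",
#       deployed_timestamp=1600000000.0,
#       duration_seconds=3600,
#   )
#   task.hostname  # attribute access is routed through the generated _data_store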
|
from itertools import permutations
from collections import deque
import networkx as nx
from tqdm import trange
from pgmpy.estimators import (
StructureScore,
StructureEstimator,
K2Score,
ScoreCache,
BDeuScore,
BicScore,
)
from pgmpy.base import DAG
from pgmpy.global_vars import SHOW_PROGRESS
class HillClimbSearch(StructureEstimator):
def __init__(self, data, scoring_method=None, use_cache=True, **kwargs):
"""
Class for heuristic hill climb searches for DAGs, to learn
network structure from data. `estimate` attempts to find a model with optimal score.
Parameters
----------
data: pandas DataFrame object
            dataframe object where each column represents one variable.
            (If some values in the data are missing, the data cells should be set to `numpy.NaN`.
Note that pandas converts each column containing `numpy.NaN`s to dtype `float`.)
state_names: dict (optional)
A dict indicating, for each variable, the discrete set of states (or values)
that the variable can take. If unspecified, the observed values in the data set
are taken to be the only possible states.
complete_samples_only: bool (optional, default `True`)
Specifies how to deal with missing data, if present. If set to `True` all rows
that contain `np.Nan` somewhere are ignored. If `False` then, for each variable,
every row where neither the variable nor its parents are `np.NaN` is used.
This sets the behavior of the `state_count`-method.
        use_cache: bool
            If True, uses caching of scores for faster computation.
            Note: Caching only works for scoring methods which are decomposable. It can
            give wrong results for custom scoring methods.
References
----------
Koller & Friedman, Probabilistic Graphical Models - Principles and Techniques, 2009
Section 18.4.3 (page 811ff)
"""
self.use_cache = use_cache
super(HillClimbSearch, self).__init__(data, **kwargs)
def _legal_operations(
self, model, score, tabu_list, max_indegree, black_list, white_list, fixed_edges
):
"""Generates a list of legal (= not in tabu_list) graph modifications
for a given model, together with their score changes. Possible graph modifications:
(1) add, (2) remove, or (3) flip a single edge. For details on scoring
see Koller & Friedman, Probabilistic Graphical Models, Section 18.4.3.3 (page 818).
If a number `max_indegree` is provided, only modifications that keep the number
of parents for each node below `max_indegree` are considered. A list of
edges can optionally be passed as `black_list` or `white_list` to exclude those
edges or to limit the search.
"""
tabu_list = set(tabu_list)
# Step 1: Get all legal operations for adding edges.
potential_new_edges = (
set(permutations(self.variables, 2))
- set(model.edges())
- set([(Y, X) for (X, Y) in model.edges()])
)
for (X, Y) in potential_new_edges:
# Check if adding (X, Y) will create a cycle.
if not nx.has_path(model, Y, X):
operation = ("+", (X, Y))
if (
(operation not in tabu_list)
and ((X, Y) not in black_list)
and ((X, Y) in white_list)
):
old_parents = model.get_parents(Y)
new_parents = old_parents + [X]
if len(new_parents) <= max_indegree:
score_delta = score(Y, new_parents) - score(Y, old_parents)
yield (operation, score_delta)
# Step 2: Get all legal operations for removing edges
for (X, Y) in model.edges():
operation = ("-", (X, Y))
if (operation not in tabu_list) and ((X, Y) not in fixed_edges):
old_parents = model.get_parents(Y)
new_parents = old_parents[:]
new_parents.remove(X)
score_delta = score(Y, new_parents) - score(Y, old_parents)
yield (operation, score_delta)
# Step 3: Get all legal operations for flipping edges
for (X, Y) in model.edges():
# Check if flipping creates any cycles
if not any(
map(lambda path: len(path) > 2, nx.all_simple_paths(model, X, Y))
):
operation = ("flip", (X, Y))
if (
((operation not in tabu_list) and ("flip", (Y, X)) not in tabu_list)
and ((X, Y) not in fixed_edges)
and ((Y, X) not in black_list)
and ((Y, X) in white_list)
):
old_X_parents = model.get_parents(X)
old_Y_parents = model.get_parents(Y)
new_X_parents = old_X_parents + [Y]
new_Y_parents = old_Y_parents[:]
new_Y_parents.remove(X)
if len(new_X_parents) <= max_indegree:
score_delta = (
score(X, new_X_parents)
+ score(Y, new_Y_parents)
- score(X, old_X_parents)
- score(Y, old_Y_parents)
)
yield (operation, score_delta)
def estimate(
self,
scoring_method="k2score",
start_dag=None,
fixed_edges=set(),
tabu_length=100,
max_indegree=None,
black_list=None,
white_list=None,
epsilon=1e-4,
max_iter=1e6,
show_progress=True,
):
"""
        Performs local hill climb search to estimate the `DAG` structure that
has optimal score, according to the scoring method supplied. Starts at
model `start_dag` and proceeds by step-by-step network modifications
until a local maximum is reached. Only estimates network structure, no
parametrization.
Parameters
----------
scoring_method: str or StructureScore instance
The score to be optimized during structure estimation. Supported
structure scores: k2score, bdeuscore, bicscore. Also accepts a
custom score but it should be an instance of `StructureScore`.
start_dag: DAG instance
The starting point for the local search. By default a completely
disconnected network is used.
fixed_edges: iterable
            A list of edges that will always be present in the final learned model.
            The algorithm adds these edges at the start of the search and
            will never change them.
tabu_length: int
If provided, the last `tabu_length` graph modifications cannot be reversed
during the search procedure. This serves to enforce a wider exploration
of the search space. Default value: 100.
max_indegree: int or None
            If provided and not None, the procedure only searches among models
where all nodes have at most `max_indegree` parents. Defaults to None.
black_list: list or None
If a list of edges is provided as `black_list`, they are excluded from the search
and the resulting model will not contain any of those edges. Default: None
white_list: list or None
If a list of edges is provided as `white_list`, the search is limited to those
edges. The resulting model will then only contain edges that are in `white_list`.
Default: None
epsilon: float (default: 1e-4)
Defines the exit condition. If the improvement in score is less than `epsilon`,
the learned model is returned.
max_iter: int (default: 1e6)
The maximum number of iterations allowed. Returns the learned model when the
number of iterations is greater than `max_iter`.
Returns
-------
model: `DAG` instance
A `DAG` at a (local) score maximum.
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from pgmpy.estimators import HillClimbSearch, BicScore
>>> # create data sample with 9 random variables:
... data = pd.DataFrame(np.random.randint(0, 5, size=(5000, 9)), columns=list('ABCDEFGHI'))
>>> # add 10th dependent variable
... data['J'] = data['A'] * data['B']
>>> est = HillClimbSearch(data)
>>> best_model = est.estimate(scoring_method=BicScore(data))
>>> sorted(best_model.nodes())
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J']
>>> best_model.edges()
[('B', 'J'), ('A', 'J')]
>>> # search a model with restriction on the number of parents:
>>> est.estimate(max_indegree=1).edges()
[('J', 'A'), ('B', 'J')]
"""
# Step 1: Initial checks and setup for arguments
# Step 1.1: Check scoring_method
supported_methods = {
"k2score": K2Score,
"bdeuscore": BDeuScore,
"bicscore": BicScore,
}
if (
(
isinstance(scoring_method, str)
and (scoring_method.lower() not in supported_methods)
)
) and (not isinstance(scoring_method, StructureScore)):
raise ValueError(
"scoring_method should either be one of k2score, bdeuscore, bicscore, or an instance of StructureScore"
)
if isinstance(scoring_method, str):
score = supported_methods[scoring_method.lower()](data=self.data)
else:
score = scoring_method
if self.use_cache:
score_fn = ScoreCache.ScoreCache(score, self.data).local_score
else:
score_fn = score.local_score
# Step 1.2: Check the start_dag
if start_dag is None:
start_dag = DAG()
start_dag.add_nodes_from(self.variables)
elif not isinstance(start_dag, DAG) or not set(start_dag.nodes()) == set(
self.variables
):
raise ValueError(
"'start_dag' should be a DAG with the same variables as the data set, or 'None'."
)
# Step 1.3: Check fixed_edges
if not hasattr(fixed_edges, "__iter__"):
raise ValueError("fixed_edges must be an iterable")
else:
fixed_edges = set(fixed_edges)
start_dag.add_edges_from(fixed_edges)
if not nx.is_directed_acyclic_graph(start_dag):
raise ValueError(
"fixed_edges creates a cycle in start_dag. Please modify either fixed_edges or start_dag."
)
# Step 1.4: Check black list and white list
black_list = set() if black_list is None else set(black_list)
white_list = (
set([(u, v) for u in self.variables for v in self.variables])
if white_list is None
else set(white_list)
)
# Step 1.5: Initialize max_indegree, tabu_list, and progress bar
if max_indegree is None:
max_indegree = float("inf")
tabu_list = deque(maxlen=tabu_length)
current_model = start_dag
if show_progress and SHOW_PROGRESS:
iteration = trange(int(max_iter))
else:
iteration = range(int(max_iter))
# Step 2: For each iteration, find the best scoring operation and
# do that to the current model. If no legal operation is
# possible, sets best_operation=None.
for _ in iteration:
best_operation, best_score_delta = max(
self._legal_operations(
current_model,
score_fn,
tabu_list,
max_indegree,
black_list,
white_list,
fixed_edges,
),
key=lambda t: t[1],
default=(None, None),
)
if best_operation is None or best_score_delta < epsilon:
break
elif best_operation[0] == "+":
current_model.add_edge(*best_operation[1])
tabu_list.append(("-", best_operation[1]))
elif best_operation[0] == "-":
current_model.remove_edge(*best_operation[1])
tabu_list.append(("+", best_operation[1]))
elif best_operation[0] == "flip":
X, Y = best_operation[1]
current_model.remove_edge(X, Y)
current_model.add_edge(Y, X)
tabu_list.append(best_operation)
# Step 3: Return if no more improvements or maximum iterations reached.
return current_model
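# A small end-to-end sketch mirroring the docstring example above (synthetic
# data, not part of the original module); it only runs when executed directly.
if __name__ == "__main__":
    import numpy as np
    import pandas as pd
    # Four independent columns plus one column that depends on A and B.
    data = pd.DataFrame(
        np.random.randint(0, 3, size=(2000, 4)), columns=list("ABCD")
    )
    data["E"] = data["A"] + data["B"]
    est = HillClimbSearch(data)
    # Restrict the search: at most two parents per node and no edges into "A".
    best = est.estimate(
        scoring_method="bicscore",
        max_indegree=2,
        black_list=[(node, "A") for node in "BCDE"],
        show_progress=False,
    )
    print(sorted(best.edges()))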
|
import re
from socket import gaierror as SocketGIAError
from homeassistant.components.roku.const import DOMAIN
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME,
ATTR_UPNP_SERIAL,
)
from homeassistant.const import CONF_HOST
from homeassistant.helpers.typing import HomeAssistantType
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
HOST = "192.168.1.160"
NAME = "Roku 3"
SSDP_LOCATION = "http://192.168.1.160/"
UPNP_FRIENDLY_NAME = "My Roku 3"
UPNP_SERIAL = "1GU48T017973"
MOCK_SSDP_DISCOVERY_INFO = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME: UPNP_FRIENDLY_NAME,
ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
def mock_connection(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
power: bool = True,
media_state: str = "close",
error: bool = False,
server_error: bool = False,
) -> None:
"""Mock the Roku connection."""
roku_url = f"http://{host}:8060"
if error:
mock_connection_error(
aioclient_mock=aioclient_mock, device=device, app=app, host=host
)
return
if server_error:
mock_connection_server_error(
aioclient_mock=aioclient_mock, device=device, app=app, host=host
)
return
info_fixture = f"roku/{device}-device-info.xml"
if not power:
info_fixture = f"roku/{device}-device-info-power-off.xml"
aioclient_mock.get(
f"{roku_url}/query/device-info",
text=load_fixture(info_fixture),
headers={"Content-Type": "text/xml"},
)
apps_fixture = "roku/apps.xml"
if device == "rokutv":
apps_fixture = "roku/apps-tv.xml"
aioclient_mock.get(
f"{roku_url}/query/apps",
text=load_fixture(apps_fixture),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/active-app",
text=load_fixture(f"roku/active-app-{app}.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/tv-active-channel",
text=load_fixture("roku/rokutv-tv-active-channel.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/tv-channels",
text=load_fixture("roku/rokutv-tv-channels.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/media-player",
text=load_fixture(f"roku/media-player-{media_state}.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.post(
re.compile(f"{roku_url}/keypress/.*"),
text="OK",
)
aioclient_mock.post(
re.compile(f"{roku_url}/launch/.*"),
text="OK",
)
aioclient_mock.post(f"{roku_url}/search", text="OK")
def mock_connection_error(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
) -> None:
"""Mock the Roku connection error."""
roku_url = f"http://{host}:8060"
aioclient_mock.get(f"{roku_url}/query/device-info", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/apps", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/active-app", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/tv-active-channel", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/tv-channels", exc=SocketGIAError)
aioclient_mock.post(re.compile(f"{roku_url}/keypress/.*"), exc=SocketGIAError)
aioclient_mock.post(re.compile(f"{roku_url}/launch/.*"), exc=SocketGIAError)
aioclient_mock.post(f"{roku_url}/search", exc=SocketGIAError)
def mock_connection_server_error(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
) -> None:
"""Mock the Roku server error."""
roku_url = f"http://{host}:8060"
aioclient_mock.get(f"{roku_url}/query/device-info", status=500)
aioclient_mock.get(f"{roku_url}/query/apps", status=500)
aioclient_mock.get(f"{roku_url}/query/active-app", status=500)
aioclient_mock.get(f"{roku_url}/query/tv-active-channel", status=500)
aioclient_mock.get(f"{roku_url}/query/tv-channels", status=500)
aioclient_mock.post(re.compile(f"{roku_url}/keypress/.*"), status=500)
aioclient_mock.post(re.compile(f"{roku_url}/launch/.*"), status=500)
aioclient_mock.post(f"{roku_url}/search", status=500)
async def setup_integration(
hass: HomeAssistantType,
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
unique_id: str = UPNP_SERIAL,
error: bool = False,
power: bool = True,
media_state: str = "close",
server_error: bool = False,
skip_entry_setup: bool = False,
) -> MockConfigEntry:
"""Set up the Roku integration in Home Assistant."""
entry = MockConfigEntry(domain=DOMAIN, unique_id=unique_id, data={CONF_HOST: host})
entry.add_to_hass(hass)
if not skip_entry_setup:
mock_connection(
aioclient_mock,
device,
app=app,
host=host,
error=error,
power=power,
media_state=media_state,
server_error=server_error,
)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
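# A sketch of how a test typically consumes these helpers (it assumes the
# standard ``hass`` and ``aioclient_mock`` fixtures from Home Assistant's test
# harness):
#
#   async def test_roku_setup(hass, aioclient_mock):
#       entry = await setup_integration(hass, aioclient_mock, device="roku3")
#       assert entry.unique_id == UPNP_SERIAL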
|
import datetime
import os
import dateutil
import numpy as np
import pandas as pd
from pandas import __version__ as pandas_version
import pathos
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_extraction.text import TfidfVectorizer
import warnings
# The easiest way to check against a bunch of different bad values is to convert whatever val we have into a string, then check it against a set containing the string representation of a bunch of bad values
bad_vals_as_strings = set([str(float('nan')), str(float('inf')), str(float('-inf')), 'None', 'none', 'NaN', 'nan', 'NULL', 'null', '', 'inf', '-inf'])
# clean_val will try to turn a value into a float.
# If it fails, it will attempt to strip commas and then attempt to turn it into a float again
# Additionally, it will check to make sure the value is not in a set of bad vals (nan, None, inf, etc.)
# This function either returns a clean float, raises a ValueError if the value is a bad val or cannot be parsed, or returns None for values that cannot even be comma-stripped (non-string types)
def clean_val(val):
if str(val) in bad_vals_as_strings:
raise(ValueError('clean_val failed'))
else:
try:
float_val = float(val)
except ValueError:
# This will throw a ValueError if it fails
# remove any commas in the string, and try to turn into a float again
try:
cleaned_string = val.replace(',', '')
float_val = float(cleaned_string)
except TypeError:
return None
return float_val
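# Illustrative behaviour of clean_val (a sketch that follows the logic above):
#   clean_val('1,234.5')  -> 1234.5       (commas stripped before float())
#   clean_val('nan')      -> ValueError   ('nan' is in bad_vals_as_strings)
#   clean_val(None)       -> ValueError   (str(None) == 'None' is a bad val)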
# Same as above, except this version returns float('nan') when it fails
# This plays more nicely with df.apply, and assumes we will be handling nans appropriately when doing DataFrameVectorizer later.
def clean_val_nan_version(key, val, replacement_val=np.nan):
try:
str_val = str(val)
except UnicodeEncodeError as e:
str_val = val.encode('ascii', 'ignore').decode('ascii')
print('Here is the value that causes the UnicodeEncodeError to be thrown:')
print(val)
print('Here is the feature name:')
print(key)
raise(e)
if str_val in bad_vals_as_strings:
return replacement_val
else:
try:
float_val = float(val)
except ValueError:
# remove any commas in the string, and try to turn into a float again
try:
cleaned_string = val.replace(',', '')
except TypeError:
print('*************************************')
print('We expected this value to be numeric, but were unable to convert it to a float:')
print(val)
print('Here is the feature name:')
print(key)
print('*************************************')
return replacement_val
try:
float_val = float(cleaned_string)
except:
return replacement_val
except TypeError:
# This is what happens if you feed in a datetime object to float
print('*************************************')
print('We expected this value to be numeric, but were unable to convert it to a float:')
print(val)
print('Here is the feature name:')
print(key)
print('*************************************')
return replacement_val
return float_val
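# Unlike clean_val, clean_val_nan_version does not raise for bad values; it
# returns replacement_val instead (a sketch):
#   clean_val_nan_version('age', '42')                       -> 42.0
#   clean_val_nan_version('age', 'NULL')                     -> nan
#   clean_val_nan_version('age', 'NULL', replacement_val=0)  -> 0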
class BasicDataCleaning(BaseEstimator, TransformerMixin):
def __init__(self, column_descriptions=None):
self.column_descriptions = column_descriptions
self.transformed_column_descriptions = column_descriptions.copy()
self.text_col_indicators = set(['text', 'nlp'])
self.numeric_col_types = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
self.text_columns = {}
for key, val in self.column_descriptions.items():
if val in self.text_col_indicators:
self.text_columns[key] = TfidfVectorizer(
# If we have any documents that cannot be decoded properly, just ignore them and keep going as planned with everything else
decode_error='ignore'
# Try to strip accents from characters. Using unicode is slightly slower but more comprehensive than 'ascii'
, strip_accents='unicode'
# Can also choose 'character', which will likely increase accuracy, at the cost of much more space, generally
, analyzer='word'
# Remove commonly found english words ('it', 'a', 'the') which do not typically contain much signal
, stop_words='english'
# Convert all characters to lowercase
, lowercase=True
# Only consider words that appear in fewer than max_df percent of all documents
# In this case, ignore all words that appear in 90% of all documents
, max_df=0.9
# Consider only the most frequently occurring 3000 words, after taking into account all the other filtering going on
, max_features=3000
)
def get(self, prop_name, default=None):
try:
return getattr(self, prop_name)
except AttributeError:
return default
def fit(self, X_df, y=None):
print('Running basic data cleaning')
self.vals_to_drop = set(['ignore', 'output', 'regressor', 'classifier'])
# See if we should fit TfidfVectorizer or not
for key in X_df.columns:
if X_df[key].dtype == 'object' and self.column_descriptions.get(key, False) not in ['categorical', 'ignore', 'nlp']:
# First, make sure that the values in this column are not just ints, or float('nan')
vals = X_df[key].sample(n=10)
is_categorical = False
for val in vals:
try:
if val is not None:
float(val)
except Exception as e:
print(e)
is_categorical = True
if is_categorical:
print('\n')
print('Encountered a column that is not marked as categorical, but is an "object" pandas type, which typically indicates a categorical column.')
                    print('The name of this column is: "{}"'.format(key))
print('Some example features in this column are: {}'.format(list(X_df[key].sample(n=5))))
print('If this is a categorical column, please mark it as `{}: "categorical"` as part of your column_descriptions'.format(key))
print('If this is not a categorical column, please consider converting its dtype before passing data into auto_ml')
print('\n')
warnings.warn('Consider marking the "{}" column as categorical'.format(key))
if self.transformed_column_descriptions.get(key) is None:
self.transformed_column_descriptions[key] = 'continuous'
if key in self.text_columns:
X_df[key].fillna('nan', inplace=True)
if pandas_version < '0.20.0':
text_col = X_df[key].astype(str, raise_on_error=False)
else:
text_col = X_df[key].astype(str, errors='ignore')
self.text_columns[key].fit(text_col)
col_names = self.text_columns[key].get_feature_names()
# Make weird characters play nice, or just ignore them :)
for idx, word in enumerate(col_names):
try:
col_names[idx] = str(word)
except:
col_names[idx] = 'non_ascii_word_' + str(idx)
col_names = ['nlp_' + key + '_' + str(word) for word in col_names]
self.text_columns[key].cleaned_feature_names = col_names
return self
def transform(self, X, y=None):
ignore_none_fields = False
if self.get('transformed_column_descriptions', None) is not None:
ignore_none_fields = True
column_descriptions = self.get('transformed_column_descriptions', self.column_descriptions)
# Convert input to DataFrame if we were given a list of dictionaries
if isinstance(X, list):
X = pd.DataFrame(X)
X = X.copy()
# All of these are values we will not want to keep for training this particular estimator.
        # Note that we have already split out the output column and saved it into its own variable
if isinstance(X, dict):
dict_copy = {}
for key, val in X.items():
col_desc = column_descriptions.get(key, None)
if col_desc is None:
continue
elif col_desc in (None, 'continuous', 'numerical', 'float', 'int'):
dict_copy[key] = clean_val_nan_version(key, val, replacement_val=0)
elif col_desc == 'date':
date_feature_dict = add_date_features_dict(X, key)
dict_copy.update(date_feature_dict)
elif col_desc == 'categorical':
dict_copy[key] = val
elif key in self.text_columns:
col_names = self.text_columns[key].cleaned_feature_names
try:
text_val = str(X[key])
except UnicodeEncodeError:
text_val = X[key].encode('ascii', 'ignore').decode('ascii')
# the transform function expects a list
text_val = [text_val]
nlp_matrix = self.text_columns[key].transform(text_val)
# From here, it's all about transforming the output from the tf-idf transform into a dictionary
# Borrowed from: http://stackoverflow.com/a/40696119/3823857
                    # it outputs a sparse CSR matrix
# first, we transform to coo
nlp_matrix = nlp_matrix.tocoo()
# Then, we grab the relevant column names
relevant_col_names = []
for col_idx in nlp_matrix.col:
relevant_col_names.append(col_names[col_idx])
# Then we zip together the relevant columns and the sparse data into a dictionary
relevant_nlp_cols = {k:v for k,v in zip(relevant_col_names, nlp_matrix.data)}
dict_copy.update(relevant_nlp_cols)
else:
pass
return dict_copy
else:
X.reset_index(drop=True, inplace=True)
# Run data cleaning only for columns that are not already pandas numeric dtypes
cols_to_clean = []
dtypes = X.dtypes
for idx, col in enumerate(X.columns):
if dtypes[idx] not in self.numeric_col_types:
cols_to_clean.append(col)
if len(cols_to_clean) > 0:
df_to_clean = X[cols_to_clean]
X.drop(cols_to_clean, axis=1, inplace=True)
if df_to_clean.shape[0] > 100000 or os.environ.get('is_test_suite', 0) == 'True':
results = list(map(lambda col: self.process_one_column(col_vals=df_to_clean[col], col_name=col), df_to_clean.columns))
else:
pool = pathos.multiprocessing.ProcessPool()
try:
pool.restart()
except AssertionError as e:
pass
results = list(pool.map(lambda col: self.process_one_column(col_vals=df_to_clean[col], col_name=col), df_to_clean.columns))
pool.close()
try:
pool.join()
except AssertionError:
pass
result = {}
for val in results:
result.update(val)
del val
df_result = pd.DataFrame(result)
X[df_result.columns] = df_result
return X
def process_one_column(self, col_vals, col_name):
ignore_none_fields = False
if self.get('transformed_column_descriptions', None) is not None:
ignore_none_fields = True
column_descriptions = self.get('transformed_column_descriptions', self.column_descriptions)
col_desc = column_descriptions.get(col_name)
# This is what we do to columns that were not present at fitting time.
# All columns that were present at fitting time that had no entry in column_descriptions were filled in with 'continuous'
if col_desc is None:
result = {}
elif col_desc == 'categorical':
# We will handle categorical data later, one-hot-encoding it inside DataFrameVectorizer (or LabelEncoding it for lgbm)
result = {
col_name: col_vals
}
elif col_desc in (None, 'continuous', 'numerical', 'float', 'int'):
# For all of our numerical columns, try to turn all of these values into floats
# This function handles commas inside strings that represent numbers, and returns nan if we cannot turn this value into a float. nans are ignored in DataFrameVectorizer
try:
col_vals = col_vals.apply(lambda x: clean_val_nan_version(col_name, x, replacement_val=0))
result = {
col_name: col_vals
}
except TypeError as e:
                raise e
except UnicodeEncodeError as e:
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
print('We have found a column that is not marked as a categorical column that has unicode values in it')
print('Here is the column name:')
print(col_name)
print('The actual value that caused the issue is logged right above the exclamation points')
print('Please either mark this column as categorical, or clean up the values in this column')
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
elif col_desc == 'date':
result = add_date_features_df(col_vals, col_name)
elif col_name in self.text_columns:
col_names = self.text_columns[col_name].cleaned_feature_names
col_vals.fillna('nan', inplace=True)
if pandas_version < '0.20.0':
nlp_matrix = self.text_columns[col_name].transform(col_vals.astype(str, raise_on_error=False))
else:
nlp_matrix = self.text_columns[col_name].transform(col_vals.astype(str, errors='ignore'))
nlp_matrix = nlp_matrix.toarray()
text_df = pd.DataFrame(nlp_matrix)
text_df.columns = col_names
result = {}
            for text_feature_name in text_df.columns:
                result[text_feature_name] = text_df[text_feature_name].astype(int)
elif col_desc in self.vals_to_drop:
result = {}
else:
# If we have gotten here, the value is not any that we recognize
# This is most likely a typo that the user would want to be informed of, or a case while we're developing on auto_ml itself.
# In either case, it's useful to log it.
print('When transforming the data, we have encountered a value in column_descriptions that is not currently supported. The column has been dropped to allow the rest of the pipeline to run. Here\'s the name of the column:' )
print(col_name)
print('And here is the value for this column passed into column_descriptions:')
print(col_desc)
warnings.warn('UnknownValueInColumnDescriptions: Please make sure all the values you pass into column_descriptions are valid.')
result = {}
return result
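# A minimal usage sketch of BasicDataCleaning (assuming pandas is imported as pd at module
# level, as it is used above). The column names and values are illustrative only; the frame
# has more than 10 rows because fit() samples 10 values per object-typed column.
def _demo_basic_data_cleaning():
    df = pd.DataFrame({
        'price': ['1,200', '950', 'unknown'] * 4,
        'city': ['SF', 'NYC', 'SF'] * 4,
    })
    cleaner = BasicDataCleaning(column_descriptions={'city': 'categorical'})
    # fit_transform comes from TransformerMixin; 'price' is coerced to floats
    # (1200.0, 950.0, then 0 for the unparseable value), while 'city' is passed
    # through untouched for later one-hot encoding. The object-typed 'price' column
    # also triggers the "consider marking as categorical" warning printed by fit().
    return cleaner.fit_transform(df)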
def minutes_into_day_parts(minutes_into_day):
if minutes_into_day < 6 * 60:
return 'late_night'
elif minutes_into_day < 10 * 60:
return 'morning'
elif minutes_into_day < 11.5 * 60:
return 'mid_morning'
elif minutes_into_day < 14 * 60:
return 'lunchtime'
elif minutes_into_day < 18 * 60:
return 'afternoon'
elif minutes_into_day < 20.5 * 60:
return 'dinnertime'
elif minutes_into_day < 23.5 * 60:
return 'early_night'
else:
return 'late_night'
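# A few illustrative mappings for minutes_into_day_parts (sample times chosen here, not
# taken from the original project):
def _demo_minutes_into_day_parts():
    assert minutes_into_day_parts(3 * 60) == 'late_night'        # 03:00
    assert minutes_into_day_parts(9 * 60) == 'morning'           # 09:00
    assert minutes_into_day_parts(12 * 60) == 'lunchtime'        # 12:00
    assert minutes_into_day_parts(19 * 60) == 'dinnertime'       # 19:00
    assert minutes_into_day_parts(23 * 60 + 45) == 'late_night'  # 23:45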
# Note: assumes that the column is already formatted as a pandas date type
def add_date_features_df(col_data, date_col):
    # Pandas nicely tries to prevent you from doing stupid things, like setting values on a copy of a df rather than the real one
    # It is a bit overzealous for our purposes here, so we side-step those warnings by building the engineered features in a separate dict instead of assigning onto the original DataFrame
result = {}
col_data = pd.to_datetime(col_data)
if pandas_version < '0.20.0':
result[date_col + '_day_of_week'] = col_data.apply(lambda x: x.weekday()).astype(int, raise_on_error=False)
else:
result[date_col + '_day_of_week'] = col_data.apply(lambda x: x.weekday()).astype(int, errors='ignore')
try:
if pandas_version < '0.20.0':
result[date_col + '_hour'] = col_data.apply(lambda x: x.hour).astype(int, raise_on_error=False)
else:
result[date_col + '_hour'] = col_data.apply(lambda x: x.hour).astype(int, errors='ignore')
result[date_col + '_minutes_into_day'] = col_data.apply(lambda x: x.hour * 60 + x.minute)
result[date_col + '_hour'] = result[date_col + '_hour'].fillna(0)
result[date_col + '_minutes_into_day'] = result[date_col + '_minutes_into_day'].fillna(0)
except AttributeError:
pass
result[date_col + '_is_weekend'] = col_data.apply(lambda x: x.weekday() in (5,6))
result[date_col + '_day_part'] = result[date_col + '_minutes_into_day'].apply(minutes_into_day_parts)
result[date_col + '_day_of_week'] = result[date_col + '_day_of_week'].fillna(0)
result[date_col + '_is_weekend'] = result[date_col + '_is_weekend'].fillna(0)
result[date_col + '_day_part'] = result[date_col + '_day_part'].fillna(0)
return result
# Same logic as above, except implemented for a single dictionary, which is much faster at prediction time when getting just a single prediction
def add_date_features_dict(row, date_col):
date_feature_dict = {}
# Handle cases where the val for the date_col is None
try:
date_val = row[date_col]
        if date_val is None:
return date_feature_dict
if not isinstance(date_val, (datetime.datetime, datetime.date)):
date_val = dateutil.parser.parse(date_val)
except:
return date_feature_dict
# Make a copy of all the engineered features from the date, without modifying the original object at all
# This way the same original object can be passed into a number of different trained auto_ml predictors
date_feature_dict[date_col + '_day_of_week'] = date_val.weekday()
# nesting this inside a try/except block because the date might be a datetime.date, not a datetime.datetime
try:
date_feature_dict[date_col + '_hour'] = date_val.hour
date_feature_dict[date_col + '_minutes_into_day'] = date_val.hour * 60 + date_val.minute
except AttributeError:
pass
date_feature_dict[date_col + '_is_weekend'] = date_val.weekday() in (5,6)
return date_feature_dict
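# A minimal sketch of the features produced by add_date_features_dict for a full datetime
# (the column name and timestamp below are illustrative assumptions):
def _demo_add_date_features_dict():
    import datetime  # imported here only to keep the sketch self-contained
    row = {'signup_time': datetime.datetime(2017, 5, 1, 9, 30)}  # a Monday morning
    features = add_date_features_dict(row, 'signup_time')
    assert features['signup_time_day_of_week'] == 0                # Monday
    assert features['signup_time_hour'] == 9
    assert features['signup_time_minutes_into_day'] == 9 * 60 + 30
    assert features['signup_time_is_weekend'] is False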
|
from sqlalchemy import Column, Integer, String
from lemur.database import db
class RotationPolicy(db.Model):
__tablename__ = "rotation_policies"
id = Column(Integer, primary_key=True)
name = Column(String)
days = Column(Integer)
def __repr__(self):
return "RotationPolicy(days={days}, name={name})".format(
days=self.days, name=self.name
)
|
import os
import re
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert re.search(r'instance-[12]', host.check_output('hostname -s'))
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import os
import sys
import functools
import html
import enum
import shutil
import argparse
from typing import Any, List, Sequence, Tuple
import attr
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QApplication, QDialog, QPushButton, QHBoxLayout,
QVBoxLayout, QLabel, QMessageBox, QWidget)
from PyQt5.QtNetwork import QSslSocket
from qutebrowser.config import config, configfiles
from qutebrowser.utils import (usertypes, version, qtutils, log, utils,
standarddir)
from qutebrowser.misc import objects, msgbox, savemanager, quitter
class _Result(enum.IntEnum):
"""The result code returned by the backend problem dialog."""
quit = QDialog.Accepted + 1
restart = QDialog.Accepted + 2
restart_webkit = QDialog.Accepted + 3
restart_webengine = QDialog.Accepted + 4
@attr.s
class _Button:
"""A button passed to BackendProblemDialog."""
text: str = attr.ib()
setting: str = attr.ib()
value: Any = attr.ib()
default: bool = attr.ib(default=False)
def _other_backend(backend: usertypes.Backend) -> Tuple[usertypes.Backend, str]:
"""Get the other backend enum/setting for a given backend."""
other_backend = {
usertypes.Backend.QtWebKit: usertypes.Backend.QtWebEngine,
usertypes.Backend.QtWebEngine: usertypes.Backend.QtWebKit,
}[backend]
other_setting = other_backend.name.lower()[2:]
return (other_backend, other_setting)
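# A minimal sketch of the mapping above: the fallback for QtWebEngine is QtWebKit, and the
# config value used to force it is the lowercased enum name with the leading "qt" stripped.
def _demo_other_backend():
    other_backend, other_setting = _other_backend(usertypes.Backend.QtWebEngine)
    assert other_backend == usertypes.Backend.QtWebKit
    assert other_setting == 'webkit'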
def _error_text(because: str, text: str, backend: usertypes.Backend) -> str:
"""Get an error text for the given information."""
other_backend, other_setting = _other_backend(backend)
if other_backend == usertypes.Backend.QtWebKit:
warning = ("<i>Note that QtWebKit hasn't been updated since "
"July 2017 (including security updates).</i>")
suffix = " (not recommended)"
else:
warning = ""
suffix = ""
return ("<b>Failed to start with the {backend} backend!</b>"
"<p>qutebrowser tried to start with the {backend} backend but "
"failed because {because}.</p>{text}"
"<p><b>Forcing the {other_backend.name} backend{suffix}</b></p>"
"<p>This forces usage of the {other_backend.name} backend by "
"setting the <i>backend = '{other_setting}'</i> option "
"(if you have a <i>config.py</i> file, you'll need to set "
"this manually). {warning}</p>".format(
backend=backend.name, because=because, text=text,
other_backend=other_backend, other_setting=other_setting,
warning=warning, suffix=suffix))
class _Dialog(QDialog):
"""A dialog which gets shown if there are issues with the backend."""
def __init__(self, *, because: str,
text: str,
backend: usertypes.Backend,
buttons: Sequence[_Button] = None,
parent: QWidget = None) -> None:
super().__init__(parent)
vbox = QVBoxLayout(self)
other_backend, other_setting = _other_backend(backend)
text = _error_text(because, text, backend)
label = QLabel(text)
label.setWordWrap(True)
label.setTextFormat(Qt.RichText)
vbox.addWidget(label)
hbox = QHBoxLayout()
buttons = [] if buttons is None else buttons
quit_button = QPushButton("Quit")
quit_button.clicked.connect(lambda: self.done(_Result.quit))
hbox.addWidget(quit_button)
backend_text = "Force {} backend".format(other_backend.name)
if other_backend == usertypes.Backend.QtWebKit:
backend_text += ' (not recommended)'
backend_button = QPushButton(backend_text)
backend_button.clicked.connect(functools.partial(
self._change_setting, 'backend', other_setting))
hbox.addWidget(backend_button)
for button in buttons:
btn = QPushButton(button.text)
btn.setDefault(button.default)
btn.clicked.connect(functools.partial(
self._change_setting, button.setting, button.value))
hbox.addWidget(btn)
vbox.addLayout(hbox)
def _change_setting(self, setting: str, value: str) -> None:
"""Change the given setting and restart."""
config.instance.set_obj(setting, value, save_yaml=True)
if setting == 'backend' and value == 'webkit':
self.done(_Result.restart_webkit)
elif setting == 'backend' and value == 'webengine':
self.done(_Result.restart_webengine)
else:
self.done(_Result.restart)
@attr.s
class _BackendImports:
"""Whether backend modules could be imported."""
webkit_available: bool = attr.ib(default=None)
webengine_available: bool = attr.ib(default=None)
webkit_error: str = attr.ib(default=None)
webengine_error: str = attr.ib(default=None)
class _BackendProblemChecker:
"""Check for various backend-specific issues."""
def __init__(self, *,
no_err_windows: bool,
save_manager: savemanager.SaveManager) -> None:
self._save_manager = save_manager
self._no_err_windows = no_err_windows
def _show_dialog(self, *args: Any, **kwargs: Any) -> None:
"""Show a dialog for a backend problem."""
if self._no_err_windows:
text = _error_text(*args, **kwargs)
print(text, file=sys.stderr)
sys.exit(usertypes.Exit.err_init)
dialog = _Dialog(*args, **kwargs)
status = dialog.exec_()
self._save_manager.save_all(is_exit=True)
if status in [_Result.quit, QDialog.Rejected]:
pass
elif status == _Result.restart_webkit:
quitter.instance.restart(override_args={'backend': 'webkit'})
elif status == _Result.restart_webengine:
quitter.instance.restart(override_args={'backend': 'webengine'})
elif status == _Result.restart:
quitter.instance.restart()
else:
raise utils.Unreachable(status)
sys.exit(usertypes.Exit.err_init)
def _nvidia_shader_workaround(self) -> None:
"""Work around QOpenGLShaderProgram issues.
See https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
"""
self._assert_backend(usertypes.Backend.QtWebEngine)
utils.libgl_workaround()
def _xwayland_options(self) -> Tuple[str, List[_Button]]:
"""Get buttons/text for a possible XWayland solution."""
buttons = []
text = "<p>You can work around this in one of the following ways:</p>"
if 'DISPLAY' in os.environ:
# XWayland is available, but QT_QPA_PLATFORM=wayland is set
buttons.append(
_Button("Force XWayland", 'qt.force_platform', 'xcb'))
text += ("<p><b>Force Qt to use XWayland</b></p>"
"<p>This allows you to use the newer QtWebEngine backend "
"(based on Chromium). "
"This sets the <i>qt.force_platform = 'xcb'</i> option "
"(if you have a <i>config.py</i> file, you'll need to "
"set this manually).</p>")
else:
text += ("<p><b>Set up XWayland</b></p>"
"<p>This allows you to use the newer QtWebEngine backend "
"(based on Chromium). ")
return text, buttons
def _handle_wayland_webgl(self) -> None:
"""On older graphic hardware, WebGL on Wayland causes segfaults.
See https://github.com/qutebrowser/qutebrowser/issues/5313
"""
self._assert_backend(usertypes.Backend.QtWebEngine)
if os.environ.get('QUTE_SKIP_WAYLAND_WEBGL_CHECK'):
return
platform = QApplication.instance().platformName()
if platform not in ['wayland', 'wayland-egl']:
return
# Only Qt 5.14 should be affected
if not qtutils.version_check('5.14', compiled=False):
return
if qtutils.version_check('5.15', compiled=False):
return
# Newer graphic hardware isn't affected
opengl_info = version.opengl_info()
if (opengl_info is None or
opengl_info.gles or
opengl_info.version is None or
opengl_info.version >= (4, 3)):
return
# If WebGL is turned off, we're fine
if not config.val.content.webgl:
return
text, buttons = self._xwayland_options()
buttons.append(_Button("Turn off WebGL (recommended)",
'content.webgl',
False))
text += ("<p><b>Disable WebGL (recommended)</b></p>"
"This sets the <i>content.webgl = False</i> option "
"(if you have a <i>config.py</i> file, you'll need to "
"set this manually).</p>")
self._show_dialog(backend=usertypes.Backend.QtWebEngine,
because=("of frequent crashes with Qt 5.14 on "
"Wayland with older graphics hardware"),
text=text,
buttons=buttons)
def _try_import_backends(self) -> _BackendImports:
"""Check whether backends can be imported and return BackendImports."""
# pylint: disable=unused-import
results = _BackendImports()
try:
from PyQt5 import QtWebKit
from PyQt5.QtWebKit import qWebKitVersion
from PyQt5 import QtWebKitWidgets
except (ImportError, ValueError) as e:
results.webkit_available = False
results.webkit_error = str(e)
else:
if qtutils.is_new_qtwebkit():
results.webkit_available = True
else:
results.webkit_available = False
results.webkit_error = "Unsupported legacy QtWebKit found"
try:
from PyQt5 import QtWebEngineWidgets
except (ImportError, ValueError) as e:
results.webengine_available = False
results.webengine_error = str(e)
else:
results.webengine_available = True
assert results.webkit_available is not None
assert results.webengine_available is not None
if not results.webkit_available:
assert results.webkit_error is not None
if not results.webengine_available:
assert results.webengine_error is not None
return results
def _handle_ssl_support(self, fatal: bool = False) -> None:
"""Check for full SSL availability.
If "fatal" is given, show an error and exit.
"""
if QSslSocket.supportsSsl():
return
if qtutils.version_check('5.12.4'):
version_text = ("If you use OpenSSL 1.0 with a PyQt package from "
"PyPI (e.g. on Ubuntu 16.04), you will need to "
"build OpenSSL 1.1 from sources and set "
"LD_LIBRARY_PATH accordingly.")
else:
version_text = ("If you use OpenSSL 1.1 with a PyQt package from "
"PyPI (e.g. on Archlinux or Debian Stretch), you "
"need to set LD_LIBRARY_PATH to the path of "
"OpenSSL 1.0 or use Qt >= 5.12.4.")
text = ("Could not initialize QtNetwork SSL support. {} This only "
"affects downloads and :adblock-update.".format(version_text))
if fatal:
errbox = msgbox.msgbox(parent=None,
title="SSL error",
text="Could not initialize SSL support.",
icon=QMessageBox.Critical,
plain_text=False)
errbox.exec_()
sys.exit(usertypes.Exit.err_init)
assert not fatal
log.init.warning(text)
def _check_backend_modules(self) -> None:
"""Check for the modules needed for QtWebKit/QtWebEngine."""
imports = self._try_import_backends()
if imports.webkit_available and imports.webengine_available:
return
elif not imports.webkit_available and not imports.webengine_available:
text = ("<p>qutebrowser needs QtWebKit or QtWebEngine, but "
"neither could be imported!</p>"
"<p>The errors encountered were:<ul>"
"<li><b>QtWebKit:</b> {webkit_error}"
"<li><b>QtWebEngine:</b> {webengine_error}"
"</ul></p>".format(
webkit_error=html.escape(imports.webkit_error),
webengine_error=html.escape(imports.webengine_error)))
errbox = msgbox.msgbox(parent=None,
title="No backend library found!",
text=text,
icon=QMessageBox.Critical,
plain_text=False)
errbox.exec_()
sys.exit(usertypes.Exit.err_init)
elif objects.backend == usertypes.Backend.QtWebKit:
if imports.webkit_available:
return
assert imports.webengine_available
self._show_dialog(
backend=usertypes.Backend.QtWebKit,
because="QtWebKit could not be imported",
text="<p><b>The error encountered was:</b><br/>{}</p>".format(
html.escape(imports.webkit_error))
)
elif objects.backend == usertypes.Backend.QtWebEngine:
if imports.webengine_available:
return
assert imports.webkit_available
self._show_dialog(
backend=usertypes.Backend.QtWebEngine,
because="QtWebEngine could not be imported",
text="<p><b>The error encountered was:</b><br/>{}</p>".format(
html.escape(imports.webengine_error))
)
raise utils.Unreachable
def _handle_cache_nuking(self) -> None:
"""Nuke the QtWebEngine cache if the Qt version changed.
WORKAROUND for https://bugreports.qt.io/browse/QTBUG-72532
"""
if not configfiles.state.qt_version_changed:
return
# Only nuke the cache in cases where we know there are problems.
# It seems these issues started with Qt 5.12.
# They should be fixed with Qt 5.12.5:
# https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/265408
if qtutils.version_check('5.12.5', compiled=False):
return
log.init.info("Qt version changed, nuking QtWebEngine cache")
cache_dir = os.path.join(standarddir.cache(), 'webengine')
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
def _handle_serviceworker_nuking(self) -> None:
"""Nuke the service workers directory if the Qt version changed.
WORKAROUND for:
https://bugreports.qt.io/browse/QTBUG-72532
https://bugreports.qt.io/browse/QTBUG-82105
"""
if ('serviceworker_workaround' not in configfiles.state['general'] and
qtutils.version_check('5.14', compiled=False)):
# Nuke the service worker directory once for every install with Qt
# 5.14, given that it seems to cause a variety of segfaults.
configfiles.state['general']['serviceworker_workaround'] = '514'
affected = True
else:
# Otherwise, just nuke it when the Qt version changed.
affected = configfiles.state.qt_version_changed
if not affected:
return
service_worker_dir = os.path.join(standarddir.data(), 'webengine',
'Service Worker')
bak_dir = service_worker_dir + '-bak'
if not os.path.exists(service_worker_dir):
return
log.init.info("Qt version changed, removing service workers")
# Keep one backup around - we're not 100% sure what persistent data
# could be in there, but this folder can grow to ~300 MB.
if os.path.exists(bak_dir):
shutil.rmtree(bak_dir)
shutil.move(service_worker_dir, bak_dir)
def _assert_backend(self, backend: usertypes.Backend) -> None:
assert objects.backend == backend, objects.backend
def check(self) -> None:
"""Run all checks."""
self._check_backend_modules()
if objects.backend == usertypes.Backend.QtWebEngine:
self._handle_ssl_support()
self._nvidia_shader_workaround()
self._handle_wayland_webgl()
self._handle_cache_nuking()
self._handle_serviceworker_nuking()
else:
self._assert_backend(usertypes.Backend.QtWebKit)
self._handle_ssl_support(fatal=True)
def init(*, args: argparse.Namespace,
save_manager: savemanager.SaveManager) -> None:
"""Run all checks."""
checker = _BackendProblemChecker(no_err_windows=args.no_err_windows,
save_manager=save_manager)
checker.check()
|
from io import StringIO
import pytest
from contextlib import contextmanager
from unittest.mock import Mock
from vine.abstract import Thenable
from kombu.exceptions import HttpError
from kombu.asynchronous import http
from kombu.asynchronous.aws.connection import (
AsyncHTTPSConnection,
AsyncHTTPResponse,
AsyncConnection,
AsyncAWSQueryConnection,
)
from kombu.asynchronous.aws.ext import boto3
from .case import AWSCase
from t.mocks import PromiseMock
try:
from urllib.parse import urlparse, parse_qs
except ImportError:
from urlparse import urlparse, parse_qs # noqa
# Not currently working
VALIDATES_CERT = False
def passthrough(*args, **kwargs):
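    """Return a Mock whose call returns its single argument unchanged (so client.add_request(request) yields the request)."""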
m = Mock(*args, **kwargs)
def side_effect(ret):
return ret
m.side_effect = side_effect
return m
class test_AsyncHTTPSConnection(AWSCase):
def test_http_client(self):
x = AsyncHTTPSConnection()
assert x.http_client is http.get_client()
client = Mock(name='http_client')
y = AsyncHTTPSConnection(http_client=client)
assert y.http_client is client
def test_args(self):
x = AsyncHTTPSConnection(
strict=True, timeout=33.3,
)
assert x.strict
assert x.timeout == 33.3
def test_request(self):
x = AsyncHTTPSConnection('aws.vandelay.com')
x.request('PUT', '/importer-exporter')
assert x.path == '/importer-exporter'
assert x.method == 'PUT'
def test_request_with_body_buffer(self):
x = AsyncHTTPSConnection('aws.vandelay.com')
body = Mock(name='body')
body.read.return_value = 'Vandelay Industries'
x.request('PUT', '/importer-exporter', body)
assert x.method == 'PUT'
assert x.path == '/importer-exporter'
assert x.body == 'Vandelay Industries'
body.read.assert_called_with()
def test_request_with_body_text(self):
x = AsyncHTTPSConnection('aws.vandelay.com')
x.request('PUT', '/importer-exporter', 'Vandelay Industries')
assert x.method == 'PUT'
assert x.path == '/importer-exporter'
assert x.body == 'Vandelay Industries'
def test_request_with_headers(self):
x = AsyncHTTPSConnection()
headers = {'Proxy': 'proxy.vandelay.com'}
x.request('PUT', '/importer-exporter', None, headers)
assert 'Proxy' in dict(x.headers)
assert dict(x.headers)['Proxy'] == 'proxy.vandelay.com'
def assert_request_created_with(self, url, conn):
conn.Request.assert_called_with(
url, method=conn.method,
headers=http.Headers(conn.headers), body=conn.body,
connect_timeout=conn.timeout, request_timeout=conn.timeout,
validate_cert=VALIDATES_CERT,
)
def test_getresponse(self):
client = Mock(name='client')
client.add_request = passthrough(name='client.add_request')
x = AsyncHTTPSConnection(http_client=client)
x.Response = Mock(name='x.Response')
request = x.getresponse()
x.http_client.add_request.assert_called_with(request)
assert isinstance(request, Thenable)
assert isinstance(request.on_ready, Thenable)
response = Mock(name='Response')
request.on_ready(response)
x.Response.assert_called_with(response)
def test_getresponse__real_response(self):
client = Mock(name='client')
client.add_request = passthrough(name='client.add_request')
callback = PromiseMock(name='callback')
x = AsyncHTTPSConnection(http_client=client)
request = x.getresponse(callback)
x.http_client.add_request.assert_called_with(request)
buf = StringIO()
buf.write('The quick brown fox jumps')
headers = http.Headers({'X-Foo': 'Hello', 'X-Bar': 'World'})
response = http.Response(request, 200, headers, buf)
request.on_ready(response)
callback.assert_called()
wresponse = callback.call_args[0][0]
assert wresponse.read() == 'The quick brown fox jumps'
assert wresponse.status == 200
assert wresponse.getheader('X-Foo') == 'Hello'
headers_dict = wresponse.getheaders()
assert dict(headers_dict) == headers
assert wresponse.msg
assert repr(wresponse)
def test_repr(self):
assert repr(AsyncHTTPSConnection())
def test_putrequest(self):
x = AsyncHTTPSConnection()
x.putrequest('UPLOAD', '/new')
assert x.method == 'UPLOAD'
assert x.path == '/new'
def test_putheader(self):
x = AsyncHTTPSConnection()
x.putheader('X-Foo', 'bar')
assert x.headers == [('X-Foo', 'bar')]
x.putheader('X-Bar', 'baz')
assert x.headers == [
('X-Foo', 'bar'),
('X-Bar', 'baz'),
]
def test_send(self):
x = AsyncHTTPSConnection()
x.send('foo')
assert x.body == 'foo'
x.send('bar')
assert x.body == 'foobar'
def test_interface(self):
x = AsyncHTTPSConnection()
assert x.set_debuglevel(3) is None
assert x.connect() is None
assert x.close() is None
assert x.endheaders() is None
class test_AsyncHTTPResponse(AWSCase):
def test_with_error(self):
r = Mock(name='response')
r.error = HttpError(404, 'NotFound')
x = AsyncHTTPResponse(r)
assert x.reason == 'NotFound'
r.error = None
assert not x.reason
class test_AsyncConnection(AWSCase):
def test_client(self):
sqs = Mock(name='sqs')
x = AsyncConnection(sqs)
assert x._httpclient is http.get_client()
client = Mock(name='client')
y = AsyncConnection(sqs, http_client=client)
assert y._httpclient is client
def test_get_http_connection(self):
sqs = Mock(name='sqs')
x = AsyncConnection(sqs)
assert isinstance(
x.get_http_connection(),
AsyncHTTPSConnection,
)
conn = x.get_http_connection()
assert conn.http_client is x._httpclient
class test_AsyncAWSQueryConnection(AWSCase):
def setup(self):
session = boto3.session.Session(
aws_access_key_id='AAA',
aws_secret_access_key='AAAA',
region_name='us-west-2',
)
sqs_client = session.client('sqs')
self.x = AsyncAWSQueryConnection(sqs_client,
http_client=Mock(name='client'))
def test_make_request(self):
_mexe, self.x._mexe = self.x._mexe, Mock(name='_mexe')
Conn = self.x.get_http_connection = Mock(name='get_http_connection')
callback = PromiseMock(name='callback')
self.x.make_request(
'action', {'foo': 1}, 'https://foo.com/', 'GET', callback=callback,
)
self.x._mexe.assert_called()
request = self.x._mexe.call_args[0][0]
parsed = urlparse(request.url)
params = parse_qs(parsed.query)
assert params['Action'][0] == 'action'
ret = _mexe(request, callback=callback)
assert ret is callback
Conn.return_value.request.assert_called()
Conn.return_value.getresponse.assert_called_with(
callback=callback,
)
def test_make_request__no_action(self):
self.x._mexe = Mock(name='_mexe')
self.x.get_http_connection = Mock(name='get_http_connection')
callback = PromiseMock(name='callback')
self.x.make_request(
None, {'foo': 1}, 'http://foo.com/', 'GET', callback=callback,
)
self.x._mexe.assert_called()
request = self.x._mexe.call_args[0][0]
parsed = urlparse(request.url)
params = parse_qs(parsed.query)
assert 'Action' not in params
@pytest.mark.parametrize('error_status_code', [
AsyncAWSQueryConnection.STATUS_CODE_REQUEST_TIMEOUT,
AsyncAWSQueryConnection.STATUS_CODE_NETWORK_CONNECT_TIMEOUT_ERROR,
AsyncAWSQueryConnection.STATUS_CODE_INTERNAL_ERROR,
AsyncAWSQueryConnection.STATUS_CODE_BAD_GATEWAY,
AsyncAWSQueryConnection.STATUS_CODE_SERVICE_UNAVAILABLE_ERROR,
AsyncAWSQueryConnection.STATUS_CODE_GATEWAY_TIMEOUT
])
def test_on_list_ready_error_response(self, error_status_code):
mocked_response_error = self.Response(
error_status_code,
"error_status_code"
)
result = self.x._on_list_ready(
"parent",
"markers",
"operation",
mocked_response_error
)
assert result == []
def Response(self, status, body):
r = Mock(name='response')
r.status = status
r.read.return_value = body
return r
@contextmanager
def mock_make_request(self):
self.x.make_request = Mock(name='make_request')
callback = PromiseMock(name='callback')
yield callback
def assert_make_request_called(self):
self.x.make_request.assert_called()
return self.x.make_request.call_args[1]['callback']
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
from elephas.spark_model import SparkModel
from elephas.utils.rdd_utils import to_simple_rdd
from pyspark import SparkContext, SparkConf
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 1
# Create Spark context
conf = SparkConf().setAppName('Mnist_Spark_MLP').setMaster('local[8]')
sc = SparkContext(conf=conf)
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
sgd = SGD(lr=0.1)
model.compile(sgd, 'categorical_crossentropy', ['acc'])
# Build RDD from numpy features and labels
rdd = to_simple_rdd(sc, x_train, y_train)
# Initialize SparkModel from Keras model and Spark context
spark_model = SparkModel(model, mode='synchronous')
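# mode='synchronous' averages the workers' updates each round; elephas also provides
# 'asynchronous' and 'hogwild' modes (mentioned here for orientation, not used in this example)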
# Train Spark model
spark_model.fit(rdd, epochs=epochs, batch_size=batch_size, verbose=2, validation_split=0.1)
# Evaluate Spark model by evaluating the underlying model
score = spark_model.master_network.evaluate(x_test, y_test, verbose=2)
print('Test accuracy:', score[1])
|
from functools import wraps
from .utils import smart_decorator, combine_alternatives
from .tree import Tree
from .exceptions import VisitError, GrammarError
from .lexer import Token
###{standalone
from inspect import getmembers, getmro
class Discard(Exception):
"""When raising the Discard exception in a transformer callback,
that node is discarded and won't appear in the parent.
"""
pass
# Transformers
class _Decoratable:
"Provides support for decorating methods with @v_args"
@classmethod
def _apply_decorator(cls, decorator, **kwargs):
mro = getmro(cls)
assert mro[0] is cls
libmembers = {name for _cls in mro[1:] for name, _ in getmembers(_cls)}
for name, value in getmembers(cls):
# Make sure the function isn't inherited (unless it's overwritten)
if name.startswith('_') or (name in libmembers and name not in cls.__dict__):
continue
if not callable(value):
continue
# Skip if v_args already applied (at the function level)
if hasattr(cls.__dict__[name], 'vargs_applied') or hasattr(value, 'vargs_applied'):
continue
static = isinstance(cls.__dict__[name], (staticmethod, classmethod))
setattr(cls, name, decorator(value, static=static, **kwargs))
return cls
def __class_getitem__(cls, _):
return cls
class Transformer(_Decoratable):
"""Transformers visit each node of the tree, and run the appropriate method on it according to the node's data.
Calls its methods (provided by the user via inheritance) according to ``tree.data``.
The returned value replaces the old one in the structure.
They work bottom-up (or depth-first), starting with the leaves and ending at the root of the tree.
Transformers can be used to implement map & reduce patterns. Because nodes are reduced from leaf to root,
at any point the callbacks may assume the children have already been transformed (if applicable).
``Transformer`` can do anything ``Visitor`` can do, but because it reconstructs the tree,
    it is slightly less efficient.
All these classes implement the transformer interface:
- ``Transformer`` - Recursively transforms the tree. This is the one you probably want.
- ``Transformer_InPlace`` - Non-recursive. Changes the tree in-place instead of returning new instances
- ``Transformer_InPlaceRecursive`` - Recursive. Changes the tree in-place instead of returning new instances
Parameters:
visit_tokens (bool, optional): Should the transformer visit tokens in addition to rules.
Setting this to ``False`` is slightly faster. Defaults to ``True``.
(For processing ignored tokens, use the ``lexer_callbacks`` options)
NOTE: A transformer without methods essentially performs a non-memoized deepcopy.
"""
__visit_tokens__ = True # For backwards compatibility
def __init__(self, visit_tokens=True):
self.__visit_tokens__ = visit_tokens
def _call_userfunc(self, tree, new_children=None):
# Assumes tree is already transformed
children = new_children if new_children is not None else tree.children
try:
f = getattr(self, tree.data)
except AttributeError:
return self.__default__(tree.data, children, tree.meta)
else:
try:
wrapper = getattr(f, 'visit_wrapper', None)
if wrapper is not None:
return f.visit_wrapper(f, tree.data, children, tree.meta)
else:
return f(children)
except (GrammarError, Discard):
raise
except Exception as e:
raise VisitError(tree.data, tree, e)
def _call_userfunc_token(self, token):
try:
f = getattr(self, token.type)
except AttributeError:
return self.__default_token__(token)
else:
try:
return f(token)
except (GrammarError, Discard):
raise
except Exception as e:
raise VisitError(token.type, token, e)
def _transform_children(self, children):
for c in children:
try:
if isinstance(c, Tree):
yield self._transform_tree(c)
elif self.__visit_tokens__ and isinstance(c, Token):
yield self._call_userfunc_token(c)
else:
yield c
except Discard:
pass
def _transform_tree(self, tree):
children = list(self._transform_children(tree.children))
return self._call_userfunc(tree, children)
def transform(self, tree):
"Transform the given tree, and return the final result"
return self._transform_tree(tree)
def __mul__(self, other):
"""Chain two transformers together, returning a new transformer.
"""
return TransformerChain(self, other)
def __default__(self, data, children, meta):
"""Default function that is called if there is no attribute matching ``data``
Can be overridden. Defaults to creating a new copy of the tree node (i.e. ``return Tree(data, children, meta)``)
"""
return Tree(data, children, meta)
def __default_token__(self, token):
"""Default function that is called if there is no attribute matching ``token.type``
Can be overridden. Defaults to returning the token as-is.
"""
return token
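# A minimal usage sketch (with an assumed parse tree, not one from a real grammar): rule
# callbacks receive the already-transformed children, and token callbacks receive the Token.
class _ExampleSumTransformer(Transformer):
    def NUMBER(self, token):
        return int(token)  # Token is a str subclass, so it converts directly
    def add(self, children):
        return sum(children)
# _ExampleSumTransformer().transform(Tree('add', [Token('NUMBER', '1'), Token('NUMBER', '2')]))
# evaluates bottom-up and returns 3.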
class InlineTransformer(Transformer): # XXX Deprecated
def _call_userfunc(self, tree, new_children=None):
# Assumes tree is already transformed
children = new_children if new_children is not None else tree.children
try:
f = getattr(self, tree.data)
except AttributeError:
return self.__default__(tree.data, children, tree.meta)
else:
return f(*children)
class TransformerChain(object):
def __init__(self, *transformers):
self.transformers = transformers
def transform(self, tree):
for t in self.transformers:
tree = t.transform(tree)
return tree
def __mul__(self, other):
return TransformerChain(*self.transformers + (other,))
class Transformer_InPlace(Transformer):
"""Same as Transformer, but non-recursive, and changes the tree in-place instead of returning new instances
Useful for huge trees. Conservative in memory.
"""
def _transform_tree(self, tree): # Cancel recursion
return self._call_userfunc(tree)
def transform(self, tree):
for subtree in tree.iter_subtrees():
subtree.children = list(self._transform_children(subtree.children))
return self._transform_tree(tree)
class Transformer_NonRecursive(Transformer):
"""Same as Transformer but non-recursive.
Like Transformer, it doesn't change the original tree.
Useful for huge trees.
"""
def transform(self, tree):
# Tree to postfix
rev_postfix = []
q = [tree]
while q:
t = q.pop()
rev_postfix.append(t)
if isinstance(t, Tree):
q += t.children
# Postfix to tree
stack = []
for x in reversed(rev_postfix):
if isinstance(x, Tree):
size = len(x.children)
if size:
args = stack[-size:]
del stack[-size:]
else:
args = []
stack.append(self._call_userfunc(x, args))
else:
stack.append(x)
        t, = stack  # We should have only one tree remaining
return t
class Transformer_InPlaceRecursive(Transformer):
"Same as Transformer, recursive, but changes the tree in-place instead of returning new instances"
def _transform_tree(self, tree):
tree.children = list(self._transform_children(tree.children))
return self._call_userfunc(tree)
# Visitors
class VisitorBase:
def _call_userfunc(self, tree):
return getattr(self, tree.data, self.__default__)(tree)
def __default__(self, tree):
"""Default function that is called if there is no attribute matching ``tree.data``
Can be overridden. Defaults to doing nothing.
"""
return tree
def __class_getitem__(cls, _):
return cls
class Visitor(VisitorBase):
"""Tree visitor, non-recursive (can handle huge trees).
Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data``
"""
def visit(self, tree):
"Visits the tree, starting with the leaves and finally the root (bottom-up)"
for subtree in tree.iter_subtrees():
self._call_userfunc(subtree)
return tree
    def visit_topdown(self, tree):
"Visit the tree, starting at the root, and ending at the leaves (top-down)"
for subtree in tree.iter_subtrees_topdown():
self._call_userfunc(subtree)
return tree
class Visitor_Recursive(VisitorBase):
"""Bottom-up visitor, recursive.
Visiting a node calls its methods (provided by the user via inheritance) according to ``tree.data``
Slightly faster than the non-recursive version.
"""
def visit(self, tree):
"Visits the tree, starting with the leaves and finally the root (bottom-up)"
for child in tree.children:
if isinstance(child, Tree):
self.visit(child)
self._call_userfunc(tree)
return tree
    def visit_topdown(self, tree):
"Visit the tree, starting at the root, and ending at the leaves (top-down)"
self._call_userfunc(tree)
for child in tree.children:
if isinstance(child, Tree):
self.visit_topdown(child)
return tree
def visit_children_decor(func):
"See Interpreter"
@wraps(func)
def inner(cls, tree):
values = cls.visit_children(tree)
return func(cls, values)
return inner
class Interpreter(_Decoratable):
"""Interpreter walks the tree starting at the root.
Visits the tree, starting with the root and finally the leaves (top-down)
For each tree node, it calls its methods (provided by user via inheritance) according to ``tree.data``.
Unlike ``Transformer`` and ``Visitor``, the Interpreter doesn't automatically visit its sub-branches.
The user has to explicitly call ``visit``, ``visit_children``, or use the ``@visit_children_decor``.
This allows the user to implement branching and loops.
"""
def visit(self, tree):
f = getattr(self, tree.data)
wrapper = getattr(f, 'visit_wrapper', None)
if wrapper is not None:
return f.visit_wrapper(f, tree.data, tree.children, tree.meta)
else:
return f(tree)
def visit_children(self, tree):
return [self.visit(child) if isinstance(child, Tree) else child
for child in tree.children]
def __getattr__(self, name):
return self.__default__
def __default__(self, tree):
return self.visit_children(tree)
# Decorators
def _apply_decorator(obj, decorator, **kwargs):
try:
_apply = obj._apply_decorator
except AttributeError:
return decorator(obj, **kwargs)
else:
return _apply(decorator, **kwargs)
def _inline_args__func(func):
@wraps(func)
def create_decorator(_f, with_self):
if with_self:
def f(self, children):
return _f(self, *children)
else:
def f(self, children):
return _f(*children)
return f
return smart_decorator(func, create_decorator)
def inline_args(obj): # XXX Deprecated
return _apply_decorator(obj, _inline_args__func)
def _visitor_args_func_dec(func, visit_wrapper=None, static=False):
def create_decorator(_f, with_self):
if with_self:
def f(self, *args, **kwargs):
return _f(self, *args, **kwargs)
else:
def f(self, *args, **kwargs):
return _f(*args, **kwargs)
return f
if static:
f = wraps(func)(create_decorator(func, False))
else:
f = smart_decorator(func, create_decorator)
f.vargs_applied = True
f.visit_wrapper = visit_wrapper
return f
def _vargs_inline(f, _data, children, _meta):
return f(*children)
def _vargs_meta_inline(f, _data, children, meta):
return f(meta, *children)
def _vargs_meta(f, _data, children, meta):
return f(children, meta) # TODO swap these for consistency? Backwards incompatible!
def _vargs_tree(f, data, children, meta):
return f(Tree(data, children, meta))
def v_args(inline=False, meta=False, tree=False, wrapper=None):
"""A convenience decorator factory for modifying the behavior of user-supplied visitor methods.
By default, callback methods of transformers/visitors accept one argument - a list of the node's children.
``v_args`` can modify this behavior. When used on a transformer/visitor class definition,
it applies to all the callback methods inside it.
``v_args`` can be applied to a single method, or to an entire class. When applied to both,
the options given to the method take precedence.
Parameters:
inline (bool, optional): Children are provided as ``*args`` instead of a list argument (not recommended for very long lists).
meta (bool, optional): Provides two arguments: ``children`` and ``meta`` (instead of just the first)
tree (bool, optional): Provides the entire tree as the argument, instead of the children.
wrapper (function, optional): Provide a function to decorate all methods.
Example:
::
@v_args(inline=True)
class SolveArith(Transformer):
def add(self, left, right):
return left + right
class ReverseNotation(Transformer_InPlace):
@v_args(tree=True)
def tree_node(self, tree):
tree.children = tree.children[::-1]
"""
if tree and (meta or inline):
raise ValueError("Visitor functions cannot combine 'tree' with 'meta' or 'inline'.")
func = None
if meta:
if inline:
func = _vargs_meta_inline
else:
func = _vargs_meta
elif inline:
func = _vargs_inline
elif tree:
func = _vargs_tree
if wrapper is not None:
if func is not None:
raise ValueError("Cannot use 'wrapper' along with 'tree', 'meta' or 'inline'.")
func = wrapper
def _visitor_args_dec(obj):
return _apply_decorator(obj, _visitor_args_func_dec, visit_wrapper=func)
return _visitor_args_dec
###}
# --- Visitor Utilities ---
class CollapseAmbiguities(Transformer):
"""
Transforms a tree that contains any number of _ambig nodes into a list of trees,
each one containing an unambiguous tree.
The length of the resulting list is the product of the length of all _ambig nodes.
Warning: This may quickly explode for highly ambiguous trees.
"""
def _ambig(self, options):
return sum(options, [])
def __default__(self, data, children_lists, meta):
return [Tree(data, children, meta) for children in combine_alternatives(children_lists)]
def __default_token__(self, t):
return [t]
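# A minimal sketch of CollapseAmbiguities on an assumed ambiguous tree: each alternative under
# the '_ambig' node yields one unambiguous copy of the surrounding tree.
def _demo_collapse_ambiguities():
    ambiguous = Tree('start', [Tree('_ambig', [Tree('a', []), Tree('b', [])])])
    # Expected result: [Tree('start', [Tree('a', [])]), Tree('start', [Tree('b', [])])]
    return CollapseAmbiguities().transform(ambiguous)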
|
from collections import Counter
from datetime import timedelta
import functools
import itertools
import logging
import random
from typing import Any, Dict, List, Optional, Tuple
from zigpy.zcl.clusters.general import Identify, LevelControl, OnOff
from zigpy.zcl.clusters.lighting import Color
from zigpy.zcl.foundation import Status
from homeassistant.components import light
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_EFFECT_LIST,
ATTR_HS_COLOR,
ATTR_MAX_MIREDS,
ATTR_MIN_MIREDS,
ATTR_WHITE_VALUE,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
SUPPORT_WHITE_VALUE,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_ON, STATE_UNAVAILABLE
from homeassistant.core import State, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.event import async_track_time_interval
import homeassistant.util.color as color_util
from .core import discovery, helpers
from .core.const import (
CHANNEL_COLOR,
CHANNEL_LEVEL,
CHANNEL_ON_OFF,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
EFFECT_BLINK,
EFFECT_BREATHE,
EFFECT_DEFAULT_VARIANT,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
SIGNAL_SET_LEVEL,
)
from .core.helpers import LogMixin
from .core.registries import ZHA_ENTITIES
from .core.typing import ZhaDeviceType
from .entity import ZhaEntity, ZhaGroupEntity
_LOGGER = logging.getLogger(__name__)
CAPABILITIES_COLOR_LOOP = 0x4
CAPABILITIES_COLOR_XY = 0x08
CAPABILITIES_COLOR_TEMP = 0x10
UPDATE_COLORLOOP_ACTION = 0x1
UPDATE_COLORLOOP_DIRECTION = 0x2
UPDATE_COLORLOOP_TIME = 0x4
UPDATE_COLORLOOP_HUE = 0x8
FLASH_EFFECTS = {light.FLASH_SHORT: EFFECT_BLINK, light.FLASH_LONG: EFFECT_BREATHE}
UNSUPPORTED_ATTRIBUTE = 0x86
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, light.DOMAIN)
GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, light.DOMAIN)
PARALLEL_UPDATES = 0
SIGNAL_LIGHT_GROUP_STATE_CHANGED = "zha_light_group_state_changed"
SUPPORT_GROUP_LIGHT = (
SUPPORT_BRIGHTNESS
| SUPPORT_COLOR_TEMP
| SUPPORT_EFFECT
| SUPPORT_FLASH
| SUPPORT_COLOR
| SUPPORT_TRANSITION
| SUPPORT_WHITE_VALUE
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation light from config entry."""
entities_to_create = hass.data[DATA_ZHA][light.DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
class BaseLight(LogMixin, light.LightEntity):
"""Operations common to all light entities."""
def __init__(self, *args, **kwargs):
"""Initialize the light."""
super().__init__(*args, **kwargs)
self._available: bool = False
self._brightness: Optional[int] = None
self._off_brightness: Optional[int] = None
self._hs_color: Optional[Tuple[float, float]] = None
self._color_temp: Optional[int] = None
self._min_mireds: Optional[int] = 153
self._max_mireds: Optional[int] = 500
self._white_value: Optional[int] = None
self._effect_list: Optional[List[str]] = None
self._effect: Optional[str] = None
self._supported_features: int = 0
self._state: bool = False
self._on_off_channel = None
self._level_channel = None
self._color_channel = None
self._identify_channel = None
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return state attributes."""
attributes = {"off_brightness": self._off_brightness}
return attributes
@property
def is_on(self) -> bool:
"""Return true if entity is on."""
if self._state is None:
return False
return self._state
@property
def brightness(self):
"""Return the brightness of this light."""
return self._brightness
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._min_mireds
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._max_mireds
def set_level(self, value):
"""Set the brightness of this light between 0..254.
brightness level 255 is a special value instructing the device to come
on at `on_level` Zigbee attribute value, regardless of the last set
level
"""
value = max(0, min(254, value))
self._brightness = value
self.async_write_ha_state()
@property
def hs_color(self):
"""Return the hs color value [int, int]."""
return self._hs_color
@property
def color_temp(self):
"""Return the CT color value in mireds."""
return self._color_temp
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effect_list
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
transition = kwargs.get(light.ATTR_TRANSITION)
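        # Home Assistant passes the transition in seconds, while the ZCL commands below take
        # tenths of a second, hence the * 10 (with 1, i.e. 0.1s, as the fallback)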
duration = transition * 10 if transition else 1
brightness = kwargs.get(light.ATTR_BRIGHTNESS)
effect = kwargs.get(light.ATTR_EFFECT)
flash = kwargs.get(light.ATTR_FLASH)
if brightness is None and self._off_brightness is not None:
brightness = self._off_brightness
t_log = {}
if (
brightness is not None or transition
) and self._supported_features & light.SUPPORT_BRIGHTNESS:
if brightness is not None:
level = min(254, brightness)
else:
level = self._brightness or 254
result = await self._level_channel.move_to_level_with_on_off(
level, duration
)
t_log["move_to_level_with_on_off"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._state = bool(level)
if level:
self._brightness = level
if brightness is None or brightness:
# since some lights don't always turn on with move_to_level_with_on_off,
# we should call the on command on the on_off cluster if brightness is not 0.
result = await self._on_off_channel.on()
t_log["on_off"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._state = True
if (
light.ATTR_COLOR_TEMP in kwargs
and self.supported_features & light.SUPPORT_COLOR_TEMP
):
temperature = kwargs[light.ATTR_COLOR_TEMP]
result = await self._color_channel.move_to_color_temp(temperature, duration)
t_log["move_to_color_temp"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._color_temp = temperature
if (
light.ATTR_HS_COLOR in kwargs
and self.supported_features & light.SUPPORT_COLOR
):
hs_color = kwargs[light.ATTR_HS_COLOR]
xy_color = color_util.color_hs_to_xy(*hs_color)
result = await self._color_channel.move_to_color(
int(xy_color[0] * 65535), int(xy_color[1] * 65535), duration
)
t_log["move_to_color"] = result
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
self.debug("turned on: %s", t_log)
return
self._hs_color = hs_color
if (
effect == light.EFFECT_COLORLOOP
and self.supported_features & light.SUPPORT_EFFECT
):
result = await self._color_channel.color_loop_set(
UPDATE_COLORLOOP_ACTION
| UPDATE_COLORLOOP_DIRECTION
| UPDATE_COLORLOOP_TIME,
0x2, # start from current hue
0x1, # only support up
transition if transition else 7, # transition
0, # no hue
)
t_log["color_loop_set"] = result
self._effect = light.EFFECT_COLORLOOP
elif (
self._effect == light.EFFECT_COLORLOOP
and effect != light.EFFECT_COLORLOOP
and self.supported_features & light.SUPPORT_EFFECT
):
result = await self._color_channel.color_loop_set(
UPDATE_COLORLOOP_ACTION,
0x0,
0x0,
0x0,
0x0, # update action only, action off, no dir,time,hue
)
t_log["color_loop_set"] = result
self._effect = None
if flash is not None and self._supported_features & light.SUPPORT_FLASH:
result = await self._identify_channel.trigger_effect(
FLASH_EFFECTS[flash], EFFECT_DEFAULT_VARIANT
)
t_log["trigger_effect"] = result
self._off_brightness = None
self.debug("turned on: %s", t_log)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
duration = kwargs.get(light.ATTR_TRANSITION)
supports_level = self.supported_features & light.SUPPORT_BRIGHTNESS
if duration and supports_level:
result = await self._level_channel.move_to_level_with_on_off(
0, duration * 10
)
else:
result = await self._on_off_channel.off()
self.debug("turned off: %s", result)
if not isinstance(result, list) or result[1] is not Status.SUCCESS:
return
self._state = False
if duration and supports_level:
# store current brightness so that the next turn_on uses it.
self._off_brightness = self._brightness
self.async_write_ha_state()
@STRICT_MATCH(channel_names=CHANNEL_ON_OFF, aux_channels={CHANNEL_COLOR, CHANNEL_LEVEL})
class Light(BaseLight, ZhaEntity):
"""Representation of a ZHA or ZLL light."""
_REFRESH_INTERVAL = (45, 75)
def __init__(self, unique_id, zha_device: ZhaDeviceType, channels, **kwargs):
"""Initialize the ZHA light."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._on_off_channel = self.cluster_channels.get(CHANNEL_ON_OFF)
self._level_channel = self.cluster_channels.get(CHANNEL_LEVEL)
self._color_channel = self.cluster_channels.get(CHANNEL_COLOR)
self._identify_channel = self.zha_device.channels.identify_ch
if self._color_channel:
self._min_mireds: Optional[int] = self._color_channel.min_mireds
self._max_mireds: Optional[int] = self._color_channel.max_mireds
self._cancel_refresh_handle = None
effect_list = []
if self._level_channel:
self._supported_features |= light.SUPPORT_BRIGHTNESS
self._supported_features |= light.SUPPORT_TRANSITION
self._brightness = 0
if self._color_channel:
color_capabilities = self._color_channel.get_color_capabilities()
if color_capabilities & CAPABILITIES_COLOR_TEMP:
self._supported_features |= light.SUPPORT_COLOR_TEMP
if color_capabilities & CAPABILITIES_COLOR_XY:
self._supported_features |= light.SUPPORT_COLOR
self._hs_color = (0, 0)
if color_capabilities & CAPABILITIES_COLOR_LOOP:
self._supported_features |= light.SUPPORT_EFFECT
effect_list.append(light.EFFECT_COLORLOOP)
if self._identify_channel:
self._supported_features |= light.SUPPORT_FLASH
if effect_list:
self._effect_list = effect_list
@callback
def async_set_state(self, attr_id, attr_name, value):
"""Set the state."""
self._state = bool(value)
if value:
self._off_brightness = None
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
self.async_accept_signal(
self._on_off_channel, SIGNAL_ATTR_UPDATED, self.async_set_state
)
if self._level_channel:
self.async_accept_signal(
self._level_channel, SIGNAL_SET_LEVEL, self.set_level
)
refresh_interval = random.randint(*[x * 60 for x in self._REFRESH_INTERVAL])
self._cancel_refresh_handle = async_track_time_interval(
self.hass, self._refresh, timedelta(seconds=refresh_interval)
)
self.async_accept_signal(
None,
SIGNAL_LIGHT_GROUP_STATE_CHANGED,
self._maybe_force_refresh,
signal_override=True,
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect entity object when removed."""
self._cancel_refresh_handle()
await super().async_will_remove_from_hass()
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
self._state = last_state.state == STATE_ON
if "brightness" in last_state.attributes:
self._brightness = last_state.attributes["brightness"]
if "off_brightness" in last_state.attributes:
self._off_brightness = last_state.attributes["off_brightness"]
if "color_temp" in last_state.attributes:
self._color_temp = last_state.attributes["color_temp"]
if "hs_color" in last_state.attributes:
self._hs_color = last_state.attributes["hs_color"]
if "effect" in last_state.attributes:
self._effect = last_state.attributes["effect"]
async def async_get_state(self, from_cache=True):
"""Attempt to retrieve on off state from the light."""
if not from_cache and not self.available:
return
self.debug("polling current state - from cache: %s", from_cache)
if self._on_off_channel:
state = await self._on_off_channel.get_attribute_value(
"on_off", from_cache=from_cache
)
if state is not None:
self._state = state
if self._level_channel:
level = await self._level_channel.get_attribute_value(
"current_level", from_cache=from_cache
)
if level is not None:
self._brightness = level
if self._color_channel:
attributes = []
color_capabilities = self._color_channel.get_color_capabilities()
if (
color_capabilities is not None
and color_capabilities & CAPABILITIES_COLOR_TEMP
):
attributes.append("color_temperature")
if (
color_capabilities is not None
and color_capabilities & CAPABILITIES_COLOR_XY
):
attributes.append("current_x")
attributes.append("current_y")
if (
color_capabilities is not None
and color_capabilities & CAPABILITIES_COLOR_LOOP
):
attributes.append("color_loop_active")
results = await self._color_channel.get_attributes(
attributes, from_cache=from_cache
)
if (
"color_temperature" in results
and results["color_temperature"] is not None
):
self._color_temp = results["color_temperature"]
color_x = results.get("current_x")
color_y = results.get("current_y")
if color_x is not None and color_y is not None:
self._hs_color = color_util.color_xy_to_hs(
float(color_x / 65535), float(color_y / 65535)
)
if (
"color_loop_active" in results
and results["color_loop_active"] is not None
):
color_loop_active = results["color_loop_active"]
if color_loop_active == 1:
self._effect = light.EFFECT_COLORLOOP
async def _refresh(self, time):
"""Call async_get_state at an interval."""
await self.async_get_state(from_cache=False)
self.async_write_ha_state()
async def _maybe_force_refresh(self, signal):
"""Force update the state if the signal contains the entity id for this entity."""
if self.entity_id in signal["entity_ids"]:
await self.async_get_state(from_cache=False)
self.async_write_ha_state()
@STRICT_MATCH(
channel_names=CHANNEL_ON_OFF,
aux_channels={CHANNEL_COLOR, CHANNEL_LEVEL},
manufacturers="Philips",
)
class HueLight(Light):
"""Representation of a HUE light which does not report attributes."""
_REFRESH_INTERVAL = (3, 5)
@GROUP_MATCH()
class LightGroup(BaseLight, ZhaGroupEntity):
"""Representation of a light group."""
def __init__(
self, entity_ids: List[str], unique_id: str, group_id: int, zha_device, **kwargs
) -> None:
"""Initialize a light group."""
super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs)
group = self.zha_device.gateway.get_group(self._group_id)
self._on_off_channel = group.endpoint[OnOff.cluster_id]
self._level_channel = group.endpoint[LevelControl.cluster_id]
self._color_channel = group.endpoint[Color.cluster_id]
self._identify_channel = group.endpoint[Identify.cluster_id]
self._debounced_member_refresh = None
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
if self._debounced_member_refresh is None:
force_refresh_debouncer = Debouncer(
self.hass,
_LOGGER,
cooldown=3,
immediate=True,
function=self._force_member_updates,
)
self._debounced_member_refresh = force_refresh_debouncer
async def async_turn_on(self, **kwargs):
"""Turn the entity on."""
await super().async_turn_on(**kwargs)
await self._debounced_member_refresh.async_call()
async def async_turn_off(self, **kwargs):
"""Turn the entity off."""
await super().async_turn_off(**kwargs)
await self._debounced_member_refresh.async_call()
async def async_update(self) -> None:
"""Query all members and determine the light group state."""
all_states = [self.hass.states.get(x) for x in self._entity_ids]
states: List[State] = list(filter(None, all_states))
on_states = [state for state in states if state.state == STATE_ON]
self._state = len(on_states) > 0
self._available = any(state.state != STATE_UNAVAILABLE for state in states)
self._brightness = helpers.reduce_attribute(on_states, ATTR_BRIGHTNESS)
self._hs_color = helpers.reduce_attribute(
on_states, ATTR_HS_COLOR, reduce=helpers.mean_tuple
)
self._white_value = helpers.reduce_attribute(on_states, ATTR_WHITE_VALUE)
self._color_temp = helpers.reduce_attribute(on_states, ATTR_COLOR_TEMP)
self._min_mireds = helpers.reduce_attribute(
states, ATTR_MIN_MIREDS, default=153, reduce=min
)
self._max_mireds = helpers.reduce_attribute(
states, ATTR_MAX_MIREDS, default=500, reduce=max
)
self._effect_list = None
all_effect_lists = list(helpers.find_state_attributes(states, ATTR_EFFECT_LIST))
if all_effect_lists:
# Merge all effects from all effect_lists with a union merge.
self._effect_list = list(set().union(*all_effect_lists))
self._effect = None
all_effects = list(helpers.find_state_attributes(on_states, ATTR_EFFECT))
if all_effects:
# Report the most common effect.
effects_count = Counter(itertools.chain(all_effects))
self._effect = effects_count.most_common(1)[0][0]
self._supported_features = 0
for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES):
# Merge supported features by emulating support for every feature
# we find.
self._supported_features |= support
# Bitwise-and the supported features with the GroupedLight's features
# so that we don't break in the future when a new feature is added.
self._supported_features &= SUPPORT_GROUP_LIGHT
async def _force_member_updates(self):
"""Force the update of member entities to ensure the states are correct for bulbs that don't report their state."""
async_dispatcher_send(
self.hass,
SIGNAL_LIGHT_GROUP_STATE_CHANGED,
{"entity_ids": self._entity_ids},
)
|
from __future__ import division
from babelfish import Language
from subliminal.providers.addic7ed import Addic7edSubtitle
from subliminal.providers.opensubtitles import OpenSubtitlesSubtitle
from subliminal.providers.podnapisi import PodnapisiSubtitle
from subliminal.score import compute_score, episode_scores, movie_scores, solve_episode_equations, solve_movie_equations
def test_episode_equations():
expected_scores = {}
for symbol, score in solve_episode_equations().items():
expected_scores[str(symbol)] = score
assert episode_scores == expected_scores
def test_movie_equations():
expected_scores = {}
for symbol, score in solve_movie_equations().items():
expected_scores[str(symbol)] = score
assert movie_scores == expected_scores
def test_compute_score(episodes):
video = episodes['bbt_s07e05']
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'the big BANG theory', 6, 4, None, None, '1080p', None)
expected_score = episode_scores['series'] + episode_scores['year'] + episode_scores['country']
assert compute_score(subtitle, video) == expected_score
def test_get_score_cap(movies):
video = movies['man_of_steel']
subtitle = OpenSubtitlesSubtitle(Language('eng'), True, None, 1, 'hash', 'movie', '5b8f8f4e41ccb21e',
'Man of Steel', 'man.of.steel.2013.720p.bluray.x264-felony.mkv', 2013, 770828,
None, None, '', 'utf-8')
assert compute_score(subtitle, video) == movie_scores['hash']
def test_compute_score_movie_imdb_id(movies):
video = movies['man_of_steel']
subtitle = OpenSubtitlesSubtitle(Language('eng'), True, None, 1, 'hash', 'movie', None,
'Man of Steel', 'man.of.steel.2013.720p.bluray.x264-felony.mkv', 2013, 770828,
None, None, '', 'utf-8')
assert compute_score(subtitle, video) == sum(movie_scores.get(m, 0) for m in
('imdb_id', 'title', 'year', 'country', 'release_group', 'source',
'resolution', 'video_codec'))
def test_compute_score_episode_title(episodes):
video = episodes['bbt_s07e05']
subtitle = PodnapisiSubtitle(Language('eng'), True, None, 1,
['The.Big.Bang.Theory.S07E05.The.Workplace.Proximity.720p.HDTV.x264-DIMENSION.mkv'],
None, 7, 5, None)
assert compute_score(subtitle, video) == sum(episode_scores.get(m, 0) for m in
('series', 'year', 'country', 'season', 'episode', 'release_group',
'source', 'resolution', 'video_codec', 'title'))
def test_compute_score_hash_hearing_impaired(movies):
video = movies['man_of_steel']
subtitle = OpenSubtitlesSubtitle(Language('eng'), True, None, 1, 'hash', 'movie', '5b8f8f4e41ccb21e',
'Man of Steel', 'man.of.steel.2013.720p.bluray.x264-felony.mkv', 2013, 770828,
None, None, '', 'utf-8')
assert compute_score(subtitle, video, hearing_impaired=True) == (movie_scores['hash'] +
movie_scores['hearing_impaired'])
|
from django.core.exceptions import ImproperlyConfigured
from shop.models.order import OrderModel
class PaymentProvider:
"""
Base class for all Payment Service Providers.
"""
@property
def namespace(self):
"""
Use a unique namespace for this payment provider. It is used to build the communication URLs
exposed to an external payment service provider.
"""
msg = "The attribute `namespace` must be implemented by the class `{}`"
raise NotImplementedError(msg.format(self.__class__.__name__))
def get_urls(self):
"""
Return a list of URL patterns for external communication with the payment service provider.
"""
return []
def get_payment_request(self, cart, request):
"""
Build a JavaScript expression which is evaluated by the success handler on the page
submitting the purchase command. When redirecting to another page, use:
```
window.location.href="URL-of-other-page";
```
since this expression is evaluated inside an AngularJS directive.
"""
return 'alert("Please implement method `get_payment_request` in the Python class inheriting from `PaymentProvider`!");'
class ForwardFundPayment(PaymentProvider):
"""
Provides a simple prepayment payment provider.
"""
namespace = 'forward-fund-payment'
def __init__(self):
        has_no_payment_required = callable(getattr(OrderModel, 'no_payment_required', None))
        has_awaiting_payment = callable(getattr(OrderModel, 'awaiting_payment', None))
        if not (has_no_payment_required and has_awaiting_payment):
msg = "Missing methods in Order model. Add 'shop.payment.workflows.ManualPaymentWorkflowMixin' to SHOP_ORDER_WORKFLOWS."
raise ImproperlyConfigured(msg)
super().__init__()
def get_payment_request(self, cart, request):
order = OrderModel.objects.create_from_cart(cart, request)
order.populate_from_cart(cart, request)
if order.total == 0:
order.no_payment_required()
else:
order.awaiting_payment()
order.save(with_notification=True)
return 'window.location.href="{}";'.format(order.get_absolute_url())
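# Hedged illustration (not part of django-SHOP): a minimal redirect-style
# provider following the contract documented in ``PaymentProvider`` above.
# The namespace and the gateway URL are hypothetical placeholders.
class ExampleRedirectPayment(PaymentProvider):
    """Sketch of a provider that forwards the customer to an external checkout page."""
    namespace = 'example-redirect-payment'
    def get_payment_request(self, cart, request):
        # Redirect to a (hypothetical) hosted checkout page, using the
        # `window.location.href` pattern suggested by the base class docstring.
        return 'window.location.href="https://payments.example.com/checkout";'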
|
from flexx.util.testing import run_tests_if_main, raises
from flexx.util.logging import capture_log
import time
import asyncio
import threading
import multiprocessing
from flexx import app, event
def test_add_handlers():
server = app.current_server()
tornado_app = server.app
assert tornado_app.add_handlers
def test_restarting():
""" Test stopping and starting the ioloop.
"""
res = []
def add_res(i):
res.append(i)
def try_start():
try:
app.start()
except RuntimeError:
res.append('RTE')
# Create new ioloop always
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# Make Flexx use it
server = app.create_server()
loop.call_soon(add_res, 1)
loop.call_soon(add_res, 2)
loop.call_soon(app.stop) # actually, just calling stop() would work as well
app.start()
assert server._running == False
loop.call_soon(try_start) # test that cannot start twice
loop.call_soon(add_res, 3)
loop.call_soon(add_res, 4)
loop.call_soon(app.stop)
app.start()
assert server._running == False
loop.call_soon(try_start) # test that cannot start twice
loop.call_soon(add_res, 5)
loop.call_soon(add_res, 6)
loop.call_soon(app.stop)
app.start()
assert server._running == False
assert res == [1, 2, 'RTE', 3, 4, 'RTE', 5, 6]
def test_more_stopping():
""" Test calling stop multiple times.
"""
# This is why you want to create new IOLoop instances for each test
# Create new ioloop and make Flexx use it
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
server = app.create_server()
app.stop() # triggers event to stop
app.start()
app.stop() # Extra stop - pending stop event
    # Which means the next start hardly blocks
t0 = time.time()
loop.call_later(0.2, app.stop)
app.start()
assert time.time() - t0 < 0.1
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
server = app.create_server()
    # But stops don't stack
app.stop()
app.stop()
app.stop()
app.stop()
# Flush all stops ...
app.stop()
app.start()
    # ... so that the loop now behaves as expected
t0 = time.time()
loop.call_later(0.2, app.stop)
app.start()
assert time.time() - t0 >= 0.1
def test_rebinding_ioloop():
""" Test recreating server objects, and its binding to the current ioloop.
"""
res = []
def add_res(i):
res.append(i)
# Create new ioloop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# Create new flexx server, which binds to that loop
server1 = app.create_server()
assert server1 is app.current_server()
#
assert loop is server1._loop
# Create new ioloop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# This is a new loop
assert loop is not server1._loop
# Create new flexx server, which binds to that loop
server2 = app.create_server()
assert server2 is app.current_server()
assert server1 is not server2
#
assert loop is server2._loop
def test_flexx_in_thread1():
""" Test threading and ioloop selection.
"""
def main():
asyncio.set_event_loop(loop2)
app.create_server()
# Create 3 loops, nr 2 is made current in the thread
loop1 = asyncio.new_event_loop()
loop2 = asyncio.new_event_loop()
loop3 = asyncio.new_event_loop()
asyncio.set_event_loop(loop1)
server1 = app.create_server()
t = threading.Thread(target=main)
t.start()
t.join()
server2 = app.current_server() # still current as set by the thread
asyncio.set_event_loop(loop3)
server3 = app.create_server()
assert server1._loop is loop1
assert server2._loop is loop2
assert server3._loop is loop3
def test_flexx_in_thread2():
""" Test running a component in another thread.
"""
res = []
class MyComponent1(event.Component):
foo = event.IntProp(0, settable=True)
@event.reaction('foo')
def on_foo(self, *events):
for ev in events:
res.append(ev.new_value)
def main():
# Create fresh ioloop and make flexx use it
# event.loop.reset()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
app.create_server()
# Create component and manipulate prop
comp = MyComponent1()
comp.set_foo(3)
comp.set_foo(4)
        # Run mainloop for one iteration
loop.call_later(0.2, app.stop)
app.start()
t = threading.Thread(target=main)
event.loop.reset()
t.start()
t.join()
event.loop.integrate()
assert res == [0, 3, 4]
def test_flexx_in_thread3():
""" Test starting and creating server when a server is currently running.
"""
res = []
def main():
# Create fresh ioloop and make flexx use it
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
app.create_server() # calls event.loop.integrate()
app.start()
def try_start():
try:
app.start()
except RuntimeError:
res.append('start-fail')
def try_create():
try:
main()
except RuntimeError:
res.append('create-fail') # because create_server() cannot close current
t = threading.Thread(target=main)
t.start()
# With that thread running ...
while not app.current_server()._running:
time.sleep(0.01)
with raises(RuntimeError):
app.start()
with raises(RuntimeError):
app.create_server()
t1 = threading.Thread(target=try_start)
t1.start()
t1.join()
t2 = threading.Thread(target=try_create)
t2.start()
t2.join()
# Stop
app.stop() # Start does not work, but we can stop it!
t.join() # Otherwise it would never join
    # Note that we cannot start it right after calling stop, because it won't
# stop *at once*. We need to join first.
assert res == ['start-fail', 'create-fail']
def test_flexx_in_thread4():
""" Test threading starting server in other thread where it is created.
"""
res = []
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
app.create_server()
def try_start():
try:
app.stop()
app.start()
except RuntimeError:
res.append('start-fail')
else:
res.append('start-ok')
def main():
app.create_server(loop=asyncio.new_event_loop())
try_start()
# Try to start server that was created in other thread -> fail
t = threading.Thread(target=try_start)
t.start()
t.join()
# Try to start in same thread as created -> ok
t = threading.Thread(target=main)
t.start()
t.join()
assert res == ['start-fail', 'start-ok']
def test_flexx_in_thread5():
""" Test using loop arg for easier use.
"""
res = []
server = app.create_server(loop=asyncio.new_event_loop())
assert server.serving
# note: mmmm, I don't particularly like this, but need it to get Tornado working
assert server._loop is asyncio.get_event_loop()
def main():
# likewise, we cannot do this atm
# app.stop()
# app.start()
try:
curloop = asyncio.get_event_loop()
except RuntimeError:
res.append(4)
else:
assert server._loop is curloop
res.append(3)
t = threading.Thread(target=main)
t.start()
t.join()
assert res == [4]
def multiprocessing_func():
import flexx
app.create_server(port=0) # Explicitly ask for unused port
app.call_later(0.1, app.stop)
app.start()
def test_flexx_multiprocessing():
""" Using multiprocessing, multiple Flexx event loops can run in parallel.
"""
# Can't do this with threading, because Flexx uses a global server
t0 = time.time()
processes = []
for i in range(10):
p = multiprocessing.Process(target=multiprocessing_func)
p.daemon = True
p.start()
processes.append(p)
for p in processes:
p.join()
# time to start processes is unpredictable, especially on pypy
t1 = time.time()
# assert t1 - t0 < len(processes) * 0.1
assert True # Just arriving here is enough to pass this test
def test_serving_apps_at_output_message():
""" Test for 'Serving apps at' ready signal.
"""
with capture_log('info') as log:
server = app.create_server()
app.stop() # triggers event to stop
app.start()
assert 'Serving apps at' in ''.join(log)
run_tests_if_main()
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_INITIAL,
ATTR_MAXIMUM,
ATTR_MINIMUM,
ATTR_STEP,
DOMAIN,
SERVICE_CONFIGURE,
VALUE,
)
_LOGGER = logging.getLogger(__name__)
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if not state.state.isdigit():
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if (
cur_state.state == state.state
and cur_state.attributes.get(ATTR_INITIAL) == state.attributes.get(ATTR_INITIAL)
and cur_state.attributes.get(ATTR_MAXIMUM) == state.attributes.get(ATTR_MAXIMUM)
and cur_state.attributes.get(ATTR_MINIMUM) == state.attributes.get(ATTR_MINIMUM)
and cur_state.attributes.get(ATTR_STEP) == state.attributes.get(ATTR_STEP)
):
return
service_data = {ATTR_ENTITY_ID: state.entity_id, VALUE: state.state}
service = SERVICE_CONFIGURE
if ATTR_INITIAL in state.attributes:
service_data[ATTR_INITIAL] = state.attributes[ATTR_INITIAL]
if ATTR_MAXIMUM in state.attributes:
service_data[ATTR_MAXIMUM] = state.attributes[ATTR_MAXIMUM]
if ATTR_MINIMUM in state.attributes:
service_data[ATTR_MINIMUM] = state.attributes[ATTR_MINIMUM]
if ATTR_STEP in state.attributes:
service_data[ATTR_STEP] = state.attributes[ATTR_STEP]
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Counter states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
|
import logging
import voluptuous as vol
from homeassistant.components.image_processing import (
ATTR_AGE,
ATTR_GENDER,
ATTR_GLASSES,
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingFaceEntity,
)
from homeassistant.components.microsoft_face import DATA_MICROSOFT_FACE
from homeassistant.core import split_entity_id
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
SUPPORTED_ATTRIBUTES = [ATTR_AGE, ATTR_GENDER, ATTR_GLASSES]
CONF_ATTRIBUTES = "attributes"
DEFAULT_ATTRIBUTES = [ATTR_AGE, ATTR_GENDER]
def validate_attributes(list_attributes):
"""Validate face attributes."""
for attr in list_attributes:
if attr not in SUPPORTED_ATTRIBUTES:
raise vol.Invalid(f"Invalid attribute {attr}")
return list_attributes
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ATTRIBUTES, default=DEFAULT_ATTRIBUTES): vol.All(
cv.ensure_list, validate_attributes
)
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Microsoft Face detection platform."""
api = hass.data[DATA_MICROSOFT_FACE]
attributes = config[CONF_ATTRIBUTES]
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
MicrosoftFaceDetectEntity(
camera[CONF_ENTITY_ID], api, attributes, camera.get(CONF_NAME)
)
)
async_add_entities(entities)
class MicrosoftFaceDetectEntity(ImageProcessingFaceEntity):
"""Microsoft Face API entity for identify."""
def __init__(self, camera_entity, api, attributes, name=None):
"""Initialize Microsoft Face."""
super().__init__()
self._api = api
self._camera = camera_entity
self._attributes = attributes
if name:
self._name = name
else:
self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}"
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_process_image(self, image):
"""Process image.
This method is a coroutine.
"""
face_data = None
try:
face_data = await self._api.call_api(
"post",
"detect",
image,
binary=True,
params={"returnFaceAttributes": ",".join(self._attributes)},
)
except HomeAssistantError as err:
_LOGGER.error("Can't process image on microsoft face: %s", err)
return
if not face_data:
face_data = []
faces = []
for face in face_data:
face_attr = {}
for attr in self._attributes:
if attr in face["faceAttributes"]:
face_attr[attr] = face["faceAttributes"][attr]
if face_attr:
faces.append(face_attr)
self.async_process_faces(faces, len(face_data))
|
import argparse
import json
import sys
import time
from pysensu_yelp import Status
from paasta_tools import monitoring_tools
from paasta_tools.cli.cmds.logs import scribe_env_to_locations
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_services_for_cluster
from paasta_tools.utils import load_system_paasta_config
try:
from scribereader import scribereader
from clog.readers import StreamTailerSetupError
except ImportError:
scribereader = None
OOM_EVENTS_STREAM = "tmp_paasta_oom_events"
def compose_check_name_for_service_instance(check_name, service, instance):
return f"{check_name}.{service}.{instance}"
def parse_args(args):
parser = argparse.ArgumentParser(
description=(
"Check the %s stream and report to Sensu if"
" there are any OOM events." % OOM_EVENTS_STREAM
)
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-r",
"--realert-every",
dest="realert_every",
type=int,
default=1,
help="Sensu 'realert_every' to use.",
)
parser.add_argument(
"--check-interval",
dest="check_interval",
type=int,
default=1,
help="How often this check runs, in minutes.",
)
parser.add_argument(
"--alert-threshold",
dest="alert_threshold",
type=int,
default=1,
help="Number of OOM kills required in the check interval to send an alert.",
)
parser.add_argument(
"-s",
"--superregion",
dest="superregion",
required=True,
help="The superregion to read OOM events from.",
)
return parser.parse_args(args)
def read_oom_events_from_scribe(cluster, superregion, num_lines=1000):
"""Read the latest 'num_lines' lines from OOM_EVENTS_STREAM and iterate over them."""
    # paasta configs include a map from cluster -> env that is expected by scribe
log_reader_config = load_system_paasta_config().get_log_reader()
cluster_map = log_reader_config["options"]["cluster_map"]
scribe_env = cluster_map[cluster]
# `scribe_env_to_locations` slightly mutates the scribe env based on whether
# or not it is in dev or prod
host, port = scribereader.get_tail_host_and_port(
**scribe_env_to_locations(scribe_env),
)
stream = scribereader.get_stream_tailer(
stream_name=OOM_EVENTS_STREAM,
tailing_host=host,
tailing_port=port,
lines=num_lines,
superregion=superregion,
)
try:
for line in stream:
try:
j = json.loads(line)
if j.get("cluster", "") == cluster:
yield j
except json.decoder.JSONDecodeError:
pass
except StreamTailerSetupError as e:
if "No data in stream" in str(e):
pass
else:
raise e
def latest_oom_events(cluster, superregion, interval=60):
"""
:returns: {(service, instance): [OOMEvent, OOMEvent,...] }
if the number of events > 0
"""
start_timestamp = int(time.time()) - interval
res = {}
for e in read_oom_events_from_scribe(cluster, superregion):
if e["timestamp"] > start_timestamp:
key = (e["service"], e["instance"])
res.setdefault(key, set()).add(e.get("container_id", ""))
return res
def compose_sensu_status(
instance, oom_events, is_check_enabled, alert_threshold, check_interval
):
"""
:param instance: InstanceConfig
:param oom_events: a list of OOMEvents
:param is_check_enabled: boolean to indicate whether the check enabled for the instance
"""
interval_string = f"{check_interval} minute(s)"
instance_name = f"{instance.service}.{instance.instance}"
if not is_check_enabled:
return (Status.OK, f"This check is disabled for {instance_name}.")
if not oom_events:
return (
Status.OK,
f"No oom events for {instance_name} in the last {interval_string}.",
)
elif len(oom_events) >= alert_threshold:
return (
Status.CRITICAL,
f"The Out Of Memory killer killed processes for {instance_name} "
f"in the last {interval_string}.",
)
else:
# If the number of OOM kills isn't above the alert threshold,
# don't send anything. This will keep an alert open if it's already open,
# but won't start a new alert if there wasn't one yet
return None
def send_sensu_event(instance, oom_events, args):
"""
:param instance: InstanceConfig
    :param oom_events: a set of container IDs that were OOM-killed
"""
check_name = compose_check_name_for_service_instance(
"oom-killer", instance.service, instance.instance
)
monitoring_overrides = instance.get_monitoring()
status = compose_sensu_status(
instance=instance,
oom_events=oom_events,
is_check_enabled=monitoring_overrides.get("check_oom_events", True),
alert_threshold=args.alert_threshold,
check_interval=args.check_interval,
)
if not status:
return
memory_limit = instance.get_mem()
try:
memory_limit_str = f"{int(memory_limit)}MB"
except ValueError:
memory_limit_str = memory_limit
monitoring_overrides.update(
{
"page": False,
"alert_after": "0m",
"realert_every": args.realert_every,
"runbook": "y/check-oom-events",
"tip": "Try bumping the memory limit past %s" % memory_limit_str,
}
)
return monitoring_tools.send_event(
service=instance.service,
check_name=check_name,
overrides=monitoring_overrides,
status=status[0],
output=status[1],
soa_dir=instance.soa_dir,
)
def main(sys_argv):
args = parse_args(sys_argv[1:])
cluster = load_system_paasta_config().get_cluster()
victims = latest_oom_events(
cluster, args.superregion, interval=(60 * args.check_interval)
)
for (service, instance) in get_services_for_cluster(cluster, soa_dir=args.soa_dir):
try:
instance_config = get_instance_config(
service=service,
instance=instance,
cluster=cluster,
load_deployments=False,
soa_dir=args.soa_dir,
)
oom_events = victims.get((service, instance), [])
send_sensu_event(instance_config, oom_events, args)
except NotImplementedError: # When instance_type is not supported by get_instance_config
pass
if __name__ == "__main__":
main(sys.argv)
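# Illustrative CLI usage (hedged): the script/module name and the superregion
# value are assumptions; the flags are the ones defined in parse_args above.
#
#   python check_oom_events.py -s some-superregion --check-interval 5 \
#       --alert-threshold 3 --realert-every 30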
|
from copy import deepcopy
import numpy as np
from numpy.testing import assert_allclose
from mne.defaults import _handle_default
def test_handle_default():
"""Test mutable default."""
x = deepcopy(_handle_default('scalings'))
y = _handle_default('scalings')
z = _handle_default('scalings', dict(mag=1, grad=2))
w = _handle_default('scalings', {})
assert set(x.keys()) == set(y.keys())
assert set(x.keys()) == set(z.keys())
for key in x.keys():
assert x[key] == y[key]
assert x[key] == w[key]
if key in ('mag', 'grad'):
assert x[key] != z[key]
else:
assert x[key] == z[key]
def test_si_units():
"""Test that our scalings actually produce SI units."""
scalings = _handle_default('scalings', None)
units = _handle_default('units', None)
# Add a bad one to test that we actually detect it
assert 'csd_bad' not in scalings
scalings['csd_bad'] = 1e5
units['csd_bad'] = 'V/m²'
assert set(scalings) == set(units)
known_prefixes = {
'': 1,
'm': 1e-3,
'c': 1e-2,
'µ': 1e-6,
'n': 1e-9,
'f': 1e-15,
}
known_SI = {'V', 'T', 'Am', 'm', 'M', 'rad',
'AU', 'GOF'} # not really SI but we tolerate them
powers = '²'
def _split_si(x):
if x == 'nAm':
prefix, si = 'n', 'Am'
elif x == 'GOF':
prefix, si = '', 'GOF'
elif x == 'AU':
prefix, si = '', 'AU'
elif x == 'rad':
prefix, si = '', 'rad'
elif len(x) == 2:
if x[1] in powers:
prefix, si = '', x
else:
prefix, si = x
else:
assert len(x) in (0, 1), x
prefix, si = '', x
return prefix, si
for key, scale in scalings.items():
unit = units[key]
try:
num, denom = unit.split('/')
except ValueError: # not enough to unpack
num, denom = unit, ''
# check the numerator and denominator
num_prefix, num_SI = _split_si(num)
assert num_prefix in known_prefixes
assert num_SI in known_SI
den_prefix, den_SI = _split_si(denom)
assert den_prefix in known_prefixes
if not (den_SI == den_prefix == ''):
assert den_SI.strip(powers) in known_SI
# reconstruct the scale factor
want_scale = known_prefixes[den_prefix] / known_prefixes[num_prefix]
if key == 'csd_bad':
assert not np.isclose(scale, want_scale, rtol=10)
else:
assert_allclose(scale, want_scale, rtol=1e-12)
|
import typing
import matchzoo as mz
from .preparer import Preparer
from matchzoo.engine.base_task import BaseTask
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.base_preprocessor import BasePreprocessor
def prepare(
task: BaseTask,
model_class: typing.Type[BaseModel],
data_pack: mz.DataPack,
preprocessor: typing.Optional[BasePreprocessor] = None,
embedding: typing.Optional['mz.Embedding'] = None,
config: typing.Optional[dict] = None,
):
"""
A simple shorthand for using :class:`matchzoo.Preparer`.
`config` is used to control specific behaviors. The default `config`
will be updated accordingly if a `config` dictionary is passed. e.g. to
override the default `bin_size`, pass `config={'bin_size': 15}`.
:param task: Task.
:param model_class: Model class.
:param data_pack: DataPack used to fit the preprocessor.
:param preprocessor: Preprocessor used to fit the `data_pack`.
(default: the default preprocessor of `model_class`)
    :param embedding: Embedding to build an embedding matrix. If not set,
then a correctly shaped randomized matrix will be built.
:param config: Configuration of specific behaviors. (default: return
value of `mz.Preparer.get_default_config()`)
:return: A tuple of `(model, preprocessor, data_generator_builder,
embedding_matrix)`.
"""
preparer = Preparer(task=task, config=config)
return preparer.prepare(
model_class=model_class,
data_pack=data_pack,
preprocessor=preprocessor,
embedding=embedding
)
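def _example_prepare_usage():  # pragma: no cover
    """Hedged usage sketch, not part of the original module.
    Assumptions: a Ranking task, MatchZoo's bundled toy data
    (`mz.datasets.toy.load_data`) and the `DenseBaseline` model class; the
    `config` override mirrors the `bin_size` example in the docstring above.
    """
    ranking_task = mz.tasks.Ranking()
    train_pack = mz.datasets.toy.load_data(task=ranking_task)
    # `prepare` returns (model, preprocessor, data_generator_builder, embedding_matrix).
    model, fitted_preprocessor, builder, matrix = prepare(
        task=ranking_task,
        model_class=mz.models.DenseBaseline,
        data_pack=train_pack,
        config={'bin_size': 15},  # override a single default, per the docstring
    )
    return model, fitted_preprocessor, builder, matrix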
|
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from weblate.lang.models import Language
class AnnouncementManager(models.Manager):
def context_filter(self, project=None, component=None, language=None):
"""Filter announcements by context."""
base = self.filter(Q(expiry__isnull=True) | Q(expiry__gte=timezone.now()))
if language and project is None and component is None:
return base.filter(project=None, component=None, language=language)
if component:
if language:
return base.filter(
(Q(component=component) & Q(language=language))
| (Q(component=None) & Q(language=language))
| (Q(component=component) & Q(language=None))
| (Q(project=component.project) & Q(component=None))
)
return base.filter(
(Q(component=component) & Q(language=None))
| (Q(project=component.project) & Q(component=None))
)
if project:
return base.filter(Q(project=project) & Q(component=None))
# All are None
return base.filter(project=None, component=None, language=None)
def create(self, user=None, **kwargs):
from weblate.trans.models.change import Change
result = super().create(**kwargs)
Change.objects.create(
action=Change.ACTION_ANNOUNCEMENT,
project=result.project,
component=result.component,
announcement=result,
target=result.message,
user=user,
)
return result
class Announcement(models.Model):
message = models.TextField(
verbose_name=gettext_lazy("Message"),
help_text=gettext_lazy("You can use Markdown and mention users by @username."),
)
project = models.ForeignKey(
"Project",
verbose_name=gettext_lazy("Project"),
null=True,
blank=True,
on_delete=models.deletion.CASCADE,
)
component = models.ForeignKey(
"Component",
verbose_name=gettext_lazy("Component"),
null=True,
blank=True,
on_delete=models.deletion.CASCADE,
)
language = models.ForeignKey(
Language,
verbose_name=gettext_lazy("Language"),
null=True,
blank=True,
on_delete=models.deletion.CASCADE,
)
category = models.CharField(
max_length=25,
verbose_name=gettext_lazy("Category"),
help_text=gettext_lazy("Category defines color used for the message."),
choices=(
("info", gettext_lazy("Info (light blue)")),
("warning", gettext_lazy("Warning (yellow)")),
("danger", gettext_lazy("Danger (red)")),
("success", gettext_lazy("Success (green)")),
),
default="info",
)
expiry = models.DateField(
null=True,
blank=True,
db_index=True,
verbose_name=gettext_lazy("Expiry date"),
help_text=gettext_lazy(
"The message will be not shown after this date. "
"Use it to announce string freeze and translation "
"deadline for next release."
),
)
notify = models.BooleanField(
blank=True,
default=True,
verbose_name=gettext_lazy("Notify users"),
)
objects = AnnouncementManager()
class Meta:
app_label = "trans"
verbose_name = gettext_lazy("Announcement")
verbose_name_plural = gettext_lazy("Announcements")
def __str__(self):
return self.message
def clean(self):
if self.project and self.component and self.component.project != self.project:
raise ValidationError(_("Do not specify both component and project!"))
if not self.project and self.component:
self.project = self.component.project
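def example_active_announcements(component, language):
    """Hedged usage sketch (not part of Weblate): fetch the announcements
    shown for a component/language context using the manager defined above.
    """
    # Per the Q filters in `context_filter`, this returns non-expired
    # announcements attached to the component, to its project, or to the
    # given language scoped to that component.
    return Announcement.objects.context_filter(
        component=component, language=language
    )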
|
import diamond.collector
import json
import urllib2
import os
class MesosCGroupCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(MesosCGroupCollector,
self).get_default_config_help()
config_help.update({
'host': 'Hostname',
'port': 'Port'
})
return config_help
def get_default_config(self):
# https://github.com/python-diamond/Diamond/blob/master/src/diamond/collector.py#L312-L358
config = super(MesosCGroupCollector, self).get_default_config()
config.update({
'mesos_state_path': 'state.json',
'cgroup_fs_path': '/sys/fs/cgroup',
'host': 'localhost',
'port': 5051,
'path_prefix': 'mesos',
'path': 'tasks',
'hostname': None
})
return config
def __init__(self, *args, **kwargs):
super(MesosCGroupCollector, self).__init__(*args, **kwargs)
def collect(self):
containers = self.get_containers()
sysfs = containers['flags']['cgroups_hierarchy']
cgroup_root = containers['flags']['cgroups_root']
for aspect in ['cpuacct', 'cpu', 'memory']:
aspect_path = os.path.join(sysfs, aspect, cgroup_root)
contents = os.listdir(aspect_path)
for task_id in [entry for entry in contents if
os.path.isdir(os.path.join(aspect_path, entry))]:
if task_id not in containers:
continue
key_parts = [containers[task_id]['environment'],
containers[task_id]['role'],
containers[task_id]['task'],
containers[task_id]['id'],
aspect]
# list task_id items
task_id = os.path.join(aspect_path, task_id)
if aspect == "cpuacct":
with open(os.path.join(task_id, "%s.usage" % aspect)) as f:
value = f.readline()
self.publish(
self.clean_up(
'.'.join(key_parts + ['usage'])), value)
with open(os.path.join(task_id, "%s.stat" % aspect)) as f:
data = f.readlines()
for kv_pair in data:
key, value = kv_pair.split()
self.publish(
self.clean_up(
'.'.join(key_parts + [key])), value)
def get_containers(self):
state = self.get_mesos_state()
containers = {
'flags': state['flags']
}
if 'frameworks' in state:
for framework in state['frameworks']:
for executor in framework['executors']:
container = executor['container']
source = executor['source']
role, environment, task, number = source.split('.')
containers[container] = {'role': role,
'environment': environment,
'task': task,
'id': number
}
return containers
def get_mesos_state(self):
try:
url = "http://%s:%s/%s" % (self.config['host'],
self.config['port'],
self.config['mesos_state_path'])
return json.load(urllib2.urlopen(url))
except (urllib2.HTTPError, ValueError) as err:
self.log.error('Unable to read JSON response: %s' % err)
return {}
def clean_up(self, text):
return text.replace('/', '.')
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# EC2 provides unique random hostnames.
def test_hostname(host):
pass
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import pytest
from queue import Empty
from unittest.mock import Mock, patch
from kombu.transport.etcd import Channel, Transport
pytest.importorskip('etcd')
class test_Etcd:
def setup(self):
self.connection = Mock()
self.connection.client.transport_options = {}
self.connection.client.port = 2739
self.client = self.patch('etcd.Client').return_value
self.channel = Channel(connection=self.connection)
def test_driver_version(self):
assert Transport(self.connection.client).driver_version()
def test_failed_get(self):
self.channel._acquire_lock = Mock(return_value=False)
self.channel.client.read.side_effect = IndexError
with patch('etcd.Lock'):
with pytest.raises(Empty):
self.channel._get('empty')()
def test_test_purge(self):
with patch('etcd.Lock'):
self.client.delete = Mock(return_value=True)
assert self.channel._purge('foo')
def test_key_prefix(self):
key = self.channel._key_prefix('myqueue')
assert key == 'kombu/myqueue'
def test_create_delete_queue(self):
queue = 'mynewqueue'
with patch('etcd.Lock'):
self.client.write.return_value = self.patch('etcd.EtcdResult')
assert self.channel._new_queue(queue)
self.client.delete.return_value = self.patch('etcd.EtcdResult')
self.channel._delete(queue)
def test_size(self):
with patch('etcd.Lock'):
self.client.read.return_value = self.patch(
'etcd.EtcdResult', _children=[{}, {}])
assert self.channel._size('q') == 2
def test_get(self):
with patch('etcd.Lock'):
self.client.read.return_value = self.patch(
'etcd.EtcdResult',
_children=[{'key': 'myqueue', 'modifyIndex': 1, 'value': '1'}])
assert self.channel._get('myqueue') is not None
def test_put(self):
with patch('etcd.Lock'):
self.client.write.return_value = self.patch('etcd.EtcdResult')
assert self.channel._put('myqueue', 'mydata') is None
|
from datetime import datetime, timedelta
from functools import partial
import json
import logging
import time
from urllib.parse import urlparse
import uuid
from aiohttp.hdrs import AUTHORIZATION
import jwt
from py_vapid import Vapid
from pywebpush import WebPusher
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.components import websocket_api
from homeassistant.components.frontend import add_manifest_json_key
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import (
HTTP_BAD_REQUEST,
HTTP_INTERNAL_SERVER_ERROR,
HTTP_UNAUTHORIZED,
URL_ROOT,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import ensure_unique_string
from homeassistant.util.json import load_json, save_json
from .const import DOMAIN, SERVICE_DISMISS
_LOGGER = logging.getLogger(__name__)
REGISTRATIONS_FILE = "html5_push_registrations.conf"
ATTR_GCM_SENDER_ID = "gcm_sender_id"
ATTR_GCM_API_KEY = "gcm_api_key"
ATTR_VAPID_PUB_KEY = "vapid_pub_key"
ATTR_VAPID_PRV_KEY = "vapid_prv_key"
ATTR_VAPID_EMAIL = "vapid_email"
def gcm_api_deprecated(value):
"""Warn user that GCM API config is deprecated."""
if value:
_LOGGER.warning(
"Configuring html5_push_notifications via the GCM api"
" has been deprecated and will stop working after April 11,"
" 2019. Use the VAPID configuration instead. For instructions,"
" see https://www.home-assistant.io/integrations/html5/"
)
return value
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(ATTR_GCM_SENDER_ID): vol.All(cv.string, gcm_api_deprecated),
vol.Optional(ATTR_GCM_API_KEY): cv.string,
vol.Optional(ATTR_VAPID_PUB_KEY): cv.string,
vol.Optional(ATTR_VAPID_PRV_KEY): cv.string,
vol.Optional(ATTR_VAPID_EMAIL): cv.string,
}
)
ATTR_SUBSCRIPTION = "subscription"
ATTR_BROWSER = "browser"
ATTR_NAME = "name"
ATTR_ENDPOINT = "endpoint"
ATTR_KEYS = "keys"
ATTR_AUTH = "auth"
ATTR_P256DH = "p256dh"
ATTR_EXPIRATIONTIME = "expirationTime"
ATTR_TAG = "tag"
ATTR_ACTION = "action"
ATTR_ACTIONS = "actions"
ATTR_TYPE = "type"
ATTR_URL = "url"
ATTR_DISMISS = "dismiss"
ATTR_PRIORITY = "priority"
DEFAULT_PRIORITY = "normal"
ATTR_TTL = "ttl"
DEFAULT_TTL = 86400
ATTR_JWT = "jwt"
WS_TYPE_APPKEY = "notify/html5/appkey"
SCHEMA_WS_APPKEY = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_APPKEY}
)
# The number of days after the moment a notification is sent that a JWT
# is valid.
JWT_VALID_DAYS = 7
KEYS_SCHEMA = vol.All(
dict,
vol.Schema(
{vol.Required(ATTR_AUTH): cv.string, vol.Required(ATTR_P256DH): cv.string}
),
)
SUBSCRIPTION_SCHEMA = vol.All(
dict,
vol.Schema(
{
# pylint: disable=no-value-for-parameter
vol.Required(ATTR_ENDPOINT): vol.Url(),
vol.Required(ATTR_KEYS): KEYS_SCHEMA,
vol.Optional(ATTR_EXPIRATIONTIME): vol.Any(None, cv.positive_int),
}
),
)
DISMISS_SERVICE_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_DATA): dict,
}
)
REGISTER_SCHEMA = vol.Schema(
{
vol.Required(ATTR_SUBSCRIPTION): SUBSCRIPTION_SCHEMA,
vol.Required(ATTR_BROWSER): vol.In(["chrome", "firefox"]),
vol.Optional(ATTR_NAME): cv.string,
}
)
CALLBACK_EVENT_PAYLOAD_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TAG): cv.string,
vol.Required(ATTR_TYPE): vol.In(["received", "clicked", "closed"]),
vol.Required(ATTR_TARGET): cv.string,
vol.Optional(ATTR_ACTION): cv.string,
vol.Optional(ATTR_DATA): dict,
}
)
NOTIFY_CALLBACK_EVENT = "html5_notification"
# Badge and timestamp are Chrome specific (not in official spec)
HTML5_SHOWNOTIFICATION_PARAMETERS = (
"actions",
"badge",
"body",
"dir",
"icon",
"image",
"lang",
"renotify",
"requireInteraction",
"tag",
"timestamp",
"vibrate",
)
def get_service(hass, config, discovery_info=None):
"""Get the HTML5 push notification service."""
json_path = hass.config.path(REGISTRATIONS_FILE)
registrations = _load_config(json_path)
if registrations is None:
return None
vapid_pub_key = config.get(ATTR_VAPID_PUB_KEY)
vapid_prv_key = config.get(ATTR_VAPID_PRV_KEY)
vapid_email = config.get(ATTR_VAPID_EMAIL)
def websocket_appkey(hass, connection, msg):
connection.send_message(websocket_api.result_message(msg["id"], vapid_pub_key))
hass.components.websocket_api.async_register_command(
WS_TYPE_APPKEY, websocket_appkey, SCHEMA_WS_APPKEY
)
hass.http.register_view(HTML5PushRegistrationView(registrations, json_path))
hass.http.register_view(HTML5PushCallbackView(registrations))
gcm_api_key = config.get(ATTR_GCM_API_KEY)
gcm_sender_id = config.get(ATTR_GCM_SENDER_ID)
if gcm_sender_id is not None:
add_manifest_json_key(ATTR_GCM_SENDER_ID, config.get(ATTR_GCM_SENDER_ID))
return HTML5NotificationService(
hass, gcm_api_key, vapid_prv_key, vapid_email, registrations, json_path
)
def _load_config(filename):
"""Load configuration."""
try:
return load_json(filename)
except HomeAssistantError:
pass
return {}
class HTML5PushRegistrationView(HomeAssistantView):
"""Accepts push registrations from a browser."""
url = "/api/notify.html5"
name = "api:notify.html5"
def __init__(self, registrations, json_path):
"""Init HTML5PushRegistrationView."""
self.registrations = registrations
self.json_path = json_path
async def post(self, request):
"""Accept the POST request for push registrations from a browser."""
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
try:
data = REGISTER_SCHEMA(data)
except vol.Invalid as ex:
return self.json_message(humanize_error(data, ex), HTTP_BAD_REQUEST)
devname = data.get(ATTR_NAME)
data.pop(ATTR_NAME, None)
name = self.find_registration_name(data, devname)
previous_registration = self.registrations.get(name)
self.registrations[name] = data
try:
hass = request.app["hass"]
await hass.async_add_executor_job(
save_json, self.json_path, self.registrations
)
return self.json_message("Push notification subscriber registered.")
except HomeAssistantError:
if previous_registration is not None:
self.registrations[name] = previous_registration
else:
self.registrations.pop(name)
return self.json_message(
"Error saving registration.", HTTP_INTERNAL_SERVER_ERROR
)
def find_registration_name(self, data, suggested=None):
"""Find a registration name matching data or generate a unique one."""
endpoint = data.get(ATTR_SUBSCRIPTION).get(ATTR_ENDPOINT)
for key, registration in self.registrations.items():
subscription = registration.get(ATTR_SUBSCRIPTION)
if subscription.get(ATTR_ENDPOINT) == endpoint:
return key
return ensure_unique_string(suggested or "unnamed device", self.registrations)
async def delete(self, request):
"""Delete a registration."""
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
subscription = data.get(ATTR_SUBSCRIPTION)
found = None
for key, registration in self.registrations.items():
if registration.get(ATTR_SUBSCRIPTION) == subscription:
found = key
break
if not found:
# If not found, unregistering was already done. Return 200
return self.json_message("Registration not found.")
reg = self.registrations.pop(found)
try:
hass = request.app["hass"]
await hass.async_add_executor_job(
save_json, self.json_path, self.registrations
)
except HomeAssistantError:
self.registrations[found] = reg
return self.json_message(
"Error saving registration.", HTTP_INTERNAL_SERVER_ERROR
)
return self.json_message("Push notification subscriber unregistered.")
class HTML5PushCallbackView(HomeAssistantView):
"""Accepts push registrations from a browser."""
requires_auth = False
url = "/api/notify.html5/callback"
name = "api:notify.html5/callback"
def __init__(self, registrations):
"""Init HTML5PushCallbackView."""
self.registrations = registrations
def decode_jwt(self, token):
"""Find the registration that signed this JWT and return it."""
# 1. Check claims w/o verifying to see if a target is in there.
# 2. If target in claims, attempt to verify against the given name.
# 2a. If decode is successful, return the payload.
# 2b. If decode is unsuccessful, return a 401.
target_check = jwt.decode(token, verify=False)
if target_check.get(ATTR_TARGET) in self.registrations:
possible_target = self.registrations[target_check[ATTR_TARGET]]
key = possible_target[ATTR_SUBSCRIPTION][ATTR_KEYS][ATTR_AUTH]
try:
return jwt.decode(token, key, algorithms=["ES256", "HS256"])
except jwt.exceptions.DecodeError:
pass
return self.json_message(
"No target found in JWT", status_code=HTTP_UNAUTHORIZED
)
# The following is based on code from Auth0
# https://auth0.com/docs/quickstart/backend/python
def check_authorization_header(self, request):
"""Check the authorization header."""
auth = request.headers.get(AUTHORIZATION)
if not auth:
return self.json_message(
"Authorization header is expected", status_code=HTTP_UNAUTHORIZED
)
parts = auth.split()
if parts[0].lower() != "bearer":
return self.json_message(
"Authorization header must start with Bearer",
status_code=HTTP_UNAUTHORIZED,
)
if len(parts) != 2:
return self.json_message(
"Authorization header must be Bearer token",
status_code=HTTP_UNAUTHORIZED,
)
token = parts[1]
try:
payload = self.decode_jwt(token)
except jwt.exceptions.InvalidTokenError:
return self.json_message("token is invalid", status_code=HTTP_UNAUTHORIZED)
return payload
async def post(self, request):
"""Accept the POST request for push registrations event callback."""
auth_check = self.check_authorization_header(request)
if not isinstance(auth_check, dict):
return auth_check
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
event_payload = {
ATTR_TAG: data.get(ATTR_TAG),
ATTR_TYPE: data[ATTR_TYPE],
ATTR_TARGET: auth_check[ATTR_TARGET],
}
if data.get(ATTR_ACTION) is not None:
event_payload[ATTR_ACTION] = data.get(ATTR_ACTION)
if data.get(ATTR_DATA) is not None:
event_payload[ATTR_DATA] = data.get(ATTR_DATA)
try:
event_payload = CALLBACK_EVENT_PAYLOAD_SCHEMA(event_payload)
except vol.Invalid as ex:
_LOGGER.warning(
"Callback event payload is not valid: %s",
humanize_error(event_payload, ex),
)
event_name = f"{NOTIFY_CALLBACK_EVENT}.{event_payload[ATTR_TYPE]}"
request.app["hass"].bus.fire(event_name, event_payload)
return self.json({"status": "ok", "event": event_payload[ATTR_TYPE]})
class HTML5NotificationService(BaseNotificationService):
"""Implement the notification service for HTML5."""
def __init__(self, hass, gcm_key, vapid_prv, vapid_email, registrations, json_path):
"""Initialize the service."""
self._gcm_key = gcm_key
self._vapid_prv = vapid_prv
self._vapid_email = vapid_email
self.registrations = registrations
self.registrations_json_path = json_path
async def async_dismiss_message(service):
"""Handle dismissing notification message service calls."""
kwargs = {}
if self.targets is not None:
kwargs[ATTR_TARGET] = self.targets
elif service.data.get(ATTR_TARGET) is not None:
kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET)
kwargs[ATTR_DATA] = service.data.get(ATTR_DATA)
await self.async_dismiss(**kwargs)
hass.services.async_register(
DOMAIN,
SERVICE_DISMISS,
async_dismiss_message,
schema=DISMISS_SERVICE_SCHEMA,
)
@property
def targets(self):
"""Return a dictionary of registered targets."""
targets = {}
for registration in self.registrations:
targets[registration] = registration
return targets
def dismiss(self, **kwargs):
"""Dismisses a notification."""
data = kwargs.get(ATTR_DATA)
tag = data.get(ATTR_TAG) if data else ""
payload = {ATTR_TAG: tag, ATTR_DISMISS: True, ATTR_DATA: {}}
self._push_message(payload, **kwargs)
async def async_dismiss(self, **kwargs):
"""Dismisses a notification.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.dismiss, **kwargs))
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
tag = str(uuid.uuid4())
payload = {
"badge": "/static/images/notification-badge.png",
"body": message,
ATTR_DATA: {},
"icon": "/static/icons/favicon-192x192.png",
ATTR_TAG: tag,
ATTR_TITLE: kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
}
data = kwargs.get(ATTR_DATA)
if data:
# Pick out fields that should go into the notification directly vs
# into the notification data dictionary.
data_tmp = {}
for key, val in data.items():
if key in HTML5_SHOWNOTIFICATION_PARAMETERS:
payload[key] = val
else:
data_tmp[key] = val
payload[ATTR_DATA] = data_tmp
if (
payload[ATTR_DATA].get(ATTR_URL) is None
and payload.get(ATTR_ACTIONS) is None
):
payload[ATTR_DATA][ATTR_URL] = URL_ROOT
self._push_message(payload, **kwargs)
def _push_message(self, payload, **kwargs):
"""Send the message."""
timestamp = int(time.time())
ttl = int(kwargs.get(ATTR_TTL, DEFAULT_TTL))
priority = kwargs.get(ATTR_PRIORITY, DEFAULT_PRIORITY)
if priority not in ["normal", "high"]:
priority = DEFAULT_PRIORITY
payload["timestamp"] = timestamp * 1000 # Javascript ms since epoch
targets = kwargs.get(ATTR_TARGET)
if not targets:
targets = self.registrations.keys()
for target in list(targets):
info = self.registrations.get(target)
try:
info = REGISTER_SCHEMA(info)
except vol.Invalid:
_LOGGER.error(
"%s is not a valid HTML5 push notification target", target
)
continue
payload[ATTR_DATA][ATTR_JWT] = add_jwt(
timestamp,
target,
payload[ATTR_TAG],
info[ATTR_SUBSCRIPTION][ATTR_KEYS][ATTR_AUTH],
)
webpusher = WebPusher(info[ATTR_SUBSCRIPTION])
if self._vapid_prv and self._vapid_email:
vapid_headers = create_vapid_headers(
self._vapid_email, info[ATTR_SUBSCRIPTION], self._vapid_prv
)
vapid_headers.update({"urgency": priority, "priority": priority})
response = webpusher.send(
data=json.dumps(payload), headers=vapid_headers, ttl=ttl
)
else:
                # Only pass the GCM key if we're actually using GCM.
                # If we don't, notifications break on Firefox.
gcm_key = (
self._gcm_key
if "googleapis.com" in info[ATTR_SUBSCRIPTION][ATTR_ENDPOINT]
else None
)
response = webpusher.send(json.dumps(payload), gcm_key=gcm_key, ttl=ttl)
if response.status_code == 410:
_LOGGER.info("Notification channel has expired")
reg = self.registrations.pop(target)
if not save_json(self.registrations_json_path, self.registrations):
self.registrations[target] = reg
_LOGGER.error("Error saving registration")
else:
_LOGGER.info("Configuration saved")
def add_jwt(timestamp, target, tag, jwt_secret):
"""Create JWT json to put into payload."""
jwt_exp = datetime.fromtimestamp(timestamp) + timedelta(days=JWT_VALID_DAYS)
jwt_claims = {
"exp": jwt_exp,
"nbf": timestamp,
"iat": timestamp,
ATTR_TARGET: target,
ATTR_TAG: tag,
}
return jwt.encode(jwt_claims, jwt_secret).decode("utf-8")
def create_vapid_headers(vapid_email, subscription_info, vapid_private_key):
"""Create encrypted headers to send to WebPusher."""
if vapid_email and vapid_private_key and ATTR_ENDPOINT in subscription_info:
url = urlparse(subscription_info.get(ATTR_ENDPOINT))
vapid_claims = {
"sub": f"mailto:{vapid_email}",
"aud": f"{url.scheme}://{url.netloc}",
}
vapid = Vapid.from_string(private_key=vapid_private_key)
return vapid.sign(vapid_claims)
return None
|
import os
import socket
import atexit
import tempfile
from http.client import HTTPConnection
import pytest
import cherrypy
from cherrypy.test import helper
def usocket_path():
fd, path = tempfile.mkstemp('cp_test.sock')
os.close(fd)
os.remove(path)
return path
USOCKET_PATH = usocket_path()
class USocketHTTPConnection(HTTPConnection):
"""
HTTPConnection over a unix socket.
"""
def __init__(self, path):
HTTPConnection.__init__(self, 'localhost')
self.path = path
def __call__(self, *args, **kwargs):
"""
Catch-all method just to present itself as a constructor for the
HTTPConnection.
"""
return self
def connect(self):
"""
Override the connect method and assign a unix socket as a transport.
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(self.path)
self.sock = sock
atexit.register(lambda: os.remove(self.path))
@pytest.mark.skipif("sys.platform == 'win32'")
class WSGI_UnixSocket_Test(helper.CPWebCase):
"""
Test basic behavior on a cherrypy wsgi server listening
on a unix socket.
It exercises the config option `server.socket_file`.
"""
HTTP_CONN = USocketHTTPConnection(USOCKET_PATH)
@staticmethod
def setup_server():
class Root(object):
@cherrypy.expose
def index(self):
return 'Test OK'
@cherrypy.expose
def error(self):
raise Exception('Invalid page')
config = {
'server.socket_file': USOCKET_PATH
}
cherrypy.config.update(config)
cherrypy.tree.mount(Root())
def tearDown(self):
cherrypy.config.update({'server.socket_file': None})
def test_simple_request(self):
self.getPage('/')
self.assertStatus('200 OK')
self.assertInBody('Test OK')
def test_not_found(self):
self.getPage('/invalid_path')
self.assertStatus('404 Not Found')
def test_internal_error(self):
self.getPage('/error')
self.assertStatus('500 Internal Server Error')
self.assertInBody('Invalid page')
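# --- Hedged sketch (separate from the CherryPy test above) -------------------
# A standalone, POSIX-only illustration of the same idea the test harness uses:
# http.client.HTTPConnection with its socket replaced by an AF_UNIX socket.
# The handler and socket path below are illustrative only.
def _unix_socket_http_demo():
    import socketserver
    import threading
    from http.server import BaseHTTPRequestHandler

    class Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            body = b'Test OK'
            self.send_response(200)
            self.send_header('Content-Length', str(len(body)))
            self.end_headers()
            self.wfile.write(body)

        def log_message(self, *args):
            pass  # keep the demo quiet

    class UnixHTTPServer(socketserver.UnixStreamServer):
        def get_request(self):
            # BaseHTTPRequestHandler expects a (host, port) client address.
            request, _ = super().get_request()
            return request, ('localhost', 0)

    sock_path = usocket_path()
    server = UnixHTTPServer(sock_path, Handler)
    threading.Thread(target=server.serve_forever, daemon=True).start()
    try:
        conn = HTTPConnection('localhost')
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(sock_path)
        conn.sock = sock  # same trick as USocketHTTPConnection.connect()
        conn.request('GET', '/')
        return conn.getresponse().read() == b'Test OK'
    finally:
        server.shutdown()
        os.remove(sock_path)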
|
import random
import dedupe.core
import dedupe.dedupe # noqa: F401
# simulated_candidates = (((1, {'name': 'asdffdsa'}), (2, {'name': 'fdsaasdf'}))
# for _ in xrange(10**6))
# data_model = {"fields": {"name": {"type": "String", "weight": -1.0}},
# "bias": 1.0}
# threshold = 0
# dupes = dedupe.core.scoreDuplicates(simulated_candidates,
# data_model,
# 0)
# simulated_candidates = (((1, {'name': 'asdffdsa'}), (2, {'name': 'fdsaasdf'}))
# for _ in xrange(10**7))
# deduper = dedupe.dedupe.Dedupe({"name": {"type": "String", "weight": -1.0}})
# clusters = deduper.duplicateClusters(simulated_candidates, 0, 0)
def candidates_gen():
candidate_set = set([])
for _ in range(10**5):
block = [((random.randint(0, 1000), 'a'),
(random.randint(0, 1000), 'b'))]
for candidate in block:
pair_ids = (candidate[0][0], candidate[1][0])
if pair_ids not in candidate_set:
yield candidate
candidate_set.add(pair_ids)
del candidate_set
@profile # noqa: F821
def generator_test():
a = sum(candidate[0][0] for candidate in candidates_gen())
print(a)
generator_test()
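# --- Hedged sketch -----------------------------------------------------------
# The essence of candidates_gen() above is "yield each candidate pair only the
# first time its record-id pair is seen". A tiny standalone version of that
# seen-set pattern (the sample pairs are illustrative only):
def _unique_pairs(pairs):
    seen = set()
    for pair in pairs:
        key = (pair[0][0], pair[1][0])
        if key not in seen:
            seen.add(key)
            yield pair

assert len(list(_unique_pairs([((1, 'a'), (2, 'b')), ((1, 'c'), (2, 'd'))]))) == 1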
|
import asyncio
from aiohttp import ClientError, ClientResponseError
import pymelcloud
import pytest
from homeassistant import config_entries
from homeassistant.components.melcloud.const import DOMAIN
from homeassistant.const import HTTP_FORBIDDEN, HTTP_INTERNAL_SERVER_ERROR
from tests.async_mock import patch
from tests.common import MockConfigEntry
@pytest.fixture
def mock_login():
"""Mock pymelcloud login."""
with patch(
"homeassistant.components.melcloud.config_flow.pymelcloud.login"
) as mock:
mock.return_value = "test-token"
yield mock
@pytest.fixture
def mock_get_devices():
"""Mock pymelcloud get_devices."""
with patch(
"homeassistant.components.melcloud.config_flow.pymelcloud.get_devices"
) as mock:
mock.return_value = {
pymelcloud.DEVICE_TYPE_ATA: [],
pymelcloud.DEVICE_TYPE_ATW: [],
}
yield mock
@pytest.fixture
def mock_request_info():
"""Mock RequestInfo to create ClientResponseErrors."""
with patch("aiohttp.RequestInfo") as mock_ri:
mock_ri.return_value.real_url.return_value = ""
yield mock_ri
async def test_form(hass, mock_login, mock_get_devices):
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] is None
with patch(
"homeassistant.components.melcloud.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.melcloud.async_setup_entry", return_value=True
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"username": "[email protected]", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "[email protected]"
assert result2["data"] == {
"username": "[email protected]",
"token": "test-token",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"error,reason",
[(ClientError(), "cannot_connect"), (asyncio.TimeoutError(), "cannot_connect")],
)
async def test_form_errors(hass, mock_login, mock_get_devices, error, reason):
"""Test we handle cannot connect error."""
mock_login.side_effect = error
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={"username": "[email protected]", "password": "test-password"},
)
assert len(mock_login.mock_calls) == 1
assert result["type"] == "abort"
assert result["reason"] == reason
@pytest.mark.parametrize(
"error,message",
[
(401, "invalid_auth"),
(HTTP_FORBIDDEN, "invalid_auth"),
(HTTP_INTERNAL_SERVER_ERROR, "cannot_connect"),
],
)
async def test_form_response_errors(
hass, mock_login, mock_get_devices, mock_request_info, error, message
):
"""Test we handle response errors."""
mock_login.side_effect = ClientResponseError(mock_request_info(), (), status=error)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={"username": "[email protected]", "password": "test-password"},
)
assert result["type"] == "abort"
assert result["reason"] == message
async def test_import_with_token(hass, mock_login, mock_get_devices):
"""Test successful import."""
with patch(
"homeassistant.components.melcloud.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.melcloud.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={"username": "[email protected]", "token": "test-token"},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "[email protected]"
assert result["data"] == {
"username": "[email protected]",
"token": "test-token",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_token_refresh(hass, mock_login, mock_get_devices):
"""Re-configuration with existing username should refresh token."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
data={"username": "[email protected]", "token": "test-original-token"},
unique_id="[email protected]",
)
mock_entry.add_to_hass(hass)
with patch(
"homeassistant.components.melcloud.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.melcloud.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={
"username": "[email protected]",
"password": "test-password",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
await hass.async_block_till_done()
assert len(mock_setup.mock_calls) == 0
assert len(mock_setup_entry.mock_calls) == 0
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
entry = entries[0]
assert entry.data["username"] == "[email protected]"
assert entry.data["token"] == "test-token"
|
from datetime import timedelta
import json
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_BROWSE_MEDIA,
SUPPORT_CLEAR_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SELECT_SOURCE,
SUPPORT_SHUFFLE_SET,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_ID,
CONF_NAME,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.util import Throttle
from .browse_media import browse_node, browse_top_level
from .const import DATA_INFO, DATA_VOLUMIO, DOMAIN
_CONFIGURING = {}
SUPPORT_VOLUMIO = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_SEEK
| SUPPORT_STOP
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_VOLUME_STEP
| SUPPORT_SELECT_SOURCE
| SUPPORT_SHUFFLE_SET
| SUPPORT_CLEAR_PLAYLIST
| SUPPORT_BROWSE_MEDIA
)
PLAYLIST_UPDATE_INTERVAL = timedelta(seconds=15)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Volumio media player platform."""
data = hass.data[DOMAIN][config_entry.entry_id]
volumio = data[DATA_VOLUMIO]
info = data[DATA_INFO]
uid = config_entry.data[CONF_ID]
name = config_entry.data[CONF_NAME]
entity = Volumio(volumio, uid, name, info)
async_add_entities([entity])
class Volumio(MediaPlayerEntity):
"""Volumio Player Object."""
def __init__(self, volumio, uid, name, info):
"""Initialize the media player."""
self._volumio = volumio
self._uid = uid
self._name = name
self._info = info
self._state = {}
self._playlists = []
self._currentplaylist = None
async def async_update(self):
"""Update state."""
self._state = await self._volumio.get_state()
await self._async_update_playlists()
@property
def unique_id(self):
"""Return the unique id for the entity."""
return self._uid
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def device_info(self):
"""Return device info for this device."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Volumio",
"sw_version": self._info["systemversion"],
"model": self._info["hardware"],
}
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def state(self):
"""Return the state of the device."""
status = self._state.get("status", None)
if status == "pause":
return STATE_PAUSED
if status == "play":
return STATE_PLAYING
return STATE_IDLE
@property
def media_title(self):
"""Title of current playing media."""
return self._state.get("title", None)
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self._state.get("artist", None)
@property
def media_album_name(self):
"""Artist of current playing media (Music track only)."""
return self._state.get("album", None)
@property
def media_image_url(self):
"""Image url of current playing media."""
url = self._state.get("albumart", None)
return self._volumio.canonic_url(url)
@property
def media_seek_position(self):
"""Time in seconds of current seek position."""
return self._state.get("seek", None)
@property
def media_duration(self):
"""Time in seconds of current song duration."""
return self._state.get("duration", None)
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
volume = self._state.get("volume", None)
if volume is not None and volume != "":
volume = int(volume) / 100
return volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._state.get("mute", None)
@property
def shuffle(self):
"""Boolean if shuffle is enabled."""
return self._state.get("random", False)
@property
def source_list(self):
"""Return the list of available input sources."""
return self._playlists
@property
def source(self):
"""Name of the current input source."""
return self._currentplaylist
@property
def supported_features(self):
"""Flag of media commands that are supported."""
return SUPPORT_VOLUMIO
async def async_media_next_track(self):
"""Send media_next command to media player."""
await self._volumio.next()
async def async_media_previous_track(self):
"""Send media_previous command to media player."""
await self._volumio.previous()
async def async_media_play(self):
"""Send media_play command to media player."""
await self._volumio.play()
async def async_media_pause(self):
"""Send media_pause command to media player."""
if self._state["trackType"] == "webradio":
await self._volumio.stop()
else:
await self._volumio.pause()
async def async_media_stop(self):
"""Send media_stop command to media player."""
await self._volumio.stop()
async def async_set_volume_level(self, volume):
"""Send volume_up command to media player."""
await self._volumio.set_volume_level(int(volume * 100))
async def async_volume_up(self):
"""Service to send the Volumio the command for volume up."""
await self._volumio.volume_up()
async def async_volume_down(self):
"""Service to send the Volumio the command for volume down."""
await self._volumio.volume_down()
async def async_mute_volume(self, mute):
"""Send mute command to media player."""
if mute:
await self._volumio.mute()
else:
await self._volumio.unmute()
async def async_set_shuffle(self, shuffle):
"""Enable/disable shuffle mode."""
await self._volumio.set_shuffle(shuffle)
async def async_select_source(self, source):
"""Choose an available playlist and play it."""
await self._volumio.play_playlist(source)
self._currentplaylist = source
async def async_clear_playlist(self):
"""Clear players playlist."""
await self._volumio.clear_playlist()
self._currentplaylist = None
@Throttle(PLAYLIST_UPDATE_INTERVAL)
async def _async_update_playlists(self, **kwargs):
"""Update available Volumio playlists."""
self._playlists = await self._volumio.get_playlists()
async def async_play_media(self, media_type, media_id, **kwargs):
"""Send the play_media command to the media player."""
await self._volumio.replace_and_play(json.loads(media_id))
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if media_content_type in [None, "library"]:
return await browse_top_level(self._volumio)
return await browse_node(self._volumio, media_content_type, media_content_id)
|
import re
import os
import json
import fnmatch
import functools
import glob
import textwrap
from typing import cast, List, Sequence
import attr
from PyQt5.QtCore import pyqtSignal, QObject, QUrl
from qutebrowser.utils import (log, standarddir, jinja, objreg, utils,
javascript, urlmatch, version, usertypes)
from qutebrowser.api import cmdutils
from qutebrowser.browser import downloads
from qutebrowser.misc import objects
gm_manager = cast('GreasemonkeyManager', None)
def _scripts_dir():
"""Get the directory of the scripts."""
return os.path.join(standarddir.data(), 'greasemonkey')
class GreasemonkeyScript:
"""Container class for userscripts, parses metadata blocks."""
def __init__(self, properties, code, # noqa: C901 pragma: no mccabe
filename=None):
self._code = code
self.includes: Sequence[str] = []
self.matches: Sequence[str] = []
self.excludes: Sequence[str] = []
self.requires: Sequence[str] = []
self.description = None
self.namespace = None
self.run_at = None
self.script_meta = None
self.runs_on_sub_frames = True
self.jsworld = "main"
self.name = ''
for name, value in properties:
if name == 'name':
self.name = value
elif name == 'namespace':
self.namespace = value
elif name == 'description':
self.description = value
elif name == 'include':
self.includes.append(value)
elif name == 'match':
self.matches.append(value)
elif name in ['exclude', 'exclude_match']:
self.excludes.append(value)
elif name == 'run-at':
self.run_at = value
elif name == 'noframes':
self.runs_on_sub_frames = False
elif name == 'require':
self.requires.append(value)
elif name == 'qute-js-world':
self.jsworld = value
if not self.name:
if filename:
self.name = filename
else:
raise ValueError(
"@name key required or pass filename to init."
)
HEADER_REGEX = r'// ==UserScript==|\n+// ==/UserScript==\n'
PROPS_REGEX = r'// @(?P<prop>[^\s]+)\s*(?P<val>.*)'
@classmethod
def parse(cls, source, filename=None):
"""GreasemonkeyScript factory.
Takes a userscript source and returns a GreasemonkeyScript.
Parses the Greasemonkey metadata block, if present, to fill out
attributes.
"""
matches = re.split(cls.HEADER_REGEX, source, maxsplit=2)
try:
_head, props, _code = matches
except ValueError:
props = ""
script = cls(
re.findall(cls.PROPS_REGEX, props),
source,
filename=filename
)
script.script_meta = props
if not script.includes and not script.matches:
script.includes = ['*']
return script
def needs_document_end_workaround(self):
"""Check whether to force @run-at document-end.
This needs to be done on QtWebEngine (since Qt 5.12) for known-broken scripts.
On Qt 5.12, accessing the DOM isn't possible with "@run-at
document-start". It was documented to be impossible before, but seems
to work fine.
However, some scripts do DOM access with "@run-at document-start". Fix
those by forcing them to use document-end instead.
"""
if objects.backend == usertypes.Backend.QtWebKit:
return False
assert objects.backend == usertypes.Backend.QtWebEngine, objects.backend
broken_scripts = [
('http://userstyles.org', None),
('https://github.com/ParticleCore', 'Iridium'),
]
return any(self._matches_id(namespace=namespace, name=name)
for namespace, name in broken_scripts)
def _matches_id(self, *, namespace, name):
"""Check if this script matches the given namespace/name.
Both namespace and name can be None in order to match any script.
"""
matches_namespace = namespace is None or self.namespace == namespace
matches_name = name is None or self.name == name
return matches_namespace and matches_name
def code(self):
"""Return the processed JavaScript code of this script.
Adorns the source code with GM_* methods for Greasemonkey
compatibility and wraps it in an IIFE to hide it within a
lexical scope. Note that this means line numbers in your
browser's debugger/inspector will not match up to the line
numbers in the source script directly.
"""
# Don't use Proxy on this webkit version, the support isn't there.
use_proxy = not (
objects.backend == usertypes.Backend.QtWebKit and
version.qWebKitVersion() == '602.1')
template = jinja.js_environment.get_template('greasemonkey_wrapper.js')
return template.render(
scriptName=javascript.string_escape(
"/".join([self.namespace or '', self.name])),
scriptInfo=self._meta_json(),
scriptMeta=javascript.string_escape(self.script_meta or ''),
scriptSource=self._code,
use_proxy=use_proxy)
def _meta_json(self):
return json.dumps({
'name': self.name,
'description': self.description,
'matches': self.matches,
'includes': self.includes,
'excludes': self.excludes,
'run-at': self.run_at,
})
def add_required_script(self, source):
"""Add the source of a required script to this script."""
# The additional source is indented in case it also contains a
# metadata block. Because we pass everything at once to
# QWebEngineScript and that would parse the first metadata block
# found as the valid one.
self._code = "\n".join([textwrap.indent(source, " "), self._code])
@attr.s
class MatchingScripts:
"""All userscripts registered to run on a particular url."""
url = attr.ib()
start = attr.ib(default=attr.Factory(list))
end = attr.ib(default=attr.Factory(list))
idle = attr.ib(default=attr.Factory(list))
class GreasemonkeyMatcher:
"""Check whether scripts should be loaded for a given URL."""
# https://wiki.greasespot.net/Include_and_exclude_rules#Greaseable_schemes
# Limit the schemes scripts can run on due to unreasonable levels of
# exploitability
GREASEABLE_SCHEMES = ['http', 'https', 'ftp', 'file']
def __init__(self, url):
self._url = url
self._url_string = url.toString(QUrl.FullyEncoded)
self.is_greaseable = url.scheme() in self.GREASEABLE_SCHEMES
def _match_pattern(self, pattern):
# For include and exclude rules if they start and end with '/' they
# should be treated as a (ecma syntax) regular expression.
if pattern.startswith('/') and pattern.endswith('/'):
matches = re.search(pattern[1:-1], self._url_string, flags=re.I)
return matches is not None
# Otherwise they are glob expressions.
return fnmatch.fnmatch(self._url_string, pattern)
def matches(self, script):
"""Check whether the URL matches filtering rules of the script."""
assert self.is_greaseable
matching_includes = any(self._match_pattern(pat)
for pat in script.includes)
matching_match = any(urlmatch.UrlPattern(pat).matches(self._url)
for pat in script.matches)
matching_excludes = any(self._match_pattern(pat)
for pat in script.excludes)
return (matching_includes or matching_match) and not matching_excludes
class GreasemonkeyManager(QObject):
"""Manager of userscripts and a Greasemonkey compatible environment.
Signals:
scripts_reloaded: Emitted when scripts are reloaded from disk.
Any cached or already-injected scripts should be
considered obsolete.
"""
scripts_reloaded = pyqtSignal()
def __init__(self, parent=None):
super().__init__(parent)
self._run_start: List[GreasemonkeyScript] = []
self._run_end: List[GreasemonkeyScript] = []
self._run_idle: List[GreasemonkeyScript] = []
self._in_progress_dls: List[downloads.AbstractDownloadItem] = []
self.load_scripts()
def load_scripts(self, *, force=False):
"""Re-read Greasemonkey scripts from disk.
The scripts are read from a 'greasemonkey' subdirectory in
qutebrowser's data directory (see `:version`).
Args:
force: For any scripts that have required dependencies,
re-download them.
"""
self._run_start = []
self._run_end = []
self._run_idle = []
scripts_dir = os.path.abspath(_scripts_dir())
log.greasemonkey.debug("Reading scripts from: {}".format(scripts_dir))
for script_filename in glob.glob(os.path.join(scripts_dir, '*.js')):
if not os.path.isfile(script_filename):
continue
script_path = os.path.join(scripts_dir, script_filename)
with open(script_path, encoding='utf-8-sig') as script_file:
script = GreasemonkeyScript.parse(script_file.read(),
script_filename)
if not script.name:
script.name = script_filename
self.add_script(script, force)
self.scripts_reloaded.emit()
def add_script(self, script, force=False):
"""Add a GreasemonkeyScript to this manager.
Args:
            force: Fetch and overwrite any dependencies which are
already locally cached.
"""
if script.requires:
log.greasemonkey.debug(
"Deferring script until requirements are "
"fulfilled: {}".format(script.name))
self._get_required_scripts(script, force)
else:
self._add_script(script)
def _add_script(self, script):
if script.run_at == 'document-start':
self._run_start.append(script)
elif script.run_at == 'document-end':
self._run_end.append(script)
elif script.run_at == 'document-idle':
self._run_idle.append(script)
else:
if script.run_at:
log.greasemonkey.warning("Script {} has invalid run-at "
"defined, defaulting to "
"document-end"
.format(script.name))
# Default as per
# https://wiki.greasespot.net/Metadata_Block#.40run-at
self._run_end.append(script)
log.greasemonkey.debug("Loaded script: {}".format(script.name))
def _required_url_to_file_path(self, url):
requires_dir = os.path.join(_scripts_dir(), 'requires')
if not os.path.exists(requires_dir):
os.mkdir(requires_dir)
return os.path.join(requires_dir, utils.sanitize_filename(url))
def _on_required_download_finished(self, script, download):
self._in_progress_dls.remove(download)
if not self._add_script_with_requires(script):
log.greasemonkey.debug(
"Finished download {} for script {} "
"but some requirements are still pending"
.format(download.basename, script.name))
def _add_script_with_requires(self, script, quiet=False):
"""Add a script with pending downloads to this GreasemonkeyManager.
        Specifically a script that has dependencies specified via an
`@require` rule.
Args:
script: The GreasemonkeyScript to add.
quiet: True to suppress the scripts_reloaded signal after
adding `script`.
Returns: True if the script was added, False if there are still
            dependencies being downloaded.
"""
# See if we are still waiting on any required scripts for this one
for dl in self._in_progress_dls:
if dl.requested_url in script.requires:
return False
# Need to add the required scripts to the IIFE now
for url in reversed(script.requires):
target_path = self._required_url_to_file_path(url)
log.greasemonkey.debug(
"Adding required script for {} to IIFE: {}"
.format(script.name, url))
with open(target_path, encoding='utf8') as f:
script.add_required_script(f.read())
self._add_script(script)
if not quiet:
self.scripts_reloaded.emit()
return True
def _get_required_scripts(self, script, force=False):
required_dls = [(url, self._required_url_to_file_path(url))
for url in script.requires]
if not force:
required_dls = [(url, path) for (url, path) in required_dls
if not os.path.exists(path)]
if not required_dls:
# All the required files exist already
self._add_script_with_requires(script, quiet=True)
return
download_manager = objreg.get('qtnetwork-download-manager')
for url, target_path in required_dls:
target = downloads.FileDownloadTarget(target_path,
force_overwrite=True)
download = download_manager.get(QUrl(url), target=target,
auto_remove=True)
download.requested_url = url
self._in_progress_dls.append(download)
if download.successful:
self._on_required_download_finished(script, download)
else:
download.finished.connect(
functools.partial(self._on_required_download_finished,
script, download))
def scripts_for(self, url):
"""Fetch scripts that are registered to run for url.
        Returns a MatchingScripts object with the scripts meant to run at
        document-start, document-end and document-idle for that URL.
"""
matcher = GreasemonkeyMatcher(url)
if not matcher.is_greaseable:
return MatchingScripts(url, [], [], [])
return MatchingScripts(
url=url,
start=[script for script in self._run_start
if matcher.matches(script)],
end=[script for script in self._run_end
if matcher.matches(script)],
idle=[script for script in self._run_idle
if matcher.matches(script)]
)
def all_scripts(self):
"""Return all scripts found in the configured script directory."""
return self._run_start + self._run_end + self._run_idle
@cmdutils.register()
def greasemonkey_reload(force=False):
"""Re-read Greasemonkey scripts from disk.
The scripts are read from a 'greasemonkey' subdirectory in
qutebrowser's data directory (see `:version`).
Args:
force: For any scripts that have required dependencies,
re-download them.
"""
gm_manager.load_scripts(force=force)
def init():
"""Initialize Greasemonkey support."""
global gm_manager
gm_manager = GreasemonkeyManager()
try:
os.mkdir(_scripts_dir())
except FileExistsError:
pass
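# --- Hedged usage sketch -----------------------------------------------------
# A minimal illustration of how GreasemonkeyScript.parse() above reads the
# ==UserScript== metadata block; the userscript text is illustrative only and
# is never written to the scripts directory.
_EXAMPLE_USERSCRIPT = (
    "// ==UserScript==\n"
    "// @name Example\n"
    "// @include https://example.com/*\n"
    "// @run-at document-end\n"
    "// ==/UserScript==\n"
    "console.log('hello');\n"
)

def _example_parse():
    script = GreasemonkeyScript.parse(_EXAMPLE_USERSCRIPT)
    assert script.name == 'Example'
    assert script.includes == ['https://example.com/*']
    assert script.run_at == 'document-end'
    return script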
|
import asyncio
from datetime import timedelta
import logging
from regenmaschine import Client
from regenmaschine.errors import RainMachineError
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control
from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_CURRENT,
DATA_RESTRICTIONS_UNIVERSAL,
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
ZONE_UPDATE_TOPIC,
)
_LOGGER = logging.getLogger(__name__)
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"
DATA_LISTENER = "listener"
DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True
SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})
SERVICE_ALTER_ZONE = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
SERVICE_PAUSE_WATERING = vol.Schema({vol.Required(CONF_SECONDS): cv.positive_int})
SERVICE_START_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_START_ZONE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ZONE_ID): cv.positive_int,
vol.Optional(CONF_ZONE_RUN_TIME, default=DEFAULT_ZONE_RUN): cv.positive_int,
}
)
SERVICE_STOP_PROGRAM_SCHEMA = vol.Schema(
{vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_STOP_ZONE_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.119")
async def async_setup(hass, config):
"""Set up the RainMachine component."""
hass.data[DOMAIN] = {DATA_CLIENT: {}, DATA_LISTENER: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up RainMachine as config entry."""
entry_updates = {}
if not config_entry.unique_id:
# If the config entry doesn't already have a unique ID, set one:
entry_updates["unique_id"] = config_entry.data[CONF_IP_ADDRESS]
if CONF_ZONE_RUN_TIME in config_entry.data:
# If a zone run time exists in the config entry's data, pop it and move it to
# options:
data = {**config_entry.data}
entry_updates["data"] = data
entry_updates["options"] = {
**config_entry.options,
CONF_ZONE_RUN_TIME: data.pop(CONF_ZONE_RUN_TIME),
}
if entry_updates:
hass.config_entries.async_update_entry(config_entry, **entry_updates)
_verify_domain_control = verify_domain_control(hass, DOMAIN)
websession = aiohttp_client.async_get_clientsession(hass)
client = Client(session=websession)
try:
await client.load_local(
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PASSWORD],
port=config_entry.data[CONF_PORT],
ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
)
except RainMachineError as err:
_LOGGER.error("An error occurred: %s", err)
raise ConfigEntryNotReady from err
else:
# regenmaschine can load multiple controllers at once, but we only grab the one
# we loaded above:
controller = next(iter(client.controllers.values()))
rainmachine = RainMachine(hass, config_entry, controller)
# Update the data object, which at this point (prior to any sensors registering
# "interest" in the API), will focus on grabbing the latest program and zone data:
await rainmachine.async_update()
hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = rainmachine
for component in ("binary_sensor", "sensor", "switch"):
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
@_verify_domain_control
async def disable_program(call):
"""Disable a program."""
await rainmachine.controller.programs.disable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def disable_zone(call):
"""Disable a zone."""
await rainmachine.controller.zones.disable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_program(call):
"""Enable a program."""
await rainmachine.controller.programs.enable(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def enable_zone(call):
"""Enable a zone."""
await rainmachine.controller.zones.enable(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def pause_watering(call):
"""Pause watering for a set number of seconds."""
await rainmachine.controller.watering.pause_all(call.data[CONF_SECONDS])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_program(call):
"""Start a particular program."""
await rainmachine.controller.programs.start(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def start_zone(call):
"""Start a particular zone for a certain amount of time."""
await rainmachine.controller.zones.start(
call.data[CONF_ZONE_ID], call.data[CONF_ZONE_RUN_TIME]
)
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_all(call):
"""Stop all watering."""
await rainmachine.controller.watering.stop_all()
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_program(call):
"""Stop a program."""
await rainmachine.controller.programs.stop(call.data[CONF_PROGRAM_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def stop_zone(call):
"""Stop a zone."""
await rainmachine.controller.zones.stop(call.data[CONF_ZONE_ID])
await rainmachine.async_update_programs_and_zones()
@_verify_domain_control
async def unpause_watering(call):
"""Unpause watering."""
await rainmachine.controller.watering.unpause_all()
await rainmachine.async_update_programs_and_zones()
for service, method, schema in [
("disable_program", disable_program, SERVICE_ALTER_PROGRAM),
("disable_zone", disable_zone, SERVICE_ALTER_ZONE),
("enable_program", enable_program, SERVICE_ALTER_PROGRAM),
("enable_zone", enable_zone, SERVICE_ALTER_ZONE),
("pause_watering", pause_watering, SERVICE_PAUSE_WATERING),
("start_program", start_program, SERVICE_START_PROGRAM_SCHEMA),
("start_zone", start_zone, SERVICE_START_ZONE_SCHEMA),
("stop_all", stop_all, {}),
("stop_program", stop_program, SERVICE_STOP_PROGRAM_SCHEMA),
("stop_zone", stop_zone, SERVICE_STOP_ZONE_SCHEMA),
("unpause_watering", unpause_watering, {}),
]:
hass.services.async_register(DOMAIN, service, method, schema=schema)
hass.data[DOMAIN][DATA_LISTENER] = config_entry.add_update_listener(
async_reload_entry
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an OpenUV config entry."""
hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
cancel_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
cancel_listener()
tasks = [
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in ("binary_sensor", "sensor", "switch")
]
await asyncio.gather(*tasks)
return True
async def async_reload_entry(hass, config_entry):
"""Handle an options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
class RainMachine:
"""Define a generic RainMachine object."""
def __init__(self, hass, config_entry, controller):
"""Initialize."""
self._async_cancel_time_interval_listener = None
self.config_entry = config_entry
self.controller = controller
self.data = {}
self.device_mac = controller.mac
self.hass = hass
self._api_category_count = {
DATA_PROVISION_SETTINGS: 0,
DATA_RESTRICTIONS_CURRENT: 0,
DATA_RESTRICTIONS_UNIVERSAL: 0,
}
self._api_category_locks = {
DATA_PROVISION_SETTINGS: asyncio.Lock(),
DATA_RESTRICTIONS_CURRENT: asyncio.Lock(),
DATA_RESTRICTIONS_UNIVERSAL: asyncio.Lock(),
}
async def _async_update_listener_action(self, now):
"""Define an async_track_time_interval action to update data."""
await self.async_update()
@callback
def async_deregister_sensor_api_interest(self, api_category):
"""Decrement the number of entities with data needs from an API category."""
# If this deregistration should leave us with no registration at all, remove the
# time interval:
if sum(self._api_category_count.values()) == 0:
if self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener()
self._async_cancel_time_interval_listener = None
return
self._api_category_count[api_category] -= 1
async def async_fetch_from_api(self, api_category):
"""Execute the appropriate coroutine to fetch particular data from the API."""
if api_category == DATA_PROGRAMS:
data = await self.controller.programs.all(include_inactive=True)
elif api_category == DATA_PROVISION_SETTINGS:
data = await self.controller.provisioning.settings()
elif api_category == DATA_RESTRICTIONS_CURRENT:
data = await self.controller.restrictions.current()
elif api_category == DATA_RESTRICTIONS_UNIVERSAL:
data = await self.controller.restrictions.universal()
elif api_category == DATA_ZONES:
data = await self.controller.zones.all(include_inactive=True)
elif api_category == DATA_ZONES_DETAILS:
# This API call needs to be separate from the DATA_ZONES one above because,
# maddeningly, the DATA_ZONES_DETAILS API call doesn't include the current
# state of the zone:
data = await self.controller.zones.all(details=True, include_inactive=True)
self.data[api_category] = data
async def async_register_sensor_api_interest(self, api_category):
"""Increment the number of entities with data needs from an API category."""
# If this is the first registration we have, start a time interval:
if not self._async_cancel_time_interval_listener:
self._async_cancel_time_interval_listener = async_track_time_interval(
self.hass,
self._async_update_listener_action,
DEFAULT_SCAN_INTERVAL,
)
self._api_category_count[api_category] += 1
# If a sensor registers interest in a particular API call and the data doesn't
# exist for it yet, make the API call and grab the data:
async with self._api_category_locks[api_category]:
if api_category not in self.data:
await self.async_fetch_from_api(api_category)
async def async_update(self):
"""Update all RainMachine data."""
tasks = [self.async_update_programs_and_zones(), self.async_update_sensors()]
await asyncio.gather(*tasks)
async def async_update_sensors(self):
"""Update sensor/binary sensor data."""
_LOGGER.debug("Updating sensor data for RainMachine")
# Fetch an API category if there is at least one interested entity:
tasks = {}
for category, count in self._api_category_count.items():
if count == 0:
continue
tasks[category] = self.async_fetch_from_api(category)
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
continue
async_dispatcher_send(self.hass, SENSOR_UPDATE_TOPIC)
async def async_update_programs_and_zones(self):
"""Update program and zone data.
Program and zone updates always go together because of how linked they are:
programs affect zones and certain combinations of zones affect programs.
Note that this call does not take into account interested entities when making
the API calls; we make the reasonable assumption that switches will always be
enabled.
"""
_LOGGER.debug("Updating program and zone data for RainMachine")
tasks = {
DATA_PROGRAMS: self.async_fetch_from_api(DATA_PROGRAMS),
DATA_ZONES: self.async_fetch_from_api(DATA_ZONES),
DATA_ZONES_DETAILS: self.async_fetch_from_api(DATA_ZONES_DETAILS),
}
results = await asyncio.gather(*tasks.values(), return_exceptions=True)
for api_category, result in zip(tasks, results):
if isinstance(result, RainMachineError):
_LOGGER.error(
"There was an error while updating %s: %s", api_category, result
)
async_dispatcher_send(self.hass, PROGRAM_UPDATE_TOPIC)
async_dispatcher_send(self.hass, ZONE_UPDATE_TOPIC)
class RainMachineEntity(Entity):
"""Define a generic RainMachine entity."""
def __init__(self, rainmachine):
"""Initialize."""
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._device_class = None
self._name = None
self.rainmachine = rainmachine
@property
def device_class(self):
"""Return the device class."""
return self._device_class
@property
def device_info(self):
"""Return device registry information for this entity."""
return {
"identifiers": {(DOMAIN, self.rainmachine.controller.mac)},
"name": self.rainmachine.controller.name,
"manufacturer": "RainMachine",
"model": (
f"Version {self.rainmachine.controller.hardware_version} "
f"(API: {self.rainmachine.controller.api_version})"
),
"sw_version": self.rainmachine.controller.software_version,
}
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return self._attrs
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def should_poll(self):
"""Disable polling."""
return False
@callback
def _update_state(self):
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
@callback
def update_from_latest_data(self):
"""Update the entity."""
raise NotImplementedError
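# --- Hedged sketch -----------------------------------------------------------
# The per-category error handling above relies on asyncio.gather(...,
# return_exceptions=True): one failing fetch is logged and skipped without
# cancelling the others. A standalone miniature of that pattern; the category
# names and the simulated failure are illustrative only.
async def _gather_per_category_demo():
    async def fetch(category):
        if category == "restrictions":
            raise RuntimeError("simulated API failure")
        return {"category": category}

    tasks = {
        category: fetch(category)
        for category in ("programs", "zones", "restrictions")
    }
    results = await asyncio.gather(*tasks.values(), return_exceptions=True)
    fetched = {}
    for category, result in zip(tasks, results):
        if isinstance(result, Exception):
            _LOGGER.debug("Skipping %s: %s", category, result)
            continue
        fetched[category] = result
    return fetched  # {"programs": ..., "zones": ...}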
|
from unittest import TestCase
import pandas as pd
from scattertext import whitespace_nlp, CorpusFromParsedDocuments
from scattertext.test.test_corpusFromPandas import get_docs_categories
class TestParsedCorpus(TestCase):
@classmethod
def setUp(cls):
cls.categories, cls.documents = get_docs_categories()
cls.parsed_docs = []
for doc in cls.documents:
cls.parsed_docs.append(whitespace_nlp(doc))
cls.df = pd.DataFrame({'category': cls.categories,
'author': ['a', 'a', 'c', 'c', 'c',
'c', 'd', 'd', 'e', 'e'],
'parsed': cls.parsed_docs,
'document_lengths': [len(doc) for doc in cls.documents]})
cls.corpus = CorpusFromParsedDocuments(cls.df, 'category', 'parsed').build()
def test_get_text(self):
self.assertEqual(len([x for x in self.corpus.get_texts()]),
len(self.documents))
self.assertEqual([str(x) for x in self.corpus.get_texts()][0],
"what art thou that usurp'st this time of night,")
def test_get_field(self):
self.assertEqual(list(self.corpus.get_field('author')),
list(self.df.author))
def test_get_parsed_docs(self):
doc = [x for x in self.corpus.get_parsed_docs()][0]
doc.sents
def test_get_unigram_corpus(self):
unicorp = self.corpus.get_unigram_corpus()
self.assertEqual(len([x for x in unicorp.get_texts()]),
len(self.documents))
self.assertEqual([str(x) for x in unicorp.get_texts()][0],
"what art thou that usurp'st this time of night,")
def test_search(self):
self.assertEqual(len(self.corpus.search('bigram')), 1)
df = self.corpus.search('bigram')
d = dict(df.iloc[0])
self.assertEqual(d['category'], '???')
self.assertEqual(d['document_lengths'], 44)
self.assertEqual(str(d['parsed']), 'speak up, speak up, this is a repeat bigram.')
self.assertEqual(len(self.corpus.search('the')), 2)
def test_term_group_freq_df(self):
'''
Returns
-------
        pd.DataFrame indexed on terms, with a frequency column for each group.
'''
group_df = self.corpus.term_group_freq_df('author')
self.assertEqual(set(group_df.index),
set(self.corpus._term_idx_store.values()))
self.assertEqual(dict(group_df.loc['of']), {'??? freq': 0, 'hamlet freq': 2, 'jay-z/r. kelly freq': 1})
self.assertEqual(dict(group_df.loc['speak up']),
{'??? freq': 1, 'hamlet freq': 0, 'jay-z/r. kelly freq': 1})
|
import pytest
from jks import KeyStore, TrustedCertEntry, PrivateKeyEntry
from lemur.tests.vectors import (
INTERNAL_CERTIFICATE_A_STR,
SAN_CERT_STR,
INTERMEDIATE_CERT_STR,
ROOTCA_CERT_STR,
SAN_CERT_KEY,
)
def test_export_truststore(app):
from lemur.plugins.base import plugins
p = plugins.get("java-truststore-jks")
options = [
{"name": "passphrase", "value": "hunter2"},
{"name": "alias", "value": "AzureDiamond"},
]
chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR
ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)
assert ext == "jks"
assert password == "hunter2"
assert isinstance(raw, bytes)
ks = KeyStore.loads(raw, "hunter2")
assert ks.store_type == "jks"
# JKS lower-cases alias strings
assert ks.entries.keys() == {
"azurediamond_cert",
"azurediamond_cert_1",
"azurediamond_cert_2",
}
assert isinstance(ks.entries["azurediamond_cert"], TrustedCertEntry)
def test_export_truststore_defaults(app):
from lemur.plugins.base import plugins
p = plugins.get("java-truststore-jks")
options = []
ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
assert ext == "jks"
assert isinstance(password, str)
assert isinstance(raw, bytes)
ks = KeyStore.loads(raw, password)
assert ks.store_type == "jks"
# JKS lower-cases alias strings
assert ks.entries.keys() == {"acommonname_cert"}
assert isinstance(ks.entries["acommonname_cert"], TrustedCertEntry)
def test_export_keystore(app):
from lemur.plugins.base import plugins
p = plugins.get("java-keystore-jks")
options = [
{"name": "passphrase", "value": "hunter2"},
{"name": "alias", "value": "AzureDiamond"},
]
chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR
with pytest.raises(Exception):
p.export(INTERNAL_CERTIFICATE_A_STR, chain, "", options)
ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)
assert ext == "jks"
assert password == "hunter2"
assert isinstance(raw, bytes)
ks = KeyStore.loads(raw, password)
assert ks.store_type == "jks"
# JKS lower-cases alias strings
assert ks.entries.keys() == {"azurediamond"}
entry = ks.entries["azurediamond"]
assert isinstance(entry, PrivateKeyEntry)
assert len(entry.cert_chain) == 3 # Cert and chain were provided
def test_export_keystore_defaults(app):
from lemur.plugins.base import plugins
p = plugins.get("java-keystore-jks")
options = []
with pytest.raises(Exception):
p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
ext, password, raw = p.export(SAN_CERT_STR, "", SAN_CERT_KEY, options)
assert ext == "jks"
assert isinstance(password, str)
assert isinstance(raw, bytes)
ks = KeyStore.loads(raw, password)
assert ks.store_type == "jks"
assert ks.entries.keys() == {"san.example.org"}
entry = ks.entries["san.example.org"]
assert isinstance(entry, PrivateKeyEntry)
assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided
|
from chainer.backends import cuda
class GradientScaling(object):
"""Optimizer/UpdateRule hook function for scaling gradient.
This hook function scales gradient by a constant value.
Args:
rate (float): Coefficient for scaling.
Attributes:
rate (float): Coefficient for scaling.
"""
name = 'GradientScaling'
call_for_each_param = True
def __init__(self, rate):
self.rate = rate
def __call__(self, rule, param):
g = param.grad
with cuda.get_device_from_array(g):
g *= self.rate
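# --- Hedged usage sketch -----------------------------------------------------
# Minimal illustration of the hook's effect on a single parameter's gradient.
# A tiny stand-in "param" object is used so the sketch runs without a full
# Chainer training loop; in real use the hook would be attached with
# optimizer.add_hook(GradientScaling(rate)). The 0.5 rate is illustrative only.
def _gradient_scaling_demo():
    import numpy as np

    class FakeParam(object):
        def __init__(self, grad):
            self.grad = grad

    param = FakeParam(np.array([2.0, 4.0], dtype=np.float32))
    GradientScaling(0.5)(None, param)  # called once per parameter
    return param.grad  # array([1., 2.], dtype=float32)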
|
import numpy as np
def flip_point(point, size, y_flip=False, x_flip=False):
"""Modify points according to image flips.
Args:
point (~numpy.ndarray or list of arrays): See the table below.
size (tuple): A tuple of length 2. The height and the width
of the image, which is associated with the points.
y_flip (bool): Modify points according to a vertical flip of
an image.
        x_flip (bool): Modify points according to a horizontal flip of
an image.
.. csv-table::
:header: name, shape, dtype, format
:obj:`point`, ":math:`(R, K, 2)` or :math:`[(K, 2)]`", \
:obj:`float32`, ":math:`(y, x)`"
Returns:
~numpy.ndarray or list of arrays:
Points modified according to image flips.
"""
H, W = size
if isinstance(point, np.ndarray):
out_point = point.copy()
if y_flip:
out_point[:, :, 0] = H - out_point[:, :, 0]
if x_flip:
out_point[:, :, 1] = W - out_point[:, :, 1]
else:
out_point = []
for pnt in point:
pnt = pnt.copy()
if y_flip:
pnt[:, 0] = H - pnt[:, 0]
if x_flip:
pnt[:, 1] = W - pnt[:, 1]
out_point.append(pnt)
return out_point
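# --- Hedged usage sketch -----------------------------------------------------
# Two keypoints on a 4x6 (H, W) image, flipped horizontally. The coordinates
# are illustrative only; points are in (y, x) order as documented above.
def _flip_point_demo():
    point = np.array([[[1.0, 2.0], [3.0, 5.0]]], dtype=np.float32)  # (R=1, K=2, 2)
    flipped = flip_point(point, (4, 6), x_flip=True)
    # x becomes W - x: 2 -> 4 and 5 -> 1; y is unchanged.
    return flipped  # array([[[1., 4.], [3., 1.]]], dtype=float32)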
|
from pathlib import Path
import pytest
from redbot.pytest.cog_manager import *
from redbot.core import cog_manager
@pytest.mark.skip
@pytest.mark.asyncio
async def test_ensure_cogs_in_paths(cog_mgr, default_dir):
cogs_dir = default_dir / "redbot" / "cogs"
assert cogs_dir in await cog_mgr.paths()
@pytest.mark.asyncio
async def test_install_path_set(cog_mgr: cog_manager.CogManager, tmpdir):
path = Path(str(tmpdir))
await cog_mgr.set_install_path(path)
assert await cog_mgr.install_path() == path
@pytest.mark.asyncio
async def test_install_path_set_bad(cog_mgr):
path = Path("something")
with pytest.raises(ValueError):
await cog_mgr.set_install_path(path)
@pytest.mark.asyncio
async def test_add_path(cog_mgr, tmpdir):
path = Path(str(tmpdir))
await cog_mgr.add_path(path)
assert path in await cog_mgr.paths()
@pytest.mark.asyncio
async def test_add_path_already_install_path(cog_mgr, tmpdir):
path = Path(str(tmpdir))
await cog_mgr.set_install_path(path)
with pytest.raises(ValueError):
await cog_mgr.add_path(path)
@pytest.mark.asyncio
async def test_remove_path(cog_mgr, tmpdir):
path = Path(str(tmpdir))
await cog_mgr.add_path(path)
await cog_mgr.remove_path(path)
assert path not in await cog_mgr.paths()
|
import asyncio
import logging
import os
import aiohttp
import async_timeout
from homeassistant.components.http import (
CONF_SERVER_HOST,
CONF_SERVER_PORT,
CONF_SSL_CERTIFICATE,
)
from homeassistant.const import HTTP_BAD_REQUEST, HTTP_OK, SERVER_PORT
from .const import X_HASSIO
_LOGGER = logging.getLogger(__name__)
class HassioAPIError(RuntimeError):
"""Return if a API trow a error."""
def _api_bool(funct):
"""Return a boolean."""
async def _wrapper(*argv, **kwargs):
"""Wrap function."""
try:
data = await funct(*argv, **kwargs)
return data["result"] == "ok"
except HassioAPIError:
return False
return _wrapper
def api_data(funct):
"""Return data of an api."""
async def _wrapper(*argv, **kwargs):
"""Wrap function."""
data = await funct(*argv, **kwargs)
if data["result"] == "ok":
return data["data"]
raise HassioAPIError(data["message"])
return _wrapper
class HassIO:
"""Small API wrapper for Hass.io."""
def __init__(self, loop, websession, ip):
"""Initialize Hass.io API."""
self.loop = loop
self.websession = websession
self._ip = ip
@_api_bool
def is_connected(self):
"""Return true if it connected to Hass.io supervisor.
This method return a coroutine.
"""
return self.send_command("/supervisor/ping", method="get", timeout=15)
@api_data
def get_info(self):
"""Return generic Supervisor information.
        This method returns a coroutine.
"""
return self.send_command("/info", method="get")
@api_data
def get_host_info(self):
"""Return data for Host.
        This method returns a coroutine.
"""
return self.send_command("/host/info", method="get")
@api_data
def get_core_info(self):
"""Return data for Home Asssistant Core.
This method returns a coroutine.
"""
return self.send_command("/core/info", method="get")
@api_data
def get_addon_info(self, addon):
"""Return data for a Add-on.
This method return a coroutine.
"""
return self.send_command(f"/addons/{addon}/info", method="get")
@api_data
def get_ingress_panels(self):
"""Return data for Add-on ingress panels.
        This method returns a coroutine.
"""
return self.send_command("/ingress/panels", method="get")
@_api_bool
def restart_homeassistant(self):
"""Restart Home-Assistant container.
This method return a coroutine.
"""
return self.send_command("/homeassistant/restart")
@_api_bool
def stop_homeassistant(self):
"""Stop Home-Assistant container.
This method return a coroutine.
"""
return self.send_command("/homeassistant/stop")
@api_data
def retrieve_discovery_messages(self):
"""Return all discovery data from Hass.io API.
        This method returns a coroutine.
"""
return self.send_command("/discovery", method="get")
@api_data
def get_discovery_message(self, uuid):
"""Return a single discovery data message.
        This method returns a coroutine.
"""
return self.send_command(f"/discovery/{uuid}", method="get")
@_api_bool
async def update_hass_api(self, http_config, refresh_token):
"""Update Home Assistant API data on Hass.io."""
port = http_config.get(CONF_SERVER_PORT) or SERVER_PORT
options = {
"ssl": CONF_SSL_CERTIFICATE in http_config,
"port": port,
"watchdog": True,
"refresh_token": refresh_token.token,
}
if http_config.get(CONF_SERVER_HOST) is not None:
options["watchdog"] = False
_LOGGER.warning(
"Found incompatible HTTP option 'server_host'. Watchdog feature disabled"
)
return await self.send_command("/homeassistant/options", payload=options)
@_api_bool
def update_hass_timezone(self, timezone):
"""Update Home-Assistant timezone data on Hass.io.
This method return a coroutine.
"""
return self.send_command("/supervisor/options", payload={"timezone": timezone})
async def send_command(self, command, method="post", payload=None, timeout=10):
"""Send API command to Hass.io.
This method is a coroutine.
"""
try:
with async_timeout.timeout(timeout):
request = await self.websession.request(
method,
f"http://{self._ip}{command}",
json=payload,
headers={X_HASSIO: os.environ.get("HASSIO_TOKEN", "")},
)
if request.status not in (HTTP_OK, HTTP_BAD_REQUEST):
_LOGGER.error("%s return code %d", command, request.status)
raise HassioAPIError()
answer = await request.json()
return answer
except asyncio.TimeoutError:
_LOGGER.error("Timeout on %s request", command)
except aiohttp.ClientError as err:
_LOGGER.error("Client error on %s request %s", command, err)
raise HassioAPIError()
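# --- Hedged sketch -----------------------------------------------------------
# Standalone illustration of the api_data/_api_bool decorator pattern above:
# wrap a coroutine that returns the supervisor-style {"result": ..., "data": ...}
# envelope and unwrap it. The fake endpoint and payload values are illustrative
# only.
@api_data
async def _fake_endpoint(ok=True):
    if ok:
        return {"result": "ok", "data": {"supervisor": "example-version"}}
    return {"result": "error", "message": "boom"}

async def _api_data_demo():
    data = await _fake_endpoint()  # -> {"supervisor": "example-version"}
    try:
        await _fake_endpoint(ok=False)
    except HassioAPIError as err:  # raised with the "message" field
        return data, str(err)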
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.const import CONF_HOSTS
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.loader import bind_hass
from .const import DATA_SONOS, DOMAIN
CONF_ADVERTISE_ADDR = "advertise_addr"
CONF_INTERFACE_ADDR = "interface_addr"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
MP_DOMAIN: vol.Schema(
{
vol.Optional(CONF_ADVERTISE_ADDR): cv.string,
vol.Optional(CONF_INTERFACE_ADDR): cv.string,
vol.Optional(CONF_HOSTS): vol.All(
cv.ensure_list_csv, [cv.string]
),
}
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the Sonos component."""
conf = config.get(DOMAIN)
hass.data[DOMAIN] = conf or {}
if conf is not None:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up Sonos from a config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, MP_DOMAIN)
)
return True
@bind_hass
def get_coordinator_name(hass, entity_id):
"""Obtain the room/name of a device's coordinator.
Used by the Plex integration.
This function is safe to run inside the event loop.
"""
if DATA_SONOS not in hass.data:
raise HomeAssistantError("Sonos integration not set up")
device = next(
(x for x in hass.data[DATA_SONOS].entities if x.entity_id == entity_id), None
)
if device.is_coordinator:
return device.name
return device.coordinator.name
|
import json
import pytest
from mock import Mock
from mock import patch
from paasta_tools.oom_logger import capture_oom_events_from_stdin
from paasta_tools.oom_logger import log_to_clog
from paasta_tools.oom_logger import LogLine
from paasta_tools.oom_logger import main
from paasta_tools.oom_logger import send_sfx_event
@pytest.fixture
def sys_stdin():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] apache2 invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n,"
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79 "
"killed as a result of limit of "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79\n",
]
@pytest.fixture
def sys_stdin_kubernetes_burstable_qos():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] apache2 invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n",
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/kubepods/burstable/podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772 "
"killed as a result of limit of /kubepods/burstable/"
"podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772\n",
]
@pytest.fixture
def sys_stdin_kubernetes_guaranteed_qos():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] apache2 invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n",
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/kubepods/podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772 "
"killed as a result of limit of /kubepods/"
"podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772\n",
]
@pytest.fixture
def sys_stdin_kubernetes_besteffort_qos():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] apache2 invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n",
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/kubepods/besteffort/podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772 "
"killed as a result of limit of /kubepods/besteffort/"
"podf91e9681-4741-4ef4-8f5a-182c5683df8b/"
"0e4a814eda03622476ff47871e6c397e5b8747af209b44f3b3e1c5289b0f9772\n",
]
@pytest.fixture
def sys_stdin_process_name_with_slashes():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] /nail/live/yelp invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n,"
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79 "
"killed as a result of limit of "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79\n",
]
@pytest.fixture
def sys_stdin_process_name_with_spaces():
return [
"some random line1\n",
"1500316299 dev37-devc [30533610.306528] python batch/ke invoked oom-killer: "
"gfp_mask=0x24000c0, order=0, oom_score_adj=0\n,"
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79 "
"killed as a result of limit of "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79\n",
]
@pytest.fixture
def sys_stdin_without_process_name():
return [
"some random line1\n",
"1500216300 dev37-devc [1140036.678311] Task in "
"/docker/e3a1057fdd485f5dffe48f1584e6f30c2bf6d30107d95518aea32bbb8bb29560 "
"killed as a result of limit of "
"/docker/e3a1057fdd485f5dffe48f1584e6f30c2bf6d30107d95518aea32bbb8bb29560\n,"
"some random line2\n",
"1500316300 dev37-devc [30533610.306529] Task in "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79 "
"killed as a result of limit of "
"/docker/a687af92e281725daf5b4cda0b487f20d2055d2bb6814b76d0e39c18a52a4e79\n",
]
@pytest.fixture
def docker_inspect():
return {
"Config": {
"Env": [
"PAASTA_SERVICE=fake_service",
"PAASTA_INSTANCE=fake_instance",
"PAASTA_RESOURCE_MEM=512",
"MESOS_CONTAINER_NAME=mesos-a04c14a6-83ea-4047-a802-92b850b1624e",
]
}
}
@pytest.fixture
def log_line():
return LogLine(
timestamp=1500316300,
hostname="dev37-devc",
container_id="a687af92e281",
cluster="fake_cluster",
service="fake_service",
instance="fake_instance",
process_name="apache2",
mesos_container_id="mesos-a04c14a6-83ea-4047-a802-92b850b1624e",
mem_limit="512",
)
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
def test_capture_oom_events_from_stdin(mock_sys_stdin, sys_stdin):
mock_sys_stdin.readline.side_effect = sys_stdin
test_output = []
for a_tuple in capture_oom_events_from_stdin():
test_output.append(a_tuple)
assert test_output == [(1500316300, "dev37-devc", "a687af92e281", "apache2")]
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
def test_capture_oom_events_from_stdin_kubernetes_qos(
mock_sys_stdin,
sys_stdin_kubernetes_besteffort_qos,
sys_stdin_kubernetes_burstable_qos,
sys_stdin_kubernetes_guaranteed_qos,
):
for qos in (
sys_stdin_kubernetes_besteffort_qos,
sys_stdin_kubernetes_burstable_qos,
sys_stdin_kubernetes_guaranteed_qos,
):
mock_sys_stdin.readline.side_effect = qos
test_output = []
for a_tuple in capture_oom_events_from_stdin():
test_output.append(a_tuple)
assert test_output == [(1500316300, "dev37-devc", "0e4a814eda03", "apache2")]
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
def test_capture_oom_events_from_stdin_with_slashes(
mock_sys_stdin, sys_stdin_process_name_with_slashes
):
mock_sys_stdin.readline.side_effect = sys_stdin_process_name_with_slashes
test_output = []
for a_tuple in capture_oom_events_from_stdin():
test_output.append(a_tuple)
assert test_output == [
(1500316300, "dev37-devc", "a687af92e281", "/nail/live/yelp")
]
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
def test_capture_oom_events_from_stdin_with_spaces(
mock_sys_stdin, sys_stdin_process_name_with_spaces
):
mock_sys_stdin.readline.side_effect = sys_stdin_process_name_with_spaces
test_output = []
for a_tuple in capture_oom_events_from_stdin():
test_output.append(a_tuple)
assert test_output == [
(1500316300, "dev37-devc", "a687af92e281", "python batch/ke")
]
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
def test_capture_oom_events_from_stdin_without_process_name(
mock_sys_stdin, sys_stdin_without_process_name
):
mock_sys_stdin.readline.side_effect = sys_stdin_without_process_name
test_output = []
for a_tuple in capture_oom_events_from_stdin():
test_output.append(a_tuple)
assert test_output == [
(1500216300, "dev37-devc", "e3a1057fdd48", ""),
(1500316300, "dev37-devc", "a687af92e281", ""),
]
@patch("paasta_tools.oom_logger.clog", autospec=True)
def test_log_to_clog(mock_clog, log_line):
log_to_clog(log_line)
mock_clog.log_line.assert_called_once_with(
"tmp_paasta_oom_events",
json.dumps(
{
"timestamp": log_line.timestamp,
"hostname": log_line.hostname,
"container_id": log_line.container_id,
"cluster": log_line.cluster,
"service": log_line.service,
"instance": log_line.instance,
"process_name": log_line.process_name,
"mesos_container_id": log_line.mesos_container_id,
"mem_limit": log_line.mem_limit,
}
),
)
@patch("paasta_tools.oom_logger.get_instance_config", autospec=True)
def test_send_sfx_event(mock_get_instance_config):
service = "foo"
instance = "bar"
cluster = "baz"
    # Use autospec for yelp_meteorite only when the package is importable
from paasta_tools.oom_logger import yelp_meteorite
if yelp_meteorite is None:
autospec = None
else:
autospec = True
with patch(
"paasta_tools.oom_logger.yelp_meteorite", autospec=autospec
) as mock_meteorite:
send_sfx_event(service, instance, cluster)
expected_dimensions = {
"paasta_service": service,
"paasta_instance": instance,
"paasta_cluster": cluster,
"paasta_pool": mock_get_instance_config.return_value.get_pool.return_value,
}
mock_meteorite.events.emit_event.assert_called_once_with(
"paasta.service.oom_events", dimensions=expected_dimensions
)
mock_meteorite.create_counter.assert_called_once_with(
"paasta.service.oom_count", default_dimensions=expected_dimensions
)
assert mock_meteorite.create_counter.return_value.count.call_count == 1
@patch("paasta_tools.oom_logger.sys.stdin", autospec=True)
@patch("paasta_tools.oom_logger.clog", autospec=True)
@patch("paasta_tools.oom_logger.send_sfx_event", autospec=True)
@patch("paasta_tools.oom_logger.load_system_paasta_config", autospec=True)
@patch("paasta_tools.oom_logger.log_to_clog", autospec=True)
@patch("paasta_tools.oom_logger.log_to_paasta", autospec=True)
@patch("paasta_tools.oom_logger.get_docker_client", autospec=True)
def test_main(
mock_get_docker_client,
mock_log_to_paasta,
mock_log_to_clog,
mock_load_system_paasta_config,
mock_send_sfx_event,
mock_clog,
mock_sys_stdin,
sys_stdin,
docker_inspect,
log_line,
):
mock_sys_stdin.readline.side_effect = sys_stdin
docker_client = Mock(inspect_container=Mock(return_value=docker_inspect))
mock_get_docker_client.return_value = docker_client
mock_load_system_paasta_config.return_value.get_cluster.return_value = (
"fake_cluster"
)
main()
mock_log_to_paasta.assert_called_once_with(log_line)
mock_log_to_clog.assert_called_once_with(log_line)
mock_send_sfx_event.assert_called_once_with(
"fake_service", "fake_instance", "fake_cluster"
)
|
from django.forms import widgets
from django.core.exceptions import ValidationError
from django.template import engines
from django.template.loader import select_template
from django.utils.translation import gettext_lazy as _
from entangled.forms import EntangledModelFormMixin
from cms.plugin_pool import plugin_pool
from cmsplugin_cascade.bootstrap4.buttons import BootstrapButtonMixin, ButtonFormMixin
from cmsplugin_cascade.icon.forms import IconFormMixin
from cmsplugin_cascade.plugin_base import TransparentWrapper
from djng.forms import fields, NgModelFormMixin
from djng.styling.bootstrap3.forms import Bootstrap3Form
from shop.cascade.extensions import ShopExtendableMixin, LeftRightExtensionMixin
from shop.cascade.plugin_base import ShopPluginBase
from shop.conf import app_settings
class ShopOrderViewsFormMixin(EntangledModelFormMixin):
def clean(self):
cleaned_data = super().clean()
if self.instance.page and self.instance.page.application_urls != 'OrderApp':
msg = "This plugin only makes sense if used on a CMS page with an application of type 'OrderApp'."
raise ValidationError(msg)
return cleaned_data
class ShopOrderViewsPlugin(LeftRightExtensionMixin, TransparentWrapper, ShopPluginBase):
name = _("Order Views")
require_parent = True
parent_classes = ['BootstrapColumnPlugin']
allow_children = True
model_mixins = (ShopExtendableMixin,)
form = ShopOrderViewsFormMixin
cache = False
def get_render_template(self, context, instance, placeholder):
many = context.get('many')
if many is True:
# render Order List View
return select_template([
'{}/order/list.html'.format(app_settings.APP_LABEL),
'shop/order/list.html',
])
if many is False:
# render Order Detail View
return select_template([
'{}/order/detail.html'.format(app_settings.APP_LABEL),
'shop/order/detail.html',
])
# can happen, if this plugin is abused outside of an OrderView
alert_msg = '''<div class="alert alert-danger">
This {} plugin is used on a CMS page without an application of type "View Order".
</div>'''
return engines['django'].from_string(alert_msg.format(self.name))
plugin_pool.register_plugin(ShopOrderViewsPlugin)
class OrderButtonForm(ShopOrderViewsFormMixin, IconFormMixin, ButtonFormMixin):
require_icon = False
class OrderButtonBase(BootstrapButtonMixin, ShopPluginBase):
parent_classes = ['BootstrapColumnPlugin']
form = OrderButtonForm
@classmethod
def get_identifier(cls, instance):
return instance.glossary.get('button_content', '')
class ShopReorderButtonPlugin(OrderButtonBase):
name = _("Reorder Button")
def get_render_template(self, context, instance, placeholder):
template_names = [
'{}/order/reorder-button.html'.format(app_settings.APP_LABEL),
'shop/order/reorder-button.html',
]
return select_template(template_names)
plugin_pool.register_plugin(ShopReorderButtonPlugin)
class ShopCancelOrderButtonPlugin(OrderButtonBase):
name = _("Cancel Order Button")
def get_render_template(self, context, instance, placeholder):
template_names = [
'{}/order/cancel-button.html'.format(app_settings.APP_LABEL),
'shop/order/cancel-button.html',
]
return select_template(template_names)
plugin_pool.register_plugin(ShopCancelOrderButtonPlugin)
class AddendumForm(NgModelFormMixin, Bootstrap3Form):
annotation = fields.CharField(
label="",
widget=widgets.Textarea(attrs={'rows': 2}),
)
class ShopOrderAddendumFormMixin(OrderButtonForm):
show_history = fields.BooleanField(
label=_("Show History"),
initial=True,
required=False,
help_text=_("Show historical annotations."),
)
class Meta:
entangled_fields = {'glossary': ['show_history']}
class ShopOrderAddendumFormPlugin(OrderButtonBase):
name = _("Order Addendum Form")
form = ShopOrderAddendumFormMixin
def get_render_template(self, context, instance, placeholder):
template_names = [
'{}/order/addendum-form.html'.format(app_settings.APP_LABEL),
'shop/order/addendum-form.html',
]
return select_template(template_names)
def render(self, context, instance, placeholder):
context = self.super(ShopOrderAddendumFormPlugin, self).render(context, instance, placeholder)
context.update({
'addendum_form': AddendumForm(),
'show_history': instance.glossary.get('show_history', True),
})
return context
plugin_pool.register_plugin(ShopOrderAddendumFormPlugin)
|
from kombu import Connection, Queue
from kombu.mixins import ConsumerProducerMixin
rpc_queue = Queue('rpc_queue')
def fib(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fib(n - 1) + fib(n - 2)
class Worker(ConsumerProducerMixin):
def __init__(self, connection):
self.connection = connection
def get_consumers(self, Consumer, channel):
return [Consumer(
queues=[rpc_queue],
on_message=self.on_request,
accept={'application/json'},
prefetch_count=1,
)]
def on_request(self, message):
n = message.payload['n']
print(f' [.] fib({n})')
result = fib(n)
self.producer.publish(
{'result': result},
exchange='', routing_key=message.properties['reply_to'],
correlation_id=message.properties['correlation_id'],
serializer='json',
retry=True,
)
message.ack()
def start_worker(broker_url):
connection = Connection(broker_url)
print(' [x] Awaiting RPC requests')
worker = Worker(connection)
worker.run()
if __name__ == '__main__':
try:
start_worker('pyamqp://')
except KeyboardInterrupt:
pass
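# Illustrative client-side sketch (not part of this worker script). It shows one
# way a caller could publish a request this worker understands and wait for the
# matching reply. It is adapted from kombu's RPC tutorial, so the class name,
# broker URL and timeout are assumptions rather than part of this file.
#
#   from kombu import Connection, Consumer, Producer, Queue, uuid
#
#   class FibonacciRpcClient:
#       def __init__(self, connection):
#           self.connection = connection
#           self.callback_queue = Queue(uuid(), exclusive=True, auto_delete=True)
#
#       def on_response(self, message):
#           if message.properties['correlation_id'] == self.correlation_id:
#               self.response = message.payload['result']
#
#       def call(self, n):
#           self.response = None
#           self.correlation_id = uuid()
#           with Producer(self.connection) as producer:
#               producer.publish(
#                   {'n': n},
#                   exchange='',
#                   routing_key='rpc_queue',
#                   declare=[self.callback_queue],
#                   reply_to=self.callback_queue.name,
#                   correlation_id=self.correlation_id,
#                   serializer='json',
#               )
#           with Consumer(self.connection,
#                         on_message=self.on_response,
#                         queues=[self.callback_queue], no_ack=True):
#               while self.response is None:
#                   self.connection.drain_events(timeout=5)
#           return self.response
#
#   # Usage: FibonacciRpcClient(Connection('pyamqp://')).call(30)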
|
from datetime import timedelta
import logging
from sml import SmlGetListResponse
from sml.asyncio import SmlProtocol
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.helpers.typing import Optional
from homeassistant.util.dt import utcnow
_LOGGER = logging.getLogger(__name__)
DOMAIN = "edl21"
CONF_SERIAL_PORT = "serial_port"
ICON_POWER = "mdi:flash"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SIGNAL_EDL21_TELEGRAM = "edl21_telegram"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SERIAL_PORT): cv.string,
vol.Optional(CONF_NAME, default=""): cv.string,
},
)
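# Illustrative configuration sketch accepted by the schema above; the serial
# port path and the name are placeholders, not defaults of this integration.
#
#   sensor:
#     - platform: edl21
#       serial_port: /dev/ttyUSB0
#       name: Power Meter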
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the EDL21 sensor."""
hass.data[DOMAIN] = EDL21(hass, config, async_add_entities)
await hass.data[DOMAIN].connect()
class EDL21:
"""EDL21 handles telegrams sent by a compatible smart meter."""
# OBIS format: A-B:C.D.E*F
_OBIS_NAMES = {
# A=1: Electricity
# C=0: General purpose objects
"1-0:0.0.9*255": "Electricity ID",
# C=1: Active power +
# D=8: Time integral 1
# E=0: Total
"1-0:1.8.0*255": "Positive active energy total",
# E=1: Rate 1
"1-0:1.8.1*255": "Positive active energy in tariff T1",
# E=2: Rate 2
"1-0:1.8.2*255": "Positive active energy in tariff T2",
# D=17: Time integral 7
# E=0: Total
"1-0:1.17.0*255": "Last signed positive active energy total",
# C=2: Active power -
# D=8: Time integral 1
# E=0: Total
"1-0:2.8.0*255": "Negative active energy total",
# E=1: Rate 1
"1-0:2.8.1*255": "Negative active energy in tariff T1",
# E=2: Rate 2
"1-0:2.8.2*255": "Negative active energy in tariff T2",
# C=15: Active power absolute
# D=7: Instantaneous value
# E=0: Total
"1-0:15.7.0*255": "Absolute active instantaneous power",
# C=16: Active power sum
# D=7: Instantaneous value
# E=0: Total
"1-0:16.7.0*255": "Sum active instantaneous power",
# C=36: Active power L1
# D=7: Instantaneous value
# E=0: Total
"1-0:36.7.0*255": "L1 active instantaneous power",
        # C=56: Active power L2
# D=7: Instantaneous value
# E=0: Total
"1-0:56.7.0*255": "L2 active instantaneous power",
        # C=76: Active power L3
# D=7: Instantaneous value
# E=0: Total
"1-0:76.7.0*255": "L3 active instantaneous power",
}
_OBIS_BLACKLIST = {
# A=129: Manufacturer specific
"129-129:199.130.3*255", # Iskraemeco: Manufacturer
"129-129:199.130.5*255", # Iskraemeco: Public Key
}
def __init__(self, hass, config, async_add_entities) -> None:
"""Initialize an EDL21 object."""
self._registered_obis = set()
self._hass = hass
self._async_add_entities = async_add_entities
self._name = config[CONF_NAME]
self._proto = SmlProtocol(config[CONF_SERIAL_PORT])
self._proto.add_listener(self.event, ["SmlGetListResponse"])
async def connect(self):
"""Connect to an EDL21 reader."""
await self._proto.connect(self._hass.loop)
def event(self, message_body) -> None:
"""Handle events from pysml."""
assert isinstance(message_body, SmlGetListResponse)
electricity_id = None
for telegram in message_body.get("valList", []):
if telegram.get("objName") == "1-0:0.0.9*255":
electricity_id = telegram.get("value")
break
if electricity_id is None:
return
electricity_id = electricity_id.replace(" ", "")
new_entities = []
for telegram in message_body.get("valList", []):
obis = telegram.get("objName")
if not obis:
continue
if (electricity_id, obis) in self._registered_obis:
async_dispatcher_send(
self._hass, SIGNAL_EDL21_TELEGRAM, electricity_id, telegram
)
else:
name = self._OBIS_NAMES.get(obis)
if name:
if self._name:
name = f"{self._name}: {name}"
new_entities.append(
EDL21Entity(electricity_id, obis, name, telegram)
)
self._registered_obis.add((electricity_id, obis))
elif obis not in self._OBIS_BLACKLIST:
_LOGGER.warning(
"Unhandled sensor %s detected. Please report at "
'https://github.com/home-assistant/core/issues?q=is%%3Aissue+label%%3A"integration%%3A+edl21"+',
obis,
)
self._OBIS_BLACKLIST.add(obis)
if new_entities:
self._hass.loop.create_task(self.add_entities(new_entities))
async def add_entities(self, new_entities) -> None:
"""Migrate old unique IDs, then add entities to hass."""
registry = await async_get_registry(self._hass)
for entity in new_entities:
old_entity_id = registry.async_get_entity_id(
"sensor", DOMAIN, entity.old_unique_id
)
if old_entity_id is not None:
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
entity.old_unique_id,
entity.unique_id,
)
if registry.async_get_entity_id("sensor", DOMAIN, entity.unique_id):
registry.async_remove(old_entity_id)
else:
registry.async_update_entity(
old_entity_id, new_unique_id=entity.unique_id
)
self._async_add_entities(new_entities, update_before_add=True)
class EDL21Entity(Entity):
"""Entity reading values from EDL21 telegram."""
def __init__(self, electricity_id, obis, name, telegram):
"""Initialize an EDL21Entity."""
self._electricity_id = electricity_id
self._obis = obis
self._name = name
self._unique_id = f"{electricity_id}_{obis}"
self._telegram = telegram
self._min_time = MIN_TIME_BETWEEN_UPDATES
self._last_update = utcnow()
self._state_attrs = {
"status": "status",
"valTime": "val_time",
"scaler": "scaler",
"valueSignature": "value_signature",
}
self._async_remove_dispatcher = None
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
@callback
def handle_telegram(electricity_id, telegram):
"""Update attributes from last received telegram for this object."""
if self._electricity_id != electricity_id:
return
if self._obis != telegram.get("objName"):
return
if self._telegram == telegram:
return
now = utcnow()
if now - self._last_update < self._min_time:
return
self._telegram = telegram
self._last_update = now
self.async_write_ha_state()
self._async_remove_dispatcher = async_dispatcher_connect(
self.hass, SIGNAL_EDL21_TELEGRAM, handle_telegram
)
async def async_will_remove_from_hass(self):
"""Run when entity will be removed from hass."""
if self._async_remove_dispatcher:
self._async_remove_dispatcher()
@property
def should_poll(self) -> bool:
"""Do not poll."""
return False
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def old_unique_id(self) -> str:
"""Return a less unique ID as used in the first version of edl21."""
return self._obis
@property
def name(self) -> Optional[str]:
"""Return a name."""
return self._name
@property
def state(self) -> str:
"""Return the value of the last received telegram."""
return self._telegram.get("value")
@property
def device_state_attributes(self):
"""Enumerate supported attributes."""
return {
self._state_attrs[k]: v
for k, v in self._telegram.items()
if k in self._state_attrs
}
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._telegram.get("unit")
@property
def icon(self):
"""Return an icon."""
return ICON_POWER
|
import shlex
import sys
import subprocess as sp
import tempfile
import textwrap
import venv
from pathlib import Path
from typing import Sequence, Iterable, Dict
import packaging.requirements
import setuptools.config
THIS_DIRECTORY = Path(__file__).parent
REQUIREMENTS_INI_PTH: Path = THIS_DIRECTORY / "primary_deps.ini"
PIP_INSTALL_ARGS = ("install", "--upgrade")
PIP_FREEZE_ARGS = ("freeze", "--no-color")
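# Illustrative sketch of the primary_deps.ini layout this script expects. The
# file is parsed with setuptools.config.read_configuration, so it follows
# setup.cfg conventions with an [options] install_requires list and an
# [options.extras_require] section. The package names below are hypothetical.
#
#   [options]
#   install_requires =
#       requests
#       packaging
#
#   [options.extras_require]
#   docs =
#       sphinx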
def main() -> int:
if not REQUIREMENTS_INI_PTH.is_file():
print("No primary_deps.ini found in the same directory as bumpdeps.py", file=sys.stderr)
return 1
primary_reqs_cfg = setuptools.config.read_configuration(str(REQUIREMENTS_INI_PTH))
print("[options]")
print("install_requires =")
core_primary_deps = primary_reqs_cfg["options"]["install_requires"]
full_core_reqs = get_all_reqs(core_primary_deps)
print(textwrap.indent("\n".join(map(str, full_core_reqs)), " " * 4))
print()
print("[options.extras_require]")
for extra, extra_primary_deps in primary_reqs_cfg["options"]["extras_require"].items():
print(extra, "=")
full_extra_reqs = get_all_reqs(
extra_primary_deps, all_core_deps={r.name.lower(): r for r in full_core_reqs}
)
print(textwrap.indent("\n".join(map(str, full_extra_reqs)), " " * 4))
return 0
def get_all_reqs(
primary_deps: Iterable[str], all_core_deps: Dict[str, packaging.requirements.Requirement] = ()
) -> Sequence[packaging.requirements.Requirement]:
reqs_dict = {r.name.lower(): r for r in map(packaging.requirements.Requirement, primary_deps)}
with tempfile.TemporaryDirectory() as tmpdir:
venv.create(tmpdir, system_site_packages=False, clear=True, with_pip=True)
tmpdir_pth = Path(tmpdir)
pip_exe_pth = tmpdir_pth / "bin" / "pip"
# Upgrade pip to latest version
sp.run((pip_exe_pth, *PIP_INSTALL_ARGS, "pip"), stdout=sp.DEVNULL, check=True)
# Install the primary dependencies
sp.run(
(pip_exe_pth, *PIP_INSTALL_ARGS, *map(str, reqs_dict.values())),
stdout=sp.DEVNULL,
check=True,
)
# Get pinned primary+secondary dependencies from pip freeze
proc = sp.run(
(pip_exe_pth, *PIP_FREEZE_ARGS), stdout=sp.PIPE, check=True, encoding="utf-8"
)
# Return Requirement objects
ret = []
for req_obj in map(packaging.requirements.Requirement, proc.stdout.strip().split("\n")):
dep_name = req_obj.name.lower()
# Don't include core dependencies if these are extra dependencies
if dep_name in all_core_deps:
if req_obj.specifier != all_core_deps[dep_name].specifier:
print(
f"[WARNING] {dep_name} is listed as both a core requirement and an extra "
f"requirement, and it's possible that their versions conflict!",
file=sys.stderr,
)
continue
# Preserve environment markers
if dep_name in reqs_dict:
req_obj.marker = reqs_dict[dep_name].marker
ret.append(req_obj)
return ret
if __name__ == "__main__":
try:
exit_code = main()
except sp.CalledProcessError as exc:
cmd = " ".join(map(lambda c: shlex.quote(str(c)), exc.cmd))
print(
f"The following command failed with code {exc.returncode}:\n ", cmd, file=sys.stderr
)
exit_code = 1
sys.exit(exit_code)
|
import os
from typing import List
from django.db import transaction
from lxml import html
from weblate.addons.events import EVENT_DAILY
from weblate.addons.models import Addon
from weblate.lang.models import Language
from weblate.trans.models import Component, Project
from weblate.utils.celery import app
from weblate.utils.hash import calculate_checksum
from weblate.utils.requests import request
@app.task(trail=False)
def cdn_parse_html(files: str, selector: str, component_id: int):
component = Component.objects.get(pk=component_id)
source_translation = component.source_translation
source_units = set(source_translation.unit_set.values_list("source", flat=True))
units = []
errors = []
for filename in files.splitlines():
filename = filename.strip()
try:
if filename.startswith("http://") or filename.startswith("https://"):
with request("get", filename) as handle:
content = handle.read()
else:
with open(os.path.join(component.full_path, filename)) as handle:
content = handle.read()
except OSError as error:
errors.append({"filename": filename, "error": str(error)})
continue
document = html.fromstring(content)
for element in document.cssselect(selector):
text = element.text
if (
element.getchildren()
or not text
or text in source_units
or text in units
):
continue
units.append(text)
# Actually create units
if units:
source_translation.add_units(
None,
{calculate_checksum(text): text for text in units},
)
if errors:
component.add_alert("CDNAddonError", occurrences=errors)
else:
component.delete_alert("CDNAddonError")
@app.task(trail=False)
def language_consistency(project_id: int, language_ids: List[int]):
project = Project.objects.get(pk=project_id)
languages = Language.objects.filter(id__in=language_ids)
for component in project.component_set.iterator():
missing = languages.exclude(translation__component=component)
for language in missing:
component.add_new_language(language, None, send_signal=False)
@app.task(trail=False)
def daily_addons():
for addon in Addon.objects.filter(event__event=EVENT_DAILY).prefetch_related(
"component"
):
with transaction.atomic():
addon.addon.daily(addon.component)
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(3600 * 24, daily_addons.s(), name="daily-addons")
|
from typing import List
from pycfdns import CFRecord
from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
ENTRY_CONFIG = {
CONF_API_TOKEN: "mock-api-token",
CONF_ZONE: "mock.com",
CONF_RECORDS: ["ha.mock.com", "homeassistant.mock.com"],
}
ENTRY_OPTIONS = {}
USER_INPUT = {
CONF_API_TOKEN: "mock-api-token",
}
USER_INPUT_ZONE = {CONF_ZONE: "mock.com"}
USER_INPUT_RECORDS = {CONF_RECORDS: ["ha.mock.com", "homeassistant.mock.com"]}
MOCK_ZONE = "mock.com"
MOCK_ZONE_ID = "mock-zone-id"
MOCK_ZONE_RECORDS = [
{
"id": "zone-record-id",
"type": "A",
"name": "ha.mock.com",
"proxied": True,
"content": "127.0.0.1",
},
{
"id": "zone-record-id-2",
"type": "A",
"name": "homeassistant.mock.com",
"proxied": True,
"content": "127.0.0.1",
},
{
"id": "zone-record-id-3",
"type": "A",
"name": "mock.com",
"proxied": True,
"content": "127.0.0.1",
},
]
async def init_integration(
hass,
*,
data: dict = ENTRY_CONFIG,
options: dict = ENTRY_OPTIONS,
) -> MockConfigEntry:
"""Set up the Cloudflare integration in Home Assistant."""
entry = MockConfigEntry(domain=DOMAIN, data=data, options=options)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
def _get_mock_cfupdate(
zone: str = MOCK_ZONE,
zone_id: str = MOCK_ZONE_ID,
records: List = MOCK_ZONE_RECORDS,
):
client = AsyncMock()
zone_records = [record["name"] for record in records]
cf_records = [CFRecord(record) for record in records]
client.get_zones = AsyncMock(return_value=[zone])
client.get_zone_records = AsyncMock(return_value=zone_records)
client.get_record_info = AsyncMock(return_value=cf_records)
client.get_zone_id = AsyncMock(return_value=zone_id)
client.update_records = AsyncMock(return_value=None)
return client
def _patch_async_setup(return_value=True):
return patch(
"homeassistant.components.cloudflare.async_setup",
return_value=return_value,
)
def _patch_async_setup_entry(return_value=True):
return patch(
"homeassistant.components.cloudflare.async_setup_entry",
return_value=return_value,
)
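# Illustrative test sketch using the helpers above. The patch target for the
# pycfdns client ("homeassistant.components.cloudflare.CloudflareUpdater") is
# an assumption and may differ from the integration's actual import path.
#
#   async def test_example(hass):
#       with patch(
#           "homeassistant.components.cloudflare.CloudflareUpdater",
#           return_value=_get_mock_cfupdate(),
#       ):
#           entry = await init_integration(hass)
#       assert entry.data[CONF_ZONE] == "mock.com"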
|
from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_CLOSED, STATE_OPEN
from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType
@callback
def async_describe_on_off_states(
hass: HomeAssistantType, registry: GroupIntegrationRegistry
) -> None:
"""Describe group on off states."""
# On means open, Off means closed
registry.on_off_states({STATE_OPEN}, STATE_CLOSED)
|
from __future__ import annotations
import asyncio
import json
import logging
from asyncio import as_completed, Semaphore
from asyncio.futures import isfuture
from itertools import chain
from pathlib import Path
from typing import (
Any,
AsyncIterator,
AsyncIterable,
Awaitable,
Callable,
Iterable,
Iterator,
List,
Optional,
Tuple,
TypeVar,
Union,
Generator,
Coroutine,
)
from discord.utils import maybe_coroutine
__all__ = (
"bounded_gather",
"bounded_gather_iter",
"deduplicate_iterables",
"AsyncIter",
"get_end_user_data_statement",
"get_end_user_data_statement_or_raise",
)
log = logging.getLogger("red.core.utils")
_T = TypeVar("_T")
_S = TypeVar("_S")
# Benchmarked to be the fastest method.
def deduplicate_iterables(*iterables):
"""
Returns a list of all unique items in ``iterables``, in the order they
were first encountered.
"""
# dict insertion order is guaranteed to be preserved in 3.6+
return list(dict.fromkeys(chain.from_iterable(iterables)))
# https://github.com/PyCQA/pylint/issues/2717
class AsyncFilter(AsyncIterator[_T], Awaitable[List[_T]]): # pylint: disable=duplicate-bases
"""Class returned by `async_filter`. See that function for details.
We don't recommend instantiating this class directly.
"""
def __init__(
self,
func: Callable[[_T], Union[bool, Awaitable[bool]]],
iterable: Union[AsyncIterable[_T], Iterable[_T]],
) -> None:
self.__func: Callable[[_T], Union[bool, Awaitable[bool]]] = func
self.__iterable: Union[AsyncIterable[_T], Iterable[_T]] = iterable
# We assign the generator strategy based on the arguments' types
if isinstance(iterable, AsyncIterable):
if asyncio.iscoroutinefunction(func):
self.__generator_instance = self.__async_generator_async_pred()
else:
self.__generator_instance = self.__async_generator_sync_pred()
elif asyncio.iscoroutinefunction(func):
self.__generator_instance = self.__sync_generator_async_pred()
else:
raise TypeError("Must be either an async predicate, an async iterable, or both.")
async def __sync_generator_async_pred(self) -> AsyncIterator[_T]:
for item in self.__iterable:
if await self.__func(item):
yield item
async def __async_generator_sync_pred(self) -> AsyncIterator[_T]:
async for item in self.__iterable:
if self.__func(item):
yield item
async def __async_generator_async_pred(self) -> AsyncIterator[_T]:
async for item in self.__iterable:
if await self.__func(item):
yield item
async def __flatten(self) -> List[_T]:
return [item async for item in self]
def __aiter__(self):
return self
def __await__(self):
# Simply return the generator filled into a list
return self.__flatten().__await__()
def __anext__(self) -> Awaitable[_T]:
# This will use the generator strategy set in __init__
return self.__generator_instance.__anext__()
def async_filter(
func: Callable[[_T], Union[bool, Awaitable[bool]]],
iterable: Union[AsyncIterable[_T], Iterable[_T]],
) -> AsyncFilter[_T]:
"""Filter an (optionally async) iterable with an (optionally async) predicate.
At least one of the arguments must be async.
Parameters
----------
func : Callable[[T], Union[bool, Awaitable[bool]]]
A function or coroutine function which takes one item of ``iterable``
as an argument, and returns ``True`` or ``False``.
iterable : Union[AsyncIterable[_T], Iterable[_T]]
An iterable or async iterable which is to be filtered.
Raises
------
TypeError
If neither of the arguments are async.
Returns
-------
AsyncFilter[T]
An object which can either be awaited to yield a list of the filtered
items, or can also act as an async iterator to yield items one by one.
"""
return AsyncFilter(func, iterable)
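# Illustrative usage sketch for async_filter (not part of the original module);
# the predicate name is hypothetical. Awaiting the returned AsyncFilter yields
# the whole filtered list, while ``async for`` yields items one by one.
#
#   async def keep_small(value):
#       return value <= 5
#
#   small = await async_filter(keep_small, [1, 10, 5, 100])  # -> [1, 5]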
async def async_enumerate(
async_iterable: AsyncIterable[_T], start: int = 0
) -> AsyncIterator[Tuple[int, _T]]:
"""Async iterable version of `enumerate`.
Parameters
----------
async_iterable : AsyncIterable[T]
The iterable to enumerate.
start : int
The index to start from. Defaults to 0.
Returns
-------
AsyncIterator[Tuple[int, T]]
An async iterator of tuples in the form of ``(index, item)``.
"""
async for item in async_iterable:
yield start, item
start += 1
async def _sem_wrapper(sem, task):
async with sem:
return await task
def bounded_gather_iter(
*coros_or_futures, limit: int = 4, semaphore: Optional[Semaphore] = None
) -> Iterator[Awaitable[Any]]:
"""
An iterator that returns tasks as they are ready, but limits the
number of tasks running at a time.
Parameters
----------
*coros_or_futures
The awaitables to run in a bounded concurrent fashion.
limit : Optional[`int`]
The maximum number of concurrent tasks. Used when no ``semaphore``
is passed.
semaphore : Optional[:class:`asyncio.Semaphore`]
The semaphore to use for bounding tasks. If `None`, create one
        using ``limit``.
Raises
------
TypeError
When invalid parameters are passed
"""
loop = asyncio.get_running_loop()
if semaphore is None:
if not isinstance(limit, int) or limit <= 0:
raise TypeError("limit must be an int > 0")
semaphore = Semaphore(limit)
pending = []
for cof in coros_or_futures:
if isfuture(cof) and cof._loop is not loop:
raise ValueError("futures are tied to different event loops")
cof = _sem_wrapper(semaphore, cof)
pending.append(cof)
return as_completed(pending)
def bounded_gather(
*coros_or_futures,
return_exceptions: bool = False,
limit: int = 4,
semaphore: Optional[Semaphore] = None,
) -> Awaitable[List[Any]]:
"""
A semaphore-bounded wrapper to :meth:`asyncio.gather`.
Parameters
----------
*coros_or_futures
The awaitables to run in a bounded concurrent fashion.
return_exceptions : bool
If true, gather exceptions in the result list instead of raising.
limit : Optional[`int`]
The maximum number of concurrent tasks. Used when no ``semaphore``
is passed.
semaphore : Optional[:class:`asyncio.Semaphore`]
The semaphore to use for bounding tasks. If `None`, create one
        using ``limit``.
Raises
------
TypeError
When invalid parameters are passed
"""
loop = asyncio.get_running_loop()
if semaphore is None:
if not isinstance(limit, int) or limit <= 0:
raise TypeError("limit must be an int > 0")
semaphore = Semaphore(limit)
tasks = (_sem_wrapper(semaphore, task) for task in coros_or_futures)
return asyncio.gather(*tasks, return_exceptions=return_exceptions)
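# Illustrative usage sketch for the bounded gather helpers above (coroutine and
# variable names are hypothetical, and the calls must run inside a coroutine):
# at most two fetches execute concurrently.
#
#   async def fetch(url):
#       ...
#
#   results = await bounded_gather(*(fetch(u) for u in urls), limit=2)
#
#   # bounded_gather_iter yields awaitables in completion order:
#   for fut in bounded_gather_iter(*(fetch(u) for u in urls), limit=2):
#       result = await fut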
class AsyncIter(AsyncIterator[_T], Awaitable[List[_T]]): # pylint: disable=duplicate-bases
"""Asynchronous iterator yielding items from ``iterable``
that sleeps for ``delay`` seconds every ``steps`` items.
Parameters
----------
iterable: Iterable
The iterable to make async.
delay: Union[float, int]
The amount of time in seconds to sleep.
steps: int
The number of iterations between sleeps.
Raises
------
ValueError
When ``steps`` is lower than 1.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> async for value in AsyncIter(range(3)):
... print(value)
0
1
2
"""
def __init__(
self, iterable: Iterable[_T], delay: Union[float, int] = 0, steps: int = 1
) -> None:
if steps < 1:
raise ValueError("Steps must be higher than or equals to 1")
self._delay = delay
self._iterator = iter(iterable)
self._i = 0
self._steps = steps
self._map = None
def __aiter__(self) -> AsyncIter[_T]:
return self
async def __anext__(self) -> _T:
try:
item = next(self._iterator)
except StopIteration:
raise StopAsyncIteration
if self._i == self._steps:
self._i = 0
await asyncio.sleep(self._delay)
self._i += 1
return await maybe_coroutine(self._map, item) if self._map is not None else item
def __await__(self) -> Generator[Any, None, List[_T]]:
"""Returns a list of the iterable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator
[0, 1, 2, 3, 4]
"""
return self.flatten().__await__()
async def next(self, default: Any = ...) -> _T:
"""Returns a next entry of the iterable.
Parameters
----------
default: Optional[Any]
The value to return if the iterator is exhausted.
Raises
------
StopAsyncIteration
When ``default`` is not specified and the iterator has been exhausted.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator.next()
0
>>> await iterator.next()
1
"""
try:
value = await self.__anext__()
except StopAsyncIteration:
if default is ...:
raise
value = default
return value
async def flatten(self) -> List[_T]:
"""Returns a list of the iterable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator.flatten()
[0, 1, 2, 3, 4]
"""
return [item async for item in self]
def filter(self, function: Callable[[_T], Union[bool, Awaitable[bool]]]) -> AsyncFilter[_T]:
"""Filter the iterable with an (optionally async) predicate.
Parameters
----------
function: Callable[[T], Union[bool, Awaitable[bool]]]
A function or coroutine function which takes one item of ``iterable``
as an argument, and returns ``True`` or ``False``.
Returns
-------
AsyncFilter[T]
An object which can either be awaited to yield a list of the filtered
items, or can also act as an async iterator to yield items one by one.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> def predicate(value):
... return value <= 5
>>> iterator = AsyncIter([1, 10, 5, 100])
>>> async for i in iterator.filter(predicate):
... print(i)
1
5
>>> from redbot.core.utils import AsyncIter
>>> def predicate(value):
... return value <= 5
>>> iterator = AsyncIter([1, 10, 5, 100])
>>> await iterator.filter(predicate)
[1, 5]
"""
return async_filter(function, self)
def enumerate(self, start: int = 0) -> AsyncIterator[Tuple[int, _T]]:
"""Async iterable version of `enumerate`.
Parameters
----------
start: int
The index to start from. Defaults to 0.
Returns
-------
AsyncIterator[Tuple[int, T]]
An async iterator of tuples in the form of ``(index, item)``.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(['one', 'two', 'three'])
>>> async for i in iterator.enumerate(start=10):
... print(i)
(10, 'one')
(11, 'two')
(12, 'three')
"""
return async_enumerate(self, start)
async def without_duplicates(self) -> AsyncIterator[_T]:
"""
Iterates while omitting duplicated entries.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter([1,2,3,3,4,4,5])
>>> async for i in iterator.without_duplicates():
... print(i)
1
2
3
4
5
"""
_temp = set()
async for item in self:
if item not in _temp:
yield item
_temp.add(item)
del _temp
async def find(
self,
predicate: Callable[[_T], Union[bool, Awaitable[bool]]],
default: Optional[Any] = None,
    ) -> Optional[_T]:
"""Calls ``predicate`` over items in iterable and return first value to match.
Parameters
----------
predicate: Union[Callable, Coroutine]
A function that returns a boolean-like result. The predicate provided can be a coroutine.
default: Optional[Any]
The value to return if there are no matches.
Raises
------
TypeError
When ``predicate`` is not a callable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> await AsyncIter(range(3)).find(lambda x: x == 1)
1
"""
while True:
try:
elem = await self.__anext__()
except StopAsyncIteration:
return default
ret = await maybe_coroutine(predicate, elem)
if ret:
return elem
def map(self, func: Callable[[_T], Union[_S, Awaitable[_S]]]) -> AsyncIter[_S]:
"""Set the mapping callable for this instance of `AsyncIter`.
.. important::
This should be called after AsyncIter initialization and before any other of its methods.
Parameters
----------
func: Union[Callable, Coroutine]
The function to map values to. The function provided can be a coroutine.
Raises
------
TypeError
When ``func`` is not a callable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> async for value in AsyncIter(range(3)).map(bool):
... print(value)
False
True
True
"""
if not callable(func):
raise TypeError("Mapping must be a callable.")
self._map = func
return self
def get_end_user_data_statement(file: Union[Path, str]) -> Optional[str]:
"""
This function attempts to get the ``end_user_data_statement`` key from cog's ``info.json``.
This will log the reason if ``None`` is returned.
Parameters
----------
file: Union[pathlib.Path, str]
The ``__file__`` variable for the cog's ``__init__.py`` file.
Returns
-------
Optional[str]
The end user data statement found in the info.json
or ``None`` if there was an issue finding one.
Examples
--------
>>> # In cog's `__init__.py`
>>> from redbot.core.utils import get_end_user_data_statement
>>> __red_end_user_data_statement__ = get_end_user_data_statement(__file__)
>>> def setup(bot):
... ...
"""
try:
file = Path(file).parent.absolute()
info_json = file / "info.json"
statement = get_end_user_data_statement_or_raise(info_json)
except FileNotFoundError:
log.critical("'%s' does not exist.", str(info_json))
except KeyError:
log.critical("'%s' is missing an entry for 'end_user_data_statement'", str(info_json))
except json.JSONDecodeError as exc:
log.critical("'%s' is not a valid JSON file.", str(info_json), exc_info=exc)
except UnicodeError as exc:
log.critical("'%s' has a bad encoding.", str(info_json), exc_info=exc)
except Exception as exc:
log.critical(
"There was an error when trying to load the end user data statement from '%s'.",
str(info_json),
exc_info=exc,
)
else:
return statement
return None
def get_end_user_data_statement_or_raise(file: Union[Path, str]) -> str:
"""
This function attempts to get the ``end_user_data_statement`` key from cog's ``info.json``.
Parameters
----------
file: Union[pathlib.Path, str]
The ``__file__`` variable for the cog's ``__init__.py`` file.
Returns
-------
str
The end user data statement found in the info.json.
Raises
------
FileNotFoundError
When ``info.json`` does not exist.
KeyError
When ``info.json`` does not have the ``end_user_data_statement`` key.
json.JSONDecodeError
When ``info.json`` can't be decoded with ``json.load()``
UnicodeError
When ``info.json`` can't be decoded due to bad encoding.
Exception
Any other exception raised from ``pathlib`` and ``json`` modules
when attempting to parse the ``info.json`` for the ``end_user_data_statement`` key.
"""
file = Path(file).parent.absolute()
info_json = file / "info.json"
with info_json.open(encoding="utf-8") as fp:
return json.load(fp)["end_user_data_statement"]
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.linux_benchmarks import netperf_benchmark
FLAGS = flags.FLAGS
# We set the default to 128KB (131072 bytes) to override the Linux default
# of 16K so that we can achieve the "link rate".
flags.DEFINE_integer('container_netperf_tcp_stream_send_size_in_bytes', 131072,
'Send size to use for TCP_STREAM tests (netperf -m flag)')
BENCHMARK_NAME = 'container_netperf'
BENCHMARK_CONFIG = """
container_netperf:
description: Run netperf between containers.
container_specs:
netperf:
image: netperf
cpus: 2
memory: 4GiB
container_registry: {}
container_cluster:
vm_count: 2
vm_spec:
AWS:
zone: us-east-1a
machine_type: c5.xlarge
Azure:
zone: westus
machine_type: Standard_D3_v2
GCP:
machine_type: n1-standard-4
zone: us-west1-a
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Start the netserver container.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
cluster = benchmark_spec.container_cluster
cluster.DeployContainer('netperf', benchmark_spec.container_specs['netperf'])
def Run(benchmark_spec):
"""Run netperf TCP_STREAM between containers.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
samples = []
cluster = benchmark_spec.container_cluster
container_0 = cluster.containers['netperf'][0]
spec = benchmark_spec.container_specs['netperf']
spec.command = ['netperf',
'-t', 'TCP_STREAM',
'-H', container_0.ip_address,
'-l', '100',
'--',
'-m', FLAGS.container_netperf_tcp_stream_send_size_in_bytes,
'-o', netperf_benchmark.OUTPUT_SELECTOR]
cluster.DeployContainer('netperf', benchmark_spec.container_specs['netperf'])
container_1 = cluster.containers['netperf'][1]
container_1.WaitForExit()
throughput_sample, _, _ = netperf_benchmark.ParseNetperfOutput(
container_1.GetLogs(), {}, 'TCP_STREAM', False)
samples.append(throughput_sample)
return samples
def Cleanup(unused_benchmark_spec):
"""Cleanup netperf.
Args:
unused_benchmark_spec: The benchmark specification. Contains all data that
is required to run the benchmark.
"""
pass
|
import logging
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DEVICE_CLASSES_SCHEMA,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
SUPPORT_STOP_TILT,
CoverEntity,
)
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ENTITY_ID,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_FRIENDLY_NAME,
CONF_ICON_TEMPLATE,
CONF_OPTIMISTIC,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.script import Script
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_LOGGER = logging.getLogger(__name__)
_VALID_STATES = [
STATE_OPEN,
STATE_OPENING,
STATE_CLOSED,
STATE_CLOSING,
"true",
"false",
]
CONF_COVERS = "covers"
CONF_POSITION_TEMPLATE = "position_template"
CONF_TILT_TEMPLATE = "tilt_template"
OPEN_ACTION = "open_cover"
CLOSE_ACTION = "close_cover"
STOP_ACTION = "stop_cover"
POSITION_ACTION = "set_cover_position"
TILT_ACTION = "set_cover_tilt_position"
CONF_TILT_OPTIMISTIC = "tilt_optimistic"
CONF_VALUE_OR_POSITION_TEMPLATE = "value_or_position"
CONF_OPEN_OR_CLOSE = "open_or_close"
TILT_FEATURES = (
SUPPORT_OPEN_TILT
| SUPPORT_CLOSE_TILT
| SUPPORT_STOP_TILT
| SUPPORT_SET_TILT_POSITION
)
COVER_SCHEMA = vol.All(
cv.deprecated(CONF_ENTITY_ID),
vol.Schema(
{
vol.Inclusive(OPEN_ACTION, CONF_OPEN_OR_CLOSE): cv.SCRIPT_SCHEMA,
vol.Inclusive(CLOSE_ACTION, CONF_OPEN_OR_CLOSE): cv.SCRIPT_SCHEMA,
vol.Optional(STOP_ACTION): cv.SCRIPT_SCHEMA,
vol.Exclusive(
CONF_POSITION_TEMPLATE, CONF_VALUE_OR_POSITION_TEMPLATE
): cv.template,
vol.Exclusive(
CONF_VALUE_TEMPLATE, CONF_VALUE_OR_POSITION_TEMPLATE
): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Optional(CONF_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_TILT_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_TILT_OPTIMISTIC): cv.boolean,
vol.Optional(POSITION_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(TILT_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_FRIENDLY_NAME): cv.string,
vol.Optional(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
),
cv.has_at_least_one_key(OPEN_ACTION, POSITION_ACTION),
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_COVERS): cv.schema_with_slug_keys(COVER_SCHEMA)}
)
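# Illustrative YAML sketch accepted by the schema above; the entity and script
# names are placeholders.
#
#   cover:
#     - platform: template
#       covers:
#         garage_door:
#           friendly_name: "Garage Door"
#           position_template: "{{ states('sensor.garage_door_position') }}"
#           open_cover:
#             service: script.open_garage_door
#           close_cover:
#             service: script.close_garage_door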
async def _async_create_entities(hass, config):
"""Create the Template cover."""
covers = []
for device, device_config in config[CONF_COVERS].items():
state_template = device_config.get(CONF_VALUE_TEMPLATE)
position_template = device_config.get(CONF_POSITION_TEMPLATE)
tilt_template = device_config.get(CONF_TILT_TEMPLATE)
icon_template = device_config.get(CONF_ICON_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
friendly_name = device_config.get(CONF_FRIENDLY_NAME, device)
device_class = device_config.get(CONF_DEVICE_CLASS)
open_action = device_config.get(OPEN_ACTION)
close_action = device_config.get(CLOSE_ACTION)
stop_action = device_config.get(STOP_ACTION)
position_action = device_config.get(POSITION_ACTION)
tilt_action = device_config.get(TILT_ACTION)
optimistic = device_config.get(CONF_OPTIMISTIC)
tilt_optimistic = device_config.get(CONF_TILT_OPTIMISTIC)
unique_id = device_config.get(CONF_UNIQUE_ID)
covers.append(
CoverTemplate(
hass,
device,
friendly_name,
device_class,
state_template,
position_template,
tilt_template,
icon_template,
entity_picture_template,
availability_template,
open_action,
close_action,
stop_action,
position_action,
tilt_action,
optimistic,
tilt_optimistic,
unique_id,
)
)
return covers
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Template cover."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class CoverTemplate(TemplateEntity, CoverEntity):
"""Representation of a Template cover."""
def __init__(
self,
hass,
device_id,
friendly_name,
device_class,
state_template,
position_template,
tilt_template,
icon_template,
entity_picture_template,
availability_template,
open_action,
close_action,
stop_action,
position_action,
tilt_action,
optimistic,
tilt_optimistic,
unique_id,
):
"""Initialize the Template cover."""
super().__init__(
availability_template=availability_template,
icon_template=icon_template,
entity_picture_template=entity_picture_template,
)
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._template = state_template
self._position_template = position_template
self._tilt_template = tilt_template
self._device_class = device_class
self._open_script = None
domain = __name__.split(".")[-2]
if open_action is not None:
self._open_script = Script(hass, open_action, friendly_name, domain)
self._close_script = None
if close_action is not None:
self._close_script = Script(hass, close_action, friendly_name, domain)
self._stop_script = None
if stop_action is not None:
self._stop_script = Script(hass, stop_action, friendly_name, domain)
self._position_script = None
if position_action is not None:
self._position_script = Script(hass, position_action, friendly_name, domain)
self._tilt_script = None
if tilt_action is not None:
self._tilt_script = Script(hass, tilt_action, friendly_name, domain)
self._optimistic = optimistic or (not state_template and not position_template)
self._tilt_optimistic = tilt_optimistic or not tilt_template
self._position = None
self._tilt_value = None
self._unique_id = unique_id
async def async_added_to_hass(self):
"""Register callbacks."""
if self._template:
self.add_template_attribute(
"_position", self._template, None, self._update_state
)
if self._position_template:
self.add_template_attribute(
"_position",
self._position_template,
None,
self._update_position,
none_on_template_error=True,
)
if self._tilt_template:
self.add_template_attribute(
"_tilt_value",
self._tilt_template,
None,
self._update_tilt,
none_on_template_error=True,
)
await super().async_added_to_hass()
@callback
def _update_state(self, result):
super()._update_state(result)
if isinstance(result, TemplateError):
self._position = None
return
state = str(result).lower()
if state in _VALID_STATES:
if state in ("true", STATE_OPEN):
self._position = 100
else:
self._position = 0
else:
_LOGGER.error(
"Received invalid cover is_on state: %s. Expected: %s",
state,
", ".join(_VALID_STATES),
)
self._position = None
@callback
def _update_position(self, result):
try:
state = float(result)
except ValueError as err:
_LOGGER.error(err)
self._position = None
return
if state < 0 or state > 100:
self._position = None
_LOGGER.error(
"Cover position value must be" " between 0 and 100." " Value was: %.2f",
state,
)
else:
self._position = state
@callback
def _update_tilt(self, result):
try:
state = float(result)
except ValueError as err:
_LOGGER.error(err)
self._tilt_value = None
return
if state < 0 or state > 100:
self._tilt_value = None
_LOGGER.error(
"Tilt value must be between 0 and 100. Value was: %.2f",
state,
)
else:
self._tilt_value = state
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this cover."""
return self._unique_id
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._position == 0
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
if self._position_template or self._position_script:
return self._position
return None
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._tilt_value
@property
def device_class(self):
"""Return the device class of the cover."""
return self._device_class
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE
if self._stop_script is not None:
supported_features |= SUPPORT_STOP
if self._position_script is not None:
supported_features |= SUPPORT_SET_POSITION
if self._tilt_script is not None:
supported_features |= TILT_FEATURES
return supported_features
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
if self._open_script:
await self._open_script.async_run(context=self._context)
elif self._position_script:
await self._position_script.async_run(
{"position": 100}, context=self._context
)
if self._optimistic:
self._position = 100
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
if self._close_script:
await self._close_script.async_run(context=self._context)
elif self._position_script:
await self._position_script.async_run(
{"position": 0}, context=self._context
)
if self._optimistic:
self._position = 0
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs):
"""Fire the stop action."""
if self._stop_script:
await self._stop_script.async_run(context=self._context)
async def async_set_cover_position(self, **kwargs):
"""Set cover position."""
self._position = kwargs[ATTR_POSITION]
await self._position_script.async_run(
{"position": self._position}, context=self._context
)
if self._optimistic:
self.async_write_ha_state()
async def async_open_cover_tilt(self, **kwargs):
"""Tilt the cover open."""
self._tilt_value = 100
await self._tilt_script.async_run(
{"tilt": self._tilt_value}, context=self._context
)
if self._tilt_optimistic:
self.async_write_ha_state()
async def async_close_cover_tilt(self, **kwargs):
"""Tilt the cover closed."""
self._tilt_value = 0
await self._tilt_script.async_run(
{"tilt": self._tilt_value}, context=self._context
)
if self._tilt_optimistic:
self.async_write_ha_state()
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
self._tilt_value = kwargs[ATTR_TILT_POSITION]
await self._tilt_script.async_run(
{"tilt": self._tilt_value}, context=self._context
)
if self._tilt_optimistic:
self.async_write_ha_state()
|
import datetime
from homeassistant.components import geonetnz_quakes
from homeassistant.components.geonetnz_quakes import DEFAULT_SCAN_INTERVAL
from homeassistant.components.geonetnz_quakes.sensor import (
ATTR_CREATED,
ATTR_LAST_UPDATE,
ATTR_LAST_UPDATE_SUCCESSFUL,
ATTR_REMOVED,
ATTR_STATUS,
ATTR_UPDATED,
)
from homeassistant.const import (
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.geonetnz_quakes import _generate_mock_feed_entry
CONFIG = {geonetnz_quakes.DOMAIN: {CONF_RADIUS: 200}}
async def test_setup(hass, legacy_patchable_time):
"""Test the general setup of the integration."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(38.0, -3.0),
locality="Locality 1",
attribution="Attribution 1",
time=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
mmi=5,
depth=10.5,
quality="best",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345", "Title 2", 20.5, (38.1, -3.1), magnitude=4.6
)
mock_entry_3 = _generate_mock_feed_entry(
"3456", "Title 3", 25.5, (38.2, -3.2), locality="Locality 3"
)
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (38.3, -3.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
assert await async_setup_component(hass, geonetnz_quakes.DOMAIN, CONFIG)
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
# 3 geolocation and 1 sensor entities
assert len(all_states) == 4
state = hass.states.get("sensor.geonet_nz_quakes_32_87336_117_22743")
assert state is not None
assert int(state.state) == 3
assert state.name == "GeoNet NZ Quakes (32.87336, -117.22743)"
attributes = state.attributes
assert attributes[ATTR_STATUS] == "OK"
assert attributes[ATTR_CREATED] == 3
assert attributes[ATTR_LAST_UPDATE].tzinfo == dt_util.UTC
assert attributes[ATTR_LAST_UPDATE_SUCCESSFUL].tzinfo == dt_util.UTC
assert attributes[ATTR_LAST_UPDATE] == attributes[ATTR_LAST_UPDATE_SUCCESSFUL]
assert attributes[ATTR_UNIT_OF_MEASUREMENT] == "quakes"
assert attributes[ATTR_ICON] == "mdi:pulse"
# Simulate an update - two existing, one new entry, one outdated entry
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_4, mock_entry_3]
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
state = hass.states.get("sensor.geonet_nz_quakes_32_87336_117_22743")
attributes = state.attributes
assert attributes[ATTR_CREATED] == 1
assert attributes[ATTR_UPDATED] == 2
assert attributes[ATTR_REMOVED] == 1
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
        # Simulate a failed update - this removes all entities
mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
state = hass.states.get("sensor.geonet_nz_quakes_32_87336_117_22743")
attributes = state.attributes
assert attributes[ATTR_REMOVED] == 3
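        # Recap of the feed statuses exercised above (summary comment, not
        # taken from the integration's docs): an "OK" update refreshes the
        # created/updated/removed counters, "OK_NO_DATA" leaves the entities
        # untouched, and an "ERROR" update removes all geolocation entities,
        # leaving only the status sensor behind.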
|
import socket
from logging import getLogger
from threading import Thread
from urllib.parse import urlsplit
from urllib.request import urlopen
from xmlrpc.client import Error
from xmlrpc.client import ServerProxy
from bs4 import BeautifulSoup
from django.contrib.sites.models import Site
from django.urls import reverse
from zinnia.flags import PINGBACK
from zinnia.settings import PROTOCOL
class URLRessources(object):
"""
    Object defining the resources of the website.
"""
def __init__(self):
self.current_site = Site.objects.get_current()
self.site_url = '%s://%s' % (PROTOCOL, self.current_site.domain)
self.blog_url = '%s%s' % (self.site_url,
reverse('zinnia:entry_archive_index'))
self.blog_feed = '%s%s' % (self.site_url,
reverse('zinnia:entry_feed'))
class DirectoryPinger(Thread):
"""
Threaded web directory pinger.
"""
def __init__(self, server_name, entries, timeout=10):
self.results = []
self.timeout = timeout
self.entries = entries
self.server_name = server_name
self.server = ServerProxy(self.server_name)
self.ressources = URLRessources()
super(DirectoryPinger, self).__init__()
self.start()
def run(self):
"""
Ping entries to a directory in a thread.
"""
logger = getLogger('zinnia.ping.directory')
socket.setdefaulttimeout(self.timeout)
for entry in self.entries:
reply = self.ping_entry(entry)
self.results.append(reply)
logger.info('%s : %s', self.server_name, reply['message'])
socket.setdefaulttimeout(None)
def ping_entry(self, entry):
"""
Ping an entry to a directory.
"""
entry_url = '%s%s' % (self.ressources.site_url,
entry.get_absolute_url())
categories = '|'.join([c.title for c in entry.categories.all()])
try:
reply = self.server.weblogUpdates.extendedPing(
self.ressources.current_site.name,
self.ressources.blog_url, entry_url,
self.ressources.blog_feed, categories)
except Exception:
try:
reply = self.server.weblogUpdates.ping(
self.ressources.current_site.name,
self.ressources.blog_url, entry_url,
categories)
except Exception:
reply = {'message': '%s is an invalid directory.' %
self.server_name,
'flerror': True}
return reply
class ExternalUrlsPinger(Thread):
"""
Threaded external URLs pinger.
"""
def __init__(self, entry, timeout=10):
self.results = []
self.entry = entry
self.timeout = timeout
self.ressources = URLRessources()
self.entry_url = '%s%s' % (self.ressources.site_url,
self.entry.get_absolute_url())
super(ExternalUrlsPinger, self).__init__()
self.start()
def run(self):
"""
Ping external URLs in a Thread.
"""
logger = getLogger('zinnia.ping.external_urls')
socket.setdefaulttimeout(self.timeout)
external_urls = self.find_external_urls(self.entry)
external_urls_pingable = self.find_pingback_urls(external_urls)
for url, server_name in external_urls_pingable.items():
reply = self.pingback_url(server_name, url)
self.results.append(reply)
logger.info('%s : %s', url, reply)
socket.setdefaulttimeout(None)
def is_external_url(self, url, site_url):
"""
Check if the URL is an external URL.
"""
url_splitted = urlsplit(url)
if not url_splitted.netloc:
return False
return url_splitted.netloc != urlsplit(site_url).netloc
def find_external_urls(self, entry):
"""
Find external URLs in an entry.
"""
soup = BeautifulSoup(entry.html_content, 'html.parser')
external_urls = [a['href'] for a in soup.find_all('a')
if self.is_external_url(
a['href'], self.ressources.site_url)]
return external_urls
def find_pingback_href(self, content):
"""
        Try to find a <link> markup pointing to the pingback URL.
"""
soup = BeautifulSoup(content, 'html.parser')
for link in soup.find_all('link'):
dict_attr = dict(link.attrs)
if 'rel' in dict_attr and 'href' in dict_attr:
for rel_type in dict_attr['rel']:
if rel_type.lower() == PINGBACK:
return dict_attr.get('href')
def find_pingback_urls(self, urls):
"""
        Find the pingback URL for each URL.
"""
pingback_urls = {}
for url in urls:
try:
page = urlopen(url)
headers = page.info()
server_url = headers.get('X-Pingback')
if not server_url:
content_type = headers.get('Content-Type', '').split(
';')[0].strip().lower()
if content_type in ['text/html', 'application/xhtml+xml']:
server_url = self.find_pingback_href(
page.read(5 * 1024))
if server_url:
server_url_splitted = urlsplit(server_url)
if not server_url_splitted.netloc:
url_splitted = urlsplit(url)
server_url = '%s://%s%s' % (url_splitted.scheme,
url_splitted.netloc,
server_url)
pingback_urls[url] = server_url
except IOError:
pass
return pingback_urls
def pingback_url(self, server_name, target_url):
"""
Do a pingback call for the target URL.
"""
try:
server = ServerProxy(server_name)
reply = server.pingback.ping(self.entry_url, target_url)
except (Error, socket.error):
reply = '%s cannot be pinged.' % target_url
return reply
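# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): ExternalUrlsPinger
# needs a configured Django project and starts a thread in __init__, so the
# lines below only illustrate the two pure helpers; the HTML sample and URLs
# are made up for the example.
#
#   pinger = ExternalUrlsPinger.__new__(ExternalUrlsPinger)  # skip __init__
#   pinger.is_external_url('http://other.org/p/', 'http://example.com')  # True
#   pinger.is_external_url('/relative/path/', 'http://example.com')      # False
#   html = '<link rel="pingback" href="http://other.org/xmlrpc/"/>'
#   pinger.find_pingback_href(html)  # -> 'http://other.org/xmlrpc/'
# ---------------------------------------------------------------------------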
|
from typing import List, Optional
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_ACTIVITY,
PRESET_BOOST,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import DOMAIN, GeniusHeatingZone
# GeniusHub Zones support: Off, Timer, Override/Boost, Footprint & Linked modes
HA_HVAC_TO_GH = {HVAC_MODE_OFF: "off", HVAC_MODE_HEAT: "timer"}
GH_HVAC_TO_HA = {v: k for k, v in HA_HVAC_TO_GH.items()}
HA_PRESET_TO_GH = {PRESET_ACTIVITY: "footprint", PRESET_BOOST: "override"}
GH_PRESET_TO_HA = {v: k for k, v in HA_PRESET_TO_GH.items()}
GH_ZONES = ["radiator", "wet underfloor"]
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Set up the Genius Hub climate entities."""
if discovery_info is None:
return
broker = hass.data[DOMAIN]["broker"]
async_add_entities(
[
GeniusClimateZone(broker, z)
for z in broker.client.zone_objs
if z.data["type"] in GH_ZONES
]
)
class GeniusClimateZone(GeniusHeatingZone, ClimateEntity):
"""Representation of a Genius Hub climate device."""
def __init__(self, broker, zone) -> None:
"""Initialize the climate device."""
super().__init__(broker, zone)
self._max_temp = 28.0
self._min_temp = 4.0
self._supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
@property
def icon(self) -> str:
"""Return the icon to use in the frontend UI."""
return "mdi:radiator"
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode."""
return GH_HVAC_TO_HA.get(self._zone.data["mode"], HVAC_MODE_HEAT)
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes."""
return list(HA_HVAC_TO_GH)
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported."""
if "_state" in self._zone.data: # only for v3 API
if not self._zone.data["_state"].get("bIsActive"):
return CURRENT_HVAC_OFF
if self._zone.data["_state"].get("bOutRequestHeat"):
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
return None
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp."""
return GH_PRESET_TO_HA.get(self._zone.data["mode"])
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes."""
if "occupied" in self._zone.data: # if has a movement sensor
return [PRESET_ACTIVITY, PRESET_BOOST]
return [PRESET_BOOST]
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set a new hvac mode."""
await self._zone.set_mode(HA_HVAC_TO_GH.get(hvac_mode))
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set a new preset mode."""
await self._zone.set_mode(HA_PRESET_TO_GH.get(preset_mode, "timer"))
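# Hedged illustration of the translation tables above (no hub required; just
# dictionary lookups on the constants defined in this module):
#
#   GH_HVAC_TO_HA.get("off")                        # -> HVAC_MODE_OFF
#   GH_HVAC_TO_HA.get("footprint", HVAC_MODE_HEAT)  # -> HVAC_MODE_HEAT (fallback)
#   GH_PRESET_TO_HA.get("footprint")                # -> PRESET_ACTIVITY
#
# i.e. a zone the hub reports as "footprint" is surfaced as heating with the
# "activity" preset, while "off" maps straight to HVAC_MODE_OFF.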
|
import sys
from mne.utils import run_subprocess
run_script = """
import sys
import mne
out = set()
# check scipy
ok_scipy_submodules = set(['scipy', 'numpy', # these appear in old scipy
'fftpack', 'lib', 'linalg', 'fft',
'misc', 'sparse', 'version'])
scipy_submodules = set(x.split('.')[1] for x in sys.modules.keys()
if x.startswith('scipy.') and '__' not in x and
not x.split('.')[1].startswith('_')
and sys.modules[x] is not None)
bad = scipy_submodules - ok_scipy_submodules
if len(bad) > 0:
out |= {'scipy submodules: %s' % list(bad)}
# check sklearn and others
for x in sys.modules.keys():
for key in ('sklearn', 'pandas', 'mayavi', 'pyvista', 'matplotlib',
'dipy', 'nibabel', 'cupy', 'picard', 'pyvistaqt'):
if x.startswith(key):
out |= {key}
if len(out) > 0:
print('\\nFound un-nested import(s) for %s' % (sorted(out),), end='')
exit(len(out))
"""
def test_module_nesting():
"""Test that module imports are properly nested."""
stdout, stderr, code = run_subprocess([sys.executable, '-c', run_script],
return_code=True)
assert code == 0, stdout + stderr
|
import itertools
from scattertext import ParsedCorpus
class CorpusSentenceIterator(object):
@staticmethod
def get_sentences(corpus):
'''
Parameters
----------
corpus, ParsedCorpus
Returns
-------
iter: [sentence1word1, ...], [sentence2word1, ...]
'''
assert isinstance(corpus, ParsedCorpus)
return itertools.chain(*[[[corpus._term_idx_store.getidxstrict(t.lower_) for t in sent
if not t.is_punct]
for sent in doc.sents]
for doc in corpus.get_parsed_docs()])
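# Hedged usage sketch (the corpus construction below is illustrative and
# assumes scattertext's CorpusFromParsedDocuments builder plus a spaCy-parsed
# dataframe; the column names are made up):
#
#   corpus = CorpusFromParsedDocuments(df, category_col='speaker',
#                                      parsed_col='parse').build()
#   for sentence in CorpusSentenceIterator.get_sentences(corpus):
#       # each sentence is a list of integer term indices, punctuation dropped
#       print(sentence[:5])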
|
import datetime
import os
import re
import sys
import pytest
import coverage
from coverage.backunittest import TestCase, unittest
from coverage.files import actual_path
from coverage.misc import StopEverything
import coverage.optional
from tests.coveragetest import CoverageTest, convert_skip_exceptions
from tests.helpers import arcs_to_arcz_repr, arcz_to_arcs
from tests.helpers import CheckUniqueFilenames, re_lines, re_line
def test_xdist_sys_path_nuttiness_is_fixed():
# See conftest.py:fix_xdist_sys_path
assert sys.path[1] != ''
assert os.environ.get('PYTHONPATH') is None
class TestingTest(TestCase):
"""Tests of helper methods on `backunittest.TestCase`."""
def test_assert_count_equal(self):
self.assertCountEqual(set(), set())
self.assertCountEqual(set([1,2,3]), set([3,1,2]))
with self.assertRaises(AssertionError):
self.assertCountEqual(set([1,2,3]), set())
with self.assertRaises(AssertionError):
self.assertCountEqual(set([1,2,3]), set([4,5,6]))
class CoverageTestTest(CoverageTest):
"""Test the methods in `CoverageTest`."""
def test_file_exists(self):
self.make_file("whoville.txt", "We are here!")
self.assert_exists("whoville.txt")
self.assert_doesnt_exist("shadow.txt")
msg = "False is not true : File 'whoville.txt' shouldn't exist"
with self.assertRaisesRegex(AssertionError, msg):
self.assert_doesnt_exist("whoville.txt")
msg = "False is not true : File 'shadow.txt' should exist"
with self.assertRaisesRegex(AssertionError, msg):
self.assert_exists("shadow.txt")
def test_file_count(self):
self.make_file("abcde.txt", "abcde")
self.make_file("axczz.txt", "axczz")
self.make_file("afile.txt", "afile")
self.assert_file_count("a*.txt", 3)
self.assert_file_count("*c*.txt", 2)
self.assert_file_count("afile.*", 1)
self.assert_file_count("*.q", 0)
msg = re.escape(
"3 != 13 : There should be 13 files matching 'a*.txt', but there are these: "
"['abcde.txt', 'afile.txt', 'axczz.txt']"
)
with self.assertRaisesRegex(AssertionError, msg):
self.assert_file_count("a*.txt", 13)
msg = re.escape(
"2 != 12 : There should be 12 files matching '*c*.txt', but there are these: "
"['abcde.txt', 'axczz.txt']"
)
with self.assertRaisesRegex(AssertionError, msg):
self.assert_file_count("*c*.txt", 12)
msg = re.escape(
"1 != 11 : There should be 11 files matching 'afile.*', but there are these: "
"['afile.txt']"
)
with self.assertRaisesRegex(AssertionError, msg):
self.assert_file_count("afile.*", 11)
msg = re.escape(
"0 != 10 : There should be 10 files matching '*.q', but there are these: []"
)
with self.assertRaisesRegex(AssertionError, msg):
self.assert_file_count("*.q", 10)
def test_assert_startwith(self):
self.assert_starts_with("xyzzy", "xy")
self.assert_starts_with("xyz\nabc", "xy")
self.assert_starts_with("xyzzy", ("x", "z"))
msg = re.escape("'xyz' doesn't start with 'a'")
with self.assertRaisesRegex(AssertionError, msg):
self.assert_starts_with("xyz", "a")
msg = re.escape("'xyz\\nabc' doesn't start with 'a'")
with self.assertRaisesRegex(AssertionError, msg):
self.assert_starts_with("xyz\nabc", "a")
def test_assert_recent_datetime(self):
def now_delta(seconds):
"""Make a datetime `seconds` seconds from now."""
return datetime.datetime.now() + datetime.timedelta(seconds=seconds)
# Default delta is 10 seconds.
self.assert_recent_datetime(now_delta(0))
self.assert_recent_datetime(now_delta(-9))
with self.assertRaises(AssertionError):
self.assert_recent_datetime(now_delta(-11))
with self.assertRaises(AssertionError):
self.assert_recent_datetime(now_delta(1))
# Delta is settable.
self.assert_recent_datetime(now_delta(0), seconds=120)
self.assert_recent_datetime(now_delta(-100), seconds=120)
with self.assertRaises(AssertionError):
self.assert_recent_datetime(now_delta(-1000), seconds=120)
with self.assertRaises(AssertionError):
self.assert_recent_datetime(now_delta(1), seconds=120)
def test_assert_warnings(self):
cov = coverage.Coverage()
# Make a warning, it should catch it properly.
with self.assert_warnings(cov, ["Hello there!"]):
cov._warn("Hello there!")
# The expected warnings are regexes.
with self.assert_warnings(cov, ["Hello.*!"]):
cov._warn("Hello there!")
# There can be a bunch of actual warnings.
with self.assert_warnings(cov, ["Hello.*!"]):
cov._warn("You there?")
cov._warn("Hello there!")
# There can be a bunch of expected warnings.
with self.assert_warnings(cov, ["Hello.*!", "You"]):
cov._warn("You there?")
cov._warn("Hello there!")
# But if there are a bunch of expected warnings, they have to all happen.
warn_regex = r"Didn't find warning 'You' in \['Hello there!'\]"
with self.assertRaisesRegex(AssertionError, warn_regex):
with self.assert_warnings(cov, ["Hello.*!", "You"]):
cov._warn("Hello there!")
# Make a different warning than expected, it should raise an assertion.
warn_regex = r"Didn't find warning 'Not me' in \['Hello there!'\]"
with self.assertRaisesRegex(AssertionError, warn_regex):
with self.assert_warnings(cov, ["Not me"]):
cov._warn("Hello there!")
# Try checking a warning that shouldn't appear: happy case.
with self.assert_warnings(cov, ["Hi"], not_warnings=["Bye"]):
cov._warn("Hi")
# But it should fail if the unexpected warning does appear.
warn_regex = r"Found warning 'Bye' in \['Hi', 'Bye'\]"
with self.assertRaisesRegex(AssertionError, warn_regex):
with self.assert_warnings(cov, ["Hi"], not_warnings=["Bye"]):
cov._warn("Hi")
cov._warn("Bye")
# assert_warnings shouldn't hide a real exception.
with self.assertRaisesRegex(ZeroDivisionError, "oops"):
with self.assert_warnings(cov, ["Hello there!"]):
raise ZeroDivisionError("oops")
def test_assert_no_warnings(self):
cov = coverage.Coverage()
# Happy path: no warnings.
with self.assert_warnings(cov, []):
pass
# If you said there would be no warnings, and there were, fail!
warn_regex = r"Unexpected warnings: \['Watch out!'\]"
with self.assertRaisesRegex(AssertionError, warn_regex):
with self.assert_warnings(cov, []):
cov._warn("Watch out!")
def test_sub_python_is_this_python(self):
# Try it with a Python command.
self.set_environ('COV_FOOBAR', 'XYZZY')
self.make_file("showme.py", """\
import os, sys
print(sys.executable)
print(os.__file__)
print(os.environ['COV_FOOBAR'])
""")
out = self.run_command("python showme.py").splitlines()
self.assertEqual(actual_path(out[0]), actual_path(sys.executable))
self.assertEqual(out[1], os.__file__)
self.assertEqual(out[2], 'XYZZY')
# Try it with a "coverage debug sys" command.
out = self.run_command("coverage debug sys")
executable = re_line(out, "executable:")
executable = executable.split(":", 1)[1].strip()
self.assertTrue(_same_python_executable(executable, sys.executable))
# "environment: COV_FOOBAR = XYZZY" or "COV_FOOBAR = XYZZY"
environ = re_line(out, "COV_FOOBAR")
_, _, environ = environ.rpartition(":")
self.assertEqual(environ.strip(), "COV_FOOBAR = XYZZY")
def test_run_command_stdout_stderr(self):
# run_command should give us both stdout and stderr.
self.make_file("outputs.py", """\
import sys
sys.stderr.write("StdErr\\n")
print("StdOut")
""")
out = self.run_command("python outputs.py")
self.assertIn("StdOut\n", out)
self.assertIn("StdErr\n", out)
class CheckUniqueFilenamesTest(CoverageTest):
"""Tests of CheckUniqueFilenames."""
run_in_temp_dir = False
class Stub(object):
"""A stand-in for the class we're checking."""
def __init__(self, x):
self.x = x
def method(self, filename, a=17, b="hello"):
"""The method we'll wrap, with args to be sure args work."""
return (self.x, filename, a, b)
def test_detect_duplicate(self):
stub = self.Stub(23)
CheckUniqueFilenames.hook(stub, "method")
# Two method calls with different names are fine.
assert stub.method("file1") == (23, "file1", 17, "hello")
assert stub.method("file2", 1723, b="what") == (23, "file2", 1723, "what")
# A duplicate file name trips an assertion.
with self.assertRaises(AssertionError):
stub.method("file1")
@pytest.mark.parametrize("text, pat, result", [
("line1\nline2\nline3\n", "line", "line1\nline2\nline3\n"),
("line1\nline2\nline3\n", "[13]", "line1\nline3\n"),
("line1\nline2\nline3\n", "X", ""),
])
def test_re_lines(text, pat, result):
assert re_lines(text, pat) == result
@pytest.mark.parametrize("text, pat, result", [
("line1\nline2\nline3\n", "line", ""),
("line1\nline2\nline3\n", "[13]", "line2\n"),
("line1\nline2\nline3\n", "X", "line1\nline2\nline3\n"),
])
def test_re_lines_inverted(text, pat, result):
assert re_lines(text, pat, match=False) == result
@pytest.mark.parametrize("text, pat, result", [
("line1\nline2\nline3\n", "2", "line2"),
])
def test_re_line(text, pat, result):
assert re_line(text, pat) == result
@pytest.mark.parametrize("text, pat", [
("line1\nline2\nline3\n", "line"), # too many matches
("line1\nline2\nline3\n", "X"), # no matches
])
def test_re_line_bad(text, pat):
with pytest.raises(AssertionError):
re_line(text, pat)
def test_convert_skip_exceptions():
@convert_skip_exceptions
def some_method(ret=None, exc=None):
"""Be like a test case."""
if exc:
raise exc("yikes!")
return ret
# Normal flow is normal.
assert some_method(ret=[17, 23]) == [17, 23]
# Exceptions are raised normally.
with pytest.raises(ValueError):
some_method(exc=ValueError)
# But a StopEverything becomes a SkipTest.
with pytest.raises(unittest.SkipTest):
some_method(exc=StopEverything)
def _same_python_executable(e1, e2):
"""Determine if `e1` and `e2` refer to the same Python executable.
Either path could include symbolic links. The two paths might not refer
to the exact same file, but if they are in the same directory and their
numeric suffixes aren't different, they are the same executable.
"""
e1 = os.path.abspath(os.path.realpath(e1))
e2 = os.path.abspath(os.path.realpath(e2))
if os.path.dirname(e1) != os.path.dirname(e2):
return False # pragma: only failure
e1 = os.path.basename(e1)
e2 = os.path.basename(e2)
if e1 == "python" or e2 == "python" or e1 == e2:
# Python and Python2.3: OK
# Python2.3 and Python: OK
# Python and Python: OK
# Python2.3 and Python2.3: OK
return True
return False # pragma: only failure
def test_optional_without():
# pylint: disable=reimported
from coverage.optional import toml as toml1
with coverage.optional.without('toml'):
from coverage.optional import toml as toml2
from coverage.optional import toml as toml3
assert toml1 is toml3 is not None
assert toml2 is None
@pytest.mark.parametrize("arcz, arcs", [
(".1 12 2.", [(-1, 1), (1, 2), (2, -1)]),
("-11 12 2-5", [(-1, 1), (1, 2), (2, -5)]),
("-QA CB IT Z-A", [(-26, 10), (12, 11), (18, 29), (35, -10)]),
])
def test_arcz_to_arcs(arcz, arcs):
assert arcz_to_arcs(arcz) == arcs
@pytest.mark.parametrize("arcs, arcz_repr", [
([(-1, 1), (1, 2), (2, -1)], "(-1, 1) # .1\n(1, 2) # 12\n(2, -1) # 2.\n"),
([(-1, 1), (1, 2), (2, -5)], "(-1, 1) # .1\n(1, 2) # 12\n(2, -5) # 2-5\n"),
([(-26, 10), (12, 11), (18, 29), (35, -10), (1, 33), (100, 7)],
(
"(-26, 10) # -QA\n"
"(12, 11) # CB\n"
"(18, 29) # IT\n"
"(35, -10) # Z-A\n"
"(1, 33) # 1X\n"
"(100, 7) # ?7\n"
)
),
])
def test_arcs_to_arcz_repr(arcs, arcz_repr):
assert arcs_to_arcz_repr(arcs) == arcz_repr
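# A hedged illustration of the "arcz" shorthand exercised above, inferred only
# from the parametrized cases (not from tests/helpers.py itself): each arc
# endpoint is one character, '.' meaning -1, '0'-'9' and 'A'-'Z' meaning 0-35,
# with a leading '-' negating the following character; numbers that do not fit
# render as '?' in arcs_to_arcz_repr. The toy decoder below covers only the
# cases shown in the tests.
def _demo_decode_arcz_pair(pair):
    """Decode a two-endpoint arcz token such as '.1', '-QA' or 'Z-A'."""
    def take(s):
        sign = 1
        if s[0] == '-':
            sign, s = -1, s[1:]
        ch, rest = s[0], s[1:]
        value = -1 if ch == '.' else int(ch, 36)
        return sign * value, rest
    first, rest = take(pair)
    second, _ = take(rest)
    return first, second

assert _demo_decode_arcz_pair(".1") == (-1, 1)
assert _demo_decode_arcz_pair("-QA") == (-26, 10)
assert _demo_decode_arcz_pair("Z-A") == (35, -10)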
|
import doctest
import re
from os import linesep
from logilab.common import textutils as tu
from logilab.common.testlib import TestCase, unittest_main
if linesep != '\n':
import re
LINE_RGX = re.compile(linesep)
def ulines(string):
return LINE_RGX.sub('\n', string)
else:
def ulines(string):
return string
class NormalizeTextTC(TestCase):
def test_known_values(self):
self.assertEqual(ulines(tu.normalize_text('''some really malformated
text.
With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes
and empty lines!
''')),
'''some really malformated text. With some times some
veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong
linnnnnnnnnnnes
and empty lines!''')
self.assertMultiLineEqual(ulines(tu.normalize_text('''\
some ReST formated text
=======================
With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy loooooooooooooooooooooong linnnnnnnnnnnes
and normal lines!
another paragraph
''', rest=True)),
'''\
some ReST formated text
=======================
With some times some veeeeeeeeeeeeeeerrrrryyyyyyyyyyyyyyyyyyy
loooooooooooooooooooooong linnnnnnnnnnnes
and normal lines!
another paragraph''')
def test_nonregr_unsplitable_word(self):
self.assertEqual(ulines(tu.normalize_text('''petit complement :
http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n
''', 80)),
'''petit complement :
http://www.plonefr.net/blog/archive/2005/10/30/tester-la-future-infrastructure-i18n''')
def test_nonregr_rest_normalize(self):
self.assertEqual(ulines(tu.normalize_text("""... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et aller discuter avec les autres si c'est utile ou necessaire.
""", rest=True)), """... Il est donc evident que tout le monde doit lire le compte-rendu de RSH et
aller discuter avec les autres si c'est utile ou necessaire.""")
def test_normalize_rest_paragraph(self):
self.assertEqual(ulines(tu.normalize_rest_paragraph("""**nico**: toto""")),
"""**nico**: toto""")
def test_normalize_rest_paragraph2(self):
self.assertEqual(ulines(tu.normalize_rest_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')),
"""> .. _tdm:
> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
> .. _extrait:
> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""")
def test_normalize_paragraph2(self):
self.assertEqual(ulines(tu.normalize_paragraph(""".. _tdm: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
.. _extrait: http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""", indent='> ')),
"""> .. _tdm:
> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Table-des-matieres/.20_adaa41fb-c125-4919-aece-049601e81c8e_0_0.pdf
> .. _extrait:
> http://www.editions-eni.fr/Livres/Python-Les-fondamentaux-du-langage---La-programmation-pour-les-scientifiques-Extrait-du-livre/.20_d6eed0be-0d36-4384-be59-2dd09e081012_0_0.pdf""")
class NormalizeParagraphTC(TestCase):
def test_known_values(self):
self.assertEqual(ulines(tu.normalize_text("""This package contains test files shared by the logilab-common package. It isn't
necessary to install this package unless you want to execute or look at
the tests.""", indent=' ', line_len=70)),
"""\
This package contains test files shared by the logilab-common
package. It isn't necessary to install this package unless you want
to execute or look at the tests.""")
class GetCsvTC(TestCase):
def test_known(self):
self.assertEqual(tu.splitstrip('a, b,c '), ['a', 'b', 'c'])
class UnitsTC(TestCase):
def setUp(self):
self.units = {
'm': 60,
'kb': 1024,
'mb': 1024*1024,
}
def test_empty_base(self):
self.assertEqual(tu.apply_units('17', {}), 17)
def test_empty_inter(self):
def inter(value):
return int(float(value)) * 2
result = tu.apply_units('12.4', {}, inter=inter)
self.assertEqual(result, 12 * 2)
self.assertIsInstance(result, float)
def test_empty_final(self):
        # int('12.4') raises ValueError
self.assertRaises(ValueError, tu.apply_units, '12.4', {}, final=int)
def test_empty_inter_final(self):
result = tu.apply_units('12.4', {}, inter=float, final=int)
self.assertEqual(result, 12)
self.assertIsInstance(result, int)
def test_blank_base(self):
result = tu.apply_units(' 42 ', {}, final=int)
self.assertEqual(result, 42)
def test_blank_space(self):
result = tu.apply_units(' 1 337 ', {}, final=int)
self.assertEqual(result, 1337)
def test_blank_coma(self):
result = tu.apply_units(' 4,298.42 ', {})
self.assertEqual(result, 4298.42)
def test_blank_mixed(self):
result = tu.apply_units('45, 317, 337', {}, final=int)
self.assertEqual(result, 45317337)
def test_unit_singleunit_singleletter(self):
result = tu.apply_units('15m', self.units)
self.assertEqual(result, 15 * self.units['m'] )
def test_unit_singleunit_multipleletter(self):
result = tu.apply_units('47KB', self.units)
self.assertEqual(result, 47 * self.units['kb'] )
def test_unit_singleunit_caseinsensitive(self):
result = tu.apply_units('47kb', self.units)
self.assertEqual(result, 47 * self.units['kb'] )
def test_unit_multipleunit(self):
result = tu.apply_units('47KB 1.5MB', self.units)
self.assertEqual(result, 47 * self.units['kb'] + 1.5 * self.units['mb'])
def test_unit_with_blank(self):
result = tu.apply_units('1 000 KB', self.units)
self.assertEqual(result, 1000 * self.units['kb'])
def test_unit_wrong_input(self):
self.assertRaises(ValueError, tu.apply_units, '', self.units)
self.assertRaises(ValueError, tu.apply_units, 'wrong input', self.units)
self.assertRaises(ValueError, tu.apply_units, 'wrong13 input', self.units)
self.assertRaises(ValueError, tu.apply_units, 'wrong input42', self.units)
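# Compact restatement of the apply_units() behaviour exercised above (derived
# from these tests rather than from the library docs, so treat it as a hedged
# summary): blanks and thousands separators are stripped from the number, the
# trailing unit token is matched case-insensitively against the given mapping,
# and `inter`/`final` control the intermediate and final numeric conversions:
#
#   tu.apply_units('1.5MB', {'mb': 1024 * 1024})         # -> 1572864.0
#   tu.apply_units(' 4,298.42 ', {})                     # -> 4298.42
#   tu.apply_units('12.4', {}, inter=float, final=int)   # -> 12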
RGX = re.compile('abcd')
class PrettyMatchTC(TestCase):
def test_known(self):
string = 'hiuherabcdef'
self.assertEqual(ulines(tu.pretty_match(RGX.search(string), string)),
'hiuherabcdef\n ^^^^')
def test_known_values_1(self):
rgx = re.compile('(to*)')
string = 'toto'
match = rgx.search(string)
self.assertEqual(ulines(tu.pretty_match(match, string)), '''toto
^^''')
def test_known_values_2(self):
rgx = re.compile('(to*)')
string = ''' ... ... to to
... ... '''
match = rgx.search(string)
self.assertEqual(ulines(tu.pretty_match(match, string)), ''' ... ... to to
^^
... ...''')
class UnquoteTC(TestCase):
def test(self):
self.assertEqual(tu.unquote('"toto"'), 'toto')
self.assertEqual(tu.unquote("'l'inenarrable toto'"), "l'inenarrable toto")
self.assertEqual(tu.unquote("no quote"), "no quote")
class ColorizeAnsiTC(TestCase):
def test_known(self):
self.assertEqual(tu.colorize_ansi('hello', 'blue', 'strike'), '\x1b[9;34mhello\x1b[0m')
self.assertEqual(tu.colorize_ansi('hello', style='strike, inverse'), '\x1b[9;7mhello\x1b[0m')
self.assertEqual(tu.colorize_ansi('hello', None, None), 'hello')
self.assertEqual(tu.colorize_ansi('hello', '', ''), 'hello')
def test_raise(self):
self.assertRaises(KeyError, tu.colorize_ansi, 'hello', 'bleu', None)
self.assertRaises(KeyError, tu.colorize_ansi, 'hello', None, 'italique')
class UnormalizeTC(TestCase):
def test_unormalize_no_substitute(self):
data = [(u'\u0153nologie', u'oenologie'),
(u'\u0152nologie', u'OEnologie'),
(u'l\xf8to', u'loto'),
(u'été', u'ete'),
(u'àèùéïîôêç', u'aeueiioec'),
(u'ÀÈÙÉÏÎÔÊÇ', u'AEUEIIOEC'),
(u'\xa0', u' '), # NO-BREAK SPACE managed by NFKD decomposition
(u'\u0154', u'R'),
(u'Pointe d\u2019Yves', u"Pointe d'Yves"),
(u'Bordeaux\u2013Mérignac', u'Bordeaux-Merignac'),
]
for input, output in data:
yield self.assertEqual, tu.unormalize(input), output
def test_unormalize_substitute(self):
self.assertEqual(tu.unormalize(u'ab \u8000 cd', substitute='_'),
'ab _ cd')
def test_unormalize_backward_compat(self):
self.assertRaises(ValueError, tu.unormalize, u"\u8000")
self.assertEqual(tu.unormalize(u"\u8000", substitute=''), u'')
def load_tests(loader, tests, ignore):
tests.addTests(doctest.DocTestSuite(tu))
return tests
if __name__ == '__main__':
unittest_main()
|
import cherrypy
from cherrypy.test import helper
class IteratorBase(object):
created = 0
datachunk = 'butternut squash' * 256
@classmethod
def incr(cls):
cls.created += 1
@classmethod
def decr(cls):
cls.created -= 1
class OurGenerator(IteratorBase):
def __iter__(self):
self.incr()
try:
for i in range(1024):
yield self.datachunk
finally:
self.decr()
class OurIterator(IteratorBase):
started = False
closed_off = False
count = 0
def increment(self):
self.incr()
def decrement(self):
if not self.closed_off:
self.closed_off = True
self.decr()
def __iter__(self):
return self
def __next__(self):
if not self.started:
self.started = True
self.increment()
self.count += 1
if self.count > 1024:
raise StopIteration
return self.datachunk
next = __next__
def __del__(self):
self.decrement()
class OurClosableIterator(OurIterator):
def close(self):
self.decrement()
class OurNotClosableIterator(OurIterator):
# We can't close something which requires an additional argument.
def close(self, somearg):
self.decrement()
class OurUnclosableIterator(OurIterator):
close = 'close' # not callable!
class IteratorTest(helper.CPWebCase):
@staticmethod
def setup_server():
class Root(object):
@cherrypy.expose
def count(self, clsname):
cherrypy.response.headers['Content-Type'] = 'text/plain'
return str(globals()[clsname].created)
@cherrypy.expose
def getall(self, clsname):
cherrypy.response.headers['Content-Type'] = 'text/plain'
return globals()[clsname]()
@cherrypy.expose
@cherrypy.config(**{'response.stream': True})
def stream(self, clsname):
return self.getall(clsname)
cherrypy.tree.mount(Root())
def test_iterator(self):
try:
self._test_iterator()
except Exception:
'Test fails intermittently. See #1419'
def _test_iterator(self):
if cherrypy.server.protocol_version != 'HTTP/1.1':
return self.skip()
self.PROTOCOL = 'HTTP/1.1'
# Check the counts of all the classes, they should be zero.
closables = ['OurClosableIterator', 'OurGenerator']
unclosables = ['OurUnclosableIterator', 'OurNotClosableIterator']
all_classes = closables + unclosables
import random
random.shuffle(all_classes)
for clsname in all_classes:
self.getPage('/count/' + clsname)
self.assertStatus(200)
self.assertBody('0')
        # We could also read the entire content body successfully,
        # but here we only want to check the Content-Length header.
for clsname in all_classes:
itr_conn = self.get_conn()
itr_conn.putrequest('GET', '/getall/' + clsname)
itr_conn.endheaders()
response = itr_conn.getresponse()
self.assertEqual(response.status, 200)
headers = response.getheaders()
for header_name, header_value in headers:
if header_name.lower() == 'content-length':
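                    # 1024 chunks, each len('butternut squash') * 256
                    # == 4096 bytes, i.e. 4 MiB in total.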
expected = str(1024 * 16 * 256)
assert header_value == expected, header_value
break
else:
raise AssertionError('No Content-Length header found')
# As the response should be fully consumed by CherryPy
# before sending back, the count should still be at zero
# by the time the response has been sent.
self.getPage('/count/' + clsname)
self.assertStatus(200)
self.assertBody('0')
# Now we do the same check with streaming - some classes will
# be automatically closed, while others cannot.
stream_counts = {}
for clsname in all_classes:
itr_conn = self.get_conn()
itr_conn.putrequest('GET', '/stream/' + clsname)
itr_conn.endheaders()
response = itr_conn.getresponse()
self.assertEqual(response.status, 200)
response.fp.read(65536)
# Let's check the count - this should always be one.
self.getPage('/count/' + clsname)
self.assertBody('1')
# Now if we close the connection, the count should go back
# to zero.
itr_conn.close()
self.getPage('/count/' + clsname)
# If this is a response which should be easily closed, then
# we will test to see if the value has gone back down to
# zero.
if clsname in closables:
# Sometimes we try to get the answer too quickly - we
# will wait for 100 ms before asking again if we didn't
# get the answer we wanted.
if self.body != '0':
import time
time.sleep(0.1)
self.getPage('/count/' + clsname)
stream_counts[clsname] = int(self.body)
# Check that we closed off the classes which should provide
# easy mechanisms for doing so.
for clsname in closables:
assert stream_counts[clsname] == 0, (
'did not close off stream response correctly, expected '
'count of zero for %s: %s' % (clsname, stream_counts)
)
|
import urwid
def exit_on_q(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
palette = [
('banner', 'black', 'light gray'),
('streak', 'black', 'dark red'),
('bg', 'black', 'dark blue'),]
txt = urwid.Text(('banner', u" Hello World "), align='center')
map1 = urwid.AttrMap(txt, 'streak')
fill = urwid.Filler(map1)
map2 = urwid.AttrMap(fill, 'bg')
loop = urwid.MainLoop(map2, palette, unhandled_input=exit_on_q)
loop.run()
|
import pytest
from PyQt5.QtCore import Qt
from qutebrowser.mainwindow import messageview
from qutebrowser.utils import usertypes
@pytest.fixture
def view(qtbot, config_stub):
config_stub.val.messages.timeout = 100
mv = messageview.MessageView()
qtbot.add_widget(mv)
return mv
@pytest.mark.parametrize('level', [usertypes.MessageLevel.info,
usertypes.MessageLevel.warning,
usertypes.MessageLevel.error])
@pytest.mark.flaky # on macOS
def test_single_message(qtbot, view, level):
with qtbot.waitExposed(view, timeout=5000):
view.show_message(level, 'test')
assert view._messages[0].isVisible()
def test_message_hiding(qtbot, view):
"""Messages should be hidden after the timer times out."""
with qtbot.waitSignal(view._clear_timer.timeout):
view.show_message(usertypes.MessageLevel.info, 'test')
assert not view._messages
def test_size_hint(view):
"""The message height should increase with more messages."""
view.show_message(usertypes.MessageLevel.info, 'test1')
height1 = view.sizeHint().height()
assert height1 > 0
view.show_message(usertypes.MessageLevel.info, 'test2')
height2 = view.sizeHint().height()
assert height2 == height1 * 2
def test_word_wrap(view, qtbot):
"""A long message should be wrapped."""
with qtbot.waitSignal(view._clear_timer.timeout):
view.show_message(usertypes.MessageLevel.info, 'short')
height1 = view.sizeHint().height()
assert height1 > 0
text = ("Athene, the bright-eyed goddess, answered him at once: Father of "
"us all, Son of Cronos, Highest King, clearly that man deserved to be "
"destroyed: so let all be destroyed who act as he did. But my heart aches "
"for Odysseus, wise but ill fated, who suffers far from his friends on an "
"island deep in the sea.")
view.show_message(usertypes.MessageLevel.info, text)
height2 = view.sizeHint().height()
assert height2 > height1
assert view._messages[0].wordWrap()
def test_show_message_twice(view):
"""Show the same message twice -> only one should be shown."""
view.show_message(usertypes.MessageLevel.info, 'test')
view.show_message(usertypes.MessageLevel.info, 'test')
assert len(view._messages) == 1
def test_show_message_twice_after_first_disappears(qtbot, view):
"""Show the same message twice after the first is gone."""
with qtbot.waitSignal(view._clear_timer.timeout):
view.show_message(usertypes.MessageLevel.info, 'test')
# Just a sanity check
assert not view._messages
view.show_message(usertypes.MessageLevel.info, 'test')
assert len(view._messages) == 1
def test_changing_timer_with_messages_shown(qtbot, view, config_stub):
"""When we change messages.timeout, the timer should be restarted."""
config_stub.val.messages.timeout = 900000 # 15s
view.show_message(usertypes.MessageLevel.info, 'test')
with qtbot.waitSignal(view._clear_timer.timeout):
config_stub.val.messages.timeout = 100
@pytest.mark.parametrize('count, expected', [(1, 100), (3, 300),
(5, 500), (7, 500)])
def test_show_multiple_messages_longer(view, count, expected):
"""When there are multiple messages, messages should be shown longer.
There is an upper maximum to avoid messages never disappearing.
"""
for message_number in range(1, count+1):
view.show_message(usertypes.MessageLevel.info,
'test ' + str(message_number))
assert view._clear_timer.interval() == expected
@pytest.mark.parametrize('replace1, replace2, length', [
(False, False, 2), # Two stacked messages
(True, True, 1), # Two replaceable messages
(False, True, 2), # Stacked and replaceable
(True, False, 2), # Replaceable and stacked
])
def test_replaced_messages(view, replace1, replace2, length):
"""Show two stack=False messages which should replace each other."""
view.show_message(usertypes.MessageLevel.info, 'test', replace=replace1)
view.show_message(usertypes.MessageLevel.info, 'test 2', replace=replace2)
assert len(view._messages) == length
@pytest.mark.parametrize('button, count', [
(Qt.LeftButton, 0),
(Qt.MiddleButton, 0),
(Qt.RightButton, 0),
(Qt.BackButton, 2),
])
def test_click_messages(qtbot, view, button, count):
"""Messages should disappear when we click on them."""
view.show_message(usertypes.MessageLevel.info, 'test mouse click')
view.show_message(usertypes.MessageLevel.info, 'test mouse click 2')
qtbot.mousePress(view, button)
assert len(view._messages) == count
|
import argparse
from boto3 import session
def get_client(endpoint):
s = session.Session(
region_name="foo", aws_access_key_id="foo", aws_secret_access_key="bar"
)
client = s.client(service_name="dynamodb", endpoint_url=endpoint)
return client
def create_table(client, table_name):
return client.create_table(
TableName=table_name,
KeySchema=[
{"AttributeName": "task_id", "KeyType": "HASH"},
{"AttributeName": "timestamp", "KeyType": "RANGE"},
],
AttributeDefinitions=[
{"AttributeName": "task_id", "AttributeType": "S"},
{"AttributeName": "timestamp", "AttributeType": "N"},
],
ProvisionedThroughput={"ReadCapacityUnits": 123, "WriteCapacityUnits": 123},
)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Create a dummy dynamodb table")
parser.add_argument("endpoint", type=str, help="the dynamodb endpoint")
parser.add_argument("table_name", type=str, help="the name of the table to create")
args = parser.parse_args()
create_table(get_client(args.endpoint), args.table_name)
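    # Example invocation (hedged: the endpoint is a placeholder for a local
    # DynamoDB-compatible server, e.g. DynamoDB Local on port 8000):
    #
    #   python <this script> http://localhost:8000 task-results
    #
    # which creates a table keyed by (task_id HASH, timestamp RANGE).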
|
import contextlib
import functools
import itertools
from unittest import mock
from .stubs import VCRHTTPConnection, VCRHTTPSConnection
import http.client as httplib
import logging
log = logging.getLogger(__name__)
# Save some of the original types for the purposes of unpatching
_HTTPConnection = httplib.HTTPConnection
_HTTPSConnection = httplib.HTTPSConnection
# Try to save the original types for boto3
try:
from botocore.awsrequest import AWSHTTPSConnection, AWSHTTPConnection
except ImportError:
try:
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
_Boto3VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolBoto3HTTPConnection = cpool.HTTPConnection
_cpoolBoto3HTTPSConnection = cpool.HTTPSConnection
else:
_Boto3VerifiedHTTPSConnection = AWSHTTPSConnection
_cpoolBoto3HTTPConnection = AWSHTTPConnection
_cpoolBoto3HTTPSConnection = AWSHTTPSConnection
cpool = None
# Try to save the original types for urllib3
try:
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolHTTPConnection = cpool.HTTPConnection
_cpoolHTTPSConnection = cpool.HTTPSConnection
# Try to save the original types for requests
try:
if not cpool:
import requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolHTTPConnection = cpool.HTTPConnection
_cpoolHTTPSConnection = cpool.HTTPSConnection
# Try to save the original types for httplib2
try:
import httplib2
except ImportError: # pragma: no cover
pass
else:
_HTTPConnectionWithTimeout = httplib2.HTTPConnectionWithTimeout
_HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout
_SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION
# Try to save the original types for boto
try:
import boto.https_connection
except ImportError: # pragma: no cover
pass
else:
_CertValidatingHTTPSConnection = boto.https_connection.CertValidatingHTTPSConnection
# Try to save the original types for Tornado
try:
import tornado.simple_httpclient
except ImportError: # pragma: no cover
pass
else:
_SimpleAsyncHTTPClient_fetch_impl = tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
try:
import tornado.curl_httpclient
except ImportError: # pragma: no cover
pass
else:
_CurlAsyncHTTPClient_fetch_impl = tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
try:
import aiohttp.client
except ImportError: # pragma: no cover
pass
else:
_AiohttpClientSessionRequest = aiohttp.client.ClientSession._request
try:
import httpx
except ImportError: # pragma: no cover
pass
else:
_HttpxSyncClient_send = httpx.Client.send
_HttpxAsyncClient_send = httpx.AsyncClient.send
class CassettePatcherBuilder:
def _build_patchers_from_mock_triples_decorator(function):
@functools.wraps(function)
def wrapped(self, *args, **kwargs):
return self._build_patchers_from_mock_triples(function(self, *args, **kwargs))
return wrapped
def __init__(self, cassette):
self._cassette = cassette
self._class_to_cassette_subclass = {}
def build(self):
return itertools.chain(
self._httplib(),
self._requests(),
self._boto3(),
self._urllib3(),
self._httplib2(),
self._boto(),
self._tornado(),
self._aiohttp(),
self._httpx(),
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
)
def _build_patchers_from_mock_triples(self, mock_triples):
for args in mock_triples:
patcher = self._build_patcher(*args)
if patcher:
yield patcher
def _build_patcher(self, obj, patched_attribute, replacement_class):
if not hasattr(obj, patched_attribute):
return
return mock.patch.object(
obj, patched_attribute, self._recursively_apply_get_cassette_subclass(replacement_class)
)
def _recursively_apply_get_cassette_subclass(self, replacement_dict_or_obj):
"""One of the subtleties of this class is that it does not directly
replace HTTPSConnection with `VCRRequestsHTTPSConnection`, but a
subclass of the aforementioned class that has the `cassette`
class attribute assigned to `self._cassette`. This behavior is
necessary to properly support nested cassette contexts.
This function exists to ensure that we use the same class
object (reference) to patch everything that replaces
VCRRequestHTTP[S]Connection, but that we can talk about
patching them with the raw references instead, and without
worrying about exactly where the subclass with the relevant
value for `cassette` is first created.
        The function is recursive because it looks into dictionaries
and replaces class values at any depth with the subclass
described in the previous paragraph.
"""
if isinstance(replacement_dict_or_obj, dict):
for key, replacement_obj in replacement_dict_or_obj.items():
replacement_obj = self._recursively_apply_get_cassette_subclass(replacement_obj)
replacement_dict_or_obj[key] = replacement_obj
return replacement_dict_or_obj
if hasattr(replacement_dict_or_obj, "cassette"):
replacement_dict_or_obj = self._get_cassette_subclass(replacement_dict_or_obj)
return replacement_dict_or_obj
def _get_cassette_subclass(self, klass):
if klass.cassette is not None:
return klass
if klass not in self._class_to_cassette_subclass:
subclass = self._build_cassette_subclass(klass)
self._class_to_cassette_subclass[klass] = subclass
return self._class_to_cassette_subclass[klass]
def _build_cassette_subclass(self, base_class):
bases = (base_class,)
if not issubclass(base_class, object): # Check for old style class
bases += (object,)
return type(
"{}{}".format(base_class.__name__, self._cassette._path), bases, dict(cassette=self._cassette)
)
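    # Hedged illustration of the dynamic subclassing above (names are made up
    # and not part of vcrpy's API): a per-cassette subclass is just `type()`
    # with a `cassette` class attribute, so every patch shares one class
    # object per (base class, cassette) pair:
    #
    #   class FakeConnection:
    #       cassette = None
    #   patched = type("FakeConnection/example.yaml", (FakeConnection,),
    #                  dict(cassette=my_cassette))
    #   assert patched.cassette is my_cassette
    #   assert issubclass(patched, FakeConnection)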
@_build_patchers_from_mock_triples_decorator
def _httplib(self):
yield httplib, "HTTPConnection", VCRHTTPConnection
yield httplib, "HTTPSConnection", VCRHTTPSConnection
def _requests(self):
try:
from .stubs import requests_stubs
except ImportError: # pragma: no cover
return ()
return self._urllib3_patchers(cpool, requests_stubs)
@_build_patchers_from_mock_triples_decorator
def _boto3(self):
try:
# botocore using awsrequest
import botocore.awsrequest as cpool
except ImportError: # pragma: no cover
try:
# botocore using vendored requests
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs import boto3_stubs
yield self._urllib3_patchers(cpool, boto3_stubs)
else:
from .stubs import boto3_stubs
log.debug("Patching boto3 cpool with %s", cpool)
yield cpool.AWSHTTPConnectionPool, "ConnectionCls", boto3_stubs.VCRRequestsHTTPConnection
yield cpool.AWSHTTPSConnectionPool, "ConnectionCls", boto3_stubs.VCRRequestsHTTPSConnection
def _patched_get_conn(self, connection_pool_class, connection_class_getter):
get_conn = connection_pool_class._get_conn
@functools.wraps(get_conn)
def patched_get_conn(pool, timeout=None):
connection = get_conn(pool, timeout)
connection_class = (
pool.ConnectionCls if hasattr(pool, "ConnectionCls") else connection_class_getter()
)
# We need to make sure that we are actually providing a
# patched version of the connection class. This might not
# always be the case because the pool keeps previously
# used connections (which might actually be of a different
# class) around. This while loop will terminate because
# eventually the pool will run out of connections.
while not isinstance(connection, connection_class):
connection = get_conn(pool, timeout)
return connection
return patched_get_conn
def _patched_new_conn(self, connection_pool_class, connection_remover):
new_conn = connection_pool_class._new_conn
@functools.wraps(new_conn)
def patched_new_conn(pool):
new_connection = new_conn(pool)
connection_remover.add_connection_to_pool_entry(pool, new_connection)
return new_connection
return patched_new_conn
def _urllib3(self):
try:
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
return ()
from .stubs import urllib3_stubs
return self._urllib3_patchers(cpool, urllib3_stubs)
@_build_patchers_from_mock_triples_decorator
def _httplib2(self):
try:
import httplib2 as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs.httplib2_stubs import VCRHTTPConnectionWithTimeout
from .stubs.httplib2_stubs import VCRHTTPSConnectionWithTimeout
yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout
yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout
yield cpool, "SCHEME_TO_CONNECTION", {
"http": VCRHTTPConnectionWithTimeout,
"https": VCRHTTPSConnectionWithTimeout,
}
@_build_patchers_from_mock_triples_decorator
def _boto(self):
try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs.boto_stubs import VCRCertValidatingHTTPSConnection
yield cpool, "CertValidatingHTTPSConnection", VCRCertValidatingHTTPSConnection
@_build_patchers_from_mock_triples_decorator
def _tornado(self):
try:
import tornado.simple_httpclient as simple
except ImportError: # pragma: no cover
pass
else:
from .stubs.tornado_stubs import vcr_fetch_impl
new_fetch_impl = vcr_fetch_impl(self._cassette, _SimpleAsyncHTTPClient_fetch_impl)
yield simple.SimpleAsyncHTTPClient, "fetch_impl", new_fetch_impl
try:
import tornado.curl_httpclient as curl
except ImportError: # pragma: no cover
pass
else:
from .stubs.tornado_stubs import vcr_fetch_impl
new_fetch_impl = vcr_fetch_impl(self._cassette, _CurlAsyncHTTPClient_fetch_impl)
yield curl.CurlAsyncHTTPClient, "fetch_impl", new_fetch_impl
@_build_patchers_from_mock_triples_decorator
def _aiohttp(self):
try:
import aiohttp.client as client
except ImportError: # pragma: no cover
pass
else:
from .stubs.aiohttp_stubs import vcr_request
new_request = vcr_request(self._cassette, _AiohttpClientSessionRequest)
yield client.ClientSession, "_request", new_request
@_build_patchers_from_mock_triples_decorator
def _httpx(self):
try:
import httpx
except ImportError: # pragma: no cover
return
else:
from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send
new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send)
yield httpx.AsyncClient, "send", new_async_client_send
new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send)
yield httpx.Client, "send", new_sync_client_send
def _urllib3_patchers(self, cpool, stubs):
http_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection)
)
https_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection)
)
mock_triples = (
(cpool, "VerifiedHTTPSConnection", stubs.VCRRequestsHTTPSConnection),
(cpool, "HTTPConnection", stubs.VCRRequestsHTTPConnection),
(cpool, "HTTPSConnection", stubs.VCRRequestsHTTPSConnection),
(cpool, "is_connection_dropped", mock.Mock(return_value=False)), # Needed on Windows only
(cpool.HTTPConnectionPool, "ConnectionCls", stubs.VCRRequestsHTTPConnection),
(cpool.HTTPSConnectionPool, "ConnectionCls", stubs.VCRRequestsHTTPSConnection),
)
# These handle making sure that sessions only use the
# connections of the appropriate type.
mock_triples += (
(
cpool.HTTPConnectionPool,
"_get_conn",
self._patched_get_conn(cpool.HTTPConnectionPool, lambda: cpool.HTTPConnection),
),
(
cpool.HTTPSConnectionPool,
"_get_conn",
self._patched_get_conn(cpool.HTTPSConnectionPool, lambda: cpool.HTTPSConnection),
),
(
cpool.HTTPConnectionPool,
"_new_conn",
self._patched_new_conn(cpool.HTTPConnectionPool, http_connection_remover),
),
(
cpool.HTTPSConnectionPool,
"_new_conn",
self._patched_new_conn(cpool.HTTPSConnectionPool, https_connection_remover),
),
)
return itertools.chain(
self._build_patchers_from_mock_triples(mock_triples),
(http_connection_remover, https_connection_remover),
)
class ConnectionRemover:
def __init__(self, connection_class):
self._connection_class = connection_class
self._connection_pool_to_connections = {}
def add_connection_to_pool_entry(self, pool, connection):
if isinstance(connection, self._connection_class):
self._connection_pool_to_connections.setdefault(pool, set()).add(connection)
def remove_connection_to_pool_entry(self, pool, connection):
if isinstance(connection, self._connection_class):
            self._connection_pool_to_connections[pool].remove(connection)
def __enter__(self):
return self
def __exit__(self, *args):
for pool, connections in self._connection_pool_to_connections.items():
readd_connections = []
while pool.pool and not pool.pool.empty() and connections:
connection = pool.pool.get()
if isinstance(connection, self._connection_class):
connections.remove(connection)
else:
readd_connections.append(connection)
for connection in readd_connections:
pool._put_conn(connection)
def reset_patchers():
yield mock.patch.object(httplib, "HTTPConnection", _HTTPConnection)
yield mock.patch.object(httplib, "HTTPSConnection", _HTTPSConnection)
try:
import requests
if requests.__build__ < 0x021603:
# Avoid double unmock if requests 2.16.3
# First, this is pointless, requests.packages.urllib3 *IS* urllib3 (see packages.py)
# Second, this is unmocking twice the same classes with different namespaces
# and is creating weird issues and bugs:
# > AssertionError: assert <class 'urllib3.connection.HTTPConnection'>
# > is <class 'requests.packages.urllib3.connection.HTTPConnection'>
# This assert should work!!!
# Note that this also means that now, requests.packages is never imported
# if requests 2.16.3 or greater is used with VCRPy.
import requests.packages.urllib3.connectionpool as cpool
else:
raise ImportError("Skip requests not vendored anymore")
except ImportError: # pragma: no cover
pass
else:
# unpatch requests v1.x
yield mock.patch.object(cpool, "VerifiedHTTPSConnection", _VerifiedHTTPSConnection)
yield mock.patch.object(cpool, "HTTPConnection", _cpoolHTTPConnection)
# unpatch requests v2.x
if hasattr(cpool.HTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.HTTPConnectionPool, "ConnectionCls", _cpoolHTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, "ConnectionCls", _cpoolHTTPSConnection)
if hasattr(cpool, "HTTPSConnection"):
yield mock.patch.object(cpool, "HTTPSConnection", _cpoolHTTPSConnection)
try:
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, "VerifiedHTTPSConnection", _VerifiedHTTPSConnection)
yield mock.patch.object(cpool, "HTTPConnection", _cpoolHTTPConnection)
yield mock.patch.object(cpool, "HTTPSConnection", _cpoolHTTPSConnection)
if hasattr(cpool.HTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.HTTPConnectionPool, "ConnectionCls", _cpoolHTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, "ConnectionCls", _cpoolHTTPSConnection)
try:
# unpatch botocore with awsrequest
import botocore.awsrequest as cpool
except ImportError: # pragma: no cover
try:
# unpatch botocore with vendored requests
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
# unpatch requests v1.x
yield mock.patch.object(cpool, "VerifiedHTTPSConnection", _Boto3VerifiedHTTPSConnection)
yield mock.patch.object(cpool, "HTTPConnection", _cpoolBoto3HTTPConnection)
# unpatch requests v2.x
if hasattr(cpool.HTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.HTTPConnectionPool, "ConnectionCls", _cpoolBoto3HTTPConnection)
yield mock.patch.object(
cpool.HTTPSConnectionPool, "ConnectionCls", _cpoolBoto3HTTPSConnection
)
if hasattr(cpool, "HTTPSConnection"):
yield mock.patch.object(cpool, "HTTPSConnection", _cpoolBoto3HTTPSConnection)
else:
if hasattr(cpool.AWSHTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.AWSHTTPConnectionPool, "ConnectionCls", _cpoolBoto3HTTPConnection)
yield mock.patch.object(cpool.AWSHTTPSConnectionPool, "ConnectionCls", _cpoolBoto3HTTPSConnection)
if hasattr(cpool, "AWSHTTPSConnection"):
yield mock.patch.object(cpool, "AWSHTTPSConnection", _cpoolBoto3HTTPSConnection)
try:
import httplib2 as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, "HTTPConnectionWithTimeout", _HTTPConnectionWithTimeout)
yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout)
yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION)
try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, "CertValidatingHTTPSConnection", _CertValidatingHTTPSConnection)
try:
import tornado.simple_httpclient as simple
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(simple.SimpleAsyncHTTPClient, "fetch_impl", _SimpleAsyncHTTPClient_fetch_impl)
try:
import tornado.curl_httpclient as curl
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(curl.CurlAsyncHTTPClient, "fetch_impl", _CurlAsyncHTTPClient_fetch_impl)
@contextlib.contextmanager
def force_reset():
with contextlib.ExitStack() as exit_stack:
for patcher in reset_patchers():
exit_stack.enter_context(patcher)
yield
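# Hedged usage sketch (force_reset is primarily used internally by vcrpy; the
# cassette call below is illustrative): temporarily restore the original,
# unpatched connection classes while a cassette is active, e.g. to let a real
# request escape the recording:
#
#   with vcr.use_cassette('fixtures/example.yaml'):
#       ...                  # patched: traffic goes through the cassette
#       with force_reset():
#           ...              # original connection classes restored here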
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.windows_packages import hammerdb
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'hammerdb'
BENCHMARK_CONFIG = """
hammerdb:
description: Run hammerdb on a single machine
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 1
disk_spec: *default_500_gb
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
vm = benchmark_spec.vms[0]
vm.Install('hammerdb')
def Run(benchmark_spec):
"""Measure the sql performance in one VM.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects with the benchmark results.
"""
vm = benchmark_spec.vms[0]
return hammerdb.RunHammerDB(vm)
def Cleanup(unused_benchmark_spec):
pass
|
import mock
from pytest import raises
from paasta_tools import cleanup_marathon_jobs
from paasta_tools import utils
class TestCleanupMarathonJobs:
cleanup_marathon_jobs.log = mock.Mock()
fake_docker_registry = "http://del.icio.us/"
fake_cluster = "fake_test_cluster"
fake_system_config = utils.SystemPaastaConfig(
{
"marathon_servers": [
{"url": "http://mess_url", "user": "namnin", "password": "pass_nememim"}
]
},
directory="/fake/etc/paasta",
)
fake_marathon_client = mock.Mock()
fake_marathon_clients = mock.Mock(
get_all_clients=mock.Mock(return_value=[fake_marathon_client])
)
def test_main(self):
soa_dir = "paasta_maaaachine"
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.cleanup_apps", autospec=True
) as cleanup_patch:
cleanup_marathon_jobs.main(("--soa-dir", soa_dir))
cleanup_patch.assert_called_once_with(
soa_dir, kill_threshold=0.5, force=False
)
def test_cleanup_apps(self):
soa_dir = "not_really_a_dir"
expected_apps = [("present", "away"), ("on-app", "off")]
fake_app_ids = [
mock.Mock(id="present.away.gone.wtf"),
mock.Mock(id="on-app.off.stop.jrm"),
mock.Mock(id="not-here.oh.no.weirdo"),
]
self.fake_marathon_client.list_apps = mock.Mock(return_value=fake_app_ids)
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.get_services_for_cluster",
return_value=expected_apps,
autospec=True,
) as get_services_for_cluster_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
return_value=self.fake_system_config,
) as config_patch, mock.patch(
"paasta_tools.marathon_tools.get_marathon_clients",
autospec=True,
return_value=self.fake_marathon_clients,
) as clients_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.delete_app", autospec=True
) as delete_patch:
cleanup_marathon_jobs.cleanup_apps(soa_dir)
config_patch.assert_called_once_with()
get_services_for_cluster_patch.assert_called_once_with(
instance_type="marathon", soa_dir=soa_dir
)
clients_patch.assert_called_once_with(mock.ANY)
delete_patch.assert_called_once_with(
app_id="not-here.oh.no.weirdo",
client=self.fake_marathon_client,
soa_dir=soa_dir,
)
def test_cleanup_apps_dont_kill_everything(self):
soa_dir = "not_really_a_dir"
expected_apps = []
fake_app_ids = [
mock.Mock(id="present.away.gone.wtf"),
mock.Mock(id="on-app.off.stop.jrm"),
mock.Mock(id="not-here.oh.no.weirdo"),
]
self.fake_marathon_client.list_apps = mock.Mock(return_value=fake_app_ids)
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.get_services_for_cluster",
return_value=expected_apps,
autospec=True,
) as get_services_for_cluster_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
return_value=self.fake_system_config,
) as config_patch, mock.patch(
"paasta_tools.marathon_tools.get_marathon_clients",
autospec=True,
return_value=self.fake_marathon_clients,
) as clients_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.delete_app", autospec=True
) as delete_patch:
with raises(cleanup_marathon_jobs.DontKillEverythingError):
cleanup_marathon_jobs.cleanup_apps(soa_dir)
config_patch.assert_called_once_with()
get_services_for_cluster_patch.assert_called_once_with(
instance_type="marathon", soa_dir=soa_dir
)
clients_patch.assert_called_once_with(mock.ANY)
assert delete_patch.call_count == 0
def test_cleanup_apps_force(self):
soa_dir = "not_really_a_dir"
expected_apps = []
fake_app_ids = [
mock.Mock(id="present.away.gone.wtf"),
mock.Mock(id="on-app.off.stop.jrm"),
mock.Mock(id="not-here.oh.no.weirdo"),
]
self.fake_marathon_client.list_apps = mock.Mock(return_value=fake_app_ids)
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.get_services_for_cluster",
return_value=expected_apps,
autospec=True,
) as get_services_for_cluster_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
return_value=self.fake_system_config,
) as config_patch, mock.patch(
"paasta_tools.marathon_tools.get_marathon_clients",
autospec=True,
return_value=self.fake_marathon_clients,
) as clients_patch, mock.patch(
"paasta_tools.cleanup_marathon_jobs.delete_app", autospec=True
) as delete_patch:
cleanup_marathon_jobs.cleanup_apps(soa_dir, force=True)
config_patch.assert_called_once_with()
get_services_for_cluster_patch.assert_called_once_with(
instance_type="marathon", soa_dir=soa_dir
)
clients_patch.assert_called_once_with(mock.ANY)
assert delete_patch.call_count == 3
def test_cleanup_apps_doesnt_delete_unknown_apps(self):
soa_dir = "not_really_a_dir"
expected_apps = [("present", "away"), ("on-app", "off")]
fake_app_ids = [mock.Mock(id="non_conforming_app")]
self.fake_marathon_client.list_apps = mock.Mock(return_value=fake_app_ids)
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.get_services_for_cluster",
return_value=expected_apps,
autospec=True,
), mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
return_value=self.fake_system_config,
), mock.patch(
"paasta_tools.marathon_tools.get_marathon_clients",
autospec=True,
return_value=self.fake_marathon_clients,
), mock.patch(
"paasta_tools.cleanup_marathon_jobs.delete_app", autospec=True
) as delete_patch:
cleanup_marathon_jobs.cleanup_apps(soa_dir)
assert delete_patch.call_count == 0
def test_delete_app(self):
app_id = "example--service.main.git93340779.configddb38a65"
client = self.fake_marathon_client
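        # The app id encodes service.instance.git_sha.config_sha; the doubled
        # dash decodes back to an underscore, so this id corresponds to service
        # "example_service", instance "main" (checked in the log assertion below).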
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
) as mock_load_system_paasta_config, mock.patch(
"paasta_tools.bounce_lib.bounce_lock_zookeeper", autospec=True
), mock.patch(
"paasta_tools.bounce_lib.delete_marathon_app", autospec=True
) as mock_delete_marathon_app, mock.patch(
"paasta_tools.cleanup_marathon_jobs._log", autospec=True
) as mock_log, mock.patch(
"paasta_tools.cleanup_marathon_jobs.send_event", autospec=True
) as mock_send_sensu_event:
mock_load_system_paasta_config.return_value.get_cluster = mock.Mock(
return_value="fake_cluster"
)
cleanup_marathon_jobs.delete_app(app_id, client, "fake_soa_dir")
mock_delete_marathon_app.assert_called_once_with(app_id, client)
mock_load_system_paasta_config.return_value.get_cluster.assert_called_once_with()
expected_log_line = "Deleted stale marathon job that looks lost: " + app_id
mock_log.assert_called_once_with(
instance="main",
service="example_service",
level="event",
component="deploy",
cluster="fake_cluster",
line=expected_log_line,
)
assert mock_send_sensu_event.call_count == 2
def test_delete_app_throws_exception(self):
app_id = "example--service.main.git93340779.configddb38a65"
client = self.fake_marathon_client
with mock.patch(
"paasta_tools.cleanup_marathon_jobs.load_system_paasta_config",
autospec=True,
), mock.patch(
"paasta_tools.bounce_lib.bounce_lock_zookeeper", autospec=True
), mock.patch(
"paasta_tools.bounce_lib.delete_marathon_app",
side_effect=ValueError("foo"),
autospec=True,
), mock.patch(
"paasta_tools.cleanup_marathon_jobs._log", autospec=True
) as mock_log:
with raises(ValueError):
cleanup_marathon_jobs.delete_app(app_id, client, "fake_soa_dir")
assert "example_service" in mock_log.mock_calls[0][2]["line"]
assert "Traceback" in mock_log.mock_calls[1][2]["line"]
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
|
import json
import numpy as np
import os
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.coco.coco_utils import get_coco
from chainercv import utils
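# A panoptic PNG encodes each segment id in the pixel color as
# id = R + 256 * G + 256**2 * B; _rgb2id below reverses that encoding
# (e.g. the color (10, 2, 0) maps to id 10 + 256 * 2 = 522).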
def _rgb2id(color):
return color[0] + 256 * color[1] + 256 * 256 * color[2]
class COCOSemanticSegmentationDataset(GetterDataset):
"""Semantic segmentation dataset for `MS COCO`_.
Semantic segmentations are generated from panoptic segmentations
as done in the `official toolkit`_.
.. _`MS COCO`: http://cocodataset.org/#home
.. _`official toolkit`: https://github.com/cocodataset/panopticapi/
blob/master/converters/panoptic2semantic_segmentation.py
Args:
data_dir (string): Path to the root of the training data. If this is
:obj:`auto`, this class will automatically download data for you
under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/coco`.
split ({'train', 'val'}): Select a split of the dataset.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`label`, ":math:`(H, W)`", :obj:`int32`, \
":math:`[-1, \#class - 1]`"
"""
def __init__(self, data_dir='auto', split='train'):
super(COCOSemanticSegmentationDataset, self).__init__()
if data_dir == 'auto':
data_dir = get_coco(split, split, '2017', 'panoptic')
self.img_root = os.path.join(
data_dir, 'images', '{}{}'.format(split, 2017))
self.label_root = os.path.join(
data_dir, 'annotations', 'panoptic_{}{}'.format(split, 2017))
anno_path = os.path.join(
data_dir, 'annotations',
'panoptic_{}{}.json'.format(split, 2017))
self.data_dir = data_dir
annos = json.load(open(anno_path, 'r'))
self.annos = annos
self.cat_ids = [cat['id'] for cat in annos['categories']]
self.img_paths = [ann['file_name'][:-4] + '.jpg'
for ann in annos['annotations']]
self.add_getter('img', self._get_image)
self.add_getter('label', self._get_label)
self.keys = ('img', 'label')
def __len__(self):
return len(self.img_paths)
def _get_image(self, i):
img_path = os.path.join(
self.img_root, self.img_paths[i])
img = utils.read_image(img_path, dtype=np.float32, color=True)
return img
def _get_label(self, i):
# https://github.com/cocodataset/panopticapi/blob/master/converters/
# panoptic2semantic_segmentation.py#L58
anno = self.annos['annotations'][i]
label_path = os.path.join(self.label_root, anno['file_name'])
rgb_id_map = utils.read_image(
label_path,
dtype=np.uint32, color=True)
id_map = _rgb2id(rgb_id_map)
label = -1 * np.ones_like(id_map, dtype=np.int32)
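        # Pixels not covered by any segment keep the ignore value -1; covered
        # pixels get the index of their category id in self.cat_ids.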
for inst in anno['segments_info']:
mask = id_map == inst['id']
label[mask] = self.cat_ids.index(inst['category_id'])
return label
|
import itertools
import time
from ...common import util
def uts(dt):
return int(time.mktime(dt.timetuple()))
class Decoder(object):
"""
Decode metrics incoming from tank into points for OpenTSDB
Parameters
----------
parent_tags : dict
common per-test tags
tank_tag : str
tank identifier tag
uuid : str
test id tag
labeled : bool
detailed stats for each label
histograms : bool
response time histograms measurements
"""
def __init__(self, tank_tag, uuid, parent_tags, labeled, histograms):
self.labeled = labeled
initial_tags = {"tank": tank_tag, "uuid": uuid}
initial_tags.update(parent_tags)
self.tags = initial_tags
self.histograms = histograms
def set_uuid(self, id_):
self.tags['uuid'] = id_
def decode_monitoring(self, data):
"""
        The reason we have two separate methods for monitoring
        and aggregates is the significant difference in the structure of the incoming data.
"""
points = list()
for second_data in data:
for host, host_data in second_data["data"].items():
points.append(
self.__make_points(
"monitoring",
{
"host": host,
"comment": host_data.get("comment")
},
second_data["timestamp"],
{
# cast int to float. avoid
# https://github.com/yandex/yandex-tank/issues/776
metric:
float(value) if isinstance(value, int) else value
for metric, value in
host_data["metrics"].items()
}))
return list(itertools.chain(*points))
def decode_aggregates(self, aggregated_data, gun_stats, prefix):
ts = aggregated_data["ts"]
points = list()
# stats overall w/ __OVERALL__ label
points += self.__make_points_for_label(
ts, aggregated_data["overall"], "__OVERALL__", prefix, gun_stats)
# detailed stats per tag
if self.labeled:
for label, aggregated_data_by_tag in aggregated_data[
"tagged"].items():
points += self.__make_points_for_label(
ts, aggregated_data_by_tag, label, prefix, gun_stats)
return points
def __make_points_for_label(self, ts, data, label, prefix, gun_stats):
"""x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
"""
label_points = list()
label_points.extend((
# overall quantiles for label
self.__make_points(
prefix + "overall_quantiles", {"label": label}, ts,
self.__make_quantile_fields(data)),
# overall meta (gun status) for label
self.__make_points(
prefix + "overall_meta", {"label": label}, ts,
self.__make_overall_meta_fields(data, gun_stats)),
# net codes for label
self.__make_points(
prefix + "net_codes", {"label": label},
ts,
self.__make_netcodes_fields(data),
field_lookup_table=util.NET),
# proto codes for label
self.__make_points(
prefix + "proto_codes", {"label": label},
ts,
self.__make_protocodes_fields(data),
field_lookup_table=util.HTTP)))
# histograms, one row for each bin
if self.histograms:
for bin_, count in zip(data["interval_real"]["hist"]["bins"],
data["interval_real"]["hist"]["data"]):
label_points.append(
self.__make_points(
prefix + "histograms", {"label": label}, ts, {
"bin": bin_,
"count": count
}))
return list(itertools.chain(*label_points))
@staticmethod
def __make_quantile_fields(data):
return {
'q' + str(q): value / 1000.0
for q, value in zip(
data["interval_real"]["q"]["q"], data["interval_real"]["q"]
["value"])
}
@staticmethod
def __make_overall_meta_fields(data, stats):
return {
"active_threads":
stats["metrics"]["instances"],
"RPS":
data["interval_real"]["len"],
"planned_requests":
float(stats["metrics"]["reqps"]),
"avg_rt":
float(data['interval_real']['total']) / data['interval_real']['len'] / 1000.0,
"min":
data['interval_real']['min'] / 1000.0,
"max":
data['interval_real']['max'] / 1000.0
}
@staticmethod
def __make_netcodes_fields(data):
return {
int(code): int(cnt)
for code, cnt in data["net_code"]["count"].items()
}
@staticmethod
def __make_protocodes_fields(data):
return {
int(code): int(cnt)
for code, cnt in data["proto_code"]["count"].items()
}
def __make_points(
self, metric, additional_tags, ts, fields, field_lookup_table={}):
"""
Parameters
----------
metric : string
metric type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
            custom additional tags for these points
ts : integer
timestamp
fields : dict
            mapping of field names to values; each field becomes its own point
Returns
-------
dict
points for OpenTSDB client
"""
result = []
for key, value in fields.items():
tags = self.tags.copy()
tags.update(additional_tags)
tags["field"] = str(key)
if field_lookup_table.get(key):
tags["field_label"] = field_lookup_table.get(key).replace(" ", "_")
result.append({
"metric": metric,
"tags": tags,
"timestamp": int(ts),
"value": value,
})
return result
|
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_watson import TextToSpeechV1
import voluptuous as vol
from homeassistant.components.tts import PLATFORM_SCHEMA, Provider
import homeassistant.helpers.config_validation as cv
CONF_URL = "watson_url"
CONF_APIKEY = "watson_apikey"
ATTR_CREDENTIALS = "credentials"
DEFAULT_URL = "https://stream.watsonplatform.net/text-to-speech/api"
CONF_VOICE = "voice"
CONF_OUTPUT_FORMAT = "output_format"
CONF_TEXT_TYPE = "text"
# List from https://tinyurl.com/watson-tts-docs
SUPPORTED_VOICES = [
"ar-AR_OmarVoice",
"de-DE_BirgitV3Voice",
"de-DE_BirgitVoice",
"de-DE_DieterV3Voice",
"de-DE_DieterVoice",
"de-DE_ErikaV3Voice",
"en-GB_KateV3Voice",
"en-GB_KateVoice",
"en-US_AllisonV3Voice",
"en-US_AllisonVoice",
"en-US_EmilyV3Voice",
"en-US_HenryV3Voice",
"en-US_KevinV3Voice",
"en-US_LisaV3Voice",
"en-US_LisaVoice",
"en-US_MichaelV3Voice",
"en-US_MichaelVoice",
"en-US_OliviaV3Voice",
"es-ES_EnriqueV3Voice",
"es-ES_EnriqueVoice",
"es-ES_LauraV3Voice",
"es-ES_LauraVoice",
"es-LA_SofiaV3Voice",
"es-LA_SofiaVoice",
"es-US_SofiaV3Voice",
"es-US_SofiaVoice",
"fr-FR_ReneeV3Voice",
"fr-FR_ReneeVoice",
"it-IT_FrancescaV3Voice",
"it-IT_FrancescaVoice",
"ja-JP_EmiV3Voice",
"ja-JP_EmiVoice",
"ko-KR_YoungmiVoice",
"ko-KR_YunaVoice",
"nl-NL_EmmaVoice",
"nl-NL_LiamVoice",
"pt-BR_IsabelaV3Voice",
"pt-BR_IsabelaVoice",
"zh-CN_LiNaVoice",
"zh-CN_WangWeiVoice",
"zh-CN_ZhangJingVoice",
]
SUPPORTED_OUTPUT_FORMATS = [
"audio/flac",
"audio/mp3",
"audio/mpeg",
"audio/ogg",
"audio/ogg;codecs=opus",
"audio/ogg;codecs=vorbis",
"audio/wav",
]
CONTENT_TYPE_EXTENSIONS = {
"audio/flac": "flac",
"audio/mp3": "mp3",
"audio/mpeg": "mp3",
"audio/ogg": "ogg",
"audio/ogg;codecs=opus": "ogg",
"audio/ogg;codecs=vorbis": "ogg",
"audio/wav": "wav",
}
DEFAULT_VOICE = "en-US_AllisonVoice"
DEFAULT_OUTPUT_FORMAT = "audio/mp3"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_URL, default=DEFAULT_URL): cv.string,
vol.Required(CONF_APIKEY): cv.string,
vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORTED_VOICES),
vol.Optional(CONF_OUTPUT_FORMAT, default=DEFAULT_OUTPUT_FORMAT): vol.In(
SUPPORTED_OUTPUT_FORMATS
),
}
)
def get_engine(hass, config, discovery_info=None):
"""Set up IBM Watson TTS component."""
authenticator = IAMAuthenticator(config[CONF_APIKEY])
service = TextToSpeechV1(authenticator)
service.set_service_url(config[CONF_URL])
supported_languages = list({s[:5] for s in SUPPORTED_VOICES})
default_voice = config[CONF_VOICE]
output_format = config[CONF_OUTPUT_FORMAT]
service.set_default_headers({"x-watson-learning-opt-out": "true"})
return WatsonTTSProvider(service, supported_languages, default_voice, output_format)
class WatsonTTSProvider(Provider):
"""IBM Watson TTS api provider."""
def __init__(self, service, supported_languages, default_voice, output_format):
"""Initialize Watson TTS provider."""
self.service = service
self.supported_langs = supported_languages
self.default_lang = default_voice[:5]
self.default_voice = default_voice
self.output_format = output_format
self.name = "Watson TTS"
@property
def supported_languages(self):
"""Return a list of supported languages."""
return self.supported_langs
@property
def default_language(self):
"""Return the default language."""
return self.default_lang
@property
def default_options(self):
"""Return dict include default options."""
return {CONF_VOICE: self.default_voice}
@property
def supported_options(self):
"""Return a list of supported options."""
return [CONF_VOICE]
def get_tts_audio(self, message, language=None, options=None):
"""Request TTS file from Watson TTS."""
response = self.service.synthesize(
message, accept=self.output_format, voice=self.default_voice
).get_result()
return (CONTENT_TYPE_EXTENSIONS[self.output_format], response.content)
|
import unittest
from unittest import mock
from libpurecool.const import Dyson360EyeMode, PowerMode
from libpurecool.dyson_360_eye import Dyson360Eye
from homeassistant.components.dyson import vacuum as dyson
from homeassistant.components.dyson.vacuum import Dyson360EyeDevice
from tests.common import get_test_home_assistant
def _get_non_vacuum_device():
"""Return a non vacuum device."""
device = mock.Mock()
device.name = "Device_Fan"
device.state = None
return device
def _get_vacuum_device_cleaning():
"""Return a vacuum device running."""
device = mock.Mock(spec=Dyson360Eye)
device.name = "Device_Vacuum"
device.state = mock.MagicMock()
device.state.state = Dyson360EyeMode.FULL_CLEAN_RUNNING
device.state.battery_level = 85
device.state.power_mode = PowerMode.QUIET
device.state.position = (0, 0)
return device
def _get_vacuum_device_charging():
"""Return a vacuum device charging."""
device = mock.Mock(spec=Dyson360Eye)
device.name = "Device_Vacuum"
device.state = mock.MagicMock()
device.state.state = Dyson360EyeMode.INACTIVE_CHARGING
device.state.battery_level = 40
device.state.power_mode = PowerMode.QUIET
device.state.position = (0, 0)
return device
def _get_vacuum_device_pause():
"""Return a vacuum device in pause."""
device = mock.MagicMock(spec=Dyson360Eye)
device.name = "Device_Vacuum"
device.state = mock.MagicMock()
device.state.state = Dyson360EyeMode.FULL_CLEAN_PAUSED
device.state.battery_level = 40
device.state.power_mode = PowerMode.QUIET
device.state.position = (0, 0)
return device
def _get_vacuum_device_unknown_state():
"""Return a vacuum device with unknown state."""
device = mock.Mock(spec=Dyson360Eye)
device.name = "Device_Vacuum"
device.state = mock.MagicMock()
device.state.state = "Unknown"
return device
class DysonTest(unittest.TestCase):
"""Dyson 360 eye robot vacuum component test class."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component_with_no_devices(self):
"""Test setup component with no devices."""
self.hass.data[dyson.DYSON_DEVICES] = []
add_entities = mock.MagicMock()
dyson.setup_platform(self.hass, {}, add_entities)
add_entities.assert_called_with([])
def test_setup_component(self):
"""Test setup component with devices."""
def _add_device(devices):
assert len(devices) == 1
assert devices[0].name == "Device_Vacuum"
device_vacuum = _get_vacuum_device_cleaning()
device_non_vacuum = _get_non_vacuum_device()
self.hass.data[dyson.DYSON_DEVICES] = [device_vacuum, device_non_vacuum]
dyson.setup_platform(self.hass, {}, _add_device)
def test_on_message(self):
"""Test when message is received."""
device = _get_vacuum_device_cleaning()
component = Dyson360EyeDevice(device)
component.entity_id = "entity_id"
component.schedule_update_ha_state = mock.Mock()
component.on_message(mock.Mock())
assert component.schedule_update_ha_state.called
def test_should_poll(self):
"""Test polling is disable."""
device = _get_vacuum_device_cleaning()
component = Dyson360EyeDevice(device)
assert not component.should_poll
def test_properties(self):
"""Test component properties."""
device1 = _get_vacuum_device_cleaning()
device2 = _get_vacuum_device_unknown_state()
device3 = _get_vacuum_device_charging()
component = Dyson360EyeDevice(device1)
component2 = Dyson360EyeDevice(device2)
component3 = Dyson360EyeDevice(device3)
assert component.name == "Device_Vacuum"
assert component.is_on
assert component.status == "Cleaning"
assert component2.status == "Unknown"
assert component.battery_level == 85
assert component.fan_speed == "Quiet"
assert component.fan_speed_list == ["Quiet", "Max"]
assert component.device_state_attributes["position"] == "(0, 0)"
assert component.available
assert component.supported_features == 255
assert component.battery_icon == "mdi:battery-80"
assert component3.battery_icon == "mdi:battery-charging-40"
def test_turn_on(self):
"""Test turn on vacuum."""
device1 = _get_vacuum_device_charging()
component1 = Dyson360EyeDevice(device1)
component1.turn_on()
assert device1.start.called
device2 = _get_vacuum_device_pause()
component2 = Dyson360EyeDevice(device2)
component2.turn_on()
assert device2.resume.called
def test_turn_off(self):
"""Test turn off vacuum."""
device1 = _get_vacuum_device_cleaning()
component1 = Dyson360EyeDevice(device1)
component1.turn_off()
assert device1.pause.called
def test_stop(self):
"""Test stop vacuum."""
device1 = _get_vacuum_device_cleaning()
component1 = Dyson360EyeDevice(device1)
component1.stop()
assert device1.pause.called
def test_set_fan_speed(self):
"""Test set fan speed vacuum."""
device1 = _get_vacuum_device_cleaning()
component1 = Dyson360EyeDevice(device1)
component1.set_fan_speed("Max")
device1.set_power_mode.assert_called_with(PowerMode.MAX)
def test_start_pause(self):
"""Test start/pause."""
device1 = _get_vacuum_device_charging()
component1 = Dyson360EyeDevice(device1)
component1.start_pause()
assert device1.start.called
device2 = _get_vacuum_device_pause()
component2 = Dyson360EyeDevice(device2)
component2.start_pause()
assert device2.resume.called
device3 = _get_vacuum_device_cleaning()
component3 = Dyson360EyeDevice(device3)
component3.start_pause()
assert device3.pause.called
def test_return_to_base(self):
"""Test return to base."""
device = _get_vacuum_device_pause()
component = Dyson360EyeDevice(device)
component.return_to_base()
assert device.abort.called
|
import attr
from PyQt5.QtCore import pyqtSlot, QObject, QTimer
from qutebrowser.config import config
from qutebrowser.commands import runners
from qutebrowser.misc import objects
from qutebrowser.utils import log, utils, debug, objreg
from qutebrowser.completion.models import miscmodels
@attr.s
class CompletionInfo:
"""Context passed into all completion functions."""
config = attr.ib()
keyconf = attr.ib()
win_id = attr.ib()
cur_tab = attr.ib()
class Completer(QObject):
"""Completer which manages completions in a CompletionView.
Attributes:
_cmd: The statusbar Command object this completer belongs to.
_win_id: The id of the window that owns this object.
_timer: The timer used to trigger the completion update.
_last_cursor_pos: The old cursor position so we avoid double completion
updates.
_last_text: The old command text so we avoid double completion updates.
_last_before_cursor: The prior value of before_cursor.
"""
def __init__(self, *, cmd, win_id, parent=None):
super().__init__(parent)
self._cmd = cmd
self._win_id = win_id
self._timer = QTimer()
self._timer.setSingleShot(True)
self._timer.setInterval(0)
self._timer.timeout.connect(self._update_completion)
self._last_cursor_pos = -1
self._last_text = None
self._last_before_cursor = None
self._cmd.update_completion.connect(self.schedule_completion_update)
def __repr__(self):
return utils.get_repr(self)
def _model(self):
"""Convenience method to get the current completion model."""
completion = self.parent()
return completion.model()
def _get_new_completion(self, before_cursor, under_cursor):
"""Get the completion function based on the current command text.
Args:
before_cursor: The command chunks before the cursor.
under_cursor: The command chunk under the cursor.
Return:
A completion model.
"""
if '--' in before_cursor or under_cursor.startswith('-'):
# cursor on a flag or after an explicit split (--)
return None
log.completion.debug("Before removing flags: {}".format(before_cursor))
if not before_cursor:
# '|' or 'set|'
log.completion.debug('Starting command completion')
return miscmodels.command
try:
cmd = objects.commands[before_cursor[0]]
except KeyError:
log.completion.debug("No completion for unknown command: {}"
.format(before_cursor[0]))
return None
before_cursor = [x for x in before_cursor if not x.startswith('-')]
log.completion.debug("After removing flags: {}".format(before_cursor))
argpos = len(before_cursor) - 1
try:
func = cmd.get_pos_arg_info(argpos).completion
except IndexError:
log.completion.debug("No completion in position {}".format(argpos))
return None
return func
def _quote(self, s):
"""Quote s if it needs quoting for the commandline.
Note we don't use shlex.quote because that quotes a lot of shell
metachars we don't need to have quoted.
"""
if not s:
return "''"
elif any(c in s for c in ' "\'\t\n\\'):
# use single quotes, and put single quotes into double quotes
# the string $'b is then quoted as '$'"'"'b'
return "'" + s.replace("'", "'\"'\"'") + "'"
else:
return s
def _partition(self):
"""Divide the commandline text into chunks around the cursor position.
Return:
([parts_before_cursor], 'part_under_cursor', [parts_after_cursor])
"""
text = self._cmd.text()[len(self._cmd.prefix()):]
if not text or not text.strip():
# Only ":", empty part under the cursor with nothing before/after
return [], '', []
parser = runners.CommandParser()
result = parser.parse(text, fallback=True, keep=True)
parts = [x for x in result.cmdline if x]
pos = self._cmd.cursorPosition() - len(self._cmd.prefix())
pos = min(pos, len(text)) # Qt treats 2-byte UTF-16 chars as 2 chars
log.completion.debug('partitioning {} around position {}'.format(parts,
pos))
for i, part in enumerate(parts):
pos -= len(part)
if pos <= 0:
if part[pos-1:pos+1].isspace():
# cursor is in a space between two existing words
parts.insert(i, '')
prefix = [x.strip() for x in parts[:i]]
center = parts[i].strip()
                # strip trailing whitespace included as a separate token
postfix = [x.strip() for x in parts[i+1:] if not x.isspace()]
log.completion.debug(
"partitioned: {} '{}' {}".format(prefix, center, postfix))
return prefix, center, postfix
raise utils.Unreachable("Not all parts consumed: {}".format(parts))
@pyqtSlot(str)
def on_selection_changed(self, text):
"""Change the completed part if a new item was selected.
        Called from the view's selectionChanged method.
Args:
text: Newly selected text.
"""
if text is None:
return
before, center, after = self._partition()
log.completion.debug("Changing {} to '{}'".format(center, text))
try:
maxsplit = objects.commands[before[0]].maxsplit
except (KeyError, IndexError):
maxsplit = None
if maxsplit is None:
text = self._quote(text)
model = self._model()
if model.count() == 1 and config.val.completion.quick:
# If we only have one item, we want to apply it immediately and go
# on to the next part, unless we are quick-completing the part
# after maxsplit, so that we don't keep offering completions
# (see issue #1519)
if maxsplit is not None and maxsplit < len(before):
self._change_completed_part(text, before, after)
else:
self._change_completed_part(text, before, after,
immediate=True)
else:
self._change_completed_part(text, before, after)
@pyqtSlot()
def schedule_completion_update(self):
"""Schedule updating/enabling completion.
For performance reasons we don't want to block here, instead we do this
in the background.
We delay the update only if we've already input some text and ignore
updates if the text is shorter than completion.min_chars (unless we're
hitting backspace in which case updates won't be ignored).
"""
_cmd, _sep, rest = self._cmd.text().partition(' ')
input_length = len(rest)
if (0 < input_length < config.val.completion.min_chars and
self._cmd.cursorPosition() > self._last_cursor_pos):
log.completion.debug("Ignoring update because the length of "
"the text is less than completion.min_chars.")
elif (self._cmd.cursorPosition() == self._last_cursor_pos and
self._cmd.text() == self._last_text):
log.completion.debug("Ignoring update because there were no "
"changes.")
else:
log.completion.debug("Scheduling completion update.")
start_delay = config.val.completion.delay if self._last_text else 0
self._timer.start(start_delay)
self._last_cursor_pos = self._cmd.cursorPosition()
self._last_text = self._cmd.text()
@pyqtSlot()
def _update_completion(self):
"""Check if completions are available and activate them."""
completion = self.parent()
if self._cmd.prefix() != ':':
# This is a search or gibberish, so we don't need to complete
# anything (yet)
# FIXME complete searches
# https://github.com/qutebrowser/qutebrowser/issues/32
completion.set_model(None)
self._last_before_cursor = None
return
before_cursor, pattern, after_cursor = self._partition()
log.completion.debug("Updating completion: {} {} {}".format(
before_cursor, pattern, after_cursor))
pattern = pattern.strip("'\"")
func = self._get_new_completion(before_cursor, pattern)
if func is None:
log.completion.debug('Clearing completion')
completion.set_model(None)
self._last_before_cursor = None
return
if before_cursor == self._last_before_cursor:
# If the part before the cursor didn't change since the last
# completion, we only need to filter existing matches without
# having to regenerate completion results.
completion.set_pattern(pattern)
return
self._last_before_cursor = before_cursor
args = (x for x in before_cursor[1:] if not x.startswith('-'))
cur_tab = objreg.get('tab', scope='tab', window=self._win_id,
tab='current')
with debug.log_time(log.completion, 'Starting {} completion'
.format(func.__name__)):
info = CompletionInfo(config=config.instance,
keyconf=config.key_instance,
win_id=self._win_id,
cur_tab=cur_tab)
model = func(*args, info=info)
with debug.log_time(log.completion, 'Set completion model'):
completion.set_model(model)
completion.set_pattern(pattern)
def _change_completed_part(self, newtext, before, after, immediate=False):
"""Change the part we're currently completing in the commandline.
Args:
            newtext: The text to set (string) for the token under the cursor.
before: Commandline tokens before the token under the cursor.
after: Commandline tokens after the token under the cursor.
immediate: True if the text should be completed immediately
including a trailing space and we shouldn't continue
completing the current item.
"""
text = self._cmd.prefix() + ' '.join(before + [newtext])
pos = len(text) + (1 if immediate else 0)
if after:
text += ' ' + ' '.join(after)
elif immediate:
# pad with a space if quick-completing the last entry
text += ' '
log.completion.debug("setting text = '{}', pos = {}".format(text, pos))
# generally, we don't want to let self._cmd emit cursorPositionChanged,
# because that'll schedule a completion update. That happens when
# tabbing through the completions, and we want to change the command
# text but we also want to keep the original completion list for the
# command the user manually entered. The exception is when we're
# immediately completing, in which case we *do* want to update the
# completion view so that we can start completing the next part
if not immediate:
self._cmd.blockSignals(True)
self._cmd.setText(text)
self._cmd.setCursorPosition(pos)
self._cmd.setFocus()
self._cmd.blockSignals(False)
self._cmd.show_cmd.emit()
|
from perfkitbenchmarker import linux_packages
AMDBLIS_DIR = '%s/amdblis' % linux_packages.INSTALL_DIR
GIT_REPO = 'https://github.com/amd/blis'
GIT_TAG = '1.3'
def _Install(vm):
"""Installs the AMD BLIS package on the VM."""
vm.Install('build_tools')
vm.Install('fortran')
vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, AMDBLIS_DIR))
vm.RemoteCommand('cd {0} && git checkout {1}'.format(AMDBLIS_DIR, GIT_TAG))
vm.RemoteCommand(
'cd {0} && ./configure --enable-cblas zen'.format(AMDBLIS_DIR))
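  # 'zen' selects the BLIS configuration tuned for AMD Zen cores, and
  # --enable-cblas also builds the CBLAS compatibility layer.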
vm.RemoteCommand('cd {0} && make -j'.format(AMDBLIS_DIR))
def Install(vm):
"""Installs the AMD BLIS package on the VM."""
_Install(vm)
|
import types
import sys
from warnings import warn
import numpy
import scipy.io
if sys.version_info[0] == 2:
    chr = unichr  # noqa This is needed for python 2 and 3 compatibility
standard_matlab_classes = ('char', 'cell', 'float', 'double', 'int',
'int8', 'int16', 'int32',
'int64', 'uint', 'uint8', 'uint16', 'logical',
'uint32', 'uint64', 'struct', 'unknown')
def _import_h5py():
try:
import h5py
except Exception as exc:
raise ImportError('h5py is required to read MATLAB files >= v7.3 '
'(%s)' % (exc,))
return h5py
def _hdf5todict(hdf5_object, variable_names=None, ignore_fields=None):
"""
Recursively converts a hdf5 object to a python dictionary,
converting all types as well.
Parameters
----------
hdf5_object: Union[h5py.Group, h5py.Dataset]
Object to convert. Can be a h5py File, Group or Dataset
variable_names: iterable, optional
Tuple or list of variables to include. If set to none, all
variable are read.
ignore_fields: iterable, optional
Tuple or list of fields to ignore. If set to none, all fields will
be read.
Returns
-------
dict
Python dictionary
"""
h5py = _import_h5py()
if isinstance(hdf5_object, h5py.Group):
return _handle_hdf5_group(hdf5_object, variable_names=variable_names,
ignore_fields=ignore_fields)
elif isinstance(hdf5_object, h5py.Dataset):
return _handle_hdf5_dataset(hdf5_object)
elif isinstance(hdf5_object, (list, types.GeneratorType)):
return [_hdf5todict(item) for item in hdf5_object]
raise TypeError('Unknown type in hdf5 file')
def _handle_hdf5_group(hdf5_object, variable_names=None, ignore_fields=None):
all_keys = set(hdf5_object.keys())
if ignore_fields:
all_keys = all_keys - set(ignore_fields)
if variable_names:
all_keys = all_keys & set(variable_names)
return_dict = dict()
for key in all_keys:
return_dict[key] = _hdf5todict(hdf5_object[key],
variable_names=None,
ignore_fields=ignore_fields)
return return_dict
def _handle_hdf5_dataset(hdf5_object):
if 'MATLAB_empty' in hdf5_object.attrs.keys():
data = numpy.empty((0,))
else:
# this used to be just hdf5_object.value, but this is deprecated
data = hdf5_object[()]
matlab_class = hdf5_object.attrs.get('MATLAB_class', b'unknown').decode()
if matlab_class not in standard_matlab_classes:
        warn('Complex objects (like classes) are not supported. '
             'They are imported on a best-effort basis, '
             'so your mileage may vary.')
if isinstance(data, numpy.ndarray) and \
data.dtype == numpy.dtype('object'):
data = [hdf5_object.file[cur_data] for cur_data in data.flatten()]
if len(data) == 1 and matlab_class == 'cell':
data = data[0]
matlab_class = data.attrs.get('MATLAB_class',
matlab_class).decode()
data = data[()]
return _assign_types(data, matlab_class)
data = _hdf5todict(data)
return _assign_types(data, matlab_class)
def _convert_string_hdf5(values):
if values.size > 1:
assigned_values = u''.join(chr(c) for c in values.flatten())
else:
try:
assigned_values = chr(values)
except TypeError:
assigned_values = numpy.array([])
return assigned_values
def _assign_types(values, matlab_class):
"""private function, which assigns correct types to h5py extracted values
from _browse_dataset()"""
if matlab_class == 'char':
values = numpy.squeeze(values).T
assigned_values = _handle_hdf5_strings(values)
elif type(values) == numpy.ndarray:
assigned_values = _handle_ndarray(values)
elif type(values) == numpy.float64:
assigned_values = float(values)
else:
assigned_values = values
return assigned_values
def _handle_ndarray(values):
"""Handle conversion of ndarrays."""
values = numpy.squeeze(values).T
if (isinstance(values, numpy.ndarray) and
values.dtype.names == ('real', 'imag')):
        values = numpy.array(values.view(numpy.complex128))
if isinstance(values, numpy.ndarray) and \
values.size == 1:
values = values.item()
return values
def _handle_hdf5_strings(values):
if values.ndim in (0, 1):
values = _convert_string_hdf5(values)
elif values.ndim == 2:
values = [_convert_string_hdf5(cur_val)
for cur_val in values]
else:
        raise RuntimeError('String arrays with more than 2 dimensions '
                           'are not supported at the moment.')
return values
def _check_for_scipy_mat_struct(data):
"""
Private function to check all entries of data for occurrences of
scipy.io.matlab.mio5_params.mat_struct and convert them.
Parameters
==========
data: any
data to be checked
Returns
=========
object
checked and converted data
"""
if isinstance(data, dict):
for key in data:
data[key] = _check_for_scipy_mat_struct(data[key])
if isinstance(data, scipy.io.matlab.mio5_params.MatlabOpaque):
        warn('Complex objects (like classes) are not supported. '
             'They are imported on a best-effort basis, '
             'so your mileage may vary.')
if isinstance(data, numpy.ndarray):
data = _handle_scipy_ndarray(data)
return data
def _handle_scipy_ndarray(data):
if data.dtype == numpy.dtype('object') and not \
isinstance(data, scipy.io.matlab.mio5.MatlabFunction):
as_list = []
for element in data:
as_list.append(_check_for_scipy_mat_struct(element))
data = as_list
elif isinstance(data.dtype.names, tuple):
data = _todict_from_np_struct(data)
data = _check_for_scipy_mat_struct(data)
if isinstance(data, numpy.ndarray):
data = numpy.array(data)
return data
def _todict_from_np_struct(data):
data_dict = dict()
for cur_field_name in data.dtype.names:
try:
n_items = len(data[cur_field_name])
cur_list = list()
for idx in numpy.arange(n_items):
cur_value = data[cur_field_name].item(idx)
cur_value = _check_for_scipy_mat_struct(cur_value)
cur_list.append(cur_value)
data_dict[cur_field_name] = cur_list
except TypeError:
cur_value = data[cur_field_name].item(0)
cur_value = _check_for_scipy_mat_struct(cur_value)
data_dict[cur_field_name] = cur_value
return data_dict
|
import io
import glob
import sys
import os
import re
import json
import shutil
import tempfile
import jinja2
dumb_replacements = [
["{% if any(post.has_math for post in posts) %}", '{% if posts|selectattr("has_math")|list %}'],
["json.dumps(title)", "title|tojson"],
["{{ parent.extra_head() }}", "{{ super() }}"],
["{{ parent.content() }}", "{{ super() }}"],
["prefix='\\", "prefix='"],
["og: http://ogp.me/ns# \\", "og: http://ogp.me/ns#"],
["article: http://ogp.me/ns/article# \\", "article: http://ogp.me/ns/article#"],
["fb: http://ogp.me/ns/fb# \\", "fb: http://ogp.me/ns/fb#"],
['dir="rtl" \\', 'dir="rtl"'],
['sorted(translations)', 'translations|sort'],
['abs(i - current_page)', '(i - current_page)|abs'],
['loop.index', 'loop.index0'],
['is None', 'is none'],
['is not None', 'is not none'],
]
dumber_replacements = [
['<%! import json %>\n\n', ''],
["<html\n\\", "<html\n"],
["\n'\\\n", "\n'\n"],
["{% endif %}\n\\", "{% endif %}\n"]
]
def jinjify(in_theme, out_theme):
"""Convert in_theme into a jinja version and put it in out_theme"""
in_templates_path = os.path.join(in_theme, "templates")
out_templates_path = os.path.join(out_theme, "templates")
try:
os.makedirs(out_templates_path)
except Exception:
pass
lookup = jinja2.Environment()
lookup.filters['tojson'] = json.dumps
lookup.loader = jinja2.FileSystemLoader([out_templates_path], encoding='utf-8-sig')
for template in glob.glob(os.path.join(in_templates_path, "*.tmpl")):
out_template = os.path.join(out_templates_path, os.path.basename(template))
with io.open(template, "r", encoding="utf-8") as inf:
data = mako2jinja(inf)
lines = []
for line in data.splitlines():
for repl in dumb_replacements:
line = line.replace(*repl)
lines.append(line)
data = '\n'.join(lines)
for repl in dumber_replacements:
data = data.replace(*repl)
with io.open(out_template, "w+", encoding="utf-8") as outf:
outf.write(data + '\n')
# Syntax check output
source, filename = lookup.loader.get_source(lookup, os.path.basename(template))[:2]
try:
lookup.parse(source)
except Exception as e:
error("Syntax error in {0}:{1}".format(out_template, e.lineno))
parent = os.path.basename(in_theme.rstrip('/'))
child = os.path.basename(out_theme.rstrip('/'))
mappings = {
'base-jinja': 'base',
'bootstrap4-jinja': 'base-jinja',
}
if child in mappings:
parent = mappings[child]
# Copy assets in bootstrap/bootstrap4
if child == 'bootstrap4-jinja':
assets_dir = os.path.join(out_theme, "assets")
if os.path.exists(assets_dir):
shutil.rmtree(assets_dir)
shutil.copytree(
os.path.join(in_theme, "assets"), os.path.join(out_theme, "assets"),
symlinks=True)
# Copy bundles
# shutil.copy(os.path.join(in_theme, "bundles"), os.path.join(out_theme, "bundles"))
# Copy README
if os.path.isfile(os.path.join(in_theme, "README.md")):
shutil.copy(os.path.join(in_theme, "README.md"), os.path.join(out_theme, "README.md"))
def error(msg):
print("\033[1;31mERROR: {0}\033[0m".format(msg))
def mako2jinja(input_file):
output = ''
# TODO: OMG, this code is so horrible. Look at it; just look at it:
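    # Rough idea of the per-line rewrites applied below (illustrative):
    #   Mako : % if post.title:       and   ${post.title|h}
    #   Jinja: {% if post.title %}    and   {{ post.title|e }}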
macro_start = re.compile(r'(.*)<%\s*def name="([^"]*?)"\s*>(.*)', re.IGNORECASE)
macro_end = re.compile(r'(.*)</%def>(.*)', re.IGNORECASE)
if_start = re.compile(r'(.*)% *if (.*):(.*)', re.IGNORECASE)
if_else = re.compile(r'(.*)% *else.*:(.*)', re.IGNORECASE)
if_elif = re.compile(r'(.*)% *elif (.*):(.*)', re.IGNORECASE)
if_end = re.compile(r'(.*)% *endif(.*)', re.IGNORECASE)
for_start = re.compile(r'(.*)% *for (.*):(.*)', re.IGNORECASE)
for_end = re.compile(r'(.*)% *endfor(.*)', re.IGNORECASE)
namespace = re.compile(r'(.*)<% *namespace name="(.*?)".* file="(.*?)".*/>(.*)', re.IGNORECASE)
inherit = re.compile(r'(.*)<% *inherit file="(.*?)".*/>(.*)', re.IGNORECASE)
block_single_line = re.compile(r'(.*)<% *block.*name="(.*?)".*>(.*)</% *block>(.*)', re.IGNORECASE)
block_start = re.compile(r'(.*)<% *block.*name="(.*?)".*>(.*)', re.IGNORECASE)
block_end = re.compile(r'(.*)</%block>(.*)', re.IGNORECASE)
val = re.compile(r'\$\{(.*?)\}', re.IGNORECASE)
func_len = re.compile(r'len\((.*?)\)', re.IGNORECASE)
filter_h = re.compile(r'\|h', re.IGNORECASE)
filter_striphtml = re.compile(r'\|striphtml', re.IGNORECASE)
filter_u = re.compile(r'\|u', re.IGNORECASE)
comment_single_line = re.compile(r'^.*##(.*?)$', re.IGNORECASE)
for line in input_file:
# Process line for repeated inline replacements
m_val = val.search(line)
m_func_len = func_len.search(line)
m_filter_h = filter_h.search(line)
m_filter_striphtml = filter_striphtml.search(line)
m_filter_u = filter_u.search(line)
if m_val:
line = val.sub(r'{{ \1 }}', line)
if m_filter_h:
line = filter_h.sub(r'|e', line)
if m_filter_striphtml:
line = filter_striphtml.sub(r'|e', line)
if m_filter_u:
line = filter_u.sub(r'|urlencode', line)
if m_func_len:
line = func_len.sub(r'\1|length', line)
# Macro start/end
m_macro_start = macro_start.search(line)
if m_macro_start:
line = m_macro_start.expand(r'\1{% macro \2 %}\3') + '\n'
m_macro_end = macro_end.search(line)
if m_macro_end:
line = m_macro_end.expand(r'\1{% endmacro %}\2') + '\n'
# Process line for single 'whole line' replacements
m_macro_start = macro_start.search(line)
m_macro_end = macro_end.search(line)
m_if_start = if_start.search(line)
m_if_else = if_else.search(line)
m_if_elif = if_elif.search(line)
m_if_end = if_end.search(line)
m_for_start = for_start.search(line)
m_for_end = for_end.search(line)
m_namspace = namespace.search(line)
m_inherit = inherit.search(line)
m_block_single_line = block_single_line.search(line)
m_block_start = block_start.search(line)
m_block_end = block_end.search(line)
m_comment_single_line = comment_single_line.search(line)
if m_comment_single_line:
output += m_comment_single_line.expand(r'{# \1 #}') + '\n'
elif m_if_start:
output += m_if_start.expand(r'\1{% if \2 %}\3') + '\n'
elif m_if_else:
output += m_if_else.expand(r'\1{% else %}\2') + '\n'
elif m_if_elif:
output += m_if_elif.expand(r'\1{% elif \2 %}\3') + '\n'
elif m_if_end:
output += m_if_end.expand(r'\1{% endif %}\2') + '\n'
elif m_for_start:
output += m_for_start.expand(r'\1{% for \2 %}\3') + '\n'
elif m_for_end:
output += m_for_end.expand(r'\1{% endfor %}\2') + '\n'
elif m_namspace:
output += m_namspace.expand(r"\1{% import '\3' as \2 with context %}\4") + '\n'
elif m_inherit:
output += m_inherit.expand(r"{% extends '\2' %}\3") + '\n'
elif m_block_single_line:
output += m_block_single_line.expand(r'\1{% block \2 %}\3{% endblock %}\4') + '\n'
elif m_block_start:
output += m_block_start.expand(r'\1{% block \2 %}\3') + '\n'
elif m_block_end:
output += m_block_end.expand(r'\1{% endblock %}\2') + '\n'
else:
            # Doesn't match anything we're going to process, pass through
output += line
return output
def jinjify_shortcodes(in_dir, out_dir):
for fname in os.listdir(in_dir):
if not fname.endswith('.tmpl'):
continue
in_file = os.path.join(in_dir, fname)
out_file = os.path.join(out_dir, fname)
with open(in_file) as inf:
data = mako2jinja(inf)
with open(out_file, 'w') as outf:
outf.write(data)
def usage():
print("Usage: python {} [in-dir] [out-dir]".format(sys.argv[0]))
print("OR")
print("Usage: python {} [in-file] [out-file]".format(sys.argv[0]))
if __name__ == "__main__":
if len(sys.argv) == 1:
print('Performing standard conversions:')
for m, j in (
('nikola/data/themes/base', 'nikola/data/themes/base-jinja'),
('nikola/data/themes/bootstrap4', 'nikola/data/themes/bootstrap4-jinja'),
('nikola/data/themes/bootblog4', 'nikola/data/themes/bootblog4-jinja'),
):
print(' {0} -> {1}'.format(m, j))
jinjify(m, j)
jinjify_shortcodes('nikola/data/shortcodes/mako', 'nikola/data/shortcodes/jinja')
elif len(sys.argv) != 3:
        print('ERROR: needs an input and an output directory (or file), or no arguments for the default conversions.')
usage()
elif os.path.isdir(sys.argv[1]) and (os.path.isdir(sys.argv[2]) or not os.path.exists(sys.argv[2])):
jinjify(sys.argv[1], sys.argv[2])
elif os.path.isfile(sys.argv[1]) and (os.path.isfile(sys.argv[2]) or not os.path.exists(sys.argv[2])):
tmpdir = tempfile.mkdtemp()
indir = os.path.sep.join((tmpdir, 'in', 'templates'))
outdir = os.path.sep.join((tmpdir, 'out', 'templates'))
os.makedirs(indir)
shutil.copy(sys.argv[1], indir)
jinjify(os.path.dirname(indir), os.path.dirname(outdir))
shutil.move(os.path.sep.join((outdir, os.path.basename(sys.argv[1]))), sys.argv[2])
else:
print('ERROR: the two arguments must be both directories or files')
usage()
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from filterpy.common import (linear_ode_discretation, Q_discrete_white_noise,
kinematic_kf)
from numpy import array
def near_eq(x,y):
return abs(x-y) < 1.e-17
def test_kinematic():
kf = kinematic_kf(1,1)
def test_Q_discrete_white_noise():
Q = Q_discrete_white_noise (2)
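    # For dim=2 with the defaults dt=1 and var=1 this is
    # [[dt**4/4, dt**3/2], [dt**3/2, dt**2]] = [[.25, .5], [.5, 1.]],
    # which the asserts below check element by element.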
assert Q[0,0] == .25
assert Q[1,0] == .5
assert Q[0,1] == .5
assert Q[1,1] == 1
assert Q.shape == (2,2)
def test_linear_ode():
F = array([[0,0,1,0,0,0],
[0,0,0,1,0,0],
[0,0,0,0,1,0],
[0,0,0,0,0,1],
[0,0,0,0,0,0],
[0,0,0,0,0,0]], dtype=float)
L = array ([[0,0],
[0,0],
[0,0],
[0,0],
[1,0],
[0,1]], dtype=float)
q = .2
Q = array([[q, 0],[0, q]])
dt = 0.5
A,Q = linear_ode_discretation(F, L, Q, dt)
val = [1, 0, dt, 0, 0.5*dt**2, 0]
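    # With the state apparently ordered [x, y, vx, vy, ax, ay], the first row
    # of the discretized transition matrix couples x to vx by dt and to ax by
    # dt**2/2; the loops below check the same pattern shifted along each row.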
for i in range(6):
assert val[i] == A[0,i]
for i in range(6):
assert val[i-1] == A[1,i] if i > 0 else A[1,i] == 0
for i in range(6):
assert val[i-2] == A[2,i] if i > 1 else A[2,i] == 0
for i in range(6):
assert val[i-3] == A[3,i] if i > 2 else A[3,i] == 0
for i in range(6):
assert val[i-4] == A[4,i] if i > 3 else A[4,i] == 0
for i in range(6):
assert val[i-5] == A[5,i] if i > 4 else A[5,i] == 0
assert near_eq(Q[0,0], (1./20)*(dt**5)*q)
assert near_eq(Q[0,1], 0)
assert near_eq(Q[0,2], (1/8)*(dt**4)*q)
assert near_eq(Q[0,3], 0)
assert near_eq(Q[0,4], (1./6)*(dt**3)*q)
assert near_eq(Q[0,5], 0)
if __name__ == "__main__":
test_linear_ode()
test_Q_discrete_white_noise()
F = array([[0,0,1,0,0,0],
[0,0,0,1,0,0],
[0,0,0,0,1,0],
[0,0,0,0,0,1],
[0,0,0,0,0,0],
[0,0,0,0,0,0]], dtype=float)
L = array ([[0,0],
[0,0],
[0,0],
[0,0],
[1,0],
[0,1]], dtype=float)
q = .2
Q = array([[q, 0],[0, q]])
dt = 1/30
A,Q = linear_ode_discretation(F, L, Q, dt)
print(Q)
|
from stash.tests.stashtest import StashTestCase
class RuntimeTests(StashTestCase):
setup_commands = ['BIN_PATH=$STASH_ROOT/tests/system/data:$BIN_PATH']
def test_03(self):
cmp_str = r"""[stash]$ x y
A is{0}
A is 8
bin
[stash]$ """.format(' ')
self.do_test('test03.sh x y', cmp_str, ensure_undefined=('A', ))
def test_05(self):
cmp_str = r"""[stash]$ AA is{0}
AA is Hello
stash
bin
1
B is{0}
B is 89
bin
2
B is{0}
stash
[stash]$ """.format(' ')
self.do_test('test05.py', cmp_str, ensure_undefined=('AA', 'B'))
def test_06(self):
cmp_str = r"""[stash]$ AA is{0}
--- direct execution without sourcing ---
From tobesourced AA is sourced
copy=pbcopy
env=printenv
help=man
l1=ls -1
la=ls -a
ll=ls -la
paste=pbpaste
unmount=umount
AA is{0}
copy=pbcopy
env=printenv
help=man
la=ls -a
ll=ls -la
paste=pbpaste
unmount=umount
--- source the file ---
From tobesourced AA is sourced
copy=pbcopy
env=printenv
help=man
l1=ls -1
la=ls -a
ll=ls -la
paste=pbpaste
unmount=umount
AA is sourced
copy=pbcopy
env=printenv
help=man
l1=ls -1
la=ls -a
ll=ls -la
paste=pbpaste
unmount=umount
[stash]$ """.format(' ')
self.do_test('test06.sh', cmp_str, ensure_undefined=('A', ))
def test_07(self):
cmp_str = r"""[stash]$ A is 999
A is{0}
[stash]$ """.format(" ")
self.do_test('test07.sh', cmp_str, ensure_undefined=('A', ))
def test_08(self):
cmp_str = r"""[stash]$ A is{0}
[stash]$ """.format(" ")
self.do_test('test08.sh', cmp_str, ensure_undefined=('A', ))
def test_09(self):
cmp_str = r"""[stash]$ A is{0}
[stash]$ """.format(' ')
self.do_test('test09.sh', cmp_str, ensure_undefined=('A', ))
def test_10(self):
cmp_str = r"""[stash]$ 1: #!/bin/bash
[stash]$ """
self.do_test('test10.sh', cmp_str)
def test_11(self):
cmp_str = r"""[stash]$ A is 42
[stash]$ """
self.do_test('test11.sh', cmp_str, ensure_undefined=('A', ))
def test_12(self):
"""
        Directory changes made by a script via the callable interface should
        not affect the parent shell, but they persist for any following calls
        from the same parent shell.
"""
cmp_str = r"""[stash]$ parent script stash
parent script stash
from child script 2 bin
parent script stash
[stash]$ """
self.do_test('test_12.py', cmp_str)
|
import logging
from homeassistant.components import light, tellduslive
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .entry import TelldusLiveEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tellduslive sensors dynamically."""
async def async_discover_light(device_id):
"""Discover and add a discovered sensor."""
client = hass.data[tellduslive.DOMAIN]
async_add_entities([TelldusLiveLight(client, device_id)])
async_dispatcher_connect(
hass,
tellduslive.TELLDUS_DISCOVERY_NEW.format(light.DOMAIN, tellduslive.DOMAIN),
async_discover_light,
)
class TelldusLiveLight(TelldusLiveEntity, LightEntity):
"""Representation of a Tellstick Net light."""
def __init__(self, client, device_id):
"""Initialize the Tellstick Net light."""
super().__init__(client, device_id)
self._last_brightness = self.brightness
def changed(self):
"""Define a property of the device that might have changed."""
self._last_brightness = self.brightness
self._update_callback()
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self.device.dim_level
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
@property
def is_on(self):
"""Return true if light is on."""
return self.device.is_on
def turn_on(self, **kwargs):
"""Turn the light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness)
if brightness == 0:
fallback_brightness = 100
_LOGGER.info(
"Setting brightness to %d%%, because it was 0", fallback_brightness
)
brightness = int(fallback_brightness * 255 / 100)
self.device.dim(level=brightness)
self.changed()
def turn_off(self, **kwargs):
"""Turn the light off."""
self.device.turn_off()
self.changed()
|
import logging
from august.activity import ActivityType
from homeassistant.components.sensor import DEVICE_CLASS_BATTERY
from homeassistant.const import ATTR_ENTITY_PICTURE, PERCENTAGE
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.helpers.restore_state import RestoreEntity
from .const import (
ATTR_OPERATION_AUTORELOCK,
ATTR_OPERATION_KEYPAD,
ATTR_OPERATION_METHOD,
ATTR_OPERATION_REMOTE,
DATA_AUGUST,
DOMAIN,
OPERATION_METHOD_AUTORELOCK,
OPERATION_METHOD_KEYPAD,
OPERATION_METHOD_MOBILE_DEVICE,
OPERATION_METHOD_REMOTE,
)
from .entity import AugustEntityMixin
_LOGGER = logging.getLogger(__name__)
def _retrieve_device_battery_state(detail):
"""Get the latest state of the sensor."""
return detail.battery_level
def _retrieve_linked_keypad_battery_state(detail):
"""Get the latest state of the sensor."""
return detail.battery_percentage
SENSOR_TYPES_BATTERY = {
"device_battery": {"state_provider": _retrieve_device_battery_state},
"linked_keypad_battery": {"state_provider": _retrieve_linked_keypad_battery_state},
}
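# "device_battery" reports the battery of the lock or doorbell itself, while
# "linked_keypad_battery" reports the battery of a keypad paired to a lock.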
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the August sensors."""
data = hass.data[DOMAIN][config_entry.entry_id][DATA_AUGUST]
devices = []
migrate_unique_id_devices = []
operation_sensors = []
batteries = {
"device_battery": [],
"linked_keypad_battery": [],
}
for device in data.doorbells:
batteries["device_battery"].append(device)
for device in data.locks:
batteries["device_battery"].append(device)
batteries["linked_keypad_battery"].append(device)
operation_sensors.append(device)
for device in batteries["device_battery"]:
state_provider = SENSOR_TYPES_BATTERY["device_battery"]["state_provider"]
detail = data.get_device_detail(device.device_id)
if detail is None or state_provider(detail) is None:
_LOGGER.debug(
"Not adding battery sensor for %s because it is not present",
device.device_name,
)
continue
_LOGGER.debug(
"Adding battery sensor for %s",
device.device_name,
)
devices.append(AugustBatterySensor(data, "device_battery", device, device))
for device in batteries["linked_keypad_battery"]:
detail = data.get_device_detail(device.device_id)
if detail.keypad is None:
_LOGGER.debug(
"Not adding keypad battery sensor for %s because it is not present",
device.device_name,
)
continue
_LOGGER.debug(
"Adding keypad battery sensor for %s",
device.device_name,
)
keypad_battery_sensor = AugustBatterySensor(
data, "linked_keypad_battery", detail.keypad, device
)
devices.append(keypad_battery_sensor)
migrate_unique_id_devices.append(keypad_battery_sensor)
for device in operation_sensors:
devices.append(AugustOperatorSensor(data, device))
await _async_migrate_old_unique_ids(hass, migrate_unique_id_devices)
async_add_entities(devices, True)
async def _async_migrate_old_unique_ids(hass, devices):
"""Keypads now have their own serial number."""
registry = await async_get_registry(hass)
for device in devices:
old_entity_id = registry.async_get_entity_id(
"sensor", DOMAIN, device.old_unique_id
)
if old_entity_id is not None:
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
device.old_unique_id,
device.unique_id,
)
registry.async_update_entity(old_entity_id, new_unique_id=device.unique_id)
class AugustOperatorSensor(AugustEntityMixin, RestoreEntity, Entity):
"""Representation of an August lock operation sensor."""
def __init__(self, data, device):
"""Initialize the sensor."""
super().__init__(data, device)
self._data = data
self._device = device
self._state = None
self._operated_remote = None
self._operated_keypad = None
self._operated_autorelock = None
self._operated_time = None
self._available = False
self._entity_picture = None
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._device.device_name} Operator"
@callback
def _update_from_data(self):
"""Get the latest state of the sensor and update activity."""
lock_activity = self._data.activity_stream.get_latest_device_activity(
self._device_id, [ActivityType.LOCK_OPERATION]
)
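        # Only overwrite the (possibly restored) state when a lock operation appears in the activity stream.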
if lock_activity is not None:
self._available = True
self._state = lock_activity.operated_by
self._operated_remote = lock_activity.operated_remote
self._operated_keypad = lock_activity.operated_keypad
self._operated_autorelock = lock_activity.operated_autorelock
self._entity_picture = lock_activity.operator_thumbnail_url
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attributes = {}
if self._operated_remote is not None:
attributes[ATTR_OPERATION_REMOTE] = self._operated_remote
if self._operated_keypad is not None:
attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad
if self._operated_autorelock is not None:
attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock
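        # Prefer remote, then keypad, then autorelock; otherwise assume the mobile app operated the lock.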
if self._operated_remote:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE
elif self._operated_keypad:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD
elif self._operated_autorelock:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK
else:
attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE
return attributes
async def async_added_to_hass(self):
"""Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if not last_state:
return
self._state = last_state.state
if ATTR_ENTITY_PICTURE in last_state.attributes:
self._entity_picture = last_state.attributes[ATTR_ENTITY_PICTURE]
if ATTR_OPERATION_REMOTE in last_state.attributes:
self._operated_remote = last_state.attributes[ATTR_OPERATION_REMOTE]
if ATTR_OPERATION_KEYPAD in last_state.attributes:
self._operated_keypad = last_state.attributes[ATTR_OPERATION_KEYPAD]
if ATTR_OPERATION_AUTORELOCK in last_state.attributes:
self._operated_autorelock = last_state.attributes[ATTR_OPERATION_AUTORELOCK]
@property
def entity_picture(self):
"""Return the entity picture to use in the frontend, if any."""
return self._entity_picture
@property
def unique_id(self) -> str:
"""Get the unique id of the device sensor."""
return f"{self._device_id}_lock_operator"
class AugustBatterySensor(AugustEntityMixin, Entity):
"""Representation of an August sensor."""
def __init__(self, data, sensor_type, device, old_device):
"""Initialize the sensor."""
super().__init__(data, device)
self._data = data
self._sensor_type = sensor_type
self._device = device
self._old_device = old_device
self._state = None
self._available = False
self._update_from_data()
@property
def available(self):
"""Return the availability of this sensor."""
return self._available
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return PERCENTAGE
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_BATTERY
@property
def name(self):
"""Return the name of the sensor."""
device_name = self._device.device_name
return f"{device_name} Battery"
@callback
def _update_from_data(self):
"""Get the latest state of the sensor."""
state_provider = SENSOR_TYPES_BATTERY[self._sensor_type]["state_provider"]
self._state = state_provider(self._detail)
self._available = self._state is not None
@property
def unique_id(self) -> str:
"""Get the unique id of the device sensor."""
return f"{self._device_id}_{self._sensor_type}"
@property
def old_unique_id(self) -> str:
"""Get the old unique id of the device sensor."""
return f"{self._old_device.device_id}_{self._sensor_type}"
|
import pytest
from queue import Empty
from unittest.mock import patch
from kombu import messaging
from kombu import Connection, Exchange, Queue
from kombu.transport import azureservicebus
pytest.importorskip('azure.servicebus')
try:
# azure-servicebus version >= 0.50.0
from azure.servicebus.control_client import Message, ServiceBusService
except ImportError:
try:
# azure-servicebus version <= 0.21.1
from azure.servicebus import Message, ServiceBusService
except ImportError:
ServiceBusService = Message = None
class QueueMock:
""" Hold information about a queue. """
def __init__(self, name):
self.name = name
self.messages = []
    @property
    def message_count(self):
        # Report the live number of queued messages instead of a value frozen at construction time.
        return len(self.messages)
    def __repr__(self):
        return 'QueueMock: {} messages'.format(len(self.messages))
def _create_mock_connection(url='', **kwargs):
class _Channel(azureservicebus.Channel):
        # reset the class-level queue list for each mock connection
queues = []
_queue_service = None
def list_queues(self):
return self.queues
@property
def queue_service(self):
if self._queue_service is None:
self._queue_service = AzureServiceBusClientMock()
return self._queue_service
class Transport(azureservicebus.Transport):
Channel = _Channel
return Connection(url, transport=Transport, **kwargs)
class AzureServiceBusClientMock:
def __init__(self):
"""
Imitate the ServiceBus Client.
"""
        # `queues` doesn't exist on the real client; it's only here for testing.
self.queues = []
self._queue_cache = {}
self.queues.append(self.create_queue(queue_name='unittest_queue'))
def create_queue(self, queue_name, queue=None, fail_on_exist=False):
queue = QueueMock(name=queue_name)
self.queues.append(queue)
self._queue_cache[queue_name] = queue
return queue
def get_queue(self, queue_name=None):
for queue in self.queues:
if queue.name == queue_name:
return queue
def list_queues(self):
return self.queues
def send_queue_message(self, queue_name=None, message=None):
queue = self.get_queue(queue_name)
queue.messages.append(message)
def receive_queue_message(self, queue_name, peek_lock=True, timeout=60):
queue = self.get_queue(queue_name)
if queue and len(queue.messages):
return queue.messages.pop(0)
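        # An empty Message stands in for "no message available".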
return Message()
def read_delete_queue_message(self, queue_name, timeout='60'):
return self.receive_queue_message(queue_name, timeout=timeout)
def delete_queue(self, queue_name=None):
        queue = self.get_queue(queue_name)
        if queue:
            # Remove the queue from the mock's state rather than just
            # dropping the local reference.
            self.queues.remove(queue)
            self._queue_cache.pop(queue_name, None)
class test_Channel:
def handleMessageCallback(self, message):
self.callback_message = message
def setup(self):
self.url = 'azureservicebus://'
self.queue_name = 'unittest_queue'
self.exchange = Exchange('test_servicebus', type='direct')
self.queue = Queue(self.queue_name, self.exchange, self.queue_name)
self.connection = _create_mock_connection(self.url)
self.channel = self.connection.default_channel
self.queue(self.channel).declare()
self.producer = messaging.Producer(self.channel,
self.exchange,
routing_key=self.queue_name)
self.channel.basic_consume(self.queue_name,
no_ack=False,
callback=self.handleMessageCallback,
consumer_tag='unittest')
def teardown(self):
# Removes QoS reserved messages so we don't restore msgs on shutdown.
try:
qos = self.channel._qos
except AttributeError:
pass
else:
if qos:
qos._dirty.clear()
qos._delivered.clear()
def test_queue_service(self):
        # Test getting queue service without credentials
conn = Connection(self.url, transport=azureservicebus.Transport)
        with pytest.raises(ValueError) as exc:
            conn.channel()
        assert 'You need to provide servicebus namespace' in str(exc.value)
        # Test getting queue service when queue_service is not set
with patch('kombu.transport.azureservicebus.ServiceBusService') as m:
channel = conn.channel()
# Remove queue service to get from service bus again
channel._queue_service = None
channel.queue_service
assert m.call_count == 2
            # Calling queue_service again should reuse the existing ServiceBus instance
channel.queue_service
assert m.call_count == 2
def test_conninfo(self):
conninfo = self.channel.conninfo
assert conninfo is self.connection
def test_transport_type(self):
transport_options = self.channel.transport_options
assert transport_options == {}
def test_visibility_timeout(self):
# Test getting default visibility timeout
assert (
self.channel.visibility_timeout ==
azureservicebus.Channel.default_visibility_timeout
)
        # Test getting the value set in transport options
del self.channel.visibility_timeout
self.channel.transport_options['visibility_timeout'] = 10
assert self.channel.visibility_timeout == 10
def test_wait_timeout_seconds(self):
# Test getting default wait timeout seconds
assert (
self.channel.wait_time_seconds ==
azureservicebus.Channel.default_wait_time_seconds
)
        # Test getting the value set in transport options
del self.channel.wait_time_seconds
self.channel.transport_options['wait_time_seconds'] = 10
assert self.channel.wait_time_seconds == 10
def test_peek_lock(self):
# Test getting default peek lock
assert (
self.channel.peek_lock ==
azureservicebus.Channel.default_peek_lock
)
        # Test getting the value set in transport options
del self.channel.peek_lock
self.channel.transport_options['peek_lock'] = True
assert self.channel.peek_lock is True
def test_get_from_azure(self):
# Test getting a single message
message = 'my test message'
self.producer.publish(message)
result = self.channel._get(self.queue_name)
assert 'body' in result.keys()
# Test getting multiple messages
for i in range(3):
message = f'message: {i}'
self.producer.publish(message)
queue_service = self.channel.queue_service
assert len(queue_service.get_queue(self.queue_name).messages) == 3
for i in range(3):
result = self.channel._get(self.queue_name)
assert len(queue_service.get_queue(self.queue_name).messages) == 0
def test_get_with_empty_list(self):
with pytest.raises(Empty):
self.channel._get(self.queue_name)
def test_put_and_get(self):
message = 'my test message'
self.producer.publish(message)
results = self.queue(self.channel).get().payload
assert message == results
def test_delete_queue(self):
# Test deleting queue without message
queue_name = 'new_unittest_queue'
self.channel._new_queue(queue_name)
assert queue_name in self.channel._queue_cache
self.channel._delete(queue_name)
assert queue_name not in self.channel._queue_cache
# Test deleting queue with message
message = 'my test message'
self.producer.publish(message)
self.channel._delete(self.queue_name)
        assert self.queue_name not in self.channel._queue_cache
|
import unittest
import responses
from slacker import Channels
from slacker.utilities import get_api_url
class TestChannels(unittest.TestCase):
@responses.activate
def test_valid_ids_return_channel_id(self):
response = {
'ok': 'true',
'channels': [
{'name': 'general', 'id': 'C111'},
{'name': 'random', 'id': 'C222'}
]
}
responses.add(
responses.GET,
get_api_url('channels.list'),
json=response,
status=200
)
channels = Channels(token='aaa')
self.assertEqual(channels.get_channel_id('general'), 'C111')
@responses.activate
def test_invalid_channel_ids_return_none(self):
response = {
'ok': 'true',
'channels': [
{'name': 'general', 'id': 'C111'},
{'name': 'random', 'id': 'C222'}
]
}
responses.add(
responses.GET,
get_api_url('channels.list'),
json=response,
status=200
)
channels = Channels(token='aaa')
self.assertEqual(channels.get_channel_id('fake_group'), None)
|
from typing import Any, Callable, Dict, List, Optional
import pyvera as veraApi
from homeassistant.components.lock import (
DOMAIN as PLATFORM_DOMAIN,
ENTITY_ID_FORMAT,
LockEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from . import VeraDevice
from .common import ControllerData, get_controller_data
ATTR_LAST_USER_NAME = "changed_by_name"
ATTR_LOW_BATTERY = "low_battery"
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
controller_data = get_controller_data(hass, entry)
async_add_entities(
[
VeraLock(device, controller_data)
for device in controller_data.devices.get(PLATFORM_DOMAIN)
]
)
class VeraLock(VeraDevice[veraApi.VeraLock], LockEntity):
"""Representation of a Vera lock."""
def __init__(self, vera_device: veraApi.VeraLock, controller_data: ControllerData):
"""Initialize the Vera device."""
self._state = None
VeraDevice.__init__(self, vera_device, controller_data)
self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
def lock(self, **kwargs: Any) -> None:
"""Lock the device."""
self.vera_device.lock()
self._state = STATE_LOCKED
def unlock(self, **kwargs: Any) -> None:
"""Unlock the device."""
self.vera_device.unlock()
self._state = STATE_UNLOCKED
@property
def is_locked(self) -> Optional[bool]:
"""Return true if device is on."""
return self._state == STATE_LOCKED
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Who unlocked the lock and did a low battery alert fire.
Reports on the previous poll cycle.
changed_by_name is a string like 'Bob'.
low_battery is 1 if an alert fired, 0 otherwise.
"""
data = super().device_state_attributes
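        # last_user is a (user id, user name) tuple from the last poll, or None if no alert was recorded.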
last_user = self.vera_device.get_last_user_alert()
if last_user is not None:
data[ATTR_LAST_USER_NAME] = last_user[1]
data[ATTR_LOW_BATTERY] = self.vera_device.get_low_battery_alert()
return data
@property
def changed_by(self) -> Optional[str]:
"""Who unlocked the lock.
Reports on the previous poll cycle.
changed_by is an integer user ID.
"""
last_user = self.vera_device.get_last_user_alert()
if last_user is not None:
return last_user[0]
return None
def update(self) -> None:
"""Update state by the Vera device callback."""
self._state = (
STATE_LOCKED if self.vera_device.is_locked(True) else STATE_UNLOCKED
)
|
import os.path as op
import numpy as np
import pytest
import matplotlib.pyplot as plt
from mne import (read_events, Epochs, pick_types, read_cov, create_info,
EpochsArray)
from mne.channels import read_layout
from mne.io import read_raw_fif, read_raw_ctf
from mne.utils import run_tests_if_main, _click_ch_name, _close_event
from mne.viz import plot_drop_log
from mne.viz.utils import _fake_click
from mne.datasets import testing
from mne.event import make_fixed_length_events
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
evoked_fname = op.join(base_dir, 'test-ave.fif')
raw_fname = op.join(base_dir, 'test_raw.fif')
cov_fname = op.join(base_dir, 'test-cov.fif')
event_name = op.join(base_dir, 'test-eve.fif')
event_id, tmin, tmax = 1, -0.1, 1.0
layout = read_layout('Vectorview-all')
test_base_dir = testing.data_path(download=False)
ctf_fname = op.join(test_base_dir, 'CTF', 'testdata_ctf.ds')
def _get_epochs(stop=5, meg=True, eeg=False, n_chan=20):
"""Get epochs."""
raw = read_raw_fif(raw_fname)
events = read_events(event_name)
picks = pick_types(raw.info, meg=meg, eeg=eeg, stim=False,
ecg=False, eog=False, exclude='bads')
# Use a subset of channels for plotting speed
picks = np.round(np.linspace(0, len(picks) + 1, n_chan)).astype(int)
with pytest.warns(RuntimeWarning, match='projection'):
epochs = Epochs(raw, events[:stop], event_id, tmin, tmax, picks=picks,
proj=False, preload=False)
epochs.info.normalize_proj() # avoid warnings
return epochs
@pytest.fixture()
def epochs():
"""Get minimal, pre-loaded epochs data suitable for most tests."""
return _get_epochs().load_data()
def test_plot_epochs_not_preloaded():
"""Test plotting non-preloaded epochs."""
epochs = _get_epochs()
assert epochs._data is None
epochs.plot()
assert epochs._data is None
def test_plot_epochs_basic(epochs, capsys):
"""Test epoch plotting."""
assert len(epochs.events) == 1
epochs.info['lowpass'] = 10. # allow heavy decim during plotting
fig = epochs.plot(scalings=None, title='Epochs')
ticks = [x.get_text() for x in fig.mne.ax_main.get_xticklabels(minor=True)]
assert ticks == ['2']
plt.close('all')
# covariance / whitening
cov = read_cov(cov_fname)
assert len(cov['names']) == 366 # all channels
assert cov['bads'] == []
assert epochs.info['bads'] == [] # all good
with pytest.warns(RuntimeWarning, match='projection'):
epochs.plot(noise_cov=cov)
plt.close('all')
# add a channel to the epochs.info['bads']
epochs.info['bads'] = [epochs.ch_names[0]]
with pytest.warns(RuntimeWarning, match='projection'):
epochs.plot(noise_cov=cov)
plt.close('all')
# add a channel to cov['bads']
cov['bads'] = [epochs.ch_names[1]]
with pytest.warns(RuntimeWarning, match='projection'):
epochs.plot(noise_cov=cov)
plt.close('all')
# have a data channel missing from the covariance
cov['names'] = cov['names'][:306]
    cov['data'] = cov['data'][:306, :306]
with pytest.warns(RuntimeWarning, match='projection'):
epochs.plot(noise_cov=cov)
plt.close('all')
# other options
fig = epochs[0].plot(picks=[0, 2, 3], scalings=None)
fig.canvas.key_press_event('escape')
with pytest.raises(ValueError, match='No appropriate channels found'):
epochs.plot(picks=[])
# gh-5906
epochs = _get_epochs(None).load_data()
epochs.load_data()
assert len(epochs) == 7
epochs.info['bads'] = [epochs.ch_names[0]]
capsys.readouterr()
# test title error handling
with pytest.raises(TypeError, match='title must be None or a string, got'):
epochs.plot(title=7)
# test auto-generated title, and selection mode
epochs.plot(group_by='selection', title='')
@pytest.mark.parametrize('scalings', (dict(mag=1e-12, grad=1e-11, stim='auto'),
None, 'auto'))
def test_plot_epochs_scalings(epochs, scalings):
"""Test the valid options for scalings."""
epochs.plot(scalings=scalings)
def test_plot_epochs_colors(epochs):
"""Test epoch_colors, for compatibility with autoreject."""
epoch_colors = [['r'] * len(epochs.ch_names) for _ in
range(len(epochs.events))]
epochs.plot(epoch_colors=epoch_colors)
with pytest.raises(ValueError, match='length equal to the number of epo'):
epochs.plot(epoch_colors=[['r'], ['b']]) # epochs obj has only 1 epoch
with pytest.raises(ValueError, match=r'epoch colors for epoch \d+ has'):
epochs.plot(epoch_colors=[['r']]) # need 1 color for each channel
# also test event_color
epochs.plot(event_color='b')
def test_plot_epochs_scale_bar(epochs):
"""Test scale bar for epochs."""
fig = epochs.plot()
fig.canvas.key_press_event('s') # default is to not show scalebars
ax = fig.mne.ax_main
assert len(ax.texts) == 2 # only mag & grad in this instance
texts = tuple(t.get_text().strip() for t in ax.texts)
wants = ('800.0 fT/cm', '2000.0 fT')
assert texts == wants
def test_plot_epochs_clicks(epochs, capsys):
"""Test plot_epochs mouse interaction."""
fig = epochs.plot(events=epochs.events)
data_ax = fig.mne.ax_main
x = fig.mne.traces[0].get_xdata()[3]
y = fig.mne.traces[0].get_ydata()[3]
n_epochs = len(epochs)
epoch_num = fig.mne.inst.selection[0]
# test (un)marking bad epochs
_fake_click(fig, data_ax, [x, y], xform='data') # mark a bad epoch
assert epoch_num in fig.mne.bad_epochs
_fake_click(fig, data_ax, [x, y], xform='data') # unmark it
assert epoch_num not in fig.mne.bad_epochs
_fake_click(fig, data_ax, [x, y], xform='data') # mark it bad again
assert epoch_num in fig.mne.bad_epochs
# test vline
fig.canvas.key_press_event('escape') # close and drop epochs
_close_event(fig) # XXX workaround, MPL Agg doesn't trigger close event
    assert n_epochs - 1 == len(epochs)
# test marking bad channels
epochs = _get_epochs(None).load_data() # need more than 1 epoch this time
fig = epochs.plot(n_epochs=3)
data_ax = fig.mne.ax_main
first_ch = data_ax.get_yticklabels()[0].get_text()
assert first_ch not in fig.mne.info['bads']
_click_ch_name(fig, ch_index=0, button=1) # click ch name to mark bad
assert first_ch in fig.mne.info['bads']
# test clicking scrollbars
_fake_click(fig, fig.mne.ax_vscroll, [0.5, 0.5])
_fake_click(fig, fig.mne.ax_hscroll, [0.5, 0.5])
# test moving bad epoch offscreen
fig.canvas.key_press_event('right') # move right
x = fig.mne.traces[0].get_xdata()[-3]
y = fig.mne.traces[0].get_ydata()[-3]
_fake_click(fig, data_ax, [x, y], xform='data') # mark a bad epoch
fig.canvas.key_press_event('left') # move back
out, err = capsys.readouterr()
assert 'out of bounds' not in out
assert 'out of bounds' not in err
fig.canvas.key_press_event('escape')
_close_event(fig) # XXX workaround, MPL Agg doesn't trigger close event
assert len(epochs) == 6
# test rightclick → image plot
fig = epochs.plot()
_click_ch_name(fig, ch_index=0, button=3) # show image plot
assert len(fig.mne.child_figs) == 1
# test scroll wheel
fig.canvas.scroll_event(0.5, 0.5, -0.5) # scroll down
fig.canvas.scroll_event(0.5, 0.5, 0.5) # scroll up
def test_plot_epochs_keypresses():
"""Test plot_epochs keypress interaction."""
epochs = _get_epochs(stop=15).load_data() # we need more than 1 epoch
epochs.drop_bad(dict(mag=4e-12)) # for histogram plot coverage
fig = epochs.plot(n_epochs=3)
data_ax = fig.mne.ax_main
# make sure green vlines are visible first (for coverage)
sample_idx = len(epochs.times) // 2 # halfway through the first epoch
x = fig.mne.traces[0].get_xdata()[sample_idx]
y = (fig.mne.traces[0].get_ydata()[sample_idx]
+ fig.mne.traces[1].get_ydata()[sample_idx]) / 2
_fake_click(fig, data_ax, [x, y], xform='data') # click between traces
# test keys
keys = ('pagedown', 'down', 'up', 'down', 'right', 'left', '-', '+', '=',
'd', 'd', 'pageup', 'home', 'shift+right', 'end', 'shift+left',
'z', 'z', 's', 's', 'f11', '?', 'h', 'j', 'b')
for key in keys * 2: # test twice → once in normal, once in butterfly view
fig.canvas.key_press_event(key)
_fake_click(fig, data_ax, [x, y], xform='data', button=3) # remove vlines
def test_epochs_plot_sensors(epochs):
"""Test sensor plotting."""
epochs.plot_sensors()
def test_plot_epochs_nodata():
"""Test plotting of epochs when no data channels are present."""
data = np.random.RandomState(0).randn(10, 2, 1000)
info = create_info(2, 1000., 'stim')
epochs = EpochsArray(data, info)
with pytest.raises(ValueError, match='consider passing picks explicitly'):
epochs.plot()
def test_plot_epochs_image(epochs):
"""Test plotting of epochs image.
Note that some of these tests that should pass are triggering MPL
UserWarnings about tight_layout not being applied ("tight_layout cannot
make axes width small enough to accommodate all axes decorations"). Calling
`plt.close('all')` just before the offending test seems to prevent this
warning, though it's unclear why.
"""
figs = epochs.plot_image()
assert len(figs) == 2 # one fig per ch_type (test data has mag, grad)
assert len(plt.get_fignums()) == 2
figs = epochs.plot_image()
assert len(figs) == 2
assert len(plt.get_fignums()) == 4 # should create new figures
epochs.plot_image(picks='mag', sigma=0.1)
epochs.plot_image(picks=[0, 1], combine='mean',
ts_args=dict(show_sensors=False))
epochs.plot_image(picks=[1], order=[0], overlay_times=[0.1], vmin=0.01,
title='test')
plt.close('all')
epochs.plot_image(picks=[1], overlay_times=[0.1], vmin=-0.001, vmax=0.001)
plt.close('all')
epochs.plot_image(picks=[1], vmin=lambda x: x.min())
# test providing figure
fig, axs = plt.subplots(3, 1)
epochs.plot_image(picks=[1], fig=fig)
# test providing axes instance
epochs.plot_image(picks=[1], axes=axs[0], evoked=False, colorbar=False)
plt.close('all')
# test order=callable
epochs.plot_image(picks=[0, 1],
order=lambda times, data: np.arange(len(data))[::-1])
# test warning
with pytest.warns(RuntimeWarning, match='Only one channel in group'):
epochs.plot_image(picks=[1], combine='mean')
# group_by should be a dict
with pytest.raises(TypeError, match="dict or None"):
epochs.plot_image(group_by='foo')
# units and scalings keys must match
with pytest.raises(ValueError, match='Scalings and units must have the'):
epochs.plot_image(units=dict(hi=1), scalings=dict(ho=1))
plt.close('all')
# test invert_y
epochs.plot_image(ts_args=dict(invert_y=True))
# can't combine different sensor types
with pytest.raises(ValueError, match='Cannot combine sensors of differ'):
epochs.plot_image(group_by=dict(foo=[0, 1, 2]))
# can't pass both fig and axes
with pytest.raises(ValueError, match='one of "fig" or "axes" must be'):
epochs.plot_image(fig='foo', axes='bar')
# wrong number of axes in fig
with pytest.raises(ValueError, match='"fig" must contain . axes, got .'):
epochs.plot_image(fig=plt.figure())
# only 1 group allowed when fig is passed
with pytest.raises(ValueError, match='"group_by" can only have one group'):
fig, axs = plt.subplots(3, 1)
epochs.plot_image(fig=fig, group_by=dict(foo=[0, 1], bar=[5, 6]))
del fig, axs
plt.close('all')
# must pass correct number of axes (1, 2, or 3)
with pytest.raises(ValueError, match='is a list, can only plot one group'):
fig, axs = plt.subplots(1, 3)
epochs.plot_image(axes=axs)
for length, kwargs in ([3, dict()],
[2, dict(evoked=False)],
[2, dict(colorbar=False)],
[1, dict(evoked=False, colorbar=False)]):
fig, axs = plt.subplots(1, length + 1)
epochs.plot_image(picks='mag', axes=axs[:length], **kwargs)
with pytest.raises(ValueError, match='"axes" must be length ., got .'):
epochs.plot_image(picks='mag', axes=axs, **kwargs)
plt.close('all')
# mismatch between axes dict keys and group_by dict keys
with pytest.raises(ValueError, match='must match the keys in "group_by"'):
epochs.plot_image(axes=dict())
# wrong number of axes in dict
match = 'each value in "axes" must be a list of . axes, got .'
with pytest.raises(ValueError, match=match):
epochs.plot_image(axes=dict(foo=axs[:2], bar=axs[:3]),
group_by=dict(foo=[0, 1], bar=[5, 6]))
# bad value of "combine"
with pytest.raises(ValueError, match='"combine" must be None, a callable'):
epochs.plot_image(combine='foo')
# mismatched picks and overlay_times
with pytest.raises(ValueError, match='size of overlay_times parameter'):
epochs.plot_image(picks=[1], overlay_times=[0.1, 0.2])
# bad overlay times
with pytest.warns(RuntimeWarning, match='fall outside'):
epochs.plot_image(overlay_times=[999.])
# mismatched picks and order
with pytest.raises(ValueError, match='must match the length of the data'):
epochs.plot_image(picks=[1], order=[0, 1])
plt.close('all')
def test_plot_drop_log():
"""Test plotting a drop log."""
epochs = _get_epochs() # not loaded
with pytest.raises(ValueError, match='bad epochs have not yet been'):
epochs.plot_drop_log()
epochs.drop_bad()
epochs.plot_drop_log()
plot_drop_log((('One',), (), ()))
plot_drop_log((('One',), ('Two',), ()))
plot_drop_log((('One',), ('One', 'Two'), ()))
for arg in ([], ([],), (1,)):
with pytest.raises(TypeError, match='tuple of tuple of str'):
plot_drop_log(arg)
plt.close('all')
def test_plot_psd_epochs(epochs):
"""Test plotting epochs psd (+topomap)."""
epochs.plot_psd(average=True, spatial_colors=False)
epochs.plot_psd(average=False, spatial_colors=True)
epochs.plot_psd(average=False, spatial_colors=False)
# test plot_psd_topomap errors
with pytest.raises(RuntimeError, match='No frequencies in band'):
epochs.plot_psd_topomap(bands=[(0, 0.01, 'foo')])
plt.close('all')
# test defaults
fig = epochs.plot_psd_topomap()
assert len(fig.axes) == 10 # default: 5 bands (δ, θ, α, β, γ) + colorbars
# test joint vlim
fig = epochs.plot_psd_topomap(vlim='joint')
vmin_0 = fig.axes[0].images[0].norm.vmin
vmax_0 = fig.axes[0].images[0].norm.vmax
assert all(vmin_0 == ax.images[0].norm.vmin for ax in fig.axes[1:5])
assert all(vmax_0 == ax.images[0].norm.vmax for ax in fig.axes[1:5])
# test support for single-bin bands
fig = epochs.plot_psd_topomap(bands=[(20, '20 Hz'), (15, 25, '15-25 Hz')])
# test with a flat channel
err_str = 'for channel %s' % epochs.ch_names[2]
epochs.get_data()[0, 2, :] = 0
for dB in [True, False]:
with pytest.warns(UserWarning, match=err_str):
epochs.plot_psd(dB=dB)
def test_plot_psdtopo_nirs(fnirs_epochs):
"""Test plotting of PSD topography for nirs data."""
bands = [(0.2, '0.2 Hz'), (0.4, '0.4 Hz'), (0.8, '0.8 Hz')]
fig = fnirs_epochs.plot_psd_topomap(bands=bands)
    assert len(fig.axes) == 6  # 3 bands x (plot + cmap)
@testing.requires_testing_data
def test_plot_epochs_ctf():
"""Test of basic CTF plotting."""
raw = read_raw_ctf(ctf_fname, preload=True)
raw.pick_channels(['UDIO001', 'UPPT001', 'SCLK01-177',
'BG1-4304', 'MLC11-4304', 'MLC11-4304',
'EEG058', 'UADC007-4302'])
evts = make_fixed_length_events(raw)
epochs = Epochs(raw, evts, preload=True)
epochs.plot()
plt.close('all')
# test butterfly
fig = epochs.plot(butterfly=True)
keys = ('b', 'b', 'pagedown', 'down', 'up', 'down', 'right', 'left', '-',
'+', '=', 'd', 'd', 'pageup', 'home', 'end', 'z', 'z', 's', 's',
'f11', '?', 'h', 'j')
for key in keys:
fig.canvas.key_press_event(key)
fig.canvas.scroll_event(0.5, 0.5, -0.5) # scroll down
fig.canvas.scroll_event(0.5, 0.5, 0.5) # scroll up
fig.canvas.resize_event()
fig.canvas.key_press_event('escape') # close and drop epochs
@testing.requires_testing_data
def test_plot_psd_epochs_ctf():
"""Test plotting CTF epochs psd (+topomap)."""
raw = read_raw_ctf(ctf_fname, preload=True)
evts = make_fixed_length_events(raw)
epochs = Epochs(raw, evts, preload=True)
pytest.raises(RuntimeError, epochs.plot_psd_topomap,
bands=[(0, 0.01, 'foo')]) # no freqs in range
epochs.plot_psd_topomap()
# EEG060 is flat in this dataset
for dB in [True, False]:
with pytest.warns(UserWarning, match='for channel EEG060'):
epochs.plot_psd(dB=dB)
epochs.drop_channels(['EEG060'])
epochs.plot_psd(spatial_colors=False, average=False)
run_tests_if_main()
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from numpy import random
import matplotlib.pyplot as plt
from filterpy.kalman import KalmanFilter, FixedLagSmoother
DO_PLOT = False
def test_fls():
    # It is possible for the fixed-lag smoother to occasionally perform worse
    # than the Kalman filter. Allow that to happen once in 50 runs before we
    # become alarmed.
    fail_count = 0
    for i in range(50):
        if not one_run_test_fls():
            fail_count += 1
    assert fail_count < 2
def test_batch_equals_recursive():
""" ensures that the batch filter and the recursive version both
produce the same results.
"""
N = 4 # size of lag
fls = FixedLagSmoother(dim_x=2, dim_z=1, N=N)
fls.x = np.array([0., .5])
fls.F = np.array([[1.,1.],
[0.,1.]])
fls.H = np.array([[1.,0.]])
fls.P *= 200
fls.R *= 5.
fls.Q *= 0.001
nom = np.array([t/2. for t in range (0,40)])
zs = np.array([t + random.randn()*1.1 for t in nom])
xs, x = fls.smooth_batch(zs, N)
for k,z in enumerate(zs):
fls.smooth(z)
xSmooth = np.asarray(fls.xSmooth)
xfl = xs[:,0].T[0]
res = xSmooth.T[0,0] - xfl
    assert np.abs(res).sum() < 1.e-12
def one_run_test_fls():
fls = FixedLagSmoother(dim_x=2, dim_z=1)
fls.x = np.array([0., .5])
fls.F = np.array([[1.,1.],
[0.,1.]])
fls.H = np.array([[1.,0.]])
fls.P *= 200
fls.R *= 5.
fls.Q *= 0.001
kf = KalmanFilter(dim_x=2, dim_z=1)
kf.x = np.array([0., .5])
kf.F = np.array([[1.,1.],
[0.,1.]])
kf.H = np.array([[1.,0.]])
kf.P *= 2000
kf.R *= 1.
kf.Q *= 0.001
N = 4 # size of lag
nom = np.array([t/2. for t in range (0,40)])
zs = np.array([t + random.randn()*1.1 for t in nom])
xs, x = fls.smooth_batch(zs, N)
M, P, _, _ = kf.batch_filter(zs)
rts_x, _, _, _ = kf.rts_smoother(M, P)
xfl = xs[:,0].T[0]
xkf = M[:,0].T[0]
fl_res = abs(xfl-nom)
kf_res = abs(xkf-nom)
if DO_PLOT:
plt.cla()
plt.plot(zs,'o', alpha=0.5, marker='o', label='zs')
plt.plot(x[:,0], label='FLS')
plt.plot(xfl, label='FLS S')
plt.plot(xkf, label='KF')
plt.plot(rts_x[:,0], label='RTS')
plt.legend(loc=4)
plt.show()
print(fl_res)
print(kf_res)
print('std fixed lag:', np.mean(fl_res[N:]))
print('std kalman:', np.mean(kf_res[N:]))
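    # Success when the fixed-lag smoother tracks the nominal trajectory at least as well as the Kalman filter.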
return np.mean(fl_res) <= np.mean(kf_res)
if __name__ == '__main__':
DO_PLOT = True
one_run_test_fls()
DO_PLOT = False
test_fls()
test_batch_equals_recursive()
|
import logging
from pylutron_caseta import FAN_HIGH, FAN_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_OFF
from homeassistant.components.fan import (
DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from . import DOMAIN as CASETA_DOMAIN, LutronCasetaDevice
_LOGGER = logging.getLogger(__name__)
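# Map Caseta bridge fan speeds to Home Assistant speeds; FAN_MEDIUM_HIGH is collapsed into SPEED_MEDIUM.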
VALUE_TO_SPEED = {
None: SPEED_OFF,
FAN_OFF: SPEED_OFF,
FAN_LOW: SPEED_LOW,
FAN_MEDIUM: SPEED_MEDIUM,
FAN_MEDIUM_HIGH: SPEED_MEDIUM,
FAN_HIGH: SPEED_HIGH,
}
SPEED_TO_VALUE = {
SPEED_OFF: FAN_OFF,
SPEED_LOW: FAN_LOW,
SPEED_MEDIUM: FAN_MEDIUM,
SPEED_HIGH: FAN_HIGH,
}
FAN_SPEEDS = [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Lutron Caseta fan platform.
Adds fan controllers from the Caseta bridge associated with the config_entry
as fan entities.
"""
entities = []
bridge = hass.data[CASETA_DOMAIN][config_entry.entry_id]
fan_devices = bridge.get_devices_by_domain(DOMAIN)
for fan_device in fan_devices:
entity = LutronCasetaFan(fan_device, bridge)
entities.append(entity)
async_add_entities(entities, True)
class LutronCasetaFan(LutronCasetaDevice, FanEntity):
"""Representation of a Lutron Caseta fan. Including Fan Speed."""
@property
def speed(self) -> str:
"""Return the current speed."""
return VALUE_TO_SPEED[self._device["fan_speed"]]
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return FAN_SPEEDS
@property
def supported_features(self) -> int:
"""Flag supported features. Speed Only."""
return SUPPORT_SET_SPEED
async def async_turn_on(self, speed: str = None, **kwargs):
"""Turn the fan on."""
if speed is None:
speed = SPEED_MEDIUM
await self.async_set_speed(speed)
async def async_turn_off(self, **kwargs):
"""Turn the fan off."""
await self.async_set_speed(SPEED_OFF)
async def async_set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
await self._smartbridge.set_fan(self.device_id, SPEED_TO_VALUE[speed])
@property
def is_on(self):
"""Return true if device is on."""
return VALUE_TO_SPEED[self._device["fan_speed"]] in [
SPEED_LOW,
SPEED_MEDIUM,
SPEED_HIGH,
]
async def async_update(self):
"""Update when forcing a refresh of the device."""
self._device = self._smartbridge.get_device_by_id(self.device_id)
_LOGGER.debug("State of this lutron fan device is %s", self._device)
|
from datetime import timedelta
import logging
from glances_api import Glances, exceptions
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import (
CONF_VERSION,
DATA_UPDATED,
DEFAULT_HOST,
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_VERSION,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
GLANCES_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.In([2, 3]),
}
)
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [GLANCES_SCHEMA])}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Configure Glances using config flow only."""
if DOMAIN in config:
for entry in config[DOMAIN]:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=entry
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up Glances from config entry."""
client = GlancesData(hass, config_entry)
hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = client
if not await client.async_setup():
return False
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
hass.data[DOMAIN].pop(config_entry.entry_id)
return True
class GlancesData:
"""Get the latest data from Glances api."""
def __init__(self, hass, config_entry):
"""Initialize the Glances data."""
self.hass = hass
self.config_entry = config_entry
self.api = None
self.unsub_timer = None
self.available = False
@property
def host(self):
"""Return client host."""
return self.config_entry.data[CONF_HOST]
async def async_update(self):
"""Get the latest data from the Glances REST API."""
try:
await self.api.get_data()
self.available = True
except exceptions.GlancesApiError:
_LOGGER.error("Unable to fetch data from Glances")
self.available = False
_LOGGER.debug("Glances data updated")
async_dispatcher_send(self.hass, DATA_UPDATED)
async def async_setup(self):
"""Set up the Glances client."""
try:
self.api = get_api(self.hass, self.config_entry.data)
await self.api.get_data()
self.available = True
_LOGGER.debug("Successfully connected to Glances")
except exceptions.GlancesApiConnectionError as err:
_LOGGER.debug("Can not connect to Glances")
raise ConfigEntryNotReady from err
self.add_options()
self.set_scan_interval(self.config_entry.options[CONF_SCAN_INTERVAL])
self.config_entry.add_update_listener(self.async_options_updated)
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, "sensor"
)
)
return True
def add_options(self):
"""Add options for Glances integration."""
if not self.config_entry.options:
options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL}
self.hass.config_entries.async_update_entry(
self.config_entry, options=options
)
def set_scan_interval(self, scan_interval):
"""Update scan interval."""
async def refresh(event_time):
"""Get the latest data from Glances api."""
await self.async_update()
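        # Cancel any previously scheduled refresh before registering the new interval.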
if self.unsub_timer is not None:
self.unsub_timer()
self.unsub_timer = async_track_time_interval(
self.hass, refresh, timedelta(seconds=scan_interval)
)
@staticmethod
async def async_options_updated(hass, entry):
"""Triggered by config entry options updates."""
hass.data[DOMAIN][entry.entry_id].set_scan_interval(
entry.options[CONF_SCAN_INTERVAL]
)
def get_api(hass, entry):
"""Return the api from glances_api."""
params = entry.copy()
params.pop(CONF_NAME)
verify_ssl = params.pop(CONF_VERIFY_SSL)
session = async_get_clientsession(hass, verify_ssl)
return Glances(hass.loop, session, **params)
|
import json
import logging
from plexapi.exceptions import NotFound
import voluptuous as vol
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
DOMAIN,
PLEX_UPDATE_PLATFORMS_SIGNAL,
SERVERS,
SERVICE_REFRESH_LIBRARY,
SERVICE_SCAN_CLIENTS,
)
REFRESH_LIBRARY_SCHEMA = vol.Schema(
{vol.Optional("server_name"): str, vol.Required("library_name"): str}
)
_LOGGER = logging.getLogger(__package__)
async def async_setup_services(hass):
"""Set up services for the Plex component."""
async def async_refresh_library_service(service_call):
await hass.async_add_executor_job(refresh_library, hass, service_call)
async def async_scan_clients_service(_):
_LOGGER.debug("Scanning for new Plex clients")
for server_id in hass.data[DOMAIN][SERVERS]:
async_dispatcher_send(hass, PLEX_UPDATE_PLATFORMS_SIGNAL.format(server_id))
hass.services.async_register(
DOMAIN,
SERVICE_REFRESH_LIBRARY,
async_refresh_library_service,
schema=REFRESH_LIBRARY_SCHEMA,
)
hass.services.async_register(
DOMAIN, SERVICE_SCAN_CLIENTS, async_scan_clients_service
)
return True
def refresh_library(hass, service_call):
"""Scan a Plex library for new and updated media."""
plex_server_name = service_call.data.get("server_name")
library_name = service_call.data["library_name"]
plex_server = get_plex_server(hass, plex_server_name)
if not plex_server:
return
try:
library = plex_server.library.section(title=library_name)
except NotFound:
_LOGGER.error(
"Library with name '%s' not found in %s",
library_name,
[x.title for x in plex_server.library.sections()],
)
return
_LOGGER.debug("Scanning %s for new and updated media", library_name)
library.update()
def get_plex_server(hass, plex_server_name=None):
"""Retrieve a configured Plex server by name."""
plex_servers = hass.data[DOMAIN][SERVERS].values()
if plex_server_name:
plex_server = next(
(x for x in plex_servers if x.friendly_name == plex_server_name), None
)
if plex_server is not None:
return plex_server
_LOGGER.error(
"Requested Plex server '%s' not found in %s",
plex_server_name,
[x.friendly_name for x in plex_servers],
)
return None
if len(plex_servers) == 1:
return next(iter(plex_servers))
_LOGGER.error(
"Multiple Plex servers configured, choose with 'plex_server' key: %s",
[x.friendly_name for x in plex_servers],
)
return None
def lookup_plex_media(hass, content_type, content_id):
"""Look up Plex media using media_player.play_media service payloads."""
content = json.loads(content_id)
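    # A bare integer payload is treated as a Plex media key.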
if isinstance(content, int):
content = {"plex_key": content}
content_type = DOMAIN
plex_server_name = content.pop("plex_server", None)
shuffle = content.pop("shuffle", 0)
plex_server = get_plex_server(hass, plex_server_name=plex_server_name)
if not plex_server:
return (None, None)
media = plex_server.lookup_media(content_type, **content)
if media is None:
_LOGGER.error("Media could not be found: %s", content)
return (None, None)
playqueue = plex_server.create_playqueue(media, shuffle=shuffle)
return (playqueue, plex_server)
|
import json
import logging
from absl import flags
from perfkitbenchmarker import resource
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
def _ValidateReplicationFlags(flag_dict):
"""Verifies correct usage of the bigtable replication flags."""
return (not flag_dict['bigtable_replication_cluster'] or
flag_dict['bigtable_replication_cluster_zone'])
def _ValidateRoutingFlags(flag_dict):
"""Verifies correct usage of the bigtable routing flags."""
return (not flag_dict['bigtable_multicluster_routing'] or
flag_dict['bigtable_replication_cluster'])
flags.DEFINE_integer('bigtable_node_count', 3,
'Number of nodes to create in the bigtable cluster.')
flags.DEFINE_enum('bigtable_storage_type', 'ssd', ['ssd', 'hdd'],
'Storage class for the cluster')
flags.DEFINE_string('google_bigtable_zone', 'us-central1-b',
'Bigtable zone.')
flags.DEFINE_boolean('bigtable_replication_cluster', False,
'Whether to create a Bigtable replication cluster.')
flags.DEFINE_string('bigtable_replication_cluster_zone', None,
'Zone in which to create a Bigtable replication cluster.')
flags.DEFINE_boolean('bigtable_multicluster_routing', False,
'Whether to use multi-cluster routing.')
flags.register_multi_flags_validator(
['bigtable_replication_cluster', 'bigtable_replication_cluster_zone'],
_ValidateReplicationFlags, message='bigtable_replication_cluster_zone must '
'be set if bigtable_replication_cluster is True.')
flags.register_multi_flags_validator(
['bigtable_replication_cluster', 'bigtable_multicluster_routing'],
_ValidateRoutingFlags, message='bigtable_replication_cluster must '
'be set if bigtable_multicluster_routing is True.')
class GcpBigtableInstance(resource.BaseResource):
"""Object representing a GCP Bigtable Instance.
Attributes:
name: Instance and cluster name.
num_nodes: Number of nodes in the instance's cluster.
project: Enclosing project for the instance.
zone: zone of the instance's cluster.
"""
def __init__(self, name, project, zone):
super(GcpBigtableInstance, self).__init__()
self.num_nodes = FLAGS.bigtable_node_count
self.storage_type = FLAGS.bigtable_storage_type
self.name = name
self.zone = zone
self.project = project
def _Create(self):
"""Creates the instance."""
cmd = util.GcloudCommand(self, 'beta', 'bigtable', 'instances', 'create',
self.name)
cmd.flags['display-name'] = self.name
cmd.flags['cluster'] = '{}-0'.format(self.name)
cmd.flags['cluster-num-nodes'] = self.num_nodes
cmd.flags['cluster-storage-type'] = self.storage_type
cmd.flags['cluster-zone'] = self.zone
cmd.flags['project'] = self.project
# The zone flag makes this command fail.
cmd.flags['zone'] = []
cmd.Issue()
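    # Optionally create a second cluster in another zone for replication.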
if FLAGS.bigtable_replication_cluster:
cmd = util.GcloudCommand(self, 'beta', 'bigtable', 'clusters', 'create',
'{}-1'.format(self.name))
cmd.flags['instance'] = self.name
cmd.flags['zone'] = FLAGS.bigtable_replication_cluster_zone
cmd.flags['num-nodes'] = self.num_nodes
cmd.Issue()
logging.info('Creating instance %s.', self.name)
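    # Optionally switch the default app profile to multi-cluster (any-cluster) routing.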
if FLAGS.bigtable_multicluster_routing:
cmd = util.GcloudCommand(self, 'beta', 'bigtable', 'app-profiles',
'update', 'default')
cmd.flags['instance'] = self.name
cmd.flags['route-any'] = True
cmd.flags['force'] = True
cmd.flags['zone'] = []
cmd.Issue()
def _Delete(self):
"""Deletes the instance."""
cmd = util.GcloudCommand(self, 'beta', 'bigtable', 'instances', 'delete',
self.name)
# The zone flag makes this command fail.
cmd.flags['zone'] = []
cmd.Issue(raise_on_failure=False)
def _Exists(self):
"""Returns true if the instance exists."""
cmd = util.GcloudCommand(self, 'beta', 'bigtable', 'instances', 'list')
cmd.flags['format'] = 'json'
cmd.flags['project'] = self.project
# The zone flag makes this command fail.
cmd.flags['zone'] = []
stdout, stderr, retcode = cmd.Issue(
suppress_warning=True, raise_on_failure=False)
if retcode != 0:
# This is not ideal, as we're returning false not because we know
# the table isn't there, but because we can't figure out whether
      # it is there. This behavior is consistent with other
      # _Exists methods.
logging.error('Unable to list GCP Bigtable instances. Return code %s '
'STDOUT: %s\nSTDERR: %s', retcode, stdout, stderr)
return False
result = json.loads(stdout)
for instance in result:
if instance['displayName'] == self.name:
return instance['state'] == 'READY'
def GetClustersDecription(instance_name, project):
"""Gets descriptions of all the clusters given the instance and project.
This is a module function to allow getting description of clusters not created
by pkb.
Args:
instance_name: Instance to get cluster descriptions for.
project: Project where instance is in.
Returns:
A list of cluster descriptions dicts.
"""
cmd = util.GcloudCommand(None, 'beta', 'bigtable', 'clusters', 'list')
cmd.flags['instances'] = instance_name
cmd.flags['project'] = project
stdout, stderr, retcode = cmd.Issue(
suppress_warning=True, raise_on_failure=False)
if retcode:
logging.error('Command "%s" failed:\nSTDOUT:\n%s\nSTDERR:\n%s',
' '.join(cmd), stdout, stderr)
output = json.loads(stdout)
result = []
for cluster_details in output:
current_instance_name = cluster_details['name'].split('/')[3]
if current_instance_name == instance_name:
cluster_details['name'] = cluster_details['name'].split('/')[5]
cluster_details['zone'] = cluster_details['location'].split('/')[3]
result.append(cluster_details)
return result
|
import pytest
from qutebrowser.config import config
def test_configcache_except_pattern(config_stub):
with pytest.raises(AssertionError):
assert config.cache['content.javascript.enabled']
def test_configcache_error_set(config_stub):
# pylint: disable=unsupported-assignment-operation,useless-suppression
with pytest.raises(TypeError):
config.cache['content.javascript.enabled'] = True
def test_configcache_get(config_stub):
assert len(config.cache._cache) == 0
assert not config.cache['auto_save.session']
assert len(config.cache._cache) == 1
assert not config.cache['auto_save.session']
def test_configcache_get_after_set(config_stub):
assert not config.cache['auto_save.session']
config_stub.val.auto_save.session = True
assert config.cache['auto_save.session']
def test_configcache_naive_benchmark(config_stub, benchmark):
def _run_bench():
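        # Read a handful of cached options repeatedly to measure lookup overhead.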
for _i in range(10000):
# pylint: disable=pointless-statement
config.cache['tabs.padding']
config.cache['tabs.indicator.width']
config.cache['tabs.indicator.padding']
config.cache['tabs.min_width']
config.cache['tabs.max_width']
config.cache['tabs.pinned.shrink']
# pylint: enable=pointless-statement
benchmark(_run_bench)
|
import logging
import threading
import time
from nx584 import client as nx584_client
import requests
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OPENING,
DEVICE_CLASSES,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_EXCLUDE_ZONES = "exclude_zones"
CONF_ZONE_TYPES = "zone_types"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = "5007"
DEFAULT_SSL = False
ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NX584 binary sensor platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
exclude = config.get(CONF_EXCLUDE_ZONES)
zone_types = config.get(CONF_ZONE_TYPES)
try:
client = nx584_client.Client(f"http://{host}:{port}")
zones = client.list_zones()
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to NX584: %s", str(ex))
return False
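    # Zone sensors require the server to report at least version 1.1.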
version = [int(v) for v in client.get_version().split(".")]
if version < [1, 1]:
_LOGGER.error("NX584 is too old to use for sensors (>=0.2 required)")
return False
zone_sensors = {
zone["number"]: NX584ZoneSensor(
zone, zone_types.get(zone["number"], DEVICE_CLASS_OPENING)
)
for zone in zones
if zone["number"] not in exclude
}
if zone_sensors:
add_entities(zone_sensors.values())
watcher = NX584Watcher(client, zone_sensors)
watcher.start()
else:
_LOGGER.warning("No zones found on NX584")
return True
class NX584ZoneSensor(BinarySensorEntity):
"""Representation of a NX584 zone as a sensor."""
def __init__(self, zone, zone_type):
"""Initialize the nx594 binary sensor."""
self._zone = zone
self._zone_type = zone_type
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the binary sensor."""
return self._zone["name"]
@property
def is_on(self):
"""Return true if the binary sensor is on."""
# True means "faulted" or "open" or "abnormal state"
return self._zone["state"]
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"zone_number": self._zone["number"]}
class NX584Watcher(threading.Thread):
"""Event listener thread to process NX584 events."""
def __init__(self, client, zone_sensors):
"""Initialize NX584 watcher thread."""
super().__init__()
self.daemon = True
self._client = client
self._zone_sensors = zone_sensors
def _process_zone_event(self, event):
zone = event["zone"]
zone_sensor = self._zone_sensors.get(zone)
# pylint: disable=protected-access
if not zone_sensor:
return
zone_sensor._zone["state"] = event["zone_state"]
zone_sensor.schedule_update_ha_state()
def _process_events(self, events):
for event in events:
if event.get("type") == "zone_status":
self._process_zone_event(event)
def _run(self):
"""Throw away any existing events so we don't replay history."""
self._client.get_events()
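        # Poll the server for new events and push them to the matching zone sensors.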
while True:
events = self._client.get_events()
if events:
self._process_events(events)
def run(self):
"""Run the watcher."""
while True:
try:
self._run()
except requests.exceptions.ConnectionError:
_LOGGER.error("Failed to reach NX584 server")
time.sleep(10)
|
import json
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker.linux_packages import fio
LOGGING = 'logging'
DATABASE = 'database'
STREAMING = 'streaming'
flags.DEFINE_enum('workload_mode', LOGGING,
[LOGGING, DATABASE, STREAMING],
'Simulate a logging, database or streaming scenario.')
flags.DEFINE_list('iodepth_list', [], 'A list of iodepth parameter used by '
'fio command in simulated database and streaming scenarios '
'only.')
flags.DEFINE_integer('maxjobs', 0,
'The maximum allowed number of jobs to support.')
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'block_storage_workload'
BENCHMARK_CONFIG = """
block_storage_workload:
description: >
Runs FIO in sequential, random, read and
write modes to simulate various scenarios.
vm_groups:
default:
vm_spec: *default_single_core
disk_spec: *default_500_gb
"""
DESCRIPTION = 'description'
METHOD = 'method'
DEFAULT_IODEPTH = 8
DEFAULT_DATABASE_SIMULATION_IODEPTH_LIST = [16, 64]
DEFAULT_STREAMING_SIMULATION_IODEPTH_LIST = [1, 16]
LATENCY_REGEX = r'[=\s]+([\d\.]+)[\s,]+'
BANDWIDTH_REGEX = r'(\d+)(\w+/*\w*)'
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Prepare the virtual machine to run FIO.
  This includes installing fio, bc, and libaio1, and ensuring that the
  attached disk is large enough to support the fio benchmark.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
vm = vms[0]
logging.info('FIO prepare on %s', vm)
vm.Install('fio')
def UpdateWorkloadMetadata(results):
"""Update metadata fields in results with workload_mode flag value.
Args:
results: A list of sample.Sample objects.
"""
for result in results:
result.metadata.update({'workload_mode': FLAGS.workload_mode})
def RunSimulatedLogging(vm):
"""Spawn fio to simulate logging and gather the results.
Args:
vm: The vm that synthetic_storage_workloads_benchmark will be run upon.
Returns:
A list of sample.Sample objects
"""
test_size = vm.total_memory_kb
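  # Build a three-job fio command line: a sequential write sized to total memory, then stonewalled random and sequential reads.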
cmd = (
'--filesize=10g '
'--directory=%s '
'--ioengine=libaio '
'--filename=fio_test_file '
'--invalidate=1 '
'--randrepeat=0 '
'--direct=0 '
'--size=%dk '
'--iodepth=%d ') % (vm.GetScratchDir(),
test_size,
DEFAULT_IODEPTH)
if FLAGS.maxjobs:
cmd += '--max-jobs=%s ' % FLAGS.maxjobs
cmd += (
'--name=sequential_write '
'--overwrite=0 '
'--rw=write '
'--end_fsync=1 '
'--name=random_read '
'--size=%dk '
'--stonewall '
'--rw=randread '
'--name=sequential_read '
'--stonewall '
'--rw=read ') % (test_size / 10)
logging.info('FIO Results for simulated %s', LOGGING)
res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
should_log=True)
results = fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res))
UpdateWorkloadMetadata(results)
return results
def RunSimulatedDatabase(vm):
"""Spawn fio to simulate database and gather the results.
Args:
vm: The vm that synthetic_storage_workloads_benchmark will be run upon.
Returns:
A list of sample.Sample objects
"""
test_size = min(vm.total_memory_kb / 10, 1000000)
iodepth_list = FLAGS.iodepth_list or DEFAULT_DATABASE_SIMULATION_IODEPTH_LIST
results = []
for depth in iodepth_list:
cmd = (
'--filesize=10g '
'--directory=%s '
'--ioengine=libaio '
'--filename=fio_test_file '
'--overwrite=1 '
'--invalidate=0 '
'--direct=1 '
'--randrepeat=0 '
'--iodepth=%s '
'--size=%dk '
'--blocksize=4k ') % (vm.GetScratchDir(),
depth,
test_size)
if FLAGS.maxjobs:
cmd += '--max-jobs=%s ' % FLAGS.maxjobs
cmd += (
'--name=random_write '
'--rw=randwrite '
'--end_fsync=1 '
'--name=random_read '
'--stonewall '
'--rw=randread '
'--name=mixed_randrw '
'--stonewall '
'--rw=randrw '
'--rwmixread=90 '
'--rwmixwrite=10 '
'--end_fsync=1 ')
logging.info('FIO Results for simulated %s, iodepth %s', DATABASE, depth)
res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
should_log=True)
results.extend(
fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res)))
UpdateWorkloadMetadata(results)
return results
def RunSimulatedStreaming(vm):
"""Spawn fio to simulate streaming and gather the results.
Args:
vm: The vm that synthetic_storage_workloads_benchmark will be run upon.
Returns:
A list of sample.Sample objects
"""
test_size = min(vm.total_memory_kb / 10, 1000000)
iodepth_list = FLAGS.iodepth_list or DEFAULT_STREAMING_SIMULATION_IODEPTH_LIST
results = []
for depth in iodepth_list:
cmd = (
'--filesize=10g '
'--directory=%s '
'--ioengine=libaio '
'--overwrite=0 '
'--invalidate=1 '
'--direct=1 '
'--randrepeat=0 '
'--iodepth=%s '
'--blocksize=1m '
'--size=%dk '
'--filename=fio_test_file ') % (vm.GetScratchDir(),
depth,
test_size)
if FLAGS.maxjobs:
cmd += '--max-jobs=%s ' % FLAGS.maxjobs
cmd += (
'--name=sequential_write '
'--rw=write '
'--end_fsync=1 '
'--name=sequential_read '
'--stonewall '
'--rw=read ')
logging.info('FIO Results for simulated %s', STREAMING)
res, _ = vm.RemoteCommand('%s %s' % (fio.FIO_CMD_PREFIX, cmd),
should_log=True)
results.extend(
fio.ParseResults(fio.FioParametersToJob(cmd), json.loads(res)))
UpdateWorkloadMetadata(results)
return results
RUN_SCENARIO_FUNCTION_DICT = {
LOGGING: {DESCRIPTION: 'simulated_logging', METHOD: RunSimulatedLogging},
DATABASE: {DESCRIPTION: 'simulated_database', METHOD: RunSimulatedDatabase},
STREAMING: {DESCRIPTION: 'simulated_streaming',
METHOD: RunSimulatedStreaming}}


def Run(benchmark_spec):
  """Spawn fio and gather the results.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.

  Returns:
    A list of sample.Sample objects.
  """
  logging.info('Simulating %s scenario.', FLAGS.workload_mode)
  vms = benchmark_spec.vms
  vm = vms[0]
  return RUN_SCENARIO_FUNCTION_DICT[FLAGS.workload_mode][METHOD](vm)


def Cleanup(benchmark_spec):
  """Uninstall packages required for fio and remove benchmark files.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.
  """
  vms = benchmark_spec.vms
  vm = vms[0]
  logging.info('FIO Cleanup on %s', vm)
  vm.RemoveFile(vm.GetScratchDir() + '/fio_test_file')
|
import json
from unittest import mock

from libpurecool.dyson_pure_cool import DysonPureCool
from libpurecool.dyson_pure_state_v2 import DysonEnvironmentalSensorV2State

from homeassistant.components import dyson as dyson_parent
from homeassistant.components.air_quality import (
    ATTR_NO2,
    ATTR_PM_2_5,
    ATTR_PM_10,
    DOMAIN as AIQ_DOMAIN,
)
import homeassistant.components.dyson.air_quality as dyson
from homeassistant.helpers import discovery
from homeassistant.setup import async_setup_component

from .common import load_mock_device

from tests.async_mock import patch


def _get_dyson_purecool_device():
    """Return a valid device as provided by the Dyson web services."""
    device = mock.Mock(spec=DysonPureCool)
    load_mock_device(device)
    device.name = "Living room"
    device.environmental_state.particulate_matter_25 = "0014"
    device.environmental_state.particulate_matter_10 = "0025"
    device.environmental_state.nitrogen_dioxide = "0042"
    device.environmental_state.volatile_organic_compounds = "0035"
    return device


def _get_config():
    """Return a config dictionary."""
    return {
        dyson_parent.DOMAIN: {
            dyson_parent.CONF_USERNAME: "email",
            dyson_parent.CONF_PASSWORD: "password",
            dyson_parent.CONF_LANGUAGE: "GB",
            dyson_parent.CONF_DEVICES: [
                {"device_id": "XX-XXXXX-XX", "device_ip": "192.168.0.1"}
            ],
        }
    }
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_aiq_attributes(devices, login, hass):
"""Test state attributes."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
fan_state = hass.states.get("air_quality.living_room")
attributes = fan_state.attributes
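    # The air_quality entity reports PM2.5 as its state, so "14" corresponds
    # to the mocked particulate_matter_25 value of "0014".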
    assert fan_state.state == "14"
    assert attributes[ATTR_PM_2_5] == 14
    assert attributes[ATTR_PM_10] == 25
    assert attributes[ATTR_NO2] == 42
    assert attributes[dyson.ATTR_VOC] == 35
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_aiq_update_state(devices, login, hass):
"""Test state update."""
device = devices.return_value[0]
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
event = {
"msg": "ENVIRONMENTAL-CURRENT-SENSOR-DATA",
"time": "2019-03-29T10:00:01.000Z",
"data": {
"pm10": "0080",
"p10r": "0151",
"hact": "0040",
"va10": "0055",
"p25r": "0161",
"noxl": "0069",
"pm25": "0035",
"sltm": "OFF",
"tact": "2960",
},
}
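    # Payload keys follow the Dyson sensor message format: pm25/pm10 are the
    # particulate readings, noxl is NO2 and va10 is VOC, as asserted below.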
    device.environmental_state = DysonEnvironmentalSensorV2State(json.dumps(event))
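    # Simulate a push update: find the listener the air-quality entity
    # registered on the mocked device and invoke it with the new state.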
    for call in device.add_message_listener.call_args_list:
        callback = call[0][0]
        if type(callback.__self__) == dyson.DysonAirSensor:
            callback(device.environmental_state)
    await hass.async_block_till_done()
    fan_state = hass.states.get("air_quality.living_room")
    attributes = fan_state.attributes
    assert fan_state.state == "35"
    assert attributes[ATTR_PM_2_5] == 35
    assert attributes[ATTR_PM_10] == 80
    assert attributes[ATTR_NO2] == 69
    assert attributes[dyson.ATTR_VOC] == 55
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_component_setup_only_once(devices, login, hass):
"""Test if entities are created only once."""
config = _get_config()
await async_setup_component(hass, dyson_parent.DOMAIN, config)
await hass.async_block_till_done()
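    # Loading the platform a second time via discovery must not create a
    # duplicate entity.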
    discovery.load_platform(hass, AIQ_DOMAIN, dyson_parent.DOMAIN, {}, config)
    await hass.async_block_till_done()
    assert len(hass.data[dyson.DYSON_AIQ_DEVICES]) == 1
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_aiq_without_discovery(devices, login, hass):
"""Test if component correctly returns if discovery not set."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
add_entities_mock = mock.MagicMock()
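    # With discovery_info=None the platform setup should return without adding
    # any entities.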
    dyson.setup_platform(hass, None, add_entities_mock, None)
    assert add_entities_mock.call_count == 0
@patch("libpurecool.dyson.DysonAccount.login", return_value=True)
@patch(
"libpurecool.dyson.DysonAccount.devices",
return_value=[_get_dyson_purecool_device()],
)
async def test_purecool_aiq_empty_environment_state(devices, login, hass):
"""Test device with empty environmental state."""
await async_setup_component(hass, dyson_parent.DOMAIN, _get_config())
await hass.async_block_till_done()
device = hass.data[dyson.DYSON_AIQ_DEVICES][0]
device._device.environmental_state = None
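    # With no environmental state, every sensor property should fall back to
    # None.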
    assert device.state is None
    assert device.particulate_matter_2_5 is None
    assert device.particulate_matter_10 is None
    assert device.nitrogen_dioxide is None
    assert device.volatile_organic_compounds is None
|