from importlib.abc import MetaPathFinder
import warnings
class SharedLibDeprecationWarning(DeprecationWarning):
pass
warnings.simplefilter("always", SharedLibDeprecationWarning)
class SharedLibImportWarner(MetaPathFinder):
"""
    Deprecation warner for shared libraries. This class sits on `sys.meta_path`
    and prints a warning when an imported module is a shared library.
"""
def find_spec(self, fullname, path, target=None) -> None:
"""This is only supposed to print warnings, it won't ever return module spec."""
parts = fullname.split(".")
if parts[0] != "cog_shared" or len(parts) != 2:
return None
msg = (
"One of cogs uses shared libraries which are"
" deprecated and scheduled for removal in the future.\n"
"You should inform author of the cog about this message."
)
warnings.warn(msg, SharedLibDeprecationWarning, stacklevel=2)
return None
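
# Hedged usage sketch (not part of the original module): one plausible way to
# activate the warner is to prepend an instance to ``sys.meta_path`` during
# startup. The helper name and placement below are assumptions for illustration.
import sys

def _install_shared_lib_warner() -> None:
    """Insert the deprecation warner ahead of the regular import machinery."""
    if not any(isinstance(finder, SharedLibImportWarner) for finder in sys.meta_path):
        sys.meta_path.insert(0, SharedLibImportWarner())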
|
import platform
import sys
def whatever(f):
    # Call ``f`` if it is callable without arguments; on any failure (or if it
    # is not callable) fall back to returning the object itself for display.
    try:
        return f()
    except Exception:
        return f
def dump_module(mod):
print("\n### {} ---------------------------".format(mod.__name__))
for name in dir(mod):
if name.startswith("_"):
continue
print("{:30s}: {!r:.100}".format(name, whatever(getattr(mod, name))))
for mod in [platform, sys]:
dump_module(mod)
|
from __future__ import unicode_literals
import itertools
from lib.fun.decorator import magic
from lib.data.data import pystrs, pyoptions
from lib.parse.confparse import elementparser, confmatcher
from lib.fun.fun import countchecker, lengthchecker, range_compatible, cool
def build_conf_dic(source=""):
@magic
def conf():
for item in confcore(source):
yield item
def get_conf_dic(minlength, maxlength, objflag, encodeflag, head, tail):
diclist = []
for i in range_compatible(minlength, maxlength+1):
for item in itertools.product(objflag, repeat=i):
if encodeflag in pyoptions.operator.keys():
diclist.append(pyoptions.operator.get(encodeflag)(head + "".join(item) + tail))
else:
exit(pyoptions.CRLF + cool.red('[-] wrong encode type'))
# items count check
countchecker(-1, len(diclist))
return diclist
# If you have a better way to implement this, please submit a pull request.
def confcore(resource):
try:
confdicts = elementparser(confmatcher(resource))
except IndexError:
confdicts = {}
exit(cool.red("[-] parse element error, please check your parsing element"))
finalen = len(confdicts[pystrs.conf_head])
listpool = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for x in range(0, finalen):
lengthchecker(confdicts[pystrs.conf_minlen][x], confdicts[pystrs.conf_maxlen][x])
listpool[x] = get_conf_dic(int(confdicts[pystrs.conf_minlen][x]), int(confdicts[pystrs.conf_maxlen][x]),
confdicts[pystrs.conf_char][x], confdicts[pystrs.conf_encode][x],
confdicts[pystrs.conf_head][x], confdicts[pystrs.conf_tail][x])
    if finalen:
        # All per-position pools are combined with a single cross product
        # instead of a separate branch for each possible element count.
        countchecker(-1, *(len(pool) for pool in listpool[:finalen]))
        for item in itertools.product(*listpool[:finalen]):
            yield "".join(item)
|
from typing import Any, Dict
from homematicip.aio.device import (
AsyncAccelerationSensor,
AsyncContactInterface,
AsyncDevice,
AsyncFullFlushContactInterface,
AsyncMotionDetectorIndoor,
AsyncMotionDetectorOutdoor,
AsyncMotionDetectorPushButton,
AsyncPluggableMainsFailureSurveillance,
AsyncPresenceDetectorIndoor,
AsyncRotaryHandleSensor,
AsyncShutterContact,
AsyncShutterContactMagnetic,
AsyncSmokeDetector,
AsyncTiltVibrationSensor,
AsyncWaterSensor,
AsyncWeatherSensor,
AsyncWeatherSensorPlus,
AsyncWeatherSensorPro,
AsyncWiredInput32,
)
from homematicip.aio.group import AsyncSecurityGroup, AsyncSecurityZoneGroup
from homematicip.base.enums import SmokeDetectorAlarmType, WindowState
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_MOVING,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESENCE,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity
from .hap import HomematicipHAP
ATTR_ACCELERATION_SENSOR_MODE = "acceleration_sensor_mode"
ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION = "acceleration_sensor_neutral_position"
ATTR_ACCELERATION_SENSOR_SENSITIVITY = "acceleration_sensor_sensitivity"
ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE = "acceleration_sensor_trigger_angle"
ATTR_INTRUSION_ALARM = "intrusion_alarm"
ATTR_MOISTURE_DETECTED = "moisture_detected"
ATTR_MOTION_DETECTED = "motion_detected"
ATTR_POWER_MAINS_FAILURE = "power_mains_failure"
ATTR_PRESENCE_DETECTED = "presence_detected"
ATTR_SMOKE_DETECTOR_ALARM = "smoke_detector_alarm"
ATTR_TODAY_SUNSHINE_DURATION = "today_sunshine_duration_in_minutes"
ATTR_WATER_LEVEL_DETECTED = "water_level_detected"
ATTR_WINDOW_STATE = "window_state"
GROUP_ATTRIBUTES = {
"moistureDetected": ATTR_MOISTURE_DETECTED,
"motionDetected": ATTR_MOTION_DETECTED,
"powerMainsFailure": ATTR_POWER_MAINS_FAILURE,
"presenceDetected": ATTR_PRESENCE_DETECTED,
"waterlevelDetected": ATTR_WATER_LEVEL_DETECTED,
}
SAM_DEVICE_ATTRIBUTES = {
"accelerationSensorNeutralPosition": ATTR_ACCELERATION_SENSOR_NEUTRAL_POSITION,
"accelerationSensorMode": ATTR_ACCELERATION_SENSOR_MODE,
"accelerationSensorSensitivity": ATTR_ACCELERATION_SENSOR_SENSITIVITY,
"accelerationSensorTriggerAngle": ATTR_ACCELERATION_SENSOR_TRIGGER_ANGLE,
}
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the HomematicIP Cloud binary sensor from a config entry."""
hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id]
entities = [HomematicipCloudConnectionSensor(hap)]
for device in hap.home.devices:
if isinstance(device, AsyncAccelerationSensor):
entities.append(HomematicipAccelerationSensor(hap, device))
if isinstance(device, AsyncTiltVibrationSensor):
entities.append(HomematicipTiltVibrationSensor(hap, device))
if isinstance(device, AsyncWiredInput32):
for channel in range(1, 33):
entities.append(
HomematicipMultiContactInterface(hap, device, channel=channel)
)
elif isinstance(
device, (AsyncContactInterface, AsyncFullFlushContactInterface)
):
entities.append(HomematicipContactInterface(hap, device))
if isinstance(
device,
(AsyncShutterContact, AsyncShutterContactMagnetic),
):
entities.append(HomematicipShutterContact(hap, device))
if isinstance(device, AsyncRotaryHandleSensor):
entities.append(HomematicipShutterContact(hap, device, True))
if isinstance(
device,
(
AsyncMotionDetectorIndoor,
AsyncMotionDetectorOutdoor,
AsyncMotionDetectorPushButton,
),
):
entities.append(HomematicipMotionDetector(hap, device))
if isinstance(device, AsyncPluggableMainsFailureSurveillance):
entities.append(
HomematicipPluggableMainsFailureSurveillanceSensor(hap, device)
)
if isinstance(device, AsyncPresenceDetectorIndoor):
entities.append(HomematicipPresenceDetector(hap, device))
if isinstance(device, AsyncSmokeDetector):
entities.append(HomematicipSmokeDetector(hap, device))
if isinstance(device, AsyncWaterSensor):
entities.append(HomematicipWaterDetector(hap, device))
if isinstance(device, (AsyncWeatherSensorPlus, AsyncWeatherSensorPro)):
entities.append(HomematicipRainSensor(hap, device))
if isinstance(
device, (AsyncWeatherSensor, AsyncWeatherSensorPlus, AsyncWeatherSensorPro)
):
entities.append(HomematicipStormSensor(hap, device))
entities.append(HomematicipSunshineSensor(hap, device))
if isinstance(device, AsyncDevice) and device.lowBat is not None:
entities.append(HomematicipBatterySensor(hap, device))
for group in hap.home.groups:
if isinstance(group, AsyncSecurityGroup):
entities.append(HomematicipSecuritySensorGroup(hap, device=group))
elif isinstance(group, AsyncSecurityZoneGroup):
entities.append(HomematicipSecurityZoneSensorGroup(hap, device=group))
if entities:
async_add_entities(entities)
class HomematicipCloudConnectionSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP cloud connection sensor."""
def __init__(self, hap: HomematicipHAP) -> None:
"""Initialize the cloud connection sensor."""
super().__init__(hap, hap.home, "Cloud Connection")
@property
def device_info(self) -> Dict[str, Any]:
"""Return device specific attributes."""
# Adds a sensor to the existing HAP device
return {
"identifiers": {
# Serial numbers of Homematic IP device
(HMIPC_DOMAIN, self._home.id)
}
}
@property
def icon(self) -> str:
"""Return the icon of the access point entity."""
return (
"mdi:access-point-network"
if self._home.connected
else "mdi:access-point-network-off"
)
@property
def is_on(self) -> bool:
"""Return true if hap is connected to cloud."""
return self._home.connected
@property
def available(self) -> bool:
"""Sensor is always available."""
return True
class HomematicipBaseActionSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP base action sensor."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOVING
@property
def is_on(self) -> bool:
"""Return true if acceleration is detected."""
return self._device.accelerationSensorTriggered
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the acceleration sensor."""
state_attr = super().device_state_attributes
for attr, attr_key in SAM_DEVICE_ATTRIBUTES.items():
attr_value = getattr(self._device, attr, None)
if attr_value:
state_attr[attr_key] = attr_value
return state_attr
class HomematicipAccelerationSensor(HomematicipBaseActionSensor):
"""Representation of the HomematicIP acceleration sensor."""
class HomematicipTiltVibrationSensor(HomematicipBaseActionSensor):
"""Representation of the HomematicIP tilt vibration sensor."""
class HomematicipMultiContactInterface(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP multi room/area contact interface."""
def __init__(self, hap: HomematicipHAP, device, channel: int) -> None:
"""Initialize the multi contact entity."""
super().__init__(hap, device, channel=channel)
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_OPENING
@property
def is_on(self) -> bool:
"""Return true if the contact interface is on/open."""
if self._device.functionalChannels[self._channel].windowState is None:
return None
return (
self._device.functionalChannels[self._channel].windowState
!= WindowState.CLOSED
)
class HomematicipContactInterface(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP contact interface."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_OPENING
@property
def is_on(self) -> bool:
"""Return true if the contact interface is on/open."""
if self._device.windowState is None:
return None
return self._device.windowState != WindowState.CLOSED
class HomematicipShutterContact(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP shutter contact."""
def __init__(
self, hap: HomematicipHAP, device, has_additional_state: bool = False
) -> None:
"""Initialize the shutter contact."""
super().__init__(hap, device)
self.has_additional_state = has_additional_state
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_DOOR
@property
def is_on(self) -> bool:
"""Return true if the shutter contact is on/open."""
if self._device.windowState is None:
return None
return self._device.windowState != WindowState.CLOSED
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the Shutter Contact."""
state_attr = super().device_state_attributes
if self.has_additional_state:
window_state = getattr(self._device, "windowState", None)
if window_state and window_state != WindowState.CLOSED:
state_attr[ATTR_WINDOW_STATE] = window_state
return state_attr
class HomematicipMotionDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP motion detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOTION
@property
def is_on(self) -> bool:
"""Return true if motion is detected."""
return self._device.motionDetected
class HomematicipPresenceDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP presence detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_PRESENCE
@property
def is_on(self) -> bool:
"""Return true if presence is detected."""
return self._device.presenceDetected
class HomematicipSmokeDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP smoke detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_SMOKE
@property
def is_on(self) -> bool:
"""Return true if smoke is detected."""
if self._device.smokeDetectorAlarmType:
return (
self._device.smokeDetectorAlarmType
== SmokeDetectorAlarmType.PRIMARY_ALARM
)
return False
class HomematicipWaterDetector(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP water detector."""
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOISTURE
@property
def is_on(self) -> bool:
"""Return true, if moisture or waterlevel is detected."""
return self._device.moistureDetected or self._device.waterlevelDetected
class HomematicipStormSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP storm sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize storm sensor."""
super().__init__(hap, device, "Storm")
@property
def icon(self) -> str:
"""Return the icon."""
return "mdi:weather-windy" if self.is_on else "mdi:pinwheel-outline"
@property
def is_on(self) -> bool:
"""Return true, if storm is detected."""
return self._device.storm
class HomematicipRainSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP rain sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize rain sensor."""
super().__init__(hap, device, "Raining")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_MOISTURE
@property
def is_on(self) -> bool:
"""Return true, if it is raining."""
return self._device.raining
class HomematicipSunshineSensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP sunshine sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize sunshine sensor."""
super().__init__(hap, device, post="Sunshine")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_LIGHT
@property
def is_on(self) -> bool:
"""Return true if sun is shining."""
return self._device.sunshine
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the illuminance sensor."""
state_attr = super().device_state_attributes
today_sunshine_duration = getattr(self._device, "todaySunshineDuration", None)
if today_sunshine_duration:
state_attr[ATTR_TODAY_SUNSHINE_DURATION] = today_sunshine_duration
return state_attr
class HomematicipBatterySensor(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP low battery sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize battery sensor."""
super().__init__(hap, device, post="Battery")
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_BATTERY
@property
def is_on(self) -> bool:
"""Return true if battery is low."""
return self._device.lowBat
class HomematicipPluggableMainsFailureSurveillanceSensor(
HomematicipGenericEntity, BinarySensorEntity
):
"""Representation of the HomematicIP pluggable mains failure surveillance sensor."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize pluggable mains failure surveillance sensor."""
super().__init__(hap, device)
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_POWER
@property
def is_on(self) -> bool:
"""Return true if power mains fails."""
return not self._device.powerMainsFailure
class HomematicipSecurityZoneSensorGroup(HomematicipGenericEntity, BinarySensorEntity):
"""Representation of the HomematicIP security zone sensor group."""
def __init__(self, hap: HomematicipHAP, device, post: str = "SecurityZone") -> None:
"""Initialize security zone group."""
device.modelType = f"HmIP-{post}"
super().__init__(hap, device, post=post)
@property
def device_class(self) -> str:
"""Return the class of this sensor."""
return DEVICE_CLASS_SAFETY
@property
def available(self) -> bool:
"""Security-Group available."""
# A security-group must be available, and should not be affected by
# the individual availability of group members.
return True
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the security zone group."""
state_attr = super().device_state_attributes
for attr, attr_key in GROUP_ATTRIBUTES.items():
attr_value = getattr(self._device, attr, None)
if attr_value:
state_attr[attr_key] = attr_value
window_state = getattr(self._device, "windowState", None)
if window_state and window_state != WindowState.CLOSED:
state_attr[ATTR_WINDOW_STATE] = str(window_state)
return state_attr
@property
def is_on(self) -> bool:
"""Return true if security issue detected."""
if (
self._device.motionDetected
or self._device.presenceDetected
or self._device.unreach
or self._device.sabotage
):
return True
if (
self._device.windowState is not None
and self._device.windowState != WindowState.CLOSED
):
return True
return False
class HomematicipSecuritySensorGroup(
HomematicipSecurityZoneSensorGroup, BinarySensorEntity
):
"""Representation of the HomematicIP security group."""
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize security group."""
super().__init__(hap, device, post="Sensors")
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the state attributes of the security group."""
state_attr = super().device_state_attributes
smoke_detector_at = getattr(self._device, "smokeDetectorAlarmType", None)
if smoke_detector_at:
if smoke_detector_at == SmokeDetectorAlarmType.PRIMARY_ALARM:
state_attr[ATTR_SMOKE_DETECTOR_ALARM] = str(smoke_detector_at)
if smoke_detector_at == SmokeDetectorAlarmType.INTRUSION_ALARM:
state_attr[ATTR_INTRUSION_ALARM] = str(smoke_detector_at)
return state_attr
@property
def is_on(self) -> bool:
"""Return true if safety issue detected."""
parent_is_on = super().is_on
if parent_is_on:
return True
if (
self._device.powerMainsFailure
or self._device.moistureDetected
or self._device.waterlevelDetected
or self._device.lowBat
or self._device.dutyCycle
):
return True
if (
self._device.smokeDetectorAlarmType is not None
and self._device.smokeDetectorAlarmType != SmokeDetectorAlarmType.IDLE_OFF
):
return True
return False
|
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
from weblate.checks.flags import Flags
def validate_filemask(val):
"""Validate that filemask contains *."""
if "*" not in val:
raise ValidationError(
_("Filemask does not contain * as a language placeholder!")
)
def validate_autoaccept(val):
"""Validate correct value for autoaccept."""
if val == 1:
raise ValidationError(
_(
"A value of 1 is not allowed for autoaccept as "
"it would permit users to vote on their own suggestions."
)
)
def validate_check_flags(val):
"""Validate check influencing flags."""
flags = Flags(val)
flags.validate()
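
# Hedged usage sketch (not part of the original module): each validator raises
# ``ValidationError`` on bad input and simply returns otherwise, e.g.:
#
#     validate_filemask("po/*.po")   # OK: contains the "*" placeholder
#     validate_filemask("po/cs.po")  # raises ValidationError
#     validate_autoaccept(1)         # raises ValidationError
#     validate_autoaccept(2)         # OK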
|
import asyncio
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_DURATION,
DOMAIN,
SERVICE_CANCEL,
SERVICE_PAUSE,
SERVICE_START,
STATUS_ACTIVE,
STATUS_IDLE,
STATUS_PAUSED,
)
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {STATUS_IDLE, STATUS_ACTIVE, STATUS_PAUSED}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state and cur_state.attributes.get(
ATTR_DURATION
) == state.attributes.get(ATTR_DURATION):
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
if state.state == STATUS_ACTIVE:
service = SERVICE_START
if ATTR_DURATION in state.attributes:
service_data[ATTR_DURATION] = state.attributes[ATTR_DURATION]
elif state.state == STATUS_PAUSED:
service = SERVICE_PAUSE
elif state.state == STATUS_IDLE:
service = SERVICE_CANCEL
await hass.services.async_call(
DOMAIN, service, service_data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Timer states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
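
# Hedged usage sketch (not part of the original module): reproducing a stored
# timer state; the entity id and duration below are illustrative assumptions.
#
#     await async_reproduce_states(
#         hass, [State("timer.tea", STATUS_ACTIVE, {ATTR_DURATION: "0:03:00"})]
#     )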
|
from collections import namedtuple
import unittest
from absl import flags
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.windows_packages import iperf3
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
# Command generating these results:
# ./iperf3.exe --client 10.129.0.3 --port 5201 --udp -t 3 -b 5G
iperf3_results = """
[ 4] local 10.129.0.4 port 49526 connected to 10.129.0.3 port 5201
[ ID] Interval Transfer Bandwidth Total Datagrams
[ 4] 0.00-1.00 sec 159 MBytes 1.34 Gbits/sec 20398
[ 4] 1.00-2.00 sec 166 MBytes 1.40 Gbits/sec 21292
[ 4] 2.00-3.00 sec 167 MBytes 1.40 Gbits/sec 21323
- - - - - - - - - - - - - - - - - - - - - - - - -
[ ID] Interval Transfer Bandwidth Jitter Lost/Total Datagrams
[ 4] 0.00-3.00 sec 492 MBytes 1.38 Gbits/sec 0.072 ms 35148/62949 (56%)
[ 4] Sent 62949 datagrams
iperf Done.
"""
# ./iperf3.exe --client 127.0.0.1 --port 5201 -t 3 -f M -P 5
iperf3_tcp_results = """
Connecting to host 127.0.0.1, port 5201
[ 4] local 127.0.0.1 port 53966 connected to 127.0.0.1 port 5201
[ 6] local 127.0.0.1 port 53967 connected to 127.0.0.1 port 5201
[ 8] local 127.0.0.1 port 53968 connected to 127.0.0.1 port 5201
[ 10] local 127.0.0.1 port 53969 connected to 127.0.0.1 port 5201
[ 12] local 127.0.0.1 port 53970 connected to 127.0.0.1 port 5201
[ ID] Interval Transfer Bandwidth
[ 4] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
[ 6] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
[ 8] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
[ 10] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
[ 12] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
[SUM] 0.00-1.01 sec 512 MBytes 4.27 Gbits/sec
- - - - - - - - - - - - - - - - - - - - - - - - -
[ 4] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
[ 6] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
[ 8] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
[ 10] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
[ 12] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
[SUM] 1.01-2.00 sec 531 MBytes 4.48 Gbits/sec
- - - - - - - - - - - - - - - - - - - - - - - - -
[ 4] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
[ 6] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
[ 8] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
[ 10] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
[ 12] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
[SUM] 2.00-3.01 sec 631 MBytes 5.27 Gbits/sec
- - - - - - - - - - - - - - - - - - - - - - - - -
[ ID] Interval Transfer Bandwidth
[ 4] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
[ 4] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
[ 6] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
[ 6] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
[ 8] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
[ 8] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
[ 10] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
[ 10] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
[ 12] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
[ 12] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
[SUM] 0.00-3.01 sec 1.64 GBytes 4670 Mbits/sec sender
[SUM] 0.00-3.01 sec 1.64 GBytes 4670 Mbits/sec receiver
iperf Done.
"""
class Iperf3TestCase(unittest.TestCase, test_util.SamplesTestMixin):
def testIperfUDPStreamSamples(self):
lost = 35148
sent = 62949
bandwidth = 5000
internal_ip_used = True
fake_vm = namedtuple('fake_vm', 'machine_type zone')
sending_vm = fake_vm(machine_type='A', zone='B')
receiving_vm = fake_vm(machine_type='A', zone='B')
samples = iperf3.GetUDPStreamSamples(
sending_vm, receiving_vm, iperf3_results, bandwidth, internal_ip_used)
expected_metadata = {
'protocol': 'UDP',
'total_lost': lost,
'total_sent': sent,
'bandwidth': bandwidth,
'receiving_machine_type': receiving_vm.machine_type,
'receiving_zone': receiving_vm.zone,
'sending_machine_type': sending_vm.machine_type,
'sending_zone': sending_vm.zone,
'internal_ip_used': internal_ip_used,
}
expected_samples = [
sample.Sample('Loss Rate', 55.836, 'Percent',
expected_metadata),
sample.Sample('Bandwidth Achieved', 1380, 'Mbits/sec',
expected_metadata),
sample.Sample('Jitter', 0.072, 'ms',
expected_metadata),
]
self.assertSampleListsEqualUpToTimestamp(samples, expected_samples)
def testIperfTCPMultiStream(self):
tcp_number_of_streams = 10
fake_vm = namedtuple('fake_vm', 'machine_type zone')
sending_vm = fake_vm(machine_type='A', zone='B')
receiving_vm = fake_vm(machine_type='A', zone='B')
def _Metadata(thread_id):
return {
'protocol': 'TCP',
'num_threads': tcp_number_of_streams,
'receiving_machine_type': receiving_vm.machine_type,
'receiving_zone': receiving_vm.zone,
'sending_machine_type': sending_vm.machine_type,
'sending_zone': sending_vm.zone,
'thread_id': thread_id,
'internal_ip_used': True,
'tcp_window_size': None,
}
expected_samples = [
sample.Sample('Bandwidth', 935.0, 'Mbits/sec', _Metadata('4')),
sample.Sample('Bandwidth', 935.0, 'Mbits/sec', _Metadata('6')),
sample.Sample('Bandwidth', 935.0, 'Mbits/sec', _Metadata('8')),
sample.Sample('Bandwidth', 935.0, 'Mbits/sec', _Metadata('10')),
sample.Sample('Bandwidth', 935.0, 'Mbits/sec', _Metadata('12')),
sample.Sample('Bandwidth', 4670.0, 'Mbits/sec', _Metadata('SUM')),
]
samples = iperf3.ParseTCPMultiStreamOutput(iperf3_tcp_results, sending_vm,
receiving_vm,
tcp_number_of_streams, True)
self.assertSampleListsEqualUpToTimestamp(samples, expected_samples)
if __name__ == '__main__':
unittest.main()
|
import asyncio
from datetime import timedelta
import logging
import urllib.error
import aiohttp
import pytest
import requests
from homeassistant.helpers import update_coordinator
from homeassistant.util.dt import utcnow
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import async_fire_time_changed
_LOGGER = logging.getLogger(__name__)
def get_crd(hass, update_interval):
"""Make coordinator mocks."""
calls = 0
async def refresh() -> int:
nonlocal calls
calls += 1
return calls
crd = update_coordinator.DataUpdateCoordinator[int](
hass,
_LOGGER,
name="test",
update_method=refresh,
update_interval=update_interval,
)
return crd
DEFAULT_UPDATE_INTERVAL = timedelta(seconds=10)
@pytest.fixture
def crd(hass):
"""Coordinator mock with default update interval."""
return get_crd(hass, DEFAULT_UPDATE_INTERVAL)
@pytest.fixture
def crd_without_update_interval(hass):
"""Coordinator mock that never automatically updates."""
return get_crd(hass, None)
async def test_async_refresh(crd):
"""Test async_refresh for update coordinator."""
assert crd.data is None
await crd.async_refresh()
assert crd.data == 1
assert crd.last_update_success is True
# Make sure we didn't schedule a refresh because we have 0 listeners
assert crd._unsub_refresh is None
updates = []
def update_callback():
updates.append(crd.data)
unsub = crd.async_add_listener(update_callback)
await crd.async_refresh()
assert updates == [2]
assert crd._unsub_refresh is not None
# Test unsubscribing through function
unsub()
await crd.async_refresh()
assert updates == [2]
# Test unsubscribing through method
crd.async_add_listener(update_callback)
crd.async_remove_listener(update_callback)
await crd.async_refresh()
assert updates == [2]
async def test_request_refresh(crd):
"""Test request refresh for update coordinator."""
assert crd.data is None
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
    # Second time we hit the debounce
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
async def test_request_refresh_no_auto_update(crd_without_update_interval):
"""Test request refresh for update coordinator without automatic update."""
crd = crd_without_update_interval
assert crd.data is None
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
    # Second time we hit the debounce
await crd.async_request_refresh()
assert crd.data == 1
assert crd.last_update_success is True
@pytest.mark.parametrize(
"err_msg",
[
(asyncio.TimeoutError, "Timeout fetching test data"),
(requests.exceptions.Timeout, "Timeout fetching test data"),
(urllib.error.URLError("timed out"), "Timeout fetching test data"),
(aiohttp.ClientError, "Error requesting test data"),
(requests.exceptions.RequestException, "Error requesting test data"),
(urllib.error.URLError("something"), "Error requesting test data"),
(update_coordinator.UpdateFailed, "Error fetching test data"),
],
)
async def test_refresh_known_errors(err_msg, crd, caplog):
"""Test raising known errors."""
crd.update_method = AsyncMock(side_effect=err_msg[0])
await crd.async_refresh()
assert crd.data is None
assert crd.last_update_success is False
assert err_msg[1] in caplog.text
async def test_refresh_fail_unknown(crd, caplog):
"""Test raising unknown error."""
await crd.async_refresh()
crd.update_method = AsyncMock(side_effect=ValueError)
await crd.async_refresh()
assert crd.data == 1 # value from previous fetch
assert crd.last_update_success is False
assert "Unexpected error fetching test data" in caplog.text
async def test_refresh_no_update_method(crd):
"""Test raising error is no update method is provided."""
await crd.async_refresh()
crd.update_method = None
with pytest.raises(NotImplementedError):
await crd.async_refresh()
async def test_update_interval(hass, crd):
"""Test update interval works."""
# Test we don't update without subscriber
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data is None
# Add subscriber
update_callback = Mock()
crd.async_add_listener(update_callback)
# Test twice we update with subscriber
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data == 1
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
assert crd.data == 2
# Test removing listener
crd.async_remove_listener(update_callback)
async_fire_time_changed(hass, utcnow() + crd.update_interval)
await hass.async_block_till_done()
# Test we stop updating after we lose last subscriber
assert crd.data == 2
async def test_update_interval_not_present(hass, crd_without_update_interval):
"""Test update never happens with no update interval."""
crd = crd_without_update_interval
# Test we don't update without subscriber with no update interval
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
# Add subscriber
update_callback = Mock()
crd.async_add_listener(update_callback)
# Test twice we don't update with subscriber with no update interval
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
assert crd.data is None
# Test removing listener
crd.async_remove_listener(update_callback)
async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
await hass.async_block_till_done()
    # Test we still don't update after we lose the last subscriber
assert crd.data is None
async def test_refresh_recover(crd, caplog):
"""Test recovery of freshing data."""
crd.last_update_success = False
await crd.async_refresh()
assert crd.last_update_success is True
assert "Fetching test data recovered" in caplog.text
async def test_coordinator_entity(crd):
"""Test the CoordinatorEntity class."""
entity = update_coordinator.CoordinatorEntity(crd)
assert entity.should_poll is False
crd.last_update_success = False
assert entity.available is False
await entity.async_update()
assert entity.available is True
with patch(
"homeassistant.helpers.entity.Entity.async_on_remove"
) as mock_async_on_remove:
await entity.async_added_to_hass()
assert mock_async_on_remove.called
# Verify we do not update if the entity is disabled
crd.last_update_success = False
with patch("homeassistant.helpers.entity.Entity.enabled", False):
await entity.async_update()
assert entity.available is False
|
import logging
from pythonegardia import egardiadevice, egardiaserver
import requests
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_DISCOVER_DEVICES = "egardia_sensor"
CONF_REPORT_SERVER_CODES = "report_server_codes"
CONF_REPORT_SERVER_ENABLED = "report_server_enabled"
CONF_REPORT_SERVER_PORT = "report_server_port"
CONF_VERSION = "version"
DEFAULT_NAME = "Egardia"
DEFAULT_PORT = 80
DEFAULT_REPORT_SERVER_ENABLED = False
DEFAULT_REPORT_SERVER_PORT = 52010
DEFAULT_VERSION = "GATE-01"
DOMAIN = "egardia"
EGARDIA_DEVICE = "egardiadevice"
EGARDIA_NAME = "egardianame"
EGARDIA_REPORT_SERVER_CODES = "egardia_rs_codes"
EGARDIA_REPORT_SERVER_ENABLED = "egardia_rs_enabled"
EGARDIA_SERVER = "egardia_server"
NOTIFICATION_ID = "egardia_notification"
NOTIFICATION_TITLE = "Egardia"
REPORT_SERVER_CODES_IGNORE = "ignore"
SERVER_CODE_SCHEMA = vol.Schema(
{
vol.Optional("arm"): vol.All(cv.ensure_list_csv, [cv.string]),
vol.Optional("disarm"): vol.All(cv.ensure_list_csv, [cv.string]),
vol.Optional("armhome"): vol.All(cv.ensure_list_csv, [cv.string]),
vol.Optional("triggered"): vol.All(cv.ensure_list_csv, [cv.string]),
vol.Optional("ignore"): vol.All(cv.ensure_list_csv, [cv.string]),
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_REPORT_SERVER_CODES, default={}): SERVER_CODE_SCHEMA,
vol.Optional(
CONF_REPORT_SERVER_ENABLED, default=DEFAULT_REPORT_SERVER_ENABLED
): cv.boolean,
vol.Optional(
CONF_REPORT_SERVER_PORT, default=DEFAULT_REPORT_SERVER_PORT
): cv.port,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Egardia platform."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
host = conf.get(CONF_HOST)
port = conf.get(CONF_PORT)
version = conf.get(CONF_VERSION)
rs_enabled = conf.get(CONF_REPORT_SERVER_ENABLED)
rs_port = conf.get(CONF_REPORT_SERVER_PORT)
try:
device = hass.data[EGARDIA_DEVICE] = egardiadevice.EgardiaDevice(
host, port, username, password, "", version
)
except requests.exceptions.RequestException:
_LOGGER.error(
"An error occurred accessing your Egardia device. "
"Please check configuration"
)
return False
except egardiadevice.UnauthorizedError:
_LOGGER.error("Unable to authorize. Wrong password or username")
return False
# Set up the egardia server if enabled
if rs_enabled:
_LOGGER.debug("Setting up EgardiaServer")
try:
if EGARDIA_SERVER not in hass.data:
server = egardiaserver.EgardiaServer("", rs_port)
bound = server.bind()
if not bound:
raise OSError(
"Binding error occurred while starting EgardiaServer."
)
hass.data[EGARDIA_SERVER] = server
server.start()
def handle_stop_event(event):
"""Handle Home Assistant stop event."""
server.stop()
# listen to Home Assistant stop event
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, handle_stop_event)
except OSError:
_LOGGER.error("Binding error occurred while starting EgardiaServer")
return False
discovery.load_platform(
hass, "alarm_control_panel", DOMAIN, discovered=conf, hass_config=config
)
# Get the sensors from the device and add those
sensors = device.getsensors()
discovery.load_platform(
hass, "binary_sensor", DOMAIN, {ATTR_DISCOVER_DEVICES: sensors}, config
)
return True
|
import os
import unittest
def get_test_path():
return os.path.abspath(os.path.dirname(__file__))
class RoslibStackManifestTest(unittest.TestCase):
def _subtest_parse_stack_example1(self, m):
from roslib.manifestlib import _Manifest
        self.assertIsInstance(m, _Manifest)
        self.assertEqual('stack', m._type)
        self.assertEqual('a brief description', m.brief)
        self.assertEqual('Line 1\nLine 2', m.description.strip())
        self.assertEqual('The authors\ngo here', m.author.strip())
        self.assertEqual('Public Domain\nwith other stuff', m.license.strip())
        self.assertEqual('http://ros.org/stack/', m.url)
        self.assertEqual('http://www.willowgarage.com/files/willowgarage/robot10.jpg', m.logo)
        dpkgs = [d.stack for d in m.depends]
        self.assertEqual({'stackname', 'common'}, set(dpkgs))
        self.assertEqual([], m.rosdeps)
        self.assertEqual([], m.exports)
def _subtest_parse_stack_version(self, m):
        self.assertEqual('1.2.3', m.version)
def test_parse_example1_file(self):
from roslib.stack_manifest import parse_file
p = os.path.join(get_test_path(), 'manifest_tests', 'stack_example1.xml')
self._subtest_parse_stack_example1(parse_file(p))
p = os.path.join(get_test_path(), 'manifest_tests', 'stack_version.xml')
self._subtest_parse_stack_version(parse_file(p))
def test_parse_example1_string(self):
from roslib.manifestlib import parse, _Manifest
self._subtest_parse_stack_example1(parse(_Manifest('stack'), STACK_EXAMPLE1))
def test_StackManifest(self):
from roslib.stack_manifest import StackManifest
m = StackManifest()
        self.assertEqual('stack', m._type)
def test_StackManifest_str(self):
# just make sure it doesn't crash
from roslib.stack_manifest import parse
str(parse(STACK_EXAMPLE1))
def test_StackManifest_xml(self):
from roslib.stack_manifest import parse
m = parse(STACK_EXAMPLE1)
self._subtest_parse_stack_example1(m)
# verify roundtrip
m2 = parse(m.xml())
self._subtest_parse_stack_example1(m2)
# Bad-file examples should follow the roslaunch tests, where each example has just one thing wrong.
STACK_EXAMPLE1 = """<stack>
<description brief="a brief description">Line 1
Line 2
</description>
<author>The authors
go here</author>
<license>Public Domain
with other stuff</license>
<url>http://ros.org/stack/</url>
<logo>http://www.willowgarage.com/files/willowgarage/robot10.jpg</logo>
<depend stack="stackname" />
<depend stack="common"/>
</stack>"""
STACK_INVALID1 = """<stack>
<description brief="a brief description">Line 1</description>
<author>The authors</author>
<license>Public Domain</license>
<rosdep name="python" />
</stack>"""
STACK_INVALID2 = """<stack>
<description brief="a brief description">Line 1</description>
<author>The authors</author>
<license>Public Domain</license>
<export>
<cpp cflags="-I${prefix}/include" lflags="-L${prefix}/lib -lros"/>
<cpp os="osx" cflags="-I${prefix}/include" lflags="-L${prefix}/lib -lrosthread -framework CoreServices"/>
</export>
</stack>"""
|
import logging
import re
from . import info
from itertools import cycle, repeat, chain
from .util import parse_duration
from .module_exceptions import StepperConfigurationError
from builtins import range
class LoadPlanBuilder(object):
def __init__(self):
self.generators = []
self.steps = []
self.instances = 0
self.duration = 0
self.log = logging.getLogger(__name__)
def start(self, count):
self.log.debug("Start %s instances at %sms" % (count, self.duration))
if count < 0:
raise StepperConfigurationError(
"Can not stop instances in instances_schedule.")
self.generators.append(repeat(int(self.duration), count))
self.instances += count
return self
def wait(self, duration):
self.log.debug("Wait for %sms from %sms" % (duration, self.duration))
self.duration += duration
self.steps.append((self.instances, int(duration) // 1000))
return self
def ramp(self, count, duration):
self.log.debug(
"Ramp %s instances in %sms from %sms" %
(count, duration, self.duration))
if count < 0:
raise StepperConfigurationError(
"Can not stop instances in instances_schedule.")
interval = float(duration) / (count - 1)
start_time = self.duration
self.generators.append(
int(start_time + i * interval) for i in range(0, count))
self.steps += [(self.instances + i + 1, int(interval / 1000.0))
for i in range(0, count)]
self.instances += count
self.duration += duration
return self
def const(self, instances, duration):
self.start(instances - self.instances)
self.wait(duration)
return self
def line(self, initial_instances, final_instances, duration):
self.start(initial_instances - self.instances - 1)
self.ramp(final_instances - initial_instances + 1, duration)
return self
def stairway(
self, initial_instances, final_instances, step_size, step_duration):
step_count = (final_instances - initial_instances) // step_size
self.log.debug("Making a stairway: %s steps" % step_count)
self.start(initial_instances - self.instances)
for i in range(1, step_count + 1):
self.wait(step_duration).start(step_size)
if final_instances != self.instances:
self.wait(step_duration).start(final_instances - self.instances)
self.wait(step_duration)
return self
def add_step(self, step_config):
def parse_ramp(params):
template = re.compile(r'(\d+),\s*([0-9.]+[dhms]?)+\)')
s_res = template.search(params)
if s_res:
instances, interval = s_res.groups()
self.ramp(int(instances), parse_duration(interval))
else:
self.log.info(
"Ramp step format: 'ramp(<instances_to_start>, <step_duration>)'"
)
raise StepperConfigurationError(
"Error in step configuration: 'ramp(%s'" % params)
def parse_const(params):
template = re.compile(r'(\d+),\s*([0-9.]+[dhms]?)+\)')
s_res = template.search(params)
if s_res:
instances, interval = s_res.groups()
self.const(int(instances), parse_duration(interval))
else:
self.log.info(
"Const step format: 'const(<instances_count>, <step_duration>)'"
)
raise StepperConfigurationError(
"Error in step configuration: 'const(%s'" % params)
def parse_start(params):
template = re.compile(r'(\d+)\)')
s_res = template.search(params)
if s_res:
                instances = s_res.groups()[0]
                self.start(int(instances))
else:
self.log.info("Start step format: 'start(<instances_count>)'")
raise StepperConfigurationError(
"Error in step configuration: 'start(%s'" % params)
def parse_line(params):
template = re.compile(r'(\d+),\s*(\d+),\s*([0-9.]+[dhms]?)+\)')
s_res = template.search(params)
if s_res:
initial_instances, final_instances, interval = s_res.groups()
self.line(
int(initial_instances),
int(final_instances), parse_duration(interval))
else:
self.log.info(
"Line step format: 'line(<initial_instances>, <final_instances>, <step_duration>)'"
)
raise StepperConfigurationError(
"Error in step configuration: 'line(%s'" % params)
def parse_wait(params):
template = re.compile(r'([0-9.]+[dhms]?)+\)')
s_res = template.search(params)
if s_res:
duration = s_res.groups()[0]
self.wait(parse_duration(duration))
else:
self.log.info("Wait step format: 'wait(<step_duration>)'")
raise StepperConfigurationError(
"Error in step configuration: 'wait(%s'" % params)
def parse_stairway(params):
template = re.compile(
r'(\d+),\s*(\d+),\s*(\d+),\s*([0-9.]+[dhms]?)+\)')
s_res = template.search(params)
if s_res:
                initial_instances, final_instances, step_size, step_duration = s_res.groups()
self.stairway(
int(initial_instances),
int(final_instances),
int(step_size), parse_duration(step_duration))
else:
self.log.info(
"Stairway step format: 'step(<initial_instances>, <final_instances>, <step_size>, <step_duration>)'"
)
raise StepperConfigurationError(
"Error in step configuration: 'step(%s'" % params)
_plans = {
'line': parse_line,
'const': parse_const,
'step': parse_stairway,
'ramp': parse_ramp,
'wait': parse_wait,
'start': parse_start,
}
step_type, params = step_config.split('(')
step_type = step_type.strip()
if step_type in _plans:
_plans[step_type](params)
else:
raise NotImplementedError(
'No such load type implemented for instances_schedule: "%s"' %
step_type)
def add_all_steps(self, steps):
for step in steps:
self.add_step(step)
return self
def create(self):
self.generators.append(cycle([0]))
return chain(*self.generators)
def create(instances_schedule):
'''
Creates load plan timestamps generator
>>> from util import take
>>> take(7, LoadPlanBuilder().ramp(5, 4000).create())
[0, 1000, 2000, 3000, 4000, 0, 0]
>>> take(7, create(['ramp(5, 4s)']))
[0, 1000, 2000, 3000, 4000, 0, 0]
>>> take(12, create(['ramp(5, 4s)', 'wait(5s)', 'ramp(5,4s)']))
[0, 1000, 2000, 3000, 4000, 9000, 10000, 11000, 12000, 13000, 0, 0]
>>> take(7, create(['wait(5s)', 'ramp(5, 0)']))
[5000, 5000, 5000, 5000, 5000, 0, 0]
>>> take(7, create([]))
[0, 0, 0, 0, 0, 0, 0]
>>> take(12, create(['line(1, 9, 4s)']))
[0, 500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 0, 0, 0]
>>> take(12, create(['const(3, 5s)', 'line(7, 11, 2s)']))
[0, 0, 0, 5000, 5000, 5000, 5000, 5500, 6000, 6500, 7000, 0]
>>> take(12, create(['step(2, 10, 2, 3s)']))
[0, 0, 3000, 3000, 6000, 6000, 9000, 9000, 12000, 12000, 0, 0]
>>> take(12, LoadPlanBuilder().const(3, 1000).line(5, 10, 5000).steps)
[(3, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1), (10, 1)]
>>> take(12, LoadPlanBuilder().stairway(100, 950, 100, 30000).steps)
[(100, 30), (200, 30), (300, 30), (400, 30), (500, 30), (600, 30), (700, 30), (800, 30), (900, 30), (950, 30)]
>>> LoadPlanBuilder().stairway(100, 950, 100, 30000).instances
950
>>> LoadPlanBuilder().const(3, 1000).line(5, 10, 5000).instances
10
>>> LoadPlanBuilder().line(1, 100, 60000).instances
100
'''
lpb = LoadPlanBuilder().add_all_steps(instances_schedule)
lp = lpb.create()
info.status.publish('duration', 0)
# info.status.publish('steps', lpb.steps)
info.status.publish('steps', [])
info.status.publish('instances', lpb.instances)
return lp
|
import hangups
from common import run_example
async def upload_image(client, args):
# Upload image to obtain image_id:
    with open(args.image, 'rb') as image_file:
        uploaded_image = await client.upload_image(
            image_file, return_uploaded_image=True
        )
# Send a chat message referencing the uploaded image_id:
request = hangups.hangouts_pb2.SendChatMessageRequest(
request_header=client.get_request_header(),
event_request_header=hangups.hangouts_pb2.EventRequestHeader(
conversation_id=hangups.hangouts_pb2.ConversationId(
id=args.conversation_id
),
client_generated_id=client.get_client_generated_id(),
),
existing_media=hangups.hangouts_pb2.ExistingMedia(
photo=hangups.hangouts_pb2.Photo(
photo_id=uploaded_image.image_id,
),
),
)
await client.send_chat_message(request)
if __name__ == '__main__':
run_example(upload_image, '--conversation-id', '--image')
|
import logging
from pencompy.pencompy import Pencompy
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_BOARDS = "boards"
CONF_BOARD = "board"
CONF_ADDR = "addr"
CONF_RELAYS = "relays"
RELAY_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_ADDR): cv.positive_int,
vol.Optional(CONF_BOARD, default=0): cv.positive_int,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_BOARDS, default=1): cv.positive_int,
vol.Required(CONF_RELAYS): vol.All(cv.ensure_list, [RELAY_SCHEMA]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Pencom relay platform (pencompy)."""
# Assign configuration variables.
host = config[CONF_HOST]
port = config[CONF_PORT]
boards = config[CONF_BOARDS]
# Setup connection
try:
hub = Pencompy(host, port, boards=boards)
except OSError as error:
_LOGGER.error("Could not connect to pencompy: %s", error)
raise PlatformNotReady from error
# Add devices.
devs = []
for relay in config[CONF_RELAYS]:
name = relay[CONF_NAME]
board = relay[CONF_BOARD]
addr = relay[CONF_ADDR]
devs.append(PencomRelay(hub, board, addr, name))
add_entities(devs, True)
class PencomRelay(SwitchEntity):
"""Representation of a pencom relay."""
def __init__(self, hub, board, addr, name):
"""Create a relay."""
self._hub = hub
self._board = board
self._addr = addr
self._name = name
self._state = None
@property
def name(self):
"""Relay name."""
return self._name
@property
def is_on(self):
"""Return a relay's state."""
return self._state
def turn_on(self, **kwargs):
"""Turn a relay on."""
self._hub.set(self._board, self._addr, True)
def turn_off(self, **kwargs):
"""Turn a relay off."""
self._hub.set(self._board, self._addr, False)
def update(self):
"""Refresh a relay's state."""
self._state = self._hub.get(self._board, self._addr)
@property
def device_state_attributes(self):
"""Return supported attributes."""
return {"board": self._board, "addr": self._addr}
|
from flask import Blueprint, request
from app.agents.models import Bot
from app.commons import build_response
bots = Blueprint('bots_blueprint', __name__,
url_prefix='/agents/<bot_name>')
@bots.route('/config', methods=['PUT'])
def set_config(bot_name):
"""
    Update the bot config.
:param bot_name:
:return:
"""
content = request.get_json(silent=True)
bot = Bot.objects.get(name=bot_name)
bot.config = content
bot.save()
return build_response.sent_ok()
@bots.route('/config', methods=['GET'])
def get_config(bot_name):
"""
    Read the bot config.
    :param bot_name:
    :return:
"""
bot = Bot.objects.get(name=bot_name)
return build_response.build_json(bot.config)
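
# Hedged usage sketch (not part of the original module): registering this
# blueprint on a Flask application; the entry point below is an assumption
# made only for illustration.
if __name__ == '__main__':
    from flask import Flask

    app = Flask(__name__)
    app.register_blueprint(bots)
    # The config routes are now served under /agents/<bot_name>/config.
    app.run(debug=True)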
|
from cms.utils import get_current_site
from cms.utils.page import get_page_from_path
from django.urls import reverse
from filer.models.imagemodels import Image
from rest_framework import serializers
class CMSPagesField(serializers.Field):
"""
A serializer field used to create the many-to-many relations for models inheriting from the
unmanaged :class:`shop.models.related.BaseProductPage`.
Usage in serializers to import/export product model data:
class MyProductSerializer():
...
cms_pages = CMSPagesField()
...
"""
def to_representation(self, value):
urls = {page.get_absolute_url() for page in value.all()}
return list(urls)
def to_internal_value(self, data):
site = get_current_site()
pages_root = reverse('pages-root')
ret = []
for path in data:
if path.startswith(pages_root):
path = path[len(pages_root):]
# strip any final slash
if path.endswith('/'):
path = path[:-1]
page = get_page_from_path(site, path)
if page:
ret.append(page)
return ret
class ImagesField(serializers.Field):
"""
A serializer field used to create the many-to-many relations for models inheriting from the
unmanaged :class:`shop.models.related.BaseProductImage`.
Usage in serializers to import/export product model data:
class MyProductSerializer():
...
images = ImagesField()
...
"""
def to_representation(self, value):
return list(value.values_list('pk', flat=True))
def to_internal_value(self, data):
return list(Image.objects.filter(pk__in=data))
class ValueRelatedField(serializers.RelatedField):
"""
A serializer field used to access a single value from a related model.
Usage:
myfield = ValueRelatedField(model=MyModel)
myfield = ValueRelatedField(model=MyModel, field_name='myfield')
    This serializes objects of type ``MyModel`` so that the returned data is a simple scalar.
    On deserialization it creates an object of type ``MyModel`` if none can be found with the
    given value for the field.
"""
def __init__(self, *args, **kwargs):
self.model = kwargs.pop('model')
self.related_field_name = kwargs.pop('field_name', 'name')
super().__init__(*args, **kwargs)
def get_queryset(self):
return self.model.objects.all()
def to_representation(self, value):
return getattr(value, self.related_field_name)
def to_internal_value(self, value):
data = {self.related_field_name: value}
instance, _ = self.model.objects.get_or_create(**data)
return instance
|
from homeassistant.components.sensor import DEVICE_CLASS_TEMPERATURE
from homeassistant.const import PERCENTAGE, TEMP_CELSIUS, VOLT
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level, icon_for_signal_level
from .account import StarlineAccount, StarlineDevice
from .const import DOMAIN
from .entity import StarlineEntity
SENSOR_TYPES = {
"battery": ["Battery", None, VOLT, None],
"balance": ["Balance", None, None, "mdi:cash-multiple"],
"ctemp": ["Interior Temperature", DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, None],
"etemp": ["Engine Temperature", DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS, None],
"gsm_lvl": ["GSM Signal", None, PERCENTAGE, None],
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the StarLine sensors."""
account: StarlineAccount = hass.data[DOMAIN][entry.entry_id]
entities = []
for device in account.api.devices.values():
for key, value in SENSOR_TYPES.items():
sensor = StarlineSensor(account, device, key, *value)
if sensor.state is not None:
entities.append(sensor)
async_add_entities(entities)
class StarlineSensor(StarlineEntity, Entity):
"""Representation of a StarLine sensor."""
def __init__(
self,
account: StarlineAccount,
device: StarlineDevice,
key: str,
name: str,
device_class: str,
unit: str,
icon: str,
):
"""Initialize StarLine sensor."""
super().__init__(account, device, key, name)
self._device_class = device_class
self._unit = unit
self._icon = icon
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._key == "battery":
return icon_for_battery_level(
battery_level=self._device.battery_level_percent,
charging=self._device.car_state.get("ign", False),
)
if self._key == "gsm_lvl":
return icon_for_signal_level(signal_level=self._device.gsm_level_percent)
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
if self._key == "battery":
return self._device.battery_level
if self._key == "balance":
return self._device.balance.get("value")
if self._key == "ctemp":
return self._device.temp_inner
if self._key == "etemp":
return self._device.temp_engine
if self._key == "gsm_lvl":
return self._device.gsm_level_percent
return None
@property
def unit_of_measurement(self):
"""Get the unit of measurement."""
if self._key == "balance":
return self._device.balance.get("currency") or "₽"
return self._unit
@property
def device_class(self):
"""Return the class of the sensor."""
return self._device_class
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._key == "balance":
return self._account.balance_attrs(self._device)
if self._key == "gsm_lvl":
return self._account.gsm_attrs(self._device)
return None
|
import functools
from pychromecast.discovery import discover_chromecasts, stop_discovery
from homeassistant import config_entries
from homeassistant.components import zeroconf
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
from .helpers import ChromeCastZeroconf
async def _async_has_devices(hass):
"""
Return if there are devices that can be discovered.
This function will be called if no devices are already found through the zeroconf
integration.
"""
zeroconf_instance = ChromeCastZeroconf.get_zeroconf()
if zeroconf_instance is None:
zeroconf_instance = await zeroconf.async_get_instance(hass)
casts, browser = await hass.async_add_executor_job(
functools.partial(discover_chromecasts, zeroconf_instance=zeroconf_instance)
)
stop_discovery(browser)
return casts
config_entry_flow.register_discovery_flow(
DOMAIN, "Google Cast", _async_has_devices, config_entries.CONN_CLASS_LOCAL_PUSH
)
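# Note: register_discovery_flow wires _async_has_devices into a single-instance
# discovery config flow; the config entry is only created when the check
# returns a truthy value (here, the list of discovered casts).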
|
import unittest
from homeassistant.components.command_line import sensor as command_line
from homeassistant.helpers.template import Template
from tests.async_mock import patch
from tests.common import get_test_home_assistant
class TestCommandSensorSensor(unittest.TestCase):
"""Test the Command line sensor."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.addCleanup(self.hass.stop)
def update_side_effect(self, data):
"""Side effect function for mocking CommandSensorData.update()."""
self.commandline.data = data
def test_setup(self):
"""Test sensor setup."""
config = {
"name": "Test",
"unit_of_measurement": "in",
"command": "echo 5",
"command_timeout": 15,
}
devices = []
def add_dev_callback(devs, update):
"""Add callback to add devices."""
for dev in devs:
devices.append(dev)
command_line.setup_platform(self.hass, config, add_dev_callback)
assert len(devices) == 1
entity = devices[0]
entity.update()
assert entity.name == "Test"
assert entity.unit_of_measurement == "in"
assert entity.state == "5"
def test_template(self):
"""Test command sensor with template."""
data = command_line.CommandSensorData(self.hass, "echo 50", 15)
entity = command_line.CommandSensor(
self.hass,
data,
"test",
"in",
Template("{{ value | multiply(0.1) }}", self.hass),
[],
)
entity.update()
assert float(entity.state) == 5
def test_template_render(self):
"""Ensure command with templates get rendered properly."""
self.hass.states.set("sensor.test_state", "Works")
data = command_line.CommandSensorData(
self.hass, "echo {{ states.sensor.test_state.state }}", 15
)
data.update()
assert data.value == "Works"
def test_template_render_with_quote(self):
"""Ensure command with templates and quotes get rendered properly."""
self.hass.states.set("sensor.test_state", "Works 2")
with patch(
"homeassistant.components.command_line.subprocess.check_output",
return_value=b"Works\n",
) as check_output:
data = command_line.CommandSensorData(
self.hass,
'echo "{{ states.sensor.test_state.state }}" "3 4"',
15,
)
data.update()
assert data.value == "Works"
check_output.assert_called_once_with(
'echo "Works 2" "3 4"', shell=True, timeout=15 # nosec # shell by design
)
def test_bad_command(self):
"""Test bad command."""
data = command_line.CommandSensorData(self.hass, "asdfasdf", 15)
data.update()
assert data.value is None
def test_update_with_json_attrs(self):
"""Test attributes get extracted from a JSON result."""
data = command_line.CommandSensorData(
self.hass,
(
'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\
\\"another_json_value\\", \\"key_three\\": \\"value_three\\" }'
),
15,
)
self.sensor = command_line.CommandSensor(
self.hass, data, "test", None, None, ["key", "another_key", "key_three"]
)
self.sensor.update()
assert self.sensor.device_state_attributes["key"] == "some_json_value"
assert (
self.sensor.device_state_attributes["another_key"] == "another_json_value"
)
assert self.sensor.device_state_attributes["key_three"] == "value_three"
@patch("homeassistant.components.command_line.sensor._LOGGER")
def test_update_with_json_attrs_no_data(self, mock_logger):
"""Test attributes when no JSON result fetched."""
data = command_line.CommandSensorData(self.hass, "echo ", 15)
self.sensor = command_line.CommandSensor(
self.hass, data, "test", None, None, ["key"]
)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch("homeassistant.components.command_line.sensor._LOGGER")
def test_update_with_json_attrs_not_dict(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
data = command_line.CommandSensorData(self.hass, "echo [1, 2, 3]", 15)
self.sensor = command_line.CommandSensor(
self.hass, data, "test", None, None, ["key"]
)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch("homeassistant.components.command_line.sensor._LOGGER")
def test_update_with_json_attrs_bad_JSON(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
data = command_line.CommandSensorData(
self.hass, "echo This is text rather than JSON data.", 15
)
self.sensor = command_line.CommandSensor(
self.hass, data, "test", None, None, ["key"]
)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
def test_update_with_missing_json_attrs(self):
"""Test attributes get extracted from a JSON result."""
data = command_line.CommandSensorData(
self.hass,
(
'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\
\\"another_json_value\\", \\"key_three\\": \\"value_three\\" }'
),
15,
)
self.sensor = command_line.CommandSensor(
self.hass,
data,
"test",
None,
None,
["key", "another_key", "key_three", "special_key"],
)
self.sensor.update()
assert self.sensor.device_state_attributes["key"] == "some_json_value"
assert (
self.sensor.device_state_attributes["another_key"] == "another_json_value"
)
assert self.sensor.device_state_attributes["key_three"] == "value_three"
assert "special_key" not in self.sensor.device_state_attributes
def test_update_with_unnecessary_json_attrs(self):
"""Test attributes get extracted from a JSON result."""
data = command_line.CommandSensorData(
self.hass,
(
'echo { \\"key\\": \\"some_json_value\\", \\"another_key\\":\
\\"another_json_value\\", \\"key_three\\": \\"value_three\\" }'
),
15,
)
self.sensor = command_line.CommandSensor(
self.hass, data, "test", None, None, ["key", "another_key"]
)
self.sensor.update()
assert self.sensor.device_state_attributes["key"] == "some_json_value"
assert (
self.sensor.device_state_attributes["another_key"] == "another_json_value"
)
assert "key_three" not in self.sensor.device_state_attributes
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert 'delegated-instance-vagrant' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/delegated-instance-vagrant')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
from tempfile import NamedTemporaryFile
from pygal import (
XY, Bar, Box, Config, DateLine, DateTimeLine, Dot, Funnel, Gauge,
Histogram, HorizontalBar, HorizontalLine, HorizontalStackedBar,
HorizontalStackedLine, Line, Pie, Pyramid, Radar, SolidGauge,
TimeDeltaLine, TimeLine, Treemap, formatters
)
from pygal._compat import _ellipsis, u
from pygal.graph.dual import Dual
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.map import BaseMap
from pygal.test.utils import texts
def test_config_behaviours():
"""Test that all different way to set config produce same results"""
line1 = Line()
line1.show_legend = False
line1.fill = True
line1.pretty_print = True
line1.no_prefix = True
line1.x_labels = ['a', 'b', 'c']
line1.add('_', [1, 2, 3])
l1 = line1.render()
q = line1.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".legend")) == 0
assert len(q(".x.axis .guides")) == 3
assert len(q(".y.axis .guides")) == 11
assert len(q(".dots")) == 3
assert q(".axis.x text").map(texts) == ['a', 'b', 'c']
line2 = Line(
show_legend=False,
fill=True,
pretty_print=True,
no_prefix=True,
x_labels=['a', 'b', 'c']
)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 == l2
class LineConfig(Config):
show_legend = False
fill = True
pretty_print = True
no_prefix = True
x_labels = ['a', 'b', 'c']
line3 = Line(LineConfig)
line3.add('_', [1, 2, 3])
l3 = line3.render()
assert l1 == l3
line4 = Line(LineConfig())
line4.add('_', [1, 2, 3])
l4 = line4.render()
assert l1 == l4
line_config = Config()
line_config.show_legend = False
line_config.fill = True
line_config.pretty_print = True
line_config.no_prefix = True
line_config.x_labels = ['a', 'b', 'c']
line5 = Line(line_config)
line5.add('_', [1, 2, 3])
l5 = line5.render()
assert l1 == l5
l6 = Line(line_config)(1, 2, 3, title='_').render()
assert l1 == l6
def test_config_alterations_class():
"""Assert a config can be changed on config class"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
line1 = Line(LineConfig)
line1.add('_', [1, 2, 3])
l1 = line1.render()
LineConfig.stroke = False
line2 = Line(LineConfig)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 != l2
l1bis = line1.render()
assert l1 == l1bis
def test_config_alterations_instance():
"""Assert a config can be changed on instance"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
config = LineConfig()
line1 = Line(config)
line1.add('_', [1, 2, 3])
l1 = line1.render()
config.stroke = False
line2 = Line(config)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 != l2
l1bis = line1.render()
assert l1 == l1bis
def test_config_alterations_kwargs():
"""Assert a config can be changed with keyword args"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
config = LineConfig()
line1 = Line(config)
line1.add('_', [1, 2, 3])
l1 = line1.render()
line1.stroke = False
l1bis = line1.render()
assert l1 != l1bis
line2 = Line(config)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 == l2
assert l1bis != l2
line3 = Line(config, title='Title')
line3.add('_', [1, 2, 3])
l3 = line3.render()
assert l3 != l2
l2bis = line2.render()
assert l2 == l2bis
def test_logarithmic():
"""Test logarithmic option"""
line = Line(logarithmic=True)
line.add('_', [1, 10**10, 1])
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".legend")) == 1
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 21
assert len(q(".dots")) == 3
def test_interpolation(Chart):
"""Test interpolation option"""
chart = Chart(interpolate='cubic')
chart.add('1', [1, 3, 12, 3, 4])
chart.add('2', [7, -4, 10, None, 8, 3, 1])
q = chart.render_pyquery()
assert len(q(".legend")) == 2
def test_no_data_interpolation(Chart):
"""Test interpolation option with no data"""
chart = Chart(interpolate='cubic')
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_empty_serie_interpolation(Chart):
"""Test interpolation option with an empty serie"""
chart = Chart(interpolate='cubic')
chart.add('Serie', [])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_logarithmic_bad_interpolation():
"""Test interpolation option with a logarithmic chart"""
line = Line(logarithmic=True, interpolate='cubic')
line.add('_', [.001, .00000001, 1])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 41
def test_logarithmic_big_scale():
"""Test logarithmic option with a large range of value"""
line = Line(logarithmic=True)
line.add('_', [10**-10, 10**10, 1])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 21
def test_value_formatter():
"""Test value formatter option"""
line = Line(value_formatter=lambda x: str(x) + u('‰'))
line.add('_', [10**4, 10**5, 23 * 10**4])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 11
assert q(".axis.y text").map(texts) == list(
map(
lambda x: str(x) + u('‰'), map(float, range(20000, 240000, 20000))
)
)
def test_logarithmic_small_scale():
"""Test logarithmic with a small range of values"""
line = Line(logarithmic=True)
line.add('_', [1 + 10**10, 3 + 10**10, 2 + 10**10])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 11
def test_human_readable():
"""Test human readable option"""
line = Line()
line.add('_', [10**4, 10**5, 23 * 10**4])
q = line.render_pyquery()
assert q(".axis.y text").map(texts) == list(
map(str, range(20000, 240000, 20000))
)
line.value_formatter = formatters.human_readable
q = line.render_pyquery()
assert q(".axis.y text").map(texts) == list(
map(lambda x: '%dk' % x, range(20, 240, 20))
)
def test_show_legend():
"""Test show legend option"""
line = Line()
line.add('_', [1, 2, 3])
q = line.render_pyquery()
assert len(q(".legend")) == 1
line.show_legend = False
q = line.render_pyquery()
assert len(q(".legend")) == 0
def test_show_dots():
"""Test show dots option"""
line = Line()
line.add('_', [1, 2, 3])
q = line.render_pyquery()
assert len(q(".dots")) == 3
line.show_dots = False
q = line.render_pyquery()
assert len(q(".dots")) == 0
def test_no_data():
"""Test no data and no data text option"""
line = Line()
q = line.render_pyquery()
assert q(".text-overlay text").text() == "No data"
line.no_data_text = u("þæ®þ怀&ij¿’€")
q = line.render_pyquery()
assert q(".text-overlay text").text() == u("þæ®þ怀&ij¿’€")
def test_include_x_axis(Chart):
"""Test x axis inclusion option"""
chart = Chart()
if Chart in (Pie, Treemap, Radar, Funnel, Dot, Gauge, Histogram, Box,
SolidGauge) or issubclass(Chart, BaseMap):
return
if not chart._dual:
data = 100, 200, 150
else:
data = (1, 100), (3, 200), (2, 150)
chart.add('_', data)
q = chart.render_pyquery()
# Ghost thing
yaxis = ".axis.%s .guides text" % (
'y' if not getattr(chart, 'horizontal', False) else 'x'
)
if not isinstance(chart, Bar):
assert '0' not in q(yaxis).map(texts)
else:
assert '0' in q(yaxis).map(texts)
chart.include_x_axis = True
q = chart.render_pyquery()
assert '0' in q(yaxis).map(texts)
def test_css(Chart):
"""Test css file option"""
css = "{{ id }}text { fill: #bedead; }\n"
with NamedTemporaryFile('w') as f:
f.write(css)
f.flush()
config = Config()
config.css.append('file://' + f.name)
chart = Chart(config)
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
chart = Chart(css=(_ellipsis, 'file://' + f.name))
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
def test_inline_css(Chart):
"""Test inline css option"""
css = "{{ id }}text { fill: #bedead; }\n"
config = Config()
config.css.append('inline:' + css)
chart = Chart(config)
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
def test_meta_config():
"""Test config metaclass"""
from pygal.config import CONFIG_ITEMS
assert all(c.name != 'Unbound' for c in CONFIG_ITEMS)
def test_label_rotation(Chart):
"""Test label rotation option"""
chart = Chart(x_label_rotation=28, y_label_rotation=76)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
if not chart._dual:
chart.x_labels = ['one', 'twoooooooooooooooooooooo', 'three', '4']
q = chart.render_pyquery()
if Chart in (Line, Bar):
assert len(q('.axis.x text[transform^="rotate(28"]')) == 4
assert len(q('.axis.y text[transform^="rotate(76"]')) == 13
def test_legend_at_bottom(Chart):
"""Test legend at bottom option"""
chart = Chart(legend_at_bottom=True)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
lab = chart.render()
chart.legend_at_bottom = False
assert lab != chart.render()
def test_x_y_title(Chart):
"""Test x title and y title options"""
chart = Chart(
title='I Am A Title',
x_title="I am a x title",
y_title="I am a y title"
)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
q = chart.render_pyquery()
assert len(q('.titles .title')) == 3
def test_range(Chart):
"""Test y label major option"""
if Chart in (Pie, Treemap, Dot, SolidGauge) or issubclass(Chart, BaseMap):
return
chart = Chart()
chart.range = (0, 100)
chart.add('', [1, 2, 10])
q = chart.render_pyquery()
axis = map(str, range(0, 101, 10))
if Chart == Radar:
axis = map(str, range(100, -1, -20))
z = 'x' if getattr(chart, 'horizontal', False) or Chart == Gauge else 'y'
assert [t.text for t in q('.axis.%s .guides text' % z)] == list(axis)
def test_x_label_major(Chart):
"""Test x label major option"""
if Chart in (Pie, Treemap, Funnel, Dot, Gauge, Histogram, Box, SolidGauge,
Pyramid, DateTimeLine, TimeLine, DateLine,
TimeDeltaLine) or issubclass(
Chart, (BaseMap, Dual, HorizontalGraph)):
return
chart = Chart()
chart.add('test', range(12))
chart.x_labels = map(str, range(12))
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 0
chart.x_labels_major = ['1', '5', '11', '1.0', '5.0', '11.0']
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 3
assert len(q(".axis.x text")) == 12
chart.show_minor_x_labels = False
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 3
assert len(q(".axis.x text")) == 3
chart.show_minor_x_labels = True
chart.x_labels_major = None
chart.x_labels_major_every = 2
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 6
assert len(q(".axis.x text")) == 12
chart.x_labels_major_every = None
chart.x_labels_major_count = 4
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 4
assert len(q(".axis.x text")) == 12
chart.x_labels_major_every = None
chart.x_labels_major_count = 78
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 12
assert len(q(".axis.x text")) == 12
def test_y_label_major(Chart):
"""Test y label major option"""
if Chart in (Pie, Treemap, Funnel, Dot, Gauge, Histogram, Box, SolidGauge,
HorizontalBar, HorizontalStackedBar, HorizontalStackedLine,
HorizontalLine, Pyramid, DateTimeLine, TimeLine, DateLine,
TimeDeltaLine) or issubclass(Chart, BaseMap):
return
chart = Chart()
data = range(12)
if Chart == XY:
data = list(zip(*[range(12), range(12)]))
chart.add('test', data)
chart.y_labels = range(12)
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
chart.y_labels_major = [1.0, 5.0, 11.0]
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
assert len(q(".axis.y text")) == 12
chart.show_minor_y_labels = False
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
assert len(q(".axis.y text")) == 3
chart.show_minor_y_labels = True
chart.y_labels_major = None
chart.y_labels_major_every = 2
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 6
assert len(q(".axis.y text")) == 12
chart.y_labels_major_every = None
chart.y_labels_major_count = 4
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 4
assert len(q(".axis.y text")) == 12
chart.y_labels_major_every = None
chart.y_labels_major_count = 78
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 12
assert len(q(".axis.y text")) == 12
def test_no_y_labels(Chart):
"""Test no y labels chart"""
chart = Chart()
chart.y_labels = []
chart.add('_', [1, 2, 3])
chart.add('?', [10, 21, 5])
assert chart.render_pyquery()
def test_fill(Chart):
"""Test fill option"""
chart = Chart(fill=True)
chart.add('_', [1, 2, 3])
chart.add('?', [10, 21, 5])
assert chart.render_pyquery()
def test_render_data_uri(Chart):
"""Test the render data uri"""
chart = Chart(fill=True)
chart.add(u('ééé'), [1, 2, 3])
chart.add(u('èèè'), [10, 21, 5])
assert chart.render_data_uri(
).startswith('data:image/svg+xml;charset=utf-8;base64,')
def test_formatters(Chart):
"""Test custom formatters"""
if Chart._dual or Chart == Box:
return
chart = Chart(formatter=lambda x, chart, serie: '%s%s$' % (x, serie.title))
chart.add('_a', [1, 2, {'value': 3, 'formatter': lambda x: u('%s¥') % x}])
chart.add('_b', [4, 5, 6], formatter=lambda x: u('%s€') % x)
chart.x_labels = [2, 4, 6]
chart.x_labels_major = [4]
q = chart.render_pyquery()
assert set(
[v.text for v in q(".value")]
) == set((u('4€'), u('5€'), u('6€'), '1_a$', '2_a$', u('3¥')) +
(('6_a$', u('15€')) if Chart in (Pie, SolidGauge) else ()))
def test_classes(Chart):
"""Test classes option"""
chart = Chart()
assert chart.render_pyquery().attr('class') == 'pygal-chart'
chart = Chart(classes=())
assert not chart.render_pyquery().attr('class')
chart = Chart(classes=(_ellipsis, ))
assert chart.render_pyquery().attr('class') == 'pygal-chart'
chart = Chart(classes=('graph', ))
assert chart.render_pyquery().attr('class') == 'graph'
chart = Chart(classes=('pygal-chart', 'graph'))
assert chart.render_pyquery().attr('class') == 'pygal-chart graph'
chart = Chart(classes=(_ellipsis, 'graph'))
assert chart.render_pyquery().attr('class') == 'pygal-chart graph'
chart = Chart(classes=('graph', _ellipsis))
assert chart.render_pyquery().attr('class') == 'graph pygal-chart'
|
from pysmartthings import Attribute, Capability
from homeassistant.components.fan import (
ATTR_SPEED,
ATTR_SPEED_LIST,
DOMAIN as FAN_DOMAIN,
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
)
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_entity_state(hass, device_factory):
"""Tests the state attributes properly match the fan types."""
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "on", Attribute.fan_speed: 2},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
    # Fan 1
state = hass.states.get("fan.fan_1")
assert state.state == "on"
assert state.attributes[ATTR_SUPPORTED_FEATURES] == SUPPORT_SET_SPEED
assert state.attributes[ATTR_SPEED] == SPEED_MEDIUM
assert state.attributes[ATTR_SPEED_LIST] == [
SPEED_OFF,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_HIGH,
]
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "on", Attribute.fan_speed: 2},
)
# Act
await setup_platform(hass, FAN_DOMAIN, devices=[device])
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Assert
entry = entity_registry.async_get("fan.fan_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_turn_off(hass, device_factory):
"""Test the fan turns of successfully."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "on", Attribute.fan_speed: 2},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"fan", "turn_off", {"entity_id": "fan.fan_1"}, blocking=True
)
# Assert
state = hass.states.get("fan.fan_1")
assert state is not None
assert state.state == "off"
async def test_turn_on(hass, device_factory):
"""Test the fan turns of successfully."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "off", Attribute.fan_speed: 0},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"fan", "turn_on", {ATTR_ENTITY_ID: "fan.fan_1"}, blocking=True
)
# Assert
state = hass.states.get("fan.fan_1")
assert state is not None
assert state.state == "on"
async def test_turn_on_with_speed(hass, device_factory):
"""Test the fan turns on to the specified speed."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "off", Attribute.fan_speed: 0},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"fan",
"turn_on",
{ATTR_ENTITY_ID: "fan.fan_1", ATTR_SPEED: SPEED_HIGH},
blocking=True,
)
# Assert
state = hass.states.get("fan.fan_1")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_SPEED] == SPEED_HIGH
async def test_set_speed(hass, device_factory):
"""Test setting to specific fan speed."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "off", Attribute.fan_speed: 0},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"fan",
"set_speed",
{ATTR_ENTITY_ID: "fan.fan_1", ATTR_SPEED: SPEED_HIGH},
blocking=True,
)
# Assert
state = hass.states.get("fan.fan_1")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_SPEED] == SPEED_HIGH
async def test_update_from_signal(hass, device_factory):
"""Test the fan updates when receiving a signal."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "off", Attribute.fan_speed: 0},
)
await setup_platform(hass, FAN_DOMAIN, devices=[device])
await device.switch_on(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("fan.fan_1")
assert state is not None
assert state.state == "on"
async def test_unload_config_entry(hass, device_factory):
"""Test the fan is removed when the config entry is unloaded."""
# Arrange
device = device_factory(
"Fan 1",
capabilities=[Capability.switch, Capability.fan_speed],
status={Attribute.switch: "off", Attribute.fan_speed: 0},
)
config_entry = await setup_platform(hass, FAN_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "fan")
# Assert
assert not hass.states.get("fan.fan_1")
|
def test_battery_icon():
"""Test icon generator for battery sensor."""
from homeassistant.helpers.icon import icon_for_battery_level
assert icon_for_battery_level(None, True) == "mdi:battery-unknown"
assert icon_for_battery_level(None, False) == "mdi:battery-unknown"
assert icon_for_battery_level(5, True) == "mdi:battery-outline"
assert icon_for_battery_level(5, False) == "mdi:battery-alert"
assert icon_for_battery_level(100, True) == "mdi:battery-charging-100"
assert icon_for_battery_level(100, False) == "mdi:battery"
iconbase = "mdi:battery"
for level in range(0, 100, 5):
print(
"Level: %d. icon: %s, charging: %s"
% (
level,
icon_for_battery_level(level, False),
icon_for_battery_level(level, True),
)
)
if level <= 10:
postfix_charging = "-outline"
elif level <= 30:
postfix_charging = "-charging-20"
elif level <= 50:
postfix_charging = "-charging-40"
elif level <= 70:
postfix_charging = "-charging-60"
elif level <= 90:
postfix_charging = "-charging-80"
else:
postfix_charging = "-charging-100"
if 5 < level < 95:
postfix = "-{}".format(int(round(level / 10 - 0.01)) * 10)
elif level <= 5:
postfix = "-alert"
else:
postfix = ""
assert iconbase + postfix == icon_for_battery_level(level, False)
assert iconbase + postfix_charging == icon_for_battery_level(level, True)
def test_signal_icon():
"""Test icon generator for signal sensor."""
from homeassistant.helpers.icon import icon_for_signal_level
assert icon_for_signal_level(None) == "mdi:signal-cellular-outline"
assert icon_for_signal_level(0) == "mdi:signal-cellular-outline"
assert icon_for_signal_level(5) == "mdi:signal-cellular-1"
assert icon_for_signal_level(40) == "mdi:signal-cellular-2"
assert icon_for_signal_level(80) == "mdi:signal-cellular-3"
assert icon_for_signal_level(100) == "mdi:signal-cellular-3"
|
from uuid import uuid4
from yandextank.plugins.InfluxUploader.decoder import Decoder
class TestDecoder(object):
def test_metrics_cast(self):
test_uuid = str(uuid4())
tank_tag = 'test_tank_tag'
comment = 'test comment'
raw_metrics = {
'metric1': -123,
'metric2': -123.456,
'metric3': 123,
'metric4': 123.456,
'metric5': 0,
'metric6': -0.1,
'metric7': 0.1,
'metric8': 'down',
}
timestamp = 123456789
host = '127.0.0.1'
data = [
{
'data': {
host: {
'comment': comment,
'metrics': raw_metrics
}
},
'timestamp': timestamp
}
]
expected_metrics = {
'metric1': -123.0,
'metric2': -123.456,
'metric3': 123.0,
'metric4': 123.456,
'metric5': 0.0,
'metric6': -0.1,
'metric7': 0.1,
'metric8': 'down'
}
decoder = Decoder(tank_tag, test_uuid, {}, True, True)
result_points = decoder.decode_monitoring(data)
assert (len(result_points) == 1)
r_point = result_points[0]
# check other props
assert (r_point['time'] == timestamp)
assert (r_point['measurement'] == 'monitoring')
assert (r_point['tags']['comment'] == comment)
assert (r_point['tags']['host'] == host)
assert (r_point['tags']['tank'] == tank_tag)
assert (r_point['tags']['uuid'] == test_uuid)
# check metric cast
assert (len(r_point['fields']) == len(expected_metrics))
        for metric, value in r_point['fields'].items():
            assert metric in expected_metrics
            assert isinstance(value, type(expected_metrics[metric]))
            assert value == expected_metrics[metric]
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.const import STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
STATE_STOPPED = "stopped"
CURRENT_GARAGE_STATE_MAP = {
0: STATE_OPEN,
1: STATE_CLOSED,
2: STATE_OPENING,
3: STATE_CLOSING,
4: STATE_STOPPED,
}
TARGET_GARAGE_STATE_MAP = {STATE_OPEN: 0, STATE_CLOSED: 1, STATE_STOPPED: 2}
CURRENT_WINDOW_STATE_MAP = {0: STATE_CLOSING, 1: STATE_OPENING, 2: STATE_STOPPED}
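# HomeKit reports door and position states as small integers; the maps above
# translate them into Home Assistant cover states (and back for target states).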
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit covers."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
info = {"aid": aid, "iid": service["iid"]}
if service["stype"] == "garage-door-opener":
async_add_entities([HomeKitGarageDoorCover(conn, info)], True)
return True
if service["stype"] in ("window-covering", "window"):
async_add_entities([HomeKitWindowCover(conn, info)], True)
return True
return False
conn.add_listener(async_add_service)
class HomeKitGarageDoorCover(HomeKitEntity, CoverEntity):
"""Representation of a HomeKit Garage Door."""
@property
def device_class(self):
"""Define this cover as a garage door."""
return "garage"
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.DOOR_STATE_CURRENT,
CharacteristicsTypes.DOOR_STATE_TARGET,
CharacteristicsTypes.OBSTRUCTION_DETECTED,
]
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_OPEN | SUPPORT_CLOSE
@property
def state(self):
"""Return the current state of the garage door."""
value = self.service.value(CharacteristicsTypes.DOOR_STATE_CURRENT)
return CURRENT_GARAGE_STATE_MAP[value]
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return self.state == STATE_CLOSED
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self.state == STATE_CLOSING
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self.state == STATE_OPENING
async def async_open_cover(self, **kwargs):
"""Send open command."""
await self.set_door_state(STATE_OPEN)
async def async_close_cover(self, **kwargs):
"""Send close command."""
await self.set_door_state(STATE_CLOSED)
async def set_door_state(self, state):
"""Send state command."""
await self.async_put_characteristics(
{CharacteristicsTypes.DOOR_STATE_TARGET: TARGET_GARAGE_STATE_MAP[state]}
)
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
obstruction_detected = self.service.value(
CharacteristicsTypes.OBSTRUCTION_DETECTED
)
return {"obstruction-detected": obstruction_detected is True}
class HomeKitWindowCover(HomeKitEntity, CoverEntity):
"""Representation of a HomeKit Window or Window Covering."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.POSITION_STATE,
CharacteristicsTypes.POSITION_CURRENT,
CharacteristicsTypes.POSITION_TARGET,
CharacteristicsTypes.POSITION_HOLD,
CharacteristicsTypes.VERTICAL_TILT_CURRENT,
CharacteristicsTypes.VERTICAL_TILT_TARGET,
CharacteristicsTypes.HORIZONTAL_TILT_CURRENT,
CharacteristicsTypes.HORIZONTAL_TILT_TARGET,
CharacteristicsTypes.OBSTRUCTION_DETECTED,
]
@property
def supported_features(self):
"""Flag supported features."""
features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
if self.service.has(CharacteristicsTypes.POSITION_HOLD):
features |= SUPPORT_STOP
supports_tilt = any(
(
self.service.has(CharacteristicsTypes.VERTICAL_TILT_CURRENT),
self.service.has(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT),
)
)
if supports_tilt:
features |= (
SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_SET_TILT_POSITION
)
return features
@property
def current_cover_position(self):
"""Return the current position of cover."""
return self.service.value(CharacteristicsTypes.POSITION_CURRENT)
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return self.current_cover_position == 0
@property
def is_closing(self):
"""Return if the cover is closing or not."""
value = self.service.value(CharacteristicsTypes.POSITION_STATE)
state = CURRENT_WINDOW_STATE_MAP[value]
return state == STATE_CLOSING
@property
def is_opening(self):
"""Return if the cover is opening or not."""
value = self.service.value(CharacteristicsTypes.POSITION_STATE)
state = CURRENT_WINDOW_STATE_MAP[value]
return state == STATE_OPENING
@property
def is_horizontal_tilt(self):
"""Return True if the service has a horizontal tilt characteristic."""
return (
self.service.value(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) is not None
)
@property
def is_vertical_tilt(self):
"""Return True if the service has a vertical tilt characteristic."""
return (
self.service.value(CharacteristicsTypes.VERTICAL_TILT_CURRENT) is not None
)
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt."""
tilt_position = self.service.value(CharacteristicsTypes.VERTICAL_TILT_CURRENT)
if not tilt_position:
tilt_position = self.service.value(
CharacteristicsTypes.HORIZONTAL_TILT_CURRENT
)
return tilt_position
async def async_stop_cover(self, **kwargs):
"""Send hold command."""
await self.async_put_characteristics({CharacteristicsTypes.POSITION_HOLD: 1})
async def async_open_cover(self, **kwargs):
"""Send open command."""
await self.async_set_cover_position(position=100)
async def async_close_cover(self, **kwargs):
"""Send close command."""
await self.async_set_cover_position(position=0)
async def async_set_cover_position(self, **kwargs):
"""Send position command."""
position = kwargs[ATTR_POSITION]
await self.async_put_characteristics(
{CharacteristicsTypes.POSITION_TARGET: position}
)
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
tilt_position = kwargs[ATTR_TILT_POSITION]
if self.is_vertical_tilt:
await self.async_put_characteristics(
{CharacteristicsTypes.VERTICAL_TILT_TARGET: tilt_position}
)
elif self.is_horizontal_tilt:
await self.async_put_characteristics(
{CharacteristicsTypes.HORIZONTAL_TILT_TARGET: tilt_position}
)
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
obstruction_detected = self.service.value(
CharacteristicsTypes.OBSTRUCTION_DETECTED
)
if not obstruction_detected:
return {}
return {"obstruction-detected": obstruction_detected}
|
import sys
import io
import os
import os.path
import subprocess
from distutils.core import Extension
from distutils.errors import CompileError, DistutilsOptionError
from distutils.command.build_ext import build_ext as _build_ext
from versioninfo import get_base_dir
try:
import Cython.Compiler.Version
CYTHON_INSTALLED = True
except ImportError:
CYTHON_INSTALLED = False
EXT_MODULES = ["lxml.etree", "lxml.objectify"]
COMPILED_MODULES = [
"lxml.builder",
"lxml._elementpath",
"lxml.html.diff",
"lxml.html.clean",
"lxml.sax",
]
HEADER_FILES = ['etree.h', 'etree_api.h']
if hasattr(sys, 'pypy_version_info') or (
getattr(sys, 'implementation', None) and sys.implementation.name != 'cpython'):
# disable Cython compilation of Python modules in PyPy and other non-CPythons
del COMPILED_MODULES[:]
SOURCE_PATH = "src"
INCLUDE_PACKAGE_PATH = os.path.join(SOURCE_PATH, 'lxml', 'includes')
if sys.version_info[0] >= 3:
_system_encoding = sys.getdefaultencoding()
if _system_encoding is None:
_system_encoding = "iso-8859-1" # :-)
def decode_input(data):
if isinstance(data, str):
return data
return data.decode(_system_encoding)
else:
def decode_input(data):
return data
def env_var(name):
value = os.getenv(name)
if value:
value = decode_input(value)
if sys.platform == 'win32' and ';' in value:
return value.split(';')
else:
return value.split()
else:
return []
def _prefer_reldirs(base_dir, dirs):
return [
os.path.relpath(path) if path.startswith(base_dir) else path
for path in dirs
]
def ext_modules(static_include_dirs, static_library_dirs,
static_cflags, static_binaries):
global XML2_CONFIG, XSLT_CONFIG
if OPTION_BUILD_LIBXML2XSLT:
from buildlibxml import build_libxml2xslt, get_prebuilt_libxml2xslt
if sys.platform.startswith('win'):
get_prebuilt_libxml2xslt(
OPTION_DOWNLOAD_DIR, static_include_dirs, static_library_dirs)
else:
XML2_CONFIG, XSLT_CONFIG = build_libxml2xslt(
OPTION_DOWNLOAD_DIR, 'build/tmp',
static_include_dirs, static_library_dirs,
static_cflags, static_binaries,
libiconv_version=OPTION_LIBICONV_VERSION,
libxml2_version=OPTION_LIBXML2_VERSION,
libxslt_version=OPTION_LIBXSLT_VERSION,
zlib_version=OPTION_ZLIB_VERSION,
multicore=OPTION_MULTICORE)
modules = EXT_MODULES + COMPILED_MODULES
if OPTION_WITHOUT_OBJECTIFY:
modules = [entry for entry in modules if 'objectify' not in entry]
module_files = list(os.path.join(SOURCE_PATH, *module.split('.')) for module in modules)
c_files_exist = [os.path.exists(module + '.c') for module in module_files]
use_cython = True
if CYTHON_INSTALLED and (OPTION_WITH_CYTHON or not all(c_files_exist)):
print("Building with Cython %s." % Cython.Compiler.Version.version)
# generate module cleanup code
from Cython.Compiler import Options
Options.generate_cleanup_code = 3
Options.clear_to_none = False
elif not OPTION_WITHOUT_CYTHON and not all(c_files_exist):
for exists, module in zip(c_files_exist, module_files):
if not exists:
raise RuntimeError(
"ERROR: Trying to build without Cython, but pre-generated '%s.c' "
"is not available (pass --without-cython to ignore this error)." % module)
else:
if not all(c_files_exist):
for exists, module in zip(c_files_exist, module_files):
if not exists:
print("WARNING: Trying to build without Cython, but pre-generated "
"'%s.c' is not available." % module)
use_cython = False
print("Building without Cython.")
if not check_build_dependencies():
raise RuntimeError("Dependency missing")
base_dir = get_base_dir()
_include_dirs = _prefer_reldirs(
base_dir, include_dirs(static_include_dirs) + [
SOURCE_PATH,
INCLUDE_PACKAGE_PATH,
])
_library_dirs = _prefer_reldirs(base_dir, library_dirs(static_library_dirs))
_cflags = cflags(static_cflags)
_ldflags = ['-isysroot', get_xcode_isysroot()] if sys.platform == 'darwin' else None
_define_macros = define_macros()
_libraries = libraries()
if _library_dirs:
message = "Building against libxml2/libxslt in "
if len(_library_dirs) > 1:
print(message + "one of the following directories:")
for dir in _library_dirs:
print(" " + dir)
else:
print(message + "the following directory: " +
_library_dirs[0])
if OPTION_AUTO_RPATH:
runtime_library_dirs = _library_dirs
else:
runtime_library_dirs = []
if CYTHON_INSTALLED and OPTION_SHOW_WARNINGS:
from Cython.Compiler import Errors
Errors.LEVEL = 0
cythonize_directives = {
'binding': True,
}
if OPTION_WITH_COVERAGE:
cythonize_directives['linetrace'] = True
result = []
for module, src_file in zip(modules, module_files):
is_py = module in COMPILED_MODULES
main_module_source = src_file + (
'.c' if not use_cython else '.py' if is_py else '.pyx')
result.append(
Extension(
module,
sources = [main_module_source],
depends = find_dependencies(module),
extra_compile_args = _cflags,
extra_link_args = None if is_py else _ldflags,
extra_objects = None if is_py else static_binaries,
define_macros = _define_macros,
include_dirs = _include_dirs,
library_dirs = None if is_py else _library_dirs,
runtime_library_dirs = None if is_py else runtime_library_dirs,
libraries = None if is_py else _libraries,
))
if CYTHON_INSTALLED and OPTION_WITH_CYTHON_GDB:
for ext in result:
ext.cython_gdb = True
if CYTHON_INSTALLED and use_cython:
# build .c files right now and convert Extension() objects
from Cython.Build import cythonize
result = cythonize(result, compiler_directives=cythonize_directives)
# for backwards compatibility reasons, provide "etree[_api].h" also as "lxml.etree[_api].h"
for header_filename in HEADER_FILES:
src_file = os.path.join(SOURCE_PATH, 'lxml', header_filename)
dst_file = os.path.join(SOURCE_PATH, 'lxml', 'lxml.' + header_filename)
if not os.path.exists(src_file):
continue
if os.path.exists(dst_file) and os.path.getmtime(dst_file) >= os.path.getmtime(src_file):
continue
with io.open(src_file, 'r', encoding='iso8859-1') as f:
content = f.read()
for filename in HEADER_FILES:
content = content.replace('"%s"' % filename, '"lxml.%s"' % filename)
with io.open(dst_file, 'w', encoding='iso8859-1') as f:
f.write(content)
return result
def find_dependencies(module):
if not CYTHON_INSTALLED or 'lxml.html' in module:
return []
base_dir = get_base_dir()
package_dir = os.path.join(base_dir, SOURCE_PATH, 'lxml')
includes_dir = os.path.join(base_dir, INCLUDE_PACKAGE_PATH)
pxd_files = [
os.path.join(INCLUDE_PACKAGE_PATH, filename)
for filename in os.listdir(includes_dir)
if filename.endswith('.pxd')
]
if module == 'lxml.etree':
pxi_files = [
os.path.join(SOURCE_PATH, 'lxml', filename)
for filename in os.listdir(package_dir)
if filename.endswith('.pxi') and 'objectpath' not in filename
]
pxd_files = [
filename for filename in pxd_files
if 'etreepublic' not in filename
]
elif module == 'lxml.objectify':
pxi_files = [os.path.join(SOURCE_PATH, 'lxml', 'objectpath.pxi')]
else:
pxi_files = pxd_files = []
return pxd_files + pxi_files
def extra_setup_args():
class CheckLibxml2BuildExt(_build_ext):
"""Subclass to check whether libxml2 is really available if the build fails"""
def run(self):
try:
_build_ext.run(self) # old-style class in Py2
except CompileError as e:
print('Compile failed: %s' % e)
if not seems_to_have_libxml2():
print_libxml_error()
raise
result = {'cmdclass': {'build_ext': CheckLibxml2BuildExt}}
return result
def seems_to_have_libxml2():
from distutils import ccompiler
compiler = ccompiler.new_compiler()
return compiler.has_function(
'xmlXPathInit',
include_dirs=include_dirs([]) + ['/usr/include/libxml2'],
includes=['libxml/xpath.h'],
library_dirs=library_dirs([]),
libraries=['xml2'])
def print_libxml_error():
print('*********************************************************************************')
print('Could not find function xmlCheckVersion in library libxml2. Is libxml2 installed?')
if sys.platform in ('darwin',):
print('Perhaps try: xcode-select --install')
print('*********************************************************************************')
def libraries():
standard_libs = []
if 'linux' in sys.platform:
standard_libs.append('rt')
if not OPTION_BUILD_LIBXML2XSLT:
standard_libs.append('z')
standard_libs.append('m')
if sys.platform in ('win32',):
libs = ['libxslt', 'libexslt', 'libxml2', 'iconv']
if OPTION_STATIC:
libs = ['%s_a' % lib for lib in libs]
libs.extend(['zlib', 'WS2_32'])
elif OPTION_STATIC:
libs = standard_libs
else:
libs = ['xslt', 'exslt', 'xml2'] + standard_libs
return libs
def library_dirs(static_library_dirs):
if OPTION_STATIC:
if not static_library_dirs:
static_library_dirs = env_var('LIBRARY')
assert static_library_dirs, "Static build not configured, see doc/build.txt"
return static_library_dirs
# filter them from xslt-config --libs
result = []
possible_library_dirs = flags('libs')
for possible_library_dir in possible_library_dirs:
if possible_library_dir.startswith('-L'):
result.append(possible_library_dir[2:])
return result
def include_dirs(static_include_dirs):
if OPTION_STATIC:
if not static_include_dirs:
static_include_dirs = env_var('INCLUDE')
return static_include_dirs
# filter them from xslt-config --cflags
result = []
possible_include_dirs = flags('cflags')
for possible_include_dir in possible_include_dirs:
if possible_include_dir.startswith('-I'):
result.append(possible_include_dir[2:])
return result
def cflags(static_cflags):
result = []
if not OPTION_SHOW_WARNINGS:
result.append('-w')
if OPTION_DEBUG_GCC:
result.append('-g2')
if OPTION_STATIC:
if not static_cflags:
static_cflags = env_var('CFLAGS')
result.extend(static_cflags)
else:
# anything from xslt-config --cflags that doesn't start with -I
possible_cflags = flags('cflags')
for possible_cflag in possible_cflags:
if not possible_cflag.startswith('-I'):
result.append(possible_cflag)
if sys.platform in ('darwin',):
for opt in result:
if 'flat_namespace' in opt:
break
else:
result.append('-flat_namespace')
return result
def define_macros():
macros = []
if OPTION_WITHOUT_ASSERT:
macros.append(('PYREX_WITHOUT_ASSERTIONS', None))
if OPTION_WITHOUT_THREADING:
macros.append(('WITHOUT_THREADING', None))
if OPTION_WITH_REFNANNY:
macros.append(('CYTHON_REFNANNY', None))
if OPTION_WITH_UNICODE_STRINGS:
macros.append(('LXML_UNICODE_STRINGS', '1'))
if OPTION_WITH_COVERAGE:
macros.append(('CYTHON_TRACE_NOGIL', '1'))
# Disable showing C lines in tracebacks, unless explicitly requested.
macros.append(('CYTHON_CLINE_IN_TRACEBACK', '1' if OPTION_WITH_CLINES else '0'))
return macros
def run_command(cmd, *args):
if not cmd:
return ''
if args:
cmd = ' '.join((cmd,) + args)
p = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_data, errors = p.communicate()
if errors:
return ''
return decode_input(stdout_data).strip()
def check_min_version(version, min_version, libname):
if not version:
# this is ok for targets like sdist etc.
return True
lib_version = tuple(map(int, version.split('.')[:3]))
req_version = tuple(map(int, min_version.split('.')[:3]))
if lib_version < req_version:
print("Minimum required version of %s is %s. Your system has version %s." % (
libname, min_version, version))
return False
return True
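# Illustrative behaviour of check_min_version (version numbers are examples only):
#
#     check_min_version('2.9.10', '2.7.0', 'libxml2')   # -> True
#     check_min_version('2.6.32', '2.7.0', 'libxml2')   # prints a notice, -> False
#     check_min_version('', '2.7.0', 'libxml2')         # version unknown, -> True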
def get_library_version(prog, libname=None):
if libname:
return run_command(prog, '--modversion %s' % libname)
else:
return run_command(prog, '--version')
PKG_CONFIG = None
XML2_CONFIG = None
XSLT_CONFIG = None
def get_library_versions():
global XML2_CONFIG, XSLT_CONFIG
# Pre-built libraries
if XML2_CONFIG and XSLT_CONFIG:
xml2_version = get_library_version(XML2_CONFIG)
xslt_version = get_library_version(XSLT_CONFIG)
return xml2_version, xslt_version
# Path to xml2-config and xslt-config specified on the command line
if OPTION_WITH_XML2_CONFIG:
xml2_version = get_library_version(OPTION_WITH_XML2_CONFIG)
if xml2_version and OPTION_WITH_XSLT_CONFIG:
xslt_version = get_library_version(OPTION_WITH_XSLT_CONFIG)
if xslt_version:
XML2_CONFIG = OPTION_WITH_XML2_CONFIG
XSLT_CONFIG = OPTION_WITH_XSLT_CONFIG
return xml2_version, xslt_version
# Try pkg-config
global PKG_CONFIG
PKG_CONFIG = os.getenv('PKG_CONFIG', 'pkg-config')
xml2_version = get_library_version(PKG_CONFIG, 'libxml-2.0')
if xml2_version:
xslt_version = get_library_version(PKG_CONFIG, 'libxslt')
if xml2_version and xslt_version:
return xml2_version, xslt_version
# Try xml2-config and xslt-config
XML2_CONFIG = os.getenv('XML2_CONFIG', 'xml2-config')
xml2_version = get_library_version(XML2_CONFIG)
if xml2_version:
XSLT_CONFIG = os.getenv('XSLT_CONFIG', 'xslt-config')
xslt_version = get_library_version(XSLT_CONFIG)
if xml2_version and xslt_version:
return xml2_version, xslt_version
# One or both build dependencies not found. Fail on Linux platforms only.
if sys.platform.startswith('win'):
return '', ''
print("Error: Please make sure the libxml2 and libxslt development packages are installed.")
sys.exit(1)
def check_build_dependencies():
xml2_version, xslt_version = get_library_versions()
xml2_ok = check_min_version(xml2_version, '2.7.0', 'libxml2')
xslt_ok = check_min_version(xslt_version, '1.1.23', 'libxslt')
if xml2_version and xslt_version:
print("Building against libxml2 %s and libxslt %s" % (xml2_version, xslt_version))
else:
print("Building against pre-built libxml2 andl libxslt libraries")
return (xml2_ok and xslt_ok)
def get_flags(prog, option, libname=None):
if libname:
return run_command(prog, '--%s %s' % (option, libname))
else:
return run_command(prog, '--%s' % option)
def flags(option):
if XML2_CONFIG:
xml2_flags = get_flags(XML2_CONFIG, option)
xslt_flags = get_flags(XSLT_CONFIG, option)
else:
xml2_flags = get_flags(PKG_CONFIG, option, 'libxml-2.0')
xslt_flags = get_flags(PKG_CONFIG, option, 'libxslt')
flag_list = xml2_flags.split()
for flag in xslt_flags.split():
if flag not in flag_list:
flag_list.append(flag)
return flag_list
def get_xcode_isysroot():
return run_command('xcrun', '--show-sdk-path')
## Option handling:
def has_option(name):
try:
sys.argv.remove('--%s' % name)
return True
except ValueError:
pass
# allow passing all cmd line options also as environment variables
env_val = os.getenv(name.upper().replace('-', '_'), 'false').lower()
if env_val == "true":
return True
return False
def option_value(name, deprecated_for=None):
for index, option in enumerate(sys.argv):
if option == '--' + name:
if index+1 >= len(sys.argv):
raise DistutilsOptionError(
'The option %s requires a value' % option)
value = sys.argv[index+1]
sys.argv[index:index+2] = []
if deprecated_for:
print_deprecated_option(name, deprecated_for)
return value
if option.startswith('--' + name + '='):
value = option[len(name)+3:]
sys.argv[index:index+1] = []
if deprecated_for:
print_deprecated_option(name, deprecated_for)
return value
env_name = name.upper().replace('-', '_')
env_val = os.getenv(env_name)
if env_val and deprecated_for:
print_deprecated_option(env_name, deprecated_for.upper().replace('-', '_'))
return env_val
def print_deprecated_option(name, new_name):
print("WARN: Option '%s' is deprecated. Use '%s' instead." % (name, new_name))
staticbuild = bool(os.environ.get('STATICBUILD', ''))
# pick up any commandline options and/or env variables
OPTION_WITHOUT_OBJECTIFY = has_option('without-objectify')
OPTION_WITH_UNICODE_STRINGS = has_option('with-unicode-strings')
OPTION_WITHOUT_ASSERT = has_option('without-assert')
OPTION_WITHOUT_THREADING = has_option('without-threading')
OPTION_WITHOUT_CYTHON = has_option('without-cython')
OPTION_WITH_CYTHON = has_option('with-cython')
OPTION_WITH_CYTHON_GDB = has_option('cython-gdb')
OPTION_WITH_REFNANNY = has_option('with-refnanny')
OPTION_WITH_COVERAGE = has_option('with-coverage')
OPTION_WITH_CLINES = has_option('with-clines')
if OPTION_WITHOUT_CYTHON:
CYTHON_INSTALLED = False
OPTION_STATIC = staticbuild or has_option('static')
OPTION_DEBUG_GCC = has_option('debug-gcc')
OPTION_SHOW_WARNINGS = has_option('warnings')
OPTION_AUTO_RPATH = has_option('auto-rpath')
OPTION_BUILD_LIBXML2XSLT = staticbuild or has_option('static-deps')
if OPTION_BUILD_LIBXML2XSLT:
OPTION_STATIC = True
OPTION_WITH_XML2_CONFIG = option_value('with-xml2-config') or option_value('xml2-config', deprecated_for='with-xml2-config')
OPTION_WITH_XSLT_CONFIG = option_value('with-xslt-config') or option_value('xslt-config', deprecated_for='with-xslt-config')
OPTION_LIBXML2_VERSION = option_value('libxml2-version')
OPTION_LIBXSLT_VERSION = option_value('libxslt-version')
OPTION_LIBICONV_VERSION = option_value('libiconv-version')
OPTION_ZLIB_VERSION = option_value('zlib-version')
OPTION_MULTICORE = option_value('multicore')
OPTION_DOWNLOAD_DIR = option_value('download-dir')
if OPTION_DOWNLOAD_DIR is None:
OPTION_DOWNLOAD_DIR = 'libs'
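# A hedged invocation sketch: the flags below are the option names handled by
# has_option()/option_value() above and can equally be supplied as environment
# variables (name upper-cased, dashes replaced by underscores, e.g.
# STATIC_DEPS=true). The version number is a placeholder, not a recommendation.
#
#     python setup.py build_ext --static-deps --libxml2-version=2.9.10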
|
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_MESSAGE,
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from .const import (
CONF_DEFAULT_CONVERSATIONS,
DOMAIN,
SERVICE_SEND_MESSAGE,
TARGETS_SCHEMA,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_DEFAULT_CONVERSATIONS): [TARGETS_SCHEMA]}
)
def get_service(hass, config, discovery_info=None):
"""Get the Hangouts notification service."""
return HangoutsNotificationService(config.get(CONF_DEFAULT_CONVERSATIONS))
class HangoutsNotificationService(BaseNotificationService):
"""Send Notifications to Hangouts conversations."""
def __init__(self, default_conversations):
"""Set up the notification service."""
self._default_conversations = default_conversations
def send_message(self, message="", **kwargs):
"""Send the message to the Google Hangouts server."""
target_conversations = None
if ATTR_TARGET in kwargs:
target_conversations = []
for target in kwargs.get(ATTR_TARGET):
target_conversations.append({"id": target})
else:
target_conversations = self._default_conversations
messages = []
if "title" in kwargs:
messages.append({"text": kwargs["title"], "is_bold": True})
messages.append({"text": message, "parse_str": True})
service_data = {ATTR_TARGET: target_conversations, ATTR_MESSAGE: messages}
if kwargs[ATTR_DATA]:
service_data[ATTR_DATA] = kwargs[ATTR_DATA]
return self.hass.services.call(
DOMAIN, SERVICE_SEND_MESSAGE, service_data=service_data
)
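# A hedged service-call sketch (YAML, illustrative; the service name depends on
# how the notify platform is configured and the conversation id is a placeholder):
#
#     service: notify.hangouts
#     data:
#       title: "Door alert"
#       message: "The front door is open"
#       target:
#         - "<conversation id>"
#
# ``title`` becomes a bold first message segment and ``target`` overrides the
# configured default conversations, as handled in send_message() above.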
|
from datetime import timedelta
import logging
from sleepyq import Sleepyq
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from .const import DOMAIN
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
vol.Required(DOMAIN): vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the SleepIQ component.
Will automatically load sensor components to support
devices discovered on the account.
"""
username = config[DOMAIN][CONF_USERNAME]
password = config[DOMAIN][CONF_PASSWORD]
client = Sleepyq(username, password)
try:
data = SleepIQData(client)
data.update()
except ValueError:
message = """
        SleepIQ failed to login, double check your username and password
"""
_LOGGER.error(message)
return False
hass.data[DOMAIN] = data
discovery.load_platform(hass, "sensor", DOMAIN, {}, config)
discovery.load_platform(hass, "binary_sensor", DOMAIN, {}, config)
return True
class SleepIQData:
"""Get the latest data from SleepIQ."""
def __init__(self, client):
"""Initialize the data object."""
self._client = client
self.beds = {}
self.update()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from SleepIQ."""
self._client.login()
beds = self._client.beds_with_sleeper_status()
self.beds = {bed.bed_id: bed for bed in beds}
class SleepIQSensor(Entity):
"""Implementation of a SleepIQ sensor."""
def __init__(self, sleepiq_data, bed_id, side):
"""Initialize the sensor."""
self._bed_id = bed_id
self._side = side
self.sleepiq_data = sleepiq_data
self.side = None
self.bed = None
# added by subclass
self._name = None
self.type = None
@property
def name(self):
"""Return the name of the sensor."""
return "SleepNumber {} {} {}".format(
self.bed.name, self.side.sleeper.first_name, self._name
)
def update(self):
"""Get the latest data from SleepIQ and updates the states."""
# Call the API for new sleepiq data. Each sensor will re-trigger this
# same exact call, but that's fine. We cache results for a short period
# of time to prevent hitting API limits.
self.sleepiq_data.update()
self.bed = self.sleepiq_data.beds[self._bed_id]
self.side = getattr(self.bed, self._side)
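# Illustrative sketch (hypothetical, not shipped with this module): a concrete
# sensor built on SleepIQSensor is expected to set self._name and self.type and
# then read values off self.side after calling update(), e.g.:
#
#     class SleepNumberSensorSketch(SleepIQSensor):
#         def __init__(self, sleepiq_data, bed_id, side):
#             super().__init__(sleepiq_data, bed_id, side)
#             self._name = "SleepNumber"
#             self.type = "sleep_number"
#             self.update()
#
#         @property
#         def state(self):
#             # assumes the sleepyq side object exposes ``sleep_number``
#             return self.side.sleep_number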
|
from typing import Any
import voluptuous as vol
from homeassistant.const import CONF_PAYLOAD
from homeassistant.helpers import config_validation as cv
from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
DEFAULT_QOS,
DEFAULT_RETAIN,
)
def valid_topic(value: Any) -> str:
"""Validate that this is a valid topic name/filter."""
value = cv.string(value)
try:
raw_value = value.encode("utf-8")
except UnicodeError as err:
raise vol.Invalid("MQTT topic name/filter must be valid UTF-8 string.") from err
if not raw_value:
raise vol.Invalid("MQTT topic name/filter must not be empty.")
if len(raw_value) > 65535:
raise vol.Invalid(
"MQTT topic name/filter must not be longer than 65535 encoded bytes."
)
if "\0" in value:
raise vol.Invalid("MQTT topic name/filter must not contain null character.")
return value
def valid_subscribe_topic(value: Any) -> str:
"""Validate that we can subscribe using this MQTT topic."""
value = valid_topic(value)
for i in (i for i, c in enumerate(value) if c == "+"):
if (i > 0 and value[i - 1] != "/") or (
i < len(value) - 1 and value[i + 1] != "/"
):
raise vol.Invalid(
"Single-level wildcard must occupy an entire level of the filter"
)
index = value.find("#")
if index != -1:
if index != len(value) - 1:
# If there are multiple wildcards, this will also trigger
raise vol.Invalid(
"Multi-level wildcard must be the last "
"character in the topic filter."
)
if len(value) > 1 and value[index - 1] != "/":
raise vol.Invalid(
"Multi-level wildcard must be after a topic level separator."
)
return value
def valid_publish_topic(value: Any) -> str:
"""Validate that we can publish using this MQTT topic."""
value = valid_topic(value)
if "+" in value or "#" in value:
raise vol.Invalid("Wildcards can not be used in topic names")
return value
_VALID_QOS_SCHEMA = vol.All(vol.Coerce(int), vol.In([0, 1, 2]))
MQTT_WILL_BIRTH_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TOPIC): valid_publish_topic,
vol.Required(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): _VALID_QOS_SCHEMA,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)
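# A minimal usage sketch (hypothetical helper, only uses the validators defined
# above): subscribe-topic filters may contain "+" and "#" wildcards in the
# positions checked by valid_subscribe_topic(), while publish-topic names must
# not contain wildcards at all.
def _example_topic_validation() -> None:
    """Hypothetical demonstration of the topic validators above."""
    # A single-level wildcard occupying a whole level is accepted as a filter.
    assert valid_subscribe_topic("home/+/temperature") == "home/+/temperature"
    # Plain topic names without wildcards are valid for publishing.
    assert valid_publish_topic("home/kitchen/temperature") == "home/kitchen/temperature"
    try:
        valid_publish_topic("home/#")
    except vol.Invalid:
        pass  # wildcards are rejected in topic names used for publishing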
|
import numpy as np
from ._peak_finder import peak_finder
from .. import pick_types, pick_channels
from ..utils import logger, verbose, _pl
from ..filter import filter_data
from ..epochs import Epochs
@verbose
def find_eog_events(raw, event_id=998, l_freq=1, h_freq=10,
filter_length='10s', ch_name=None, tstart=0,
reject_by_annotation=False, thresh=None, verbose=None):
"""Locate EOG artifacts.
Parameters
----------
raw : instance of Raw
The raw data.
event_id : int
The index to assign to found events.
l_freq : float
Low cut-off frequency to apply to the EOG channel in Hz.
h_freq : float
High cut-off frequency to apply to the EOG channel in Hz.
filter_length : str | int | None
Number of taps to use for filtering.
ch_name : str | None
If not None, use specified channel(s) for EOG.
tstart : float
Start detection after tstart seconds.
reject_by_annotation : bool
Whether to omit data that is annotated as bad.
thresh : float
Threshold to trigger EOG event.
%(verbose)s
Returns
-------
eog_events : array
Events.
See Also
--------
create_eog_epochs
compute_proj_eog
"""
# Getting EOG Channel
eog_inds = _get_eog_channel_index(ch_name, raw)
logger.info('EOG channel index for this subject is: %s' % eog_inds)
# Reject bad segments.
reject_by_annotation = 'omit' if reject_by_annotation else None
eog, times = raw.get_data(picks=eog_inds,
reject_by_annotation=reject_by_annotation,
return_times=True)
times = times * raw.info['sfreq'] + raw.first_samp
eog_events = _find_eog_events(eog, event_id=event_id, l_freq=l_freq,
h_freq=h_freq,
sampling_rate=raw.info['sfreq'],
first_samp=raw.first_samp,
filter_length=filter_length,
tstart=tstart, thresh=thresh,
verbose=verbose)
# Map times to corresponding samples.
eog_events[:, 0] = np.round(times[eog_events[:, 0] -
raw.first_samp]).astype(int)
return eog_events
@verbose
def _find_eog_events(eog, event_id, l_freq, h_freq, sampling_rate, first_samp,
filter_length='10s', tstart=0., thresh=None,
verbose=None):
"""Find EOG events."""
logger.info('Filtering the data to remove DC offset to help '
'distinguish blinks from saccades')
    # filter to remove the DC offset so that blinks can be told apart from saccades
# hardcode verbose=False to suppress filter param messages (since this
# filter is not under user control)
fmax = np.minimum(45, sampling_rate / 2.0 - 0.75) # protect Nyquist
filteog = np.array([filter_data(
x, sampling_rate, 2, fmax, None, filter_length, 0.5, 0.5,
phase='zero-double', fir_window='hann', fir_design='firwin2',
verbose=False) for x in eog])
temp = np.sqrt(np.sum(filteog ** 2, axis=1))
indexmax = np.argmax(temp)
# easier to detect peaks with filtering.
filteog = filter_data(
eog[indexmax], sampling_rate, l_freq, h_freq, None,
filter_length, 0.5, 0.5, phase='zero-double', fir_window='hann',
fir_design='firwin2')
# detecting eog blinks and generating event file
logger.info('Now detecting blinks and generating corresponding events')
temp = filteog - np.mean(filteog)
n_samples_start = int(sampling_rate * tstart)
if np.abs(np.max(temp)) > np.abs(np.min(temp)):
eog_events, _ = peak_finder(filteog[n_samples_start:],
thresh, extrema=1)
else:
eog_events, _ = peak_finder(filteog[n_samples_start:],
thresh, extrema=-1)
eog_events += n_samples_start
n_events = len(eog_events)
logger.info("Number of EOG events detected : %d" % n_events)
eog_events = np.array([eog_events + first_samp,
np.zeros(n_events, int),
event_id * np.ones(n_events, int)]).T
return eog_events
def _get_eog_channel_index(ch_name, inst):
"""Get EOG channel index."""
if isinstance(ch_name, str):
# Check if multiple EOG Channels
if ',' in ch_name:
ch_name = ch_name.split(',')
else:
ch_name = [ch_name]
eog_inds = pick_channels(inst.ch_names, include=ch_name)
if len(eog_inds) == 0:
raise ValueError('%s not in channel list' % ch_name)
else:
logger.info('Using channel %s as EOG channel%s' % (
" and ".join(ch_name), _pl(eog_inds)))
elif ch_name is None:
eog_inds = pick_types(inst.info, meg=False, eeg=False, stim=False,
eog=True, ecg=False, emg=False, ref_meg=False,
exclude='bads')
if len(eog_inds) == 0:
logger.info('No EOG channels found')
logger.info('Trying with EEG 061 and EEG 062')
eog_inds = pick_channels(inst.ch_names,
include=['EEG 061', 'EEG 062'])
if len(eog_inds) != 2:
raise RuntimeError('EEG 61 or EEG 62 channel not found !!')
else:
raise ValueError('Could not find EOG channel.')
return eog_inds
@verbose
def create_eog_epochs(raw, ch_name=None, event_id=998, picks=None, tmin=-0.5,
tmax=0.5, l_freq=1, h_freq=10, reject=None, flat=None,
baseline=None, preload=True, reject_by_annotation=True,
thresh=None, decim=1, verbose=None):
"""Conveniently generate epochs around EOG artifact events.
%(create_eog_epochs)s
Parameters
----------
raw : instance of Raw
The raw data.
ch_name : str
The name of the channel to use for EOG peak detection.
The argument is mandatory if the dataset contains no EOG channels.
event_id : int
The index to assign to found events.
%(picks_all)s
tmin : float
Start time before event.
tmax : float
End time after event.
    l_freq : float
        Low cut-off frequency to apply to the EOG channel while finding events.
    h_freq : float
        High cut-off frequency to apply to the EOG channel while finding events.
reject : dict | None
Rejection parameters based on peak-to-peak amplitude.
Valid keys are 'grad' | 'mag' | 'eeg' | 'eog' | 'ecg'.
If reject is None then no rejection is done. Example::
reject = dict(grad=4000e-13, # T / m (gradiometers)
mag=4e-12, # T (magnetometers)
eeg=40e-6, # V (EEG channels)
eog=250e-6 # V (EOG channels)
)
flat : dict | None
Rejection parameters based on flatness of signal.
Valid keys are 'grad' | 'mag' | 'eeg' | 'eog' | 'ecg', and values
are floats that set the minimum acceptable peak-to-peak amplitude.
If flat is None then no rejection is done.
baseline : tuple or list of length 2, or None
The time interval to apply rescaling / baseline correction.
If None do not apply it. If baseline is (a, b)
the interval is between "a (s)" and "b (s)".
If a is None the beginning of the data is used
and if b is None then b is set to the end of the interval.
If baseline is equal to (None, None) all the time
interval is used. If None, no correction is applied.
preload : bool
Preload epochs or not.
%(reject_by_annotation_epochs)s
.. versionadded:: 0.14.0
thresh : float
Threshold to trigger EOG event.
%(decim)s
.. versionadded:: 0.21.0
%(verbose)s
Returns
-------
eog_epochs : instance of Epochs
Data epoched around EOG events.
See Also
--------
find_eog_events
compute_proj_eog
Notes
-----
Filtering is only applied to the EOG channel while finding events.
The resulting ``eog_epochs`` will have no filtering applied (i.e., have
the same filter properties as the input ``raw`` instance).
"""
events = find_eog_events(raw, ch_name=ch_name, event_id=event_id,
l_freq=l_freq, h_freq=h_freq,
reject_by_annotation=reject_by_annotation,
thresh=thresh)
# create epochs around EOG events
eog_epochs = Epochs(raw, events=events, event_id=event_id, tmin=tmin,
tmax=tmax, proj=False, reject=reject, flat=flat,
picks=picks, baseline=baseline, preload=preload,
reject_by_annotation=reject_by_annotation,
decim=decim)
return eog_epochs
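# Minimal usage sketch (``raw`` stands for an already-loaded Raw instance with
# an EOG channel; it is not defined in this module):
#
#     events = find_eog_events(raw)                  # one row per detected blink
#     eog_epochs = create_eog_epochs(raw, tmin=-0.5, tmax=0.5)
#     blink_average = eog_epochs.average()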
|
from datetime import datetime
import pytest
import pytz
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from homeassistant.components.recorder.models import (
Base,
Events,
RecorderRuns,
States,
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from homeassistant.const import EVENT_STATE_CHANGED
import homeassistant.core as ha
from homeassistant.exceptions import InvalidEntityFormatError
from homeassistant.util import dt
import homeassistant.util.dt as dt_util
def test_from_event_to_db_event():
"""Test converting event to db event."""
event = ha.Event("test_event", {"some_data": 15})
assert event == Events.from_event(event).to_native()
def test_from_event_to_db_state():
"""Test converting event to db state."""
state = ha.State("sensor.temperature", "18")
event = ha.Event(
EVENT_STATE_CHANGED,
{"entity_id": "sensor.temperature", "old_state": None, "new_state": state},
context=state.context,
)
# We don't restore context unless we need it by joining the
# events table on the event_id for state_changed events
state.context = ha.Context(id=None)
assert state == States.from_event(event).to_native()
def test_from_event_to_delete_state():
"""Test converting deleting state event to db state."""
event = ha.Event(
EVENT_STATE_CHANGED,
{
"entity_id": "sensor.temperature",
"old_state": ha.State("sensor.temperature", "18"),
"new_state": None,
},
)
db_state = States.from_event(event)
assert db_state.entity_id == "sensor.temperature"
assert db_state.domain == "sensor"
assert db_state.state == ""
assert db_state.last_changed == event.time_fired
assert db_state.last_updated == event.time_fired
def test_entity_ids():
"""Test if entity ids helper method works."""
engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session_factory = sessionmaker(bind=engine)
session = scoped_session(session_factory)
session.query(Events).delete()
session.query(States).delete()
session.query(RecorderRuns).delete()
run = RecorderRuns(
start=datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC),
end=datetime(2016, 7, 9, 23, 0, 0, tzinfo=dt.UTC),
closed_incorrect=False,
created=datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC),
)
session.add(run)
session.commit()
before_run = datetime(2016, 7, 9, 8, 0, 0, tzinfo=dt.UTC)
in_run = datetime(2016, 7, 9, 13, 0, 0, tzinfo=dt.UTC)
in_run2 = datetime(2016, 7, 9, 15, 0, 0, tzinfo=dt.UTC)
in_run3 = datetime(2016, 7, 9, 18, 0, 0, tzinfo=dt.UTC)
after_run = datetime(2016, 7, 9, 23, 30, 0, tzinfo=dt.UTC)
assert run.to_native() == run
assert run.entity_ids() == []
session.add(
States(
entity_id="sensor.temperature",
state="20",
last_changed=before_run,
last_updated=before_run,
)
)
session.add(
States(
entity_id="sensor.sound",
state="10",
last_changed=after_run,
last_updated=after_run,
)
)
session.add(
States(
entity_id="sensor.humidity",
state="76",
last_changed=in_run,
last_updated=in_run,
)
)
session.add(
States(
entity_id="sensor.lux",
state="5",
last_changed=in_run3,
last_updated=in_run3,
)
)
assert sorted(run.entity_ids()) == ["sensor.humidity", "sensor.lux"]
assert run.entity_ids(in_run2) == ["sensor.humidity"]
def test_states_from_native_invalid_entity_id():
"""Test loading a state from an invalid entity ID."""
state = States()
state.entity_id = "test.invalid__id"
state.attributes = "{}"
with pytest.raises(InvalidEntityFormatError):
state = state.to_native()
state = state.to_native(validate_entity_id=False)
assert state.entity_id == "test.invalid__id"
async def test_process_timestamp():
"""Test processing time stamp to UTC."""
datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC)
datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0)
est = pytz.timezone("US/Eastern")
datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est)
nst = pytz.timezone("Canada/Newfoundland")
datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst)
hst = pytz.timezone("US/Hawaii")
datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst)
assert process_timestamp(datetime_with_tzinfo) == datetime(
2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC
)
assert process_timestamp(datetime_without_tzinfo) == datetime(
2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC
)
assert process_timestamp(datetime_est_timezone) == datetime(
2016, 7, 9, 15, 56, tzinfo=dt.UTC
)
assert process_timestamp(datetime_nst_timezone) == datetime(
2016, 7, 9, 14, 31, tzinfo=dt.UTC
)
assert process_timestamp(datetime_hst_timezone) == datetime(
2016, 7, 9, 21, 31, tzinfo=dt.UTC
)
assert process_timestamp(None) is None
async def test_process_timestamp_to_utc_isoformat():
"""Test processing time stamp to UTC isoformat."""
datetime_with_tzinfo = datetime(2016, 7, 9, 11, 0, 0, tzinfo=dt.UTC)
datetime_without_tzinfo = datetime(2016, 7, 9, 11, 0, 0)
est = pytz.timezone("US/Eastern")
datetime_est_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=est)
nst = pytz.timezone("Canada/Newfoundland")
datetime_nst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=nst)
hst = pytz.timezone("US/Hawaii")
datetime_hst_timezone = datetime(2016, 7, 9, 11, 0, 0, tzinfo=hst)
assert (
process_timestamp_to_utc_isoformat(datetime_with_tzinfo)
== "2016-07-09T11:00:00+00:00"
)
assert (
process_timestamp_to_utc_isoformat(datetime_without_tzinfo)
== "2016-07-09T11:00:00+00:00"
)
assert (
process_timestamp_to_utc_isoformat(datetime_est_timezone)
== "2016-07-09T15:56:00+00:00"
)
assert (
process_timestamp_to_utc_isoformat(datetime_nst_timezone)
== "2016-07-09T14:31:00+00:00"
)
assert (
process_timestamp_to_utc_isoformat(datetime_hst_timezone)
== "2016-07-09T21:31:00+00:00"
)
assert process_timestamp_to_utc_isoformat(None) is None
async def test_event_to_db_model():
"""Test we can round trip Event conversion."""
event = ha.Event(
"state_changed", {"some": "attr"}, ha.EventOrigin.local, dt_util.utcnow()
)
native = Events.from_event(event).to_native()
assert native == event
native = Events.from_event(event, event_data="{}").to_native()
event.data = {}
assert native == event
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl.flags import _helpers
from absl.flags.tests import module_bar
from absl.flags.tests import module_foo
from absl.testing import absltest
class FlagSuggestionTest(absltest.TestCase):
def setUp(self):
self.longopts = [
'fsplit-ivs-in-unroller=',
'fsplit-wide-types=',
'fstack-protector=',
'fstack-protector-all=',
'fstrict-aliasing=',
'fstrict-overflow=',
'fthread-jumps=',
'ftracer',
'ftree-bit-ccp',
'ftree-builtin-call-dce',
'ftree-ccp',
'ftree-ch']
def test_damerau_levenshtein_id(self):
self.assertEqual(0, _helpers._damerau_levenshtein('asdf', 'asdf'))
def test_damerau_levenshtein_empty(self):
self.assertEqual(5, _helpers._damerau_levenshtein('', 'kites'))
self.assertEqual(6, _helpers._damerau_levenshtein('kitten', ''))
def test_damerau_levenshtein_commutative(self):
self.assertEqual(2, _helpers._damerau_levenshtein('kitten', 'kites'))
self.assertEqual(2, _helpers._damerau_levenshtein('kites', 'kitten'))
def test_damerau_levenshtein_transposition(self):
self.assertEqual(1, _helpers._damerau_levenshtein('kitten', 'ktiten'))
  def test_misspelled_suggestions(self):
suggestions = _helpers.get_flag_suggestions('fstack_protector_all',
self.longopts)
self.assertEqual(['fstack-protector-all'], suggestions)
def test_ambiguous_prefix_suggestion(self):
suggestions = _helpers.get_flag_suggestions('fstack', self.longopts)
self.assertEqual(['fstack-protector', 'fstack-protector-all'], suggestions)
def test_misspelled_ambiguous_prefix_suggestion(self):
suggestions = _helpers.get_flag_suggestions('stack', self.longopts)
self.assertEqual(['fstack-protector', 'fstack-protector-all'], suggestions)
def test_crazy_suggestion(self):
suggestions = _helpers.get_flag_suggestions('asdfasdgasdfa', self.longopts)
self.assertEqual([], suggestions)
def test_suggestions_are_sorted(self):
sorted_flags = sorted(['aab', 'aac', 'aad'])
misspelt_flag = 'aaa'
suggestions = _helpers.get_flag_suggestions(misspelt_flag,
reversed(sorted_flags))
self.assertEqual(sorted_flags, suggestions)
class GetCallingModuleTest(absltest.TestCase):
"""Test whether we correctly determine the module which defines the flag."""
def test_get_calling_module(self):
self.assertEqual(_helpers.get_calling_module(), sys.argv[0])
self.assertEqual(module_foo.get_module_name(),
'absl.flags.tests.module_foo')
self.assertEqual(module_bar.get_module_name(),
'absl.flags.tests.module_bar')
# We execute the following exec statements for their side-effect
# (i.e., not raising an error). They emphasize the case that not
# all code resides in one of the imported modules: Python is a
# really dynamic language, where we can dynamically construct some
# code and execute it.
code = ('from absl.flags import _helpers\n'
'module_name = _helpers.get_calling_module()')
exec(code) # pylint: disable=exec-used
# Next two exec statements executes code with a global environment
# that is different from the global environment of any imported
# module.
exec(code, {}) # pylint: disable=exec-used
# vars(self) returns a dictionary corresponding to the symbol
# table of the self object. dict(...) makes a distinct copy of
# this dictionary, such that any new symbol definition by the
# exec-ed code (e.g., import flags, module_name = ...) does not
# affect the symbol table of self.
exec(code, dict(vars(self))) # pylint: disable=exec-used
# Next test is actually more involved: it checks not only that
# get_calling_module does not crash inside exec code, it also checks
# that it returns the expected value: the code executed via exec
# code is treated as being executed by the current module. We
# check it twice: first time by executing exec from the main
# module, second time by executing it from module_bar.
global_dict = {}
exec(code, global_dict) # pylint: disable=exec-used
self.assertEqual(global_dict['module_name'],
sys.argv[0])
global_dict = {}
module_bar.execute_code(code, global_dict)
self.assertEqual(global_dict['module_name'],
'absl.flags.tests.module_bar')
def test_get_calling_module_with_iteritems_error(self):
# This test checks that get_calling_module is using
# sys.modules.items(), instead of .iteritems().
orig_sys_modules = sys.modules
# Mock sys.modules: simulates error produced by importing a module
    # in parallel with our iteration over sys.modules.iteritems().
class SysModulesMock(dict):
def __init__(self, original_content):
dict.__init__(self, original_content)
def iteritems(self):
# Any dictionary method is fine, but not .iteritems().
raise RuntimeError('dictionary changed size during iteration')
sys.modules = SysModulesMock(orig_sys_modules)
try:
# _get_calling_module should still work as expected:
self.assertEqual(_helpers.get_calling_module(), sys.argv[0])
self.assertEqual(module_foo.get_module_name(),
'absl.flags.tests.module_foo')
finally:
sys.modules = orig_sys_modules
class IsBytesOrString(absltest.TestCase):
def test_bytes(self):
self.assertTrue(_helpers.is_bytes_or_string(b'bytes'))
def test_str(self):
self.assertTrue(_helpers.is_bytes_or_string('str'))
def test_unicode(self):
self.assertTrue(_helpers.is_bytes_or_string(u'unicode'))
def test_list(self):
self.assertFalse(_helpers.is_bytes_or_string(['str']))
if __name__ == '__main__':
absltest.main()
|
import logging
import typing
import uuid
import voluptuous as vol
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import collection
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.storage import Store
from homeassistant.loader import bind_hass
import homeassistant.util.dt as dt_util
from .const import DEVICE_ID, DOMAIN, EVENT_TAG_SCANNED, TAG_ID
_LOGGER = logging.getLogger(__name__)
LAST_SCANNED = "last_scanned"
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
TAGS = "tags"
CREATE_FIELDS = {
vol.Optional(TAG_ID): cv.string,
vol.Optional(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional("description"): cv.string,
vol.Optional(LAST_SCANNED): cv.datetime,
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Optional("description"): cv.string,
vol.Optional(LAST_SCANNED): cv.datetime,
}
class TagIDExistsError(HomeAssistantError):
"""Raised when an item is not found."""
def __init__(self, item_id: str):
"""Initialize tag id exists error."""
super().__init__(f"Tag with id: {item_id} already exists.")
self.item_id = item_id
class TagIDManager(collection.IDManager):
"""ID manager for tags."""
def generate_id(self, suggestion: str) -> str:
"""Generate an ID."""
if self.has_id(suggestion):
raise TagIDExistsError(suggestion)
return suggestion
class TagStorageCollection(collection.StorageCollection):
"""Tag collection stored in storage."""
CREATE_SCHEMA = vol.Schema(CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
async def _process_create_data(self, data: typing.Dict) -> typing.Dict:
"""Validate the config is valid."""
data = self.CREATE_SCHEMA(data)
if not data[TAG_ID]:
data[TAG_ID] = str(uuid.uuid4())
        # make last_scanned JSON serializable
if LAST_SCANNED in data:
data[LAST_SCANNED] = data[LAST_SCANNED].isoformat()
return data
@callback
def _get_suggested_id(self, info: typing.Dict) -> str:
"""Suggest an ID based on the config."""
return info[TAG_ID]
async def _update_data(self, data: dict, update_data: typing.Dict) -> typing.Dict:
"""Return a new updated data object."""
data = {**data, **self.UPDATE_SCHEMA(update_data)}
        # make last_scanned JSON serializable
if LAST_SCANNED in update_data:
data[LAST_SCANNED] = data[LAST_SCANNED].isoformat()
return data
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Tag component."""
hass.data[DOMAIN] = {}
id_manager = TagIDManager()
hass.data[DOMAIN][TAGS] = storage_collection = TagStorageCollection(
Store(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
id_manager,
)
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass)
return True
@bind_hass
async def async_scan_tag(hass, tag_id, device_id, context=None):
"""Handle when a tag is scanned."""
if DOMAIN not in hass.config.components:
raise HomeAssistantError("tag component has not been set up.")
hass.bus.async_fire(
EVENT_TAG_SCANNED, {TAG_ID: tag_id, DEVICE_ID: device_id}, context=context
)
helper = hass.data[DOMAIN][TAGS]
if tag_id in helper.data:
await helper.async_update_item(tag_id, {LAST_SCANNED: dt_util.utcnow()})
else:
await helper.async_create_item({TAG_ID: tag_id, LAST_SCANNED: dt_util.utcnow()})
_LOGGER.debug("Tag: %s scanned by device: %s", tag_id, device_id)
|
import argparse
import logging
import os.path
import time
from collections import defaultdict
from inotify.adapters import Inotify
from inotify.constants import IN_MODIFY
from inotify.constants import IN_MOVED_TO
from paasta_tools import firewall
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import TimeoutError
log = logging.getLogger(__name__)
DEFAULT_UPDATE_SECS = 5
def parse_args(argv):
parser = argparse.ArgumentParser(
description="Monitor synapse changes and update service firewall rules"
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="soa_dir",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory (default %(default)s)",
)
parser.add_argument(
"--synapse-service-dir",
dest="synapse_service_dir",
default=firewall.DEFAULT_SYNAPSE_SERVICE_DIR,
help="Path to synapse service dir (default %(default)s)",
)
parser.add_argument("-v", "--verbose", dest="verbose", action="store_true")
subparsers = parser.add_subparsers(
help="mode to run firewall update in", dest="mode"
)
subparsers.required = True
daemon_parser = subparsers.add_parser(
"daemon",
description=(
"Run a daemon which watches updates to synapse backends and updates iptables rules."
),
)
daemon_parser.add_argument(
"-u",
"--update-secs",
dest="update_secs",
default=DEFAULT_UPDATE_SECS,
type=int,
help="Poll for new containers every N secs (default %(default)s)",
)
subparsers.add_parser(
"cron",
description=(
"Do a one-time update of iptables rules to match the current running services."
),
)
args = parser.parse_args(argv)
return args
def setup_logging(verbose):
level = logging.DEBUG if verbose else logging.WARNING
logging.basicConfig(level=level)
def run_daemon(args):
# Main loop waiting on inotify file events
inotify = Inotify(block_duration_s=1) # event_gen blocks for 1 second
inotify.add_watch(args.synapse_service_dir.encode(), IN_MOVED_TO | IN_MODIFY)
services_by_dependencies_time = 0
for event in inotify.event_gen(): # blocks for only up to 1 second at a time
if services_by_dependencies_time + args.update_secs < time.time():
services_by_dependencies = smartstack_dependencies_of_running_firewalled_services(
soa_dir=args.soa_dir
)
services_by_dependencies_time = time.time()
if event is None:
continue
process_inotify_event(
event, services_by_dependencies, args.soa_dir, args.synapse_service_dir
)
def run_cron(args):
with firewall.firewall_flock():
firewall.general_update(args.soa_dir, args.synapse_service_dir)
def process_inotify_event(
event, services_by_dependencies, soa_dir, synapse_service_dir
):
filename = event[3].decode()
log.debug(f"process_inotify_event on {filename}")
service_instance, suffix = os.path.splitext(filename)
if suffix != ".json":
return
services_to_update = services_by_dependencies.get(service_instance, ())
if not services_to_update:
return
# filter active_service_groups() down to just the names in services_to_update
service_groups = {
service_group: macs
for service_group, macs in firewall.active_service_groups().items()
if service_group in services_to_update
}
try:
with firewall.firewall_flock():
firewall.ensure_service_chains(service_groups, soa_dir, synapse_service_dir)
for service_to_update in services_to_update:
log.debug(f"Updated {service_to_update}")
except TimeoutError as e:
log.error(
"Unable to update firewalls for {} because time-out obtaining flock: {}".format(
service_groups.keys(), e
)
)
def smartstack_dependencies_of_running_firewalled_services(soa_dir=DEFAULT_SOA_DIR):
dependencies_to_services = defaultdict(set)
for service, instance, _, _ in firewall.services_running_here():
config = get_instance_config(
service=service,
instance=instance,
cluster=load_system_paasta_config().get_cluster(),
load_deployments=False,
soa_dir=soa_dir,
)
inbound_firewall = config.get_inbound_firewall()
outbound_firewall = config.get_outbound_firewall()
if not inbound_firewall and not outbound_firewall:
continue
dependencies = config.get_dependencies() or ()
smartstack_dependencies = [
d["smartstack"] for d in dependencies if d.get("smartstack")
]
for smartstack_dependency in smartstack_dependencies:
# TODO: filter down to only services that have no proxy_port
dependencies_to_services[smartstack_dependency].add(
firewall.ServiceGroup(service, instance)
)
return dependencies_to_services
def main(argv=None):
args = parse_args(argv)
setup_logging(args.verbose)
{"daemon": run_daemon, "cron": run_cron}[args.mode](args)
|
from copy import deepcopy
from tempfile import NamedTemporaryFile
from cerberus import Validator
from cerberus.tests import assert_normalized, assert_success
def test_normalized():
schema = {'amount': {'coerce': int}}
document = {'amount': '2'}
expected = {'amount': 2}
assert_normalized(document, expected, schema)
def test_normalize_complex_objects():
# https://github.com/pyeve/cerberus/issues/147
schema = {'revision': {'coerce': int}}
document = {'revision': '5', 'file': NamedTemporaryFile(mode='w+')}
document['file'].write(r'foobar')
document['file'].seek(0)
normalized = Validator(schema, allow_unknown=True).normalized(document)
assert normalized['revision'] == 5
assert normalized['file'].read() == 'foobar'
document['file'].close()
normalized['file'].close()
def test_normalize_does_not_change_input_document():
# https://github.com/pyeve/cerberus/issues/147
schema = {'thing': {'type': 'dict', 'schema': {'amount': {'coerce': int}}}}
ref_obj = '2'
document = {'thing': {'amount': ref_obj}}
normalized = Validator(schema).normalized(document)
assert document is not normalized
assert normalized['thing']['amount'] == 2
assert document['thing']['amount'] is ref_obj
def test_normalize_tuples():
# https://github.com/pyeve/cerberus/issues/271
schema = {
'my_field': {
'type': 'tuple',
'itemsrules': {'type': ('string', 'number', 'dict')},
}
}
document = {'my_field': ('foo', 'bar', 42, 'albert', 'kandinsky', {'items': 23})}
assert_success(document, schema)
normalized = Validator(schema).normalized(document)
assert normalized['my_field'] == (
'foo',
'bar',
42,
'albert',
'kandinsky',
{'items': 23},
)
def test_purge_readonly():
schema = {
'description': {'type': 'string', 'maxlength': 500},
'last_updated': {'readonly': True},
}
validator = Validator(schema=schema, purge_readonly=True)
document = {'description': 'it is a thing'}
expected = deepcopy(document)
document['last_updated'] = 'future'
assert_normalized(document, expected, validator=validator)
def test_purge_unknown():
validator = Validator(purge_unknown=True)
schema = {'foo': {'type': 'string'}}
document = {'bar': 'foo'}
expected = {}
assert_normalized(document, expected, schema, validator)
def test_purge_unknown_in_subschema():
schema = {
'foo': {
'type': 'dict',
'schema': {'foo': {'type': 'string'}},
'purge_unknown': True,
}
}
document = {'foo': {'bar': ''}}
expected = {'foo': {}}
assert_normalized(document, expected, schema)
|
import tests
from pyVim import connect
from pyVmomi import vim
class ContainerViewTests(tests.VCRTestBase):
@tests.VCRTestBase.my_vcr.use_cassette('basic_container_view.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='once')
def test_basic_container_view(self):
# see: http://python3porting.com/noconv.html
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
content = si.RetrieveContent()
datacenter_object_view = content.viewManager.CreateContainerView(
content.rootFolder, [vim.Datacenter], True)
for datacenter in datacenter_object_view.view:
datastores = datacenter.datastore
# NOTE (hartsocks): the object handle here is a managed object
# reference, until we ask for more details, no other detail is
# transmitted. Our sample fixture is quite small.
self.assertEqual(1, len(datastores))
datacenter_object_view.Destroy()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
from datasets import dataset_factory
from deployment import model_deploy
from nets import nets_factory
from preprocessing import preprocessing_factory
from optimizer.yellowfin import YFOptimizer
slim = tf.contrib.slim
tf.app.flags.DEFINE_string(
'master', '', 'The address of the TensorFlow master to use.')
tf.app.flags.DEFINE_string(
'train_dir', '/tmp/tfmodel/',
'Directory where checkpoints and event logs are written to.')
tf.app.flags.DEFINE_integer('num_clones', 1,
'Number of model clones to deploy.')
tf.app.flags.DEFINE_boolean('clone_on_cpu', False,
'Use CPUs to deploy clones.')
tf.app.flags.DEFINE_integer('worker_replicas', 1, 'Number of worker replicas.')
tf.app.flags.DEFINE_integer(
'num_ps_tasks', 0,
'The number of parameter servers. If the value is 0, then the parameters '
'are handled locally by the worker.')
tf.app.flags.DEFINE_integer(
'num_readers', 4,
'The number of parallel readers that read data from the dataset.')
tf.app.flags.DEFINE_integer(
'num_preprocessing_threads', 4,
'The number of threads used to create the batches.')
tf.app.flags.DEFINE_integer(
'log_every_n_steps', 10,
    'The frequency with which logs are printed.')
tf.app.flags.DEFINE_integer(
'save_summaries_secs', 600,
'The frequency with which summaries are saved, in seconds.')
tf.app.flags.DEFINE_integer(
'save_interval_secs', 600,
'The frequency with which the model is saved, in seconds.')
tf.app.flags.DEFINE_integer(
'task', 0, 'Task id of the replica running the training.')
######################
# Optimization Flags #
######################
tf.app.flags.DEFINE_float(
'weight_decay', 0.00004, 'The weight decay on the model weights.')
tf.app.flags.DEFINE_string(
'optimizer', 'rmsprop',
'The name of the optimizer, one of "adadelta", "adagrad", "adam",'
'"ftrl", "momentum", "sgd" or "rmsprop".')
tf.app.flags.DEFINE_float(
'adadelta_rho', 0.95,
'The decay rate for adadelta.')
tf.app.flags.DEFINE_float(
'adagrad_initial_accumulator_value', 0.1,
'Starting value for the AdaGrad accumulators.')
tf.app.flags.DEFINE_float(
'adam_beta1', 0.9,
'The exponential decay rate for the 1st moment estimates.')
tf.app.flags.DEFINE_float(
'adam_beta2', 0.999,
'The exponential decay rate for the 2nd moment estimates.')
tf.app.flags.DEFINE_float('opt_epsilon', 1.0, 'Epsilon term for the optimizer.')
tf.app.flags.DEFINE_float('ftrl_learning_rate_power', -0.5,
'The learning rate power.')
tf.app.flags.DEFINE_float(
'ftrl_initial_accumulator_value', 0.1,
'Starting value for the FTRL accumulators.')
tf.app.flags.DEFINE_float(
'ftrl_l1', 0.0, 'The FTRL l1 regularization strength.')
tf.app.flags.DEFINE_float(
'ftrl_l2', 0.0, 'The FTRL l2 regularization strength.')
tf.app.flags.DEFINE_float(
'momentum', 0.9,
'The momentum for the MomentumOptimizer and RMSPropOptimizer.')
tf.app.flags.DEFINE_float('rmsprop_momentum', 0.9, 'Momentum.')
tf.app.flags.DEFINE_float('rmsprop_decay', 0.9, 'Decay term for RMSProp.')
#######################
# Learning Rate Flags #
#######################
tf.app.flags.DEFINE_string(
'learning_rate_decay_type',
'exponential',
'Specifies how the learning rate is decayed. One of "fixed", "exponential",'
' or "polynomial"')
tf.app.flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
tf.app.flags.DEFINE_float(
'end_learning_rate', 0.0001,
'The minimal end learning rate used by a polynomial decay learning rate.')
tf.app.flags.DEFINE_float(
'label_smoothing', 0.0, 'The amount of label smoothing.')
tf.app.flags.DEFINE_float(
'learning_rate_decay_factor', 0.94, 'Learning rate decay factor.')
tf.app.flags.DEFINE_float(
'num_epochs_per_decay', 2.0,
'Number of epochs after which learning rate decays.')
tf.app.flags.DEFINE_bool(
'sync_replicas', False,
'Whether or not to synchronize the replicas during training.')
tf.app.flags.DEFINE_integer(
'replicas_to_aggregate', 1,
'The Number of gradients to collect before updating params.')
tf.app.flags.DEFINE_float(
'moving_average_decay', None,
'The decay to use for the moving average.'
'If left as None, then moving averages are not used.')
#######################
# Dataset Flags #
#######################
tf.app.flags.DEFINE_string(
'dataset_name', 'imagenet', 'The name of the dataset to load.')
tf.app.flags.DEFINE_string(
'dataset_split_name', 'train', 'The name of the train/test split.')
tf.app.flags.DEFINE_string(
'dataset_dir', None, 'The directory where the dataset files are stored.')
tf.app.flags.DEFINE_integer(
'labels_offset', 0,
'An offset for the labels in the dataset. This flag is primarily used to '
'evaluate the VGG and ResNet architectures which do not use a background '
'class for the ImageNet dataset.')
tf.app.flags.DEFINE_string(
'model_name', 'inception_v3', 'The name of the architecture to train.')
tf.app.flags.DEFINE_string(
'preprocessing_name', None, 'The name of the preprocessing to use. If left '
'as `None`, then the model_name flag is used.')
tf.app.flags.DEFINE_integer(
'batch_size', 32, 'The number of samples in each batch.')
tf.app.flags.DEFINE_integer(
'train_image_size', None, 'Train image size')
tf.app.flags.DEFINE_integer('max_number_of_steps', None,
'The maximum number of training steps.')
tf.app.flags.DEFINE_float('width_multiplier', 1.0,
'Width Multiplier, for MobileNet only.')
#####################
# Fine-Tuning Flags #
#####################
tf.app.flags.DEFINE_string(
'checkpoint_path', None,
'The path to a checkpoint from which to fine-tune.')
tf.app.flags.DEFINE_string(
'checkpoint_exclude_scopes', None,
'Comma-separated list of scopes of variables to exclude when restoring '
'from a checkpoint.')
tf.app.flags.DEFINE_string(
'trainable_scopes', None,
'Comma-separated list of scopes to filter the set of variables to train.'
'By default, None would train all the variables.')
tf.app.flags.DEFINE_boolean(
'ignore_missing_vars', False,
    'When restoring a checkpoint, ignore variables that are missing from it.')
FLAGS = tf.app.flags.FLAGS
def _configure_learning_rate(num_samples_per_epoch, global_step):
"""Configures the learning rate.
Args:
num_samples_per_epoch: The number of samples in each epoch of training.
global_step: The global_step tensor.
Returns:
A `Tensor` representing the learning rate.
Raises:
    ValueError: if `learning_rate_decay_type` is not recognized.
"""
decay_steps = int(num_samples_per_epoch / FLAGS.batch_size *
FLAGS.num_epochs_per_decay)
if FLAGS.sync_replicas:
decay_steps /= FLAGS.replicas_to_aggregate
if FLAGS.learning_rate_decay_type == 'exponential':
return tf.train.exponential_decay(FLAGS.learning_rate,
global_step,
decay_steps,
FLAGS.learning_rate_decay_factor,
staircase=True,
name='exponential_decay_learning_rate')
elif FLAGS.learning_rate_decay_type == 'fixed':
return tf.constant(FLAGS.learning_rate, name='fixed_learning_rate')
elif FLAGS.learning_rate_decay_type == 'polynomial':
return tf.train.polynomial_decay(FLAGS.learning_rate,
global_step,
decay_steps,
FLAGS.end_learning_rate,
power=1.0,
cycle=False,
name='polynomial_decay_learning_rate')
else:
raise ValueError('learning_rate_decay_type [%s] was not recognized',
FLAGS.learning_rate_decay_type)
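# Worked example (hypothetical numbers, following the formula above): with
# num_samples_per_epoch=100000, FLAGS.batch_size=32 and
# FLAGS.num_epochs_per_decay=2.0, decay_steps = int(100000 / 32 * 2.0) = 6250,
# so with exponential decay the learning rate is multiplied by
# FLAGS.learning_rate_decay_factor once every 6250 global steps.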
def _configure_optimizer(learning_rate):
"""Configures the optimizer used for training.
Args:
learning_rate: A scalar or `Tensor` learning rate.
Returns:
An instance of an optimizer.
Raises:
ValueError: if FLAGS.optimizer is not recognized.
"""
if FLAGS.optimizer == 'adadelta':
optimizer = tf.train.AdadeltaOptimizer(
learning_rate,
rho=FLAGS.adadelta_rho,
epsilon=FLAGS.opt_epsilon)
elif FLAGS.optimizer == 'adagrad':
optimizer = tf.train.AdagradOptimizer(
learning_rate,
initial_accumulator_value=FLAGS.adagrad_initial_accumulator_value)
elif FLAGS.optimizer == 'adam':
optimizer = tf.train.AdamOptimizer(
learning_rate,
beta1=FLAGS.adam_beta1,
beta2=FLAGS.adam_beta2,
epsilon=FLAGS.opt_epsilon)
elif FLAGS.optimizer == 'ftrl':
optimizer = tf.train.FtrlOptimizer(
learning_rate,
learning_rate_power=FLAGS.ftrl_learning_rate_power,
initial_accumulator_value=FLAGS.ftrl_initial_accumulator_value,
l1_regularization_strength=FLAGS.ftrl_l1,
l2_regularization_strength=FLAGS.ftrl_l2)
elif FLAGS.optimizer == 'momentum':
optimizer = tf.train.MomentumOptimizer(
learning_rate,
momentum=FLAGS.momentum,
name='Momentum')
elif FLAGS.optimizer == 'rmsprop':
optimizer = tf.train.RMSPropOptimizer(
learning_rate,
decay=FLAGS.rmsprop_decay,
momentum=FLAGS.rmsprop_momentum,
epsilon=FLAGS.opt_epsilon)
elif FLAGS.optimizer == 'sgd':
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
elif FLAGS.optimizer == 'yellowfin':
optimizer = YFOptimizer(lr=1.0, mu=0.0)
else:
raise ValueError('Optimizer [%s] was not recognized', FLAGS.optimizer)
return optimizer
def _add_variables_summaries(learning_rate):
summaries = []
for variable in slim.get_model_variables():
summaries.append(tf.summary.histogram(variable.op.name, variable))
summaries.append(tf.summary.scalar('training/Learning Rate', learning_rate))
return summaries
def _get_init_fn():
"""Returns a function run by the chief worker to warm-start the training.
Note that the init_fn is only run when initializing the model during the very
first global step.
Returns:
An init function run by the supervisor.
"""
if FLAGS.checkpoint_path is None:
return None
# Warn the user if a checkpoint exists in the train_dir. Then we'll be
# ignoring the checkpoint anyway.
if tf.train.latest_checkpoint(FLAGS.train_dir):
tf.logging.info(
'Ignoring --checkpoint_path because a checkpoint already exists in %s'
% FLAGS.train_dir)
return None
exclusions = []
if FLAGS.checkpoint_exclude_scopes:
exclusions = [scope.strip()
for scope in FLAGS.checkpoint_exclude_scopes.split(',')]
# TODO(sguada) variables.filter_variables()
variables_to_restore = []
for var in slim.get_model_variables():
excluded = False
for exclusion in exclusions:
if var.op.name.startswith(exclusion):
excluded = True
break
if not excluded:
variables_to_restore.append(var)
if tf.gfile.IsDirectory(FLAGS.checkpoint_path):
checkpoint_path = tf.train.latest_checkpoint(FLAGS.checkpoint_path)
else:
checkpoint_path = FLAGS.checkpoint_path
tf.logging.info('Fine-tuning from %s' % checkpoint_path)
return slim.assign_from_checkpoint_fn(
checkpoint_path,
variables_to_restore,
ignore_missing_vars=FLAGS.ignore_missing_vars)
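# Example (hypothetical flag value): with
#     FLAGS.checkpoint_exclude_scopes = "InceptionV3/Logits,InceptionV3/AuxLogits"
# the loop above drops every model variable whose op name starts with one of
# those two scopes, so they are re-initialized instead of restored from the
# checkpoint when fine-tuning on a dataset with a different number of classes.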
def _get_variables_to_train():
"""Returns a list of variables to train.
Returns:
A list of variables to train by the optimizer.
"""
if FLAGS.trainable_scopes is None:
return tf.trainable_variables()
else:
scopes = [scope.strip() for scope in FLAGS.trainable_scopes.split(',')]
variables_to_train = []
for scope in scopes:
variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
variables_to_train.extend(variables)
return variables_to_train
def main(_):
if not FLAGS.dataset_dir:
raise ValueError('You must supply the dataset directory with --dataset_dir')
tf.logging.set_verbosity(tf.logging.INFO)
with tf.Graph().as_default():
#######################
# Config model_deploy #
#######################
deploy_config = model_deploy.DeploymentConfig(
num_clones=FLAGS.num_clones,
clone_on_cpu=FLAGS.clone_on_cpu,
replica_id=FLAGS.task,
num_replicas=FLAGS.worker_replicas,
num_ps_tasks=FLAGS.num_ps_tasks)
# Create global_step
with tf.device(deploy_config.variables_device()):
global_step = slim.create_global_step()
######################
# Select the dataset #
######################
dataset = dataset_factory.get_dataset(
FLAGS.dataset_name, FLAGS.dataset_split_name, FLAGS.dataset_dir)
######################
# Select the network #
######################
network_fn = nets_factory.get_network_fn(
FLAGS.model_name,
num_classes=(dataset.num_classes - FLAGS.labels_offset),
weight_decay=FLAGS.weight_decay,
is_training=True,
width_multiplier=FLAGS.width_multiplier)
#####################################
# Select the preprocessing function #
#####################################
preprocessing_name = FLAGS.preprocessing_name or FLAGS.model_name
image_preprocessing_fn = preprocessing_factory.get_preprocessing(
preprocessing_name,
is_training=True)
##############################################################
# Create a dataset provider that loads data from the dataset #
##############################################################
with tf.device(deploy_config.inputs_device()):
provider = slim.dataset_data_provider.DatasetDataProvider(
dataset,
num_readers=FLAGS.num_readers,
common_queue_capacity=20 * FLAGS.batch_size,
common_queue_min=10 * FLAGS.batch_size)
[image, label] = provider.get(['image', 'label'])
label -= FLAGS.labels_offset
train_image_size = FLAGS.train_image_size or network_fn.default_image_size
image = image_preprocessing_fn(image, train_image_size, train_image_size)
images, labels = tf.train.batch(
[image, label],
batch_size=FLAGS.batch_size,
num_threads=FLAGS.num_preprocessing_threads,
capacity=5 * FLAGS.batch_size)
labels = slim.one_hot_encoding(
labels, dataset.num_classes - FLAGS.labels_offset)
batch_queue = slim.prefetch_queue.prefetch_queue(
[images, labels], capacity=2 * deploy_config.num_clones)
####################
# Define the model #
####################
def clone_fn(batch_queue):
"""Allows data parallelism by creating multiple clones of network_fn."""
images, labels = batch_queue.dequeue()
logits, end_points = network_fn(images)
#############################
# Specify the loss function #
#############################
if 'AuxLogits' in end_points:
tf.losses.softmax_cross_entropy(
logits=end_points['AuxLogits'], onehot_labels=labels,
label_smoothing=FLAGS.label_smoothing, weights=0.4, scope='aux_loss')
tf.losses.softmax_cross_entropy(
logits=logits, onehot_labels=labels,
label_smoothing=FLAGS.label_smoothing, weights=1.0)
return end_points
# Gather initial summaries.
summaries = set(tf.get_collection(tf.GraphKeys.SUMMARIES))
clones = model_deploy.create_clones(deploy_config, clone_fn, [batch_queue])
first_clone_scope = deploy_config.clone_scope(0)
# Gather update_ops from the first clone. These contain, for example,
# the updates for the batch_norm variables created by network_fn.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, first_clone_scope)
# Add summaries for end_points.
end_points = clones[0].outputs
for end_point in end_points:
x = end_points[end_point]
summaries.add(tf.summary.histogram('activations/' + end_point, x))
summaries.add(tf.summary.scalar('sparsity/' + end_point,
tf.nn.zero_fraction(x)))
# Add summaries for losses.
for loss in tf.get_collection(tf.GraphKeys.LOSSES, first_clone_scope):
summaries.add(tf.summary.scalar('losses/%s' % loss.op.name, loss))
# Add summaries for variables.
for variable in slim.get_model_variables():
summaries.add(tf.summary.histogram(variable.op.name, variable))
#################################
# Configure the moving averages #
#################################
if FLAGS.moving_average_decay:
moving_average_variables = slim.get_model_variables()
variable_averages = tf.train.ExponentialMovingAverage(
FLAGS.moving_average_decay, global_step)
else:
moving_average_variables, variable_averages = None, None
#########################################
# Configure the optimization procedure. #
#########################################
with tf.device(deploy_config.optimizer_device()):
learning_rate = _configure_learning_rate(dataset.num_samples, global_step)
optimizer = _configure_optimizer(learning_rate)
summaries.add(tf.summary.scalar('learning_rate', learning_rate))
if FLAGS.sync_replicas:
# If sync_replicas is enabled, the averaging will be done in the chief
# queue runner.
optimizer = tf.train.SyncReplicasOptimizer(
opt=optimizer,
replicas_to_aggregate=FLAGS.replicas_to_aggregate,
variable_averages=variable_averages,
variables_to_average=moving_average_variables,
replica_id=tf.constant(FLAGS.task, tf.int32, shape=()),
total_num_replicas=FLAGS.worker_replicas)
elif FLAGS.moving_average_decay:
# Update ops executed locally by trainer.
update_ops.append(variable_averages.apply(moving_average_variables))
# Variables to train.
variables_to_train = _get_variables_to_train()
    # optimize_clones() computes the total loss over all clones and the gradients for the variables to train.
total_loss, clones_gradients = model_deploy.optimize_clones(
clones,
optimizer,
var_list=variables_to_train)
# Add total_loss to summary.
summaries.add(tf.summary.scalar('total_loss', total_loss))
# Create gradient updates.
grad_updates = optimizer.apply_gradients(clones_gradients,
global_step=global_step)
update_ops.append(grad_updates)
update_op = tf.group(*update_ops)
train_tensor = control_flow_ops.with_dependencies([update_op], total_loss,
name='train_op')
# Add the summaries from the first clone. These contain the summaries
# created by model_fn and either optimize_clones() or _gather_clone_loss().
summaries |= set(tf.get_collection(tf.GraphKeys.SUMMARIES,
first_clone_scope))
# Merge all summaries together.
summary_op = tf.summary.merge(list(summaries), name='summary_op')
###########################
# Kicks off the training. #
###########################
slim.learning.train(
train_tensor,
logdir=FLAGS.train_dir,
master=FLAGS.master,
is_chief=(FLAGS.task == 0),
init_fn=_get_init_fn(),
summary_op=summary_op,
number_of_steps=FLAGS.max_number_of_steps,
log_every_n_steps=FLAGS.log_every_n_steps,
save_summaries_secs=FLAGS.save_summaries_secs,
save_interval_secs=FLAGS.save_interval_secs,
sync_optimizer=optimizer if FLAGS.sync_replicas else None)
if __name__ == '__main__':
tf.app.run()
|
from decimal import Decimal
import logging
from urllib.parse import urljoin
from django.core import checks
from django.core.exceptions import ImproperlyConfigured
from django.db import models, transaction
from django.db.models.aggregates import Sum
from django.urls import NoReverseMatch, reverse
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _, pgettext_lazy, get_language_from_request
from django_fsm import FSMField, transition
from ipware.ip import get_client_ip
from cms.models import Page
from shop.conf import app_settings
from shop.models.cart import CartItemModel
from shop.models.fields import JSONField
from shop.money.fields import MoneyField, MoneyMaker
from shop import deferred
from shop.models.product import BaseProduct
class OrderQuerySet(models.QuerySet):
def _filter_or_exclude(self, negate, *args, **kwargs):
"""
Emulate filter queries on the Order model using a pseudo slug attribute.
This allows to use order numbers as slugs, formatted by method `Order.get_number()`.
"""
lookup_kwargs = {}
for key, lookup in kwargs.items():
try:
index = key.index('__')
field_name, lookup_type = key[:index], key[index:]
except ValueError:
field_name, lookup_type = key, ''
if field_name == 'slug':
key, lookup = self.model.resolve_number(lookup).popitem()
lookup_kwargs.update({key + lookup_type: lookup})
else:
lookup_kwargs.update({key: lookup})
return super()._filter_or_exclude(negate, *args, **lookup_kwargs)
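# Example (sketch): with the default BaseOrder.resolve_number() defined below,
# a lookup such as OrderModel.objects.filter(slug='17') is rewritten by
# _filter_or_exclude() into filter(pk='17'), so order numbers can be used in
# place of slugs by views and URL patterns.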
class OrderManager(models.Manager):
_queryset_class = OrderQuerySet
def create_from_cart(self, cart, request):
"""
This creates a new empty Order object with a valid order number (many payment service
providers require an order number, before the purchase is actually completed). Therefore
the order is not populated with any cart items yet; this must be performed in the next step
by calling ``order.populate_from_cart(cart, request)``, otherwise the order object remains
in state ``new``. The latter can happen, if a payment service provider did not acknowledge
a payment, hence the items remain in the cart.
"""
cart.update(request)
cart.customer.get_or_assign_number()
order = self.model(
customer=cart.customer,
currency=cart.total.currency,
_subtotal=Decimal(0),
_total=Decimal(0),
stored_request=self.stored_request(request),
)
order.get_or_assign_number()
order.assign_secret()
order.save()
return order
def stored_request(self, request):
"""
Extract useful information about the request to be used for emulating a Django request
during offline rendering.
"""
return {
'language': get_language_from_request(request),
'absolute_base_uri': request.build_absolute_uri('/'),
'remote_ip': get_client_ip(request)[0],
'user_agent': request.META.get('HTTP_USER_AGENT'),
}
def get_summary_url(self):
"""
Returns the URL of the page with the list view for all orders related to the current customer
"""
if not hasattr(self, '_summary_url'):
try: # via CMS pages
page = Page.objects.public().get(reverse_id='shop-order')
except Page.DoesNotExist:
page = Page.objects.public().filter(application_urls='OrderApp').first()
if page:
self._summary_url = page.get_absolute_url()
else:
try: # through hardcoded urlpatterns
self._summary_url = reverse('shop-order')
except NoReverseMatch:
self._summary_url = '/cms-page_or_view_with__reverse_id=shop-order__does_not_exist/'
return self._summary_url
class WorkflowMixinMetaclass(deferred.ForeignKeyBuilder):
"""
Add configured Workflow mixin classes to ``OrderModel`` and ``OrderPayment`` to customize
all kinds of state transitions in a pluggable manner.
"""
def __new__(cls, name, bases, attrs):
if 'BaseOrder' in (b.__name__ for b in bases):
bases = tuple(app_settings.ORDER_WORKFLOWS) + bases
# merge the dicts of TRANSITION_TARGETS
attrs.update(_transition_targets={}, _auto_transitions={})
for b in reversed(bases):
TRANSITION_TARGETS = getattr(b, 'TRANSITION_TARGETS', {})
try:
delattr(b, 'TRANSITION_TARGETS')
except AttributeError:
pass
if set(TRANSITION_TARGETS.keys()).intersection(attrs['_transition_targets']):
msg = "Mixin class {} already contains a transition named '{}'"
raise ImproperlyConfigured(msg.format(b.__name__, ', '.join(TRANSITION_TARGETS.keys())))
attrs['_transition_targets'].update(TRANSITION_TARGETS)
attrs['_auto_transitions'].update(cls.add_to_auto_transitions(b))
Model = super().__new__(cls, name, bases, attrs)
return Model
@classmethod
def add_to_auto_transitions(cls, base):
result = {}
for name, method in base.__dict__.items():
if callable(method) and hasattr(method, '_django_fsm'):
for name, transition in method._django_fsm.transitions.items():
if transition.custom.get('auto'):
result.update({name: method})
return result
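# Illustrative sketch (hypothetical mixin, not part of this module): every class
# listed in app_settings.ORDER_WORKFLOWS is mixed into BaseOrder below and may
# contribute its own TRANSITION_TARGETS plus @transition methods, e.g.:
#
#     class CancelOrderWorkflowSketch:
#         TRANSITION_TARGETS = {'order_canceled': "Order canceled"}
#
#         @transition(field='status', source=['created'], target='order_canceled')
#         def cancel_order(self):
#             """Cancel this order."""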
class BaseOrder(models.Model, metaclass=WorkflowMixinMetaclass):
"""
An Order is the "in process" counterpart of the shopping cart, which freezes the state of the
cart on the moment of purchase. It also holds stuff like the shipping and billing addresses,
and keeps all the additional entities, as determined by the cart modifiers.
"""
TRANSITION_TARGETS = {
'new': _("New order without content"),
'created': _("Order freshly created"),
'payment_confirmed': _("Payment confirmed"),
'payment_declined': _("Payment declined"),
}
decimalfield_kwargs = {
'max_digits': 30,
'decimal_places': 2,
}
decimal_exp = Decimal('.' + '0' * decimalfield_kwargs['decimal_places'])
customer = deferred.ForeignKey(
'BaseCustomer',
on_delete=models.PROTECT,
verbose_name=_("Customer"),
related_name='orders',
)
status = FSMField(
default='new',
protected=True,
verbose_name=_("Status"),
)
currency = models.CharField(
max_length=7,
editable=False,
help_text=_("Currency in which this order was concluded"),
)
_subtotal = models.DecimalField(
_("Subtotal"),
**decimalfield_kwargs
)
_total = models.DecimalField(
_("Total"),
**decimalfield_kwargs
)
created_at = models.DateTimeField(
_("Created at"),
auto_now_add=True,
)
updated_at = models.DateTimeField(
_("Updated at"),
auto_now=True,
)
extra = JSONField(
verbose_name=_("Extra fields"),
help_text=_("Arbitrary information for this order object on the moment of purchase."),
)
stored_request = JSONField(
help_text=_("Parts of the Request objects on the moment of purchase."),
)
objects = OrderManager()
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.logger = logging.getLogger('shop.order')
def __str__(self):
return self.get_number()
def __repr__(self):
return "<{}(pk={})>".format(self.__class__.__name__, self.pk)
def get_or_assign_number(self):
"""
Hook to get or to assign the order number. It shall be invoked, every time an Order
object is created. If you prefer to use an order number which differs from the primary
key, then override this method.
"""
return self.get_number()
def get_number(self):
"""
Hook to get the order number.
A class inheriting from Order may transform this into a string which is better readable.
"""
return str(self.pk)
def assign_secret(self):
"""
Hook to assign a secret to authorize access on this Order object without authentication.
"""
@property
def secret(self):
"""
Hook to return a secret if available.
"""
@classmethod
def resolve_number(cls, number):
"""
Return a lookup pair used to filter down a queryset.
It should revert the effect from the above method `get_number`.
"""
return dict(pk=number)
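    # Illustrative sketch (assumption, not in the original code): projects that want a
    # human readable order number typically override get_number() and resolve_number()
    # as a matching pair, so that resolve_number() reverses get_number(), e.g.:
    #     def get_number(self):
    #         return '{}-{}'.format(self.created_at.year, self.pk)
    #     @classmethod
    #     def resolve_number(cls, number):
    #         return dict(pk=number.split('-', 1)[1])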
@property
def subtotal(self):
"""
The summed up amount for all ordered items excluding extra order lines.
"""
return MoneyMaker(self.currency)(self._subtotal)
@property
def total(self):
"""
The final total to charge for this order.
"""
return MoneyMaker(self.currency)(self._total)
@classmethod
def round_amount(cls, amount):
if amount.is_finite():
return Decimal(amount).quantize(cls.decimal_exp)
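    # Worked example (illustration only): with the default ``decimal_places=2`` the
    # quantizer ``decimal_exp`` equals Decimal('0.00'), so
    #     BaseOrder.round_amount(Decimal('7.4999'))  ->  Decimal('7.50')
    # whereas a non-finite amount such as Decimal('NaN') falls through and returns None.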
def get_absolute_url(self):
"""
Returns the URL for the detail view of this order.
"""
return urljoin(OrderModel.objects.get_summary_url(), self.get_number())
@transaction.atomic
@transition(field=status, source='new', target='created')
def populate_from_cart(self, cart, request):
"""
Populate the order object with the fields from the given cart.
For each cart item a corresponding order item is created populating its fields and removing
that cart item.
        Override this method, in case a customized cart has some fields which have to be transferred
        to the order object.
"""
assert hasattr(cart, 'subtotal') and hasattr(cart, 'total'), \
"Did you forget to invoke 'cart.update(request)' before populating from cart?"
for cart_item in cart.items.all():
cart_item.update(request)
order_item = OrderItemModel(order=self)
try:
order_item.populate_from_cart_item(cart_item, request)
order_item.save()
cart_item.delete()
except CartItemModel.DoesNotExist:
pass
self._subtotal = Decimal(cart.subtotal)
self._total = Decimal(cart.total)
self.extra = dict(cart.extra)
self.extra.update(rows=[(modifier, extra_row.data) for modifier, extra_row in cart.extra_rows.items()])
self.save()
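    # Illustrative usage sketch (assumption, not part of the original code):
    #     cart.update(request)                     # required before populating
    #     order = OrderModel.objects.create(customer=customer, currency='EUR',
    #                                       _subtotal=0, _total=0, extra={}, stored_request={})
    #     order.populate_from_cart(cart, request)  # moves status from 'new' to 'created'
    #     order.save(with_notification=True)       # may fire transition notifications
    # How the Order instance is created in practice (often through a manager helper)
    # and which fields ``create()`` needs are project specific assumptions here.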
@transaction.atomic
def readd_to_cart(self, cart):
"""
Re-add the items of this order back to the cart.
"""
for order_item in self.items.all():
extra = dict(order_item.extra)
extra.pop('rows', None)
extra.update(product_code=order_item.product_code)
cart_item = order_item.product.is_in_cart(cart, **extra)
if cart_item:
cart_item.quantity = max(cart_item.quantity, order_item.quantity)
else:
cart_item = CartItemModel(cart=cart, product=order_item.product,
product_code=order_item.product_code,
quantity=order_item.quantity, extra=extra)
cart_item.save()
def save(self, with_notification=False, **kwargs):
"""
:param with_notification: If ``True``, all notifications for the state of this Order object
are executed.
"""
from shop.transition import transition_change_notification
auto_transition = self._auto_transitions.get(self.status)
if callable(auto_transition):
auto_transition(self)
# round the total to the given decimal_places
self._subtotal = BaseOrder.round_amount(self._subtotal)
self._total = BaseOrder.round_amount(self._total)
super().save(**kwargs)
if with_notification:
transition_change_notification(self)
@cached_property
def amount_paid(self):
"""
The amount paid is the sum of related orderpayments
"""
amount = self.orderpayment_set.aggregate(amount=Sum('amount'))['amount']
if amount is None:
amount = MoneyMaker(self.currency)()
return amount
@property
def outstanding_amount(self):
"""
        Return the outstanding amount for this order, i.e. the total minus the amount already paid.
"""
return self.total - self.amount_paid
def is_fully_paid(self):
return self.amount_paid >= self.total
@transition(field='status', source='*', target='payment_confirmed', conditions=[is_fully_paid])
def acknowledge_payment(self, by=None):
"""
Change status to ``payment_confirmed``. This status code is known globally and can be used
by all external plugins to check, if an Order object has been fully paid.
"""
self.logger.info("Acknowledge payment by user %s", by)
def cancelable(self):
"""
A hook method to be overridden by mixin classes managing Order cancellations.
:returns: ``True`` if the current Order is cancelable.
"""
return False
def refund_payment(self):
"""
Hook to handle payment refunds.
"""
def withdraw_from_delivery(self):
"""
Hook to withdraw shipping order.
"""
@classmethod
def get_all_transitions(cls):
"""
:returns: A generator over all transition objects for this Order model.
"""
return cls.status.field.get_all_transitions(OrderModel)
@classmethod
def get_transition_name(cls, target):
"""
:returns: The verbose name for a given transition target.
"""
return cls._transition_targets.get(target, target)
def status_name(self):
"""
:returns: The verbose name for the current transition state.
"""
return self._transition_targets.get(self.status, self.status)
status_name.short_description = pgettext_lazy('order_models', "State")
OrderModel = deferred.MaterializedModel(BaseOrder)
class OrderPayment(models.Model, metaclass=deferred.ForeignKeyBuilder):
"""
A model to hold received payments for a given order.
"""
order = deferred.ForeignKey(
BaseOrder,
on_delete=models.CASCADE,
verbose_name=_("Order"),
)
amount = MoneyField(
_("Amount paid"),
help_text=_("How much was paid with this particular transfer."),
)
transaction_id = models.CharField(
_("Transaction ID"),
max_length=255,
help_text=_("The transaction processor's reference"),
)
created_at = models.DateTimeField(
_("Received at"),
auto_now_add=True,
)
payment_method = models.CharField(
_("Payment method"),
max_length=50,
help_text=_("The payment backend used to process the purchase"),
)
class Meta:
verbose_name = pgettext_lazy('order_models', "Order payment")
verbose_name_plural = pgettext_lazy('order_models', "Order payments")
def __str__(self):
return _("Payment ID: {}").format(self.id)
class BaseOrderItem(models.Model, metaclass=deferred.ForeignKeyBuilder):
"""
An item for an order.
"""
order = deferred.ForeignKey(
BaseOrder,
on_delete=models.CASCADE,
related_name='items',
verbose_name=_("Order"),
)
product_name = models.CharField(
_("Product name"),
max_length=255,
null=True,
blank=True,
help_text=_("Product name at the moment of purchase."),
)
product_code = models.CharField(
_("Product code"),
max_length=255,
null=True,
blank=True,
help_text=_("Product code at the moment of purchase."),
)
product = deferred.ForeignKey(
BaseProduct,
on_delete=models.SET_NULL,
verbose_name=_("Product"),
null=True,
blank=True,
)
_unit_price = models.DecimalField(
_("Unit price"),
null=True, # may be NaN
help_text=_("Products unit price at the moment of purchase."),
**BaseOrder.decimalfield_kwargs
)
_line_total = models.DecimalField(
_("Line Total"),
null=True, # may be NaN
help_text=_("Line total on the invoice at the moment of purchase."),
**BaseOrder.decimalfield_kwargs
)
extra = JSONField(
verbose_name=_("Extra fields"),
help_text=_("Arbitrary information for this order item"),
)
class Meta:
abstract = True
verbose_name = pgettext_lazy('order_models', "Ordered Item")
verbose_name_plural = pgettext_lazy('order_models', "Ordered Items")
def __str__(self):
return self.product_name
@classmethod
def check(cls, **kwargs):
errors = super().check(**kwargs)
for cart_field in CartItemModel._meta.fields:
if cart_field.attname == 'quantity':
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(CartItemModel.__name__)))
for field in cls._meta.fields:
if field.attname == 'quantity':
if field.get_internal_type() != cart_field.get_internal_type():
msg = "Field `{}.quantity` must be of same type as `{}.quantity`."
errors.append(checks.Error(msg.format(cls.__name__, CartItemModel.__name__)))
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(cls.__name__)))
return errors
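    # Illustrative sketch (assumption): the system check above expects every concrete
    # OrderItem to declare a ``quantity`` field whose type matches
    # ``CartItemModel.quantity``, for instance:
    #     class OrderItem(BaseOrderItem):
    #         quantity = models.PositiveIntegerField(_("Ordered quantity"))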
@property
def unit_price(self):
return MoneyMaker(self.order.currency)(self._unit_price)
@property
def line_total(self):
return MoneyMaker(self.order.currency)(self._line_total)
def populate_from_cart_item(self, cart_item, request):
"""
From a given cart item, populate the current order item.
If the operation was successful, the given item shall be removed from the cart.
If an exception of type :class:`CartItem.DoesNotExist` is raised, discard the order item.
"""
if cart_item.quantity == 0:
raise CartItemModel.DoesNotExist("Cart Item is on the Wish List")
kwargs = {'product_code': cart_item.product_code}
kwargs.update(cart_item.extra)
cart_item.product.deduct_from_stock(cart_item.quantity, **kwargs)
self.product = cart_item.product
# for historical integrity, store the product's name and price at the moment of purchase
self.product_name = cart_item.product.product_name
self.product_code = cart_item.product_code
self._unit_price = Decimal(cart_item.unit_price)
self._line_total = Decimal(cart_item.line_total)
self.quantity = cart_item.quantity
self.extra = dict(cart_item.extra)
extra_rows = [(modifier, extra_row.data) for modifier, extra_row in cart_item.extra_rows.items()]
self.extra.update(rows=extra_rows)
def save(self, *args, **kwargs):
"""
Before saving the OrderItem object to the database, round the amounts to the given decimal places
"""
self._unit_price = BaseOrder.round_amount(self._unit_price)
self._line_total = BaseOrder.round_amount(self._line_total)
super().save(*args, **kwargs)
OrderItemModel = deferred.MaterializedModel(BaseOrderItem)
|
import sys
from collections import defaultdict
import iptc
from paasta_tools import iptables
from paasta_tools.utils import get_docker_client
def list_docker_nat_rules():
chain_name = "DOCKER"
table = iptc.Table(iptc.Table.NAT)
chain = iptc.Chain(table, chain_name)
for rule in chain.rules:
yield iptables.Rule.from_iptc(rule)
def main():
docker_client = get_docker_client()
ip_to_containers = defaultdict(list)
for container in docker_client.containers():
networks = container["NetworkSettings"]["Networks"]
if "bridge" in networks:
ip = networks["bridge"]["IPAddress"]
if ip:
ip_to_containers[ip].append(container)
output = []
for ip, containers in ip_to_containers.items():
if len(containers) > 1:
output.append(f"{ip} shared by the following containers:")
for container in containers:
output.append(" Image: {}".format(container["Image"]))
output.append(" ID: {}".format(container["Id"]))
output.append(" State: {}".format(container["State"]))
output.append(" Status: {}".format(container["Status"]))
output.append("")
if output:
print(
"CRITICAL - There are multiple Docker containers assigned to the same IP."
)
print(
"There should only be one per IP. Choose one to keep and try stopping the others."
)
print("\n".join(output))
return 2
else:
print("OK - No Docker containers sharing an IP on this host.")
targets_seen = {}
duplicates_found = False
for rule in list_docker_nat_rules():
target = rule.target_parameters
if target not in targets_seen:
targets_seen[target] = rule
else:
print(
"This is the second time we've seen a rule with the same target_parameters!"
)
print(rule)
print("The other rule with that target is:")
print(targets_seen[target])
duplicates_found = True
if duplicates_found is True:
print(
"CRITICAL - Duplicate iptables rules found! This will route traffic to the wrong service!"
)
return 2
else:
print("OK - No duplicate Docker iptables rules detected")
if __name__ == "__main__":
sys.exit(main())
|
import logging
import unittest
import os
import os.path
import numpy as np
from gensim.corpora import mmcorpus, Dictionary
from gensim.models.wrappers import ldamallet
from gensim import matutils
from gensim.utils import simple_preprocess
from gensim.models import ldamodel
from gensim.test import basetmtests
from gensim.test.utils import datapath, get_tmpfile, common_texts
import gensim.downloader as api
dictionary = Dictionary(common_texts)
corpus = [dictionary.doc2bow(text) for text in common_texts]
class TestLdaMallet(unittest.TestCase, basetmtests.TestBaseTopicModel):
def setUp(self):
mallet_home = os.environ.get('MALLET_HOME', None)
self.mallet_path = os.path.join(mallet_home, 'bin', 'mallet') if mallet_home else None
if not self.mallet_path:
raise unittest.SkipTest("MALLET_HOME not specified. Skipping Mallet tests.")
self.corpus = mmcorpus.MmCorpus(datapath('testcorpus.mm'))
# self.model is used in TestBaseTopicModel
self.model = ldamallet.LdaMallet(self.mallet_path, corpus, id2word=dictionary, num_topics=2, iterations=1)
def testTransform(self):
if not self.mallet_path:
return
passed = False
for i in range(5): # restart at most 5 times
# create the transformation model
model = ldamallet.LdaMallet(self.mallet_path, corpus, id2word=dictionary, num_topics=2, iterations=200)
# transform one document
doc = list(corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = [0.49, 0.51]
# must contain the same values, up to re-ordering
passed = np.allclose(sorted(vec), sorted(expected), atol=1e-1)
if passed:
break
logging.warning(
"LDA failed to converge on attempt %i (got %s, expected %s)",
i, sorted(vec), sorted(expected)
)
self.assertTrue(passed)
def testSparseTransform(self):
if not self.mallet_path:
return
passed = False
for i in range(5): # restart at most 5 times
# create the sparse transformation model with the appropriate topic_threshold
model = ldamallet.LdaMallet(
self.mallet_path, corpus, id2word=dictionary, num_topics=2, iterations=200, topic_threshold=0.5
)
# transform one document
doc = list(corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = [1.0, 0.0]
# must contain the same values, up to re-ordering
passed = np.allclose(sorted(vec), sorted(expected), atol=1e-2)
if passed:
break
logging.warning(
"LDA failed to converge on attempt %i (got %s, expected %s)",
i, sorted(vec), sorted(expected)
)
self.assertTrue(passed)
def testMallet2Model(self):
if not self.mallet_path:
return
tm1 = ldamallet.LdaMallet(self.mallet_path, corpus=corpus, num_topics=2, id2word=dictionary)
tm2 = ldamallet.malletmodel2ldamodel(tm1)
# set num_topics=-1 to exclude random influence
self.assertEqual(tm1.show_topics(-1, 10), tm2.show_topics(-1, 10))
for document in corpus:
element1_1, element1_2 = tm1[document][0]
element2_1, element2_2 = tm2[document][0]
self.assertAlmostEqual(element1_1, element2_1)
self.assertAlmostEqual(element1_2, element2_2, 1)
element1_1, element1_2 = tm1[document][1]
element2_1, element2_2 = tm2[document][1]
self.assertAlmostEqual(element1_1, element2_1)
self.assertAlmostEqual(element1_2, element2_2, 1)
logging.debug('%d %d', element1_1, element2_1)
logging.debug('%d %d', element1_2, element2_2)
logging.debug('%s %s', tm1[document][1], tm2[document][1])
def testMallet2ModelOn20NewsGroups(self):
corpus = [simple_preprocess(doc["data"]) for doc in api.load("20-newsgroups")]
dictionary = Dictionary(corpus)
corpus = [dictionary.doc2bow(text) for text in corpus]
lda_mallet_model = ldamallet.LdaMallet(
self.mallet_path, corpus=corpus,
num_topics=20, id2word=dictionary, iterations=500)
lda_gensim_model = ldamallet.malletmodel2ldamodel(lda_mallet_model, iterations=1000)
self.assertEqual(lda_mallet_model.show_topics(20, 50), lda_gensim_model.show_topics(20, 50))
def testPersistence(self):
if not self.mallet_path:
return
fname = get_tmpfile('gensim_models_lda_mallet.tst')
model = ldamallet.LdaMallet(self.mallet_path, self.corpus, num_topics=2, iterations=100)
model.save(fname)
model2 = ldamallet.LdaMallet.load(fname)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.word_topics, model2.word_topics))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testPersistenceCompressed(self):
if not self.mallet_path:
return
fname = get_tmpfile('gensim_models_lda_mallet.tst.gz')
model = ldamallet.LdaMallet(self.mallet_path, self.corpus, num_topics=2, iterations=100)
model.save(fname)
model2 = ldamallet.LdaMallet.load(fname, mmap=None)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.word_topics, model2.word_topics))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmap(self):
if not self.mallet_path:
return
fname = get_tmpfile('gensim_models_lda_mallet.tst')
model = ldamallet.LdaMallet(self.mallet_path, self.corpus, num_topics=2, iterations=100)
# simulate storing large arrays separately
model.save(fname, sep_limit=0)
# test loading the large model arrays with mmap
model2 = ldamodel.LdaModel.load(fname, mmap='r')
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(isinstance(model2.word_topics, np.memmap))
self.assertTrue(np.allclose(model.word_topics, model2.word_topics))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmapCompressed(self):
if not self.mallet_path:
return
fname = get_tmpfile('gensim_models_lda_mallet.tst.gz')
model = ldamallet.LdaMallet(self.mallet_path, self.corpus, num_topics=2, iterations=100)
# simulate storing large arrays separately
model.save(fname, sep_limit=0)
# test loading the large model arrays with mmap
self.assertRaises(IOError, ldamodel.LdaModel.load, fname, mmap='r')
def test_random_seed(self):
if not self.mallet_path:
return
# test that 2 models created with the same random_seed are equal in their topics treatment
SEED = 10
NUM_TOPICS = 10
ITER = 500
tm1 = ldamallet.LdaMallet(
self.mallet_path,
corpus=corpus,
num_topics=NUM_TOPICS,
id2word=dictionary,
random_seed=SEED,
iterations=ITER,
)
tm2 = ldamallet.LdaMallet(
self.mallet_path,
corpus=corpus,
num_topics=NUM_TOPICS,
id2word=dictionary,
random_seed=SEED,
iterations=ITER,
)
self.assertTrue(np.allclose(tm1.word_topics, tm2.word_topics))
for doc in corpus:
tm1_vector = matutils.sparse2full(tm1[doc], NUM_TOPICS)
tm2_vector = matutils.sparse2full(tm2[doc], NUM_TOPICS)
self.assertTrue(np.allclose(tm1_vector, tm2_vector))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
import errno
import math
import select as __select__
import sys
from numbers import Integral
from . import fileno
from .compat import detect_environment
__all__ = ('poll',)
_selectf = __select__.select
_selecterr = __select__.error
xpoll = getattr(__select__, 'poll', None)
epoll = getattr(__select__, 'epoll', None)
kqueue = getattr(__select__, 'kqueue', None)
kevent = getattr(__select__, 'kevent', None)
KQ_EV_ADD = getattr(__select__, 'KQ_EV_ADD', 1)
KQ_EV_DELETE = getattr(__select__, 'KQ_EV_DELETE', 2)
KQ_EV_ENABLE = getattr(__select__, 'KQ_EV_ENABLE', 4)
KQ_EV_CLEAR = getattr(__select__, 'KQ_EV_CLEAR', 32)
KQ_EV_ERROR = getattr(__select__, 'KQ_EV_ERROR', 16384)
KQ_EV_EOF = getattr(__select__, 'KQ_EV_EOF', 32768)
KQ_FILTER_READ = getattr(__select__, 'KQ_FILTER_READ', -1)
KQ_FILTER_WRITE = getattr(__select__, 'KQ_FILTER_WRITE', -2)
KQ_FILTER_AIO = getattr(__select__, 'KQ_FILTER_AIO', -3)
KQ_FILTER_VNODE = getattr(__select__, 'KQ_FILTER_VNODE', -4)
KQ_FILTER_PROC = getattr(__select__, 'KQ_FILTER_PROC', -5)
KQ_FILTER_SIGNAL = getattr(__select__, 'KQ_FILTER_SIGNAL', -6)
KQ_FILTER_TIMER = getattr(__select__, 'KQ_FILTER_TIMER', -7)
KQ_NOTE_LOWAT = getattr(__select__, 'KQ_NOTE_LOWAT', 1)
KQ_NOTE_DELETE = getattr(__select__, 'KQ_NOTE_DELETE', 1)
KQ_NOTE_WRITE = getattr(__select__, 'KQ_NOTE_WRITE', 2)
KQ_NOTE_EXTEND = getattr(__select__, 'KQ_NOTE_EXTEND', 4)
KQ_NOTE_ATTRIB = getattr(__select__, 'KQ_NOTE_ATTRIB', 8)
KQ_NOTE_LINK = getattr(__select__, 'KQ_NOTE_LINK', 16)
KQ_NOTE_RENAME = getattr(__select__, 'KQ_NOTE_RENAME', 32)
KQ_NOTE_REVOKE = getattr(__select__, 'KQ_NOTE_REVOKE', 64)
POLLIN = getattr(__select__, 'POLLIN', 1)
POLLOUT = getattr(__select__, 'POLLOUT', 4)
POLLERR = getattr(__select__, 'POLLERR', 8)
POLLHUP = getattr(__select__, 'POLLHUP', 16)
POLLNVAL = getattr(__select__, 'POLLNVAL', 32)
READ = POLL_READ = 0x001
WRITE = POLL_WRITE = 0x004
ERR = POLL_ERR = 0x008 | 0x010
try:
SELECT_BAD_FD = {errno.EBADF, errno.WSAENOTSOCK}
except AttributeError:
SELECT_BAD_FD = {errno.EBADF}
class _epoll:
def __init__(self):
self._epoll = epoll()
def register(self, fd, events):
try:
self._epoll.register(fd, events)
except Exception as exc:
if getattr(exc, 'errno', None) != errno.EEXIST:
raise
return fd
def unregister(self, fd):
try:
self._epoll.unregister(fd)
        except (ValueError, KeyError, TypeError):
            pass
        except OSError as exc:
            # tolerate fds that were never registered or are already gone
            if getattr(exc, 'errno', None) not in (errno.ENOENT, errno.EPERM):
                raise
def poll(self, timeout):
try:
return self._epoll.poll(timeout if timeout is not None else -1)
except Exception as exc:
if getattr(exc, 'errno', None) != errno.EINTR:
raise
def close(self):
self._epoll.close()
class _kqueue:
w_fflags = (KQ_NOTE_WRITE | KQ_NOTE_EXTEND |
KQ_NOTE_ATTRIB | KQ_NOTE_DELETE)
def __init__(self):
self._kqueue = kqueue()
self._active = {}
self.on_file_change = None
self._kcontrol = self._kqueue.control
def register(self, fd, events):
self._control(fd, events, KQ_EV_ADD)
self._active[fd] = events
return fd
def unregister(self, fd):
events = self._active.pop(fd, None)
if events:
try:
self._control(fd, events, KQ_EV_DELETE)
except OSError:
pass
def watch_file(self, fd):
ev = kevent(fd,
filter=KQ_FILTER_VNODE,
flags=KQ_EV_ADD | KQ_EV_ENABLE | KQ_EV_CLEAR,
fflags=self.w_fflags)
self._kcontrol([ev], 0)
def unwatch_file(self, fd):
ev = kevent(fd,
filter=KQ_FILTER_VNODE,
flags=KQ_EV_DELETE,
fflags=self.w_fflags)
self._kcontrol([ev], 0)
def _control(self, fd, events, flags):
if not events:
return
kevents = []
if events & WRITE:
kevents.append(kevent(fd,
filter=KQ_FILTER_WRITE,
flags=flags))
if not kevents or events & READ:
kevents.append(
kevent(fd, filter=KQ_FILTER_READ, flags=flags),
)
control = self._kcontrol
for e in kevents:
try:
control([e], 0)
except ValueError:
pass
def poll(self, timeout):
try:
kevents = self._kcontrol(None, 1000, timeout)
except Exception as exc:
if getattr(exc, 'errno', None) == errno.EINTR:
return
raise
events, file_changes = {}, []
for k in kevents:
fd = k.ident
if k.filter == KQ_FILTER_READ:
events[fd] = events.get(fd, 0) | READ
elif k.filter == KQ_FILTER_WRITE:
if k.flags & KQ_EV_EOF:
events[fd] = ERR
else:
events[fd] = events.get(fd, 0) | WRITE
elif k.filter == KQ_EV_ERROR:
events[fd] = events.get(fd, 0) | ERR
elif k.filter == KQ_FILTER_VNODE:
if k.fflags & KQ_NOTE_DELETE:
self.unregister(fd)
file_changes.append(k)
if file_changes:
self.on_file_change(file_changes)
return list(events.items())
def close(self):
self._kqueue.close()
class _poll:
def __init__(self):
self._poller = xpoll()
self._quick_poll = self._poller.poll
self._quick_register = self._poller.register
self._quick_unregister = self._poller.unregister
def register(self, fd, events):
fd = fileno(fd)
poll_flags = 0
if events & ERR:
poll_flags |= POLLERR
if events & WRITE:
poll_flags |= POLLOUT
if events & READ:
poll_flags |= POLLIN
self._quick_register(fd, poll_flags)
return fd
def unregister(self, fd):
try:
fd = fileno(fd)
except OSError as exc:
# we don't know the previous fd of this object
# but it will be removed by the next poll iteration.
if getattr(exc, 'errno', None) in SELECT_BAD_FD:
return fd
raise
self._quick_unregister(fd)
return fd
def poll(self, timeout, round=math.ceil,
POLLIN=POLLIN, POLLOUT=POLLOUT, POLLERR=POLLERR,
READ=READ, WRITE=WRITE, ERR=ERR, Integral=Integral):
timeout = 0 if timeout and timeout < 0 else round((timeout or 0) * 1e3)
try:
event_list = self._quick_poll(timeout)
except (_selecterr, OSError) as exc:
if getattr(exc, 'errno', None) == errno.EINTR:
return
raise
ready = []
for fd, event in event_list:
events = 0
if event & POLLIN:
events |= READ
if event & POLLOUT:
events |= WRITE
if event & POLLERR or event & POLLNVAL or event & POLLHUP:
events |= ERR
assert events
if not isinstance(fd, Integral):
fd = fd.fileno()
ready.append((fd, events))
return ready
def close(self):
self._poller = None
class _select:
def __init__(self):
self._all = (self._rfd,
self._wfd,
self._efd) = set(), set(), set()
def register(self, fd, events):
fd = fileno(fd)
if events & ERR:
self._efd.add(fd)
if events & WRITE:
self._wfd.add(fd)
if events & READ:
self._rfd.add(fd)
return fd
def _remove_bad(self):
for fd in self._rfd | self._wfd | self._efd:
try:
_selectf([fd], [], [], 0)
except (_selecterr, OSError) as exc:
if getattr(exc, 'errno', None) in SELECT_BAD_FD:
self.unregister(fd)
def unregister(self, fd):
try:
fd = fileno(fd)
except OSError as exc:
# we don't know the previous fd of this object
# but it will be removed by the next poll iteration.
if getattr(exc, 'errno', None) in SELECT_BAD_FD:
return
raise
self._rfd.discard(fd)
self._wfd.discard(fd)
self._efd.discard(fd)
def poll(self, timeout):
try:
read, write, error = _selectf(
self._rfd, self._wfd, self._efd, timeout,
)
except (_selecterr, OSError) as exc:
if getattr(exc, 'errno', None) == errno.EINTR:
return
elif getattr(exc, 'errno', None) in SELECT_BAD_FD:
return self._remove_bad()
raise
events = {}
for fd in read:
if not isinstance(fd, Integral):
fd = fd.fileno()
events[fd] = events.get(fd, 0) | READ
for fd in write:
if not isinstance(fd, Integral):
fd = fd.fileno()
events[fd] = events.get(fd, 0) | WRITE
for fd in error:
if not isinstance(fd, Integral):
fd = fd.fileno()
events[fd] = events.get(fd, 0) | ERR
return list(events.items())
def close(self):
self._rfd.clear()
self._wfd.clear()
self._efd.clear()
def _get_poller():
if detect_environment() != 'default':
# greenlet
return _select
elif epoll:
# Py2.6+ Linux
return _epoll
elif kqueue and 'netbsd' in sys.platform:
return _kqueue
elif xpoll:
return _poll
else:
return _select
def poll(*args, **kwargs):
"""Create new poller instance."""
return _get_poller()(*args, **kwargs)
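# Illustrative usage sketch (not part of the original module): wait for a file-like
# object to become readable using whichever poller backend _get_poller() selected.
# ``source`` is an assumption -- any object accepted by ``fileno()`` (e.g. a socket).
def _example_wait_readable(source, timeout=1.0):
    poller = poll()
    poller.register(source, READ | ERR)
    try:
        # poll() may return None when interrupted (EINTR), hence the ``or []``.
        for fd, events in poller.poll(timeout) or []:
            if events & READ:
                return fd
        return None
    finally:
        poller.unregister(source)
        poller.close()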
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:radio"
URL = "http://decibel.logitechmusic.com/jsonrpc.js"
SUPPORT_UE_SMART_RADIO = (
SUPPORT_PLAY
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
)
PLAYBACK_DICT = {"play": STATE_PLAYING, "pause": STATE_PAUSED, "stop": STATE_IDLE}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def send_request(payload, session):
"""Send request to radio."""
try:
request = requests.post(
URL,
cookies={"sdi_squeezenetwork_session": session},
json=payload,
timeout=5,
)
except requests.exceptions.Timeout:
_LOGGER.error("Timed out when sending request")
except requests.exceptions.ConnectionError:
_LOGGER.error("An error occurred while connecting")
else:
return request.json()
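# Illustrative note (derived from the calls below, not an additional API): payloads
# follow the Squeezebox-style JSON-RPC convention -- a player id (or "" for
# server-level queries) followed by a command list, for example:
#     send_request({"params": ["", ["serverstatus"]]}, session)
#     send_request({"method": "slim.request", "params": [player_id, ["mixer", "volume", 50]]}, session)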
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Logitech UE Smart Radio platform."""
email = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
session_request = requests.post(
"https://www.uesmartradio.com/user/login",
data={"email": email, "password": password},
timeout=5,
)
session = session_request.cookies["sdi_squeezenetwork_session"]
player_request = send_request({"params": ["", ["serverstatus"]]}, session)
players = [
UERadioDevice(session, player["playerid"], player["name"])
for player in player_request["result"]["players_loop"]
]
add_entities(players)
class UERadioDevice(MediaPlayerEntity):
"""Representation of a Logitech UE Smart Radio device."""
def __init__(self, session, player_id, player_name):
"""Initialize the Logitech UE Smart Radio device."""
self._session = session
self._player_id = player_id
self._name = player_name
self._state = None
self._volume = 0
self._last_volume = 0
self._media_title = None
self._media_artist = None
self._media_artwork_url = None
def send_command(self, command):
"""Send command to radio."""
send_request(
{"method": "slim.request", "params": [self._player_id, command]},
self._session,
)
def update(self):
"""Get the latest details from the device."""
request = send_request(
{
"method": "slim.request",
"params": [
self._player_id,
["status", "-", 1, "tags:cgABbehldiqtyrSuoKLN"],
],
},
self._session,
)
if request["error"] is not None:
self._state = None
return
if request["result"]["power"] == 0:
self._state = STATE_OFF
else:
self._state = PLAYBACK_DICT[request["result"]["mode"]]
media_info = request["result"]["playlist_loop"][0]
self._volume = request["result"]["mixer volume"] / 100
self._media_artwork_url = media_info["artwork_url"]
self._media_title = media_info["title"]
if "artist" in media_info:
self._media_artist = media_info["artist"]
else:
self._media_artist = media_info.get("remote_title")
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._volume <= 0
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def supported_features(self):
"""Flag of features that are supported."""
return SUPPORT_UE_SMART_RADIO
@property
def media_content_type(self):
"""Return the media content type."""
return MEDIA_TYPE_MUSIC
@property
def media_image_url(self):
"""Image URL of current playing media."""
return self._media_artwork_url
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self._media_artist
@property
def media_title(self):
"""Title of current playing media."""
return self._media_title
def turn_on(self):
"""Turn on specified media player or all."""
self.send_command(["power", 1])
def turn_off(self):
"""Turn off specified media player or all."""
self.send_command(["power", 0])
def media_play(self):
"""Send the media player the command for play/pause."""
self.send_command(["play"])
def media_pause(self):
"""Send the media player the command for pause."""
self.send_command(["pause"])
def media_stop(self):
"""Send the media player the stop command."""
self.send_command(["stop"])
def media_previous_track(self):
"""Send the media player the command for prev track."""
self.send_command(["button", "rew"])
def media_next_track(self):
"""Send the media player the command for next track."""
self.send_command(["button", "fwd"])
def mute_volume(self, mute):
"""Send mute command."""
if mute:
self._last_volume = self._volume
self.send_command(["mixer", "volume", 0])
else:
self.send_command(["mixer", "volume", self._last_volume * 100])
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.send_command(["mixer", "volume", volume * 100])
|
import sys
from vine import Thenable, promise, maybe_promise
from kombu.exceptions import HttpError
from kombu.utils.compat import coro
from kombu.utils.encoding import bytes_to_str
from kombu.utils.functional import maybe_list, memoize
try: # pragma: no cover
from http.client import responses
except ImportError:
from httplib import responses # noqa
__all__ = ('Headers', 'Response', 'Request')
PYPY = hasattr(sys, 'pypy_version_info')
@memoize(maxsize=1000)
def normalize_header(key):
return '-'.join(p.capitalize() for p in key.split('-'))
class Headers(dict):
"""Represents a mapping of HTTP headers."""
# TODO: This is just a regular dict and will not perform normalization
# when looking up keys etc.
#: Set when all of the headers have been read.
complete = False
#: Internal attribute used to keep track of continuation lines.
_prev_key = None
@Thenable.register
class Request:
"""A HTTP Request.
Arguments:
url (str): The URL to request.
method (str): The HTTP method to use (defaults to ``GET``).
Keyword Arguments:
headers (Dict, ~kombu.asynchronous.http.Headers): Optional headers for
this request
body (str): Optional body for this request.
connect_timeout (float): Connection timeout in float seconds
Default is 30.0.
timeout (float): Time in float seconds before the request times out
Default is 30.0.
follow_redirects (bool): Specify if the client should follow redirects
Enabled by default.
max_redirects (int): Maximum number of redirects (default 6).
use_gzip (bool): Allow the server to use gzip compression.
Enabled by default.
validate_cert (bool): Set to true if the server certificate should be
verified when performing ``https://`` requests.
Enabled by default.
auth_username (str): Username for HTTP authentication.
auth_password (str): Password for HTTP authentication.
auth_mode (str): Type of HTTP authentication (``basic`` or ``digest``).
user_agent (str): Custom user agent for this request.
        network_interface (str): Network interface to use for this request.
on_ready (Callable): Callback to be called when the response has been
received. Must accept single ``response`` argument.
on_stream (Callable): Optional callback to be called every time body
content has been read from the socket. If specified then the
response body and buffer attributes will not be available.
on_timeout (callable): Optional callback to be called if the request
times out.
on_header (Callable): Optional callback to be called for every header
line received from the server. The signature
is ``(headers, line)`` and note that if you want
``response.headers`` to be populated then your callback needs to
also call ``client.on_header(headers, line)``.
on_prepare (Callable): Optional callback that is implementation
specific (e.g. curl client will pass the ``curl`` instance to
this callback).
proxy_host (str): Optional proxy host. Note that a ``proxy_port`` must
also be provided or a :exc:`ValueError` will be raised.
proxy_username (str): Optional username to use when logging in
to the proxy.
proxy_password (str): Optional password to use when authenticating
with the proxy server.
ca_certs (str): Custom CA certificates file to use.
client_key (str): Optional filename for client SSL key.
client_cert (str): Optional filename for client SSL certificate.
"""
body = user_agent = network_interface = \
auth_username = auth_password = auth_mode = \
proxy_host = proxy_port = proxy_username = proxy_password = \
ca_certs = client_key = client_cert = None
connect_timeout = 30.0
request_timeout = 30.0
follow_redirects = True
max_redirects = 6
use_gzip = True
validate_cert = True
if not PYPY: # pragma: no cover
__slots__ = ('url', 'method', 'on_ready', 'on_timeout', 'on_stream',
'on_prepare', 'on_header', 'headers',
'__weakref__', '__dict__')
def __init__(self, url, method='GET', on_ready=None, on_timeout=None,
on_stream=None, on_prepare=None, on_header=None,
headers=None, **kwargs):
self.url = url
self.method = method or self.method
self.on_ready = maybe_promise(on_ready) or promise()
self.on_timeout = maybe_promise(on_timeout)
self.on_stream = maybe_promise(on_stream)
self.on_prepare = maybe_promise(on_prepare)
self.on_header = maybe_promise(on_header)
if kwargs:
for k, v in kwargs.items():
setattr(self, k, v)
if not isinstance(headers, Headers):
headers = Headers(headers or {})
self.headers = headers
def then(self, callback, errback=None):
self.on_ready.then(callback, errback)
def __repr__(self):
return '<Request: {0.method} {0.url} {0.body}>'.format(self)
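# Illustrative usage sketch (not part of the original module): constructing a Request
# and chaining a callback on its promise. ``client`` is an assumption -- any concrete
# BaseClient subclass works; the URL and header are placeholders.
def _example_get(client, url='https://example.com/api'):
    def on_ready(response):
        response.raise_for_error()          # raises HttpError for non-2xx codes
        return bytes_to_str(response.body)  # whole body, read from the buffer
    request = Request(url, method='GET', headers={'Accept': 'application/json'})
    request.then(on_ready)                  # resolved through request.on_ready
    client.perform(request)
    return request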
class Response:
"""HTTP Response.
Arguments:
request (~kombu.asynchronous.http.Request): See :attr:`request`.
code (int): See :attr:`code`.
headers (~kombu.asynchronous.http.Headers): See :attr:`headers`.
buffer (bytes): See :attr:`buffer`
effective_url (str): See :attr:`effective_url`.
status (str): See :attr:`status`.
Attributes:
request (~kombu.asynchronous.http.Request): object used to
get this response.
code (int): HTTP response code (e.g. 200, 404, or 500).
headers (~kombu.asynchronous.http.Headers): HTTP headers
for this response.
buffer (bytes): Socket read buffer.
effective_url (str): The destination url for this request after
following redirects.
error (Exception): Error instance if the request resulted in
a HTTP error code.
status (str): Human equivalent of :attr:`code`,
            e.g. ``OK``, ``Not Found``, or ``Internal Server Error``.
"""
if not PYPY: # pragma: no cover
__slots__ = ('request', 'code', 'headers', 'buffer', 'effective_url',
'error', 'status', '_body', '__weakref__')
def __init__(self, request, code, headers=None, buffer=None,
effective_url=None, error=None, status=None):
self.request = request
self.code = code
self.headers = headers if headers is not None else Headers()
self.buffer = buffer
self.effective_url = effective_url or request.url
self._body = None
self.status = status or responses.get(self.code, 'Unknown')
self.error = error
if self.error is None and (self.code < 200 or self.code > 299):
self.error = HttpError(self.code, self.status, self)
def raise_for_error(self):
"""Raise if the request resulted in an HTTP error code.
Raises:
:class:`~kombu.exceptions.HttpError`
"""
if self.error:
raise self.error
@property
def body(self):
"""The full contents of the response body.
Note:
Accessing this property will evaluate the buffer
and subsequent accesses will be cached.
"""
if self._body is None:
if self.buffer is not None:
self._body = self.buffer.getvalue()
return self._body
# these are for compatibility with Requests
@property
def status_code(self):
return self.code
@property
def content(self):
return self.body
@coro
def header_parser(keyt=normalize_header):
while 1:
(line, headers) = yield
if line.startswith('HTTP/'):
continue
elif not line:
headers.complete = True
continue
elif line[0].isspace():
pkey = headers._prev_key
headers[pkey] = ' '.join([headers.get(pkey) or '', line.lstrip()])
else:
key, value = line.split(':', 1)
key = headers._prev_key = keyt(key)
headers[key] = value.strip()
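# Illustrative sketch (not part of the original module): feeding raw header lines
# through the coroutine above, the way BaseClient.on_header() does. Header names are
# normalized and continuation lines (leading whitespace) are folded into the
# previous header; an empty line marks the headers as complete.
def _example_parse_headers():
    headers = Headers()
    parser = header_parser()
    for line in ('HTTP/1.1 200 OK',
                 'Content-Type: application/json',
                 'X-Long-Header: first part',
                 '    second part',
                 ''):
        parser.send((line, headers))
    # headers == {'Content-Type': 'application/json',
    #             'X-Long-Header': 'first part second part'} and headers.complete is True
    return headers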
class BaseClient:
Headers = Headers
Request = Request
Response = Response
def __init__(self, hub, **kwargs):
self.hub = hub
self._header_parser = header_parser()
def perform(self, request, **kwargs):
for req in maybe_list(request) or []:
if not isinstance(req, self.Request):
req = self.Request(req, **kwargs)
self.add_request(req)
def add_request(self, request):
raise NotImplementedError('must implement add_request')
def close(self):
pass
def on_header(self, headers, line):
try:
self._header_parser.send((bytes_to_str(line), headers))
except StopIteration:
self._header_parser = header_parser()
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
|
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.urls import reverse
from weblate.trans.models import Component, Project
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.site import get_site_url
SUPPORTED_VCS = {
"git",
"gerrit",
"github",
"gitlab",
"pagure",
"subversion",
"local",
"git-force-push",
}
def get_export_url(component):
"""Return Git export URL for component."""
return get_site_url(
reverse(
"git-export",
kwargs={
"project": component.project.slug,
"component": component.slug,
"path": "",
},
)
)
@receiver(pre_save, sender=Component)
@disable_for_loaddata
def save_component(sender, instance, **kwargs):
if not instance.is_repo_link and instance.vcs in SUPPORTED_VCS:
instance.git_export = get_export_url(instance)
@receiver(post_save, sender=Project)
@disable_for_loaddata
def save_project(sender, instance, **kwargs):
for component in instance.component_set.iterator():
if not component.is_repo_link and component.vcs in SUPPORTED_VCS:
new_url = get_export_url(component)
if component.git_export != new_url:
component.git_export = new_url
component.save(update_fields=["git_export"])
|
import asyncio
from os import path
import httpx
import respx
from homeassistant import config as hass_config
import homeassistant.components.sensor as sensor
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONTENT_TYPE_JSON,
DATA_MEGABYTES,
SERVICE_RELOAD,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_setup_missing_config(hass):
"""Test setup with configuration missing required entries."""
assert await async_setup_component(
hass, sensor.DOMAIN, {"sensor": {"platform": "rest"}}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
async def test_setup_missing_schema(hass):
"""Test setup with resource missing schema."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{"sensor": {"platform": "rest", "resource": "localhost", "method": "GET"}},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_failed_connect(hass):
"""Test setup when connection error occurs."""
respx.get(
"http://localhost", content=httpx.RequestError(message="any", request=Mock())
)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_timeout(hass):
"""Test setup when connection timeout occurs."""
respx.get("http://localhost", content=asyncio.TimeoutError())
assert await async_setup_component(
hass,
sensor.DOMAIN,
{"sensor": {"platform": "rest", "resource": "localhost", "method": "GET"}},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_minimum(hass):
"""Test setup with minimum configuration."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_minimum_resource_template(hass):
"""Test setup with minimum configuration (resource_template)."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "rest",
"resource_template": "http://localhost",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_duplicate_resource_template(hass):
"""Test setup with duplicate resources."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"resource_template": "http://localhost",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_get(hass):
"""Test setup with valid configuration."""
respx.get("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"authentication": "basic",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_get_digest_auth(hass):
"""Test setup with valid configuration."""
respx.get("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"authentication": "digest",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_post(hass):
"""Test setup with valid configuration."""
respx.post("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "POST",
"value_template": "{{ value_json.key }}",
"payload": '{ "device": "toaster"}',
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"authentication": "basic",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_get_xml(hass):
"""Test setup with valid xml configuration."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/xml"},
content="<dog>abc</dog>",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.dog }}",
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "abc"
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == DATA_MEGABYTES
@respx.mock
async def test_update_with_json_attrs(hass):
"""Test attributes get extracted from a JSON result."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content='{ "key": "some_json_value" }',
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "some_json_value"
assert state.attributes["key"] == "some_json_value"
@respx.mock
async def test_update_with_no_template(hass):
"""Test update when there is no value template."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content='{ "key": "some_json_value" }',
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"headers": {"Accept": "text/xml"},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == '{ "key": "some_json_value" }'
@respx.mock
async def test_update_with_json_attrs_no_data(hass, caplog):
"""Test attributes when no JSON result fetched."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content="",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"headers": {"Accept": "text/xml"},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == STATE_UNKNOWN
assert state.attributes == {"unit_of_measurement": "MB", "friendly_name": "foo"}
assert "Empty reply" in caplog.text
@respx.mock
async def test_update_with_json_attrs_not_dict(hass, caplog):
"""Test attributes get extracted from a JSON result."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content='["list", "of", "things"]',
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"headers": {"Accept": "text/xml"},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == ""
assert state.attributes == {"unit_of_measurement": "MB", "friendly_name": "foo"}
assert "not a dictionary or list" in caplog.text
@respx.mock
async def test_update_with_json_attrs_bad_JSON(hass, caplog):
"""Test attributes get extracted from a JSON result."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content="This is text rather than JSON data.",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"headers": {"Accept": "text/xml"},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == STATE_UNKNOWN
assert state.attributes == {"unit_of_measurement": "MB", "friendly_name": "foo"}
assert "Erroneous JSON" in caplog.text
@respx.mock
async def test_update_with_json_attrs_with_json_attrs_path(hass):
"""Test attributes get extracted from a JSON result with a template for the attributes."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": CONTENT_TYPE_JSON},
content='{ "toplevel": {"master_value": "master", "second_level": {"some_json_key": "some_json_value", "some_json_key2": "some_json_value2" } } }',
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.toplevel.master_value }}",
"json_attributes_path": "$.toplevel.second_level",
"json_attributes": ["some_json_key", "some_json_key2"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
"headers": {"Accept": "text/xml"},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "master"
assert state.attributes["some_json_key"] == "some_json_value"
assert state.attributes["some_json_key2"] == "some_json_value2"
@respx.mock
async def test_update_with_xml_convert_json_attrs_with_json_attrs_path(hass):
"""Test attributes get extracted from a JSON result that was converted from XML with a template for the attributes."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/xml"},
content="<toplevel><master_value>master</master_value><second_level><some_json_key>some_json_value</some_json_key><some_json_key2>some_json_value2</some_json_key2></second_level></toplevel>",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.toplevel.master_value }}",
"json_attributes_path": "$.toplevel.second_level",
"json_attributes": ["some_json_key", "some_json_key2"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "master"
assert state.attributes["some_json_key"] == "some_json_value"
assert state.attributes["some_json_key2"] == "some_json_value2"
@respx.mock
async def test_update_with_xml_convert_json_attrs_with_jsonattr_template(hass):
"""Test attributes get extracted from a JSON result that was converted from XML."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/xml"},
content='<?xml version="1.0" encoding="utf-8"?><response><scan>0</scan><ver>12556</ver><count>48</count><ssid>alexander</ssid><bss><valid>0</valid><name>0</name><privacy>0</privacy><wlan>bogus</wlan><strength>0</strength></bss><led0>0</led0><led1>0</led1><led2>0</led2><led3>0</led3><led4>0</led4><led5>0</led5><led6>0</led6><led7>0</led7><btn0>up</btn0><btn1>up</btn1><btn2>up</btn2><btn3>up</btn3><pot0>0</pot0><usr0>0</usr0><temp0>0x0XF0x0XF</temp0><time0> 0</time0></response>',
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.response.bss.wlan }}",
"json_attributes_path": "$.response",
"json_attributes": ["led0", "led1", "temp0", "time0", "ver"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "bogus"
assert state.attributes["led0"] == "0"
assert state.attributes["led1"] == "0"
assert state.attributes["temp0"] == "0x0XF0x0XF"
assert state.attributes["time0"] == "0"
assert state.attributes["ver"] == "12556"
@respx.mock
async def test_update_with_application_xml_convert_json_attrs_with_jsonattr_template(
hass,
):
"""Test attributes get extracted from a JSON result that was converted from XML with application/xml mime type."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "application/xml"},
content="<main><dog>1</dog><cat>3</cat></main>",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.main.dog }}",
"json_attributes_path": "$.main",
"json_attributes": ["dog", "cat"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == "1"
assert state.attributes["dog"] == "1"
assert state.attributes["cat"] == "3"
@respx.mock
async def test_update_with_xml_convert_bad_xml(hass, caplog):
"""Test attributes get extracted from a XML result with bad xml."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/xml"},
content="",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.toplevel.master_value }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == STATE_UNKNOWN
assert "Erroneous XML" in caplog.text
assert "Empty reply" in caplog.text
@respx.mock
async def test_update_with_failed_get(hass, caplog):
"""Test attributes get extracted from a XML result with bad xml."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/xml"},
content="",
)
assert await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.toplevel.master_value }}",
"json_attributes": ["key"],
"name": "foo",
"unit_of_measurement": DATA_MEGABYTES,
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("sensor.foo")
assert state.state == STATE_UNKNOWN
assert "Erroneous XML" in caplog.text
assert "Empty reply" in caplog.text
@respx.mock
async def test_reload(hass):
"""Verify we can reload reset sensors."""
respx.get("http://localhost", status_code=200)
await async_setup_component(
hass,
"sensor",
{
"sensor": {
"platform": "rest",
"method": "GET",
"name": "mockrest",
"resource": "http://localhost",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("sensor.mockrest")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"rest/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
"rest",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("sensor.mockreset") is None
assert hass.states.get("sensor.rollout")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
from functools import partial
import logging
from urllib.request import URLError
from panasonic_viera import TV_TYPE_ENCRYPTED, RemoteControl, SOAPError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PIN, CONF_PORT
from .const import ( # pylint: disable=unused-import
ATTR_DEVICE_INFO,
ATTR_FRIENDLY_NAME,
ATTR_UDN,
CONF_APP_ID,
CONF_ENCRYPTION_KEY,
CONF_ON_ACTION,
DEFAULT_NAME,
DEFAULT_PORT,
DOMAIN,
ERROR_INVALID_PIN_CODE,
)
_LOGGER = logging.getLogger(__name__)
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for Panasonic Viera."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize the Panasonic Viera config flow."""
self._data = {
CONF_HOST: None,
CONF_NAME: None,
CONF_PORT: None,
CONF_ON_ACTION: None,
ATTR_DEVICE_INFO: None,
}
self._remote = None
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
await self.async_load_data(user_input)
try:
self._remote = await self.hass.async_add_executor_job(
partial(RemoteControl, self._data[CONF_HOST], self._data[CONF_PORT])
)
self._data[ATTR_DEVICE_INFO] = await self.hass.async_add_executor_job(
self._remote.get_device_info
)
except (TimeoutError, URLError, SOAPError, OSError) as err:
_LOGGER.error("Could not establish remote connection: %s", err)
errors["base"] = "cannot_connect"
except Exception as err: # pylint: disable=broad-except
_LOGGER.exception("An unknown error occurred: %s", err)
return self.async_abort(reason="unknown")
if "base" not in errors:
await self.async_set_unique_id(self._data[ATTR_DEVICE_INFO][ATTR_UDN])
self._abort_if_unique_id_configured()
if self._data[CONF_NAME] == DEFAULT_NAME:
self._data[CONF_NAME] = self._data[ATTR_DEVICE_INFO][
ATTR_FRIENDLY_NAME
].replace("_", " ")
if self._remote.type == TV_TYPE_ENCRYPTED:
return await self.async_step_pairing()
return self.async_create_entry(
title=self._data[CONF_NAME],
data=self._data,
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_HOST,
default=self._data[CONF_HOST]
if self._data[CONF_HOST] is not None
else "",
): str,
vol.Optional(
CONF_NAME,
default=self._data[CONF_NAME]
if self._data[CONF_NAME] is not None
else DEFAULT_NAME,
): str,
}
),
errors=errors,
)
async def async_step_pairing(self, user_input=None):
"""Handle the pairing step."""
errors = {}
if user_input is not None:
pin = user_input[CONF_PIN]
try:
await self.hass.async_add_executor_job(
partial(self._remote.authorize_pin_code, pincode=pin)
)
except SOAPError as err:
_LOGGER.error("Invalid PIN code: %s", err)
errors["base"] = ERROR_INVALID_PIN_CODE
except (TimeoutError, URLError, OSError) as err:
_LOGGER.error("The remote connection was lost: %s", err)
return self.async_abort(reason="cannot_connect")
except Exception as err: # pylint: disable=broad-except
_LOGGER.exception("Unknown error: %s", err)
return self.async_abort(reason="unknown")
if "base" not in errors:
encryption_data = {
CONF_APP_ID: self._remote.app_id,
CONF_ENCRYPTION_KEY: self._remote.enc_key,
}
self._data = {**self._data, **encryption_data}
return self.async_create_entry(
title=self._data[CONF_NAME],
data=self._data,
)
try:
await self.hass.async_add_executor_job(
partial(self._remote.request_pin_code, name="Home Assistant")
)
except (TimeoutError, URLError, SOAPError, OSError) as err:
_LOGGER.error("The remote connection was lost: %s", err)
return self.async_abort(reason="cannot_connect")
except Exception as err: # pylint: disable=broad-except
_LOGGER.exception("Unknown error: %s", err)
return self.async_abort(reason="unknown")
return self.async_show_form(
step_id="pairing",
data_schema=vol.Schema({vol.Required(CONF_PIN): str}),
errors=errors,
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(user_input=import_config)
async def async_load_data(self, config):
"""Load the data."""
self._data = config
self._data[CONF_PORT] = (
self._data[CONF_PORT] if CONF_PORT in self._data else DEFAULT_PORT
)
self._data[CONF_ON_ACTION] = (
self._data[CONF_ON_ACTION] if CONF_ON_ACTION in self._data else None
)
await self.async_set_unique_id(self._data[CONF_HOST])
self._abort_if_unique_id_configured()
|
import unittest
from category_encoders import CountEncoder
import pandas as pd
# Requires CountEncoder from the category_encoders library.
class TestCategoryEncoders(unittest.TestCase):
def test_count_encoder(self):
encoder = CountEncoder(cols="data")
data = pd.DataFrame([1, 2, 3, 1, 4, 5, 3, 1], columns=["data"])
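        # CountEncoder replaces each category with its frequency in the fitted
        # data: the value 1 occurs three times, 3 twice, and 2, 4, 5 once each.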
encoded = encoder.fit_transform(data)
self.assertTrue((encoded.data == [3, 1, 2, 3, 1, 1, 2, 3]).all())
|
import datetime
import json
import logging
from absl import flags
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker.linux_packages import aws_credentials
from perfkitbenchmarker.providers import gcp
from perfkitbenchmarker.providers.gcp import gcs
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
flags.DEFINE_string('dpb_dataproc_image_version', None,
'The image version to use for the cluster.')
flags.DEFINE_integer('dpb_dataproc_distcp_num_maps', None,
'Number of maps to copy data.')
SPARK_SAMPLE_LOCATION = ('file:///usr/lib/spark/examples/jars/'
'spark-examples.jar')
TESTDFSIO_JAR_LOCATION = ('file:///usr/lib/hadoop-mapreduce/'
'hadoop-mapreduce-client-jobclient.jar')
TESTDFSIO_PROGRAM = 'TestDFSIO'
disk_to_hdfs_map = {
'pd-standard': 'HDD',
'pd-ssd': 'SSD'
}
class GcpDpbDataproc(dpb_service.BaseDpbService):
"""Object representing a GCP Dataproc cluster.
Attributes:
project: ID of the project.
"""
CLOUD = gcp.CLOUD
SERVICE_TYPE = 'dataproc'
PERSISTENT_FS_PREFIX = 'gs://'
def __init__(self, dpb_service_spec):
super(GcpDpbDataproc, self).__init__(dpb_service_spec)
self.dpb_service_type = GcpDpbDataproc.SERVICE_TYPE
self.project = FLAGS.project
if FLAGS.dpb_dataproc_image_version:
self.dpb_version = FLAGS.dpb_dataproc_image_version
if not self.dpb_service_zone:
raise errors.Setup.InvalidSetupError(
'dpb_service_zone must be provided, for provisioning.')
self.region = self.dpb_service_zone.rsplit('-', 1)[0]
self.storage_service = gcs.GoogleCloudStorageService()
self.storage_service.PrepareService(location=self.region)
@staticmethod
  def _ParseTime(state_time: str) -> datetime.datetime:
"""Parses time from json output.
Args:
      state_time: string. The state start time.
Returns:
Parsed datetime.
"""
try:
return datetime.datetime.strptime(state_time, '%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
return datetime.datetime.strptime(state_time, '%Y-%m-%dT%H:%M:%SZ')
@staticmethod
def CheckPrerequisites(benchmark_config):
del benchmark_config # Unused
def DataprocGcloudCommand(self, *args):
all_args = ('dataproc',) + args
cmd = util.GcloudCommand(self, *all_args)
cmd.flags['region'] = self.region
return cmd
def _Create(self):
"""Creates the cluster."""
cmd = self.DataprocGcloudCommand('clusters', 'create', self.cluster_id)
if self.project is not None:
cmd.flags['project'] = self.project
if self.spec.worker_count:
# The number of worker machines in the cluster
cmd.flags['num-workers'] = self.spec.worker_count
else:
cmd.flags['single-node'] = True
# Initialize applications on the dataproc cluster
if self.spec.applications:
logging.info('Include the requested applications')
cmd.flags['optional-components'] = ','.join(self.spec.applications)
# Enable component gateway for debuggability. Does not impact performance.
cmd.flags['enable-component-gateway'] = True
# TODO(pclay): stop ignoring spec.master_group?
for role in ['worker', 'master']:
# Set machine type
if self.spec.worker_group.vm_spec.machine_type:
self._AddToCmd(cmd, '{0}-machine-type'.format(role),
self.spec.worker_group.vm_spec.machine_type)
# Set boot_disk_size
if self.spec.worker_group.disk_spec.disk_size:
size_in_gb = '{}GB'.format(
str(self.spec.worker_group.disk_spec.disk_size))
self._AddToCmd(cmd, '{0}-boot-disk-size'.format(role), size_in_gb)
# Set boot_disk_type
if self.spec.worker_group.disk_spec.disk_type:
self._AddToCmd(cmd, '{0}-boot-disk-type'.format(role),
self.spec.worker_group.disk_spec.disk_type)
self.dpb_hdfs_type = disk_to_hdfs_map[
self.spec.worker_group.disk_spec.disk_type]
# Set ssd count
if self.spec.worker_group.vm_spec.num_local_ssds:
self._AddToCmd(cmd, 'num-{0}-local-ssds'.format(role),
self.spec.worker_group.vm_spec.num_local_ssds)
# Set zone
cmd.flags['zone'] = self.dpb_service_zone
if self.dpb_version:
cmd.flags['image-version'] = self.dpb_version
if FLAGS.gcp_dataproc_image:
cmd.flags['image'] = FLAGS.gcp_dataproc_image
cmd.flags['metadata'] = util.MakeFormattedDefaultTags()
cmd.flags['labels'] = util.MakeFormattedDefaultTags()
timeout = 900 # 15 min
# TODO(saksena): Retrieve the cluster create time and hold in a var
_, stderr, retcode = cmd.Issue(timeout=timeout, raise_on_failure=False)
if retcode:
util.CheckGcloudResponseKnownFailures(stderr, retcode)
raise errors.Resource.CreationError(stderr)
def _Delete(self):
"""Deletes the cluster."""
cmd = self.DataprocGcloudCommand('clusters', 'delete', self.cluster_id)
cmd.Issue(raise_on_failure=False)
def _Exists(self):
"""Check to see whether the cluster exists."""
cmd = self.DataprocGcloudCommand('clusters', 'describe', self.cluster_id)
_, _, retcode = cmd.Issue(raise_on_failure=False)
return retcode == 0
def SubmitJob(self,
jarfile=None,
classname=None,
pyspark_file=None,
query_file=None,
job_poll_interval=None,
job_stdout_file=None,
job_arguments=None,
job_files=None,
job_jars=None,
job_type=None,
properties=None):
"""See base class."""
assert job_type
args = ['jobs', 'submit', job_type]
if job_type == self.PYSPARK_JOB_TYPE:
args.append(pyspark_file)
cmd = self.DataprocGcloudCommand(*args)
cmd.flags['cluster'] = self.cluster_id
cmd.flags['labels'] = util.MakeFormattedDefaultTags()
job_jars = job_jars or []
if classname:
if jarfile:
# Dataproc does not support both a main class and a main jar so just
# make the main jar an additional jar instead.
job_jars.append(jarfile)
cmd.flags['class'] = classname
elif jarfile:
cmd.flags['jar'] = jarfile
if query_file:
cmd.flags['file'] = query_file
if job_files:
cmd.flags['files'] = ','.join(job_files)
if job_jars:
cmd.flags['jars'] = ','.join(job_jars)
    # Dataproc prints an object describing the job execution to stdout, while
    # its stderr contains a mix of the job's stderr and the job's stdout. We
    # set the driver log level to FATAL to suppress those messages, so the
    # job's standard output can hopefully be separated from the log messages.
cmd.flags['driver-log-levels'] = 'root={}'.format(FLAGS.dpb_log_level)
all_properties = self.GetJobProperties()
all_properties.update(properties or {})
if all_properties:
# For commas: https://cloud.google.com/sdk/gcloud/reference/topic/escaping
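      # Illustrative result (assumed property values): the '^@^' prefix makes
      # '@' the list delimiter, so the flag ends up looking like
      # --properties='^@^spark.executor.memory=4g@spark.driver.memory=2g'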
cmd.flags['properties'] = '^@^' + '@'.join(
'{}={}'.format(k, v) for k, v in all_properties.items())
if job_arguments:
cmd.additional_flags = ['--'] + job_arguments
stdout, stderr, retcode = cmd.Issue(timeout=None, raise_on_failure=False)
if retcode != 0:
raise dpb_service.JobSubmissionError(stderr)
results = json.loads(stdout)
# Otherwise retcode would not have been 0
assert results['status']['state'] == 'DONE'
done_time = GcpDpbDataproc._ParseTime(results['status']['stateStartTime'])
pending_time = None
start_time = None
for state in results['statusHistory']:
if state['state'] == 'PENDING':
pending_time = GcpDpbDataproc._ParseTime(state['stateStartTime'])
elif state['state'] == 'RUNNING':
start_time = GcpDpbDataproc._ParseTime(state['stateStartTime'])
assert pending_time and start_time and done_time
return dpb_service.JobResult(
run_time=(done_time - start_time).total_seconds(),
pending_time=(start_time - pending_time).total_seconds())
def SetClusterProperty(self):
pass
def _AddToCmd(self, cmd, cmd_property, cmd_value):
flag_name = cmd_property
cmd.flags[flag_name] = cmd_value
def CreateBucket(self, source_bucket):
"""Create a bucket on GCS used during the persistent data processing.
Args:
source_bucket: String, name of the bucket to create.
"""
self.storage_service.MakeBucket(source_bucket)
def DeleteBucket(self, source_bucket):
"""Delete a bucket on GCS used during the persistent data processing.
Args:
source_bucket: String, name of the bucket to delete.
"""
self.storage_service.DeleteBucket(source_bucket)
def distributed_copy(self, source_location, destination_location):
"""Method to copy data using a distributed job on the cluster."""
cmd = self.DataprocGcloudCommand('jobs', 'submit', 'hadoop')
cmd.flags['cluster'] = self.cluster_id
cmd.flags['class'] = 'org.apache.hadoop.tools.DistCp'
job_arguments = (['-m={}'.format(FLAGS.dpb_dataproc_distcp_num_maps)]
if FLAGS.dpb_dataproc_distcp_num_maps is not None else [])
job_arguments.extend([source_location, destination_location])
cmd.additional_flags = ['--'] + job_arguments
_, _, retcode = cmd.Issue(timeout=None, raise_on_failure=False)
return {dpb_service.SUCCESS: retcode == 0}
def MigrateCrossCloud(self,
source_location,
destination_location,
dest_cloud='AWS'):
"""Method to copy data cross cloud using a distributed job on the cluster.
Currently the only supported destination cloud is AWS.
TODO(user): Add support for other destination clouds.
Args:
source_location: The source GCS path to migrate.
destination_location: The destination path.
dest_cloud: The cloud to copy data to.
Returns:
A dictionary with key 'success' and boolean value set to the status of
data migration command.
"""
if dest_cloud == 'AWS':
dest_prefix = 's3a://'
else:
raise ValueError('Unsupported destination cloud.')
cmd = self.DataprocGcloudCommand('jobs', 'submit', 'hadoop')
if self.project is not None:
cmd.flags['project'] = self.project
cmd.flags['cluster'] = self.cluster_id
cmd.flags['class'] = 'org.apache.hadoop.tools.DistCp'
s3_access_key, s3_secret_key = aws_credentials.GetCredentials()
cmd.flags['properties'] = 'fs.s3a.access.key=%s,fs.s3a.secret.key=%s' % (
s3_access_key, s3_secret_key)
cmd.additional_flags = ['--'] + [
'gs://' + source_location, dest_prefix + destination_location
]
_, _, retcode = cmd.Issue(timeout=None, raise_on_failure=False)
return {dpb_service.SUCCESS: retcode == 0}
|
from stash.tests.stashtest import StashTestCase
class TermemuTests(StashTestCase):
setup_commands = ['BIN_PATH=$STASH_ROOT/tests/system/data:$BIN_PATH']
def test_201(self):
self.stash('test_201.py')
cmp_str = """[stash]$ The first line
[stash]$ rown fox jumps over the lazy dog"""
assert self.stash.main_screen.text == cmp_str, 'output not identical'
def test_202(self):
self.stash('test_202.py')
cmp_str = """[stash]$ The first line
[stash]$ """
assert self.stash.main_screen.text == cmp_str, 'output not identical'
def test_203(self):
self.stash('test_203.py')
cmp_str = """[stash]$ The first line
[stash]$ """
assert self.stash.main_screen.text == cmp_str
def test_204(self):
self.stash('test_204.py')
cmp_str = """[stash]$ The first line
A quick brown fox jumps over the lazy do[stash]$ """
assert self.stash.main_screen.text == cmp_str
|
from copy import deepcopy
from aiounifi.controller import MESSAGE_CLIENT_REMOVED, MESSAGE_EVENT
from aiounifi.websocket import SIGNAL_DATA
from homeassistant import config_entries
from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.components.unifi.const import (
CONF_BLOCK_CLIENT,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
DOMAIN as UNIFI_DOMAIN,
)
from homeassistant.components.unifi.switch import POE_SWITCH
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
from .test_controller import (
CONTROLLER_HOST,
DESCRIPTION,
ENTRY_CONFIG,
setup_unifi_integration,
)
CLIENT_1 = {
"hostname": "client_1",
"ip": "10.0.0.1",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:01",
"name": "POE Client 1",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 1,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
}
CLIENT_2 = {
"hostname": "client_2",
"ip": "10.0.0.2",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:02",
"name": "POE Client 2",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 2,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
}
CLIENT_3 = {
"hostname": "client_3",
"ip": "10.0.0.3",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:03",
"name": "Non-POE Client 3",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 3,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
}
CLIENT_4 = {
"hostname": "client_4",
"ip": "10.0.0.4",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:04",
"name": "Non-POE Client 4",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 4,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
}
POE_SWITCH_CLIENTS = [
{
"hostname": "client_1",
"ip": "10.0.0.1",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:01",
"name": "POE Client 1",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 1,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
},
{
"hostname": "client_2",
"ip": "10.0.0.2",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:02",
"name": "POE Client 2",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 1,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
},
]
DEVICE_1 = {
"device_id": "mock-id",
"ip": "10.0.1.1",
"mac": "00:00:00:00:01:01",
"last_seen": 1562600145,
"model": "US16P150",
"name": "mock-name",
"port_overrides": [],
"port_table": [
{
"media": "GE",
"name": "Port 1",
"port_idx": 1,
"poe_class": "Class 4",
"poe_enable": True,
"poe_mode": "auto",
"poe_power": "2.56",
"poe_voltage": "53.40",
"portconf_id": "1a1",
"port_poe": True,
"up": True,
},
{
"media": "GE",
"name": "Port 2",
"port_idx": 2,
"poe_class": "Class 4",
"poe_enable": True,
"poe_mode": "auto",
"poe_power": "2.56",
"poe_voltage": "53.40",
"portconf_id": "1a2",
"port_poe": True,
"up": True,
},
{
"media": "GE",
"name": "Port 3",
"port_idx": 3,
"poe_class": "Unknown",
"poe_enable": False,
"poe_mode": "off",
"poe_power": "0.00",
"poe_voltage": "0.00",
"portconf_id": "1a3",
"port_poe": False,
"up": True,
},
{
"media": "GE",
"name": "Port 4",
"port_idx": 4,
"poe_class": "Unknown",
"poe_enable": False,
"poe_mode": "auto",
"poe_power": "0.00",
"poe_voltage": "0.00",
"portconf_id": "1a4",
"port_poe": True,
"up": True,
},
],
"state": 1,
"type": "usw",
"version": "4.0.42.10433",
}
BLOCKED = {
"blocked": True,
"hostname": "block_client_1",
"ip": "10.0.0.1",
"is_guest": False,
"is_wired": False,
"last_seen": 1562600145,
"mac": "00:00:00:00:01:01",
"name": "Block Client 1",
"noted": True,
"oui": "Producer",
}
UNBLOCKED = {
"blocked": False,
"hostname": "block_client_2",
"ip": "10.0.0.2",
"is_guest": False,
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:01:02",
"name": "Block Client 2",
"noted": True,
"oui": "Producer",
}
EVENT_BLOCKED_CLIENT_CONNECTED = {
"user": BLOCKED["mac"],
"radio": "na",
"channel": "44",
"hostname": BLOCKED["hostname"],
"key": "EVT_WU_Connected",
"subsystem": "wlan",
"site_id": "name",
"time": 1587753456179,
"datetime": "2020-04-24T18:37:36Z",
"msg": f'User{[BLOCKED["mac"]]} has connected."',
"_id": "5ea331fa30c49e00f90ddc1a",
}
EVENT_BLOCKED_CLIENT_BLOCKED = {
"user": BLOCKED["mac"],
"hostname": BLOCKED["hostname"],
"key": "EVT_WC_Blocked",
"subsystem": "wlan",
"site_id": "name",
"time": 1587753456179,
"datetime": "2020-04-24T18:37:36Z",
"msg": f'User{[BLOCKED["mac"]]} has been blocked."',
"_id": "5ea331fa30c49e00f90ddc1a",
}
EVENT_BLOCKED_CLIENT_UNBLOCKED = {
"user": BLOCKED["mac"],
"hostname": BLOCKED["hostname"],
"key": "EVT_WC_Unblocked",
"subsystem": "wlan",
"site_id": "name",
"time": 1587753456179,
"datetime": "2020-04-24T18:37:36Z",
"msg": f'User{[BLOCKED["mac"]]} has been unblocked."',
"_id": "5ea331fa30c49e00f90ddc1a",
}
EVENT_CLIENT_2_CONNECTED = {
"user": CLIENT_2["mac"],
"radio": "na",
"channel": "44",
"hostname": CLIENT_2["hostname"],
"key": "EVT_WU_Connected",
"subsystem": "wlan",
"site_id": "name",
"time": 1587753456179,
"datetime": "2020-04-24T18:37:36Z",
"msg": f'User{[CLIENT_2["mac"]]} has connected."',
"_id": "5ea331fa30c49e00f90ddc1a",
}
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a controller."""
assert (
await async_setup_component(
hass, SWITCH_DOMAIN, {SWITCH_DOMAIN: {"platform": UNIFI_DOMAIN}}
)
is True
)
assert UNIFI_DOMAIN not in hass.data
async def test_no_clients(hass):
"""Test the update_clients function when no clients are found."""
controller = await setup_unifi_integration(
hass,
options={CONF_TRACK_CLIENTS: False, CONF_TRACK_DEVICES: False},
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
async def test_controller_not_client(hass):
"""Test that the controller doesn't become a switch."""
controller = await setup_unifi_integration(
hass,
options={CONF_TRACK_CLIENTS: False, CONF_TRACK_DEVICES: False},
clients_response=[CONTROLLER_HOST],
devices_response=[DEVICE_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
cloudkey = hass.states.get("switch.cloud_key")
assert cloudkey is None
async def test_not_admin(hass):
"""Test that switch platform only work on an admin account."""
description = deepcopy(DESCRIPTION)
description[0]["site_role"] = "not admin"
controller = await setup_unifi_integration(
hass,
options={CONF_TRACK_CLIENTS: False, CONF_TRACK_DEVICES: False},
site_description=description,
clients_response=[CLIENT_1],
devices_response=[DEVICE_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
async def test_switches(hass):
"""Test the update_items function with some clients."""
controller = await setup_unifi_integration(
hass,
options={
CONF_BLOCK_CLIENT: [BLOCKED["mac"], UNBLOCKED["mac"]],
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
},
clients_response=[CLIENT_1, CLIENT_4],
devices_response=[DEVICE_1],
clients_all_response=[BLOCKED, UNBLOCKED, CLIENT_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3
switch_1 = hass.states.get("switch.poe_client_1")
assert switch_1 is not None
assert switch_1.state == "on"
assert switch_1.attributes["power"] == "2.56"
assert switch_1.attributes[SWITCH_DOMAIN] == "00:00:00:00:01:01"
assert switch_1.attributes["port"] == 1
assert switch_1.attributes["poe_mode"] == "auto"
switch_4 = hass.states.get("switch.poe_client_4")
assert switch_4 is None
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None
assert blocked.state == "off"
unblocked = hass.states.get("switch.block_client_2")
assert unblocked is not None
assert unblocked.state == "on"
await hass.services.async_call(
SWITCH_DOMAIN, "turn_off", {"entity_id": "switch.block_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 5
assert controller.mock_requests[4] == {
"json": {"mac": "00:00:00:00:01:01", "cmd": "block-sta"},
"method": "post",
"path": "/cmd/stamgr",
}
await hass.services.async_call(
SWITCH_DOMAIN, "turn_on", {"entity_id": "switch.block_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 6
assert controller.mock_requests[5] == {
"json": {"mac": "00:00:00:00:01:01", "cmd": "unblock-sta"},
"method": "post",
"path": "/cmd/stamgr",
}
async def test_remove_switches(hass):
"""Test the update_items function with some clients."""
controller = await setup_unifi_integration(
hass,
options={CONF_BLOCK_CLIENT: [UNBLOCKED["mac"]]},
clients_response=[CLIENT_1, UNBLOCKED],
devices_response=[DEVICE_1],
)
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
poe_switch = hass.states.get("switch.poe_client_1")
assert poe_switch is not None
block_switch = hass.states.get("switch.block_client_2")
assert block_switch is not None
controller.api.websocket._data = {
"meta": {"message": MESSAGE_CLIENT_REMOVED},
"data": [CLIENT_1, UNBLOCKED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
poe_switch = hass.states.get("switch.poe_client_1")
assert poe_switch is None
block_switch = hass.states.get("switch.block_client_2")
assert block_switch is None
async def test_block_switches(hass):
"""Test the update_items function with some clients."""
controller = await setup_unifi_integration(
hass,
options={
CONF_BLOCK_CLIENT: [BLOCKED["mac"], UNBLOCKED["mac"]],
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
},
clients_response=[UNBLOCKED],
clients_all_response=[BLOCKED],
)
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None
assert blocked.state == "off"
unblocked = hass.states.get("switch.block_client_2")
assert unblocked is not None
assert unblocked.state == "on"
controller.api.websocket._data = {
"meta": {"message": MESSAGE_EVENT},
"data": [EVENT_BLOCKED_CLIENT_UNBLOCKED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None
assert blocked.state == "on"
controller.api.websocket._data = {
"meta": {"message": MESSAGE_EVENT},
"data": [EVENT_BLOCKED_CLIENT_BLOCKED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None
assert blocked.state == "off"
await hass.services.async_call(
SWITCH_DOMAIN, "turn_off", {"entity_id": "switch.block_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 5
assert controller.mock_requests[4] == {
"json": {"mac": "00:00:00:00:01:01", "cmd": "block-sta"},
"method": "post",
"path": "/cmd/stamgr",
}
await hass.services.async_call(
SWITCH_DOMAIN, "turn_on", {"entity_id": "switch.block_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 6
assert controller.mock_requests[5] == {
"json": {"mac": "00:00:00:00:01:01", "cmd": "unblock-sta"},
"method": "post",
"path": "/cmd/stamgr",
}
async def test_new_client_discovered_on_block_control(hass):
"""Test if 2nd update has a new client."""
controller = await setup_unifi_integration(
hass,
options={
CONF_BLOCK_CLIENT: [BLOCKED["mac"]],
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
},
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
blocked = hass.states.get("switch.block_client_1")
assert blocked is None
controller.api.websocket._data = {
"meta": {"message": "sta:sync"},
"data": [BLOCKED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
controller.api.websocket._data = {
"meta": {"message": MESSAGE_EVENT},
"data": [EVENT_BLOCKED_CLIENT_CONNECTED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
blocked = hass.states.get("switch.block_client_1")
assert blocked is not None
async def test_option_block_clients(hass):
"""Test the changes to option reflects accordingly."""
controller = await setup_unifi_integration(
hass,
options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
clients_all_response=[BLOCKED, UNBLOCKED],
)
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
# Add a second switch
hass.config_entries.async_update_entry(
controller.config_entry,
options={CONF_BLOCK_CLIENT: [BLOCKED["mac"], UNBLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
# Remove the second switch again
hass.config_entries.async_update_entry(
controller.config_entry,
options={CONF_BLOCK_CLIENT: [BLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
# Enable one and remove another one
hass.config_entries.async_update_entry(
controller.config_entry,
options={CONF_BLOCK_CLIENT: [UNBLOCKED["mac"]]},
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
# Remove one
hass.config_entries.async_update_entry(
controller.config_entry,
options={CONF_BLOCK_CLIENT: []},
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0
async def test_new_client_discovered_on_poe_control(hass):
"""Test if 2nd update has a new client."""
controller = await setup_unifi_integration(
hass,
options={CONF_TRACK_CLIENTS: False, CONF_TRACK_DEVICES: False},
clients_response=[CLIENT_1],
devices_response=[DEVICE_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
controller.api.websocket._data = {
"meta": {"message": "sta:sync"},
"data": [CLIENT_2],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1
controller.api.websocket._data = {
"meta": {"message": MESSAGE_EVENT},
"data": [EVENT_CLIENT_2_CONNECTED],
}
controller.api.session_handler(SIGNAL_DATA)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
switch_2 = hass.states.get("switch.poe_client_2")
assert switch_2 is not None
await hass.services.async_call(
SWITCH_DOMAIN, "turn_off", {"entity_id": "switch.poe_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 5
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
assert controller.mock_requests[4] == {
"json": {
"port_overrides": [{"port_idx": 1, "portconf_id": "1a1", "poe_mode": "off"}]
},
"method": "put",
"path": "/rest/device/mock-id",
}
await hass.services.async_call(
SWITCH_DOMAIN, "turn_on", {"entity_id": "switch.poe_client_1"}, blocking=True
)
assert len(controller.mock_requests) == 6
assert controller.mock_requests[4] == {
"json": {
"port_overrides": [
{"port_idx": 1, "portconf_id": "1a1", "poe_mode": "auto"}
]
},
"method": "put",
"path": "/rest/device/mock-id",
}
async def test_ignore_multiple_poe_clients_on_same_port(hass):
"""Ignore when there are multiple POE driven clients on same port.
If there is a non-UniFi switch powered by POE,
clients will be transparently marked as having POE as well.
"""
controller = await setup_unifi_integration(
hass,
clients_response=POE_SWITCH_CLIENTS,
devices_response=[DEVICE_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 3
switch_1 = hass.states.get("switch.poe_client_1")
switch_2 = hass.states.get("switch.poe_client_2")
assert switch_1 is None
assert switch_2 is None
async def test_restoring_client(hass):
"""Test the update_items function with some clients."""
config_entry = config_entries.ConfigEntry(
version=1,
domain=UNIFI_DOMAIN,
title="Mock Title",
data=ENTRY_CONFIG,
source="test",
connection_class=config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
options={},
entry_id=1,
)
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
SWITCH_DOMAIN,
UNIFI_DOMAIN,
f'{POE_SWITCH}-{CLIENT_1["mac"]}',
suggested_object_id=CLIENT_1["hostname"],
config_entry=config_entry,
)
registry.async_get_or_create(
SWITCH_DOMAIN,
UNIFI_DOMAIN,
f'{POE_SWITCH}-{CLIENT_2["mac"]}',
suggested_object_id=CLIENT_2["hostname"],
config_entry=config_entry,
)
controller = await setup_unifi_integration(
hass,
options={
CONF_BLOCK_CLIENT: ["random mac"],
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
},
clients_response=[CLIENT_2],
devices_response=[DEVICE_1],
clients_all_response=[CLIENT_1],
)
assert len(controller.mock_requests) == 4
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
device_1 = hass.states.get("switch.client_1")
assert device_1 is not None
|
import pandas as pd
class TimeChopper(object):
"""
    TimeChopper splits incoming dataframes by index. Chunks are cached, and
    chunks for the same key from different DFs are joined. Chunks are then
    passed on as (<timestamp>, <dataframe>, <cardinality>) tuples.
"""
def __init__(self, source, cache_size):
self.cache_size = cache_size
self.source = source
self.cache = {}
def __iter__(self):
for chunk in self.source:
grouped = chunk.groupby(level=0)
for group_key, group_data in list(grouped):
if group_key in self.cache:
self.cache[group_key] = pd.concat(
[self.cache[group_key], group_data])
else:
self.cache[group_key] = group_data
while len(self.cache) > self.cache_size:
yield self.__get_result()
while self.cache:
yield self.__get_result()
def __get_result(self):
key = min(self.cache.keys())
result = self.cache.pop(key, None)
cardinality = len(result) if result is not None else 0
return (key, result, cardinality)
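# Minimal usage sketch (illustrative, not part of the module): feed TimeChopper
# an iterable of DataFrames indexed by timestamp and consume the
# (timestamp, dataframe, cardinality) tuples it yields:
#
#   source = iter([pd.DataFrame({"value": [1, 2]}, index=[0, 0])])
#   for ts, df, cardinality in TimeChopper(source, cache_size=5):
#       print(ts, cardinality)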
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import json
import logging
import time
import py4j
from pyspark import sql
def parse_args():
"""Parse argv."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--sql-scripts',
type=lambda csv: csv.split(','),
required=True,
help='List of SQL scripts staged in object storage to run')
parser.add_argument(
'--table-metadata',
metavar='METADATA',
type=lambda s: json.loads(s).items(),
default={},
help="""\
JSON Object mapping table names to arrays of length 2. The arrays contain the
format of the data and the options to pass to the dataframe reader. e.g.:
{
"my_bq_table": ["bigquery", {"table": "bigquery_public_data:dataset.table"}],
"my_parquet_table": ["parquet", {"path": "gs://some/directory"}]
}""")
parser.add_argument(
'--report-dir',
required=True,
help='Directory to write out query timings to.')
return parser.parse_args()
def main(args):
spark = (sql.SparkSession.builder
.appName('Spark SQL Query')
.enableHiveSupport()
.getOrCreate())
for name, (fmt, options) in args.table_metadata:
logging.info('Loading %s', name)
spark.read.format(fmt).options(**options).load().createTempView(name)
results = []
for script in args.sql_scripts:
# Read script from object storage using rdd API
query = '\n'.join(spark.sparkContext.textFile(script).collect())
try:
logging.info('Running %s', script)
start = time.time()
      # spark-sql does not limit its output. Replicate that here by setting the
      # limit to the max Java Integer. Hopefully the output was limited in the
      # SQL itself, or you are going to have a bad time. Note that not all
      # TPC-DS or TPC-H queries limit their output, and they may crash small
      # JVMs.
# pylint: disable=protected-access
spark.sql(query).show(spark._jvm.java.lang.Integer.MAX_VALUE)
# pylint: enable=protected-access
duration = time.time() - start
results.append(sql.Row(script=script, duration=duration))
    # These correspond to errors in low-level Spark execution.
# Let ParseException and AnalysisException fail the job.
except (sql.utils.QueryExecutionException,
py4j.protocol.Py4JJavaError) as e:
logging.error('Script %s failed', script, exc_info=e)
logging.info('Writing results to %s', args.report_dir)
spark.createDataFrame(results).coalesce(1).write.json(args.report_dir)
if __name__ == '__main__':
main(parse_args())
|
try:
import json
except ImportError:
import simplejson as json
import urllib2
import diamond.collector
class SidekiqWebCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(SidekiqWebCollector,
self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SidekiqWebCollector, self).get_default_config()
config.update({
'host': 'localhost',
'port': 9999,
'byte_unit': ['byte'],
})
return config
def collect(self):
try:
response = urllib2.urlopen("http://%s:%s/dashboard/stats" % (
self.config['host'], int(self.config['port'])))
except Exception as e:
            self.log.error("Couldn't connect to sidekiq-web: %s", e)
return {}
try:
j = json.loads(response.read())
except Exception as e:
            self.log.error("Couldn't parse JSON: %s", e)
return {}
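        # The dashboard stats payload is expected to be a nested mapping, e.g.
        # (illustrative): {"sidekiq": {"processed": 123, "memory": "12.3M"}}.
        # String values carrying an 'M' suffix are converted from megabytes.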
for k in j:
for item, value in j[k].items():
if isinstance(value, (str, unicode)) and 'M' in value:
value = float(value.replace('M', ''))
for unit in self.config['byte_unit']:
unit_value = diamond.convertor.binary.convert(
value=value,
oldUnit='megabyte',
newUnit=unit)
self.publish("%s.%s_%s" % (k, item, unit), unit_value)
else:
self.publish("%s.%s" % (k, item), value)
|
from datetime import timedelta
import logging
import threading
from jsonpath import jsonpath
import verisure
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
HTTP_SERVICE_UNAVAILABLE,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_DEVICE_SERIAL = "device_serial"
CONF_ALARM = "alarm"
CONF_CODE_DIGITS = "code_digits"
CONF_DOOR_WINDOW = "door_window"
CONF_GIID = "giid"
CONF_HYDROMETERS = "hygrometers"
CONF_LOCKS = "locks"
CONF_DEFAULT_LOCK_CODE = "default_lock_code"
CONF_MOUSE = "mouse"
CONF_SMARTPLUGS = "smartplugs"
CONF_THERMOMETERS = "thermometers"
CONF_SMARTCAM = "smartcam"
DOMAIN = "verisure"
MIN_SCAN_INTERVAL = timedelta(minutes=1)
DEFAULT_SCAN_INTERVAL = timedelta(minutes=1)
SERVICE_CAPTURE_SMARTCAM = "capture_smartcam"
SERVICE_DISABLE_AUTOLOCK = "disable_autolock"
SERVICE_ENABLE_AUTOLOCK = "enable_autolock"
HUB = None
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_ALARM, default=True): cv.boolean,
vol.Optional(CONF_CODE_DIGITS, default=4): cv.positive_int,
vol.Optional(CONF_DOOR_WINDOW, default=True): cv.boolean,
vol.Optional(CONF_GIID): cv.string,
vol.Optional(CONF_HYDROMETERS, default=True): cv.boolean,
vol.Optional(CONF_LOCKS, default=True): cv.boolean,
vol.Optional(CONF_DEFAULT_LOCK_CODE): cv.string,
vol.Optional(CONF_MOUSE, default=True): cv.boolean,
vol.Optional(CONF_SMARTPLUGS, default=True): cv.boolean,
vol.Optional(CONF_THERMOMETERS, default=True): cv.boolean,
vol.Optional(CONF_SMARTCAM, default=True): cv.boolean,
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL): (
vol.All(cv.time_period, vol.Clamp(min=MIN_SCAN_INTERVAL))
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
DEVICE_SERIAL_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_SERIAL): cv.string})
def setup(hass, config):
"""Set up the Verisure component."""
global HUB # pylint: disable=global-statement
HUB = VerisureHub(config[DOMAIN])
HUB.update_overview = Throttle(config[DOMAIN][CONF_SCAN_INTERVAL])(
HUB.update_overview
)
if not HUB.login():
return False
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: HUB.logout())
HUB.update_overview()
for component in (
"sensor",
"switch",
"alarm_control_panel",
"lock",
"camera",
"binary_sensor",
):
discovery.load_platform(hass, component, DOMAIN, {}, config)
async def capture_smartcam(service):
"""Capture a new picture from a smartcam."""
device_id = service.data[ATTR_DEVICE_SERIAL]
try:
await hass.async_add_executor_job(HUB.smartcam_capture, device_id)
_LOGGER.debug("Capturing new image from %s", ATTR_DEVICE_SERIAL)
except verisure.Error as ex:
_LOGGER.error("Could not capture image, %s", ex)
hass.services.register(
DOMAIN, SERVICE_CAPTURE_SMARTCAM, capture_smartcam, schema=DEVICE_SERIAL_SCHEMA
)
async def disable_autolock(service):
"""Disable autolock on a doorlock."""
device_id = service.data[ATTR_DEVICE_SERIAL]
try:
await hass.async_add_executor_job(HUB.disable_autolock, device_id)
_LOGGER.debug("Disabling autolock on%s", ATTR_DEVICE_SERIAL)
except verisure.Error as ex:
_LOGGER.error("Could not disable autolock, %s", ex)
hass.services.register(
DOMAIN, SERVICE_DISABLE_AUTOLOCK, disable_autolock, schema=DEVICE_SERIAL_SCHEMA
)
async def enable_autolock(service):
"""Enable autolock on a doorlock."""
device_id = service.data[ATTR_DEVICE_SERIAL]
try:
await hass.async_add_executor_job(HUB.enable_autolock, device_id)
_LOGGER.debug("Enabling autolock on %s", ATTR_DEVICE_SERIAL)
except verisure.Error as ex:
_LOGGER.error("Could not enable autolock, %s", ex)
hass.services.register(
DOMAIN, SERVICE_ENABLE_AUTOLOCK, enable_autolock, schema=DEVICE_SERIAL_SCHEMA
)
return True
class VerisureHub:
"""A Verisure hub wrapper class."""
def __init__(self, domain_config):
"""Initialize the Verisure hub."""
self.overview = {}
self.imageseries = {}
self.config = domain_config
self._lock = threading.Lock()
self.session = verisure.Session(
domain_config[CONF_USERNAME], domain_config[CONF_PASSWORD]
)
self.giid = domain_config.get(CONF_GIID)
def login(self):
"""Login to Verisure."""
try:
self.session.login()
except verisure.Error as ex:
_LOGGER.error("Could not log in to verisure, %s", ex)
return False
if self.giid:
return self.set_giid()
return True
def logout(self):
"""Logout from Verisure."""
try:
self.session.logout()
except verisure.Error as ex:
_LOGGER.error("Could not log out from verisure, %s", ex)
return False
return True
def set_giid(self):
"""Set installation GIID."""
try:
self.session.set_giid(self.giid)
except verisure.Error as ex:
_LOGGER.error("Could not set installation GIID, %s", ex)
return False
return True
def update_overview(self):
"""Update the overview."""
try:
self.overview = self.session.get_overview()
except verisure.ResponseError as ex:
_LOGGER.error("Could not read overview, %s", ex)
if ex.status_code == HTTP_SERVICE_UNAVAILABLE: # Service unavailable
_LOGGER.info("Trying to log in again")
self.login()
else:
raise
@Throttle(timedelta(seconds=60))
def update_smartcam_imageseries(self):
"""Update the image series."""
self.imageseries = self.session.get_camera_imageseries()
@Throttle(timedelta(seconds=30))
def smartcam_capture(self, device_id):
"""Capture a new image from a smartcam."""
self.session.capture_image(device_id)
def disable_autolock(self, device_id):
"""Disable autolock."""
self.session.set_lock_config(device_id, auto_lock_enabled=False)
def enable_autolock(self, device_id):
"""Enable autolock."""
self.session.set_lock_config(device_id, auto_lock_enabled=True)
def get(self, jpath, *args):
"""Get values from the overview that matches the jsonpath."""
res = jsonpath(self.overview, jpath % args)
return res if res else []
def get_first(self, jpath, *args):
"""Get first value from the overview that matches the jsonpath."""
res = self.get(jpath, *args)
return res[0] if res else None
def get_image_info(self, jpath, *args):
"""Get values from the imageseries that matches the jsonpath."""
res = jsonpath(self.imageseries, jpath % args)
return res if res else []
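# Illustrative jsonpath lookup against the cached overview (the overview
# structure shown here is an assumption, not taken from this module):
#
#   HUB.get_first("$.climateValues[?(@.deviceLabel=='%s')].temperature", serial)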
|
from flask import Flask, jsonify
from flasgger import Swagger
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Colors API',
'uiversion': 2
}
swag = Swagger(app, template_file='colors_template.json')
@app.route('/colors/<palette>/')
def colors(palette):
"""
Example using a dictionary as specification
This is the description
You can also set 'summary' and 'description' in
specs_dict
---
# values here overrides the specs dict
deprecated: true
"""
all_colors = {
'cmyk': ['cian', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
if __name__ == "__main__":
app.run(debug=True)
|
import re
import subprocess
import diamond.collector
import diamond.convertor
from diamond.collector import str_to_bool
LINE_PATTERN = re.compile(
    r'^(?P<source>\S+).*\s+(?P<offset>[+-]\d+)(?P<unit>\w+)\s+')
IP_PATTERN = re.compile(r'^\d+\.\d+\.\d+\.\d+$')
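# Example of a 'chronyc sourcestats' line the pattern is meant to match
# (illustrative values); the signed offset and its unit ('+1us') are captured:
#   foo.example.net            11   5   46m     -0.001      0.045    +1us    25us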
def cleanup_source(source):
if IP_PATTERN.search(source):
return source.replace('.', '_')
if '.' in source:
hostname, _ = source.split('.', 1)
return hostname
return source
class ChronydCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(ChronydCollector, self).get_default_config_help()
config_help.update({
'bin': 'The path to the chronyc binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ChronydCollector, self).get_default_config()
config.update({
'path': 'chrony',
'bin': '/usr/bin/chronyc',
'use_sudo': False,
'sudo_cmd': '/usr/bin/sudo',
})
return config
def get_output(self):
try:
command = [self.config['bin'], 'sourcestats']
if str_to_bool(self.config['use_sudo']):
command.insert(0, self.config['sudo_cmd'])
return subprocess.Popen(command,
stdout=subprocess.PIPE).communicate()[0]
except OSError:
return ""
def collect(self):
output = self.get_output()
for line in output.strip().split("\n"):
m = LINE_PATTERN.search(line)
if m is None:
continue
source = cleanup_source(m.group('source'))
offset = float(m.group('offset'))
unit = m.group('unit')
try:
value = diamond.convertor.time.convert(offset, unit, 'ms')
except NotImplementedError as e:
self.log.error('Unable to convert %s%s: %s', offset, unit, e)
continue
self.publish('%s.offset_ms' % source, value)
|
import pickle
import pytest
dask = pytest.importorskip("dask") # isort:skip
distributed = pytest.importorskip("distributed") # isort:skip
from dask.distributed import Client, Lock
from distributed.utils_test import cluster, gen_cluster
from distributed.utils_test import loop
from distributed.client import futures_of
import xarray as xr
from xarray.backends.locks import HDF5_LOCK, CombinedLock
from xarray.tests.test_backends import (
ON_WINDOWS,
create_tmp_file,
create_tmp_geotiff,
open_example_dataset,
)
from xarray.tests.test_dataset import create_test_data
from . import (
assert_allclose,
has_h5netcdf,
has_netCDF4,
requires_rasterio,
has_scipy,
requires_zarr,
requires_cfgrib,
)
# this is to stop isort throwing errors. May have been easier to just use
# `isort:skip` in retrospect
da = pytest.importorskip("dask.array")
loop = loop # loop is an imported fixture, which flake8 has issues ack-ing
@pytest.fixture
def tmp_netcdf_filename(tmpdir):
return str(tmpdir.join("testfile.nc"))
ENGINES = []
if has_scipy:
ENGINES.append("scipy")
if has_netCDF4:
ENGINES.append("netcdf4")
if has_h5netcdf:
ENGINES.append("h5netcdf")
NC_FORMATS = {
"netcdf4": [
"NETCDF3_CLASSIC",
"NETCDF3_64BIT_OFFSET",
"NETCDF3_64BIT_DATA",
"NETCDF4_CLASSIC",
"NETCDF4",
],
"scipy": ["NETCDF3_CLASSIC", "NETCDF3_64BIT"],
"h5netcdf": ["NETCDF4"],
}
ENGINES_AND_FORMATS = [
("netcdf4", "NETCDF3_CLASSIC"),
("netcdf4", "NETCDF4_CLASSIC"),
("netcdf4", "NETCDF4"),
("h5netcdf", "NETCDF4"),
("scipy", "NETCDF3_64BIT"),
]
@pytest.mark.parametrize("engine,nc_format", ENGINES_AND_FORMATS)
def test_dask_distributed_netcdf_roundtrip(
loop, tmp_netcdf_filename, engine, nc_format
):
if engine not in ENGINES:
pytest.skip("engine not available")
chunks = {"dim1": 4, "dim2": 3, "dim3": 6}
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
original = create_test_data().chunk(chunks)
if engine == "scipy":
with pytest.raises(NotImplementedError):
original.to_netcdf(
tmp_netcdf_filename, engine=engine, format=nc_format
)
return
original.to_netcdf(tmp_netcdf_filename, engine=engine, format=nc_format)
with xr.open_dataset(
tmp_netcdf_filename, chunks=chunks, engine=engine
) as restored:
assert isinstance(restored.var1.data, da.Array)
computed = restored.compute()
assert_allclose(original, computed)
@pytest.mark.parametrize("engine,nc_format", ENGINES_AND_FORMATS)
def test_dask_distributed_read_netcdf_integration_test(
loop, tmp_netcdf_filename, engine, nc_format
):
if engine not in ENGINES:
pytest.skip("engine not available")
chunks = {"dim1": 4, "dim2": 3, "dim3": 6}
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
original = create_test_data()
original.to_netcdf(tmp_netcdf_filename, engine=engine, format=nc_format)
with xr.open_dataset(
tmp_netcdf_filename, chunks=chunks, engine=engine
) as restored:
assert isinstance(restored.var1.data, da.Array)
computed = restored.compute()
assert_allclose(original, computed)
@requires_zarr
@pytest.mark.parametrize("consolidated", [True, False])
@pytest.mark.parametrize("compute", [True, False])
def test_dask_distributed_zarr_integration_test(loop, consolidated, compute):
if consolidated:
pytest.importorskip("zarr", minversion="2.2.1.dev2")
write_kwargs = {"consolidated": True}
read_kwargs = {"backend_kwargs": {"consolidated": True}}
else:
write_kwargs = read_kwargs = {}
chunks = {"dim1": 4, "dim2": 3, "dim3": 5}
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
original = create_test_data().chunk(chunks)
with create_tmp_file(
allow_cleanup_failure=ON_WINDOWS, suffix=".zarrc"
) as filename:
maybe_futures = original.to_zarr(
filename, compute=compute, **write_kwargs
)
if not compute:
maybe_futures.compute()
with xr.open_dataset(
filename, chunks="auto", engine="zarr", **read_kwargs
) as restored:
assert isinstance(restored.var1.data, da.Array)
computed = restored.compute()
assert_allclose(original, computed)
@requires_rasterio
def test_dask_distributed_rasterio_integration_test(loop):
with create_tmp_geotiff() as (tmp_file, expected):
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
da_tiff = xr.open_rasterio(tmp_file, chunks={"band": 1})
assert isinstance(da_tiff.data, da.Array)
actual = da_tiff.compute()
assert_allclose(actual, expected)
@requires_cfgrib
def test_dask_distributed_cfgrib_integration_test(loop):
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
with open_example_dataset(
"example.grib", engine="cfgrib", chunks={"time": 1}
) as ds:
with open_example_dataset("example.grib", engine="cfgrib") as expected:
assert isinstance(ds["t"].data, da.Array)
actual = ds.compute()
assert_allclose(actual, expected)
@pytest.mark.skipif(
distributed.__version__ <= "1.19.3",
reason="Need recent distributed version to clean up get",
)
@gen_cluster(client=True, timeout=None)
async def test_async(c, s, a, b):
x = create_test_data()
assert not dask.is_dask_collection(x)
y = x.chunk({"dim2": 4}) + 10
assert dask.is_dask_collection(y)
assert dask.is_dask_collection(y.var1)
assert dask.is_dask_collection(y.var2)
z = y.persist()
assert str(z)
assert dask.is_dask_collection(z)
assert dask.is_dask_collection(z.var1)
assert dask.is_dask_collection(z.var2)
assert len(y.__dask_graph__()) > len(z.__dask_graph__())
assert not futures_of(y)
assert futures_of(z)
future = c.compute(z)
w = await future
assert not dask.is_dask_collection(w)
assert_allclose(x + 10, w)
assert s.tasks
def test_hdf5_lock():
assert isinstance(HDF5_LOCK, dask.utils.SerializableLock)
@gen_cluster(client=True)
async def test_serializable_locks(c, s, a, b):
def f(x, lock=None):
with lock:
return x + 1
# note, the creation of Lock needs to be done inside a cluster
for lock in [
HDF5_LOCK,
Lock(),
Lock("filename.nc"),
CombinedLock([HDF5_LOCK]),
CombinedLock([HDF5_LOCK, Lock("filename.nc")]),
]:
futures = c.map(f, list(range(10)), lock=lock)
await c.gather(futures)
lock2 = pickle.loads(pickle.dumps(lock))
assert type(lock) == type(lock2)
|
import glob
import os
from chainer.dataset import download
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.utils import read_image
class CityscapesTestImageDataset(GetterDataset):
"""Image dataset for test split of `Cityscapes dataset`_.
.. _`Cityscapes dataset`: https://www.cityscapes-dataset.com
.. note::
Please manually download the data because it is not allowed to
re-distribute Cityscapes dataset.
Args:
data_dir (string): Path to the dataset directory. The directory should
contain the :obj:`leftImg8bit` directory. If :obj:`auto` is given,
            it uses :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/cityscapes` by
default.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
"""
def __init__(self, data_dir='auto'):
super(CityscapesTestImageDataset, self).__init__()
if data_dir == 'auto':
data_dir = download.get_dataset_directory(
'pfnet/chainercv/cityscapes')
img_dir = os.path.join(data_dir, os.path.join('leftImg8bit', 'test'))
if not os.path.exists(img_dir):
raise ValueError(
                'Cityscapes dataset does not exist at the expected location. '
                'Please download it from https://www.cityscapes-dataset.com/. '
                'Then place directory leftImg8bit at {}.'.format(
os.path.join(data_dir, 'leftImg8bit')))
self.img_paths = []
for city_dname in sorted(glob.glob(os.path.join(img_dir, '*'))):
for img_path in sorted(glob.glob(
os.path.join(city_dname, '*_leftImg8bit.png'))):
self.img_paths.append(img_path)
self.add_getter('img', self._get_image)
self.keys = 'img' # do not return tuple
def __len__(self):
return len(self.img_paths)
def _get_image(self, i):
return read_image(self.img_paths[i])
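# Minimal usage sketch (assumes the dataset was downloaded manually as
# described above):
#
#   dataset = CityscapesTestImageDataset(data_dir='auto')
#   img = dataset[0]  # (3, H, W) float32 array, RGB, values in [0, 255]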
|
import io
import mock
from pytest import raises
from paasta_tools.cli.fsm import autosuggest
class TestGetSmartstackProxyPortFromFile:
@mock.patch("paasta_tools.cli.fsm.autosuggest.read_etc_services", autospec=True)
def test_multiple_stanzas_per_file(self, mock_read_etc_services):
with mock.patch("builtins.open", autospec=True):
with mock.patch(
"paasta_tools.cli.fsm.autosuggest.yaml", autospec=True
) as mock_yaml:
mock_yaml.safe_load.return_value = {
"main": {"proxy_port": 1},
"foo": {"proxy_port": 2},
}
actual = autosuggest._get_smartstack_proxy_ports_from_file(
"fake_root", "smartstack.yaml"
)
assert actual == {1, 2}
# Shamelessly copied from TestSuggestPort
class TestSuggestSmartstackProxyPort:
@mock.patch("paasta_tools.cli.fsm.autosuggest.read_etc_services", autospec=True)
def test_suggest_smartstack_proxy_port(self, mock_read_etc_services):
yelpsoa_config_root = "fake_yelpsoa_config_root"
walk_return = [
("fake_root1", "fake_dir1", ["smartstack.yaml"]),
("fake_root2", "fake_dir2", ["smartstack.yaml"]),
("fake_root3", "fake_dir3", ["smartstack.yaml"]),
]
mock_walk = mock.Mock(return_value=walk_return)
# See http://www.voidspace.org.uk/python/mock/examples.html#multiple-calls-with-different-effects
get_smartstack_proxy_ports_from_file_returns = [
{20001, 20003},
{20002},
{55555}, # bogus out-of-range value
]
        def get_smartstack_proxy_ports_from_file_side_effect(*args):
            return get_smartstack_proxy_ports_from_file_returns.pop(0)
        mock_get_smartstack_proxy_ports_from_file = mock.Mock(
            side_effect=get_smartstack_proxy_ports_from_file_side_effect
        )
with mock.patch("os.walk", mock_walk, autospec=None):
with mock.patch(
"paasta_tools.cli.fsm.autosuggest._get_smartstack_proxy_ports_from_file",
mock_get_smartstack_proxy_ports_from_file,
autospec=None,
):
actual = autosuggest.suggest_smartstack_proxy_port(
yelpsoa_config_root, range_min=20001, range_max=20004
)
# Sanity check: our mock was called once for each legit port file in
# walk_return
assert mock_get_smartstack_proxy_ports_from_file.call_count == 3
# What we came here for: the actual output of the function under test
assert actual == 20004 # The only available integer in [20001, 20004]
@mock.patch("paasta_tools.cli.fsm.autosuggest.read_etc_services", autospec=True)
def test_suggest_smartstack_proxy_port_too_many_services(
self, mock_read_etc_services
):
"""If all the ports are taken, we should raise an error"""
yelpsoa_config_root = "fake_yelpsoa_config_root"
walk_return = [
("fake_root1", "fake_dir1", ["smartstack.yaml"]),
("fake_root2", "fake_dir2", ["smartstack.yaml"]),
("fake_root3", "fake_dir3", ["smartstack.yaml"]),
]
mock_walk = mock.Mock(return_value=walk_return)
# See http://www.voidspace.org.uk/python/mock/examples.html#multiple-calls-with-different-effects
get_smartstack_proxy_ports_from_file_returns = [
{20001, 20003},
{20002},
{55555}, # bogus out-of-range value
]
        def get_smartstack_proxy_ports_from_file_side_effect(*args):
            return get_smartstack_proxy_ports_from_file_returns.pop(0)
        mock_get_smartstack_proxy_ports_from_file = mock.Mock(
            side_effect=get_smartstack_proxy_ports_from_file_side_effect
        )
with mock.patch("os.walk", mock_walk, autospec=None):
with mock.patch(
"paasta_tools.cli.fsm.autosuggest._get_smartstack_proxy_ports_from_file",
mock_get_smartstack_proxy_ports_from_file,
autospec=None,
):
with raises(Exception) as exc:
autosuggest.suggest_smartstack_proxy_port(
yelpsoa_config_root, range_min=20001, range_max=20003
)
assert (
"There are no more ports available in the range [20001, 20003]"
== str(exc.value)
)
@mock.patch("paasta_tools.cli.fsm.autosuggest.read_etc_services", autospec=True)
def test_get_inuse_ports_from_etc_services_parses_correctly(mock_read_etc_services):
input_services = """
# by IANA and used in the real-world or are needed by a debian package.
# If you need a huge list of used numbers please install the nmap package.
tcpmux 1/tcp # TCP port service multiplexer
echo 7/tcp
echo 7/udp
discard 9/tcp sink null
discard 9/udp sink null
systat 11/tcp users
daytime 13/tcp
"""
mock_read_etc_services.return_value = io.StringIO(input_services)
actual = autosuggest.get_inuse_ports_from_etc_services()
expected = {1, 7, 9, 11, 13}
assert actual == expected
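# Illustrative sketch (not the real paasta_tools implementation): the behaviour
# exercised by the two tests above boils down to collecting the ports already
# claimed by smartstack.yaml files and /etc/services, then returning the lowest
# free port in [range_min, range_max], or raising once the range is exhausted.
def _pick_free_port_sketch(used_ports, range_min, range_max):
    for port in range(range_min, range_max + 1):
        if port not in used_ports:
            return port
    raise Exception(
        "There are no more ports available in the range [%s, %s]"
        % (range_min, range_max)
    )
# With the mocked files above ({20001, 20003}, {20002}, {55555}) and the range
# [20001, 20004], the first free port is 20004, matching the assertion.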
|
__docformat__ = "restructuredtext en"
from cgi import escape
from six.moves import range
from logilab.common.ureports import BaseWriter
class HTMLWriter(BaseWriter):
"""format layouts as HTML"""
def __init__(self, snippet=None):
super(HTMLWriter, self).__init__()
self.snippet = snippet
def handle_attrs(self, layout):
"""get an attribute string from layout member attributes"""
attrs = u''
klass = getattr(layout, 'klass', None)
if klass:
attrs += u' class="%s"' % klass
nid = getattr(layout, 'id', None)
if nid:
attrs += u' id="%s"' % nid
return attrs
def begin_format(self, layout):
"""begin to format a layout"""
super(HTMLWriter, self).begin_format(layout)
if self.snippet is None:
self.writeln(u'<html>')
self.writeln(u'<body>')
def end_format(self, layout):
"""finished to format a layout"""
if self.snippet is None:
self.writeln(u'</body>')
self.writeln(u'</html>')
def visit_section(self, layout):
"""display a section as html, using div + h[section level]"""
self.section += 1
self.writeln(u'<div%s>' % self.handle_attrs(layout))
self.format_children(layout)
self.writeln(u'</div>')
self.section -= 1
def visit_title(self, layout):
"""display a title using <hX>"""
self.write(u'<h%s%s>' % (self.section, self.handle_attrs(layout)))
self.format_children(layout)
self.writeln(u'</h%s>' % self.section)
def visit_table(self, layout):
"""display a table as html"""
self.writeln(u'<table%s>' % self.handle_attrs(layout))
table_content = self.get_table_content(layout)
for i in range(len(table_content)):
row = table_content[i]
if i == 0 and layout.rheaders:
self.writeln(u'<tr class="header">')
elif i+1 == len(table_content) and layout.rrheaders:
self.writeln(u'<tr class="header">')
else:
self.writeln(u'<tr class="%s">' % (i%2 and 'even' or 'odd'))
for j in range(len(row)):
cell = row[j] or u'&#160;'
if (layout.rheaders and i == 0) or \
(layout.cheaders and j == 0) or \
(layout.rrheaders and i+1 == len(table_content)) or \
(layout.rcheaders and j+1 == len(row)):
self.writeln(u'<th>%s</th>' % cell)
else:
self.writeln(u'<td>%s</td>' % cell)
self.writeln(u'</tr>')
self.writeln(u'</table>')
def visit_list(self, layout):
"""display a list as html"""
self.writeln(u'<ul%s>' % self.handle_attrs(layout))
for row in list(self.compute_content(layout)):
self.writeln(u'<li>%s</li>' % row)
self.writeln(u'</ul>')
def visit_paragraph(self, layout):
"""display links (using <p>)"""
self.write(u'<p>')
self.format_children(layout)
self.write(u'</p>')
def visit_span(self, layout):
"""display links (using <p>)"""
self.write(u'<span%s>' % self.handle_attrs(layout))
self.format_children(layout)
self.write(u'</span>')
def visit_link(self, layout):
"""display links (using <a>)"""
self.write(u' <a href="%s"%s>%s</a>' % (layout.url,
self.handle_attrs(layout),
layout.label))
def visit_verbatimtext(self, layout):
"""display verbatim text (using <pre>)"""
self.write(u'<pre>')
self.write(layout.data.replace(u'&', u'&amp;').replace(u'<', u'&lt;'))
self.write(u'</pre>')
def visit_text(self, layout):
"""add some text"""
data = layout.data
if layout.escaped:
data = data.replace(u'&', u'&amp;').replace(u'<', u'&lt;')
self.write(data)
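# Minimal illustration (standalone helper, not part of the logilab API): text
# and verbatim nodes must have '&' and '<' escaped before being written, as
# visit_text and visit_verbatimtext do above, otherwise the emitted HTML would
# be malformed.
def _escape_sketch(text):
    return text.replace(u'&', u'&amp;').replace(u'<', u'&lt;')
# _escape_sketch(u'a < b & c') == u'a &lt; b &amp; c'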
|
import copy
import logging
import multiprocessing
import sys
import urllib3
from http import client as http_client
from paasta_tools.paastaapi.exceptions import ApiValueError
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
'minLength', 'pattern', 'maxItems', 'minItems'
}
class Configuration(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
:param host: Base url
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
:param api_key_prefix: Dict to store API prefix (e.g. Bearer)
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
:param username: Username for HTTP basic authentication
:param password: Password for HTTP basic authentication
:param discard_unknown_keys: Boolean value indicating whether to discard
unknown properties. A server may send a response that includes additional
properties that are not known by the client in the following scenarios:
1. The OpenAPI document is incomplete, i.e. it does not match the server
implementation.
2. The client was generated using an older version of the OpenAPI document
and the server has been upgraded since then.
If a schema in the OpenAPI document defines the additionalProperties attribute,
then all undeclared properties received by the server are injected into the
additional properties map. In that case, there are undeclared properties, and
nothing to discard.
:param disabled_client_side_validations (string): Comma-separated list of
JSON schema validation keywords to disable JSON schema structural validation
rules. The following keywords may be specified: multipleOf, maximum,
exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
maxItems, minItems.
By default, the validation is performed for data generated locally by the client
and data received from the server, independent of any validation performed by
the server side. If the input data does not satisfy the JSON schema validation
rules specified in the OpenAPI document, an exception is raised.
If disabled_client_side_validations is set, structural validation is
disabled. This can be useful to troubleshoot data validation problems, such as
when the OpenAPI document validation rules do not match the actual API data
received by the server.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
variables with defined enum values before.
:param server_operation_index: Mapping from operation ID to an index to server
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
The validation of enums is performed for variables with defined enum values before.
"""
_default = None
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
username=None, password=None,
discard_unknown_keys=False,
disabled_client_side_validations="",
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
):
"""Constructor
"""
self._base_path = "http://localhost/v1" if host is None else host
"""Default Base url
"""
self.server_index = 0 if server_index is None and host is None else server_index
self.server_operation_index = server_operation_index or {}
"""Default server index
"""
self.server_variables = server_variables or {}
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
# Authentication Settings
self.api_key = {}
if api_key:
self.api_key = api_key
"""dict to store API key(s)
"""
self.api_key_prefix = {}
if api_key_prefix:
self.api_key_prefix = api_key_prefix
"""dict to store API prefix (e.g. Bearer)
"""
self.refresh_api_key_hook = None
"""function hook to refresh API key if expired
"""
self.username = username
"""Username for HTTP basic authentication
"""
self.password = password
"""Password for HTTP basic authentication
"""
self.discard_unknown_keys = discard_unknown_keys
self.disabled_client_side_validations = disabled_client_side_validations
self.logger = {}
"""Logging Settings
"""
self.logger["package_logger"] = logging.getLogger("paasta_tools.paastaapi")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
"""Log format
"""
self.logger_stream_handler = None
"""Log stream handler
"""
self.logger_file_handler = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
self.debug = False
"""Debug switch
"""
self.verify_ssl = True
"""SSL/TLS verification
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
self.ssl_ca_cert = None
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
"""client certificate file
"""
self.key_file = None
"""client key file
"""
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
per pool. urllib3 uses 1 connection as default value, but this is
not the best value when you are making a lot of possibly parallel
requests to the same host, which is often the case here.
cpu_count * 5 is used as default value to increase performance.
"""
self.proxy = None
"""Proxy URL
"""
self.proxy_headers = None
"""Proxy headers
"""
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
self.retries = None
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k not in ('logger', 'logger_file_handler'):
setattr(result, k, copy.deepcopy(v, memo))
# shallow copy of loggers
result.logger = copy.copy(self.logger)
# use setters to configure loggers
result.logger_file = self.logger_file
result.debug = self.debug
return result
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
if name == 'disabled_client_side_validations':
s = set(filter(None, value.split(',')))
for v in s:
if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
raise ApiValueError(
"Invalid keyword: '{0}''".format(v))
self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
"""Set default instance of configuration.
It stores default configuration, which can be
returned by get_default_copy method.
:param default: object of Configuration
"""
cls._default = copy.deepcopy(default)
@classmethod
def get_default_copy(cls):
"""Return new instance of configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
configuration passed by the set_default method.
:return: The configuration object.
"""
if cls._default is not None:
return copy.deepcopy(cls._default)
return Configuration()
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If a log file is set,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in self.logger.items():
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
# turn on http_client debug
http_client.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
# turn off http_client debug
http_client.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter is updated whenever logger_format is set.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter is updated whenever logger_format is set.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier, alias=None):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:param alias: The alternative identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook is not None:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
else:
return key
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
username = ""
if self.username is not None:
username = self.username
password = ""
if self.password is not None:
password = self.password
return urllib3.util.make_headers(
basic_auth=username + ':' + password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
auth = {}
return auth
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 1.0.0\n"\
"SDK Package Version: 1.0.0".\
format(env=sys.platform, pyversion=sys.version)
def get_host_settings(self):
"""Gets an array of host settings
:return: An array of host settings
"""
return [
{
'url': "{scheme}://{host}/{basePath}",
'description': "No description provided",
'variables': {
'basePath': {
'description': "No description provided",
'default_value': "v1",
},
'host': {
'description': "No description provided",
'default_value': "localhost",
},
'scheme': {
'description': "No description provided",
'default_value': "http",
'enum_values': [
"http",
"https"
]
}
}
}
]
def get_host_from_settings(self, index, variables=None, servers=None):
"""Gets host URL based on the index and variables
:param index: array index of the host settings
:param variables: hash of variable and the corresponding value
:param servers: an array of host settings or None
:return: URL based on host settings
"""
if index is None:
return self._base_path
variables = {} if variables is None else variables
servers = self.get_host_settings() if servers is None else servers
try:
server = servers[index]
except IndexError:
raise ValueError(
"Invalid index {0} when selecting the host settings. "
"Must be less than {1}".format(index, len(servers)))
url = server['url']
# go through variables and replace placeholders
for variable_name, variable in server.get('variables', {}).items():
used_value = variables.get(
variable_name, variable['default_value'])
if 'enum_values' in variable \
and used_value not in variable['enum_values']:
raise ValueError(
"The variable `{0}` in the host URL has invalid value "
"{1}. Must be {2}.".format(
variable_name, used_value,
variable['enum_values']))
url = url.replace("{" + variable_name + "}", used_value)
return url
@property
def host(self):
"""Return generated host."""
return self.get_host_from_settings(self.server_index, variables=self.server_variables)
@host.setter
def host(self, value):
"""Fix base path."""
self._base_path = value
self.server_index = None
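# Illustrative usage sketch (assumption: the values below are placeholders, not
# defaults shipped with paasta_tools); it only exercises the public knobs
# defined on Configuration above and never runs on import.
if __name__ == "__main__":
    conf = Configuration(host="http://localhost/v1")
    # Selected JSON schema keywords can be excluded from client-side checks;
    # unknown keywords raise ApiValueError (see __setattr__ above).
    conf.disabled_client_side_validations = "maxLength,minLength"
    # Turning on debug switches the package loggers to DEBUG and enables
    # http.client request tracing.
    conf.debug = True
    # Hosts can also be built from the templated server settings.
    print(conf.get_host_from_settings(0, variables={"scheme": "https"}))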
|
import asyncio
from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN, SUPPORTED_PLATFORMS
from .onewirehub import CannotConnect, OneWireHub
async def async_setup(hass, config):
"""Set up 1-Wire integrations."""
return True
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Set up a 1-Wire proxy for a config entry."""
hass.data.setdefault(DOMAIN, {})
onewirehub = OneWireHub(hass)
try:
await onewirehub.initialize(config_entry)
except CannotConnect as exc:
raise ConfigEntryNotReady() from exc
hass.data[DOMAIN][config_entry.unique_id] = onewirehub
for component in SUPPORTED_PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in SUPPORTED_PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.unique_id)
return unload_ok
|
from bravia_tv.braviarc import NoIPControl
from homeassistant import data_entry_flow
from homeassistant.components.braviatv.const import CONF_IGNORED_SOURCES, DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
BRAVIA_SYSTEM_INFO = {
"product": "TV",
"region": "XEU",
"language": "pol",
"model": "TV-Model",
"serial": "serial_number",
"macAddr": "AA:BB:CC:DD:EE:FF",
"name": "BRAVIA",
"generation": "5.2.0",
"area": "POL",
"cid": "very_unique_string",
}
BRAVIA_SOURCE_LIST = {
"HDMI 1": "extInput:hdmi?port=1",
"HDMI 2": "extInput:hdmi?port=2",
"HDMI 3/ARC": "extInput:hdmi?port=3",
"HDMI 4": "extInput:hdmi?port=4",
"AV/Component": "extInput:component?port=1",
}
IMPORT_CONFIG_HOSTNAME = {CONF_HOST: "bravia-host", CONF_PIN: "1234"}
IMPORT_CONFIG_IP = {CONF_HOST: "10.10.10.12", CONF_PIN: "1234"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
async def test_import(hass):
"""Test that the import works."""
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch(
"bravia_tv.BraviaRC.get_system_info", return_value=BRAVIA_SYSTEM_INFO
), patch(
"homeassistant.components.braviatv.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_CONFIG_HOSTNAME
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == "very_unique_string"
assert result["title"] == "TV-Model"
assert result["data"] == {
CONF_HOST: "bravia-host",
CONF_PIN: "1234",
CONF_MAC: "AA:BB:CC:DD:EE:FF",
}
async def test_import_cannot_connect(hass):
"""Test that errors are shown when cannot connect to the host during import."""
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=False
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_CONFIG_HOSTNAME
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
async def test_import_model_unsupported(hass):
"""Test that errors are shown when the TV is not supported during import."""
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch("bravia_tv.BraviaRC.get_system_info", return_value={}):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_CONFIG_IP
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "unsupported_model"
async def test_import_no_ip_control(hass):
"""Test that errors are shown when IP Control is disabled on the TV during import."""
with patch("bravia_tv.BraviaRC.connect", side_effect=NoIPControl("No IP Control")):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_CONFIG_IP
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_ip_control"
async def test_import_duplicate_error(hass):
"""Test that errors are shown when duplicates are added during import."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="very_unique_string",
data={
CONF_HOST: "bravia-host",
CONF_PIN: "1234",
CONF_MAC: "AA:BB:CC:DD:EE:FF",
},
title="TV-Model",
)
config_entry.add_to_hass(hass)
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch("bravia_tv.BraviaRC.get_system_info", return_value=BRAVIA_SYSTEM_INFO):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=IMPORT_CONFIG_HOSTNAME
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_user_invalid_host(hass):
"""Test that errors are shown when the host is invalid."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "invalid/host"}
)
assert result["errors"] == {CONF_HOST: "invalid_host"}
async def test_authorize_cannot_connect(hass):
"""Test that errors are shown when cannot connect to host at the authorize step."""
with patch("bravia_tv.BraviaRC.connect", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "bravia-host"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PIN: "1234"}
)
assert result["errors"] == {"base": "cannot_connect"}
async def test_authorize_model_unsupported(hass):
"""Test that errors are shown when the TV is not supported at the authorize step."""
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch("bravia_tv.BraviaRC.get_system_info", return_value={}):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "10.10.10.12"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PIN: "1234"}
)
assert result["errors"] == {"base": "unsupported_model"}
async def test_authorize_no_ip_control(hass):
"""Test that errors are shown when IP Control is disabled on the TV."""
with patch("bravia_tv.BraviaRC.connect", side_effect=NoIPControl("No IP Control")):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "bravia-host"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_ip_control"
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="very_unique_string",
data={
CONF_HOST: "bravia-host",
CONF_PIN: "1234",
CONF_MAC: "AA:BB:CC:DD:EE:FF",
},
title="TV-Model",
)
config_entry.add_to_hass(hass)
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch("bravia_tv.BraviaRC.get_system_info", return_value=BRAVIA_SYSTEM_INFO):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "bravia-host"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PIN: "1234"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_create_entry(hass):
"""Test that the user step works."""
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch(
"bravia_tv.BraviaRC.get_system_info", return_value=BRAVIA_SYSTEM_INFO
), patch(
"homeassistant.components.braviatv.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "bravia-host"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "authorize"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PIN: "1234"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == "very_unique_string"
assert result["title"] == "TV-Model"
assert result["data"] == {
CONF_HOST: "bravia-host",
CONF_PIN: "1234",
CONF_MAC: "AA:BB:CC:DD:EE:FF",
}
async def test_options_flow(hass):
"""Test config flow options."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="very_unique_string",
data={
CONF_HOST: "bravia-host",
CONF_PIN: "1234",
CONF_MAC: "AA:BB:CC:DD:EE:FF",
},
title="TV-Model",
)
config_entry.add_to_hass(hass)
with patch("bravia_tv.BraviaRC.connect", return_value=True), patch(
"bravia_tv.BraviaRC.is_connected", return_value=True
), patch("bravia_tv.BraviaRC.get_system_info", return_value=BRAVIA_SYSTEM_INFO):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch("bravia_tv.BraviaRC.load_source_list", return_value=BRAVIA_SOURCE_LIST):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={CONF_IGNORED_SOURCES: ["HDMI 1", "HDMI 2"]}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {CONF_IGNORED_SOURCES: ["HDMI 1", "HDMI 2"]}
|
import json
from django.conf import settings
from weblate.machinery.base import (
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
GOOGLE_API_ROOT = "https://translation.googleapis.com/language/translate/v2/"
class GoogleBaseTranslation(MachineTranslation):
# Map codes used by Google to the ones used by Weblate
language_map = {
"nb_NO": "no",
"fil": "tl",
"zh_Hant": "zh-TW",
"zh_Hans": "zh-CN",
}
def map_language_code(self, code):
"""Convert language to service specific code."""
return super().map_language_code(code).replace("_", "-").split("@")[0]
class GoogleTranslation(GoogleBaseTranslation):
"""Google Translate API v2 machine translation support."""
name = "Google Translate"
max_score = 90
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_GOOGLE_KEY is None:
raise MissingConfiguration("Google Translate requires API key")
def download_languages(self):
"""List of supported languages."""
response = self.request(
"get", GOOGLE_API_ROOT + "languages", params={"key": settings.MT_GOOGLE_KEY}
)
payload = response.json()
if "error" in payload:
raise MachineTranslationError(payload["error"]["message"])
return [d["language"] for d in payload["data"]["languages"]]
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
response = self.request(
"get",
GOOGLE_API_ROOT,
params={
"key": settings.MT_GOOGLE_KEY,
"q": text,
"source": source,
"target": language,
"format": "text",
},
)
payload = response.json()
if "error" in payload:
raise MachineTranslationError(payload["error"]["message"])
translation = payload["data"]["translations"][0]["translatedText"]
yield {
"text": translation,
"quality": self.max_score,
"service": self.name,
"source": text,
}
def get_error_message(self, exc):
if hasattr(exc, "read"):
content = exc.read()
try:
data = json.loads(content)
return data["error"]["message"]
except Exception:
pass
return super().get_error_message(exc)
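# Illustrative sketch (assumptions: the base class applies language_map before
# this transformation, and this standalone helper is not part of Weblate). It
# mirrors the rule in GoogleBaseTranslation.map_language_code: apply explicit
# overrides first, then replace "_" with "-" and drop any "@variant" suffix.
def _map_code_sketch(code, overrides):
    code = overrides.get(code, code)
    return code.replace("_", "-").split("@")[0]
# _map_code_sketch("zh_Hant", GoogleBaseTranslation.language_map) -> "zh-TW"
# _map_code_sketch("pt_BR", {}) -> "pt-BR"
# _map_code_sketch("ca@valencia", {}) -> "ca"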
|
import argparse
import json
from .const import CORE_PROJECT_ID, FRONTEND_DIR, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .lokalise import get_api
from .util import get_base_arg_parser
def get_arguments() -> argparse.Namespace:
"""Get parsed passed in arguments."""
parser = get_base_arg_parser()
parser.add_argument(
"--target",
type=str,
default="core",
choices=["core", "frontend"],
)
return parser.parse_args()
def find_extra(base, translations, path_prefix, missing_keys):
"""Find all keys that are in translations but not in base."""
for key, value in translations.items():
cur_path = f"{path_prefix}::{key}" if path_prefix else key
# Value is either a dict or a string
if isinstance(value, dict):
base_search = None if base is None else base.get(key)
find_extra(base_search, value, cur_path, missing_keys)
elif base is None or key not in base:
missing_keys.append(cur_path)
def find_core():
"""Find all missing keys in core."""
missing_keys = []
for int_dir in INTEGRATIONS_DIR.iterdir():
strings = int_dir / "strings.json"
if not strings.is_file():
continue
translations = int_dir / "translations" / "en.json"
strings_json = json.loads(strings.read_text())
if translations.is_file():
translations_json = json.loads(translations.read_text())
else:
translations_json = {}
find_extra(
strings_json, translations_json, f"component::{int_dir.name}", missing_keys
)
return missing_keys
def find_frontend():
"""Find all missing keys in frontend."""
if not FRONTEND_DIR.is_dir():
raise ExitApp(f"Unable to find frontend at {FRONTEND_DIR}")
source = FRONTEND_DIR / "src/translations/en.json"
translated = FRONTEND_DIR / "translations/en.json"
missing_keys = []
find_extra(
json.loads(source.read_text()),
json.loads(translated.read_text()),
"",
missing_keys,
)
return missing_keys
def run():
"""Clean translations."""
args = get_arguments()
if args.target == "frontend":
missing_keys = find_frontend()
lokalise = get_api(FRONTEND_PROJECT_ID)
else:
missing_keys = find_core()
lokalise = get_api(CORE_PROJECT_ID)
if not missing_keys:
print("No missing translations!")
return 0
key_data = lokalise.keys_list(
{"filter_keys": ",".join(missing_keys), "limit": 1000}
)
if len(key_data) != len(missing_keys):
print(
f"Lookin up key in Lokalise returns {len(key_data)} results, expected {len(missing_keys)}"
)
return 1
print(f"Deleting {len(missing_keys)} keys:")
for key in missing_keys:
print(" -", key)
print()
while input("Type YES to delete these keys: ") != "YES":
pass
print(lokalise.keys_delete_multiple([key["key_id"] for key in key_data]))
return 0
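# Illustrative sketch (guarded so it never runs as part of the real CLI): how
# find_extra reports keys present in the translations but missing from the base
# strings, using the "::" path separator built up above.
if __name__ == "__main__":
    extra = []
    find_extra(
        {"config": {"step": {"user": "User"}}},
        {"config": {"step": {"user": "User", "stale": "Old"}}},
        "component::demo",
        extra,
    )
    assert extra == ["component::demo::config::step::stale"]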
|
from __future__ import division
import numpy as np
import unittest
import chainer
from chainer.datasets import TupleDataset
from chainer.iterators import SerialIterator
from chainer import testing
from chainercv.extensions import SemanticSegmentationEvaluator
from chainercv.utils.testing import attr
from chainermn import create_communicator
class _SemanticSegmentationStubLink(chainer.Link):
def __init__(self, labels, initial_count=0):
super(_SemanticSegmentationStubLink, self).__init__()
self.count = initial_count
self.labels = labels
def predict(self, imgs):
n_img = len(imgs)
labels = self.labels[self.count:self.count + n_img]
self.count += n_img
return labels
class TestSemanticSegmentationEvaluator(unittest.TestCase):
def setUp(self):
self.label_names = ('a', 'b', 'c')
imgs = np.random.uniform(size=(1, 3, 2, 3))
# There are labels for 'a' and 'b', but none for 'c'.
pred_labels = np.array([[[1, 1, 1], [0, 0, 1]]])
gt_labels = np.array([[[1, 0, 0], [0, -1, 1]]])
self.iou_a = 1 / 3
self.iou_b = 2 / 4
self.pixel_accuracy = 3 / 5
self.class_accuracy_a = 1 / 3
self.class_accuracy_b = 2 / 2
self.miou = np.mean((self.iou_a, self.iou_b))
self.mean_class_accuracy = np.mean(
(self.class_accuracy_a, self.class_accuracy_b))
self.dataset = TupleDataset(imgs, gt_labels)
self.link = _SemanticSegmentationStubLink(pred_labels)
self.iterator = SerialIterator(
self.dataset, 5, repeat=False, shuffle=False)
self.evaluator = SemanticSegmentationEvaluator(
self.iterator, self.link, self.label_names)
def test_evaluate(self):
reporter = chainer.Reporter()
reporter.add_observer('main', self.link)
with reporter:
eval_ = self.evaluator.evaluate()
# No observation is reported to the current reporter. Instead the
# evaluator collects results in order to calculate their mean.
np.testing.assert_equal(len(reporter.observation), 0)
np.testing.assert_equal(eval_['main/miou'], self.miou)
np.testing.assert_equal(eval_['main/pixel_accuracy'],
self.pixel_accuracy)
np.testing.assert_equal(eval_['main/mean_class_accuracy'],
self.mean_class_accuracy)
np.testing.assert_equal(eval_['main/iou/a'], self.iou_a)
np.testing.assert_equal(eval_['main/iou/b'], self.iou_b)
np.testing.assert_equal(eval_['main/iou/c'], np.nan)
np.testing.assert_equal(eval_['main/class_accuracy/a'],
self.class_accuracy_a)
np.testing.assert_equal(eval_['main/class_accuracy/b'],
self.class_accuracy_b)
np.testing.assert_equal(eval_['main/class_accuracy/c'], np.nan)
def test_call(self):
eval_ = self.evaluator()
# main is used as default
np.testing.assert_equal(eval_['main/miou'], self.miou)
np.testing.assert_equal(eval_['main/pixel_accuracy'],
self.pixel_accuracy)
np.testing.assert_equal(eval_['main/mean_class_accuracy'],
self.mean_class_accuracy)
np.testing.assert_equal(eval_['main/iou/a'], self.iou_a)
np.testing.assert_equal(eval_['main/iou/b'], self.iou_b)
np.testing.assert_equal(eval_['main/iou/c'], np.nan)
np.testing.assert_equal(eval_['main/class_accuracy/a'],
self.class_accuracy_a)
np.testing.assert_equal(eval_['main/class_accuracy/b'],
self.class_accuracy_b)
np.testing.assert_equal(eval_['main/class_accuracy/c'], np.nan)
def test_evaluator_name(self):
self.evaluator.name = 'eval'
eval_ = self.evaluator()
# name is used as a prefix
np.testing.assert_equal(eval_['eval/main/miou'], self.miou)
np.testing.assert_equal(eval_['eval/main/pixel_accuracy'],
self.pixel_accuracy)
np.testing.assert_equal(eval_['eval/main/mean_class_accuracy'],
self.mean_class_accuracy)
np.testing.assert_equal(eval_['eval/main/iou/a'], self.iou_a)
np.testing.assert_equal(eval_['eval/main/iou/b'], self.iou_b)
np.testing.assert_equal(eval_['eval/main/iou/c'], np.nan)
np.testing.assert_equal(eval_['eval/main/class_accuracy/a'],
self.class_accuracy_a)
np.testing.assert_equal(eval_['eval/main/class_accuracy/b'],
self.class_accuracy_b)
np.testing.assert_equal(eval_['eval/main/class_accuracy/c'], np.nan)
def test_current_report(self):
reporter = chainer.Reporter()
with reporter:
eval_ = self.evaluator()
# The result is reported to the current reporter.
np.testing.assert_equal(reporter.observation, eval_)
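# Worked example (illustrative): how the expected numbers in setUp above follow
# from pred_labels and gt_labels once the -1 ("ignore") pixel is masked out:
#   valid pixels: 5, correctly labelled: 3            -> pixel_accuracy = 3 / 5
#   label 0 ('a'): |gt & pred| = 1, |gt | pred| = 3   -> iou_a = 1 / 3
#   label 1 ('b'): |gt & pred| = 2, |gt | pred| = 4   -> iou_b = 2 / 4
#   class accuracy: 1 of 3 gt pixels for 'a', 2 of 2 for 'b'
def _iou_worked_example(pred, gt, label):
    """Recompute the IoU of one label with numpy (sketch, unused by the tests)."""
    valid = gt != -1
    inter = np.logical_and(pred == label, gt == label) & valid
    union = np.logical_or(pred == label, gt == label) & valid
    return inter.sum() / union.sum()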
@attr.mpi
class TestSemanticSegmentationEvaluatorMPI(unittest.TestCase):
def setUp(self):
self.comm = create_communicator('naive')
batchsize_per_process = 5
batchsize = batchsize_per_process * self.comm.size
if self.comm.rank == 0:
labels = [np.random.choice(
np.arange(3, dtype=np.int32), size=(32, 48))
for _ in range(10)]
else:
labels = None
initial_count = self.comm.rank * batchsize_per_process
labels = self.comm.bcast_obj(labels)
self.labels = labels
self.dataset = TupleDataset(
np.random.uniform(size=(10, 3, 32, 48)),
labels)
self.initial_count = initial_count
self.batchsize = batchsize
def test_consistency(self):
reporter = chainer.Reporter()
if self.comm.rank == 0:
multi_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
else:
multi_iterator = None
multi_link = _SemanticSegmentationStubLink(
self.labels, self.initial_count)
multi_evaluator = SemanticSegmentationEvaluator(
multi_iterator, multi_link,
label_names=('cls0', 'cls1', 'cls2'),
comm=self.comm)
reporter.add_observer('target', multi_link)
with reporter:
multi_mean = multi_evaluator.evaluate()
if self.comm.rank != 0:
self.assertEqual(multi_mean, {})
return
single_iterator = SerialIterator(
self.dataset, self.batchsize, repeat=False, shuffle=False)
single_link = _SemanticSegmentationStubLink(
self.labels)
single_evaluator = SemanticSegmentationEvaluator(
single_iterator, single_link,
label_names=('cls0', 'cls1', 'cls2'))
reporter.add_observer('target', single_link)
with reporter:
single_mean = single_evaluator.evaluate()
self.assertEqual(set(multi_mean.keys()), set(single_mean.keys()))
for key in multi_mean.keys():
np.testing.assert_equal(single_mean[key], multi_mean[key])
testing.run_module(__name__, __file__)
|
from datetime import datetime
from functools import partial
import voluptuous as vol
from homeassistant.const import CONF_AT, CONF_PLATFORM
from homeassistant.core import HassJob, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import (
async_track_point_in_time,
async_track_state_change_event,
async_track_time_change,
)
import homeassistant.util.dt as dt_util
# mypy: allow-untyped-defs, no-check-untyped-defs
_TIME_TRIGGER_SCHEMA = vol.Any(
cv.time,
vol.All(str, cv.entity_domain("input_datetime")),
msg="Expected HH:MM, HH:MM:SS or Entity ID from domain 'input_datetime'",
)
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "time",
vol.Required(CONF_AT): vol.All(cv.ensure_list, [_TIME_TRIGGER_SCHEMA]),
}
)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
entities = {}
removes = []
job = HassJob(action)
@callback
def time_automation_listener(description, now, *, entity_id=None):
"""Listen for time changes and calls action."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "time",
"now": now,
"description": description,
"entity_id": entity_id,
}
},
)
@callback
def update_entity_trigger_event(event):
"""update_entity_trigger from the event."""
return update_entity_trigger(event.data["entity_id"], event.data["new_state"])
@callback
def update_entity_trigger(entity_id, new_state=None):
"""Update the entity trigger for the entity_id."""
# If a listener was already set up for entity, remove it.
remove = entities.get(entity_id)
if remove:
remove()
removes.remove(remove)
remove = None
# Check state of entity. If valid, set up a listener.
if new_state:
has_date = new_state.attributes["has_date"]
if has_date:
year = new_state.attributes["year"]
month = new_state.attributes["month"]
day = new_state.attributes["day"]
has_time = new_state.attributes["has_time"]
if has_time:
hour = new_state.attributes["hour"]
minute = new_state.attributes["minute"]
second = new_state.attributes["second"]
else:
# If no time then use midnight.
hour = minute = second = 0
if has_date:
# If input_datetime has date, then track point in time.
trigger_dt = dt_util.DEFAULT_TIME_ZONE.localize(
datetime(year, month, day, hour, minute, second)
)
# Only set up listener if time is now or in the future.
if trigger_dt >= dt_util.now():
remove = async_track_point_in_time(
hass,
partial(
time_automation_listener,
f"time set in {entity_id}",
entity_id=entity_id,
),
trigger_dt,
)
elif has_time:
# Else if it has time, then track time change.
remove = async_track_time_change(
hass,
partial(
time_automation_listener,
f"time set in {entity_id}",
entity_id=entity_id,
),
hour=hour,
minute=minute,
second=second,
)
# Was a listener set up?
if remove:
removes.append(remove)
entities[entity_id] = remove
for at_time in config[CONF_AT]:
if isinstance(at_time, str):
# input_datetime entity
update_entity_trigger(at_time, new_state=hass.states.get(at_time))
else:
# datetime.time
removes.append(
async_track_time_change(
hass,
partial(time_automation_listener, "time"),
hour=at_time.hour,
minute=at_time.minute,
second=at_time.second,
)
)
# Track state changes of any entities.
removes.append(
async_track_state_change_event(
hass, list(entities), update_entity_trigger_event
)
)
@callback
def remove_track_time_changes():
"""Remove tracked time changes."""
for remove in removes:
remove()
return remove_track_time_changes
|
import diamond.collector
import diamond.convertor
import os
import subprocess
class UserScriptsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(UserScriptsCollector,
self).get_default_config_help()
config_help.update({
'scripts_path': "Path to find the scripts to run",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(UserScriptsCollector, self).get_default_config()
config.update({
'path': '.',
'scripts_path': '/etc/diamond/user_scripts/',
'floatprecision': 4,
})
return config
def collect(self):
scripts_path = self.config['scripts_path']
if not os.access(scripts_path, os.R_OK):
return None
for script in os.listdir(scripts_path):
absolutescriptpath = os.path.join(scripts_path, script)
executable = os.access(absolutescriptpath, os.X_OK)
is_file = os.path.isfile(absolutescriptpath)
if is_file:
if not executable:
self.log.info("%s is not executable" % absolutescriptpath)
continue
else:
# Don't bother logging skipped non-file files (typically
# directories)
continue
out = None
self.log.debug("Executing %s" % absolutescriptpath)
try:
proc = subprocess.Popen([absolutescriptpath],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = proc.communicate()
except subprocess.CalledProcessError as e:
self.log.error("%s error launching: %s; skipping" %
(absolutescriptpath, e))
continue
if proc.returncode:
self.log.error("%s return exit value %s; skipping" %
(absolutescriptpath, proc.returncode))
if not out:
self.log.info("%s return no output" % absolutescriptpath)
continue
if err:
self.log.error("%s returned error output (stderr): %s" %
(absolutescriptpath, err))
# Use filter to remove empty lines of output
for line in filter(None, out.split('\n')):
# Ignore invalid lines
try:
name, value = line.split()
float(value)
except ValueError:
self.log.error("%s returned invalid/unparsable output: %s" %
(absolutescriptpath, line))
continue
name, value = line.split()
floatprecision = 0
if "." in value:
floatprecision = self.config['floatprecision']
self.publish(name, value, precision=floatprecision)
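# Example user script (illustrative only; a real script would live as its own
# executable file under scripts_path, e.g. /etc/diamond/user_scripts/example).
# Anything executable that prints "metric_name value" lines is picked up by
# collect() above; values must parse as floats, other lines are logged and
# skipped.
#
#   #!/usr/bin/env python
#   print("example.temperature 21.5")
#   print("example.requests 42")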
|
import warnings
from urllib.parse import parse_qsl, urlparse
import gmusicapi
from gmusicapi.clients.shared import _Base
from gmusicapi.exceptions import GmusicapiWarning
from gmusicapi.protocol import webclient
from gmusicapi.utils import utils
import gmusicapi.session
class Webclient(_Base):
"""Allows library management and streaming by posing as the
music.google.com webclient.
Uploading is not supported by this client (use the :class:`Musicmanager`
to upload).
Any methods in this class that are duplicated by
the :class:`Mobileclient` are deprecated, and will generate a
warning at runtime.
The following methods are *not* deprecated:
* :func:`get_shared_playlist_info`
* :func:`get_song_download_info`
* :func:`get_stream_urls`
* :func:`get_stream_audio`
* :func:`report_incorrect_match`
* :func:`upload_album_art`
"""
_session_class = gmusicapi.session.Webclient
def __init__(self, debug_logging=True, validate=True, verify_ssl=True):
warnings.warn(
"Webclient functionality is not tested nor well supported. "
"Use Mobileclient or Musicmanager if possible.",
GmusicapiWarning
)
super().__init__(self.__class__.__name__,
debug_logging,
validate,
verify_ssl)
def login(self, email, password):
"""Authenticates the webclient.
Returns ``True`` on success, ``False`` on failure.
:param email: eg ``'[email protected]'`` or just ``'test'``.
:param password: the account's password.
This is not stored locally, and is sent securely over SSL.
App-specific passwords are not supported on the webclient.
Users who don't use two-factor auth will likely need to enable
`less secure login <https://www.google.com/settings/security/lesssecureapps>`__.
If this is needed, a warning will be logged during login (which will print to stderr
in the default logging configuration).
"""
if not self.session.login(email, password):
self.logger.info("failed to authenticate")
return False
self.logger.info("authenticated")
return True
def logout(self):
return super().logout()
def get_shared_playlist_info(self, share_token):
"""
Returns a dictionary with four keys: author, description, num_tracks, and title.
:param share_token: from ``playlist['shareToken']``, or a playlist share
url (``https://play.google.com/music/playlist/<token>``).
Note that tokens from urls will need to be url-decoded,
eg ``AM...%3D%3D`` becomes ``AM...==``.
"""
res = self._make_call(webclient.GetSharedPlaylist, '', share_token)
num_tracks = len(res[1][0])
md = res[1][1]
return {
'author': md[8],
'description': md[7],
'num_tracks': num_tracks,
'title': md[1],
}
@utils.enforce_id_param
def get_song_download_info(self, song_id):
"""Returns a tuple: ``('<url>', <download count>)``.
:param song_id: a single song id.
``url`` will be ``None`` if the download limit is exceeded.
GM allows 2 downloads per song. The download count may not always be accurate,
and the 2 download limit seems to be loosely enforced.
This call alone does not count towards a download -
the count is incremented when ``url`` is retrieved.
"""
# TODO the protocol expects a list of songs - could extend with accept_singleton
info = self._make_call(webclient.GetDownloadInfo, [song_id])
url = info.get('url')
return (url, info["downloadCounts"][song_id])
@utils.enforce_id_param
def get_stream_urls(self, song_id):
"""Returns a list of urls that point to a streamable version of this song.
If you just need the audio and are ok with gmusicapi doing the download,
consider using :func:`get_stream_audio` instead.
This abstracts away the differences between different kinds of tracks:
* normal tracks return a single url
* All Access tracks return multiple urls, which must be combined
:param song_id: a single song id.
While acquiring the urls requires authentication, retrieving the
contents does not.
However, there are limitations on how the stream urls can be used:
* the urls expire after a minute
* only one IP can be streaming music at once.
Other attempts will get an http 403 with
``X-Rejected-Reason: ANOTHER_STREAM_BEING_PLAYED``.
*This is only intended for streaming*. The streamed audio does not contain metadata.
Use :func:`get_song_download_info` or :func:`Musicmanager.download_song
<gmusicapi.clients.Musicmanager.download_song>`
to download files with metadata.
"""
res = self._make_call(webclient.GetStreamUrl, song_id)
try:
return [res['url']]
except KeyError:
return res['urls']
@utils.enforce_id_param
def get_stream_audio(self, song_id, use_range_header=None):
"""Returns a bytestring containing mp3 audio for this song.
:param song_id: a single song id
:param use_range_header: in some cases, an HTTP range header can be
used to save some bandwidth.
However, there's no guarantee that the server will respect it,
meaning that the client may get back an unexpected response when
using it.
There are three possible values for this argument:
* None: (default) send header; fix response locally on problems
* True: send header; raise OSError on problems
* False: do not send header
"""
urls = self.get_stream_urls(song_id)
# TODO shouldn't session.send be used throughout?
if len(urls) == 1:
return self.session._rsession.get(urls[0]).content
# AA tracks are separated into multiple files.
# the url contains the range of each file to be used.
range_pairs = [[int(s) for s in val.split('-')]
for url in urls
for key, val in parse_qsl(urlparse(url)[4])
if key == 'range']
stream_pieces = bytearray()
prev_end = 0
headers = None
for url, (start, end) in zip(urls, range_pairs):
if use_range_header or use_range_header is None:
headers = {'Range': 'bytes=' + str(prev_end - start) + '-'}
audio = self.session._rsession.get(url, headers=headers).content
if end - prev_end != len(audio) - 1:
# content length is not in the right range
if use_range_header:
# the user didn't want automatic response fixup
raise OSError('use_range_header is True but the response'
' was not the correct content length.'
' This might be caused by a (poorly-written) http proxy.')
# trim to the proper range
audio = audio[prev_end - start:]
stream_pieces.extend(audio)
prev_end = end + 1
return bytes(stream_pieces)
@utils.accept_singleton(str)
@utils.enforce_ids_param
@utils.empty_arg_shortcircuit
def report_incorrect_match(self, song_ids):
"""Equivalent to the 'Fix Incorrect Match' button, this requests re-uploading of songs.
Returns the song_ids provided.
:param song_ids: a list of song ids to report, or a single song id.
Note that if you uploaded a song through gmusicapi, it won't be reuploaded
automatically - this currently only works for songs uploaded with the Music Manager.
See issue `#89 <https://github.com/simon-weber/gmusicapi/issues/89>`__.
This should only be used on matched tracks (``song['type'] == 6``).
"""
self._make_call(webclient.ReportBadSongMatch, song_ids)
return song_ids
@utils.accept_singleton(str)
@utils.enforce_ids_param
@utils.empty_arg_shortcircuit
def upload_album_art(self, song_ids, image_filepath):
"""Uploads an image and sets it as the album art for songs.
Returns a url to the image on Google's servers.
:param song_ids: a list of song ids, or a single song id.
:param image_filepath: filepath of the art to use. jpg and png are known to work.
This function will *always* upload the provided image, even if it's already uploaded.
If the art is already uploaded and set for another song, copy over the
value of the ``'albumArtUrl'`` key using :func:`Mobileclient.change_song_metadata` instead.
"""
res = self._make_call(webclient.UploadImage, image_filepath)
url = res['imageUrl']
song_dicts = [dict((('id', id), ('albumArtUrl', url))) for id in song_ids]
self._make_call(webclient.ChangeSongMetadata, song_dicts)
return url
@utils.accept_singleton(dict)
@utils.empty_arg_shortcircuit
def change_song_metadata(self, songs):
"""Changes metadata of songs.
Returns a list of the song ids changed.
:param songs: a list of song dictionaries, each dictionary must contain valid song 'id'
The following fields are supported: title, album, albumArtist, artist
"""
self._make_call(webclient.ChangeSongMetadata, songs)
return list(song['id'] for song in songs)
# deprecated methods follow:
@utils.deprecated('prefer Mobileclient.create_playlist')
def create_playlist(self, name, description=None, public=False):
"""
Creates a playlist and returns its id.
:param name: the name of the playlist.
:param description: (optional) the description of the playlist.
:param public: if True and the user has All Access, create a shared playlist.
"""
res = self._make_call(webclient.CreatePlaylist, name, description, public)
return res[1][0]
@utils.deprecated('prefer Mobileclient.get_registered_devices')
def get_registered_devices(self):
"""
Returns a list of dictionaries representing devices associated with the account.
Performing the :class:`Musicmanager` OAuth flow will register a device
of type 1.
Installing the Google Music app on an android or ios device
and logging into it will register a device of type 2 or 3,
which is used for streaming with the :class:`Mobileclient`.
Here is an example response::
[
{
u'deviceType': 1, # laptop/desktop
u'id': u'00:11:22:33:AA:BB',
u'lastAccessedFormatted': u'May 24, 2015',
u'lastAccessedTimeMillis': 1432468588200, # utc-millisecond
u'lastEventTimeMillis': 1434211605335,
u'name': u'my computer'
},
{
u'deviceType': 2, # android device
u'carrier': u'Google',
u'id': u'0x00112233aabbccdd', # remove 0x when streaming
u'lastAccessedFormatted': u'September 19, 2015',
u'lastAccessedTimeMillis': 1442706069906,
u'lastEventTimeMillis': 1435271137193,
u'manufacturer': u'Asus',
u'model': u'Nexus 7',
u'name': u'my nexus 7'
},
{
u'deviceType': 3, # ios device
u'id': u'ios:01234567-0123-0123-0123-0123456789AB',
u'lastAccessedFormatted': u'June 25, 2015',
u'lastAccessedTimeMillis': 1435271588780,
u'lastEventTimeMillis': 1435271442417,
u'name': u'my iphone'
}
]
"""
# TODO sessionid stuff
res = self._make_call(webclient.GetSettings, '')
return res['settings']['uploadDevice']
@utils.accept_singleton(str)
@utils.enforce_ids_param
@utils.empty_arg_shortcircuit
@utils.deprecated('prefer Mobileclient.delete_songs')
def delete_songs(self, song_ids):
"""**Deprecated**: prefer :func:`Mobileclient.delete_songs`.
Deletes songs from the entire library. Returns a list of deleted song ids.
:param song_ids: a list of song ids, or a single song id.
"""
res = self._make_call(webclient.DeleteSongs, song_ids)
return res['deleteIds']
@utils.accept_singleton(str, 2)
@utils.enforce_ids_param(2)
@utils.enforce_id_param
@utils.empty_arg_shortcircuit(position=2)
@utils.deprecated('prefer Mobileclient.add_songs_to_playlist')
def add_songs_to_playlist(self, playlist_id, song_ids):
"""**Deprecated**: prefer :func:`Mobileclient.add_songs_to_playlist`.
Appends songs to a playlist.
Returns a list of (song id, playlistEntryId) tuples that were added.
:param playlist_id: id of the playlist to add to.
:param song_ids: a list of song ids, or a single song id.
Playlists have a maximum size of 1000 songs.
"""
res = self._make_call(webclient.AddToPlaylist, playlist_id, song_ids)
new_entries = res['songIds']
return [(e['songId'], e['playlistEntryId']) for e in new_entries]
@utils.accept_singleton(str, 2)
@utils.enforce_ids_param(2)
@utils.enforce_id_param
@utils.empty_arg_shortcircuit(position=2)
@utils.deprecated('prefer Mobileclient.remove_entries_from_playlist')
def remove_songs_from_playlist(self, playlist_id, sids_to_match):
"""**Deprecated**: prefer :func:`Mobileclient.remove_entries_from_playlist`.
Removes all copies of the given song ids from a playlist.
Returns a list of removed (sid, eid) pairs.
:param playlist_id: id of the playlist to remove songs from.
:param sids_to_match: a list of song ids to match, or a single song id.
This is *not always* the inverse of a call to :func:`add_songs_to_playlist`,
since multiple copies of the same song are removed.
"""
playlist_tracks = self.get_playlist_songs(playlist_id)
sid_set = set(sids_to_match)
matching_eids = [t["playlistEntryId"]
for t in playlist_tracks
if t["id"] in sid_set]
if matching_eids:
# Call returns "sid_eid" strings.
sid_eids = self._remove_entries_from_playlist(playlist_id,
matching_eids)
return [s.split("_") for s in sid_eids]
else:
return []
@utils.accept_singleton(str, 2)
@utils.empty_arg_shortcircuit(position=2)
def _remove_entries_from_playlist(self, playlist_id, entry_ids_to_remove):
"""Removes entries from a playlist. Returns a list of removed "sid_eid" strings.
:param playlist_id: the playlist to be modified.
:param entry_ids: a list of entry ids, or a single entry id.
"""
# GM requires the song ids in the call as well; find them.
playlist_tracks = self.get_playlist_songs(playlist_id)
remove_eid_set = set(entry_ids_to_remove)
e_s_id_pairs = [(t["id"], t["playlistEntryId"])
for t in playlist_tracks
if t["playlistEntryId"] in remove_eid_set]
num_not_found = len(entry_ids_to_remove) - len(e_s_id_pairs)
if num_not_found > 0:
self.logger.warning("when removing, %d entry ids could not be found in playlist id %s",
num_not_found, playlist_id)
# Unzip the pairs.
sids, eids = list(zip(*e_s_id_pairs))
res = self._make_call(webclient.DeleteSongs, sids, playlist_id, eids)
return res['deleteIds']
|
from homeassistant.components.ozw.const import DOMAIN
from homeassistant.components.sensor import (
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESSURE,
DOMAIN as SENSOR_DOMAIN,
)
from homeassistant.const import ATTR_DEVICE_CLASS
from .common import setup_ozw
async def test_sensor(hass, generic_data):
"""Test setting up config entry."""
await setup_ozw(hass, fixture=generic_data)
# Test standard sensor
state = hass.states.get("sensor.smart_plug_electric_v")
assert state is not None
assert state.state == "123.9"
assert state.attributes["unit_of_measurement"] == "V"
# Test device classes
state = hass.states.get("sensor.trisensor_relative_humidity")
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_HUMIDITY
state = hass.states.get("sensor.trisensor_pressure")
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_PRESSURE
state = hass.states.get("sensor.trisensor_fake_power")
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_POWER
state = hass.states.get("sensor.trisensor_fake_energy")
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_POWER
state = hass.states.get("sensor.trisensor_fake_electric")
assert state.attributes[ATTR_DEVICE_CLASS] == DEVICE_CLASS_POWER
# Test ZWaveListSensor disabled by default
registry = await hass.helpers.entity_registry.async_get_registry()
entity_id = "sensor.water_sensor_6_instance_1_water"
state = hass.states.get(entity_id)
assert state is None
entry = registry.async_get(entity_id)
assert entry
assert entry.disabled
assert entry.disabled_by == "integration"
# Test enabling entity
updated_entry = registry.async_update_entity(
entry.entity_id, **{"disabled_by": None}
)
assert updated_entry != entry
assert updated_entry.disabled is False
async def test_sensor_enabled(hass, generic_data, sensor_msg):
"""Test enabling an advanced sensor."""
registry = await hass.helpers.entity_registry.async_get_registry()
entry = registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"1-36-1407375493578772",
suggested_object_id="water_sensor_6_instance_1_water",
disabled_by=None,
)
assert entry.disabled is False
receive_msg = await setup_ozw(hass, fixture=generic_data)
receive_msg(sensor_msg)
await hass.async_block_till_done()
state = hass.states.get(entry.entity_id)
assert state is not None
assert state.state == "0"
assert state.attributes["label"] == "Clear"
async def test_string_sensor(hass, string_sensor_data):
"""Test so the returned type is a string sensor."""
registry = await hass.helpers.entity_registry.async_get_registry()
entry = registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"1-49-73464969749610519",
suggested_object_id="id_150_z_wave_module_user_code",
disabled_by=None,
)
await setup_ozw(hass, fixture=string_sensor_data)
await hass.async_block_till_done()
state = hass.states.get(entry.entity_id)
assert state is not None
assert state.state == "asdfgh"
|
import os
from weblate.fonts.models import FONT_STORAGE, Font
from weblate.trans.tests.test_views import FixtureTestCase
FONT = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"static",
"font-droid",
"DroidSansFallback.ttf",
)
class FontTestCase(FixtureTestCase):
def add_font(self):
with open(FONT, "rb") as handle:
fontfile = FONT_STORAGE.save("DroidSansFallback.ttf", handle)
return Font.objects.create(font=fontfile, project=self.project, user=self.user)
|
import numpy as np
from scipy.linalg import eigh
from ..filter import filter_data
from ..cov import _regularized_covariance
from . import TransformerMixin, BaseEstimator
from ..time_frequency import psd_array_welch
from ..utils import _time_mask, fill_doc, _validate_type, _check_option
from ..io.pick import _get_channel_types, _picks_to_idx
@fill_doc
class SSD(BaseEstimator, TransformerMixin):
"""
M/EEG signal decomposition using the Spatio-Spectral Decomposition (SSD).
SSD seeks to maximize the power at a frequency band of interest while
simultaneously minimizing it at the flanking (surrounding) frequency bins
(considered noise). It extremizes the covariance matrices associated with
signal and noise :footcite:`NikulinEtAl2011`.
SSD can either be used as a dimensionality reduction method or a
‘denoised’ low rank factorization method :footcite:`HaufeEtAl2014b`.
Parameters
----------
info : instance of mne.Info
The info object containing the channel and sampling information.
It must match the input data.
filt_params_signal : dict
Filtering for the frequencies of interest.
filt_params_noise : dict
Filtering for the frequencies of non-interest.
reg : float | str | None (default)
Which covariance estimator to use.
If not None (same as 'empirical'), allow regularization for
covariance estimation. If float, shrinkage is used
        (0 <= shrinkage <= 1). For str options, reg will be passed as
        method to :func:`mne.compute_covariance`.
n_components : int | None (default None)
The number of components to extract from the signal.
If n_components is None, no dimensionality reduction is applied.
picks : array of int | None (default None)
The indices of good channels.
    sort_by_spectral_ratio : bool (default True)
        If set to True, the components are sorted according
        to the spectral ratio.
See Eq. (24) in :footcite:`NikulinEtAl2011`.
    return_filtered : bool (default False)
If return_filtered is True, data is bandpassed and projected onto
the SSD components.
n_fft : int (default None)
        If sort_by_spectral_ratio is set to True, then the SSD sources will be
        sorted according to their spectral ratio, which is calculated using the
        :func:`mne.time_frequency.psd_array_welch` function. The n_fft parameter
        sets the length of the FFT used.
See :func:`mne.time_frequency.psd_array_welch` for more information.
cov_method_params : dict | None (default None)
As in :class:`mne.decoding.SPoC`
The default is None.
    rank : None | dict | 'info' | 'full'
        As in :class:`mne.decoding.SPoC`.
        This controls the rank computation that can be read from the
        measurement info or estimated from the data.
        See Notes of :func:`mne.compute_rank` for details.
        We recommend using 'full' when working with epoched data.
Attributes
----------
filters_ : array, shape (n_channels, n_components)
The spatial filters to be multiplied with the signal.
patterns_ : array, shape (n_components, n_channels)
The patterns for reconstructing the signal from the filtered data.
References
----------
.. footbibliography::
"""
def __init__(self, info, filt_params_signal, filt_params_noise,
reg=None, n_components=None, picks=None,
sort_by_spectral_ratio=True, return_filtered=False,
n_fft=None, cov_method_params=None, rank=None):
"""Initialize instance."""
dicts = {"signal": filt_params_signal, "noise": filt_params_noise}
for param, dd in [('l', 0), ('h', 0), ('l', 1), ('h', 1)]:
key = ('signal', 'noise')[dd]
if param + '_freq' not in dicts[key]:
raise ValueError(
'%s must be defined in filter parameters for %s'
% (param + '_freq', key))
val = dicts[key][param + '_freq']
if not isinstance(val, (int, float)):
_validate_type(val, ('numeric',), f'{key} {param}_freq')
# check freq bands
if (filt_params_noise['l_freq'] > filt_params_signal['l_freq'] or
filt_params_signal['h_freq'] > filt_params_noise['h_freq']):
raise ValueError('Wrongly specified frequency bands!\n'
'The signal band-pass must be within the noise '
'band-pass!')
self.picks_ = _picks_to_idx(info, picks, none='data', exclude='bads')
del picks
ch_types = _get_channel_types(info, picks=self.picks_, unique=True)
if len(ch_types) > 1:
raise ValueError('At this point SSD only supports fitting '
'single channel types. Your info has %i types' %
(len(ch_types)))
self.info = info
self.freqs_signal = (filt_params_signal['l_freq'],
filt_params_signal['h_freq'])
self.freqs_noise = (filt_params_noise['l_freq'],
filt_params_noise['h_freq'])
self.filt_params_signal = filt_params_signal
self.filt_params_noise = filt_params_noise
self.sort_by_spectral_ratio = sort_by_spectral_ratio
if n_fft is None:
self.n_fft = int(self.info['sfreq'])
else:
self.n_fft = int(n_fft)
self.return_filtered = return_filtered
self.reg = reg
self.n_components = n_components
self.rank = rank
self.cov_method_params = cov_method_params
def _check_X(self, X):
"""Check input data."""
_validate_type(X, np.ndarray, 'X')
_check_option('X.ndim', X.ndim, (2, 3))
n_chan = X.shape[-2]
if n_chan != self.info['nchan']:
            raise ValueError('Info must match the input data. '
'Found %i channels but expected %i.' %
(n_chan, self.info['nchan']))
def fit(self, X, y=None):
"""Estimate the SSD decomposition on raw or epoched data.
Parameters
----------
X : array, shape ([n_epochs, ]n_channels, n_times)
The input data from which to estimate the SSD. Either 2D array
obtained from continuous data or 3D array obtained from epoched
data.
y : None | array, shape (n_samples,)
Used for scikit-learn compatibility.
Returns
-------
self : instance of SSD
Returns the modified instance.
"""
self._check_X(X)
X_aux = X[..., self.picks_, :]
X_signal = filter_data(
X_aux, self.info['sfreq'], **self.filt_params_signal)
X_noise = filter_data(
X_aux, self.info['sfreq'], **self.filt_params_noise)
X_noise -= X_signal
if X.ndim == 3:
X_signal = np.hstack(X_signal)
X_noise = np.hstack(X_noise)
cov_signal = _regularized_covariance(
X_signal, reg=self.reg, method_params=self.cov_method_params,
rank=self.rank, info=self.info)
cov_noise = _regularized_covariance(
X_noise, reg=self.reg, method_params=self.cov_method_params,
rank=self.rank, info=self.info)
eigvals_, eigvects_ = eigh(cov_signal, cov_noise)
# sort in descending order
ix = np.argsort(eigvals_)[::-1]
self.eigvals_ = eigvals_[ix]
self.filters_ = eigvects_[:, ix]
self.patterns_ = np.linalg.pinv(self.filters_)
return self
def transform(self, X):
"""Estimate epochs sources given the SSD filters.
Parameters
----------
X : array, shape ([n_epochs, ]n_channels, n_times)
The input data from which to estimate the SSD. Either 2D array
obtained from continuous data or 3D array obtained from epoched
data.
Returns
-------
X_ssd : array, shape ([n_epochs, ]n_components, n_times)
The processed data.
"""
self._check_X(X)
if self.filters_ is None:
raise RuntimeError('No filters available. Please first call fit')
X_ssd = self.filters_.T @ X[..., self.picks_, :]
# We assume that ordering by spectral ratio is more important
# than the initial ordering. This is why we apply component picks
# after ordering.
sorter_spec = Ellipsis
if self.sort_by_spectral_ratio:
_, sorter_spec = self.get_spectral_ratio(ssd_sources=X_ssd)
if X.ndim == 2:
X_ssd = X_ssd[sorter_spec][:self.n_components]
else:
X_ssd = X_ssd[:, sorter_spec, :][:, :self.n_components, :]
return X_ssd
def get_spectral_ratio(self, ssd_sources):
"""Get the spectal signal-to-noise ratio for each spatial filter.
Spectral ratio measure for best n_components selection
See :footcite:`NikulinEtAl2011`, Eq. (24).
Parameters
----------
ssd_sources : array
            Data projected to SSD space.
Returns
-------
spec_ratio : array, shape (n_channels)
            Array with the spectral ratio value for each component.
sorter_spec : array, shape (n_channels)
Array of indices for sorting spec_ratio.
References
----------
.. footbibliography::
"""
psd, freqs = psd_array_welch(
ssd_sources, sfreq=self.info['sfreq'], n_fft=self.n_fft)
sig_idx = _time_mask(freqs, *self.freqs_signal)
noise_idx = _time_mask(freqs, *self.freqs_noise)
if psd.ndim == 3:
mean_sig = psd[:, :, sig_idx].mean(axis=2).mean(axis=0)
mean_noise = psd[:, :, noise_idx].mean(axis=2).mean(axis=0)
spec_ratio = mean_sig / mean_noise
else:
mean_sig = psd[:, sig_idx].mean(axis=1)
mean_noise = psd[:, noise_idx].mean(axis=1)
spec_ratio = mean_sig / mean_noise
sorter_spec = spec_ratio.argsort()[::-1]
return spec_ratio, sorter_spec
def inverse_transform(self):
"""Not implemented yet."""
raise NotImplementedError('inverse_transform is not yet available.')
def apply(self, X):
"""Remove selected components from the signal.
This procedure will reconstruct M/EEG signals from which the dynamics
        described by the excluded components are subtracted
(denoised by low-rank factorization).
See :footcite:`HaufeEtAl2014b` for more information.
.. note:: Unlike in other classes with an apply method,
only NumPy arrays are supported (not instances of MNE objects).
Parameters
----------
X : array, shape ([n_epochs, ]n_channels, n_times)
The input data from which to estimate the SSD. Either 2D array
obtained from continuous data or 3D array obtained from epoched
data.
Returns
-------
X : array, shape ([n_epochs, ]n_channels, n_times)
The processed data.
"""
X_ssd = self.transform(X)
sorter_spec = Ellipsis
if self.sort_by_spectral_ratio:
_, sorter_spec = self.get_spectral_ratio(ssd_sources=X_ssd)
pick_patterns = self.patterns_[sorter_spec, :self.n_components].T
X = pick_patterns @ X_ssd
return X
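if __name__ == '__main__':  # pragma: no cover
    # Illustrative usage sketch, not part of the original module: fit SSD on
    # random data with an alpha-band signal filter and a slightly wider noise
    # band. All parameter values below are arbitrary example choices.
    import mne
    rng = np.random.RandomState(0)
    example_info = mne.create_info(8, sfreq=250., ch_types='eeg')
    X_example = rng.randn(8, 5000)
    ssd_example = SSD(
        example_info,
        filt_params_signal=dict(l_freq=9., h_freq=12.,
                                l_trans_bandwidth=1., h_trans_bandwidth=1.),
        filt_params_noise=dict(l_freq=8., h_freq=13.,
                               l_trans_bandwidth=1., h_trans_bandwidth=1.),
        n_components=2)
    X_ssd_example = ssd_example.fit(X_example).transform(X_example)
    print(X_ssd_example.shape)  # expected: (2, 5000)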
|
from asyncio import Event
import logging
from threading import Thread
from typing import Optional
import debugpy
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant, ServiceCall
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
DOMAIN = "debugpy"
CONF_WAIT = "wait"
CONF_START = "start"
SERVICE_START = "start"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_HOST, default="0.0.0.0"): cv.string,
vol.Optional(CONF_PORT, default=5678): cv.port,
vol.Optional(CONF_START, default=True): cv.boolean,
vol.Optional(CONF_WAIT, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Remote Python Debugger component."""
conf = config[DOMAIN]
async def debug_start(
call: Optional[ServiceCall] = None, *, wait: bool = True
) -> None:
"""Start the debugger."""
debugpy.listen((conf[CONF_HOST], conf[CONF_PORT]))
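        # Note: the configured "wait" option below takes precedence over the
        # wait keyword argument passed to this handler.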
wait = conf[CONF_WAIT]
if wait:
_LOGGER.warning(
"Waiting for remote debug connection on %s:%s",
conf[CONF_HOST],
conf[CONF_PORT],
)
ready = Event()
def waitfor():
debugpy.wait_for_client()
hass.loop.call_soon_threadsafe(ready.set)
Thread(target=waitfor).start()
await ready.wait()
else:
_LOGGER.warning(
"Listening for remote debug connection on %s:%s",
conf[CONF_HOST],
conf[CONF_PORT],
)
async_register_admin_service(
hass, DOMAIN, SERVICE_START, debug_start, schema=vol.Schema({})
)
# If set to start the debugger on startup, do so
if conf[CONF_START]:
await debug_start(wait=conf[CONF_WAIT])
return True
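# Example configuration.yaml entry (illustrative; the values shown mirror the
# defaults declared in CONFIG_SCHEMA above, and every key is optional):
#
#   debugpy:
#     host: 0.0.0.0
#     port: 5678
#     start: true
#     wait: false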
|
from weblate.checks.render import MaxSizeCheck
from weblate.fonts.models import FontGroup, FontOverride
from weblate.fonts.tests.utils import FontTestCase
from weblate.utils.state import STATE_TRANSLATED
class MaxSizeCheckTest(FontTestCase):
def setUp(self):
super().setUp()
self.check = MaxSizeCheck()
def perform_check(self, target, flags):
unit = self.get_unit()
unit.flags = flags
unit.target = target
unit.state = STATE_TRANSLATED
return self.check.check_target(["source"], [target], unit)
def test_good(self):
self.assertFalse(self.perform_check("short", "max-size:500"))
self.assertEqual(self.check.last_font, "sans")
def test_bad_long(self):
self.assertTrue(self.perform_check("long" * 50, "max-size:500"))
self.assertEqual(self.check.last_font, "sans")
def test_bad_multiline(self):
self.assertTrue(self.perform_check("long " * 50, "max-size:500"))
self.assertEqual(self.check.last_font, "sans")
def test_good_multiline(self):
self.assertFalse(self.perform_check("long " * 50, "max-size:500:50"))
self.assertEqual(self.check.last_font, "sans")
def add_font_group(self):
font = self.add_font()
return FontGroup.objects.create(name="droid", font=font, project=self.project)
def test_custom_font(self):
self.add_font_group()
self.assertFalse(self.perform_check("short", "max-size:500,font-family:droid"))
self.assertEqual(self.check.last_font, "Droid Sans Fallback Regular")
def test_custom_font_override(self):
group = self.add_font_group()
FontOverride.objects.create(
group=group, language=self.get_translation().language, font=group.font
)
self.assertFalse(self.perform_check("short", "max-size:500,font-family:droid"))
self.assertEqual(self.check.last_font, "Droid Sans Fallback Regular")
|
import os
import sys
import time
import tempfile
import shutil
from flexx.util.testing import run_tests_if_main, raises
from flexx.util.logging import capture_log
from flexx import app
from flexx.app._modules import JSModule
tempdirname = os.path.join(tempfile.gettempdir(), 'flexx_module_test')
files = {}
files['__init__'] = """
def x():
pass
"""
files['foo'] = """
from flexx import app
from flxtest.lib3 import tan, atan
from flxtest.lib4 import magic_number, random
import pscript
import sys
sas = None
console = pscript.JSConstant('console')
def do_something():
console.log('do something')
return 40 + tan(1)
def do_more():
return atan(1) + random(magic_number)
class Foo(app.JsComponent):
CSS = ".foo-css-rule {}"
def init(self):
do_something()
"""
files['bar'] = """
from flxtest import lib1
from flxtest import lib2
from flxtest.lib2 import AA
from flxtest.foo import Foo
def use_lib1():
return lib1.sin
def use_lib1_wrong():
return lib1.sinasappel
def use_lib2():
return lib2.cos
class BB(AA):
pass
class CC(BB):
pass
class Bar(Foo):
pass
class Spam(Bar):
pass
def cannot_transpile():
# functionality not supported by PScript. Note that some may be at some point
# f"format strings" - also not in Python < 3.6
{'no', 'set', 'in', 'js'}
a[1:2:3] # no slicing with step
import xx
cannot_serialize = [1, 2, use_lib1]
cannot_do_anything = BB()
"""
files['lib1'] = """
__pscript__ = True
sas = None
offset = 2
def sin(t):
return t + offset
def asin(t):
return t - offset
"""
files['lib2'] = """
from pscript import RawJS
import sys
sas = None
offset = 3
bias = RawJS("[]")
def cos(t):
return t + offset + bias
def acos(t):
return t - offset + bias
class AA(object):
pass
"""
files['lib3'] = """
from flxtest.lib1 import sin
from flxtest.lib2 import cos, offset, bias
from flxtest import x
def tan(t):
return sin(t) / cos(t) + offset * 0 + bias + x()
def atan(t):
return 1/tan(t)
"""
files['lib4'] = """
magic_number = 42
def random():
return 1
"""
files['globals0'] = """
def apply():
console = 4
def main():
apply()
print(console)
"""
files['globals1'] = """
console = 3
def apply():
global console
console = 4
def main():
global console
apply()
print(console)
"""
files['globals2'] = """
def apply():
global console
console.xx = 4
def main():
global console
apply()
print(console.xx)
"""
PKG_NAME = 'flxtest'
def setup_module():
packdirname = os.path.join(tempdirname, PKG_NAME)
if not os.path.isdir(tempdirname):
os.makedirs(tempdirname)
if not os.path.isdir(packdirname):
os.makedirs(packdirname)
sys.path.insert(0, tempdirname)
for name in files:
        # Strip the leading 4-space indentation from the file templates
text = '\n'.join(line[4:] for line in files[name].splitlines())
# Write code
filename = os.path.join(packdirname, name + '.py')
with open(filename, 'wb') as f:
f.write(text.encode())
def teardown_module():
if os.path.isdir(tempdirname):
shutil.rmtree(tempdirname)
while tempdirname in sys.path:
sys.path.remove(tempdirname)
# Remove trace of these classes, since their source no longer exists,
    # PScript won't be able to resolve them for JS
for cls in list(app._component2.AppComponentMeta.CLASSES):
if cls.__jsmodule__.startswith(PKG_NAME + '.'):
app._component2.AppComponentMeta.CLASSES.remove(cls)
def test_modules():
import flxtest.foo
store = {}
m = JSModule('flxtest.foo', store)
assert len(store) == 1
# Add Foo, this will bring everything else in
m.add_variable('Foo')
assert len(m.component_classes) == 1
assert m.component_classes.pop().__name__ == 'Foo'
    # Modules exist
assert len(store) == 7
assert 'flxtest.foo' in store
assert 'flxtest.lib1' in store
assert 'flxtest.lib2' in store
assert 'flxtest.lib3' in store
assert 'flxtest.__init__' in store # different from how Python works!
assert 'flexx.app._component2' in store
# CSS
assert 'foo-css-rule' in store['flxtest.foo'].get_css()
# Stubs prevented loading of console
assert 'console =' not in store['flxtest.foo'].get_js()
# Function defs defined
assert 'sin = function' in store['flxtest.lib1'].get_js()
assert 'asin = function' in store['flxtest.lib1'].get_js() # __pscript__
assert 'cos = function' in store['flxtest.lib2'].get_js()
assert 'acos = function' not in store['flxtest.lib2'].get_js() # not __pscript__
assert 'tan = function' in store['flxtest.lib3'].get_js()
assert 'do_something = function' in store['flxtest.foo'].get_js()
# Function defs imported
assert 'sin = flxtest$lib1.sin' in store['flxtest.lib3'].get_js()
assert 'cos = flxtest$lib2.cos' in store['flxtest.lib3'].get_js()
assert 'tan = flxtest$lib3.tan' in store['flxtest.foo'].get_js()
# Unused constants
assert 'sys' not in store['flxtest.foo'].get_js()
assert 'sas' not in store['flxtest.foo'].get_js()
assert 'sys' not in store['flxtest.lib2'].get_js()
assert 'sas' not in store['flxtest.lib2'].get_js()
assert 'sas' in store['flxtest.lib1'].get_js() # __pscript__
    # Constants are replicated, not imported
assert 'offset = 3' in store['flxtest.lib2'].get_js()
assert 'offset = 3' in store['flxtest.lib3'].get_js()
# But RawJS constants can be shared!
assert 'bias = []' in store['flxtest.lib2'].get_js()
assert 'bias = flxtest$lib2.bias' in store['flxtest.lib3'].get_js()
    # So ... lib4 is omitted, right?
assert 'flxtest.lib4' not in store
assert 'magic_number' not in store['flxtest.foo'].get_js()
assert 'random' not in store['flxtest.foo'].get_js()
assert 'atan' not in store['flxtest.foo'].get_js()
assert 'atan' not in store['flxtest.lib3'].get_js()
# Use more of foo module
m.add_variable('do_more')
# Now, lib4 is used
assert len(store) == 8
assert 'flxtest.lib4' in store
# And names added in foo
assert 'magic_number = 42' in store['flxtest.foo'].get_js()
assert 'random' in store['flxtest.foo'].get_js()
assert 'atan' in store['flxtest.foo'].get_js()
assert 'atan' in store['flxtest.lib3'].get_js()
def test_misc():
import flxtest.foo
store = {}
# repr
m = JSModule('flxtest.foo', store)
assert '0' in repr(m)
m.add_variable('do_something')
assert '1' in repr(m)
m.add_variable('do_more')
assert '3' in repr(m) # also the const
# Deps
assert len(m.deps) == 2
assert 'flxtest.lib3' in m.deps
assert 'flxtest.lib4' in m.deps
#
m.add_variable('Foo')
assert len(m.deps) == 3
assert 'flexx.app._component2' in m.deps
def test_add_variable():
import flxtest.foo
import flxtest.bar
store = {}
m = JSModule('flxtest.foo', store)
assert not m.variables
m.add_variable('Foo')
assert 'Foo' in m.variables
# add_variable is ignored for pscript mods
assert not store['flxtest.lib1'].deps
with capture_log('info') as log:
store['flxtest.lib1'].add_variable('spam')
assert not log
# add_variable warns for other mods
with capture_log('info') as log:
store['flxtest.lib2'].add_variable('spam')
assert len(log) == 1 and 'undefined variable' in log[0]
with capture_log('info') as log:
store['flxtest.lib2'].add_variable('spam', is_global=True)
assert not log
m = JSModule('flxtest.bar', store)
    # Can use stuff from a module if it's a __pscript__ module
m.add_variable('use_lib1')
    # The module code is smart enough to know that lib1 does not contain sinasappel
with raises(RuntimeError):
m.add_variable('use_lib1_wrong')
# Also for dotted names
m.add_variable('use_lib2')
# Has changed flag
our_time = time.time(); time.sleep(0.01)
m = JSModule('flxtest.bar', {})
time.sleep(0.01); our_time = time.time();
m.get_js()
#
our_time = time.time(); time.sleep(0.01)
m.add_variable('use_lib1')
m.add_variable('AA')
#
our_time = time.time(); time.sleep(0.01)
m.add_variable('use_lib1') # no effect because already known
#
    m.add_variable('AA')  # no effect because it is an imported name
def test_subclasses():
import flxtest.foo
import flxtest.bar
# Using a class CC > BB > AA > object
store = {}
JSModule('flxtest.foo', store).add_variable('Foo')
m = JSModule('flxtest.bar', store)
#
assert 'CC' not in m.get_js()
assert 'BB' not in m.get_js()
assert 'AA' not in store['flxtest.lib2'].get_js()
#
m.add_variable('CC')
assert 'CC' in m.get_js()
assert 'BB' in m.get_js()
assert 'AA' in store['flxtest.lib2'].get_js()
# Using a class Spam > Bar > Foo > Component
store = {}
m = JSModule('flxtest.bar', store)
assert 'flxtest.foo' not in store
#
m.add_variable('Spam')
assert 'flxtest.foo' in store
assert 'flexx.app._component2' in store
# Using Foo in modules that imports it
store = {}
m = JSModule('flxtest.bar', store)
assert 'flxtest.foo' not in store
#
m.add_variable('Foo')
assert 'flxtest.foo' in store
assert 'flexx.app._component2' in store
def test_globals():
import flxtest.globals0
import flxtest.globals1
import flxtest.globals2
store = {}
m0 = JSModule('flxtest.globals0', store)
m1 = JSModule('flxtest.globals1', store)
m2 = JSModule('flxtest.globals2', store)
with capture_log('info') as log:
m0.add_variable('main')
assert len(log) == 1 and 'undefined variable' in log[0]
with capture_log('info') as log:
m1.add_variable('main')
assert not log
with capture_log('info') as log:
m2.add_variable('main')
assert not log
# m0 has local definitions, but no global
assert '\nvar console' not in m0.get_js()
assert ' var console' in m0.get_js()
# m1 has global definition but no local
assert '\nvar console' in m1.get_js()
assert ' var console' not in m1.get_js()
# m2 has neither
assert 'var console' not in m2.get_js()
def test_fails():
import flxtest.foo
import flxtest.bar
assert JSModule('flxtest.foo', {})
# Wrong init
with raises(TypeError):
JSModule()
with raises(TypeError):
JSModule('flxtest.foo')
with raises(TypeError):
JSModule(3, {})
with raises(TypeError):
JSModule('flxtest.foo', 3)
with raises(TypeError):
JSModule('flxtest.foo', {}, 3)
# Name issues
with raises(ValueError):
JSModule('flxtest.doesnotexist', {})
with raises(ValueError):
JSModule('flxtest', {}) # must be flxtest.__init__
with raises(ValueError):
JSModule('flxtest.foo.__init__', {}) # only for actual package names!
# Cannot create module with same name twice (in same store)
store = {}
JSModule('flxtest.foo', store)
with raises(RuntimeError):
JSModule('flxtest.foo', store)
JSModule('flxtest.foo', {}) # in alt store its ok though
# Untranspilable
m = JSModule('flxtest.bar', {})
with raises(ValueError) as err:
m.add_variable('cannot_transpile')
assert 'cannot transpile' in str(err)
# Unserializable
m = JSModule('flxtest.bar', {})
with raises(ValueError) as err:
m.add_variable('cannot_serialize')
assert 'cannot serialize' in str(err)
# Un-anythingable
m = JSModule('flxtest.bar', {})
with raises(ValueError) as err:
m.add_variable('cannot_do_anything')
assert 'cannot convert' in str(err)
run_tests_if_main()
|
import os.path as op
import numpy as np
from numpy.testing import (assert_equal, assert_array_almost_equal,
assert_array_equal, assert_allclose)
import mne
from mne.datasets import testing
from mne.minimum_norm.resolution_matrix import (make_inverse_resolution_matrix,
get_cross_talk,
get_point_spread,
_vertices_for_get_psf_ctf)
data_path = testing.data_path(download=False)
subjects_dir = op.join(data_path, 'subjects')
fname_inv = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_evoked = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-ave.fif')
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_t1 = op.join(data_path, 'subjects', 'sample', 'mri', 'T1.mgz')
fname_src = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
fname_src_fs = op.join(data_path, 'subjects', 'fsaverage', 'bem',
'fsaverage-ico-5-src.fif')
fname_src_3 = op.join(data_path, 'subjects', 'sample', 'bem',
'sample-oct-4-src.fif')
fname_stc = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-meg')
fname_vol = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-grad-vol-7-fwd-sensmap-vol.w')
fname_vsrc = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-vol-7-fwd.fif')
fname_inv_vol = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-vol-7-meg-inv.fif')
rng = np.random.RandomState(0)
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif')
fname_label = op.join(data_path, 'subjects', 'sample', 'label', 'lh.V1.label')
@testing.requires_testing_data
def test_resolution_matrix():
"""Test make_inverse_resolution_matrix() function."""
# read forward solution
forward = mne.read_forward_solution(fname_fwd)
# forward operator with fixed source orientations
forward_fxd = mne.convert_forward_solution(forward, surf_ori=True,
force_fixed=True)
# noise covariance matrix
noise_cov = mne.read_cov(fname_cov)
# evoked data for info
evoked = mne.read_evokeds(fname_evoked, 0)
# make inverse operator from forward solution
# free source orientation
inverse_operator = mne.minimum_norm.make_inverse_operator(
info=evoked.info, forward=forward, noise_cov=noise_cov, loose=1.,
depth=None)
# fixed source orientation
inverse_operator_fxd = mne.minimum_norm.make_inverse_operator(
info=evoked.info, forward=forward, noise_cov=noise_cov, loose=0.,
depth=None, fixed=True)
# regularisation parameter based on SNR
snr = 3.0
lambda2 = 1.0 / snr ** 2
# resolution matrices for free source orientation
# compute resolution matrix for MNE with free source orientations
rm_mne_free = make_inverse_resolution_matrix(forward, inverse_operator,
method='MNE', lambda2=lambda2)
# compute resolution matrix for MNE, fwd fixed and inv free
rm_mne_fxdfree = make_inverse_resolution_matrix(forward_fxd,
inverse_operator,
method='MNE',
lambda2=lambda2)
# resolution matrices for fixed source orientation
# compute resolution matrix for MNE
rm_mne = make_inverse_resolution_matrix(forward_fxd, inverse_operator_fxd,
method='MNE', lambda2=lambda2)
# compute resolution matrix for sLORETA
rm_lor = make_inverse_resolution_matrix(forward_fxd, inverse_operator_fxd,
method='sLORETA', lambda2=lambda2)
# rectify resolution matrix for sLORETA before determining maxima
rm_lor_abs = np.abs(rm_lor)
# get maxima per column
maxidxs = rm_lor_abs.argmax(axis=0)
# create array with the expected stepwise increase in maximum indices
goodidxs = np.arange(0, len(maxidxs), 1)
# Tests
# Does sLORETA have zero dipole localization error for columns/PSFs?
assert_array_equal(maxidxs, goodidxs)
# MNE resolution matrices symmetric?
assert_array_almost_equal(rm_mne, rm_mne.T)
assert_array_almost_equal(rm_mne_free, rm_mne_free.T)
# Some arbitrary vertex numbers
idx = [1, 100, 400]
# check various summary and normalisation options
for mode in [None, 'sum', 'mean', 'maxval', 'maxnorm', 'pca']:
n_comps = [1, 3]
if mode in [None, 'sum', 'mean']:
n_comps = [1]
for n_comp in n_comps:
for norm in [None, 'max', 'norm', True]:
stc_psf = get_point_spread(
rm_mne, forward_fxd['src'], idx, mode=mode, n_comp=n_comp,
norm=norm, return_pca_vars=False)
stc_ctf = get_cross_talk(
rm_mne, forward_fxd['src'], idx, mode=mode, n_comp=n_comp,
norm=norm, return_pca_vars=False)
# for MNE, PSF/CTFs for same vertices should be the same
assert_array_almost_equal(stc_psf.data, stc_ctf.data)
# check SVD variances
stc_psf, s_vars_psf = get_point_spread(
rm_mne, forward_fxd['src'], idx, mode=mode, n_comp=n_comp,
norm='norm', return_pca_vars=True)
stc_ctf, s_vars_ctf = get_cross_talk(
rm_mne, forward_fxd['src'], idx, mode=mode, n_comp=n_comp,
norm='norm', return_pca_vars=True)
assert_array_almost_equal(s_vars_psf, s_vars_ctf)
# variances for SVD components should be ordered
assert s_vars_psf[0] > s_vars_psf[1] > s_vars_psf[2]
# all variances should sum up to 100
assert_allclose(s_vars_psf.sum(), 100.)
# Test application of free inv to fixed fwd
assert_equal(rm_mne_fxdfree.shape, (3 * rm_mne.shape[0],
rm_mne.shape[0]))
# Test PSF/CTF for labels
label = mne.read_label(fname_label)
# must be list of Label
label = [label]
label2 = 2 * label
# get relevant vertices in source space
verts = _vertices_for_get_psf_ctf(label, forward_fxd['src'])[0]
stc_psf_label = get_point_spread(rm_mne, forward_fxd['src'], label,
norm='max')
# for list of indices
stc_psf_idx = get_point_spread(rm_mne, forward_fxd['src'], verts,
norm='max')
stc_ctf_label = get_cross_talk(rm_mne, forward_fxd['src'], label,
norm='max')
# For MNE, PSF and CTF for same vertices should be the same
assert_array_almost_equal(stc_psf_label.data, stc_ctf_label.data)
# test multiple labels
stc_psf_label2 = get_point_spread(rm_mne, forward_fxd['src'], label2,
norm='max')
m, n = stc_psf_label.data.shape
assert_array_equal(
stc_psf_label.data, stc_psf_label2[0].data)
assert_array_equal(
stc_psf_label.data, stc_psf_label2[1].data)
assert_array_equal(
stc_psf_label.data, stc_psf_idx.data)
|
import logging
from hole.exceptions import HoleError
from homeassistant.components import pi_hole, switch
from homeassistant.components.pi_hole.const import (
CONF_LOCATION,
DEFAULT_LOCATION,
DEFAULT_NAME,
DEFAULT_SSL,
DEFAULT_VERIFY_SSL,
SERVICE_DISABLE,
SERVICE_DISABLE_ATTR_DURATION,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.setup import async_setup_component
from . import (
SWITCH_ENTITY_ID,
_create_mocked_hole,
_patch_config_flow_hole,
_patch_init_hole,
)
from tests.async_mock import AsyncMock
from tests.common import MockConfigEntry
async def test_setup_minimal_config(hass):
"""Tests component setup with minimal config."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole):
assert await async_setup_component(
hass, pi_hole.DOMAIN, {pi_hole.DOMAIN: [{"host": "pi.hole"}]}
)
await hass.async_block_till_done()
assert (
hass.states.get("sensor.pi_hole_ads_blocked_today").name
== "Pi-Hole Ads Blocked Today"
)
assert (
hass.states.get("sensor.pi_hole_ads_percentage_blocked_today").name
== "Pi-Hole Ads Percentage Blocked Today"
)
assert (
hass.states.get("sensor.pi_hole_dns_queries_cached").name
== "Pi-Hole DNS Queries Cached"
)
assert (
hass.states.get("sensor.pi_hole_dns_queries_forwarded").name
== "Pi-Hole DNS Queries Forwarded"
)
assert (
hass.states.get("sensor.pi_hole_dns_queries_today").name
== "Pi-Hole DNS Queries Today"
)
assert (
hass.states.get("sensor.pi_hole_dns_unique_clients").name
== "Pi-Hole DNS Unique Clients"
)
assert (
hass.states.get("sensor.pi_hole_dns_unique_domains").name
== "Pi-Hole DNS Unique Domains"
)
assert (
hass.states.get("sensor.pi_hole_domains_blocked").name
== "Pi-Hole Domains Blocked"
)
assert hass.states.get("sensor.pi_hole_seen_clients").name == "Pi-Hole Seen Clients"
assert hass.states.get("sensor.pi_hole_ads_blocked_today").state == "0"
assert hass.states.get("sensor.pi_hole_ads_percentage_blocked_today").state == "0"
assert hass.states.get("sensor.pi_hole_dns_queries_cached").state == "0"
assert hass.states.get("sensor.pi_hole_dns_queries_forwarded").state == "0"
assert hass.states.get("sensor.pi_hole_dns_queries_today").state == "0"
assert hass.states.get("sensor.pi_hole_dns_unique_clients").state == "0"
assert hass.states.get("sensor.pi_hole_dns_unique_domains").state == "0"
assert hass.states.get("sensor.pi_hole_domains_blocked").state == "0"
assert hass.states.get("sensor.pi_hole_seen_clients").state == "0"
assert hass.states.get("binary_sensor.pi_hole").name == "Pi-Hole"
assert hass.states.get("binary_sensor.pi_hole").state == "off"
async def test_setup_name_config(hass):
"""Tests component setup with a custom name."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole):
assert await async_setup_component(
hass,
pi_hole.DOMAIN,
{pi_hole.DOMAIN: [{"host": "pi.hole", "name": "Custom"}]},
)
await hass.async_block_till_done()
assert (
hass.states.get("sensor.custom_ads_blocked_today").name
== "Custom Ads Blocked Today"
)
async def test_switch(hass, caplog):
"""Test Pi-hole switch."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole):
assert await async_setup_component(
hass,
pi_hole.DOMAIN,
{pi_hole.DOMAIN: [{"host": "pi.hole1", "api_key": "1"}]},
)
await hass.async_block_till_done()
await hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_ON,
{"entity_id": SWITCH_ENTITY_ID},
blocking=True,
)
mocked_hole.enable.assert_called_once()
await hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_OFF,
{"entity_id": SWITCH_ENTITY_ID},
blocking=True,
)
mocked_hole.disable.assert_called_once_with(True)
# Failed calls
type(mocked_hole).enable = AsyncMock(side_effect=HoleError("Error1"))
await hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_ON,
{"entity_id": SWITCH_ENTITY_ID},
blocking=True,
)
type(mocked_hole).disable = AsyncMock(side_effect=HoleError("Error2"))
await hass.services.async_call(
switch.DOMAIN,
switch.SERVICE_TURN_OFF,
{"entity_id": SWITCH_ENTITY_ID},
blocking=True,
)
errors = [x for x in caplog.records if x.levelno == logging.ERROR]
assert errors[-2].message == "Unable to enable Pi-hole: Error1"
assert errors[-1].message == "Unable to disable Pi-hole: Error2"
async def test_disable_service_call(hass):
"""Test disable service call with no Pi-hole named."""
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole):
assert await async_setup_component(
hass,
pi_hole.DOMAIN,
{
pi_hole.DOMAIN: [
{"host": "pi.hole1", "api_key": "1"},
{"host": "pi.hole2", "name": "Custom"},
]
},
)
await hass.async_block_till_done()
await hass.services.async_call(
pi_hole.DOMAIN,
SERVICE_DISABLE,
{ATTR_ENTITY_ID: "all", SERVICE_DISABLE_ATTR_DURATION: "00:00:01"},
blocking=True,
)
await hass.async_block_till_done()
mocked_hole.disable.assert_called_once_with(1)
async def test_unload(hass):
"""Test unload entities."""
entry = MockConfigEntry(
domain=pi_hole.DOMAIN,
data={
CONF_NAME: DEFAULT_NAME,
CONF_HOST: "pi.hole",
CONF_LOCATION: DEFAULT_LOCATION,
CONF_SSL: DEFAULT_SSL,
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
},
)
entry.add_to_hass(hass)
mocked_hole = _create_mocked_hole()
with _patch_config_flow_hole(mocked_hole), _patch_init_hole(mocked_hole):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert entry.entry_id in hass.data[pi_hole.DOMAIN]
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.entry_id not in hass.data[pi_hole.DOMAIN]
|
from collections import namedtuple
import os
import time
from unittest.mock import MagicMock
from proboscis.asserts import (
assert_raises, assert_true, assert_false, assert_equal,
assert_is_not, Check
)
from proboscis import test
import gmusicapi.session
from gmusicapi.clients import Mobileclient, Musicmanager
from gmusicapi.exceptions import AlreadyLoggedIn
from gmusicapi.protocol.shared import authtypes
from gmusicapi.protocol import mobileclient
from gmusicapi.utils import utils, jsarray
jsarray_samples = []
jsarray_filenames = [base + '.jsarray' for base in ('searchresult', 'fetchartist')]
test_file_dir = os.path.dirname(os.path.abspath(__file__))
for filepath in [os.path.join(test_file_dir, p) for p in jsarray_filenames]:
with open(filepath, 'r', encoding="utf-8") as f:
jsarray_samples.append(f.read())
# TODO test gather_local, transcoding
# All tests end up in the local group.
test = test(groups=['local'])
@test
def longest_increasing_sub():
lisi = utils.longest_increasing_subseq
assert_equal(lisi([]), [])
assert_equal(lisi(list(range(10, 0, -1))), [1])
assert_equal(lisi(list(range(10, 20))), list(range(10, 20)))
assert_equal(lisi([3, 1, 4, 1, 5, 9, 2, 6, 5, 3, 5, 8, 9, 7, 9]),
[1, 2, 3, 5, 8, 9])
#
# clients
#
# this feels like a dumb pattern, but I can't think of a better way
names = ('Mobileclient', 'Musicmanager') # Webclient removed since testing is disabled.
Clients = namedtuple('Clients', [n.lower() for n in names])
def create_clients():
clients = []
for name in names:
cls = getattr(gmusicapi.clients, name)
c = cls()
# mock out the underlying session
c.session = MagicMock()
clients.append(c)
return Clients(*clients)
@test
def no_client_auth_initially():
# wc = Webclient()
# assert_false(wc.is_authenticated())
mc = Mobileclient()
assert_false(mc.is_authenticated())
mm = Musicmanager()
assert_false(mm.is_authenticated())
@test
def mm_prevents_bad_mac_format():
mm = create_clients().musicmanager
with Check() as check:
for bad_mac in ['bogus',
'11:22:33:44:55:66:',
'11:22:33:44:55:ab',
'11:22:33:44:55']:
check.raises(
ValueError,
mm._perform_upauth,
uploader_id=bad_mac,
uploader_name='valid')
# @test
# def auto_playlists_are_empty():
# # this doesn't actually hit the server at the moment.
# # see issue 102
# api = Api()
# assert_equal(api.get_all_playlist_ids(auto=True, user=False),
# {'auto': {}})
#
# sessions
#
Sessions = namedtuple('Sessions', [n.lower() for n in names])
def create_sessions():
sessions = []
for name in names:
cls = getattr(gmusicapi.session, name)
s = cls()
# mock out the underlying requests.session
s._rsession = MagicMock()
sessions.append(s)
return Sessions(*sessions)
@test
def no_session_auth_initially():
for s in create_sessions():
assert_false(s.is_authenticated)
@test
def session_raises_alreadyloggedin():
for s in create_sessions():
s.is_authenticated = True
def login():
# hackish: login ignores args so we can test them all here;
            # this just ensures we have an acceptable number of args
s.login(*([None] * 3))
assert_raises(AlreadyLoggedIn, login)
@test
def session_logout():
for s in create_sessions():
s.is_authenticated = True
old_session = s._rsession
s.logout()
assert_false(s.is_authenticated)
old_session.close.assert_called_once_with()
assert_is_not(s._rsession, old_session)
@test
def send_without_auth():
for s in create_sessions():
s.is_authenticated = True
mock_session = MagicMock()
mock_req_kwargs = {'fake': 'kwargs'}
s.send(mock_req_kwargs, authtypes(), mock_session)
# sending without auth should not use the normal session,
# since that might have auth cookies automatically attached
assert_false(s._rsession.called)
        mock_session.request.assert_called_once_with(**mock_req_kwargs)
        mock_session.close.assert_called_once_with()
#
# protocol
#
@test
def authtypes_factory_defaults():
auth = authtypes()
assert_false(auth.oauth)
assert_false(auth.sso)
assert_false(auth.xt)
@test
def authtypes_factory_args():
auth = authtypes(oauth=True)
assert_true(auth.oauth)
assert_false(auth.sso)
assert_false(auth.xt)
@test
def mc_url_signing():
sig, _ = mobileclient.GetStreamUrl.get_signature("Tdr6kq3xznv5kdsphyojox6dtoq",
"1373247112519")
assert_equal(sig, b"gua1gInBdaVo7_dSwF9y0kodua0")
#
# utils
#
@test
def retry_failure_propogation():
@utils.retry(tries=1)
def raise_exception():
raise AssertionError
assert_raises(AssertionError, raise_exception)
@test
def retry_sleep_timing():
@utils.retry(tries=3, delay=.05, backoff=2)
def raise_exception():
raise AssertionError
pre = time.time()
assert_raises(AssertionError, raise_exception)
post = time.time()
delta = post - pre
assert_true(.15 < delta < .2, "delta: %s" % delta)
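    # With tries=3, delay=.05 and backoff=2 the decorator is expected to sleep
    # twice (0.05 s, then 0.10 s), about 0.15 s in total, which is what the
    # bounds above check.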
@test
def retry_is_dual_decorator():
@utils.retry
def return_arg(arg=None):
return arg
assert_equal(return_arg(1), 1)
@test
def jsarray_parsing():
for raw in jsarray_samples:
# should not raise an exception
jsarray.loads(raw)
@test
def locate_transcoder():
utils.locate_mp3_transcoder() # should not raise
|
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.tasksqueue import *
class TaskTC(TestCase):
def test_eq(self):
self.assertFalse(Task('t1') == Task('t2'))
self.assertTrue(Task('t1') == Task('t1'))
def test_cmp(self):
self.assertTrue(Task('t1', LOW) < Task('t2', MEDIUM))
self.assertFalse(Task('t1', LOW) > Task('t2', MEDIUM))
self.assertTrue(Task('t1', HIGH) > Task('t2', MEDIUM))
self.assertFalse(Task('t1', HIGH) < Task('t2', MEDIUM))
class PrioritizedTasksQueueTC(TestCase):
def test_priority(self):
queue = PrioritizedTasksQueue()
queue.put(Task('t1'))
queue.put(Task('t2', MEDIUM))
queue.put(Task('t3', HIGH))
queue.put(Task('t4', LOW))
self.assertEqual(queue.get().id, 't3')
self.assertEqual(queue.get().id, 't2')
self.assertEqual(queue.get().id, 't1')
self.assertEqual(queue.get().id, 't4')
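    # Putting a task whose id is already queued does not grow the queue: the
    # assertions below show that duplicates are merged and the highest
    # priority seen for that id wins.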
def test_remove_equivalent(self):
queue = PrioritizedTasksQueue()
queue.put(Task('t1'))
queue.put(Task('t2', MEDIUM))
queue.put(Task('t1', HIGH))
queue.put(Task('t3', MEDIUM))
queue.put(Task('t2', MEDIUM))
self.assertEqual(queue.qsize(), 3)
self.assertEqual(queue.get().id, 't1')
self.assertEqual(queue.get().id, 't2')
self.assertEqual(queue.get().id, 't3')
self.assertEqual(queue.qsize(), 0)
def test_remove(self):
queue = PrioritizedTasksQueue()
queue.put(Task('t1'))
queue.put(Task('t2'))
queue.put(Task('t3'))
queue.remove('t2')
self.assertEqual([t.id for t in queue], ['t3', 't1'])
self.assertRaises(ValueError, queue.remove, 't4')
if __name__ == '__main__':
unittest_main()
|
from datetime import timedelta
from adguardhome import AdGuardHomeConnectionError
from homeassistant.components.adguard import AdGuardHomeDeviceEntity
from homeassistant.components.adguard.const import (
DATA_ADGUARD_CLIENT,
DATA_ADGUARD_VERION,
DOMAIN,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, TIME_MILLISECONDS
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.typing import HomeAssistantType
SCAN_INTERVAL = timedelta(seconds=300)
PARALLEL_UPDATES = 4
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up AdGuard Home sensor based on a config entry."""
adguard = hass.data[DOMAIN][DATA_ADGUARD_CLIENT]
try:
version = await adguard.version()
except AdGuardHomeConnectionError as exception:
raise PlatformNotReady from exception
hass.data[DOMAIN][DATA_ADGUARD_VERION] = version
sensors = [
AdGuardHomeDNSQueriesSensor(adguard),
AdGuardHomeBlockedFilteringSensor(adguard),
AdGuardHomePercentageBlockedSensor(adguard),
AdGuardHomeReplacedParentalSensor(adguard),
AdGuardHomeReplacedSafeBrowsingSensor(adguard),
AdGuardHomeReplacedSafeSearchSensor(adguard),
AdGuardHomeAverageProcessingTimeSensor(adguard),
AdGuardHomeRulesCountSensor(adguard),
]
async_add_entities(sensors, True)
class AdGuardHomeSensor(AdGuardHomeDeviceEntity):
"""Defines a AdGuard Home sensor."""
def __init__(
self,
adguard,
name: str,
icon: str,
measurement: str,
unit_of_measurement: str,
enabled_default: bool = True,
) -> None:
"""Initialize AdGuard Home sensor."""
self._state = None
self._unit_of_measurement = unit_of_measurement
self.measurement = measurement
super().__init__(adguard, name, icon, enabled_default)
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return "_".join(
[
DOMAIN,
self.adguard.host,
str(self.adguard.port),
"sensor",
self.measurement,
]
)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self) -> str:
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
class AdGuardHomeDNSQueriesSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home DNS Queries sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard, "AdGuard DNS Queries", "mdi:magnify", "dns_queries", "queries"
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.stats.dns_queries()
class AdGuardHomeBlockedFilteringSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home blocked by filtering sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard DNS Queries Blocked",
"mdi:magnify-close",
"blocked_filtering",
"queries",
enabled_default=False,
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.stats.blocked_filtering()
class AdGuardHomePercentageBlockedSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home blocked percentage sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard DNS Queries Blocked Ratio",
"mdi:magnify-close",
"blocked_percentage",
PERCENTAGE,
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
percentage = await self.adguard.stats.blocked_percentage()
self._state = f"{percentage:.2f}"
class AdGuardHomeReplacedParentalSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home replaced by parental control sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard Parental Control Blocked",
"mdi:human-male-girl",
"blocked_parental",
"requests",
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.stats.replaced_parental()
class AdGuardHomeReplacedSafeBrowsingSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home replaced by safe browsing sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard Safe Browsing Blocked",
"mdi:shield-half-full",
"blocked_safebrowsing",
"requests",
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.stats.replaced_safebrowsing()
class AdGuardHomeReplacedSafeSearchSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home replaced by safe search sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard Safe Searches Enforced",
"mdi:shield-search",
"enforced_safesearch",
"requests",
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.stats.replaced_safesearch()
class AdGuardHomeAverageProcessingTimeSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home average processing time sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard Average Processing Speed",
"mdi:speedometer",
"average_speed",
TIME_MILLISECONDS,
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
average = await self.adguard.stats.avg_processing_time()
self._state = f"{average:.2f}"
class AdGuardHomeRulesCountSensor(AdGuardHomeSensor):
"""Defines a AdGuard Home rules count sensor."""
def __init__(self, adguard):
"""Initialize AdGuard Home sensor."""
super().__init__(
adguard,
"AdGuard Rules Count",
"mdi:counter",
"rules_count",
"rules",
enabled_default=False,
)
async def _adguard_update(self) -> None:
"""Update AdGuard Home entity."""
self._state = await self.adguard.filtering.rules_count()
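# Illustrative sketch (not part of the original module): additional statistics
# can be exposed by following the same pattern -- subclass AdGuardHomeSensor,
# describe the sensor in __init__, and fetch the value in _adguard_update().
# The ``some_statistic()`` call below is hypothetical.
#
# class AdGuardHomeExampleSensor(AdGuardHomeSensor):
#     """Defines a hypothetical AdGuard Home sensor."""
#
#     def __init__(self, adguard):
#         """Initialize AdGuard Home sensor."""
#         super().__init__(
#             adguard, "AdGuard Example", "mdi:counter", "example", "items"
#         )
#
#     async def _adguard_update(self) -> None:
#         """Update AdGuard Home entity."""
#         self._state = await self.adguard.stats.some_statistic()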
|
from __future__ import print_function, absolute_import
from .factories import StyleFactory
from .styles import Style, ANSIStyle, HTMLStyle, ColorNotFound
ansicolors = StyleFactory(ANSIStyle)
htmlcolors = StyleFactory(HTMLStyle)
def load_ipython_extension(ipython): # pragma: no cover
try:
from ._ipython_ext import OutputMagics
except ImportError:
print("IPython required for the IPython extension to be loaded.")
raise
ipython.push({"colors": htmlcolors})
ipython.register_magics(OutputMagics)
def main(): # pragma: no cover
"""Color changing script entry. Call using
python -m plumbum.colors, will reset if no arguments given."""
import sys
color = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else ''
ansicolors.use_color = True
ansicolors.get_colors_from_string(color).now()
|
import logging
from typing import Callable, List, Optional
from directv import DIRECTV
from homeassistant.components.media_player import (
DEVICE_CLASS_RECEIVER,
MediaPlayerEntity,
)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STATE_PAUSED, STATE_PLAYING
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import dt as dt_util
from . import DIRECTVEntity
from .const import (
ATTR_MEDIA_CURRENTLY_RECORDING,
ATTR_MEDIA_RATING,
ATTR_MEDIA_RECORDED,
ATTR_MEDIA_START_TIME,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
KNOWN_MEDIA_TYPES = [MEDIA_TYPE_MOVIE, MEDIA_TYPE_MUSIC, MEDIA_TYPE_TVSHOW]
SUPPORT_DTV = (
SUPPORT_PAUSE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY
)
SUPPORT_DTV_CLIENT = (
SUPPORT_PAUSE
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY
)
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[List, bool], None],
) -> None:
"""Set up the DirecTV config entry."""
dtv = hass.data[DOMAIN][entry.entry_id]
entities = []
for location in dtv.device.locations:
entities.append(
DIRECTVMediaPlayer(
dtv=dtv,
name=str.title(location.name),
address=location.address,
)
)
async_add_entities(entities, True)
class DIRECTVMediaPlayer(DIRECTVEntity, MediaPlayerEntity):
"""Representation of a DirecTV receiver on the network."""
def __init__(self, *, dtv: DIRECTV, name: str, address: str = "0") -> None:
"""Initialize DirecTV media player."""
super().__init__(
dtv=dtv,
name=name,
address=address,
)
self._assumed_state = None
self._available = False
self._is_recorded = None
self._is_standby = True
self._last_position = None
self._last_update = None
self._paused = None
self._program = None
self._state = None
async def async_update(self):
"""Retrieve latest state."""
self._state = await self.dtv.state(self._address)
self._available = self._state.available
self._is_standby = self._state.standby
self._program = self._state.program
if self._is_standby:
self._assumed_state = False
self._is_recorded = None
self._last_position = None
self._last_update = None
self._paused = None
elif self._program is not None:
self._paused = self._last_position == self._program.position
self._is_recorded = self._program.recorded
self._last_position = self._program.position
self._last_update = self._state.at
self._assumed_state = self._is_recorded
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
if self._is_standby:
return {}
return {
ATTR_MEDIA_CURRENTLY_RECORDING: self.media_currently_recording,
ATTR_MEDIA_RATING: self.media_rating,
ATTR_MEDIA_RECORDED: self.media_recorded,
ATTR_MEDIA_START_TIME: self.media_start_time,
}
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device."""
return DEVICE_CLASS_RECEIVER
@property
def unique_id(self):
"""Return a unique ID to use for this media player."""
if self._address == "0":
return self.dtv.device.info.receiver_id
return self._address
# MediaPlayerEntity properties and methods
@property
def state(self):
"""Return the state of the device."""
if self._is_standby:
return STATE_OFF
# For recorded media we can determine if it is paused or not.
# For live media we're unable to determine and will always return
# playing instead.
if self._paused:
return STATE_PAUSED
return STATE_PLAYING
@property
def available(self):
"""Return if able to retrieve information from DVR or not."""
return self._available
@property
def assumed_state(self):
"""Return if we assume the state or not."""
return self._assumed_state
@property
def media_content_id(self):
"""Return the content ID of current playing media."""
if self._is_standby or self._program is None:
return None
return self._program.program_id
@property
def media_content_type(self):
"""Return the content type of current playing media."""
if self._is_standby or self._program is None:
return None
if self._program.program_type in KNOWN_MEDIA_TYPES:
return self._program.program_type
return MEDIA_TYPE_MOVIE
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
if self._is_standby or self._program is None:
return None
return self._program.duration
@property
def media_position(self):
"""Position of current playing media in seconds."""
if self._is_standby:
return None
return self._last_position
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
if self._is_standby:
return None
return self._last_update
@property
def media_title(self):
"""Return the title of current playing media."""
if self._is_standby or self._program is None:
return None
if self.media_content_type == MEDIA_TYPE_MUSIC:
return self._program.music_title
return self._program.title
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
if self._is_standby or self._program is None:
return None
return self._program.music_artist
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
if self._is_standby or self._program is None:
return None
return self._program.music_album
@property
def media_series_title(self):
"""Return the title of current episode of TV show."""
if self._is_standby or self._program is None:
return None
return self._program.episode_title
@property
def media_channel(self):
"""Return the channel current playing media."""
if self._is_standby or self._program is None:
return None
return f"{self._program.channel_name} ({self._program.channel})"
@property
def source(self):
"""Name of the current input source."""
if self._is_standby or self._program is None:
return None
return self._program.channel
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_DTV_CLIENT if self._is_client else SUPPORT_DTV
@property
def media_currently_recording(self):
"""If the media is currently being recorded or not."""
if self._is_standby or self._program is None:
return None
return self._program.recording
@property
def media_rating(self):
"""TV Rating of the current playing media."""
if self._is_standby or self._program is None:
return None
return self._program.rating
@property
def media_recorded(self):
"""If the media was recorded or live."""
if self._is_standby:
return None
return self._is_recorded
@property
def media_start_time(self):
"""Start time the program aired."""
if self._is_standby or self._program is None:
return None
return dt_util.as_local(self._program.start_time)
async def async_turn_on(self):
"""Turn on the receiver."""
if self._is_client:
raise NotImplementedError()
_LOGGER.debug("Turn on %s", self._name)
await self.dtv.remote("poweron", self._address)
async def async_turn_off(self):
"""Turn off the receiver."""
if self._is_client:
raise NotImplementedError()
_LOGGER.debug("Turn off %s", self._name)
await self.dtv.remote("poweroff", self._address)
async def async_media_play(self):
"""Send play command."""
_LOGGER.debug("Play on %s", self._name)
await self.dtv.remote("play", self._address)
async def async_media_pause(self):
"""Send pause command."""
_LOGGER.debug("Pause on %s", self._name)
await self.dtv.remote("pause", self._address)
async def async_media_stop(self):
"""Send stop command."""
_LOGGER.debug("Stop on %s", self._name)
await self.dtv.remote("stop", self._address)
async def async_media_previous_track(self):
"""Send rewind command."""
_LOGGER.debug("Rewind on %s", self._name)
await self.dtv.remote("rew", self._address)
async def async_media_next_track(self):
"""Send fast forward command."""
_LOGGER.debug("Fast forward on %s", self._name)
await self.dtv.remote("ffwd", self._address)
async def async_play_media(self, media_type, media_id, **kwargs):
"""Select input source."""
if media_type != MEDIA_TYPE_CHANNEL:
_LOGGER.error(
"Invalid media type %s. Only %s is supported",
media_type,
MEDIA_TYPE_CHANNEL,
)
return
_LOGGER.debug("Changing channel on %s to %s", self._name, media_id)
await self.dtv.tune(media_id, self._address)
|
from datetime import timedelta
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_OFF, STATE_ON
from . import DOMAIN as TAHOMA_DOMAIN, TahomaDevice
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=120)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Tahoma controller devices."""
if discovery_info is None:
return
_LOGGER.debug("Setup Tahoma Binary sensor platform")
controller = hass.data[TAHOMA_DOMAIN]["controller"]
devices = []
for device in hass.data[TAHOMA_DOMAIN]["devices"]["smoke"]:
devices.append(TahomaBinarySensor(device, controller))
add_entities(devices, True)
class TahomaBinarySensor(TahomaDevice, BinarySensorEntity):
"""Representation of a Tahoma Binary Sensor."""
def __init__(self, tahoma_device, controller):
"""Initialize the sensor."""
super().__init__(tahoma_device, controller)
self._state = None
self._icon = None
self._battery = None
self._available = False
@property
def is_on(self):
"""Return the state of the sensor."""
return bool(self._state == STATE_ON)
@property
def device_class(self):
"""Return the class of the device."""
if self.tahoma_device.type == "rtds:RTDSSmokeSensor":
return DEVICE_CLASS_SMOKE
return None
@property
def icon(self):
"""Icon for device by its type."""
return self._icon
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attr = {}
super_attr = super().device_state_attributes
if super_attr is not None:
attr.update(super_attr)
if self._battery is not None:
attr[ATTR_BATTERY_LEVEL] = self._battery
return attr
@property
def available(self):
"""Return True if entity is available."""
return self._available
def update(self):
"""Update the state."""
self.controller.get_states([self.tahoma_device])
if self.tahoma_device.type == "rtds:RTDSSmokeSensor":
if self.tahoma_device.active_states["core:SmokeState"] == "notDetected":
self._state = STATE_OFF
else:
self._state = STATE_ON
if "core:SensorDefectState" in self.tahoma_device.active_states:
# 'lowBattery' for low battery warning. 'dead' for not available.
self._battery = self.tahoma_device.active_states["core:SensorDefectState"]
self._available = bool(self._battery != "dead")
else:
self._battery = None
self._available = True
if self._state == STATE_ON:
self._icon = "mdi:fire"
elif self._battery == "lowBattery":
self._icon = "mdi:battery-alert"
else:
self._icon = None
_LOGGER.debug("Update %s, state: %s", self._name, self._state)
|
import logging
import unittest
from collections import namedtuple
from gensim.topic_coherence import direct_confirmation_measure
from gensim.topic_coherence import text_analysis
class TestDirectConfirmationMeasure(unittest.TestCase):
def setUp(self):
        # Set up a toy example for better understanding and testing
        # of this module. See the modules under test for the mathematical formulas.
self.segmentation = [[(1, 2)]]
self.posting_list = {1: {2, 3, 4}, 2: {3, 5}}
self.num_docs = 5
id2token = {1: 'test', 2: 'doc'}
token2id = {v: k for k, v in id2token.items()}
dictionary = namedtuple('Dictionary', 'token2id, id2token')(token2id, id2token)
self.accumulator = text_analysis.InvertedIndexAccumulator({1, 2}, dictionary)
self.accumulator._inverted_index = {0: {2, 3, 4}, 1: {3, 5}}
self.accumulator._num_docs = self.num_docs
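        # Note on the toy data (assuming internal ids 0 and 1 stand for token
        # ids 1 and 2): token 1 appears in docs {2, 3, 4}, token 2 in docs
        # {3, 5}, and they co-occur only in doc 3, out of 5 documents total.
        # The expected values in the tests below follow from these counts.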
def testLogConditionalProbability(self):
"""Test log_conditional_probability()"""
obtained = direct_confirmation_measure.log_conditional_probability(
self.segmentation, self.accumulator)[0]
# Answer should be ~ ln(1 / 2) = -0.693147181
expected = -0.693147181
self.assertAlmostEqual(expected, obtained)
mean, std = direct_confirmation_measure.log_conditional_probability(
self.segmentation, self.accumulator, with_std=True)[0]
self.assertAlmostEqual(expected, mean)
self.assertEqual(0.0, std)
def testLogRatioMeasure(self):
"""Test log_ratio_measure()"""
obtained = direct_confirmation_measure.log_ratio_measure(
self.segmentation, self.accumulator)[0]
# Answer should be ~ ln{(1 / 5) / [(3 / 5) * (2 / 5)]} = -0.182321557
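        # i.e. PMI: the joint probability (1 / 5) divided by the product of
        # the marginals (3 / 5) and (2 / 5), then log-transformed.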
expected = -0.182321557
self.assertAlmostEqual(expected, obtained)
mean, std = direct_confirmation_measure.log_ratio_measure(
self.segmentation, self.accumulator, with_std=True)[0]
self.assertAlmostEqual(expected, mean)
self.assertEqual(0.0, std)
def testNormalizedLogRatioMeasure(self):
"""Test normalized_log_ratio_measure()"""
obtained = direct_confirmation_measure.log_ratio_measure(
self.segmentation, self.accumulator, normalize=True)[0]
# Answer should be ~ -0.182321557 / -ln(1 / 5) = -0.113282753
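        # i.e. NPMI: the PMI from the previous test divided by -ln of the
        # joint probability (1 / 5).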
expected = -0.113282753
self.assertAlmostEqual(expected, obtained)
mean, std = direct_confirmation_measure.log_ratio_measure(
self.segmentation, self.accumulator, normalize=True, with_std=True)[0]
self.assertAlmostEqual(expected, mean)
self.assertEqual(0.0, std)
if __name__ == '__main__':
logging.root.setLevel(logging.WARNING)
unittest.main()
|
import pytest
from jinja2 import DictLoader
from jinja2 import Environment
from jinja2 import TemplateRuntimeError
LAYOUTTEMPLATE = """\
|{% block block1 %}block 1 from layout{% endblock %}
|{% block block2 %}block 2 from layout{% endblock %}
|{% block block3 %}
{% block block4 %}nested block 4 from layout{% endblock %}
{% endblock %}|"""
LEVEL1TEMPLATE = """\
{% extends "layout" %}
{% block block1 %}block 1 from level1{% endblock %}"""
LEVEL2TEMPLATE = """\
{% extends "level1" %}
{% block block2 %}{% block block5 %}nested block 5 from level2{%
endblock %}{% endblock %}"""
LEVEL3TEMPLATE = """\
{% extends "level2" %}
{% block block5 %}block 5 from level3{% endblock %}
{% block block4 %}block 4 from level3{% endblock %}
"""
LEVEL4TEMPLATE = """\
{% extends "level3" %}
{% block block3 %}block 3 from level4{% endblock %}
"""
WORKINGTEMPLATE = """\
{% extends "layout" %}
{% block block1 %}
{% if false %}
{% block block2 %}
      this should work
{% endblock %}
{% endif %}
{% endblock %}
"""
DOUBLEEXTENDS = """\
{% extends "layout" %}
{% extends "layout" %}
{% block block1 %}
{% if false %}
{% block block2 %}
      this should work
{% endblock %}
{% endif %}
{% endblock %}
"""
@pytest.fixture
def env():
return Environment(
loader=DictLoader(
{
"layout": LAYOUTTEMPLATE,
"level1": LEVEL1TEMPLATE,
"level2": LEVEL2TEMPLATE,
"level3": LEVEL3TEMPLATE,
"level4": LEVEL4TEMPLATE,
"working": WORKINGTEMPLATE,
"doublee": DOUBLEEXTENDS,
}
),
trim_blocks=True,
)
class TestInheritance:
def test_layout(self, env):
tmpl = env.get_template("layout")
assert tmpl.render() == (
"|block 1 from layout|block 2 from layout|nested block 4 from layout|"
)
def test_level1(self, env):
tmpl = env.get_template("level1")
assert tmpl.render() == (
"|block 1 from level1|block 2 from layout|nested block 4 from layout|"
)
def test_level2(self, env):
tmpl = env.get_template("level2")
assert tmpl.render() == (
"|block 1 from level1|nested block 5 from "
"level2|nested block 4 from layout|"
)
def test_level3(self, env):
tmpl = env.get_template("level3")
assert tmpl.render() == (
"|block 1 from level1|block 5 from level3|block 4 from level3|"
)
def test_level4(self, env):
tmpl = env.get_template("level4")
assert tmpl.render() == (
"|block 1 from level1|block 5 from level3|block 3 from level4|"
)
def test_super(self, env):
env = Environment(
loader=DictLoader(
{
"a": "{% block intro %}INTRO{% endblock %}|"
"BEFORE|{% block data %}INNER{% endblock %}|AFTER",
"b": '{% extends "a" %}{% block data %}({{ '
"super() }}){% endblock %}",
"c": '{% extends "b" %}{% block intro %}--{{ '
"super() }}--{% endblock %}\n{% block data "
"%}[{{ super() }}]{% endblock %}",
}
)
)
tmpl = env.get_template("c")
assert tmpl.render() == "--INTRO--|BEFORE|[(INNER)]|AFTER"
def test_working(self, env):
env.get_template("working")
def test_reuse_blocks(self, env):
tmpl = env.from_string(
"{{ self.foo() }}|{% block foo %}42{% endblock %}|{{ self.foo() }}"
)
assert tmpl.render() == "42|42|42"
def test_preserve_blocks(self, env):
env = Environment(
loader=DictLoader(
{
"a": "{% if false %}{% block x %}A{% endblock %}"
"{% endif %}{{ self.x() }}",
"b": '{% extends "a" %}{% block x %}B{{ super() }}{% endblock %}',
}
)
)
tmpl = env.get_template("b")
assert tmpl.render() == "BA"
def test_dynamic_inheritance(self, env):
env = Environment(
loader=DictLoader(
{
"master1": "MASTER1{% block x %}{% endblock %}",
"master2": "MASTER2{% block x %}{% endblock %}",
"child": "{% extends master %}{% block x %}CHILD{% endblock %}",
}
)
)
tmpl = env.get_template("child")
for m in range(1, 3):
assert tmpl.render(master=f"master{m}") == f"MASTER{m}CHILD"
def test_multi_inheritance(self, env):
env = Environment(
loader=DictLoader(
{
"master1": "MASTER1{% block x %}{% endblock %}",
"master2": "MASTER2{% block x %}{% endblock %}",
"child": """{% if master %}{% extends master %}{% else %}{% extends
'master1' %}{% endif %}{% block x %}CHILD{% endblock %}""",
}
)
)
tmpl = env.get_template("child")
assert tmpl.render(master="master2") == "MASTER2CHILD"
assert tmpl.render(master="master1") == "MASTER1CHILD"
assert tmpl.render() == "MASTER1CHILD"
def test_scoped_block(self, env):
env = Environment(
loader=DictLoader(
{
"master.html": "{% for item in seq %}[{% block item scoped %}"
"{% endblock %}]{% endfor %}"
}
)
)
t = env.from_string(
"{% extends 'master.html' %}{% block item %}{{ item }}{% endblock %}"
)
assert t.render(seq=list(range(5))) == "[0][1][2][3][4]"
def test_super_in_scoped_block(self, env):
env = Environment(
loader=DictLoader(
{
"master.html": "{% for item in seq %}[{% block item scoped %}"
"{{ item }}{% endblock %}]{% endfor %}"
}
)
)
t = env.from_string(
'{% extends "master.html" %}{% block item %}'
"{{ super() }}|{{ item * 2 }}{% endblock %}"
)
assert t.render(seq=list(range(5))) == "[0|0][1|2][2|4][3|6][4|8]"
def test_scoped_block_after_inheritance(self, env):
env = Environment(
loader=DictLoader(
{
"layout.html": """
{% block useless %}{% endblock %}
""",
"index.html": """
{%- extends 'layout.html' %}
{% from 'helpers.html' import foo with context %}
{% block useless %}
{% for x in [1, 2, 3] %}
{% block testing scoped %}
{{ foo(x) }}
{% endblock %}
{% endfor %}
{% endblock %}
""",
"helpers.html": """
{% macro foo(x) %}{{ the_foo + x }}{% endmacro %}
""",
}
)
)
rv = env.get_template("index.html").render(the_foo=42).split()
assert rv == ["43", "44", "45"]
class TestBugFix:
def test_fixed_macro_scoping_bug(self, env):
assert (
Environment(
loader=DictLoader(
{
"test.html": """\
{% extends 'details.html' %}
{% macro my_macro() %}
my_macro
{% endmacro %}
{% block inner_box %}
{{ my_macro() }}
{% endblock %}
""",
"details.html": """\
{% extends 'standard.html' %}
{% macro my_macro() %}
my_macro
{% endmacro %}
{% block content %}
{% block outer_box %}
outer_box
{% block inner_box %}
inner_box
{% endblock %}
{% endblock %}
{% endblock %}
""",
"standard.html": """
{% block content %} {% endblock %}
""",
}
)
)
.get_template("test.html")
.render()
.split()
== ["outer_box", "my_macro"]
)
def test_double_extends(self, env):
"""Ensures that a template with more than 1 {% extends ... %} usage
raises a ``TemplateError``.
"""
with pytest.raises(TemplateRuntimeError, match="extended multiple times"):
env.get_template("doublee").render()
|
import asyncio
import async_timeout
from sharkiqpy import (
AylaApi,
SharkIqAuthError,
SharkIqAuthExpiringError,
SharkIqNotAuthedError,
get_ayla_api,
)
from homeassistant import exceptions
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import _LOGGER, API_TIMEOUT, COMPONENTS, DOMAIN
from .update_coordinator import SharkIqUpdateCoordinator
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
async def async_setup(hass, config):
"""Set up the sharkiq environment."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_connect_or_timeout(ayla_api: AylaApi) -> bool:
"""Connect to vacuum."""
try:
with async_timeout.timeout(API_TIMEOUT):
_LOGGER.debug("Initialize connection to Ayla networks API")
await ayla_api.async_sign_in()
except SharkIqAuthError:
_LOGGER.error("Authentication error connecting to Shark IQ api")
return False
except asyncio.TimeoutError as exc:
_LOGGER.error("Timeout expired")
raise CannotConnect from exc
return True
async def async_setup_entry(hass, config_entry):
"""Initialize the sharkiq platform via config entry."""
ayla_api = get_ayla_api(
username=config_entry.data[CONF_USERNAME],
password=config_entry.data[CONF_PASSWORD],
websession=hass.helpers.aiohttp_client.async_get_clientsession(),
)
try:
if not await async_connect_or_timeout(ayla_api):
return False
except CannotConnect as exc:
raise exceptions.ConfigEntryNotReady from exc
shark_vacs = await ayla_api.async_get_devices(False)
device_names = ", ".join([d.name for d in shark_vacs])
_LOGGER.debug("Found %d Shark IQ device(s): %s", len(shark_vacs), device_names)
coordinator = SharkIqUpdateCoordinator(hass, config_entry, ayla_api, shark_vacs)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise exceptions.ConfigEntryNotReady
hass.data[DOMAIN][config_entry.entry_id] = coordinator
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_disconnect_or_timeout(coordinator: SharkIqUpdateCoordinator):
"""Disconnect to vacuum."""
_LOGGER.debug("Disconnecting from Ayla Api")
with async_timeout.timeout(5):
try:
await coordinator.ayla_api.async_sign_out()
except (SharkIqAuthError, SharkIqAuthExpiringError, SharkIqNotAuthedError):
pass
async def async_update_options(hass, config_entry):
"""Update options."""
await hass.config_entries.async_reload(config_entry.entry_id)
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENTS
]
)
)
if unload_ok:
domain_data = hass.data[DOMAIN][config_entry.entry_id]
try:
await async_disconnect_or_timeout(coordinator=domain_data)
except SharkIqAuthError:
pass
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
|
from __future__ import print_function
import json
import tempfile
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import util
class LogGroup(resource.BaseResource):
"""Class representing a CloudWatch log group."""
def __init__(self, region, name, retention_in_days=7):
super(LogGroup, self).__init__()
self.region = region
self.name = name
self.retention_in_days = retention_in_days
def _Create(self):
"""Create the log group."""
create_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'create-log-group',
'--log-group-name', self.name
]
vm_util.IssueCommand(create_cmd)
def _Delete(self):
"""Delete the log group."""
delete_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'delete-log-group',
'--log-group-name', self.name
]
vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
def Exists(self):
"""Returns True if the log group exists."""
describe_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'describe-log-groups',
'--log-group-name-prefix', self.name,
'--no-paginate'
]
stdout, _, _ = vm_util.IssueCommand(describe_cmd)
log_groups = json.loads(stdout)['logGroups']
group = next((group for group in log_groups
if group['logGroupName'] == self.name), None)
return bool(group)
def _PostCreate(self):
"""Set the retention policy."""
put_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'put-retention-policy',
'--log-group-name', self.name,
'--retention-in-days', str(self.retention_in_days)
]
vm_util.IssueCommand(put_cmd)
def GetLogs(region, stream_name, group_name, token=None):
"""Fetches the JSON formatted log stream starting at the token."""
get_cmd = util.AWS_PREFIX + [
'--region', region,
'logs', 'get-log-events',
'--start-from-head',
'--log-group-name', group_name,
'--log-stream-name', stream_name,
]
if token:
get_cmd.extend(['--next-token', token])
stdout, _, _ = vm_util.IssueCommand(get_cmd)
return json.loads(stdout)
def GetLogStreamAsString(region, stream_name, log_group):
"""Returns the messages of the log stream as a string."""
with tempfile.TemporaryFile() as tf:
token = None
events = []
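    # Page through the stream with get-log-events; stop once a page comes
    # back with no events, which indicates the end of the stream.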
while token is None or events:
response = GetLogs(region, stream_name, log_group, token)
events = response['events']
token = response['nextForwardToken']
for event in events:
print(event['message'], file=tf)
tf.seek(0)
return tf.read()
|
import datetime
import logging
import math
import voluptuous as vol
from homeassistant.components import history
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_NAME,
CONF_STATE,
CONF_TYPE,
EVENT_HOMEASSISTANT_START,
PERCENTAGE,
TIME_HOURS,
)
from homeassistant.core import CoreState, callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.reload import setup_reload_service
import homeassistant.util.dt as dt_util
from . import DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
CONF_START = "start"
CONF_END = "end"
CONF_DURATION = "duration"
CONF_PERIOD_KEYS = [CONF_START, CONF_END, CONF_DURATION]
CONF_TYPE_TIME = "time"
CONF_TYPE_RATIO = "ratio"
CONF_TYPE_COUNT = "count"
CONF_TYPE_KEYS = [CONF_TYPE_TIME, CONF_TYPE_RATIO, CONF_TYPE_COUNT]
DEFAULT_NAME = "unnamed statistics"
UNITS = {
CONF_TYPE_TIME: TIME_HOURS,
CONF_TYPE_RATIO: PERCENTAGE,
CONF_TYPE_COUNT: "",
}
ICON = "mdi:chart-line"
ATTR_VALUE = "value"
def exactly_two_period_keys(conf):
"""Ensure exactly 2 of CONF_PERIOD_KEYS are provided."""
if sum(param in conf for param in CONF_PERIOD_KEYS) != 2:
raise vol.Invalid(
"You must provide exactly 2 of the following: start, end, duration"
)
return conf
PLATFORM_SCHEMA = vol.All(
PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_STATE): cv.string,
vol.Optional(CONF_START): cv.template,
vol.Optional(CONF_END): cv.template,
vol.Optional(CONF_DURATION): cv.time_period,
vol.Optional(CONF_TYPE, default=CONF_TYPE_TIME): vol.In(CONF_TYPE_KEYS),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
),
exactly_two_period_keys,
)
# noinspection PyUnusedLocal
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the History Stats sensor."""
setup_reload_service(hass, DOMAIN, PLATFORMS)
entity_id = config.get(CONF_ENTITY_ID)
entity_state = config.get(CONF_STATE)
start = config.get(CONF_START)
end = config.get(CONF_END)
duration = config.get(CONF_DURATION)
sensor_type = config.get(CONF_TYPE)
name = config.get(CONF_NAME)
for template in [start, end]:
if template is not None:
template.hass = hass
add_entities(
[
HistoryStatsSensor(
hass, entity_id, entity_state, start, end, duration, sensor_type, name
)
]
)
return True
class HistoryStatsSensor(Entity):
"""Representation of a HistoryStats sensor."""
def __init__(
self, hass, entity_id, entity_state, start, end, duration, sensor_type, name
):
"""Initialize the HistoryStats sensor."""
self._entity_id = entity_id
self._entity_state = entity_state
self._duration = duration
self._start = start
self._end = end
self._type = sensor_type
self._name = name
self._unit_of_measurement = UNITS[sensor_type]
self._period = (datetime.datetime.now(), datetime.datetime.now())
self.value = None
self.count = None
async def async_added_to_hass(self):
"""Create listeners when the entity is added."""
@callback
def start_refresh(*args):
"""Register state tracking."""
@callback
def force_refresh(*args):
"""Force the component to refresh."""
self.async_schedule_update_ha_state(True)
force_refresh()
self.async_on_remove(
async_track_state_change_event(
self.hass, [self._entity_id], force_refresh
)
)
if self.hass.state == CoreState.running:
start_refresh()
return
# Delay first refresh to keep startup fast
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_refresh)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self.value is None or self.count is None:
return None
if self._type == CONF_TYPE_TIME:
return round(self.value, 2)
if self._type == CONF_TYPE_RATIO:
return HistoryStatsHelper.pretty_ratio(self.value, self._period)
if self._type == CONF_TYPE_COUNT:
return self.count
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self.value is None:
return {}
hsh = HistoryStatsHelper
return {ATTR_VALUE: hsh.pretty_duration(self.value)}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the states."""
# Get previous values of start and end
p_start, p_end = self._period
# Parse templates
self.update_period()
start, end = self._period
# Convert times to UTC
start = dt_util.as_utc(start)
end = dt_util.as_utc(end)
p_start = dt_util.as_utc(p_start)
p_end = dt_util.as_utc(p_end)
now = datetime.datetime.now()
# Compute integer timestamps
start_timestamp = math.floor(dt_util.as_timestamp(start))
end_timestamp = math.floor(dt_util.as_timestamp(end))
p_start_timestamp = math.floor(dt_util.as_timestamp(p_start))
p_end_timestamp = math.floor(dt_util.as_timestamp(p_end))
now_timestamp = math.floor(dt_util.as_timestamp(now))
        # If the period has not changed and the current time is after the period end...
if (
start_timestamp == p_start_timestamp
and end_timestamp == p_end_timestamp
and end_timestamp <= now_timestamp
):
# Don't compute anything as the value cannot have changed
return
# Get history between start and end
history_list = history.state_changes_during_period(
self.hass, start, end, str(self._entity_id)
)
if self._entity_id not in history_list:
return
# Get the first state
last_state = history.get_state(self.hass, start, self._entity_id)
last_state = last_state is not None and last_state == self._entity_state
last_time = start_timestamp
elapsed = 0
count = 0
# Make calculations
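        # Walk the recorded state changes in order, accumulating the time the
        # entity spent in the tracked state and counting off -> on transitions.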
for item in history_list.get(self._entity_id):
current_state = item.state == self._entity_state
current_time = item.last_changed.timestamp()
if last_state:
elapsed += current_time - last_time
if current_state and not last_state:
count += 1
last_state = current_state
last_time = current_time
# Count time elapsed between last history state and end of measure
if last_state:
measure_end = min(end_timestamp, now_timestamp)
elapsed += measure_end - last_time
# Save value in hours
self.value = elapsed / 3600
# Save counter
self.count = count
def update_period(self):
"""Parse the templates and store a datetime tuple in _period."""
start = None
end = None
# Parse start
if self._start is not None:
try:
start_rendered = self._start.render()
except (TemplateError, TypeError) as ex:
HistoryStatsHelper.handle_template_exception(ex, "start")
return
if isinstance(start_rendered, str):
start = dt_util.parse_datetime(start_rendered)
if start is None:
try:
start = dt_util.as_local(
dt_util.utc_from_timestamp(math.floor(float(start_rendered)))
)
except ValueError:
_LOGGER.error(
"Parsing error: start must be a datetime or a timestamp"
)
return
# Parse end
if self._end is not None:
try:
end_rendered = self._end.render()
except (TemplateError, TypeError) as ex:
HistoryStatsHelper.handle_template_exception(ex, "end")
return
if isinstance(end_rendered, str):
end = dt_util.parse_datetime(end_rendered)
if end is None:
try:
end = dt_util.as_local(
dt_util.utc_from_timestamp(math.floor(float(end_rendered)))
)
except ValueError:
_LOGGER.error(
"Parsing error: end must be a datetime or a timestamp"
)
return
# Calculate start or end using the duration
if start is None:
start = end - self._duration
if end is None:
end = start + self._duration
if start > dt_util.now():
# History hasn't been written yet for this period
return
if dt_util.now() < end:
# No point in making stats of the future
end = dt_util.now()
self._period = start, end
class HistoryStatsHelper:
"""Static methods to make the HistoryStatsSensor code lighter."""
@staticmethod
def pretty_duration(hours):
"""Format a duration in days, hours, minutes, seconds."""
seconds = int(3600 * hours)
days, seconds = divmod(seconds, 86400)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if days > 0:
return "%dd %dh %dm" % (days, hours, minutes)
if hours > 0:
return "%dh %dm" % (hours, minutes)
return "%dm" % minutes
@staticmethod
def pretty_ratio(value, period):
"""Format the ratio of value / period duration."""
if len(period) != 2 or period[0] == period[1]:
return 0.0
ratio = 100 * 3600 * value / (period[1] - period[0]).total_seconds()
return round(ratio, 1)
@staticmethod
def handle_template_exception(ex, field):
"""Log an error nicely if the template cannot be interpreted."""
if ex.args and ex.args[0].startswith("UndefinedError: 'None' has no attribute"):
# Common during HA startup - so just a warning
_LOGGER.warning(ex)
return
_LOGGER.error("Error parsing template for field %s", field)
_LOGGER.error(ex)
|
from pyowm import OWM
from pyowm.exceptions.api_call_error import APICallError
from pyowm.exceptions.api_response_error import UnauthorizedError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MODE,
CONF_NAME,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_LANGUAGE,
DEFAULT_FORECAST_MODE,
DEFAULT_LANGUAGE,
DEFAULT_NAME,
FORECAST_MODES,
LANGUAGES,
)
from .const import DOMAIN # pylint:disable=unused-import
SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_MODE, default=DEFAULT_FORECAST_MODE): vol.In(FORECAST_MODES),
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGES),
}
)
class OpenWeatherMapConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for OpenWeatherMap."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OpenWeatherMapOptionsFlow(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
latitude = user_input[CONF_LATITUDE]
longitude = user_input[CONF_LONGITUDE]
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
try:
api_online = await _is_owm_api_online(
self.hass, user_input[CONF_API_KEY]
)
if not api_online:
errors["base"] = "invalid_api_key"
except UnauthorizedError:
errors["base"] = "invalid_api_key"
except APICallError:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
return self.async_show_form(step_id="user", data_schema=SCHEMA, errors=errors)
async def async_step_import(self, import_input=None):
"""Set the config entry up from yaml."""
config = import_input.copy()
if CONF_NAME not in config:
config[CONF_NAME] = DEFAULT_NAME
if CONF_LATITUDE not in config:
config[CONF_LATITUDE] = self.hass.config.latitude
if CONF_LONGITUDE not in config:
config[CONF_LONGITUDE] = self.hass.config.longitude
if CONF_MODE not in config:
config[CONF_MODE] = DEFAULT_FORECAST_MODE
if CONF_LANGUAGE not in config:
config[CONF_LANGUAGE] = DEFAULT_LANGUAGE
return await self.async_step_user(config)
class OpenWeatherMapOptionsFlow(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self._get_options_schema(),
)
def _get_options_schema(self):
return vol.Schema(
{
vol.Optional(
CONF_MODE,
default=self.config_entry.options.get(
CONF_MODE, DEFAULT_FORECAST_MODE
),
): vol.In(FORECAST_MODES),
vol.Optional(
CONF_LANGUAGE,
default=self.config_entry.options.get(
CONF_LANGUAGE, DEFAULT_LANGUAGE
),
): vol.In(LANGUAGES),
}
)
async def _is_owm_api_online(hass, api_key):
owm = OWM(api_key)
return await hass.async_add_executor_job(owm.is_API_online)
|
import logging
from typing import Any, Dict, List
from surepy import (
SurePetcare,
SurePetcareAuthenticationError,
SurePetcareError,
SureProductID,
)
import voluptuous as vol
from homeassistant.const import (
CONF_ID,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_TYPE,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import (
CONF_FEEDERS,
CONF_FLAPS,
CONF_PARENT,
CONF_PETS,
CONF_PRODUCT_ID,
DATA_SURE_PETCARE,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
SPC,
SURE_API_TIMEOUT,
TOPIC_UPDATE,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_FEEDERS, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_FLAPS, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_PETS): vol.All(cv.ensure_list, [cv.positive_int]),
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config) -> bool:
"""Initialize the Sure Petcare component."""
conf = config[DOMAIN]
# update interval
scan_interval = conf[CONF_SCAN_INTERVAL]
# shared data
hass.data[DOMAIN] = hass.data[DATA_SURE_PETCARE] = {}
# sure petcare api connection
try:
surepy = SurePetcare(
conf[CONF_USERNAME],
conf[CONF_PASSWORD],
hass.loop,
async_get_clientsession(hass),
api_timeout=SURE_API_TIMEOUT,
)
await surepy.get_data()
except SurePetcareAuthenticationError:
_LOGGER.error("Unable to connect to surepetcare.io: Wrong credentials!")
return False
except SurePetcareError as error:
_LOGGER.error("Unable to connect to surepetcare.io: Wrong %s!", error)
return False
# add feeders
things = [
{CONF_ID: feeder, CONF_TYPE: SureProductID.FEEDER}
for feeder in conf[CONF_FEEDERS]
]
# add flaps (don't differentiate between CAT and PET for now)
things.extend(
[
{CONF_ID: flap, CONF_TYPE: SureProductID.PET_FLAP}
for flap in conf[CONF_FLAPS]
]
)
# discover hubs the flaps/feeders are connected to
hub_ids = set()
for device in things.copy():
device_data = await surepy.device(device[CONF_ID])
if (
CONF_PARENT in device_data
and device_data[CONF_PARENT][CONF_PRODUCT_ID] == SureProductID.HUB
and device_data[CONF_PARENT][CONF_ID] not in hub_ids
):
things.append(
{
CONF_ID: device_data[CONF_PARENT][CONF_ID],
CONF_TYPE: SureProductID.HUB,
}
)
hub_ids.add(device_data[CONF_PARENT][CONF_ID])
# add pets
things.extend(
[{CONF_ID: pet, CONF_TYPE: SureProductID.PET} for pet in conf[CONF_PETS]]
)
_LOGGER.debug("Devices and Pets to setup: %s", things)
spc = hass.data[DATA_SURE_PETCARE][SPC] = SurePetcareAPI(hass, surepy, things)
# initial update
await spc.async_update()
async_track_time_interval(hass, spc.async_update, scan_interval)
# load platforms
hass.async_create_task(
hass.helpers.discovery.async_load_platform("binary_sensor", DOMAIN, {}, config)
)
hass.async_create_task(
hass.helpers.discovery.async_load_platform("sensor", DOMAIN, {}, config)
)
return True
class SurePetcareAPI:
"""Define a generic Sure Petcare object."""
def __init__(self, hass, surepy: SurePetcare, ids: List[Dict[str, Any]]) -> None:
"""Initialize the Sure Petcare object."""
self.hass = hass
self.surepy = surepy
self.ids = ids
self.states: Dict[str, Any] = {}
async def async_update(self, arg: Any = None) -> None:
"""Refresh Sure Petcare data."""
await self.surepy.get_data()
for thing in self.ids:
sure_id = thing[CONF_ID]
sure_type = thing[CONF_TYPE]
try:
type_state = self.states.setdefault(sure_type, {})
if sure_type in [
SureProductID.CAT_FLAP,
SureProductID.PET_FLAP,
SureProductID.FEEDER,
SureProductID.HUB,
]:
type_state[sure_id] = await self.surepy.device(sure_id)
elif sure_type == SureProductID.PET:
type_state[sure_id] = await self.surepy.pet(sure_id)
except SurePetcareError as error:
_LOGGER.error("Unable to retrieve data from surepetcare.io: %s", error)
async_dispatcher_send(self.hass, TOPIC_UPDATE)
|
import asyncio
from agent import AgentError
from agent.a import Agent
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONNECTION, DOMAIN as AGENT_DOMAIN, SERVER_URL
ATTRIBUTION = "ispyconnect.com"
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"
FORWARDS = ["alarm_control_panel", "camera"]
async def async_setup(hass, config):
"""Old way to set up integrations."""
return True
async def async_setup_entry(hass, config_entry):
"""Set up the Agent component."""
hass.data.setdefault(AGENT_DOMAIN, {})
server_origin = config_entry.data[SERVER_URL]
agent_client = Agent(server_origin, async_get_clientsession(hass))
try:
await agent_client.update()
except AgentError as err:
await agent_client.close()
raise ConfigEntryNotReady from err
if not agent_client.is_available:
raise ConfigEntryNotReady
await agent_client.get_devices()
hass.data[AGENT_DOMAIN][config_entry.entry_id] = {CONNECTION: agent_client}
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
identifiers={(AGENT_DOMAIN, agent_client.unique)},
manufacturer="iSpyConnect",
name=f"Agent {agent_client.name}",
model="Agent DVR",
sw_version=agent_client.version,
)
for forward in FORWARDS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, forward)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, forward)
for forward in FORWARDS
]
)
)
await hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION].close()
if unload_ok:
hass.data[AGENT_DOMAIN].pop(config_entry.entry_id)
return unload_ok
|
import numpy as np
import tensorflow as tf
from tensornetwork.backends.tensorflow import tensordot2
import pytest
_MAXDIM = 5
class TensordotTest(tf.compat.v1.test.TestCase):
def test_invalid_shape(self):
a = [[1, 2], [3, 4]]
b = [[1, 2], [3, 4], [5, 6]]
a_axes = [1]
b_axes = [0]
# Invalid static shapes.
with self.assertRaises(tf.errors.InvalidArgumentError):
tensordot2.tensordot(tf, a, b, (a_axes, b_axes))
# Invalid dynamic shapes.
# pylint: disable=not-context-manager
with tf.compat.v1.Graph().as_default():
with self.cached_session() as sess:
with self.assertRaisesRegexp(tf.errors.InvalidArgumentError,
"Matrix size-incompatible"):
a_ph = tf.compat.v1.placeholder(tf.float32)
b_ph = tf.compat.v1.placeholder(tf.float32)
axes_ph = tf.compat.v1.placeholder(tf.int32)
output = tensordot2.tensordot(tf, a_ph, b_ph, axes_ph)
_ = sess.run([output],
feed_dict={
a_ph: a,
b_ph: b,
axes_ph: (a_axes, b_axes)
})
def test_invalid_axes(self):
# pylint: disable=not-context-manager
with tf.compat.v1.Graph().as_default():
a = [[1, 2], [3, 4]]
b = [[1, 2], [3, 4]]
# Invalid static axes.
for axes_value in -1, 3, [1], [[1]], [[1], [0, 1]]:
with self.assertRaises(ValueError):
tensordot2.tensordot(tf, a, b, axes_value)
with self.assertRaises(IndexError):
tensordot2.tensordot(tf, a, b, [[0], [7]])
# Invalid dynamic axes.
a_ph = tf.compat.v1.placeholder(tf.float32)
b_ph = tf.compat.v1.placeholder(tf.float32)
axes_ph = tf.compat.v1.placeholder(tf.int32)
output = tensordot2.tensordot(tf, a_ph, b_ph, axes_ph)
# Note: We don't support scalar Tensor values for axes.
for axes_value in 1, [1], [0, 1], [[1]], [[0, 1]], [[0], [7]]:
with self.cached_session() as sess:
with self.assertRaises(tf.errors.InvalidArgumentError):
_ = sess.run([output],
feed_dict={
a_ph: a,
b_ph: b,
axes_ph: axes_value
})
# Test case for 11950
def test_valid_axis(self):
for axes_value in [1, 2], [[1], [2]], [[], []], 0:
with self.cached_session():
np_a = np.ones((3, 3))
np_b = np.array([2, 3, 1])[None, None]
np_ans = np.tensordot(np_a, np_b, axes_value)
tf_a = tf.ones((3, 3), dtype=tf.float32)
tf_b = tf.constant([2, 3, 1], dtype=tf.float32)[None, None]
tf_ans = tensordot2.tensordot(tf, tf_a, tf_b, axes_value)
self.assertAllEqual(tf_ans.shape, np_ans.shape)
self.assertAllEqual(tf_ans, np_ans)
def test_partial_shape_inference(self):
# pylint: disable=not-context-manager
with tf.compat.v1.Graph().as_default():
for axes in ([1], [0]), 1:
a = tf.compat.v1.placeholder(tf.float32)
b = tf.compat.v1.placeholder(tf.float32)
output = tensordot2.tensordot(tf, a, b, axes)
self.assertEqual(output.get_shape().ndims, None)
a.set_shape([None, 2])
b.set_shape([2, 3])
output = tensordot2.tensordot(tf, a, b, axes)
output_shape = output.get_shape()
self.assertEqual(output_shape.ndims, 2)
output_shape = output_shape.as_list()
self.assertEqual(output_shape[0], None)
self.assertEqual(output_shape[1], 3)
a = tf.compat.v1.placeholder(tf.float32)
b = tf.compat.v1.placeholder(tf.float32)
a.set_shape([2, 2])
b.set_shape([2, None])
output = tensordot2.tensordot(tf, a, b, axes)
output_shape = output.get_shape()
self.assertEqual(output_shape.ndims, 2)
output_shape = output_shape.as_list()
self.assertEqual(output_shape[0], 2)
self.assertEqual(output_shape[1], None)
# Select a random subset of size m from [0, 1, ..., n-1].
def _random_subset(m, n):
assert m <= n
return (np.random.permutation(n)[:m]).astype(np.int32)
def _generate_random_tensors_and_dims(dtype_, rank_a_, rank_b_, num_dims_):
a_shape = np.random.randint(1, _MAXDIM + 1, rank_a_)
b_shape = np.random.randint(1, _MAXDIM + 1, rank_b_)
shared_shape = np.random.randint(1, _MAXDIM + 1, num_dims_)
a_dims = _random_subset(num_dims_, rank_a_)
b_dims = _random_subset(num_dims_, rank_b_)
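  # Give the axes selected for contraction matching sizes in both tensors so
  # the tensordot over (a_dims, b_dims) is well-defined.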
for i in range(num_dims_):
a_shape[a_dims[i]] = shared_shape[i]
b_shape[b_dims[i]] = shared_shape[i]
a = np.random.uniform(
low=-1.0, high=1.0, size=np.prod(a_shape)).reshape(a_shape).astype(dtype_)
b = np.random.uniform(
low=-1.0, high=1.0, size=np.prod(b_shape)).reshape(b_shape).astype(dtype_)
return a, b, a_dims, b_dims
@pytest.mark.parametrize("dtype_", [np.float32, np.complex64])
@pytest.mark.parametrize("rank_a_", [1, 2, 3])
@pytest.mark.parametrize("rank_b_", [1, 2, 3])
@pytest.mark.parametrize("num_dims_", [1, 2, 3])
def test_tensordot_scalar_axes(dtype_, rank_a_, rank_b_, num_dims_):
if not num_dims_ <= min(rank_a_, rank_b_):
pytest.skip("Not a test")
if dtype_ == np.float16:
tol = 0.05
elif dtype_ in (np.float32, np.complex64):
tol = 1e-5
else:
tol = 1e-12
shape = [5] * num_dims_
a_np = np.random.uniform(
low=-1.0, high=1.0, size=np.prod(shape)).reshape(shape).astype(dtype_)
b_np = np.random.uniform(
low=-1.0, high=1.0, size=np.prod(shape)).reshape(shape).astype(dtype_)
all_axes = [0, 1]
if a_np.ndim > 2:
all_axes.append(a_np.ndim - 1)
for axes in all_axes:
np_ans = np.tensordot(a_np, b_np, axes=axes)
tf_ans = tensordot2.tensordot(tf, a_np, b_np, axes=axes)
np.testing.assert_allclose(tf_ans, np_ans, rtol=tol, atol=tol)
assert tf_ans.shape == np_ans.shape
@pytest.mark.parametrize("dtype_", [np.float32, np.complex64])
@pytest.mark.parametrize("rank_a_", [1, 2, 3])
@pytest.mark.parametrize("rank_b_", [1, 2, 3])
@pytest.mark.parametrize("num_dims_", [0, 1, 2, 3])
def test_tensordot(dtype_, rank_a_, rank_b_, num_dims_):
if not num_dims_ <= min(rank_a_, rank_b_):
pytest.skip("Not a test")
num_trials = min(30, num_dims_ * num_dims_)
if dtype_ == np.float16:
tol = 0.05
elif dtype_ in (np.float32, np.complex64):
tol = 1e-5
else:
tol = 1e-12
for _ in range(num_trials):
a_np, b_np, a_dims_np, b_dims_np = _generate_random_tensors_and_dims(
dtype_, rank_a_, rank_b_, num_dims_)
np_ans = np.tensordot(a_np, b_np, axes=(a_dims_np, b_dims_np))
tf_ans = tensordot2.tensordot(tf, a_np, b_np, (a_dims_np, b_dims_np))
np.testing.assert_allclose(tf_ans, np_ans, rtol=tol, atol=tol)
assert tf_ans.shape == np_ans.shape
|
import itertools
from typing import cast, overload, Iterable, Iterator, List, Mapping, Optional, Union
import attr
from PyQt5.QtCore import Qt, QEvent
from PyQt5.QtGui import QKeySequence, QKeyEvent
from qutebrowser.utils import utils
# Map Qt::Key values to their Qt::KeyboardModifier value.
_MODIFIER_MAP = {
Qt.Key_Shift: Qt.ShiftModifier,
Qt.Key_Control: Qt.ControlModifier,
Qt.Key_Alt: Qt.AltModifier,
Qt.Key_Meta: Qt.MetaModifier,
Qt.Key_AltGr: Qt.GroupSwitchModifier,
Qt.Key_Mode_switch: Qt.GroupSwitchModifier,
}
_NIL_KEY = Qt.Key(0)
_ModifierType = Union[Qt.KeyboardModifier, Qt.KeyboardModifiers]
_SPECIAL_NAMES = {
# Some keys handled in a weird way by QKeySequence::toString.
# See https://bugreports.qt.io/browse/QTBUG-40030
# Most are unlikely to be ever needed, but you never know ;)
# For dead/combining keys, we return the corresponding non-combining
# key, as that's easier to add to the config.
Qt.Key_Super_L: 'Super L',
Qt.Key_Super_R: 'Super R',
Qt.Key_Hyper_L: 'Hyper L',
Qt.Key_Hyper_R: 'Hyper R',
Qt.Key_Direction_L: 'Direction L',
Qt.Key_Direction_R: 'Direction R',
Qt.Key_Shift: 'Shift',
Qt.Key_Control: 'Control',
Qt.Key_Meta: 'Meta',
Qt.Key_Alt: 'Alt',
Qt.Key_AltGr: 'AltGr',
Qt.Key_Multi_key: 'Multi key',
Qt.Key_SingleCandidate: 'Single Candidate',
Qt.Key_Mode_switch: 'Mode switch',
Qt.Key_Dead_Grave: '`',
Qt.Key_Dead_Acute: '´',
Qt.Key_Dead_Circumflex: '^',
Qt.Key_Dead_Tilde: '~',
Qt.Key_Dead_Macron: '¯',
Qt.Key_Dead_Breve: '˘',
Qt.Key_Dead_Abovedot: '˙',
Qt.Key_Dead_Diaeresis: '¨',
Qt.Key_Dead_Abovering: '˚',
Qt.Key_Dead_Doubleacute: '˝',
Qt.Key_Dead_Caron: 'ˇ',
Qt.Key_Dead_Cedilla: '¸',
Qt.Key_Dead_Ogonek: '˛',
Qt.Key_Dead_Iota: 'Iota',
Qt.Key_Dead_Voiced_Sound: 'Voiced Sound',
Qt.Key_Dead_Semivoiced_Sound: 'Semivoiced Sound',
Qt.Key_Dead_Belowdot: 'Belowdot',
Qt.Key_Dead_Hook: 'Hook',
Qt.Key_Dead_Horn: 'Horn',
Qt.Key_Dead_Stroke: '\u0335', # '̵'
Qt.Key_Dead_Abovecomma: '\u0313', # '̓'
Qt.Key_Dead_Abovereversedcomma: '\u0314', # '̔'
Qt.Key_Dead_Doublegrave: '\u030f', # '̏'
Qt.Key_Dead_Belowring: '\u0325', # '̥'
Qt.Key_Dead_Belowmacron: '\u0331', # '̱'
Qt.Key_Dead_Belowcircumflex: '\u032d', # '̭'
Qt.Key_Dead_Belowtilde: '\u0330', # '̰'
Qt.Key_Dead_Belowbreve: '\u032e', # '̮'
Qt.Key_Dead_Belowdiaeresis: '\u0324', # '̤'
Qt.Key_Dead_Invertedbreve: '\u0311', # '̑'
Qt.Key_Dead_Belowcomma: '\u0326', # '̦'
Qt.Key_Dead_Currency: '¤',
Qt.Key_Dead_a: 'a',
Qt.Key_Dead_A: 'A',
Qt.Key_Dead_e: 'e',
Qt.Key_Dead_E: 'E',
Qt.Key_Dead_i: 'i',
Qt.Key_Dead_I: 'I',
Qt.Key_Dead_o: 'o',
Qt.Key_Dead_O: 'O',
Qt.Key_Dead_u: 'u',
Qt.Key_Dead_U: 'U',
Qt.Key_Dead_Small_Schwa: 'ə',
Qt.Key_Dead_Capital_Schwa: 'Ə',
Qt.Key_Dead_Greek: 'Greek',
Qt.Key_Dead_Lowline: '\u0332', # '̲'
Qt.Key_Dead_Aboveverticalline: '\u030d', # '̍'
Qt.Key_Dead_Belowverticalline: '\u0329',
Qt.Key_Dead_Longsolidusoverlay: '\u0338', # '̸'
Qt.Key_Memo: 'Memo',
Qt.Key_ToDoList: 'To Do List',
Qt.Key_Calendar: 'Calendar',
Qt.Key_ContrastAdjust: 'Contrast Adjust',
Qt.Key_LaunchG: 'Launch (G)',
Qt.Key_LaunchH: 'Launch (H)',
Qt.Key_MediaLast: 'Media Last',
Qt.Key_unknown: 'Unknown',
# For some keys, we just want a different name
Qt.Key_Escape: 'Escape',
_NIL_KEY: 'nil',
}
def _assert_plain_key(key: Qt.Key) -> None:
"""Make sure this is a key without KeyboardModifiers mixed in."""
assert not key & Qt.KeyboardModifierMask, hex(key)
def _assert_plain_modifier(key: _ModifierType) -> None:
"""Make sure this is a modifier without a key mixed in."""
mask = Qt.KeyboardModifierMask
assert not key & ~mask, hex(key) # type: ignore[operator]
def _is_printable(key: Qt.Key) -> bool:
_assert_plain_key(key)
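    # Treat keys in the Latin-1 range as printable, except for space and the
    # nil key.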
return key <= 0xff and key not in [Qt.Key_Space, _NIL_KEY]
def is_special(key: Qt.Key, modifiers: _ModifierType) -> bool:
"""Check whether this key requires special key syntax."""
_assert_plain_key(key)
_assert_plain_modifier(modifiers)
return not (_is_printable(key) and
modifiers in [Qt.ShiftModifier, Qt.NoModifier])
def is_modifier_key(key: Qt.Key) -> bool:
"""Test whether the given key is a modifier.
This only considers keys which are part of Qt::KeyboardModifiers, i.e.
which would interrupt a key chain like "yY" when handled.
"""
_assert_plain_key(key)
return key in _MODIFIER_MAP
def _is_surrogate(key: Qt.Key) -> bool:
"""Check if a codepoint is a UTF-16 surrogate.
UTF-16 surrogates are a reserved range of Unicode from 0xd800
    to 0xdfff, used to encode Unicode codepoints above the BMP
    (Basic Multilingual Plane).
"""
_assert_plain_key(key)
return 0xd800 <= key <= 0xdfff
def _remap_unicode(key: Qt.Key, text: str) -> Qt.Key:
"""Work around QtKeyEvent's bad values for high codepoints.
QKeyEvent handles higher unicode codepoints poorly. It uses UTF-16 to
handle key events, and for higher codepoints that require UTF-16 surrogates
(e.g. emoji and some CJK characters), it sets the keycode to just the upper
half of the surrogate, which renders it useless, and breaks UTF-8 encoding,
causing crashes. So we detect this case, and reassign the key code to be
the full Unicode codepoint, which we can recover from the text() property,
which has the full character.
This is a WORKAROUND for https://bugreports.qt.io/browse/QTBUG-72776.
"""
_assert_plain_key(key)
if _is_surrogate(key):
if len(text) != 1:
raise KeyParseError(text, "Expected 1 character for surrogate, "
"but got {}!".format(len(text)))
return Qt.Key(ord(text[0]))
return key
def _check_valid_utf8(s: str, data: Union[Qt.Key, _ModifierType]) -> None:
"""Make sure the given string is valid UTF-8.
Makes sure there are no chars where Qt did fall back to weird UTF-16
surrogates.
"""
try:
s.encode('utf-8')
except UnicodeEncodeError as e: # pragma: no cover
raise ValueError("Invalid encoding in 0x{:x} -> {}: {}"
.format(int(data), s, e))
def _key_to_string(key: Qt.Key) -> str:
"""Convert a Qt::Key member to a meaningful name.
Args:
key: A Qt::Key member.
Return:
A name of the key as a string.
"""
_assert_plain_key(key)
if key in _SPECIAL_NAMES:
return _SPECIAL_NAMES[key]
result = QKeySequence(key).toString()
_check_valid_utf8(result, key)
return result
def _modifiers_to_string(modifiers: _ModifierType) -> str:
"""Convert the given Qt::KeyboardModifiers to a string.
Handles Qt.GroupSwitchModifier because Qt doesn't handle that as a
modifier.
"""
_assert_plain_modifier(modifiers)
altgr = Qt.GroupSwitchModifier
if modifiers & altgr: # type: ignore[operator]
modifiers &= ~altgr # type: ignore[operator, assignment]
result = 'AltGr+'
else:
result = ''
result += QKeySequence(modifiers).toString()
_check_valid_utf8(result, modifiers)
return result
class KeyParseError(Exception):
"""Raised by _parse_single_key/parse_keystring on parse errors."""
def __init__(self, keystr: Optional[str], error: str) -> None:
if keystr is None:
msg = "Could not parse keystring: {}".format(error)
else:
msg = "Could not parse {!r}: {}".format(keystr, error)
super().__init__(msg)
def _parse_keystring(keystr: str) -> Iterator[str]:
key = ''
special = False
for c in keystr:
if c == '>':
if special:
yield _parse_special_key(key)
key = ''
special = False
else:
yield '>'
assert not key, key
elif c == '<':
special = True
elif special:
key += c
else:
yield _parse_single_key(c)
if special:
yield '<'
for c in key:
yield _parse_single_key(c)
def _parse_special_key(keystr: str) -> str:
"""Normalize a keystring like Ctrl-Q to a keystring like Ctrl+Q.
Args:
keystr: The key combination as a string.
Return:
The normalized keystring.
"""
keystr = keystr.lower()
replacements = (
('control', 'ctrl'),
('windows', 'meta'),
('mod4', 'meta'),
('command', 'meta'),
('cmd', 'meta'),
('mod1', 'alt'),
('less', '<'),
('greater', '>'),
)
for (orig, repl) in replacements:
keystr = keystr.replace(orig, repl)
for mod in ['ctrl', 'meta', 'alt', 'shift', 'num']:
keystr = keystr.replace(mod + '-', mod + '+')
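    # e.g. 'Ctrl-Shift-A' becomes 'ctrl+shift+a', which can later be fed to
    # QKeySequence.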
return keystr
def _parse_single_key(keystr: str) -> str:
"""Get a keystring for QKeySequence for a single key."""
return 'Shift+' + keystr if keystr.isupper() else keystr
@attr.s(frozen=True)
class KeyInfo:
"""A key with optional modifiers.
Attributes:
key: A Qt::Key member.
modifiers: A Qt::KeyboardModifiers enum value.
"""
key: Qt.Key = attr.ib()
modifiers: _ModifierType = attr.ib()
@classmethod
def from_event(cls, e: QKeyEvent) -> 'KeyInfo':
"""Get a KeyInfo object from a QKeyEvent.
This makes sure that key/modifiers are never mixed and also remaps
UTF-16 surrogates to work around QTBUG-72776.
"""
key = _remap_unicode(Qt.Key(e.key()), e.text())
modifiers = e.modifiers()
_assert_plain_key(key)
_assert_plain_modifier(modifiers)
return cls(key, cast(Qt.KeyboardModifier, modifiers))
def __str__(self) -> str:
"""Convert this KeyInfo to a meaningful name.
Return:
A name of the key (combination) as a string.
"""
key_string = _key_to_string(self.key)
modifiers = int(self.modifiers)
if self.key in _MODIFIER_MAP:
# Don't return e.g. <Shift+Shift>
modifiers &= ~_MODIFIER_MAP[self.key]
elif _is_printable(self.key):
# "normal" binding
if not key_string: # pragma: no cover
raise ValueError("Got empty string for key 0x{:x}!"
.format(self.key))
assert len(key_string) == 1, key_string
if self.modifiers == Qt.ShiftModifier:
assert not is_special(self.key, self.modifiers)
return key_string.upper()
elif self.modifiers == Qt.NoModifier:
assert not is_special(self.key, self.modifiers)
return key_string.lower()
else:
# Use special binding syntax, but <Ctrl-a> instead of <Ctrl-A>
key_string = key_string.lower()
modifiers = Qt.KeyboardModifier(modifiers)
# "special" binding
assert is_special(self.key, self.modifiers)
modifier_string = _modifiers_to_string(modifiers)
return '<{}{}>'.format(modifier_string, key_string)
def text(self) -> str:
"""Get the text which would be displayed when pressing this key."""
control = {
Qt.Key_Space: ' ',
Qt.Key_Tab: '\t',
Qt.Key_Backspace: '\b',
Qt.Key_Return: '\r',
Qt.Key_Enter: '\r',
Qt.Key_Escape: '\x1b',
}
if self.key in control:
return control[self.key]
elif not _is_printable(self.key):
return ''
text = QKeySequence(self.key).toString()
if not self.modifiers & Qt.ShiftModifier: # type: ignore[operator]
text = text.lower()
return text
def to_event(self, typ: QEvent.Type = QEvent.KeyPress) -> QKeyEvent:
"""Get a QKeyEvent from this KeyInfo."""
return QKeyEvent(typ, self.key, self.modifiers, self.text())
def to_int(self) -> int:
"""Get the key as an integer (with key/modifiers)."""
return int(self.key) | int(self.modifiers)
class KeySequence:
"""A sequence of key presses.
This internally uses chained QKeySequence objects and exposes a nicer
interface over it.
NOTE: While private members of this class are in theory mutable, they must
not be mutated in order to ensure consistent hashing.
Attributes:
_sequences: A list of QKeySequence
Class attributes:
_MAX_LEN: The maximum amount of keys in a QKeySequence.
"""
_MAX_LEN = 4
def __init__(self, *keys: int) -> None:
self._sequences: List[QKeySequence] = []
for sub in utils.chunk(keys, self._MAX_LEN):
args = [self._convert_key(key) for key in sub]
sequence = QKeySequence(*args)
self._sequences.append(sequence)
if keys:
assert self
self._validate()
def _convert_key(self, key: Qt.Key) -> int:
"""Convert a single key for QKeySequence."""
assert isinstance(key, (int, Qt.KeyboardModifiers)), key
return int(key)
def __str__(self) -> str:
parts = []
for info in self:
parts.append(str(info))
return ''.join(parts)
def __iter__(self) -> Iterator[KeyInfo]:
"""Iterate over KeyInfo objects."""
for key_and_modifiers in self._iter_keys():
key = Qt.Key(int(key_and_modifiers) & ~Qt.KeyboardModifierMask)
modifiers = Qt.KeyboardModifiers( # type: ignore[call-overload]
int(key_and_modifiers) & Qt.KeyboardModifierMask)
yield KeyInfo(key=key, modifiers=modifiers)
def __repr__(self) -> str:
return utils.get_repr(self, keys=str(self))
def __lt__(self, other: 'KeySequence') -> bool:
return self._sequences < other._sequences
def __gt__(self, other: 'KeySequence') -> bool:
return self._sequences > other._sequences
def __le__(self, other: 'KeySequence') -> bool:
return self._sequences <= other._sequences
def __ge__(self, other: 'KeySequence') -> bool:
return self._sequences >= other._sequences
def __eq__(self, other: object) -> bool:
if not isinstance(other, KeySequence):
return NotImplemented
return self._sequences == other._sequences
def __ne__(self, other: object) -> bool:
if not isinstance(other, KeySequence):
return NotImplemented
return self._sequences != other._sequences
def __hash__(self) -> int:
return hash(tuple(self._sequences))
def __len__(self) -> int:
return sum(len(seq) for seq in self._sequences)
def __bool__(self) -> bool:
return bool(self._sequences)
@overload
def __getitem__(self, item: int) -> KeyInfo:
...
@overload
def __getitem__(self, item: slice) -> 'KeySequence':
...
def __getitem__(self, item: Union[int, slice]) -> Union[KeyInfo, 'KeySequence']:
if isinstance(item, slice):
keys = list(self._iter_keys())
return self.__class__(*keys[item])
else:
infos = list(self)
return infos[item]
def _iter_keys(self) -> Iterator[int]:
sequences = cast(Iterable[Iterable[int]], self._sequences)
return itertools.chain.from_iterable(sequences)
def _validate(self, keystr: str = None) -> None:
for info in self:
if info.key < Qt.Key_Space or info.key >= Qt.Key_unknown:
raise KeyParseError(keystr, "Got invalid key!")
for seq in self._sequences:
if not seq:
raise KeyParseError(keystr, "Got invalid key!")
def matches(self, other: 'KeySequence') -> QKeySequence.SequenceMatch:
"""Check whether the given KeySequence matches with this one.
We store multiple QKeySequences with <= 4 keys each, so we need to
match those pair-wise, and account for an unequal amount of sequences
as well.
"""
# pylint: disable=protected-access
if len(self._sequences) > len(other._sequences):
# If we entered more sequences than there are in the config,
# there's no way there can be a match.
return QKeySequence.NoMatch
for entered, configured in zip(self._sequences, other._sequences):
# If we get NoMatch/PartialMatch in a sequence, we can abort there.
match = entered.matches(configured)
if match != QKeySequence.ExactMatch:
return match
# We checked all common sequences and they had an ExactMatch.
#
# If there's still more sequences configured than entered, that's a
# PartialMatch, as more keypresses can still follow and new sequences
# will appear which we didn't check above.
#
# If there's the same amount of sequences configured and entered,
        # that's an ExactMatch.
if len(self._sequences) == len(other._sequences):
return QKeySequence.ExactMatch
elif len(self._sequences) < len(other._sequences):
return QKeySequence.PartialMatch
else:
raise utils.Unreachable("self={!r} other={!r}".format(self, other))
def append_event(self, ev: QKeyEvent) -> 'KeySequence':
"""Create a new KeySequence object with the given QKeyEvent added."""
key = Qt.Key(ev.key())
_assert_plain_key(key)
_assert_plain_modifier(ev.modifiers())
key = _remap_unicode(key, ev.text())
modifiers = int(ev.modifiers())
if key == _NIL_KEY:
raise KeyParseError(None, "Got nil key!")
# We always remove Qt.GroupSwitchModifier because QKeySequence has no
# way to mention that in a binding anyways...
modifiers &= ~Qt.GroupSwitchModifier
# We change Qt.Key_Backtab to Key_Tab here because nobody would
# configure "Shift-Backtab" in their config.
if modifiers & Qt.ShiftModifier and key == Qt.Key_Backtab:
key = Qt.Key_Tab
# We don't care about a shift modifier with symbols (Shift-: should
# match a : binding even though we typed it with a shift on an
# US-keyboard)
#
# However, we *do* care about Shift being involved if we got an
# upper-case letter, as Shift-A should match a Shift-A binding, but not
# an "a" binding.
#
# In addition, Shift also *is* relevant when other modifiers are
# involved. Shift-Ctrl-X should not be equivalent to Ctrl-X.
if (modifiers == Qt.ShiftModifier and
_is_printable(key) and
not ev.text().isupper()):
modifiers = Qt.KeyboardModifiers() # type: ignore[assignment]
keys = list(self._iter_keys())
keys.append(key | int(modifiers))
return self.__class__(*keys)
def strip_modifiers(self) -> 'KeySequence':
"""Strip optional modifiers from keys."""
modifiers = Qt.KeypadModifier
keys = [key & ~modifiers for key in self._iter_keys()]
return self.__class__(*keys)
def with_mappings(
self,
mappings: Mapping['KeySequence', 'KeySequence']
) -> 'KeySequence':
"""Get a new KeySequence with the given mappings applied."""
keys = []
for key in self._iter_keys():
key_seq = KeySequence(key)
if key_seq in mappings:
new_seq = mappings[key_seq]
assert len(new_seq) == 1
key = new_seq[0].to_int()
keys.append(key)
return self.__class__(*keys)
@classmethod
def parse(cls, keystr: str) -> 'KeySequence':
"""Parse a keystring like <Ctrl-x> or xyz and return a KeySequence."""
# pylint: disable=protected-access
new = cls()
strings = list(_parse_keystring(keystr))
for sub in utils.chunk(strings, cls._MAX_LEN):
sequence = QKeySequence(', '.join(sub))
new._sequences.append(sequence)
if keystr:
assert new, keystr
# pylint: disable=protected-access
new._validate(keystr)
return new
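# A minimal usage sketch, not part of the original module, assuming the rest of
# this keyutils file is importable:
#     entered = KeySequence.parse('<Ctrl-x>')
#     configured = KeySequence.parse('<Ctrl-x>o')
#     entered.matches(configured)     # -> QKeySequence.PartialMatch
#     configured.matches(configured)  # -> QKeySequence.ExactMatch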
|
from flexx import flx
class Example(flx.HFix):
def init(self):
with flx.VBox():
self.b1 = flx.Button(text='apple')
self.b2 = flx.Button(text='banana')
self.b3 = flx.Button(text='pear')
            self.buttonlabel = flx.Label(text='...')
with flx.VBox():
self.r1 = flx.RadioButton(text='apple')
self.r2 = flx.RadioButton(text='banana')
self.r3 = flx.RadioButton(text='pear')
self.radiolabel = flx.Label(text='...')
with flx.VBox():
self.c1 = flx.ToggleButton(text='apple')
self.c2 = flx.ToggleButton(text='banana')
self.c3 = flx.ToggleButton(text='pear')
self.checklabel = flx.Label(text='...')
@flx.reaction('b1.pointer_click', 'b2.pointer_click', 'b3.pointer_click')
def _button_clicked(self, *events):
ev = events[-1]
self.buttonlabel.set_text('Clicked on the ' + ev.source.text)
@flx.reaction('r1.checked', 'r2.checked', 'r3.checked')
def _radio_changed(self, *events):
# There will also be events for radio buttons being unchecked, but
# Flexx ensures that the last event is for the one being checked
ev = events[-1]
self.radiolabel.set_text('Selected the ' + ev.source.text)
@flx.reaction('c1.checked', 'c2.checked', 'c3.checked')
def _check_changed(self, *events):
selected = [c.text for c in (self.c1, self.c2, self.c3) if c.checked]
if selected:
self.checklabel.set_text('Selected: ' + ', '.join(selected))
else:
self.checklabel.set_text('None selected')
if __name__ == '__main__':
m = flx.launch(Example)
flx.run()
|
from marshmallow import fields, validates_schema, post_load
from marshmallow.exceptions import ValidationError
from lemur.common import utils, validators
from lemur.authorities.schemas import AuthorityNestedOutputSchema
from lemur.certificates.schemas import CertificateNestedOutputSchema
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
from lemur.destinations.schemas import DestinationNestedOutputSchema
from lemur.domains.schemas import DomainNestedOutputSchema
from lemur.notifications import service as notification_service
from lemur.notifications.schemas import NotificationNestedOutputSchema
from lemur.policies.schemas import RotationPolicyNestedOutputSchema
from lemur.roles.schemas import RoleNestedOutputSchema
from lemur.schemas import (
AssociatedCertificateSchema,
AssociatedDestinationSchema,
AssociatedNotificationSchema,
AssociatedRoleSchema,
EndpointNestedOutputSchema,
ExtensionSchema,
)
from lemur.users.schemas import UserNestedOutputSchema
class PendingCertificateSchema(LemurInputSchema):
owner = fields.Email(required=True)
description = fields.String(missing="", allow_none=True)
class PendingCertificateOutputSchema(LemurOutputSchema):
id = fields.Integer()
external_id = fields.String()
csr = fields.String()
chain = fields.String()
deleted = fields.Boolean(default=False)
description = fields.String()
issuer = fields.String()
name = fields.String()
number_attempts = fields.Integer()
date_created = fields.Date()
last_updated = fields.Date()
resolved = fields.Boolean(required=False)
resolved_cert_id = fields.Integer(required=False)
rotation = fields.Boolean()
# Note aliasing is the first step in deprecating these fields.
notify = fields.Boolean()
active = fields.Boolean(attribute="notify")
cn = fields.String()
common_name = fields.String(attribute="cn")
owner = fields.Email()
status = fields.String()
user = fields.Nested(UserNestedOutputSchema)
extensions = fields.Nested(ExtensionSchema)
# associated objects
domains = fields.Nested(DomainNestedOutputSchema, many=True)
destinations = fields.Nested(DestinationNestedOutputSchema, many=True)
notifications = fields.Nested(NotificationNestedOutputSchema, many=True)
replaces = fields.Nested(CertificateNestedOutputSchema, many=True)
authority = fields.Nested(AuthorityNestedOutputSchema)
roles = fields.Nested(RoleNestedOutputSchema, many=True)
endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[])
replaced_by = fields.Nested(
CertificateNestedOutputSchema, many=True, attribute="replaced"
)
rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema)
class PendingCertificateEditInputSchema(PendingCertificateSchema):
owner = fields.String()
notify = fields.Boolean()
rotation = fields.Boolean()
destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True)
roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True)
@post_load
def enforce_notifications(self, data):
"""
Ensures that when an owner changes, default notifications are added for the new owner.
Old owner notifications are retained unless explicitly removed.
:param data:
:return:
"""
if data["owner"]:
notification_name = "DEFAULT_{0}".format(
data["owner"].split("@")[0].upper()
)
data[
"notifications"
] += notification_service.create_default_expiration_notifications(
notification_name, [data["owner"]]
)
return data
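# Illustration of the hook above (hedged, not part of the original module): an
# edit with owner "alice@example.com" derives the notification name
# "DEFAULT_ALICE" and appends the default expiration notifications created for
# it to whatever notifications were submitted with the request.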
class PendingCertificateCancelSchema(LemurInputSchema):
note = fields.String()
class PendingCertificateUploadInputSchema(LemurInputSchema):
external_id = fields.String(missing=None, allow_none=True)
body = fields.String(required=True)
chain = fields.String(missing=None, allow_none=True)
@validates_schema
def validate_cert_chain(self, data):
cert = None
if data.get("body"):
try:
cert = utils.parse_certificate(data["body"])
except ValueError:
raise ValidationError(
"Public certificate presented is not valid.", field_names=["body"]
)
if data.get("chain"):
try:
chain = utils.parse_cert_chain(data["chain"])
except ValueError:
raise ValidationError(
"Invalid certificate in certificate chain.", field_names=["chain"]
)
# Throws ValidationError
validators.verify_cert_chain([cert] + chain)
pending_certificate_output_schema = PendingCertificateOutputSchema()
pending_certificate_edit_input_schema = PendingCertificateEditInputSchema()
pending_certificate_cancel_schema = PendingCertificateCancelSchema()
pending_certificate_upload_input_schema = PendingCertificateUploadInputSchema()
|
import importlib.util
import os
import sys
import weakref
import zipimport
from collections import abc
from hashlib import sha1
from importlib import import_module
from types import ModuleType
from .exceptions import TemplateNotFound
from .utils import internalcode
from .utils import open_if_exists
def split_template_path(template):
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split("/"):
if (
os.path.sep in piece
or (os.path.altsep and os.path.altsep in piece)
or piece == os.path.pardir
):
raise TemplateNotFound(template)
elif piece and piece != ".":
pieces.append(piece)
return pieces
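# For illustration (not part of the original module):
#     split_template_path("users/./profile.html")  # -> ["users", "profile.html"]
#     split_template_path("../secret.html")        # raises TemplateNotFound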
class BaseLoader:
"""Baseclass for all loaders. Subclass this and override `get_source` to
implement a custom loading mechanism. The environment provides a
`get_template` method that calls the loader's `load` method to get the
:class:`Template` object.
A very basic example for a loader that looks up templates on the file
system could look like this::
from jinja2 import BaseLoader, TemplateNotFound
from os.path import join, exists, getmtime
class MyLoader(BaseLoader):
def __init__(self, path):
self.path = path
def get_source(self, environment, template):
path = join(self.path, template)
if not exists(path):
raise TemplateNotFound(template)
mtime = getmtime(path)
with open(path) as f:
source = f.read()
return source, path, lambda: mtime == getmtime(path)
"""
#: if set to `False` it indicates that the loader cannot provide access
#: to the source of templates.
#:
#: .. versionadded:: 2.4
has_source_access = True
def get_source(self, environment, template):
"""Get the template source, filename and reload helper for a template.
It's passed the environment and template name and has to return a
tuple in the form ``(source, filename, uptodate)`` or raise a
`TemplateNotFound` error if it can't locate the template.
The source part of the returned tuple must be the source of the
template as a string. The filename should be the name of the
file on the filesystem if it was loaded from there, otherwise
``None``. The filename is used by Python for the tracebacks
if no loader extension is used.
The last item in the tuple is the `uptodate` function. If auto
reloading is enabled it's always called to check if the template
changed. No arguments are passed so the function must store the
old state somewhere (for example in a closure). If it returns `False`
the template will be reloaded.
"""
if not self.has_source_access:
raise RuntimeError(
f"{self.__class__.__name__} cannot provide access to the source"
)
raise TemplateNotFound(template)
def list_templates(self):
"""Iterates over all templates. If the loader does not support that
it should raise a :exc:`TypeError` which is the default behavior.
"""
raise TypeError("this loader cannot iterate over all templates")
@internalcode
def load(self, environment, name, globals=None):
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.
"""
code = None
if globals is None:
globals = {}
# first we try to get the source for this template together
# with the filename and the uptodate function.
source, filename, uptodate = self.get_source(environment, name)
# try to load the code from the bytecode cache if there is a
# bytecode cache configured.
bcc = environment.bytecode_cache
if bcc is not None:
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
# if we don't have code so far (not cached, no longer up to
# date) etc. we compile the template
if code is None:
code = environment.compile(source, name, filename)
# if the bytecode cache is available and the bucket doesn't
# have a code so far, we give the bucket the new code and put
# it back to the bytecode cache.
if bcc is not None and bucket.code is None:
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(
environment, code, globals, uptodate
)
class FileSystemLoader(BaseLoader):
"""Load templates from a directory in the file system.
The path can be relative or absolute. Relative paths are relative to
the current working directory.
.. code-block:: python
loader = FileSystemLoader("templates")
A list of paths can be given. The directories will be searched in
order, stopping at the first matching template.
.. code-block:: python
loader = FileSystemLoader(["/override/templates", "/default/templates"])
:param searchpath: A path, or list of paths, to the directory that
contains the templates.
:param encoding: Use this encoding to read the text from template
files.
:param followlinks: Follow symbolic links in the path.
.. versionchanged:: 2.8
Added the ``followlinks`` parameter.
"""
def __init__(self, searchpath, encoding="utf-8", followlinks=False):
if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str):
searchpath = [searchpath]
self.searchpath = list(searchpath)
self.encoding = encoding
self.followlinks = followlinks
def get_source(self, environment, template):
pieces = split_template_path(template)
for searchpath in self.searchpath:
filename = os.path.join(searchpath, *pieces)
f = open_if_exists(filename)
if f is None:
continue
try:
contents = f.read().decode(self.encoding)
finally:
f.close()
mtime = os.path.getmtime(filename)
def uptodate():
try:
return os.path.getmtime(filename) == mtime
except OSError:
return False
return contents, filename, uptodate
raise TemplateNotFound(template)
def list_templates(self):
found = set()
for searchpath in self.searchpath:
walk_dir = os.walk(searchpath, followlinks=self.followlinks)
for dirpath, _, filenames in walk_dir:
for filename in filenames:
template = (
os.path.join(dirpath, filename)[len(searchpath) :]
.strip(os.path.sep)
.replace(os.path.sep, "/")
)
if template[:2] == "./":
template = template[2:]
if template not in found:
found.add(template)
return sorted(found)
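# Hedged wiring sketch (standard Jinja2 usage, not part of this module): a
# loader is normally attached to an Environment, which resolves template names
# through it:
#     from jinja2 import Environment
#     env = Environment(loader=FileSystemLoader(["override/templates", "templates"]))
#     template = env.get_template("index.html")  # first matching path wins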
class PackageLoader(BaseLoader):
"""Load templates from a directory in a Python package.
:param package_name: Import name of the package that contains the
template directory.
:param package_path: Directory within the imported package that
contains the templates.
:param encoding: Encoding of template files.
The following example looks up templates in the ``pages`` directory
within the ``project.ui`` package.
.. code-block:: python
loader = PackageLoader("project.ui", "pages")
Only packages installed as directories (standard pip behavior) or
zip/egg files (less common) are supported. The Python API for
introspecting data in packages is too limited to support other
installation methods the way this loader requires.
There is limited support for :pep:`420` namespace packages. The
template directory is assumed to only be in one namespace
contributor. Zip files contributing to a namespace are not
supported.
.. versionchanged:: 3.0
No longer uses ``setuptools`` as a dependency.
.. versionchanged:: 3.0
Limited PEP 420 namespace package support.
"""
def __init__(self, package_name, package_path="templates", encoding="utf-8"):
if package_path == os.path.curdir:
package_path = ""
elif package_path[:2] == os.path.curdir + os.path.sep:
package_path = package_path[2:]
package_path = os.path.normpath(package_path).rstrip(os.path.sep)
self.package_path = package_path
self.package_name = package_name
self.encoding = encoding
# Make sure the package exists. This also makes namespace
# packages work, otherwise get_loader returns None.
import_module(package_name)
spec = importlib.util.find_spec(package_name)
self._loader = loader = spec.loader
self._archive = None
self._template_root = None
if isinstance(loader, zipimport.zipimporter):
self._archive = loader.archive
pkgdir = next(iter(spec.submodule_search_locations))
self._template_root = os.path.join(pkgdir, package_path)
elif spec.submodule_search_locations:
# This will be one element for regular packages and multiple
# for namespace packages.
for root in spec.submodule_search_locations:
root = os.path.join(root, package_path)
if os.path.isdir(root):
self._template_root = root
break
if self._template_root is None:
raise ValueError(
f"The {package_name!r} package was not installed in a"
" way that PackageLoader understands."
)
def get_source(self, environment, template):
p = os.path.join(self._template_root, *split_template_path(template))
if self._archive is None:
# Package is a directory.
if not os.path.isfile(p):
raise TemplateNotFound(template)
with open(p, "rb") as f:
source = f.read()
mtime = os.path.getmtime(p)
def up_to_date():
return os.path.isfile(p) and os.path.getmtime(p) == mtime
else:
# Package is a zip file.
try:
source = self._loader.get_data(p)
except OSError:
raise TemplateNotFound(template)
# Could use the zip's mtime for all template mtimes, but
# would need to safely reload the module if it's out of
# date, so just report it as always current.
up_to_date = None
return source.decode(self.encoding), p, up_to_date
def list_templates(self):
results = []
if self._archive is None:
# Package is a directory.
offset = len(self._template_root)
for dirpath, _, filenames in os.walk(self._template_root):
dirpath = dirpath[offset:].lstrip(os.path.sep)
results.extend(
os.path.join(dirpath, name).replace(os.path.sep, "/")
for name in filenames
)
else:
if not hasattr(self._loader, "_files"):
raise TypeError(
"This zip import does not have the required"
" metadata to list templates."
)
# Package is a zip file.
prefix = (
self._template_root[len(self._archive) :].lstrip(os.path.sep)
+ os.path.sep
)
offset = len(prefix)
for name in self._loader._files.keys():
# Find names under the templates directory that aren't directories.
if name.startswith(prefix) and name[-1] != os.path.sep:
results.append(name[offset:].replace(os.path.sep, "/"))
results.sort()
return results
class DictLoader(BaseLoader):
"""Loads a template from a Python dict mapping template names to
template source. This loader is useful for unittesting:
>>> loader = DictLoader({'index.html': 'source here'})
    Because auto reloading is rarely useful this is disabled by default.
"""
def __init__(self, mapping):
self.mapping = mapping
def get_source(self, environment, template):
if template in self.mapping:
source = self.mapping[template]
return source, None, lambda: source == self.mapping.get(template)
raise TemplateNotFound(template)
def list_templates(self):
return sorted(self.mapping)
class FunctionLoader(BaseLoader):
"""A loader that is passed a function which does the loading. The
function receives the name of the template and has to return either
a string with the template source, a tuple in the form ``(source,
filename, uptodatefunc)`` or `None` if the template does not exist.
>>> def load_template(name):
... if name == 'index.html':
... return '...'
...
>>> loader = FunctionLoader(load_template)
The `uptodatefunc` is a function that is called if autoreload is enabled
and has to return `True` if the template is still up to date. For more
details have a look at :meth:`BaseLoader.get_source` which has the same
return value.
"""
def __init__(self, load_func):
self.load_func = load_func
def get_source(self, environment, template):
rv = self.load_func(template)
if rv is None:
raise TemplateNotFound(template)
elif isinstance(rv, str):
return rv, None, None
return rv
class PrefixLoader(BaseLoader):
"""A loader that is passed a dict of loaders where each loader is bound
    to a prefix. By default the prefix is delimited from the template by a
    slash; this can be changed by setting the `delimiter` argument to
    something else::
loader = PrefixLoader({
'app1': PackageLoader('mypackage.app1'),
'app2': PackageLoader('mypackage.app2')
})
By loading ``'app1/index.html'`` the file from the app1 package is loaded,
by loading ``'app2/index.html'`` the file from the second.
"""
def __init__(self, mapping, delimiter="/"):
self.mapping = mapping
self.delimiter = delimiter
def get_loader(self, template):
try:
prefix, name = template.split(self.delimiter, 1)
loader = self.mapping[prefix]
except (ValueError, KeyError):
raise TemplateNotFound(template)
return loader, name
def get_source(self, environment, template):
loader, name = self.get_loader(template)
try:
return loader.get_source(environment, name)
except TemplateNotFound:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
loader, local_name = self.get_loader(name)
try:
return loader.load(environment, local_name, globals)
except TemplateNotFound:
# re-raise the exception with the correct filename here.
# (the one that includes the prefix)
raise TemplateNotFound(name)
def list_templates(self):
result = []
for prefix, loader in self.mapping.items():
for template in loader.list_templates():
result.append(prefix + self.delimiter + template)
return result
class ChoiceLoader(BaseLoader):
    """This loader works like the `PrefixLoader`, except that no prefix is
    specified. If a template cannot be found by one loader, the next one
    is tried.
>>> loader = ChoiceLoader([
... FileSystemLoader('/path/to/user/templates'),
... FileSystemLoader('/path/to/system/templates')
... ])
This is useful if you want to allow users to override builtin templates
from a different location.
"""
def __init__(self, loaders):
self.loaders = loaders
def get_source(self, environment, template):
for loader in self.loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
@internalcode
def load(self, environment, name, globals=None):
for loader in self.loaders:
try:
return loader.load(environment, name, globals)
except TemplateNotFound:
pass
raise TemplateNotFound(name)
def list_templates(self):
found = set()
for loader in self.loaders:
found.update(loader.list_templates())
return sorted(found)
class _TemplateModule(ModuleType):
"""Like a normal module but with support for weak references"""
class ModuleLoader(BaseLoader):
"""This loader loads templates from precompiled templates.
Example usage:
>>> loader = ChoiceLoader([
... ModuleLoader('/path/to/compiled/templates'),
... FileSystemLoader('/path/to/templates')
... ])
Templates can be precompiled with :meth:`Environment.compile_templates`.
"""
has_source_access = False
def __init__(self, path):
package_name = f"_jinja2_module_templates_{id(self):x}"
# create a fake module that looks for the templates in the
# path given.
mod = _TemplateModule(package_name)
if not isinstance(path, abc.Iterable) or isinstance(path, str):
path = [path]
mod.__path__ = [os.fspath(p) for p in path]
sys.modules[package_name] = weakref.proxy(
mod, lambda x: sys.modules.pop(package_name, None)
)
# the only strong reference, the sys.modules entry is weak
# so that the garbage collector can remove it once the
# loader that created it goes out of business.
self.module = mod
self.package_name = package_name
@staticmethod
def get_template_key(name):
return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
@staticmethod
def get_module_filename(name):
return ModuleLoader.get_template_key(name) + ".py"
@internalcode
def load(self, environment, name, globals=None):
key = self.get_template_key(name)
module = f"{self.package_name}.{key}"
mod = getattr(self.module, module, None)
if mod is None:
try:
mod = __import__(module, None, None, ["root"])
except ImportError:
raise TemplateNotFound(name)
# remove the entry from sys.modules, we only want the attribute
# on the module object we have stored on the loader.
sys.modules.pop(module, None)
return environment.template_class.from_module_dict(
environment, mod.__dict__, globals
)
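# Hedged usage sketch (not part of the original module): ModuleLoader consumes
# templates precompiled with Environment.compile_templates, roughly:
#     env = Environment(loader=FileSystemLoader("templates"))
#     env.compile_templates("compiled", zip=None)   # writes tmpl_<sha1>.py modules
#     fast_env = Environment(loader=ModuleLoader("compiled"))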
|
import asyncio
from contextlib import contextmanager
import json as _json
import re
from unittest import mock
from urllib.parse import parse_qs
from aiohttp import ClientSession
from aiohttp.client_exceptions import ClientError, ClientResponseError
from aiohttp.streams import StreamReader
from yarl import URL
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
RETYPE = type(re.compile(""))
def mock_stream(data):
"""Mock a stream with data."""
protocol = mock.Mock(_reading_paused=False)
stream = StreamReader(protocol, limit=2 ** 16)
stream.feed_data(data)
stream.feed_eof()
return stream
class AiohttpClientMocker:
"""Mock Aiohttp client requests."""
def __init__(self):
"""Initialize the request mocker."""
self._mocks = []
self._cookies = {}
self.mock_calls = []
def request(
self,
method,
url,
*,
auth=None,
status=200,
text=None,
data=None,
content=None,
json=None,
params=None,
headers={},
exc=None,
cookies=None,
side_effect=None,
):
"""Mock a request."""
if not isinstance(url, RETYPE):
url = URL(url)
if params:
url = url.with_query(params)
self._mocks.append(
AiohttpClientMockResponse(
method=method,
url=url,
status=status,
response=content,
json=json,
text=text,
cookies=cookies,
exc=exc,
headers=headers,
side_effect=side_effect,
)
)
def get(self, *args, **kwargs):
"""Register a mock get request."""
self.request("get", *args, **kwargs)
def put(self, *args, **kwargs):
"""Register a mock put request."""
self.request("put", *args, **kwargs)
def post(self, *args, **kwargs):
"""Register a mock post request."""
self.request("post", *args, **kwargs)
def delete(self, *args, **kwargs):
"""Register a mock delete request."""
self.request("delete", *args, **kwargs)
def options(self, *args, **kwargs):
"""Register a mock options request."""
self.request("options", *args, **kwargs)
def patch(self, *args, **kwargs):
"""Register a mock patch request."""
self.request("patch", *args, **kwargs)
@property
def call_count(self):
"""Return the number of requests made."""
return len(self.mock_calls)
def clear_requests(self):
"""Reset mock calls."""
self._mocks.clear()
self._cookies.clear()
self.mock_calls.clear()
def create_session(self, loop):
"""Create a ClientSession that is bound to this mocker."""
session = ClientSession(loop=loop)
# Setting directly on `session` will raise deprecation warning
object.__setattr__(session, "_request", self.match_request)
return session
async def match_request(
self,
method,
url,
*,
data=None,
auth=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None,
json=None,
cookies=None,
**kwargs,
):
"""Match a request against pre-registered requests."""
data = data or json
url = URL(url)
if params:
url = url.with_query(params)
for response in self._mocks:
if response.match_request(method, url, params):
self.mock_calls.append((method, url, data, headers))
if response.side_effect:
response = await response.side_effect(method, url, data)
if response.exc:
raise response.exc
return response
assert False, "No mock registered for {} {} {}".format(
method.upper(), url, params
)
class AiohttpClientMockResponse:
"""Mock Aiohttp client response."""
def __init__(
self,
method,
url,
status=200,
response=None,
json=None,
text=None,
cookies=None,
exc=None,
headers=None,
side_effect=None,
):
"""Initialize a fake response."""
if json is not None:
text = _json.dumps(json)
if text is not None:
response = text.encode("utf-8")
if response is None:
response = b""
self.method = method
self._url = url
self.status = status
self.response = response
self.exc = exc
self.side_effect = side_effect
self._headers = headers or {}
self._cookies = {}
if cookies:
for name, data in cookies.items():
cookie = mock.MagicMock()
cookie.value = data
self._cookies[name] = cookie
def match_request(self, method, url, params=None):
"""Test if response answers request."""
if method.lower() != self.method.lower():
return False
# regular expression matching
if isinstance(self._url, RETYPE):
return self._url.search(str(url)) is not None
if (
self._url.scheme != url.scheme
or self._url.host != url.host
or self._url.path != url.path
):
return False
# Ensure all query components in matcher are present in the request
request_qs = parse_qs(url.query_string)
matcher_qs = parse_qs(self._url.query_string)
for key, vals in matcher_qs.items():
for val in vals:
try:
request_qs.get(key, []).remove(val)
except ValueError:
return False
return True
@property
    def headers(self):
        """Return the response headers."""
return self._headers
@property
def cookies(self):
"""Return dict of cookies."""
return self._cookies
@property
def url(self):
"""Return yarl of URL."""
return self._url
@property
    def content_type(self):
        """Return the content-type header."""
return self._headers.get("content-type")
@property
def content(self):
"""Return content."""
return mock_stream(self.response)
async def read(self):
"""Return mock response."""
return self.response
async def text(self, encoding="utf-8"):
"""Return mock response as a string."""
return self.response.decode(encoding)
async def json(self, encoding="utf-8", content_type=None):
"""Return mock response as a json."""
return _json.loads(self.response.decode(encoding))
def release(self):
"""Mock release."""
def raise_for_status(self):
"""Raise error if status is 400 or higher."""
if self.status >= 400:
request_info = mock.Mock(real_url="http://example.com")
raise ClientResponseError(
request_info=request_info,
history=None,
code=self.status,
headers=self.headers,
)
def close(self):
"""Mock close."""
@contextmanager
def mock_aiohttp_client():
"""Context manager to mock aiohttp client."""
mocker = AiohttpClientMocker()
def create_session(hass, *args):
session = mocker.create_session(hass.loop)
async def close_session(event):
"""Close session."""
await session.close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session)
return session
with mock.patch(
"homeassistant.helpers.aiohttp_client.async_create_clientsession",
side_effect=create_session,
):
yield mocker
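# Hedged usage sketch (test code, not part of this helper module):
#     with mock_aiohttp_client() as aioclient_mock:
#         aioclient_mock.get("https://example.com/api", json={"ok": True})
#         # ... exercise code that creates a client session and performs the GET ...
#         assert aioclient_mock.call_count == 1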
class MockLongPollSideEffect:
"""Imitate a long_poll request.
It should be created and used as a side effect for a GET/PUT/etc. request.
    Once created, actual responses are queued with queue_response.
    If the queue is empty, the pending request waits until a response is queued
    or stop() is called.
"""
def __init__(self):
"""Initialize the queue."""
self.semaphore = asyncio.Semaphore(0)
self.response_list = []
self.stopping = False
async def __call__(self, method, url, data):
"""Fetch the next response from the queue or wait until the queue has items."""
if self.stopping:
raise ClientError()
await self.semaphore.acquire()
kwargs = self.response_list.pop(0)
return AiohttpClientMockResponse(method=method, url=url, **kwargs)
def queue_response(self, **kwargs):
"""Add a response to the long_poll queue."""
self.response_list.append(kwargs)
self.semaphore.release()
def stop(self):
"""Stop the current request and future ones.
        This avoids an exception if someone is still waiting when the test exits.
"""
self.stopping = True
self.queue_response(exc=ClientError())
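# Hedged usage sketch (not part of the original module): used as a side effect
# for a long-polling endpoint, each queued item becomes one response:
#     long_poll = MockLongPollSideEffect()
#     aioclient_mock.get("https://example.com/poll", side_effect=long_poll)
#     long_poll.queue_response(json={"event": "update"})  # pending request resolves
#     long_poll.stop()  # in teardown, unblocks any remaining waiter with ClientError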
|
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from marshmallow.exceptions import ValidationError
from cryptography.hazmat.primitives.asymmetric import rsa, ec
from lemur.common.utils import get_key_type_from_ec_curve
def get_sans_from_csr(data):
"""
Fetches SubjectAlternativeNames from CSR.
Works with any kind of SubjectAlternativeName
:param data: PEM-encoded string with CSR
:return: List of LemurAPI-compatible subAltNames
"""
sub_alt_names = []
try:
request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
except Exception:
raise ValidationError("CSR presented is not valid.")
try:
alt_names = request.extensions.get_extension_for_class(
x509.SubjectAlternativeName
)
for alt_name in alt_names.value:
sub_alt_names.append(
{"nameType": type(alt_name).__name__, "value": alt_name.value}
)
except x509.ExtensionNotFound:
pass
return sub_alt_names
def get_cn_from_csr(data):
"""
    Fetches the common name (CN) from the CSR.
:param data: PEM-encoded string with CSR
:return: the common name
"""
try:
request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
except Exception:
raise ValidationError("CSR presented is not valid.")
common_name = request.subject.get_attributes_for_oid(x509.NameOID.COMMON_NAME)
return common_name[0].value
def get_key_type_from_csr(data):
"""
    Fetches key_type from the CSR.
:param data: PEM-encoded string with CSR
:return: key_type
"""
try:
request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
except Exception:
raise ValidationError("CSR presented is not valid.")
try:
if isinstance(request.public_key(), rsa.RSAPublicKey):
return "RSA{key_size}".format(
key_size=request.public_key().key_size
)
elif isinstance(request.public_key(), ec.EllipticCurvePublicKey):
return get_key_type_from_ec_curve(request.public_key().curve.name)
else:
raise Exception("Unsupported key type")
    except NotImplementedError:
        raise
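# Hedged usage sketch (not part of the original module), given a PEM-encoded CSR
# string csr_pem:
#     get_cn_from_csr(csr_pem)        # e.g. "example.com"
#     get_sans_from_csr(csr_pem)      # e.g. [{"nameType": "DNSName", "value": "www.example.com"}]
#     get_key_type_from_csr(csr_pem)  # e.g. "RSA2048", or an EC curve-derived key type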
|
from stash.tests.stashtest import StashTestCase
class CompleterTests(StashTestCase):
def setUp(self):
StashTestCase.setUp(self)
self.complete = self.stash.completer.complete
def test_completion_01(self):
newline, possibilities = self.complete('pw')
assert newline == 'pwd.py '
def test_completion_03(self):
newline, possibilities = self.complete('ls ')
assert newline == 'ls '
assert 'README.md' in possibilities
assert 'source.py' not in possibilities
def test_completion_04(self):
newline, possibilities = self.complete('')
assert newline == ''
assert 'source.py' in possibilities
assert 'README.md' not in possibilities
def test_completion_05(self):
newline, possibilities = self.complete('ls README.md ')
assert newline == 'ls README.md '
assert 'CHANGES.md' in possibilities
assert 'source.py' not in possibilities
def test_completion_06(self):
newline, possibilities = self.complete('git ')
assert newline == 'git '
assert 'branch' in possibilities
assert 'clone' in possibilities
assert 'README.md' not in possibilities
def test_completion_07(self):
newline, possibilities = self.complete('ls -')
assert newline == 'ls -'
assert '--all' in possibilities
assert 'README.md' not in possibilities
def test_completion_08(self):
newline, possibilities = self.complete('git br')
assert newline == 'git branch '
def test_completion_09(self):
newline, possibilities = self.complete('$STASH_R')
assert newline == '$STASH_ROOT '
def test_completion_10(self):
newline, possibilities = self.complete('$STASH_ROOT/bi')
assert newline.replace('\\', '/') == '$STASH_ROOT/bin/'
def test_completion_11(self):
newline, possibilities = self.complete('ls $STASH_ROOT/bi')
assert newline.replace('\\', '/') == 'ls $STASH_ROOT/bin/'
def test_completion_12(self):
newline, possibilities = self.complete('ls $STASH_ROOT/bin/ls.')
assert newline.replace('\\', '/') == 'ls $STASH_ROOT/bin/ls.py '
|
version_info = (5, 3, 2, "alpha", 0)
def _make_version(major, minor, micro, releaselevel, serial):
"""Create a readable version string from version_info tuple components."""
assert releaselevel in ['alpha', 'beta', 'candidate', 'final']
version = "%d.%d" % (major, minor)
if micro:
version += ".%d" % (micro,)
if releaselevel != 'final':
short = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc'}[releaselevel]
version += "%s%d" % (short, serial)
return version
def _make_url(major, minor, micro, releaselevel, serial):
"""Make the URL people should start at for this version of coverage.py."""
url = "https://coverage.readthedocs.io"
if releaselevel != 'final':
# For pre-releases, use a version-specific URL.
url += "/en/coverage-" + _make_version(major, minor, micro, releaselevel, serial)
return url
__version__ = _make_version(*version_info)
__url__ = _make_url(*version_info)
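# For illustration (not part of the original file): with version_info set to
# (5, 3, 2, "alpha", 0), __version__ is "5.3.2a0" and __url__ is
# "https://coverage.readthedocs.io/en/coverage-5.3.2a0".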
|
import base64
import functools
import hashlib
import logging
import os
import random
import re
import string
import time
import urllib
import flask
from M2Crypto import RSA
import requests
from docker_registry.core import compat
json = compat.json
from . import storage
from .lib import config
cfg = config.load()
logger = logging.getLogger(__name__)
_re_docker_version = re.compile(r'docker/([^\s]+)')
_re_authorization = re.compile(r'(\w+)[:=][\s"]?([^",]+)"?')
_re_hex_image_id = re.compile(r'^([a-f0-9]{16}|[a-f0-9]{64})$')
def valid_image_id(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
image_id = kwargs.get('image_id', '')
if _re_hex_image_id.match(image_id):
return f(*args, **kwargs)
return api_error("Invalid image ID", 404)
return wrapper
def docker_client_version():
    """Try to extract the client version from the User-Agent string,
    so we can warn older versions of the Docker engine/daemon about
incompatible APIs. If we can't figure out the version (e.g. the
client is not a Docker engine), just return None.
"""
ua = flask.request.headers.get('user-agent', '')
m = _re_docker_version.search(ua)
if not m:
return
version = m.group(1)
if '-' in version:
version = version.split('-')[0]
try:
        return tuple(int(x) for x in version.split('.'))
except ValueError:
return
class SocketReader(object):
def __init__(self, fp):
self._fp = fp
self.handlers = []
def __iter__(self):
return self.iterate()
def iterate(self, chunk_size=-1):
if isinstance(self._fp, requests.Response):
if chunk_size == -1:
chunk_size = 1024
for chunk in self._fp.iter_content(chunk_size):
for handler in self.handlers:
handler(chunk)
yield chunk
else:
chunk = self._fp.read(chunk_size)
while chunk:
for handler in self.handlers:
handler(chunk)
yield chunk
chunk = self._fp.read(chunk_size)
def add_handler(self, handler):
self.handlers.append(handler)
def read(self, n=-1):
buf = self._fp.read(n)
if not buf:
return ''
for handler in self.handlers:
handler(buf)
return buf
def response(data=None, code=200, headers=None, raw=False):
if data is None:
data = True
h = {
'Cache-Control': 'no-cache',
'Expires': '-1',
'Content-Type': 'application/json'
}
if headers:
h.update(headers)
if h['Cache-Control'] == 'no-cache':
h['Pragma'] = 'no-cache'
try:
if raw is False:
data = json.dumps(data, sort_keys=True, skipkeys=True)
except TypeError:
data = str(data)
return flask.current_app.make_response((data, code, h))
def validate_parent_access(parent_id):
if cfg.standalone:
return True
auth = _parse_auth_header()
if not auth:
return False
full_repos_name = auth.get('repository', '').split('/')
if len(full_repos_name) != 2:
logger.debug('validate_parent: Invalid repository field')
return False
url = '{0}/v1/repositories/{1}/{2}/layer/{3}/access'.format(
cfg.index_endpoint, full_repos_name[0], full_repos_name[1], parent_id
)
headers = {'Authorization': flask.request.headers.get('authorization')}
resp = requests.get(url, verify=True, headers=headers)
if resp.status_code != 200:
logger.debug('validate_parent: index returns status {0}'.format(
resp.status_code
))
return False
try:
# Note(dmp): unicode patch XXX not applied! Assuming requests does it
logger.debug('validate_parent: Content: {0}'.format(resp.text))
return json.loads(resp.text).get('access', False)
except ValueError:
logger.debug('validate_parent: Wrong response format')
return False
def validate_token(auth):
full_repos_name = auth.get('repository', '').split('/')
if len(full_repos_name) != 2:
logger.debug('validate_token: Invalid repository field')
return False
url = '{0}/v1/repositories/{1}/{2}/images'.format(cfg.index_endpoint,
full_repos_name[0],
full_repos_name[1])
headers = {'Authorization': flask.request.headers.get('authorization')}
resp = requests.get(url, verify=True, headers=headers)
logger.debug('validate_token: Index returned {0}'.format(resp.status_code))
if resp.status_code != 200:
return False
store = storage.load()
try:
# Note(dmp): unicode patch XXX not applied (requests)
images_list = [i['id'] for i in json.loads(resp.text)]
store.put_content(store.images_list_path(*full_repos_name),
json.dumps(images_list))
except ValueError:
logger.debug('validate_token: Wrong format for images_list')
return False
return True
def get_remote_ip():
if 'X-Forwarded-For' in flask.request.headers:
return flask.request.headers.getlist('X-Forwarded-For')[0]
if 'X-Real-Ip' in flask.request.headers:
return flask.request.headers.getlist('X-Real-Ip')[0]
return flask.request.remote_addr
def is_ssl():
for header in ('X-Forwarded-Proto', 'X-Forwarded-Protocol'):
if header in flask.request.headers and (
flask.request.headers[header].lower() in ('https', 'ssl')
):
return True
return False
def _parse_auth_header():
auth = flask.request.headers.get('authorization', '')
if auth.split(' ')[0].lower() != 'token':
logger.debug('check_token: Invalid token format')
return None
logger.debug('Auth Token = {0}'.format(auth))
auth = dict(_re_authorization.findall(auth))
logger.debug('auth = {0}'.format(auth))
return auth
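# Hedged illustration (the header value comes from the index service, not this
# code): an Authorization header such as
#     Token signature=ABC123,repository="library/ubuntu",access=write
# is parsed by _re_authorization into
#     {'signature': 'ABC123', 'repository': 'library/ubuntu', 'access': 'write'}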
def check_token(args):
logger.debug('args = {0}'.format(args))
if cfg.disable_token_auth is True or cfg.standalone is True:
return True
auth = _parse_auth_header()
if not auth:
return False
if 'namespace' in args and 'repository' in args:
# We're authorizing an action on a repository,
# let's check that it matches the repos name provided in the token
full_repos_name = '{namespace}/{repository}'.format(**args)
logger.debug('full_repos_name = {0}'.format(full_repos_name))
if full_repos_name != auth.get('repository'):
logger.debug('check_token: Wrong repository name in the token:'
'{0} != {1}'.format(full_repos_name,
auth.get('repository')))
return False
# Check that the token `access' variable is aligned with the HTTP method
access = auth.get('access')
if access == 'write' and flask.request.method not in ['POST', 'PUT']:
logger.debug('check_token: Wrong access value in the token')
return False
if access == 'read' and flask.request.method != 'GET':
logger.debug('check_token: Wrong access value in the token')
return False
if access == 'delete' and flask.request.method != 'DELETE':
logger.debug('check_token: Wrong access value in the token')
return False
if validate_token(auth) is False:
return False
# Token is valid
return True
def check_signature():
pkey = cfg.privileged_key
if not pkey:
return False
headers = flask.request.headers
signature = headers.get('X-Signature')
if not signature:
logger.debug('No X-Signature header in request')
return False
sig = parse_content_signature(signature)
logger.debug('Parsed signature: {}'.format(sig))
sigdata = base64.b64decode(sig['data'])
header_keys = sorted([
x for x in headers.iterkeys() if x.startswith('X-Docker')
])
message = ','.join([flask.request.method, flask.request.path] +
['{}:{}'.format(k, headers[k]) for k in header_keys])
logger.debug('Signed message: {}'.format(message))
try:
return pkey.verify(message_digest(message), sigdata, 'sha1') == 1
except RSA.RSAError as e:
logger.exception(e)
return False
def parse_content_signature(s):
lst = [x.strip().split('=', 1) for x in s.split(';')]
ret = {}
for k, v in lst:
ret[k] = v
return ret
def message_digest(s):
m = hashlib.new('sha1')
m.update(s)
return m.digest()
def requires_auth(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
if check_signature() is True or check_token(kwargs) is True:
return f(*args, **kwargs)
headers = {'WWW-Authenticate': 'Token'}
return api_error('Requires authorization', 401, headers)
return wrapper
def api_error(message, code=400, headers=None):
logger.debug('api_error: {0}'.format(message))
return response({'error': message}, code, headers)
def gen_random_string(length=16):
return ''.join([random.choice(string.ascii_uppercase + string.digits)
for x in range(length)])
def parse_repository_name(f):
@functools.wraps(f)
def wrapper(repository, *args, **kwargs):
parts = repository.rstrip('/').split('/', 1)
if len(parts) < 2:
namespace = 'library'
repository = parts[0]
else:
(namespace, repository) = parts
repository = urllib.quote_plus(repository)
return f(namespace=namespace, repository=repository, *args, **kwargs)
return wrapper
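# For illustration (not part of the original module): a route argument of
# "ubuntu" resolves to namespace "library" and repository "ubuntu", while
# "myorg/myimage" resolves to namespace "myorg" and repository "myimage"
# (the repository part is additionally quote_plus-encoded).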
def exclusive_lock(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
lock_path = os.path.join(
'./', 'registry.{0}.lock'.format(f.func_name)
)
if os.path.exists(lock_path):
x = 0
while os.path.exists(lock_path) and x < 100:
logger.warn('Another process is creating the search database')
x += 1
time.sleep(1)
if x == 100:
                raise Exception('Timed out waiting for db init')
return
lock_file = open(lock_path, 'w')
lock_file.close()
try:
result = f(*args, **kwargs)
finally:
os.remove(lock_path)
return result
return wrapper
def get_repository():
auth = flask.request.headers.get('authorization', '')
if not auth:
return
auth = dict(_re_authorization.findall(auth))
repository = auth.get('repository')
if repository is None:
return ('', '')
parts = repository.rstrip('/').split('/', 1)
if len(parts) < 2:
return ('library', parts[0])
return (parts[0], parts[1])
def get_endpoints(overcfg=None):
registry_endpoints = (overcfg or cfg).registry_endpoints
if not registry_endpoints:
# registry_endpoints = socket.gethostname()
registry_endpoints = flask.request.environ['HTTP_HOST']
return registry_endpoints
|