from homeassistant.components.atag import DOMAIN, WATER_HEATER
from homeassistant.components.water_heater import SERVICE_SET_TEMPERATURE
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from tests.async_mock import patch
from tests.components.atag import UID, init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
WATER_HEATER_ID = f"{WATER_HEATER}.{DOMAIN}"
async def test_water_heater(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation of Atag water heater."""
with patch("pyatag.entities.DHW.status"):
entry = await init_integration(hass, aioclient_mock)
registry = await hass.helpers.entity_registry.async_get_registry()
assert registry.async_is_registered(WATER_HEATER_ID)
entry = registry.async_get(WATER_HEATER_ID)
assert entry.unique_id == f"{UID}-{WATER_HEATER}"
async def test_setting_target_temperature(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setting the water heater device."""
await init_integration(hass, aioclient_mock)
with patch("pyatag.entities.DHW.set_temp") as mock_set_temp:
await hass.services.async_call(
WATER_HEATER,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: WATER_HEATER_ID, ATTR_TEMPERATURE: 50},
blocking=True,
)
await hass.async_block_till_done()
mock_set_temp.assert_called_once_with(50)
|
import codecs
import os
import re
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
with open("README.rst", "r") as readme:
    long_description = readme.read()
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with codecs.open(os.path.join(here, *parts), "r") as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
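# E.g. a line `__version__ = "4.0.2"` in vcr/__init__.py would yield "4.0.2"
# (the version number here is purely illustrative).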
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# Import here because, outside of a test run, the eggs aren't loaded yet.
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
install_requires = [
"PyYAML",
"wrapt",
"six>=1.5",
'yarl; python_version>="3.6"',
'yarl<1.4; python_version=="3.5"',
]
setup(
name="vcrpy",
version=find_version("vcr", "__init__.py"),
description=("Automatically mock your HTTP interactions to simplify and speed up testing"),
long_description=long_description,
long_description_content_type="text/x-rst",
author="Kevin McCarthy",
author_email="[email protected]",
url="https://github.com/kevin1024/vcrpy",
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.5",
install_requires=install_requires,
license="MIT",
tests_require=["pytest", "mock", "pytest-httpbin"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Testing",
"Topic :: Internet :: WWW/HTTP",
"License :: OSI Approved :: MIT License",
],
)
|
import pygogo as gogo
from functools import reduce
logger = gogo.Gogo(__name__, monolog=True).logger
class DotDict(dict):
"""A dictionary whose keys can be accessed using dot notation
>>> r = DotDict({'a': {'content': 'value'}})
>>> r.get('a.content') == 'value'
True
>>> r['a.content'] == 'value'
True
"""
def __init__(self, data=None, **kwargs):
self.update(data)
def _parse_key(self, key=None):
try:
keys = key.rstrip('.').split('.') if key else []
except AttributeError:
keys = [key['subkey']] if key else []
return keys
def _parse_value(self, value, key, default=None):
try:
parsed = value[key]
except KeyError:
try:
parsed = value['value']
except KeyError:
parsed = default
except (TypeError, IndexError):
if hasattr(value, 'append'):
parsed = [v[key] for v in value]
else:
parsed = value
return default if parsed is None else parsed
def __getitem__(self, key):
keys = self._parse_key(key)
value = super(DotDict, self).__getitem__(keys[0])
if len(keys) > 1:
return value['.'.join(keys[1:])]
elif hasattr(value, 'keys') and 'value' in value:
value = value['value']
return DotDict(value) if hasattr(value, 'keys') else value
def get(self, key=None, default=None, **kwargs):
keys = self._parse_key(key)
value = DotDict(self.copy())
for key in keys:
try:
key = int(key)
except ValueError:
pass
value = self._parse_value(value, key, default)
if hasattr(value, 'keys') and 'terminal' in value:
# value fed in from another module
stream = kwargs[value['terminal']]
value = next(stream)[value.get('path', 'content')]
elif hasattr(value, 'keys') and 'value' in value:
value = value['value']
return DotDict(value) if hasattr(value, 'keys') else value
def delete(self, key):
keys = self._parse_key(key)
last = keys[-1]
try:
del reduce(lambda i, k: DotDict(i).get(k), [self] + keys[:-1])[last]
except KeyError:
pass
def set(self, key, value):
keys = self._parse_key(key)
first = keys[:-1]
last = keys[-1]
item = self.copy()
reduce(lambda i, k: i.setdefault(k, {}), first, item)[last] = value
super(DotDict, self).update(item)
def update(self, data=None):
if not data:
return
_dict = dict(data)
dot_keys = [d for d in _dict if '.' in d]
if dot_keys:
# skip key if a subkey redefines it
# i.e., 'author.name' has precedence over 'author'
keys = ['.'.join(self._parse_key(dk)[:-1]) for dk in dot_keys]
items = ((k, v) for k, v in _dict.items() if k not in keys)
else:
items = _dict.items()
for key, value in items:
    self.set(key, value)
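# A minimal usage sketch (the record below is made up, not from the module):
# nested values are reachable through dotted keys, per the class docstring.
if __name__ == '__main__':
    record = DotDict({'author': {'name': 'Ann'}})
    assert record.get('author.name') == 'Ann'
    record.set('author.email', '[email protected]')
    assert record['author.email'] == '[email protected]'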
|
from .const import DOMAIN, DOOR_STATION
def get_mac_address_from_doorstation_info(doorstation_info):
"""Get the mac address depending on the device type."""
if "PRIMARY_MAC_ADDR" in doorstation_info:
return doorstation_info["PRIMARY_MAC_ADDR"]
return doorstation_info["WIFI_MAC_ADDR"]
def get_doorstation_by_token(hass, token):
"""Get doorstation by slug."""
for config_entry_id in hass.data[DOMAIN]:
doorstation = hass.data[DOMAIN][config_entry_id][DOOR_STATION]
if token == doorstation.token:
return doorstation
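# A hedged sketch (the payloads below are assumptions, not real DoorBird API
# responses): wired devices report PRIMARY_MAC_ADDR, others WIFI_MAC_ADDR.
if __name__ == "__main__":
    assert get_mac_address_from_doorstation_info(
        {"PRIMARY_MAC_ADDR": "AA:BB:CC:DD:EE:FF"}) == "AA:BB:CC:DD:EE:FF"
    assert get_mac_address_from_doorstation_info(
        {"WIFI_MAC_ADDR": "11:22:33:44:55:66"}) == "11:22:33:44:55:66"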
|
import pytest
from qutebrowser.mainwindow.statusbar import backforward
@pytest.fixture
def backforward_widget(qtbot):
widget = backforward.Backforward()
qtbot.add_widget(widget)
return widget
@pytest.mark.parametrize('can_go_back, can_go_forward, expected_text', [
(False, False, ''),
(True, False, '[<]'),
(False, True, '[>]'),
(True, True, '[<>]'),
])
def test_backforward_widget(backforward_widget, tabbed_browser_stubs,
fake_web_tab, can_go_back, can_go_forward,
expected_text):
"""Ensure the Backforward widget shows the correct text."""
tab = fake_web_tab(can_go_back=can_go_back, can_go_forward=can_go_forward)
tabbed_browser = tabbed_browser_stubs[0]
tabbed_browser.widget.current_index = 1
tabbed_browser.widget.tabs = [tab]
backforward_widget.enabled = True
backforward_widget.on_tab_cur_url_changed(tabbed_browser)
assert backforward_widget.text() == expected_text
assert backforward_widget.isVisible() == bool(expected_text)
# Check that the widget stays hidden if not in the statusbar
backforward_widget.enabled = False
backforward_widget.hide()
backforward_widget.on_tab_cur_url_changed(tabbed_browser)
assert backforward_widget.isHidden()
# Check that the widget gets reset if empty.
if can_go_back and can_go_forward:
tab = fake_web_tab(can_go_back=False, can_go_forward=False)
tabbed_browser.widget.tabs = [tab]
backforward_widget.enabled = True
backforward_widget.on_tab_cur_url_changed(tabbed_browser)
assert backforward_widget.text() == ''
assert not backforward_widget.isVisible()
def test_none_tab(backforward_widget, tabbed_browser_stubs, fake_web_tab):
"""Make sure nothing crashes when passing None as tab."""
tab = fake_web_tab(can_go_back=True, can_go_forward=True)
tabbed_browser = tabbed_browser_stubs[0]
tabbed_browser.widget.current_index = 1
tabbed_browser.widget.tabs = [tab]
backforward_widget.enabled = True
backforward_widget.on_tab_cur_url_changed(tabbed_browser)
assert backforward_widget.text() == '[<>]'
assert backforward_widget.isVisible()
tabbed_browser.widget.current_index = -1
backforward_widget.on_tab_cur_url_changed(tabbed_browser)
assert backforward_widget.text() == ''
assert not backforward_widget.isVisible()
|
from simplipy.entity import EntityTypes
from homeassistant.const import DEVICE_CLASS_TEMPERATURE, TEMP_FAHRENHEIT
from homeassistant.core import callback
from . import SimpliSafeEntity
from .const import DATA_CLIENT, DOMAIN, LOGGER
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up SimpliSafe freeze sensors based on a config entry."""
simplisafe = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
sensors = []
for system in simplisafe.systems.values():
if system.version == 2:
LOGGER.info("Skipping sensor setup for V2 system: %s", system.system_id)
continue
for sensor in system.sensors.values():
if sensor.type == EntityTypes.temperature:
sensors.append(SimplisafeFreezeSensor(simplisafe, system, sensor))
async_add_entities(sensors)
class SimplisafeFreezeSensor(SimpliSafeEntity):
"""Define a SimpliSafe freeze sensor entity."""
def __init__(self, simplisafe, system, sensor):
"""Initialize."""
super().__init__(simplisafe, system, sensor.name, serial=sensor.serial)
self._sensor = sensor
self._state = None
self._device_info["identifiers"] = {(DOMAIN, sensor.serial)}
self._device_info["model"] = "Freeze Sensor"
self._device_info["name"] = sensor.name
@property
def device_class(self):
"""Return type of sensor."""
return DEVICE_CLASS_TEMPERATURE
@property
def unique_id(self):
"""Return unique ID of sensor."""
return self._sensor.serial
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def state(self):
"""Return the sensor state."""
return self._state
@callback
def async_update_from_rest_api(self):
"""Update the entity with the provided REST API data."""
self._state = self._sensor.temperature
|
from homeassistant.components import luftdaten
from homeassistant.components.luftdaten.const import CONF_SENSOR_ID, DOMAIN
from homeassistant.const import CONF_SCAN_INTERVAL, CONF_SHOW_ON_MAP
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_config_with_sensor_passed_to_config_entry(hass):
"""Test that configured options for a sensor are loaded."""
conf = {
CONF_SENSOR_ID: "12345abcde",
CONF_SHOW_ON_MAP: False,
CONF_SCAN_INTERVAL: 600,
}
with patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_entries, patch.object(
luftdaten, "configured_sensors", return_value=[]
):
assert await async_setup_component(hass, DOMAIN, conf) is True
assert len(mock_config_entries.flow.mock_calls) == 0
async def test_config_already_registered_not_passed_to_config_entry(hass):
"""Test that an already registered sensor does not initiate an import."""
conf = {CONF_SENSOR_ID: "12345abcde"}
with patch.object(
hass.config_entries.flow, "async_init"
) as mock_config_entries, patch.object(
luftdaten, "configured_sensors", return_value=["12345abcde"]
):
assert await async_setup_component(hass, DOMAIN, conf) is True
assert len(mock_config_entries.flow.mock_calls) == 0
|
import asyncio
from unittest.mock import Mock, patch
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.tellduslive import (
APPLICATION_NAME,
DOMAIN,
KEY_SCAN_INTERVAL,
SCAN_INTERVAL,
config_flow,
)
from homeassistant.config_entries import SOURCE_DISCOVERY
from homeassistant.const import CONF_HOST
from tests.common import MockConfigEntry, mock_coro
def init_config_flow(hass, side_effect=None):
"""Init a configuration flow."""
flow = config_flow.FlowHandler()
flow.hass = hass
if side_effect:
flow._get_auth_url = Mock(side_effect=side_effect)
return flow
@pytest.fixture
def supports_local_api():
"""Set TelldusLive supports_local_api."""
return True
@pytest.fixture
def authorize():
"""Set TelldusLive authorize."""
return True
@pytest.fixture
def mock_tellduslive(supports_local_api, authorize):
"""Mock tellduslive."""
with patch(
"homeassistant.components.tellduslive.config_flow.Session"
) as Session, patch(
"homeassistant.components.tellduslive.config_flow.supports_local_api"
) as tellduslive_supports_local_api:
tellduslive_supports_local_api.return_value = supports_local_api
Session().authorize.return_value = authorize
Session().access_token = "token"
Session().access_token_secret = "token_secret"
Session().authorize_url = "https://example.com"
yield Session, tellduslive_supports_local_api
async def test_abort_if_already_setup(hass):
"""Test we abort if TelldusLive is already setup."""
flow = init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_setup"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_import(None)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_setup"
async def test_full_flow_implementation(hass, mock_tellduslive):
"""Test registering an implementation and finishing flow works."""
flow = init_config_flow(hass)
flow.context = {"source": SOURCE_DISCOVERY}
result = await flow.async_step_discovery(["localhost", "tellstick"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert len(flow._hosts) == 2
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user({"host": "localhost"})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert result["description_placeholders"] == {
"auth_url": "https://example.com",
"app_name": APPLICATION_NAME,
}
result = await flow.async_step_auth("")
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "localhost"
assert result["data"]["host"] == "localhost"
assert result["data"]["scan_interval"] == 60
assert result["data"]["session"] == {"token": "token", "host": "localhost"}
async def test_step_import(hass, mock_tellduslive):
"""Test that we trigger auth when configuring from import."""
flow = init_config_flow(hass)
result = await flow.async_step_import({CONF_HOST: DOMAIN, KEY_SCAN_INTERVAL: 0})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
async def test_step_import_add_host(hass, mock_tellduslive):
"""Test that we add host and trigger user when configuring from import."""
flow = init_config_flow(hass)
result = await flow.async_step_import(
{CONF_HOST: "localhost", KEY_SCAN_INTERVAL: 0}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import_no_config_file(hass, mock_tellduslive):
"""Test that we trigger user with no config_file configuring from import."""
flow = init_config_flow(hass)
result = await flow.async_step_import(
{CONF_HOST: "localhost", KEY_SCAN_INTERVAL: 0}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import_load_json_matching_host(hass, mock_tellduslive):
"""Test that we add host and trigger user when configuring from import."""
flow = init_config_flow(hass)
with patch(
"homeassistant.components.tellduslive.config_flow.load_json",
return_value={"tellduslive": {}},
), patch("os.path.isfile"):
result = await flow.async_step_import(
{CONF_HOST: "Cloud API", KEY_SCAN_INTERVAL: 0}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import_load_json(hass, mock_tellduslive):
"""Test that we create entry when configuring from import."""
flow = init_config_flow(hass)
with patch(
"homeassistant.components.tellduslive.config_flow.load_json",
return_value={"localhost": {}},
), patch("os.path.isfile"):
result = await flow.async_step_import(
{CONF_HOST: "localhost", KEY_SCAN_INTERVAL: SCAN_INTERVAL}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "localhost"
assert result["data"]["host"] == "localhost"
assert result["data"]["scan_interval"] == 60
assert result["data"]["session"] == {}
@pytest.mark.parametrize("supports_local_api", [False])
async def test_step_disco_no_local_api(hass, mock_tellduslive):
"""Test that we trigger when configuring from discovery, not supporting local api."""
flow = init_config_flow(hass)
flow.context = {"source": SOURCE_DISCOVERY}
result = await flow.async_step_discovery(["localhost", "tellstick"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert len(flow._hosts) == 1
async def test_step_auth(hass, mock_tellduslive):
"""Test that create cloud entity from auth."""
flow = init_config_flow(hass)
await flow.async_step_auth()
result = await flow.async_step_auth(["localhost", "tellstick"])
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Cloud API"
assert result["data"]["host"] == "Cloud API"
assert result["data"]["scan_interval"] == 60
assert result["data"]["session"] == {
"token": "token",
"token_secret": "token_secret",
}
@pytest.mark.parametrize("authorize", [False])
async def test_wrong_auth_flow_implementation(hass, mock_tellduslive):
"""Test wrong auth."""
flow = init_config_flow(hass)
await flow.async_step_auth()
result = await flow.async_step_auth("")
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert result["errors"]["base"] == "invalid_auth"
async def test_not_pick_host_if_only_one(hass, mock_tellduslive):
"""Test not picking host if we have just one."""
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
async def test_abort_if_timeout_generating_auth_url(hass, mock_tellduslive):
"""Test abort if generating authorize url timeout."""
flow = init_config_flow(hass, side_effect=asyncio.TimeoutError)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "authorize_url_timeout"
async def test_abort_no_auth_url(hass, mock_tellduslive):
"""Test abort if generating authorize url returns none."""
flow = init_config_flow(hass)
flow._get_auth_url = Mock(return_value=False)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "authorize_url_fail"
async def test_abort_if_exception_generating_auth_url(hass, mock_tellduslive):
"""Test we abort if generating authorize url blows up."""
flow = init_config_flow(hass, side_effect=ValueError)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "authorize_url_fail"
async def test_discovery_already_configured(hass, mock_tellduslive):
"""Test abort if already configured fires from discovery."""
MockConfigEntry(domain="tellduslive", data={"host": "some-host"}).add_to_hass(hass)
flow = init_config_flow(hass)
flow.context = {"source": SOURCE_DISCOVERY}
with pytest.raises(data_entry_flow.AbortFlow):
    await flow.async_step_discovery(["some-host", ""])
|
import logging
from openzwavemqtt.const import CommandClass, ValueType
from homeassistant.components.sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
DOMAIN as SENSOR_DOMAIN,
)
from homeassistant.const import TEMP_CELSIUS, TEMP_FAHRENHEIT
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave sensor from config entry."""
@callback
def async_add_sensor(value):
"""Add Z-Wave Sensor."""
# Basic Sensor types
if value.primary.type in (
ValueType.BYTE,
ValueType.INT,
ValueType.SHORT,
ValueType.DECIMAL,
):
sensor = ZWaveNumericSensor(value)
elif value.primary.type == ValueType.LIST:
sensor = ZWaveListSensor(value)
elif value.primary.type == ValueType.STRING:
sensor = ZWaveStringSensor(value)
else:
_LOGGER.warning("Sensor not implemented for value %s", value.primary.label)
return
async_add_entities([sensor])
hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(
async_dispatcher_connect(
hass, f"{DOMAIN}_new_{SENSOR_DOMAIN}", async_add_sensor
)
)
class ZwaveSensorBase(ZWaveDeviceEntity):
"""Basic Representation of a Z-Wave sensor."""
@property
def device_class(self):
"""Return the device class of the sensor."""
if self.values.primary.command_class == CommandClass.BATTERY:
return DEVICE_CLASS_BATTERY
if self.values.primary.command_class == CommandClass.METER:
return DEVICE_CLASS_POWER
if "Temperature" in self.values.primary.label:
return DEVICE_CLASS_TEMPERATURE
if "Illuminance" in self.values.primary.label:
return DEVICE_CLASS_ILLUMINANCE
if "Humidity" in self.values.primary.label:
return DEVICE_CLASS_HUMIDITY
if "Power" in self.values.primary.label:
return DEVICE_CLASS_POWER
if "Energy" in self.values.primary.label:
return DEVICE_CLASS_POWER
if "Electric" in self.values.primary.label:
return DEVICE_CLASS_POWER
if "Pressure" in self.values.primary.label:
return DEVICE_CLASS_PRESSURE
return None
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
# We hide some of the more advanced sensors by default to not overwhelm users
if self.values.primary.command_class in [
CommandClass.BASIC,
CommandClass.INDICATOR,
CommandClass.NOTIFICATION,
]:
return False
return True
@property
def force_update(self) -> bool:
"""Force updates."""
return True
class ZWaveStringSensor(ZwaveSensorBase):
"""Representation of a Z-Wave sensor."""
@property
def state(self):
"""Return state of the sensor."""
return self.values.primary.value
@property
def unit_of_measurement(self):
"""Return unit of measurement the value is expressed in."""
return self.values.primary.units
@property
def entity_registry_enabled_default(self):
"""Return if the entity should be enabled when first added to the entity registry."""
return False
class ZWaveNumericSensor(ZwaveSensorBase):
"""Representation of a Z-Wave sensor."""
@property
def state(self):
"""Return state of the sensor."""
return round(self.values.primary.value, 2)
@property
def unit_of_measurement(self):
"""Return unit of measurement the value is expressed in."""
if self.values.primary.units == "C":
return TEMP_CELSIUS
if self.values.primary.units == "F":
return TEMP_FAHRENHEIT
return self.values.primary.units
class ZWaveListSensor(ZwaveSensorBase):
"""Representation of a Z-Wave list sensor."""
@property
def state(self):
"""Return the state of the sensor."""
# We use the id as value for backwards compatibility
return self.values.primary.value["Selected_id"]
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attributes = super().device_state_attributes
# add the value's label as property
attributes["label"] = self.values.primary.value["Selected"]
return attributes
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
# these sensors are only here for backwards compatibility, disable them by default
return False
|
import logging
from pyqwikswitch.async_ import QSUsb
from pyqwikswitch.qwikswitch import CMD_BUTTONS, QS_CMD, QS_ID, SENSORS, QSType
import voluptuous as vol
from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.const import (
CONF_SENSORS,
CONF_SWITCHES,
CONF_URL,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "qwikswitch"
CONF_DIMMER_ADJUST = "dimmer_adjust"
CONF_BUTTON_EVENTS = "button_events"
CV_DIM_VALUE = vol.All(vol.Coerce(float), vol.Range(min=1, max=3))
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_URL, default="http://127.0.0.1:2020"): vol.Coerce(
str
),
vol.Optional(CONF_DIMMER_ADJUST, default=1): CV_DIM_VALUE,
vol.Optional(CONF_BUTTON_EVENTS, default=[]): cv.ensure_list_csv,
vol.Optional(CONF_SENSORS, default=[]): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required("id"): str,
vol.Optional("channel", default=1): int,
vol.Required("name"): str,
vol.Required("type"): str,
vol.Optional("class"): DEVICE_CLASSES_SCHEMA,
vol.Optional("invert"): bool,
}
)
],
),
vol.Optional(CONF_SWITCHES, default=[]): vol.All(cv.ensure_list, [str]),
}
)
},
extra=vol.ALLOW_EXTRA,
)
class QSEntity(Entity):
"""Qwikswitch Entity base."""
def __init__(self, qsid, name):
"""Initialize the QSEntity."""
self._name = name
self.qsid = qsid
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def should_poll(self):
"""QS sensors gets packets in update_packet."""
return False
@property
def unique_id(self):
"""Return a unique identifier for this sensor."""
return f"qs{self.qsid}"
@callback
def update_packet(self, packet):
"""Receive update packet from QSUSB. Match dispather_send signature."""
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Listen for updates from QSUSb via dispatcher."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
self.qsid, self.update_packet
)
)
class QSToggleEntity(QSEntity):
"""Representation of a Qwikswitch Toggle Entity.
Implemented:
- QSLight extends QSToggleEntity and Light[2] (ToggleEntity[1])
- QSSwitch extends QSToggleEntity and SwitchEntity[3] (ToggleEntity[1])
[1] /helpers/entity.py
[2] /components/light/__init__.py
[3] /components/switch/__init__.py
"""
def __init__(self, qsid, qsusb):
"""Initialize the ToggleEntity."""
self.device = qsusb.devices[qsid]
super().__init__(qsid, self.device.name)
@property
def is_on(self):
"""Check if device is on (non-zero)."""
return self.device.value > 0
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
new = kwargs.get(ATTR_BRIGHTNESS, 255)
self.hass.data[DOMAIN].devices.set_value(self.qsid, new)
async def async_turn_off(self, **_):
"""Turn the device off."""
self.hass.data[DOMAIN].devices.set_value(self.qsid, 0)
async def async_setup(hass, config):
"""Qwiskswitch component setup."""
# Add cmd's to in /&listen packets will fire events
# By default only buttons of type [TOGGLE,SCENE EXE,LEVEL]
cmd_buttons = set(CMD_BUTTONS)
for btn in config[DOMAIN][CONF_BUTTON_EVENTS]:
cmd_buttons.add(btn)
url = config[DOMAIN][CONF_URL]
dimmer_adjust = config[DOMAIN][CONF_DIMMER_ADJUST]
sensors = config[DOMAIN][CONF_SENSORS]
switches = config[DOMAIN][CONF_SWITCHES]
def callback_value_changed(_qsd, qsid, _val):
"""Update entity values based on device change."""
_LOGGER.debug("Dispatch %s (update from devices)", qsid)
hass.helpers.dispatcher.async_dispatcher_send(qsid, None)
session = async_get_clientsession(hass)
qsusb = QSUsb(
url=url,
dim_adj=dimmer_adjust,
session=session,
callback_value_changed=callback_value_changed,
)
# Discover all devices in QSUSB
if not await qsusb.update_from_devices():
return False
hass.data[DOMAIN] = qsusb
comps = {"switch": [], "light": [], "sensor": [], "binary_sensor": []}
sensor_ids = []
for sens in sensors:
try:
_, _type = SENSORS[sens["type"]]
sensor_ids.append(sens["id"])
if _type is bool:
comps["binary_sensor"].append(sens)
continue
comps["sensor"].append(sens)
for _key in ("invert", "class"):
if _key in sens:
_LOGGER.warning(
"%s should only be used for binary_sensors: %s", _key, sens
)
except KeyError:
_LOGGER.warning(
"Sensor validation failed for sensor id=%s type=%s",
sens["id"],
sens["type"],
)
for qsid, dev in qsusb.devices.items():
if qsid in switches:
if dev.qstype != QSType.relay:
_LOGGER.warning("You specified a switch that is not a relay %s", qsid)
continue
comps["switch"].append(qsid)
elif dev.qstype in (QSType.relay, QSType.dimmer):
comps["light"].append(qsid)
else:
_LOGGER.warning("Ignored unknown QSUSB device: %s", dev)
continue
# Load platforms
for comp_name, comp_conf in comps.items():
if comp_conf:
load_platform(hass, comp_name, DOMAIN, {DOMAIN: comp_conf}, config)
def callback_qs_listen(qspacket):
"""Typically a button press or update signal."""
# If button pressed, fire a hass event
if QS_ID in qspacket:
if qspacket.get(QS_CMD, "") in cmd_buttons:
hass.bus.async_fire(f"qwikswitch.button.{qspacket[QS_ID]}", qspacket)
return
if qspacket[QS_ID] in sensor_ids:
_LOGGER.debug("Dispatch %s ((%s))", qspacket[QS_ID], qspacket)
hass.helpers.dispatcher.async_dispatcher_send(qspacket[QS_ID], qspacket)
# Update all ha_objects
hass.async_add_job(qsusb.update_from_devices)
@callback
def async_start(_):
"""Start listening."""
hass.async_add_job(qsusb.listen, callback_qs_listen)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, async_start)
@callback
def async_stop(_):
"""Stop the listener."""
hass.data[DOMAIN].stop()
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, async_stop)
return True
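# A hedged sketch of what CONFIG_SCHEMA accepts (the id and names below are
# made up): optional keys fall back to the defaults declared above.
if __name__ == "__main__":
    validated = CONFIG_SCHEMA({
        DOMAIN: {
            CONF_URL: "http://127.0.0.1:2020",
            CONF_SENSORS: [{"id": "@a00001", "name": "door", "type": "door"}],
        }
    })
    assert validated[DOMAIN][CONF_DIMMER_ADJUST] == 1
    assert validated[DOMAIN][CONF_SENSORS][0]["channel"] == 1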
|
from datetime import timedelta
from gogogate2_api import GogoGate2Api, ISmartGateApi
from gogogate2_api.common import (
ApiError,
DoorMode,
DoorStatus,
GogoGate2ActivateResponse,
GogoGate2Door,
GogoGate2InfoResponse,
ISmartGateDoor,
ISmartGateInfoResponse,
Network,
Outputs,
Wifi,
)
from homeassistant.components.cover import (
DEVICE_CLASS_GARAGE,
DEVICE_CLASS_GATE,
DOMAIN as COVER_DOMAIN,
)
from homeassistant.components.gogogate2.const import (
DEVICE_TYPE_GOGOGATE2,
DEVICE_TYPE_ISMARTGATE,
DOMAIN,
MANUFACTURER,
)
from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN
from homeassistant.config import async_process_ha_core_config
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
ATTR_DEVICE_CLASS,
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_NAME,
CONF_PASSWORD,
CONF_PLATFORM,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_METRIC,
CONF_USERNAME,
STATE_CLOSED,
STATE_OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry, async_fire_time_changed, mock_device_registry
def _mocked_gogogate_open_door_response():
return GogoGate2InfoResponse(
user="user1",
gogogatename="gogogatename0",
model="gogogate2",
apiversion="",
remoteaccessenabled=False,
remoteaccess="abc123.blah.blah",
firmwareversion="222",
apicode="",
door1=GogoGate2Door(
door_id=1,
permission=True,
name="Door1",
gate=False,
mode=DoorMode.GARAGE,
status=DoorStatus.OPENED,
sensor=True,
sensorid=None,
camera=False,
events=2,
temperature=None,
voltage=40,
),
door2=GogoGate2Door(
door_id=2,
permission=True,
name=None,
gate=True,
mode=DoorMode.GARAGE,
status=DoorStatus.UNDEFINED,
sensor=True,
sensorid=None,
camera=False,
events=0,
temperature=None,
voltage=40,
),
door3=GogoGate2Door(
door_id=3,
permission=True,
name=None,
gate=False,
mode=DoorMode.GARAGE,
status=DoorStatus.UNDEFINED,
sensor=True,
sensorid=None,
camera=False,
events=0,
temperature=None,
voltage=40,
),
outputs=Outputs(output1=True, output2=False, output3=True),
network=Network(ip=""),
wifi=Wifi(SSID="", linkquality="", signal=""),
)
def _mocked_ismartgate_closed_door_response():
return ISmartGateInfoResponse(
user="user1",
ismartgatename="ismartgatename0",
model="ismartgatePRO",
apiversion="",
remoteaccessenabled=False,
remoteaccess="abc321.blah.blah",
firmwareversion="555",
pin=123,
lang="en",
newfirmware=False,
door1=ISmartGateDoor(
door_id=1,
permission=True,
name="Door1",
gate=False,
mode=DoorMode.GARAGE,
status=DoorStatus.CLOSED,
sensor=True,
sensorid=None,
camera=False,
events=2,
temperature=None,
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door2=ISmartGateDoor(
door_id=2,
permission=True,
name="Door2",
gate=True,
mode=DoorMode.GARAGE,
status=DoorStatus.CLOSED,
sensor=True,
sensorid=None,
camera=False,
events=2,
temperature=None,
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
door3=ISmartGateDoor(
door_id=3,
permission=True,
name=None,
gate=False,
mode=DoorMode.GARAGE,
status=DoorStatus.UNDEFINED,
sensor=True,
sensorid=None,
camera=False,
events=0,
temperature=None,
enabled=True,
apicode="apicode0",
customimage=False,
voltage=40,
),
network=Network(ip=""),
wifi=Wifi(SSID="", linkquality="", signal=""),
)
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_import_fail(gogogate2api_mock, hass: HomeAssistant) -> None:
"""Test the failure to import."""
api = MagicMock(spec=GogoGate2Api)
api.info.side_effect = ApiError(22, "Error")
gogogate2api_mock.return_value = api
hass_config = {
HA_DOMAIN: {CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC},
COVER_DOMAIN: [
{
CONF_PLATFORM: "gogogate2",
CONF_NAME: "cover0",
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.1.0",
CONF_USERNAME: "user0",
CONF_PASSWORD: "password0",
}
],
}
await async_process_ha_core_config(hass, hass_config[HA_DOMAIN])
assert await async_setup_component(hass, HA_DOMAIN, {})
assert await async_setup_component(hass, COVER_DOMAIN, hass_config)
await hass.async_block_till_done()
entity_ids = hass.states.async_entity_ids(COVER_DOMAIN)
assert not entity_ids
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
@patch("homeassistant.components.gogogate2.common.ISmartGateApi")
async def test_import(
ismartgateapi_mock, gogogate2api_mock, hass: HomeAssistant
) -> None:
"""Test importing of file based config."""
api0 = MagicMock(spec=GogoGate2Api)
api0.info.return_value = _mocked_gogogate_open_door_response()
gogogate2api_mock.return_value = api0
api1 = MagicMock(spec=ISmartGateApi)
api1.info.return_value = _mocked_ismartgate_closed_door_response()
ismartgateapi_mock.return_value = api1
hass_config = {
HA_DOMAIN: {CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC},
COVER_DOMAIN: [
{
CONF_PLATFORM: "gogogate2",
CONF_NAME: "cover0",
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.1.0",
CONF_USERNAME: "user0",
CONF_PASSWORD: "password0",
},
{
CONF_PLATFORM: "gogogate2",
CONF_NAME: "cover1",
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "127.0.1.1",
CONF_USERNAME: "user1",
CONF_PASSWORD: "password1",
},
],
}
await async_process_ha_core_config(hass, hass_config[HA_DOMAIN])
assert await async_setup_component(hass, HA_DOMAIN, {})
assert await async_setup_component(hass, COVER_DOMAIN, hass_config)
await hass.async_block_till_done()
entity_ids = hass.states.async_entity_ids(COVER_DOMAIN)
assert entity_ids is not None
assert len(entity_ids) == 3
assert "cover.door1" in entity_ids
assert "cover.door1_2" in entity_ids
assert "cover.door2" in entity_ids
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_open_close_update(gogogate2api_mock, hass: HomeAssistant) -> None:
"""Test open and close and data update."""
def info_response(door_status: DoorStatus) -> GogoGate2InfoResponse:
return GogoGate2InfoResponse(
user="user1",
gogogatename="gogogatename0",
model="",
apiversion="",
remoteaccessenabled=False,
remoteaccess="abc123.blah.blah",
firmwareversion="",
apicode="",
door1=GogoGate2Door(
door_id=1,
permission=True,
name="Door1",
gate=False,
mode=DoorMode.GARAGE,
status=door_status,
sensor=True,
sensorid=None,
camera=False,
events=2,
temperature=None,
voltage=40,
),
door2=GogoGate2Door(
door_id=2,
permission=True,
name=None,
gate=True,
mode=DoorMode.GARAGE,
status=DoorStatus.UNDEFINED,
sensor=True,
sensorid=None,
camera=False,
events=0,
temperature=None,
voltage=40,
),
door3=GogoGate2Door(
door_id=3,
permission=True,
name=None,
gate=False,
mode=DoorMode.GARAGE,
status=DoorStatus.UNDEFINED,
sensor=True,
sensorid=None,
camera=False,
events=0,
temperature=None,
voltage=40,
),
outputs=Outputs(output1=True, output2=False, output3=True),
network=Network(ip=""),
wifi=Wifi(SSID="", linkquality="", signal=""),
)
api = MagicMock(GogoGate2Api)
api.activate.return_value = GogoGate2ActivateResponse(result=True)
api.info.return_value = info_response(DoorStatus.OPENED)
gogogate2api_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data={
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert hass.states.get("cover.door1") is None
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_OPEN
api.info.return_value = info_response(DoorStatus.CLOSED)
await hass.services.async_call(
COVER_DOMAIN,
"close_cover",
service_data={"entity_id": "cover.door1"},
)
async_fire_time_changed(hass, utcnow() + timedelta(hours=2))
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_CLOSED
api.close_door.assert_called_with(1)
api.info.return_value = info_response(DoorStatus.OPENED)
await hass.services.async_call(
COVER_DOMAIN,
"open_cover",
service_data={"entity_id": "cover.door1"},
)
async_fire_time_changed(hass, utcnow() + timedelta(hours=2))
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_OPEN
api.open_door.assert_called_with(1)
api.info.return_value = info_response(DoorStatus.UNDEFINED)
async_fire_time_changed(hass, utcnow() + timedelta(hours=2))
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_UNKNOWN
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert not hass.states.async_entity_ids(DOMAIN)
@patch("homeassistant.components.gogogate2.common.ISmartGateApi")
async def test_availability(ismartgateapi_mock, hass: HomeAssistant) -> None:
"""Test availability."""
closed_door_response = _mocked_ismartgate_closed_door_response()
api = MagicMock(ISmartGateApi)
api.info.return_value = closed_door_response
ismartgateapi_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
data={
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert hass.states.get("cover.door1") is None
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get("cover.door1")
assert (
hass.states.get("cover.door1").attributes[ATTR_DEVICE_CLASS]
== DEVICE_CLASS_GARAGE
)
assert (
hass.states.get("cover.door2").attributes[ATTR_DEVICE_CLASS]
== DEVICE_CLASS_GATE
)
api.info.side_effect = Exception("Error")
async_fire_time_changed(hass, utcnow() + timedelta(hours=2))
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_UNAVAILABLE
api.info.side_effect = None
api.info.return_value = closed_door_response
async_fire_time_changed(hass, utcnow() + timedelta(hours=2))
await hass.async_block_till_done()
assert hass.states.get("cover.door1").state == STATE_CLOSED
@patch("homeassistant.components.gogogate2.common.ISmartGateApi")
async def test_device_info_ismartgate(ismartgateapi_mock, hass: HomeAssistant) -> None:
"""Test device info."""
device_registry = mock_device_registry(hass)
closed_door_response = _mocked_ismartgate_closed_door_response()
api = MagicMock(ISmartGateApi)
api.info.return_value = closed_door_response
ismartgateapi_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
title="mycontroller",
unique_id="xyz",
data={
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
device = device_registry.async_get_device({(DOMAIN, "xyz")}, set())
assert device
assert device.manufacturer == MANUFACTURER
assert device.name == "mycontroller"
assert device.model == "ismartgatePRO"
assert device.sw_version == "555"
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_device_info_gogogate2(gogogate2api_mock, hass: HomeAssistant) -> None:
"""Test device info."""
device_registry = mock_device_registry(hass)
open_door_response = _mocked_gogogate_open_door_response()
api = MagicMock(GogoGate2Api)
api.info.return_value = open_door_response
gogogate2api_mock.return_value = api
config_entry = MockConfigEntry(
domain=DOMAIN,
source=SOURCE_USER,
title="mycontroller",
unique_id="xyz",
data={
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.0.1",
CONF_USERNAME: "admin",
CONF_PASSWORD: "password",
},
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
device = device_registry.async_get_device({(DOMAIN, "xyz")}, set())
assert device
assert device.manufacturer == MANUFACTURER
assert device.name == "mycontroller"
assert device.model == "gogogate2"
assert device.sw_version == "222"
|
from datetime import timedelta
from functools import partial
import logging
from i2csense.bme280 import BME280 # pylint: disable=import-error
import smbus # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_NAME,
PERCENTAGE,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util.temperature import celsius_to_fahrenheit
_LOGGER = logging.getLogger(__name__)
CONF_I2C_ADDRESS = "i2c_address"
CONF_I2C_BUS = "i2c_bus"
CONF_OVERSAMPLING_TEMP = "oversampling_temperature"
CONF_OVERSAMPLING_PRES = "oversampling_pressure"
CONF_OVERSAMPLING_HUM = "oversampling_humidity"
CONF_OPERATION_MODE = "operation_mode"
CONF_T_STANDBY = "time_standby"
CONF_FILTER_MODE = "filter_mode"
CONF_DELTA_TEMP = "delta_temperature"
DEFAULT_NAME = "BME280 Sensor"
DEFAULT_I2C_ADDRESS = "0x76"
DEFAULT_I2C_BUS = 1
DEFAULT_OVERSAMPLING_TEMP = 1 # Temperature oversampling x 1
DEFAULT_OVERSAMPLING_PRES = 1 # Pressure oversampling x 1
DEFAULT_OVERSAMPLING_HUM = 1 # Humidity oversampling x 1
DEFAULT_OPERATION_MODE = 3 # Normal mode (forced mode: 2)
DEFAULT_T_STANDBY = 5 # Tstandby 5ms
DEFAULT_FILTER_MODE = 0 # Filter off
DEFAULT_DELTA_TEMP = 0.0
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=3)
SENSOR_TEMP = "temperature"
SENSOR_HUMID = "humidity"
SENSOR_PRESS = "pressure"
SENSOR_TYPES = {
SENSOR_TEMP: ["Temperature", None],
SENSOR_HUMID: ["Humidity", PERCENTAGE],
SENSOR_PRESS: ["Pressure", "mb"],
}
DEFAULT_MONITORED = [SENSOR_TEMP, SENSOR_HUMID, SENSOR_PRESS]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=DEFAULT_MONITORED): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): vol.Coerce(int),
vol.Optional(
CONF_OVERSAMPLING_TEMP, default=DEFAULT_OVERSAMPLING_TEMP
): vol.Coerce(int),
vol.Optional(
CONF_OVERSAMPLING_PRES, default=DEFAULT_OVERSAMPLING_PRES
): vol.Coerce(int),
vol.Optional(
CONF_OVERSAMPLING_HUM, default=DEFAULT_OVERSAMPLING_HUM
): vol.Coerce(int),
vol.Optional(CONF_OPERATION_MODE, default=DEFAULT_OPERATION_MODE): vol.Coerce(
int
),
vol.Optional(CONF_T_STANDBY, default=DEFAULT_T_STANDBY): vol.Coerce(int),
vol.Optional(CONF_FILTER_MODE, default=DEFAULT_FILTER_MODE): vol.Coerce(int),
vol.Optional(CONF_DELTA_TEMP, default=DEFAULT_DELTA_TEMP): vol.Coerce(float),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the BME280 sensor."""
SENSOR_TYPES[SENSOR_TEMP][1] = hass.config.units.temperature_unit
name = config[CONF_NAME]
i2c_address = config[CONF_I2C_ADDRESS]
bus = smbus.SMBus(config[CONF_I2C_BUS])
sensor = await hass.async_add_executor_job(
partial(
BME280,
bus,
i2c_address,
osrs_t=config[CONF_OVERSAMPLING_TEMP],
osrs_p=config[CONF_OVERSAMPLING_PRES],
osrs_h=config[CONF_OVERSAMPLING_HUM],
mode=config[CONF_OPERATION_MODE],
t_sb=config[CONF_T_STANDBY],
filter_mode=config[CONF_FILTER_MODE],
delta_temp=config[CONF_DELTA_TEMP],
logger=_LOGGER,
)
)
if not sensor.sample_ok:
_LOGGER.error("BME280 sensor not detected at %s", i2c_address)
return False
sensor_handler = await hass.async_add_executor_job(BME280Handler, sensor)
dev = []
try:
for variable in config[CONF_MONITORED_CONDITIONS]:
dev.append(
BME280Sensor(sensor_handler, variable, SENSOR_TYPES[variable][1], name)
)
except KeyError:
pass
async_add_entities(dev, True)
class BME280Handler:
"""BME280 sensor working in i2C bus."""
def __init__(self, sensor):
"""Initialize the sensor handler."""
self.sensor = sensor
self.update(True)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self, first_reading=False):
"""Read sensor data."""
self.sensor.update(first_reading)
class BME280Sensor(Entity):
"""Implementation of the BME280 sensor."""
def __init__(self, bme280_client, sensor_type, temp_unit, name):
"""Initialize the sensor."""
self.client_name = name
self._name = SENSOR_TYPES[sensor_type][0]
self.bme280_client = bme280_client
self.temp_unit = temp_unit
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
async def async_update(self):
"""Get the latest data from the BME280 and update the states."""
await self.hass.async_add_executor_job(self.bme280_client.update)
if self.bme280_client.sensor.sample_ok:
if self.type == SENSOR_TEMP:
temperature = round(self.bme280_client.sensor.temperature, 1)
if self.temp_unit == TEMP_FAHRENHEIT:
temperature = round(celsius_to_fahrenheit(temperature), 1)
self._state = temperature
elif self.type == SENSOR_HUMID:
self._state = round(self.bme280_client.sensor.humidity, 1)
elif self.type == SENSOR_PRESS:
self._state = round(self.bme280_client.sensor.pressure, 1)
else:
_LOGGER.warning("Bad update of sensor.%s", self.name)
|
import numpy as np
import chainer
from chainer.backends import cuda
from chainercv.links.model.faster_rcnn.utils.loc2bbox import loc2bbox
from chainercv.utils.bbox.non_maximum_suppression import \
non_maximum_suppression
class ProposalCreator(object):
"""Proposal regions are generated by calling this object.
The :meth:`__call__` of this object outputs object detection proposals by
applying estimated bounding box offsets
to a set of anchors.
This class takes parameters to control the number of bounding boxes to
pass to NMS and to keep after NMS.
If the parameters are negative, it uses all the bounding boxes supplied
or keeps all the bounding boxes returned by NMS.
This class is used for Region Proposal Networks introduced in
Faster R-CNN [#]_.
.. [#] Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun. \
Faster R-CNN: Towards Real-Time Object Detection with \
Region Proposal Networks. NIPS 2015.
Args:
nms_thresh (float): Threshold value used when calling NMS.
n_train_pre_nms (int): Number of top scored bounding boxes
to keep before passing to NMS in train mode.
n_train_post_nms (int): Number of top scored bounding boxes
to keep after passing to NMS in train mode.
n_test_pre_nms (int): Number of top scored bounding boxes
to keep before passing to NMS in test mode.
n_test_post_nms (int): Number of top scored bounding boxes
to keep after passing to NMS in test mode.
force_cpu_nms (bool): If this is :obj:`True`,
always use NMS in CPU mode. If :obj:`False`,
the NMS mode is selected based on the type of inputs.
min_size (int): A parameter used to determine the threshold for
discarding bounding boxes based on their sizes.
"""
def __init__(self,
nms_thresh=0.7,
n_train_pre_nms=12000,
n_train_post_nms=2000,
n_test_pre_nms=6000,
n_test_post_nms=300,
force_cpu_nms=False,
min_size=16
):
self.nms_thresh = nms_thresh
self.n_train_pre_nms = n_train_pre_nms
self.n_train_post_nms = n_train_post_nms
self.n_test_pre_nms = n_test_pre_nms
self.n_test_post_nms = n_test_post_nms
self.force_cpu_nms = force_cpu_nms
self.min_size = min_size
def __call__(self, loc, score,
anchor, img_size, scale=1.):
"""Propose RoIs.
Inputs :obj:`loc, score, anchor` refer to the same anchor when indexed
by the same index.
On notation, :math:`R` is the total number of anchors. This is equal
to the product of the height and the width of an image and the number of
anchor bases per pixel.
The type of the output is the same as that of the inputs.
Args:
loc (array): Predicted offsets and scaling to anchors.
Its shape is :math:`(R, 4)`.
score (array): Predicted foreground probability for anchors.
Its shape is :math:`(R,)`.
anchor (array): Coordinates of anchors. Its shape is
:math:`(R, 4)`.
img_size (tuple of ints): A tuple :obj:`height, width`,
which contains image size after scaling.
scale (float): The scaling factor used to scale an image after
reading it from a file.
Returns:
array:
An array of coordinates of proposal boxes.
Its shape is :math:`(S, 4)`. :math:`S` is less than
:obj:`self.n_test_post_nms` in test time and less than
:obj:`self.n_train_post_nms` in train time. :math:`S` depends on
the size of the predicted bounding boxes and the number of
bounding boxes discarded by NMS.
"""
if chainer.config.train:
n_pre_nms = self.n_train_pre_nms
n_post_nms = self.n_train_post_nms
else:
n_pre_nms = self.n_test_pre_nms
n_post_nms = self.n_test_post_nms
xp = cuda.get_array_module(loc)
loc = cuda.to_cpu(loc)
score = cuda.to_cpu(score)
anchor = cuda.to_cpu(anchor)
# Convert anchors into proposals via bbox transformations.
roi = loc2bbox(anchor, loc)
# Clip predicted boxes to image.
roi[:, slice(0, 4, 2)] = np.clip(
roi[:, slice(0, 4, 2)], 0, img_size[0])
roi[:, slice(1, 4, 2)] = np.clip(
roi[:, slice(1, 4, 2)], 0, img_size[1])
# Remove predicted boxes with either height or width < threshold.
min_size = self.min_size * scale
hs = roi[:, 2] - roi[:, 0]
ws = roi[:, 3] - roi[:, 1]
keep = np.where((hs >= min_size) & (ws >= min_size))[0]
roi = roi[keep, :]
score = score[keep]
# Sort all (proposal, score) pairs by score from highest to lowest.
# Take top pre_nms_topN (e.g. 6000).
order = score.ravel().argsort()[::-1]
if n_pre_nms > 0:
order = order[:n_pre_nms]
roi = roi[order, :]
# Apply nms (e.g. threshold = 0.7).
# Take after_nms_topN (e.g. 300).
if xp != np and not self.force_cpu_nms:
keep = non_maximum_suppression(
cuda.to_gpu(roi),
thresh=self.nms_thresh)
keep = cuda.to_cpu(keep)
else:
keep = non_maximum_suppression(
roi,
thresh=self.nms_thresh)
if n_post_nms > 0:
keep = keep[:n_post_nms]
roi = roi[keep]
if xp != np:
roi = cuda.to_gpu(roi)
return roi
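# A minimal CPU sketch with synthetic anchors (shapes and values are made up):
# boxes are (y_min, x_min, y_max, x_max), so sizes are added to the minima.
if __name__ == '__main__':
    R = 100
    mins = np.random.uniform(0, 200, size=(R, 2)).astype(np.float32)
    sizes = np.random.uniform(30, 100, size=(R, 2)).astype(np.float32)
    anchor = np.hstack((mins, mins + sizes))
    loc = np.random.normal(scale=0.1, size=(R, 4)).astype(np.float32)
    score = np.random.uniform(size=(R,)).astype(np.float32)
    creator = ProposalCreator(n_test_pre_nms=50, n_test_post_nms=10)
    with chainer.using_config('train', False):
        roi = creator(loc, score, anchor, img_size=(300, 300))
    assert roi.shape[1] == 4 and roi.shape[0] <= 10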
|
from flask_script import Manager
from app import app
manager = Manager(app)
@manager.command
def migrate():
from app.agents.models import Bot
try:
# create default bot
bot = Bot()
bot.name = "default"
bot.save()
print("Created default bot")
except Exception:
print("Default bot exists.. skipping..")
# import some default intents
from app.intents.controllers import import_json
with open("examples/default_intents.json", "r+") as json_file:
    stories = import_json(json_file)
print("Imported {} Stories".format(len(stories)))
try:
print("Training models..")
from app.nlu.tasks import train_models
train_models()
print("Training models finished..")
except Exception as e:
e = str(e)
if e == "NO_DATA":
e = "load Data first into mongodb. Reffer Readme."
print("Could not train models..skipping.. (reason: {})".format(e))
if __name__ == "__main__":
manager.run()
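# Hedged usage note: flask_script exposes the decorated function as a CLI
# subcommand, e.g. `python manage.py migrate` (script name assumed).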
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import callback
from .const import (
CONF_ARP_PING,
CONF_DETECTION_TIME,
CONF_FORCE_DHCP,
DEFAULT_API_PORT,
DEFAULT_DETECTION_TIME,
DEFAULT_NAME,
DOMAIN,
)
from .errors import CannotConnect, LoginError
from .hub import get_api
class MikrotikFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Mikrotik config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return MikrotikOptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
for entry in self.hass.config_entries.async_entries(DOMAIN):
if entry.data[CONF_HOST] == user_input[CONF_HOST]:
return self.async_abort(reason="already_configured")
if entry.data[CONF_NAME] == user_input[CONF_NAME]:
errors[CONF_NAME] = "name_exists"
break
try:
await self.hass.async_add_executor_job(get_api, self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except LoginError:
errors[CONF_USERNAME] = "invalid_auth"
errors[CONF_PASSWORD] = "invalid_auth"
if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_PORT, default=DEFAULT_API_PORT): int,
vol.Optional(CONF_VERIFY_SSL, default=False): bool,
}
),
errors=errors,
)
async def async_step_import(self, import_config):
"""Import Miktortik from config."""
import_config[CONF_DETECTION_TIME] = import_config[CONF_DETECTION_TIME].seconds
return await self.async_step_user(user_input=import_config)
class MikrotikOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle Mikrotik options."""
def __init__(self, config_entry):
"""Initialize Mikrotik options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the Mikrotik options."""
return await self.async_step_device_tracker()
async def async_step_device_tracker(self, user_input=None):
"""Manage the device tracker options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = {
vol.Optional(
CONF_FORCE_DHCP,
default=self.config_entry.options.get(CONF_FORCE_DHCP, False),
): bool,
vol.Optional(
CONF_ARP_PING,
default=self.config_entry.options.get(CONF_ARP_PING, False),
): bool,
vol.Optional(
CONF_DETECTION_TIME,
default=self.config_entry.options.get(
CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME
),
): int,
}
return self.async_show_form(
step_id="device_tracker", data_schema=vol.Schema(options)
)
|
from .. import conventions
from ..core.dataset import Dataset
from .common import AbstractDataStore, BackendEntrypoint
def guess_can_open_store(store_spec):
return isinstance(store_spec, AbstractDataStore)
def open_backend_dataset_store(
store,
*,
mask_and_scale=True,
decode_times=True,
concat_characters=True,
decode_coords=True,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
):
vars, attrs = store.load()
file_obj = store
encoding = store.get_encoding()
vars, attrs, coord_names = conventions.decode_cf_variables(
vars,
attrs,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
ds = Dataset(vars, attrs=attrs)
ds = ds.set_coords(coord_names.intersection(vars))
ds._file_obj = file_obj
ds.encoding = encoding
return ds
store_backend = BackendEntrypoint(
open_dataset=open_backend_dataset_store, guess_can_open=guess_can_open_store
)
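# A small sketch (assumes xarray's in-memory store from its test suite is
# importable): the entrypoint claims AbstractDataStore instances and nothing else.
if __name__ == "__main__":
    from xarray.backends.memory import InMemoryDataStore
    assert guess_can_open_store(InMemoryDataStore())
    assert not guess_can_open_store("data.nc")  # plain paths are not handled here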
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from typing import Optional
import unittest
from absl import flags
from absl.testing import parameterized
import dataclasses
import jinja2
import mock
from perfkitbenchmarker import data
from perfkitbenchmarker import errors
from perfkitbenchmarker.linux_benchmarks import hpcc_benchmark
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
def ReadDataFile(file_name: str) -> str:
with open(data.ResourcePath(file_name)) as fp:
return fp.read()
def ReadTestDataFile(file_name: str) -> str:
path = os.path.join(os.path.dirname(__file__), '..', 'data', file_name)
with open(path) as fp:
return fp.read()
def DefaultHpccDimensions(problem_size: int, num_rows: int,
num_columns: int) -> hpcc_benchmark.HpccDimensions:
return hpcc_benchmark.HpccDimensions(
problem_size=problem_size,
block_size=hpcc_benchmark.BLOCK_SIZE,
num_rows=num_rows,
num_columns=num_columns,
pfacts=2,
nbmins=4,
rfacts=1,
bcasts=1,
depths=1,
swap=2,
l1=0,
u=0,
equilibration=1)
class HPCCTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(HPCCTestCase, self).setUp()
FLAGS.hpcc_math_library = 'openblas'
path = os.path.join(os.path.dirname(__file__), '../data', 'hpcc-sample.txt')
with open(path) as fp:
self.contents = fp.read()
def assertContainsSubDict(self, super_dict, sub_dict):
"""Asserts that every item in sub_dict is in super_dict."""
for key, value in sub_dict.items():
self.assertEqual(super_dict[key], value)
def testParseHpccValues(self):
"""Tests parsing the HPCC values."""
benchmark_spec = mock.MagicMock()
samples = hpcc_benchmark.ParseOutput(self.contents)
hpcc_benchmark._AddCommonMetadata(samples, benchmark_spec, {})
self.assertEqual(46, len(samples))
# Verify metric values and units are parsed correctly.
actual = {metric: (value, units) for metric, value, units, _, _ in samples}
expected = {
'SingleDGEMM_Gflops': (25.6834, 'Gflop/s'),
'HPL_Tflops': (0.0331844, 'Tflop/s'),
'MPIRandomAccess_GUPs': (0.0134972, 'GUP/s'),
'StarSTREAM_Triad': (5.92076, 'GB/s'),
'SingleFFT_Gflops': (1.51539, 'Gflop/s'),
'SingleSTREAM_Copy': (11.5032, 'GB/s'),
'AvgPingPongBandwidth_GBytes': (9.01515, 'GB'),
'MPIRandomAccess_CheckTime': (50.9459, 'seconds'),
'MPIFFT_Gflops': (2.49383, 'Gflop/s'),
'MPIRandomAccess_LCG_CheckTime': (53.1656, 'seconds'),
'StarDGEMM_Gflops': (10.3432, 'Gflop/s'),
'SingleSTREAM_Triad': (12.2433, 'GB/s'),
'PTRANS_time': (9.24124, 'seconds'),
'AvgPingPongLatency_usec': (0.34659, 'usec'),
'RandomlyOrderedRingLatency_usec': (0.534474, 'usec'),
'StarFFT_Gflops': (1.0687, 'Gflop/s'),
'StarRandomAccess_GUPs': (0.00408809, 'GUP/s'),
'StarRandomAccess_LCG_GUPs': (0.00408211, 'GUP/s'),
'MPIRandomAccess_LCG_GUPs': (0.0132051, 'GUP/s'),
'MPIRandomAccess_time': (56.6096, 'seconds'),
'StarSTREAM_Scale': (5.16058, 'GB/s'),
'MaxPingPongBandwidth_GBytes': (9.61445, 'GB'),
'MPIRandomAccess_LCG_time': (60.0762, 'seconds'),
'MinPingPongLatency_usec': (0.304646, 'usec'),
'MPIFFT_time1': (0.603557, 'seconds'),
'MPIFFT_time0': (9.53674e-07, 'seconds'),
'MPIFFT_time3': (0.282359, 'seconds'),
'MPIFFT_time2': (1.89799, 'seconds'),
'MPIFFT_time5': (0.519769, 'seconds'),
'MPIFFT_time4': (3.73566, 'seconds'),
'MPIFFT_time6': (9.53674e-07, 'seconds'),
'SingleRandomAccess_GUPs': (0.0151415, 'GUP/s'),
'NaturallyOrderedRingBandwidth_GBytes': (1.93141, 'GB'),
'MaxPingPongLatency_usec': (0.384119, 'usec'),
'StarSTREAM_Add': (5.80539, 'GB/s'),
'SingleSTREAM_Add': (12.7265, 'GB/s'),
'SingleSTREAM_Scale': (11.6338, 'GB/s'),
'StarSTREAM_Copy': (5.22586, 'GB/s'),
'MPIRandomAccess_ExeUpdates': (764069508.0, 'updates'),
'HPL_time': (1243.1, 'seconds'),
'MPIRandomAccess_LCG_ExeUpdates': (793314388.0, 'updates'),
'NaturallyOrderedRingLatency_usec': (0.548363, 'usec'),
'PTRANS_GBs': (0.338561, 'GB/s'),
'RandomlyOrderedRingBandwidth_GBytes': (2.06416, 'GB'),
'SingleRandomAccess_LCG_GUPs': (0.0141455, 'GUP/s'),
'MinPingPongBandwidth_GBytes': (6.85064, 'GB'),
}
self.assertEqual(expected, actual)
def testParseHpccMetadata(self):
"""Tests parsing the HPCC metadata."""
benchmark_spec = mock.MagicMock()
samples = hpcc_benchmark.ParseOutput(self.contents)
hpcc_benchmark._AddCommonMetadata(samples, benchmark_spec, {})
self.assertEqual(46, len(samples))
results = {metric: metadata for metric, _, _, metadata, _ in samples}
for metadata in results.values():
self.assertEqual(metadata['hpcc_math_library'], 'openblas')
self.assertEqual(metadata['hpcc_version'], '1.5.0')
# Spot check a few benchmark-specific metrics.
self.assertContainsSubDict(
results['PTRANS_time'], {
'PTRANS_n': 19776.0,
'PTRANS_nb': 192.0,
'PTRANS_npcol': 2.0,
'PTRANS_nprow': 2.0,
'PTRANS_residual': 0.0
})
self.assertContainsSubDict(results['SingleRandomAccess_GUPs'],
{'RandomAccess_N': 268435456.0})
self.assertContainsSubDict(
results['MPIRandomAccess_LCG_GUPs'], {
'MPIRandomAccess_LCG_Algorithm': 0.0,
'MPIRandomAccess_LCG_Errors': 0.0,
'MPIRandomAccess_LCG_ErrorsFraction': 0.0,
'MPIRandomAccess_LCG_N': 1073741824.0,
'MPIRandomAccess_LCG_TimeBound': 60.0
})
self.assertContainsSubDict(results['StarRandomAccess_LCG_GUPs'],
{'RandomAccess_LCG_N': 268435456.0})
self.assertContainsSubDict(results['MPIFFT_time6'], {
'MPIFFT_N': 134217728.0,
'MPIFFT_Procs': 4.0,
'MPIFFT_maxErr': 2.31089e-15
})
self.assertContainsSubDict(results['StarFFT_Gflops'], {'FFT_N': 67108864.0})
self.assertContainsSubDict(results['SingleSTREAM_Copy'], {
'STREAM_Threads': 1.0,
'STREAM_VectorSize': 130363392.0
})
self.assertContainsSubDict(results['AvgPingPongLatency_usec'], {})
self.assertContainsSubDict(results['StarRandomAccess_GUPs'],
{'RandomAccess_N': 268435456.0})
def testCreateHpccConfig(self):
vm = mock.Mock(total_free_memory_kb=526536216)
vm.NumCpusForBenchmark.return_value = 128
spec = mock.Mock(vms=[None])
hpcc_benchmark.CreateHpccinf(vm, spec)
context = {
'problem_size': 231936,
'block_size': 192,
'num_rows': 8,
'num_columns': 16,
'pfacts': 2,
'nbmins': 4,
'rfacts': 1,
'bcasts': 1,
'depths': 1,
'swap': 2,
'l1': 0,
'u': 0,
'equilibration': 1,
}
vm.RenderTemplate.assert_called_with(
mock.ANY, remote_path='hpccinf.txt', context=context)
# Test that the template_path file name is correct
self.assertEqual('hpccinf.j2',
os.path.basename(vm.RenderTemplate.call_args[0][0]))
def testMpiRunErrorsOut(self):
vm = mock.Mock()
vm.NumCpusForBenchmark.return_value = 128
vm.RemoteCommand.return_value = 'HPL ERROR', ''
with self.assertRaises(errors.Benchmarks.RunError):
hpcc_benchmark.RunHpccSource([vm])
@parameterized.named_parameters(
('nomem_set_large', 2, 32, 74, None, 124416, 8, 8),
('mem_set', 2, 32, 74, 36000, 86784, 8, 8),
('nomem_set', 1, 48, 48, None, 70656, 6, 8))
def testCreateHpccDimensions(self, num_vms: int, num_vcpus: int,
memory_size_gb: int,
flag_memory_size_mb: Optional[int],
problem_size: int, num_rows: int,
num_columns: int) -> None:
if flag_memory_size_mb:
FLAGS.memory_size_mb = flag_memory_size_mb
expected = DefaultHpccDimensions(problem_size, num_rows, num_columns)
actual = hpcc_benchmark._CalculateHpccDimensions(
num_vms, num_vcpus, memory_size_gb * 1000 * 1024)
self.assertEqual(dataclasses.asdict(expected), dataclasses.asdict(actual))
def testRenderHpcConfig(self):
env = jinja2.Environment(undefined=jinja2.StrictUndefined)
template = env.from_string(ReadDataFile('hpccinf.j2'))
context = dataclasses.asdict(DefaultHpccDimensions(192, 10, 11))
text = template.render(**context).strip()
self.assertEqual(ReadTestDataFile('hpl.dat.txt').strip(), text)
if __name__ == '__main__':
unittest.main()
|
import re
from cryptography import x509
from cryptography.exceptions import UnsupportedAlgorithm, InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.x509 import NameOID
from flask import current_app
from marshmallow.exceptions import ValidationError
from lemur.auth.permissions import SensitiveDomainPermission
from lemur.common.utils import check_cert_signature, is_weekend
def common_name(value):
"""If the common name could be a domain name, apply domain validation rules."""
# Common name could be a domain name, or a human-readable name of the subject (often used in CA names or client
# certificates). As a simple heuristic, we assume that human-readable names always include a space.
# However, to avoid confusion for humans, we also don't count spaces at the beginning or end of the string.
if " " not in value.strip():
return sensitive_domain(value)
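# Illustrative behavior of the heuristic above (hypothetical inputs):
#   common_name("example.com")     -> validated as a domain via sensitive_domain()
#   common_name("My Client Cert")  -> contains a space, treated as human-readable
#   common_name(" padded.name ")   -> outer spaces ignored, still domain-validated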
def sensitive_domain(domain):
"""
Checks that the domain may be issued: users with the sensitive-domain permission bypass all checks;
otherwise the domain must match the allowed domain patterns and must not be marked as sensitive.
:param domain: domain name (str)
:return:
"""
if SensitiveDomainPermission().can():
# User has permission, no need to check anything
return
allowlist = current_app.config.get("LEMUR_ALLOWED_DOMAINS", [])
if allowlist and not any(re.match(pattern, domain) for pattern in allowlist):
raise ValidationError(
"Domain {0} does not match allowed domain patterns. "
"Contact an administrator to issue the certificate.".format(domain)
)
# Avoid circular import.
from lemur.domains import service as domain_service
if domain_service.is_domain_sensitive(domain):
raise ValidationError(
"Domain {0} has been marked as sensitive. "
"Contact an administrator to issue the certificate.".format(domain)
)
def encoding(oid_encoding):
"""
Determines if the specified oid type is valid.
:param oid_encoding:
:return:
"""
valid_types = ["b64asn1", "string", "ia5string"]
if oid_encoding.lower() not in [o_type.lower() for o_type in valid_types]:
raise ValidationError(
"Invalid Oid Encoding: {0} choose from {1}".format(
oid_encoding, ",".join(valid_types)
)
)
def sub_alt_type(alt_type):
"""
Determines if the specified subject alternate type is valid.
:param alt_type:
:return:
"""
valid_types = [
"DNSName",
"IPAddress",
"uniFormResourceIdentifier",
"directoryName",
"rfc822Name",
"registrationID",
"otherName",
"x400Address",
"EDIPartyName",
]
if alt_type.lower() not in [a_type.lower() for a_type in valid_types]:
raise ValidationError(
"Invalid SubAltName Type: {0} choose from {1}".format(
type, ",".join(valid_types)
)
)
def csr(data):
"""
Determines if the CSR is valid and allowed.
:param data:
:return:
"""
try:
request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend())
except Exception:
raise ValidationError("CSR presented is not valid.")
# Validate common name and SubjectAltNames
try:
for name in request.subject.get_attributes_for_oid(NameOID.COMMON_NAME):
common_name(name.value)
except ValueError as err:
current_app.logger.info("Error parsing Subject from CSR: %s", err)
raise ValidationError("Invalid Subject value in supplied CSR")
try:
alt_names = request.extensions.get_extension_for_class(
x509.SubjectAlternativeName
)
for name in alt_names.value.get_values_for_type(x509.DNSName):
sensitive_domain(name)
except x509.ExtensionNotFound:
pass
def dates(data):
if not data.get("validity_start") and data.get("validity_end"):
raise ValidationError("If validity start is specified so must validity end.")
if not data.get("validity_end") and data.get("validity_start"):
raise ValidationError("If validity end is specified so must validity start.")
if data.get("validity_start") and data.get("validity_end"):
if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True):
if is_weekend(data.get("validity_end")):
raise ValidationError("Validity end must not land on a weekend.")
if not data["validity_start"] < data["validity_end"]:
raise ValidationError("Validity start must be before validity end.")
if data.get("authority"):
if (
data.get("validity_start").date()
< data["authority"].authority_certificate.not_before.date()
):
raise ValidationError(
"Validity start must not be before {0}".format(
data["authority"].authority_certificate.not_before
)
)
if (
data.get("validity_end").date()
> data["authority"].authority_certificate.not_after.date()
):
raise ValidationError(
"Validity end must not be after {0}".format(
data["authority"].authority_certificate.not_after
)
)
return data
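# Illustrative inputs for dates() (hypothetical datetimes, no authority given):
#   {"validity_start": jan1, "validity_end": feb1} -> returned unchanged
#   {"validity_start": feb1, "validity_end": jan1} -> ValidationError (start not before end)
#   {"validity_end": feb1}                         -> ValidationError (start missing)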
def verify_private_key_match(key, cert, error_class=ValidationError):
"""
Checks that the supplied private key matches the certificate.
:param cert: Parsed certificate
:param key: Parsed private key
:param error_class: Exception class to raise on error
"""
if key.public_key().public_numbers() != cert.public_key().public_numbers():
raise error_class("Private key does not match certificate.")
def verify_cert_chain(certs, error_class=ValidationError):
"""
Verifies that the certificates in the chain are correct.
We don't bother with full cert validation but just check that each cert in the chain is signed
by the next one, to avoid basic human errors -- such as pasting the wrong certificate.
:param certs: List of parsed certificates, use parse_cert_chain()
:param error_class: Exception class to raise on error
"""
cert = certs[0]
for issuer in certs[1:]:
# Use the current cert's public key to verify the previous signature.
# "certificate validation is a complex problem that involves much more than just signature checks"
try:
check_cert_signature(cert, issuer.public_key())
except InvalidSignature:
# Avoid circular import.
from lemur.common import defaults
raise error_class(
"Incorrect chain certificate(s) provided: '%s' is not signed by '%s'"
% (
defaults.common_name(cert) or "Unknown",
defaults.common_name(issuer),
)
)
except UnsupportedAlgorithm as err:
current_app.logger.warning("Skipping chain validation: %s", err)
# Next loop will validate that *this issuer* cert is signed by the next chain cert.
cert = issuer
|
import re
import sys
import dns
import dns.exception
import dns.message
import dns.name
import dns.query
import dns.rcode
import dns.rdatatype
import dns.resolver
from lemur.extensions import sentry
from lemur.extensions import metrics
class DNSError(Exception):
"""Base class for DNS Exceptions."""
pass
class BadDomainError(DNSError):
"""Error for when a Bad Domain Name is given."""
def __init__(self, message):
self.message = message
class DNSResolveError(DNSError):
"""Error for DNS Resolution Errors."""
def __init__(self, message):
self.message = message
def is_valid_domain(domain):
"""Checks if a domain is syntactically valid and returns a bool"""
if domain[-1] == ".":
domain = domain[:-1]
if len(domain) > 253:
return False
fqdn_re = re.compile("(?=^.{1,63}$)(^(?:[a-z0-9_](?:-*[a-z0-9_])+)$|^[a-z0-9]$)", re.IGNORECASE)
return all(fqdn_re.match(d) for d in domain.split("."))
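# Quick illustrative checks:
#   is_valid_domain("example.com")    -> True
#   is_valid_domain("example.com.")   -> True (trailing root dot is stripped)
#   is_valid_domain("-bad-label.com") -> False (labels may not start with '-')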
def get_authoritative_nameserver(domain):
"""Get the authoritative nameservers for the given domain"""
if not is_valid_domain(domain):
raise BadDomainError(f"{domain} is not a valid FQDN")
n = dns.name.from_text(domain)
depth = 2
default = dns.resolver.get_default_resolver()
nameserver = default.nameservers[0]
last = False
while not last:
s = n.split(depth)
last = s[0].to_unicode() == u"@"
sub = s[1]
query = dns.message.make_query(sub, dns.rdatatype.NS)
response = dns.query.udp(query, nameserver)
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.error", "counter", 1)
if rcode == dns.rcode.NXDOMAIN:
raise DNSResolveError(f"{sub} does not exist.")
else:
raise DNSResolveError(f"Error: {dns.rcode.to_text(rcode)}")
if len(response.authority) > 0:
rrset = response.authority[0]
else:
rrset = response.answer[0]
rr = rrset[0]
if rr.rdtype != dns.rdatatype.SOA:
authority = rr.target
nameserver = default.query(authority).rrset[0].to_text()
depth += 1
return nameserver
def get_dns_records(domain, rdtype, nameserver):
"""Retrieves the DNS records matching the name and type and returns a list of records"""
records = []
try:
dns_resolver = dns.resolver.Resolver()
dns_resolver.nameservers = [nameserver]
dns_response = dns_resolver.query(domain, rdtype)
for rdata in dns_response:
for record in rdata.strings:
records.append(record.decode("utf-8"))
except dns.exception.DNSException:
sentry.captureException()
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.fail", "counter", 1)
return records
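# Illustrative usage (hypothetical names; performs live DNS queries):
#   ns = get_authoritative_nameserver("example.com")
#   txt = get_dns_records("_acme-challenge.example.com", "TXT", ns)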
|
import unittest
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker.providers.aws import aws_disk
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_COMPONENT = 'test_component'
class AwsDiskSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def testDefaults(self):
spec = aws_disk.AwsDiskSpec(_COMPONENT)
self.assertIsNone(spec.device_path)
self.assertIsNone(spec.disk_number)
self.assertIsNone(spec.disk_size)
self.assertIsNone(spec.disk_type)
self.assertIsNone(spec.iops)
self.assertIsNone(spec.mount_point)
self.assertEqual(spec.num_striped_disks, 1)
def testProvidedValid(self):
spec = aws_disk.AwsDiskSpec(
_COMPONENT, device_path='test_device_path', disk_number=1,
disk_size=75, disk_type='test_disk_type', iops=1000,
mount_point='/mountpoint', num_striped_disks=2)
self.assertEqual(spec.device_path, 'test_device_path')
self.assertEqual(spec.disk_number, 1)
self.assertEqual(spec.disk_size, 75)
self.assertEqual(spec.disk_type, 'test_disk_type')
self.assertEqual(spec.iops, 1000)
self.assertEqual(spec.mount_point, '/mountpoint')
self.assertEqual(spec.num_striped_disks, 2)
def testProvidedNone(self):
spec = aws_disk.AwsDiskSpec(_COMPONENT, iops=None)
self.assertIsNone(spec.iops)
def testInvalidOptionTypes(self):
with self.assertRaises(errors.Config.InvalidValue):
aws_disk.AwsDiskSpec(_COMPONENT, iops='ten')
def testNonPresentFlagsDoNotOverrideConfigs(self):
FLAGS.aws_provisioned_iops = 2000
FLAGS.data_disk_size = 100
spec = aws_disk.AwsDiskSpec(_COMPONENT, FLAGS, disk_size=75, iops=1000)
self.assertEqual(spec.disk_size, 75)
self.assertEqual(spec.iops, 1000)
def testPresentFlagsOverrideConfigs(self):
FLAGS['aws_provisioned_iops'].parse(2000)
FLAGS['data_disk_size'].parse(100)
spec = aws_disk.AwsDiskSpec(_COMPONENT, FLAGS, disk_size=75, iops=1000)
self.assertEqual(spec.disk_size, 100)
self.assertEqual(spec.iops, 2000)
if __name__ == '__main__':
unittest.main()
|
from raspyrfm_client import RaspyRFMClient
from raspyrfm_client.device_implementations.controlunit.actions import Action
from raspyrfm_client.device_implementations.controlunit.controlunit_constants import (
ControlUnitModel,
)
from raspyrfm_client.device_implementations.gateway.manufacturer.gateway_constants import (
GatewayModel,
)
from raspyrfm_client.device_implementations.manufacturer_constants import Manufacturer
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_SWITCHES,
DEVICE_DEFAULT_NAME,
)
import homeassistant.helpers.config_validation as cv
CONF_GATEWAY_MANUFACTURER = "gateway_manufacturer"
CONF_GATEWAY_MODEL = "gateway_model"
CONF_CONTROLUNIT_MANUFACTURER = "controlunit_manufacturer"
CONF_CONTROLUNIT_MODEL = "controlunit_model"
CONF_CHANNEL_CONFIG = "channel_config"
DEFAULT_HOST = "127.0.0.1"
# define configuration parameters
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_GATEWAY_MANUFACTURER): cv.string,
vol.Optional(CONF_GATEWAY_MODEL): cv.string,
vol.Required(CONF_SWITCHES): vol.Schema(
[
{
vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string,
vol.Required(CONF_CONTROLUNIT_MANUFACTURER): cv.string,
vol.Required(CONF_CONTROLUNIT_MODEL): cv.string,
vol.Required(CONF_CHANNEL_CONFIG): {cv.string: cv.match_all},
}
]
),
},
extra=vol.ALLOW_EXTRA,
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the RaspyRFM switch."""
gateway_manufacturer = config.get(
CONF_GATEWAY_MANUFACTURER, Manufacturer.SEEGEL_SYSTEME.value
)
gateway_model = config.get(CONF_GATEWAY_MODEL, GatewayModel.RASPYRFM.value)
host = config[CONF_HOST]
port = config.get(CONF_PORT)
switches = config[CONF_SWITCHES]
raspyrfm_client = RaspyRFMClient()
gateway = raspyrfm_client.get_gateway(
Manufacturer(gateway_manufacturer), GatewayModel(gateway_model), host, port
)
switch_entities = []
for switch in switches:
name = switch[CONF_NAME]
controlunit_manufacturer = switch[CONF_CONTROLUNIT_MANUFACTURER]
controlunit_model = switch[CONF_CONTROLUNIT_MODEL]
channel_config = switch[CONF_CHANNEL_CONFIG]
controlunit = raspyrfm_client.get_controlunit(
Manufacturer(controlunit_manufacturer), ControlUnitModel(controlunit_model)
)
controlunit.set_channel_config(**channel_config)
switch = RaspyRFMSwitch(raspyrfm_client, name, gateway, controlunit)
switch_entities.append(switch)
add_entities(switch_entities)
class RaspyRFMSwitch(SwitchEntity):
"""Representation of a RaspyRFM switch."""
def __init__(self, raspyrfm_client, name: str, gateway, controlunit):
"""Initialize the switch."""
self._raspyrfm_client = raspyrfm_client
self._name = name
self._gateway = gateway
self._controlunit = controlunit
self._state = None
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def should_poll(self):
"""Return True if polling should be used."""
return False
@property
def assumed_state(self):
"""Return True when the current state can not be queried."""
return True
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._raspyrfm_client.send(self._gateway, self._controlunit, Action.ON)
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the switch off."""
if Action.OFF in self._controlunit.get_supported_actions():
self._raspyrfm_client.send(self._gateway, self._controlunit, Action.OFF)
else:
self._raspyrfm_client.send(self._gateway, self._controlunit, Action.ON)
self._state = False
self.schedule_update_ha_state()
|
from __future__ import absolute_import
import unittest
from .common_imports import etree, BytesIO, HelperTestCase, fileInTestDir, make_doctest
class ETreeXMLSchemaTestCase(HelperTestCase):
def test_xmlschema(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.validate(tree_valid)) # retry valid
self.assertFalse(schema.validate(tree_invalid)) # retry invalid
def test_xmlschema_error_log(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.error_log.filter_from_errors())
self.assertTrue(schema.error_log.filter_types(
etree.ErrorTypes.SCHEMAV_ELEMENT_CONTENT))
self.assertTrue(schema.validate(tree_valid))
self.assertFalse(schema.error_log.filter_from_errors())
self.assertFalse(schema.validate(tree_invalid))
self.assertTrue(schema.error_log.filter_from_errors())
self.assertTrue(schema.error_log.filter_types(
etree.ErrorTypes.SCHEMAV_ELEMENT_CONTENT))
def test_xmlschema_error_log_path(self):
"""We don't have a guarantee that there will always be a path
for a _LogEntry object (or even a node for which to determine
a path), but at least when this test was created schema validation
errors always got a node and an XPath value. If that ever changes,
we can modify this test to something like::
self.assertTrue(error_path is None or tree_path == error_path)
That way, we can at least verify that if we did get a path value
it wasn't bogus.
"""
tree = self.parse('<a><b>42</b><b>dada</b></a>')
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:integer" maxOccurs="2"/>
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
schema.validate(tree)
tree_path = tree.getpath(tree.findall('b')[1])
error_path = schema.error_log[0].path
self.assertEqual(tree_path, error_path)
def test_xmlschema_default_attributes(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence minOccurs="4" maxOccurs="4">
<xsd:element name="b" type="BType" />
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BType">
<xsd:attribute name="hardy" type="xsd:string" default="hey" />
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema, attribute_defaults=True)
tree = self.parse('<a><b hardy="ho"/><b/><b hardy="ho"/><b/></a>')
root = tree.getroot()
self.assertEqual('ho', root[0].get('hardy'))
self.assertEqual(None, root[1].get('hardy'))
self.assertEqual('ho', root[2].get('hardy'))
self.assertEqual(None, root[3].get('hardy'))
self.assertTrue(schema(tree))
root = tree.getroot()
self.assertEqual('ho', root[0].get('hardy'))
self.assertEqual('hey', root[1].get('hardy'))
self.assertEqual('ho', root[2].get('hardy'))
self.assertEqual('hey', root[3].get('hardy'))
def test_xmlschema_parse(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
parser = etree.XMLParser(schema=schema)
tree_valid = self.parse('<a><b></b></a>', parser=parser)
self.assertEqual('a', tree_valid.getroot().tag)
self.assertRaises(etree.XMLSyntaxError,
self.parse, '<a><c></c></a>', parser=parser)
def test_xmlschema_parse_default_attributes(self):
# does not work as of libxml2 2.7.3
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence minOccurs="4" maxOccurs="4">
<xsd:element name="b" type="BType" />
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BType">
<xsd:attribute name="hardy" type="xsd:string" default="hey" />
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
parser = etree.XMLParser(schema=schema, attribute_defaults=True)
tree_valid = self.parse('<a><b hardy="ho"/><b/><b hardy="ho"/><b/></a>',
parser=parser)
root = tree_valid.getroot()
self.assertEqual('ho', root[0].get('hardy'))
self.assertEqual('hey', root[1].get('hardy'))
self.assertEqual('ho', root[2].get('hardy'))
self.assertEqual('hey', root[3].get('hardy'))
def test_xmlschema_parse_default_attributes_schema_config(self):
# does not work as of libxml2 2.7.3
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence minOccurs="4" maxOccurs="4">
<xsd:element name="b" type="BType" />
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BType">
<xsd:attribute name="hardy" type="xsd:string" default="hey" />
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema, attribute_defaults=True)
parser = etree.XMLParser(schema=schema)
tree_valid = self.parse('<a><b hardy="ho"/><b/><b hardy="ho"/><b/></a>',
parser=parser)
root = tree_valid.getroot()
self.assertEqual('ho', root[0].get('hardy'))
self.assertEqual('hey', root[1].get('hardy'))
self.assertEqual('ho', root[2].get('hardy'))
self.assertEqual('hey', root[3].get('hardy'))
def test_xmlschema_parse_fixed_attributes(self):
# does not work as of libxml2 2.7.3
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence minOccurs="3" maxOccurs="3">
<xsd:element name="b" type="BType" />
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BType">
<xsd:attribute name="hardy" type="xsd:string" fixed="hey" />
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
parser = etree.XMLParser(schema=schema, attribute_defaults=True)
tree_valid = self.parse('<a><b/><b hardy="hey"/><b/></a>',
parser=parser)
root = tree_valid.getroot()
self.assertEqual('hey', root[0].get('hardy'))
self.assertEqual('hey', root[1].get('hardy'))
self.assertEqual('hey', root[2].get('hardy'))
def test_xmlschema_stringio(self):
schema_file = BytesIO('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(file=schema_file)
parser = etree.XMLParser(schema=schema)
tree_valid = self.parse('<a><b></b></a>', parser=parser)
self.assertEqual('a', tree_valid.getroot().tag)
self.assertRaises(etree.XMLSyntaxError,
self.parse, '<a><c></c></a>', parser=parser)
def test_xmlschema_iterparse(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
xml = BytesIO('<a><b></b></a>')
events = [ (event, el.tag)
for (event, el) in etree.iterparse(xml, schema=schema) ]
self.assertEqual([('end', 'b'), ('end', 'a')],
events)
def test_xmlschema_iterparse_incomplete(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
xml = BytesIO('<a><b></b></a>')
event, element = next(iter(etree.iterparse(xml, schema=schema)))
self.assertEqual('end', event)
self.assertEqual('b', element.tag)
def test_xmlschema_iterparse_fail(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
self.assertRaises(
etree.XMLSyntaxError,
list, etree.iterparse(BytesIO('<a><c></c></a>'), schema=schema))
def test_xmlschema_elementtree_error(self):
self.assertRaises(ValueError, etree.XMLSchema, etree.ElementTree())
def test_xmlschema_comment_error(self):
self.assertRaises(ValueError, etree.XMLSchema, etree.Comment('TEST'))
def test_xmlschema_illegal_validation_error(self):
schema = self.parse('''
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="xsd:string"/>
</xsd:schema>
''')
schema = etree.XMLSchema(schema)
root = etree.Element('a')
root.text = 'TEST'
self.assertTrue(schema(root))
self.assertRaises(ValueError, schema, etree.Comment('TEST'))
self.assertRaises(ValueError, schema, etree.PI('a', 'text'))
self.assertRaises(ValueError, schema, etree.Entity('text'))
def test_xmlschema_invalid_schema1(self):
schema = self.parse('''\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
self.assertRaises(etree.XMLSchemaParseError,
etree.XMLSchema, schema)
def test_xmlschema_invalid_schema2(self):
schema = self.parse('<test/>')
self.assertRaises(etree.XMLSchemaParseError,
etree.XMLSchema, schema)
def test_xmlschema_file(self):
# this will only work if we access the file through a path or
# a file object.
f = open(fileInTestDir('test.xsd'), 'rb')
try:
schema = etree.XMLSchema(file=f)
finally:
f.close()
tree_valid = self.parse('<a><b></b></a>')
self.assertTrue(schema.validate(tree_valid))
def test_xmlschema_import_file(self):
# this will only work if we access the file through a path or
# a file object.
schema = etree.XMLSchema(file=fileInTestDir('test_import.xsd'))
tree_valid = self.parse(
'<a:x xmlns:a="http://codespeak.net/lxml/schema/ns1"><b></b></a:x>')
self.assertTrue(schema.validate(tree_valid))
def test_xmlschema_shortcut(self):
tree_valid = self.parse('<a><b></b></a>')
tree_invalid = self.parse('<a><c></c></a>')
schema = self.parse('''\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<xsd:element name="a" type="AType"/>
<xsd:complexType name="AType">
<xsd:sequence>
<xsd:element name="b" type="xsd:string" />
</xsd:sequence>
</xsd:complexType>
</xsd:schema>
''')
self.assertTrue(tree_valid.xmlschema(schema))
self.assertFalse(tree_invalid.xmlschema(schema))
def test_create_from_partial_doc(self):
# this used to crash because the schema part was not properly copied out
wsdl = self.parse('''\
<wsdl:definitions
xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/"
xmlns:xs="http://www.w3.org/2001/XMLSchema">
<wsdl:types>
<xs:schema>
</xs:schema>
</wsdl:types>
</wsdl:definitions>
''')
schema_element = wsdl.find(
"{http://schemas.xmlsoap.org/wsdl/}types/"
"{http://www.w3.org/2001/XMLSchema}schema"
)
etree.XMLSchema(schema_element)
etree.XMLSchema(schema_element)
etree.XMLSchema(schema_element)
class ETreeXMLSchemaResolversTestCase(HelperTestCase):
resolver_schema_int = BytesIO("""\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:etype="http://codespeak.net/lxml/test/external"
targetNamespace="http://codespeak.net/lxml/test/internal">
<xsd:import namespace="http://codespeak.net/lxml/test/external" schemaLocation="XXX.xsd" />
<xsd:element name="a" type="etype:AType"/>
</xsd:schema>""")
resolver_schema_int2 = BytesIO("""\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:etype="http://codespeak.net/lxml/test/external"
targetNamespace="http://codespeak.net/lxml/test/internal">
<xsd:import namespace="http://codespeak.net/lxml/test/external" schemaLocation="YYY.xsd" />
<xsd:element name="a" type="etype:AType"/>
</xsd:schema>""")
resolver_schema_ext = """\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://codespeak.net/lxml/test/external">
<xsd:complexType name="AType">
<xsd:sequence><xsd:element name="b" type="xsd:string" minOccurs="0" maxOccurs="unbounded" /></xsd:sequence>
</xsd:complexType>
</xsd:schema>"""
class simple_resolver(etree.Resolver):
def __init__(self, schema):
self.schema = schema
def resolve(self, url, id, context):
assert url == 'XXX.xsd'
return self.resolve_string(self.schema, context)
# tests:
def test_xmlschema_resolvers(self):
# test that resolvers work with schema.
parser = etree.XMLParser()
parser.resolvers.add(self.simple_resolver(self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
def test_xmlschema_resolvers_root(self):
# test that the default resolver will get called if there's no
# specific parser resolver.
root_resolver = self.simple_resolver(self.resolver_schema_ext)
etree.get_default_parser().resolvers.add(root_resolver)
schema_doc = etree.parse(self.resolver_schema_int)
schema = etree.XMLSchema(schema_doc)
etree.get_default_parser().resolvers.remove(root_resolver)
def test_xmlschema_resolvers_noroot(self):
# test that the default resolver will not get called when a
# more specific resolver is registered.
class res_root(etree.Resolver):
def resolve(self, url, id, context):
assert False
return None
root_resolver = res_root()
etree.get_default_parser().resolvers.add(root_resolver)
parser = etree.XMLParser()
parser.resolvers.add(self.simple_resolver(self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
etree.get_default_parser().resolvers.remove(root_resolver)
def test_xmlschema_nested_resolvers(self):
# test that resolvers work in a nested fashion.
resolver_schema = self.resolver_schema_ext
class res_nested(etree.Resolver):
def __init__(self, ext_schema):
self.ext_schema = ext_schema
def resolve(self, url, id, context):
assert url == 'YYY.xsd'
return self.resolve_string(self.ext_schema, context)
class res(etree.Resolver):
def __init__(self, ext_schema_1, ext_schema_2):
self.ext_schema_1 = ext_schema_1
self.ext_schema_2 = ext_schema_2
def resolve(self, url, id, context):
assert url == 'XXX.xsd'
new_parser = etree.XMLParser()
new_parser.resolvers.add(res_nested(self.ext_schema_2))
new_schema_doc = etree.parse(self.ext_schema_1, parser = new_parser)
new_schema = etree.XMLSchema(new_schema_doc)
return self.resolve_string(resolver_schema, context)
parser = etree.XMLParser()
parser.resolvers.add(res(self.resolver_schema_int2, self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ETreeXMLSchemaTestCase)])
suite.addTests([unittest.makeSuite(ETreeXMLSchemaResolversTestCase)])
suite.addTests(
[make_doctest('../../../doc/validation.txt')])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_PRESENCE,
BinarySensorEntity,
)
from .const import DOMAIN
BINARY_SENSOR_PREFIX = "Appliance"
PRESENCE_PREFIX = "Presence"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Smappee binary sensor."""
smappee_base = hass.data[DOMAIN][config_entry.entry_id]
entities = []
for service_location in smappee_base.smappee.service_locations.values():
for appliance_id, appliance in service_location.appliances.items():
if appliance.type != "Find me" and appliance.source_type == "NILM":
entities.append(
SmappeeAppliance(
smappee_base=smappee_base,
service_location=service_location,
appliance_id=appliance_id,
appliance_name=appliance.name,
appliance_type=appliance.type,
)
)
if not smappee_base.smappee.local_polling:
# presence value only available in cloud env
entities.append(SmappeePresence(smappee_base, service_location))
async_add_entities(entities, True)
class SmappeePresence(BinarySensorEntity):
"""Implementation of a Smappee presence binary sensor."""
def __init__(self, smappee_base, service_location):
"""Initialize the Smappee sensor."""
self._smappee_base = smappee_base
self._service_location = service_location
self._state = self._service_location.is_present
@property
def name(self):
"""Return the name of the binary sensor."""
return f"{self._service_location.service_location_name} - {PRESENCE_PREFIX}"
@property
def is_on(self):
"""Return if the binary sensor is turned on."""
return self._state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_PRESENCE
@property
def unique_id(
self,
):
"""Return the unique ID for this binary sensor."""
return (
f"{self._service_location.device_serial_number}-"
f"{self._service_location.service_location_id}-"
f"{DEVICE_CLASS_PRESENCE}"
)
@property
def device_info(self):
"""Return the device info for this binary sensor."""
return {
"identifiers": {(DOMAIN, self._service_location.device_serial_number)},
"name": self._service_location.service_location_name,
"manufacturer": "Smappee",
"model": self._service_location.device_model,
"sw_version": self._service_location.firmware_version,
}
async def async_update(self):
"""Get the latest data from Smappee and update the state."""
await self._smappee_base.async_update()
self._state = self._service_location.is_present
class SmappeeAppliance(BinarySensorEntity):
"""Implementation of a Smappee binary sensor."""
def __init__(
self,
smappee_base,
service_location,
appliance_id,
appliance_name,
appliance_type,
):
"""Initialize the Smappee sensor."""
self._smappee_base = smappee_base
self._service_location = service_location
self._appliance_id = appliance_id
self._appliance_name = appliance_name
self._appliance_type = appliance_type
self._state = False
@property
def name(self):
"""Return the name of the sensor."""
return (
f"{self._service_location.service_location_name} - "
f"{BINARY_SENSOR_PREFIX} - "
f"{self._appliance_name if self._appliance_name != '' else self._appliance_type}"
)
@property
def is_on(self):
"""Return if the binary sensor is turned on."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend."""
icon_mapping = {
"Car Charger": "mdi:car",
"Coffeemaker": "mdi:coffee",
"Clothes Dryer": "mdi:tumble-dryer",
"Clothes Iron": "mdi:hanger",
"Dishwasher": "mdi:dishwasher",
"Lights": "mdi:lightbulb",
"Fan": "mdi:fan",
"Freezer": "mdi:fridge",
"Microwave": "mdi:microwave",
"Oven": "mdi:stove",
"Refrigerator": "mdi:fridge",
"Stove": "mdi:stove",
"Washing Machine": "mdi:washing-machine",
"Water Pump": "mdi:water-pump",
}
return icon_mapping.get(self._appliance_type)
@property
def unique_id(
self,
):
"""Return the unique ID for this binary sensor."""
return (
f"{self._service_location.device_serial_number}-"
f"{self._service_location.service_location_id}-"
f"appliance-{self._appliance_id}"
)
@property
def device_info(self):
"""Return the device info for this binary sensor."""
return {
"identifiers": {(DOMAIN, self._service_location.device_serial_number)},
"name": self._service_location.service_location_name,
"manufacturer": "Smappee",
"model": self._service_location.device_model,
"sw_version": self._service_location.firmware_version,
}
async def async_update(self):
"""Get the latest data from Smappee and update the state."""
await self._smappee_base.async_update()
appliance = self._service_location.appliances.get(self._appliance_id)
self._state = bool(appliance.state)
|
from homeassistant.components import notify
import homeassistant.components.persistent_notification as pn
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
async def test_async_send_message(hass: HomeAssistant):
"""Test sending a message to notify.persistent_notification service."""
await async_setup_component(hass, pn.DOMAIN, {"core": {}})
await async_setup_component(hass, notify.DOMAIN, {})
await hass.async_block_till_done()
message = {"message": "Hello", "title": "Test notification"}
await hass.services.async_call(
notify.DOMAIN, notify.SERVICE_PERSISTENT_NOTIFICATION, message
)
await hass.async_block_till_done()
entity_ids = hass.states.async_entity_ids(pn.DOMAIN)
assert len(entity_ids) == 1
state = hass.states.get(entity_ids[0])
assert state.attributes.get("message") == "Hello"
assert state.attributes.get("title") == "Test notification"
|
from flexx import flx
# generate self-signed certificate for this example
import os
CERTFILE = '/tmp/self-signed.crt'
KEYFILE = '/tmp/self-signed.key'
os.system('openssl req -x509 -nodes -days 1 -batch -newkey rsa:2048 '
'-keyout %s -out %s' % (KEYFILE, CERTFILE))
# use the self-signed certificate as if specified in normal config
flx.config.ssl_certfile = CERTFILE
flx.config.ssl_keyfile = KEYFILE
# Some very secret Model
class Example(flx.Widget):
def init(self):
flx.Button(text='Secret Button')
# run application
flx.serve(Example, 'Example')
flx.start()
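# Browsers will warn about the self-signed certificate; for a quick smoke test
# you can fetch the page while skipping verification (illustrative):
#   curl -k https://localhost:<port printed by flexx at startup>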
|
import asyncio
import os
import shutil
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
DOMAIN as DOMAIN_MP,
SERVICE_PLAY_MEDIA,
)
import homeassistant.components.tts as tts
from homeassistant.config import async_process_ha_core_config
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import assert_setup_component, get_test_home_assistant, mock_service
class TestTTSMaryTTSPlatform:
"""Test the speech component."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
asyncio.run_coroutine_threadsafe(
async_process_ha_core_config(
self.hass, {"internal_url": "http://example.local:8123"}
),
self.hass.loop,
)
self.host = "localhost"
self.port = 59125
self.params = {
"INPUT_TEXT": "HomeAssistant",
"INPUT_TYPE": "TEXT",
"OUTPUT_TYPE": "AUDIO",
"LOCALE": "en_US",
"AUDIO": "WAVE_FILE",
"VOICE": "cmu-slt-hsmm",
}
def teardown_method(self):
"""Stop everything that was started."""
default_tts = self.hass.config.path(tts.DEFAULT_CACHE_DIR)
if os.path.isdir(default_tts):
shutil.rmtree(default_tts)
self.hass.stop()
def test_setup_component(self):
"""Test setup component."""
config = {tts.DOMAIN: {"platform": "marytts"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
def test_service_say(self):
"""Test service call say."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
config = {tts.DOMAIN: {"platform": "marytts"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
with patch(
"homeassistant.components.marytts.tts.MaryTTS.speak",
return_value=b"audio",
) as mock_speak:
self.hass.services.call(
tts.DOMAIN,
"marytts_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "HomeAssistant",
},
)
self.hass.block_till_done()
mock_speak.assert_called_once()
mock_speak.assert_called_with("HomeAssistant", {})
assert len(calls) == 1
assert calls[0].data[ATTR_MEDIA_CONTENT_ID].find(".wav") != -1
def test_service_say_with_effect(self):
"""Test service call say with effects."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
config = {
tts.DOMAIN: {"platform": "marytts", "effect": {"Volume": "amount:2.0;"}}
}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
with patch(
"homeassistant.components.marytts.tts.MaryTTS.speak",
return_value=b"audio",
) as mock_speak:
self.hass.services.call(
tts.DOMAIN,
"marytts_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "HomeAssistant",
},
)
self.hass.block_till_done()
mock_speak.assert_called_once()
mock_speak.assert_called_with("HomeAssistant", {"Volume": "amount:2.0;"})
assert len(calls) == 1
assert calls[0].data[ATTR_MEDIA_CONTENT_ID].find(".wav") != -1
def test_service_say_http_error(self):
"""Test service call say."""
calls = mock_service(self.hass, DOMAIN_MP, SERVICE_PLAY_MEDIA)
config = {tts.DOMAIN: {"platform": "marytts"}}
with assert_setup_component(1, tts.DOMAIN):
setup_component(self.hass, tts.DOMAIN, config)
with patch(
"homeassistant.components.marytts.tts.MaryTTS.speak",
side_effect=Exception(),
) as mock_speak:
self.hass.services.call(
tts.DOMAIN,
"marytts_say",
{
"entity_id": "media_player.something",
tts.ATTR_MESSAGE: "HomeAssistant",
},
)
self.hass.block_till_done()
mock_speak.assert_called_once()
assert len(calls) == 0
|
from unittest import mock
import pytest
from paasta_tools import spark_tools
def test_get_webui_url():
with mock.patch("socket.getfqdn", return_value="1.2.3.4"):
assert spark_tools.get_webui_url("1234") == "http://1.2.3.4:1234"
@pytest.mark.parametrize(
"cmd,expected",
[
("spark-shell", "spark-shell --conf spark.max.cores=100"),
(
"/venv/bin/pyspark test.py",
"/venv/bin/pyspark --conf spark.max.cores=100 test.py",
),
(
"spark-submit script.py --other args",
"spark-submit --conf spark.max.cores=100 script.py --other args",
),
("history-server", "history-server"),
],
)
def test_inject_spark_conf_str(cmd, expected):
assert (
spark_tools.inject_spark_conf_str(cmd, "--conf spark.max.cores=100") == expected
)
|
import cherrypy
from cherrypy import tools, url
import os
local_dir = os.path.join(os.getcwd(), os.path.dirname(__file__))
@cherrypy.config(**{'tools.log_tracebacks.on': True})
class Root:
"""Declaration of the CherryPy app URI structure."""
@cherrypy.expose
def index(self):
"""Render HTML-template at the root path of the web-app."""
return """<html>
<body>Try some <a href='%s?a=7'>other</a> path,
or a <a href='%s?n=14'>default</a> path.<br />
Or, just look at the pretty picture:<br />
<img src='%s' />
</body></html>""" % (url('other'), url('else'),
url('files/made_with_cherrypy_small.png'))
@cherrypy.expose
def default(self, *args, **kwargs):
"""Render catch-all args and kwargs."""
return 'args: %s kwargs: %s' % (args, kwargs)
@cherrypy.expose
def other(self, a=2, b='bananas', c=None):
"""Render number of fruits based on third argument."""
cherrypy.response.headers['Content-Type'] = 'text/plain'
if c is None:
return 'Have %d %s.' % (int(a), b)
else:
return 'Have %d %s, %s.' % (int(a), b, c)
files = tools.staticdir.handler(
section='/files',
dir=os.path.join(local_dir, 'static'),
# Ignore .php files, etc.
match=r'\.(css|gif|html?|ico|jpe?g|js|png|swf|xml)$',
)
root = Root()
# Uncomment the following to use your own favicon instead of CP's default.
# favicon_path = os.path.join(local_dir, "favicon.ico")
# root.favicon_ico = tools.staticfile.handler(filename=favicon_path)
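# Illustrative way to serve this app (cherrypy.quickstart is the standard
# entry point; mounting at the site root is an assumption of this sketch):
# if __name__ == '__main__':
#     cherrypy.quickstart(root, '/')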
|
import os
import inspect
try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping
NO_DEFAULT = object()
# must not inherit from AttributeError, so as not to interfere with Python's attribute-lookup flow
class EnvironmentVariableError(KeyError):
pass
class TypedEnv(MutableMapping):
"""
This object can be used in 'exploratory' mode:
nv = TypedEnv()
print(nv.HOME)
It can also be used as a parser and validator of environment variables:
class MyEnv(TypedEnv):
username = TypedEnv.Str("USER", default='anonymous')
path = TypedEnv.CSV("PATH", separator=":")
tmp = TypedEnv.Str("TMP TEMP".split()) # support 'fallback' var-names
nv = MyEnv()
print(nv.username)
for p in nv.path:
print(p)
try:
print(nv.tmp)
except EnvironmentVariableError:
print("TMP/TEMP is not defined")
else:
assert False
"""
__slots__ = ["_env", "_defined_keys"]
class _BaseVar(object):
def __init__(self, name, default=NO_DEFAULT):
self.names = tuple(name) if isinstance(name, (tuple, list)) else (name,)
self.name = self.names[0]
self.default = default
def convert(self, value):
return value
def __get__(self, instance, owner):
if instance is None:  # attribute accessed on the class, not an instance
return self
try:
return self.convert(instance._raw_get(*self.names))
except EnvironmentVariableError:
if self.default is NO_DEFAULT:
raise
return self.default
def __set__(self, instance, value):
instance[self.name] = value
class Str(_BaseVar):
pass
class Bool(_BaseVar):
"""
Converts 'yes|true|1|no|false|0' to the appropriate boolean value.
Case-insensitive. Throws a ``ValueError`` for any other value.
"""
def convert(self, s):
s = s.lower()
if s not in ("yes", "no", "true", "false", "1", "0"):
raise ValueError("Unrecognized boolean value: %r" % (s,))
return s in ("yes", "true", "1")
def __set__(self, instance, value):
instance[self.name] = "yes" if value else "no"
class Int(_BaseVar):
convert = staticmethod(int)
class Float(_BaseVar):
convert = staticmethod(float)
class CSV(_BaseVar):
"""
Comma-separated-strings get split using the ``separator`` (',' by default) into
a list of objects of type ``type`` (``str`` by default).
"""
def __init__(self, name, default=NO_DEFAULT, type=str, separator=","):
super(TypedEnv.CSV, self).__init__(name, default=default)
self.type = type
self.separator = separator
def __set__(self, instance, value):
instance[self.name] = self.separator.join(map(str, value))
def convert(self, value):
return [self.type(v.strip()) for v in value.split(self.separator)]
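# Example (illustrative): CSV("PATH", separator=":") converts
# "/usr/bin:/bin" to ["/usr/bin", "/bin"] on read.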
# =========
def __init__(self, env=os.environ):
self._env = env
self._defined_keys = set(k for (k, v) in inspect.getmembers(self.__class__) if isinstance(v, self._BaseVar))
def __iter__(self):
return iter(dir(self))
def __len__(self):
return len(self._env)
def __delitem__(self, name):
del self._env[name]
def __setitem__(self, name, value):
self._env[name] = str(value)
def _raw_get(self, *key_names):
for key in key_names:
value = self._env.get(key, NO_DEFAULT)
if value is not NO_DEFAULT:
return value
else:
raise EnvironmentVariableError(key_names[0])
def __contains__(self, key):
try:
self._raw_get(key)
except EnvironmentVariableError:
return False
else:
return True
def __getattr__(self, name):
# if we're here then there was no descriptor defined
try:
return self._raw_get(name)
except EnvironmentVariableError:
raise AttributeError("%s has no attribute %r" % (self.__class__, name))
def __getitem__(self, key):
return getattr(self, key) # delegate through the descriptors
def get(self, key, default=None):
try:
return self[key]
except EnvironmentVariableError:
return default
def __dir__(self):
if self._defined_keys:
# return only defined
return sorted(self._defined_keys)
# return whatever is in the environment (for convenience)
members = set(self._env.keys())
members.update(dir(self.__class__))
return sorted(members)
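# A minimal usage sketch of TypedEnv against a plain dict (no real environment
# variables required; all names below are illustrative):
def _typed_env_example():
    class BuildEnv(TypedEnv):
        workers = TypedEnv.Int("BUILD_WORKERS", default=1)
        verbose = TypedEnv.Bool("VERBOSE", default=False)
    env = BuildEnv({"BUILD_WORKERS": "4", "VERBOSE": "yes"})
    assert env.workers == 4
    assert env.verbose is True
    env.verbose = False  # Bool.__set__ writes the string "no" back to the mapping
    assert env._env["VERBOSE"] == "no"
    return env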
|
from typing import Any, Callable, Dict
from homeassistant.components.dynalite.bridge import DynaliteBridge
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from .const import DOMAIN, LOGGER
def async_setup_entry_base(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: Callable,
platform: str,
entity_from_device: Callable,
) -> None:
"""Record the async_add_entities function to add them later when received from Dynalite."""
LOGGER.debug("Setting up %s entry = %s", platform, config_entry.data)
bridge = hass.data[DOMAIN][config_entry.entry_id]
@callback
def async_add_entities_platform(devices):
# assumes it is called with a single platform
added_entities = []
for device in devices:
added_entities.append(entity_from_device(device, bridge))
if added_entities:
async_add_entities(added_entities)
bridge.register_add_devices(platform, async_add_entities_platform)
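# Illustrative wiring from a platform module (the platform name and entity
# class here are hypothetical; each real platform supplies its own factory):
# async def async_setup_entry(hass, config_entry, async_add_entities):
#     async_setup_entry_base(
#         hass, config_entry, async_add_entities, "light",
#         lambda device, bridge: DynaliteLight(device, bridge),
#     )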
class DynaliteBase(Entity):
"""Base class for the Dynalite entities."""
def __init__(self, device: Any, bridge: DynaliteBridge) -> None:
"""Initialize the base class."""
self._device = device
self._bridge = bridge
self._unsub_dispatchers = []
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._device.name
@property
def unique_id(self) -> str:
"""Return the unique ID of the entity."""
return self._device.unique_id
@property
def available(self) -> bool:
"""Return if entity is available."""
return self._device.available
@property
def device_info(self) -> Dict[str, Any]:
"""Device info for this entity."""
return {
"identifiers": {(DOMAIN, self._device.unique_id)},
"name": self.name,
"manufacturer": "Dynalite",
}
async def async_added_to_hass(self) -> None:
"""Added to hass so need to register to dispatch."""
# register for device specific update
self._unsub_dispatchers.append(
async_dispatcher_connect(
self.hass,
self._bridge.update_signal(self._device),
self.async_schedule_update_ha_state,
)
)
# register for wide update
self._unsub_dispatchers.append(
async_dispatcher_connect(
self.hass,
self._bridge.update_signal(),
self.async_schedule_update_ha_state,
)
)
async def async_will_remove_from_hass(self) -> None:
"""Unregister signal dispatch listeners when being removed."""
for unsub in self._unsub_dispatchers:
unsub()
self._unsub_dispatchers = []
|
import re
from unittest import mock
import pytest
import zigpy.profiles.zha
import zigpy.quirks
import zigpy.types
import zigpy.zcl.clusters.closures
import zigpy.zcl.clusters.general
import zigpy.zcl.clusters.security
import zigpy.zcl.foundation as zcl_f
import homeassistant.components.zha.binary_sensor
import homeassistant.components.zha.core.channels as zha_channels
import homeassistant.components.zha.core.channels.base as base_channels
import homeassistant.components.zha.core.const as zha_const
import homeassistant.components.zha.core.discovery as disc
import homeassistant.components.zha.core.registries as zha_regs
import homeassistant.components.zha.cover
import homeassistant.components.zha.device_tracker
import homeassistant.components.zha.fan
import homeassistant.components.zha.light
import homeassistant.components.zha.lock
import homeassistant.components.zha.sensor
import homeassistant.components.zha.switch
import homeassistant.helpers.entity_registry
from .common import get_zha_gateway
from .zha_devices_list import DEVICES
from tests.async_mock import AsyncMock, patch
NO_TAIL_ID = re.compile("_\\d$")
@pytest.fixture
def channels_mock(zha_device_mock):
"""Channels mock factory."""
def _mock(
endpoints,
ieee="00:11:22:33:44:55:66:77",
manufacturer="mock manufacturer",
model="mock model",
node_desc=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00",
patch_cluster=False,
):
zha_dev = zha_device_mock(
endpoints, ieee, manufacturer, model, node_desc, patch_cluster=patch_cluster
)
channels = zha_channels.Channels.new(zha_dev)
return channels
return _mock
@patch(
"zigpy.zcl.clusters.general.Identify.request",
new=AsyncMock(return_value=[mock.sentinel.data, zcl_f.Status.SUCCESS]),
)
@pytest.mark.parametrize("device", DEVICES)
async def test_devices(
device,
hass_disable_services,
zigpy_device_mock,
zha_device_joined_restored,
):
"""Test device discovery."""
entity_registry = await homeassistant.helpers.entity_registry.async_get_registry(
hass_disable_services
)
zigpy_device = zigpy_device_mock(
device["endpoints"],
"00:11:22:33:44:55:66:77",
device["manufacturer"],
device["model"],
node_descriptor=device["node_descriptor"],
patch_cluster=False,
)
cluster_identify = _get_first_identify_cluster(zigpy_device)
if cluster_identify:
cluster_identify.request.reset_mock()
orig_new_entity = zha_channels.ChannelPool.async_new_entity
_dispatch = mock.MagicMock(wraps=orig_new_entity)
try:
zha_channels.ChannelPool.async_new_entity = lambda *a, **kw: _dispatch(*a, **kw)
zha_dev = await zha_device_joined_restored(zigpy_device)
await hass_disable_services.async_block_till_done()
finally:
zha_channels.ChannelPool.async_new_entity = orig_new_entity
entity_ids = hass_disable_services.states.async_entity_ids()
await hass_disable_services.async_block_till_done()
zha_entity_ids = {
ent for ent in entity_ids if ent.split(".")[0] in zha_const.COMPONENTS
}
if cluster_identify:
called = int(zha_device_joined_restored.name == "zha_device_joined")
assert cluster_identify.request.call_count == called
assert cluster_identify.request.await_count == called
if called:
assert cluster_identify.request.call_args == mock.call(
False,
64,
(zigpy.types.uint8_t, zigpy.types.uint8_t),
2,
0,
expect_reply=True,
manufacturer=None,
tsn=None,
)
event_channels = {
ch.id for pool in zha_dev.channels.pools for ch in pool.client_channels.values()
}
entity_map = device["entity_map"]
assert zha_entity_ids == {
e["entity_id"] for e in entity_map.values() if not e.get("default_match", False)
}
assert event_channels == set(device["event_channels"])
for call in _dispatch.call_args_list:
_, component, entity_cls, unique_id, channels = call[0]
key = (component, unique_id)
entity_id = entity_registry.async_get_entity_id(component, "zha", unique_id)
assert key in entity_map
assert entity_id is not None
no_tail_id = NO_TAIL_ID.sub("", entity_map[key]["entity_id"])
assert entity_id.startswith(no_tail_id)
assert {ch.name for ch in channels} == set(entity_map[key]["channels"])
assert entity_cls.__name__ == entity_map[key]["entity_class"]
def _get_first_identify_cluster(zigpy_device):
for endpoint in list(zigpy_device.endpoints.values())[1:]:
if hasattr(endpoint, "identify"):
return endpoint.identify
@mock.patch(
"homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_device_type"
)
@mock.patch(
"homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_cluster_id"
)
def test_discover_entities(m1, m2):
"""Test discover endpoint class method."""
ep_channels = mock.MagicMock()
disc.PROBE.discover_entities(ep_channels)
assert m1.call_count == 1
assert m1.call_args[0][0] is ep_channels
assert m2.call_count == 1
assert m2.call_args[0][0] is ep_channels
@pytest.mark.parametrize(
"device_type, component, hit",
[
(zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT, zha_const.LIGHT, True),
(zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST, zha_const.SWITCH, True),
(zigpy.profiles.zha.DeviceType.SMART_PLUG, zha_const.SWITCH, True),
(0xFFFF, None, False),
],
)
def test_discover_by_device_type(device_type, component, hit):
"""Test entity discovery by device type."""
ep_channels = mock.MagicMock(spec_set=zha_channels.ChannelPool)
ep_mock = mock.PropertyMock()
ep_mock.return_value.profile_id = 0x0104
ep_mock.return_value.device_type = device_type
type(ep_channels).endpoint = ep_mock
get_entity_mock = mock.MagicMock(
return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed)
)
with mock.patch(
"homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity",
get_entity_mock,
):
disc.PROBE.discover_by_device_type(ep_channels)
if hit:
assert get_entity_mock.call_count == 1
assert ep_channels.claim_channels.call_count == 1
assert ep_channels.claim_channels.call_args[0][0] is mock.sentinel.claimed
assert ep_channels.async_new_entity.call_count == 1
assert ep_channels.async_new_entity.call_args[0][0] == component
assert ep_channels.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls
def test_discover_by_device_type_override():
"""Test entity discovery by device type overriding."""
ep_channels = mock.MagicMock(spec_set=zha_channels.ChannelPool)
ep_mock = mock.PropertyMock()
ep_mock.return_value.profile_id = 0x0104
ep_mock.return_value.device_type = 0x0100
type(ep_channels).endpoint = ep_mock
overrides = {ep_channels.unique_id: {"type": zha_const.SWITCH}}
get_entity_mock = mock.MagicMock(
return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed)
)
with mock.patch(
"homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity",
get_entity_mock,
):
with mock.patch.dict(disc.PROBE._device_configs, overrides, clear=True):
disc.PROBE.discover_by_device_type(ep_channels)
assert get_entity_mock.call_count == 1
assert ep_channels.claim_channels.call_count == 1
assert ep_channels.claim_channels.call_args[0][0] is mock.sentinel.claimed
assert ep_channels.async_new_entity.call_count == 1
assert ep_channels.async_new_entity.call_args[0][0] == zha_const.SWITCH
assert (
ep_channels.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls
)
def test_discover_probe_single_cluster():
"""Test entity discovery by single cluster."""
ep_channels = mock.MagicMock(spec_set=zha_channels.ChannelPool)
ep_mock = mock.PropertyMock()
ep_mock.return_value.profile_id = 0x0104
ep_mock.return_value.device_type = 0x0100
type(ep_channels).endpoint = ep_mock
get_entity_mock = mock.MagicMock(
return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed)
)
channel_mock = mock.MagicMock(spec_set=base_channels.ZigbeeChannel)
with mock.patch(
"homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity",
get_entity_mock,
):
disc.PROBE.probe_single_cluster(zha_const.SWITCH, channel_mock, ep_channels)
assert get_entity_mock.call_count == 1
assert ep_channels.claim_channels.call_count == 1
assert ep_channels.claim_channels.call_args[0][0] is mock.sentinel.claimed
assert ep_channels.async_new_entity.call_count == 1
assert ep_channels.async_new_entity.call_args[0][0] == zha_const.SWITCH
assert ep_channels.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls
assert ep_channels.async_new_entity.call_args[0][3] == mock.sentinel.claimed
@pytest.mark.parametrize("device_info", DEVICES)
async def test_discover_endpoint(device_info, channels_mock, hass):
"""Test device discovery."""
with mock.patch(
"homeassistant.components.zha.core.channels.Channels.async_new_entity"
) as new_ent:
channels = channels_mock(
device_info["endpoints"],
manufacturer=device_info["manufacturer"],
model=device_info["model"],
node_desc=device_info["node_descriptor"],
patch_cluster=False,
)
assert device_info["event_channels"] == sorted(
[ch.id for pool in channels.pools for ch in pool.client_channels.values()]
)
assert new_ent.call_count == len(
[
device_info
for device_info in device_info["entity_map"].values()
if not device_info.get("default_match", False)
]
)
for call_args in new_ent.call_args_list:
comp, ent_cls, unique_id, channels = call_args[0]
map_id = (comp, unique_id)
assert map_id in device_info["entity_map"]
entity_info = device_info["entity_map"][map_id]
assert {ch.name for ch in channels} == set(entity_info["channels"])
assert ent_cls.__name__ == entity_info["entity_class"]
def _ch_mock(cluster):
"""Return mock of a channel with a cluster."""
channel = mock.MagicMock()
type(channel).cluster = mock.PropertyMock(return_value=cluster(mock.MagicMock()))
return channel
@mock.patch(
"homeassistant.components.zha.core.discovery.ProbeEndpoint"
".handle_on_off_output_cluster_exception",
new=mock.MagicMock(),
)
@mock.patch(
"homeassistant.components.zha.core.discovery.ProbeEndpoint.probe_single_cluster"
)
def _test_single_input_cluster_device_class(probe_mock):
"""Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class."""
door_ch = _ch_mock(zigpy.zcl.clusters.closures.DoorLock)
cover_ch = _ch_mock(zigpy.zcl.clusters.closures.WindowCovering)
multistate_ch = _ch_mock(zigpy.zcl.clusters.general.MultistateInput)
class QuirkedIAS(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.security.IasZone):
pass
ias_ch = _ch_mock(QuirkedIAS)
class _Analog(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.general.AnalogInput):
pass
analog_ch = _ch_mock(_Analog)
ch_pool = mock.MagicMock(spec_set=zha_channels.ChannelPool)
ch_pool.unclaimed_channels.return_value = [
door_ch,
cover_ch,
multistate_ch,
ias_ch,
analog_ch,
]
disc.ProbeEndpoint().discover_by_cluster_id(ch_pool)
assert probe_mock.call_count == len(ch_pool.unclaimed_channels())
probes = (
(zha_const.LOCK, door_ch),
(zha_const.COVER, cover_ch),
(zha_const.SENSOR, multistate_ch),
(zha_const.BINARY_SENSOR, ias_ch),
(zha_const.SENSOR, analog_ch),
)
for call, details in zip(probe_mock.call_args_list, probes):
component, ch = details
assert call[0][0] == component
assert call[0][1] == ch
def test_single_input_cluster_device_class():
"""Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class."""
_test_single_input_cluster_device_class()
def test_single_input_cluster_device_class_by_cluster_class():
"""Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class."""
mock_reg = {
zigpy.zcl.clusters.closures.DoorLock.cluster_id: zha_const.LOCK,
zigpy.zcl.clusters.closures.WindowCovering.cluster_id: zha_const.COVER,
zigpy.zcl.clusters.general.AnalogInput: zha_const.SENSOR,
zigpy.zcl.clusters.general.MultistateInput: zha_const.SENSOR,
zigpy.zcl.clusters.security.IasZone: zha_const.BINARY_SENSOR,
}
with mock.patch.dict(
zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS, mock_reg, clear=True
):
_test_single_input_cluster_device_class()
@pytest.mark.parametrize(
"override, entity_id",
[
(None, "light.manufacturer_model_77665544_level_light_color_on_off"),
("switch", "switch.manufacturer_model_77665544_on_off"),
],
)
async def test_device_override(
hass_disable_services, zigpy_device_mock, setup_zha, override, entity_id
):
"""Test device discovery override."""
zigpy_device = zigpy_device_mock(
{
1: {
"device_type": zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT,
"endpoint_id": 1,
"in_clusters": [0, 3, 4, 5, 6, 8, 768, 2821, 64513],
"out_clusters": [25],
"profile_id": 260,
}
},
"00:11:22:33:44:55:66:77",
"manufacturer",
"model",
patch_cluster=False,
)
if override is not None:
override = {"device_config": {"00:11:22:33:44:55:66:77-1": {"type": override}}}
await setup_zha(override)
assert hass_disable_services.states.get(entity_id) is None
zha_gateway = get_zha_gateway(hass_disable_services)
await zha_gateway.async_device_initialized(zigpy_device)
await hass_disable_services.async_block_till_done()
assert hass_disable_services.states.get(entity_id) is not None
async def test_group_probe_cleanup_called(
hass_disable_services, setup_zha, config_entry
):
"""Test cleanup happens when zha is unloaded."""
await setup_zha()
disc.GROUP_PROBE.cleanup = mock.Mock(wraps=disc.GROUP_PROBE.cleanup)
await config_entry.async_unload(hass_disable_services)
await hass_disable_services.async_block_till_done()
disc.GROUP_PROBE.cleanup.assert_called()
|
from __future__ import absolute_import
import os
from molecule import logger
from molecule import util
LOG = logger.get_logger(__name__)
class AnsiblePlaybooks(object):
""" A class to act as a module to namespace playbook properties. """
def __init__(self, config):
"""
        Initialize a new namespace class and return None.
:param config: An instance of a Molecule config.
:return: None
"""
self._config = config
@property
def cleanup(self):
return self._get_playbook('cleanup')
@property
def create(self):
return self._get_playbook('create')
@property
def converge(self):
c = self._config.config
return self._config.provisioner.abs_path(
c['provisioner']['playbooks']['converge'])
@property
def destroy(self):
return self._get_playbook('destroy')
@property
def prepare(self):
return self._get_playbook('prepare')
@property
def side_effect(self):
return self._get_playbook('side_effect')
@property
def verify(self):
return self._get_playbook('verify')
def _get_playbook_directory(self):
return util.abs_path(
os.path.join(self._config.provisioner.directory, 'playbooks'))
def _get_playbook(self, section):
c = self._config.config
driver_dict = c['provisioner']['playbooks'].get(
self._config.driver.name)
playbook = c['provisioner']['playbooks'][section]
if driver_dict:
try:
playbook = driver_dict[section]
except Exception:
pass
if playbook is not None:
playbook = self._config.provisioner.abs_path(playbook)
if os.path.exists(playbook):
return playbook
elif os.path.exists(self._get_bundled_driver_playbook(section)):
return self._get_bundled_driver_playbook(section)
def _get_bundled_driver_playbook(self, section):
return os.path.join(
self._get_playbook_directory(), self._config.driver.name,
self._config.config['provisioner']['playbooks'][section])
|
from ipaddress import ip_address
from typing import Optional, cast
import yarl
from homeassistant.components.http import current_request
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import bind_hass
from homeassistant.util.network import (
is_ip_address,
is_local,
is_loopback,
is_private,
normalize_url,
)
TYPE_URL_INTERNAL = "internal_url"
TYPE_URL_EXTERNAL = "external_url"
class NoURLAvailableError(HomeAssistantError):
"""An URL to the Home Assistant instance is not available."""
@bind_hass
def get_url(
hass: HomeAssistant,
*,
require_current_request: bool = False,
require_ssl: bool = False,
require_standard_port: bool = False,
allow_internal: bool = True,
allow_external: bool = True,
allow_cloud: bool = True,
allow_ip: bool = True,
prefer_external: bool = False,
prefer_cloud: bool = False,
) -> str:
"""Get a URL to this instance."""
if require_current_request and current_request.get() is None:
raise NoURLAvailableError
order = [TYPE_URL_INTERNAL, TYPE_URL_EXTERNAL]
if prefer_external:
order.reverse()
    # Try finding a URL in the order specified
for url_type in order:
if allow_internal and url_type == TYPE_URL_INTERNAL:
try:
return _get_internal_url(
hass,
allow_ip=allow_ip,
require_current_request=require_current_request,
require_ssl=require_ssl,
require_standard_port=require_standard_port,
)
except NoURLAvailableError:
pass
if allow_external and url_type == TYPE_URL_EXTERNAL:
try:
return _get_external_url(
hass,
allow_cloud=allow_cloud,
allow_ip=allow_ip,
prefer_cloud=prefer_cloud,
require_current_request=require_current_request,
require_ssl=require_ssl,
require_standard_port=require_standard_port,
)
except NoURLAvailableError:
pass
# For current request, we accept loopback interfaces (e.g., 127.0.0.1),
# the Supervisor hostname and localhost transparently
request_host = _get_request_host()
if (
require_current_request
and request_host is not None
and hass.config.api is not None
):
scheme = "https" if hass.config.api.use_ssl else "http"
current_url = yarl.URL.build(
scheme=scheme, host=request_host, port=hass.config.api.port
)
known_hostnames = ["localhost"]
if hass.components.hassio.is_hassio():
host_info = hass.components.hassio.get_host_info()
known_hostnames.extend(
[host_info["hostname"], f"{host_info['hostname']}.local"]
)
if (
(
(
allow_ip
and is_ip_address(request_host)
and is_loopback(ip_address(request_host))
)
or request_host in known_hostnames
)
and (not require_ssl or current_url.scheme == "https")
and (not require_standard_port or current_url.is_default_port())
):
return normalize_url(str(current_url))
# We have to be honest now, we have no viable option available
raise NoURLAvailableError
def _get_request_host() -> Optional[str]:
"""Get the host address of the current request."""
request = current_request.get()
if request is None:
raise NoURLAvailableError
return yarl.URL(request.url).host
@bind_hass
def _get_internal_url(
hass: HomeAssistant,
*,
allow_ip: bool = True,
require_current_request: bool = False,
require_ssl: bool = False,
require_standard_port: bool = False,
) -> str:
"""Get internal URL of this instance."""
if hass.config.internal_url:
internal_url = yarl.URL(hass.config.internal_url)
if (
(not require_current_request or internal_url.host == _get_request_host())
and (not require_ssl or internal_url.scheme == "https")
and (not require_standard_port or internal_url.is_default_port())
and (allow_ip or not is_ip_address(str(internal_url.host)))
):
return normalize_url(str(internal_url))
# Fallback to old base_url
try:
return _get_deprecated_base_url(
hass,
internal=True,
allow_ip=allow_ip,
require_current_request=require_current_request,
require_ssl=require_ssl,
require_standard_port=require_standard_port,
)
except NoURLAvailableError:
pass
# Fallback to detected local IP
if allow_ip and not (
require_ssl or hass.config.api is None or hass.config.api.use_ssl
):
ip_url = yarl.URL.build(
scheme="http", host=hass.config.api.local_ip, port=hass.config.api.port
)
if (
not is_loopback(ip_address(ip_url.host))
and (not require_current_request or ip_url.host == _get_request_host())
and (not require_standard_port or ip_url.is_default_port())
):
return normalize_url(str(ip_url))
raise NoURLAvailableError
@bind_hass
def _get_external_url(
hass: HomeAssistant,
*,
allow_cloud: bool = True,
allow_ip: bool = True,
prefer_cloud: bool = False,
require_current_request: bool = False,
require_ssl: bool = False,
require_standard_port: bool = False,
) -> str:
"""Get external URL of this instance."""
if prefer_cloud and allow_cloud:
try:
return _get_cloud_url(hass)
except NoURLAvailableError:
pass
if hass.config.external_url:
external_url = yarl.URL(hass.config.external_url)
if (
(allow_ip or not is_ip_address(str(external_url.host)))
and (
not require_current_request or external_url.host == _get_request_host()
)
and (not require_standard_port or external_url.is_default_port())
and (
not require_ssl
or (
external_url.scheme == "https"
and not is_ip_address(str(external_url.host))
)
)
):
return normalize_url(str(external_url))
try:
return _get_deprecated_base_url(
hass,
allow_ip=allow_ip,
require_current_request=require_current_request,
require_ssl=require_ssl,
require_standard_port=require_standard_port,
)
except NoURLAvailableError:
pass
if allow_cloud:
try:
return _get_cloud_url(hass, require_current_request=require_current_request)
except NoURLAvailableError:
pass
raise NoURLAvailableError
@bind_hass
def _get_cloud_url(hass: HomeAssistant, require_current_request: bool = False) -> str:
"""Get external Home Assistant Cloud URL of this instance."""
if "cloud" in hass.config.components:
try:
cloud_url = yarl.URL(cast(str, hass.components.cloud.async_remote_ui_url()))
except hass.components.cloud.CloudNotAvailable as err:
raise NoURLAvailableError from err
if not require_current_request or cloud_url.host == _get_request_host():
return normalize_url(str(cloud_url))
raise NoURLAvailableError
@bind_hass
def _get_deprecated_base_url(
hass: HomeAssistant,
*,
internal: bool = False,
allow_ip: bool = True,
require_current_request: bool = False,
require_ssl: bool = False,
require_standard_port: bool = False,
) -> str:
"""Work with the deprecated `base_url`, used as fallback."""
if hass.config.api is None or not hass.config.api.deprecated_base_url:
raise NoURLAvailableError
base_url = yarl.URL(hass.config.api.deprecated_base_url)
# Rules that apply to both internal and external
if (
(allow_ip or not is_ip_address(str(base_url.host)))
and (not require_current_request or base_url.host == _get_request_host())
and (not require_ssl or base_url.scheme == "https")
and (not require_standard_port or base_url.is_default_port())
):
# Check to ensure an internal URL
if internal and (
str(base_url.host).endswith(".local")
or (
is_ip_address(str(base_url.host))
and not is_loopback(ip_address(base_url.host))
and is_private(ip_address(base_url.host))
)
):
return normalize_url(str(base_url))
# Check to ensure an external URL (a little)
if (
not internal
and not str(base_url.host).endswith(".local")
and not (
is_ip_address(str(base_url.host))
and is_local(ip_address(str(base_url.host)))
)
):
return normalize_url(str(base_url))
raise NoURLAvailableError
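# A small sketch (added for illustration) of the yarl behavior that the
# require_standard_port checks above rely on: is_default_port() is only True
# when the explicit port matches the scheme's default.
def _demo_is_default_port() -> None:
    assert yarl.URL.build(scheme="http", host="example.local", port=80).is_default_port()
    assert not yarl.URL.build(
        scheme="https", host="example.local", port=8123
    ).is_default_port()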
|
import mock
import pytest
from paasta_tools import firewall_logging
@mock.patch.object(firewall_logging, "lookup_service_instance_by_ip")
def test_syslog_to_paasta_log(mock_lookup_service_instance_by_ip, mock_log):
syslog_data = fake_syslog_data("my-hostname", SRC="1.2.3.4")
mock_lookup_service_instance_by_ip.return_value = ("myservice", "myinstance")
firewall_logging.syslog_to_paasta_log(syslog_data, "my-cluster")
assert mock_log.mock_calls == [
mock.call(
service="myservice",
component="security",
level="debug",
cluster="my-cluster",
instance="myinstance",
line="my-hostname: my-prefix IN=docker0 SRC=1.2.3.4",
)
]
@mock.patch.object(firewall_logging, "lookup_service_instance_by_ip")
def test_syslog_to_paasta_log_no_container(
mock_lookup_service_instance_by_ip, mock_log
):
syslog_data = fake_syslog_data("my-hostname", SRC="1.2.3.4")
mock_lookup_service_instance_by_ip.return_value = (None, None)
firewall_logging.syslog_to_paasta_log(syslog_data, "my-cluster")
assert mock_log.mock_calls == []
def test_parse_syslog_undecodable():
assert (
firewall_logging.parse_syslog(b"<4>Jun 6 07:52:38 myhost \xba~\xa6r") is None
)
@pytest.mark.parametrize(
"syslog_data",
[
"<4>Jun 6 07:52:38 myhost someothermessage: hello world",
"<4>Jun 6 07:52:38 myhost kernel: hello world",
"<4>Jun 6 07:52:38 myhost kernel [0.0]: hello world",
],
)
@mock.patch.object(firewall_logging, "lookup_service_instance_by_ip")
def test_syslog_to_paasta_log_bad_message(
mock_lookup_service_instance_by_ip, syslog_data, mock_log
):
firewall_logging.syslog_to_paasta_log(syslog_data.encode(), "my-cluster")
assert mock_lookup_service_instance_by_ip.mock_calls == []
assert mock_log.mock_calls == []
@mock.patch.object(
firewall_logging,
"services_running_here",
return_value=[
("service1", "instance1", "00:00:00:00:00", "1.1.1.1"),
("service1", "instance2", "00:00:00:00:00", "2.2.2.2"),
],
)
@mock.patch.object(firewall_logging, "log")
def test_lookup_service_instance_by_ip(my_mock_log, mock_services_running_here):
assert firewall_logging.lookup_service_instance_by_ip("1.1.1.1") == (
"service1",
"instance1",
)
assert firewall_logging.lookup_service_instance_by_ip("2.2.2.2") == (
"service1",
"instance2",
)
assert firewall_logging.lookup_service_instance_by_ip("3.3.3.3") == (None, None)
assert my_mock_log.info.mock_calls == [
mock.call("Unable to find container for ip 3.3.3.3")
]
def test_parse_args():
assert firewall_logging.parse_args([]).listen_host == "127.0.0.1"
assert firewall_logging.parse_args([]).listen_port == 1516
assert firewall_logging.parse_args([]).verbose is False
assert firewall_logging.parse_args(["-v"]).verbose is True
assert firewall_logging.parse_args(["-p", "1234"]).listen_port == 1234
assert firewall_logging.parse_args(["-l", "0.0.0.0"]).listen_host == "0.0.0.0"
@mock.patch.object(firewall_logging, "logging")
def test_setup_logging(logging_mock):
firewall_logging.setup_logging(True)
assert logging_mock.basicConfig.mock_calls == [mock.call(level=logging_mock.DEBUG)]
@mock.patch.object(firewall_logging, "MultiUDPServer")
def test_run_server(udpserver_mock):
firewall_logging.run_server("myhost", 1234)
assert udpserver_mock.mock_calls == [
mock.call(("myhost", 1234), firewall_logging.SyslogUDPHandler),
mock.call().serve_forever(),
]
@mock.patch.object(firewall_logging, "logging")
@mock.patch.object(firewall_logging, "MultiUDPServer")
@mock.patch.object(firewall_logging, "signal")
def test_main_single_worker(signal_mock, udpserver_mock, logging_mock):
firewall_logging.main(["-w", "1"])
assert logging_mock.basicConfig.mock_calls == [
mock.call(level=logging_mock.WARNING)
]
assert udpserver_mock.mock_calls == [
mock.call(("127.0.0.1", 1516), firewall_logging.SyslogUDPHandler),
mock.call().serve_forever(),
]
@mock.patch.object(firewall_logging, "logging")
@mock.patch.object(firewall_logging, "MultiUDPServer")
@mock.patch.object(firewall_logging.os, "fork", return_value=0)
@mock.patch.object(firewall_logging, "signal")
def test_main_two_workers(signal_mock, fork_mock, udpserver_mock, logging_mock):
firewall_logging.main(["-w", "2"])
assert logging_mock.basicConfig.mock_calls == [
mock.call(level=logging_mock.WARNING)
]
assert udpserver_mock.mock_calls == [
mock.call(("127.0.0.1", 1516), firewall_logging.SyslogUDPHandler),
mock.call().serve_forever(),
mock.call(("127.0.0.1", 1516), firewall_logging.SyslogUDPHandler),
mock.call().serve_forever(),
]
def fake_syslog_data(hostname, **kwargs):
prefix = (
f"<4>Jun 6 07:52:38 {hostname} kernel: [2736265.340132] my-prefix IN=docker0 "
)
fields_str = " ".join(map("=".join, kwargs.items()))
return (prefix + fields_str + " \n").encode()
@pytest.yield_fixture
def mock_log():
with mock.patch.object(firewall_logging, "_log", autospec=True) as mock_log:
yield mock_log
|
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QKeySequence
import pytest
from qutebrowser.keyinput import modeparsers, keyutils
@pytest.fixture
def commandrunner(stubs):
return stubs.FakeCommandRunner()
class TestsNormalKeyParser:
@pytest.fixture(autouse=True)
def patch_stuff(self, monkeypatch, stubs, keyinput_bindings):
"""Set up mocks and read the test config."""
monkeypatch.setattr(
'qutebrowser.keyinput.basekeyparser.usertypes.Timer',
stubs.FakeTimer)
@pytest.fixture
def keyparser(self, commandrunner):
kp = modeparsers.NormalKeyParser(win_id=0, commandrunner=commandrunner)
return kp
def test_keychain(self, keyparser, commandrunner):
"""Test valid keychain."""
# Press 'z' which is ignored because of no match
# Then start the real chain
chain = keyutils.KeySequence.parse('zba')
for info in chain:
keyparser.handle(info.to_event())
assert commandrunner.commands == [('message-info ba', None)]
assert not keyparser._sequence
def test_partial_keychain_timeout(self, keyparser, config_stub,
qtbot, commandrunner):
"""Test partial keychain timeout."""
config_stub.val.input.partial_timeout = 100
timer = keyparser._partial_timer
assert not timer.isActive()
# Press 'b' for a partial match.
# Then we check if the timer has been set up correctly
keyparser.handle(keyutils.KeyInfo(Qt.Key_B, Qt.NoModifier).to_event())
assert timer.isSingleShot()
assert timer.interval() == 100
assert timer.isActive()
assert not commandrunner.commands
assert keyparser._sequence == keyutils.KeySequence.parse('b')
# Now simulate a timeout and check the keystring has been cleared.
with qtbot.wait_signal(keyparser.keystring_updated) as blocker:
timer.timeout.emit()
assert not commandrunner.commands
assert not keyparser._sequence
assert blocker.args == ['']
class TestHintKeyParser:
@pytest.fixture
def hintmanager(self, stubs):
return stubs.FakeHintManager()
@pytest.fixture
def keyparser(self, config_stub, key_config_stub, commandrunner,
hintmanager):
return modeparsers.HintKeyParser(win_id=0,
hintmanager=hintmanager,
commandrunner=commandrunner)
@pytest.mark.parametrize('bindings, keychain, prefix, hint', [
(
['aa', 'as'],
'as',
'a',
'as'
),
(
['21', '22'],
'<Num+2><Num+2>',
'2',
'22'
),
(
['äa', 'äs'],
'äs',
'ä',
'äs'
),
(
['не', 'на'],
'не',
'<Н>',
'не',
),
])
def test_match(self, keyparser, hintmanager,
bindings, keychain, prefix, hint):
keyparser.update_bindings(bindings)
seq = keyutils.KeySequence.parse(keychain)
assert len(seq) == 2
match = keyparser.handle(seq[0].to_event())
assert match == QKeySequence.PartialMatch
assert hintmanager.keystr == prefix
match = keyparser.handle(seq[1].to_event())
assert match == QKeySequence.ExactMatch
assert hintmanager.keystr == hint
def test_match_key_mappings(self, config_stub, keyparser, hintmanager):
config_stub.val.bindings.key_mappings = {'α': 'a', 'σ': 's'}
keyparser.update_bindings(['aa', 'as'])
seq = keyutils.KeySequence.parse('ασ')
assert len(seq) == 2
match = keyparser.handle(seq[0].to_event())
assert match == QKeySequence.PartialMatch
assert hintmanager.keystr == 'a'
match = keyparser.handle(seq[1].to_event())
assert match == QKeySequence.ExactMatch
assert hintmanager.keystr == 'as'
def test_command(self, keyparser, config_stub, hintmanager, commandrunner):
config_stub.val.bindings.commands = {
'hint': {'abc': 'message-info abc'}
}
keyparser.update_bindings(['xabcy'])
steps = [
(Qt.Key_X, QKeySequence.PartialMatch, 'x'),
(Qt.Key_A, QKeySequence.PartialMatch, ''),
(Qt.Key_B, QKeySequence.PartialMatch, ''),
(Qt.Key_C, QKeySequence.ExactMatch, ''),
]
for key, expected_match, keystr in steps:
info = keyutils.KeyInfo(key, Qt.NoModifier)
match = keyparser.handle(info.to_event())
assert match == expected_match
assert hintmanager.keystr == keystr
if key != Qt.Key_C:
assert not commandrunner.commands
assert commandrunner.commands == [('message-info abc', None)]
|
from collections import Counter
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class PyTextRankPhrases(FeatsFromSpacyDoc):
def __init__(self, use_lemmas=False, entity_types_to_censor=set(), tag_types_to_censor=set(),
strip_final_period=False):
FeatsFromSpacyDoc.__init__(self, use_lemmas, entity_types_to_censor, tag_types_to_censor, strip_final_period)
self._include_chunks = False
self._rank_smoothing_constant = 0
def include_chunks(self):
'''
Use each chunk in a phrase instead of just the span identified as a phrase
:return: self
'''
self._include_chunks = True
return self
def set_rank_smoothing_constant(self, rank_smoothing_constant):
'''
        Add a smoothing constant to each phrase's rank before it is used as a weight
:param rank_smoothing_constant: float
:return: self
'''
self._rank_smoothing_constant = rank_smoothing_constant
return self
def get_doc_metadata(self, doc):
import pytextrank
phrase_counter = Counter()
tr = pytextrank.TextRank()
tr.doc = doc
phrases = tr.calc_textrank()
for phrase in phrases:
if self._include_chunks:
for chunk in phrase.chunks:
phrase_counter[str(chunk)] += (phrase.rank + self._rank_smoothing_constant)
else:
phrase_counter[phrase.text] += phrase.count * (phrase.rank + self._rank_smoothing_constant)
return phrase_counter
def get_feats(self, doc):
return Counter()
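# A hedged usage sketch (assumes the pytextrank 2.x-era TextRank API used in
# get_doc_metadata above, and a spaCy Doc parsed by a full pipeline):
def _demo_phrase_features(parsed_doc):
    '''parsed_doc: a spaCy Doc already run through an NLP pipeline.'''
    feats = PyTextRankPhrases().set_rank_smoothing_constant(0.1)
    return feats.get_doc_metadata(parsed_doc)  # Counter: phrase -> weighted rank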
|
from __future__ import annotations
import functools
import shutil
from enum import IntEnum
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Union, cast
from .log import log
from .info_schemas import INSTALLABLE_SCHEMA, update_mixin
from .json_mixins import RepoJSONMixin
from redbot.core import VersionInfo
if TYPE_CHECKING:
from .repo_manager import RepoManager, Repo
class InstallableType(IntEnum):
# using IntEnum, because hot-reload breaks its identity
UNKNOWN = 0
COG = 1
SHARED_LIBRARY = 2
class Installable(RepoJSONMixin):
"""Base class for anything the Downloader cog can install.
- Modules
- Repo Libraries
- Other stuff?
The attributes of this class will mostly come from the installation's
info.json.
Attributes
----------
repo_name : `str`
Name of the repository which this package belongs to.
repo : Repo, optional
Repo object of the Installable, if repo is missing this will be `None`
commit : `str`, optional
Installable's commit. This is not the same as ``repo.commit``
author : `tuple` of `str`
Name(s) of the author(s).
end_user_data_statement : `str`
End user data statement of the module.
min_bot_version : `VersionInfo`
The minimum bot version required for this Installable.
max_bot_version : `VersionInfo`
The maximum bot version required for this Installable.
Ignored if `min_bot_version` is newer than `max_bot_version`.
min_python_version : `tuple` of `int`
The minimum python version required for this cog.
hidden : `bool`
Whether or not this cog will be hidden from the user when they use
`Downloader`'s commands.
required_cogs : `dict`
In the form :code:`{cog_name : repo_url}`, these are cogs which are
required for this installation.
requirements : `tuple` of `str`
Required libraries for this installation.
tags : `tuple` of `str`
List of tags to assist in searching.
type : `int`
The type of this installation, as specified by
        :class:`InstallableType`.
"""
def __init__(self, location: Path, repo: Optional[Repo] = None, commit: str = ""):
"""Base installable initializer.
Parameters
----------
location : pathlib.Path
Location (file or folder) to the installable.
repo : Repo, optional
Repo object of the Installable, if repo is missing this will be `None`
commit : str
Installable's commit. This is not the same as ``repo.commit``
"""
self._location = location
self.repo = repo
self.repo_name = self._location.parent.stem
self.commit = commit
self.end_user_data_statement: str
self.min_bot_version: VersionInfo
self.max_bot_version: VersionInfo
self.min_python_version: Tuple[int, int, int]
self.hidden: bool
self.disabled: bool
self.required_cogs: Dict[str, str] # Cog name -> repo URL
self.requirements: Tuple[str, ...]
self.tags: Tuple[str, ...]
self.type: InstallableType
super().__init__(location)
def __eq__(self, other: Any) -> bool:
# noinspection PyProtectedMember
return self._location == other._location
def __hash__(self) -> int:
return hash(self._location)
@property
def name(self) -> str:
"""`str` : The name of this package."""
return self._location.stem
async def copy_to(self, target_dir: Path) -> bool:
"""
Copies this cog/shared_lib to the given directory. This
will overwrite any files in the target directory.
:param pathlib.Path target_dir: The installation directory to install to.
:return: Status of installation
:rtype: bool
"""
copy_func: Callable[..., Any]
if self._location.is_file():
copy_func = shutil.copy2
else:
copy_func = functools.partial(shutil.copytree, dirs_exist_ok=True)
# noinspection PyBroadException
try:
copy_func(src=str(self._location), dst=str(target_dir / self._location.stem))
except: # noqa: E722
log.exception("Error occurred when copying path: %s", self._location)
return False
return True
def _read_info_file(self) -> None:
super()._read_info_file()
update_mixin(self, INSTALLABLE_SCHEMA)
if self.type == InstallableType.SHARED_LIBRARY:
self.hidden = True
class InstalledModule(Installable):
"""Base class for installed modules,
this is basically instance of installed `Installable`
used by Downloader.
Attributes
----------
pinned : `bool`
Whether or not this cog is pinned, always `False` if module is not a cog.
"""
def __init__(
self,
location: Path,
repo: Optional[Repo] = None,
commit: str = "",
pinned: bool = False,
json_repo_name: str = "",
):
super().__init__(location=location, repo=repo, commit=commit)
self.pinned: bool = pinned if self.type == InstallableType.COG else False
        # this is here so that Downloader can use the real repo name instead of "MISSING_REPO"
self._json_repo_name = json_repo_name
def to_json(self) -> Dict[str, Union[str, bool]]:
module_json: Dict[str, Union[str, bool]] = {
"repo_name": self.repo_name,
"module_name": self.name,
"commit": self.commit,
}
if self.type == InstallableType.COG:
module_json["pinned"] = self.pinned
return module_json
@classmethod
def from_json(
cls, data: Dict[str, Union[str, bool]], repo_mgr: RepoManager
) -> InstalledModule:
repo_name = cast(str, data["repo_name"])
cog_name = cast(str, data["module_name"])
commit = cast(str, data.get("commit", ""))
pinned = cast(bool, data.get("pinned", False))
# TypedDict, where are you :/
repo = repo_mgr.get_repo(repo_name)
if repo is not None:
repo_folder = repo.folder_path
else:
repo_folder = repo_mgr.repos_folder / "MISSING_REPO"
location = repo_folder / cog_name
return cls(
location=location, repo=repo, commit=commit, pinned=pinned, json_repo_name=repo_name
)
@classmethod
def from_installable(cls, module: Installable, *, pinned: bool = False) -> InstalledModule:
return cls(
location=module._location, repo=module.repo, commit=module.commit, pinned=pinned
)
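# A small sketch (hypothetical values) of the JSON shape that to_json()
# produces for a cog and that from_json() consumes through a RepoManager:
def _demo_installed_module_json() -> Dict[str, Union[str, bool]]:
    return {
        "repo_name": "my-repo",
        "module_name": "mycog",
        "commit": "a1b2c3",
        "pinned": False,
    }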
|
import gc
import logging
import os.path as osp
import sys
from operator import eq, lt, le, gt
from contextlib import contextmanager
import warnings
logging.basicConfig(level=logging.ERROR)
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.registry import *
class _1_(Predicate):
def __call__(self, *args, **kwargs):
return 1
class _0_(Predicate):
def __call__(self, *args, **kwargs):
return 0
def _2_(*args, **kwargs):
return 2
class SelectorsTC(TestCase):
def test_basic_and(self):
selector = _1_() & _1_()
self.assertEqual(selector(None), 2)
selector = _1_() & _0_()
self.assertEqual(selector(None), 0)
selector = _0_() & _1_()
self.assertEqual(selector(None), 0)
def test_basic_or(self):
selector = _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _1_() | _0_()
self.assertEqual(selector(None), 1)
selector = _0_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _0_()
self.assertEqual(selector(None), 0)
def test_selector_and_function(self):
selector = _1_() & _2_
self.assertEqual(selector(None), 3)
selector = _2_ & _1_()
self.assertEqual(selector(None), 3)
def test_three_and(self):
selector = _1_() & _1_() & _1_()
self.assertEqual(selector(None), 3)
selector = _1_() & _0_() & _1_()
self.assertEqual(selector(None), 0)
selector = _0_() & _1_() & _1_()
self.assertEqual(selector(None), 0)
def test_three_or(self):
selector = _1_() | _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _1_() | _0_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _1_() | _1_()
self.assertEqual(selector(None), 1)
selector = _0_() | _0_() | _0_()
self.assertEqual(selector(None), 0)
def test_composition(self):
selector = (_1_() & _1_()) & (_1_() & _1_())
self.assertTrue(isinstance(selector, AndPredicate))
self.assertEqual(len(selector.selectors), 4)
self.assertEqual(selector(None), 4)
selector = (_1_() & _0_()) | (_1_() & _1_())
self.assertTrue(isinstance(selector, OrPredicate))
self.assertEqual(len(selector.selectors), 2)
self.assertEqual(selector(None), 2)
def test_search_selectors(self):
sel = _1_()
self.assertIs(sel.search_selector(_1_), sel)
csel = AndPredicate(sel, Predicate())
self.assertIs(csel.search_selector(_1_), sel)
csel = AndPredicate(Predicate(), sel)
self.assertIs(csel.search_selector(_1_), sel)
self.assertIs(csel.search_selector((AndPredicate, OrPredicate)), csel)
self.assertIs(csel.search_selector((OrPredicate, AndPredicate)), csel)
self.assertIs(csel.search_selector((_1_, _0_)), sel)
self.assertIs(csel.search_selector((_0_, _1_)), sel)
def test_inplace_and(self):
selector = _1_()
selector &= _1_()
selector &= _1_()
self.assertEqual(selector(None), 3)
selector = _1_()
selector &= _0_()
selector &= _1_()
self.assertEqual(selector(None), 0)
selector = _0_()
selector &= _1_()
selector &= _1_()
self.assertEqual(selector(None), 0)
selector = _0_()
selector &= _0_()
selector &= _0_()
self.assertEqual(selector(None), 0)
def test_inplace_or(self):
selector = _1_()
selector |= _1_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _1_()
selector |= _0_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _0_()
selector |= _1_()
selector |= _1_()
self.assertEqual(selector(None), 1)
selector = _0_()
selector |= _0_()
selector |= _0_()
self.assertEqual(selector(None), 0)
def test_wrap_selectors(self):
class _temp_(Predicate):
def __call__(self, *args, **kwargs):
return 0
del _temp_ # test weakref
s1 = _1_() & _1_()
s2 = _1_() & _0_()
s3 = _0_() & _1_()
gc.collect()
self.count = 0
def decorate(f, self=self):
def wrapper(*args, **kwargs):
self.count += 1
return f(*args, **kwargs)
return wrapper
wrap_predicates(decorate)
self.assertEqual(s1(None), 2)
self.assertEqual(s2(None), 0)
self.assertEqual(s3(None), 0)
self.assertEqual(self.count, 8)
@contextmanager
def prepended_syspath(path):
sys.path.insert(0, path)
yield
sys.path = sys.path[1:]
class RegistryStoreTC(TestCase):
def test_autoload(self):
store = RegistryStore()
store.setdefault('zereg')
with prepended_syspath(self.datadir):
with warnings.catch_warnings(record=True) as warns:
store.register_objects([self.datapath('regobjects.py'),
self.datapath('regobjects2.py')])
self.assertIn('use register_modnames() instead',
[str(w.message) for w in warns])
self.assertEqual(['zereg'], list(store.keys()))
self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')),
set(store['zereg']))
def test_autoload_modnames(self):
store = RegistryStore()
store.setdefault('zereg')
with prepended_syspath(self.datadir):
store.register_modnames(['regobjects', 'regobjects2'])
self.assertEqual(['zereg'], list(store.keys()))
self.assertEqual(set(('appobject1', 'appobject2', 'appobject3')),
set(store['zereg']))
class RegistrableInstanceTC(TestCase):
def test_instance_modulename(self):
with warnings.catch_warnings(record=True) as warns:
obj = RegistrableInstance()
self.assertEqual(obj.__module__, 'unittest_registry')
self.assertIn('instantiate RegistrableInstance with __module__=__name__',
[str(w.message) for w in warns])
# no inheritance
obj = RegistrableInstance(__module__=__name__)
self.assertEqual(obj.__module__, 'unittest_registry')
# with inheritance from another python file
with prepended_syspath(self.datadir):
from regobjects2 import instance, MyRegistrableInstance
instance2 = MyRegistrableInstance(__module__=__name__)
self.assertEqual(instance.__module__, 'regobjects2')
self.assertEqual(instance2.__module__, 'unittest_registry')
if __name__ == '__main__':
unittest_main()
|
import os.path as op
import numpy as np
import mne
from mne.datasets import sample
print(__doc__)
# For this example, we will be using the information of the sample subject.
# This will download the data if it is not already on your machine. We also set
# the subjects directory so we don't need to give it to functions.
data_path = sample.data_path()
subjects_dir = op.join(data_path, 'subjects')
subject = 'sample'
# First, we get an info structure from the test subject.
evoked_fname = op.join(data_path, 'MEG', subject, 'sample_audvis-ave.fif')
info = mne.io.read_info(evoked_fname)
tstep = 1. / info['sfreq']
# To simulate sources, we also need a source space. It can be obtained from the
# forward solution of the sample subject.
fwd_fname = op.join(data_path, 'MEG', subject,
'sample_audvis-meg-eeg-oct-6-fwd.fif')
fwd = mne.read_forward_solution(fwd_fname)
src = fwd['src']
# To select a region to activate, we use the caudal middle frontal label to
# grow a region of interest.
selected_label = mne.read_labels_from_annot(
subject, regexp='caudalmiddlefrontal-lh', subjects_dir=subjects_dir)[0]
location = 'center' # Use the center of the region as a seed.
extent = 10. # Extent in mm of the region.
label = mne.label.select_sources(
subject, selected_label, location=location, extent=extent,
subjects_dir=subjects_dir)
# Define the time course of the activity for each source of the region to
# activate. Here we use a sine wave at 18 Hz with a peak amplitude
# of 10 nAm.
source_time_series = np.sin(2. * np.pi * 18. * np.arange(100) * tstep) * 10e-9
# Define when the activity occurs using events. The first column is the sample
# of the event, the second is not used, and the third is the event id. Here the
# events occur every 200 samples.
n_events = 50
events = np.zeros((n_events, 3), dtype=int)
events[:, 0] = 100 + 200 * np.arange(n_events) # Events sample.
events[:, 2] = 1  # All events have the same id.
# Create simulated source activity. Here we use a SourceSimulator whose
# add_data method is key. It specifies where (label), what
# (source_time_series), and when (events) an event type will occur.
source_simulator = mne.simulation.SourceSimulator(src, tstep=tstep)
source_simulator.add_data(label, source_time_series, events)
# Project the source time series to sensor space and add some noise. The source
# simulator can be given directly to the simulate_raw function.
raw = mne.simulation.simulate_raw(info, source_simulator, forward=fwd)
cov = mne.make_ad_hoc_cov(raw.info)
mne.simulation.add_noise(raw, cov, iir_filter=[0.2, -0.2, 0.04])
raw.plot()
# Plot evoked data to get another view of the simulated raw data.
events = mne.find_events(raw)
epochs = mne.Epochs(raw, events, 1, tmin=-0.05, tmax=0.2)
evoked = epochs.average()
evoked.plot()
|
import logging
from OPi import GPIO
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
from .const import PIN_MODES
_LOGGER = logging.getLogger(__name__)
DOMAIN = "orangepi_gpio"
async def async_setup(hass, config):
"""Set up the Orange Pi GPIO component."""
def cleanup_gpio(event):
"""Stuff to do before stopping."""
GPIO.cleanup()
def prepare_gpio(event):
"""Stuff to do when Home Assistant starts."""
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gpio)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, prepare_gpio)
return True
def setup_mode(mode):
"""Set GPIO pin mode."""
_LOGGER.debug("Setting GPIO pin mode as %s", PIN_MODES[mode])
GPIO.setmode(PIN_MODES[mode])
def setup_input(port):
"""Set up a GPIO as input."""
_LOGGER.debug("Setting up GPIO pin %i as input", port)
GPIO.setup(port, GPIO.IN)
def read_input(port):
"""Read a value from a GPIO."""
_LOGGER.debug("Reading GPIO pin %i", port)
return GPIO.input(port)
def edge_detect(port, event_callback):
"""Add detection for RISING and FALLING events."""
_LOGGER.debug("Add callback for GPIO pin %i", port)
GPIO.add_event_detect(port, GPIO.BOTH, callback=event_callback)
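# A minimal sketch (added for illustration): how the helpers above combine to
# poll a pin. The mode key and port number are placeholders; valid values come
# from PIN_MODES and the board's pinout.
def demo_read_pin(mode, port):
    """Configure a pin as input and return its current value."""
    setup_mode(mode)
    setup_input(port)
    return read_input(port)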
|
from mock import Mock
from mock import patch
from pytest import raises
from paasta_tools import paasta_metastatus
from paasta_tools.metrics.metastatus_lib import HealthCheckResult
from paasta_tools.metrics.metastatus_lib import ResourceUtilization as RU
def test_main_no_marathon_servers():
with patch(
"paasta_tools.paasta_metastatus.load_system_paasta_config", autospec=True
), patch(
"paasta_tools.marathon_tools.get_marathon_servers",
autospec=True,
return_value={},
), patch(
"paasta_tools.paasta_metastatus.is_mesos_available",
autospec=True,
return_value=True,
), patch(
"paasta_tools.paasta_metastatus.get_mesos_master", autospec=True
) as get_mesos_master, patch(
"paasta_tools.metrics.metastatus_lib.get_mesos_state_status",
autospec=True,
return_value=([("fake_output", True)]),
) as get_mesos_state_status_patch, patch(
"paasta_tools.metrics.metastatus_lib.get_mesos_resource_utilization_health",
autospec=True,
) as get_mesos_resource_utilization_health_patch, patch(
"paasta_tools.metrics.metastatus_lib.get_marathon_status",
autospec=True,
return_value=([HealthCheckResult(message="fake_output", healthy=True)]),
), patch(
"paasta_tools.paasta_metastatus.get_mesos_leader",
autospec=True,
return_value="localhost",
), patch(
"paasta_tools.paasta_metastatus.is_kubernetes_available",
autospec=True,
return_value=False,
):
fake_master = Mock(autospace=True)
fake_master.state.return_value = {}
get_mesos_master.return_value = fake_master
get_mesos_state_status_patch.return_value = []
get_mesos_resource_utilization_health_patch.return_value = []
with raises(SystemExit) as excinfo:
paasta_metastatus.main(())
assert excinfo.value.code == 0
def test_main_marathon_jsondecode_error():
with patch(
"paasta_tools.paasta_metastatus.load_system_paasta_config", autospec=True
), patch(
"paasta_tools.paasta_metastatus.is_mesos_available",
autospec=True,
return_value=True,
), patch(
"paasta_tools.marathon_tools.get_marathon_servers", autospec=True
) as get_marathon_status_patch, patch(
"paasta_tools.paasta_metastatus.get_mesos_master", autospec=True
) as get_mesos_master, patch(
"paasta_tools.metrics.metastatus_lib.get_mesos_state_status",
autospec=True,
return_value=([("fake_output", True)]),
) as get_mesos_state_status_patch, patch(
"paasta_tools.metrics.metastatus_lib.get_mesos_resource_utilization_health",
autospec=True,
) as get_mesos_resource_utilization_health_patch, patch(
"paasta_tools.metrics.metastatus_lib.get_marathon_status", autospec=True
) as get_marathon_status_patch:
fake_master = Mock(autospace=True)
fake_master.state.return_value = {}
get_mesos_master.return_value = fake_master
get_marathon_status_patch.return_value = [{"url": "http://foo"}]
get_marathon_status_patch.side_effect = ValueError("could not decode json")
get_mesos_state_status_patch.return_value = []
get_mesos_resource_utilization_health_patch.return_value = []
with raises(SystemExit) as excinfo:
paasta_metastatus.main(())
assert excinfo.value.code == 2
def test_get_service_instance_stats():
    # The patch target matters here: we patch get_instance_config in the
    # paasta_metastatus module, not in the utils module where it is defined.
instance_config_mock = Mock()
instance_config_mock.get_gpus.return_value = None
with patch(
"paasta_tools.paasta_metastatus.get_instance_config",
autospec=True,
return_value=instance_config_mock,
):
stats = paasta_metastatus.get_service_instance_stats(
"fakeservice", "fakeinstance", "fakecluster"
)
assert set(stats.keys()) == {"mem", "cpus", "disk", "gpus"}
def test_fill_table_rows_with_service_instance_stats():
fake_service_instance_stats = {"mem": 40, "cpus": 0.3, "disk": 1.0, "gpus": 0}
fake_table_rows = [[]]
# For reference, ResourceUtilization is (metric, total, free)
fake_rsrc_utils = [RU("mem", 100, 80), RU("cpus", 100, 50), RU("disk", 20, 15)]
paasta_metastatus.fill_table_rows_with_service_instance_stats(
fake_service_instance_stats, fake_rsrc_utils, fake_table_rows
)
result_str = fake_table_rows[0][0]
    # Memory is the limiting factor: only 80 is free and each service instance needs 40, so 2 fit.
assert "2" in result_str
assert "mem" in result_str
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# EC2 provides unique random hostnames.
def test_hostname(host):
pass
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
from collections import namedtuple
from datetime import datetime, timedelta
from distutils.version import LooseVersion
from dateutil.parser import parse
from django.core.cache import cache
from django.core.checks import Info
from weblate import VERSION_BASE
from weblate.utils.checks import weblate_check
from weblate.utils.requests import request
PYPI = "https://pypi.org/pypi/Weblate/json"
CACHE_KEY = "version-check"
Release = namedtuple("Release", ["version", "timestamp"])
def download_version_info():
response = request("get", PYPI)
result = []
for version, info in response.json()["releases"].items():
if not info:
continue
result.append(Release(version, parse(info[0]["upload_time"])))
return sorted(result, key=lambda x: x[1], reverse=True)
def flush_version_cache():
cache.delete(CACHE_KEY)
def get_version_info():
result = cache.get(CACHE_KEY)
if not result:
result = download_version_info()
cache.set(CACHE_KEY, result, 86400)
return result
def get_latest_version():
return get_version_info()[0]
def check_version(app_configs=None, **kwargs):
try:
latest = get_latest_version()
except (ValueError, OSError):
return []
if LooseVersion(latest.version) > LooseVersion(VERSION_BASE):
        # With a release every two months, this gets triggered after three releases
if latest.timestamp + timedelta(days=180) < datetime.now():
return [
weblate_check(
"weblate.C031",
"You Weblate version is outdated, please upgrade to {}.".format(
latest.version
),
)
]
return [
weblate_check(
"weblate.I031",
"New Weblate version is available, please upgrade to {}.".format(
latest.version
),
Info,
)
]
return []
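# A small demonstration (not part of Weblate) of the LooseVersion ordering
# that check_version() relies on; dotted components compare numerically:
def _demo_version_ordering():
    assert LooseVersion("4.1.1") > LooseVersion("4.1")
    assert LooseVersion("4.10") > LooseVersion("4.9")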
|
import unittest
import collections
import json
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
import catalyst
from catalyst.dl import SupervisedRunner, CheckpointCallback
from catalyst import utils
def _to_categorical(y, num_classes=None, dtype='float32'):
"""
Taken from
github.com/keras-team/keras/blob/master/keras/utils/np_utils.py
Converts a class vector (integers) to binary class matrix.
E.g. for use with categorical_crossentropy.
# Arguments
y: class vector to be converted into a matrix
(integers from 0 to num_classes).
num_classes: total number of classes.
dtype: The data type expected by the input, as a string
(`float32`, `float64`, `int32`...)
# Returns
A binary matrix representation of the input. The classes axis
is placed last.
# Example
```python
# Consider an array of 5 labels out of a set of 3 classes {0, 1, 2}:
> labels
array([0, 2, 1, 2, 0])
# `to_categorical` converts this into a matrix with as many
# columns as there are classes. The number of rows
# stays the same.
> to_categorical(labels)
array([[ 1., 0., 0.],
[ 0., 0., 1.],
[ 0., 1., 0.],
[ 0., 0., 1.],
[ 1., 0., 0.]], dtype=float32)
```
"""
y = np.array(y, dtype='int')
input_shape = y.shape
if input_shape and input_shape[-1] == 1 and len(input_shape) > 1:
input_shape = tuple(input_shape[:-1])
y = y.ravel()
if not num_classes:
num_classes = np.max(y) + 1
n = y.shape[0]
categorical = np.zeros((n, num_classes), dtype=dtype)
categorical[np.arange(n), y] = 1
output_shape = input_shape + (num_classes,)
categorical = np.reshape(categorical, output_shape)
return categorical
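# Quick self-check (added for illustration) mirroring the docstring example of
# _to_categorical above:
def _demo_to_categorical():
    out = _to_categorical([0, 2, 1, 2, 0])
    assert out.shape == (5, 3)
    assert out[0].tolist() == [1.0, 0.0, 0.0]
    return out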
class Net(nn.Module):
def __init__(self):
super().__init__()
self.conv1 = nn.Conv2d(1, 20, 5, 1)
self.conv2 = nn.Conv2d(20, 50, 5, 1)
self.fc1 = nn.Linear(4 * 4 * 50, 500)
self.fc2 = nn.Linear(500, 10)
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.max_pool2d(x, 2, 2)
x = F.relu(self.conv2(x))
x = F.max_pool2d(x, 2, 2)
x = x.view(-1, 4 * 4 * 50)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return x
class TestCatalyst(unittest.TestCase):
def test_version(self):
self.assertIsNotNone(catalyst.__version__)
def test_mnist(self):
utils.set_global_seed(42)
x_train = np.random.random((100, 1, 28, 28)).astype(np.float32)
y_train = _to_categorical(
np.random.randint(10, size=(100, 1)),
num_classes=10
).astype(np.float32)
x_valid = np.random.random((20, 1, 28, 28)).astype(np.float32)
y_valid = _to_categorical(
np.random.randint(10, size=(20, 1)),
num_classes=10
).astype(np.float32)
x_train, y_train, x_valid, y_valid = \
list(map(torch.tensor, [x_train, y_train, x_valid, y_valid]))
bs = 32
num_workers = 4
data_transform = transforms.ToTensor()
loaders = collections.OrderedDict()
trainset = torch.utils.data.TensorDataset(x_train, y_train)
trainloader = torch.utils.data.DataLoader(
trainset, batch_size=bs,
shuffle=True, num_workers=num_workers)
validset = torch.utils.data.TensorDataset(x_valid, y_valid)
validloader = torch.utils.data.DataLoader(
validset, batch_size=bs,
shuffle=False, num_workers=num_workers)
loaders["train"] = trainloader
loaders["valid"] = validloader
# experiment setup
num_epochs = 3
logdir = "./logs"
# model, criterion, optimizer
model = Net()
criterion = nn.BCEWithLogitsLoss()
optimizer = torch.optim.Adam(model.parameters())
# model runner
runner = SupervisedRunner()
# model training
runner.train(
model=model,
criterion=criterion,
optimizer=optimizer,
loaders=loaders,
logdir=logdir,
num_epochs=num_epochs,
verbose=False,
callbacks=[CheckpointCallback(save_n_best=3)]
)
with open('./logs/checkpoints/_metrics.json') as f:
metrics = json.load(f)
self.assertTrue(metrics['train.3']['loss'] < metrics['train.1']['loss'])
self.assertTrue(metrics['best']['loss'] < 0.35)
|
__author__ = "VMware, Inc."
def Cache(fn):
""" Function cache decorator """
def fnCache(*args, **kwargs):
""" Cache function """
key = (args and tuple(args) or None,
kwargs and frozenset(kwargs.items()) or None)
if key not in fn.__cached__:
fn.__cached__[key] = cache = fn(*args, **kwargs)
else:
cache = fn.__cached__[key]
return cache
def ResetCache():
""" Reset cache """
fn.__cached__ = {}
setattr(fn, "__cached__", {})
setattr(fn, "__resetcache__", ResetCache)
fnCache.__name__ = fn.__name__
fnCache.__doc__ = fn.__doc__
fnCache.__dict__.update(fn.__dict__)
return fnCache
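# Usage example (added for illustration): results are memoized per argument
# tuple and __resetcache__ empties the store so the next call recomputes.
@Cache
def _DemoAdd(a, b):
    """ Toy function to exercise the decorator """
    return a + b
def _DemoCacheUsage():
    """ Call twice (the second hit is served from cache), then reset """
    first = _DemoAdd(1, 2)
    second = _DemoAdd(1, 2)
    _DemoAdd.__resetcache__()
    return first, second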
|
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import cd
def test_check_links(build, target_dir):
with cd(target_dir):
assert __main__.main(["check", "-l"]) is None
def test_check_files(build, target_dir):
with cd(target_dir):
assert __main__.main(["check", "-f"]) is None
def test_index_in_sitemap(build, output_dir):
sitemap_path = os.path.join(output_dir, "sitemap.xml")
with io.open(sitemap_path, "r", encoding="utf8") as inf:
sitemap_data = inf.read()
assert "<loc>https://example.com/</loc>" in sitemap_data
def test_avoid_double_slash_in_rss(build, output_dir):
rss_path = os.path.join(output_dir, "rss.xml")
with io.open(rss_path, "r", encoding="utf8") as inf:
rss_data = inf.read()
assert "https://example.com//" not in rss_data
def test_archive_exists(build, output_dir):
"""Ensure the build did something."""
index_path = os.path.join(output_dir, "archive.html")
assert os.path.isfile(index_path)
@pytest.fixture(scope="module")
def build(target_dir):
"""Build the site."""
init_command = nikola.plugins.command.init.CommandInit()
init_command.create_empty_site(target_dir)
init_command.create_configuration(target_dir)
with cd(target_dir):
__main__.main(["build"])
|
import unittest
from kalliope.core.ConfigurationManager.ConfigurationChecker import ConfigurationChecker, NoSynapeName, \
NoSynapeNeurons, NoSynapeSignals, NoValidSignal, MultipleSameSynapseName
from kalliope.core.Models import Synapse
from kalliope.core.Utils.Utils import KalliopeModuleNotFoundError
class TestConfigurationChecker(unittest.TestCase):
"""
Class used to test the ConfigurationChecker class
"""
def setUp(self):
pass
def test_check_synape_dict(self):
valid_synapse_dict = {
'signals': [{'order': 'test_order'}],
'neurons': [{'say': {'message': ['test message']}}],
'name': 'test'
}
synapse_dict_without_name = {
'signals': [{'order': 'test_order'}],
'neurons': [{'say': {'message': ['test message']}}]
}
synapse_dict_without_neurons = {
'signals': [{'order': 'test_order'}],
'name': 'test'
}
synapse_dict_without_signals = {
'neurons': [{'say': {'message': ['test message']}}],
'name': 'test'
}
self.assertTrue(ConfigurationChecker.check_synape_dict(valid_synapse_dict))
with self.assertRaises(NoSynapeName):
ConfigurationChecker.check_synape_dict(synapse_dict_without_name)
with self.assertRaises(NoSynapeNeurons):
ConfigurationChecker.check_synape_dict(synapse_dict_without_neurons)
with self.assertRaises(NoSynapeSignals):
ConfigurationChecker.check_synape_dict(synapse_dict_without_signals)
def test_check_neuron_dict(self):
valid_neuron = {'say': {'message': ['test message']}}
invalid_neuron = {'not_existing_neuron': {'message': ['test message']}}
self.assertTrue(ConfigurationChecker.check_neuron_dict(valid_neuron))
with self.assertRaises(KalliopeModuleNotFoundError):
ConfigurationChecker.check_neuron_dict(invalid_neuron)
def test_check_signal_dict(self):
valid_signal = {'event': {'parameter_1': ['value1']}}
invalid_signal = {'non_existing_signal_name': {'parameter_2': ['value2']}}
self.assertTrue(ConfigurationChecker.check_signal_dict(valid_signal))
with self.assertRaises(KalliopeModuleNotFoundError):
ConfigurationChecker.check_signal_dict(invalid_signal)
def test_check_synapes(self):
synapse_1 = Synapse(name="test")
synapse_2 = Synapse(name="test2")
synapse_3 = Synapse(name="test")
valid_synapse_list = [synapse_1, synapse_2]
invalid_synapse_list = [synapse_1, synapse_3]
self.assertTrue(ConfigurationChecker.check_synapes(valid_synapse_list))
with self.assertRaises(MultipleSameSynapseName):
ConfigurationChecker.check_synapes(invalid_synapse_list)
if __name__ == '__main__':
unittest.main()
|
import os
import io
import time
import weakref
import zipfile
from base64 import encodebytes
import webruntime
from .. import config, event
from ._component2 import PyComponent, JsComponent
from ._server import current_server
from ._session import Session, get_page_for_export
from ._assetstore import assets
from . import logger
class ExporterWebSocketDummy:
""" Object that can be used by an app inplace of the websocket to
export apps to standalone HTML. The object tracks the commands send
by the app, so that these can be re-played in the exported document.
"""
close_code = None
def __init__(self):
self.commands = []
def write_command(self, cmd):
self.commands.append(cmd)
class App:
""" Specification of a Flexx app.
Strictly speaking, this is a container for a ``PyComponent``/``JsComponent``
class plus the args and kwargs that it is to be instantiated with.
Arguments:
cls (Component): the PyComponent or JsComponent class (e.g. Widget) that
represents this app.
args: positional arguments used to instantiate the class (and received
in its ``init()`` method).
kwargs: keyword arguments used to initialize the component's properties.
"""
def __init__(self, cls, *args, **kwargs):
        if not (isinstance(cls, type) and issubclass(cls, (PyComponent, JsComponent))):
raise ValueError('App needs a PyComponent or JsComponent class '
'as its first argument.')
self._cls = cls
self.args = args
self.kwargs = kwargs
self._path = cls.__name__ # can be overloaded by serve()
self._is_served = False
# Handle good defaults
if hasattr(cls, 'title') and self.kwargs.get('title', None) is None:
self.kwargs['title'] = 'Flexx app - ' + cls.__name__
if hasattr(cls, 'set_icon') and self.kwargs.get('icon', None) is None:
# Set icon as base64 str; exported apps can still be standalone
fname = os.path.abspath(os.path.join(__file__, '..', '..',
'resources', 'flexx.ico'))
icon_str = encodebytes(open(fname, 'rb').read()).decode()
self.kwargs['icon'] = 'data:image/ico;base64,' + icon_str
def __call__(self, *args, **kwargs):
a = list(self.args) + list(args)
kw = {}
kw.update(self.kwargs)
kw.update(kwargs)
return self.cls(*a, **kw)
def __repr__(self):
t = '<App based on class %s pre-initialized with %i args and %i kwargs>'
return t % (self.cls.__name__, len(self.args), len(self.kwargs))
@property
def cls(self):
""" The Component class that is the basis of this app.
"""
return self._cls
@property
def is_served(self):
""" Whether this app is already registered by the app manager.
"""
return self._is_served
@property
def url(self):
""" The url to acces this app. This raises an error if serve() has not
been called yet or if Flexx' server is not yet running.
"""
server = current_server(False)
if not self._is_served:
raise RuntimeError('Cannot determine app url if app is not yet "served".')
elif not (server and server.serving):
raise RuntimeError('Cannot determine app url if the server is not '
'yet running.')
else:
proto = server.protocol
host, port = server.serving
path = self._path + '/' if self._path else ''
return '%s://%s:%i/%s' % (proto, host, port, path)
@property
def name(self):
""" The name of the app, i.e. the url path that this app is served at.
"""
return self._path or '__main__'
def serve(self, name=None):
""" Start serving this app.
This registers the given class with the internal app manager. The
app can be loaded via 'http://hostname:port/name'.
Arguments:
name (str, optional): the relative URL path to serve the app on.
If this is ``''`` (the empty string), this will be the main app.
If not given or None, the name of the component class is used.
"""
# Note: this talks to the manager; it has nothing to do with the server
if self._is_served:
raise RuntimeError('This app (%s) is already served.' % self.name)
if name is not None:
self._path = name
manager.register_app(self)
self._is_served = True
def launch(self, runtime=None, **runtime_kwargs):
""" Launch this app as a desktop app in the given runtime.
See https://webruntime.readthedocs.io for details.
Arguments:
runtime (str): the runtime to launch the application in.
Default 'app or browser'.
runtime_kwargs: kwargs to pass to the ``webruntime.launch`` function.
A few names are passed to runtime kwargs if not already present
('title' and 'icon').
Returns:
Component: an instance of the given class.
"""
# creates server (and event loop) if it did not yet exist
current_server()
# Create session
if not self._is_served:
self.serve()
session = manager.create_session(self.name)
# Transfer title and icon
if runtime_kwargs.get('title', None) is None and 'title' in self.kwargs:
runtime_kwargs['title'] = self.kwargs['title']
if runtime_kwargs.get('icon', None) is None and 'icon' in self.kwargs:
runtime_kwargs['icon'] = self.kwargs['icon']
# Launch web runtime, the server will wait for the connection
url = self.url + '?session_id=%s' % session.id
if not runtime or '!' in config.webruntime:
runtime = config.webruntime.strip('!')
session._runtime = webruntime.launch(url, runtime=runtime, **runtime_kwargs)
return session.app
def dump(self, fname=None, link=2):
""" Get a dictionary of web assets that statically represents the app.
The returned dict contains at least one html file. Any
session-specific or shared data is also included. If link is
2/3, all shared assets are included too (and the main document
        links to them). A link value of 0/1 may be preferred for
performance or ease of distribution, but with link 2/3 debugging
is easier and multiple apps can share common assets.
When a process only dumps/exports an app, no server is started.
Tornado is not even imported (we have a test for this). This makes
it possible to use Flexx to dump an app and then serve it with any
tool one likes.
Arguments:
fname (str, optional): the name of the main html asset.
If not given or None, the name of the component class
is used. Must end in .html/.htm/.hta.
link (int): whether to link (JS and CSS) assets or embed them:
                A value of 0/1 is recommended for single (and standalone) apps,
while multiple apps can share common assets by using 2/3.
* 0: all assets are embedded into the main html document.
* 1: normal assets are embedded, remote assets remain remote.
* 2: all assets are linked (as separate files). Default.
* 3: normal assets are linked, remote assets remain remote.
Returns:
dict: A collection of assets.
"""
# Get asset name
if fname is None:
if self.name in ('__main__', ''):
fname = 'index.html'
else:
fname = self.name.lower() + '.html'
# Validate fname
if os.path.basename(fname) != fname:
raise ValueError('App.dump() fname must not contain directory names.')
        elif not fname.lower().endswith(('.html', '.htm', '.hta')):
raise ValueError('Invalid extension for dumping {}'.format(fname))
# Do stripped version of manager.create_session()
name = fname.split('.')[0].replace('-', '_').replace(' ', '_')
session = Session(name)
session._id = name # Force id to be the same on each dump
# Instantiate the component
self(flx_session=session, flx_is_app=True)
# Do stripped version of manager.connect_client()
exporter = ExporterWebSocketDummy()
session._set_ws(exporter)
assert link in (0, 1, 2, 3), "Expecting link to be in (0, 1, 2, 3)."
# Warn for PyComponents
if issubclass(self.cls, PyComponent):
logger.warning('Exporting a PyComponent - any Python interactivity will '
'not work in exported apps.')
d = {}
# Get main HTML page
html = get_page_for_export(session, exporter.commands, link)
if fname.lower().endswith('.hta'):
hta_tag = '<meta http-equiv="x-ua-compatible" content="ie=edge" />'
html = html.replace('<head>', '<head>\n ' + hta_tag, 1)
d[fname] = html.encode()
        # Add shared assets if we link to them from the main page
if link in (2, 3):
d.update(assets._dump_assets(link==2)) # also_remote if link==2
# Add session specific, and shared data
d.update(session._dump_data())
d.update(assets._dump_data())
return d
def export(self, filename, link=2, overwrite=True):
""" Export this app to a static website.
        Also see dump(). An app that contains no data can be exported to a
single html document by setting link to 0.
Arguments:
filename (str): Path to write the HTML document to.
If the filename ends with .hta, a Windows HTML Application is
created. If a directory is given, the app is exported to
appname.html in that directory.
link (int): whether to link (JS and CSS) assets or embed them:
* 0: all assets are embedded into the main html document.
* 1: normal assets are embedded, remote assets remain remote.
* 2: all assets are linked (as separate files). Default.
* 3: normal assets are linked, remote assets remain remote.
overwrite (bool, optional): if True (default) will overwrite files
that already exist. Otherwise existing files are skipped.
The latter makes it possible to efficiently export a series of
apps to the same directory and have them share common assets.
"""
# Derive dirname and app name
if not isinstance(filename, str):
raise ValueError('str filename required, use dump() for in-memory export.')
filename = os.path.abspath(os.path.expanduser(filename))
if (
os.path.isdir(filename) or
filename.endswith(('/', '\\')) or
'.' not in os.path.basename(filename)
):
dirname = filename
fname = None
else:
dirname, fname = os.path.split(filename)
# Collect asset dict
d = self.dump(fname, link)
# Write all assets to file
for fname, blob in d.items():
filename2 = os.path.join(dirname, fname)
if not overwrite and os.path.isfile(filename2):
continue
dname = os.path.dirname(filename2)
if not os.path.isdir(dname):
os.makedirs(dname)
with open(filename2, 'wb') as f:
f.write(blob)
app_type = 'standalone app' if len(d) == 1 else 'app'
logger.info('Exported %s to %r' % (app_type, filename))
def publish(self, name, token, url=None):
""" Publish this app as static HTML on the web.
This is an experimental feature! We will try to keep your app published,
but make no guarantees. We reserve the right to remove apps or shut down
the web server completely.
Arguments:
name (str): The name by which to publish this app. Must be unique
within the scope of the published site.
token (str): a secret token. This is stored at the target website.
Subsequent publications of the same app name must have the same
token.
url (str): The url to POST the app to. If None (default),
the default Flexx live website url will be used.
"""
# Dump assets into dict
d = self.dump('index.html', 2)
# Zip it up
f = io.BytesIO()
with zipfile.ZipFile(f, 'w') as zf:
for fname in d.keys():
zf.writestr(fname, d[fname])
# POST
try:
import requests
except ImportError:
raise ImportError('App.publish() needs requests lib: pip install requests')
url = url or 'http://flexx.app/submit/{name}/{token}'
real_url = url.format(name=name, token=token)
r = requests.post(real_url, data=f.getvalue())
if r.status_code != 200:
raise RuntimeError('Publish failed: ' + r.text)
else:
print('Publish succeeded, ' + r.text)
if url.startswith('http://flexx.app'):
            print('Your app is now available at '
'http://flexx.app/open/%s/' % name)
# todo: thread safety
def valid_app_name(name):
T = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789'
return name and name[0] in T[:-10] and all([c in T for c in name])
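# Hedged examples of the rule above: the first character must be a letter or
# underscore; the remaining characters may also include digits.
#   valid_app_name('my_app2')  -> True
#   valid_app_name('2cool')    -> False
#   valid_app_name('')         -> '' (falsy)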
# Note that the AppManager is a Component (but not a PyComponent)
class AppManager(event.Component):
""" Manage apps, or more specifically, the session objects.
    There is one AppManager class (in ``flexx.app.manager``). Its
purpose is to manage the application classes and instances. It is mostly
intended for internal use, but users can use it to e.g. monitor connections.
Create a reaction using ``@app.manager.reaction('connections_changed')``
    to track when the number of connected sessions changes.
"""
    total_sessions = 0  # Keep track of how many sessions we've served in total
def __init__(self):
super().__init__()
# name -> (app, pending, connected) - lists contain Session objects
self._appinfo = {}
self._session_map = weakref.WeakValueDictionary()
self._last_check_time = time.time()
def register_app(self, app):
""" Register an app (an object that wraps a Component class plus init args).
After registering an app (and starting the server) it is
possible to connect to "http://address:port/app_name".
"""
assert isinstance(app, App)
name = app.name
if not valid_app_name(name):
raise ValueError('Given app does not have a valid name %r' % name)
pending, connected = [], []
if name in self._appinfo:
old_app, pending, connected = self._appinfo[name]
if app.cls is not old_app.cls: # if app is not old_app:
logger.warning('Re-defining app class %r' % name)
self._appinfo[name] = app, pending, connected
def create_default_session(self, cls=None):
""" Create a default session for interactive use (e.g. the notebook).
"""
if '__default__' in self._appinfo:
raise RuntimeError('The default session can only be created once.')
if cls is None:
cls = JsComponent
        if not (isinstance(cls, type) and issubclass(cls, (PyComponent, JsComponent))):
raise TypeError('create_default_session() needs a JsComponent subclass.')
# Create app and register it by __default__ name
app = App(cls)
app.serve('__default__') # calls register_app()
# Create the session instance and register it
session = Session('__default__')
self._session_map[session.id] = session
_, pending, connected = self._appinfo['__default__']
pending.append(session)
# Instantiate the component
app(flx_session=session, flx_is_app=True)
return session
def remove_default_session(self):
""" Remove default session if there is one, closing the session.
"""
s = self.get_default_session()
if s is not None:
s.close()
self._appinfo.pop('__default__', None)
def get_default_session(self):
""" Get the default session that is used for interactive use.
Returns None unless create_default_session() was called earlier.
When a JsComponent class is created without a session, this method
is called to get one (and will then fail if it's None).
"""
x = self._appinfo.get('__default__', None)
if x is None:
return None
else:
_, pending, connected = x
sessions = pending + connected
if sessions:
return sessions[-1]
def _clear_old_pending_sessions(self, max_age=30):
try:
count = 0
for name in self._appinfo:
if name == '__default__':
continue
_, pending, _ = self._appinfo[name]
to_remove = [s for s in pending
if (time.time() - s._creation_time) > max_age]
for s in to_remove:
self._session_map.pop(s.id, None)
pending.remove(s)
count += len(to_remove)
if count:
logger.warning('Cleared %i old pending sessions' % count)
except Exception as err:
logger.error('Error when clearing old pending sessions: %s' % str(err))
def create_session(self, name, id=None, request=None):
""" Create a session for the app with the given name.
Instantiate an app and matching session object corresponding
to the given name, and return the session. The client should
be connected later via connect_client().
"""
# Called by the server when a client connects, and from the
# launch and export functions.
if time.time() - self._last_check_time > 5:
self._last_check_time = time.time()
self._clear_old_pending_sessions()
if name == '__default__':
raise RuntimeError('There can be only one __default__ session.')
elif name not in self._appinfo:
raise ValueError('Can only instantiate a session with a valid app name.')
app, pending, connected = self._appinfo[name]
# Create the session
session = Session(name, request=request)
if id is not None:
session._id = id # use custom id (export() used to use this)
self._session_map[session.id] = session
# Instantiate the component
# This represents the "instance" of the App object (Component class + args)
app(flx_session=session, flx_is_app=True)
# Now wait for the client to connect. The client will be served
# a page that contains the session_id. Upon connecting, the id
# will be communicated, so it connects to the correct session.
pending.append(session)
logger.debug('Instantiate app client %s' % session.app_name)
return session
def connect_client(self, ws, name, session_id, cookies=None):
""" Connect a client to a session that was previously created.
"""
_, pending, connected = self._appinfo[name]
# Search for the session with the specific id
for session in pending:
if session.id == session_id:
pending.remove(session)
break
else:
raise RuntimeError('Asked for session id %r, but could not find it' %
session_id)
# Add app to connected, set ws
assert session.id == session_id
assert session.status == Session.STATUS.PENDING
logger.info('New session %s %s' % (name, session_id))
session._set_cookies(cookies)
session._set_ws(ws)
connected.append(session)
AppManager.total_sessions += 1
self.connections_changed(session.app_name)
return session # For the ws
def disconnect_client(self, session):
""" Close a connection to a client.
This is called by the websocket when the connection is closed.
The manager will remove the session from the list of connected
instances.
"""
if session.app_name == '__default__':
logger.info('Default session lost connection to client.')
return # The default session awaits a re-connect
_, pending, connected = self._appinfo[session.app_name]
try:
connected.remove(session)
except ValueError:
pass
        logger.info('Session closed %s %s' % (session.app_name, session.id))
session.close()
self.connections_changed(session.app_name)
def has_app_name(self, name):
""" Returns the case-corrected name if the given name matches
        a registered application (case insensitive). Returns None if the
given name does not match any applications.
"""
name = name.lower()
for key in self._appinfo.keys():
if key.lower() == name:
return key
else:
return None
def get_app_names(self):
""" Get a list of registered application names.
"""
        return sorted(self._appinfo.keys())
def get_session_by_id(self, id):
""" Get session object by its id, or None.
"""
return self._session_map.get(id, None)
def get_connections(self, name):
""" Given an app name, return the connected session objects.
"""
_, pending, connected = self._appinfo[name]
return list(connected)
@event.emitter
def connections_changed(self, name):
""" Emits an event with the name of the app for which a
connection is added or removed.
"""
return dict(name=str(name))
# Create global app manager object
manager = AppManager()
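# Hedged usage sketch (commented out; ``MyWidget`` and the public import path
# are assumptions about the surrounding flexx package, not part of this file):
#
#     from flexx import app, ui
#
#     class MyWidget(ui.Widget):
#         def init(self):
#             ui.Button(text='hello world')
#
#     a = app.App(MyWidget, title='Demo')
#     a.serve('demo')                    # served at http://host:port/demo
#     # a.launch('browser')              # or open in a desktop/browser runtime
#     # a.export('~/demo.html', link=0)  # or dump one standalone document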
|
from scrapy_redis.spiders import RedisSpider
class MySpider(RedisSpider):
"""Spider that reads urls from redis queue (myspider:start_urls)."""
name = 'myspider_redis'
redis_key = 'myspider:start_urls'
def __init__(self, *args, **kwargs):
# Dynamically define the allowed domains list.
domain = kwargs.pop('domain', '')
        # list() so the domains survive repeated iteration on Python 3
        self.allowed_domains = list(filter(None, domain.split(',')))
super(MySpider, self).__init__(*args, **kwargs)
def parse(self, response):
return {
'name': response.css('title::text').extract_first(),
'url': response.url,
}
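# Hedged usage note: the spider blocks until ``redis_key`` receives URLs; seed
# it from redis-cli (shell command shown as a comment):
#
#     redis-cli lpush myspider:start_urls http://example.com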
|
from django.conf import settings
from weblate.machinery.base import (
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
class YandexTranslation(MachineTranslation):
"""Yandex machine translation support."""
name = "Yandex"
max_score = 90
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_YANDEX_KEY is None:
raise MissingConfiguration("Yandex Translate requires API key")
def check_failure(self, response):
if "code" not in response or response["code"] == 200:
return
if "message" in response:
raise MachineTranslationError(response["message"])
raise MachineTranslationError("Error: {}".format(response["code"]))
def download_languages(self):
"""Download list of supported languages from a service."""
response = self.request(
"get",
"https://translate.yandex.net/api/v1.5/tr.json/getLangs",
params={"key": settings.MT_YANDEX_KEY, "ui": "en"},
)
payload = response.json()
self.check_failure(payload)
return payload["langs"].keys()
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
response = self.request(
"get",
"https://translate.yandex.net/api/v1.5/tr.json/translate",
params={
"key": settings.MT_YANDEX_KEY,
"text": text,
"lang": f"{source}-{language}",
"target": language,
},
)
payload = response.json()
self.check_failure(payload)
for translation in payload["text"]:
yield {
"text": translation,
"quality": self.max_score,
"service": self.name,
"source": text,
}
def get_error_message(self, exc):
try:
return exc.response.json()["message"]
except Exception:
return super().get_error_message(exc)
|
from gi.repository import GObject
from meld.matchers.myers import (
DiffChunk,
MyersSequenceMatcher,
SyncPointMyersSequenceMatcher,
)
LO, HI = 1, 2
opcode_reverse = {
"replace": "replace",
"insert": "delete",
"delete": "insert",
"conflict": "conflict",
"equal": "equal"
}
def merged_chunk_order(merged_chunk):
if not merged_chunk:
return 0
chunk = merged_chunk[0] or merged_chunk[1]
return chunk.start_a
def reverse_chunk(chunk):
tag = opcode_reverse[chunk[0]]
return DiffChunk._make((tag, chunk[3], chunk[4], chunk[1], chunk[2]))
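# Hedged example: reversing swaps the two sides' line ranges and mirrors the
# opcode via opcode_reverse, e.g.
#   reverse_chunk(DiffChunk._make(('insert', 3, 3, 5, 8)))
#   -> DiffChunk(tag='delete', start_a=5, end_a=8, start_b=3, end_b=3)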
def consume_blank_lines(chunk, texts, pane1, pane2):
if chunk is None:
return None
def _find_blank_lines(txt, lo, hi):
while lo < hi and not txt[lo]:
lo += 1
while lo < hi and not txt[hi - 1]:
hi -= 1
return lo, hi
tag = chunk.tag
c1, c2 = _find_blank_lines(texts[pane1], chunk[1], chunk[2])
c3, c4 = _find_blank_lines(texts[pane2], chunk[3], chunk[4])
if c1 == c2 and c3 == c4:
return None
if c1 == c2 and tag == "replace":
tag = "insert"
elif c3 == c4 and tag == "replace":
tag = "delete"
return DiffChunk._make((tag, c1, c2, c3, c4))
class Differ(GObject.GObject):
"""Utility class to hold diff2 or diff3 chunks"""
__gsignals__ = {
'diffs-changed': (GObject.SignalFlags.RUN_FIRST, None,
(object,)),
}
_matcher = MyersSequenceMatcher
_sync_matcher = SyncPointMyersSequenceMatcher
def __init__(self):
# Internally, diffs are stored from text1 -> text0 and text1 -> text2.
super().__init__()
self.num_sequences = 0
self.seqlength = [0, 0, 0]
self.diffs = [[], []]
self.syncpoints = []
self.conflicts = []
self._old_merge_cache = set()
self._changed_chunks = tuple()
self._merge_cache = []
self._line_cache = [[], [], []]
self.ignore_blanks = False
self._initialised = False
self._has_mergeable_changes = (False, False, False, False)
def _update_merge_cache(self, texts):
if self.num_sequences == 3:
self._merge_cache = [c for c in self._merge_diffs(self.diffs[0],
self.diffs[1],
texts)]
else:
self._merge_cache = [(c, None) for c in self.diffs[0]]
if self.ignore_blanks:
# We don't handle altering the chunk-type of conflicts in three-way
# comparisons where e.g., pane 1 and 3 differ in blank lines
for i, c in enumerate(self._merge_cache):
self._merge_cache[i] = (consume_blank_lines(c[0], texts, 1, 0),
consume_blank_lines(c[1], texts, 1, 2))
self._merge_cache = [x for x in self._merge_cache if any(x)]
# Calculate chunks that were added (in the new but not the old merge
# cache), removed (in the old but not the new merge cache) and changed
# (where the edit actually occurred, *and* the chunk is still around).
# This information is used by the inline highlighting mechanism to
# avoid re-highlighting existing chunks.
removed_chunks = self._old_merge_cache - set(self._merge_cache)
added_chunks = set(self._merge_cache) - self._old_merge_cache
modified_chunks = self._changed_chunks
if modified_chunks in removed_chunks:
modified_chunks = tuple()
chunk_changes = (removed_chunks, added_chunks, modified_chunks)
mergeable0, mergeable1 = False, False
for (c0, c1) in self._merge_cache:
mergeable0 = mergeable0 or (c0 is not None and c0[0] != 'conflict')
mergeable1 = mergeable1 or (c1 is not None and c1[0] != 'conflict')
if mergeable0 and mergeable1:
break
self._has_mergeable_changes = (False, mergeable0, mergeable1, False)
# Conflicts can only occur when there are three panes, and will always
# involve the middle pane.
self.conflicts = []
for i, (c1, c2) in enumerate(self._merge_cache):
if (c1 is not None and c1[0] == 'conflict') or \
(c2 is not None and c2[0] == 'conflict'):
self.conflicts.append(i)
self._update_line_cache()
self.emit("diffs-changed", chunk_changes)
def _update_line_cache(self):
"""Cache a mapping from line index to per-pane chunk indices
This cache exists so that the UI can quickly query for current,
next and previous chunks when the current cursor line changes,
enabling better action sensitivity feedback.
"""
for i, l in enumerate(self.seqlength):
# seqlength + 1 for after-last-line requests, which we do
self._line_cache[i] = [(None, None, None)] * (l + 1)
last_chunk = len(self._merge_cache)
def find_next(diff, seq, current):
next_chunk = None
if seq == 1 and current + 1 < last_chunk:
next_chunk = current + 1
else:
for j in range(current + 1, last_chunk):
if self._merge_cache[j][diff] is not None:
next_chunk = j
break
return next_chunk
prev = [None, None, None]
next = [find_next(0, 0, -1), find_next(0, 1, -1), find_next(1, 2, -1)]
old_end = [0, 0, 0]
for i, c in enumerate(self._merge_cache):
seq_params = ((0, 0, 3, 4), (0, 1, 1, 2), (1, 2, 3, 4))
for (diff, seq, lo, hi) in seq_params:
if c[diff] is None:
if seq == 1:
diff = 1
else:
continue
start, end, last = c[diff][lo], c[diff][hi], old_end[seq]
if (start > last):
chunk_ids = [(None, prev[seq], next[seq])] * (start - last)
self._line_cache[seq][last:start] = chunk_ids
# For insert chunks, claim the subsequent line.
if start == end:
end += 1
next[seq] = find_next(diff, seq, i)
chunk_ids = [(i, prev[seq], next[seq])] * (end - start)
self._line_cache[seq][start:end] = chunk_ids
prev[seq], old_end[seq] = i, end
for seq in range(3):
last, end = old_end[seq], len(self._line_cache[seq])
if (last < end):
chunk_ids = [(None, prev[seq], next[seq])] * (end - last)
self._line_cache[seq][last:end] = chunk_ids
def change_sequence(self, sequence, startidx, sizechange, texts):
assert sequence in (0, 1, 2)
if sequence == 0 or sequence == 1:
self._change_sequence(0, sequence, startidx, sizechange, texts)
if sequence == 2 or (sequence == 1 and self.num_sequences == 3):
self._change_sequence(1, sequence, startidx, sizechange, texts)
self.seqlength[sequence] += sizechange
def offset(c, start, o1, o2):
"""Offset a chunk by o1/o2 if it's after the inserted lines"""
if c is None:
return None
start_a = c.start_a + (o1 if c.start_a > start else 0)
end_a = c.end_a + (o1 if c.end_a > start else 0)
start_b = c.start_b + (o2 if c.start_b > start else 0)
end_b = c.end_b + (o2 if c.end_b > start else 0)
return DiffChunk._make((c.tag, start_a, end_a, start_b, end_b))
# Calculate the expected differences in the chunk set if no cascading
# changes occur, making sure to not include the changed chunk itself
self._old_merge_cache = set()
self._changed_chunks = tuple()
chunk_changed = False
for (c1, c2) in self._merge_cache:
if sequence == 0:
if c1 and c1.start_b <= startidx < c1.end_b:
chunk_changed = True
c1 = offset(c1, startidx, 0, sizechange)
elif sequence == 2:
if c2 and c2.start_b <= startidx < c2.end_b:
chunk_changed = True
c2 = offset(c2, startidx, 0, sizechange)
else:
# Middle sequence changes alter both chunks
if c1 and c1.start_a <= startidx < c1.end_a:
chunk_changed = True
c1 = offset(c1, startidx, sizechange, 0)
if self.num_sequences == 3:
c2 = offset(c2, startidx, sizechange, 0)
if chunk_changed:
assert not self._changed_chunks
self._changed_chunks = (c1, c2)
chunk_changed = False
self._old_merge_cache.add((c1, c2))
self._update_merge_cache(texts)
def _locate_chunk(self, whichdiffs, sequence, line):
"""Find the index of the chunk which contains line."""
high_index = 2 + 2 * int(sequence != 1)
for i, c in enumerate(self.diffs[whichdiffs]):
if line < c[high_index]:
return i
return len(self.diffs[whichdiffs])
def has_chunk(self, to_pane, chunk):
"""Return whether the pane/chunk exists in the current Differ"""
sequence = 1 if to_pane == 2 else 0
chunk_index, _, _ = self.locate_chunk(1, chunk.start_a)
if chunk_index is None:
return False
return self._merge_cache[chunk_index][sequence] == chunk
def get_chunk(self, index, from_pane, to_pane=None):
"""Return the index-th change in from_pane
If to_pane is provided, then only changes between from_pane and to_pane
are considered, otherwise all changes starting at from_pane are used.
"""
sequence = int(from_pane == 2 or to_pane == 2)
chunk = self._merge_cache[index][sequence]
if from_pane in (0, 2):
if chunk is None:
return None
return reverse_chunk(chunk)
else:
if to_pane is None and chunk is None:
chunk = self._merge_cache[index][1]
return chunk
def get_chunk_starts(self, index):
"""Return the starting lines of all chunks at an index"""
chunks = self._merge_cache[index]
chunk_starts = [
chunks[0].start_b if chunks[0] else None,
chunks[0].start_a if chunks[0] else None,
chunks[1].start_b if chunks[1] else None,
]
return chunk_starts
def locate_chunk(self, pane, line):
"""Find the index of the chunk which contains line
Returns a tuple containing the current, previous and next chunk
indices in that order. If the line has no associated chunk,
None will be returned as the first element. If there are no
previous/next chunks then None will be returned as the
second/third elements.
"""
try:
return self._line_cache[pane][line]
except IndexError:
return (None, None, None)
def diff_count(self):
return len(self._merge_cache)
def has_mergeable_changes(self, which):
return self._has_mergeable_changes[which:which + 2]
def _change_sequence(self, which, sequence, startidx, sizechange, texts):
diffs = self.diffs[which]
lines_added = [0, 0, 0]
lines_added[sequence] = sizechange
loidx = self._locate_chunk(which, sequence, startidx)
if sizechange < 0:
hiidx = self._locate_chunk(which, sequence, startidx - sizechange)
else:
hiidx = loidx
if loidx > 0:
loidx -= 1
lorange = diffs[loidx][3], diffs[loidx][1]
else:
lorange = (0, 0)
x = which * 2
if hiidx < len(diffs):
hiidx += 1
hirange = diffs[hiidx - 1][4], diffs[hiidx - 1][2]
else:
hirange = self.seqlength[x], self.seqlength[1]
rangex = lorange[0], hirange[0] + lines_added[x]
range1 = lorange[1], hirange[1] + lines_added[1]
assert rangex[0] <= rangex[1] and range1[0] <= range1[1]
linesx = texts[x][rangex[0]:rangex[1]]
lines1 = texts[1][range1[0]:range1[1]]
def offset(c, o1, o2):
return DiffChunk._make((c[0], c[1] + o1, c[2] + o1,
c[3] + o2, c[4] + o2))
newdiffs = self._matcher(None, lines1, linesx).get_difference_opcodes()
newdiffs = [offset(c, range1[0], rangex[0]) for c in newdiffs]
if hiidx < len(self.diffs[which]):
offset_diffs = [offset(c, lines_added[1], lines_added[x])
for c in self.diffs[which][hiidx:]]
self.diffs[which][hiidx:] = offset_diffs
self.diffs[which][loidx:hiidx] = newdiffs
def _range_from_lines(self, textindex, lines):
lo_line, hi_line = lines
top_chunk = self.locate_chunk(textindex, lo_line)
start = top_chunk[0]
if start is None:
start = top_chunk[2]
bottom_chunk = self.locate_chunk(textindex, hi_line)
end = bottom_chunk[0]
if end is None:
end = bottom_chunk[1]
return start, end
def all_changes(self):
return iter(self._merge_cache)
def pair_changes(self, fromindex, toindex, lines=(None, None, None, None)):
"""Give all changes between file1 and either file0 or file2.
"""
if None not in lines:
start1, end1 = self._range_from_lines(fromindex, lines[0:2])
start2, end2 = self._range_from_lines(toindex, lines[2:4])
if (start1 is None or end1 is None) and \
(start2 is None or end2 is None):
return
start = min([x for x in (start1, start2) if x is not None])
end = max([x for x in (end1, end2) if x is not None])
merge_cache = self._merge_cache[start:end + 1]
else:
merge_cache = self._merge_cache
if fromindex == 1:
seq = toindex // 2
for c in merge_cache:
if c[seq]:
yield c[seq]
else:
seq = fromindex // 2
for c in merge_cache:
if c[seq]:
yield reverse_chunk(c[seq])
# FIXME: This is gratuitous copy-n-paste at this point
def paired_all_single_changes(self, fromindex, toindex):
if fromindex == 1:
seq = toindex // 2
for c in self._merge_cache:
if c[seq]:
yield c[seq]
else:
seq = fromindex // 2
for c in self._merge_cache:
if c[seq]:
yield reverse_chunk(c[seq])
def single_changes(self, textindex, lines=(None, None)):
"""Give changes for single file only. do not return 'equal' hunks.
"""
if None not in lines:
start, end = self._range_from_lines(textindex, lines)
if start is None or end is None:
return
merge_cache = self._merge_cache[start:end + 1]
else:
merge_cache = self._merge_cache
if textindex in (0, 2):
seq = textindex // 2
for cs in merge_cache:
if cs[seq]:
yield reverse_chunk(cs[seq])
else:
for cs in merge_cache:
yield cs[0] or cs[1]
def sequences_identical(self):
        # Ensure we don't report an uninitialised comparison as 'identical'
return self.diffs == [[], []] and self._initialised
def _merge_blocks(self, using):
lowc = min(using[0][0][LO], using[1][0][LO])
highc = max(using[0][-1][HI], using[1][-1][HI])
low = []
high = []
for i in (0, 1):
d = using[i][0]
low.append(lowc - d[LO] + d[2 + LO])
d = using[i][-1]
high.append(highc - d[HI] + d[2 + HI])
return low[0], high[0], lowc, highc, low[1], high[1]
def _auto_merge(self, using, texts):
"""Automatically merge two sequences of change blocks"""
l0, h0, l1, h1, l2, h2 = self._merge_blocks(using)
if h0 - l0 == h2 - l2 and texts[0][l0:h0] == texts[2][l2:h2]:
if l1 != h1 and l0 == h0:
tag = "delete"
elif l1 != h1:
tag = "replace"
else:
tag = "insert"
else:
tag = "conflict"
out0 = DiffChunk._make((tag, l1, h1, l0, h0))
out1 = DiffChunk._make((tag, l1, h1, l2, h2))
yield out0, out1
def _merge_diffs(self, seq0, seq1, texts):
seq0, seq1 = seq0[:], seq1[:]
seq = seq0, seq1
while len(seq0) or len(seq1):
if not seq0:
high_seq = 1
elif not seq1:
high_seq = 0
else:
high_seq = int(seq0[0].start_a > seq1[0].start_a)
if seq0[0].start_a == seq1[0].start_a:
if seq0[0].tag == "insert":
high_seq = 0
elif seq1[0].tag == "insert":
high_seq = 1
high_diff = seq[high_seq].pop(0)
high_mark = high_diff.end_a
other_seq = 0 if high_seq == 1 else 1
using = [[], []]
using[high_seq].append(high_diff)
while seq[other_seq]:
other_diff = seq[other_seq][0]
if high_mark < other_diff.start_a:
break
if high_mark == other_diff.start_a and \
not (high_diff.tag == other_diff.tag == "insert"):
break
using[other_seq].append(other_diff)
seq[other_seq].pop(0)
if high_mark < other_diff.end_a:
high_seq, other_seq = other_seq, high_seq
high_mark = other_diff.end_a
if len(using[0]) == 0:
assert len(using[1]) == 1
yield None, using[1][0]
elif len(using[1]) == 0:
assert len(using[0]) == 1
yield using[0][0], None
else:
for c in self._auto_merge(using, texts):
yield c
def set_sequences_iter(self, sequences):
assert 0 <= len(sequences) <= 3
self.diffs = [[], []]
self.num_sequences = len(sequences)
self.seqlength = [len(s) for s in sequences]
for i in range(self.num_sequences - 1):
if self.syncpoints:
syncpoints = [(s[i][0](), s[i][1]()) for s in self.syncpoints]
matcher = self._sync_matcher(None,
sequences[1], sequences[i * 2],
syncpoints=syncpoints)
else:
matcher = self._matcher(None, sequences[1], sequences[i * 2])
work = matcher.initialise()
while next(work) is None:
yield None
self.diffs[i] = matcher.get_difference_opcodes()
self._initialised = True
self._update_merge_cache(sequences)
yield 1
def clear(self):
self.diffs = [[], []]
self.seqlength = [0] * self.num_sequences
self._initialised = False
self._old_merge_cache = set()
self._update_merge_cache([""] * self.num_sequences)
|
from typing import Any
from unittest.mock import patch
import arrow
import pytz
from withings_api.common import (
GetSleepSummaryData,
GetSleepSummarySerie,
MeasureGetMeasGroup,
MeasureGetMeasGroupAttrib,
MeasureGetMeasGroupCategory,
MeasureGetMeasMeasure,
MeasureGetMeasResponse,
MeasureType,
NotifyAppli,
SleepGetSummaryResponse,
SleepModel,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.withings.common import (
WITHINGS_MEASUREMENTS_MAP,
WithingsAttribute,
async_get_entity_id,
get_platform_attributes,
)
from homeassistant.components.withings.const import Measurement
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.entity_registry import EntityRegistry
from .common import ComponentFactory, new_profile_config
PERSON0 = new_profile_config(
"person0",
0,
api_response_measure_get_meas=MeasureGetMeasResponse(
measuregrps=(
MeasureGetMeasGroup(
attrib=MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER,
category=MeasureGetMeasGroupCategory.REAL,
created=arrow.utcnow().shift(hours=-1),
date=arrow.utcnow().shift(hours=-1),
deviceid="DEV_ID",
grpid=1,
measures=(
MeasureGetMeasMeasure(type=MeasureType.WEIGHT, unit=0, value=70),
MeasureGetMeasMeasure(
type=MeasureType.FAT_MASS_WEIGHT, unit=0, value=5
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_FREE_MASS, unit=0, value=60
),
MeasureGetMeasMeasure(
type=MeasureType.MUSCLE_MASS, unit=0, value=50
),
MeasureGetMeasMeasure(type=MeasureType.BONE_MASS, unit=0, value=10),
MeasureGetMeasMeasure(type=MeasureType.HEIGHT, unit=0, value=2),
MeasureGetMeasMeasure(
type=MeasureType.TEMPERATURE, unit=0, value=40
),
MeasureGetMeasMeasure(
type=MeasureType.BODY_TEMPERATURE, unit=0, value=40
),
MeasureGetMeasMeasure(
type=MeasureType.SKIN_TEMPERATURE, unit=0, value=20
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_RATIO, unit=-3, value=70
),
MeasureGetMeasMeasure(
type=MeasureType.DIASTOLIC_BLOOD_PRESSURE, unit=0, value=70
),
MeasureGetMeasMeasure(
type=MeasureType.SYSTOLIC_BLOOD_PRESSURE, unit=0, value=100
),
MeasureGetMeasMeasure(
type=MeasureType.HEART_RATE, unit=0, value=60
),
MeasureGetMeasMeasure(type=MeasureType.SP02, unit=-2, value=95),
MeasureGetMeasMeasure(
type=MeasureType.HYDRATION, unit=-2, value=95
),
MeasureGetMeasMeasure(
type=MeasureType.PULSE_WAVE_VELOCITY, unit=0, value=100
),
),
),
MeasureGetMeasGroup(
attrib=MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER,
category=MeasureGetMeasGroupCategory.REAL,
created=arrow.utcnow().shift(hours=-2),
date=arrow.utcnow().shift(hours=-2),
deviceid="DEV_ID",
grpid=1,
measures=(
MeasureGetMeasMeasure(type=MeasureType.WEIGHT, unit=0, value=71),
MeasureGetMeasMeasure(
type=MeasureType.FAT_MASS_WEIGHT, unit=0, value=51
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_FREE_MASS, unit=0, value=61
),
MeasureGetMeasMeasure(
type=MeasureType.MUSCLE_MASS, unit=0, value=51
),
MeasureGetMeasMeasure(type=MeasureType.BONE_MASS, unit=0, value=11),
MeasureGetMeasMeasure(type=MeasureType.HEIGHT, unit=0, value=21),
MeasureGetMeasMeasure(
type=MeasureType.TEMPERATURE, unit=0, value=41
),
MeasureGetMeasMeasure(
type=MeasureType.BODY_TEMPERATURE, unit=0, value=41
),
MeasureGetMeasMeasure(
type=MeasureType.SKIN_TEMPERATURE, unit=0, value=21
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_RATIO, unit=-3, value=71
),
MeasureGetMeasMeasure(
type=MeasureType.DIASTOLIC_BLOOD_PRESSURE, unit=0, value=71
),
MeasureGetMeasMeasure(
type=MeasureType.SYSTOLIC_BLOOD_PRESSURE, unit=0, value=101
),
MeasureGetMeasMeasure(
type=MeasureType.HEART_RATE, unit=0, value=61
),
MeasureGetMeasMeasure(type=MeasureType.SP02, unit=-2, value=96),
MeasureGetMeasMeasure(
type=MeasureType.HYDRATION, unit=-2, value=96
),
MeasureGetMeasMeasure(
type=MeasureType.PULSE_WAVE_VELOCITY, unit=0, value=101
),
),
),
MeasureGetMeasGroup(
attrib=MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER_AMBIGUOUS,
category=MeasureGetMeasGroupCategory.REAL,
created=arrow.utcnow(),
date=arrow.utcnow(),
deviceid="DEV_ID",
grpid=1,
measures=(
MeasureGetMeasMeasure(type=MeasureType.WEIGHT, unit=0, value=71),
MeasureGetMeasMeasure(
type=MeasureType.FAT_MASS_WEIGHT, unit=0, value=4
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_FREE_MASS, unit=0, value=40
),
MeasureGetMeasMeasure(
type=MeasureType.MUSCLE_MASS, unit=0, value=51
),
MeasureGetMeasMeasure(type=MeasureType.BONE_MASS, unit=0, value=11),
MeasureGetMeasMeasure(type=MeasureType.HEIGHT, unit=0, value=201),
MeasureGetMeasMeasure(
type=MeasureType.TEMPERATURE, unit=0, value=41
),
MeasureGetMeasMeasure(
type=MeasureType.BODY_TEMPERATURE, unit=0, value=34
),
MeasureGetMeasMeasure(
type=MeasureType.SKIN_TEMPERATURE, unit=0, value=21
),
MeasureGetMeasMeasure(
type=MeasureType.FAT_RATIO, unit=-3, value=71
),
MeasureGetMeasMeasure(
type=MeasureType.DIASTOLIC_BLOOD_PRESSURE, unit=0, value=71
),
MeasureGetMeasMeasure(
type=MeasureType.SYSTOLIC_BLOOD_PRESSURE, unit=0, value=101
),
MeasureGetMeasMeasure(
type=MeasureType.HEART_RATE, unit=0, value=61
),
MeasureGetMeasMeasure(type=MeasureType.SP02, unit=-2, value=98),
MeasureGetMeasMeasure(
type=MeasureType.HYDRATION, unit=-2, value=96
),
MeasureGetMeasMeasure(
type=MeasureType.PULSE_WAVE_VELOCITY, unit=0, value=102
),
),
),
),
more=False,
timezone=pytz.UTC,
updatetime=arrow.get("2019-08-01"),
offset=0,
),
api_response_sleep_get_summary=SleepGetSummaryResponse(
more=False,
offset=0,
series=(
GetSleepSummarySerie(
timezone=pytz.UTC,
model=SleepModel.SLEEP_MONITOR,
startdate=arrow.get("2019-02-01"),
enddate=arrow.get("2019-02-01"),
date=arrow.get("2019-02-01"),
modified=arrow.get(12345),
data=GetSleepSummaryData(
breathing_disturbances_intensity=110,
deepsleepduration=111,
durationtosleep=112,
durationtowakeup=113,
hr_average=114,
hr_max=115,
hr_min=116,
lightsleepduration=117,
remsleepduration=118,
rr_average=119,
rr_max=120,
rr_min=121,
sleep_score=122,
snoring=123,
snoringepisodecount=124,
wakeupcount=125,
wakeupduration=126,
),
),
GetSleepSummarySerie(
timezone=pytz.UTC,
model=SleepModel.SLEEP_MONITOR,
startdate=arrow.get("2019-02-01"),
enddate=arrow.get("2019-02-01"),
date=arrow.get("2019-02-01"),
modified=arrow.get(12345),
data=GetSleepSummaryData(
breathing_disturbances_intensity=210,
deepsleepduration=211,
durationtosleep=212,
durationtowakeup=213,
hr_average=214,
hr_max=215,
hr_min=216,
lightsleepduration=217,
remsleepduration=218,
rr_average=219,
rr_max=220,
rr_min=221,
sleep_score=222,
snoring=223,
snoringepisodecount=224,
wakeupcount=225,
wakeupduration=226,
),
),
),
),
)
EXPECTED_DATA = (
(PERSON0, Measurement.WEIGHT_KG, 70.0),
(PERSON0, Measurement.FAT_MASS_KG, 5.0),
(PERSON0, Measurement.FAT_FREE_MASS_KG, 60.0),
(PERSON0, Measurement.MUSCLE_MASS_KG, 50.0),
(PERSON0, Measurement.BONE_MASS_KG, 10.0),
(PERSON0, Measurement.HEIGHT_M, 2.0),
(PERSON0, Measurement.FAT_RATIO_PCT, 0.07),
(PERSON0, Measurement.DIASTOLIC_MMHG, 70.0),
(PERSON0, Measurement.SYSTOLIC_MMGH, 100.0),
(PERSON0, Measurement.HEART_PULSE_BPM, 60.0),
(PERSON0, Measurement.SPO2_PCT, 0.95),
(PERSON0, Measurement.HYDRATION, 0.95),
(PERSON0, Measurement.PWV, 100.0),
(PERSON0, Measurement.SLEEP_BREATHING_DISTURBANCES_INTENSITY, 160.0),
(PERSON0, Measurement.SLEEP_DEEP_DURATION_SECONDS, 322),
(PERSON0, Measurement.SLEEP_HEART_RATE_AVERAGE, 164.0),
(PERSON0, Measurement.SLEEP_HEART_RATE_MAX, 165.0),
(PERSON0, Measurement.SLEEP_HEART_RATE_MIN, 166.0),
(PERSON0, Measurement.SLEEP_LIGHT_DURATION_SECONDS, 334),
(PERSON0, Measurement.SLEEP_REM_DURATION_SECONDS, 336),
(PERSON0, Measurement.SLEEP_RESPIRATORY_RATE_AVERAGE, 169.0),
(PERSON0, Measurement.SLEEP_RESPIRATORY_RATE_MAX, 170.0),
(PERSON0, Measurement.SLEEP_RESPIRATORY_RATE_MIN, 171.0),
(PERSON0, Measurement.SLEEP_SCORE, 222),
(PERSON0, Measurement.SLEEP_SNORING, 173.0),
(PERSON0, Measurement.SLEEP_SNORING_EPISODE_COUNT, 348),
(PERSON0, Measurement.SLEEP_TOSLEEP_DURATION_SECONDS, 162.0),
(PERSON0, Measurement.SLEEP_TOWAKEUP_DURATION_SECONDS, 163.0),
(PERSON0, Measurement.SLEEP_WAKEUP_COUNT, 350),
(PERSON0, Measurement.SLEEP_WAKEUP_DURATION_SECONDS, 176.0),
)
def async_assert_state_equals(
entity_id: str, state_obj: State, expected: Any, attribute: WithingsAttribute
) -> None:
"""Assert at given state matches what is expected."""
assert state_obj, f"Expected entity {entity_id} to exist but it did not"
assert state_obj.state == str(expected), (
f"Expected {expected} but was {state_obj.state} "
f"for measure {attribute.measurement}, {entity_id}"
)
async def test_sensor_default_enabled_entities(
hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
"""Test entities enabled by default."""
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
await component_factory.configure_component(profile_configs=(PERSON0,))
# Assert entities should not exist yet.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
assert not await async_get_entity_id(hass, attribute, PERSON0.user_id)
# person 0
await component_factory.setup_profile(PERSON0.user_id)
# Assert entities should exist.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id)
assert entity_id
assert entity_registry.async_is_registered(entity_id)
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP)
assert resp.message_code == 0
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT)
assert resp.message_code == 0
for person, measurement, expected in EXPECTED_DATA:
attribute = WITHINGS_MEASUREMENTS_MAP[measurement]
entity_id = await async_get_entity_id(hass, attribute, person.user_id)
state_obj = hass.states.get(entity_id)
if attribute.enabled_by_default:
async_assert_state_equals(entity_id, state_obj, expected, attribute)
else:
assert state_obj is None
# Unload
await component_factory.unload(PERSON0)
async def test_all_entities(
hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
"""Test all entities."""
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
with patch(
"homeassistant.components.withings.sensor.BaseWithingsSensor.entity_registry_enabled_default"
) as enabled_by_default_mock:
enabled_by_default_mock.return_value = True
await component_factory.configure_component(profile_configs=(PERSON0,))
# Assert entities should not exist yet.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
assert not await async_get_entity_id(hass, attribute, PERSON0.user_id)
# person 0
await component_factory.setup_profile(PERSON0.user_id)
# Assert entities should exist.
for attribute in get_platform_attributes(SENSOR_DOMAIN):
entity_id = await async_get_entity_id(hass, attribute, PERSON0.user_id)
assert entity_id
assert entity_registry.async_is_registered(entity_id)
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.SLEEP)
assert resp.message_code == 0
resp = await component_factory.call_webhook(PERSON0.user_id, NotifyAppli.WEIGHT)
assert resp.message_code == 0
for person, measurement, expected in EXPECTED_DATA:
attribute = WITHINGS_MEASUREMENTS_MAP[measurement]
entity_id = await async_get_entity_id(hass, attribute, person.user_id)
state_obj = hass.states.get(entity_id)
async_assert_state_equals(entity_id, state_obj, expected, attribute)
# Unload
await component_factory.unload(PERSON0)
|
import pytest
from homeassistant.const import STATE_UNKNOWN
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, mock_open, patch
from tests.common import mock_registry
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_value(hass, entity_reg):
"""Test the File sensor."""
config = {
"sensor": {"platform": "file", "name": "file1", "file_path": "mock.file1"}
}
m_open = mock_open(read_data="43\n45\n21")
with patch(
"homeassistant.components.file.sensor.open", m_open, create=True
), patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file1")
assert state.state == "21"
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_value_template(hass, entity_reg):
"""Test the File sensor with JSON entries."""
config = {
"sensor": {
"platform": "file",
"name": "file2",
"file_path": "mock.file2",
"value_template": "{{ value_json.temperature }}",
}
}
data = '{"temperature": 29, "humidity": 31}\n' '{"temperature": 26, "humidity": 36}'
m_open = mock_open(read_data=data)
with patch(
"homeassistant.components.file.sensor.open", m_open, create=True
), patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file2")
assert state.state == "26"
@patch("os.path.isfile", Mock(return_value=True))
@patch("os.access", Mock(return_value=True))
async def test_file_empty(hass, entity_reg):
"""Test the File sensor with an empty file."""
config = {"sensor": {"platform": "file", "name": "file3", "file_path": "mock.file"}}
m_open = mock_open(read_data="")
with patch(
"homeassistant.components.file.sensor.open", m_open, create=True
), patch.object(hass.config, "is_allowed_path", return_value=True):
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.file3")
assert state.state == STATE_UNKNOWN
|
from functools import wraps
from flask import request, redirect, url_for
from app.utils import RequestUtil, JsonUtil
# type should be in ['page', 'api']
def login_required(type='api'):
    '''Require a logged-in user; 'page' redirects to login, 'api' returns a JSON error.'''
def _login_required(function):
@wraps(function)
def decorated_function(*args, **kwargs):
if not RequestUtil.get_login_user():
if type == 'page':
return redirect(url_for('login', next=request.url))
else:
return JsonUtil.object_2_json({
'success': 0,
                    'data': 'this interface requires login'
})
return function(*args, **kwargs)
return decorated_function
return _login_required
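# Hedged usage sketch (commented out; ``app`` and the 'login' endpoint are
# assumed to exist elsewhere in this project):
#
#     @app.route('/dashboard')
#     @login_required(type='page')   # unauthenticated users get redirected
#     def dashboard():
#         return 'only for logged-in users'
#
#     @app.route('/api/data')
#     @login_required(type='api')    # unauthenticated callers get a JSON error
#     def api_data():
#         return JsonUtil.object_2_json({'success': 1, 'data': []})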
|
from __future__ import division
import numpy as np
from chainercv.datasets import CamVidDataset
n_class = 11
dataset = CamVidDataset(split='train')
n_cls_pixels = np.zeros((n_class,))
n_img_pixels = np.zeros((n_class,))
for img, label in dataset:
for cls_i in np.unique(label):
if cls_i == -1:
continue
n_cls_pixels[cls_i] += np.sum(label == cls_i)
n_img_pixels[cls_i] += label.size
freq = n_cls_pixels / n_img_pixels
median_freq = np.median(freq)
np.save('class_weight', median_freq / freq)
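# Hedged follow-up sketch: consuming the weights saved above in a weighted
# softmax loss. The dummy logits/labels are illustrative only; np.save above
# writes 'class_weight.npy'.
import chainer.functions as F

class_weight = np.load('class_weight.npy').astype(np.float32)
logits = np.random.randn(1, n_class, 8, 8).astype(np.float32)
labels = np.random.randint(0, n_class, size=(1, 8, 8)).astype(np.int32)
loss = F.softmax_cross_entropy(logits, labels, class_weight=class_weight)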
|
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.vacuum import (
DOMAIN,
STATE_CLEANING,
STATE_DOCKED,
STATE_RETURNING,
)
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a vacuum."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_cleaning",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_docked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions)
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set("vacuum.entity", STATE_DOCKED)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "vacuum.entity",
"type": "is_cleaning",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_cleaning - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "vacuum.entity",
"type": "is_docked",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_docked - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_docked - event - test_event2"
hass.states.async_set("vacuum.entity", STATE_CLEANING)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_cleaning - event - test_event1"
# Returning means it's still cleaning
hass.states.async_set("vacuum.entity", STATE_RETURNING)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 3
assert calls[2].data["some"] == "is_cleaning - event - test_event1"
|
import sys
import cherrypy
from cherrypy.process import plugins, servers
from cherrypy import Application
def start(configfiles=None, daemonize=False, environment=None,
fastcgi=False, scgi=False, pidfile=None, imports=None,
cgi=False):
"""Subscribe all engine plugins and start the engine."""
sys.path = [''] + sys.path
for i in imports or []:
exec('import %s' % i)
for c in configfiles or []:
cherrypy.config.update(c)
# If there's only one app mounted, merge config into it.
if len(cherrypy.tree.apps) == 1:
for app in cherrypy.tree.apps.values():
if isinstance(app, Application):
app.merge(c)
engine = cherrypy.engine
if environment is not None:
cherrypy.config.update({'environment': environment})
# Only daemonize if asked to.
if daemonize:
        # Don't print anything to stdout/stderr.
cherrypy.config.update({'log.screen': False})
plugins.Daemonizer(engine).subscribe()
if pidfile:
plugins.PIDFile(engine, pidfile).subscribe()
if hasattr(engine, 'signal_handler'):
engine.signal_handler.subscribe()
if hasattr(engine, 'console_control_handler'):
engine.console_control_handler.subscribe()
if (fastcgi and (scgi or cgi)) or (scgi and cgi):
cherrypy.log.error('You may only specify one of the cgi, fastcgi, and '
'scgi options.', 'ENGINE')
sys.exit(1)
elif fastcgi or scgi or cgi:
# Turn off autoreload when using *cgi.
cherrypy.config.update({'engine.autoreload.on': False})
# Turn off the default HTTP server (which is subscribed by default).
cherrypy.server.unsubscribe()
addr = cherrypy.server.bind_addr
cls = (
servers.FlupFCGIServer if fastcgi else
servers.FlupSCGIServer if scgi else
servers.FlupCGIServer
)
f = cls(application=cherrypy.tree, bindAddress=addr)
s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr)
s.subscribe()
# Always start the engine; this will start all other services
try:
engine.start()
except Exception:
# Assume the error has been logged already via bus.log.
sys.exit(1)
else:
engine.block()
def run():
"""Run cherryd CLI."""
from optparse import OptionParser
p = OptionParser()
p.add_option('-c', '--config', action='append', dest='config',
help='specify config file(s)')
p.add_option('-d', action='store_true', dest='daemonize',
help='run the server as a daemon')
p.add_option('-e', '--environment', dest='environment', default=None,
help='apply the given config environment')
p.add_option('-f', action='store_true', dest='fastcgi',
help='start a fastcgi server instead of the default HTTP '
'server')
p.add_option('-s', action='store_true', dest='scgi',
help='start a scgi server instead of the default HTTP server')
p.add_option('-x', action='store_true', dest='cgi',
help='start a cgi server instead of the default HTTP server')
p.add_option('-i', '--import', action='append', dest='imports',
help='specify modules to import')
p.add_option('-p', '--pidfile', dest='pidfile', default=None,
help='store the process id in the given file')
p.add_option('-P', '--Path', action='append', dest='Path',
help='add the given paths to sys.path')
options, args = p.parse_args()
if options.Path:
for p in options.Path:
sys.path.insert(0, p)
start(options.config, options.daemonize,
options.environment, options.fastcgi, options.scgi,
options.pidfile, options.imports, options.cgi)
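# Usage sketch (hedged): with this module exposed as the ``cherryd`` console
# script, the options above translate into invocations like the following;
# the file names are hypothetical.
#
#   cherryd -c prod.conf -d -p /var/run/cherryd.pid   # daemonize with a pidfile
#   cherryd -c app.conf -e production -i myapp.web    # import a module, set env
#   cherryd -c app.conf -f                            # serve over FastCGI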
|
import logging
import os
from batinfo import Batteries
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_NAME, CONF_NAME, DEVICE_CLASS_BATTERY, PERCENTAGE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_PATH = "path"
ATTR_ALARM = "alarm"
ATTR_CAPACITY = "capacity"
ATTR_CAPACITY_LEVEL = "capacity_level"
ATTR_CYCLE_COUNT = "cycle_count"
ATTR_ENERGY_FULL = "energy_full"
ATTR_ENERGY_FULL_DESIGN = "energy_full_design"
ATTR_ENERGY_NOW = "energy_now"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MODEL_NAME = "model_name"
ATTR_POWER_NOW = "power_now"
ATTR_SERIAL_NUMBER = "serial_number"
ATTR_STATUS = "status"
ATTR_VOLTAGE_MIN_DESIGN = "voltage_min_design"
ATTR_VOLTAGE_NOW = "voltage_now"
ATTR_HEALTH = "health"
ATTR_STATUS = "status"
CONF_BATTERY = "battery"
CONF_SYSTEM = "system"
DEFAULT_BATTERY = 1
DEFAULT_NAME = "Battery"
DEFAULT_PATH = "/sys/class/power_supply"
DEFAULT_SYSTEM = "linux"
SYSTEMS = ["android", "linux"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_BATTERY, default=DEFAULT_BATTERY): cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYSTEM, default=DEFAULT_SYSTEM): vol.In(SYSTEMS),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Linux Battery sensor."""
name = config.get(CONF_NAME)
battery_id = config.get(CONF_BATTERY)
system = config.get(CONF_SYSTEM)
try:
if system == "android":
os.listdir(os.path.join(DEFAULT_PATH, "battery"))
else:
os.listdir(os.path.join(DEFAULT_PATH, f"BAT{battery_id}"))
except FileNotFoundError:
_LOGGER.error("No battery found")
return False
add_entities([LinuxBatterySensor(name, battery_id, system)], True)
class LinuxBatterySensor(Entity):
"""Representation of a Linux Battery sensor."""
def __init__(self, name, battery_id, system):
"""Initialize the battery sensor."""
self._battery = Batteries()
self._name = name
self._battery_stat = None
self._battery_id = battery_id - 1
self._system = system
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def state(self):
"""Return the state of the sensor."""
return self._battery_stat.capacity
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return PERCENTAGE
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if self._system == "android":
return {
ATTR_NAME: self._battery_stat.name,
ATTR_PATH: self._battery_stat.path,
ATTR_HEALTH: self._battery_stat.health,
ATTR_STATUS: self._battery_stat.status,
}
return {
ATTR_NAME: self._battery_stat.name,
ATTR_PATH: self._battery_stat.path,
ATTR_ALARM: self._battery_stat.alarm,
ATTR_CAPACITY_LEVEL: self._battery_stat.capacity_level,
ATTR_CYCLE_COUNT: self._battery_stat.cycle_count,
ATTR_ENERGY_FULL: self._battery_stat.energy_full,
ATTR_ENERGY_FULL_DESIGN: self._battery_stat.energy_full_design,
ATTR_ENERGY_NOW: self._battery_stat.energy_now,
ATTR_MANUFACTURER: self._battery_stat.manufacturer,
ATTR_MODEL_NAME: self._battery_stat.model_name,
ATTR_POWER_NOW: self._battery_stat.power_now,
ATTR_SERIAL_NUMBER: self._battery_stat.serial_number,
ATTR_STATUS: self._battery_stat.status,
ATTR_VOLTAGE_MIN_DESIGN: self._battery_stat.voltage_min_design,
ATTR_VOLTAGE_NOW: self._battery_stat.voltage_now,
}
def update(self):
"""Get the latest data and updates the states."""
self._battery.update()
self._battery_stat = self._battery.stat[self._battery_id]
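# Example configuration.yaml entry (a sketch; the keys mirror PLATFORM_SCHEMA
# above and assume this platform is registered as ``linux_battery``):
#
#   sensor:
#     - platform: linux_battery
#       name: Laptop battery
#       battery: 1
#       system: linux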
|
import pytest
@pytest.mark.parametrize('txt, expected', [
    # blank stays blank
(b'', b''),
# one line with spaces
(b' ', b' '),
    # two-line inputs
(b'\n', b''),
(b'\n ', b' '),
(b' \n', b' '),
(b' \n ', b' \n '),
    # three-line inputs
(b'\n\n', b''),
(b'\n\n ', b' '),
(b'\n \n', b' '),
(b'\n \n ', b' \n '),
(b' \n \n ', b' \n \n '),
# one line with space and content
(b' content', b' content'),
# empty line between content
(b'content\n\ncontent', b'content\ncontent'),
# multiple leading and trailing newlines
(b'\n\n\ncontent\n\n\ncontent\n\n\n', b'content\ncontent'),
])
def test_remove_blank_lines(txt, expected):
from meld.dirdiff import remove_blank_lines
result = remove_blank_lines(txt)
assert result == expected
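# For reference, a minimal implementation satisfying every case above could
# look like this (a sketch only; the real function lives in meld.dirdiff):
#
#   def remove_blank_lines(txt):
#       return b'\n'.join(filter(None, txt.split(b'\n')))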
|
from decimal import Decimal, InvalidOperation
from django.conf import settings
from django.utils.formats import get_format
from django.utils.translation import get_language
from cms.utils.helpers import classproperty
from shop.conf import app_settings
from shop.money.iso4217 import CURRENCIES
class AbstractMoney(Decimal):
MONEY_FORMAT = app_settings.MONEY_FORMAT
def __new__(cls, value):
raise TypeError("Can not instantiate {} as AbstractMoney.".format(value))
def _get_format_values(self):
return {
'code': self._currency_code,
'symbol': self._currency[2],
'currency': self._currency[3],
'minus': '',
}
def __str__(self):
"""
Renders the price localized and formatted in its current currency.
"""
vals = self._get_format_values()
if self.is_nan():
return self.MONEY_FORMAT.format(amount='–', **vals)
try:
vals.update(amount=Decimal.__str__(Decimal.quantize(self, self._cents)))
except InvalidOperation:
raise ValueError("Can not represent {} as Money type.".format(self.__repr__()))
return '{:f}'.format(self)
def __repr__(self):
value = Decimal.__str__(self)
return "{}('{}')".format(self.__class__.__name__, value)
def __reduce__(self):
"""Required for pickling MoneyInCUR type"""
return _make_money, (self._currency_code, Decimal.__str__(self))
def __format__(self, specifier, context=None, _localeconv=None):
vals = self._get_format_values()
if self.is_nan():
            amount = '–'  # en dash, rendered for NaN amounts
elif specifier in ('', 'f',):
amount = Decimal.quantize(self, self._cents).__format__(specifier)
else:
amount = Decimal.__format__(self, specifier)
if settings.USE_L10N:
lang, use_l10n = get_language(), True
else:
lang, use_l10n = None, False
# handle grouping
use_grouping = settings.USE_L10N and settings.USE_THOUSAND_SEPARATOR
decimal_sep = get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n)
grouping = get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n)
thousand_sep = get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n)
# minus sign for negative amounts
if amount[0] == '-':
vals.update(minus='-')
amount = amount[1:]
# decimal part
if '.' in amount:
int_part, dec_part = amount.split('.')
else:
int_part, dec_part = amount, ''
if dec_part:
dec_part = decimal_sep + dec_part
# grouping
if use_grouping and grouping > 0:
int_part_gd = ''
for cnt, digit in enumerate(int_part[::-1]):
if cnt and not cnt % grouping:
int_part_gd += thousand_sep[::-1]
int_part_gd += digit
int_part = int_part_gd[::-1]
# recombine parts
vals.update(amount=int_part + dec_part)
return self.MONEY_FORMAT.format(**vals)
def __add__(self, other, context=None):
other = self._assert_addable(other)
amount = Decimal.__add__(self, other) if not self.is_nan() else other
return self.__class__(amount)
def __radd__(self, other, context=None):
return self.__add__(other, context)
def __sub__(self, other, context=None):
other = self._assert_addable(other)
# self - other is computed as self + other.copy_negate()
amount = Decimal.__add__(self, other.copy_negate())
return self.__class__(amount)
def __rsub__(self, other, context=None):
raise ValueError("Can not substract money from something else.")
def __neg__(self, context=None):
amount = Decimal.__neg__(self)
return self.__class__(amount)
def __mul__(self, other, context=None):
if other is None:
return self.__class__('NaN')
other = self._assert_multipliable(other)
amount = Decimal.__mul__(self, other)
return self.__class__(amount)
def __rmul__(self, other, context=None):
return self.__mul__(other, context)
def __div__(self, other, context=None):
other = self._assert_dividable(other)
amount = Decimal.__div__(self, other)
return self.__class__(amount)
def __rdiv__(self, other, context=None):
raise ValueError("Can not divide through a currency.")
def __truediv__(self, other, context=None):
other = self._assert_dividable(other)
amount = Decimal.__truediv__(self, other)
return self.__class__(amount)
def __rtruediv__(self, other, context=None):
raise ValueError("Can not divide through a currency.")
def __pow__(self, other, context=None):
raise ValueError("Can not raise currencies to their power.")
def __float__(self):
"""Float representation."""
if self.is_nan():
if self.is_snan():
raise ValueError("Cannot convert signaling NaN to float")
s = '-nan' if self.is_signed() else 'nan'
else:
s = Decimal.__str__(self)
return float(s)
def __eq__(self, other, context=None):
other = self._assert_addable(other)
return Decimal.__eq__(self.as_decimal(), other.as_decimal())
def __lt__(self, other, context=None):
other = self._assert_addable(other)
if self.is_nan():
return Decimal().__lt__(other)
return Decimal.__lt__(self.as_decimal(), other.as_decimal())
def __le__(self, other, context=None):
other = self._assert_addable(other)
if self.is_nan():
return Decimal().__le__(other)
return Decimal.__le__(self.as_decimal(), other.as_decimal())
def __gt__(self, other, context=None):
other = self._assert_addable(other)
if self.is_nan():
return Decimal().__gt__(other)
return Decimal.__gt__(self.as_decimal(), other.as_decimal())
def __ge__(self, other, context=None):
other = self._assert_addable(other)
if self.is_nan():
return Decimal().__ge__(other)
return Decimal.__ge__(self.as_decimal(), other.as_decimal())
    def __deepcopy__(self, memo):
        # copy the amount itself, not the quantization template
        return self.__class__(Decimal.__str__(self))
def __bool__(self):
return Decimal.__bool__(self) and not self.is_nan()
@classproperty
def currency(cls):
"""
Return the currency in ISO-4217
"""
return cls._currency_code
def as_decimal(self):
"""
Return the amount as decimal quantized to its subunits.
This representation often is used by payment service providers.
"""
if self.is_nan():
return Decimal()
return Decimal.quantize(self, self._cents)
def as_integer(self):
"""
Return the amount multiplied by its subunits to be handled as integer.
This representation often is used by payment service providers.
"""
return int(self.as_decimal() * self.subunits)
@classproperty
def subunits(cls):
"""
Return the subunits for the given currency.
"""
return 10**CURRENCIES[cls._currency_code][1]
def _assert_addable(self, other):
if not other:
            # so that we can add/subtract zero or None to any currency
return self.__class__('0')
if self._currency_code != getattr(other, '_currency_code', None):
raise ValueError("Can not add/substract money in different currencies.")
return other
def _assert_multipliable(self, other):
if hasattr(other, '_currency_code'):
raise ValueError("Can not multiply currencies.")
if isinstance(other, float):
return Decimal(other)
return other
def _assert_dividable(self, other):
if hasattr(other, '_currency_code'):
raise ValueError("Can not divide through a currency.")
if isinstance(other, float):
return Decimal(other)
return other
class MoneyMaker(type):
"""
Factory for building Decimal types, which keep track of the used currency. This is to avoid
unintentional price allocations, when combined with decimals or when working in different
currencies.
No automatic conversion of currencies has been implemented. This could however be achieved
quite easily in a separate shop plugin.
"""
def __new__(cls, currency_code=None):
def new_money(cls, value='NaN', context=None):
"""
Build a class named MoneyIn<currency_code> inheriting from Decimal.
"""
if isinstance(value, cls):
assert cls._currency_code == value._currency_code, "Money type currency mismatch"
if value is None:
value = 'NaN'
try:
self = Decimal.__new__(cls, value, context)
except Exception as err:
raise ValueError(err)
return self
if currency_code is None:
currency_code = app_settings.DEFAULT_CURRENCY
else:
currency_code = currency_code.upper()
if currency_code not in CURRENCIES:
raise TypeError("'{}' is an unknown currency code. Please check shop/money/iso4217.py".format(currency_code))
name = str('MoneyIn' + currency_code)
bases = (AbstractMoney,)
try:
cents = Decimal('.' + CURRENCIES[currency_code][1] * '0')
except InvalidOperation:
# Currencies with no decimal places, ex. JPY, HUF
cents = Decimal()
attrs = {'_currency_code': currency_code, '_currency': CURRENCIES[currency_code],
'_cents': cents, '__new__': new_money}
new_class = type(name, bases, attrs)
return new_class
def _make_money(currency_code, value):
"""
Function which curries currency and value
"""
return MoneyMaker(currency_code)(value)
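# Usage sketch (assumes 'EUR' and 'USD' exist in CURRENCIES and Django
# settings are configured): MoneyMaker builds one Decimal subclass per
# currency, and mixing currencies raises instead of silently adding amounts.
#
#   MoneyEUR = MoneyMaker('EUR')
#   price = MoneyEUR('9.99')
#   total = price * 3                   # MoneyEUR('29.97')
#   price + MoneyMaker('USD')('1.00')   # raises ValueError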
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS, CONF_NAME, DATA_GIGABYTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from . import (
ATTR_CURRENT_BANDWIDTH_USED,
ATTR_PENDING_CHARGES,
CONF_SUBSCRIPTION,
DATA_VULTR,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Vultr {} {}"
MONITORED_CONDITIONS = {
ATTR_CURRENT_BANDWIDTH_USED: [
"Current Bandwidth Used",
DATA_GIGABYTES,
"mdi:chart-histogram",
],
ATTR_PENDING_CHARGES: ["Pending Charges", "US$", "mdi:currency-usd"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SUBSCRIPTION): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Vultr subscription (server) sensor."""
vultr = hass.data[DATA_VULTR]
subscription = config.get(CONF_SUBSCRIPTION)
name = config.get(CONF_NAME)
monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)
if subscription not in vultr.data:
_LOGGER.error("Subscription %s not found", subscription)
return
sensors = []
for condition in monitored_conditions:
sensors.append(VultrSensor(vultr, subscription, condition, name))
add_entities(sensors, True)
class VultrSensor(Entity):
"""Representation of a Vultr subscription sensor."""
def __init__(self, vultr, subscription, condition, name):
"""Initialize a new Vultr sensor."""
self._vultr = vultr
self._condition = condition
self._name = name
self.subscription = subscription
self.data = None
condition_info = MONITORED_CONDITIONS[condition]
self._condition_name = condition_info[0]
self._units = condition_info[1]
self._icon = condition_info[2]
@property
def name(self):
"""Return the name of the sensor."""
try:
return self._name.format(self._condition_name)
except IndexError:
try:
return self._name.format(self.data["label"], self._condition_name)
except (KeyError, TypeError):
return self._name
@property
def icon(self):
"""Return the icon used in the frontend if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return self._units
@property
def state(self):
"""Return the value of this given sensor type."""
try:
return round(float(self.data.get(self._condition)), 2)
except (TypeError, ValueError):
return self.data.get(self._condition)
def update(self):
"""Update state of sensor."""
self._vultr.update()
self.data = self._vultr.data[self.subscription]
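# Example configuration.yaml entry (a sketch; the subscription id is
# hypothetical and monitored_conditions defaults to every key in
# MONITORED_CONDITIONS above):
#
#   sensor:
#     - platform: vultr
#       subscription: "576965"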
|
from homeassistant.components.rflink import (
CONF_RECONNECT_INTERVAL,
DATA_ENTITY_LOOKUP,
EVENT_KEY_COMMAND,
EVENT_KEY_SENSOR,
TMP_ENTITY,
)
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
PERCENTAGE,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
from tests.components.rflink.test_init import mock_rflink
DOMAIN = "sensor"
CONFIG = {
"rflink": {
"port": "/dev/ttyABC0",
"ignore_devices": ["ignore_wildcard_*", "ignore_sensor"],
},
DOMAIN: {
"platform": "rflink",
"devices": {"test": {"name": "test", "sensor_type": "temperature"}},
},
}
async def test_default_setup(hass, monkeypatch):
"""Test all basic functionality of the rflink sensor component."""
# setup mocking rflink module
event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch)
# make sure arguments are passed
assert create.call_args_list[0][1]["ignore"]
# test default state of sensor loaded from config
config_sensor = hass.states.get("sensor.test")
assert config_sensor
assert config_sensor.state == "unknown"
assert config_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
# test event for config sensor
event_callback(
{"id": "test", "sensor": "temperature", "value": 1, "unit": TEMP_CELSIUS}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.test").state == "1"
# test event for new unconfigured sensor
event_callback(
{"id": "test2", "sensor": "temperature", "value": 0, "unit": TEMP_CELSIUS}
)
await hass.async_block_till_done()
# test state of new sensor
new_sensor = hass.states.get("sensor.test2")
assert new_sensor
assert new_sensor.state == "0"
assert new_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
assert new_sensor.attributes["icon"] == "mdi:thermometer"
async def test_disable_automatic_add(hass, monkeypatch):
"""If disabled new devices should not be automatically added."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {"platform": "rflink", "automatic_add": False},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
# test event for new unconfigured sensor
event_callback(
{"id": "test2", "sensor": "temperature", "value": 0, "unit": TEMP_CELSIUS}
)
await hass.async_block_till_done()
# make sure new device is not added
assert not hass.states.get("sensor.test2")
async def test_entity_availability(hass, monkeypatch):
"""If Rflink device is disconnected, entities should become unavailable."""
    # Make sure the Rflink mock does not 'recover' too quickly from the
    # disconnect or else the unavailability cannot be measured
config = CONFIG
failures = [True, True]
config[CONF_RECONNECT_INTERVAL] = 60
# Create platform and entities
_, _, _, disconnect_callback = await mock_rflink(
hass, config, DOMAIN, monkeypatch, failures=failures
)
# Entities are available by default
assert hass.states.get("sensor.test").state == STATE_UNKNOWN
# Mock a disconnect of the Rflink device
disconnect_callback()
# Wait for dispatch events to propagate
await hass.async_block_till_done()
# Entity should be unavailable
assert hass.states.get("sensor.test").state == "unavailable"
# Reconnect the Rflink device
disconnect_callback()
# Wait for dispatch events to propagate
await hass.async_block_till_done()
# Entities should be available again
assert hass.states.get("sensor.test").state == STATE_UNKNOWN
async def test_aliases(hass, monkeypatch):
"""Validate the response to sensor's alias (with aliases)."""
config = {
"rflink": {"port": "/dev/ttyABC0"},
DOMAIN: {
"platform": "rflink",
"devices": {
"test_02": {
"name": "test_02",
"sensor_type": "humidity",
"aliases": ["test_alias_02_0"],
}
},
},
}
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
# test default state of sensor loaded from config
config_sensor = hass.states.get("sensor.test_02")
assert config_sensor
assert config_sensor.state == "unknown"
# test event for config sensor
event_callback(
{
"id": "test_alias_02_0",
"sensor": "humidity",
"value": 65,
"unit": PERCENTAGE,
}
)
await hass.async_block_till_done()
# test state of new sensor
updated_sensor = hass.states.get("sensor.test_02")
assert updated_sensor
assert updated_sensor.state == "65"
assert updated_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
async def test_race_condition(hass, monkeypatch):
"""Test race condition for unknown components."""
config = {"rflink": {"port": "/dev/ttyABC0"}, DOMAIN: {"platform": "rflink"}}
tmp_entity = TMP_ENTITY.format("test3")
# setup mocking rflink module
event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)
# test event for new unconfigured sensor
event_callback({"id": "test3", "sensor": "battery", "value": "ok", "unit": ""})
event_callback({"id": "test3", "sensor": "battery", "value": "ko", "unit": ""})
# tmp_entity added to EVENT_KEY_SENSOR
assert tmp_entity in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR]["test3"]
    # tmp_entity must not be added to EVENT_KEY_COMMAND
assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND]["test3"]
await hass.async_block_till_done()
# test state of new sensor
updated_sensor = hass.states.get("sensor.test3")
assert updated_sensor
# test state of new sensor
new_sensor = hass.states.get(f"{DOMAIN}.test3")
assert new_sensor
assert new_sensor.state == "ok"
event_callback({"id": "test3", "sensor": "battery", "value": "ko", "unit": ""})
await hass.async_block_till_done()
    # tmp_entity must be deleted from EVENT_KEY_SENSOR
assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR]["test3"]
# test state of new sensor
new_sensor = hass.states.get(f"{DOMAIN}.test3")
assert new_sensor
assert new_sensor.state == "ko"
|
import os
import unittest
import mock
from perfkitbenchmarker.linux_benchmarks import lmbench_benchmark
class LmbenchTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(lmbench_benchmark.__name__)
p.start()
self.addCleanup(p.stop)
path = os.path.join(
os.path.dirname(__file__), '../data', 'lmbench_output.txt')
with open(path) as fp:
self.contents = fp.read()
def testParseLmbench(self):
samples = lmbench_benchmark._ParseOutput(self.contents)
self.assertEqual(61, len(samples))
# Test metadata
metadata = samples[0].metadata
self.assertEqual('8', metadata['MB'])
self.assertEqual('NO', metadata['BENCHMARK_HARDWARE'])
self.assertEqual('YES', metadata['BENCHMARK_OS'])
# Test metric and value
processor_results = {i.metric: i.value for i in samples}
self.assertAlmostEqual(0.2345, processor_results['syscall'])
self.assertAlmostEqual(0.3515, processor_results['read'])
self.assertAlmostEqual(0.3082, processor_results['write'])
self.assertAlmostEqual(0.6888, processor_results['stat'])
self.assertAlmostEqual(0.3669, processor_results['fstat'])
self.assertAlmostEqual(1.5541, processor_results['open/close'])
self.assertAlmostEqual(0.3226,
processor_results['Signal handler installation'])
self.assertAlmostEqual(1.1736, processor_results['Signal handler overhead'])
self.assertAlmostEqual(0.7491, processor_results['Protection fault'])
self.assertAlmostEqual(25.5437, processor_results['Pipe latency'])
self.assertAlmostEqual(121.7399, processor_results['Process fork+exit'])
self.assertAlmostEqual(318.6445, processor_results['Process fork+execve'])
self.assertAlmostEqual(800.2188,
processor_results['Process fork+/bin/sh -c'])
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 96 and
x.metadata['memory_size'] == '64k')
self.assertAlmostEqual(15.45, sample.value)
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 4 and
x.metadata['memory_size'] == '32k')
self.assertAlmostEqual(13.96, sample.value)
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 2 and
x.metadata['memory_size'] == '16k')
self.assertAlmostEqual(14.21, sample.value)
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 16 and
x.metadata['memory_size'] == '8k')
self.assertAlmostEqual(13.02, sample.value)
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 8 and
x.metadata['memory_size'] == '4k')
self.assertAlmostEqual(12.40, sample.value)
sample = next(x for x in samples if x.metric == 'context_switching_time' and
x.metadata['num_of_processes'] == 32 and
x.metadata['memory_size'] == '0k')
self.assertAlmostEqual(12.63, sample.value)
if __name__ == '__main__':
unittest.main()
|
from __future__ import division, print_function
REDRAW_SCREEN = 'redraw screen'
CURSOR_UP = 'cursor up'
CURSOR_DOWN = 'cursor down'
CURSOR_LEFT = 'cursor left'
CURSOR_RIGHT = 'cursor right'
CURSOR_PAGE_UP = 'cursor page up'
CURSOR_PAGE_DOWN = 'cursor page down'
CURSOR_MAX_LEFT = 'cursor max left'
CURSOR_MAX_RIGHT = 'cursor max right'
ACTIVATE = 'activate'
class CommandMap(object):
"""
dict-like object for looking up commands from keystrokes
Default values (key: command)::
'tab': 'next selectable',
'ctrl n': 'next selectable',
'shift tab': 'prev selectable',
'ctrl p': 'prev selectable',
'ctrl l': 'redraw screen',
'esc': 'menu',
'up': 'cursor up',
'down': 'cursor down',
'left': 'cursor left',
'right': 'cursor right',
'page up': 'cursor page up',
'page down': 'cursor page down',
'home': 'cursor max left',
'end': 'cursor max right',
' ': 'activate',
'enter': 'activate',
"""
_command_defaults = {
'tab': 'next selectable',
'ctrl n': 'next selectable',
'shift tab': 'prev selectable',
'ctrl p': 'prev selectable',
'ctrl l': REDRAW_SCREEN,
'esc': 'menu',
'up': CURSOR_UP,
'down': CURSOR_DOWN,
'left': CURSOR_LEFT,
'right': CURSOR_RIGHT,
'page up': CURSOR_PAGE_UP,
'page down': CURSOR_PAGE_DOWN,
'home': CURSOR_MAX_LEFT,
'end': CURSOR_MAX_RIGHT,
' ': ACTIVATE,
'enter': ACTIVATE,
}
def __init__(self):
self.restore_defaults()
def restore_defaults(self):
self._command = dict(self._command_defaults)
def __getitem__(self, key):
return self._command.get(key, None)
def __setitem__(self, key, command):
self._command[key] = command
def __delitem__(self, key):
del self._command[key]
def clear_command(self, command):
dk = [k for k, v in self._command.items() if v == command]
for k in dk:
del self._command[k]
def copy(self):
"""
Return a new copy of this CommandMap, likely so we can modify
it separate from a shared one.
"""
c = CommandMap()
c._command = dict(self._command)
return c
command_map = CommandMap() # shared command mappings
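# Usage sketch: copy the shared map before customizing it, so rebindings stay
# local to one widget tree.
#
#   my_map = command_map.copy()
#   my_map['j'] = CURSOR_DOWN       # vi-style motion
#   my_map.clear_command(ACTIVATE)  # unbind ' ' and 'enter'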
|
from simplipy.errors import (
InvalidCredentialsError,
PendingAuthorizationError,
SimplipyError,
)
from homeassistant import data_entry_flow
from homeassistant.components.simplisafe import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME
from tests.async_mock import AsyncMock, MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
def mock_api():
"""Mock SimpliSafe API class."""
api = MagicMock()
type(api).refresh_token = PropertyMock(return_value="12345abc")
return api
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_CODE: "1234",
}
MockConfigEntry(
domain=DOMAIN,
unique_id="[email protected]",
data={CONF_USERNAME: "[email protected]", CONF_TOKEN: "12345", CONF_CODE: "1234"},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_invalid_credentials(hass):
"""Test that invalid credentials throws an error."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
with patch(
"simplipy.API.login_via_credentials",
new=AsyncMock(side_effect=InvalidCredentialsError),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["errors"] == {"base": "invalid_auth"}
async def test_options_flow(hass):
"""Test config flow options."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="abcde12345",
data=conf,
options={CONF_CODE: "1234"},
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.simplisafe.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={CONF_CODE: "4321"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {CONF_CODE: "4321"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_reauth(hass):
"""Test that the reauth step works."""
MockConfigEntry(
domain=DOMAIN,
unique_id="[email protected]",
data={CONF_USERNAME: "[email protected]", CONF_TOKEN: "12345", CONF_CODE: "1234"},
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth"},
data={CONF_CODE: "1234", CONF_USERNAME: "[email protected]"},
)
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"homeassistant.components.simplisafe.async_setup_entry", return_value=True
), patch(
"simplipy.API.login_via_credentials", new=AsyncMock(return_value=mock_api())
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_PASSWORD: "password"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "reauth_successful"
assert len(hass.config_entries.async_entries()) == 1
async def test_step_user(hass):
"""Test that the user step works (without MFA)."""
conf = {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_CODE: "1234",
}
with patch(
"homeassistant.components.simplisafe.async_setup_entry", return_value=True
), patch(
"simplipy.API.login_via_credentials", new=AsyncMock(return_value=mock_api())
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "[email protected]"
assert result["data"] == {
CONF_USERNAME: "[email protected]",
CONF_TOKEN: "12345abc",
CONF_CODE: "1234",
}
async def test_step_user_mfa(hass):
"""Test that the user step works when MFA is in the middle."""
conf = {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_CODE: "1234",
}
with patch(
"simplipy.API.login_via_credentials",
new=AsyncMock(side_effect=PendingAuthorizationError),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["step_id"] == "mfa"
with patch(
"simplipy.API.login_via_credentials",
new=AsyncMock(side_effect=PendingAuthorizationError),
):
# Simulate the user pressing the MFA submit button without having clicked
# the link in the MFA email:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["step_id"] == "mfa"
with patch(
"homeassistant.components.simplisafe.async_setup_entry", return_value=True
), patch(
"simplipy.API.login_via_credentials", new=AsyncMock(return_value=mock_api())
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "[email protected]"
assert result["data"] == {
CONF_USERNAME: "[email protected]",
CONF_TOKEN: "12345abc",
CONF_CODE: "1234",
}
async def test_unknown_error(hass):
"""Test that an unknown error raises the correct error."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
with patch(
"simplipy.API.login_via_credentials",
new=AsyncMock(side_effect=SimplipyError),
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["errors"] == {"base": "unknown"}
|
import math
import numpy as np
import tensorflow as tf
from tensornetwork.backends.numpy import decompositions
class DecompositionsTest(tf.test.TestCase):
def test_expected_shapes(self):
val = np.zeros((2, 3, 4, 5))
u, s, vh, _ = decompositions.svd(np, val, 2)
self.assertEqual(u.shape, (2, 3, 6))
self.assertEqual(s.shape, (6,))
self.assertAllClose(s, np.zeros(6))
self.assertEqual(vh.shape, (6, 4, 5))
def test_expected_shapes_qr(self):
val = np.zeros((2, 3, 4, 5))
q, r = decompositions.qr(np, val, 2, False)
self.assertEqual(q.shape, (2, 3, 6))
self.assertEqual(r.shape, (6, 4, 5))
def test_expected_shapes_rq(self):
val = np.zeros((2, 3, 4, 5))
r, q = decompositions.rq(np, val, 2, False)
self.assertEqual(r.shape, (2, 3, 6))
self.assertEqual(q.shape, (6, 4, 5))
def test_rq(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
r, q = decompositions.rq(np, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(r.dot(q), random_matrix)
def test_qr(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
q, r = decompositions.qr(np, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(q.dot(r), random_matrix)
def test_max_singular_values(self):
random_matrix = np.random.rand(10, 10)
unitary1, _, unitary2 = np.linalg.svd(random_matrix)
singular_values = np.array(range(10))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
u, s, vh, trun = decompositions.svd(
np, val, 1, max_singular_values=7)
self.assertEqual(u.shape, (10, 7))
self.assertEqual(s.shape, (7,))
self.assertAllClose(s, np.arange(9, 2, -1))
self.assertEqual(vh.shape, (7, 10))
self.assertAllClose(trun, np.arange(2, -1, -1))
def test_max_singular_values_larger_than_bond_dimension(self):
random_matrix = np.random.rand(10, 6)
unitary1, _, unitary2 = np.linalg.svd(random_matrix, full_matrices=False)
singular_values = np.array(range(6))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
u, s, vh, _ = decompositions.svd(
np, val, 1, max_singular_values=30)
self.assertEqual(u.shape, (10, 6))
self.assertEqual(s.shape, (6,))
self.assertEqual(vh.shape, (6, 6))
def test_max_truncation_error(self):
random_matrix = np.random.rand(10, 10)
unitary1, _, unitary2 = np.linalg.svd(random_matrix)
singular_values = np.array(range(10))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
u, s, vh, trun = decompositions.svd(
np, val, 1, max_truncation_error=math.sqrt(5.1))
self.assertEqual(u.shape, (10, 7))
self.assertEqual(s.shape, (7,))
self.assertAllClose(s, np.arange(9, 2, -1))
self.assertEqual(vh.shape, (7, 10))
self.assertAllClose(trun, np.arange(2, -1, -1))
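# Note on the expectations above: the singular values are 0..9, and a budget of
# max_truncation_error = sqrt(5.1) discards the smallest values whose squared
# sum stays within 5.1, i.e. 0, 1 and 2 (0 + 1 + 4 = 5 <= 5.1). That leaves the
# seven values 9..3 and a truncation array of [2, 1, 0].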
def test_max_truncation_error_relative(self):
absolute = np.diag([2.0, 1.0, 0.2, 0.1])
relative = np.diag([2.0, 1.0, 0.2, 0.1])
max_truncation_err = 0.2
_, _, _, trunc_sv_absolute = decompositions.svd(
np,
absolute,
1,
max_truncation_error=max_truncation_err,
relative=False)
_, _, _, trunc_sv_relative = decompositions.svd(
np, relative, 1, max_truncation_error=max_truncation_err, relative=True)
np.testing.assert_almost_equal(trunc_sv_absolute, [0.1])
np.testing.assert_almost_equal(trunc_sv_relative, [0.2, 0.1])
if __name__ == '__main__':
tf.test.main()
|
import gc
import argparse
import sys
_stash = globals()["_stash"]
def main():
parser = argparse.ArgumentParser(description="access to pythons built-in garbage collector")
parser.add_argument(
"command",
help="what to do",
choices=[
"enable",
"disable",
"status",
"collect",
"threshold",
"debug",
"break",
],
action="store"
)
parser.add_argument("args", help="argument for command", action="store", nargs="*")
ns = parser.parse_args()
if ns.command == "enable":
gc.enable()
elif ns.command == "disable":
gc.disable()
elif ns.command == "collect":
gc.collect()
elif ns.command == "status":
if gc.isenabled():
gcs = _stash.text_color("Enabled", "green")
else:
gcs = _stash.text_color("Disabled", "red")
sys.stdout.write("GC status: {s}\n".format(s=gcs))
tracked = gc.get_objects()
n = len(tracked)
sys.stdout.write("Tracked objects: {n}\n".format(n=n))
size = sum([sys.getsizeof(e) for e in tracked])
del tracked # this list may be big, better delete it
size = _stash.libcore.sizeof_fmt(size)
sys.stdout.write("Size of tracked objects: {s} \n".format(s=size))
sys.stdout.write("Garbage: {n}\n".format(n=len(gc.garbage)))
gsize = sum([sys.getsizeof(e) for e in gc.garbage])
gsize = _stash.libcore.sizeof_fmt(gsize)
sys.stdout.write("Size of garbage: {s} \n".format(s=gsize))
sys.stdout.write("Debug: {d}\n".format(d=gc.get_debug()))
elif ns.command == "threshold":
if len(ns.args) == 0:
sys.stdout.write("Threshold:\n G1: {}\n G2: {}\n G3: {}\n".format(*gc.get_threshold()))
elif len(ns.args) > 3:
errmsg = _stash.text_color("Error: to many arguments for threshold!\n", "red")
sys.stdout.write(errmsg)
sys.exit(1)
else:
try:
ts = tuple([int(e) for e in ns.args])
except ValueError:
errmsg = _stash.text_color("Error: expected arguments to be integer!\n", "red")
sys.stdout.write(errmsg)
sys.exit(1)
gc.set_threshold(*ts)
elif ns.command == "debug":
if len(ns.args) == 0:
sys.stdout.write("Debug: {d}\n".format(d=gc.get_debug()))
elif len(ns.args) == 1:
try:
flag = int(ns.args[0])
except ValueError:
sys.stdout.write(_stash.text_color("Error: expected argument to be an integer!\n", "red"))
sys.exit(1)
gc.set_debug(flag)
else:
sys.stdout.write(_stash.text_color("Error: expected exactly one argument for debug!\n", "red"))
sys.exit(1)
elif ns.command == "break":
if len(gc.garbage) == 0:
sys.stdout.write(_stash.text_color("Error: No Garbage found!\n", "red"))
sys.exit(1)
else:
for k in dir(gc.garbage[0]):
try:
delattr(gc.garbage[0], k)
                except Exception:
                    # some attributes cannot be deleted; ignore those
                    pass
del gc.garbage[:]
if __name__ == "__main__":
main()
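# Usage sketch (run as a StaSh command; the installed script name, e.g.
# ``gci.py``, is hypothetical):
#
#   gci.py status              # enabled flag, tracked objects, garbage, debug
#   gci.py collect             # force a full collection
#   gci.py threshold 700 10 10 # set the three generation thresholds
#   gci.py debug 0             # clear all debug flags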
|
import unittest
import mock
from py.test import raises
from paasta_tools.metrics import metrics_lib
class TestNoMetrics(unittest.TestCase):
def setUp(self):
self.metrics = metrics_lib.NoMetrics("paasta.deployd")
def test_timer(self):
timer = self.metrics.create_timer("name", dimension="thing")
timer.start()
timer.stop()
def test_gauge(self):
gauge = self.metrics.create_gauge("name", dimension="thing")
gauge.set(1212)
def test_counter(self):
counter = self.metrics.create_counter("name", dimension="thing")
counter.count()
class TestMeteoriteMetrics(unittest.TestCase):
def setUp(self):
self.mock_meteorite = mock.Mock()
metrics_lib.yelp_meteorite = self.mock_meteorite
self.metrics = metrics_lib.MeteoriteMetrics("paasta.deployd")
def test_init(self):
metrics_lib.yelp_meteorite = None
with raises(ImportError):
metrics_lib.MeteoriteMetrics("paasta.deployd")
def test_init_no_error(self):
metrics_lib.MeteoriteMetrics("paasta.deployd")
def test_create_timer(self):
self.metrics.create_timer("name", dimension="thing")
self.mock_meteorite.create_timer.assert_called_with(
"paasta.deployd.name", {"dimension": "thing"}
)
def test_create_gauge(self):
self.metrics.create_gauge("name", dimension="thing")
self.mock_meteorite.create_gauge.assert_called_with(
"paasta.deployd.name", {"dimension": "thing"}
)
def test_create_counter(self):
self.metrics.create_counter("name", dimension="thing")
self.mock_meteorite.create_counter.assert_called_with(
"paasta.deployd.name", {"dimension": "thing"}
)
def tearDown(self):
metrics_lib.yelp_meteorite = None
|
import os
import os.path as op
import numpy as np
from numpy.polynomial import legendre
from ..fixes import einsum
from ..parallel import parallel_func
from ..utils import logger, verbose, _get_extra_data_path, fill_doc
##############################################################################
# FAST LEGENDRE (DERIVATIVE) POLYNOMIALS USING LOOKUP TABLE
def _next_legen_der(n, x, p0, p01, p0d, p0dd):
"""Compute the next Legendre polynomial and its derivatives."""
# only good for n > 1 !
old_p0 = p0
old_p0d = p0d
p0 = ((2 * n - 1) * x * old_p0 - (n - 1) * p01) / n
p0d = n * old_p0 + x * old_p0d
p0dd = (n + 1) * old_p0d + x * p0dd
return p0, p0d, p0dd
def _get_legen(x, n_coeff=100):
"""Get Legendre polynomials expanded about x."""
return legendre.legvander(x, n_coeff - 1)
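# Sanity sketch: with n_coeff=4 the columns returned by _get_legen are
# P_0..P_3, so the row for x = 0.5 reads [1, 0.5, -0.125, -0.4375]
# (standard Legendre values):
#
#   _get_legen(np.array([0.5]), n_coeff=4)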
def _get_legen_der(xx, n_coeff=100):
"""Get Legendre polynomial derivatives expanded about x."""
coeffs = np.empty((len(xx), n_coeff, 3))
for c, x in zip(coeffs, xx):
p0s, p0ds, p0dds = c[:, 0], c[:, 1], c[:, 2]
p0s[:2] = [1.0, x]
p0ds[:2] = [0.0, 1.0]
p0dds[:2] = [0.0, 0.0]
for n in range(2, n_coeff):
p0s[n], p0ds[n], p0dds[n] = _next_legen_der(
n, x, p0s[n - 1], p0s[n - 2], p0ds[n - 1], p0dds[n - 1])
return coeffs
@verbose
def _get_legen_table(ch_type, volume_integral=False, n_coeff=100,
n_interp=20000, force_calc=False, verbose=None):
"""Return a (generated) LUT of Legendre (derivative) polynomial coeffs."""
if n_interp % 2 != 0:
raise RuntimeError('n_interp must be even')
fname = op.join(_get_extra_data_path(), 'tables')
if not op.isdir(fname):
# Updated due to API change (GH 1167)
os.makedirs(fname)
if ch_type == 'meg':
fname = op.join(fname, 'legder_%s_%s.bin' % (n_coeff, n_interp))
leg_fun = _get_legen_der
extra_str = ' derivative'
lut_shape = (n_interp + 1, n_coeff, 3)
else: # 'eeg'
fname = op.join(fname, 'legval_%s_%s.bin' % (n_coeff, n_interp))
leg_fun = _get_legen
extra_str = ''
lut_shape = (n_interp + 1, n_coeff)
if not op.isfile(fname) or force_calc:
logger.info('Generating Legendre%s table...' % extra_str)
x_interp = np.linspace(-1, 1, n_interp + 1)
lut = leg_fun(x_interp, n_coeff).astype(np.float32)
if not force_calc:
with open(fname, 'wb') as fid:
fid.write(lut.tobytes())
else:
logger.info('Reading Legendre%s table...' % extra_str)
with open(fname, 'rb', buffering=0) as fid:
lut = np.fromfile(fid, np.float32)
lut.shape = lut_shape
# we need this for the integration step
n_fact = np.arange(1, n_coeff, dtype=float)
if ch_type == 'meg':
n_facts = list() # multn, then mult, then multn * (n + 1)
if volume_integral:
n_facts.append(n_fact / ((2.0 * n_fact + 1.0) *
(2.0 * n_fact + 3.0)))
else:
n_facts.append(n_fact / (2.0 * n_fact + 1.0))
n_facts.append(n_facts[0] / (n_fact + 1.0))
n_facts.append(n_facts[0] * (n_fact + 1.0))
# skip the first set of coefficients because they are not used
lut = lut[:, 1:, [0, 1, 1, 2]] # for multiplicative convenience later
# reshape this for convenience, too
n_facts = np.array(n_facts)[[2, 0, 1, 1], :].T
n_facts = np.ascontiguousarray(n_facts)
n_fact = n_facts
else: # 'eeg'
n_fact = (2.0 * n_fact + 1.0) * (2.0 * n_fact + 1.0) / n_fact
# skip the first set of coefficients because they are not used
lut = lut[:, 1:].copy()
return lut, n_fact
def _comp_sum_eeg(beta, ctheta, lut_fun, n_fact):
"""Lead field dot products using Legendre polynomial (P_n) series."""
# Compute the sum occurring in the evaluation.
# The result is
# sums[:] (2n+1)^2/n beta^n P_n
n_chunk = 50000000 // (8 * max(n_fact.shape) * 2)
lims = np.concatenate([np.arange(0, beta.size, n_chunk), [beta.size]])
s0 = np.empty(beta.shape)
for start, stop in zip(lims[:-1], lims[1:]):
coeffs = lut_fun(ctheta[start:stop])
betans = np.tile(beta[start:stop][:, np.newaxis], (1, n_fact.shape[0]))
np.cumprod(betans, axis=1, out=betans) # run inplace
coeffs *= betans
s0[start:stop] = np.dot(coeffs, n_fact) # == weighted sum across cols
return s0
def _comp_sums_meg(beta, ctheta, lut_fun, n_fact, volume_integral):
"""Lead field dot products using Legendre polynomial (P_n) series.
Parameters
----------
beta : array, shape (n_points * n_points, 1)
Coefficients of the integration.
ctheta : array, shape (n_points * n_points, 1)
Cosine of the angle between the sensor integration points.
lut_fun : callable
Look-up table for evaluating Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
volume_integral : bool
If True, compute volume integral.
Returns
-------
sums : array, shape (4, n_points * n_points)
The results.
"""
# Compute the sums occurring in the evaluation.
# Two point magnetometers on the xz plane are assumed.
# The four sums are:
# * sums[:, 0] n(n+1)/(2n+1) beta^(n+1) P_n
# * sums[:, 1] n/(2n+1) beta^(n+1) P_n'
# * sums[:, 2] n/((2n+1)(n+1)) beta^(n+1) P_n'
# * sums[:, 3] n/((2n+1)(n+1)) beta^(n+1) P_n''
# This is equivalent, but slower:
# sums = np.sum(bbeta[:, :, np.newaxis].T * n_fact * coeffs, axis=1)
# sums = np.rollaxis(sums, 2)
# or
# sums = einsum('ji,jk,ijk->ki', bbeta, n_fact, lut_fun(ctheta)))
sums = np.empty((n_fact.shape[1], len(beta)))
# beta can be e.g. 3 million elements, which ends up using lots of memory
# so we split up the computations into ~50 MB blocks
n_chunk = 50000000 // (8 * max(n_fact.shape) * 2)
lims = np.concatenate([np.arange(0, beta.size, n_chunk), [beta.size]])
for start, stop in zip(lims[:-1], lims[1:]):
bbeta = np.tile(beta[start:stop][np.newaxis], (n_fact.shape[0], 1))
bbeta[0] *= beta[start:stop]
np.cumprod(bbeta, axis=0, out=bbeta) # run inplace
einsum('ji,jk,ijk->ki', bbeta, n_fact, lut_fun(ctheta[start:stop]),
out=sums[:, start:stop])
return sums
###############################################################################
# SPHERE DOTS
_meg_const = 4e-14 * np.pi # This is \mu_0^2/4\pi
_eeg_const = 1.0 / (4.0 * np.pi)
def _fast_sphere_dot_r0(r, rr1_orig, rr2s, lr1, lr2s, cosmags1, cosmags2s,
w1, w2s, volume_integral, lut, n_fact, ch_type):
"""Lead field dot product computation for M/EEG in the sphere model.
Parameters
----------
r : float
The integration radius. It is used to calculate beta as:
beta = (r * r) / (lr1 * lr2).
    rr1_orig : array, shape (n_points x 3)
Normalized position vectors of integrations points in first sensor.
rr2s : list
Normalized position vector of integration points in second sensor.
lr1 : array, shape (n_points x 1)
Magnitude of position vector of integration points in first sensor.
lr2s : list
Magnitude of position vector of integration points in second sensor.
cosmags1 : array, shape (n_points x 1)
Direction of integration points in first sensor.
cosmags2s : list
Direction of integration points in second sensor.
w1 : array, shape (n_points x 1) | None
Weights of integration points in the first sensor.
w2s : list
Weights of integration points in the second sensor.
volume_integral : bool
If True, compute volume integral.
lut : callable
Look-up table for evaluating Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
ch_type : str
The channel type. It can be 'meg' or 'eeg'.
Returns
-------
result : float
The integration sum.
"""
if w1 is None: # operating on surface, treat independently
out_shape = (len(rr2s), len(rr1_orig))
sum_axis = 1 # operate along second axis only at the end
else:
out_shape = (len(rr2s),)
sum_axis = None # operate on flattened array at the end
out = np.empty(out_shape)
rr2 = np.concatenate(rr2s)
lr2 = np.concatenate(lr2s)
cosmags2 = np.concatenate(cosmags2s)
# outer product, sum over coords
ct = einsum('ik,jk->ij', rr1_orig, rr2)
np.clip(ct, -1, 1, ct)
# expand axes
rr1 = rr1_orig[:, np.newaxis, :] # (n_rr1, n_rr2, n_coord) e.g. 4x4x3
rr2 = rr2[np.newaxis, :, :]
lr1lr2 = lr1[:, np.newaxis] * lr2[np.newaxis, :]
beta = (r * r) / lr1lr2
if ch_type == 'meg':
sums = _comp_sums_meg(beta.flatten(), ct.flatten(), lut, n_fact,
volume_integral)
sums.shape = (4,) + beta.shape
# Accumulate the result, a little bit streamlined version
# cosmags1 = cosmags1[:, np.newaxis, :]
# cosmags2 = cosmags2[np.newaxis, :, :]
# n1c1 = np.sum(cosmags1 * rr1, axis=2)
# n1c2 = np.sum(cosmags1 * rr2, axis=2)
# n2c1 = np.sum(cosmags2 * rr1, axis=2)
# n2c2 = np.sum(cosmags2 * rr2, axis=2)
# n1n2 = np.sum(cosmags1 * cosmags2, axis=2)
n1c1 = einsum('ik,ijk->ij', cosmags1, rr1)
n1c2 = einsum('ik,ijk->ij', cosmags1, rr2)
n2c1 = einsum('jk,ijk->ij', cosmags2, rr1)
n2c2 = einsum('jk,ijk->ij', cosmags2, rr2)
n1n2 = einsum('ik,jk->ij', cosmags1, cosmags2)
part1 = ct * n1c1 * n2c2
part2 = n1c1 * n2c1 + n1c2 * n2c2
result = (n1c1 * n2c2 * sums[0] +
(2.0 * part1 - part2) * sums[1] +
(n1n2 + part1 - part2) * sums[2] +
(n1c2 - ct * n1c1) * (n2c1 - ct * n2c2) * sums[3])
# Give it a finishing touch!
result *= (_meg_const / lr1lr2)
if volume_integral:
result *= r
else: # 'eeg'
result = _comp_sum_eeg(beta.flatten(), ct.flatten(), lut, n_fact)
result.shape = beta.shape
# Give it a finishing touch!
result *= _eeg_const
result /= lr1lr2
# now we add them all up with weights
offset = 0
result *= np.concatenate(w2s)
if w1 is not None:
result *= w1[:, np.newaxis]
for ii, w2 in enumerate(w2s):
out[ii] = np.sum(result[:, offset:offset + len(w2)], axis=sum_axis)
offset += len(w2)
return out
@fill_doc
def _do_self_dots(intrad, volume, coils, r0, ch_type, lut, n_fact, n_jobs):
"""Perform the lead field dot product integrations.
Parameters
----------
intrad : float
The integration radius. It is used to calculate beta as:
beta = (intrad * intrad) / (r1 * r2).
volume : bool
If True, perform volume integral.
coils : list of dict
The coils.
r0 : array, shape (3 x 1)
The origin of the sphere.
ch_type : str
The channel type. It can be 'meg' or 'eeg'.
lut : callable
Look-up table for evaluating Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
%(n_jobs)s
Returns
-------
products : array, shape (n_coils, n_coils)
The integration products.
"""
if ch_type == 'eeg':
intrad = intrad * 0.7
# convert to normalized distances from expansion center
rmags = [coil['rmag'] - r0[np.newaxis, :] for coil in coils]
rlens = [np.sqrt(np.sum(r * r, axis=1)) for r in rmags]
rmags = [r / rl[:, np.newaxis] for r, rl in zip(rmags, rlens)]
cosmags = [coil['cosmag'] for coil in coils]
ws = [coil['w'] for coil in coils]
parallel, p_fun, _ = parallel_func(_do_self_dots_subset, n_jobs)
prods = parallel(p_fun(intrad, rmags, rlens, cosmags,
ws, volume, lut, n_fact, ch_type, idx)
for idx in np.array_split(np.arange(len(rmags)), n_jobs))
products = np.sum(prods, axis=0)
return products
def _do_self_dots_subset(intrad, rmags, rlens, cosmags, ws, volume, lut,
n_fact, ch_type, idx):
"""Parallelize."""
# all possible combinations of two magnetometers
products = np.zeros((len(rmags), len(rmags)))
for ci1 in idx:
ci2 = ci1 + 1
res = _fast_sphere_dot_r0(
intrad, rmags[ci1], rmags[:ci2], rlens[ci1], rlens[:ci2],
cosmags[ci1], cosmags[:ci2], ws[ci1], ws[:ci2], volume, lut,
n_fact, ch_type)
products[ci1, :ci2] = res
products[:ci2, ci1] = res
return products
def _do_cross_dots(intrad, volume, coils1, coils2, r0, ch_type,
lut, n_fact):
"""Compute lead field dot product integrations between two coil sets.
The code is a direct translation of MNE-C code found in
`mne_map_data/lead_dots.c`.
Parameters
----------
intrad : float
The integration radius. It is used to calculate beta as:
beta = (intrad * intrad) / (r1 * r2).
volume : bool
If True, compute volume integral.
coils1 : list of dict
The original coils.
coils2 : list of dict
The coils to which data is being mapped.
    r0 : array, shape (3 x 1)
        The origin of the sphere.
    ch_type : str
        The channel type. It can be 'meg' or 'eeg'.
lut : callable
Look-up table for evaluating Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
Returns
-------
products : array, shape (n_coils, n_coils)
The integration products.
"""
if ch_type == 'eeg':
intrad = intrad * 0.7
rmags1 = [coil['rmag'] - r0[np.newaxis, :] for coil in coils1]
rmags2 = [coil['rmag'] - r0[np.newaxis, :] for coil in coils2]
rlens1 = [np.sqrt(np.sum(r * r, axis=1)) for r in rmags1]
rlens2 = [np.sqrt(np.sum(r * r, axis=1)) for r in rmags2]
rmags1 = [r / rl[:, np.newaxis] for r, rl in zip(rmags1, rlens1)]
rmags2 = [r / rl[:, np.newaxis] for r, rl in zip(rmags2, rlens2)]
ws1 = [coil['w'] for coil in coils1]
ws2 = [coil['w'] for coil in coils2]
cosmags1 = [coil['cosmag'] for coil in coils1]
cosmags2 = [coil['cosmag'] for coil in coils2]
products = np.zeros((len(rmags1), len(rmags2)))
for ci1 in range(len(coils1)):
res = _fast_sphere_dot_r0(
intrad, rmags1[ci1], rmags2, rlens1[ci1], rlens2, cosmags1[ci1],
cosmags2, ws1[ci1], ws2, volume, lut, n_fact, ch_type)
products[ci1, :] = res
return products
@fill_doc
def _do_surface_dots(intrad, volume, coils, surf, sel, r0, ch_type,
lut, n_fact, n_jobs):
"""Compute the map construction products.
Parameters
----------
intrad : float
The integration radius. It is used to calculate beta as:
beta = (intrad * intrad) / (r1 * r2)
volume : bool
If True, compute a volume integral.
coils : list of dict
The coils.
surf : dict
The surface on which the field is interpolated.
sel : array
Indices of the surface vertices to select.
r0 : array, shape (3 x 1)
The origin of the sphere.
ch_type : str
The channel type. It can be 'meg' or 'eeg'.
lut : callable
Look-up table for Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
%(n_jobs)s
Returns
-------
products : array, shape (n_coils, n_coils)
The integration products.
"""
# convert to normalized distances from expansion center
rmags = [coil['rmag'] - r0[np.newaxis, :] for coil in coils]
rlens = [np.sqrt(np.sum(r * r, axis=1)) for r in rmags]
rmags = [r / rl[:, np.newaxis] for r, rl in zip(rmags, rlens)]
cosmags = [coil['cosmag'] for coil in coils]
ws = [coil['w'] for coil in coils]
rref = None
refl = None
# virt_ref = False
if ch_type == 'eeg':
intrad = intrad * 0.7
# The virtual ref code is untested and unused, so it is
# commented out for now
# if virt_ref:
# rref = virt_ref[np.newaxis, :] - r0[np.newaxis, :]
# refl = np.sqrt(np.sum(rref * rref, axis=1))
# rref /= refl[:, np.newaxis]
rsurf = surf['rr'][sel] - r0[np.newaxis, :]
lsurf = np.sqrt(np.sum(rsurf * rsurf, axis=1))
rsurf /= lsurf[:, np.newaxis]
this_nn = surf['nn'][sel]
# loop over the coils
parallel, p_fun, _ = parallel_func(_do_surface_dots_subset, n_jobs)
prods = parallel(p_fun(intrad, rsurf, rmags, rref, refl, lsurf, rlens,
this_nn, cosmags, ws, volume, lut, n_fact, ch_type,
idx)
for idx in np.array_split(np.arange(len(rmags)), n_jobs))
products = np.sum(prods, axis=0)
return products
def _do_surface_dots_subset(intrad, rsurf, rmags, rref, refl, lsurf, rlens,
this_nn, cosmags, ws, volume, lut, n_fact, ch_type,
idx):
"""Parallelize.
Parameters
----------
refl : array | None
If ch_type is 'eeg', the magnitude of position vector of the
virtual reference (never used).
lsurf : array
Magnitude of position vector of the surface points.
rlens : list of arrays of length n_coils
Magnitude of position vector.
this_nn : array, shape (n_vertices, 3)
Surface normals.
cosmags : list of array.
Direction of the integration points in the coils.
ws : list of array
Integration weights of the coils.
volume : bool
If True, compute volume integral.
lut : callable
Look-up table for evaluating Legendre polynomials.
n_fact : array
Coefficients in the integration sum.
ch_type : str
'meg' or 'eeg'
idx : array, shape (n_coils x 1)
Index of coil.
Returns
-------
products : array, shape (n_coils, n_coils)
The integration products.
"""
products = _fast_sphere_dot_r0(
intrad, rsurf, rmags, lsurf, rlens, this_nn, cosmags, None, ws,
volume, lut, n_fact, ch_type).T
if rref is not None:
        raise NotImplementedError  # never used and untested
# vres = _fast_sphere_dot_r0(
# intrad, rref, rmags, refl, rlens, this_nn, cosmags, None, ws,
# volume, lut, n_fact, ch_type)
# products -= vres
return products
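# Rough wiring sketch (hedged): callers interpolate over the table returned by
# _get_legen_table to obtain a callable lut_fun for the helpers above. The
# interp1d wrapper here is an assumption about the caller, not this module's
# API.
#
#   from scipy.interpolate import interp1d
#   lut, n_fact = _get_legen_table('eeg')
#   lut_fun = interp1d(np.linspace(-1, 1, lut.shape[0]), lut, axis=0)
#   # s0 = _comp_sum_eeg(beta.flatten(), ctheta.flatten(), lut_fun, n_fact)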
|
from django.conf import settings
from django.core.validators import MinValueValidator
from django.db import models
from django.utils.translation import gettext_lazy as _
from cms.models.fields import PlaceholderField
from filer.fields import image
from djangocms_text_ckeditor.fields import HTMLField
from polymorphic.query import PolymorphicQuerySet
from shop.conf import app_settings
from shop.models.product import BaseProduct, BaseProductManager, CMSPageReferenceMixin, AvailableProductMixin
from shop.models.defaults.mapping import ProductPage
from shop.money.fields import MoneyField
class CommodityMixin(AvailableProductMixin):
"""
Common methods used by both default Commodity models.
"""
def get_price(self, request):
return self.unit_price
if settings.USE_I18N:
    assert 'parler' in settings.INSTALLED_APPS, "django-parler is required when configured as a multilingual project"
from parler.managers import TranslatableManager, TranslatableQuerySet
from parler.models import TranslatableModelMixin, TranslatedFieldsModel
from parler.fields import TranslatedField
class ProductQuerySet(TranslatableQuerySet, PolymorphicQuerySet):
pass
class ProductManager(BaseProductManager, TranslatableManager):
queryset_class = ProductQuerySet
class Commodity(CMSPageReferenceMixin, TranslatableModelMixin, CommodityMixin, BaseProduct):
"""
        Generic Product Commodity to be used whenever the merchant does not require product specific
        attributes and just requires a placeholder field to add arbitrary data.
"""
# common product fields
product_code = models.CharField(
_("Product code"),
max_length=255,
unique=True,
)
unit_price = MoneyField(
_("Unit price"),
decimal_places=3,
help_text=_("Net price for this product"),
)
# controlling the catalog
order = models.PositiveIntegerField(
verbose_name=_("Sort by"),
db_index=True,
)
cms_pages = models.ManyToManyField(
'cms.Page',
through=ProductPage,
help_text=_("Choose list view this product shall appear on."),
)
sample_image = image.FilerImageField(
verbose_name=_("Sample Image"),
blank=True,
null=True,
default=None,
on_delete=models.SET_DEFAULT,
help_text=_("Sample image used in the catalog's list view."),
)
show_breadcrumb = models.BooleanField(
_("Show Breadcrumb"),
default=True,
help_text=_("Shall the detail page show the product's breadcrumb."),
)
placeholder = PlaceholderField("Commodity Details")
quantity = models.PositiveIntegerField(
_("Quantity"),
default=0,
validators=[MinValueValidator(0)],
help_text=_("Available quantity in stock")
)
# translatable fields for the catalog's list- and detail views
product_name = TranslatedField()
slug = TranslatedField()
caption = TranslatedField()
# filter expression used to search for a product item using the Select2 widget
lookup_fields = ['product_code__startswith', 'product_name__icontains']
objects = ProductManager()
class Meta:
app_label = app_settings.APP_LABEL
ordering = ['order']
verbose_name = _("Commodity")
verbose_name_plural = _("Commodities")
def __str__(self):
return self.product_code
class CommodityTranslation(TranslatedFieldsModel):
master = models.ForeignKey(
Commodity,
related_name='translations',
on_delete=models.CASCADE,
null=True,
)
product_name = models.CharField(
max_length=255,
verbose_name=_("Product Name"),
)
slug = models.SlugField(verbose_name=_("Slug"))
caption = HTMLField(
verbose_name=_("Caption"),
blank=True,
null=True,
help_text=_("Short description for the catalog list view."),
)
class Meta:
app_label = app_settings.APP_LABEL
unique_together = [('language_code', 'master')]
else:
class Commodity(CMSPageReferenceMixin, CommodityMixin, BaseProduct):
"""
        Generic Product Commodity to be used whenever the merchant does not require product specific
        attributes and just requires a placeholder field to add arbitrary data.
"""
# common product fields
product_name = models.CharField(
max_length=255,
verbose_name=_("Product Name"),
)
product_code = models.CharField(
_("Product code"),
max_length=255,
unique=True,
)
unit_price = MoneyField(
_("Unit price"),
decimal_places=3,
help_text=_("Net price for this product"),
)
# controlling the catalog
order = models.PositiveIntegerField(
verbose_name=_("Sort by"),
db_index=True,
)
cms_pages = models.ManyToManyField(
'cms.Page',
through=ProductPage,
help_text=_("Choose list view this product shall appear on."),
)
sample_image = image.FilerImageField(
verbose_name=_("Sample Image"),
blank=True,
null=True,
default=None,
on_delete=models.SET_DEFAULT,
help_text=_("Sample image used in the catalog's list view."),
)
show_breadcrumb = models.BooleanField(
_("Show Breadcrumb"),
default=True,
help_text=_("Shall the detail page show the product's breadcrumb."),
)
placeholder = PlaceholderField("Commodity Details")
quantity = models.PositiveIntegerField(
_("Quantity"),
default=0,
validators=[MinValueValidator(0)],
help_text=_("Available quantity in stock")
)
# common fields for the catalog's list- and detail views
slug = models.SlugField(verbose_name=_("Slug"))
caption = HTMLField(
verbose_name=_("Caption"),
blank=True,
null=True,
help_text=_("Short description for the catalog list view."),
)
# filter expression used to search for a product item using the Select2 widget
lookup_fields = ['product_code__startswith', 'product_name__icontains']
objects = BaseProductManager()
class Meta:
app_label = app_settings.APP_LABEL
ordering = ('order',)
verbose_name = _("Commodity")
verbose_name_plural = _("Commodities")
def __str__(self):
return self.product_code
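# A minimal standalone sketch (hypothetical names, not django-shop code) of the
# pattern used above: the same class name is bound to one of two definitions at
# import time, so callers import `Commodity` without caring whether the project
# is multilingual.
if __name__ == '__main__':
    feature_enabled = True  # stand-in for settings.USE_I18N
    if feature_enabled:
        class Thing:
            kind = 'translatable'
    else:
        class Thing:
            kind = 'plain'
    assert Thing.kind == 'translatable'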
|
import numpy as np
import pandas as pd
import pytest
import xarray as xr
cp = pytest.importorskip("cupy")
@pytest.fixture
def toy_weather_data():
"""Construct the example DataSet from the Toy weather data example.
http://xarray.pydata.org/en/stable/examples/weather-data.html
Here we construct the DataSet exactly as shown in the example and then
convert the numpy arrays to cupy.
"""
np.random.seed(123)
times = pd.date_range("2000-01-01", "2001-12-31", name="time")
annual_cycle = np.sin(2 * np.pi * (times.dayofyear.values / 365.25 - 0.28))
base = 10 + 15 * annual_cycle.reshape(-1, 1)
tmin_values = base + 3 * np.random.randn(annual_cycle.size, 3)
tmax_values = base + 10 + 3 * np.random.randn(annual_cycle.size, 3)
ds = xr.Dataset(
{
"tmin": (("time", "location"), tmin_values),
"tmax": (("time", "location"), tmax_values),
},
{"time": times, "location": ["IA", "IN", "IL"]},
)
ds.tmax.data = cp.asarray(ds.tmax.data)
ds.tmin.data = cp.asarray(ds.tmin.data)
return ds
def test_cupy_import():
"""Check the import worked."""
assert cp
def test_check_data_stays_on_gpu(toy_weather_data):
"""Perform some operations and check the data stays on the GPU."""
freeze = (toy_weather_data["tmin"] <= 0).groupby("time.month").mean("time")
assert isinstance(freeze.data, cp.core.core.ndarray)
def test_where():
from xarray.core.duck_array_ops import where
data = cp.zeros(10)
output = where(data < 1, 1, data).all()
assert output
assert isinstance(output, cp.ndarray)
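def _sketch_convert_to_gpu():
    """A standalone sketch (not a collected test) of the fixture's conversion.
    Swapping a DataArray's numpy buffer for a cupy array keeps the xarray
    metadata intact while moving the data to the GPU, so later reductions
    stay there. Requires cupy and a CUDA device.
    """
    da = xr.DataArray(np.arange(6.0).reshape(2, 3), dims=("x", "y"))
    da.data = cp.asarray(da.data)
    assert isinstance(da.mean("x").data, cp.ndarray)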
|
from unittest import mock
import blebox_uniapi
import pytest
from homeassistant.components.blebox.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.setup import async_setup_component
from tests.async_mock import AsyncMock, PropertyMock, patch
from tests.common import MockConfigEntry
from tests.components.light.conftest import mock_light_profiles # noqa
def patch_product_identify(path=None, **kwargs):
"""Patch the blebox_uniapi Products class."""
if path is None:
path = "homeassistant.components.blebox.Products"
patcher = patch(path, mock.DEFAULT, blebox_uniapi.products.Products, True, True)
products_class = patcher.start()
products_class.async_from_host = AsyncMock(**kwargs)
return products_class
def setup_product_mock(category, feature_mocks, path=None):
"""Mock a product returning the given features."""
product_mock = mock.create_autospec(
blebox_uniapi.box.Box, True, True, features=None
)
type(product_mock).features = PropertyMock(return_value={category: feature_mocks})
for feature in feature_mocks:
type(feature).product = PropertyMock(return_value=product_mock)
patch_product_identify(path, return_value=product_mock)
return product_mock
def mock_only_feature(spec, **kwargs):
"""Mock just the feature, without the product setup."""
return mock.create_autospec(spec, True, True, **kwargs)
def mock_feature(category, spec, **kwargs):
"""Mock a feature along with whole product setup."""
feature_mock = mock_only_feature(spec, **kwargs)
feature_mock.async_update = AsyncMock()
product = setup_product_mock(category, [feature_mock])
type(feature_mock.product).name = PropertyMock(return_value="Some name")
type(feature_mock.product).type = PropertyMock(return_value="some type")
type(feature_mock.product).model = PropertyMock(return_value="some model")
type(feature_mock.product).brand = PropertyMock(return_value="BleBox")
type(feature_mock.product).firmware_version = PropertyMock(return_value="1.23")
type(feature_mock.product).unique_id = PropertyMock(return_value="abcd0123ef5678")
type(feature_mock).product = PropertyMock(return_value=product)
return feature_mock
def mock_config(ip_address="172.100.123.4"):
"""Return a Mock of the HA entity config."""
return MockConfigEntry(domain=DOMAIN, data={CONF_HOST: ip_address, CONF_PORT: 80})
@pytest.fixture(name="config")
def config_fixture():
"""Create hass config fixture."""
return {DOMAIN: {CONF_HOST: "172.100.123.4", CONF_PORT: 80}}
@pytest.fixture(name="feature")
def feature_fixture(request):
"""Return an entity wrapper from given fixture name."""
return request.getfixturevalue(request.param)
async def async_setup_entities(hass, config, entity_ids):
"""Return configured entries with the given entity ids."""
config_entry = mock_config()
config_entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
return [entity_registry.async_get(entity_id) for entity_id in entity_ids]
async def async_setup_entity(hass, config, entity_id):
"""Return a configured entry with the given entity_id."""
return (await async_setup_entities(hass, config, [entity_id]))[0]
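async def _example_setup(hass, config):
    """A hypothetical usage sketch (not a collected test) of the helpers above.
    The feature category, spec and entity id are illustrative assumptions only:
    mock a single feature, then set up the integration and read the registered
    entity back.
    """
    mock_feature("sensors", blebox_uniapi.sensor.Temperature)
    return await async_setup_entity(hass, config, "sensor.blebox_temperature")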
|
import configobj
import os
import sys
import logging
import inspect
import traceback
import pkg_resources
import imp
from diamond.util import load_class_from_name
from diamond.collector import Collector
from diamond.handler.Handler import Handler
logger = logging.getLogger('diamond')
def load_include_path(paths):
"""
Scan for and add paths to the include path
"""
for path in paths:
# Verify the path is valid
if not os.path.isdir(path):
continue
# Add path to the system path, to avoid name clashes
# with mysql-connector for example ...
if path not in sys.path:
sys.path.insert(1, path)
# Load all the files in path
for f in os.listdir(path):
# Are we a directory? If so process down the tree
fpath = os.path.join(path, f)
if os.path.isdir(fpath):
load_include_path([fpath])
def load_dynamic_class(fqn, subclass):
"""
Dynamically load fqn class and verify it's a subclass of subclass
"""
if not isinstance(fqn, basestring):
return fqn
cls = load_class_from_name(fqn)
if cls == subclass or not issubclass(cls, subclass):
raise TypeError("%s is not a valid %s" % (fqn, subclass.__name__))
return cls
def load_handlers(config, handler_names):
"""
Load handlers
"""
handlers = []
if isinstance(handler_names, basestring):
handler_names = [handler_names]
for handler in handler_names:
logger.debug('Loading Handler %s', handler)
try:
# Load Handler Class
cls = load_dynamic_class(handler, Handler)
cls_name = cls.__name__
# Initialize Handler config
handler_config = configobj.ConfigObj()
# Merge default Handler default config
handler_config.merge(config['handlers']['default'])
# Check if Handler config exists
if cls_name in config['handlers']:
# Merge Handler config section
handler_config.merge(config['handlers'][cls_name])
# Check for config file in config directory
if 'handlers_config_path' in config['server']:
configfile = os.path.join(
config['server']['handlers_config_path'],
cls_name) + '.conf'
if os.path.exists(configfile):
# Merge Collector config file
handler_config.merge(configobj.ConfigObj(configfile))
# Initialize Handler class
h = cls(handler_config)
handlers.append(h)
except (ImportError, SyntaxError):
# Log Error
logger.warning("Failed to load handler %s. %s",
handler,
traceback.format_exc())
continue
return handlers
def load_collectors(paths):
"""
Load all collectors
"""
collectors = load_collectors_from_paths(paths)
collectors.update(load_collectors_from_entry_point('diamond.collectors'))
return collectors
def load_collectors_from_paths(paths):
"""
Scan for collectors to load from path
"""
# Initialize return value
collectors = {}
    if paths is None:
        return collectors
if isinstance(paths, basestring):
paths = paths.split(',')
paths = map(str.strip, paths)
load_include_path(paths)
for path in paths:
# Get a list of files in the directory, if the directory exists
if not os.path.exists(path):
raise OSError("Directory does not exist: %s" % path)
if path.endswith('tests') or path.endswith('fixtures'):
return collectors
# Load all the files in path
for f in os.listdir(path):
# Are we a directory? If so process down the tree
fpath = os.path.join(path, f)
if os.path.isdir(fpath):
subcollectors = load_collectors_from_paths([fpath])
for key in subcollectors:
collectors[key] = subcollectors[key]
# Ignore anything that isn't a .py file
elif (os.path.isfile(fpath) and
len(f) > 3 and
f[-3:] == '.py' and
f[0:4] != 'test' and
f[0] != '.'):
modname = f[:-3]
fp, pathname, description = imp.find_module(modname, [path])
try:
# Import the module
mod = imp.load_module(modname, fp, pathname, description)
except (KeyboardInterrupt, SystemExit) as err:
logger.error(
"System or keyboard interrupt "
"while loading module %s"
% modname)
if isinstance(err, SystemExit):
sys.exit(err.code)
raise KeyboardInterrupt
except Exception:
# Log error
logger.error("Failed to import module: %s. %s",
modname,
traceback.format_exc())
else:
for name, cls in get_collectors_from_module(mod):
collectors[name] = cls
finally:
if fp:
fp.close()
# Return Collector classes
return collectors
def load_collectors_from_entry_point(path):
"""
Load collectors that were installed into an entry_point.
"""
collectors = {}
for ep in pkg_resources.iter_entry_points(path):
try:
mod = ep.load()
except Exception:
logger.error('Failed to import entry_point: %s. %s',
ep.name,
traceback.format_exc())
else:
collectors.update(get_collectors_from_module(mod))
return collectors
def get_collectors_from_module(mod):
"""
Locate all of the collector classes within a given module
"""
for attrname in dir(mod):
attr = getattr(mod, attrname)
        # Only attempt to load classes that are in fact classes,
        # are Collectors, but are not the base Collector class
if ((inspect.isclass(attr) and
issubclass(attr, Collector) and
attr != Collector)):
if attrname.startswith('parent_'):
continue
# Get class name
fqcn = '.'.join([mod.__name__, attrname])
try:
# Load Collector class
cls = load_dynamic_class(fqcn, Collector)
# Add Collector class
yield cls.__name__, cls
except Exception:
# Log error
logger.error(
"Failed to load Collector: %s. %s",
fqcn, traceback.format_exc())
continue
def initialize_collector(cls, name=None, configfile=None, handlers=None):
    """
    Initialize collector
    """
    if handlers is None:
        handlers = []
    collector = None
try:
# Initialize Collector
collector = cls(name=name, configfile=configfile, handlers=handlers)
except Exception:
# Log error
logger.error("Failed to initialize Collector: %s. %s",
cls.__name__, traceback.format_exc())
# Return collector
return collector
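if __name__ == '__main__':
    # Minimal usage sketch (the path below is hypothetical): discover collector
    # classes from a directory tree plus any installed entry points, then print
    # their names. Guarded so it is a no-op when the directory is absent.
    example_path = '/usr/share/diamond/collectors'
    if os.path.isdir(example_path):
        for name in sorted(load_collectors([example_path])):
            print(name)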
|
from Handler import Handler
from diamond.metric import Metric
import urllib2
import StringIO
import gzip
import base64
import json
import re
import contextlib
class TSDBHandler(Handler):
"""
Implements the abstract Handler class, sending data to OpenTSDB
"""
def __init__(self, config=None):
"""
Create a new instance of the TSDBHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
# Initialize Options
# host
self.host = str(self.config['host'])
self.port = int(self.config['port'])
self.timeout = int(self.config['timeout'])
# Authorization
self.user = str(self.config['user'])
self.password = str(self.config['password'])
# data
self.batch = int(self.config['batch'])
self.compression = int(self.config['compression'])
# prefix
if self.config['prefix'] != "":
self.prefix = str(self.config['prefix'])+'.'
else:
self.prefix = ""
# tags
self.tags = []
pattern = re.compile(r'([a-zA-Z0-9]+)=([a-zA-Z0-9]+)')
for (key, value) in re.findall(pattern, str(self.config['tags'])):
self.tags.append([key, value])
# headers
self.httpheader = {"Content-Type": "application/json"}
# Authorization
if self.user != "":
self.httpheader["Authorization"] = "Basic " +\
base64.encodestring('%s:%s' % (self.user, self.password))[:-1]
# compression
if self.compression >= 1:
self.httpheader["Content-Encoding"] = "gzip"
self.entrys = []
self.skipAggregates = self.config['skipAggregates']
self.cleanMetrics = self.config['cleanMetrics']
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(TSDBHandler, self).get_default_config_help()
config.update({
'host': '',
'port': '',
'timeout': '',
'tags': '',
'prefix': '',
'batch': '',
'compression': '',
'user': '',
'password': '',
'cleanMetrics': True,
'skipAggregates': True,
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(TSDBHandler, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 4242,
'timeout': 5,
'tags': '',
'prefix': '',
'batch': 1,
'compression': 0,
'user': '',
'password': '',
'cleanMetrics': True,
'skipAggregates': True,
})
return config
def __del__(self):
"""
Destroy instance of the TSDBHandler class
"""
self.log.debug("Stopping TSDBHandler ...")
def process(self, metric):
"""
Process a metric by sending it to TSDB
"""
entry = {'timestamp': metric.timestamp, 'value': metric.value,
"tags": {}}
entry["tags"]["hostname"] = metric.host
if self.cleanMetrics:
metric = MetricWrapper(metric, self.log)
if self.skipAggregates and metric.isAggregate():
return
for tagKey in metric.getTags():
entry["tags"][tagKey] = metric.getTags()[tagKey]
entry['metric'] = (self.prefix + metric.getCollectorPath() +
'.' + metric.getMetricPath())
for [key, value] in self.tags:
entry["tags"][key] = value
self.entrys.append(entry)
# send data if list is long enough
if (len(self.entrys) >= self.batch):
# Compress data
if self.compression >= 1:
data = StringIO.StringIO()
with contextlib.closing(gzip.GzipFile(fileobj=data,
compresslevel=self.compression,
mode="w")) as f:
f.write(json.dumps(self.entrys))
self._send(data.getvalue())
else:
# no compression
data = json.dumps(self.entrys)
self._send(data)
def _send(self, content):
"""
Send content to TSDB.
"""
retry = 0
success = False
while retry < 3 and success is False:
self.log.debug(content)
try:
request = urllib2.Request("http://"+self.host+":" +
str(self.port)+"/api/put",
content, self.httpheader)
response = urllib2.urlopen(url=request, timeout=self.timeout)
if response.getcode() < 301:
self.log.debug(response.read())
# Transaction should be finished
self.log.debug(response.getcode())
success = True
except urllib2.HTTPError as e:
self.log.error("HTTP Error Code: "+str(e.code))
self.log.error("Message : "+str(e.reason))
except urllib2.URLError as e:
self.log.error("Connection Error: "+str(e.reason))
finally:
retry += 1
self.entrys = []
"""
This class wraps a metric and applies the additonal OpenTSDB tagging logic.
"""
class MetricWrapper(Metric):
def isAggregate(self):
return self.aggregate
def getTags(self):
return self.tags
"""
This method does nothing and therefore keeps the existing metric unchanged.
"""
def processDefaultMetric(self):
self.tags = {}
self.aggregate = False
"""
Processes a metric of the CPUCollector. It stores the cpuId in a tag and
marks all metrics with 'total' as aggregates, so they can be skipped if
the skipAggregates feature is active.
"""
def processCpuMetric(self):
if len(self.getMetricPath().split('.')) > 1:
self.aggregate = self.getMetricPath().split('.')[0] == 'total'
cpuId = self.delegate.getMetricPath().split('.')[0]
self.tags["cpuId"] = cpuId
self.path = self.path.replace("."+cpuId+".", ".")
"""
Processes metrics of the HaProxyCollector. It stores the backend and the
server to which the backends send as tags. Counters with 'backend' as
backend name are considered aggregates.
"""
def processHaProxyMetric(self):
if len(self.getMetricPath().split('.')) == 3:
self.aggregate = self.getMetricPath().split('.')[1] == 'backend'
backend = self.delegate.getMetricPath().split('.')[0]
server = self.delegate.getMetricPath().split('.')[1]
self.tags["backend"] = backend
self.tags["server"] = server
self.path = self.path.replace("."+server+".", ".")
self.path = self.path.replace("."+backend+".", ".")
"""
Processes metrics of the DiskspaceCollector. It stores the mountpoint as a
tag. There are no aggregates in this collector.
"""
def processDiskspaceMetric(self):
if len(self.getMetricPath().split('.')) == 2:
mountpoint = self.delegate.getMetricPath().split('.')[0]
self.tags["mountpoint"] = mountpoint
self.path = self.path.replace("."+mountpoint+".", ".")
"""
Processes metrics of the DiskusageCollector. It stores the device as a
tag. There are no aggregates in this collector.
"""
def processDiskusageMetric(self):
if len(self.getMetricPath().split('.')) == 2:
device = self.delegate.getMetricPath().split('.')[0]
self.tags["device"] = device
self.path = self.path.replace("."+device+".", ".")
"""
Processes metrics of the NetworkCollector. It stores the interface as a
tag. There are no aggregates in this collector.
"""
def processNetworkMetric(self):
if len(self.getMetricPath().split('.')) == 2:
interface = self.delegate.getMetricPath().split('.')[0]
self.tags["interface"] = interface
self.path = self.path.replace("."+interface+".", ".")
def processMattermostMetric(self):
split = self.getMetricPath().split('.')
if len(split) > 2:
if split[0] == 'teamdetails' or split[0] == 'channeldetails':
team = split[1]
self.tags["team"] = team
self.path = self.path.replace("."+team+".", ".")
# fall through for channeldetails
if split[0] == 'channeldetails':
channel = split[2]
self.tags["channel"] = channel
self.path = self.path.replace("."+channel+".", ".")
if split[0] == 'userdetails':
user = split[1]
team = split[2]
channel = split[3]
self.tags["user"] = user
self.tags["team"] = team
self.tags["channel"] = channel
self.path = self.path.replace("."+user+".", ".")
self.path = self.path.replace("."+team+".", ".")
self.path = self.path.replace("."+channel+".", ".")
handlers = {'cpu': processCpuMetric, 'haproxy': processHaProxyMetric,
'mattermost': processMattermostMetric,
'diskspace': processDiskspaceMetric,
'iostat': processDiskusageMetric,
'network': processNetworkMetric,
'default': processDefaultMetric}
def __init__(self, delegate, logger):
self.path = delegate.path
self.value = delegate.value
self.host = delegate.host
self.raw_value = delegate.raw_value
self.timestamp = delegate.timestamp
self.precision = delegate.precision
self.ttl = delegate.ttl
self.metric_type = delegate.metric_type
self.delegate = delegate
self.tags = {}
self.aggregate = False
self.newMetricName = None
self.logger = logger
# call the handler for that collector
handler = self.handlers.get(self.getCollectorPath(),
self.handlers['default'])
handler(self)
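if __name__ == '__main__':
    # Minimal standalone sketch (fabricated values, independent of diamond) of
    # the tagging logic above: the instance segment of a CPU metric path is
    # lifted into a tag, mirroring what processCpuMetric does to the wrapped
    # metric before it is sent to OpenTSDB.
    example_path = 'servers.host1.cpu.cpu0.user'
    metric_path = 'cpu0.user'  # what getMetricPath() would return here
    cpu_id = metric_path.split('.')[0]
    tags = {'cpuId': cpu_id}
    example_path = example_path.replace('.' + cpu_id + '.', '.')
    assert example_path == 'servers.host1.cpu.user'
    assert tags == {'cpuId': 'cpu0'}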
|
import diamond.collector
import re
import subprocess
from diamond.collector import str_to_bool
class VarnishCollector(diamond.collector.Collector):
    _RE = re.compile(r"^(?P<stat>[\w_.,]*)\s+(?P<psa>\d*)\s+"
                     r"(?P<psan>[\d.]*)\s+(?P<desc>.*)$", re.M)
_KEYS_v3 = frozenset([
'client_conn', 'client_drop', 'client_req', 'cache_hit',
'cache_hitpass', 'cache_miss', 'backend_conn', 'backend_unhealthy',
'backend_busy', 'backend_fail', 'backend_reuse', 'backend_toolate',
'backend_recycle', 'backend_retry', 'fetch_head', 'fetch_length',
'fetch_chunked', 'fetch_eof', 'fetch_bad', 'fetch_close',
'fetch_oldhttp', 'fetch_zero', 'fetch_failed', 'fetch_1xx',
'fetch_204', 'fetch_304', 'n_sess_mem',
'n_sess', 'n_object', 'n_vampireobject', 'n_objectcore',
'n_objecthead', 'n_waitinglist', 'n_vbc', 'n_wrk', 'n_wrk_create',
'n_wrk_failed', 'n_wrk_max', 'n_wrk_lqueue', 'n_wrk_queued',
'n_wrk_drop', 'n_backend', 'n_expired', 'n_lru_nuked',
'n_lru_moved', 'losthdr', 'n_objsendfile', 'n_objwrite',
'n_objoverflow', 's_sess', 's_req', 's_pipe', 's_pass', 's_fetch',
's_hdrbytes', 's_bodybytes', 'sess_closed', 'sess_pipeline',
'sess_readahead', 'sess_linger', 'sess_herd', 'shm_records',
'shm_writes', 'shm_flushes', 'shm_cont', 'shm_cycles', 'sms_nreq',
'sms_nobj', 'sms_nbytes', 'sms_balloc', 'sms_bfree', 'backend_req',
'n_vcl', 'n_vcl_avail', 'n_vcl_discard', 'n_ban', 'n_ban_add',
'n_ban_retire', 'n_ban_obj_test', 'n_ban_re_test', 'n_ban_dups',
'hcb_nolock', 'hcb_lock', 'hcb_insert', 'esi_errors',
'esi_warnings', 'accept_fail', 'client_drop_late', 'uptime',
'dir_dns_lookups', 'dir_dns_failed', 'dir_dns_hit',
'dir_dns_cache_full', 'n_gzip', 'n_gunzip',
])
_KEYS_v4 = frozenset([
'MAIN.uptime', 'MAIN.sess_conn', 'MAIN.sess_drop', 'MAIN.sess_fail',
'MAIN.sess_pipe_overflow', 'MAIN.client_req_400', 'MAIN.client_req_411',
'MAIN.client_req_413', 'MAIN.client_req_417', 'MAIN.client_req',
'MAIN.cache_hit', 'MAIN.cache_hitpass', 'MAIN.cache_miss',
'MAIN.backend_conn', 'MAIN.backend_unhealthy', 'MAIN.backend_busy',
'MAIN.backend_fail', 'MAIN.backend_reuse', 'MAIN.backend_toolate',
'MAIN.backend_recycle', 'MAIN.backend_retry', 'MAIN.fetch_head',
'MAIN.fetch_length', 'MAIN.fetch_chunked', 'MAIN.fetch_eof',
'MAIN.fetch_bad', 'MAIN.fetch_close', 'MAIN.fetch_oldhttp',
'MAIN.fetch_zero', 'MAIN.fetch_1xx', 'MAIN.fetch_204', 'MAIN.fetch_304',
'MAIN.fetch_failed', 'MAIN.fetch_no_thread', 'MAIN.pools',
'MAIN.threads', 'MAIN.threads_limited',
'MAIN.threads_created', 'MAIN.threads_destroyed', 'MAIN.threads_failed',
'MAIN.thread_queue_len', 'MAIN.busy_sleep', 'MAIN.busy_wakeup',
'MAIN.sess_queued', 'MAIN.sess_dropped', 'MAIN.n_object',
'MAIN.n_vampireobject', 'MAIN.n_objectcore', 'MAIN.n_objecthead',
'MAIN.n_waitinglist', 'MAIN.n_backend', 'MAIN.n_expired',
'MAIN.n_lru_nuked', 'MAIN.n_lru_moved', 'MAIN.losthdr', 'MAIN.s_sess',
'MAIN.s_req', 'MAIN.s_pipe', 'MAIN.s_pass', 'MAIN.s_fetch',
'MAIN.s_synth',
'MAIN.s_req_hdrbytes', 'MAIN.s_req_bodybytes', 'MAIN.s_resp_hdrbytes',
'MAIN.s_resp_bodybytes', 'MAIN.s_pipe_hdrbytes', 'MAIN.s_pipe_in',
'MAIN.s_pipe_out', 'MAIN.sess_closed', 'MAIN.sess_pipeline',
'MAIN.sess_closed_err',
'MAIN.sess_readahead', 'MAIN.sess_herd', 'MAIN.shm_records',
'MAIN.shm_writes', 'MAIN.shm_flushes', 'MAIN.shm_cont',
'MAIN.shm_cycles',
'MAIN.sms_nreq', 'MAIN.sms_nobj', 'MAIN.sms_nbytes', 'MAIN.sms_balloc',
'MAIN.sms_bfree', 'MAIN.backend_req', 'MAIN.n_vcl', 'MAIN.n_vcl_avail',
'MAIN.n_vcl_discard', 'MAIN.bans', 'MAIN.bans_completed',
'MAIN.bans_obj',
'MAIN.bans_req', 'MAIN.bans_added', 'MAIN.bans_deleted',
'MAIN.bans_tested',
'MAIN.bans_obj_killed', 'MAIN.bans_lurker_tested',
'MAIN.bans_tests_tested',
'MAIN.bans_lurker_tests_tested', 'MAIN.bans_lurker_obj_killed',
'MAIN.bans_dups', 'MAIN.bans_lurker_contention',
'MAIN.bans_persisted_bytes',
'MAIN.bans_persisted_fragmentation', 'MAIN.n_purges',
'MAIN.n_obj_purged', 'MAIN.exp_mailed', 'MAIN.exp_received',
'MAIN.hcb_nolock', 'MAIN.hcb_lock', 'MAIN.hcb_insert',
'MAIN.esi_errors',
'MAIN.esi_warnings', 'MAIN.vmods', 'MAIN.n_gzip', 'MAIN.n_gunzip',
'MAIN.vsm_free', 'MAIN.vsm_used', 'MAIN.vsm_cooling',
'MAIN.vsm_overflow',
'MAIN.vsm_overflowed', 'MGT.uptime', 'MGT.child_start',
'MGT.child_exit',
'MGT.child_stop', 'MGT.child_died', 'MGT.child_dump', 'MGT.child_panic',
'LCK.sms.creat', 'LCK.sms.destroy', 'LCK.sms.locks', 'LCK.smp.creat',
'LCK.smp.destroy', 'LCK.smp.locks', 'LCK.sma.creat', 'LCK.sma.destroy',
'LCK.sma.locks', 'LCK.smf.creat', 'LCK.smf.destroy', 'LCK.smf.locks',
'LCK.hsl.creat', 'LCK.hsl.destroy', 'LCK.hsl.locks', 'LCK.hcb.creat',
'LCK.hcb.destroy', 'LCK.hcb.locks', 'LCK.hcl.creat', 'LCK.hcl.destroy',
'LCK.hcl.locks', 'LCK.vcl.creat', 'LCK.vcl.destroy', 'LCK.vcl.locks',
'LCK.sessmem.creat', 'LCK.sessmem.destroy', 'LCK.sessmem.locks',
'LCK.sess.creat', 'LCK.sess.destroy', 'LCK.sess.locks',
'LCK.wstat.creat',
'LCK.wstat.destroy', 'LCK.wstat.locks', 'LCK.herder.creat',
'LCK.herder.destroy', 'LCK.herder.locks', 'LCK.wq.creat',
'LCK.wq.destroy',
'LCK.wq.locks', 'LCK.objhdr.creat', 'LCK.objhdr.destroy',
'LCK.objhdr.locks',
'LCK.exp.creat', 'LCK.exp.destroy', 'LCK.exp.locks', 'LCK.lru.creat',
'LCK.lru.destroy', 'LCK.lru.locks', 'LCK.cli.creat', 'LCK.cli.destroy',
'LCK.cli.locks', 'LCK.ban.creat', 'LCK.ban.destroy', 'LCK.ban.locks',
'LCK.vbp.creat', 'LCK.vbp.destroy', 'LCK.vbp.locks',
'LCK.backend.creat',
'LCK.backend.destroy', 'LCK.backend.locks', 'LCK.vcapace.creat',
'LCK.vcapace.destroy', 'LCK.vcapace.locks', 'LCK.nbusyobj.creat',
'LCK.nbusyobj.destroy', 'LCK.nbusyobj.locks', 'LCK.busyobj.creat',
'LCK.busyobj.destroy', 'LCK.busyobj.locks', 'LCK.mempool.creat',
'LCK.mempool.destroy', 'LCK.mempool.locks', 'LCK.vxid.creat',
'LCK.vxid.destroy', 'LCK.vxid.locks', 'LCK.pipestat.creat',
'LCK.pipestat.destroy', 'LCK.pipestat.locks'
])
def get_default_config_help(self):
config_help = super(VarnishCollector, self).get_default_config_help()
config_help.update({
'bin': 'The path to the varnishstat binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(VarnishCollector, self).get_default_config()
config.update({
'path': 'varnish',
'bin': '/usr/bin/varnishstat',
'use_sudo': False,
'sudo_cmd': '/usr/bin/sudo',
})
return config
def collect(self):
data = {}
output = self.poll()
matches = self._RE.findall(output)
# No matches at all, bail out
if not matches:
return
        # Check the first line to see if it begins with 'MAIN.';
        # if so, this is varnish 4.0 stats
if matches[0][0].startswith('MAIN.'):
keys = self._KEYS_v4
else:
keys = self._KEYS_v3
for line in matches:
if line[0] in keys:
data[line[0]] = line[1]
for key in data:
self.publish(key, int(data[key]))
def poll(self):
try:
command = [self.config['bin'], '-1']
if str_to_bool(self.config['use_sudo']):
command.insert(0, self.config['sudo_cmd'])
output = subprocess.Popen(command,
stdout=subprocess.PIPE).communicate()[0]
except OSError:
output = ""
return output
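if __name__ == '__main__':
    # Minimal sketch with fabricated `varnishstat -1` output: the class regex
    # yields (stat, value, rate, description) tuples, and the MAIN. prefix on
    # the first stat is what collect() uses to pick the varnish 4 key set.
    sample = ("client_conn            12345         1.23 Client connections\n"
              "MAIN.cache_hit         67890         6.78 Cache hits\n")
    for stat, psa, psan, desc in VarnishCollector._RE.findall(sample):
        print(stat, psa)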
|
import json
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_RESOURCE, CONF_VERIFY_SSL, HTTP_CREATED, HTTP_OK
import homeassistant.helpers.config_validation as cv
ATTR_FILE_URL = "file_url"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RESOURCE): cv.url,
vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
}
)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the Synology Chat notification service."""
resource = config.get(CONF_RESOURCE)
verify_ssl = config.get(CONF_VERIFY_SSL)
return SynologyChatNotificationService(resource, verify_ssl)
class SynologyChatNotificationService(BaseNotificationService):
"""Implementation of a notification service for Synology Chat."""
def __init__(self, resource, verify_ssl):
"""Initialize the service."""
self._resource = resource
self._verify_ssl = verify_ssl
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
data = {"text": message}
extended_data = kwargs.get(ATTR_DATA)
file_url = extended_data.get(ATTR_FILE_URL) if extended_data else None
if file_url:
data["file_url"] = file_url
to_send = "payload={}".format(json.dumps(data))
response = requests.post(
self._resource, data=to_send, timeout=10, verify=self._verify_ssl
)
        if response.status_code not in (HTTP_OK, HTTP_CREATED):
            _LOGGER.error(
                "Error sending message. Response %d: %s",
                response.status_code,
                response.reason,
            )
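if __name__ == '__main__':
    # Minimal sketch (fabricated values) of the request body built in
    # send_message above: Synology Chat's incoming webhook expects a form
    # field named `payload` whose value is a JSON document.
    body = {"text": "Backup finished", "file_url": "http://example.com/1.png"}
    print("payload={}".format(json.dumps(body)))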
|
from django.urls import register_converter
from django.urls.converters import StringConverter
class WeblateSlugConverter(StringConverter):
regex = "[^/]+"
class GitPathConverter(StringConverter):
regex = "(info/|git-upload-pack)[a-z0-9_/-]*"
class WordConverter(StringConverter):
regex = "[^/-]+"
class WidgetExtensionConverter(StringConverter):
regex = "(png|svg)"
class OptionalPathConverter(StringConverter):
regex = "(info/|git-upload-pack)[a-z0-9_/-]*|"
register_converter(WeblateSlugConverter, "name")
register_converter(GitPathConverter, "gitpath")
register_converter(WordConverter, "word")
register_converter(WidgetExtensionConverter, "extension")
register_converter(OptionalPathConverter, "optionalpath")
|
from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DOMAIN as BINARY_SENSOR_DOMAIN,
)
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .test_device import NAME, setup_axis_integration
EVENTS = [
{
"operation": "Initialized",
"topic": "tns1:Device/tnsaxis:Sensor/PIR",
"source": "sensor",
"source_idx": "0",
"type": "state",
"value": "0",
},
{
"operation": "Initialized",
"topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1",
"type": "active",
"value": "1",
},
]
async def test_platform_manually_configured(hass):
"""Test that nothing happens when platform is manually configured."""
assert (
await async_setup_component(
hass,
BINARY_SENSOR_DOMAIN,
{BINARY_SENSOR_DOMAIN: {"platform": AXIS_DOMAIN}},
)
is True
)
assert AXIS_DOMAIN not in hass.data
async def test_no_binary_sensors(hass):
"""Test that no sensors in Axis results in no sensor entities."""
await setup_axis_integration(hass)
assert not hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)
async def test_binary_sensors(hass):
"""Test that sensors are loaded properly."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
for event in EVENTS:
device.api.event.process_event(event)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 2
pir = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_0")
assert pir.state == STATE_OFF
assert pir.name == f"{NAME} PIR 0"
assert pir.attributes["device_class"] == DEVICE_CLASS_MOTION
vmd4 = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_profile_1")
assert vmd4.state == STATE_ON
assert vmd4.name == f"{NAME} VMD4 Profile 1"
assert vmd4.attributes["device_class"] == DEVICE_CLASS_MOTION
|
import pytest
import os
import sys
import time
import psutil
from plumbum import local, NOHUP
try:
from plumbum.cmd import bash, echo
except ImportError:
bash = None
echo = None
from plumbum.path.utils import delete
from plumbum._testtools import skip_on_windows
@skip_on_windows
class TestNohupLocal:
def read_file(self, filename):
assert filename in os.listdir('.')
with open(filename) as f:
return f.read()
@pytest.mark.usefixtures("testdir")
def test_slow(self):
delete('nohup.out')
sp = bash['slow_process.bash']
sp & NOHUP
time.sleep(.5)
assert self.read_file('slow_process.out') == 'Starting test\n1\n'
assert self.read_file('nohup.out') == '1\n'
time.sleep(1)
assert self.read_file('slow_process.out') == 'Starting test\n1\n2\n'
assert self.read_file('nohup.out') == '1\n2\n'
time.sleep(2)
delete('nohup.out', 'slow_process.out')
def test_append(self):
delete('nohup.out')
output = echo['This is output']
output & NOHUP
time.sleep(.2)
assert self.read_file('nohup.out') == 'This is output\n'
output & NOHUP
time.sleep(.2)
assert self.read_file('nohup.out') == 'This is output\n'*2
delete('nohup.out')
def test_redir(self):
delete('nohup_new.out')
output = echo['This is output']
output & NOHUP(stdout = 'nohup_new.out')
time.sleep(.2)
assert self.read_file('nohup_new.out') == 'This is output\n'
delete('nohup_new.out')
(output > 'nohup_new.out') & NOHUP
time.sleep(.2)
assert self.read_file('nohup_new.out') == 'This is output\n'
delete('nohup_new.out')
output & NOHUP
time.sleep(.2)
assert self.read_file('nohup.out') == 'This is output\n'
delete('nohup.out')
def test_closed_filehandles(self):
proc = psutil.Process()
        file_handles_prior = proc.num_fds()
        sleep_proc = local['sleep']['1'] & NOHUP
sleep_proc.wait()
file_handles_after = proc.num_fds()
assert file_handles_prior == file_handles_after
|
import os
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import Q
from django.template.defaultfilters import slugify
from django.urls import reverse
from django.utils import timezone
from django.utils.html import strip_tags
from django.utils.text import Truncator
from django.utils.translation import gettext_lazy as _
import django_comments as comments
from django_comments.models import CommentFlag
from tagging.fields import TagField
from tagging.utils import parse_tag_input
from zinnia.flags import PINGBACK
from zinnia.flags import TRACKBACK
from zinnia.managers import DRAFT, HIDDEN, PUBLISHED
from zinnia.managers import EntryPublishedManager
from zinnia.managers import entries_published
from zinnia.markups import html_format
from zinnia.preview import HTMLPreview
from zinnia.settings import AUTO_CLOSE_COMMENTS_AFTER
from zinnia.settings import AUTO_CLOSE_PINGBACKS_AFTER
from zinnia.settings import AUTO_CLOSE_TRACKBACKS_AFTER
from zinnia.settings import ENTRY_CONTENT_TEMPLATES
from zinnia.settings import ENTRY_DETAIL_TEMPLATES
from zinnia.settings import UPLOAD_TO
from zinnia.url_shortener import get_url_shortener
class CoreEntry(models.Model):
"""
Abstract core entry model class providing
the fields and methods required for publishing
content over time.
"""
STATUS_CHOICES = ((DRAFT, _('draft')),
(HIDDEN, _('hidden')),
(PUBLISHED, _('published')))
title = models.CharField(
_('title'), max_length=255)
slug = models.SlugField(
_('slug'), max_length=255,
unique_for_date='publication_date',
help_text=_("Used to build the entry's URL."))
status = models.IntegerField(
_('status'), db_index=True,
choices=STATUS_CHOICES, default=DRAFT)
publication_date = models.DateTimeField(
_('publication date'),
db_index=True, default=timezone.now,
help_text=_("Used to build the entry's URL."))
start_publication = models.DateTimeField(
_('start publication'),
db_index=True, blank=True, null=True,
help_text=_('Start date of publication.'))
end_publication = models.DateTimeField(
_('end publication'),
db_index=True, blank=True, null=True,
help_text=_('End date of publication.'))
sites = models.ManyToManyField(
Site,
related_name='entries',
verbose_name=_('sites'),
help_text=_('Sites where the entry will be published.'))
creation_date = models.DateTimeField(
_('creation date'),
default=timezone.now)
last_update = models.DateTimeField(
_('last update'), default=timezone.now)
objects = models.Manager()
published = EntryPublishedManager()
@property
def is_actual(self):
"""
        Checks if an entry is within its publication period.
"""
now = timezone.now()
if self.start_publication and now < self.start_publication:
return False
if self.end_publication and now >= self.end_publication:
return False
return True
@property
def is_visible(self):
"""
Checks if an entry is visible and published.
"""
return self.is_actual and self.status == PUBLISHED
@property
def previous_entry(self):
"""
        Returns the previous published entry if it exists.
"""
return self.previous_next_entries[0]
@property
def next_entry(self):
"""
        Returns the next published entry if it exists.
"""
return self.previous_next_entries[1]
@property
def previous_next_entries(self):
"""
        Returns and caches a tuple containing the previous
        and next published entries.
Only available if the entry instance is published.
"""
previous_next = getattr(self, 'previous_next', None)
if previous_next is None:
if not self.is_visible:
previous_next = (None, None)
setattr(self, 'previous_next', previous_next)
return previous_next
entries = list(self.__class__.published.all())
index = entries.index(self)
try:
previous = entries[index + 1]
except IndexError:
previous = None
if index:
_next = entries[index - 1]
else:
_next = None
previous_next = (previous, _next)
setattr(self, 'previous_next', previous_next)
return previous_next
@property
def short_url(self):
"""
Returns the entry's short url.
"""
return get_url_shortener()(self)
def save(self, *args, **kwargs):
"""
        Overrides the save method to update
        the last_update field.
"""
self.last_update = timezone.now()
super(CoreEntry, self).save(*args, **kwargs)
def get_absolute_url(self):
"""
        Builds and returns the entry's URL based on
        the slug and the publication date.
"""
publication_date = self.publication_date
if timezone.is_aware(publication_date):
publication_date = timezone.localtime(publication_date)
return reverse('zinnia:entry_detail', kwargs={
'year': publication_date.strftime('%Y'),
'month': publication_date.strftime('%m'),
'day': publication_date.strftime('%d'),
'slug': self.slug})
def __str__(self):
return '%s: %s' % (self.title, self.get_status_display())
class Meta:
"""
        CoreEntry's meta information.
"""
abstract = True
ordering = ['-publication_date']
get_latest_by = 'publication_date'
verbose_name = _('entry')
verbose_name_plural = _('entries')
index_together = [['slug', 'publication_date'],
['status', 'publication_date',
'start_publication', 'end_publication']]
permissions = (('can_view_all', 'Can view all entries'),
('can_change_status', 'Can change status'),
('can_change_author', 'Can change author(s)'), )
class ContentEntry(models.Model):
"""
Abstract content model class providing field
and methods to write content inside an entry.
"""
content = models.TextField(_('content'), blank=True)
@property
def html_content(self):
"""
Returns the "content" field formatted in HTML.
"""
return html_format(self.content)
@property
def html_preview(self):
"""
Returns a preview of the "content" field or
the "lead" field if defined, formatted in HTML.
"""
return HTMLPreview(self.html_content,
getattr(self, 'html_lead', ''))
@property
def word_count(self):
"""
Counts the number of words used in the content.
"""
return len(strip_tags(self.html_content).split())
class Meta:
abstract = True
class DiscussionsEntry(models.Model):
"""
Abstract discussion model class providing
the fields and methods to manage the discussions
(comments, pingbacks, trackbacks).
"""
comment_enabled = models.BooleanField(
_('comments enabled'), default=True,
help_text=_('Allows comments if checked.'))
pingback_enabled = models.BooleanField(
_('pingbacks enabled'), default=True,
help_text=_('Allows pingbacks if checked.'))
trackback_enabled = models.BooleanField(
_('trackbacks enabled'), default=True,
help_text=_('Allows trackbacks if checked.'))
comment_count = models.IntegerField(
_('comment count'), default=0)
pingback_count = models.IntegerField(
_('pingback count'), default=0)
trackback_count = models.IntegerField(
_('trackback count'), default=0)
@property
def discussions(self):
"""
Returns a queryset of the published discussions.
"""
return comments.get_model().objects.for_model(
self).filter(is_public=True, is_removed=False)
@property
def comments(self):
"""
Returns a queryset of the published comments.
"""
return self.discussions.filter(Q(flags=None) | Q(
flags__flag=CommentFlag.MODERATOR_APPROVAL))
@property
def pingbacks(self):
"""
Returns a queryset of the published pingbacks.
"""
return self.discussions.filter(flags__flag=PINGBACK)
@property
def trackbacks(self):
"""
Return a queryset of the published trackbacks.
"""
return self.discussions.filter(flags__flag=TRACKBACK)
def discussion_is_still_open(self, discussion_type, auto_close_after):
"""
        Checks if a type of discussion is still open
        after a certain number of days.
"""
discussion_enabled = getattr(self, discussion_type)
if (discussion_enabled and isinstance(auto_close_after, int) and
auto_close_after >= 0):
return (timezone.now() - (
self.start_publication or self.publication_date)).days < \
auto_close_after
return discussion_enabled
@property
def comments_are_open(self):
"""
Checks if the comments are open with the
AUTO_CLOSE_COMMENTS_AFTER setting.
"""
return self.discussion_is_still_open(
'comment_enabled', AUTO_CLOSE_COMMENTS_AFTER)
@property
def pingbacks_are_open(self):
"""
Checks if the pingbacks are open with the
AUTO_CLOSE_PINGBACKS_AFTER setting.
"""
return self.discussion_is_still_open(
'pingback_enabled', AUTO_CLOSE_PINGBACKS_AFTER)
@property
def trackbacks_are_open(self):
"""
Checks if the trackbacks are open with the
AUTO_CLOSE_TRACKBACKS_AFTER setting.
"""
return self.discussion_is_still_open(
'trackback_enabled', AUTO_CLOSE_TRACKBACKS_AFTER)
class Meta:
abstract = True
class RelatedEntry(models.Model):
"""
    Abstract model class for making manual relations
    between the different entries.
"""
related = models.ManyToManyField(
'self',
blank=True,
verbose_name=_('related entries'))
@property
def related_published(self):
"""
Returns only related entries published.
"""
return entries_published(self.related)
class Meta:
abstract = True
class LeadEntry(models.Model):
"""
Abstract model class providing a lead content to the entries.
"""
lead = models.TextField(
_('lead'), blank=True,
help_text=_('Lead paragraph'))
@property
def html_lead(self):
"""
Returns the "lead" field formatted in HTML.
"""
return html_format(self.lead)
class Meta:
abstract = True
class ExcerptEntry(models.Model):
"""
Abstract model class to add an excerpt to the entries.
"""
excerpt = models.TextField(
_('excerpt'), blank=True,
help_text=_('Used for SEO purposes.'))
def save(self, *args, **kwargs):
"""
Overrides the save method to create an excerpt
        from the content field if empty.
"""
if not self.excerpt and self.status == PUBLISHED:
self.excerpt = Truncator(strip_tags(
getattr(self, 'content', ''))).words(50)
super(ExcerptEntry, self).save(*args, **kwargs)
class Meta:
abstract = True
def image_upload_to_dispatcher(entry, filename):
"""
Dispatch function to allow overriding of ``image_upload_to`` method.
Outside the model for fixing an issue with Django's migrations on Python 2.
"""
return entry.image_upload_to(filename)
class ImageEntry(models.Model):
"""
Abstract model class to add an image for illustrating the entries.
"""
def image_upload_to(self, filename):
"""
Compute the upload path for the image field.
"""
now = timezone.now()
filename, extension = os.path.splitext(filename)
return os.path.join(
UPLOAD_TO,
now.strftime('%Y'),
now.strftime('%m'),
now.strftime('%d'),
'%s%s' % (slugify(filename), extension))
image = models.ImageField(
_('image'), blank=True,
upload_to=image_upload_to_dispatcher,
help_text=_('Used for illustration.'))
image_caption = models.TextField(
_('caption'), blank=True,
help_text=_("Image's caption."))
class Meta:
abstract = True
class FeaturedEntry(models.Model):
"""
Abstract model class to mark entries as featured.
"""
featured = models.BooleanField(
_('featured'), default=False)
class Meta:
abstract = True
class AuthorsEntry(models.Model):
"""
Abstract model class to add relationship
between the entries and their authors.
"""
authors = models.ManyToManyField(
'zinnia.Author',
blank=True,
related_name='entries',
verbose_name=_('authors'))
class Meta:
abstract = True
class CategoriesEntry(models.Model):
"""
Abstract model class to categorize the entries.
"""
categories = models.ManyToManyField(
'zinnia.Category',
blank=True,
related_name='entries',
verbose_name=_('categories'))
class Meta:
abstract = True
class TagsEntry(models.Model):
"""
Abstract model class to add tags to the entries.
"""
tags = TagField(_('tags'))
@property
def tags_list(self):
"""
Return iterable list of tags.
"""
return parse_tag_input(self.tags)
class Meta:
abstract = True
class LoginRequiredEntry(models.Model):
"""
    Abstract model class to restrict the display
    of the entry to authenticated users.
"""
login_required = models.BooleanField(
_('login required'), default=False,
help_text=_('Only authenticated users can view the entry.'))
class Meta:
abstract = True
class PasswordRequiredEntry(models.Model):
"""
Abstract model class to restrict the display
of the entry to users knowing the password.
"""
password = models.CharField(
_('password'), max_length=50, blank=True,
help_text=_('Protects the entry with a password.'))
class Meta:
abstract = True
class ContentTemplateEntry(models.Model):
"""
Abstract model class to display entry's content
with a custom template.
"""
content_template = models.CharField(
_('content template'), max_length=250,
default='zinnia/_entry_detail.html',
choices=[('zinnia/_entry_detail.html', _('Default template'))] +
ENTRY_CONTENT_TEMPLATES,
help_text=_("Template used to display the entry's content."))
class Meta:
abstract = True
class DetailTemplateEntry(models.Model):
"""
Abstract model class to display entries with a
custom template if needed on the detail page.
"""
detail_template = models.CharField(
_('detail template'), max_length=250,
default='entry_detail.html',
choices=[('entry_detail.html', _('Default template'))] +
ENTRY_DETAIL_TEMPLATES,
help_text=_("Template used to display the entry's detail page."))
class Meta:
abstract = True
class AbstractEntry(
CoreEntry,
ContentEntry,
DiscussionsEntry,
RelatedEntry,
LeadEntry,
ExcerptEntry,
ImageEntry,
FeaturedEntry,
AuthorsEntry,
CategoriesEntry,
TagsEntry,
LoginRequiredEntry,
PasswordRequiredEntry,
ContentTemplateEntry,
DetailTemplateEntry):
"""
Final abstract entry model class assembling
all the abstract entry model classes into a single one.
    In this manner we can override some fields without
    reimplementing all of the AbstractEntry.
"""
class Meta(CoreEntry.Meta):
abstract = True
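# A minimal sketch (hypothetical app code, not part of zinnia) of the intended
# extension point: a concrete entry model inherits every behaviour assembled
# in AbstractEntry and can still add or override fields.
#
# class Entry(AbstractEntry):
#     subtitle = models.CharField(max_length=255, blank=True)
#     class Meta(AbstractEntry.Meta):
#         abstract = False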
|
import inspect
import os
import unittest
class TestDynamicLoading(unittest.TestCase):
"""
Test case for dynamic loading of python class
This is used to test we can successfully import:
- STT engine
- TTS engine
- Trigger engine
- All core neurons
"""
def setUp(self):
# get current script directory path. We are in /an/unknown/path/kalliope/core/tests
cur_script_directory = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# get parent dir. Now we are in /an/unknown/path/kalliope
root_dir = os.path.normpath(cur_script_directory + os.sep + os.pardir)
# get the neuron dir
self.neurons_dir = os.path.normpath(root_dir + os.sep + "kalliope/neurons")
# get stt dir
self.stt_dir = os.path.normpath(root_dir + os.sep + "kalliope/stt")
# get tts dir
self.tts_dir = os.path.normpath(root_dir + os.sep + "kalliope/tts")
# get trigger dir
self.trigger_dir = os.path.normpath(root_dir + os.sep + "kalliope/trigger")
def test_packages_present(self):
"""
Check that the neurons folder exist in the root of the project
"""
self.assertTrue(os.path.isdir(self.neurons_dir))
self.assertTrue(os.path.isdir(self.stt_dir))
self.assertTrue(os.path.isdir(self.tts_dir))
self.assertTrue(os.path.isdir(self.trigger_dir))
def test_can_import_neurons(self):
"""
        Try to import each neuron that is present in the neurons package
:return:
"""
neurons = self.get_package_in_folder(self.neurons_dir)
package_name = "neurons"
for neuron_name in neurons:
module_name = neuron_name.capitalize()
self.dynamic_import(package_name, module_name)
def test_can_import_stt(self):
"""
        Try to import each stt that is present in the stt package
:return:
"""
stts = self.get_package_in_folder(self.stt_dir)
package_name = "stt"
for stt_name in stts:
module_name = stt_name.capitalize()
self.dynamic_import(package_name, module_name)
def test_can_import_tts(self):
"""
        Try to import each tts that is present in the tts package
:return:
"""
ttss = self.get_package_in_folder(self.tts_dir)
package_name = "tts"
for tts_name in ttss:
module_name = tts_name.capitalize()
self.dynamic_import(package_name, module_name)
def test_can_import_trigger(self):
"""
        Try to import each trigger that is present in the trigger package
:return:
"""
triggers = self.get_package_in_folder(self.trigger_dir)
package_name = "trigger"
for trigger in triggers:
module_name = trigger.capitalize()
self.dynamic_import(package_name, module_name)
@staticmethod
def get_package_in_folder(folder):
"""
        Receive a path in <folder> and return a list of packages in that
        folder. The function tests if elements in that path are directories
        and returns a list of those directories.
:param folder: Path of a folder to return package
:return: list of package name
"""
# get the list of neurons in the neurons packages
el_folder = os.listdir(folder)
        # keep only packages, because listdir also returns __init__.py and other files
packages_in_folder = list()
for el in el_folder:
            if os.path.isdir(folder + os.sep + el) and '__pycache__' not in el:
packages_in_folder.append(el)
return packages_in_folder
def dynamic_import(self, package_name, module_name):
"""
Dynamic import of a module by its name.
package name can be:
- triggers
- neurons
- stt
- tts
:param package_name: name of the mother package
:param module_name: module name to load
:return:
"""
module_name_with_path = "kalliope." + package_name + "." + module_name.lower() + "." + module_name.lower()
mod = __import__(module_name_with_path, fromlist=[module_name])
try:
getattr(mod, module_name)
except AttributeError:
self.fail("The module %s does not exist in package %s" % (module_name, package_name))
if __name__ == '__main__':
unittest.main()
|
import logging
from buienradar.constants import (
CONDCODE,
CONDITION,
DATETIME,
MAX_TEMP,
MIN_TEMP,
RAIN,
WINDAZIMUTH,
WINDSPEED,
)
import voluptuous as vol
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
PLATFORM_SCHEMA,
WeatherEntity,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, TEMP_CELSIUS
from homeassistant.helpers import config_validation as cv
# Reuse data and API logic from the sensor implementation
from .const import DEFAULT_TIMEFRAME
from .util import BrData
_LOGGER = logging.getLogger(__name__)
DATA_CONDITION = "buienradar_condition"
CONF_FORECAST = "forecast"
CONDITION_CLASSES = {
"cloudy": ["c", "p"],
"fog": ["d", "n"],
"hail": [],
"lightning": ["g"],
"lightning-rainy": ["s"],
"partlycloudy": ["b", "j", "o", "r"],
"pouring": ["l", "q"],
"rainy": ["f", "h", "k", "m"],
"snowy": ["u", "i", "v", "t"],
"snowy-rainy": ["w"],
"sunny": ["a"],
"windy": [],
"windy-variant": [],
"exceptional": [],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_FORECAST, default=True): cv.boolean,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the buienradar platform."""
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
if None in (latitude, longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return False
coordinates = {CONF_LATITUDE: float(latitude), CONF_LONGITUDE: float(longitude)}
# create weather data:
data = BrData(hass, coordinates, DEFAULT_TIMEFRAME, None)
# create weather device:
_LOGGER.debug("Initializing buienradar weather: coordinates %s", coordinates)
# create condition helper
if DATA_CONDITION not in hass.data:
cond_keys = [str(chr(x)) for x in range(97, 123)]
hass.data[DATA_CONDITION] = dict.fromkeys(cond_keys)
for cond, condlst in CONDITION_CLASSES.items():
for condi in condlst:
hass.data[DATA_CONDITION][condi] = cond
async_add_entities([BrWeather(data, config, coordinates)])
# schedule the first update in 1 minute from now:
await data.schedule_update(1)
class BrWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, data, config, coordinates):
"""Initialise the platform with a data instance and station name."""
self._stationname = config.get(CONF_NAME)
self._forecast = config[CONF_FORECAST]
self._data = data
self._unique_id = "{:2.6f}{:2.6f}".format(
coordinates[CONF_LATITUDE], coordinates[CONF_LONGITUDE]
)
@property
def attribution(self):
"""Return the attribution."""
return self._data.attribution
@property
def name(self):
"""Return the name of the sensor."""
return (
self._stationname or f"BR {self._data.stationname or '(unknown station)'}"
)
@property
def condition(self):
"""Return the current condition."""
if self._data and self._data.condition:
ccode = self._data.condition.get(CONDCODE)
if ccode:
conditions = self.hass.data.get(DATA_CONDITION)
if conditions:
return conditions.get(ccode)
@property
def temperature(self):
"""Return the current temperature."""
return self._data.temperature
@property
def pressure(self):
"""Return the current pressure."""
return self._data.pressure
@property
def humidity(self):
"""Return the name of the sensor."""
return self._data.humidity
@property
def visibility(self):
"""Return the current visibility in km."""
if self._data.visibility is None:
return None
return round(self._data.visibility / 1000, 1)
@property
def wind_speed(self):
"""Return the current windspeed in km/h."""
if self._data.wind_speed is None:
return None
return round(self._data.wind_speed * 3.6, 1)
@property
def wind_bearing(self):
"""Return the current wind bearing (degrees)."""
return self._data.wind_bearing
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def forecast(self):
"""Return the forecast array."""
if not self._forecast:
return None
fcdata_out = []
cond = self.hass.data[DATA_CONDITION]
if not self._data.forecast:
return None
for data_in in self._data.forecast:
# remap keys from external library to
# keys understood by the weather component:
            # Default to an empty dict so a forecast entry without a
            # condition mapping doesn't raise AttributeError on .get():
            condcode = data_in.get(CONDITION, {}).get(CONDCODE)
            data_out = {
                ATTR_FORECAST_TIME: data_in.get(DATETIME),
                ATTR_FORECAST_CONDITION: cond.get(condcode),
ATTR_FORECAST_TEMP_LOW: data_in.get(MIN_TEMP),
ATTR_FORECAST_TEMP: data_in.get(MAX_TEMP),
ATTR_FORECAST_PRECIPITATION: data_in.get(RAIN),
ATTR_FORECAST_WIND_BEARING: data_in.get(WINDAZIMUTH),
ATTR_FORECAST_WIND_SPEED: round(data_in.get(WINDSPEED) * 3.6, 1),
}
fcdata_out.append(data_out)
return fcdata_out
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
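# A minimal, self-contained sketch of the condition-code inversion that
# async_setup_platform performs above; build_condition_map is a hypothetical
# helper added for illustration only, not part of the platform.
def build_condition_map(condition_classes):
    """Invert {ha_condition: [codes]} into {code: ha_condition}."""
    mapping = dict.fromkeys(chr(x) for x in range(97, 123))
    for ha_condition, codes in condition_classes.items():
        for code in codes:
            mapping[code] = ha_condition
    return mapping
# Example: build_condition_map(CONDITION_CLASSES)["g"] == "lightning",
# while codes that appear in no class stay mapped to None.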
|
import voluptuous as vol
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
import homeassistant.helpers.config_validation as cv
from .const import CONF_INDEX, DOMAIN
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_INDEX, default=0): cv.positive_int}
)
def get_service(hass, config, discovery_info=None):
"""Get the Ecobee notification service."""
data = hass.data[DOMAIN]
index = config.get(CONF_INDEX)
return EcobeeNotificationService(data, index)
class EcobeeNotificationService(BaseNotificationService):
"""Implement the notification service for the Ecobee thermostat."""
def __init__(self, data, thermostat_index):
"""Initialize the service."""
self.data = data
self.thermostat_index = thermostat_index
def send_message(self, message="", **kwargs):
"""Send a message."""
self.data.ecobee.send_message(self.thermostat_index, message)
|
import socket
import warnings
from collections import defaultdict, deque
from contextlib import contextmanager
from copy import copy
from itertools import count
from threading import local
from time import time
from . import Exchange, Queue, Consumer, Producer
from .clocks import LamportClock
from .common import maybe_declare, oid_from
from .exceptions import InconsistencyError
from .log import get_logger
from .utils.functional import maybe_evaluate, reprcall
from .utils.objects import cached_property
from .utils.uuid import uuid
from .matcher import match
REPLY_QUEUE_EXPIRES = 10
W_PIDBOX_IN_USE = """\
A node named {node.hostname} is already using this process mailbox!
Maybe you forgot to shutdown the other node or did not do so properly?
Or if you meant to start multiple nodes on the same host please make sure
you give each node a unique node name!
"""
__all__ = ('Node', 'Mailbox')
logger = get_logger(__name__)
debug, error = logger.debug, logger.error
class Node:
"""Mailbox node."""
#: hostname of the node.
hostname = None
#: the :class:`Mailbox` this is a node for.
mailbox = None
#: map of method name/handlers.
handlers = None
#: current context (passed on to handlers)
state = None
#: current channel.
channel = None
def __init__(self, hostname, state=None, channel=None,
handlers=None, mailbox=None):
self.channel = channel
self.mailbox = mailbox
self.hostname = hostname
self.state = state
self.adjust_clock = self.mailbox.clock.adjust
if handlers is None:
handlers = {}
self.handlers = handlers
def Consumer(self, channel=None, no_ack=True, accept=None, **options):
queue = self.mailbox.get_queue(self.hostname)
def verify_exclusive(name, messages, consumers):
if consumers:
warnings.warn(W_PIDBOX_IN_USE.format(node=self))
queue.on_declared = verify_exclusive
return Consumer(
channel or self.channel, [queue], no_ack=no_ack,
accept=self.mailbox.accept if accept is None else accept,
**options
)
def handler(self, fun):
self.handlers[fun.__name__] = fun
return fun
def on_decode_error(self, message, exc):
error('Cannot decode message: %r', exc, exc_info=1)
def listen(self, channel=None, callback=None):
consumer = self.Consumer(channel=channel,
callbacks=[callback or self.handle_message],
on_decode_error=self.on_decode_error)
consumer.consume()
return consumer
def dispatch(self, method, arguments=None,
reply_to=None, ticket=None, **kwargs):
arguments = arguments or {}
debug('pidbox received method %s [reply_to:%s ticket:%s]',
reprcall(method, (), kwargs=arguments), reply_to, ticket)
        handle = self.handle_call if reply_to else self.handle_cast
try:
reply = handle(method, arguments)
except SystemExit:
raise
except Exception as exc:
error('pidbox command error: %r', exc, exc_info=1)
reply = {'error': repr(exc)}
if reply_to:
self.reply({self.hostname: reply},
exchange=reply_to['exchange'],
routing_key=reply_to['routing_key'],
ticket=ticket)
return reply
def handle(self, method, arguments=None):
arguments = {} if not arguments else arguments
return self.handlers[method](self.state, **arguments)
def handle_call(self, method, arguments):
return self.handle(method, arguments)
def handle_cast(self, method, arguments):
return self.handle(method, arguments)
def handle_message(self, body, message=None):
destination = body.get('destination')
pattern = body.get('pattern')
matcher = body.get('matcher')
if message:
self.adjust_clock(message.headers.get('clock') or 0)
hostname = self.hostname
run_dispatch = False
if destination:
if hostname in destination:
run_dispatch = True
elif pattern and matcher:
if match(hostname, pattern, matcher):
run_dispatch = True
else:
run_dispatch = True
if run_dispatch:
return self.dispatch(**body)
dispatch_from_message = handle_message
def reply(self, data, exchange, routing_key, ticket, **kwargs):
self.mailbox._publish_reply(data, exchange, routing_key, ticket,
channel=self.channel,
serializer=self.mailbox.serializer)
class Mailbox:
"""Process Mailbox."""
node_cls = Node
exchange_fmt = '%s.pidbox'
reply_exchange_fmt = 'reply.%s.pidbox'
#: Name of application.
namespace = None
#: Connection (if bound).
connection = None
#: Exchange type (usually direct, or fanout for broadcast).
type = 'direct'
#: mailbox exchange (init by constructor).
exchange = None
#: exchange to send replies to.
reply_exchange = None
#: Only accepts json messages by default.
accept = ['json']
#: Message serializer
serializer = None
def __init__(self, namespace,
type='direct', connection=None, clock=None,
accept=None, serializer=None, producer_pool=None,
queue_ttl=None, queue_expires=None,
reply_queue_ttl=None, reply_queue_expires=10.0):
self.namespace = namespace
self.connection = connection
self.type = type
self.clock = LamportClock() if clock is None else clock
self.exchange = self._get_exchange(self.namespace, self.type)
self.reply_exchange = self._get_reply_exchange(self.namespace)
self._tls = local()
self.unclaimed = defaultdict(deque)
self.accept = self.accept if accept is None else accept
self.serializer = self.serializer if serializer is None else serializer
self.queue_ttl = queue_ttl
self.queue_expires = queue_expires
self.reply_queue_ttl = reply_queue_ttl
self.reply_queue_expires = reply_queue_expires
self._producer_pool = producer_pool
def __call__(self, connection):
bound = copy(self)
bound.connection = connection
return bound
def Node(self, hostname=None, state=None, channel=None, handlers=None):
hostname = hostname or socket.gethostname()
return self.node_cls(hostname, state, channel, handlers, mailbox=self)
def call(self, destination, command, kwargs=None,
timeout=None, callback=None, channel=None):
kwargs = {} if not kwargs else kwargs
return self._broadcast(command, kwargs, destination,
reply=True, timeout=timeout,
callback=callback,
channel=channel)
def cast(self, destination, command, kwargs=None):
kwargs = {} if not kwargs else kwargs
return self._broadcast(command, kwargs, destination, reply=False)
def abcast(self, command, kwargs=None):
kwargs = {} if not kwargs else kwargs
return self._broadcast(command, kwargs, reply=False)
def multi_call(self, command, kwargs=None, timeout=1,
limit=None, callback=None, channel=None):
kwargs = {} if not kwargs else kwargs
return self._broadcast(command, kwargs, reply=True,
timeout=timeout, limit=limit,
callback=callback,
channel=channel)
def get_reply_queue(self):
oid = self.oid
return Queue(
f'{oid}.{self.reply_exchange.name}',
exchange=self.reply_exchange,
routing_key=oid,
durable=False,
auto_delete=True,
expires=self.reply_queue_expires,
message_ttl=self.reply_queue_ttl,
)
@cached_property
def reply_queue(self):
return self.get_reply_queue()
def get_queue(self, hostname):
return Queue(
f'{hostname}.{self.namespace}.pidbox',
exchange=self.exchange,
durable=False,
auto_delete=True,
expires=self.queue_expires,
message_ttl=self.queue_ttl,
)
@contextmanager
def producer_or_acquire(self, producer=None, channel=None):
if producer:
yield producer
elif self.producer_pool:
with self.producer_pool.acquire() as producer:
yield producer
else:
yield Producer(channel, auto_declare=False)
def _publish_reply(self, reply, exchange, routing_key, ticket,
channel=None, producer=None, **opts):
chan = channel or self.connection.default_channel
exchange = Exchange(exchange, exchange_type='direct',
delivery_mode='transient',
durable=False)
with self.producer_or_acquire(producer, chan) as producer:
try:
producer.publish(
reply, exchange=exchange, routing_key=routing_key,
declare=[exchange], headers={
'ticket': ticket, 'clock': self.clock.forward(),
}, retry=True,
**opts
)
except InconsistencyError:
# queue probably deleted and no one is expecting a reply.
pass
def _publish(self, type, arguments, destination=None,
reply_ticket=None, channel=None, timeout=None,
serializer=None, producer=None, pattern=None, matcher=None):
message = {'method': type,
'arguments': arguments,
'destination': destination,
'pattern': pattern,
'matcher': matcher}
chan = channel or self.connection.default_channel
exchange = self.exchange
if reply_ticket:
maybe_declare(self.reply_queue(chan))
message.update(ticket=reply_ticket,
reply_to={'exchange': self.reply_exchange.name,
'routing_key': self.oid})
serializer = serializer or self.serializer
with self.producer_or_acquire(producer, chan) as producer:
producer.publish(
message, exchange=exchange.name, declare=[exchange],
headers={'clock': self.clock.forward(),
'expires': time() + timeout if timeout else 0},
serializer=serializer, retry=True,
)
def _broadcast(self, command, arguments=None, destination=None,
reply=False, timeout=1, limit=None,
callback=None, channel=None, serializer=None,
pattern=None, matcher=None):
if destination is not None and \
not isinstance(destination, (list, tuple)):
raise ValueError(
'destination must be a list/tuple not {}'.format(
type(destination)))
        if ((pattern is not None and not isinstance(pattern, str)) or
                (matcher is not None and not isinstance(matcher, str))):
            raise ValueError(
                'pattern and matcher must be '
                'strings not {}, {}'.format(type(pattern), type(matcher))
            )
arguments = arguments or {}
        reply_ticket = uuid() if reply else None
chan = channel or self.connection.default_channel
# Set reply limit to number of destinations (if specified)
if limit is None and destination:
            limit = len(destination)
serializer = serializer or self.serializer
self._publish(command, arguments, destination=destination,
reply_ticket=reply_ticket,
channel=chan,
timeout=timeout,
serializer=serializer,
pattern=pattern,
matcher=matcher)
if reply_ticket:
return self._collect(reply_ticket, limit=limit,
timeout=timeout,
callback=callback,
channel=chan)
def _collect(self, ticket,
limit=None, timeout=1, callback=None,
channel=None, accept=None):
if accept is None:
accept = self.accept
chan = channel or self.connection.default_channel
queue = self.reply_queue
consumer = Consumer(chan, [queue], accept=accept, no_ack=True)
responses = []
unclaimed = self.unclaimed
adjust_clock = self.clock.adjust
try:
return unclaimed.pop(ticket)
except KeyError:
pass
def on_message(body, message):
# ticket header added in kombu 2.5
header = message.headers.get
adjust_clock(header('clock') or 0)
expires = header('expires')
if expires and time() > expires:
return
this_id = header('ticket', ticket)
if this_id == ticket:
if callback:
callback(body)
responses.append(body)
else:
unclaimed[this_id].append(body)
consumer.register_callback(on_message)
try:
with consumer:
                for _ in (range(limit) if limit else count()):
try:
self.connection.drain_events(timeout=timeout)
except socket.timeout:
break
return responses
finally:
chan.after_reply_message_received(queue.name)
def _get_exchange(self, namespace, type):
return Exchange(self.exchange_fmt % namespace,
type=type,
durable=False,
delivery_mode='transient')
def _get_reply_exchange(self, namespace):
return Exchange(self.reply_exchange_fmt % namespace,
type='direct',
durable=False,
delivery_mode='transient')
@cached_property
def oid(self):
try:
return self._tls.OID
except AttributeError:
oid = self._tls.OID = oid_from(self)
return oid
@cached_property
def producer_pool(self):
return maybe_evaluate(self._producer_pool)
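# A minimal usage sketch for the Mailbox/Node pair above, assuming an
# in-memory broker; the 'ping' handler and the node name are hypothetical
# illustration only, not part of this module.
if __name__ == '__main__':  # pragma: no cover
    from kombu import Connection

    def ping(state):
        # Handlers receive the node's ``state`` as their first argument.
        return 'pong'

    mailbox = Mailbox('example', type='fanout')
    with Connection('memory://') as conn:
        bound = mailbox(conn)  # bind a copy of the mailbox to the connection
        node = bound.Node('worker1@example', channel=conn.default_channel,
                          handlers={'ping': ping})
        node.listen()  # declare the pidbox queue and start consuming
        bound.cast(['worker1@example'], 'ping')  # fire-and-forget broadcast
        conn.drain_events(timeout=1)  # the handler runs during drain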
|
import unittest
import cv2
import dlib
class TestDLib(unittest.TestCase):
def test_dlib_face_detector(self):
detector = dlib.get_frontal_face_detector()
image = cv2.imread('/input/tests/data/face.jpg')
image_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = detector(image_gray, 1)
self.assertEqual(len(faces), 1)
|
import ctypes
import inspect
import sys
import threading
from typing import Any
def fix_threading_exception_logging() -> None:
"""Fix threads passing uncaught exceptions to our exception hook.
https://bugs.python.org/issue1230540
Fixed in Python 3.8.
"""
if sys.version_info[:2] >= (3, 8):
return
run_old = threading.Thread.run
def run(*args: Any, **kwargs: Any) -> None:
try:
run_old(*args, **kwargs)
except (KeyboardInterrupt, SystemExit): # pylint: disable=try-except-raise
raise
except Exception: # pylint: disable=broad-except
sys.excepthook(*sys.exc_info())
threading.Thread.run = run # type: ignore
def _async_raise(tid: int, exctype: Any) -> None:
"""Raise an exception in the threads with id tid."""
if not inspect.isclass(exctype):
raise TypeError("Only types can be raised (not instances)")
c_tid = ctypes.c_long(tid)
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(c_tid, ctypes.py_object(exctype))
if res == 1:
return
# "if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect"
ctypes.pythonapi.PyThreadState_SetAsyncExc(c_tid, None)
raise SystemError("PyThreadState_SetAsyncExc failed")
class ThreadWithException(threading.Thread):
"""A thread class that supports raising exception in the thread from another thread.
Based on
https://stackoverflow.com/questions/323972/is-there-any-way-to-kill-a-thread/49877671
"""
def raise_exc(self, exctype: Any) -> None:
"""Raise the given exception type in the context of this thread."""
assert self.ident
_async_raise(self.ident, exctype)
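# A minimal usage sketch for ThreadWithException; delivery relies on the
# CPython behaviour that the pending async exception is raised at the next
# bytecode boundary, and the worker below is hypothetical illustration only.
if __name__ == "__main__":
    import time

    def worker() -> None:
        try:
            while True:
                time.sleep(0.1)  # returns often enough for delivery
        except KeyboardInterrupt:
            print("worker interrupted from another thread")

    thread = ThreadWithException(target=worker)
    thread.start()
    time.sleep(0.3)
    thread.raise_exc(KeyboardInterrupt)  # raised *inside* the worker thread
    thread.join()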
|
import math
from gi.repository import Gdk, GtkSource, Pango
from meld.settings import get_meld_settings
from meld.style import get_common_theme
def get_background_rgba(renderer):
'''Get and cache the expected background for the renderer widget
Current versions of GTK+ don't paint the background of text view
gutters with the actual expected widget background, which causes
them to look wrong when put next to any other widgets. This hack
    just gets the background from the renderer's view and caches it, both
    for performance and on the basis that all renderers will be
assigned to similarly-styled views. This is fragile, but the
alternative is really significantly slower.
'''
global _background_rgba
if _background_rgba is None:
if renderer.props.view:
stylecontext = renderer.props.view.get_style_context()
background_set, _background_rgba = (
stylecontext.lookup_color('theme_bg_color'))
return _background_rgba
_background_rgba = None
class MeldGutterRenderer:
def set_renderer_defaults(self):
self.set_alignment_mode(GtkSource.GutterRendererAlignmentMode.FIRST)
self.set_padding(3, 0)
self.set_alignment(0.5, 0.5)
def on_setting_changed(self, settings, key):
if key == 'style-scheme':
self.fill_colors, self.line_colors = get_common_theme()
alpha = self.fill_colors['current-chunk-highlight'].alpha
self.chunk_highlights = {
state: Gdk.RGBA(*[alpha + c * (1.0 - alpha) for c in colour])
for state, colour in self.fill_colors.items()
}
def draw_chunks(
self, context, background_area, cell_area, start, end, state):
chunk = self._chunk
if not chunk:
return
line = start.get_line()
is_first_line = line == chunk[1]
is_last_line = line == chunk[2] - 1
if not (is_first_line or is_last_line):
# Only paint for the first and last lines of a chunk
return
x = background_area.x - 1
y = background_area.y
width = background_area.width + 2
height = 1 if chunk[1] == chunk[2] else background_area.height
context.set_line_width(1.0)
Gdk.cairo_set_source_rgba(context, self.line_colors[chunk[0]])
if is_first_line:
context.move_to(x, y + 0.5)
context.rel_line_to(width, 0)
if is_last_line:
context.move_to(x, y - 0.5 + height)
context.rel_line_to(width, 0)
context.stroke()
def query_chunks(self, start, end, state):
line = start.get_line()
chunk_index = self.linediffer.locate_chunk(self.from_pane, line)[0]
in_chunk = chunk_index is not None
chunk = None
if in_chunk:
chunk = self.linediffer.get_chunk(
chunk_index, self.from_pane, self.to_pane)
if chunk is not None:
if chunk[1] == chunk[2]:
background_rgba = get_background_rgba(self)
elif self.props.view.current_chunk_check(chunk):
background_rgba = self.chunk_highlights[chunk[0]]
else:
background_rgba = self.fill_colors[chunk[0]]
else:
# TODO: Remove when fixed in upstream GTK+
background_rgba = get_background_rgba(self)
self._chunk = chunk
self.set_background(background_rgba)
return in_chunk
# GutterRendererChunkLines is an adaptation of GtkSourceGutterRendererLines
# Copyright (C) 2010 - Jesse van den Kieboom
#
# Python reimplementation is Copyright (C) 2015 Kai Willadsen
class GutterRendererChunkLines(
GtkSource.GutterRendererText, MeldGutterRenderer):
__gtype_name__ = "GutterRendererChunkLines"
def __init__(self, from_pane, to_pane, linediffer):
super().__init__()
self.set_renderer_defaults()
self.from_pane = from_pane
self.to_pane = to_pane
# FIXME: Don't pass in the linediffer; pass a generator like elsewhere
self.linediffer = linediffer
self.num_line_digits = 0
self.changed_handler_id = None
meld_settings = get_meld_settings()
meld_settings.connect('changed', self.on_setting_changed)
self.on_setting_changed(meld_settings, 'style-scheme')
def do_change_buffer(self, old_buffer):
if old_buffer:
old_buffer.disconnect(self.changed_handler_id)
view = self.get_view()
if view:
buf = view.get_buffer()
if buf:
self.changed_handler_id = buf.connect(
"changed", self.recalculate_size)
self.recalculate_size(buf)
def _measure_markup(self, markup):
layout = self.get_view().create_pango_layout()
layout.set_markup(markup)
w, h = layout.get_size()
return w / Pango.SCALE, h / Pango.SCALE
def recalculate_size(self, buf):
# Always calculate display size for at least two-digit line counts
num_lines = max(buf.get_line_count(), 99)
num_digits = int(math.ceil(math.log(num_lines, 10)))
if num_digits == self.num_line_digits:
return
self.num_line_digits = num_digits
markup = "<b>%d</b>" % num_lines
width, height = self._measure_markup(markup)
self.set_size(width)
def do_draw(self, context, background_area, cell_area, start, end, state):
GtkSource.GutterRendererText.do_draw(
self, context, background_area, cell_area, start, end, state)
self.draw_chunks(
context, background_area, cell_area, start, end, state)
def do_query_data(self, start, end, state):
self.query_chunks(start, end, state)
line = start.get_line() + 1
current_line = state & GtkSource.GutterRendererState.CURSOR
markup = "<b>%d</b>" % line if current_line else str(line)
self.set_markup(markup, -1)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.testing import parameterized
from compare_gan import datasets
from compare_gan import test_utils
from compare_gan.gans import consts as c
from compare_gan.gans import loss_lib
from compare_gan.gans import penalty_lib
from compare_gan.gans.modular_gan import ModularGAN
import gin
import tensorflow as tf
FLAGS = flags.FLAGS
TEST_ARCHITECTURES = [c.RESNET5_ARCH, c.RESNET_BIGGAN_ARCH, c.RESNET_CIFAR_ARCH]
TEST_LOSSES = [loss_lib.non_saturating, loss_lib.wasserstein,
loss_lib.least_squares, loss_lib.hinge]
TEST_PENALTIES = [penalty_lib.no_penalty, penalty_lib.dragan_penalty,
penalty_lib.wgangp_penalty, penalty_lib.l2_penalty]
class ModularGANConditionalTest(parameterized.TestCase,
test_utils.CompareGanTestCase):
def _runSingleTrainingStep(self, architecture, loss_fn, penalty_fn,
labeled_dataset):
parameters = {
"architecture": architecture,
"lambda": 1,
"z_dim": 120,
}
with gin.unlock_config():
gin.bind_parameter("penalty.fn", penalty_fn)
gin.bind_parameter("loss.fn", loss_fn)
model_dir = self._get_empty_model_dir()
run_config = tf.contrib.tpu.RunConfig(
model_dir=model_dir,
tpu_config=tf.contrib.tpu.TPUConfig(iterations_per_loop=1))
dataset = datasets.get_dataset("cifar10")
gan = ModularGAN(
dataset=dataset,
parameters=parameters,
conditional=True,
model_dir=model_dir)
estimator = gan.as_estimator(run_config, batch_size=2, use_tpu=False)
estimator.train(gan.input_fn, steps=1)
@parameterized.parameters(TEST_ARCHITECTURES)
def testSingleTrainingStepArchitectures(self, architecture):
self._runSingleTrainingStep(architecture, loss_lib.hinge,
penalty_lib.no_penalty, True)
@parameterized.parameters(TEST_LOSSES)
def testSingleTrainingStepLosses(self, loss_fn):
self._runSingleTrainingStep(c.RESNET_CIFAR_ARCH, loss_fn,
penalty_lib.no_penalty, labeled_dataset=True)
@parameterized.parameters(TEST_PENALTIES)
def testSingleTrainingStepPenalties(self, penalty_fn):
self._runSingleTrainingStep(c.RESNET_CIFAR_ARCH, loss_lib.hinge, penalty_fn,
labeled_dataset=True)
  def testUnlabeledDatasetRaisesError(self):
parameters = {
"architecture": c.RESNET_CIFAR_ARCH,
"lambda": 1,
"z_dim": 120,
}
with gin.unlock_config():
gin.bind_parameter("loss.fn", loss_lib.hinge)
# Use dataset without labels.
dataset = datasets.get_dataset("celeb_a")
model_dir = self._get_empty_model_dir()
with self.assertRaises(ValueError):
gan = ModularGAN(
dataset=dataset,
parameters=parameters,
conditional=True,
model_dir=model_dir)
del gan
if __name__ == "__main__":
tf.test.main()
|
import asyncio
import logging
import time
from pyHS100 import SmartDeviceException, SmartPlug
from homeassistant.components.switch import (
ATTR_CURRENT_POWER_W,
ATTR_TODAY_ENERGY_KWH,
SwitchEntity,
)
from homeassistant.const import ATTR_VOLTAGE
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.typing import HomeAssistantType
from . import CONF_SWITCH, DOMAIN as TPLINK_DOMAIN
from .common import add_available_devices
PARALLEL_UPDATES = 0
_LOGGER = logging.getLogger(__name__)
ATTR_TOTAL_ENERGY_KWH = "total_energy_kwh"
ATTR_CURRENT_A = "current_a"
MAX_ATTEMPTS = 300
SLEEP_TIME = 2
async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
"""Set up switches."""
entities = await hass.async_add_executor_job(
add_available_devices, hass, CONF_SWITCH, SmartPlugSwitch
)
if entities:
async_add_entities(entities, update_before_add=True)
if hass.data[TPLINK_DOMAIN][f"{CONF_SWITCH}_remaining"]:
raise PlatformNotReady
class SmartPlugSwitch(SwitchEntity):
"""Representation of a TPLink Smart Plug switch."""
def __init__(self, smartplug: SmartPlug):
"""Initialize the switch."""
self.smartplug = smartplug
self._sysinfo = None
self._state = None
self._is_available = False
# Set up emeter cache
self._emeter_params = {}
self._mac = None
self._alias = None
self._model = None
self._device_id = None
self._host = None
@property
def unique_id(self):
"""Return a unique ID."""
return self._device_id
@property
def name(self):
"""Return the name of the Smart Plug."""
return self._alias
@property
def device_info(self):
"""Return information about the device."""
return {
"name": self._alias,
"model": self._model,
"manufacturer": "TP-Link",
"connections": {(dr.CONNECTION_NETWORK_MAC, self._mac)},
"sw_version": self._sysinfo["sw_ver"],
}
@property
def available(self) -> bool:
"""Return if switch is available."""
return self._is_available
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.smartplug.turn_on()
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.smartplug.turn_off()
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
return self._emeter_params
@property
def _plug_from_context(self):
"""Return the plug from the context."""
children = self.smartplug.sys_info["children"]
return next(c for c in children if c["id"] == self.smartplug.context)
def update_state(self):
"""Update the TP-Link switch's state."""
if self.smartplug.context is None:
self._state = self.smartplug.state == self.smartplug.SWITCH_STATE_ON
else:
self._state = self._plug_from_context["state"] == 1
def attempt_update(self, update_attempt):
"""Attempt to get details from the TP-Link switch."""
try:
if not self._sysinfo:
self._sysinfo = self.smartplug.sys_info
self._mac = self._sysinfo["mac"]
self._model = self._sysinfo["model"]
self._host = self.smartplug.host
if self.smartplug.context is None:
self._alias = self._sysinfo["alias"]
self._device_id = self._mac
else:
self._alias = self._plug_from_context["alias"]
self._device_id = self.smartplug.context
self.update_state()
if self.smartplug.has_emeter:
emeter_readings = self.smartplug.get_emeter_realtime()
self._emeter_params[ATTR_CURRENT_POWER_W] = "{:.2f}".format(
emeter_readings["power"]
)
self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = "{:.3f}".format(
emeter_readings["total"]
)
self._emeter_params[ATTR_VOLTAGE] = "{:.1f}".format(
emeter_readings["voltage"]
)
self._emeter_params[ATTR_CURRENT_A] = "{:.2f}".format(
emeter_readings["current"]
)
emeter_statics = self.smartplug.get_emeter_daily()
try:
self._emeter_params[ATTR_TODAY_ENERGY_KWH] = "{:.3f}".format(
emeter_statics[int(time.strftime("%e"))]
)
except KeyError:
# Device returned no daily history
pass
return True
except (SmartDeviceException, OSError) as ex:
if update_attempt == 0:
_LOGGER.debug(
"Retrying in %s seconds for %s|%s due to: %s",
SLEEP_TIME,
self._host,
self._alias,
ex,
)
return False
async def async_update(self):
"""Update the TP-Link switch's state."""
for update_attempt in range(MAX_ATTEMPTS):
is_ready = await self.hass.async_add_executor_job(
self.attempt_update, update_attempt
)
if is_ready:
self._is_available = True
if update_attempt > 0:
_LOGGER.debug(
"Device %s|%s responded after %s attempts",
self._host,
self._alias,
update_attempt,
)
break
await asyncio.sleep(SLEEP_TIME)
else:
if self._is_available:
_LOGGER.warning(
"Could not read state for %s|%s", self.smartplug.host, self._alias
)
self._is_available = False
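# A minimal sketch of the retry pattern used by async_update above: Python's
# for/else runs the else branch only when the loop was *not* exited via
# break, i.e. when every attempt failed. attempt_once is hypothetical.
def _retry_with_for_else(attempt_once, max_attempts=3):
    succeeded = False
    for attempt in range(max_attempts):
        if attempt_once(attempt):
            succeeded = True
            break
    else:
        # Reached only when all attempts were exhausted without a break.
        succeeded = False
    return succeeded
# Example: _retry_with_for_else(lambda attempt: attempt == 2) is True,
# because the third attempt succeeds before the loop is exhausted.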
|
import os.path
from typing import Tuple
import cairo
import gi
from django.conf import settings
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.formats import number_format
from django.utils.html import escape
from django.utils.translation import get_language
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, npgettext, pgettext, pgettext_lazy
from weblate.fonts.utils import configure_fontconfig, render_size
from weblate.trans.util import sort_unicode
from weblate.utils.site import get_site_url
from weblate.utils.stats import GlobalStats
from weblate.utils.views import get_percent_color
gi.require_version("PangoCairo", "1.0")
gi.require_version("Pango", "1.0")
# pylint:disable=wrong-import-position,wrong-import-order
from gi.repository import Pango, PangoCairo # noqa:E402,I001 isort:skip
COLOR_DATA = {
"grey": (0, 0, 0),
"white": (0, 0, 0),
"black": (255, 255, 255),
"graph": (255, 255, 255),
}
WIDGETS = {}
def register_widget(widget):
"""Register widget in dictionary."""
WIDGETS[widget.name] = widget
return widget
class Widget:
"""Generic widget class."""
name = ""
verbose = ""
colors: Tuple[str, ...] = ()
extension = "png"
content_type = "image/png"
order = 100
show = True
def __init__(self, obj, color=None, lang=None):
"""Create Widget object."""
# Get object and related params
self.obj = obj
self.color = self.get_color_name(color)
self.lang = lang
def get_color_name(self, color):
"""Return color name based on allowed ones."""
if color not in self.colors:
return self.colors[0]
return color
class ContentWidget(Widget):
"""Generic content widget class."""
def __init__(self, obj, color=None, lang=None):
"""Create Widget object."""
super().__init__(obj, color, lang)
# Get translation status
if lang:
stats = obj.stats.get_single_language_stats(lang)
else:
stats = obj.stats
self.percent = stats.translated_percent
def get_percent_text(self):
return pgettext("Translated percents", "%(percent)s%%") % {
"percent": int(self.percent)
}
class BitmapWidget(ContentWidget):
"""Base class for bitmap rendering widgets."""
colors: Tuple[str, ...] = ("grey", "white", "black")
extension = "png"
content_type = "image/png"
order = 100
show = True
head_template = '<span letter_spacing="-500"><b>{}</b></span>'
foot_template = '<span letter_spacing="1000">{}</span>'
font_size = 10
line_spacing = 1.0
offset = 0
column_offset = 0
lines = True
def __init__(self, obj, color=None, lang=None):
"""Create Widget object."""
super().__init__(obj, color, lang)
# Get object and related params
self.total = obj.stats.source_strings
self.languages = obj.stats.languages
self.params = self.get_text_params()
# Set rendering variables
self.draw = None
self.width = 0
def get_text_params(self):
"""Create dictionary used for text formatting."""
return {
"name": self.obj.name,
"count": self.total,
"languages": self.languages,
"percent": self.percent,
}
def get_filename(self):
"""Return widgets filename."""
return os.path.join(
settings.STATIC_ROOT,
"widget-images",
"{widget}-{color}.png".format(**{"color": self.color, "widget": self.name}),
)
def get_columns(self):
raise NotImplementedError()
def get_column_width(self, surface, columns):
return surface.get_width() // len(columns)
def get_column_fonts(self):
return [
Pango.FontDescription("Source Sans Pro {}".format(self.font_size * 1.5)),
Pango.FontDescription(f"Source Sans Pro {self.font_size}"),
]
def render_additional(self, ctx):
return
def render(self, response):
"""Render widget."""
configure_fontconfig()
surface = cairo.ImageSurface.create_from_png(self.get_filename())
height = surface.get_height()
ctx = cairo.Context(surface)
columns = self.get_columns()
column_width = self.get_column_width(surface, columns)
fonts = self.get_column_fonts()
for i, column in enumerate(columns):
offset = self.offset
for row, text in enumerate(column):
layout = PangoCairo.create_layout(ctx)
layout.set_font_description(fonts[row])
# Set color and position
ctx.move_to(self.column_offset + column_width * i, offset)
ctx.set_source_rgb(*COLOR_DATA[self.color])
# Add text
layout.set_markup(text)
layout.set_alignment(Pango.Alignment.CENTER)
layout.set_width(column_width * Pango.SCALE)
offset += layout.get_pixel_size().height * self.line_spacing
# Render to cairo context
PangoCairo.show_layout(ctx, layout)
# Render column separators
if self.lines and i > 0:
ctx.new_path()
ctx.set_source_rgb(*COLOR_DATA[self.color])
ctx.set_line_width(0.5)
ctx.move_to(column_width * i, self.offset)
ctx.line_to(column_width * i, height - self.offset)
ctx.stroke()
self.render_additional(ctx)
surface.write_to_png(response)
class SVGWidget(ContentWidget):
"""Base class for SVG rendering widgets."""
extension = "svg"
content_type = "image/svg+xml; charset=utf-8"
template_name = ""
def render(self, response):
"""Rendering method to be implemented."""
raise NotImplementedError()
class RedirectWidget(Widget):
"""Generic redirect widget class."""
show = False
def redirect(self):
"""Redirect to matching SVG badge."""
kwargs = {
"project": self.obj.slug,
"widget": "svg",
"color": "badge",
"extension": "svg",
}
if self.lang:
kwargs["lang"] = self.lang.code
return reverse("widget-image", kwargs=kwargs)
return reverse("widget-image", kwargs=kwargs)
@register_widget
class NormalWidget(BitmapWidget):
name = "287x66"
order = 110
offset = 10
verbose = gettext_lazy("Big status badge")
def get_columns(self):
return [
[
self.head_template.format(
number_format(self.total, force_grouping=True)
),
self.foot_template.format(
npgettext(
"Label on enage page", "String", "Strings", self.total
).upper()
),
],
[
self.head_template.format(
number_format(self.languages, force_grouping=True)
),
self.foot_template.format(
npgettext(
"Label on enage page", "Language", "Languages", self.languages
).upper()
),
],
[
self.head_template.format(self.get_percent_text()),
self.foot_template.format(_("Translated").upper()),
],
]
@register_widget
class SmallWidget(BitmapWidget):
name = "88x31"
order = 111
font_size = 7
line_spacing = 0.8
offset = -1
verbose = gettext_lazy("Small status badge")
def get_columns(self):
return [
[
self.head_template.format(self.get_percent_text()),
self.foot_template.format(_("Translated").upper()),
]
]
@register_widget
class OpenGraphWidget(NormalWidget):
name = "open"
colors: Tuple[str, ...] = ("graph",)
order = 120
lines = False
offset = 300
font_size = 20
column_offset = 265
head_template = '<span letter_spacing="-1000">{}</span>'
foot_template = '<span letter_spacing="2000">{}</span>'
verbose = pgettext_lazy("Status widget name", "Panel")
def get_column_width(self, surface, columns):
return 230
def get_column_fonts(self):
return [
Pango.FontDescription("Source Sans Pro {}".format(42)),
Pango.FontDescription("Source Sans Pro {}".format(18)),
]
def get_title(self):
# Translators: Text on OpenGraph image
return _("Project %s") % "<b>{}</b>".format(escape(self.obj.name))
def render_additional(self, ctx):
ctx.move_to(280, 170)
layout = PangoCairo.create_layout(ctx)
        layout.set_font_description(Pango.FontDescription("Source Sans Pro 52"))
layout.set_markup(self.get_title())
PangoCairo.show_layout(ctx, layout)
class SiteOpenGraphWidget(OpenGraphWidget):
def __init__(self, obj=None, color=None, lang=None):
super().__init__(GlobalStats())
def get_title(self):
return "<b>{}</b>".format(escape(settings.SITE_TITLE))
def get_text_params(self):
return {}
@register_widget
class BadgeWidget(RedirectWidget):
"""Legacy badge which used to render PNG."""
name = "status"
colors: Tuple[str, ...] = ("badge",)
@register_widget
class ShieldsBadgeWidget(RedirectWidget):
"""Legacy badge which used to redirect to shields.io."""
name = "shields"
colors: Tuple[str, ...] = ("badge",)
@register_widget
class SVGBadgeWidget(SVGWidget):
name = "svg"
colors: Tuple[str, ...] = ("badge",)
order = 80
template_name = "svg/badge.svg"
verbose = gettext_lazy("Status badge")
def render(self, response):
translated_text = _("translated")
translated_width = (
render_size("DejaVu Sans", Pango.Weight.NORMAL, 11, 0, translated_text)[
0
].width
+ 5
)
percent_text = self.get_percent_text()
percent_width = (
render_size("DejaVu Sans", Pango.Weight.NORMAL, 11, 0, percent_text)[
0
].width
+ 5
)
if self.percent >= 90:
color = "#4c1"
elif self.percent >= 75:
color = "#dfb317"
else:
color = "#e05d44"
response.write(
render_to_string(
self.template_name,
{
"translated_text": translated_text,
"percent_text": percent_text,
"translated_width": translated_width,
"percent_width": percent_width,
"width": translated_width + percent_width,
"color": color,
"translated_offset": translated_width // 2,
"percent_offset": translated_width + percent_width // 2,
"lang": get_language(),
"fonts_cdn_url": settings.FONTS_CDN_URL,
},
)
)
@register_widget
class MultiLanguageWidget(SVGWidget):
name = "multi"
order = 81
colors: Tuple[str, ...] = ("auto", "red", "green", "blue")
template_name = "svg/multi-language-badge.svg"
verbose = pgettext_lazy("Status widget name", "Vertical language bar chart")
COLOR_MAP = {"red": "#fa3939", "green": "#3fed48", "blue": "#3f85ed", "auto": None}
def render(self, response):
translations = []
offset = 20
color = self.COLOR_MAP[self.color]
language_width = 190
languages = self.obj.stats.get_language_stats()
for stats in sort_unicode(languages, lambda x: str(x.language)):
# Skip empty translations
if stats.translated == 0:
continue
language = stats.language
percent = stats.translated_percent
if self.color == "auto":
color = get_percent_color(percent)
language_name = str(language)
language_width = max(
language_width,
(
render_size(
"DejaVu Sans", Pango.Weight.NORMAL, 11, 0, language_name
)[0].width
+ 5
),
)
translations.append(
(
# Language name
language_name,
# Translation percent
int(percent),
# Text y offset
offset,
# Bar y offset
offset - 6,
# Bar width
int(percent * 1.5),
# Bar color
color,
# Row URL
get_site_url(
reverse(
"project-language",
kwargs={"lang": language.code, "project": self.obj.slug},
)
),
# Top offset for horizontal
10 + int((100 - percent) * 1.5),
)
)
offset += 15
response.write(
render_to_string(
self.template_name,
{
"height": len(translations) * 15 + 15,
"width": language_width + 210,
"language_offset": language_width,
"bar_offset": language_width + 10,
"text_offset": language_width + 170,
"translations": translations,
"site_url": get_site_url(),
"horizontal_height": language_width + 130,
"fonts_cdn_url": settings.FONTS_CDN_URL,
},
)
)
@register_widget
class HorizontalMultiLanguageWidget(MultiLanguageWidget):
name = "horizontal"
order = 82
template_name = "svg/multi-language-badge-horizontal.svg"
verbose = pgettext_lazy("Status widget name", "Horizontal language bar chart")
|
from homeassistant.components.sensor import DEVICE_CLASS_TEMPERATURE
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
MASS_GRAMS,
PERCENTAGE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
VOLUME_LITERS,
)
from .common import OmniLogicEntity, OmniLogicUpdateCoordinator
from .const import COORDINATOR, DOMAIN, PUMP_TYPES
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the sensor platform."""
coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
entities = []
for item_id, item in coordinator.data.items():
id_len = len(item_id)
item_kind = item_id[-2]
entity_settings = SENSOR_TYPES.get((id_len, item_kind))
if not entity_settings:
continue
for entity_setting in entity_settings:
for state_key, entity_class in entity_setting["entity_classes"].items():
if state_key not in item:
continue
guard = False
for guard_condition in entity_setting["guard_condition"]:
if guard_condition and all(
item.get(guard_key) == guard_value
for guard_key, guard_value in guard_condition.items()
):
guard = True
if guard:
continue
entity = entity_class(
coordinator=coordinator,
state_key=state_key,
name=entity_setting["name"],
kind=entity_setting["kind"],
item_id=item_id,
device_class=entity_setting["device_class"],
icon=entity_setting["icon"],
unit=entity_setting["unit"],
)
entities.append(entity)
async_add_entities(entities)
class OmnilogicSensor(OmniLogicEntity):
"""Defines an Omnilogic sensor entity."""
def __init__(
self,
coordinator: OmniLogicUpdateCoordinator,
kind: str,
name: str,
device_class: str,
icon: str,
unit: str,
item_id: tuple,
state_key: str,
):
"""Initialize Entities."""
super().__init__(
coordinator=coordinator,
kind=kind,
name=name,
item_id=item_id,
icon=icon,
)
backyard_id = item_id[:2]
unit_type = coordinator.data[backyard_id].get("Unit-of-Measurement")
self._unit_type = unit_type
self._device_class = device_class
self._unit = unit
self._state_key = state_key
@property
def device_class(self):
"""Return the device class of the entity."""
return self._device_class
@property
def unit_of_measurement(self):
"""Return the right unit of measure."""
return self._unit
class OmniLogicTemperatureSensor(OmnilogicSensor):
"""Define an OmniLogic Temperature (Air/Water) Sensor."""
@property
def state(self):
"""Return the state for the temperature sensor."""
sensor_data = self.coordinator.data[self._item_id][self._state_key]
hayward_state = sensor_data
hayward_unit_of_measure = TEMP_FAHRENHEIT
state = sensor_data
if self._unit_type == "Metric":
hayward_state = round((int(hayward_state) - 32) * 5 / 9, 1)
hayward_unit_of_measure = TEMP_CELSIUS
if int(sensor_data) == -1:
hayward_state = None
state = None
self._attrs["hayward_temperature"] = hayward_state
self._attrs["hayward_unit_of_measure"] = hayward_unit_of_measure
self._unit = TEMP_FAHRENHEIT
return state
class OmniLogicPumpSpeedSensor(OmnilogicSensor):
"""Define an OmniLogic Pump Speed Sensor."""
@property
def state(self):
"""Return the state for the pump speed sensor."""
pump_type = PUMP_TYPES[self.coordinator.data[self._item_id]["Filter-Type"]]
        pump_speed = self.coordinator.data[self._item_id][self._state_key]
        # Guard against UnboundLocalError for pump types or speeds that
        # match none of the branches below.
        state = None
        if pump_type == "VARIABLE":
self._unit = PERCENTAGE
state = pump_speed
elif pump_type == "DUAL":
if pump_speed == 0:
state = "off"
elif pump_speed == self.coordinator.data[self._item_id].get(
"Min-Pump-Speed"
):
state = "low"
elif pump_speed == self.coordinator.data[self._item_id].get(
"Max-Pump-Speed"
):
state = "high"
self._attrs["pump_type"] = pump_type
return state
class OmniLogicSaltLevelSensor(OmnilogicSensor):
"""Define an OmniLogic Salt Level Sensor."""
@property
def state(self):
"""Return the state for the salt level sensor."""
salt_return = self.coordinator.data[self._item_id][self._state_key]
unit_of_measurement = self._unit
if self._unit_type == "Metric":
salt_return = round(int(salt_return) / 1000, 2)
unit_of_measurement = f"{MASS_GRAMS}/{VOLUME_LITERS}"
self._unit = unit_of_measurement
return salt_return
class OmniLogicChlorinatorSensor(OmnilogicSensor):
"""Define an OmniLogic Chlorinator Sensor."""
@property
def state(self):
"""Return the state for the chlorinator sensor."""
state = self.coordinator.data[self._item_id][self._state_key]
return state
class OmniLogicPHSensor(OmnilogicSensor):
"""Define an OmniLogic pH Sensor."""
@property
def state(self):
"""Return the state for the pH sensor."""
ph_state = self.coordinator.data[self._item_id][self._state_key]
if ph_state == 0:
ph_state = None
return ph_state
class OmniLogicORPSensor(OmnilogicSensor):
"""Define an OmniLogic ORP Sensor."""
def __init__(
self,
coordinator: OmniLogicUpdateCoordinator,
state_key: str,
name: str,
kind: str,
item_id: tuple,
device_class: str,
icon: str,
unit: str,
):
"""Initialize the sensor."""
super().__init__(
coordinator=coordinator,
kind=kind,
name=name,
device_class=device_class,
icon=icon,
unit=unit,
item_id=item_id,
state_key=state_key,
)
@property
def state(self):
"""Return the state for the ORP sensor."""
orp_state = self.coordinator.data[self._item_id][self._state_key]
if orp_state == -1:
orp_state = None
return orp_state
SENSOR_TYPES = {
(2, "Backyard"): [
{
"entity_classes": {"airTemp": OmniLogicTemperatureSensor},
"name": "Air Temperature",
"kind": "air_temperature",
"device_class": DEVICE_CLASS_TEMPERATURE,
"icon": None,
"unit": TEMP_FAHRENHEIT,
"guard_condition": [{}],
},
],
(4, "BOWS"): [
{
"entity_classes": {"waterTemp": OmniLogicTemperatureSensor},
"name": "Water Temperature",
"kind": "water_temperature",
"device_class": DEVICE_CLASS_TEMPERATURE,
"icon": None,
"unit": TEMP_FAHRENHEIT,
"guard_condition": [{}],
},
],
(6, "Filter"): [
{
"entity_classes": {"filterSpeed": OmniLogicPumpSpeedSensor},
"name": "Speed",
"kind": "filter_pump_speed",
"device_class": None,
"icon": "mdi:speedometer",
"unit": PERCENTAGE,
"guard_condition": [
{"Filter-Type": "FMT_SINGLE_SPEED"},
],
},
],
(6, "Pumps"): [
{
"entity_classes": {"pumpSpeed": OmniLogicPumpSpeedSensor},
"name": "Pump Speed",
"kind": "pump_speed",
"device_class": None,
"icon": "mdi:speedometer",
"unit": PERCENTAGE,
"guard_condition": [
{"Type": "PMP_SINGLE_SPEED"},
],
},
],
(6, "Chlorinator"): [
{
"entity_classes": {"Timed-Percent": OmniLogicChlorinatorSensor},
"name": "Setting",
"kind": "chlorinator",
"device_class": None,
"icon": "mdi:gauge",
"unit": PERCENTAGE,
"guard_condition": [
{
"Shared-Type": "BOW_SHARED_EQUIPMENT",
"status": "0",
},
{
"operatingMode": "2",
},
],
},
{
"entity_classes": {"avgSaltLevel": OmniLogicSaltLevelSensor},
"name": "Salt Level",
"kind": "salt_level",
"device_class": None,
"icon": "mdi:gauge",
"unit": CONCENTRATION_PARTS_PER_MILLION,
"guard_condition": [
{
"Shared-Type": "BOW_SHARED_EQUIPMENT",
"status": "0",
},
],
},
],
(6, "CSAD"): [
{
"entity_classes": {"ph": OmniLogicPHSensor},
"name": "pH",
"kind": "csad_ph",
"device_class": None,
"icon": "mdi:gauge",
"unit": "pH",
"guard_condition": [
{"ph": ""},
],
},
{
"entity_classes": {"orp": OmniLogicORPSensor},
"name": "ORP",
"kind": "csad_orp",
"device_class": None,
"icon": "mdi:gauge",
"unit": "mV",
"guard_condition": [
{"orp": ""},
],
},
],
}
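# A minimal sketch of the guard_condition check in async_setup_entry above:
# an entity is skipped when any guard dict is non-empty and every one of its
# key/value pairs matches the item. The sample items are hypothetical.
def _is_guarded(item, guard_conditions):
    return any(
        guard and all(item.get(key) == value for key, value in guard.items())
        for guard in guard_conditions
    )
# Example: a single-speed filter matches its guard and is skipped:
# _is_guarded({"Filter-Type": "FMT_SINGLE_SPEED"},
#             [{"Filter-Type": "FMT_SINGLE_SPEED"}]) is True
# _is_guarded({"Filter-Type": "FMT_VARIABLE_SPEED"},
#             [{"Filter-Type": "FMT_SINGLE_SPEED"}]) is False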
|
import functools
from typing import Callable, MutableSequence, Tuple, Union
from PyQt5.QtCore import pyqtSlot, QSize, Qt
from PyQt5.QtWidgets import QListView, QSizePolicy, QMenu, QStyleFactory
from qutebrowser.browser import downloads
from qutebrowser.config import stylesheet
from qutebrowser.utils import qtutils, utils
_ActionListType = MutableSequence[
Union[
Tuple[None, None], # separator
Tuple[str, Callable[[], None]],
]
]
class DownloadView(QListView):
"""QListView which shows currently running downloads as a bar.
Attributes:
_menu: The QMenu which is currently displayed.
"""
STYLESHEET = """
QListView {
background-color: {{ conf.colors.downloads.bar.bg }};
font: {{ conf.fonts.downloads }};
}
QListView::item {
padding-right: 2px;
}
"""
def __init__(self, model, parent=None):
super().__init__(parent)
if not utils.is_mac:
self.setStyle(QStyleFactory.create('Fusion'))
stylesheet.set_register(self)
self.setResizeMode(QListView.Adjust)
self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed)
self.setFocusPolicy(Qt.NoFocus)
self.setFlow(QListView.LeftToRight)
self.setSpacing(1)
self._menu = None
model.rowsInserted.connect(self._update_geometry)
model.rowsRemoved.connect(self._update_geometry)
model.dataChanged.connect(self._update_geometry)
self.setModel(model)
self.setWrapping(True)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.show_context_menu)
self.clicked.connect(self.on_clicked)
def __repr__(self):
model = self.model()
if model is None:
count = 'None' # type: ignore[unreachable]
else:
count = model.rowCount()
return utils.get_repr(self, count=count)
@pyqtSlot()
def _update_geometry(self):
"""Wrapper to call updateGeometry.
For some reason, this is needed so that PyQt disconnects the signals and handles
arguments correctly. Probably a WORKAROUND for an unknown PyQt bug.
"""
self.updateGeometry()
@pyqtSlot(bool)
def on_fullscreen_requested(self, on):
"""Hide/show the downloadview when entering/leaving fullscreen."""
if on:
self.hide()
else:
self.show()
@pyqtSlot('QModelIndex')
def on_clicked(self, index):
"""Handle clicking of an item.
Args:
index: The QModelIndex of the clicked item.
"""
if not index.isValid():
return
item = self.model().data(index, downloads.ModelRole.item)
if item.done and item.successful:
item.open_file()
item.remove()
def _get_menu_actions(
self,
item: downloads.AbstractDownloadItem
) -> _ActionListType:
"""Get the available context menu actions for a given DownloadItem.
Args:
item: The DownloadItem to get the actions for, or None.
"""
model = self.model()
actions: _ActionListType = []
if item is None:
pass
elif item.done:
if item.successful:
actions.append(("Open", item.open_file))
actions.append(("Open directory", functools.partial(
item.open_file, open_dir=True, cmdline=None)))
else:
actions.append(("Retry", item.try_retry))
actions.append(("Remove", item.remove))
else:
actions.append(("Cancel", item.cancel))
if model.can_clear():
actions.append((None, None))
actions.append(("Remove all finished", model.download_clear))
return actions
@pyqtSlot('QPoint')
def show_context_menu(self, point):
"""Show the context menu."""
index = self.indexAt(point)
if index.isValid():
item = self.model().data(index, downloads.ModelRole.item)
else:
item = None
self._menu = QMenu(self)
actions = self._get_menu_actions(item)
for (name, handler) in actions:
if name is None and handler is None:
self._menu.addSeparator()
else:
assert name is not None
assert handler is not None
action = self._menu.addAction(name)
action.triggered.connect(handler)
if actions:
self._menu.popup(self.viewport().mapToGlobal(point))
def minimumSizeHint(self):
"""Override minimumSizeHint so the size is correct in a layout."""
return self.sizeHint()
def sizeHint(self):
"""Return sizeHint based on the view contents."""
idx = self.model().last_index()
bottom = self.visualRect(idx).bottom()
if bottom != -1:
margins = self.contentsMargins()
height = (bottom + margins.top() + margins.bottom() +
2 * self.spacing())
size = QSize(0, height)
else:
size = QSize(0, 0)
qtutils.ensure_valid(size)
return size
|
from datetime import timedelta
import logging
import math
from Adafruit_SHT31 import SHT31
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_CONDITIONS,
CONF_NAME,
PERCENTAGE,
PRECISION_TENTHS,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.temperature import display_temp
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_I2C_ADDRESS = "i2c_address"
DEFAULT_NAME = "SHT31"
DEFAULT_I2C_ADDRESS = 0x44
SENSOR_TEMPERATURE = "temperature"
SENSOR_HUMIDITY = "humidity"
SENSOR_TYPES = (SENSOR_TEMPERATURE, SENSOR_HUMIDITY)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.All(
vol.Coerce(int), vol.Range(min=0x44, max=0x45)
),
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
i2c_address = config.get(CONF_I2C_ADDRESS)
sensor = SHT31(address=i2c_address)
try:
if sensor.read_status() is None:
raise ValueError("CRC error while reading SHT31 status")
except (OSError, ValueError):
_LOGGER.error("SHT31 sensor not detected at address %s", hex(i2c_address))
return
sensor_client = SHTClient(sensor)
sensor_classes = {
SENSOR_TEMPERATURE: SHTSensorTemperature,
SENSOR_HUMIDITY: SHTSensorHumidity,
}
devs = []
for sensor_type, sensor_class in sensor_classes.items():
name = "{} {}".format(config.get(CONF_NAME), sensor_type.capitalize())
devs.append(sensor_class(sensor_client, name))
add_entities(devs)
class SHTClient:
"""Get the latest data from the SHT sensor."""
def __init__(self, adafruit_sht):
"""Initialize the sensor."""
self.adafruit_sht = adafruit_sht
self.temperature = None
self.humidity = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from the SHT sensor."""
temperature, humidity = self.adafruit_sht.read_temperature_humidity()
if math.isnan(temperature) or math.isnan(humidity):
_LOGGER.warning("Bad sample from sensor SHT31")
return
self.temperature = temperature
self.humidity = humidity
class SHTSensor(Entity):
"""An abstract SHTSensor, can be either temperature or humidity."""
def __init__(self, sensor, name):
"""Initialize the sensor."""
self._sensor = sensor
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Fetch temperature and humidity from the sensor."""
self._sensor.update()
class SHTSensorTemperature(SHTSensor):
"""Representation of a temperature sensor."""
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self.hass.config.units.temperature_unit
def update(self):
"""Fetch temperature from the sensor."""
super().update()
temp_celsius = self._sensor.temperature
if temp_celsius is not None:
self._state = display_temp(
self.hass, temp_celsius, TEMP_CELSIUS, PRECISION_TENTHS
)
class SHTSensorHumidity(SHTSensor):
"""Representation of a humidity sensor."""
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return PERCENTAGE
def update(self):
"""Fetch humidity from the sensor."""
super().update()
humidity = self._sensor.humidity
if humidity is not None:
self._state = round(humidity)
|
import a_sync
import asynctest
from mock import Mock
from pytest import raises
from paasta_tools.async_utils import aiter_to_list
from paasta_tools.mesos import cluster
from paasta_tools.mesos import exceptions
from paasta_tools.mesos import task
def test_get_files_for_tasks_no_files():
attrs = {"id": "foo"}
mock_task = asynctest.MagicMock(spec=task.Task)
mock_task.__getitem__.side_effect = lambda x: attrs[x]
mock_file = Mock()
mock_file.exists = asynctest.CoroutineMock(return_value=False)
mock_task.file.return_value = mock_file
files = cluster.get_files_for_tasks([mock_task], ["myfile"], 1)
with raises(exceptions.FileNotFoundForTaskException) as excinfo:
files = a_sync.block(aiter_to_list, files)
assert "None of the tasks in foo contain the files in list myfile" in str(
excinfo.value
)
def test_get_files_for_tasks_all():
mock_task = asynctest.MagicMock(spec=task.Task)
mock_file = Mock()
mock_file.exists = asynctest.CoroutineMock(return_value=True)
mock_task.file.return_value = mock_file
files = cluster.get_files_for_tasks([mock_task], ["myfile"], 1)
files = a_sync.block(aiter_to_list, files)
assert files == [mock_file]
def test_get_files_for_tasks_some():
mock_task = asynctest.MagicMock(spec=task.Task)
mock_file = Mock()
mock_file_2 = Mock()
mock_file.exists = asynctest.CoroutineMock(return_value=False)
mock_file_2.exists = asynctest.CoroutineMock(return_value=True)
mock_task.file.side_effect = [mock_file, mock_file_2]
files = cluster.get_files_for_tasks([mock_task], ["myfile", "myotherfile"], 1)
files = a_sync.block(aiter_to_list, files)
assert files == [mock_file_2]
|
import io
import os
from hashlib import md5
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, write_metadata
class CompilePhp(PageCompiler):
"""Compile PHP into PHP."""
name = "php"
friendly_name = "PHP"
def compile(self, source, dest, is_two_file=True, post=None, lang=None):
"""Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
with io.open(dest, "w+", encoding="utf8") as out_file:
with open(source, "rb") as in_file:
                checksum = md5(in_file.read()).hexdigest()  # avoid shadowing builtin hash()
            out_file.write('<!-- __NIKOLA_PHP_TEMPLATE_INJECTION source:{0} checksum:{1}__ -->'.format(source, checksum))
return True
def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
"""Compile PHP into HTML strings."""
return data, []
def create_post(self, path, **kw):
"""Create a new post."""
content = kw.pop('content', None)
onefile = kw.pop('onefile', False)
# is_page is not used by create_post as of now.
kw.pop('is_page', False)
metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
if not metadata['description']:
# For PHP, a description must be set. Otherwise, Nikola will
# take the first 200 characters of the post as the Open Graph
# description (og:description meta element)!
# If the PHP source leaks there:
# (a) The script will be executed multiple times
# (b) PHP may encounter a syntax error if it cuts too early,
# therefore completely breaking the page
# Here, we just use the title. The user should come up with
# something better, but just using the title does the job.
metadata['description'] = metadata['title']
makedirs(os.path.dirname(path))
if not content.endswith('\n'):
content += '\n'
with io.open(path, "w+", encoding="utf8") as fd:
if onefile:
fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
def extension(self):
"""Return extension used for PHP files."""
return ".php"
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import json
import os
import textwrap
import time
import unittest
import warnings
import contexter
import fs
import requests
import six
from instalooter._impl import length_hint, piexif, PIL
from instalooter.batch import BatchRunner, logger as batch_logger
from instalooter.cli import main
from instalooter.looters import InstaLooter, HashtagLooter, ProfileLooter, PostLooter
from .utils import mock
from .utils.ig_mock import MockPages
try:
CONNECTION_FAILURE = not requests.get("https://instagr.am/instagram").ok
except requests.exceptions.ConnectionError:
CONNECTION_FAILURE = True
class TestResolvedIssues(unittest.TestCase):
if six.PY2:
assertRegex = unittest.TestCase.assertRegexpMatches
@classmethod
def setUpClass(cls):
cls.session = requests.Session()
_user_agent = mock.Mock(return_value=cls.session.headers["User-Agent"])
cls.patch = mock.patch.object(InstaLooter, "_user_agent", new=_user_agent)
cls.patch.__enter__()
@classmethod
def tearDownClass(cls):
cls.session.close()
cls.patch.__exit__(None, None, None)
def setUp(self):
self.destfs = fs.open_fs("temp://")
self.tmpdir = self.destfs.getsyspath("/")
warnings._showwarning = warnings.showwarning
def tearDown(self):
self.destfs.close()
warnings.showwarning = warnings._showwarning
if os.getenv("CI") == "true":
time.sleep(1)
@unittest.expectedFailure
@unittest.skipUnless(piexif, "piexif required for this test")
def test_issue_009(self):
"""
Thanks to @kurtmaia for reporting this bug.
Checks that adding metadata to pictures downloaded from a hashtag
works as well.
"""
looter = HashtagLooter("fluoxetine", add_metadata=True, session=self.session)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('fluoxetine'))
looter.download(self.destfs, media_count=10)
for f in self.destfs.listdir("/"):
exif = piexif.load(self.destfs.getbytes(f))
self.assertTrue(exif['Exif']) # Date & Caption
self.assertTrue(exif['0th']) # Image creator
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_012(self):
"""Feature request by @paramjitrohit.
Allows downloading pictures and videos only within a timeframe.
"""
looter = ProfileLooter("nintendo", session=self.session)
day = datetime.date(2018, 3, 16)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
medias_in_timeframe = list(looter.medias(timeframe=[day, day]))
self.assertEqual(len(medias_in_timeframe), 2)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_019(self):
"""
Thanks to @emijawdo for reporting this bug.
Checks that instalooter does not crash when not given a destination
directory and uses the current directory.
"""
initial_dir = os.getcwd()
os.chdir(self.tmpdir)
try:
with contexter.Contexter() as ctx:
ctx << mock.patch('instalooter.looters.InstaLooter.pages', MockPages('nintendo'))
main(["user", "nintendo", "-n", "3", "-q"])
self.assertGreaterEqual(len(self.destfs.listdir("/")), 3)
finally:
os.chdir(initial_dir)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_014(self):
"""Feature request by @JFLarsen.
Allows customizing filenames using a template following Python
`.format()` minilanguage.
"""
looter = ProfileLooter("nintendo", template="{username}.{id}", session=self.session)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
looter.download(self.destfs, media_count=5)
for f in self.destfs.scandir("/"):
self.assertTrue(f.name.startswith('nintendo.'))
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
@unittest.skipIf(os.getenv("IG_USERNAME") is None, "need private user account")
def test_issue_006(self):
"""
Checks that instalooter does not iterate forever on a private
profile.
"""
with self.assertRaises(RuntimeError):
username = os.getenv("IG_USERNAME")
looter = ProfileLooter(username, session=self.session)
looter.logout()
next(looter.medias())
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_015(self):
"""
Feature request by @MohamedIM.
Checks that videos are not downloaded several times if present
already in the destination directory.
"""
looter = ProfileLooter("nintendo", session=self.session)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
looter.download_videos(self.destfs, media_count=1)
video_file = next(self.destfs.filterdir("/", ["*.mp4"]))
atime = self.destfs.getdetails(video_file.name).accessed
looter.download_videos(self.destfs, media_count=1)
self.assertEqual(atime, self.destfs.getdetails(video_file.name).accessed)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_022(self):
"""
Thanks to @kuchenmitsahne for reporting this bug.
Checks that using ``{datetime}`` in the template does not put
a Windows forbidden character in the filename.
"""
FORBIDDEN = set('<>:"/\\|?*')
looter = ProfileLooter("nintendo", template="{datetime}", session=self.session)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
looter.download(self.destfs, media_count=5)
for f in self.destfs.scandir("/"):
self.assertFalse(FORBIDDEN.intersection(f.name))
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
@unittest.skipUnless(PIL, "PIL required for this test")
def test_issue_026(self):
"""
Feature request by @verafide.
Checks that downloaded pictures are not resized.
"""
PostLooter("BO0XpEshejh", session=self.session).download(self.destfs)
pic = PIL.Image.open(self.destfs.getsyspath("1419863760138791137.jpg"))
self.assertEqual(pic.size, (525, 612))
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_039(self):
"""
Feature request by @verafide.
Checks that all pictures are downloaded from posts
with more than one picture.
"""
looter = PostLooter("BRHecUuFhPl", session=self.session)
looter.download(self.destfs)
self.assertEqual(
set(self.destfs.listdir("/")),
{
"1461270165803344956.jpg",
"1461270167497776767.jpg",
"1461270174435133336.jpg",
"1461270172581471925.jpg",
"1461270181565655668.jpg",
}
)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_042(self):
"""
Thanks to @MohamedIM for reporting this bug.
Checks that a multipost is successfully downloaded from
the CLI `post` option.
"""
looter = PostLooter('BRW-j_dBI6F', get_videos=True, session=self.session)
looter.download(self.destfs)
self.assertEqual(
set(self.destfs.listdir("/")),
{
'1465633492745668095.mp4',
'1465633517836005761.mp4',
'1465633541559037966.mp4',
'1465633561523918792.mp4',
}
)
# OUTDATED: warn_windows is not used anymore
#
# def test_issue_044(self):
# """
# Thanks to @Bangaio64 for reporting this bug.
#
# Checks that warn_windows does not trigger an exception.
# """
# import instalooter.utils
# warnings.showwarning = instalooter.utils.warn_windows
# looter = instalooter.InstaLooter(
# directory=self.tmpdir,
# profile="akjhdskjhfkjsdhfkjhdskjhfkjdshkfjhsdkjfdhkjdfshdfskhfd"
# )
# try:
# looter.download()
# except Exception:
# self.fail()
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_041(self):
"""Feature request by @liorlior
Allow downloading only videos.
"""
looter = ProfileLooter("nintendo", videos_only=True, session=self.session)
day = datetime.date(2017, 3, 10)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
looter.download(self.destfs, timeframe=[day, day])
self.assertEqual(self.destfs.listdir("/"), ["1467639884243493431.mp4"])
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_052(self):
"""Thanks to @cyrusclarke for reporting this bug.
Checks that on hashtags with a lot of posts, the time parameter
doesn't cause the program to crash without finding any media to
download.
"""
main(["hashtag", "happy", self.tmpdir, "-q", "-t", "thisweek", "-n", "5"])
self.assertGreaterEqual(len(self.destfs.listdir('/')), 5)
# OUTDATED: Sidecar info dicts are not converted anymore but passed
# to the workers directly.
#
# def test_issue_057(self):
# """
# Thanks to @VasiliPupkin256 for reporting this bug.
#
# Checks that metadata can successfully extract caption
# out of multiposts containing images.
# """
# looter = ProfileLooter("awwwwshoot_ob", session=self.session)
# sidecar = next(m for m in looter.medias() if m['__typename'] == "GraphSidecar")
#
# looter = PostLooter(sidecar['shortcode'], session=self.session)
# looter.download(self.destfs)
#
# for key in ('caption', 'code', 'date'):
# self.assertIn(key, media)
# self.assertIsNotNone(media[key])
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_066(self):
"""Thanks to @douglasrizzo for reporting this bug.
Check that likescount and commentscount can be used
in filename templates without causing the program to
crash.
"""
looter = ProfileLooter(
"nintendo", get_videos=True, add_metadata=True,
template='{id}-{likescount}-{commentscount}',
session=self.session)
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
looter.download(self.destfs, media_count=10)
for image in self.destfs.listdir("/"):
self.assertRegex(image, r'[a-zA-Z0-9]*-[0-9]*-[0-9]*\.(jpg|mp4)')
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_076(self):
"""Thanks to @zeshuaro for reporting this bug.
Check that when downloading hashtags, the downloader
actually stops.
"""
looter = HashtagLooter("oulianov", session=self.session)
medias_it = looter.medias()
postcount = length_hint(medias_it)
for i, m in enumerate(medias_it):
if i > postcount:
self.fail("looter.medias() did not stop.")
# OUTDATED: URLs are not modified anymore as Instagram prevents
# any modification
#
# def test_issue_082(self):
# """
# Thanks to @MohamedIM for reporting this bug.
#
# Check that urls containing 'h-ak-igx' are not stripped from all
# their parameters.
# """
# looter = instalooter.looter.PostLooter('BWOYSYQDCo5', template='{code}')
# info = next(looter.medias())
#
# info['display_url'] = \
# 'https://ig-s-c-a.akamaihd.net/h-ak-igx/19764472_1586345694718446_4011887281420894208_n.jpg'
# looter.get_post_info = lambda code: info
#
# looter.download_post('BWOYSYQDCo5')
#
# with open(os.path.join(self.tmpdir, 'BWOYSYQDCo5.jpg'), 'rb') as f:
# self.assertNotIn(b'5xx Server Error', f.read())
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_084(self):
"""Thanks to @raphaelbernardino for reporting this bug.
Make sure private profiles with few pictures (less than a page worth)
raise the private error as expected.
"""
looter = ProfileLooter("rararudo", session=self.session)
self.assertRaises(RuntimeError, looter.medias)
@unittest.expectedFailure
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
@unittest.skipUnless(piexif, "piexif required for this test")
def test_issue_094(self):
"""Thanks to @jeanmarctst for raising this issue.
Make sure caption is properly extracted from images downloaded
from a post code and written to the metadata.
"""
looter = PostLooter("BY77tSfBnRm",
add_metadata=True, template='{code}', session=self.session)
looter.download(self.destfs)
metadata = piexif.load(self.destfs.getbytes("BY77tSfBnRm.jpg"), True)
self.assertTrue(metadata['Exif']['UserComment'])
def test_issue_125(self):
"""Thanks to @applepanda for reporting this bug.
Make sure colons in paths do not cause issues in batch mode.
"""
configfile = six.StringIO(textwrap.dedent(
"""
[Family]
users =
instagram: D:\\Instagram\\Profiles\\instagram
therock: D:\\Instagram\\Profiles\\therock
"""
))
runner = BatchRunner(configfile)
self.assertEqual(
runner.get_targets(runner._get('Family', 'users')),
{'instagram': 'D:\\Instagram\\Profiles\\instagram',
'therock': 'D:\\Instagram\\Profiles\\therock'}
)
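# The expectation above implies that BatchRunner splits each target line on
# the first colon only, so drive letters in Windows paths survive intact;
# a minimal sketch of that parsing rule (illustrative, not the actual code):
#
# key, _, value = line.partition(':')
# targets[key.strip()] = value.strip()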
@mock.patch('instalooter.looters.InstaLooter.__init__')
def test_issue_184(self, _):
"""Feature request by @ghost.
Allow downloading a post directly from its URL.
"""
looter = PostLooter("https://www.instagram.com/p/BJlIB9WhdRn/?taken-by=2k")
self.assertEqual(looter.code, "BJlIB9WhdRn")
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_185(self):
"""Feature request by @JPNYC81.
Make sure an ``instalooter`` batch keeps running even if it encounters
errors on a specific job. This test tries with a non-existing profile.
"""
configfile = six.StringIO(textwrap.dedent(
"""
[Family]
num-to-dl = 3
users =
jdskjhjkfhkdshfkjdhsfjsfdkjhfksdjhf: {tmp}
instagram: {tmp}
therock: {tmp}
"""
).format(tmp=self.tmpdir))
runner = BatchRunner(configfile)
with mock.patch('instalooter.batch.logger'):
runner.run_all()
self.assertGreaterEqual(len(self.destfs.listdir('/')), 6)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_issue_194(self):
"""Feature request by @raphaelbernardino
When trying to download from an non-existing user, try to display a
meaningful message instead of a cryptic error.
"""
username = "jdhfdjkhdlqdhfdhqfqjqlhfhdsdjquryerhdjfhqlkdfhkqhfqkure"
looter = ProfileLooter(username)
with self.assertRaises(ValueError) as ctx:
media = next(looter.medias())
self.assertEqual(str(ctx.exception), "user not found: '{}'".format(username))
# @mock.patch('instalooter.looter.requests.Session', lambda: TestPullRequests.session)
class TestPullRequests(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.session = requests.Session()
@classmethod
def tearDownClass(cls):
cls.session.close()
def setUp(self):
self.destfs = fs.open_fs("temp://")
self.tmpdir = self.destfs.getsyspath("/")
def tearDown(self):
self.destfs.close()
if os.getenv("CI") == "true":
time.sleep(1)
def _pr_122_looter(self):
return ProfileLooter('nintendo', template='{code}', session=self.session)
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_pr_122_download_post(self):
"""Feature implemented by @susundberg.
Set the access time and modification time of a downloaded media
according to its IG date.
"""
code = 'BY77tSfBnRm'
post_looter = PostLooter(code, session=self.session, template='{code}')
info = post_looter.get_post_info(code)
post_looter.download(self.destfs)
stat = self.destfs.getdetails('{}.jpg'.format(code))
self.assertEqual(stat.raw["details"]["accessed"], info['taken_at_timestamp'])
self.assertEqual(stat.raw["details"]["modified"], info['taken_at_timestamp'])
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_pr_122_download_pictures(self):
"""Feature implemented by @susundberg.
Set the access time and modification time of a downloaded media
according to its IG date.
"""
# Test download_pictures
looter = self._pr_122_looter()
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
pic = next(m for m in looter.medias() if not m['is_video'])
looter.download_pictures(self.destfs, media_count=1)
stat = self.destfs.getdetails('{}.jpg'.format(pic['shortcode']))
self.assertEqual(stat.raw["details"]["accessed"], pic['taken_at_timestamp'])
self.assertEqual(stat.raw["details"]["modified"], pic['taken_at_timestamp'])
@unittest.skipIf(CONNECTION_FAILURE, "cannot connect to Instagram")
def test_pr_122_download_videos(self):
"""Feature implemented by @susundberg.
Set the access time and modification time of a downloaded media
according to its IG date.
"""
# Test download_videos
looter = self._pr_122_looter()
with contexter.Contexter() as ctx:
ctx << mock.patch.object(looter, 'pages', MockPages('nintendo'))
vid = next(m for m in looter.medias() if m['is_video'])
looter.download_videos(self.destfs, media_count=1)
stat = self.destfs.getdetails('{}.mp4'.format(vid['shortcode']))
self.assertEqual(stat.raw["details"]["accessed"], vid['taken_at_timestamp'])
self.assertEqual(stat.raw["details"]["modified"], vid['taken_at_timestamp'])
def setUpModule():
warnings.simplefilter('ignore')
def tearDownModule():
warnings.simplefilter(warnings.defaultaction)
|
import logging
import os
import re
import sys
import time
from collections import defaultdict
from urllib.parse import unquote, urlparse, urljoin, urldefrag
import lxml.etree
import lxml.html
import requests
from doit.loader import generate_tasks
from nikola.plugin_categories import Command
def _call_nikola_list(site, cache=None):
if cache is not None:
if 'files' in cache and 'deps' in cache:
return cache['files'], cache['deps']
files = []
deps = defaultdict(list)
for task in generate_tasks('render_site', site.gen_tasks('render_site', "Task", '')):
files.extend(task.targets)
for target in task.targets:
deps[target].extend(task.file_dep)
for task in generate_tasks('post_render', site.gen_tasks('render_site', "LateTask", '')):
files.extend(task.targets)
for target in task.targets:
deps[target].extend(task.file_dep)
if cache is not None:
cache['files'] = files
cache['deps'] = deps
return files, deps
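# _call_nikola_list is invoked once per scan, and again for every analyzed
# page when --find-sources is used, so callers share a plain dict to memoize
# the expensive task generation. Illustrative usage:
#
# cache = {}
# files, deps = _call_nikola_list(site, cache)  # generates tasks, fills cache
# files, deps = _call_nikola_list(site, cache)  # served straight from cache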
def real_scan_files(site, cache=None):
"""Scan for files."""
task_fnames = set()
real_fnames = set()
output_folder = site.config['OUTPUT_FOLDER']
# First check that all targets are generated in the right places
for fname in _call_nikola_list(site, cache)[0]:
fname = fname.strip()
if fname.startswith(output_folder):
task_fnames.add(fname)
# And now check that there are no non-target files
for root, dirs, files in os.walk(output_folder, followlinks=True):
for src_name in files:
fname = os.path.join(root, src_name)
real_fnames.add(fname)
only_on_output = list(real_fnames - task_fnames)
only_on_input = list(task_fnames - real_fnames)
return (only_on_output, only_on_input)
def fs_relpath_from_url_path(url_path):
"""Create a filesystem relative path from an URL path."""
# Expects as input an urlparse(s).path
url_path = unquote(url_path)
# in windows relative paths don't begin with os.sep
if sys.platform == 'win32' and len(url_path):
url_path = url_path.replace('/', '\\')
return url_path
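# Illustrative examples (assuming the input is a urlparse().path value):
#
# fs_relpath_from_url_path('blog/2018/index%2Ehtml')
# -> 'blog/2018/index.html' on POSIX
# -> 'blog\\2018\\index.html' on Windows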
class CommandCheck(Command):
"""Check the generated site."""
name = "check"
doc_usage = "[-v] (-l [--find-sources] [-r] | -f [--clean-files])"
doc_purpose = "check links and files in the generated site"
cmd_options = [
{
'name': 'links',
'short': 'l',
'long': 'check-links',
'type': bool,
'default': False,
'help': 'Check for dangling links',
},
{
'name': 'files',
'short': 'f',
'long': 'check-files',
'type': bool,
'default': False,
'help': 'Check for unknown (orphaned and not generated) files',
},
{
'name': 'clean',
'long': 'clean-files',
'type': bool,
'default': False,
'help': 'Remove all unknown files, use with caution',
},
{
'name': 'find_sources',
'long': 'find-sources',
'type': bool,
'default': False,
'help': 'List possible source files for files with broken links.',
},
{
'name': 'verbose',
'long': 'verbose',
'short': 'v',
'type': bool,
'default': False,
'help': 'Be more verbose.',
},
{
'name': 'remote',
'long': 'remote',
'short': 'r',
'type': bool,
'default': False,
'help': 'Check that remote links work.',
},
]
def _execute(self, options, args):
"""Check the generated site."""
if not options['links'] and not options['files'] and not options['clean']:
print(self.help())
return 1
if options['verbose']:
self.logger.level = logging.DEBUG
else:
self.logger.level = logging.WARNING
failure = False
if options['links']:
failure |= self.scan_links(options['find_sources'], options['remote'])
if options['files']:
failure |= self.scan_files()
if options['clean']:
failure |= self.clean_files()
if failure:
return 1
existing_targets = set()
checked_remote_targets = {}
cache = {}
def analyze(self, fname, find_sources=False, check_remote=False):
"""Analyze links on a page."""
rv = False
self.whitelist = [re.compile(x) for x in self.site.config['LINK_CHECK_WHITELIST']]
self.internal_redirects = [urljoin('/', _[0]) for _ in self.site.config['REDIRECTIONS']]
base_url = urlparse(self.site.config['BASE_URL'])
self.existing_targets.add(self.site.config['SITE_URL'])
self.existing_targets.add(self.site.config['BASE_URL'])
url_type = self.site.config['URL_TYPE']
atom_extension = self.site.config['ATOM_EXTENSION']
deps = {}
if find_sources:
deps = _call_nikola_list(self.site, self.cache)[1]
if url_type in ('absolute', 'full_path'):
url_netloc_to_root = urlparse(self.site.config['BASE_URL']).path
try:
filename = fname
if filename.startswith(self.site.config['CACHE_FOLDER']):
# Do not look at links in the cache, which are not parsed by
# anyone and may result in false positives. Problems arise
# with galleries, for example. Full rationale: (Issue #1447)
self.logger.warning("Ignoring {0} (in cache, links may be incorrect)".format(filename))
return False
if not os.path.exists(fname):
# Quietly ignore files that don’t exist; use `nikola check -f` instead (Issue #1831)
return False
if fname.endswith('.html'):
with open(filename, 'rb') as inf:
d = lxml.html.fromstring(inf.read())
extra_objs = lxml.html.fromstring('<html/>')
# Turn elements with a srcset attribute into individual img elements with src attributes
for obj in list(d.xpath('(*//img|*//source)')):
if 'srcset' in obj.attrib:
for srcset_item in obj.attrib['srcset'].split(','):
extra_objs.append(lxml.etree.Element('img', src=srcset_item.strip().split(' ')[0]))
link_elements = list(d.iterlinks()) + list(extra_objs.iterlinks())
# Extract links from XML formats to minimal HTML, allowing those to go through the link checks
elif filename.endswith(atom_extension):
d = lxml.etree.parse(filename)
link_elements = lxml.html.fromstring('<html/>')
for elm in d.findall('*//{http://www.w3.org/2005/Atom}link'):
feed_link = elm.attrib['href'].split('?')[0].strip() # strip FEED_LINKS_APPEND_QUERY
link_elements.append(lxml.etree.Element('a', href=feed_link))
link_elements = list(link_elements.iterlinks())
elif filename.endswith('sitemap.xml') or filename.endswith('sitemapindex.xml'):
d = lxml.etree.parse(filename)
link_elements = lxml.html.fromstring('<html/>')
for elm in d.getroot().findall("*//{http://www.sitemaps.org/schemas/sitemap/0.9}loc"):
link_elements.append(lxml.etree.Element('a', href=elm.text.strip()))
link_elements = list(link_elements.iterlinks())
else: # unsupported file type
return False
for link in link_elements:
target = link[2]
if target == "#":
continue
target = urldefrag(target)[0]
if any([urlparse(target).netloc.endswith(_) for _ in ['example.com', 'example.net', 'example.org']]):
self.logger.debug("Not testing example address \"{0}\".".format(target))
continue
# absolute URL to root-relative
if target.startswith(base_url.geturl()):
target = target.replace(base_url.geturl(), '/')
parsed = urlparse(target)
# Warn about links from https to http (mixed-security)
if base_url.netloc == parsed.netloc and base_url.scheme == "https" and parsed.scheme == "http":
self.logger.warning("Mixed-content security for link in {0}: {1}".format(filename, target))
# Link to an internal REDIRECTIONS page
if target in self.internal_redirects:
redir_status_code = 301
redir_target = [_dest for _target, _dest in self.site.config['REDIRECTIONS'] if urljoin('/', _target) == target][0]
self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: 301]".format(redir_target, filename, target))
# Absolute links to other domains, skip
# Absolute links when using only paths, skip.
if ((parsed.scheme or target.startswith('//')) and parsed.netloc != base_url.netloc) or \
((parsed.scheme or target.startswith('//')) and url_type in ('rel_path', 'full_path')):
if not check_remote or parsed.scheme not in ["http", "https"]:
continue
if target in self.checked_remote_targets: # already checked this exact target
if self.checked_remote_targets[target] in [301, 308]:
self.logger.warning("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
elif self.checked_remote_targets[target] in [302, 307]:
self.logger.debug("Remote link temporarily redirected in {0}: {1} [HTTP: {2}]".format(filename, target, self.checked_remote_targets[target]))
elif self.checked_remote_targets[target] > 399:
self.logger.error("Broken link in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
continue
# Skip whitelisted targets
if any(pattern.search(target) for pattern in self.whitelist):
continue
# Check the remote link works
req_headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0 (Nikola)'} # I’m a real boy!
resp = requests.head(target, headers=req_headers, allow_redirects=False)
# Retry client errors (4xx) as GET requests because many servers are broken
if resp.status_code >= 400 and resp.status_code <= 499:
time.sleep(0.5)
resp = requests.get(target, headers=req_headers, allow_redirects=False)
# Follow redirects and see where they lead, redirects to errors will be reported twice
if resp.status_code in [301, 302, 307, 308]:
redir_status_code = resp.status_code
time.sleep(0.5)
# Known redirects are retested using GET because IIS servers otherwise get HEADaches
resp = requests.get(target, headers=req_headers, allow_redirects=True)
# Permanent redirects should be updated
if redir_status_code in [301, 308]:
self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
if redir_status_code in [302, 307]:
self.logger.debug("Remote link temporarily redirected to \"{0}\" in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
self.checked_remote_targets[resp.url] = resp.status_code
self.checked_remote_targets[target] = redir_status_code
else:
self.checked_remote_targets[target] = resp.status_code
if resp.status_code > 399: # Error
self.logger.error("Broken link in {0}: {1} [Error {2}]".format(filename, target, resp.status_code))
continue
elif resp.status_code <= 399: # The address leads *somewhere* that is not an error
self.logger.debug("Successfully checked remote link in {0}: {1} [HTTP: {2}]".format(filename, target, resp.status_code))
continue
self.logger.warning("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
continue
if url_type == 'rel_path':
if target.startswith('/'):
target_filename = os.path.abspath(
os.path.join(self.site.config['OUTPUT_FOLDER'], unquote(target.lstrip('/'))))
else: # Relative path
unquoted_target = unquote(target).encode('utf-8')
target_filename = os.path.abspath(
os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
else:
relative = False
if url_type == 'absolute':
# convert to 'full_path' case, ie url relative to root
if parsed.path.startswith(url_netloc_to_root):
url_rel_path = parsed.path[len(url_netloc_to_root):]
else:
url_rel_path = parsed.path
if not url_rel_path.startswith('/'):
relative = True
else:
# convert to relative to base path
if target.startswith(url_netloc_to_root):
url_rel_path = target[len(url_netloc_to_root):]
else:
url_rel_path = target
if not url_rel_path.startswith('/'):
relative = True
if url_rel_path == '' or url_rel_path.endswith('/'):
url_rel_path = urljoin(url_rel_path, self.site.config['INDEX_FILE'])
if relative:
unquoted_target = unquote(target).encode('utf-8')
target_filename = os.path.abspath(
os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
else:
fs_rel_path = fs_relpath_from_url_path(url_rel_path)
target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
if isinstance(target_filename, str):
target_filename_str = target_filename
else:
target_filename_str = target_filename.decode("utf-8", errors="surrogateescape")
if any(pattern.search(target_filename_str) for pattern in self.whitelist):
continue
elif target_filename not in self.existing_targets:
if os.path.exists(target_filename):
self.logger.info("Good link {0} => {1}".format(target, target_filename))
self.existing_targets.add(target_filename)
else:
rv = True
self.logger.warning("Broken link in {0}: {1}".format(filename, target))
if find_sources:
self.logger.warning("Possible sources:")
self.logger.warning("\n".join(deps[filename]))
self.logger.warning("===============================\n")
except Exception as exc:
self.logger.error(u"Error with: {0} {1}".format(filename, exc))
return rv
def scan_links(self, find_sources=False, check_remote=False):
"""Check links on the site."""
self.logger.debug("Checking Links:")
self.logger.debug("===============\n")
self.logger.debug("{0} mode".format(self.site.config['URL_TYPE']))
failure = False
atom_extension = self.site.config['ATOM_EXTENSION']
# Maybe we should just examine all HTML files
output_folder = self.site.config['OUTPUT_FOLDER']
if urlparse(self.site.config['BASE_URL']).netloc == 'example.com':
self.logger.error("You've not changed the SITE_URL (or BASE_URL) setting from \"example.com\"!")
for fname in _call_nikola_list(self.site, self.cache)[0]:
if fname.startswith(output_folder):
if fname.endswith('.html'):
if self.analyze(fname, find_sources, check_remote):
failure = True
if fname.endswith(atom_extension):
if self.analyze(fname, find_sources, False):
failure = True
if fname.endswith('sitemap.xml') or fname.endswith('sitemapindex.xml'):
if self.analyze(fname, find_sources, False):
failure = True
if not failure:
self.logger.debug("All links checked.")
return failure
def scan_files(self):
"""Check files in the site, find missing and orphaned files."""
failure = False
self.logger.debug("Checking Files:")
self.logger.debug("===============\n")
only_on_output, only_on_input = real_scan_files(self.site, self.cache)
# Ignore folders
only_on_output = [p for p in only_on_output if not os.path.isdir(p)]
only_on_input = [p for p in only_on_input if not os.path.isdir(p)]
if only_on_output:
only_on_output.sort()
self.logger.warning("Files from unknown origins (orphans):")
for f in only_on_output:
self.logger.warning(f)
failure = True
if only_on_input:
only_on_input.sort()
self.logger.warning("Files not generated:")
for f in only_on_input:
self.logger.warning(f)
if not failure:
self.logger.debug("All files checked.")
return failure
def clean_files(self):
"""Remove orphaned files."""
only_on_output, _ = real_scan_files(self.site, self.cache)
for f in only_on_output:
self.logger.debug('removed: {0}'.format(f))
os.unlink(f)
warn_flag = bool(only_on_output)
# Find empty directories and remove them
output_folder = self.site.config['OUTPUT_FOLDER']
all_dirs = []
for root, dirs, files in os.walk(output_folder, followlinks=True):
all_dirs.append(root)
all_dirs.sort(key=len, reverse=True)
for d in all_dirs:
try:
os.rmdir(d)
self.logger.debug('removed: {0}/'.format(d))
warn_flag = True
except OSError:
pass
if warn_flag:
self.logger.warning('Some files or directories have been removed, your site may need rebuilding')
return True
return False
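# Example invocations matching the doc_usage string above (illustrative):
#
# nikola check -l                 # report dangling internal links
# nikola check -l -r              # also verify that remote links resolve
# nikola check -l --find-sources  # list source files behind broken links
# nikola check -f                 # find orphaned and missing files
# nikola check -f --clean-files   # delete orphaned files, use with caution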
|
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from . import DATA_HIVE, DOMAIN, HiveEntity
FRIENDLY_NAMES = {
"Hub_OnlineStatus": "Hive Hub Status",
"Hive_OutsideTemperature": "Outside Temperature",
}
DEVICETYPE_ICONS = {
"Hub_OnlineStatus": "mdi:switch",
"Hive_OutsideTemperature": "mdi:thermometer",
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Hive sensor devices."""
if discovery_info is None:
return
session = hass.data.get(DATA_HIVE)
devs = []
for dev in discovery_info:
if dev["HA_DeviceType"] in FRIENDLY_NAMES:
devs.append(HiveSensorEntity(session, dev))
add_entities(devs)
class HiveSensorEntity(HiveEntity, Entity):
"""Hive Sensor Entity."""
@property
def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id
@property
def device_info(self):
"""Return device information."""
return {"identifiers": {(DOMAIN, self.unique_id)}, "name": self.name}
@property
def name(self):
"""Return the name of the sensor."""
return FRIENDLY_NAMES.get(self.device_type)
@property
def state(self):
"""Return the state of the sensor."""
if self.device_type == "Hub_OnlineStatus":
return self.session.sensor.hub_online_status(self.node_id)
if self.device_type == "Hive_OutsideTemperature":
return self.session.weather.temperature()
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
if self.device_type == "Hive_OutsideTemperature":
return TEMP_CELSIUS
@property
def icon(self):
"""Return the icon to use."""
return DEVICETYPE_ICONS.get(self.device_type)
def update(self):
"""Update all Node data from Hive."""
self.session.core.update_data(self.node_id)
|
import os
import attr
import pytest
import bs4
from PyQt5.QtCore import QUrl
from PyQt5.QtNetwork import QNetworkRequest
from qutebrowser.browser.webkit.network import filescheme
from qutebrowser.utils import urlutils, utils
from helpers import utils as testutils
@pytest.mark.parametrize('create_file, create_dir, filterfunc, expected', [
(True, False, os.path.isfile, True),
(True, False, os.path.isdir, False),
(False, True, os.path.isfile, False),
(False, True, os.path.isdir, True),
(False, False, os.path.isfile, False),
(False, False, os.path.isdir, False),
])
def test_get_file_list(tmpdir, create_file, create_dir, filterfunc, expected):
"""Test get_file_list."""
path = tmpdir / 'foo'
if create_file or create_dir:
path.ensure(dir=create_dir)
all_files = os.listdir(str(tmpdir))
result = filescheme.get_file_list(str(tmpdir), all_files, filterfunc)
item = {'name': 'foo', 'absname': str(path)}
assert (item in result) == expected
class TestIsRoot:
@pytest.mark.windows
@pytest.mark.parametrize('directory, is_root', [
('C:\\foo\\bar', False),
('C:\\foo\\', False),
('C:\\foo', False),
('C:\\', True)
])
def test_windows(self, directory, is_root):
assert filescheme.is_root(directory) == is_root
@pytest.mark.posix
@pytest.mark.parametrize('directory, is_root', [
('/foo/bar', False),
('/foo/', False),
('/foo', False),
('/', True)
])
def test_posix(self, directory, is_root):
assert filescheme.is_root(directory) == is_root
class TestParentDir:
@pytest.mark.windows
@pytest.mark.parametrize('directory, parent', [
('C:\\foo\\bar', 'C:\\foo'),
('C:\\foo', 'C:\\'),
('C:\\foo\\', 'C:\\'),
('C:\\', 'C:\\'),
])
def test_windows(self, directory, parent):
assert filescheme.parent_dir(directory) == parent
@pytest.mark.posix
@pytest.mark.parametrize('directory, parent', [
('/home/foo', '/home'),
('/home', '/'),
('/home/', '/'),
('/', '/'),
])
def test_posix(self, directory, parent):
assert filescheme.parent_dir(directory) == parent
def _file_url(path):
"""Return a file:// url (as string) for the given LocalPath.
Arguments:
path: The filepath as LocalPath (as handled by py.path)
"""
return urlutils.file_url(str(path))
class TestDirbrowserHtml:
@attr.s
class Parsed:
parent = attr.ib()
folders = attr.ib()
files = attr.ib()
@attr.s
class Item:
link = attr.ib()
text = attr.ib()
@pytest.fixture
def parser(self):
"""Provide a function to get a parsed dirbrowser document."""
def parse(path):
html = filescheme.dirbrowser_html(path).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
container = soup('div', id='dirbrowserContainer')[0]
parent_elem = container('ul', class_='parent')
if not parent_elem:
parent = None
else:
parent = parent_elem[0].li.a.string
folders = []
files = []
for li in container('ul', class_='folders')[0]('li'):
item = self.Item(link=li.a['href'], text=str(li.a.string))
folders.append(item)
for li in container('ul', class_='files')[0]('li'):
item = self.Item(link=li.a['href'], text=str(li.a.string))
files.append(item)
return self.Parsed(parent=parent, folders=folders, files=files)
return parse
def test_basic(self):
html = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
container = soup.div
assert container['id'] == 'dirbrowserContainer'
title_elem = container('div', id='dirbrowserTitle')[0]
title_text = title_elem('p', id='dirbrowserTitleText')[0].text
assert title_text == 'Browse directory: {}'.format(os.getcwd())
def test_icons(self, monkeypatch):
"""Make sure icon paths are correct file:// URLs."""
monkeypatch.setattr(filescheme.jinja.utils, 'resource_filename',
lambda name: '/test path/foo.svg')
html = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
css = soup.html.head.style.string
assert "background-image: url('file:///test%20path/foo.svg');" in css
def test_empty(self, tmpdir, parser):
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.folders
assert not parsed.files
def test_files(self, tmpdir, parser):
foo_file = tmpdir / 'foo'
bar_file = tmpdir / 'bar'
foo_file.ensure()
bar_file.ensure()
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.folders
foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))
bar_item = self.Item(_file_url(bar_file), bar_file.relto(tmpdir))
assert parsed.files == [bar_item, foo_item]
def test_html_special_chars(self, tmpdir, parser):
special_file = tmpdir / 'foo&bar'
special_file.ensure()
parsed = parser(str(tmpdir))
item = self.Item(_file_url(special_file), special_file.relto(tmpdir))
assert parsed.files == [item]
def test_dirs(self, tmpdir, parser):
foo_dir = tmpdir / 'foo'
bar_dir = tmpdir / 'bar'
foo_dir.ensure(dir=True)
bar_dir.ensure(dir=True)
parsed = parser(str(tmpdir))
assert parsed.parent
assert not parsed.files
foo_item = self.Item(_file_url(foo_dir), foo_dir.relto(tmpdir))
bar_item = self.Item(_file_url(bar_dir), bar_dir.relto(tmpdir))
assert parsed.folders == [bar_item, foo_item]
def test_mixed(self, tmpdir, parser):
foo_file = tmpdir / 'foo'
bar_dir = tmpdir / 'bar'
foo_file.ensure()
bar_dir.ensure(dir=True)
parsed = parser(str(tmpdir))
foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))
bar_item = self.Item(_file_url(bar_dir), bar_dir.relto(tmpdir))
assert parsed.parent
assert parsed.files == [foo_item]
assert parsed.folders == [bar_item]
def test_root_dir(self, tmpdir, parser):
root_dir = 'C:\\' if utils.is_windows else '/'
parsed = parser(root_dir)
assert not parsed.parent
def test_oserror(self, mocker):
m = mocker.patch('qutebrowser.browser.webkit.network.filescheme.'
'os.listdir')
m.side_effect = OSError('Error message')
html = filescheme.dirbrowser_html('').decode('utf-8')
soup = bs4.BeautifulSoup(html, 'html.parser')
with testutils.ignore_bs4_warning():
print(soup.prettify())
error_msg = soup('p', id='error-message-text')[0].string
assert error_msg == 'Error message'
class TestFileSchemeHandler:
def test_dir(self, tmpdir):
url = QUrl.fromLocalFile(str(tmpdir))
req = QNetworkRequest(url)
reply = filescheme.handler(req, None, None)
# The URL will always use /, even on Windows - so we force this here
# too.
tmpdir_path = str(tmpdir).replace(os.sep, '/')
assert reply.readAll() == filescheme.dirbrowser_html(tmpdir_path)
def test_file(self, tmpdir):
filename = tmpdir / 'foo'
filename.ensure()
url = QUrl.fromLocalFile(str(filename))
req = QNetworkRequest(url)
reply = filescheme.handler(req, None, None)
assert reply is None
def test_unicode_encode_error(self, mocker):
url = QUrl('file:///tmp/foo')
req = QNetworkRequest(url)
err = UnicodeEncodeError('ascii', '', 0, 2, 'foo')
mocker.patch('os.path.isdir', side_effect=err)
reply = filescheme.handler(req, None, None)
assert reply is None
|
import os
import unittest
from perfkitbenchmarker.linux_packages import dstat
class DstatTestCase(unittest.TestCase):
def testParseDstatFile(self):
path = os.path.join(os.path.dirname(__file__), '..', 'data',
'dstat-result.csv')
with open(path) as f:
labels, out = dstat.ParseCsvFile(iter(f))
self.assertEqual(len(labels), len(out[0]))
self.assertEqual(out.shape, (383, 62))
self.assertEqual([
'epoch__epoch', 'usr__total cpu usage', 'sys__total cpu usage',
'idl__total cpu usage', 'wai__total cpu usage', 'hiq__total cpu usage',
'siq__total cpu usage', '1m__load avg', '5m__load avg', '15m__load avg',
'read__io/total', 'writ__io/total', 'read__io/sda', 'writ__io/sda',
'read__dsk/total', 'writ__dsk/total', 'read__dsk/sda', 'writ__dsk/sda',
'recv__net/total', 'send__net/total', 'in__paging', 'out__paging',
'int__system', 'csw__system', '12__interrupts', '25__interrupts',
'30__interrupts', 'run__procs', 'blk__procs', 'new__procs',
'used__memory usage', 'buff__memory usage', 'cach__memory usage',
'free__memory usage', 'used__swap', 'free__swap', 'files__filesystem',
'inodes__filesystem', 'msg__sysv ipc', 'sem__sysv ipc', 'shm__sysv ipc',
'lis__tcp sockets', 'act__tcp sockets', 'syn__tcp sockets',
'tim__tcp sockets', 'clo__tcp sockets', 'lis__udp', 'act__udp',
'raw__raw', 'tot__sockets', 'tcp__sockets', 'udp__sockets',
'raw__sockets', 'frg__sockets', 'dgm__unix sockets',
'str__unix sockets', 'lis__unix sockets', 'act__unix sockets',
'majpf__virtual memory', 'minpf__virtual memory',
'alloc__virtual memory', 'free__virtual memory'], labels)
if __name__ == '__main__':
unittest.main()
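# As the expected list above shows, ParseCsvFile flattens dstat's two-row CSV
# header into '<column>__<category>' labels (e.g. 'usr__total cpu usage'), and
# `out` is a two-dimensional array with one row per sample, which is why
# len(labels) must equal the width of each row.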
|