from typing import Callable
import pysesame2
import voluptuous as vol
from homeassistant.components.lock import PLATFORM_SCHEMA, LockEntity
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
CONF_API_KEY,
STATE_LOCKED,
STATE_UNLOCKED,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
ATTR_DEVICE_ID = "device_id"
ATTR_SERIAL_NO = "serial"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Required(CONF_API_KEY): cv.string})
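# Illustrative configuration sketch (not part of this module): the platform only
# needs the CloudSesame API key, so a configuration.yaml entry would look roughly
# like this (the key value is a placeholder):
#
#   lock:
#     - platform: sesame
#       api_key: YOUR_CLOUDSESAME_API_KEY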
def setup_platform(
hass, config: ConfigType, add_entities: Callable[[list], None], discovery_info=None
):
"""Set up the Sesame platform."""
api_key = config.get(CONF_API_KEY)
add_entities(
[SesameDevice(sesame) for sesame in pysesame2.get_sesames(api_key)],
update_before_add=True,
)
class SesameDevice(LockEntity):
"""Representation of a Sesame device."""
def __init__(self, sesame: object) -> None:
"""Initialize the Sesame device."""
self._sesame = sesame
# Cached properties from pysesame object.
self._device_id = None
self._serial = None
self._nickname = None
self._is_locked = False
self._responsive = False
self._battery = -1
@property
def name(self) -> str:
"""Return the name of the device."""
return self._nickname
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._responsive
@property
def is_locked(self) -> bool:
"""Return True if the device is currently locked, else False."""
return self._is_locked
@property
def state(self) -> str:
"""Get the state of the device."""
return STATE_LOCKED if self._is_locked else STATE_UNLOCKED
def lock(self, **kwargs) -> None:
"""Lock the device."""
self._sesame.lock()
def unlock(self, **kwargs) -> None:
"""Unlock the device."""
self._sesame.unlock()
def update(self) -> None:
"""Update the internal state of the device."""
status = self._sesame.get_status()
self._nickname = self._sesame.nickname
self._device_id = str(self._sesame.id)
self._serial = self._sesame.serial
self._battery = status["battery"]
self._is_locked = status["locked"]
self._responsive = status["responsive"]
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
return {
ATTR_DEVICE_ID: self._device_id,
ATTR_SERIAL_NO: self._serial,
ATTR_BATTERY_LEVEL: self._battery,
}
|
import asyncio
from datetime import timedelta
import threading
import pytest
from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE
from homeassistant.core import Context
from homeassistant.helpers import entity, entity_registry
from tests.async_mock import MagicMock, PropertyMock, patch
from tests.common import (
MockConfigEntry,
MockEntity,
MockEntityPlatform,
get_test_home_assistant,
mock_registry,
)
def test_generate_entity_id_requires_hass_or_ids():
"""Ensure we require at least hass or current ids."""
with pytest.raises(ValueError):
entity.generate_entity_id("test.{}", "hello world")
def test_generate_entity_id_given_keys():
"""Test generating an entity id given current ids."""
assert (
entity.generate_entity_id(
"test.{}",
"overwrite hidden true",
current_ids=["test.overwrite_hidden_true"],
)
== "test.overwrite_hidden_true_2"
)
assert (
entity.generate_entity_id(
"test.{}", "overwrite hidden true", current_ids=["test.another_entity"]
)
== "test.overwrite_hidden_true"
)
async def test_async_update_support(hass):
"""Test async update getting called."""
sync_update = []
async_update = []
class AsyncEntity(entity.Entity):
"""A test entity."""
entity_id = "sensor.test"
def update(self):
"""Update entity."""
sync_update.append([1])
ent = AsyncEntity()
ent.hass = hass
await ent.async_update_ha_state(True)
assert len(sync_update) == 1
assert len(async_update) == 0
async def async_update_func():
"""Async update."""
async_update.append(1)
ent.async_update = async_update_func
await ent.async_update_ha_state(True)
assert len(sync_update) == 1
assert len(async_update) == 1
class TestHelpersEntity:
"""Test homeassistant.helpers.entity module."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.entity = entity.Entity()
self.entity.entity_id = "test.overwrite_hidden_true"
self.hass = self.entity.hass = get_test_home_assistant()
self.entity.schedule_update_ha_state()
self.hass.block_till_done()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_generate_entity_id_given_hass(self):
"""Test generating an entity id given hass object."""
fmt = "test.{}"
assert (
entity.generate_entity_id(fmt, "overwrite hidden true", hass=self.hass)
== "test.overwrite_hidden_true_2"
)
def test_device_class(self):
"""Test device class attribute."""
state = self.hass.states.get(self.entity.entity_id)
assert state.attributes.get(ATTR_DEVICE_CLASS) is None
with patch(
"homeassistant.helpers.entity.Entity.device_class", new="test_class"
):
self.entity.schedule_update_ha_state()
self.hass.block_till_done()
state = self.hass.states.get(self.entity.entity_id)
assert state.attributes.get(ATTR_DEVICE_CLASS) == "test_class"
async def test_warn_slow_update(hass, caplog):
"""Warn we log when entity update takes a long time."""
update_call = False
async def async_update():
"""Mock async update."""
nonlocal update_call
await asyncio.sleep(0.00001)
update_call = True
mock_entity = entity.Entity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.async_update = async_update
fast_update_time = 0.0000001
with patch.object(entity, "SLOW_UPDATE_WARNING", fast_update_time):
await mock_entity.async_update_ha_state(True)
assert str(fast_update_time) in caplog.text
assert mock_entity.entity_id in caplog.text
assert update_call
async def test_warn_slow_update_with_exception(hass, caplog):
"""Warn we log when entity update takes a long time and trow exception."""
update_call = False
async def async_update():
"""Mock async update."""
nonlocal update_call
update_call = True
await asyncio.sleep(0.00001)
raise AssertionError("Fake update error")
mock_entity = entity.Entity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.async_update = async_update
fast_update_time = 0.0000001
with patch.object(entity, "SLOW_UPDATE_WARNING", fast_update_time):
await mock_entity.async_update_ha_state(True)
assert str(fast_update_time) in caplog.text
assert mock_entity.entity_id in caplog.text
assert update_call
async def test_warn_slow_device_update_disabled(hass, caplog):
"""Disable slow update warning with async_device_update."""
update_call = False
async def async_update():
"""Mock async update."""
nonlocal update_call
await asyncio.sleep(0.00001)
update_call = True
mock_entity = entity.Entity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.async_update = async_update
fast_update_time = 0.0000001
with patch.object(entity, "SLOW_UPDATE_WARNING", fast_update_time):
await mock_entity.async_device_update(warning=False)
assert str(fast_update_time) not in caplog.text
assert mock_entity.entity_id not in caplog.text
assert update_call
async def test_async_schedule_update_ha_state(hass):
"""Warn we log when entity update takes a long time and trow exception."""
update_call = False
async def async_update():
"""Mock async update."""
nonlocal update_call
update_call = True
mock_entity = entity.Entity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.async_update = async_update
mock_entity.async_schedule_update_ha_state(True)
await hass.async_block_till_done()
assert update_call is True
async def test_async_async_request_call_without_lock(hass):
"""Test for async_requests_call works without a lock."""
updates = []
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
async def testhelper(self, count):
"""Helper function."""
updates.append(count)
ent_1 = AsyncEntity("light.test_1")
ent_2 = AsyncEntity("light.test_2")
try:
job1 = ent_1.async_request_call(ent_1.testhelper(1))
job2 = ent_2.async_request_call(ent_2.testhelper(2))
await asyncio.wait([job1, job2])
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
finally:
pass
assert len(updates) == 2
updates.sort()
assert updates == [1, 2]
async def test_async_async_request_call_with_lock(hass):
"""Test for async_requests_call works with a semaphore."""
updates = []
test_semaphore = asyncio.Semaphore(1)
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id, lock):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
self.parallel_updates = lock
async def testhelper(self, count):
"""Helper function."""
updates.append(count)
ent_1 = AsyncEntity("light.test_1", test_semaphore)
ent_2 = AsyncEntity("light.test_2", test_semaphore)
try:
assert test_semaphore.locked() is False
await test_semaphore.acquire()
assert test_semaphore.locked()
job1 = ent_1.async_request_call(ent_1.testhelper(1))
job2 = ent_2.async_request_call(ent_2.testhelper(2))
hass.async_create_task(job1)
hass.async_create_task(job2)
assert len(updates) == 0
assert updates == []
assert test_semaphore._value == 0
test_semaphore.release()
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
finally:
test_semaphore.release()
assert len(updates) == 2
updates.sort()
assert updates == [1, 2]
async def test_async_parallel_updates_with_zero(hass):
"""Test parallel updates with 0 (disabled)."""
updates = []
test_lock = asyncio.Event()
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id, count):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
self._count = count
async def async_update(self):
"""Test update."""
updates.append(self._count)
await test_lock.wait()
ent_1 = AsyncEntity("sensor.test_1", 1)
ent_2 = AsyncEntity("sensor.test_2", 2)
try:
ent_1.async_schedule_update_ha_state(True)
ent_2.async_schedule_update_ha_state(True)
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
assert len(updates) == 2
assert updates == [1, 2]
finally:
test_lock.set()
async def test_async_parallel_updates_with_zero_on_sync_update(hass):
"""Test parallel updates with 0 (disabled)."""
updates = []
test_lock = threading.Event()
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id, count):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
self._count = count
def update(self):
"""Test update."""
updates.append(self._count)
if not test_lock.wait(timeout=1):
# if we time out, populate more data to fail the test
updates.append(self._count)
ent_1 = AsyncEntity("sensor.test_1", 1)
ent_2 = AsyncEntity("sensor.test_2", 2)
try:
ent_1.async_schedule_update_ha_state(True)
ent_2.async_schedule_update_ha_state(True)
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
assert len(updates) == 2
assert updates == [1, 2]
finally:
test_lock.set()
await asyncio.sleep(0)
async def test_async_parallel_updates_with_one(hass):
"""Test parallel updates with 1 (sequential)."""
updates = []
test_lock = asyncio.Lock()
test_semaphore = asyncio.Semaphore(1)
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id, count):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
self._count = count
self.parallel_updates = test_semaphore
async def async_update(self):
"""Test update."""
updates.append(self._count)
await test_lock.acquire()
ent_1 = AsyncEntity("sensor.test_1", 1)
ent_2 = AsyncEntity("sensor.test_2", 2)
ent_3 = AsyncEntity("sensor.test_3", 3)
await test_lock.acquire()
try:
ent_1.async_schedule_update_ha_state(True)
ent_2.async_schedule_update_ha_state(True)
ent_3.async_schedule_update_ha_state(True)
while True:
if len(updates) >= 1:
break
await asyncio.sleep(0)
assert len(updates) == 1
assert updates == [1]
updates.clear()
test_lock.release()
await asyncio.sleep(0)
while True:
if len(updates) >= 1:
break
await asyncio.sleep(0)
assert len(updates) == 1
assert updates == [2]
updates.clear()
test_lock.release()
await asyncio.sleep(0)
while True:
if len(updates) >= 1:
break
await asyncio.sleep(0)
assert len(updates) == 1
assert updates == [3]
updates.clear()
test_lock.release()
await asyncio.sleep(0)
finally:
# we may have more than one lock to release in case the test failed
for _ in updates:
test_lock.release()
await asyncio.sleep(0)
test_lock.release()
async def test_async_parallel_updates_with_two(hass):
"""Test parallel updates with 2 (parallel)."""
updates = []
test_lock = asyncio.Lock()
test_semaphore = asyncio.Semaphore(2)
class AsyncEntity(entity.Entity):
"""Test entity."""
def __init__(self, entity_id, count):
"""Initialize Async test entity."""
self.entity_id = entity_id
self.hass = hass
self._count = count
self.parallel_updates = test_semaphore
async def async_update(self):
"""Test update."""
updates.append(self._count)
await test_lock.acquire()
ent_1 = AsyncEntity("sensor.test_1", 1)
ent_2 = AsyncEntity("sensor.test_2", 2)
ent_3 = AsyncEntity("sensor.test_3", 3)
ent_4 = AsyncEntity("sensor.test_4", 4)
await test_lock.acquire()
try:
ent_1.async_schedule_update_ha_state(True)
ent_2.async_schedule_update_ha_state(True)
ent_3.async_schedule_update_ha_state(True)
ent_4.async_schedule_update_ha_state(True)
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
assert len(updates) == 2
assert updates == [1, 2]
updates.clear()
test_lock.release()
await asyncio.sleep(0)
test_lock.release()
await asyncio.sleep(0)
while True:
if len(updates) >= 2:
break
await asyncio.sleep(0)
assert len(updates) == 2
assert updates == [3, 4]
updates.clear()
test_lock.release()
await asyncio.sleep(0)
test_lock.release()
await asyncio.sleep(0)
finally:
# we may have more than one lock to release in case the test failed
for _ in updates:
test_lock.release()
await asyncio.sleep(0)
test_lock.release()
async def test_async_remove_no_platform(hass):
"""Test async_remove method when no platform set."""
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "test.test"
await ent.async_update_ha_state()
assert len(hass.states.async_entity_ids()) == 1
await ent.async_remove()
assert len(hass.states.async_entity_ids()) == 0
async def test_async_remove_runs_callbacks(hass):
"""Test async_remove method when no platform set."""
result = []
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "test.test"
ent.async_on_remove(lambda: result.append(1))
await ent.async_remove()
assert len(result) == 1
async def test_set_context(hass):
"""Test setting context."""
context = Context()
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "hello.world"
ent.async_set_context(context)
await ent.async_update_ha_state()
assert hass.states.get("hello.world").context == context
async def test_set_context_expired(hass):
"""Test setting context."""
context = Context()
with patch.object(
entity.Entity, "context_recent_time", new_callable=PropertyMock
) as recent:
recent.return_value = timedelta(seconds=-5)
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "hello.world"
ent.async_set_context(context)
await ent.async_update_ha_state()
assert hass.states.get("hello.world").context != context
assert ent._context is None
assert ent._context_set is None
async def test_warn_disabled(hass, caplog):
"""Test we warn once if we write to a disabled entity."""
entry = entity_registry.RegistryEntry(
entity_id="hello.world",
unique_id="test-unique-id",
platform="test-platform",
disabled_by="user",
)
mock_registry(hass, {"hello.world": entry})
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "hello.world"
ent.registry_entry = entry
ent.platform = MagicMock(platform_name="test-platform")
caplog.clear()
ent.async_write_ha_state()
assert hass.states.get("hello.world") is None
assert "Entity hello.world is incorrectly being triggered" in caplog.text
caplog.clear()
ent.async_write_ha_state()
assert hass.states.get("hello.world") is None
assert caplog.text == ""
async def test_disabled_in_entity_registry(hass):
"""Test entity is removed if we disable entity registry entry."""
entry = entity_registry.RegistryEntry(
entity_id="hello.world",
unique_id="test-unique-id",
platform="test-platform",
disabled_by=None,
)
registry = mock_registry(hass, {"hello.world": entry})
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "hello.world"
ent.registry_entry = entry
assert ent.enabled is True
ent.add_to_platform_start(hass, MagicMock(platform_name="test-platform"), None)
await ent.add_to_platform_finish()
assert hass.states.get("hello.world") is not None
entry2 = registry.async_update_entity("hello.world", disabled_by="user")
await hass.async_block_till_done()
assert entry2 != entry
assert ent.registry_entry == entry2
assert ent.enabled is False
assert hass.states.get("hello.world") is None
entry3 = registry.async_update_entity("hello.world", disabled_by=None)
await hass.async_block_till_done()
assert entry3 != entry2
# Entry is no longer updated, entity is no longer tracking changes
assert ent.registry_entry == entry2
async def test_capability_attrs(hass):
"""Test we still include capabilities even when unavailable."""
with patch.object(
entity.Entity, "available", PropertyMock(return_value=False)
), patch.object(
entity.Entity,
"capability_attributes",
PropertyMock(return_value={"always": "there"}),
):
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "hello.world"
ent.async_write_ha_state()
state = hass.states.get("hello.world")
assert state is not None
assert state.state == STATE_UNAVAILABLE
assert state.attributes["always"] == "there"
async def test_warn_slow_write_state(hass, caplog):
"""Check that we log a warning if reading properties takes too long."""
mock_entity = entity.Entity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.platform = MagicMock(platform_name="hue")
with patch("homeassistant.helpers.entity.timer", side_effect=[0, 10]):
mock_entity.async_write_ha_state()
assert (
"Updating state for comp_test.test_entity "
"(<class 'homeassistant.helpers.entity.Entity'>) "
"took 10.000 seconds. Please create a bug report at "
"https://github.com/home-assistant/core/issues?"
"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22"
) in caplog.text
async def test_warn_slow_write_state_custom_component(hass, caplog):
"""Check that we log a warning if reading properties takes too long."""
class CustomComponentEntity(entity.Entity):
"""Custom component entity."""
__module__ = "custom_components.bla.sensor"
mock_entity = CustomComponentEntity()
mock_entity.hass = hass
mock_entity.entity_id = "comp_test.test_entity"
mock_entity.platform = MagicMock(platform_name="hue")
with patch("homeassistant.helpers.entity.timer", side_effect=[0, 10]):
mock_entity.async_write_ha_state()
assert (
"Updating state for comp_test.test_entity "
"(<class 'custom_components.bla.sensor.test_warn_slow_write_state_custom_component.<locals>.CustomComponentEntity'>) "
"took 10.000 seconds. Please report it to the custom component author."
) in caplog.text
async def test_setup_source(hass):
"""Check that we register sources correctly."""
platform = MockEntityPlatform(hass)
entity_platform = MockEntity(name="Platform Config Source")
await platform.async_add_entities([entity_platform])
platform.config_entry = MockConfigEntry()
entity_entry = MockEntity(name="Config Entry Source")
await platform.async_add_entities([entity_entry])
assert entity.entity_sources(hass) == {
"test_domain.platform_config_source": {
"source": entity.SOURCE_PLATFORM_CONFIG,
"domain": "test_platform",
},
"test_domain.config_entry_source": {
"source": entity.SOURCE_CONFIG_ENTRY,
"config_entry": platform.config_entry.entry_id,
"domain": "test_platform",
},
}
await platform.async_reset()
assert entity.entity_sources(hass) == {}
|
from datetime import timedelta
from functools import partial
import logging
import voluptuous as vol
from homeassistant.const import ( # noqa: F401 # STATE_PAUSED/IDLE are API
ATTR_BATTERY_LEVEL,
ATTR_COMMAND,
SERVICE_TOGGLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_IDLE,
STATE_ON,
STATE_PAUSED,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import Entity, ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.icon import icon_for_battery_level
from homeassistant.loader import bind_hass
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "vacuum"
SCAN_INTERVAL = timedelta(seconds=20)
ATTR_BATTERY_ICON = "battery_icon"
ATTR_CLEANED_AREA = "cleaned_area"
ATTR_FAN_SPEED = "fan_speed"
ATTR_FAN_SPEED_LIST = "fan_speed_list"
ATTR_PARAMS = "params"
ATTR_STATUS = "status"
SERVICE_CLEAN_SPOT = "clean_spot"
SERVICE_LOCATE = "locate"
SERVICE_RETURN_TO_BASE = "return_to_base"
SERVICE_SEND_COMMAND = "send_command"
SERVICE_SET_FAN_SPEED = "set_fan_speed"
SERVICE_START_PAUSE = "start_pause"
SERVICE_START = "start"
SERVICE_PAUSE = "pause"
SERVICE_STOP = "stop"
STATE_CLEANING = "cleaning"
STATE_DOCKED = "docked"
STATE_RETURNING = "returning"
STATE_ERROR = "error"
STATES = [STATE_CLEANING, STATE_DOCKED, STATE_RETURNING, STATE_ERROR]
DEFAULT_NAME = "Vacuum cleaner robot"
SUPPORT_TURN_ON = 1
SUPPORT_TURN_OFF = 2
SUPPORT_PAUSE = 4
SUPPORT_STOP = 8
SUPPORT_RETURN_HOME = 16
SUPPORT_FAN_SPEED = 32
SUPPORT_BATTERY = 64
SUPPORT_STATUS = 128
SUPPORT_SEND_COMMAND = 256
SUPPORT_LOCATE = 512
SUPPORT_CLEAN_SPOT = 1024
SUPPORT_MAP = 2048
SUPPORT_STATE = 4096
SUPPORT_START = 8192
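# Illustrative note (not from the original module): supported_features is a bitmask,
# so integrations combine the flags above with bitwise OR and consumers test them
# with bitwise AND, e.g.:
#
#   supported_features = SUPPORT_START | SUPPORT_STOP | SUPPORT_RETURN_HOME | SUPPORT_BATTERY
#   if supported_features & SUPPORT_BATTERY:
#       ...  # expose ATTR_BATTERY_LEVEL / ATTR_BATTERY_ICON in state_attributes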
@bind_hass
def is_on(hass, entity_id):
"""Return if the vacuum is on based on the statemachine."""
return hass.states.is_state(entity_id, STATE_ON)
async def async_setup(hass, config):
"""Set up the vacuum component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle")
component.async_register_entity_service(
SERVICE_START_PAUSE, {}, "async_start_pause"
)
component.async_register_entity_service(SERVICE_START, {}, "async_start")
component.async_register_entity_service(SERVICE_PAUSE, {}, "async_pause")
component.async_register_entity_service(
SERVICE_RETURN_TO_BASE, {}, "async_return_to_base"
)
component.async_register_entity_service(SERVICE_CLEAN_SPOT, {}, "async_clean_spot")
component.async_register_entity_service(SERVICE_LOCATE, {}, "async_locate")
component.async_register_entity_service(SERVICE_STOP, {}, "async_stop")
component.async_register_entity_service(
SERVICE_SET_FAN_SPEED,
{vol.Required(ATTR_FAN_SPEED): cv.string},
"async_set_fan_speed",
)
component.async_register_entity_service(
SERVICE_SEND_COMMAND,
{
vol.Required(ATTR_COMMAND): cv.string,
vol.Optional(ATTR_PARAMS): vol.Any(dict, cv.ensure_list),
},
"async_send_command",
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class _BaseVacuum(Entity):
"""Representation of a base vacuum.
Contains common properties and functions for all vacuum devices.
"""
@property
def supported_features(self):
"""Flag vacuum cleaner features that are supported."""
raise NotImplementedError()
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
return None
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner."""
raise NotImplementedError()
@property
def fan_speed(self):
"""Return the fan speed of the vacuum cleaner."""
return None
@property
def fan_speed_list(self):
"""Get the list of available fan speed steps of the vacuum cleaner."""
raise NotImplementedError()
@property
def capability_attributes(self):
"""Return capability attributes."""
if self.supported_features & SUPPORT_FAN_SPEED:
return {ATTR_FAN_SPEED_LIST: self.fan_speed_list}
@property
def state_attributes(self):
"""Return the state attributes of the vacuum cleaner."""
data = {}
if self.supported_features & SUPPORT_BATTERY:
data[ATTR_BATTERY_LEVEL] = self.battery_level
data[ATTR_BATTERY_ICON] = self.battery_icon
if self.supported_features & SUPPORT_FAN_SPEED:
data[ATTR_FAN_SPEED] = self.fan_speed
return data
def stop(self, **kwargs):
"""Stop the vacuum cleaner."""
raise NotImplementedError()
async def async_stop(self, **kwargs):
"""Stop the vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.stop, **kwargs))
def return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
raise NotImplementedError()
async def async_return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.return_to_base, **kwargs))
def clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
raise NotImplementedError()
async def async_clean_spot(self, **kwargs):
"""Perform a spot clean-up.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.clean_spot, **kwargs))
def locate(self, **kwargs):
"""Locate the vacuum cleaner."""
raise NotImplementedError()
async def async_locate(self, **kwargs):
"""Locate the vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.locate, **kwargs))
def set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
raise NotImplementedError()
async def async_set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(
partial(self.set_fan_speed, fan_speed, **kwargs)
)
def send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner."""
raise NotImplementedError()
async def async_send_command(self, command, params=None, **kwargs):
"""Send a command to a vacuum cleaner.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(
partial(self.send_command, command, params=params, **kwargs)
)
class VacuumEntity(_BaseVacuum, ToggleEntity):
"""Representation of a vacuum cleaner robot."""
@property
def status(self):
"""Return the status of the vacuum cleaner."""
return None
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner."""
charging = False
if self.status is not None:
charging = "charg" in self.status.lower()
return icon_for_battery_level(
battery_level=self.battery_level, charging=charging
)
@property
def state_attributes(self):
"""Return the state attributes of the vacuum cleaner."""
data = super().state_attributes
if self.supported_features & SUPPORT_STATUS:
data[ATTR_STATUS] = self.status
return data
def turn_on(self, **kwargs):
"""Turn the vacuum on and start cleaning."""
raise NotImplementedError()
async def async_turn_on(self, **kwargs):
"""Turn the vacuum on and start cleaning.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.turn_on, **kwargs))
def turn_off(self, **kwargs):
"""Turn the vacuum off stopping the cleaning and returning home."""
raise NotImplementedError()
async def async_turn_off(self, **kwargs):
"""Turn the vacuum off stopping the cleaning and returning home.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.turn_off, **kwargs))
def start_pause(self, **kwargs):
"""Start, pause or resume the cleaning task."""
raise NotImplementedError()
async def async_start_pause(self, **kwargs):
"""Start, pause or resume the cleaning task.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(partial(self.start_pause, **kwargs))
async def async_pause(self):
"""Not supported."""
async def async_start(self):
"""Not supported."""
class VacuumDevice(VacuumEntity):
"""Representation of a vacuum (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"VacuumDevice is deprecated, modify %s to extend VacuumEntity", cls.__name__
)
class StateVacuumEntity(_BaseVacuum):
"""Representation of a vacuum cleaner robot that supports states."""
@property
def state(self):
"""Return the state of the vacuum cleaner."""
return None
@property
def battery_icon(self):
"""Return the battery icon for the vacuum cleaner."""
charging = bool(self.state == STATE_DOCKED)
return icon_for_battery_level(
battery_level=self.battery_level, charging=charging
)
def start(self):
"""Start or resume the cleaning task."""
raise NotImplementedError()
async def async_start(self):
"""Start or resume the cleaning task.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(self.start)
def pause(self):
"""Pause the cleaning task."""
raise NotImplementedError()
async def async_pause(self):
"""Pause the cleaning task.
This method must be run in the event loop.
"""
await self.hass.async_add_executor_job(self.pause)
async def async_turn_on(self, **kwargs):
"""Not supported."""
async def async_turn_off(self, **kwargs):
"""Not supported."""
async def async_toggle(self, **kwargs):
"""Not supported."""
class StateVacuumDevice(StateVacuumEntity):
"""Representation of a vacuum (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"StateVacuumDevice is deprecated, modify %s to extend StateVacuumEntity",
cls.__name__,
)
|
import pytest
from homeassistant import config_entries
from homeassistant.components import dynalite
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
@pytest.mark.parametrize(
"first_con, second_con,exp_type, exp_result, exp_reason",
[
(True, True, "create_entry", "loaded", ""),
(False, False, "abort", "", "no_connection"),
(True, False, "create_entry", "setup_retry", ""),
],
)
async def test_flow(hass, first_con, second_con, exp_type, exp_result, exp_reason):
"""Run a flow with or without errors and return result."""
host = "1.2.3.4"
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
side_effect=[first_con, second_con],
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host},
)
await hass.async_block_till_done()
assert result["type"] == exp_type
if exp_result:
assert result["result"].state == exp_result
if exp_reason:
assert result["reason"] == exp_reason
async def test_existing(hass):
"""Test when the entry exists with the same config."""
host = "1.2.3.4"
MockConfigEntry(
domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}
).add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_existing_update(hass):
"""Test when the entry exists with a different config."""
host = "1.2.3.4"
port1 = 7777
port2 = 8888
entry = MockConfigEntry(
domain=dynalite.DOMAIN,
data={dynalite.CONF_HOST: host, dynalite.CONF_PORT: port1},
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
mock_dyn_dev().configure.assert_called_once()
assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host, dynalite.CONF_PORT: port2},
)
await hass.async_block_till_done()
assert mock_dyn_dev().configure.call_count == 2
assert mock_dyn_dev().configure.mock_calls[1][1][0]["port"] == port2
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_two_entries(hass):
"""Test when two different entries exist with different hosts."""
host1 = "1.2.3.4"
host2 = "5.6.7.8"
MockConfigEntry(
domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host1}
).add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host2},
)
assert result["type"] == "create_entry"
assert result["result"].state == "loaded"
|
import webbrowser
import voluptuous as vol
ATTR_URL = "url"
ATTR_URL_DEFAULT = "https://www.google.com"
DOMAIN = "browser"
SERVICE_BROWSE_URL = "browse_url"
SERVICE_BROWSE_URL_SCHEMA = vol.Schema(
{
# pylint: disable=no-value-for-parameter
vol.Required(ATTR_URL, default=ATTR_URL_DEFAULT): vol.Url()
}
)
def setup(hass, config):
"""Listen for browse_url events."""
hass.services.register(
DOMAIN,
SERVICE_BROWSE_URL,
lambda service: webbrowser.open(service.data[ATTR_URL]),
schema=SERVICE_BROWSE_URL_SCHEMA,
)
return True
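# Usage sketch (illustrative): once registered, the service can be invoked like any
# other Home Assistant service, e.g.:
#
#   hass.services.call(DOMAIN, SERVICE_BROWSE_URL, {ATTR_URL: "https://www.home-assistant.io"})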
|
import re
import sys
import json
import time
import types
import logging
from pscript import (py2js, JSString, RawJS, JSConstant, create_js_module,
get_all_std_names)
from pscript.stdlib import FUNCTION_PREFIX, METHOD_PREFIX
from .. import event
from ..event import Component, Property, loop
from ..event._js import create_js_component_class
from ._clientcore import bsdf
from ._component2 import BaseAppComponent, PyComponent, JsComponent, StubComponent
from ._asset import Asset, get_mod_name, module_is_package
from . import logger
pscript_types = type, types.FunctionType # class or function
if sys.version_info > (3, ):
json_types = None.__class__, bool, int, float, str, tuple, list, dict
else: # pragma: no cover
json_types = None.__class__, bool, int, float, basestring, tuple, list, dict # noqa, bah
# In essence, the idea of modules is all about propagating dependencies:
#
# * In PScript we detect unresolved dependencies in JS code, and move these up
# the namespace stack.
# * The create_js_component_class() function and AppComponentMeta class collect the
# dependencies from the different code pieces.
# * In JSModule we resolve some dependencies and let other propagate into
# module dependencies.
# * In the Bundle class, again some dependencies are resolved due to bundling,
# and others propagate to dependencies between bundles.
def mangle_dotted_vars(jscode, names_to_mangle):
""" Mangle the names of unknown variables that have dots in them, so that
they become simple identifiers. We use $ because that's not valid in Python
(i.e. no name clashes).
"""
for name in list(names_to_mangle):
if '.' in name:
# Replace dots with $
name1 = name.replace('.', r'\.')
name2 = name.replace('.', '$')
jscode = re.sub(r"\b(" + name1 + r")\b", name2, jscode,
flags=re.UNICODE | re.MULTILINE)
# Fix calls with *args to funcs that have dots in name
jscode = jscode.replace(
name2 + '.apply(' + name2.rsplit('$', 1)[0] + ', [].concat',
name2 + '.apply(null, [].concat')
return jscode
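# Illustrative example of the mangling above (assumed inputs, not from the source):
#
#   mangle_dotted_vars('var b = new ui.Button();', ['ui.Button'])
#   # -> 'var b = new ui$Button();'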
def is_pscript_module(m):
return (getattr(m, '__pscript__', False) or
getattr(m, '__pyscript__', False))
class JSModule:
"""
A JSModule object represents the JavaScript (and CSS) corresponding
to a Python module, which either defines one or more
PyComponent/JsComponent classes, or PScript-transpilable functions or
classes. Intended for internal use only.
Modules are collected in a "store" which is simply a dictionary. The
flexx asset system has this dict in ``app.assets.modules``.
The module contains the JS corresponding to the variables that are
marked as used (by calling the ``add_variable()`` method), and the
variables that are used by the included JavaScript.
The JS code includes:
* The JS code corresponding to all used Component classes defined in the module.
* The transpiled JS from (PScript-compatible) functions and classes that
are defined in this module and marked as used.
* Variables with json-compatible values that are used by JS in this module.
* Imports of names from other modules.
* ... unless this module defines ``__pscript__ = True``, in which case
the module is transpiled as a whole.
A module can also have dependencies:
* The modules that define the base classes of the classes defined
in this module.
* The modules that define functions/classes that are used by this module.
* Assets that are present in the module.
Notes on how the Flexx asset system uses modules:
The asset system will generate JSModule objects for all Python modules
that define PyComponent or JsComponent subclasses. The session is aware of
the Component classes that it uses (and their base classes), and can
therefore determine what modules (and assets) need to be loaded.
"""
def __init__(self, name, store):
if not isinstance(name, str):
raise TypeError('JSModule needs a str name.')
if not isinstance(store, dict):
raise TypeError('JSModule needs a dict store.')
# Resolve name of Python module
py_name = name
if name.endswith('.__init__'):
py_name = name.rsplit('.', 1)[0]
if py_name not in sys.modules:
raise ValueError("Cannot find Python module coresponding to %s." % name)
# Store module and name
self._pymodule = sys.modules[py_name]
self._name = get_mod_name(self._pymodule)
# Check if name matches the kind of module
is_package = module_is_package(self._pymodule)
if is_package and not name.endswith('.__init__'):
raise ValueError('Modules representing the __init__ of a package '
'should end with ".__init__".')
elif not is_package and name.endswith('.__init__'):
raise ValueError('Plain modules should not end with ".__init__".')
# Self-register
self._store = store
if self.name in self._store:
raise RuntimeError('Module %s already exists!' % self.name)
self._store[self.name] = self
# Bookkeeping content of the module
self._provided_names = set()
self._imported_names = set()
# Stuff defined in this module (in JS)
# We use dicts so that we can "overwrite" them in interactive mode
self._component_classes = {}
self._pscript_code = {}
self._js_values = {}
# Dependencies
self._deps = {} # mod_name -> [mod_as_name, *imports]
# Caches
self._js_cache = None
self._css_cache = None
if is_pscript_module(self._pymodule):
# PScript module; transpile as a whole
js = py2js(self._pymodule, inline_stdlib=False, docstrings=False)
self._pscript_code['__all__'] = js
self._provided_names.update([n for n in js.meta['vars_defined']
if not n.startswith('_')])
else:
self._init_default_objects()
def __repr__(self):
return '<%s %s with %i definitions>' % (self.__class__.__name__,
self.name,
len(self._provided_names))
def _init_default_objects(self):
# Component classes
# Add property classes ...
for name, val in self._pymodule.__dict__.items():
if isinstance(val, type) and issubclass(val, Property):
self.add_variable(name)
@property
def name(self):
""" The (qualified) name of this module.
"""
return self._name
@property
def filename(self):
""" The filename of the Python file that defines
(the contents of) this module. Can be '__main__'.
"""
# E.g. __main__ does not have __file__
return getattr(self._pymodule, '__file__', self.name)
@property
def deps(self):
""" The (unsorted) set of dependencies (names of other modules) for
this module.
"""
return set(self._deps.keys())
@property
def component_classes(self):
""" The PyComponent and JsComponent classes defined in this module.
"""
return set(self._component_classes.values())
def _import(self, mod_name, name, as_name):
""" Import a name from another module. This also ensures that the
other module exists.
"""
# Create module, if we must
if mod_name == self.name:
return self
elif mod_name not in self._deps:
if mod_name not in self._store:
JSModule(mod_name, store=self._store)
m = self._store[mod_name]
# Define imports and if necessary, the name that we import
imports = self._deps.setdefault(mod_name, [mod_name])
if name:
self._imported_names.add(as_name)
m.add_variable(name)
line = '%s as %s' % (name, as_name)
if line not in imports:
imports.append(line)
return m
@property
def variables(self):
""" The names of variables provided by this module.
A name passed to add_variable might not end up in this list
if it's imported into this module rather than defined here.
"""
return self._provided_names
def add_variable(self, name, is_global=False, _dep_stack=None):
""" Mark the variable with the given name as used by JavaScript.
The corresponding object must be a module, Component, class or function,
or a json serializable value.
If the object is defined here (or a json value) it will add JS to
this module. Otherwise this module will import the name from
another module.
If ``is_global``, the name is considered global; it may be declared in
this module, but it may also be a JS global. So we try to resolve the
name, but do not care if it fails.
"""
_dep_stack = _dep_stack or []
if name in self._imported_names:
return
elif name in _dep_stack:
return # avoid dependency recursion
elif name in ('Infinity', 'NaN'):
return # stubs
elif name in self._provided_names and self.name != '__main__':
return # in __main__ we allow redefinitions
if is_pscript_module(self._pymodule):
return # everything is transpiled and exported already
_dep_stack.append(name)
# Try getting value. We warn if there is no variable to match, but
# if we do find a value we're either including it or raising an error
try:
val = self._pymodule
nameparts = name.split('.')
for i in range(len(nameparts)):
val = getattr(val, nameparts[i])
# Maybe we "know" (this kind of) value ...
if isinstance(val, json_types):
name = '.'.join(nameparts[:i+1])
break
elif isinstance(val, type) and issubclass(val, JsComponent):
name = '.'.join(nameparts[:i+1])
break
elif val is loop and i == 0:
return self._add_dep_from_event_module('loop', nameparts[0])
elif isinstance(val, (JSConstant, Asset)):
return # stubs
elif isinstance(val, logging.Logger) and i == 0:
# todo: hehe, we can do more here (issue #179)
return self._add_dep_from_event_module('logger', nameparts[0])
except AttributeError:
msg = 'JS in "%s" uses undefined variable %r.' % (self.filename, name)
if is_global:
pass # it may be a JS-global
elif val is self._pymodule:
# Did not resolve first part of the name, so cannot be a JS global
logger.warning(msg)
else:
raise RuntimeError(msg) # E.g. typo in ui.Buttom
return
# Mark dirty
self._changed_time = time.time()
self._js_cache = self._css_cache = None
if isinstance(val, types.ModuleType):
# Modules as a whole can be converted if it's a PScript module
if is_pscript_module(val):
self._import(val.__name__, None, None)
self._deps[val.__name__][0] = name # set/overwrite as-name
else:
t = 'JS in "%s" cannot use module %s directly unless it defines %s.'
raise ValueError(t % (self.filename, val.__name__, '"__pscript__"'))
elif isinstance(val, type) and issubclass(val, Component):
if val is Component:
return self._add_dep_from_event_module('Component')
elif val is BaseAppComponent or val.mro()[1] is BaseAppComponent:
# BaseAppComponent, PyComponent, JsComponent or StubComponent
# are covered in _component2.py
return
elif issubclass(val, (PyComponent, JsComponent)):
# App Component class; we know that we can get the JS for this
if val.__jsmodule__ == self.name:
# Define here
self._provided_names.add(name)
self._component_classes[name] = val
# Recurse
self._collect_dependencies_from_bases(val)
self._collect_dependencies(val.JS.CODE, _dep_stack)
else:
# Import from another module
self._import(val.__jsmodule__, val.__name__, name)
else:
# Regular Component, similar to other classes,
# but using create_js_component_class()
mod_name = get_mod_name(val)
if mod_name == self.name:
# Define here
js = create_js_component_class(val, val.__name__)
self._provided_names.add(name)
self._pscript_code[name] = js
# Recurse
self._collect_dependencies_from_bases(val)
self._collect_dependencies(js, _dep_stack)
else:
# Import from another module
self._import(mod_name, val.__name__, name)
elif isinstance(val, type) and issubclass(val, bsdf.Extension):
# A bit hacky mechanism to define BSDF extensions that also work in JS.
# todo: can we make this better? See also app/_component2.py (issue #429)
js = 'var %s = {name: "%s"' % (name, val.name)
for mname in ('match', 'encode', 'decode'):
func = getattr(val, mname + '_js')
funccode = py2js(func, indent=1, inline_stdlib=False, docstrings=False)
js += ',\n ' + mname + ':' + funccode.split('=', 1)[1].rstrip(' \n;')
self._collect_dependencies(funccode, _dep_stack)
js += '};\n'
js += 'serializer.add_extension(%s);\n' % name
js = JSString(js)
js.meta = funccode.meta
self._pscript_code[name] = js
self._deps.setdefault('flexx.app._clientcore',
['flexx.app._clientcore']).append('serializer')
elif isinstance(val, pscript_types) and hasattr(val, '__module__'):
# Looks like something we can convert using PScript
mod_name = get_mod_name(val)
if mod_name == self.name:
# Define here
try:
js = py2js(val, inline_stdlib=False, docstrings=False)
except Exception as err:
t = 'JS in "%s" uses %r but cannot transpile it with PScript:\n%s'
raise ValueError(t % (self.filename, name, str(err)))
self._provided_names.add(name)
self._pscript_code[name] = js
# Recurse
if isinstance(val, type):
self._collect_dependencies_from_bases(val)
self._collect_dependencies(js, _dep_stack)
elif mod_name.endswith('.event._property'):
return self._add_dep_from_event_module(name.split('.')[-1], name)
else:
# Import from another module
self._import(mod_name, val.__name__, name)
elif isinstance(val, RawJS):
# Verbatim JS
if val.__module__ == self.name:
self._provided_names.add(name)
self._js_values[name] = val.get_code()
else:
self._import(val.__module__, val.get_defined_name(name), name)
elif isinstance(val, json_types):
# Looks like something we can serialize
# Unlike with RawJS, we have no way to determine where it is defined
try:
js = json.dumps(val)
except Exception as err:
t = 'JS in "%s" uses %r but cannot serialize that value:\n%s'
raise ValueError(t % (self.filename, name, str(err)))
self._provided_names.add(name)
self._js_values[name] = js
elif (getattr(val, '__module__', None) and
is_pscript_module(sys.modules[val.__module__]) and
val is getattr(sys.modules[val.__module__], name, 'unlikely-val')):
# An instance from a pscript module!
# We cannot know the "name" as its known in the module, but
# we assume that its the same as as_name and test whether
# it matches in the test above.
self._import(val.__module__, name, name)
else:
# Cannot convert to JS
t = 'JS in "%s" uses %r but cannot convert %s to JS.'
raise ValueError(t % (self.filename, name, val.__class__))
def _collect_dependencies(self, js, _dep_stack):
"""
Collect dependencies corresponding to names used in the JS.
"""
vars_unknown = js.meta['vars_unknown']
vars_global = js.meta['vars_global']
for name in reversed(sorted(vars_unknown)):
if name.startswith('event.'):
self._deps.setdefault('flexx.event.js', ['event'])
elif self._name_ispropclass(name):
self._add_dep_from_event_module(name, name)
else:
self.add_variable(name, _dep_stack=_dep_stack)
for name in reversed(sorted(vars_global)):
self.add_variable(name, True, _dep_stack=_dep_stack)
def _name_ispropclass(self, name):
ob = getattr(event._property, name, None)
if ob is not None:
return isinstance(ob, type) and issubclass(ob, Property)
return False
def _collect_dependencies_from_bases(self, cls):
"""
Collect dependencies based on the base classes of a class.
"""
if len(cls.__bases__) != 1: # pragma: no cover
raise TypeError('PScript classes do not (yet) support '
'multiple inheritance.')
if cls is PyComponent or cls is JsComponent or cls is StubComponent:
return self._add_dep_from_event_module('Component')
for base_cls in cls.__bases__:
if base_cls is object:
return
elif base_cls is Component:
return self._add_dep_from_event_module('Component')
elif base_cls.__module__.endswith('.event._property'): # base properties
return self._add_dep_from_event_module(cls.__name__)
m = self._import(get_mod_name(base_cls),
base_cls.__name__, base_cls.__name__)
m.add_variable(base_cls.__name__) # note: m can be self, which is ok
def _add_dep_from_event_module(self, name, asname=None):
asname = asname or name
entry = '%s as %s' % (name, asname)
imports = self._deps.setdefault('flexx.event.js', ['event'])
self._imported_names.add(asname)
if entry not in imports:
imports.append(entry)
def get_js(self):
""" Get the JS code for this module.
"""
if self._js_cache is None:
# Collect JS and sort by linenr
js = [cls.JS.CODE for cls in self._component_classes.values()]
js += list(self._pscript_code.values())
js.sort(key=lambda x: x.meta['linenr'])
used_std_functions, used_std_methods = set(), set()
for code in js:
used_std_functions.update(code.meta['std_functions'])
used_std_methods.update(code.meta['std_methods'])
# Mangle dotted names
for i in range(len(js)):
js[i] = mangle_dotted_vars(js[i], self._imported_names)
# Insert serialized values
value_lines = []
for name in sorted(self._js_values):
if '.' in name:
for i in range(len(js)):
js[i] = mangle_dotted_vars(js[i], [name])
value_lines.append('var %s = %s;' % (name.replace('.', '$'),
self._js_values[name]))
js.insert(0, '')
js.insert(0, '\n'.join(value_lines))
# Prepare imports and exports
exports = tuple(sorted(n for n in self._provided_names if '.' not in n))
imports = ['pscript-std.js as _py']
# Handle dependency imports
for dep_name in reversed(sorted(self._deps)):
names = self._deps[dep_name]
mod_name = names[0].replace('.', '$') # mangle module name
imports.append(dep_name + ' as ' + mod_name)
for name in names[1:]:
as_name = name
if ' as ' in name:
name, _, as_name = name.partition(' as ')
as_name = as_name.replace('.', '$') # mangle dotted name
pieces = ['%s = %s.%s' % (as_name, mod_name, name)]
js.insert(0, 'var ' + (', '.join(pieces)) + ';')
# Import stdlib
func_names, method_names = get_all_std_names()
pre1 = ', '.join(['%s%s = _py.%s%s' %
(FUNCTION_PREFIX, n, FUNCTION_PREFIX, n)
for n in sorted(used_std_functions)])
pre2 = ', '.join(['%s%s = _py.%s%s' %
(METHOD_PREFIX, n, METHOD_PREFIX, n)
for n in sorted(used_std_methods)])
if pre2:
js.insert(0, 'var %s;' % pre2)
if pre1:
js.insert(0, 'var %s;' % pre1)
# Create module
self._js_cache = create_js_module(self.name, '\n\n'.join(js),
imports, exports, 'amd-flexx')
return self._js_cache
def get_css(self):
""" Get the CSS code for this module.
"""
if self._css_cache is None:
css = []
sorter = lambda x: x.JS.CODE.meta['linenr']
for cls in sorted(self._component_classes.values(), key=sorter):
css.append(cls.CSS)
self._css_cache = '\n\n'.join(css)
return self._css_cache
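# Minimal usage sketch (assumptions: "myapp.widgets" is an importable Python module
# that defines a JsComponent subclass named MyWidget; both names are hypothetical):
#
#   store = {}
#   mod = JSModule('myapp.widgets', store)
#   mod.add_variable('MyWidget')   # mark the class as used by JS
#   js = mod.get_js()              # AMD-style JS for this module
#   css = mod.get_css()            # CSS collected from its component classes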
|
import os
from stash.tests.stashtest import StashTestCase
class Md5sumTests(StashTestCase):
"""tests for the md5sum command."""
def setUp(self):
"""setup the tests"""
self.cwd = self.get_data_path()
StashTestCase.setUp(self)
def get_data_path(self):
"""return the data/ sibling path"""
return os.path.abspath(os.path.join(os.path.dirname(__file__), "data"))
def test_help(self):
"""test md5sum --help"""
output = self.run_command("md5sum --help", exitcode=0)
# check for code words in output
self.assertIn("md5sum", output)
self.assertIn("-h", output)
self.assertIn("-c", output)
def test_filehash(self):
"""tests the hashes of the files in data/"""
fp = self.get_data_path()
for fn in os.listdir(fp):
if "." in fn:
# file used for something else
continue
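# by convention, these data files are named after their own md5 hash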
expected_hash = fn
fullp = os.path.join(fp, fn)
output = self.run_command("md5sum " + fullp, exitcode=0)
result = output.split(" ")[0]
self.assertEqual(result, expected_hash)
def test_checkhash(self):
"""test md5sum -c"""
output = self.run_command("md5sum -c results.md5sum", exitcode=0)
self.assertIn("Pass", output)
self.assertNotIn("Fail", output)
def test_checkhash_fail(self):
"""test failure md5sum -c with invalid data"""
output = self.run_command("md5sum -c wrong_results.md5sum", exitcode=1)
self.assertIn("Pass", output) # some files should have the correct hash
self.assertIn("Fail", output)
def test_hash_stdin_implicit(self):
"""test hashing of stdin without arg"""
output = self.run_command("echo test | md5sum", exitcode=0).replace("\n", "")
expected = "d8e8fca2dc0f896fd7cb4cb0031ba249"
self.assertEqual(output, expected)
def test_hash_stdin_explicit(self):
"""test hashing of stdin with '-' arg"""
output = self.run_command("echo test | md5sum -", exitcode=0).replace("\n", "")
expected = "d8e8fca2dc0f896fd7cb4cb0031ba249"
self.assertEqual(output, expected)
|
import argparse
import matplotlib.pyplot as plt
import chainer
from chainercv.datasets import coco_bbox_label_names
from chainercv.links import LightHeadRCNNResNet101
from chainercv import utils
from chainercv.visualizations import vis_bbox
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=-1)
parser.add_argument('--pretrained-model', default='coco')
parser.add_argument('image')
args = parser.parse_args()
model = LightHeadRCNNResNet101(
n_fg_class=len(coco_bbox_label_names),
pretrained_model=args.pretrained_model)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
img = utils.read_image(args.image, color=True)
bboxes, labels, scores = model.predict([img])
bbox, label, score = bboxes[0], labels[0], scores[0]
vis_bbox(
img, bbox, label, score, label_names=coco_bbox_label_names)
plt.show()
if __name__ == '__main__':
main()
|
import pyvera as pv
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, new_simple_controller_config
from tests.async_mock import MagicMock
async def test_cover(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device = MagicMock(spec=pv.VeraCurtain) # type: pv.VeraCurtain
vera_device.device_id = 1
vera_device.vera_device_id = vera_device.device_id
vera_device.name = "dev1"
vera_device.category = pv.CATEGORY_CURTAIN
vera_device.is_closed = False
vera_device.get_level.return_value = 0
entity_id = "cover.dev1_1"
component_data = await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(devices=(vera_device,)),
)
update_callback = component_data.controller_data[0].update_callback
assert hass.states.get(entity_id).state == "closed"
assert hass.states.get(entity_id).attributes["current_position"] == 0
await hass.services.async_call(
"cover",
"open_cover",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.open.assert_called()
vera_device.is_open.return_value = True
vera_device.get_level.return_value = 100
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "open"
assert hass.states.get(entity_id).attributes["current_position"] == 100
await hass.services.async_call(
"cover",
"set_cover_position",
{"entity_id": entity_id, "position": 50},
)
await hass.async_block_till_done()
vera_device.set_level.assert_called_with(50)
vera_device.is_open.return_value = True
vera_device.get_level.return_value = 50
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "open"
assert hass.states.get(entity_id).attributes["current_position"] == 50
await hass.services.async_call(
"cover",
"stop_cover",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.stop.assert_called()
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "open"
assert hass.states.get(entity_id).attributes["current_position"] == 50
await hass.services.async_call(
"cover",
"close_cover",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.close.assert_called()
vera_device.is_open.return_value = False
vera_device.get_level.return_value = 0
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "closed"
assert hass.states.get(entity_id).attributes["current_position"] == 00
|
from __future__ import division
import argparse
import numpy as np
import six
import chainer
from chainer.dataset.convert import _concat_arrays
from chainer.dataset.convert import to_device
from chainer.datasets import TransformDataset
from chainer.training import extensions
from chainer.training.triggers import ManualScheduleTrigger
from chainercv.datasets import sbd_instance_segmentation_label_names
from chainercv.datasets import SBDInstanceSegmentationDataset
from chainercv.experimental.links import FCISResNet101
from chainercv.experimental.links import FCISTrainChain
from chainercv.extensions import InstanceSegmentationVOCEvaluator
from chainercv.links.model.ssd import GradientScaling
from chainercv import transforms
from chainercv.utils.mask.mask_to_bbox import mask_to_bbox
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
def concat_examples(batch, device=None):
# batch: img, mask, label, scale
if len(batch) == 0:
raise ValueError('batch is empty')
first_elem = batch[0]
result = []
for i in six.moves.range(len(first_elem)):
array = _concat_arrays([example[i] for example in batch], None)
if i == 0: # img
result.append(to_device(device, array))
else:
result.append(array)
return tuple(result)
class Transform(object):
def __init__(self, fcis):
self.fcis = fcis
def __call__(self, in_data):
img, mask, label = in_data
bbox = mask_to_bbox(mask)
_, orig_H, orig_W = img.shape
img = self.fcis.prepare(img)
_, H, W = img.shape
scale = H / orig_H
mask = transforms.resize(mask.astype(np.float32), (H, W))
bbox = transforms.resize_bbox(bbox, (orig_H, orig_W), (H, W))
img, params = transforms.random_flip(
img, x_random=True, return_param=True)
mask = transforms.flip(mask, x_flip=params['x_flip'])
bbox = transforms.flip_bbox(bbox, (H, W), x_flip=params['x_flip'])
return img, mask, label, bbox, scale
def main():
parser = argparse.ArgumentParser(
description='ChainerCV training example: FCIS')
parser.add_argument('--gpu', '-g', type=int, default=-1)
parser.add_argument('--out', '-o', default='result',
help='Output directory')
parser.add_argument('--seed', '-s', type=int, default=0)
parser.add_argument('--lr', '-l', type=float, default=0.0005)
parser.add_argument(
'--lr-cooldown-factor', '-lcf', type=float, default=0.1)
parser.add_argument('--epoch', '-e', type=int, default=42)
parser.add_argument('--cooldown-epoch', '-ce', type=int, default=28)
args = parser.parse_args()
np.random.seed(args.seed)
# dataset
train_dataset = SBDInstanceSegmentationDataset(split='train')
test_dataset = SBDInstanceSegmentationDataset(split='val')
# model
fcis = FCISResNet101(
n_fg_class=len(sbd_instance_segmentation_label_names),
pretrained_model='imagenet', iter2=False)
fcis.use_preset('evaluate')
model = FCISTrainChain(fcis)
# gpu
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
# optimizer
optimizer = chainer.optimizers.MomentumSGD(lr=args.lr, momentum=0.9)
optimizer.setup(model)
model.fcis.head.conv1.W.update_rule.add_hook(GradientScaling(3.0))
model.fcis.head.conv1.b.update_rule.add_hook(GradientScaling(3.0))
optimizer.add_hook(chainer.optimizer.WeightDecay(rate=0.0005))
for param in model.params():
if param.name in ['beta', 'gamma']:
param.update_rule.enabled = False
model.fcis.extractor.conv1.disable_update()
model.fcis.extractor.res2.disable_update()
train_dataset = TransformDataset(
train_dataset, Transform(model.fcis))
# iterator
train_iter = chainer.iterators.SerialIterator(
train_dataset, batch_size=1)
test_iter = chainer.iterators.SerialIterator(
test_dataset, batch_size=1, repeat=False, shuffle=False)
updater = chainer.training.updater.StandardUpdater(
train_iter, optimizer, converter=concat_examples,
device=args.gpu)
trainer = chainer.training.Trainer(
updater, (args.epoch, 'epoch'), out=args.out)
# lr scheduler
trainer.extend(
chainer.training.extensions.ExponentialShift(
'lr', args.lr_cooldown_factor, init=args.lr),
trigger=(args.cooldown_epoch, 'epoch'))
# interval
log_interval = 100, 'iteration'
plot_interval = 3000, 'iteration'
print_interval = 20, 'iteration'
# training extensions
trainer.extend(
extensions.snapshot_object(
model.fcis, filename='snapshot_model.npz'),
trigger=(args.epoch, 'epoch'))
trainer.extend(
extensions.observe_lr(),
trigger=log_interval)
trainer.extend(
extensions.LogReport(log_name='log.json', trigger=log_interval))
trainer.extend(extensions.PrintReport([
'iteration', 'epoch', 'elapsed_time', 'lr',
'main/loss',
'main/rpn_loc_loss',
'main/rpn_cls_loss',
'main/roi_loc_loss',
'main/roi_cls_loss',
'main/roi_mask_loss',
'validation/main/map',
]), trigger=print_interval)
trainer.extend(
extensions.ProgressBar(update_interval=10))
if extensions.PlotReport.available():
trainer.extend(
extensions.PlotReport(
['main/loss'],
file_name='loss.png', trigger=plot_interval),
trigger=plot_interval)
trainer.extend(
InstanceSegmentationVOCEvaluator(
test_iter, model.fcis,
iou_thresh=0.5, use_07_metric=True,
label_names=sbd_instance_segmentation_label_names),
trigger=ManualScheduleTrigger(
[len(train_dataset) * args.cooldown_epoch,
len(train_dataset) * args.epoch], 'iteration'))
trainer.extend(extensions.dump_graph('main/loss'))
trainer.run()
if __name__ == '__main__':
main()
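# Usage sketch (the script name is a placeholder; the flags and defaults come from
# the argparse definitions above):
#   python train.py --gpu 0 --out result --lr 0.0005 --epoch 42 --cooldown-epoch 28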
|
import logging
import voluptuous as vol
from zoneminder.monitor import TimePeriod
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from . import DOMAIN as ZONEMINDER_DOMAIN
_LOGGER = logging.getLogger(__name__)
CONF_INCLUDE_ARCHIVED = "include_archived"
DEFAULT_INCLUDE_ARCHIVED = False
SENSOR_TYPES = {
"all": ["Events"],
"hour": ["Events Last Hour"],
"day": ["Events Last Day"],
"week": ["Events Last Week"],
"month": ["Events Last Month"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_INCLUDE_ARCHIVED, default=DEFAULT_INCLUDE_ARCHIVED
): cv.boolean,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["all"]): vol.All(
cv.ensure_list, [vol.In(list(SENSOR_TYPES))]
),
}
)
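# Configuration sketch for this sensor platform. The option keys come from the
# schema above; the "zoneminder" platform name is an assumption about the parent
# integration, and the condition names are keys of SENSOR_TYPES:
#
#   sensor:
#     - platform: zoneminder
#       include_archived: false
#       monitored_conditions:
#         - all
#         - hour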
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ZoneMinder sensor platform."""
include_archived = config.get(CONF_INCLUDE_ARCHIVED)
sensors = []
for zm_client in hass.data[ZONEMINDER_DOMAIN].values():
monitors = zm_client.get_monitors()
if not monitors:
_LOGGER.warning("Could not fetch any monitors from ZoneMinder")
for monitor in monitors:
sensors.append(ZMSensorMonitors(monitor))
for sensor in config[CONF_MONITORED_CONDITIONS]:
sensors.append(ZMSensorEvents(monitor, include_archived, sensor))
sensors.append(ZMSensorRunState(zm_client))
add_entities(sensors)
class ZMSensorMonitors(Entity):
"""Get the status of each ZoneMinder monitor."""
def __init__(self, monitor):
"""Initialize monitor sensor."""
self._monitor = monitor
self._state = None
self._is_available = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._monitor.name} Status"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return True if Monitor is available."""
return self._is_available
def update(self):
"""Update the sensor."""
state = self._monitor.function
if not state:
self._state = None
else:
self._state = state.value
self._is_available = self._monitor.is_available
class ZMSensorEvents(Entity):
"""Get the number of events for each monitor."""
def __init__(self, monitor, include_archived, sensor_type):
"""Initialize event sensor."""
self._monitor = monitor
self._include_archived = include_archived
self.time_period = TimePeriod.get_time_period(sensor_type)
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._monitor.name} {self.time_period.title}"
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return "Events"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Update the sensor."""
self._state = self._monitor.get_events(self.time_period, self._include_archived)
class ZMSensorRunState(Entity):
"""Get the ZoneMinder run state."""
def __init__(self, client):
"""Initialize run state sensor."""
self._state = None
self._is_available = None
self._client = client
@property
def name(self):
"""Return the name of the sensor."""
return "Run State"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return True if ZoneMinder is available."""
return self._is_available
def update(self):
"""Update the sensor."""
self._state = self._client.get_active_state()
self._is_available = self._client.is_available
|
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
AREA_SQUARE_METERS,
CONF_MONITORED_CONDITIONS,
PERCENTAGE,
PRESSURE_INHG,
PRESSURE_MBAR,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from . import DOMAIN
# These are the available sensors
SENSOR_TYPES = [
"Temperature",
"Humidity",
"Pressure",
"Luminance",
"UVIndex",
"Voltage",
]
# Sensor units - these do not currently align with the API documentation
SENSOR_UNITS_IMPERIAL = {
"Temperature": TEMP_FAHRENHEIT,
"Humidity": PERCENTAGE,
"Pressure": PRESSURE_INHG,
"Luminance": f"cd/{AREA_SQUARE_METERS}",
"Voltage": "mV",
}
# Metric units
SENSOR_UNITS_METRIC = {
"Temperature": TEMP_CELSIUS,
"Humidity": PERCENTAGE,
"Pressure": PRESSURE_MBAR,
"Luminance": f"cd/{AREA_SQUARE_METERS}",
"Voltage": "mV",
}
# Which sensors to format numerically
FORMAT_NUMBERS = ["Temperature", "Pressure", "Voltage"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
)
}
)
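# Configuration sketch for this sensor platform. The "bloomsky" platform name is
# an assumption about the parent integration; the condition names come from
# SENSOR_TYPES above:
#
#   sensor:
#     - platform: bloomsky
#       monitored_conditions:
#         - Temperature
#         - Humidity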
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the available BloomSky weather sensors."""
# Default needed in case of discovery
if discovery_info is not None:
return
sensors = config[CONF_MONITORED_CONDITIONS]
bloomsky = hass.data[DOMAIN]
for device in bloomsky.devices.values():
for variable in sensors:
add_entities([BloomSkySensor(bloomsky, device, variable)], True)
class BloomSkySensor(Entity):
"""Representation of a single sensor in a BloomSky device."""
def __init__(self, bs, device, sensor_name):
"""Initialize a BloomSky sensor."""
self._bloomsky = bs
self._device_id = device["DeviceID"]
self._sensor_name = sensor_name
self._name = f"{device['DeviceName']} {sensor_name}"
self._state = None
self._unique_id = f"{self._device_id}-{self._sensor_name}"
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def name(self):
"""Return the name of the BloomSky device and this sensor."""
return self._name
@property
def state(self):
"""Return the current state, eg. value, of this sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the sensor units."""
if self._bloomsky.is_metric:
return SENSOR_UNITS_METRIC.get(self._sensor_name, None)
return SENSOR_UNITS_IMPERIAL.get(self._sensor_name, None)
def update(self):
"""Request an update from the BloomSky API."""
self._bloomsky.refresh_devices()
state = self._bloomsky.devices[self._device_id]["Data"][self._sensor_name]
if self._sensor_name in FORMAT_NUMBERS:
self._state = f"{state:.2f}"
else:
self._state = state
|
__docformat__ = "restructuredtext en"
class Interface(object):
"""Base class for interfaces."""
def is_implemented_by(cls, instance):
return implements(instance, cls)
is_implemented_by = classmethod(is_implemented_by)
def implements(obj, interface):
"""Return true if the give object (maybe an instance or class) implements
the interface.
"""
kimplements = getattr(obj, '__implements__', ())
if not isinstance(kimplements, (list, tuple)):
kimplements = (kimplements,)
for implementedinterface in kimplements:
if issubclass(implementedinterface, interface):
return True
return False
def extend(klass, interface, _recurs=False):
"""Add interface to klass'__implements__ if not already implemented in.
If klass is subclassed, ensure subclasses __implements__ it as well.
NOTE: klass should be e new class.
"""
if not implements(klass, interface):
try:
kimplements = klass.__implements__
kimplementsklass = type(kimplements)
kimplements = list(kimplements)
except AttributeError:
kimplementsklass = tuple
kimplements = []
kimplements.append(interface)
klass.__implements__ = kimplementsklass(kimplements)
for subklass in klass.__subclasses__():
extend(subklass, interface, _recurs=True)
elif _recurs:
for subklass in klass.__subclasses__():
extend(subklass, interface, _recurs=True)
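# Usage sketch of the helpers above (illustrative classes only):
#
#   class IBreakable(Interface):
#       """Interface for breakable things."""
#
#   class IWashable(Interface):
#       """Interface for washable things."""
#
#   class Plate(object):
#       __implements__ = (IBreakable,)
#
#   implements(Plate, IBreakable)        # True
#   IBreakable.is_implemented_by(Plate)  # True
#   implements(Plate, IWashable)         # False
#   extend(Plate, IWashable)             # appends IWashable to Plate.__implements__
#   implements(Plate, IWashable)         # True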
|
import logging
import voluptuous as vol
from homeassistant.components import frontend
from homeassistant.config import async_hass_config_yaml, async_process_component_config
from homeassistant.const import CONF_FILENAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import collection, config_validation as cv
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceCallType
from homeassistant.loader import async_get_integration
from homeassistant.util import sanitize_path
from . import dashboard, resources, websocket
from .const import (
CONF_ICON,
CONF_MODE,
CONF_REQUIRE_ADMIN,
CONF_RESOURCES,
CONF_SHOW_IN_SIDEBAR,
CONF_TITLE,
CONF_URL_PATH,
DASHBOARD_BASE_CREATE_FIELDS,
DEFAULT_ICON,
DOMAIN,
MODE_STORAGE,
MODE_YAML,
RESOURCE_CREATE_FIELDS,
RESOURCE_RELOAD_SERVICE_SCHEMA,
RESOURCE_SCHEMA,
RESOURCE_UPDATE_FIELDS,
SERVICE_RELOAD_RESOURCES,
STORAGE_DASHBOARD_CREATE_FIELDS,
STORAGE_DASHBOARD_UPDATE_FIELDS,
url_slug,
)
_LOGGER = logging.getLogger(__name__)
CONF_DASHBOARDS = "dashboards"
YAML_DASHBOARD_SCHEMA = vol.Schema(
{
**DASHBOARD_BASE_CREATE_FIELDS,
vol.Required(CONF_MODE): MODE_YAML,
vol.Required(CONF_FILENAME): vol.All(cv.string, sanitize_path),
}
)
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN, default={}): vol.Schema(
{
vol.Optional(CONF_MODE, default=MODE_STORAGE): vol.All(
vol.Lower, vol.In([MODE_YAML, MODE_STORAGE])
),
vol.Optional(CONF_DASHBOARDS): cv.schema_with_slug_keys(
YAML_DASHBOARD_SCHEMA,
slug_validator=url_slug,
),
vol.Optional(CONF_RESOURCES): [RESOURCE_SCHEMA],
}
)
},
extra=vol.ALLOW_EXTRA,
)
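# Configuration sketch for YAML mode. "dashboards" is CONF_DASHBOARDS above; the
# remaining key strings (mode, filename, title) are assumptions about constants
# defined outside this file:
#
#   lovelace:
#     mode: yaml
#     dashboards:
#       example-dashboard:
#         mode: yaml
#         filename: example.yaml
#         title: Example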
async def async_setup(hass: HomeAssistantType, config: ConfigType):
"""Set up the Lovelace commands."""
mode = config[DOMAIN][CONF_MODE]
yaml_resources = config[DOMAIN].get(CONF_RESOURCES)
frontend.async_register_built_in_panel(hass, DOMAIN, config={"mode": mode})
async def reload_resources_service_handler(service_call: ServiceCallType) -> None:
"""Reload yaml resources."""
try:
conf = await async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
integration = await async_get_integration(hass, DOMAIN)
config = await async_process_component_config(hass, conf, integration)
resource_collection = await create_yaml_resource_col(
hass, config[DOMAIN].get(CONF_RESOURCES)
)
hass.data[DOMAIN]["resources"] = resource_collection
if mode == MODE_YAML:
default_config = dashboard.LovelaceYAML(hass, None, None)
resource_collection = await create_yaml_resource_col(hass, yaml_resources)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD_RESOURCES,
reload_resources_service_handler,
schema=RESOURCE_RELOAD_SERVICE_SCHEMA,
)
else:
default_config = dashboard.LovelaceStorage(hass, None)
if yaml_resources is not None:
_LOGGER.warning(
"Lovelace is running in storage mode. Define resources via user interface"
)
resource_collection = resources.ResourceStorageCollection(hass, default_config)
collection.StorageCollectionWebsocket(
resource_collection,
"lovelace/resources",
"resource",
RESOURCE_CREATE_FIELDS,
RESOURCE_UPDATE_FIELDS,
).async_setup(hass, create_list=False)
hass.components.websocket_api.async_register_command(
websocket.websocket_lovelace_config
)
hass.components.websocket_api.async_register_command(
websocket.websocket_lovelace_save_config
)
hass.components.websocket_api.async_register_command(
websocket.websocket_lovelace_delete_config
)
hass.components.websocket_api.async_register_command(
websocket.websocket_lovelace_resources
)
hass.components.websocket_api.async_register_command(
websocket.websocket_lovelace_dashboards
)
hass.components.system_health.async_register_info(DOMAIN, system_health_info)
hass.data[DOMAIN] = {
# We store a dictionary mapping url_path: config. None is the default.
"dashboards": {None: default_config},
"resources": resource_collection,
"yaml_dashboards": config[DOMAIN].get(CONF_DASHBOARDS, {}),
}
if hass.config.safe_mode:
return True
async def storage_dashboard_changed(change_type, item_id, item):
"""Handle a storage dashboard change."""
url_path = item[CONF_URL_PATH]
if change_type == collection.CHANGE_REMOVED:
frontend.async_remove_panel(hass, url_path)
await hass.data[DOMAIN]["dashboards"].pop(url_path).async_delete()
return
if change_type == collection.CHANGE_ADDED:
existing = hass.data[DOMAIN]["dashboards"].get(url_path)
if existing:
_LOGGER.warning(
"Cannot register panel at %s, it is already defined in %s",
url_path,
existing,
)
return
hass.data[DOMAIN]["dashboards"][url_path] = dashboard.LovelaceStorage(
hass, item
)
update = False
else:
hass.data[DOMAIN]["dashboards"][url_path].config = item
update = True
try:
_register_panel(hass, url_path, MODE_STORAGE, item, update)
except ValueError:
_LOGGER.warning("Failed to %s panel %s from storage", change_type, url_path)
# Process YAML dashboards
for url_path, dashboard_conf in hass.data[DOMAIN]["yaml_dashboards"].items():
# For now always mode=yaml
config = dashboard.LovelaceYAML(hass, url_path, dashboard_conf)
hass.data[DOMAIN]["dashboards"][url_path] = config
try:
_register_panel(hass, url_path, MODE_YAML, dashboard_conf, False)
except ValueError:
_LOGGER.warning("Panel url path %s is not unique", url_path)
# Process storage dashboards
dashboards_collection = dashboard.DashboardsCollection(hass)
dashboards_collection.async_add_listener(storage_dashboard_changed)
await dashboards_collection.async_load()
collection.StorageCollectionWebsocket(
dashboards_collection,
"lovelace/dashboards",
"dashboard",
STORAGE_DASHBOARD_CREATE_FIELDS,
STORAGE_DASHBOARD_UPDATE_FIELDS,
).async_setup(hass, create_list=False)
return True
async def create_yaml_resource_col(hass, yaml_resources):
"""Create yaml resources collection."""
if yaml_resources is None:
default_config = dashboard.LovelaceYAML(hass, None, None)
try:
ll_conf = await default_config.async_load(False)
except HomeAssistantError:
pass
else:
if CONF_RESOURCES in ll_conf:
_LOGGER.warning(
"Resources need to be specified in your configuration.yaml. Please see the docs"
)
yaml_resources = ll_conf[CONF_RESOURCES]
return resources.ResourceYAMLCollection(yaml_resources or [])
async def system_health_info(hass):
"""Get info for the info page."""
health_info = {"dashboards": len(hass.data[DOMAIN]["dashboards"])}
health_info.update(await hass.data[DOMAIN]["dashboards"][None].async_get_info())
health_info.update(await hass.data[DOMAIN]["resources"].async_get_info())
return health_info
@callback
def _register_panel(hass, url_path, mode, config, update):
"""Register a panel."""
kwargs = {
"frontend_url_path": url_path,
"require_admin": config[CONF_REQUIRE_ADMIN],
"config": {"mode": mode},
"update": update,
}
if config[CONF_SHOW_IN_SIDEBAR]:
kwargs["sidebar_title"] = config[CONF_TITLE]
kwargs["sidebar_icon"] = config.get(CONF_ICON, DEFAULT_ICON)
frontend.async_register_built_in_panel(hass, DOMAIN, **kwargs)
|
from gitsweep.tests.testcases import (GitSweepTestCase, InspectorTestCase,
DeleterTestCase)
class TestDeleter(GitSweepTestCase, InspectorTestCase, DeleterTestCase):
"""
Can delete remote refs from a remote.
"""
def setUp(self):
super(TestDeleter, self).setUp()
for i in range(1, 6):
self.command('git checkout -b branch{0}'.format(i))
self.make_commit()
self.command('git checkout master')
self.make_commit()
self.command('git merge branch{0}'.format(i))
def test_will_delete_merged_from_clone(self):
"""
        Given a list of refs, will delete them from the cloned repo.
This test looks at our cloned repository, the one which is setup to
track the remote and makes sure that the changes occur on it as
expected.
"""
clone = self.remote.remotes[0]
# Grab all the remote branches
before = [i.remote_head for i in clone.refs]
# We should have 5 branches plus HEAD and master
self.assertEqual(7, len(before))
# Delete from the remote through the clone
pushes = self.deleter.remove_remote_refs(
self.merged_refs(refobjs=True))
# Make sure it removed the expected number
self.assertEqual(5, len(pushes))
# Grab all the remote branches again
after = [i.remote_head for i in clone.refs]
after.sort()
# We should be down to 2, HEAD and master
self.assertEqual(['HEAD', 'master'], after)
def test_will_delete_merged_on_remote(self):
"""
With the list of refs, will delete these from the remote.
        This test makes assertions against the remote, not the cloned repository.
We are testing to see if the interactions in the cloned repo are pushed
through to the remote.
Note that accessing the repository directly does not include the
symbolic reference of HEAD.
"""
remote = self.repo
# Get a list of branches on this remote
before = [i.name for i in remote.refs]
# Should be 5 branches + master
self.assertEqual(6, len(before))
# Delete through the clone which pushes to this remote
pushes = self.deleter.remove_remote_refs(
self.merged_refs(refobjs=True))
# Make sure it removed the expected number
self.assertEqual(5, len(pushes))
# Grab again
after = [i.name for i in remote.refs]
# Should be down to just master
self.assertEqual(['master'], after)
|
from datetime import timedelta
from typing import Optional
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player.const import ATTR_MEDIA_CONTENT_ID
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.integration_platform import (
async_process_integration_platforms,
)
from homeassistant.loader import bind_hass
from . import local_source, models
from .const import DOMAIN, URI_SCHEME, URI_SCHEME_REGEX
from .error import Unresolvable
def is_media_source_id(media_content_id: str):
"""Test if identifier is a media source."""
return URI_SCHEME_REGEX.match(media_content_id) is not None
def generate_media_source_id(domain: str, identifier: str) -> str:
"""Generate a media source ID."""
uri = f"{URI_SCHEME}{domain or ''}"
if identifier:
uri += f"/{identifier}"
return uri
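# For example (illustrative values; URI_SCHEME comes from .const):
#   generate_media_source_id("local", "media/song.mp3")
#   returns f"{URI_SCHEME}local/media/song.mp3"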
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the media_source component."""
hass.data[DOMAIN] = {}
hass.components.websocket_api.async_register_command(websocket_browse_media)
hass.components.websocket_api.async_register_command(websocket_resolve_media)
hass.components.frontend.async_register_built_in_panel(
"media-browser", "media_browser", "hass:play-box-multiple"
)
local_source.async_setup(hass)
await async_process_integration_platforms(
hass, DOMAIN, _process_media_source_platform
)
return True
async def _process_media_source_platform(hass, domain, platform):
"""Process a media source platform."""
hass.data[DOMAIN][domain] = await platform.async_get_media_source(hass)
@callback
def _get_media_item(
hass: HomeAssistant, media_content_id: Optional[str]
) -> models.MediaSourceItem:
"""Return media item."""
if media_content_id:
return models.MediaSourceItem.from_uri(hass, media_content_id)
    # We default to our own domain if it's the only one registered
domain = None if len(hass.data[DOMAIN]) > 1 else DOMAIN
return models.MediaSourceItem(hass, domain, "")
@bind_hass
async def async_browse_media(
hass: HomeAssistant, media_content_id: str
) -> models.BrowseMediaSource:
"""Return media player browse media results."""
return await _get_media_item(hass, media_content_id).async_browse()
@bind_hass
async def async_resolve_media(
hass: HomeAssistant, media_content_id: str
) -> models.PlayMedia:
"""Get info to play media."""
return await _get_media_item(hass, media_content_id).async_resolve()
@websocket_api.websocket_command(
{
vol.Required("type"): "media_source/browse_media",
vol.Optional(ATTR_MEDIA_CONTENT_ID, default=""): str,
}
)
@websocket_api.async_response
async def websocket_browse_media(hass, connection, msg):
"""Browse available media."""
try:
media = await async_browse_media(hass, msg.get("media_content_id"))
connection.send_result(
msg["id"],
media.as_dict(),
)
except BrowseError as err:
connection.send_error(msg["id"], "browse_media_failed", str(err))
@websocket_api.websocket_command(
{
vol.Required("type"): "media_source/resolve_media",
vol.Required(ATTR_MEDIA_CONTENT_ID): str,
vol.Optional("expires", default=30): int,
}
)
@websocket_api.async_response
async def websocket_resolve_media(hass, connection, msg):
"""Resolve media."""
try:
media = await async_resolve_media(hass, msg["media_content_id"])
url = media.url
except Unresolvable as err:
connection.send_error(msg["id"], "resolve_media_failed", str(err))
else:
if url[0] == "/":
url = async_sign_path(
hass,
connection.refresh_token_id,
url,
timedelta(seconds=msg["expires"]),
)
connection.send_result(msg["id"], {"url": url, "mime_type": media.mime_type})
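# Example websocket request handled by the command above (illustrative values;
# the media-source URI scheme shown is an assumption):
#   {"id": 7, "type": "media_source/resolve_media",
#    "media_content_id": "media-source://local/song.mp3", "expires": 60}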
|
import plumbum
#from plumbum.colorlib import HTMLStyle, StyleFactory
#plumbum.colors = StyleFactory(HTMLStyle)
from plumbum import cli, colors
class MyApp(cli.Application):
PROGNAME = colors.green
VERSION = colors.blue | "1.0.2"
COLOR_GROUPS = {"Meta-switches" : colors.bold & colors.yellow}
opts = cli.Flag("--ops", help=colors.magenta | "This is help")
def main(self):
print("HI")
if __name__ == "__main__":
MyApp.run()
|
import re
import click
from molecule import logger
from molecule import util
from molecule.command import base
LOG = logger.get_logger(__name__)
class Idempotence(base.Base):
"""
    Runs the converge step a second time. If no tasks are marked as changed,
    the scenario is considered idempotent.
.. program:: molecule idempotence
.. option:: molecule idempotence
Target the default scenario.
.. program:: molecule idempotence --scenario-name foo
.. option:: molecule idempotence --scenario-name foo
Targeting a specific scenario.
.. program:: molecule --debug idempotence
.. option:: molecule --debug idempotence
Executing with `debug`.
.. program:: molecule --base-config base.yml idempotence
.. option:: molecule --base-config base.yml idempotence
Executing with a `base-config`.
.. program:: molecule --env-file foo.yml idempotence
.. option:: molecule --env-file foo.yml idempotence
Load an env file to read variables from when rendering
molecule.yml.
"""
def execute(self):
"""
        Execute the actions necessary to perform a `molecule idempotence` and
        return None.
:return: None
"""
self.print_info()
if not self._config.state.converged:
msg = 'Instances not converged. Please converge instances first.'
util.sysexit_with_message(msg)
output = self._config.provisioner.converge(out=None, err=None)
idempotent = self._is_idempotent(output)
if idempotent:
msg = 'Idempotence completed successfully.'
LOG.success(msg)
else:
msg = ('Idempotence test failed because of the following tasks:\n'
u'{}').format('\n'.join(self._non_idempotent_tasks(output)))
util.sysexit_with_message(msg)
def _is_idempotent(self, output):
"""
        Parse the provisioning output for changed tasks and return a bool.
:param output: A string containing the output of the ansible run.
:return: bool
"""
# Remove blank lines to make regex matches easier
output = re.sub(r'\n\s*\n*', '\n', output)
# Look for any non-zero changed lines
changed = re.search(r'(changed=[1-9][0-9]*)', output)
if changed:
# Not idempotent
return False
return True
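        # Illustrative ansible recap lines (not from a real run):
        #   "host1 : ok=3  changed=0  failed=0"  -> no match, run is idempotent
        #   "host1 : ok=3  changed=2  failed=0"  -> the regex above matches
        #   "changed=2", so the run is reported as not idempotent.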
def _non_idempotent_tasks(self, output):
"""
        Parse the output to identify the non-idempotent tasks.
:param (str) output: A string containing the output of the ansible run.
:return: A list containing the names of the non idempotent tasks.
"""
# Remove blank lines to make regex matches easier.
output = re.sub(r'\n\s*\n*', '\n', output)
# Remove ansi escape sequences.
output = util.strip_ansi_escape(output)
# Split the output into a list and go through it.
output_lines = output.split('\n')
res = []
task_line = ''
for _, line in enumerate(output_lines):
if line.startswith('TASK'):
task_line = line
elif line.startswith('changed'):
host_name = re.search(r'\[(.*)\]', line).groups()[0]
task_name = re.search(r'\[(.*)\]', task_line).groups()[0]
res.append(u'* [{}] => {}'.format(host_name, task_name))
return res
@click.command()
@click.pass_context
@click.option(
'--scenario-name',
'-s',
default=base.MOLECULE_DEFAULT_SCENARIO_NAME,
help='Name of the scenario to target. ({})'.format(
base.MOLECULE_DEFAULT_SCENARIO_NAME))
def idempotence(ctx, scenario_name): # pragma: no cover
"""
Use the provisioner to configure the instances and parse the output to
determine idempotence.
"""
args = ctx.obj.get('args')
subcommand = base._get_subcommand(__name__)
command_args = {
'subcommand': subcommand,
}
base.execute_cmdline_scenarios(scenario_name, args, command_args)
|
from unittest import mock
import contextlib
import shutil
import attr
from PyQt5.QtCore import pyqtSignal, QPoint, QProcess, QObject, QUrl
from PyQt5.QtGui import QIcon
from PyQt5.QtNetwork import (QNetworkRequest, QAbstractNetworkCache,
QNetworkCacheMetaData)
from PyQt5.QtWidgets import QCommonStyle, QLineEdit, QWidget, QTabBar
from qutebrowser.browser import browsertab, downloads
from qutebrowser.utils import usertypes
from qutebrowser.commands import runners
class FakeNetworkCache(QAbstractNetworkCache):
"""Fake cache with no data."""
def cacheSize(self):
return 0
def data(self, _url):
return None
def insert(self, _dev):
pass
def metaData(self, _url):
return QNetworkCacheMetaData()
def prepare(self, _metadata):
return None
def remove(self, _url):
return False
def updateMetaData(self, _url):
pass
class FakeKeyEvent:
"""Fake QKeyPressEvent stub."""
def __init__(self, key, modifiers=0, text=''):
self.key = mock.Mock(return_value=key)
self.text = mock.Mock(return_value=text)
self.modifiers = mock.Mock(return_value=modifiers)
class FakeWebFrame:
"""A stub for QWebFrame."""
def __init__(self, geometry=None, *, scroll=None, plaintext=None,
html=None, parent=None, zoom=1.0):
"""Constructor.
Args:
geometry: The geometry of the frame as QRect.
scroll: The scroll position as QPoint.
plaintext: Return value of toPlainText
html: Return value of tohtml.
zoom: The zoom factor.
parent: The parent frame.
"""
if scroll is None:
scroll = QPoint(0, 0)
self.geometry = mock.Mock(return_value=geometry)
self.scrollPosition = mock.Mock(return_value=scroll)
self.parentFrame = mock.Mock(return_value=parent)
self.toPlainText = mock.Mock(return_value=plaintext)
self.toHtml = mock.Mock(return_value=html)
self.zoomFactor = mock.Mock(return_value=zoom)
class FakeChildrenFrame:
"""A stub for QWebFrame to test get_child_frames."""
def __init__(self, children=None):
if children is None:
children = []
self.childFrames = mock.Mock(return_value=children)
class FakeQApplication:
"""Stub to insert as QApplication module."""
UNSET = object()
def __init__(self, *, style=None, all_widgets=None, active_window=None,
instance=UNSET, arguments=None, platform_name=None):
if instance is self.UNSET:
self.instance = mock.Mock(return_value=self)
else:
self.instance = mock.Mock(return_value=instance)
self.style = mock.Mock(spec=QCommonStyle)
self.style().metaObject().className.return_value = style
self.allWidgets = lambda: all_widgets
self.activeWindow = lambda: active_window
self.arguments = lambda: arguments
self.platformName = lambda: platform_name
class FakeNetworkReply:
"""QNetworkReply stub which provides a Content-Disposition header."""
KNOWN_HEADERS = {
QNetworkRequest.ContentTypeHeader: 'Content-Type',
}
def __init__(self, headers=None, url=None):
if url is None:
url = QUrl()
if headers is None:
self.headers = {}
else:
self.headers = headers
self.url = mock.Mock(return_value=url)
def hasRawHeader(self, name):
"""Check if the reply has a certain header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
True if the header is present, False if not.
"""
return name.decode('iso-8859-1') in self.headers
def rawHeader(self, name):
"""Get the raw header data of a header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
The header data, as ISO-8859-1 encoded bytes() object.
"""
name = name.decode('iso-8859-1')
return self.headers[name].encode('iso-8859-1')
def header(self, known_header):
"""Get a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
"""
key = self.KNOWN_HEADERS[known_header]
try:
return self.headers[key]
except KeyError:
return None
def setHeader(self, known_header, value):
"""Set a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
value: The value to set.
"""
key = self.KNOWN_HEADERS[known_header]
self.headers[key] = value
def fake_qprocess():
"""Factory for a QProcess mock which has the QProcess enum values."""
m = mock.Mock(spec=QProcess)
for name in ['NormalExit', 'CrashExit', 'FailedToStart', 'Crashed',
'Timedout', 'WriteError', 'ReadError', 'UnknownError']:
setattr(m, name, getattr(QProcess, name))
return m
class FakeWebTabScroller(browsertab.AbstractScroller):
"""Fake AbstractScroller to use in tests."""
def __init__(self, tab, pos_perc):
super().__init__(tab)
self._pos_perc = pos_perc
def pos_perc(self):
return self._pos_perc
class FakeWebTabHistory(browsertab.AbstractHistory):
"""Fake for Web{Kit,Engine}History."""
def __init__(self, tab, *, can_go_back, can_go_forward):
super().__init__(tab)
self._can_go_back = can_go_back
self._can_go_forward = can_go_forward
def can_go_back(self):
assert self._can_go_back is not None
return self._can_go_back
def can_go_forward(self):
assert self._can_go_forward is not None
return self._can_go_forward
class FakeWebTabAudio(browsertab.AbstractAudio):
def is_muted(self):
return False
def is_recently_audible(self):
return False
class FakeWebTabPrivate(browsertab.AbstractTabPrivate):
def shutdown(self):
pass
class FakeWebTab(browsertab.AbstractTab):
"""Fake AbstractTab to use in tests."""
def __init__(self, url=QUrl(), title='', tab_id=0, *,
scroll_pos_perc=(0, 0),
load_status=usertypes.LoadStatus.success,
progress=0, can_go_back=None, can_go_forward=None):
super().__init__(win_id=0, mode_manager=None, private=False)
self._load_status = load_status
self._title = title
self._url = url
self._progress = progress
self.history = FakeWebTabHistory(self, can_go_back=can_go_back,
can_go_forward=can_go_forward)
self.scroller = FakeWebTabScroller(self, scroll_pos_perc)
self.audio = FakeWebTabAudio(self)
self.private_api = FakeWebTabPrivate(tab=self, mode_manager=None)
wrapped = QWidget()
self._layout.wrap(self, wrapped)
def url(self, *, requested=False):
assert not requested
return self._url
def title(self):
return self._title
def progress(self):
return self._progress
def load_status(self):
return self._load_status
def icon(self):
return QIcon()
class FakeSignal:
"""Fake pyqtSignal stub which does nothing.
Attributes:
signal: The name of the signal, like pyqtSignal.
_func: The function to be invoked when the signal gets called.
"""
def __init__(self, name='fake', func=None):
self.signal = '2{}(int, int)'.format(name)
self._func = func
def __call__(self):
if self._func is None:
raise TypeError("'FakeSignal' object is not callable")
return self._func()
def connect(self, slot):
"""Connect the signal to a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot.
"""
def disconnect(self, slot=None):
"""Disconnect the signal from a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot and see if it actually got connected.
"""
def emit(self, *args):
"""Emit the signal.
Currently does nothing, but could be improved to do type checking based
on a signature given to __init__.
"""
@attr.s(frozen=True)
class FakeCommand:
"""A simple command stub which has a description."""
name = attr.ib('')
desc = attr.ib('')
hide = attr.ib(False)
debug = attr.ib(False)
deprecated = attr.ib(False)
completion = attr.ib(None)
maxsplit = attr.ib(None)
takes_count = attr.ib(lambda: False)
modes = attr.ib((usertypes.KeyMode.normal, ))
class FakeTimer(QObject):
"""Stub for a usertypes.Timer."""
timeout_signal = pyqtSignal()
def __init__(self, parent=None, name=None):
super().__init__(parent)
self.timeout = mock.Mock(spec=['connect', 'disconnect', 'emit'])
self.timeout.connect.side_effect = self.timeout_signal.connect
self.timeout.disconnect.side_effect = self.timeout_signal.disconnect
self.timeout.emit.side_effect = self._emit
self._started = False
self._singleshot = False
self._interval = 0
self._name = name
def __repr__(self):
return '<{} name={!r}>'.format(self.__class__.__name__, self._name)
def _emit(self):
"""Called when the timeout "signal" gets emitted."""
if self._singleshot:
self._started = False
self.timeout_signal.emit()
def setInterval(self, interval):
self._interval = interval
def interval(self):
return self._interval
def setSingleShot(self, singleshot):
self._singleshot = singleshot
def isSingleShot(self):
return self._singleshot
def start(self, interval=None):
if interval:
self._interval = interval
self._started = True
def stop(self):
self._started = False
def isActive(self):
return self._started
class InstaTimer(QObject):
"""Stub for a QTimer that fires instantly on start().
Useful to test a time-based event without inserting an artificial delay.
"""
timeout = pyqtSignal()
def start(self, interval=None):
self.timeout.emit()
def setSingleShot(self, yes):
pass
def setInterval(self, interval):
pass
@staticmethod
def singleShot(_interval, fun):
fun()
class StatusBarCommandStub(QLineEdit):
"""Stub for the statusbar command prompt."""
got_cmd = pyqtSignal(str)
clear_completion_selection = pyqtSignal()
hide_completion = pyqtSignal()
update_completion = pyqtSignal()
show_cmd = pyqtSignal()
hide_cmd = pyqtSignal()
def prefix(self):
return self.text()[0]
class UrlMarkManagerStub(QObject):
"""Stub for the quickmark-manager or bookmark-manager object."""
added = pyqtSignal(str, str)
removed = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.marks = {}
def delete(self, key):
del self.marks[key]
self.removed.emit(key)
class BookmarkManagerStub(UrlMarkManagerStub):
"""Stub for the bookmark-manager object."""
class QuickmarkManagerStub(UrlMarkManagerStub):
"""Stub for the quickmark-manager object."""
def quickmark_del(self, key):
self.delete(key)
class SessionManagerStub:
"""Stub for the session-manager object."""
def __init__(self):
self.sessions = []
def list_sessions(self):
return self.sessions
def save_autosave(self):
pass
class TabbedBrowserStub(QObject):
"""Stub for the tabbed-browser object."""
def __init__(self, parent=None):
super().__init__(parent)
self.widget = TabWidgetStub()
self.is_shutting_down = False
self.loaded_url = None
self.cur_url = None
self.undo_stack = None
def on_tab_close_requested(self, idx):
del self.widget.tabs[idx]
def widgets(self):
return self.widget.tabs
def tabopen(self, url):
self.loaded_url = url
def load_url(self, url, *, newtab):
self.loaded_url = url
def current_url(self):
        if self.cur_url is None:
raise ValueError("current_url got called with cur_url None!")
return self.cur_url
class TabWidgetStub(QObject):
"""Stub for the tab-widget object."""
new_tab = pyqtSignal(browsertab.AbstractTab, int)
def __init__(self, parent=None):
super().__init__(parent)
self.tabs = []
self._qtabbar = QTabBar()
self.index_of = None
self.current_index = None
def count(self):
return len(self.tabs)
def widget(self, i):
return self.tabs[i]
def page_title(self, i):
return self.tabs[i].title()
def tabBar(self):
return self._qtabbar
def indexOf(self, _tab):
if self.index_of is None:
raise ValueError("indexOf got called with index_of None!")
if self.index_of is RuntimeError:
raise RuntimeError
return self.index_of
def currentIndex(self):
if self.current_index is None:
raise ValueError("currentIndex got called with current_index "
"None!")
return self.current_index
def currentWidget(self):
idx = self.currentIndex()
if idx == -1:
return None
return self.tabs[idx - 1]
class HTTPPostStub(QObject):
"""A stub class for HTTPClient.
Attributes:
        url: the last url sent by post()
        data: the last data sent by post()
"""
success = pyqtSignal(str)
error = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.url = None
self.data = None
def post(self, url, data=None):
self.url = url
self.data = data
class FakeDownloadItem(QObject):
"""Mock browser.downloads.DownloadItem."""
finished = pyqtSignal()
def __init__(self, fileobj, name, parent=None):
super().__init__(parent)
self.fileobj = fileobj
self.name = name
self.successful = False
class FakeDownloadManager:
"""Mock browser.downloads.DownloadManager."""
def __init__(self, tmpdir):
self._tmpdir = tmpdir
self.downloads = []
@contextlib.contextmanager
def _open_fileobj(self, target):
"""Ensure a DownloadTarget's fileobj attribute is available."""
if isinstance(target, downloads.FileDownloadTarget):
target.fileobj = open(target.filename, 'wb')
try:
yield target.fileobj
finally:
target.fileobj.close()
else:
yield target.fileobj
def get(self, url, target, **kwargs):
"""Return a FakeDownloadItem instance with a fileobj.
The content is copied from the file the given url links to.
"""
with self._open_fileobj(target):
download_item = FakeDownloadItem(target.fileobj, name=url.path())
with (self._tmpdir / url.path()).open('rb') as fake_url_file:
shutil.copyfileobj(fake_url_file, download_item.fileobj)
self.downloads.append(download_item)
return download_item
def has_downloads_with_nam(self, _nam):
"""Needed during WebView.shutdown()."""
return False
class FakeHistoryProgress:
"""Fake for a WebHistoryProgress object."""
def __init__(self):
self._started = False
self._finished = False
self._value = 0
def start(self, _text, _maximum):
self._started = True
def tick(self):
self._value += 1
def finish(self):
self._finished = True
class FakeCommandRunner(runners.AbstractCommandRunner):
def __init__(self, parent=None):
super().__init__(parent)
self.commands = []
def run(self, text, count=None, *, safely=False):
self.commands.append((text, count))
class FakeHintManager:
def __init__(self):
self.keystr = None
def handle_partial_key(self, keystr):
self.keystr = keystr
def current_mode(self):
return 'letter'
class FakeWebEngineProfile:
def __init__(self, cookie_store):
self.cookieStore = lambda: cookie_store
class FakeCookieStore:
def __init__(self):
self.cookie_filter = None
def setCookieFilter(self, func):
self.cookie_filter = func
|
import itertools
import operator
from datetime import datetime as dt
import pytest
import six
from arctic.date import DateRange, string_to_daterange, CLOSED_CLOSED, CLOSED_OPEN, OPEN_CLOSED, OPEN_OPEN
test_ranges_for_bounding = {
"unbounded": (DateRange(),
None, None, True, None, None),
"unbounded_right": (DateRange('20110101'),
dt(2011, 1, 1), None, True, True, None),
"unbounded_left": (DateRange(None, '20111231'),
None, dt(2011, 12, 31), True, None, True),
"closed_by_default": (DateRange('20110101', '20111231'),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, True),
"closed_explicitly": (DateRange('20110101', '20111231', CLOSED_CLOSED),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, True),
"closed_open": (DateRange('20110101', '20111231', CLOSED_OPEN),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, False),
"open_closed": (DateRange('20110101', '20111231', OPEN_CLOSED),
dt(2011, 1, 1), dt(2011, 12, 31), False, False, True),
"open_open": (DateRange('20110101', '20111231', OPEN_OPEN),
dt(2011, 1, 1), dt(2011, 12, 31), False, False, False),
}
test_ranges_for_bounding = sorted(six.iteritems(test_ranges_for_bounding), key=operator.itemgetter(1))
def eq_nan(*args):
if all(arg is None for arg in args):
return True
return all(arg == args[0] for arg in args[1:])
@pytest.mark.parametrize(("dt_range", "start", "end", "is_unbounded", "start_in_range", "end_in_range"),
[i[1] for i in test_ranges_for_bounding],
ids=[i[0] for i in test_ranges_for_bounding])
def test_daterange_bounding(dt_range, start, end, is_unbounded, start_in_range, end_in_range):
assert eq_nan(start, dt_range.start)
assert eq_nan(end, dt_range.end)
assert dt_range.unbounded is is_unbounded
assert dt_range.start is None or (start_in_range is (dt_range.start in dt_range))
assert dt_range.end is None or (end_in_range is (dt_range.end in dt_range))
test_ranges_for_parse = [
[20110102, 20111231],
['20110102', '20111231'],
['2011-01-02', '2011-12-31'],
[dt(2011, 1, 2), dt(2011, 12, 31)],
]
@pytest.mark.parametrize("date_range", test_ranges_for_parse)
def test_daterange_arg_parsing(date_range):
d1 = DateRange(date_range[0], date_range[1])
assert d1.start == dt(2011, 1, 2)
assert d1.end == dt(2011, 12, 31)
assert d1.unbounded is False
def test_daterange_eq():
d1 = DateRange('20110101', '20111231')
d2 = DateRange('20110101', '20111231')
assert d1 == d2
d1 = DateRange(None, '20111231')
d2 = DateRange(None, '20111231')
assert d1 == d2
d1 = DateRange('20111231', None)
d2 = DateRange('20111231', None)
assert d1 == d2
d1 = DateRange(None, None)
d2 = DateRange(None, None)
assert d1 == d2
d1 = DateRange('20110102', '20111231')
d2 = DateRange('20110101', '20111231')
assert not d1 == d2
def test_daterange_hash():
d1 = DateRange('20110101', '20111231')
d2 = DateRange('20110101', '20111231')
assert hash(d1) == hash(d2)
d1 = DateRange(None, '20111231')
d2 = DateRange(None, '20111231')
assert hash(d1) == hash(d2)
d1 = DateRange('20111231', None)
d2 = DateRange('20111231', None)
assert hash(d1) == hash(d2)
d1 = DateRange(None, None)
d2 = DateRange(None, None)
assert hash(d1) == hash(d2)
d1 = DateRange('20110102', '20111231')
d2 = DateRange('20110101', '20111231')
assert not hash(d1) == hash(d2)
def test_daterange_invalid_start():
with pytest.raises(TypeError) as ex:
DateRange(1.1, None)
assert "unsupported type for start" in str(ex.value)
def test_daterange_invalid_end():
with pytest.raises(TypeError) as ex:
DateRange(None, object())
assert "unsupported type for end" in str(ex.value)
def test_daterange_index():
start, end = dt(2000, 1, 1), dt(3000, 1, 1)
dr = DateRange(start, end)
assert dr[0] == start
assert dr[1] == end
def test_daterange_index_error():
start, end = dt(2000, 1, 1), dt(3000, 1, 1)
dr = DateRange(start, end)
with pytest.raises(IndexError):
dr[None]
with pytest.raises(IndexError):
dr[3]
def test_as_dates():
"""Various permutations of datetime/None, and date/None values."""
dtime = dt(2010, 12, 13, 10, 30)
for testdt in [dtime, dtime.date()]:
vals = [testdt, None]
for start, end in itertools.product(vals, vals):
dr = DateRange(start, end)
dad = dr.as_dates()
if dr.start:
                assert dad.start == (dr.start.date() if isinstance(dr.start, dt) else dr.start)
else:
assert not dad.start
if dr.end:
                assert dad.end == (dr.end.date() if isinstance(dr.end, dt) else dr.end)
else:
assert not dad.end
DR1 = DateRange('20110101', '20110102')
DR2 = DateRange('201101011030', '201101021030')
DR3 = DateRange('201101011030')
DR4 = DateRange(None, '201101011030')
DR5 = DateRange('201101011030')
DR6 = DateRange('20110101', '20110102', OPEN_OPEN)
DR7 = DateRange('20110101', '20110102', OPEN_CLOSED)
DR8 = DateRange('20110101', '20110102', CLOSED_OPEN)
STRING_DR_TESTS = [('20110101', DR1, DateRange(DR1.start.date(), DR1.end.date())),
('20110101-20110102', DR1, DateRange(DR1.start.date(), DR1.end.date())),
('201101011030', DR2, DateRange(DR2.start.date(), DR2.end.date())),
('-201101011030', DR4, DateRange(None, DR2.start.date())),
('201101011030-', DR5, DateRange(DR2.start.date())),
('(20110101-20110102)', DR6, DateRange(DR6.start.date(), DR6.end.date(), DR6.interval)),
('(20110101-20110102]', DR6, DateRange(DR6.start.date(), DR6.end.date(), DR6.interval)),
('[20110101-20110102)', DR6, DateRange(DR6.start.date(), DR6.end.date(), DR6.interval)),
('[20110101-20110102]', DR1, DateRange(DR1.start.date(), DR1.end.date(), DR1.interval)),
]
@pytest.mark.parametrize(['instr', 'expected_ts', 'expected_dt'], STRING_DR_TESTS)
def test_string_to_daterange(instr, expected_ts, expected_dt):
assert string_to_daterange(instr) == expected_ts
assert string_to_daterange(instr, as_dates=True) == expected_dt
def test_string_to_daterange_raises():
with pytest.raises(ValueError) as e:
string_to_daterange('20120101-20130101-20140101')
assert str(e.value) == "Too many dates in input string [20120101-20130101-20140101] with delimiter (-)"
QUERY_TESTS = [(DateRange('20110101', '20110102'), {'$gte': dt(2011, 1, 1), '$lte': dt(2011, 1, 2)}),
(DateRange('20110101', '20110102', OPEN_OPEN), {'$gt': dt(2011, 1, 1), '$lt': dt(2011, 1, 2)}),
(DateRange('20110101', '20110102', OPEN_CLOSED), {'$gt': dt(2011, 1, 1), '$lte': dt(2011, 1, 2)}),
(DateRange('20110101', '20110102', CLOSED_OPEN), {'$gte': dt(2011, 1, 1), '$lt': dt(2011, 1, 2)}),
(DateRange('20110101', '20110102'), {'$gte': dt(2011, 1, 1), '$lte': dt(2011, 1, 2)}),
(DateRange('20110101', None), {'$gte': dt(2011, 1, 1)}),
(DateRange(None, '20110102'), {'$lte': dt(2011, 1, 2)}),
(DateRange(), {})]
@pytest.mark.parametrize(['date_range', 'expected'], QUERY_TESTS)
def test_mongo_query(date_range, expected):
assert date_range.mongo_query() == expected
QUERY_TESTS_DB = [(DateRange('20110101', '20110102'), ('>=', dt(2011, 1, 1), '<=', dt(2011, 1, 2))),
(DateRange('20110101', '20110102', OPEN_OPEN), ('>', dt(2011, 1, 1), '<', dt(2011, 1, 2))),
(DateRange('20110101', '20110102', OPEN_CLOSED), ('>', dt(2011, 1, 1), '<=', dt(2011, 1, 2))),
(DateRange('20110101', '20110102', CLOSED_OPEN), ('>=', dt(2011, 1, 1), '<', dt(2011, 1, 2))),
(DateRange('20110101', '20110102'), ('>=', dt(2011, 1, 1), '<=', dt(2011, 1, 2))),
(DateRange('20110101', None), ('>=', dt(2011, 1, 1), '<=' , None)),
(DateRange(None, '20110102'), ('>=', None, '<=', dt(2011, 1, 2))),
(DateRange(), ('>=', None , '<=' , None))]
@pytest.mark.parametrize(['date_range', 'expected'], QUERY_TESTS_DB)
def test_get_date_bounds(date_range, expected):
assert date_range.get_date_bounds() == expected
@pytest.mark.parametrize(["dr"], [(DR1,), (DR2,), (DR3,), (DR4,), (DR5,), (DR6,), (DR7,)])
def test_intersection_with_self(dr):
assert dr == dr.intersection(dr)
def test_intersection_returns_inner_boundaries():
# #start:
assert DateRange('20110103',).intersection(DateRange('20110102')).start == dt(2011, 1, 3)
assert DateRange('20110102',).intersection(DateRange('20110103')).start == dt(2011, 1, 3)
assert DateRange(None,).intersection(DateRange('20110103')).start == dt(2011, 1, 3)
assert DateRange('20110103').intersection(DateRange(None)).start == dt(2011, 1, 3)
# #end:
assert DateRange(None, '20110103',).intersection(DateRange(None, '20110102')).end == dt(2011, 1, 2)
assert DateRange(None, '20110102',).intersection(DateRange(None, '20110103')).end == dt(2011, 1, 2)
assert DateRange(None, None,).intersection(DateRange(None, '20110103')).end == dt(2011, 1, 3)
assert DateRange(None, '20110103').intersection(DateRange(None, None)).end == dt(2011, 1, 3)
def test_intersection_preserves_boundaries():
# Non-matching boundaries
assert DateRange('20110101', '20110102', OPEN_OPEN) == DateRange('20110101', '20110103', OPEN_CLOSED).intersection(DateRange('20110101', '20110102', OPEN_OPEN))
assert DateRange('20110101', '20110102', OPEN_OPEN) == DateRange('20110101', '20110102', OPEN_OPEN).intersection(DateRange('20110101', '20110103', OPEN_CLOSED))
assert DateRange('20110102', '20110103', OPEN_OPEN) == DateRange('20110102', '20110103', OPEN_OPEN).intersection(DateRange('20110101', '20110103', CLOSED_OPEN))
assert DateRange('20110102', '20110103', CLOSED_OPEN) == DateRange('20110102', '20110103', CLOSED_OPEN).intersection(DateRange('20110101', '20110103', CLOSED_OPEN))
assert DateRange('20110102', '20110103', CLOSED_OPEN) == DateRange('20110101', '20110103', CLOSED_OPEN).intersection(DateRange('20110102', '20110103', CLOSED_OPEN))
# Matching boundaries
assert DateRange('20110101', '20110102', OPEN_OPEN) == DateRange('20110101', '20110102', CLOSED_OPEN).intersection(DateRange('20110101', '20110102', OPEN_OPEN))
assert DateRange('20110101', '20110102', OPEN_OPEN) == DateRange('20110101', '20110102', OPEN_OPEN).intersection(DateRange('20110101', '20110102', OPEN_CLOSED))
def test_intersection_contains():
# assert ((d in dr1) & (d in dr2)) == (d in (dr1 & dr2)) for any interval combination
start, end = dt(2018, 1, 1), dt(2018, 1, 2)
date_ranges = [DateRange(start, end, interval) for interval in CLOSED_CLOSED.__class__]
def equal_contains(date, dr1, dr2):
return ((date in dr1) and (date in dr2)) == (date in dr1.intersection(dr2))
assert all(equal_contains(start, dr1, dr2) for dr1 in date_ranges for dr2 in date_ranges)
assert all(equal_contains(end, dr1, dr2) for dr1 in date_ranges for dr2 in date_ranges)
|
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import (
ALLOWED_WATERING_TIME,
ATTRIBUTION,
CONF_WATERING_TIME,
DATA_RAINCLOUD,
DEFAULT_WATERING_TIME,
SWITCHES,
RainCloudEntity,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SWITCHES)): vol.All(
cv.ensure_list, [vol.In(SWITCHES)]
),
vol.Optional(CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME): vol.All(
vol.In(ALLOWED_WATERING_TIME)
),
}
)
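# Configuration sketch for this switch platform. The "raincloud" platform name and
# the "watering_time" key string are assumptions; the condition names match the
# sensor types handled in the class below:
#
#   switch:
#     - platform: raincloud
#       watering_time: 15  # must be one of ALLOWED_WATERING_TIME
#       monitored_conditions:
#         - auto_watering
#         - manual_watering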
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
default_watering_timer = config.get(CONF_WATERING_TIME)
sensors = []
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
# create a sensor for each zone managed by faucet
for zone in raincloud.controller.faucet.zones:
sensors.append(RainCloudSwitch(default_watering_timer, zone, sensor_type))
add_entities(sensors, True)
class RainCloudSwitch(RainCloudEntity, SwitchEntity):
"""A switch implementation for raincloud device."""
def __init__(self, default_watering_timer, *args):
"""Initialize a switch for raincloud device."""
super().__init__(*args)
self._default_watering_timer = default_watering_timer
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
if self._sensor_type == "manual_watering":
self.data.watering_time = self._default_watering_timer
elif self._sensor_type == "auto_watering":
self.data.auto_watering = True
self._state = True
def turn_off(self, **kwargs):
"""Turn the device off."""
if self._sensor_type == "manual_watering":
self.data.watering_time = "off"
elif self._sensor_type == "auto_watering":
self.data.auto_watering = False
self._state = False
def update(self):
"""Update device state."""
_LOGGER.debug("Updating RainCloud switch: %s", self._name)
if self._sensor_type == "manual_watering":
self._state = bool(self.data.watering_time)
elif self._sensor_type == "auto_watering":
self._state = self.data.auto_watering
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
"default_manual_timer": self._default_watering_timer,
"identifier": self.data.serial,
}
|
from django.shortcuts import get_object_or_404
from django.views.generic.base import RedirectView
from zinnia.models.entry import Entry
class EntryShortLink(RedirectView):
"""
View for handling the shortlink of an Entry,
    simply doing a redirection.
"""
permanent = True
def get_redirect_url(self, **kwargs):
"""
Get entry corresponding to 'pk' encoded in base36
in the 'token' variable and return the get_absolute_url
of the entry.
"""
entry = get_object_or_404(Entry.published, pk=int(kwargs['token'], 36))
return entry.get_absolute_url()
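# For example, a shortlink token of '2s' resolves to the entry with pk 100,
# since int('2s', 36) == 2 * 36 + 28 == 100.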
|
import asyncio
import logging
import voluptuous as vol
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant import config_entries, const as ha_const
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import HomeAssistantType
from . import api
from .core import ZHAGateway
from .core.const import (
BAUD_RATES,
COMPONENTS,
CONF_BAUDRATE,
CONF_DATABASE,
CONF_DEVICE_CONFIG,
CONF_ENABLE_QUIRKS,
CONF_RADIO_TYPE,
CONF_USB_PATH,
CONF_ZIGPY,
DATA_ZHA,
DATA_ZHA_CONFIG,
DATA_ZHA_DISPATCHERS,
DATA_ZHA_GATEWAY,
DATA_ZHA_PLATFORM_LOADED,
DOMAIN,
SIGNAL_ADD_ENTITIES,
RadioType,
)
from .core.discovery import GROUP_PROBE
DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema({vol.Optional(ha_const.CONF_TYPE): cv.string})
ZHA_CONFIG_SCHEMA = {
vol.Optional(CONF_BAUDRATE): cv.positive_int,
vol.Optional(CONF_DATABASE): cv.string,
vol.Optional(CONF_DEVICE_CONFIG, default={}): vol.Schema(
{cv.string: DEVICE_CONFIG_SCHEMA_ENTRY}
),
vol.Optional(CONF_ENABLE_QUIRKS, default=True): cv.boolean,
vol.Optional(CONF_ZIGPY): dict,
vol.Optional(CONF_RADIO_TYPE): cv.enum(RadioType),
vol.Optional(CONF_USB_PATH): cv.string,
}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
vol.All(
cv.deprecated(CONF_USB_PATH),
cv.deprecated(CONF_BAUDRATE),
cv.deprecated(CONF_RADIO_TYPE),
ZHA_CONFIG_SCHEMA,
),
),
},
extra=vol.ALLOW_EXTRA,
)
# Zigbee definitions
CENTICELSIUS = "C-100"
# Internal definitions
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Set up ZHA from config."""
hass.data[DATA_ZHA] = {}
if DOMAIN in config:
conf = config[DOMAIN]
hass.data[DATA_ZHA][DATA_ZHA_CONFIG] = conf
return True
async def async_setup_entry(hass, config_entry):
"""Set up ZHA.
Will automatically load components to support devices found on the network.
"""
zha_data = hass.data.setdefault(DATA_ZHA, {})
config = zha_data.get(DATA_ZHA_CONFIG, {})
for component in COMPONENTS:
zha_data.setdefault(component, [])
if config.get(CONF_ENABLE_QUIRKS, True):
# needs to be done here so that the ZHA module is finished loading
# before zhaquirks is imported
import zhaquirks # noqa: F401 pylint: disable=unused-import, import-outside-toplevel, import-error
zha_gateway = ZHAGateway(hass, config, config_entry)
await zha_gateway.async_initialize()
zha_data[DATA_ZHA_DISPATCHERS] = []
zha_data[DATA_ZHA_PLATFORM_LOADED] = []
for component in COMPONENTS:
coro = hass.config_entries.async_forward_entry_setup(config_entry, component)
zha_data[DATA_ZHA_PLATFORM_LOADED].append(hass.async_create_task(coro))
device_registry = await hass.helpers.device_registry.async_get_registry()
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(CONNECTION_ZIGBEE, str(zha_gateway.application_controller.ieee))},
identifiers={(DOMAIN, str(zha_gateway.application_controller.ieee))},
name="Zigbee Coordinator",
manufacturer="ZHA",
model=zha_gateway.radio_description,
)
api.async_load_api(hass)
async def async_zha_shutdown(event):
"""Handle shutdown tasks."""
await zha_data[DATA_ZHA_GATEWAY].shutdown()
await zha_data[DATA_ZHA_GATEWAY].async_update_device_storage()
hass.bus.async_listen_once(ha_const.EVENT_HOMEASSISTANT_STOP, async_zha_shutdown)
asyncio.create_task(async_load_entities(hass))
return True
async def async_unload_entry(hass, config_entry):
"""Unload ZHA config entry."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].shutdown()
GROUP_PROBE.cleanup()
api.async_unload_api(hass)
dispatchers = hass.data[DATA_ZHA].get(DATA_ZHA_DISPATCHERS, [])
for unsub_dispatcher in dispatchers:
unsub_dispatcher()
for component in COMPONENTS:
await hass.config_entries.async_forward_entry_unload(config_entry, component)
return True
async def async_load_entities(hass: HomeAssistantType) -> None:
"""Load entities after integration was setup."""
await hass.data[DATA_ZHA][DATA_ZHA_GATEWAY].async_initialize_devices_and_entities()
to_setup = hass.data[DATA_ZHA][DATA_ZHA_PLATFORM_LOADED]
results = await asyncio.gather(*to_setup, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
_LOGGER.warning("Couldn't setup zha platform: %s", res)
async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
async def async_migrate_entry(
hass: HomeAssistantType, config_entry: config_entries.ConfigEntry
):
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)
if config_entry.version == 1:
data = {
CONF_RADIO_TYPE: config_entry.data[CONF_RADIO_TYPE],
CONF_DEVICE: {CONF_DEVICE_PATH: config_entry.data[CONF_USB_PATH]},
}
baudrate = hass.data[DATA_ZHA].get(DATA_ZHA_CONFIG, {}).get(CONF_BAUDRATE)
if data[CONF_RADIO_TYPE] != RadioType.deconz and baudrate in BAUD_RATES:
data[CONF_DEVICE][CONF_BAUDRATE] = baudrate
config_entry.version = 2
hass.config_entries.async_update_entry(config_entry, data=data)
_LOGGER.info("Migration to version %s successful", config_entry.version)
return True
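# Hedged illustration of the v1 -> v2 entry data migration above (the radio
# type and serial path values are made up): the flat v1 mapping gains a
# nested CONF_DEVICE dict holding the serial path.
_MIGRATION_EXAMPLE_V1 = {CONF_RADIO_TYPE: "deconz", CONF_USB_PATH: "/dev/ttyUSB0"}
_MIGRATION_EXAMPLE_V2 = {
    CONF_RADIO_TYPE: _MIGRATION_EXAMPLE_V1[CONF_RADIO_TYPE],
    CONF_DEVICE: {CONF_DEVICE_PATH: _MIGRATION_EXAMPLE_V1[CONF_USB_PATH]},
}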
|
import lemur_statsd as plug
from flask import current_app
from lemur.plugins.bases.metric import MetricPlugin
from datadog import DogStatsd
class StatsdMetricPlugin(MetricPlugin):
title = "Statsd"
slug = "statsd-metrics"
description = "Adds support for sending metrics to Statsd"
version = plug.VERSION
def __init__(self):
host = current_app.config.get("STATSD_HOST")
port = current_app.config.get("STATSD_PORT")
prefix = current_app.config.get("STATSD_PREFIX")
self.statsd = DogStatsd(host=host, port=port, namespace=prefix)
def submit(
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
):
valid_types = ["COUNTER", "GAUGE", "TIMER"]
tags = []
if metric_type.upper() not in valid_types:
raise Exception(
"Invalid Metric Type for Statsd, '{metric}' choose from: {options}".format(
metric=metric_type, options=",".join(valid_types)
)
)
if metric_tags:
if not isinstance(metric_tags, dict):
raise Exception(
"Invalid Metric Tags for Statsd: Tags must be in dict format"
)
            else:
                tags = ["{0}:{1}".format(*e) for e in metric_tags.items()]
if metric_type.upper() == "COUNTER":
self.statsd.increment(metric_name, metric_value, tags)
elif metric_type.upper() == "GAUGE":
self.statsd.gauge(metric_name, metric_value, tags)
elif metric_type.upper() == "TIMER":
self.statsd.timing(metric_name, metric_value, tags)
return
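# Standalone sketch of the tag conversion performed in submit() above (tag
# names are illustrative): a dict of tags becomes the list of "key:value"
# strings that DogStatsd expects.
_EXAMPLE_TAGS = {"env": "prod", "team": "infra"}
assert sorted("{0}:{1}".format(*e) for e in _EXAMPLE_TAGS.items()) == [
    "env:prod",
    "team:infra",
]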
|
import logging
from collections import namedtuple
from datetime import datetime
from time import time as get_time
from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError
from paasta_tools.clusterman import get_clusterman_metrics
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import ZookeeperPool
clusterman_metrics, __ = get_clusterman_metrics()
DEFAULT_BOOST_FACTOR = 1.5
DEFAULT_BOOST_DURATION = 40
MIN_BOOST_FACTOR = 1.0
MAX_BOOST_FACTOR = 3.0
MAX_BOOST_DURATION = 240
BoostValues = namedtuple("BoostValues", ["end_time", "boost_factor", "expected_load"])
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
def get_zk_cluster_boost_path(region: str, pool: str) -> str:
return f"/paasta_cluster_autoscaler/{region}/{pool}/boost"
def get_boosted_load(zk_boost_path: str, current_load: float) -> float:
"""Return the load to use for autoscaling calculations, taking into
account the computed boost, if any.
This function will fail gracefully no matter what (returning the current load)
so we don't block the autoscaler.
"""
try:
current_time = get_time()
with ZookeeperPool() as zk:
boost_values = get_boost_values(zk_boost_path, zk)
if current_time >= boost_values.end_time:
# If there is an expected_load value, that means we've just completed
# a boost period. Reset it to 0
if boost_values.expected_load > 0:
zk.set(zk_boost_path + "/expected_load", "0".encode("utf-8"))
# Boost is no longer active - return current load with no boost
return current_load
# Boost is active. If expected load wasn't already computed, set it now.
if boost_values.expected_load == 0:
expected_load = current_load * boost_values.boost_factor
log.debug(
f"Activating boost, storing expected load: {expected_load} in ZooKeeper"
)
zk.ensure_path(zk_boost_path + "/expected_load")
zk.set(
zk_boost_path + "/expected_load", str(expected_load).encode("utf-8")
)
else:
expected_load = boost_values.expected_load
# We return the boosted expected_load, but only if the current load isn't greater.
return expected_load if expected_load > current_load else current_load
except Exception as e:
# Fail gracefully in the face of ANY error
log.error(f"get_boost failed with: {e}")
return current_load
def get_boost_factor(zk_boost_path: str) -> float:
"""This function returns the boost factor value if a boost is active
"""
current_time = get_time()
with ZookeeperPool() as zk:
boost_values = get_boost_values(zk_boost_path, zk)
if current_time < boost_values.end_time:
return boost_values.boost_factor
else:
return 1.0
def get_boost_values(zk_boost_path: str, zk: KazooClient) -> BoostValues:
# Default values, non-boost.
end_time: float = 0
boost_factor: float = 1.0
expected_load: float = 0
try:
end_time = float(zk.get(zk_boost_path + "/end_time")[0].decode("utf-8"))
boost_factor = float(zk.get(zk_boost_path + "/factor")[0].decode("utf-8"))
expected_load = float(
zk.get(zk_boost_path + "/expected_load")[0].decode("utf-8")
)
except NoNodeError:
# If we can't read boost values from zookeeper
return BoostValues(end_time=0, boost_factor=1.0, expected_load=0)
return BoostValues(
end_time=end_time, boost_factor=boost_factor, expected_load=expected_load
)
def set_boost_factor(
zk_boost_path: str,
region: str = "",
pool: str = "",
send_clusterman_metrics: bool = True,
factor: float = DEFAULT_BOOST_FACTOR,
duration_minutes: int = DEFAULT_BOOST_DURATION,
override: bool = False,
) -> bool:
"""
    Set a boost factor for a path in ZooKeeper.
    Can be used to boost either cluster or service autoscalers.
    When boosting a cluster autoscaler, specify region and pool and set
    send_clusterman_metrics=True so that clusterman metrics are updated;
    otherwise zk_boost_path alone is enough.
"""
if factor < MIN_BOOST_FACTOR:
log.error(f"Cannot set a boost factor smaller than {MIN_BOOST_FACTOR}")
return False
if not override and factor > MAX_BOOST_FACTOR:
log.warning(
"Boost factor {} does not sound reasonable. Defaulting to {}".format(
factor, MAX_BOOST_FACTOR
)
)
factor = MAX_BOOST_FACTOR
if duration_minutes > MAX_BOOST_DURATION:
log.warning(
"Boost duration of {} minutes is too much. Falling back to {}.".format(
duration_minutes, MAX_BOOST_DURATION
)
)
duration_minutes = MAX_BOOST_DURATION
current_time = get_time()
end_time = current_time + 60 * duration_minutes
if clusterman_metrics and send_clusterman_metrics:
cluster = load_system_paasta_config().get_cluster()
metrics_client = clusterman_metrics.ClustermanMetricsBotoClient(
region_name=region, app_identifier=pool
)
with metrics_client.get_writer(clusterman_metrics.APP_METRICS) as writer:
metrics_key = clusterman_metrics.generate_key_with_dimensions(
"boost_factor", {"cluster": cluster, "pool": pool}
)
writer.send((metrics_key, current_time, factor))
if duration_minutes > 0:
writer.send((metrics_key, end_time, 1.0))
zk_end_time_path = zk_boost_path + "/end_time"
zk_factor_path = zk_boost_path + "/factor"
zk_expected_load_path = zk_boost_path + "/expected_load"
with ZookeeperPool() as zk:
if not override and current_time < get_boost_values(zk_boost_path, zk).end_time:
log.error("Boost already active. Not overriding.")
return False
try:
zk.ensure_path(zk_end_time_path)
zk.ensure_path(zk_factor_path)
zk.ensure_path(zk_expected_load_path)
zk.set(zk_end_time_path, str(end_time).encode("utf-8"))
zk.set(zk_factor_path, str(factor).encode("utf-8"))
zk.set(zk_expected_load_path, "0".encode("utf-8"))
except Exception:
log.error("Error setting the boost in Zookeeper")
raise
log.info(
"Load boost: Set capacity boost factor {} at path {} until {}".format(
factor, zk_boost_path, datetime.fromtimestamp(end_time).strftime("%c")
)
)
# Let's check that this factor has been properly written to zk
return get_boost_values(zk_boost_path, zk) == BoostValues(
end_time=end_time, boost_factor=factor, expected_load=0
)
def clear_boost(
zk_boost_path: str,
region: str = "",
pool: str = "",
send_clusterman_metrics: bool = True,
) -> bool:
return set_boost_factor(
zk_boost_path,
region=region,
pool=pool,
send_clusterman_metrics=send_clusterman_metrics,
factor=1,
duration_minutes=0,
override=True,
)
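# Worked example of the boost arithmetic used by get_boosted_load() above
# (numbers are illustrative; this block is a standalone sketch, not used by
# the functions in this module): with the default boost factor of 1.5 and a
# current load of 100, the stored expected load is 150, and the boosted load
# returned is max(current_load, expected_load) until the boost window ends.
_example_current_load = 100.0
_example_expected_load = _example_current_load * DEFAULT_BOOST_FACTOR
assert _example_expected_load == 150.0
assert max(_example_current_load, _example_expected_load) == 150.0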
|
import contextlib
import inspect
from io import StringIO
import re
import sys
import logging
import os.path as op
import warnings
from ..externals.decorator import FunctionMaker
logger = logging.getLogger('mne') # one selection here used across mne-python
logger.propagate = False # don't propagate (in case of multiple imports)
# class to provide frame information (should be low overhead, just on logger
# calls)
class _FrameFilter(logging.Filter):
def __init__(self):
self.add_frames = 0
def filter(self, record):
record.frame_info = 'Unknown'
if self.add_frames:
# 5 is the offset necessary to get out of here and the logging
# module, reversal is to put the oldest at the top
frame_info = _frame_info(5 + self.add_frames)[5:][::-1]
if len(frame_info):
frame_info[-1] = (frame_info[-1] + ' :').ljust(30)
if len(frame_info) > 1:
frame_info[0] = '┌' + frame_info[0]
frame_info[-1] = '└' + frame_info[-1]
for ii, info in enumerate(frame_info[1:-1], 1):
frame_info[ii] = '├' + info
record.frame_info = '\n'.join(frame_info)
return True
_filter = _FrameFilter()
logger.addFilter(_filter)
def verbose(function):
"""Verbose decorator to allow functions to override log-level.
Parameters
----------
function : callable
Function to be decorated by setting the verbosity level.
Returns
-------
dec : callable
The decorated function.
See Also
--------
set_log_level
set_config
Notes
-----
This decorator is used to set the verbose level during a function or method
call, such as :func:`mne.compute_covariance`. The `verbose` keyword
argument can be 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', True (an
alias for 'INFO'), or False (an alias for 'WARNING'). To set the global
verbosity level for all functions, use :func:`mne.set_log_level`.
This function also serves as a docstring filler.
Examples
--------
You can use the ``verbose`` argument to set the verbose level on the fly::
>>> import mne
>>> cov = mne.compute_raw_covariance(raw, verbose='WARNING') # doctest: +SKIP
>>> cov = mne.compute_raw_covariance(raw, verbose='INFO') # doctest: +SKIP
Using up to 49 segments
Number of samples used : 5880
[done]
""" # noqa: E501
# See https://decorator.readthedocs.io/en/latest/tests.documentation.html
# #dealing-with-third-party-decorators
from .docs import fill_doc
try:
fill_doc(function)
except TypeError: # nothing to add
pass
# Anything using verbose should either have `verbose=None` in the signature
# or have a `self.verbose` attribute (if in a method). This code path
# will raise an error if neither is the case.
body = """\
def %(name)s(%(signature)s):\n
try:
verbose
except UnboundLocalError:
try:
verbose = self.verbose
except NameError:
raise RuntimeError('Function %%s does not accept verbose parameter'
%% (_function_,))
except AttributeError:
raise RuntimeError('Method %%s class does not have self.verbose'
%% (_function_,))
else:
if verbose is None:
try:
verbose = self.verbose
except (NameError, AttributeError):
pass
if verbose is not None:
with _use_log_level_(verbose):
return _function_(%(shortsignature)s)
else:
return _function_(%(shortsignature)s)"""
evaldict = dict(
_use_log_level_=use_log_level, _function_=function)
fm = FunctionMaker(function, None, None, None, None, function.__module__)
attrs = dict(__wrapped__=function, __qualname__=function.__qualname__)
return fm.make(body, evaldict, addsource=True, **attrs)
class use_log_level(object):
"""Context handler for logging level.
Parameters
----------
level : int
The level to use.
add_frames : int | None
Number of stack frames to include.
"""
def __init__(self, level, add_frames=None): # noqa: D102
self.level = level
self.add_frames = add_frames
self.old_frames = _filter.add_frames
def __enter__(self): # noqa: D105
self.old_level = set_log_level(self.level, True, self.add_frames)
def __exit__(self, *args): # noqa: D105
add_frames = self.old_frames if self.add_frames is not None else None
set_log_level(self.old_level, add_frames=add_frames)
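def _use_log_level_example():  # pragma: no cover - illustrative sketch only
    """Hedged usage sketch for ``use_log_level`` (the level name is arbitrary).

    Wrapped in a function so nothing runs at import time: the requested level
    is applied on entry and the previous level is restored on exit.
    """
    with use_log_level('ERROR'):
        logger.info('suppressed while the temporary ERROR level is active')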
def set_log_level(verbose=None, return_old_level=False, add_frames=None):
"""Set the logging level.
Parameters
----------
verbose : bool, str, int, or None
The verbosity of messages to print. If a str, it can be either DEBUG,
INFO, WARNING, ERROR, or CRITICAL. Note that these are for
convenience and are equivalent to passing in logging.DEBUG, etc.
For bool, True is the same as 'INFO', False is the same as 'WARNING'.
If None, the environment variable MNE_LOGGING_LEVEL is read, and if
it doesn't exist, defaults to INFO.
return_old_level : bool
If True, return the old verbosity level.
add_frames : int | None
If int, enable (>=1) or disable (0) the printing of stack frame
information using formatting. Default (None) does not change the
formatting. This can add overhead so is meant only for debugging.
Returns
-------
old_level : int
The old level. Only returned if ``return_old_level`` is True.
"""
from .config import get_config
from .check import _check_option, _validate_type
_validate_type(verbose, (bool, str, int, None), 'verbose')
if verbose is None:
verbose = get_config('MNE_LOGGING_LEVEL', 'INFO')
elif isinstance(verbose, bool):
if verbose is True:
verbose = 'INFO'
else:
verbose = 'WARNING'
if isinstance(verbose, str):
verbose = verbose.upper()
logging_types = dict(DEBUG=logging.DEBUG, INFO=logging.INFO,
WARNING=logging.WARNING, ERROR=logging.ERROR,
CRITICAL=logging.CRITICAL)
_check_option('verbose', verbose, logging_types, '(when a string)')
verbose = logging_types[verbose]
old_verbose = logger.level
if verbose != old_verbose:
logger.setLevel(verbose)
if add_frames is not None:
_filter.add_frames = int(add_frames)
fmt = '%(frame_info)s ' if add_frames else ''
fmt += '%(message)s'
fmt = logging.Formatter(fmt)
for handler in logger.handlers:
handler.setFormatter(fmt)
return (old_verbose if return_old_level else None)
def set_log_file(fname=None, output_format='%(message)s', overwrite=None):
"""Set the log to print to a file.
Parameters
----------
fname : str, or None
Filename of the log to print to. If None, stdout is used.
To suppress log outputs, use set_log_level('WARN').
output_format : str
Format of the output messages. See the following for examples:
https://docs.python.org/dev/howto/logging.html
e.g., "%(asctime)s - %(levelname)s - %(message)s".
overwrite : bool | None
Overwrite the log file (if it exists). Otherwise, statements
will be appended to the log (default). None is the same as False,
but additionally raises a warning to notify the user that log
entries will be appended.
"""
_remove_close_handlers(logger)
if fname is not None:
if op.isfile(fname) and overwrite is None:
# Don't use warn() here because we just want to
# emit a warnings.warn here (not logger.warn)
warnings.warn('Log entries will be appended to the file. Use '
'overwrite=False to avoid this message in the '
'future.', RuntimeWarning, stacklevel=2)
overwrite = False
mode = 'w' if overwrite else 'a'
lh = logging.FileHandler(fname, mode=mode)
else:
""" we should just be able to do:
lh = logging.StreamHandler(sys.stdout)
        but because doctest uses some magic on stdout, we have to do this:
"""
lh = logging.StreamHandler(WrapStdOut())
lh.setFormatter(logging.Formatter(output_format))
# actually add the stream handler
logger.addHandler(lh)
def _remove_close_handlers(logger):
for h in list(logger.handlers):
# only remove our handlers (get along nicely with nose)
if isinstance(h, (logging.FileHandler, logging.StreamHandler)):
if isinstance(h, logging.FileHandler):
h.close()
logger.removeHandler(h)
class ClosingStringIO(StringIO):
"""StringIO that closes after getvalue()."""
def getvalue(self, close=True):
"""Get the value."""
out = super().getvalue()
if close:
self.close()
return out
class catch_logging(object):
"""Store logging.
This will remove all other logging handlers, and return the handler to
stdout when complete.
"""
def __enter__(self): # noqa: D105
self._data = ClosingStringIO()
self._lh = logging.StreamHandler(self._data)
self._lh.setFormatter(logging.Formatter('%(message)s'))
self._lh._mne_file_like = True # monkey patch for warn() use
_remove_close_handlers(logger)
logger.addHandler(self._lh)
return self._data
def __exit__(self, *args): # noqa: D105
logger.removeHandler(self._lh)
set_log_file(None)
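def _catch_logging_example():  # pragma: no cover - illustrative sketch only
    """Hedged usage sketch for ``catch_logging`` (the message text is arbitrary).

    Everything logged while the context is open is captured in the returned
    buffer instead of going to stdout.
    """
    with catch_logging() as log:
        logger.warning('captured')
    assert 'captured' in log.getvalue()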
class WrapStdOut(object):
"""Dynamically wrap to sys.stdout.
This makes packages that monkey-patch sys.stdout (e.g.doctest,
sphinx-gallery) work properly.
"""
def __getattr__(self, name): # noqa: D105
# Even more ridiculous than this class, this must be sys.stdout (not
# just stdout) in order for this to work (tested on OSX and Linux)
if hasattr(sys.stdout, name):
return getattr(sys.stdout, name)
else:
raise AttributeError("'file' object has not attribute '%s'" % name)
_verbose_dec_re = re.compile('^<decorator-gen-[0-9]+>$')
def warn(message, category=RuntimeWarning, module='mne'):
"""Emit a warning with trace outside the mne namespace.
This function takes arguments like warnings.warn, and sends messages
using both ``warnings.warn`` and ``logger.warn``. Warnings can be
generated deep within nested function calls. In order to provide a
more helpful warning, this function traverses the stack until it
reaches a frame outside the ``mne`` namespace that caused the error.
Parameters
----------
message : str
Warning message.
category : instance of Warning
The warning class. Defaults to ``RuntimeWarning``.
module : str
The name of the module emitting the warning.
"""
import mne
root_dir = op.dirname(mne.__file__)
frame = None
if logger.level <= logging.WARN:
frame = inspect.currentframe()
while frame:
fname = frame.f_code.co_filename
lineno = frame.f_lineno
# in verbose dec
if not _verbose_dec_re.search(fname):
# treat tests as scripts
# and don't capture unittest/case.py (assert_raises)
if not (fname.startswith(root_dir) or
('unittest' in fname and 'case' in fname)) or \
op.basename(op.dirname(fname)) == 'tests':
break
frame = frame.f_back
del frame
# We need to use this instead of warn(message, category, stacklevel)
# because we move out of the MNE stack, so warnings won't properly
# recognize the module name (and our warnings.simplefilter will fail)
warnings.warn_explicit(
message, category, fname, lineno, module,
globals().get('__warningregistry__', {}))
# To avoid a duplicate warning print, we only emit the logger.warning if
# one of the handlers is a FileHandler. See gh-5592
if any(isinstance(h, logging.FileHandler) or getattr(h, '_mne_file_like',
False)
for h in logger.handlers):
logger.warning(message)
def _get_call_line():
"""Get the call line from within a function."""
frame = inspect.currentframe().f_back.f_back
if _verbose_dec_re.search(frame.f_code.co_filename):
frame = frame.f_back
context = inspect.getframeinfo(frame).code_context
context = 'unknown' if context is None else context[0].strip()
return context
def filter_out_warnings(warn_record, category=None, match=None):
r"""Remove particular records from ``warn_record``.
This helper takes a list of :class:`warnings.WarningMessage` objects,
and remove those matching category and/or text.
Parameters
----------
category: WarningMessage type | None
class of the message to filter out
match : str | None
text or regex that matches the error message to filter out
Examples
--------
This can be used as::
>>> import pytest
>>> import warnings
>>> from mne.utils import filter_out_warnings
>>> with pytest.warns(None) as recwarn:
... warnings.warn("value must be 0 or None", UserWarning)
>>> filter_out_warnings(recwarn, match=".* 0 or None")
>>> assert len(recwarn.list) == 0
>>> with pytest.warns(None) as recwarn:
... warnings.warn("value must be 42", UserWarning)
>>> filter_out_warnings(recwarn, match=r'.* must be \d+$')
>>> assert len(recwarn.list) == 0
>>> with pytest.warns(None) as recwarn:
... warnings.warn("this is not here", UserWarning)
>>> filter_out_warnings(recwarn, match=r'.* must be \d+$')
>>> assert len(recwarn.list) == 1
"""
regexp = re.compile('.*' if match is None else match)
is_category = [w.category == category if category is not None else True
for w in warn_record._list]
is_match = [regexp.match(w.message.args[0]) is not None
for w in warn_record._list]
ind = [ind for ind, (c, m) in enumerate(zip(is_category, is_match))
if c and m]
for i in reversed(ind):
warn_record._list.pop(i)
class ETSContext(object):
"""Add more meaningful message to errors generated by ETS Toolkit."""
def __enter__(self): # noqa: D105
pass
def __exit__(self, type, value, traceback): # noqa: D105
if isinstance(value, SystemExit) and value.code.\
startswith("This program needs access to the screen"):
value.code += ("\nThis can probably be solved by setting "
"ETS_TOOLKIT=qt4. On bash, type\n\n $ export "
"ETS_TOOLKIT=qt4\n\nand run the command again.")
@contextlib.contextmanager
def wrapped_stdout(indent='', cull_newlines=False):
"""Wrap stdout writes to logger.info, with an optional indent prefix.
Parameters
----------
indent : str
The indentation to add.
cull_newlines : bool
If True, cull any new/blank lines at the end.
"""
orig_stdout = sys.stdout
my_out = ClosingStringIO()
sys.stdout = my_out
try:
yield
finally:
sys.stdout = orig_stdout
pending_newlines = 0
for line in my_out.getvalue().split('\n'):
if not line.strip() and cull_newlines:
pending_newlines += 1
continue
for _ in range(pending_newlines):
logger.info('\n')
logger.info(indent + line)
def _frame_info(n):
frame = inspect.currentframe()
try:
frame = frame.f_back
infos = list()
for _ in range(n):
try:
name = frame.f_globals['__name__']
except KeyError: # in our verbose dec
pass
else:
infos.append(f'{name.lstrip("mne.")}:{frame.f_lineno}')
frame = frame.f_back
if frame is None:
break
return infos
except Exception:
return ['unknown']
finally:
del frame
|
import os
import sys
# -- Path setup --------------------------------------------------------------
# sys.path.insert(0, os.path.abspath('.'))
# Our extension
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "_ext")))
# Weblate code
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
def setup(app):
app.add_css_file("https://s.weblate.org/cdn/font-source/source-sans-pro.css")
app.add_css_file("https://s.weblate.org/cdn/font-source/source-code-pro.css")
app.add_css_file("docs.css")
# Used in Sphinx docs, needed for intersphinx links to it
app.add_object_type(
"confval",
"confval",
objname="configuration value",
indextemplate="pair: %s; configuration value",
)
# -- Project information -----------------------------------------------------
project = "Weblate"
copyright = "2012 - 2020 Michal Čihař"
author = "Michal Čihař"
# The full version, including alpha/beta/rc tags
release = "4.4.1"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"djangodocs",
"sphinxcontrib.httpdomain",
"sphinx.ext.autodoc",
"sphinx.ext.graphviz",
"sphinx.ext.intersphinx",
"sphinx-jsonschema",
"sphinx_rtd_theme",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "admin/install/steps/*.rst"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["../weblate/static/"]
html_logo = "../weblate/static/logo-128.png"
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "Weblatedoc"
# -- Options for LaTeX output ------------------------------------------------
PREAMBLE = r"""
\pagestyle{fancy}
\setcounter{tocdepth}{1}
\usepackage{hyperref}
"""
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
"papersize": "a4paper",
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
"preamble": PREAMBLE,
# Avoid opening chapter only on even pages
"extraclassoptions": "openany",
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
("latexindex", "Weblate.tex", "The Weblate Manual", author, "manual")
]
# Include logo on title page
latex_logo = "../weblate/static/logo-1024.png"
# Use xelatex engine for better unicode support
latex_engine = "xelatex"
# Disable using xindy as it does not work on readthedocs.org
latex_use_xindy = False
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("wlc", "wlc", "Weblate Client Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"Weblate",
project,
author,
"Weblate",
"One line description of project.",
"Miscellaneous",
)
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
graphviz_output_format = "svg"
# Configuration for intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3.7", None),
"django": (
"https://docs.djangoproject.com/en/stable/",
"https://docs.djangoproject.com/en/stable/_objects/",
),
"psa": ("https://python-social-auth.readthedocs.io/en/latest/", None),
"tt": (
"http://docs.translatehouse.org/projects/translate-toolkit/en/latest/",
None,
),
"amagama": ("https://docs.translatehouse.org/projects/amagama/en/latest/", None),
"virtaal": ("http://docs.translatehouse.org/projects/virtaal/en/latest/", None),
"ldap": ("https://django-auth-ldap.readthedocs.io/en/latest/", None),
"celery": ("https://docs.celeryproject.org/en/latest/", None),
"sphinx": ("https://www.sphinx-doc.org/en/stable/", None),
"rtd": ("https://docs.readthedocs.io/en/latest/", None),
"venv": ("https://virtualenv.pypa.io/en/stable/", None),
"borg": ("https://borgbackup.readthedocs.io/en/stable/", None),
"pip": ("https://pip.pypa.io/en/stable/", None),
"compressor": ("https://django-compressor.readthedocs.io/en/stable/", None),
}
# Ignore missing targets for the http:obj <type>, it's how we declare the types
# for input/output fields in the API docs.
nitpick_ignore = [
("http:obj", "array"),
("http:obj", "boolean"),
("http:obj", "int"),
("http:obj", "float"),
("http:obj", "object"),
("http:obj", "string"),
("http:obj", "timestamp"),
("http:obj", "file"),
]
# Number of retries and timeout for linkcheck
linkcheck_retries = 10
linkcheck_timeout = 10
linkcheck_ignore = ["http://127.0.0.1:8080/"]
# HTTP docs
http_index_ignore_prefixes = ["/api/"]
http_strict_mode = True
# Autodocs
autodoc_mock_imports = [
"django",
"celery",
"sentry_sdk",
"crispy_forms",
"weblate.trans.discovery",
"weblate.checks.models",
"weblate.trans.forms",
"weblate.addons.forms",
"weblate.trans.tasks",
"dateutil",
"filelock",
"lxml",
"translate",
"siphashc",
"git",
"PIL",
"weblate.addons.models",
"weblate.trans.models",
"weblate.lang.models",
"weblate.vcs.git",
"weblate.utils.files",
]
# Gettext / i18n
gettext_compact = "docs"
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import typing
import six
from .pages import PageIterator
if typing.TYPE_CHECKING:
from typing import Any, Dict, List, Optional, Iterable, Set, Text
_I = typing.TypeVar('_I', bound='MediasIterator')
__all__ = [
"MediasIterator",
"TimedMediasIterator",
]
class MediasIterator(typing.Iterator[typing.Dict[typing.Text, typing.Any]]):
"""An iterator over the medias obtained from a page iterator.
"""
def __init__(self, page_iterator):
# type: (Iterable[Dict[Text, Any]]) -> None
self._it = iter(page_iterator)
self._seen = set() # type: Set[Text]
self._edges = [] # type: List[Dict[Text, Dict[Text, Any]]]
self._finished = False
self._total = None # type: Optional[int]
self._done = 0
def __iter__(self):
# type: (_I) -> _I
return self
def _next_page(self):
# type: () -> Dict[Text, Any]
data = next(self._it)
section = next(s for s in six.iterkeys(data) if s.endswith('_media'))
return data[section]
def __next__(self):
# type: () -> Dict[Text, Any]
if self._finished:
raise StopIteration
if not self._edges:
page = self._next_page()
self._total = page['count']
self._edges.extend(page['edges'])
if not page['edges']:
raise StopIteration
media = self._edges.pop(0)
self._done += 1
if media['node']['id'] in self._seen:
self._finished = True
self._seen.add(media['node']['id'])
return media['node']
def __length_hint__(self):
if self._total is None:
try:
page = self._next_page()
self._total = page['count']
self._edges.extend(page['edges'])
except StopIteration:
self._total = 0
return self._total - self._done
if six.PY2:
next = __next__
class TimedMediasIterator(MediasIterator):
"""An iterator over the medias within a specific timeframe.
"""
@staticmethod
def get_times(timeframe):
if timeframe is None:
timeframe = (None, None)
try:
start_time = timeframe[0] or datetime.date.today()
end_time = timeframe[1] or datetime.date.fromtimestamp(0)
except (IndexError, AttributeError):
raise TypeError("'timeframe' must be a couple of dates!")
return start_time, end_time
def __init__(self, page_iterator, timeframe=None):
super(TimedMediasIterator, self).__init__(page_iterator)
self.start_time, self.end_time = self.get_times(timeframe)
def __next__(self):
number_old = 0
while True:
media = super(TimedMediasIterator, self).__next__()
timestamp = media.get('taken_at_timestamp') or media['date']
media_date = type(self.start_time).fromtimestamp(timestamp)
if self.start_time >= media_date >= self.end_time:
return media
elif media_date < self.end_time:
number_old += 1
if number_old >= PageIterator.PAGE_SIZE:
self._finished = True
raise StopIteration
if six.PY2:
next = __next__
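# Hedged sketch of the timeframe normalisation in get_times() above (the
# dates are illustrative): a missing upper bound falls back to the UNIX epoch
# and a missing lower bound to today, so (start, None) means "everything
# newer than start".
_example_start, _example_end = TimedMediasIterator.get_times(
    (datetime.date(2020, 1, 1), None)
)
assert _example_start == datetime.date(2020, 1, 1)
assert _example_end == datetime.date.fromtimestamp(0)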
|
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
def test_check_with_rule(validator):
def check_with_name(field, value, error):
if not value.islower():
error(field, 'must be lowercase')
validator.schema = {
'name': {'check_with': check_with_name},
'age': {'type': 'integer'},
}
assert_fail(
{'name': 'ItsMe', 'age': 2},
validator=validator,
error=('name', (), errors.CUSTOM, None, ('must be lowercase',)),
)
assert validator.errors == {'name': ['must be lowercase']}
assert_success({'name': 'itsme', 'age': 2}, validator=validator)
|
from homeassistant import data_entry_flow
from homeassistant.components.totalconnect.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
USERNAME = "[email protected]"
PASSWORD = "password"
async def test_user(hass):
"""Test user config."""
# no data provided so show the form
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# now data is provided, so check if login is correct and create the entry
with patch(
"homeassistant.components.totalconnect.config_flow.TotalConnectClient.TotalConnectClient"
) as client_mock:
client_mock.return_value.is_valid_credentials.return_value = True
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_import(hass):
"""Test import step with good username and password."""
with patch(
"homeassistant.components.totalconnect.config_flow.TotalConnectClient.TotalConnectClient"
) as client_mock:
client_mock.return_value.is_valid_credentials.return_value = True
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
async def test_abort_if_already_setup(hass):
"""Test abort if the account is already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
unique_id=USERNAME,
).add_to_hass(hass)
# Should fail, same USERNAME (import)
with patch(
"homeassistant.components.totalconnect.config_flow.TotalConnectClient.TotalConnectClient"
) as client_mock:
client_mock.return_value.is_valid_credentials.return_value = True
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
# Should fail, same USERNAME (flow)
with patch(
"homeassistant.components.totalconnect.config_flow.TotalConnectClient.TotalConnectClient"
) as client_mock:
client_mock.return_value.is_valid_credentials.return_value = True
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_login_failed(hass):
"""Test when we have errors during login."""
with patch(
"homeassistant.components.totalconnect.config_flow.TotalConnectClient.TotalConnectClient"
) as client_mock:
client_mock.return_value.is_valid_credentials.return_value = False
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
|
from homeassistant import config_entries
from . import home_assistant_cast
from .const import DOMAIN
async def async_setup(hass, config):
"""Set up the Cast component."""
conf = config.get(DOMAIN)
hass.data[DOMAIN] = conf or {}
if conf is not None:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry: config_entries.ConfigEntry):
"""Set up Cast from a config entry."""
await home_assistant_cast.async_setup_ha_cast(hass, entry)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "media_player")
)
return True
|
from __future__ import print_function
import os
import select
import socket
import struct
import sys
import time
import argparse
from six.moves import xrange
# On Windows, the best timer is time.clock()
# On most other platforms the best timer is time.time()
default_timer = time.clock if sys.platform == "win32" else time.time
# From /usr/include/linux/icmp.h; your mileage may vary.
ICMP_ECHO_REQUEST = 8 # Seems to be the same on Solaris.
def checksum(source_string):
"""
I'm not too confident that this is right but testing seems
to suggest that it gives the same answers as in_cksum in ping.c
"""
sum = 0
countTo = (len(source_string) / 2) * 2
count = 0
while count < countTo:
v1 = source_string[count + 1]
if not isinstance(v1, int):
v1 = ord(v1)
v2 = source_string[count]
if not isinstance(v2, int):
v2 = ord(v2)
thisVal = v1 * 256 + v2
sum = sum + thisVal
sum = sum & 0xffffffff # Necessary?
count = count + 2
if countTo < len(source_string):
sum = sum + ord(source_string[len(source_string) - 1])
sum = sum & 0xffffffff # Necessary?
sum = (sum >> 16) + (sum & 0xffff)
sum = sum + (sum >> 16)
answer = ~sum
answer = answer & 0xffff
# Swap bytes. Bugger me if I know why.
answer = answer >> 8 | (answer << 8 & 0xff00)
return answer
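# Quick self-check of checksum() above (an illustrative sketch, not part of
# the original script): for the two-byte payload 0x01 0x02 the one's
# complement sum is 0xFDFE and the final byte swap yields 0xFEFD.
assert checksum(b"\x01\x02") == 0xFEFD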
def receive_one_ping(my_socket, ID, timeout):
"""
receive the ping from the socket.
"""
timeLeft = timeout
while True:
startedSelect = default_timer()
whatReady = select.select([my_socket], [], [], timeLeft)
howLongInSelect = (default_timer() - startedSelect)
if whatReady[0] == []: # Timeout
return
timeReceived = default_timer()
recPacket, addr = my_socket.recvfrom(1024)
icmpHeader = recPacket[20:28]
type, code, checksum, packetID, sequence = struct.unpack(b"bbHHh", icmpHeader)
# Filters out the echo request itself.
# This can be tested by pinging 127.0.0.1
# You'll see your own request
if type != 8 and packetID == ID:
bytesInDouble = struct.calcsize(b"d")
timeSent = struct.unpack(b"d", recPacket[28:28 + bytesInDouble])[0]
return timeReceived - timeSent
timeLeft = timeLeft - howLongInSelect
if timeLeft <= 0:
return
def send_one_ping(my_socket, dest_addr, ID):
"""
Send one ping to the given >dest_addr<.
"""
dest_addr = socket.gethostbyname(dest_addr)
# Header is type (8), code (8), checksum (16), id (16), sequence (16)
my_checksum = 0
    # Make a dummy header with a 0 checksum.
header = struct.pack(b"bbHHh", ICMP_ECHO_REQUEST, 0, my_checksum, ID, 1)
bytesInDouble = struct.calcsize("d")
data = (192 - bytesInDouble) * b"Q"
data = struct.pack("d", default_timer()) + data
# Calculate the checksum on the data and the dummy header.
my_checksum = checksum(header + data)
# Now that we have the right checksum, we put that in. It's just easier
# to make up a new header than to stuff it into the dummy.
header = struct.pack(b"bbHHh", ICMP_ECHO_REQUEST, 0, socket.htons(my_checksum), ID, 1)
packet = header + data
my_socket.sendto(packet, (dest_addr, 1)) # Don't know about the 1
def do_one(dest_addr, timeout):
"""
Returns either the delay (in seconds) or none on timeout.
"""
icmp = socket.getprotobyname("icmp")
my_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, icmp)
my_ID = os.getpid() & 0xFFFF
send_one_ping(my_socket, dest_addr, my_ID)
delay = receive_one_ping(my_socket, my_ID, timeout)
my_socket.close()
return delay
def verbose_ping(dest_addr, timeout=2, count=4, interval=1.0):
"""
    Send >count< pings to >dest_addr< with the given >timeout< and display
the result.
"""
ping_succeeded = False
for i in xrange(count):
print("ping %s..." % dest_addr, end=' ')
try:
delay = do_one(dest_addr, timeout)
except socket.gaierror as e:
print("failed. (socket error: '%s')" % e[1])
break
        if delay is None:
            print("failed. (timeout within %ssec.)" % timeout)
        else:
            time.sleep(max(0, interval - delay))
print("got ping in %0.4fms\n" % (delay * 1000))
ping_succeeded = True
return ping_succeeded
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="send ICMP ECHO_REQUEST to network hosts")
parser.add_argument("destination", help="host to ping")
parser.add_argument("-W", "--timeout", help="specify a timeout", type=float, default=2)
parser.add_argument("-c", "--count", help="stop after sending this much ECHO_REQUEST packkets", type=int, default=5)
parser.add_argument("-i", "--interval", help="Wait the specified time between each ping", type=float, default=1.0)
ns = parser.parse_args()
s = verbose_ping(ns.destination, ns.timeout, ns.count, ns.interval)
if s:
sys.exit(0)
else:
sys.exit(1)
|
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponsePermanentRedirect
from django.template.defaultfilters import slugify
from django.views.generic.base import TemplateResponseMixin
from django.views.generic.base import View
import django_comments as comments
class CommentSuccess(TemplateResponseMixin, View):
"""
    View for handling the publication of a Comment on an Entry.
Do a redirection if the comment is visible,
else render a confirmation template.
"""
template_name = 'comments/zinnia/entry/posted.html'
def get_context_data(self, **kwargs):
return {'comment': self.comment}
def get(self, request, *args, **kwargs):
self.comment = None
if 'c' in request.GET:
try:
self.comment = comments.get_model().objects.get(
pk=request.GET['c'])
except (ObjectDoesNotExist, ValueError):
pass
if self.comment and self.comment.is_public:
return HttpResponsePermanentRedirect(
self.comment.get_absolute_url(
'#comment-%(id)s-by-') + slugify(self.comment.user_name))
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
|
import sys
import time
from uuid import UUID
import pytest
from cherrypy._cpcompat import text_or_bytes
try:
# On Windows, msvcrt.getch reads a single char without output.
import msvcrt
def getchar():
return msvcrt.getch()
except ImportError:
    # Unix getchar
import tty
import termios
def getchar():
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class LogCase(object):
"""unittest.TestCase mixin for testing log messages.
logfile: a filename for the desired log. Yes, I know modes are evil,
but it makes the test functions so much cleaner to set this once.
lastmarker: the last marker in the log. This can be used to search for
messages since the last marker.
markerPrefix: a string with which to prefix log markers. This should be
unique enough from normal log output to use for marker identification.
"""
interactive = False
logfile = None
lastmarker = None
markerPrefix = b'test suite marker: '
def _handleLogError(self, msg, data, marker, pattern):
print('')
print(' ERROR: %s' % msg)
if not self.interactive:
raise pytest.fail(msg)
p = (' Show: '
'[L]og [M]arker [P]attern; '
'[I]gnore, [R]aise, or sys.e[X]it >> ')
sys.stdout.write(p + ' ')
# ARGH
sys.stdout.flush()
while True:
i = getchar().upper()
if i not in 'MPLIRX':
continue
print(i.upper()) # Also prints new line
if i == 'L':
for x, line in enumerate(data):
if (x + 1) % self.console_height == 0:
# The \r and comma should make the next line overwrite
sys.stdout.write('<-- More -->\r ')
m = getchar().lower()
# Erase our "More" prompt
sys.stdout.write(' \r ')
if m == 'q':
break
print(line.rstrip())
elif i == 'M':
print(repr(marker or self.lastmarker))
elif i == 'P':
print(repr(pattern))
elif i == 'I':
# return without raising the normal exception
return
elif i == 'R':
raise pytest.fail(msg)
elif i == 'X':
self.exit()
sys.stdout.write(p + ' ')
def exit(self):
sys.exit()
def emptyLog(self):
"""Overwrite self.logfile with 0 bytes."""
        open(self.logfile, 'wb').write(b'')
def markLog(self, key=None):
"""Insert a marker line into the log and set self.lastmarker."""
if key is None:
key = str(time.time())
self.lastmarker = key
open(self.logfile, 'ab+').write(
b'%s%s\n'
% (self.markerPrefix, key.encode('utf-8'))
)
def _read_marked_region(self, marker=None):
"""Return lines from self.logfile in the marked region.
If marker is None, self.lastmarker is used. If the log hasn't
been marked (using self.markLog), the entire log will be returned.
"""
# Give the logger time to finish writing?
# time.sleep(0.5)
logfile = self.logfile
marker = marker or self.lastmarker
if marker is None:
return open(logfile, 'rb').readlines()
if isinstance(marker, str):
marker = marker.encode('utf-8')
data = []
in_region = False
for line in open(logfile, 'rb'):
if in_region:
if line.startswith(self.markerPrefix) and marker not in line:
break
else:
data.append(line)
elif marker in line:
in_region = True
return data
def assertInLog(self, line, marker=None):
"""Fail if the given (partial) line is not in the log.
The log will be searched from the given marker to the next marker.
If marker is None, self.lastmarker is used. If the log hasn't
been marked (using self.markLog), the entire log will be searched.
"""
data = self._read_marked_region(marker)
for logline in data:
if line in logline:
return
msg = '%r not found in log' % line
self._handleLogError(msg, data, marker, line)
def assertNotInLog(self, line, marker=None):
"""Fail if the given (partial) line is in the log.
The log will be searched from the given marker to the next marker.
If marker is None, self.lastmarker is used. If the log hasn't
been marked (using self.markLog), the entire log will be searched.
"""
data = self._read_marked_region(marker)
for logline in data:
if line in logline:
msg = '%r found in log' % line
self._handleLogError(msg, data, marker, line)
def assertValidUUIDv4(self, marker=None):
"""Fail if the given UUIDv4 is not valid.
The log will be searched from the given marker to the next marker.
If marker is None, self.lastmarker is used. If the log hasn't
been marked (using self.markLog), the entire log will be searched.
"""
data = self._read_marked_region(marker)
data = [
chunk.decode('utf-8').rstrip('\n').rstrip('\r')
for chunk in data
]
for log_chunk in data:
try:
                uuid_log = log_chunk
                uuid_obj = UUID(uuid_log, version=4)
            except (TypeError, ValueError):
                pass  # it might be in another chunk
else:
if str(uuid_obj) == uuid_log:
return
msg = '%r is not a valid UUIDv4' % uuid_log
self._handleLogError(msg, data, marker, log_chunk)
msg = 'UUIDv4 not found in log'
self._handleLogError(msg, data, marker, log_chunk)
def assertLog(self, sliceargs, lines, marker=None):
"""Fail if log.readlines()[sliceargs] is not contained in 'lines'.
The log will be searched from the given marker to the next marker.
If marker is None, self.lastmarker is used. If the log hasn't
been marked (using self.markLog), the entire log will be searched.
"""
data = self._read_marked_region(marker)
if isinstance(sliceargs, int):
# Single arg. Use __getitem__ and allow lines to be str or list.
if isinstance(lines, (tuple, list)):
lines = lines[0]
if isinstance(lines, str):
lines = lines.encode('utf-8')
if lines not in data[sliceargs]:
msg = '%r not found on log line %r' % (lines, sliceargs)
self._handleLogError(
msg,
[data[sliceargs], '--EXTRA CONTEXT--'] + data[
sliceargs + 1:sliceargs + 6],
marker,
lines)
else:
# Multiple args. Use __getslice__ and require lines to be list.
if isinstance(lines, tuple):
lines = list(lines)
elif isinstance(lines, text_or_bytes):
raise TypeError("The 'lines' arg must be a list when "
"'sliceargs' is a tuple.")
start, stop = sliceargs
for line, logline in zip(lines, data[start:stop]):
if isinstance(line, str):
line = line.encode('utf-8')
if line not in logline:
msg = '%r not found in log' % line
self._handleLogError(msg, data[start:stop], marker, line)
|
import asyncio
from datetime import timedelta
from avri.api import Avri
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import (
CONF_COUNTRY_CODE,
CONF_HOUSE_NUMBER,
CONF_HOUSE_NUMBER_EXTENSION,
CONF_ZIP_CODE,
DOMAIN,
)
PLATFORMS = ["sensor"]
SCAN_INTERVAL = timedelta(hours=4)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Avri component."""
hass.data[DOMAIN] = {}
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Avri from a config entry."""
client = Avri(
postal_code=entry.data[CONF_ZIP_CODE],
house_nr=entry.data[CONF_HOUSE_NUMBER],
house_nr_extension=entry.data.get(CONF_HOUSE_NUMBER_EXTENSION),
country_code=entry.data[CONF_COUNTRY_CODE],
)
hass.data[DOMAIN][entry.entry_id] = client
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import mock
from pyramid import testing
from paasta_tools.api.views import pause_autoscaler
def test_get_service_autoscaler_pause():
with mock.patch(
"paasta_tools.utils.KazooClient", autospec=True
) as mock_zk, mock.patch(
"paasta_tools.utils.load_system_paasta_config", autospec=True
):
request = testing.DummyRequest()
mock_zk_get = mock.Mock(return_value=(b"100", None))
mock_zk.return_value = mock.Mock(get=mock_zk_get)
response = pause_autoscaler.get_service_autoscaler_pause(request)
mock_zk_get.assert_called_once_with("/autoscaling/paused")
assert response == "100"
def test_update_autoscaler_pause():
with mock.patch(
"paasta_tools.utils.KazooClient", autospec=True
) as mock_zk, mock.patch(
"paasta_tools.api.views.pause_autoscaler.time", autospec=True
) as mock_time, mock.patch(
"paasta_tools.utils.load_system_paasta_config", autospec=True
):
request = testing.DummyRequest()
request.swagger_data = {"json_body": {"minutes": 100}}
mock_zk_set = mock.Mock()
mock_zk_ensure = mock.Mock()
mock_zk.return_value = mock.Mock(set=mock_zk_set, ensure_path=mock_zk_ensure)
mock_time.time = mock.Mock(return_value=0)
response = pause_autoscaler.update_service_autoscaler_pause(request)
assert mock_zk_ensure.call_count == 1
mock_zk_set.assert_called_once_with("/autoscaling/paused", b"6000")
assert response is None
def test_delete_autoscaler_pause():
with mock.patch(
"paasta_tools.utils.KazooClient", autospec=True
) as mock_zk, mock.patch(
"paasta_tools.api.views.pause_autoscaler.time", autospec=True
) as mock_time, mock.patch(
"paasta_tools.utils.load_system_paasta_config", autospec=True
):
request = testing.DummyRequest()
mock_zk_del = mock.Mock()
mock_zk_ensure = mock.Mock()
mock_zk.return_value = mock.Mock(delete=mock_zk_del, ensure_path=mock_zk_ensure)
mock_time.time = mock.Mock(return_value=0)
response = pause_autoscaler.delete_service_autoscaler_pause(request)
assert mock_zk_ensure.call_count == 1
mock_zk_del.assert_called_once_with("/autoscaling/paused")
assert response is None
|
import concurrent.futures
import logging
from multiprocessing import pool as pool_lib
import time
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import ycsb
from google.cloud import datastore
from google.oauth2 import service_account
BENCHMARK_NAME = 'cloud_datastore_ycsb'
BENCHMARK_CONFIG = """
cloud_datastore_ycsb:
description: >
      Run YCSB against Google Cloud Datastore.
Configure the number of VMs via --num-vms.
vm_groups:
default:
vm_spec: *default_single_core
vm_count: 1"""
_CLEANUP_THREAD_POOL_WORKERS = 30
_CLEANUP_KIND_READ_BATCH_SIZE = 12000
_CLEANUP_KIND_DELETE_BATCH_SIZE = 6000
_CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE = 3000
_CLEANUP_KIND_DELETE_OP_BATCH_SIZE = 500
# the name of the database entity created when running datastore YCSB
# https://github.com/brianfrankcooper/YCSB/tree/master/googledatastore
_YCSB_COLLECTIONS = ['usertable']
FLAGS = flags.FLAGS
flags.DEFINE_string('google_datastore_keyfile', None,
'The path to Google API P12 private key file')
flags.DEFINE_string(
'private_keyfile', '/tmp/key.p12',
'The path where the private key file is copied to on a VM.')
flags.DEFINE_string(
'google_datastore_serviceAccount', None,
    'The service account email associated with '
    'the datastore private key file.')
flags.DEFINE_string('google_datastore_datasetId', None,
'The project ID that has Cloud Datastore service')
flags.DEFINE_string('google_datastore_debug', 'false',
'The logging level when running YCSB')
# the JSON keyfile is needed to validate credentials in the Cleanup phase
flags.DEFINE_string('google_datastore_deletion_keyfile', None,
'The path to Google API JSON private key file')
class _DeletionTask(object):
"""Represents a cleanup deletion task.
Attributes:
kind: Datastore kind to be deleted.
task_id: Task id
    entity_deletion_count: Number of entities deleted.
deletion_error: Set to true if deletion fails with an error.
"""
def __init__(self, kind, task_id):
self.kind = kind
self.task_id = task_id
self.entity_deletion_count = 0
self.deletion_error = False
def DeleteEntities(self, dataset_id, credentials, delete_entities):
"""Deletes entities in a datastore database in batches.
Args:
dataset_id: Cloud Datastore client dataset id.
credentials: Cloud Datastore client credentials.
delete_entities: Entities to delete.
Returns:
number of records deleted.
Raises:
ValueError: In case of delete failures.
"""
try:
client = datastore.Client(project=dataset_id, credentials=credentials)
logging.info('Task %d - Started deletion for %s', self.task_id, self.kind)
while delete_entities:
chunk = delete_entities[:_CLEANUP_KIND_DELETE_OP_BATCH_SIZE]
delete_entities = delete_entities[_CLEANUP_KIND_DELETE_OP_BATCH_SIZE:]
client.delete_multi(chunk)
self.entity_deletion_count += len(chunk)
logging.info('Task %d - Completed deletion for %s - %d', self.task_id,
self.kind, self.entity_deletion_count)
return self.entity_deletion_count
except ValueError as error:
logging.exception('Task %d - Delete entities for %s failed due to %s',
self.task_id, self.kind, error)
self.deletion_error = True
raise error
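# Minimal sketch of the batching performed in DeleteEntities() above (the key
# count is illustrative): 1200 keys are deleted in three client.delete_multi
# calls, with batch sizes 500, 500 and 200.
_example_keys = list(range(1200))
_example_batches = [
    _example_keys[i:i + _CLEANUP_KIND_DELETE_OP_BATCH_SIZE]
    for i in range(0, len(_example_keys), _CLEANUP_KIND_DELETE_OP_BATCH_SIZE)
]
assert [len(b) for b in _example_batches] == [500, 500, 200]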
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
if FLAGS['ycsb_client_vms'].present:
config['vm_groups']['default']['vm_count'] = FLAGS.ycsb_client_vms
return config
def CheckPrerequisites(_):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
  # Until YCSB's Cloud Datastore binding supports Application Default
  # Credentials, we must make sure valid credential flags are set.
if not FLAGS.google_datastore_keyfile:
raise ValueError('"google_datastore_keyfile" must be set')
if not FLAGS.google_datastore_serviceAccount:
raise ValueError('"google_datastore_serviceAccount" must be set')
if not FLAGS.google_datastore_datasetId:
    raise ValueError('"google_datastore_datasetId" must be set')
def GetDatastoreDeleteCredentials():
"""Returns credentials to datastore db."""
if FLAGS.google_datastore_deletion_keyfile.startswith('gs://'):
# Copy private keyfile to local disk
cp_cmd = [
'gsutil', 'cp', FLAGS.google_datastore_deletion_keyfile,
FLAGS.private_keyfile
]
vm_util.IssueCommand(cp_cmd)
credentials_path = FLAGS.private_keyfile
else:
credentials_path = FLAGS.google_datastore_deletion_keyfile
credentials = service_account.Credentials.from_service_account_file(
credentials_path,
scopes=datastore.client.Client.SCOPE,
)
return credentials
def Prepare(benchmark_spec):
"""Prepare the virtual machines to run cloud datastore.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
benchmark_spec.always_call_cleanup = True
# Check that the database is empty before running
if FLAGS.google_datastore_deletion_keyfile:
dataset_id = FLAGS.google_datastore_datasetId
credentials = GetDatastoreDeleteCredentials()
client = datastore.Client(project=dataset_id, credentials=credentials)
for kind in _YCSB_COLLECTIONS:
# TODO(user): Allow a small number of leftover entities until we
# figure out why these are not getting deleted.
if len(list(client.query(kind=kind).fetch(limit=200))) > 100:
raise errors.Benchmarks.PrepareException(
'Database is non-empty. Stopping test.')
else:
logging.warning('Test could be executed on a non-empty database.')
vms = benchmark_spec.vms
# Install required packages and copy credential files
vm_util.RunThreaded(_Install, vms)
# Restore YCSB_TAR_URL
benchmark_spec.executor = ycsb.YCSBExecutor('googledatastore')
def Run(benchmark_spec):
"""Spawn YCSB and gather the results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample instances.
"""
vms = benchmark_spec.vms
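  # These keys map onto the YCSB googledatastore binding's configuration
  # properties and are passed straight through to the YCSB command line.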
run_kwargs = {
'googledatastore.datasetId': FLAGS.google_datastore_datasetId,
'googledatastore.privateKeyFile': FLAGS.private_keyfile,
'googledatastore.serviceAccountEmail':
FLAGS.google_datastore_serviceAccount,
'googledatastore.debug': FLAGS.google_datastore_debug,
}
load_kwargs = run_kwargs.copy()
if FLAGS['ycsb_preload_threads'].present:
    load_kwargs['threads'] = FLAGS.ycsb_preload_threads
samples = list(benchmark_spec.executor.LoadAndRun(
vms, load_kwargs=load_kwargs, run_kwargs=run_kwargs))
return samples
def Cleanup(_):
"""Deletes all entries in a datastore database."""
if FLAGS.google_datastore_deletion_keyfile:
dataset_id = FLAGS.google_datastore_datasetId
executor = concurrent.futures.ThreadPoolExecutor(
max_workers=_CLEANUP_THREAD_POOL_WORKERS)
logging.info('Attempting to delete all data in %s', dataset_id)
credentials = GetDatastoreDeleteCredentials()
futures = []
for kind in _YCSB_COLLECTIONS:
      futures.append(
          executor.submit(
              _ReadAndDeleteAllEntities, dataset_id, credentials, kind))
concurrent.futures.wait(
futures, timeout=None, return_when=concurrent.futures.ALL_COMPLETED)
logging.info('Deleted all data for %s', dataset_id)
else:
logging.warning('Manually delete all the entries via GCP portal.')
def _ReadAndDeleteAllEntities(dataset_id, credentials, kind):
  """Reads and deletes all entities of the given kind from Datastore.
Args:
dataset_id: Cloud Datastore client dataset id.
credentials: Cloud Datastore client credentials.
kind: Kind for which entities will be deleted.
Raises:
ValueError: In case of delete failures.
"""
task_id = 1
start_cursor = None
pool = pool_lib.ThreadPool(processes=_CLEANUP_THREAD_POOL_WORKERS)
  # We use a cursor to fetch entities in larger read batches and submit delete
  # tasks that remove them in smaller batches, since Datastore limits a single
  # commit to 500 mutations.
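  # Concretely: each query fetch reads at most _CLEANUP_KIND_READ_BATCH_SIZE
  # keys, deletion tasks are flushed once _CLEANUP_KIND_DELETE_BATCH_SIZE keys
  # have accumulated, each task receives up to
  # _CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE keys, and issues delete_multi
  # calls of at most _CLEANUP_KIND_DELETE_OP_BATCH_SIZE keys each.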
entity_read_count = 0
total_entity_count = 0
delete_keys = []
while True:
query = _CreateClient(dataset_id, credentials).query(kind=kind)
query.keys_only()
query_iter = query.fetch(
start_cursor=start_cursor, limit=_CLEANUP_KIND_READ_BATCH_SIZE)
for current_entities in query_iter.pages:
delete_keys.extend([entity.key for entity in current_entities])
entity_read_count = len(delete_keys)
# logging.debug('next batch of entities for %s - total = %d', kind,
# entity_read_count)
if entity_read_count >= _CLEANUP_KIND_DELETE_BATCH_SIZE:
total_entity_count += entity_read_count
logging.info('Creating tasks...Read %d in total', total_entity_count)
while delete_keys:
delete_chunk = delete_keys[:
_CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE]
delete_keys = delete_keys[_CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE:]
# logging.debug(
# 'Creating new Task %d - Read %d entities for %s kind , Read %d '
# + 'in total.',
# task_id, entity_read_count, kind, total_entity_count)
deletion_task = _DeletionTask(kind, task_id)
pool.apply_async(deletion_task.DeleteEntities, (
dataset_id,
credentials,
delete_chunk,
))
task_id += 1
        # Reset the delete batch.
entity_read_count = 0
delete_keys = []
# Read this after the pages are retrieved otherwise it will be set to None.
start_cursor = query_iter.next_page_token
if start_cursor is None:
logging.info('Read all existing records for %s', kind)
if delete_keys:
logging.info('Entities batch is not empty %d, submitting new tasks',
len(delete_keys))
while delete_keys:
delete_chunk = delete_keys[:
_CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE]
delete_keys = delete_keys[_CLEANUP_KIND_DELETE_PER_THREAD_BATCH_SIZE:]
logging.debug(
            'Creating new Task %d - Read %d entities for %s kind, read %d in total.',
task_id, entity_read_count, kind, total_entity_count)
deletion_task = _DeletionTask(kind, task_id)
pool.apply_async(deletion_task.DeleteEntities, (
dataset_id,
credentials,
delete_chunk,
))
task_id += 1
break
logging.info('Waiting for all tasks - %d to complete...', task_id)
time.sleep(60)
pool.close()
pool.join()
# Rerun the query and delete any leftovers to make sure that all records
# are deleted as intended.
client = _CreateClient(dataset_id, credentials)
query = client.query(kind=kind)
query.keys_only()
entities = list(query.fetch(limit=20000))
if entities:
logging.info('Deleting leftover %d entities for %s', len(entities), kind)
total_entity_count += len(entities)
deletion_task = _DeletionTask(kind, task_id)
delete_keys = []
delete_keys.extend([entity.key for entity in entities])
deletion_task.DeleteEntities(dataset_id, credentials, delete_keys)
logging.info('Deleted all data for %s - %s - %d', dataset_id, kind,
total_entity_count)
def _CreateClient(dataset_id, credentials):
"""Creates a datastore client for the dataset using the credentials.
Args:
dataset_id: Cloud Datastore client dataset id.
credentials: Cloud Datastore client credentials.
Returns:
Datastore client.
"""
return datastore.Client(project=dataset_id, credentials=credentials)
def _Install(vm):
"""Installs YCSB benchmark & copies datastore keyfile to client vm."""
vm.Install('ycsb')
# Copy private key file to VM
if FLAGS.google_datastore_keyfile.startswith('gs://'):
vm.Install('google_cloud_sdk')
vm.RemoteCommand('{cmd} {datastore_keyfile} {private_keyfile}'.format(
cmd='gsutil cp',
datastore_keyfile=FLAGS.google_datastore_keyfile,
private_keyfile=FLAGS.private_keyfile))
else:
vm.RemoteCopy(FLAGS.google_datastore_keyfile, FLAGS.private_keyfile)
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from proc import ProcessStatCollector
##########################################################################
class TestProcessStatCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ProcessStatCollector', {
'interval': 1
})
self.collector = ProcessStatCollector(config, None)
def test_import(self):
self.assertTrue(ProcessStatCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/stat', 'r')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
ProcessStatCollector.PROC = self.getFixturePath('proc_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
ProcessStatCollector.PROC = self.getFixturePath('proc_stat_2')
self.collector.collect()
        metrics = {
            'ctxt': 1791,
            'btime': 1319181102,
            'processes': 2,
            'procs_running': 1,
            'procs_blocked': 0,
        }
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from mlpatches import os_popen
from mlpatches.l2c import _get_str
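# The os_popen helpers return the child's stdin first (the os.popen2/os.popen3
# convention); the wrappers below swap the tuples to match the old Python 2
# popen2 module, which returned the child's stdout first.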
def popen2(cmd, bufsize=0, mode="t"):
"""Executes cmd as a sub-process. Returns the file objects (child_stdout, child_stdin)."""
command = _get_str(cmd)
i, o = os_popen.popen2(None, command, mode, bufsize)
return o, i
def popen3(cmd, bufsize=0, mode="t"):
"""Executes cmd as a sub-process. Returns the file objects (child_stdout, child_stdin, child_stderr)."""
command = _get_str(cmd)
i, o, e = os_popen.popen3(None, command, mode, bufsize)
return o, i, e
def popen4(cmd, bufsize=0, mode="t"):
"""Executes cmd as a sub-process. Returns the file objects (child_stdout_and_stderr, child_stdin)."""
command = _get_str(cmd)
i, oe = os_popen.popen4(None, command, mode, bufsize)
return oe, i
class Popen3(object):
"""This class represents a child process. Normally, Popen3 instances are created using the popen2() and popen3() factory functions described above.
    If not using one of the factory functions to create Popen3 objects, the parameter cmd is the shell command to execute in a sub-process. The capture_stderr flag, if true, specifies that the object should capture standard error output of the child process. The default is False. If the bufsize parameter is specified, it specifies the size of the I/O buffers to/from the child process."""
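    # A minimal usage sketch (assuming the StaSh-backed os_popen patches that
    # the factory functions above rely on are active):
    #
    #   proc = Popen3('echo hello', capture_stderr=True)
    #   output = proc.fromchild.read()
    #   status = proc.wait()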
def __init__(self, cmd, capture_stderr=False, bufsize=0):
strcmd = _get_str(cmd)
self._p = os_popen._PopenCmd(strcmd, "w", bufsize, shared_eo=False)
self._p.run()
self.tochild = self._p.chinw
self.fromchild = self._p.choutr
if capture_stderr:
self.childerr = self._p.cherrr
else:
self.childerr = None
self.pid = self._p.worker.job_id
def poll(self):
"""Returns -1 if child process has not completed yet, or its status code (see wait()) otherwise."""
if self._p.worker.is_alive():
return -1
else:
return self._p.worker.state.return_value
def wait(self):
"""Waits for and returns the status code of the child process. The status code encodes both the return code of the process and information about whether it exited using the exit() system call or died due to a signal. Functions to help interpret the status code are defined in the os module; see section Process Management for the W*() family of functions."""
self._p.worker.join()
return self._p.worker.state.return_value
class Popen4(Popen3):
"""Similar to Popen3, but always captures standard error into the same file object as standard output. These are typically created using popen4()."""
def __init__(self, cmd, bufsize=1):
strcmd = _get_str(cmd)
self._p = os_popen._PopenCmd(strcmd, "w", bufsize, shared_eo=True)
self._p.run()
self.tochild = self._p.chinw
self.fromchild = self._p.choutr
self.childerr = None
self.pid = self._p.worker.job_id
|
import inspect
import marshal
import os
import struct
import sys
import types
from coverage import env
from coverage.backward import BUILTINS
from coverage.backward import PYC_MAGIC_NUMBER, imp, importlib_util_find_spec
from coverage.files import canonical_filename, python_reported_file
from coverage.misc import CoverageException, ExceptionDuringRun, NoCode, NoSource, isolate_module
from coverage.phystokens import compile_unicode
from coverage.python import get_python_source
os = isolate_module(os)
class DummyLoader(object):
"""A shim for the pep302 __loader__, emulating pkgutil.ImpLoader.
    Currently only implements the .fullname attribute.
"""
def __init__(self, fullname, *_args):
self.fullname = fullname
if importlib_util_find_spec:
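    # importlib.util.find_spec does the heavy lifting here; when the requested
    # name turns out to be a package, find_module re-resolves
    # "<name>.__main__" so that running a package behaves like
    # "python -m package".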
def find_module(modulename):
"""Find the module named `modulename`.
Returns the file path of the module, the name of the enclosing
package, and the spec.
"""
try:
spec = importlib_util_find_spec(modulename)
except ImportError as err:
raise NoSource(str(err))
if not spec:
raise NoSource("No module named %r" % (modulename,))
pathname = spec.origin
packagename = spec.name
if spec.submodule_search_locations:
mod_main = modulename + ".__main__"
spec = importlib_util_find_spec(mod_main)
if not spec:
raise NoSource(
"No module named %s; "
"%r is a package and cannot be directly executed"
% (mod_main, modulename)
)
pathname = spec.origin
packagename = spec.name
packagename = packagename.rpartition(".")[0]
return pathname, packagename, spec
else:
def find_module(modulename):
"""Find the module named `modulename`.
Returns the file path of the module, the name of the enclosing
package, and None (where a spec would have been).
"""
openfile = None
glo, loc = globals(), locals()
try:
# Search for the module - inside its parent package, if any - using
# standard import mechanics.
if '.' in modulename:
packagename, name = modulename.rsplit('.', 1)
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
else:
packagename, name = None, modulename
searchpath = None # "top-level search" in imp.find_module()
openfile, pathname, _ = imp.find_module(name, searchpath)
# Complain if this is a magic non-file module.
if openfile is None and pathname is None:
raise NoSource(
"module does not live in a file: %r" % modulename
)
# If `modulename` is actually a package, not a mere module, then we
# pretend to be Python 2.7 and try running its __main__.py script.
if openfile is None:
packagename = modulename
name = '__main__'
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
openfile, pathname, _ = imp.find_module(name, searchpath)
except ImportError as err:
raise NoSource(str(err))
finally:
if openfile:
openfile.close()
return pathname, packagename, None
class PyRunner(object):
"""Multi-stage execution of Python code.
This is meant to emulate real Python execution as closely as possible.
"""
def __init__(self, args, as_module=False):
self.args = args
self.as_module = as_module
self.arg0 = args[0]
self.package = self.modulename = self.pathname = self.loader = self.spec = None
def prepare(self):
"""Set sys.path properly.
This needs to happen before any importing, and without importing anything.
"""
if self.as_module:
if env.PYBEHAVIOR.actual_syspath0_dash_m:
path0 = os.getcwd()
else:
path0 = ""
elif os.path.isdir(self.arg0):
# Running a directory means running the __main__.py file in that
# directory.
path0 = self.arg0
else:
path0 = os.path.abspath(os.path.dirname(self.arg0))
if os.path.isdir(sys.path[0]):
# sys.path fakery. If we are being run as a command, then sys.path[0]
# is the directory of the "coverage" script. If this is so, replace
# sys.path[0] with the directory of the file we're running, or the
# current directory when running modules. If it isn't so, then we
# don't know what's going on, and just leave it alone.
top_file = inspect.stack()[-1][0].f_code.co_filename
sys_path_0_abs = os.path.abspath(sys.path[0])
top_file_dir_abs = os.path.abspath(os.path.dirname(top_file))
sys_path_0_abs = canonical_filename(sys_path_0_abs)
top_file_dir_abs = canonical_filename(top_file_dir_abs)
if sys_path_0_abs != top_file_dir_abs:
path0 = None
else:
# sys.path[0] is a file. Is the next entry the directory containing
# that file?
if sys.path[1] == os.path.dirname(sys.path[0]):
# Can it be right to always remove that?
del sys.path[1]
if path0 is not None:
sys.path[0] = python_reported_file(path0)
def _prepare2(self):
"""Do more preparation to run Python code.
Includes finding the module to run and adjusting sys.argv[0].
This method is allowed to import code.
"""
if self.as_module:
self.modulename = self.arg0
pathname, self.package, self.spec = find_module(self.modulename)
if self.spec is not None:
self.modulename = self.spec.name
self.loader = DummyLoader(self.modulename)
self.pathname = os.path.abspath(pathname)
self.args[0] = self.arg0 = self.pathname
elif os.path.isdir(self.arg0):
# Running a directory means running the __main__.py file in that
# directory.
for ext in [".py", ".pyc", ".pyo"]:
try_filename = os.path.join(self.arg0, "__main__" + ext)
if os.path.exists(try_filename):
self.arg0 = try_filename
break
else:
raise NoSource("Can't find '__main__' module in '%s'" % self.arg0)
if env.PY2:
self.arg0 = os.path.abspath(self.arg0)
# Make a spec. I don't know if this is the right way to do it.
try:
import importlib.machinery
except ImportError:
pass
else:
try_filename = python_reported_file(try_filename)
self.spec = importlib.machinery.ModuleSpec("__main__", None, origin=try_filename)
self.spec.has_location = True
self.package = ""
self.loader = DummyLoader("__main__")
else:
if env.PY3:
self.loader = DummyLoader("__main__")
self.arg0 = python_reported_file(self.arg0)
def run(self):
"""Run the Python code!"""
self._prepare2()
# Create a module to serve as __main__
main_mod = types.ModuleType('__main__')
from_pyc = self.arg0.endswith((".pyc", ".pyo"))
main_mod.__file__ = self.arg0
if from_pyc:
main_mod.__file__ = main_mod.__file__[:-1]
if self.package is not None:
main_mod.__package__ = self.package
main_mod.__loader__ = self.loader
if self.spec is not None:
main_mod.__spec__ = self.spec
main_mod.__builtins__ = BUILTINS
sys.modules['__main__'] = main_mod
# Set sys.argv properly.
sys.argv = self.args
try:
# Make a code object somehow.
if from_pyc:
code = make_code_from_pyc(self.arg0)
else:
code = make_code_from_py(self.arg0)
except CoverageException:
raise
except Exception as exc:
msg = "Couldn't run '{filename}' as Python code: {exc.__class__.__name__}: {exc}"
raise CoverageException(msg.format(filename=self.arg0, exc=exc))
# Execute the code object.
# Return to the original directory in case the test code exits in
# a non-existent directory.
cwd = os.getcwd()
try:
exec(code, main_mod.__dict__)
except SystemExit: # pylint: disable=try-except-raise
# The user called sys.exit(). Just pass it along to the upper
# layers, where it will be handled.
raise
except Exception:
# Something went wrong while executing the user code.
# Get the exc_info, and pack them into an exception that we can
# throw up to the outer loop. We peel one layer off the traceback
# so that the coverage.py code doesn't appear in the final printed
# traceback.
typ, err, tb = sys.exc_info()
# PyPy3 weirdness. If I don't access __context__, then somehow it
# is non-None when the exception is reported at the upper layer,
# and a nested exception is shown to the user. This getattr fixes
# it somehow? https://bitbucket.org/pypy/pypy/issue/1903
getattr(err, '__context__', None)
# Call the excepthook.
try:
if hasattr(err, "__traceback__"):
err.__traceback__ = err.__traceback__.tb_next
sys.excepthook(typ, err, tb.tb_next)
except SystemExit: # pylint: disable=try-except-raise
raise
except Exception:
# Getting the output right in the case of excepthook
# shenanigans is kind of involved.
sys.stderr.write("Error in sys.excepthook:\n")
typ2, err2, tb2 = sys.exc_info()
err2.__suppress_context__ = True
if hasattr(err2, "__traceback__"):
err2.__traceback__ = err2.__traceback__.tb_next
sys.__excepthook__(typ2, err2, tb2.tb_next)
sys.stderr.write("\nOriginal exception was:\n")
raise ExceptionDuringRun(typ, err, tb.tb_next)
else:
sys.exit(1)
finally:
os.chdir(cwd)
def run_python_module(args):
"""Run a Python module, as though with ``python -m name args...``.
`args` is the argument array to present as sys.argv, including the first
element naming the module being executed.
This is a helper for tests, to encapsulate how to use PyRunner.
"""
runner = PyRunner(args, as_module=True)
runner.prepare()
runner.run()
def run_python_file(args):
"""Run a Python file as if it were the main program on the command line.
`args` is the argument array to present as sys.argv, including the first
element naming the file being executed. `package` is the name of the
enclosing package, if any.
This is a helper for tests, to encapsulate how to use PyRunner.
"""
runner = PyRunner(args, as_module=False)
runner.prepare()
runner.run()
def make_code_from_py(filename):
"""Get source from `filename` and make a code object of it."""
# Open the source file.
try:
source = get_python_source(filename)
except (IOError, NoSource):
raise NoSource("No file to run: '%s'" % filename)
code = compile_unicode(source, filename, "exec")
return code
def make_code_from_pyc(filename):
"""Get a code object from a .pyc file."""
try:
fpyc = open(filename, "rb")
except IOError:
raise NoCode("No file to run: '%s'" % filename)
with fpyc:
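        # Overall .pyc layout: a 4-byte magic number, then (on interpreters
        # implementing PEP 552) a 4-byte flags word selecting hash-based or
        # timestamp-based invalidation, then either an 8-byte source hash or a
        # 4-byte mtime plus (on 3.3+) a 4-byte source size, and finally the
        # marshalled code object.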
# First four bytes are a version-specific magic number. It has to
# match or we won't run the file.
magic = fpyc.read(4)
if magic != PYC_MAGIC_NUMBER:
raise NoCode("Bad magic number in .pyc file: {} != {}".format(magic, PYC_MAGIC_NUMBER))
date_based = True
if env.PYBEHAVIOR.hashed_pyc_pep552:
flags = struct.unpack('<L', fpyc.read(4))[0]
hash_based = flags & 0x01
if hash_based:
fpyc.read(8) # Skip the hash.
date_based = False
if date_based:
# Skip the junk in the header that we don't need.
fpyc.read(4) # Skip the moddate.
if env.PYBEHAVIOR.size_in_pyc:
# 3.3 added another long to the header (size), skip it.
fpyc.read(4)
# The rest of the file is the code object we want.
code = marshal.load(fpyc)
return code
|
import mock
from pyramid import testing
from paasta_tools.api.views import autoscaler
from paasta_tools.kubernetes_tools import KubernetesDeploymentConfig
from paasta_tools.marathon_tools import MarathonServiceConfig
@mock.patch("paasta_tools.api.views.autoscaler.get_instance_config", autospec=True)
def test_get_autoscaler_count(mock_get_instance_config):
request = testing.DummyRequest()
request.swagger_data = {"service": "fake_service", "instance": "fake_instance"}
mock_get_instance_config.return_value = mock.MagicMock(
get_instances=mock.MagicMock(return_value=123), spec=KubernetesDeploymentConfig,
)
response = autoscaler.get_autoscaler_count(request)
assert response.json_body["desired_instances"] == 123
assert response.json_body["calculated_instances"] == 123
@mock.patch("paasta_tools.api.views.autoscaler.get_instance_config", autospec=True)
def test_update_autoscaler_count_marathon(mock_get_instance_config):
request = testing.DummyRequest()
request.swagger_data = {
"service": "fake_marathon_service",
"instance": "fake_marathon_instance",
"json_body": {"desired_instances": 123},
}
mock_get_instance_config.return_value = mock.MagicMock(
get_min_instances=mock.MagicMock(return_value=100),
get_max_instances=mock.MagicMock(return_value=200),
spec=MarathonServiceConfig,
)
response = autoscaler.update_autoscaler_count(request)
assert response.json_body["desired_instances"] == 123
assert response.status_code == 202
@mock.patch("paasta_tools.api.views.autoscaler.get_instance_config", autospec=True)
def test_update_autoscaler_count_kubernetes(mock_get_instance_config):
request = testing.DummyRequest()
    request.swagger_data = {
        "service": "fake_kubernetes_service",
        "instance": "fake_kubernetes_instance",
"json_body": {"desired_instances": 155},
}
mock_get_instance_config.return_value = mock.MagicMock(
get_min_instances=mock.MagicMock(return_value=100),
get_max_instances=mock.MagicMock(return_value=200),
spec=KubernetesDeploymentConfig,
)
response = autoscaler.update_autoscaler_count(request)
assert response.json_body["desired_instances"] == 155
assert response.status_code == 202
@mock.patch("paasta_tools.api.views.autoscaler.get_instance_config", autospec=True)
def test_update_autoscaler_count_warning(mock_get_instance_config):
request = testing.DummyRequest()
request.swagger_data = {
"service": "fake_service",
"instance": "fake_instance",
"json_body": {"desired_instances": 123},
}
mock_get_instance_config.return_value = mock.MagicMock(
get_min_instances=mock.MagicMock(return_value=10),
get_max_instances=mock.MagicMock(return_value=100),
spec=KubernetesDeploymentConfig,
)
response = autoscaler.update_autoscaler_count(request)
assert response.json_body["desired_instances"] == 100
assert "WARNING" in response.json_body["status"]
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.homeassistant.triggers import state as state_trigger
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
TRIGGER_TYPES = {"locked", "unlocked"}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Lock devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
# Add triggers for each entity that belongs to this integration
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "locked",
}
)
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "unlocked",
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
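    # Lock device triggers are implemented by delegating to the generic state
    # trigger with the from/to states that correspond to the requested type.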
if config[CONF_TYPE] == "locked":
from_state = STATE_UNLOCKED
to_state = STATE_LOCKED
else:
from_state = STATE_LOCKED
to_state = STATE_UNLOCKED
state_config = {
CONF_PLATFORM: "state",
CONF_ENTITY_ID: config[CONF_ENTITY_ID],
state_trigger.CONF_FROM: from_state,
state_trigger.CONF_TO: to_state,
}
state_config = state_trigger.TRIGGER_SCHEMA(state_config)
return await state_trigger.async_attach_trigger(
hass, state_config, action, automation_info, platform_type="device"
)
|
from datetime import timedelta
from bond_api import Action, DeviceType
from homeassistant import core
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.util import utcnow
from .common import (
help_test_entity_available,
patch_bond_action,
patch_bond_device_state,
setup_platform,
)
from tests.common import async_fire_time_changed
def generic_device(name: str):
"""Create a generic device with given name."""
return {"name": name, "type": DeviceType.GENERIC_DEVICE}
async def test_entity_registry(hass: core.HomeAssistant):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(
hass,
SWITCH_DOMAIN,
generic_device("name-1"),
bond_version={"bondid": "test-hub-id"},
bond_device_id="test-device-id",
)
registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry()
entity = registry.entities["switch.name_1"]
assert entity.unique_id == "test-hub-id_test-device-id"
async def test_turn_on_switch(hass: core.HomeAssistant):
"""Tests that turn on command delegates to API."""
await setup_platform(
hass, SWITCH_DOMAIN, generic_device("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_on, patch_bond_device_state():
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.name_1"},
blocking=True,
)
await hass.async_block_till_done()
mock_turn_on.assert_called_once_with("test-device-id", Action.turn_on())
async def test_turn_off_switch(hass: core.HomeAssistant):
"""Tests that turn off command delegates to API."""
await setup_platform(
hass, SWITCH_DOMAIN, generic_device("name-1"), bond_device_id="test-device-id"
)
with patch_bond_action() as mock_turn_off, patch_bond_device_state():
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.name_1"},
blocking=True,
)
await hass.async_block_till_done()
mock_turn_off.assert_called_once_with("test-device-id", Action.turn_off())
async def test_update_reports_switch_is_on(hass: core.HomeAssistant):
"""Tests that update command sets correct state when Bond API reports the device is on."""
await setup_platform(hass, SWITCH_DOMAIN, generic_device("name-1"))
with patch_bond_device_state(return_value={"power": 1}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("switch.name_1").state == "on"
async def test_update_reports_switch_is_off(hass: core.HomeAssistant):
"""Tests that update command sets correct state when Bond API reports the device is off."""
await setup_platform(hass, SWITCH_DOMAIN, generic_device("name-1"))
with patch_bond_device_state(return_value={"power": 0}):
async_fire_time_changed(hass, utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
assert hass.states.get("switch.name_1").state == "off"
async def test_switch_available(hass: core.HomeAssistant):
"""Tests that available state is updated based on API errors."""
await help_test_entity_available(
hass, SWITCH_DOMAIN, generic_device("name-1"), "switch.name_1"
)
|
from pysmartthings import Attribute, Capability
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.components.switch import (
ATTR_CURRENT_POWER_W,
ATTR_TODAY_ENERGY_KWH,
DOMAIN as SWITCH_DOMAIN,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "on"})
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("switch.switch_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_turn_off(hass, device_factory):
    """Test the switch turns off successfully."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "on"})
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"switch", "turn_off", {"entity_id": "switch.switch_1"}, blocking=True
)
# Assert
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "off"
async def test_turn_on(hass, device_factory):
    """Test the switch turns on successfully."""
# Arrange
device = device_factory(
"Switch_1",
[Capability.switch, Capability.power_meter, Capability.energy_meter],
{Attribute.switch: "off", Attribute.power: 355, Attribute.energy: 11.422},
)
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.switch_1"}, blocking=True
)
# Assert
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_CURRENT_POWER_W] == 355
assert state.attributes[ATTR_TODAY_ENERGY_KWH] == 11.422
async def test_update_from_signal(hass, device_factory):
"""Test the switch updates when receiving a signal."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "off"})
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
await device.switch_on(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "on"
async def test_unload_config_entry(hass, device_factory):
"""Test the switch is removed when the config entry is unloaded."""
# Arrange
device = device_factory("Switch 1", [Capability.switch], {Attribute.switch: "on"})
config_entry = await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "switch")
# Assert
assert not hass.states.get("switch.switch_1")
|
import logging
from paasta_tools.cli.cmds.mark_for_deployment import NoSuchCluster
from paasta_tools.cli.cmds.mark_for_deployment import report_waiting_aborted
from paasta_tools.cli.cmds.mark_for_deployment import wait_for_deployment
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.cli.utils import list_deploy_groups
from paasta_tools.cli.utils import NoSuchService
from paasta_tools.cli.utils import validate_git_sha
from paasta_tools.cli.utils import validate_given_deploy_groups
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.cli.utils import validate_short_git_sha
from paasta_tools.remote_git import list_remote_refs
from paasta_tools.remote_git import LSRemoteException
from paasta_tools.utils import _log
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_git_url
from paasta_tools.utils import list_services
from paasta_tools.utils import PaastaColors
from paasta_tools.utils import TimeoutError
DEFAULT_DEPLOYMENT_TIMEOUT = 3600 # seconds
log = logging.getLogger(__name__)
class GitShaError(Exception):
pass
class DeployGroupError(Exception):
pass
def add_subparser(subparsers):
    list_parser = subparsers.add_parser(
        "wait-for-deployment",
        help="Wait for a service to be deployed to deploy_group",
description=(
"'paasta wait-for-deployment' waits for a previously marked for "
"deployment service to be deployed to deploy_group."
),
epilog=(
"Note: Access and credentials to the Git repo of a service "
"are required for this command to work."
),
)
list_parser.add_argument(
"-u",
"--git-url",
help=(
"Git url for service. Defaults to the normal git URL for " "the service."
),
default=None,
)
list_parser.add_argument(
"-c",
"-k",
"--commit",
help="Git sha to wait for deployment",
required=True,
type=validate_short_git_sha,
)
list_parser.add_argument(
"-l",
"--deploy-group",
help="deploy group (e.g. cluster1.canary, cluster2.main).",
required=True,
).completer = lazy_choices_completer(list_deploy_groups)
    list_parser.add_argument(
        "-s",
        "--service",
        help="Name of the service whose deployment you wish to wait for. "
'Leading "services-" will be stripped.',
required=True,
).completer = lazy_choices_completer(list_services)
list_parser.add_argument(
"-t",
"--timeout",
dest="timeout",
type=int,
default=DEFAULT_DEPLOYMENT_TIMEOUT,
help=(
"Time in seconds to wait for paasta to deploy the service. "
"If the timeout is exceeded we return 1. "
"Default is %(default)s seconds."
),
)
list_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
list_parser.add_argument(
"-v",
"--verbose",
action="count",
dest="verbose",
default=0,
help="Print out more output.",
)
list_parser.set_defaults(command=paasta_wait_for_deployment)
def get_latest_marked_sha(git_url, deploy_group):
"""Return the latest marked for deployment git sha or ''"""
refs = list_remote_refs(git_url)
last_ref = ""
for ref in refs:
if (
ref.startswith(f"refs/tags/paasta-{deploy_group}-")
and ref.endswith("-deploy")
and ref > last_ref
):
last_ref = ref
return refs[last_ref] if last_ref else ""
def validate_git_sha_is_latest(git_sha, git_url, deploy_group, service):
"""Verify if git_sha is the latest sha marked for deployment.
Raise exception when the provided git_sha is not the latest
marked for deployment in 'deploy_group' for 'service'.
"""
try:
marked_sha = get_latest_marked_sha(git_url, deploy_group)
except LSRemoteException as e:
print(
"Error talking to the git server: {}\n"
"It is not possible to verify that {} is marked for deployment in {}, "
"but I assume that it is marked and will continue waiting..".format(
e, git_sha, deploy_group
)
)
return
if marked_sha == "":
raise GitShaError(
"ERROR: Nothing is marked for deployment "
"in {} for {}".format(deploy_group, service)
)
if git_sha != marked_sha:
raise GitShaError(
"ERROR: The latest git SHA marked for "
"deployment in {} is {}".format(deploy_group, marked_sha)
)
def validate_deploy_group(deploy_group, service, soa_dir):
"""Validate deploy_group.
Raise exception if the specified deploy group is not used anywhere.
"""
in_use_deploy_groups = list_deploy_groups(service=service, soa_dir=soa_dir)
_, invalid_deploy_groups = validate_given_deploy_groups(
in_use_deploy_groups, [deploy_group]
)
if len(invalid_deploy_groups) == 1:
raise DeployGroupError(
"ERROR: These deploy groups are not currently "
"used anywhere: {}.\n"
"You probably need one of these in-use deploy "
"groups?:\n {}".format(
",".join(invalid_deploy_groups), ",".join(in_use_deploy_groups)
)
)
def paasta_wait_for_deployment(args):
"""Wrapping wait_for_deployment"""
if args.verbose:
log.setLevel(level=logging.DEBUG)
else:
log.setLevel(level=logging.INFO)
service = args.service
if service and service.startswith("services-"):
service = service.split("services-", 1)[1]
if args.git_url is None:
args.git_url = get_git_url(service=service, soa_dir=args.soa_dir)
args.commit = validate_git_sha(sha=args.commit, git_url=args.git_url)
try:
validate_service_name(service, soa_dir=args.soa_dir)
validate_deploy_group(args.deploy_group, service, args.soa_dir)
validate_git_sha_is_latest(
args.commit, args.git_url, args.deploy_group, service
)
except (GitShaError, DeployGroupError, NoSuchService) as e:
print(PaastaColors.red(f"{e}"))
return 1
try:
wait_for_deployment(
service=service,
deploy_group=args.deploy_group,
git_sha=args.commit,
soa_dir=args.soa_dir,
timeout=args.timeout,
)
_log(
service=service,
component="deploy",
line=(
"Deployment of {} for {} complete".format(
args.commit, args.deploy_group
)
),
level="event",
)
except (KeyboardInterrupt, TimeoutError, NoSuchCluster):
report_waiting_aborted(service, args.deploy_group)
return 1
return 0
|
import os
import unittest
import numpy as np
import numpy.testing as np_test
import networkx as nx
from pgmpy.readwrite import BIFReader, BIFWriter
from pgmpy.models import BayesianModel
from pgmpy.factors.discrete import TabularCPD
class TestBIFReader(unittest.TestCase):
def setUp(self):
self.reader = BIFReader(
string="""
// Bayesian Network in the Interchange Format
// Produced by BayesianNetworks package in JavaBayes
// Output created Sun Nov 02 17:49:49 GMT+00:00 1997
// Bayesian network
network "Dog-Problem" { //5 variables and 5 probability distributions
property "credal-set constant-density-bounded 1.1" ;
}
variable "light-on" { //2 values
type discrete[2] { "true" "false" };
property "position = (218, 195)" ;
}
variable "bowel-problem" { //2 values
type discrete[2] { "true" "false" };
property "position = (335, 99)" ;
}
variable "dog-out" { //2 values
type discrete[2] { "true" "false" };
property "position = (300, 195)" ;
}
variable "hear-bark" { //2 values
type discrete[2] { "true" "false" };
property "position = (296, 268)" ;
}
variable "family-out" { //2 values
type discrete[2] { "true" "false" };
property "position = (257, 99)" ;
}
probability ( "light-on" "family-out" ) { //2 variable(s) and 4 values
(true) 0.6 0.4 ;
(false) 0.05 0.95 ;
}
probability ( "bowel-problem" ) { //1 variable(s) and 2 values
table 0.01 0.99 ;
}
probability ( "dog-out" "bowel-problem" "family-out" ) { //3 variable(s) and 8 values
table 0.99 0.97 0.9 0.3 0.01 0.03 0.1 0.7 ;
}
probability ( "hear-bark" "dog-out" ) { //2 variable(s) and 4 values
table 0.7 0.01 0.3 0.99 ;
}
probability ( "family-out" ) { //1 variable(s) and 2 values
table 0.15 0.85 ;
}
""",
include_properties=True,
)
self.water_model = BIFReader(
"pgmpy/tests/test_readwrite/testdata/water.bif", include_properties=True
)
def test_network_name(self):
name_expected = "Dog-Problem"
self.assertEqual(self.reader.network_name, name_expected)
def test_get_variables(self):
var_expected = [
"light-on",
"bowel-problem",
"dog-out",
"hear-bark",
"family-out",
]
self.assertListEqual(self.reader.get_variables(), var_expected)
def test_states(self):
states_expected = {
"bowel-problem": ["true", "false"],
"dog-out": ["true", "false"],
"family-out": ["true", "false"],
"hear-bark": ["true", "false"],
"light-on": ["true", "false"],
}
states = self.reader.get_states()
for variable in states_expected:
self.assertListEqual(states_expected[variable], states[variable])
def test_get_property(self):
property_expected = {
"bowel-problem": ["position = (335, 99)"],
"dog-out": ["position = (300, 195)"],
"family-out": ["position = (257, 99)"],
"hear-bark": ["position = (296, 268)"],
"light-on": ["position = (218, 195)"],
}
prop = self.reader.get_property()
for variable in property_expected:
self.assertListEqual(property_expected[variable], prop[variable])
def test_get_values(self):
cpd_expected = {
"bowel-problem": np.array([[0.01], [0.99]]),
"dog-out": np.array([[0.99, 0.97, 0.9, 0.3], [0.01, 0.03, 0.1, 0.7]]),
"family-out": np.array([[0.15], [0.85]]),
"hear-bark": np.array([[0.7, 0.01], [0.3, 0.99]]),
"light-on": np.array([[0.6, 0.05], [0.4, 0.95]]),
}
cpd = self.reader.variable_cpds
for variable in cpd_expected:
np_test.assert_array_equal(cpd_expected[variable], cpd[variable])
def test_get_values_reordered(self):
cancer_values1 = BIFReader(
string="""
network unknown {
}
variable Pollution {
type discrete [ 2 ] { low, high };
}
variable Smoker {
type discrete [ 2 ] { True, False };
}
variable Cancer {
type discrete [ 2 ] { True, False };
}
probability ( Cancer | Pollution, Smoker ) {
(low, True) 0.03, 0.97;
(low, False) 0.001, 0.999;
(high, True) 0.05, 0.95;
(high, False) 0.02, 0.98;
}"""
).get_values()
cancer_values2 = BIFReader(
string="""
network unknown {
}
variable Pollution {
type discrete [ 2 ] { low, high };
}
variable Smoker {
type discrete [ 2 ] { True, False };
}
variable Cancer {
type discrete [ 2 ] { True, False };
}
probability ( Cancer | Pollution, Smoker ) {
(low, True) 0.03, 0.97;
(high, True) 0.05, 0.95;
(low, False) 0.001, 0.999;
(high, False) 0.02, 0.98;
}"""
).get_values()
for var in cancer_values1:
np_test.assert_array_equal(cancer_values1[var], cancer_values2[var])
def test_get_parents(self):
parents_expected = {
"bowel-problem": [],
"dog-out": ["bowel-problem", "family-out"],
"family-out": [],
"hear-bark": ["dog-out"],
"light-on": ["family-out"],
}
parents = self.reader.get_parents()
for variable in parents_expected:
self.assertListEqual(parents_expected[variable], parents[variable])
def test_get_edges(self):
edges_expected = [
["family-out", "dog-out"],
["bowel-problem", "dog-out"],
["family-out", "light-on"],
["dog-out", "hear-bark"],
]
self.assertListEqual(sorted(self.reader.variable_edges), sorted(edges_expected))
def test_get_model(self):
edges_expected = [
("family-out", "dog-out"),
("bowel-problem", "dog-out"),
("family-out", "light-on"),
("dog-out", "hear-bark"),
]
nodes_expected = [
"bowel-problem",
"hear-bark",
"light-on",
"dog-out",
"family-out",
]
edge_expected = {
"bowel-problem": {"dog-out": {"weight": None}},
"dog-out": {"hear-bark": {"weight": None}},
"family-out": {"dog-out": {"weight": None}, "light-on": {"weight": None}},
"hear-bark": {},
"light-on": {},
}
node_expected = {
"bowel-problem": {"weight": None, "position": "(335, 99)"},
"dog-out": {"weight": None, "position": "(300, 195)"},
"family-out": {"weight": None, "position": "(257, 99)"},
"hear-bark": {"weight": None, "position": "(296, 268)"},
"light-on": {"weight": None, "position": "(218, 195)"},
}
cpds_expected = [
TabularCPD(
variable="bowel-problem",
variable_card=2,
values=np.array([[0.01], [0.99]]),
state_names={"bowel-problem": ["true", "false"]},
),
TabularCPD(
variable="dog-out",
variable_card=2,
values=np.array([[0.99, 0.97, 0.9, 0.3], [0.01, 0.03, 0.1, 0.7]]),
evidence=["bowel-problem", "family-out"],
evidence_card=[2, 2],
state_names={
"dog-out": ["true", "false"],
"bowel-problem": ["true", "false"],
"family-out": ["true", "false"],
},
),
TabularCPD(
variable="family-out",
variable_card=2,
values=np.array([[0.15], [0.85]]),
state_names={"family-out": ["true", "false"]},
),
TabularCPD(
variable="hear-bark",
variable_card=2,
values=np.array([[0.7, 0.01], [0.3, 0.99]]),
evidence=["dog-out"],
evidence_card=[2],
state_names={
"hear-bark": ["true", "false"],
"dog-out": ["true", "false"],
},
),
TabularCPD(
variable="light-on",
variable_card=2,
values=np.array([[0.6, 0.05], [0.4, 0.95]]),
evidence=["family-out"],
evidence_card=[2],
state_names={
"light-on": ["true", "false"],
"family-out": ["true", "false"],
},
),
]
model = self.reader.get_model()
model_cpds = model.get_cpds()
for cpd_index in range(5):
self.assertEqual(model_cpds[cpd_index], cpds_expected[cpd_index])
self.assertDictEqual(dict(model.nodes), node_expected)
self.assertDictEqual(dict(model.adj), edge_expected)
self.assertListEqual(sorted(model.nodes()), sorted(nodes_expected))
self.assertListEqual(sorted(model.edges()), sorted(edges_expected))
def test_water_model(self):
model = self.water_model.get_model()
self.assertEqual(len(model.nodes()), 32)
self.assertEqual(len(model.edges()), 66)
self.assertEqual(len(model.get_cpds()), 32)
def tearDown(self):
del self.reader
class TestBIFWriter(unittest.TestCase):
def setUp(self):
variables = [
"kid",
"bowel-problem",
"dog-out",
"family-out",
"hear-bark",
"light-on",
]
edges = [
["family-out", "dog-out"],
["bowel-problem", "dog-out"],
["family-out", "light-on"],
["dog-out", "hear-bark"],
]
cpds = {
"kid": np.array([[0.3], [0.7]]),
"bowel-problem": np.array([[0.01], [0.99]]),
"dog-out": np.array([[0.99, 0.01, 0.97, 0.03], [0.9, 0.1, 0.3, 0.7]]),
"family-out": np.array([[0.15], [0.85]]),
"hear-bark": np.array([[0.7, 0.3], [0.01, 0.99]]),
"light-on": np.array([[0.6, 0.4], [0.05, 0.95]]),
}
states = {
"kid": ["true", "false"],
"bowel-problem": ["true", "false"],
"dog-out": ["true", "false"],
"family-out": ["true", "false"],
"hear-bark": ["true", "false"],
"light-on": ["true", "false"],
}
parents = {
"kid": [],
"bowel-problem": [],
"dog-out": ["bowel-problem", "family-out"],
"family-out": [],
"hear-bark": ["dog-out"],
"light-on": ["family-out"],
}
properties = {
"kid": ["position = (100, 165)"],
"bowel-problem": ["position = (335, 99)"],
"dog-out": ["position = (300, 195)"],
"family-out": ["position = (257, 99)"],
"hear-bark": ["position = (296, 268)"],
"light-on": ["position = (218, 195)"],
}
self.model = BayesianModel()
self.model.add_nodes_from(variables)
self.model.add_edges_from(edges)
tabular_cpds = []
for var in sorted(cpds.keys()):
values = cpds[var]
cpd = TabularCPD(
var,
len(states[var]),
values,
evidence=parents[var],
evidence_card=[
len(states[evidence_var]) for evidence_var in parents[var]
],
)
tabular_cpds.append(cpd)
self.model.add_cpds(*tabular_cpds)
        for node, node_properties in properties.items():
            for prop in node_properties:
prop_name, prop_value = map(lambda t: t.strip(), prop.split("="))
self.model.nodes[node][prop_name] = prop_value
self.writer = BIFWriter(model=self.model)
def test_str(self):
self.expected_string = """network unknown {
}
variable bowel-problem {
type discrete [ 2 ] { 0, 1 };
property position = (335, 99) ;
property weight = None ;
}
variable dog-out {
type discrete [ 2 ] { 0, 1 };
property position = (300, 195) ;
property weight = None ;
}
variable family-out {
type discrete [ 2 ] { 0, 1 };
property position = (257, 99) ;
property weight = None ;
}
variable hear-bark {
type discrete [ 2 ] { 0, 1 };
property position = (296, 268) ;
property weight = None ;
}
variable kid {
type discrete [ 2 ] { 0, 1 };
property position = (100, 165) ;
property weight = None ;
}
variable light-on {
type discrete [ 2 ] { 0, 1 };
property position = (218, 195) ;
property weight = None ;
}
probability ( bowel-problem ) {
table 0.01, 0.99 ;
}
probability ( dog-out | bowel-problem, family-out ) {
table 0.99, 0.01, 0.97, 0.03, 0.9, 0.1, 0.3, 0.7 ;
}
probability ( family-out ) {
table 0.15, 0.85 ;
}
probability ( hear-bark | dog-out ) {
table 0.7, 0.3, 0.01, 0.99 ;
}
probability ( kid ) {
table 0.3, 0.7 ;
}
probability ( light-on | family-out ) {
table 0.6, 0.4, 0.05, 0.95 ;
}
"""
self.maxDiff = None
self.assertEqual(self.writer.__str__(), self.expected_string)
def test_write_read_equal(self):
self.writer.write_bif("test_bif.bif")
reader = BIFReader("test_bif.bif")
read_model = reader.get_model(state_name_type=int)
self.assertEqual(sorted(self.model.nodes()), sorted(read_model.nodes()))
self.assertEqual(sorted(self.model.edges()), sorted(read_model.edges()))
for var in self.model.nodes():
self.assertEqual(self.model.get_cpds(var), read_model.get_cpds(var))
os.remove("test_bif.bif")
|
import builtins
import operator
from typing import Union
import numpy as np
# Vendored from NumPy 1.12; we need a version that supports duck typing, even
# on dask arrays with __array_function__ enabled.
def _validate_axis(axis, ndim, argname):
try:
axis = [operator.index(axis)]
except TypeError:
axis = list(axis)
axis = [a + ndim if a < 0 else a for a in axis]
if not builtins.all(0 <= a < ndim for a in axis):
raise ValueError("invalid axis for this array in `%s` argument" % argname)
if len(set(axis)) != len(axis):
raise ValueError("repeated axis in `%s` argument" % argname)
return axis
def moveaxis(a, source, destination):
try:
# allow duck-array types if they define transpose
transpose = a.transpose
except AttributeError:
a = np.asarray(a)
transpose = a.transpose
source = _validate_axis(source, a.ndim, "source")
destination = _validate_axis(destination, a.ndim, "destination")
if len(source) != len(destination):
raise ValueError(
"`source` and `destination` arguments must have "
"the same number of elements"
)
order = [n for n in range(a.ndim) if n not in source]
for dest, src in sorted(zip(destination, source)):
order.insert(dest, src)
result = transpose(order)
return result
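# For example, moveaxis(np.zeros((3, 4, 5)), 0, -1) returns a view of shape
# (4, 5, 3), matching np.moveaxis.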
# Type annotation stubs. See also / to be replaced by:
# https://github.com/numpy/numpy/issues/7370
# https://github.com/numpy/numpy-stubs/
DTypeLike = Union[np.dtype, str]
# from dask/array/utils.py
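# With NEP 18 dispatch enabled, np.concatenate defers to A.__array_function__
# and returns True; without it, concatenating the non-array object raises
# ValueError.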
def _is_nep18_active():
class A:
def __array_function__(self, *args, **kwargs):
return True
try:
return np.concatenate([A()])
except ValueError:
return False
IS_NEP18_ACTIVE = _is_nep18_active()
|
from django.contrib.sites.models import Site
from django.test import TestCase
from django.utils import timezone
import django_comments as comments
from zinnia.managers import PUBLISHED
from zinnia.models.entry import Entry
from zinnia.signals import connect_discussion_signals
from zinnia.signals import disconnect_discussion_signals
from zinnia.signals import disconnect_entry_signals
class CommentDenormalizationTestCase(TestCase):
def setUp(self):
disconnect_entry_signals()
disconnect_discussion_signals()
params = {'title': 'My entry',
'status': PUBLISHED,
'slug': 'my-entry'}
self.entry = Entry.objects.create(**params)
self.site = Site.objects.get_current()
def test_count_after_deletion_issue_283(self):
comment_klass = comments.get_model()
connect_discussion_signals()
comment_klass.objects.create(
comment='My Comment 1', site=self.site,
content_object=self.entry, submit_date=timezone.now())
comment_klass.objects.create(
comment='My Comment 2', site=self.site,
content_object=self.entry, submit_date=timezone.now())
        # This is expected: the signals are not caught on creation
self.assertEqual(self.entry.comment_count, 0)
self.entry.comment_count = 2
self.entry.save()
comment_klass.objects.all().delete()
self.assertEqual(comment_klass.objects.count(), 0)
entry_reloaded = Entry.objects.get(pk=self.entry.pk)
self.assertEqual(entry_reloaded.comment_count, 0)
disconnect_discussion_signals()
|
import posixpath
from perfkitbenchmarker import linux_packages
# Use this directory for all data stored in the VM for this test.
PATH = '{0}/scimark2'.format(linux_packages.INSTALL_DIR)
# Download location for both the C and Java tests.
BASE_URL = 'https://math.nist.gov/scimark2'
# Java-specific constants.
JAVA_JAR = 'scimark2lib.jar'
JAVA_MAIN = 'jnt.scimark2.commandline'
# C-specific constants.
C_ZIP = 'scimark2_1c.zip'
C_SRC = '{0}/src'.format(PATH)
# SciMark2 does not set optimization flags; it leaves this to the
# discretion of the tester. The following gets good performance and
# has been used for LLVM and GCC regression testing, see for example
# https://llvm.org/bugs/show_bug.cgi?id=22589 .
C_CFLAGS = '-O3 -march=native'
PACKAGE_NAME = 'scimark2'
PREPROVISIONED_DATA = {
JAVA_JAR:
'6f84f949c3167b385da1a9957ecd53fe0111b42e981e0c481be53dba0504305f',
C_ZIP: '223464cd7e90b4c22e2af08dbae6f6faa33e65b01e1c58e5a176837bc67958be'
}
PACKAGE_DATA_URL = {
JAVA_JAR: posixpath.join(BASE_URL, JAVA_JAR),
C_ZIP: posixpath.join(BASE_URL, C_ZIP)
}
def Install(vm):
"""Installs scimark2 on the vm."""
vm.Install('build_tools')
vm.Install('wget')
vm.Install('openjdk')
vm.Install('unzip')
vm.RemoteCommand('rm -rf {0} && mkdir {0}'.format(PATH))
vm.InstallPreprovisionedPackageData(
PACKAGE_NAME, PREPROVISIONED_DATA.keys(), PATH)
cmds = [
'(mkdir {0} && cd {0} && unzip {1}/{2})'.format(C_SRC, PATH, C_ZIP),
'(cd {0} && make CFLAGS="{1}")'.format(C_SRC, C_CFLAGS),
]
for cmd in cmds:
vm.RemoteCommand(cmd, should_log=True)
def Uninstall(vm):
"""Uninstalls scimark2 from the vm."""
vm.RemoteCommand('rm -rf {0}'.format(PATH))
|
from datetime import timedelta
import logging
import voluptuous as vol
import whois
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, TIME_DAYS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_DOMAIN = "domain"
DEFAULT_NAME = "Whois"
ATTR_EXPIRES = "expires"
ATTR_NAME_SERVERS = "name_servers"
ATTR_REGISTRAR = "registrar"
ATTR_UPDATED = "updated"
SCAN_INTERVAL = timedelta(hours=24)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DOMAIN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the WHOIS sensor."""
domain = config.get(CONF_DOMAIN)
name = config.get(CONF_NAME)
try:
if "expiration_date" in whois.whois(domain):
add_entities([WhoisSensor(name, domain)], True)
else:
_LOGGER.error(
"WHOIS lookup for %s didn't contain an expiration date", domain
)
return
except whois.BaseException as ex: # pylint: disable=broad-except
_LOGGER.error("Exception %s occurred during WHOIS lookup for %s", ex, domain)
return
class WhoisSensor(Entity):
"""Implementation of a WHOIS sensor."""
def __init__(self, name, domain):
"""Initialize the sensor."""
self.whois = whois.whois
self._name = name
self._domain = domain
self._state = None
self._attributes = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to represent this sensor."""
return "mdi:calendar-clock"
@property
def unit_of_measurement(self):
"""Return the unit of measurement to present the value in."""
return TIME_DAYS
@property
def state(self):
"""Return the expiration days for hostname."""
return self._state
@property
def device_state_attributes(self):
"""Get the more info attributes."""
return self._attributes
def _empty_state_and_attributes(self):
"""Empty the state and attributes on an error."""
self._state = None
self._attributes = None
def update(self):
"""Get the current WHOIS data for the domain."""
try:
response = self.whois(self._domain)
except whois.BaseException as ex: # pylint: disable=broad-except
_LOGGER.error("Exception %s occurred during WHOIS lookup", ex)
self._empty_state_and_attributes()
return
if response:
if "expiration_date" not in response:
_LOGGER.error(
"Failed to find expiration_date in whois lookup response. "
"Did find: %s",
", ".join(response.keys()),
)
self._empty_state_and_attributes()
return
if not response["expiration_date"]:
_LOGGER.error("Whois response contains empty expiration_date")
self._empty_state_and_attributes()
return
attrs = {}
            expiration_date = response["expiration_date"]
            if isinstance(expiration_date, list):
                # Some registries report several expiration dates; use the first
                # so the timedelta below operates on a datetime, not a list.
                expiration_date = expiration_date[0]
            attrs[ATTR_EXPIRES] = expiration_date.isoformat()
if "nameservers" in response:
attrs[ATTR_NAME_SERVERS] = " ".join(response["nameservers"])
if "updated_date" in response:
update_date = response["updated_date"]
if isinstance(update_date, list):
attrs[ATTR_UPDATED] = update_date[0].isoformat()
else:
attrs[ATTR_UPDATED] = update_date.isoformat()
if "registrar" in response:
attrs[ATTR_REGISTRAR] = response["registrar"]
time_delta = expiration_date - expiration_date.now()
self._attributes = attrs
self._state = time_delta.days
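# Worked example of the state calculation above (dates are illustrative): if
# the registry reports expiration_date == datetime(2021, 6, 1) and the sensor
# updates on 2021-05-02, then
#     (datetime(2021, 6, 1) - datetime(2021, 5, 2)).days == 30
# so the sensor state is 30, i.e. the number of days until the domain expires.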
|
from __future__ import print_function
import sys
from argparse import ArgumentParser
def main(args):
ap = ArgumentParser(description="Read and execute commands from a shell script in the current environment")
ap.add_argument('file', action="store", help='file to be sourced')
ap.add_argument('args', nargs='*', help='arguments to the file being sourced')
ns = ap.parse_args(args)
_stash = globals()['_stash']
""":type : StaSh"""
_, current_state = _stash.runtime.get_current_worker_and_state()
# The special thing about source is it persists any environmental changes
# in the sub-shell to the parent shell.
try:
with open(ns.file) as ins:
_stash(ins.readlines(), persistent_level=1)
except IOError as e:
print('%s: %s' % (e.filename, e.strerror))
except Exception as e:
print('error: %s' % str(e))
finally:
pass
if __name__ == '__main__':
main(sys.argv[1:])
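# Usage sketch inside StaSh (the file name is illustrative): running
#
#     source .stashrc
#
# executes each line of .stashrc with persistent_level=1, so environment
# variables and aliases set by the script remain visible in the calling shell
# after the command returns.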
|
from homeassistant.const import STATE_CLOSED
from .util import async_init_integration
async def test_create_covers(hass):
"""Test creation of covers."""
await async_init_integration(hass)
state = hass.states.get("cover.large_garage_door")
assert state.state == STATE_CLOSED
expected_attributes = {
"device_class": "garage",
"friendly_name": "Large Garage Door",
"supported_features": 3,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
state = hass.states.get("cover.small_garage_door")
assert state.state == STATE_CLOSED
expected_attributes = {
"device_class": "garage",
"friendly_name": "Small Garage Door",
"supported_features": 3,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
state = hass.states.get("cover.gate")
assert state.state == STATE_CLOSED
expected_attributes = {
"device_class": "gate",
"friendly_name": "Gate",
"supported_features": 3,
}
# Only test for a subset of attributes in case
# HA changes the implementation and a new one appears
assert all(
state.attributes[key] == expected_attributes[key] for key in expected_attributes
)
|
import pytest
from homeassistant.components.demo import DOMAIN
from homeassistant.components.lock import (
DOMAIN as LOCK_DOMAIN,
SERVICE_LOCK,
SERVICE_OPEN,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_UNLOCKED,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
from tests.common import async_mock_service
FRONT = "lock.front_door"
KITCHEN = "lock.kitchen_door"
OPENABLE_LOCK = "lock.openable_lock"
@pytest.fixture(autouse=True)
async def setup_comp(hass):
"""Set up demo component."""
assert await async_setup_component(
hass, LOCK_DOMAIN, {LOCK_DOMAIN: {"platform": DOMAIN}}
)
await hass.async_block_till_done()
async def test_locking(hass):
"""Test the locking of a lock."""
state = hass.states.get(KITCHEN)
assert state.state == STATE_UNLOCKED
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: KITCHEN}, blocking=True
)
state = hass.states.get(KITCHEN)
assert state.state == STATE_LOCKED
async def test_unlocking(hass):
"""Test the unlocking of a lock."""
state = hass.states.get(FRONT)
assert state.state == STATE_LOCKED
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: FRONT}, blocking=True
)
state = hass.states.get(FRONT)
assert state.state == STATE_UNLOCKED
async def test_opening(hass):
"""Test the opening of a lock."""
calls = async_mock_service(hass, LOCK_DOMAIN, SERVICE_OPEN)
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_OPEN, {ATTR_ENTITY_ID: OPENABLE_LOCK}, blocking=True
)
assert len(calls) == 1
|
from datetime import timedelta
from axis.event_stream import (
CLASS_INPUT,
CLASS_LIGHT,
CLASS_MOTION,
CLASS_OUTPUT,
CLASS_SOUND,
FenceGuard,
LoiteringGuard,
MotionGuard,
Vmd4,
)
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_SOUND,
BinarySensorEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from .axis_base import AxisEventBase
from .const import DOMAIN as AXIS_DOMAIN
DEVICE_CLASS = {
CLASS_INPUT: DEVICE_CLASS_CONNECTIVITY,
CLASS_LIGHT: DEVICE_CLASS_LIGHT,
CLASS_MOTION: DEVICE_CLASS_MOTION,
CLASS_SOUND: DEVICE_CLASS_SOUND,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a Axis binary sensor."""
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
@callback
def async_add_sensor(event_id):
"""Add binary sensor from Axis device."""
event = device.api.event[event_id]
if event.CLASS != CLASS_OUTPUT and not (
event.CLASS == CLASS_LIGHT and event.TYPE == "Light"
):
async_add_entities([AxisBinarySensor(event, device)])
device.listeners.append(
async_dispatcher_connect(hass, device.signal_new_event, async_add_sensor)
)
class AxisBinarySensor(AxisEventBase, BinarySensorEntity):
"""Representation of a binary Axis event."""
def __init__(self, event, device):
"""Initialize the Axis binary sensor."""
super().__init__(event, device)
self.cancel_scheduled_update = None
@callback
def update_callback(self, no_delay=False):
"""Update the sensor's state, if needed.
Parameter no_delay is True when device_event_reachable is sent.
"""
@callback
def scheduled_update(now):
"""Timer callback for sensor update."""
self.cancel_scheduled_update = None
self.async_write_ha_state()
if self.cancel_scheduled_update is not None:
self.cancel_scheduled_update()
self.cancel_scheduled_update = None
if self.is_on or self.device.option_trigger_time == 0 or no_delay:
self.async_write_ha_state()
return
self.cancel_scheduled_update = async_track_point_in_utc_time(
self.hass,
scheduled_update,
utcnow() + timedelta(seconds=self.device.option_trigger_time),
)
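    # Timing sketch for the delay handling above (values are illustrative): with
    # option_trigger_time == 30, a motion event that trips and then clears is
    # written immediately on the "on" transition, while the "off" transition is
    # deferred via async_track_point_in_utc_time and only written 30 seconds
    # later, unless a newer event cancels the scheduled update first.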
@property
def is_on(self):
"""Return true if event is active."""
return self.event.is_tripped
@property
def name(self):
"""Return the name of the event."""
if (
self.event.CLASS == CLASS_INPUT
and self.event.id
and self.device.api.vapix.ports[self.event.id].name
):
return (
f"{self.device.name} {self.device.api.vapix.ports[self.event.id].name}"
)
if self.event.CLASS == CLASS_MOTION:
for event_class, event_data in (
(FenceGuard, self.device.api.vapix.fence_guard),
(LoiteringGuard, self.device.api.vapix.loitering_guard),
(MotionGuard, self.device.api.vapix.motion_guard),
(Vmd4, self.device.api.vapix.vmd4),
):
if (
isinstance(self.event, event_class)
and event_data
and self.event.id in event_data
):
return f"{self.device.name} {self.event.TYPE} {event_data[self.event.id].name}"
return super().name
@property
def device_class(self):
"""Return the class of the sensor."""
return DEVICE_CLASS.get(self.event.CLASS)
|
from pydeconz.sensor import (
Battery,
Consumption,
Daylight,
Humidity,
LightLevel,
Power,
Pressure,
Switch,
Temperature,
Thermostat,
)
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import (
ATTR_TEMPERATURE,
ATTR_VOLTAGE,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
ENERGY_KILO_WATT_HOUR,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
PRESSURE_HPA,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from .const import ATTR_DARK, ATTR_ON, NEW_SENSOR
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
ATTR_CURRENT = "current"
ATTR_POWER = "power"
ATTR_DAYLIGHT = "daylight"
ATTR_EVENT_ID = "event_id"
DEVICE_CLASS = {
Humidity: DEVICE_CLASS_HUMIDITY,
LightLevel: DEVICE_CLASS_ILLUMINANCE,
Power: DEVICE_CLASS_POWER,
Pressure: DEVICE_CLASS_PRESSURE,
Temperature: DEVICE_CLASS_TEMPERATURE,
}
ICON = {
Daylight: "mdi:white-balance-sunny",
Pressure: "mdi:gauge",
Temperature: "mdi:thermometer",
}
UNIT_OF_MEASUREMENT = {
Consumption: ENERGY_KILO_WATT_HOUR,
Humidity: PERCENTAGE,
LightLevel: LIGHT_LUX,
Power: POWER_WATT,
Pressure: PRESSURE_HPA,
Temperature: TEMP_CELSIUS,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the deCONZ sensors."""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
battery_handler = DeconzBatteryHandler(gateway)
@callback
def async_add_sensor(sensors):
"""Add sensors from deCONZ.
Create DeconzBattery if sensor has a battery attribute.
Create DeconzSensor if not a battery, switch or thermostat and not a binary sensor.
"""
entities = []
for sensor in sensors:
if not gateway.option_allow_clip_sensor and sensor.type.startswith("CLIP"):
continue
if sensor.battery is not None:
battery_handler.remove_tracker(sensor)
known_batteries = set(gateway.entities[DOMAIN])
new_battery = DeconzBattery(sensor, gateway)
if new_battery.unique_id not in known_batteries:
entities.append(new_battery)
else:
battery_handler.create_tracker(sensor)
if (
not sensor.BINARY
and sensor.type
not in Battery.ZHATYPE + Switch.ZHATYPE + Thermostat.ZHATYPE
and sensor.uniqueid not in gateway.entities[DOMAIN]
):
entities.append(DeconzSensor(sensor, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_SENSOR), async_add_sensor
)
)
async_add_sensor(
[gateway.api.sensors[key] for key in sorted(gateway.api.sensors, key=int)]
)
class DeconzSensor(DeconzDevice):
"""Representation of a deCONZ sensor."""
TYPE = DOMAIN
@callback
def async_update_callback(self, force_update=False):
"""Update the sensor's state."""
keys = {"on", "reachable", "state"}
if force_update or self._device.changed_keys.intersection(keys):
super().async_update_callback(force_update=force_update)
@property
def state(self):
"""Return the state of the sensor."""
return self._device.state
@property
def device_class(self):
"""Return the class of the sensor."""
return DEVICE_CLASS.get(type(self._device))
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON.get(type(self._device))
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this sensor."""
return UNIT_OF_MEASUREMENT.get(type(self._device))
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attr = {}
if self._device.on is not None:
attr[ATTR_ON] = self._device.on
if self._device.secondary_temperature is not None:
attr[ATTR_TEMPERATURE] = self._device.secondary_temperature
if self._device.type in Consumption.ZHATYPE:
attr[ATTR_POWER] = self._device.power
elif self._device.type in Daylight.ZHATYPE:
attr[ATTR_DAYLIGHT] = self._device.daylight
elif self._device.type in LightLevel.ZHATYPE:
if self._device.dark is not None:
attr[ATTR_DARK] = self._device.dark
if self._device.daylight is not None:
attr[ATTR_DAYLIGHT] = self._device.daylight
elif self._device.type in Power.ZHATYPE:
attr[ATTR_CURRENT] = self._device.current
attr[ATTR_VOLTAGE] = self._device.voltage
return attr
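# Shape of the attributes returned above (illustrative values; keys come from
# the ATTR_* constants in this module and homeassistant.const): a Power sensor
# with on=True, current=420 and voltage=230 would yield roughly
#     {"on": True, "current": 420, "voltage": 230}
# plus a "temperature" entry when the device exposes a secondary temperature.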
class DeconzBattery(DeconzDevice):
"""Battery class for when a device is only represented as an event."""
TYPE = DOMAIN
@callback
def async_update_callback(self, force_update=False):
"""Update the battery's state, if needed."""
keys = {"battery", "reachable"}
if force_update or self._device.changed_keys.intersection(keys):
super().async_update_callback(force_update=force_update)
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return f"{self.serial}-battery"
@property
def state(self):
"""Return the state of the battery."""
return self._device.battery
@property
def name(self):
"""Return the name of the battery."""
return f"{self._device.name} Battery Level"
@property
def device_class(self):
"""Return the class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return PERCENTAGE
@property
def device_state_attributes(self):
"""Return the state attributes of the battery."""
attr = {}
if self._device.type in Switch.ZHATYPE:
for event in self.gateway.events:
if self._device == event.device:
attr[ATTR_EVENT_ID] = event.event_id
return attr
class DeconzSensorStateTracker:
"""Track sensors without a battery state and signal when battery state exist."""
def __init__(self, sensor, gateway):
"""Set up tracker."""
self.sensor = sensor
self.gateway = gateway
sensor.register_callback(self.async_update_callback)
@callback
def close(self):
"""Clean up tracker."""
self.sensor.remove_callback(self.async_update_callback)
self.gateway = None
self.sensor = None
@callback
def async_update_callback(self, ignore_update=False):
"""Sensor state updated."""
if "battery" in self.sensor.changed_keys:
async_dispatcher_send(
self.gateway.hass,
self.gateway.async_signal_new_device(NEW_SENSOR),
[self.sensor],
)
class DeconzBatteryHandler:
"""Creates and stores trackers for sensors without a battery state."""
def __init__(self, gateway):
"""Set up battery handler."""
self.gateway = gateway
self._trackers = set()
@callback
def create_tracker(self, sensor):
"""Create new tracker for battery state."""
for tracker in self._trackers:
if sensor == tracker.sensor:
return
self._trackers.add(DeconzSensorStateTracker(sensor, self.gateway))
@callback
def remove_tracker(self, sensor):
"""Remove tracker of battery state."""
for tracker in self._trackers:
if sensor == tracker.sensor:
tracker.close()
self._trackers.remove(tracker)
break
|
import logging
from babelfish import Language, language_converters
from requests import Session
from . import Provider
from .. import __short_version__
from ..subtitle import Subtitle, fix_line_ending
logger = logging.getLogger(__name__)
language_converters.register('thesubdb = subliminal.converters.thesubdb:TheSubDBConverter')
class TheSubDBSubtitle(Subtitle):
"""TheSubDB Subtitle."""
provider_name = 'thesubdb'
def __init__(self, language, hash):
super(TheSubDBSubtitle, self).__init__(language)
self.hash = hash
@property
def id(self):
return self.hash + '-' + str(self.language)
@property
def info(self):
return self.hash
def get_matches(self, video):
matches = set()
# hash
if 'thesubdb' in video.hashes and video.hashes['thesubdb'] == self.hash:
matches.add('hash')
return matches
class TheSubDBProvider(Provider):
"""TheSubDB Provider."""
languages = {Language.fromthesubdb(l) for l in language_converters['thesubdb'].codes}
required_hash = 'thesubdb'
server_url = 'http://api.thesubdb.com/'
subtitle_class = TheSubDBSubtitle
user_agent = 'SubDB/1.0 (subliminal/%s; https://github.com/Diaoul/subliminal)' % __short_version__
def __init__(self):
self.session = None
def initialize(self):
self.session = Session()
self.session.headers['User-Agent'] = self.user_agent
def terminate(self):
self.session.close()
def query(self, hash):
# make the query
params = {'action': 'search', 'hash': hash}
logger.info('Searching subtitles %r', params)
r = self.session.get(self.server_url, params=params, timeout=10)
# handle subtitles not found and errors
if r.status_code == 404:
logger.debug('No subtitles found')
return []
r.raise_for_status()
# loop over languages
subtitles = []
for language_code in r.text.split(','):
language = Language.fromthesubdb(language_code)
subtitle = self.subtitle_class(language, hash)
logger.debug('Found subtitle %r', subtitle)
subtitles.append(subtitle)
return subtitles
def list_subtitles(self, video, languages):
return [s for s in self.query(video.hashes['thesubdb']) if s.language in languages]
def download_subtitle(self, subtitle):
logger.info('Downloading subtitle %r', subtitle)
params = {'action': 'download', 'hash': subtitle.hash, 'language': subtitle.language.alpha2}
r = self.session.get(self.server_url, params=params, timeout=10)
r.raise_for_status()
subtitle.content = fix_line_ending(r.content)
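# Usage sketch (illustrative; the hash below is made up, not a real lookup):
#
#     provider = TheSubDBProvider()
#     provider.initialize()
#     found = provider.query('ffd8d4aa68033dc03d1c8ef373b9028c')
#     if found:
#         provider.download_subtitle(found[0])
#     provider.terminate()
#
# list_subtitles() wraps the same query using the video's precomputed
# 'thesubdb' hash and filters the results by the requested languages.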
|
import logging
import pytest
pytest.importorskip('PyQt5.QtWebEngineWidgets')
from qutebrowser.browser.webengine import webenginesettings
from qutebrowser.utils import usertypes
from qutebrowser.misc import objects
@pytest.fixture(autouse=True)
def init(qapp, config_stub, cache_tmpdir, data_tmpdir, monkeypatch):
monkeypatch.setattr(webenginesettings.webenginequtescheme, 'init',
lambda: None)
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebEngine)
webenginesettings.init()
config_stub.changed.disconnect(webenginesettings._update_settings)
def test_big_cache_size(config_stub):
"""Make sure a too big cache size is handled correctly."""
config_stub.val.content.cache.size = 2 ** 63 - 1
profile = webenginesettings.default_profile
profile.setter.set_http_cache_size()
assert profile.httpCacheMaximumSize() == 2 ** 31 - 1
def test_non_existing_dict(config_stub, monkeypatch, message_mock, caplog):
monkeypatch.setattr(webenginesettings.spell, 'local_filename',
lambda _code: None)
config_stub.val.spellcheck.languages = ['af-ZA']
with caplog.at_level(logging.WARNING):
webenginesettings._update_settings('spellcheck.languages')
msg = message_mock.getmsg(usertypes.MessageLevel.warning)
expected = ("Language af-ZA is not installed - see scripts/dictcli.py in "
"qutebrowser's sources")
assert msg.text == expected
def test_existing_dict(config_stub, monkeypatch):
monkeypatch.setattr(webenginesettings.spell, 'local_filename',
lambda _code: 'en-US-8-0')
config_stub.val.spellcheck.languages = ['en-US']
webenginesettings._update_settings('spellcheck.languages')
for profile in [webenginesettings.default_profile,
webenginesettings.private_profile]:
assert profile.isSpellCheckEnabled()
assert profile.spellCheckLanguages() == ['en-US-8-0']
def test_spell_check_disabled(config_stub, monkeypatch):
config_stub.val.spellcheck.languages = []
webenginesettings._update_settings('spellcheck.languages')
for profile in [webenginesettings.default_profile,
webenginesettings.private_profile]:
assert not profile.isSpellCheckEnabled()
def test_default_user_agent_saved():
assert webenginesettings.parsed_user_agent is not None
def test_parsed_user_agent(qapp):
webenginesettings.init_user_agent()
parsed = webenginesettings.parsed_user_agent
assert parsed.upstream_browser_key == 'Chrome'
assert parsed.qt_key == 'QtWebEngine'
|
import threading
import os
from yandextank.common.util import get_test_path
from yandextank.core.tankcore import TankCore
from yandextank.core.tankworker import TankInfo
from yandextank.plugins.Telegraf import Plugin as TelegrafPlugin
class TestTelegrafPlugin(object):
def test_plugin_configuration(self):
""" testing telegraf plugin configuration """
cfg = {
'core': {},
'telegraf': {
'package': 'yandextank.plugins.Telegraf',
'enabled': True,
'config': os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/telegraf_mon.xml')
}
}
core = TankCore(cfg, threading.Event(), TankInfo({}))
telegraf_plugin = core.get_plugin_of_type(TelegrafPlugin)
telegraf_plugin.configure()
assert telegraf_plugin.detected_conf == 'telegraf'
def test_legacy_plugin_configuration(self):
""" testing legacy plugin configuration, old-style monitoring """
cfg = {
'core': {},
'monitoring': {
'package': 'yandextank.plugins.Telegraf',
'enabled': True,
'config': os.path.join(get_test_path(), 'yandextank/plugins/Telegraf/tests/old_mon.xml')
}
}
core = TankCore(cfg, threading.Event(), TankInfo({}))
telegraf_plugin = core.get_plugin_of_type(TelegrafPlugin)
telegraf_plugin.configure()
assert telegraf_plugin.detected_conf == 'monitoring'
|
from homeassistant.components.homematicip_cloud.const import (
DOMAIN as HMIPC_DOMAIN,
HMIPC_AUTHTOKEN,
HMIPC_HAPID,
HMIPC_NAME,
HMIPC_PIN,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
DEFAULT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_PIN: "123", HMIPC_NAME: "hmip"}
IMPORT_CONFIG = {HMIPC_HAPID: "ABC123", HMIPC_AUTHTOKEN: "123", HMIPC_NAME: "hmip"}
async def test_flow_works(hass, simple_mock_home):
"""Test config flow."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=False,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.get_auth",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "press_the_button"}
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == "ABC123"
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_register",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipHAP.async_connect",
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "create_entry"
assert result["title"] == "ABC123"
assert result["data"] == {"hapid": "ABC123", "authtoken": True, "name": "hmip"}
assert result["result"].unique_id == "ABC123"
async def test_flow_init_connection_error(hass):
"""Test config flow with accesspoint connection error."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=False,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
)
assert result["type"] == "form"
assert result["step_id"] == "init"
async def test_flow_link_connection_error(hass):
"""Test config flow client registration connection error."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_register",
return_value=False,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "connection_aborted"
async def test_flow_link_press_button(hass):
"""Test config flow ask for pressing the blue button."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=False,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "press_the_button"}
async def test_init_flow_show_form(hass):
"""Test config flow shows up with a form."""
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
async def test_init_already_configured(hass):
"""Test accesspoint is already configured."""
MockConfigEntry(domain=HMIPC_DOMAIN, unique_id="ABC123").add_to_hass(hass)
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "user"}, data=DEFAULT_CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_import_config(hass, simple_mock_home):
"""Test importing a host with an existing config file."""
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_register",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipHAP.async_connect",
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "import"}, data=IMPORT_CONFIG
)
assert result["type"] == "create_entry"
assert result["title"] == "ABC123"
assert result["data"] == {"authtoken": "123", "hapid": "ABC123", "name": "hmip"}
assert result["result"].unique_id == "ABC123"
async def test_import_existing_config(hass):
"""Test abort of an existing accesspoint from config."""
MockConfigEntry(domain=HMIPC_DOMAIN, unique_id="ABC123").add_to_hass(hass)
with patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_checkbutton",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_setup",
return_value=True,
), patch(
"homeassistant.components.homematicip_cloud.hap.HomematicipAuth.async_register",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
HMIPC_DOMAIN, context={"source": "import"}, data=IMPORT_CONFIG
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
|
import io
import os
import azure.storage.blob
from pytest import fixture
import smart_open
_AZURE_CONTAINER = os.environ.get('SO_AZURE_CONTAINER')
_AZURE_STORAGE_CONNECTION_STRING = os.environ.get('AZURE_STORAGE_CONNECTION_STRING')
_FILE_PREFIX = '%s://%s' % (smart_open.azure.SCHEME, _AZURE_CONTAINER)
assert _AZURE_CONTAINER is not None, 'please set the SO_AZURE_CONTAINER environment variable'
assert _AZURE_STORAGE_CONNECTION_STRING is not None, \
'please set the AZURE_STORAGE_CONNECTION_STRING environment variable'
@fixture
def client():
# type: () -> azure.storage.blob.BlobServiceClient
return azure.storage.blob.BlobServiceClient.from_connection_string(_AZURE_STORAGE_CONNECTION_STRING)
def initialize_bucket(client):
container_client = client.get_container_client(_AZURE_CONTAINER)
blobs = container_client.list_blobs()
for blob in blobs:
container_client.delete_blob(blob=blob)
def write_read(key, content, write_mode, read_mode, **kwargs):
with smart_open.open(key, write_mode, **kwargs) as fout:
fout.write(content)
with smart_open.open(key, read_mode, **kwargs) as fin:
return fin.read()
def read_length_prefixed_messages(key, read_mode, **kwargs):
result = io.BytesIO()
with smart_open.open(key, read_mode, **kwargs) as fin:
length_byte = fin.read(1)
while len(length_byte):
result.write(length_byte)
msg = fin.read(ord(length_byte))
result.write(msg)
length_byte = fin.read(1)
return result.getvalue()
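# Framing sketch for the helper above (bytes are illustrative): a stream such as
#
#     b'\x03abc\x02de'
#
# is consumed as one-byte length prefixes followed by payloads, so the helper
# returns the original bytes unchanged while issuing many small read() calls,
# which is exactly what the small-reads benchmark below measures.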
def test_azure_readwrite_text(benchmark, client):
initialize_bucket(client)
key = _FILE_PREFIX + '/sanity.txt'
text = 'с гранатою в кармане, с чекою в руке'
actual = benchmark(
write_read, key, text, 'w', 'r', encoding='utf-8', transport_params=dict(client=client)
)
assert actual == text
def test_azure_readwrite_text_gzip(benchmark, client):
initialize_bucket(client)
key = _FILE_PREFIX + '/sanity.txt.gz'
text = 'не чайки здесь запели на знакомом языке'
actual = benchmark(
write_read, key, text, 'w', 'r', encoding='utf-8', transport_params=dict(client=client)
)
assert actual == text
def test_azure_readwrite_binary(benchmark, client):
initialize_bucket(client)
key = _FILE_PREFIX + '/sanity.txt'
binary = b'this is a test'
actual = benchmark(write_read, key, binary, 'wb', 'rb', transport_params=dict(client=client))
assert actual == binary
def test_azure_readwrite_binary_gzip(benchmark, client):
initialize_bucket(client)
key = _FILE_PREFIX + '/sanity.txt.gz'
binary = b'this is a test'
actual = benchmark(write_read, key, binary, 'wb', 'rb', transport_params=dict(client=client))
assert actual == binary
def test_azure_performance(benchmark, client):
initialize_bucket(client)
one_megabyte = io.BytesIO()
for _ in range(1024*128):
one_megabyte.write(b'01234567')
one_megabyte = one_megabyte.getvalue()
key = _FILE_PREFIX + '/performance.txt'
actual = benchmark(write_read, key, one_megabyte, 'wb', 'rb', transport_params=dict(client=client))
assert actual == one_megabyte
def test_azure_performance_gz(benchmark, client):
initialize_bucket(client)
one_megabyte = io.BytesIO()
for _ in range(1024*128):
one_megabyte.write(b'01234567')
one_megabyte = one_megabyte.getvalue()
key = _FILE_PREFIX + '/performance.txt.gz'
actual = benchmark(write_read, key, one_megabyte, 'wb', 'rb', transport_params=dict(client=client))
assert actual == one_megabyte
def test_azure_performance_small_reads(benchmark, client):
initialize_bucket(client)
ONE_MIB = 1024**2
one_megabyte_of_msgs = io.BytesIO()
msg = b'\x0f' + b'0123456789abcde' # a length-prefixed "message"
for _ in range(0, ONE_MIB, len(msg)):
one_megabyte_of_msgs.write(msg)
one_megabyte_of_msgs = one_megabyte_of_msgs.getvalue()
key = _FILE_PREFIX + '/many_reads_performance.bin'
with smart_open.open(key, 'wb', transport_params=dict(client=client)) as fout:
fout.write(one_megabyte_of_msgs)
actual = benchmark(
read_length_prefixed_messages, key, 'rb', buffering=ONE_MIB, transport_params=dict(client=client)
)
assert actual == one_megabyte_of_msgs
|
import bz2
import logging
import multiprocessing
import re
import signal
from pickle import PicklingError
# LXML isn't faster, so let's go with the built-in solution
from xml.etree.ElementTree import iterparse
from gensim import utils
# cannot import whole gensim.corpora, because that imports wikicorpus...
from gensim.corpora.dictionary import Dictionary
from gensim.corpora.textcorpus import TextCorpus
logger = logging.getLogger(__name__)
ARTICLE_MIN_WORDS = 50
"""Ignore shorter articles (after full preprocessing)."""
# default thresholds for lengths of individual tokens
TOKEN_MIN_LEN = 2
TOKEN_MAX_LEN = 15
RE_P0 = re.compile(r'<!--.*?-->', re.DOTALL | re.UNICODE)
"""Comments."""
RE_P1 = re.compile(r'<ref([> ].*?)(</ref>|/>)', re.DOTALL | re.UNICODE)
"""Footnotes."""
RE_P2 = re.compile(r'(\n\[\[[a-z][a-z][\w-]*:[^:\]]+\]\])+$', re.UNICODE)
"""Links to languages."""
RE_P3 = re.compile(r'{{([^}{]*)}}', re.DOTALL | re.UNICODE)
"""Template."""
RE_P4 = re.compile(r'{{([^}]*)}}', re.DOTALL | re.UNICODE)
"""Template."""
RE_P5 = re.compile(r'\[(\w+):\/\/(.*?)(( (.*?))|())\]', re.UNICODE)
"""Remove URL, keep description."""
RE_P6 = re.compile(r'\[([^][]*)\|([^][]*)\]', re.DOTALL | re.UNICODE)
"""Simplify links, keep description."""
RE_P7 = re.compile(r'\n\[\[[iI]mage(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE)
"""Keep description of images."""
RE_P8 = re.compile(r'\n\[\[[fF]ile(.*?)(\|.*?)*\|(.*?)\]\]', re.UNICODE)
"""Keep description of files."""
RE_P9 = re.compile(r'<nowiki([> ].*?)(</nowiki>|/>)', re.DOTALL | re.UNICODE)
"""External links."""
RE_P10 = re.compile(r'<math([> ].*?)(</math>|/>)', re.DOTALL | re.UNICODE)
"""Math content."""
RE_P11 = re.compile(r'<(.*?)>', re.DOTALL | re.UNICODE)
"""All other tags."""
RE_P12 = re.compile(r'(({\|)|(\|-(?!\d))|(\|}))(.*?)(?=\n)', re.UNICODE)
"""Table formatting."""
RE_P13 = re.compile(r'(?<=(\n[ ])|(\n\n)|([ ]{2})|(.\n)|(.\t))(\||\!)([^[\]\n]*?\|)*', re.UNICODE)
"""Table cell formatting."""
RE_P14 = re.compile(r'\[\[Category:[^][]*\]\]', re.UNICODE)
"""Categories."""
RE_P15 = re.compile(r'\[\[([fF]ile:|[iI]mage)[^]]*(\]\])', re.UNICODE)
"""Remove File and Image templates."""
RE_P16 = re.compile(r'\[{2}(.*?)\]{2}', re.UNICODE)
"""Capture interlinks text and article linked"""
RE_P17 = re.compile(
r'(\n.{0,4}((bgcolor)|(\d{0,1}[ ]?colspan)|(rowspan)|(style=)|(class=)|(align=)|(scope=))(.*))|'
r'(^.{0,2}((bgcolor)|(\d{0,1}[ ]?colspan)|(rowspan)|(style=)|(class=)|(align=))(.*))',
re.UNICODE
)
"""Table markup"""
IGNORED_NAMESPACES = [
'Wikipedia', 'Category', 'File', 'Portal', 'Template',
'MediaWiki', 'User', 'Help', 'Book', 'Draft', 'WikiProject',
'Special', 'Talk'
]
"""MediaWiki namespaces that ought to be ignored."""
def filter_example(elem, text, *args, **kwargs):
"""Example function for filtering arbitrary documents from wikipedia dump.
The custom filter function is called _before_ tokenisation and should work on
the raw text and/or XML element information.
The filter function gets the entire context of the XML element passed into it,
    but you can of course choose not to use some or all parts of the context. Please
refer to :func:`gensim.corpora.wikicorpus.extract_pages` for the exact details
of the page context.
Parameters
----------
elem : etree.Element
XML etree element
text : str
The text of the XML node
namespace : str
XML namespace of the XML element
title : str
Page title
page_tag : str
XPath expression for page.
text_path : str
XPath expression for text.
title_path : str
XPath expression for title.
ns_path : str
XPath expression for namespace.
pageid_path : str
XPath expression for page id.
Example
-------
.. sourcecode:: pycon
>>> import gensim.corpora
>>> filter_func = gensim.corpora.wikicorpus.filter_example
>>> dewiki = gensim.corpora.WikiCorpus(
... './dewiki-20180520-pages-articles-multistream.xml.bz2',
... filter_articles=filter_func)
"""
# Filter German wikipedia dump for articles that are marked either as
# Lesenswert (featured) or Exzellent (excellent) by wikipedia editors.
# *********************
# regex is in the function call so that we do not pollute the wikicorpus
    # namespace; do not do this in production, as this function is called for
# every element in the wiki dump
_regex_de_excellent = re.compile(r'.*\{\{(Exzellent.*?)\}\}[\s]*', flags=re.DOTALL)
_regex_de_featured = re.compile(r'.*\{\{(Lesenswert.*?)\}\}[\s]*', flags=re.DOTALL)
if text is None:
return False
if _regex_de_excellent.match(text) or _regex_de_featured.match(text):
return True
else:
return False
def find_interlinks(raw):
"""Find all interlinks to other articles in the dump.
Parameters
----------
raw : str
Unicode or utf-8 encoded string.
Returns
-------
list
List of tuples in format [(linked article, the actual text found), ...].
"""
filtered = filter_wiki(raw, promote_remaining=False, simplify_links=False)
interlinks_raw = re.findall(RE_P16, filtered)
interlinks = []
for parts in [i.split('|') for i in interlinks_raw]:
actual_title = parts[0]
try:
interlink_text = parts[1]
except IndexError:
interlink_text = actual_title
interlink_tuple = (actual_title, interlink_text)
interlinks.append(interlink_tuple)
legit_interlinks = [(i, j) for i, j in interlinks if '[' not in i and ']' not in i]
return legit_interlinks
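# Example of the interlink extraction above (pycon-style, input is illustrative):
#
#     >>> find_interlinks("See [[Graph (discrete mathematics)|graph]] theory.")
#     [('Graph (discrete mathematics)', 'graph')]
#
# Links without an explicit description fall back to the article title itself,
# e.g. "[[Lattice]]" yields [('Lattice', 'Lattice')].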
def filter_wiki(raw, promote_remaining=True, simplify_links=True):
"""Filter out wiki markup from `raw`, leaving only text.
Parameters
----------
raw : str
Unicode or utf-8 encoded string.
promote_remaining : bool
Whether uncaught markup should be promoted to plain text.
simplify_links : bool
Whether links should be simplified keeping only their description text.
Returns
-------
str
`raw` without markup.
"""
# parsing of the wiki markup is not perfect, but sufficient for our purposes
# contributions to improving this code are welcome :)
text = utils.to_unicode(raw, 'utf8', errors='ignore')
text = utils.decode_htmlentities(text) # '&nbsp;' --> '\xa0'
return remove_markup(text, promote_remaining, simplify_links)
def remove_markup(text, promote_remaining=True, simplify_links=True):
"""Filter out wiki markup from `text`, leaving only text.
Parameters
----------
text : str
String containing markup.
promote_remaining : bool
Whether uncaught markup should be promoted to plain text.
simplify_links : bool
Whether links should be simplified keeping only their description text.
Returns
-------
str
`text` without markup.
"""
text = re.sub(RE_P2, '', text) # remove the last list (=languages)
# the wiki markup is recursive (markup inside markup etc)
# instead of writing a recursive grammar, here we deal with that by removing
# markup in a loop, starting with inner-most expressions and working outwards,
# for as long as something changes.
text = remove_template(text)
text = remove_file(text)
iters = 0
while True:
old, iters = text, iters + 1
text = re.sub(RE_P0, '', text) # remove comments
text = re.sub(RE_P1, '', text) # remove footnotes
text = re.sub(RE_P9, '', text) # remove outside links
text = re.sub(RE_P10, '', text) # remove math content
text = re.sub(RE_P11, '', text) # remove all remaining tags
text = re.sub(RE_P14, '', text) # remove categories
text = re.sub(RE_P5, '\\3', text) # remove urls, keep description
if simplify_links:
text = re.sub(RE_P6, '\\2', text) # simplify links, keep description only
# remove table markup
text = text.replace("!!", "\n|") # each table head cell on a separate line
text = text.replace("|-||", "\n|") # for cases where a cell is filled with '-'
text = re.sub(RE_P12, '\n', text) # remove formatting lines
text = text.replace('|||', '|\n|') # each table cell on a separate line(where |{{a|b}}||cell-content)
text = text.replace('||', '\n|') # each table cell on a separate line
text = re.sub(RE_P13, '\n', text) # leave only cell content
text = re.sub(RE_P17, '\n', text) # remove formatting lines
# remove empty mark-up
text = text.replace('[]', '')
# stop if nothing changed between two iterations or after a fixed number of iterations
if old == text or iters > 2:
break
if promote_remaining:
text = text.replace('[', '').replace(']', '') # promote all remaining markup to plain text
return text
def remove_template(s):
"""Remove template wikimedia markup.
Parameters
----------
s : str
String containing markup template.
Returns
-------
str
        Copy of `s` with all the `wikimedia markup template <http://meta.wikimedia.org/wiki/Help:Template>`_ removed.
Notes
-----
    Since templates can be nested, it is difficult to remove them using regular expressions.
"""
# Find the start and end position of each template by finding the opening
# '{{' and closing '}}'
n_open, n_close = 0, 0
starts, ends = [], [-1]
in_template = False
prev_c = None
for i, c in enumerate(s):
if not in_template:
if c == '{' and c == prev_c:
starts.append(i - 1)
in_template = True
n_open = 1
if in_template:
if c == '{':
n_open += 1
elif c == '}':
n_close += 1
if n_open == n_close:
ends.append(i)
in_template = False
n_open, n_close = 0, 0
prev_c = c
# Remove all the templates
starts.append(None)
return ''.join(s[end + 1:start] for end, start in zip(ends, starts))
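# Example of the brace matching above (input is illustrative):
#
#     >>> remove_template("{{Infobox|name={{PAGENAME}}}} Article text.")
#     ' Article text.'
#
# The outer {{...}} is dropped as a whole even though it contains a nested
# template, which is why a character-level brace scan is used here instead of a
# single regular expression (see the Notes above).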
def remove_file(s):
"""Remove the 'File:' and 'Image:' markup, keeping the file caption.
Parameters
----------
s : str
String containing 'File:' and 'Image:' markup.
Returns
-------
str
        Copy of `s` with all the 'File:' and 'Image:' markup replaced by their `corresponding captions
<http://www.mediawiki.org/wiki/Help:Images>`_.
"""
    # The regex RE_P15 matches 'File:' or 'Image:' markup.
for match in re.finditer(RE_P15, s):
m = match.group(0)
caption = m[:-2].split('|')[-1]
s = s.replace(m, caption, 1)
return s
def tokenize(content, token_min_len=TOKEN_MIN_LEN, token_max_len=TOKEN_MAX_LEN, lower=True):
"""Tokenize a piece of text from Wikipedia.
Set `token_min_len`, `token_max_len` as character length (not bytes!) thresholds for individual tokens.
Parameters
----------
content : str
String without markup (see :func:`~gensim.corpora.wikicorpus.filter_wiki`).
token_min_len : int
Minimal token length.
token_max_len : int
Maximal token length.
lower : bool
Convert `content` to lower case?
Returns
-------
list of str
List of tokens from `content`.
"""
# TODO maybe ignore tokens with non-latin characters? (no chinese, arabic, russian etc.)
return [
utils.to_unicode(token) for token in utils.tokenize(content, lower=lower, errors='ignore')
if token_min_len <= len(token) <= token_max_len and not token.startswith('_')
]
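# Example of the default filtering above (pycon-style, sentence is illustrative):
#
#     >>> tokenize("A Wikipedia article about graph theory")
#     ['wikipedia', 'article', 'about', 'graph', 'theory']
#
# "A" is dropped because it is shorter than TOKEN_MIN_LEN (2 characters); the
# remaining tokens are lowercased and kept because they fit the 2-15 window.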
def get_namespace(tag):
"""Get the namespace of tag.
Parameters
----------
tag : str
Namespace or tag.
Returns
-------
str
Matched namespace or tag.
"""
m = re.match("^{(.*?)}", tag)
namespace = m.group(1) if m else ""
if not namespace.startswith("http://www.mediawiki.org/xml/export-"):
raise ValueError("%s not recognized as MediaWiki dump namespace" % namespace)
return namespace
_get_namespace = get_namespace
def extract_pages(f, filter_namespaces=False, filter_articles=None):
"""Extract pages from a MediaWiki database dump.
Parameters
----------
f : file
File-like object.
filter_namespaces : list of str or bool
Namespaces that will be extracted.
Yields
------
tuple of (str or None, str, str)
Title, text and page id.
"""
elems = (elem for _, elem in iterparse(f, events=("end",)))
    # We can't rely on the namespace for database dumps, since it changes
    # every time a small modification to the format is made. So, determine
# those from the first element we find, which will be part of the metadata,
# and construct element paths.
elem = next(elems)
namespace = get_namespace(elem.tag)
ns_mapping = {"ns": namespace}
page_tag = "{%(ns)s}page" % ns_mapping
text_path = "./{%(ns)s}revision/{%(ns)s}text" % ns_mapping
title_path = "./{%(ns)s}title" % ns_mapping
ns_path = "./{%(ns)s}ns" % ns_mapping
pageid_path = "./{%(ns)s}id" % ns_mapping
for elem in elems:
if elem.tag == page_tag:
title = elem.find(title_path).text
text = elem.find(text_path).text
if filter_namespaces:
ns = elem.find(ns_path).text
if ns not in filter_namespaces:
text = None
if filter_articles is not None:
if not filter_articles(
elem, namespace=namespace, title=title,
text=text, page_tag=page_tag,
text_path=text_path, title_path=title_path,
ns_path=ns_path, pageid_path=pageid_path):
text = None
pageid = elem.find(pageid_path).text
            yield title, text or "", pageid  # text is None for empty/filtered pages; yield "" instead
# Prune the element tree, as per
# http://www.ibm.com/developerworks/xml/library/x-hiperfparse/
# except that we don't need to prune backlinks from the parent
# because we don't use LXML.
# We do this only for <page>s, since we need to inspect the
# ./revision/text element. The pages comprise the bulk of the
# file, so in practice we prune away enough.
elem.clear()
_extract_pages = extract_pages # for backward compatibility
def process_article(args, tokenizer_func=tokenize, token_min_len=TOKEN_MIN_LEN,
token_max_len=TOKEN_MAX_LEN, lower=True):
"""Parse a Wikipedia article, extract all tokens.
Notes
-----
Set `tokenizer_func` (defaults is :func:`~gensim.corpora.wikicorpus.tokenize`) parameter for languages
like Japanese or Thai to perform better tokenization.
The `tokenizer_func` needs to take 4 parameters: (text: str, token_min_len: int, token_max_len: int, lower: bool).
Parameters
----------
args : (str, bool, str, int)
Article text, lemmatize flag (if True, :func:`~gensim.utils.lemmatize` will be used), article title,
page identificator.
tokenizer_func : function
Function for tokenization (defaults is :func:`~gensim.corpora.wikicorpus.tokenize`).
Needs to have interface:
tokenizer_func(text: str, token_min_len: int, token_max_len: int, lower: bool) -> list of str.
token_min_len : int
Minimal token length.
token_max_len : int
Maximal token length.
lower : bool
Convert article text to lower case?
Returns
-------
(list of str, str, int)
List of tokens from article, title and page id.
"""
text, lemmatize, title, pageid = args
text = filter_wiki(text)
if lemmatize:
result = utils.lemmatize(text)
else:
result = tokenizer_func(text, token_min_len, token_max_len, lower)
return result, title, pageid
def init_to_ignore_interrupt():
"""Enables interruption ignoring.
Warnings
--------
Should only be used when master is prepared to handle termination of
child processes.
"""
signal.signal(signal.SIGINT, signal.SIG_IGN)
def _process_article(args):
"""Same as :func:`~gensim.corpora.wikicorpus.process_article`, but with args in list format.
Parameters
----------
args : [(str, bool, str, int), (function, int, int, bool)]
First element - same as `args` from :func:`~gensim.corpora.wikicorpus.process_article`,
second element is tokenizer function, token minimal length, token maximal length, lowercase flag.
Returns
-------
(list of str, str, int)
List of tokens from article, title and page id.
Warnings
--------
Should not be called explicitly. Use :func:`~gensim.corpora.wikicorpus.process_article` instead.
"""
tokenizer_func, token_min_len, token_max_len, lower = args[-1]
args = args[:-1]
return process_article(
args, tokenizer_func=tokenizer_func, token_min_len=token_min_len,
token_max_len=token_max_len, lower=lower
)
class WikiCorpus(TextCorpus):
"""Treat a Wikipedia articles dump as a read-only, streamed, memory-efficient corpus.
Supported dump formats:
* <LANG>wiki-<YYYYMMDD>-pages-articles.xml.bz2
* <LANG>wiki-latest-pages-articles.xml.bz2
The documents are extracted on-the-fly, so that the whole (massive) dump can stay compressed on disk.
Notes
-----
    Dumps for the English Wikipedia can be found at https://dumps.wikimedia.org/enwiki/.
Attributes
----------
metadata : bool
Whether to write articles titles to serialized corpus.
Warnings
--------
"Multistream" archives are *not* supported in Python 2 due to `limitations in the core bz2 library
<https://docs.python.org/2/library/bz2.html#de-compression-of-files>`_.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath, get_tmpfile
>>> from gensim.corpora import WikiCorpus, MmCorpus
>>>
>>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2")
>>> corpus_path = get_tmpfile("wiki-corpus.mm")
>>>
>>> wiki = WikiCorpus(path_to_wiki_dump) # create word->word_id mapping, ~8h on full wiki
>>> MmCorpus.serialize(corpus_path, wiki) # another 8h, creates a file in MatrixMarket format and mapping
"""
def __init__(self, fname, processes=None, lemmatize=utils.has_pattern(), dictionary=None,
filter_namespaces=('0',), tokenizer_func=tokenize, article_min_tokens=ARTICLE_MIN_WORDS,
token_min_len=TOKEN_MIN_LEN, token_max_len=TOKEN_MAX_LEN, lower=True, filter_articles=None):
"""Initialize the corpus.
Unless a dictionary is provided, this scans the corpus once,
to determine its vocabulary.
Parameters
----------
fname : str
Path to the Wikipedia dump file.
processes : int, optional
Number of processes to run, defaults to `max(1, number of cpu - 1)`.
lemmatize : bool
Use lemmatization instead of simple regexp tokenization.
Defaults to `True` if you have the `pattern <https://github.com/clips/pattern>`_ package installed.
dictionary : :class:`~gensim.corpora.dictionary.Dictionary`, optional
Dictionary, if not provided, this scans the corpus once, to determine its vocabulary
**IMPORTANT: this needs a really long time**.
filter_namespaces : tuple of str, optional
Namespaces to consider.
tokenizer_func : function, optional
Function that will be used for tokenization. By default, use :func:`~gensim.corpora.wikicorpus.tokenize`.
If you inject your own tokenizer, it must conform to this interface:
`tokenizer_func(text: str, token_min_len: int, token_max_len: int, lower: bool) -> list of str`
article_min_tokens : int, optional
Minimum tokens in article. Article will be ignored if number of tokens is less.
token_min_len : int, optional
Minimal token length.
token_max_len : int, optional
Maximal token length.
lower : bool, optional
If True - convert all text to lower case.
filter_articles: callable or None, optional
If set, each XML article element will be passed to this callable before being processed. Only articles
            where the callable returns an XML element are processed; returning None allows filtering out
            some articles based on customised rules.
Warnings
--------
Unless a dictionary is provided, this scans the corpus once, to determine its vocabulary.
"""
self.fname = fname
self.filter_namespaces = filter_namespaces
self.filter_articles = filter_articles
self.metadata = False
if processes is None:
processes = max(1, multiprocessing.cpu_count() - 1)
self.processes = processes
self.lemmatize = lemmatize
self.tokenizer_func = tokenizer_func
self.article_min_tokens = article_min_tokens
self.token_min_len = token_min_len
self.token_max_len = token_max_len
self.lower = lower
if dictionary is None:
self.dictionary = Dictionary(self.get_texts())
else:
self.dictionary = dictionary
@property
def input(self):
return self.fname
def get_texts(self):
"""Iterate over the dump, yielding a list of tokens for each article that passed
the length and namespace filtering.
Uses multiprocessing internally to parallelize the work and process the dump more quickly.
Notes
-----
This iterates over the **texts**. If you want vectors, just use the standard corpus interface
instead of this method:
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>> from gensim.corpora import WikiCorpus
>>>
>>> path_to_wiki_dump = datapath("enwiki-latest-pages-articles1.xml-p000000010p000030302-shortened.bz2")
>>>
>>> for vec in WikiCorpus(path_to_wiki_dump):
... pass
Yields
------
list of str
If `metadata` is False, yield only list of token extracted from the article.
(list of str, (int, str))
List of tokens (extracted from the article), page id and article title otherwise.
"""
articles, articles_all = 0, 0
positions, positions_all = 0, 0
tokenization_params = (self.tokenizer_func, self.token_min_len, self.token_max_len, self.lower)
texts = (
(text, self.lemmatize, title, pageid, tokenization_params)
for title, text, pageid
in extract_pages(bz2.BZ2File(self.fname), self.filter_namespaces, self.filter_articles)
)
pool = multiprocessing.Pool(self.processes, init_to_ignore_interrupt)
try:
# process the corpus in smaller chunks of docs, because multiprocessing.Pool
# is dumb and would load the entire input into RAM at once...
for group in utils.chunkize(texts, chunksize=10 * self.processes, maxsize=1):
for tokens, title, pageid in pool.imap(_process_article, group):
articles_all += 1
positions_all += len(tokens)
# article redirects and short stubs are pruned here
if len(tokens) < self.article_min_tokens or \
any(title.startswith(ignore + ':') for ignore in IGNORED_NAMESPACES):
continue
articles += 1
positions += len(tokens)
if self.metadata:
yield (tokens, (pageid, title))
else:
yield tokens
except KeyboardInterrupt:
logger.warning(
"user terminated iteration over Wikipedia corpus after %i documents with %i positions "
"(total %i articles, %i positions before pruning articles shorter than %i words)",
articles, positions, articles_all, positions_all, self.article_min_tokens
)
except PicklingError as exc:
raise PicklingError(
f'Can not send filtering function {self.filter_articles} to multiprocessing, '
'make sure the function can be pickled.'
) from exc
else:
logger.info(
"finished iterating over Wikipedia corpus of %i documents with %i positions "
"(total %i articles, %i positions before pruning articles shorter than %i words)",
articles, positions, articles_all, positions_all, self.article_min_tokens
)
self.length = articles # cache corpus length
finally:
pool.terminate()
|
import datetime
from six import string_types
from ._generalslice import OPEN_OPEN, CLOSED_CLOSED, OPEN_CLOSED, CLOSED_OPEN, GeneralSlice
from ._parse import parse
INTERVAL_LOOKUP = {(True, True): OPEN_OPEN,
(False, False): CLOSED_CLOSED,
(True, False): OPEN_CLOSED,
(False, True): CLOSED_OPEN
}
class DateRange(GeneralSlice):
"""
Represents a bounded datetime range.
Ranges may be bounded on either end if a date is
specified for the start or end of the range, or unbounded
if None is specified for either value. Unbounded ranges will allow
all available data to pass through when used as a filter argument
    on a function or method.
===== ==== ============================ ===============================
start end interval Meaning
----- ---- ---------------------------- -------------------------------
None None any date
a None CLOSED_CLOSED or CLOSED_OPEN date >= a
a None OPEN_CLOSED or OPEN_OPEN date > a
None b CLOSED_CLOSED or OPEN_CLOSED date <= b
None b CLOSED_OPEN or OPEN_OPEN date < b
a b CLOSED_CLOSED date >= a and date <= b
a b OPEN_CLOSED date > a and date <= b
a b CLOSED_OPEN date >= a and date < b
a b OPEN_OPEN date > a and date < b
===== ==== ============================ ===============================
Parameters
----------
start : `int`, `str` or `datetime.datetime`
lower bound date value as an integer, string or datetime object.
end : `int`, `str` or `datetime.datetime`
upper bound date value as an integer, string or datetime object.
interval : `int`
CLOSED_CLOSED, OPEN_CLOSED, CLOSED_OPEN or OPEN_OPEN.
**Default is CLOSED_CLOSED**.
"""
def __init__(self, start=None, end=None, interval=CLOSED_CLOSED):
def _is_dt_type(x):
return isinstance(x, (datetime.datetime, datetime.date))
def _compute_bound(value, desc):
if isinstance(value, bytes):
return parse(value.decode('ascii'))
elif isinstance(value, (int, string_types)):
return parse(str(value))
elif _is_dt_type(value):
return value
elif value is None:
return None
else:
raise TypeError('unsupported type for %s: %s' % (desc, type(value)))
super(DateRange, self).__init__(_compute_bound(start, "start"), _compute_bound(end, "end"), 1, interval)
if _is_dt_type(self.start) and _is_dt_type(self.end):
if self.start > self.end:
raise ValueError('start date (%s) cannot be greater than end date (%s)!'
% (self.start, self.end))
@property
def unbounded(self):
"""True if range is unbounded on either or both ends, False otherwise."""
return self.start is None or self.end is None
def intersection(self, other):
"""
Create a new DateRange representing the maximal range enclosed by this range and other
"""
startopen = other.startopen if self.start is None \
else self.startopen if other.start is None \
else other.startopen if self.start < other.start \
else self.startopen if self.start > other.start \
else (self.startopen or other.startopen)
endopen = other.endopen if self.end is None \
else self.endopen if other.end is None \
else other.endopen if self.end > other.end \
else self.endopen if self.end < other.end \
else (self.endopen or other.endopen)
new_start = self.start if other.start is None \
else other.start if self.start is None \
else max(self.start, other.start)
new_end = self.end if other.end is None \
else other.end if self.end is None \
else min(self.end, other.end)
interval = INTERVAL_LOOKUP[(startopen, endopen)]
return DateRange(new_start, new_end, interval)
def as_dates(self):
"""
Create a new DateRange with the datetimes converted to dates and changing to CLOSED/CLOSED.
"""
new_start = self.start.date() if self.start and isinstance(self.start, datetime.datetime) else self.start
new_end = self.end.date() if self.end and isinstance(self.end, datetime.datetime) else self.end
return DateRange(new_start, new_end, CLOSED_CLOSED)
def mongo_query(self):
"""
        Convert a DateRange into a MongoDB query dict. FIXME: Mongo can only handle
datetimes in queries, so we should make this handle the case where start/end are
datetime.date and extend accordingly (being careful about the interval logic).
"""
comps = {OPEN_CLOSED: ('t', 'te'), OPEN_OPEN: ('t', 't'),
CLOSED_OPEN: ('te', 't'), CLOSED_CLOSED: ('te', 'te')}
query = {}
comp = comps[self.interval]
if self.start:
query['$g' + comp[0]] = self.start
if self.end:
query['$l' + comp[1]] = self.end
return query
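    # Example of the mapping above (illustrative, CLOSED_CLOSED bounds):
    # DateRange('2020-01-01', '2020-06-30').mongo_query() produces roughly
    #     {'$gte': datetime(2020, 1, 1), '$lte': datetime(2020, 6, 30)}
    # and an open bound swaps the corresponding operator for '$gt' / '$lt'.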
def get_date_bounds(self):
"""
Return the upper and lower bounds along
with operators that are needed to do an 'in range' test.
Useful for SQL commands.
Returns
-------
tuple: (`str`, `date`, `str`, `date`)
(date_gt, start, date_lt, end)
e.g.:
('>=', start_date, '<', end_date)
"""
start = end = None
date_gt = '>='
date_lt = '<='
if self:
if self.start:
start = self.start
if self.end:
end = self.end
if self.startopen:
date_gt = '>'
if self.endopen:
date_lt = '<'
return date_gt, start, date_lt, end
def __contains__(self, d):
if self.interval == CLOSED_CLOSED:
return (self.start is None or d >= self.start) and (self.end is None or d <= self.end)
elif self.interval == CLOSED_OPEN:
return (self.start is None or d >= self.start) and (self.end is None or d < self.end)
elif self.interval == OPEN_CLOSED:
return (self.start is None or d > self.start) and (self.end is None or d <= self.end)
return (self.start is None or d > self.start) and (self.end is None or d < self.end)
def __repr__(self):
return 'DateRange(start=%r, end=%r)' % (self.start, self.end)
def __eq__(self, rhs):
if rhs is None or not (hasattr(rhs, "end") and hasattr(rhs, "start")):
return False
return self.end == rhs.end and self.start == rhs.start
def __lt__(self, other):
if self.start is None:
return True
if other.start is None:
return False
return self.start < other.start
def __hash__(self):
return hash((self.start, self.end, self.step, self.interval))
def __getitem__(self, key):
if key == 0:
return self.start
elif key == 1:
return self.end
else:
raise IndexError('Index %s not in range (0:1)' % key)
def __str__(self):
return "%s%s, %s%s" % (
"(" if self.startopen else "[",
self.start,
self.end,
")" if self.endopen else "]",
)
def __setstate__(self, state):
"""Called by pickle, PyYAML etc to set state."""
self.start = state['start']
self.end = state['end']
self.interval = state.get('interval') or CLOSED_CLOSED
self.step = 1
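# Illustrative sketch (not part of the original module): how the methods above
# compose, assuming the interval constants (CLOSED_CLOSED, CLOSED_OPEN, ...)
# defined earlier in this module.
#
# jan = DateRange('2020-01-01', '2020-02-01', CLOSED_OPEN)   # [2020-01-01, 2020-02-01)
# later = DateRange('2020-01-15', None)                      # unbounded end
# both = jan.intersection(later)                             # [2020-01-15, 2020-02-01)
# both.mongo_query()       # -> {'$gte': datetime(2020, 1, 15), '$lt': datetime(2020, 2, 1)}
# both.get_date_bounds()   # -> ('>=', datetime(2020, 1, 15), '<', datetime(2020, 2, 1))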
|
from . import html5
from .examples import examples
from lark import Lark
from lark.tree import Tree
class App(html5.Div):
def __init__(self):
super().__init__("""
<h1>
<img src="lark-logo.png"> IDE
</h1>
<main>
<menu>
<select [name]="examples">
<option disabled selected>Examples</option>
</select>
<select [name]="parser">
<option value="earley" selected>Earley (default)</option>
<option value="lalr">LALR</option>
<option value="cyk">CYK</option>
</select>
</menu>
<div id="inputs">
<div>
<div>Grammar:</div>
<textarea [name]="grammar" id="grammar" placeholder="Lark Grammar..."></textarea>
</div>
<div>
<div>Input:</div>
<textarea [name]="input" id="input" placeholder="Parser input..."></textarea>
</div>
</div>
<div id="result">
<ul [name]="ast" />
</div>
</main>
""")
self.sinkEvent("onKeyUp", "onChange")
self.parser = "earley"
# Pre-load examples
for name, (grammar, input) in examples.items():
option = html5.Option(name)
option.grammar = grammar
option.input = input
self.examples.appendChild(option)
def onChange(self, e):
if html5.utils.doesEventHitWidgetOrChildren(e, self.examples):
example = self.examples.children(self.examples["selectedIndex"])
self.grammar["value"] = example.grammar.strip()
self.input["value"] = example.input.strip()
self.onKeyUp()
elif html5.utils.doesEventHitWidgetOrChildren(e, self.parser):
self.parser = self.parser.children(self.parser["selectedIndex"])["value"]
self.onKeyUp()
def onKeyUp(self, e=None):
try:
l = Lark(self.grammar["value"], parser=self.parser)
ast = l.parse(self.input["value"])
except Exception as e:
# Show the error in the result pane and stop here, since there is
# no parse tree to render.
self.ast.appendChild(
html5.Li(str(e)), replace=True
)
return
print(ast)
traverse = lambda node: html5.Li([node.data, html5.Ul([traverse(c) for c in node.children])] if isinstance(node, Tree) else node)
self.ast.appendChild(traverse(ast), replace=True)
def start():
html5.Body().appendChild(
App()
)
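# Illustrative sketch (not part of the original file): what onKeyUp hands to
# traverse. Lark.parse() returns a lark.tree.Tree whose .data is the rule name
# and whose .children are nested Trees or Tokens, which traverse() renders as
# nested <li>/<ul> elements, e.g.
#
# Lark('start: "a" "b"', parser='earley').parse('ab')
# # -> Tree('start', [])   (anonymous string terminals are filtered out)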
|
from __future__ import annotations
import logging
from typing import Any, Dict, Optional
from sentry_sdk.utils import BadDsn, Dsn
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
from .const import ( # pylint: disable=unused-import
CONF_DSN,
CONF_ENVIRONMENT,
CONF_EVENT_CUSTOM_COMPONENTS,
CONF_EVENT_HANDLED,
CONF_EVENT_THIRD_PARTY_PACKAGES,
CONF_LOGGING_EVENT_LEVEL,
CONF_LOGGING_LEVEL,
CONF_TRACING,
CONF_TRACING_SAMPLE_RATE,
DEFAULT_LOGGING_EVENT_LEVEL,
DEFAULT_LOGGING_LEVEL,
DEFAULT_TRACING_SAMPLE_RATE,
DOMAIN,
LOGGING_LEVELS,
)
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema({vol.Required(CONF_DSN): str})
class SentryConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a Sentry config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(
config_entry: config_entries.ConfigEntry,
) -> SentryOptionsFlow:
"""Get the options flow for this handler."""
return SentryOptionsFlow(config_entry)
async def async_step_user(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Handle a user config flow."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
errors = {}
if user_input is not None:
try:
Dsn(user_input["dsn"])
except BadDsn:
errors["base"] = "bad_dsn"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(title="Sentry", data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class SentryOptionsFlow(config_entries.OptionsFlow):
"""Handle Sentry options."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize Sentry options flow."""
self.config_entry = config_entry
async def async_step_init(
self, user_input: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
"""Manage Sentry options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LOGGING_EVENT_LEVEL,
default=self.config_entry.options.get(
CONF_LOGGING_EVENT_LEVEL, DEFAULT_LOGGING_EVENT_LEVEL
),
): vol.In(LOGGING_LEVELS),
vol.Optional(
CONF_LOGGING_LEVEL,
default=self.config_entry.options.get(
CONF_LOGGING_LEVEL, DEFAULT_LOGGING_LEVEL
),
): vol.In(LOGGING_LEVELS),
vol.Optional(
CONF_ENVIRONMENT,
default=self.config_entry.options.get(CONF_ENVIRONMENT),
): str,
vol.Optional(
CONF_EVENT_HANDLED,
default=self.config_entry.options.get(
CONF_EVENT_HANDLED, False
),
): bool,
vol.Optional(
CONF_EVENT_CUSTOM_COMPONENTS,
default=self.config_entry.options.get(
CONF_EVENT_CUSTOM_COMPONENTS, False
),
): bool,
vol.Optional(
CONF_EVENT_THIRD_PARTY_PACKAGES,
default=self.config_entry.options.get(
CONF_EVENT_THIRD_PARTY_PACKAGES, False
),
): bool,
vol.Optional(
CONF_TRACING,
default=self.config_entry.options.get(CONF_TRACING, False),
): bool,
vol.Optional(
CONF_TRACING_SAMPLE_RATE,
default=self.config_entry.options.get(
CONF_TRACING_SAMPLE_RATE, DEFAULT_TRACING_SAMPLE_RATE
),
): vol.All(vol.Coerce(float), vol.Range(min=0.0, max=1.0)),
}
),
)
|
from typing import Any
from homeassistant.components.scene import Scene
from . import LUTRON_CONTROLLER, LUTRON_DEVICES, LutronDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Lutron scenes."""
devs = []
for scene_data in hass.data[LUTRON_DEVICES]["scene"]:
(area_name, keypad_name, device, led) = scene_data
dev = LutronScene(
area_name, keypad_name, device, led, hass.data[LUTRON_CONTROLLER]
)
devs.append(dev)
add_entities(devs, True)
class LutronScene(LutronDevice, Scene):
"""Representation of a Lutron Scene."""
def __init__(self, area_name, keypad_name, lutron_device, lutron_led, controller):
"""Initialize the scene/button."""
super().__init__(area_name, lutron_device, controller)
self._keypad_name = keypad_name
self._led = lutron_led
def activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
self._lutron_device.press()
@property
def name(self):
"""Return the name of the device."""
return f"{self._area_name} {self._keypad_name}: {self._lutron_device.name}"
|
import numpy as np
from .fixes import rfft, irfft
from .utils import (sizeof_fmt, logger, get_config, warn, _explain_exception,
verbose)
_cuda_capable = False
def get_cuda_memory(kind='available'):
"""Get the amount of free memory for CUDA operations.
Parameters
----------
kind : str
Can be "available" or "total".
Returns
-------
memory : str
The amount of available or total memory as a human-readable string.
"""
if not _cuda_capable:
warn('CUDA not enabled, returning zero for memory')
mem = 0
else:
import cupy
mem = cupy.cuda.runtime.memGetInfo()[dict(available=0, total=1)[kind]]
return sizeof_fmt(mem)
@verbose
def init_cuda(ignore_config=False, verbose=None):
"""Initialize CUDA functionality.
This function attempts to load the necessary interfaces
(hardware connectivity) to run CUDA-based filtering. This
function should only need to be run once per session.
If the config var (set via mne.set_config or in ENV)
MNE_USE_CUDA == 'true', this function will be executed when
the first CUDA setup is performed. If this variable is not
set, this function can be manually executed.
Parameters
----------
ignore_config : bool
If True, ignore the config value MNE_USE_CUDA and force init.
%(verbose)s
"""
global _cuda_capable
if _cuda_capable:
return
if not ignore_config and (get_config('MNE_USE_CUDA', 'false').lower() !=
'true'):
logger.info('CUDA not enabled in config, skipping initialization')
return
# Triage possible errors for informative messaging
_cuda_capable = False
try:
import cupy # noqa
except ImportError:
warn('module cupy not found, CUDA not enabled')
return
device_id = int(get_config('MNE_CUDA_DEVICE', '0'))
try:
# Initialize CUDA
_set_cuda_device(device_id, verbose)
except Exception:
warn('no CUDA device could be initialized, likely a hardware error, '
'CUDA not enabled%s' % _explain_exception())
return
_cuda_capable = True
# Figure out limit for CUDA FFT calculations
logger.info('Enabling CUDA with %s available memory' % get_cuda_memory())
@verbose
def set_cuda_device(device_id, verbose=None):
"""Set the CUDA device temporarily for the current session.
Parameters
----------
device_id : int
Numeric ID of the CUDA-capable device you want MNE-Python to use.
%(verbose)s
"""
if _cuda_capable:
_set_cuda_device(device_id, verbose)
elif get_config('MNE_USE_CUDA', 'false').lower() == 'true':
init_cuda()
_set_cuda_device(device_id, verbose)
else:
warn('Could not set CUDA device because CUDA is not enabled; either '
'run mne.cuda.init_cuda() first, or set the MNE_USE_CUDA config '
'variable to "true".')
@verbose
def _set_cuda_device(device_id, verbose=None):
"""Set the CUDA device."""
import cupy
cupy.cuda.Device(device_id).use()
logger.info('Now using CUDA device {}'.format(device_id))
###############################################################################
# Repeated FFT multiplication
def _setup_cuda_fft_multiply_repeated(n_jobs, h, n_fft,
kind='FFT FIR filtering'):
"""Set up repeated CUDA FFT multiplication with a given filter.
Parameters
----------
n_jobs : int | str
If n_jobs == 'cuda', the function will attempt to set up for CUDA
FFT multiplication.
h : array
The filtering function that will be used repeatedly.
n_fft : int
The number of points in the FFT.
kind : str
The kind to report to the user.
Returns
-------
n_jobs : int
Sets n_jobs = 1 if n_jobs == 'cuda' was passed in, otherwise
original n_jobs is passed.
cuda_dict : dict
Dictionary with the following entries:
n_fft : int
The number of points in the FFT.
rfft : callable
Forward FFT function (CUDA-backed if CUDA is enabled).
irfft : callable
Inverse FFT function (CUDA-backed if CUDA is enabled).
h_fft : ndarray | cupy.ndarray
The FFT of the filter (a cupy array when CUDA is enabled,
otherwise an ndarray).
Notes
-----
This function is designed to be used with _fft_multiply_repeated().
"""
cuda_dict = dict(n_fft=n_fft, rfft=rfft, irfft=irfft,
h_fft=rfft(h, n=n_fft))
if n_jobs == 'cuda':
n_jobs = 1
init_cuda()
if _cuda_capable:
import cupy
try:
# do the IFFT normalization now so we don't have to later
h_fft = cupy.array(cuda_dict['h_fft'])
logger.info('Using CUDA for %s' % kind)
except Exception as exp:
logger.info('CUDA not used, could not instantiate memory '
'(arrays may be too large: "%s"), falling back to '
'n_jobs=1' % str(exp))
cuda_dict.update(h_fft=h_fft,
rfft=_cuda_upload_rfft,
irfft=_cuda_irfft_get)
else:
logger.info('CUDA not used, CUDA could not be initialized, '
'falling back to n_jobs=1')
return n_jobs, cuda_dict
def _fft_multiply_repeated(x, cuda_dict):
"""Do FFT multiplication by a filter function (possibly using CUDA).
Parameters
----------
x : 1-d array
The array to filter.
cuda_dict : dict
Dictionary constructed using _setup_cuda_fft_multiply_repeated();
provides the filter's FFT and the (possibly CUDA-backed) rfft/irfft
callables.
Returns
-------
x : 1-d array
Filtered version of x.
"""
# do the fourier-domain operations
x_fft = cuda_dict['rfft'](x, cuda_dict['n_fft'])
x_fft *= cuda_dict['h_fft']
x = cuda_dict['irfft'](x_fft, cuda_dict['n_fft'])
return x
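# Illustrative sketch (not part of the original module): on the CPU fallback
# path the two functions above reduce to a plain frequency-domain product,
# e.g. for a FIR filter h and a signal chunk x:
#
# n_jobs, cuda_dict = _setup_cuda_fft_multiply_repeated(1, h, n_fft)
# y = _fft_multiply_repeated(x, cuda_dict)
# # equivalent (without CUDA) to irfft(rfft(x, n_fft) * rfft(h, n_fft), n_fft)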
###############################################################################
# FFT Resampling
def _setup_cuda_fft_resample(n_jobs, W, new_len):
"""Set up CUDA FFT resampling.
Parameters
----------
n_jobs : int | str
If n_jobs == 'cuda', the function will attempt to set up for CUDA
FFT resampling.
W : array
The filtering function to be used during resampling.
If n_jobs='cuda', this function will be shortened (since CUDA
assumes FFTs of real signals are half the length of the signal)
and turned into a gpuarray.
new_len : int
The size of the array following resampling.
Returns
-------
n_jobs : int
Sets n_jobs = 1 if n_jobs == 'cuda' was passed in, otherwise
original n_jobs is passed.
cuda_dict : dict
Dictionary with the following entries:
use_cuda : bool
Whether CUDA should be used.
rfft : callable
Forward FFT function (CUDA-backed if CUDA is enabled).
irfft : callable
Inverse FFT function (CUDA-backed if CUDA is enabled).
W : ndarray | cupy.ndarray
The folded, truncated window (a cupy array when CUDA is enabled).
Notes
-----
This function is designed to be used with _fft_resample().
"""
cuda_dict = dict(use_cuda=False, rfft=rfft, irfft=irfft)
rfft_len_x = len(W) // 2 + 1
# fold the window onto itself (should be symmetric) and truncate
W = W.copy()
W[1:rfft_len_x] = (W[1:rfft_len_x] + W[::-1][:rfft_len_x - 1]) / 2.
W = W[:rfft_len_x]
if n_jobs == 'cuda':
n_jobs = 1
init_cuda()
if _cuda_capable:
try:
import cupy
# do the IFFT normalization now so we don't have to later
W = cupy.array(W)
logger.info('Using CUDA for FFT resampling')
except Exception:
logger.info('CUDA not used, could not instantiate memory '
'(arrays may be too large), falling back to '
'n_jobs=1')
else:
cuda_dict.update(use_cuda=True,
rfft=_cuda_upload_rfft,
irfft=_cuda_irfft_get)
else:
logger.info('CUDA not used, CUDA could not be initialized, '
'falling back to n_jobs=1')
cuda_dict['W'] = W
return n_jobs, cuda_dict
def _cuda_upload_rfft(x, n, axis=-1):
"""Upload and compute rfft."""
import cupy
return cupy.fft.rfft(cupy.array(x), n=n, axis=axis)
def _cuda_irfft_get(x, n, axis=-1):
"""Compute irfft and get."""
import cupy
return cupy.fft.irfft(x, n=n, axis=axis).get()
def _fft_resample(x, new_len, npads, to_removes, cuda_dict=None,
pad='reflect_limited'):
"""Do FFT resampling with a filter function (possibly using CUDA).
Parameters
----------
x : 1-d array
The array to resample. Will be converted to float64 if necessary.
new_len : int
The size of the output array (before removing padding).
npads : tuple of int
Amount of padding to apply to the start and end of the
signal before resampling.
to_removes : tuple of int
Number of samples to remove after resampling.
cuda_dict : dict
Dictionary constructed using _setup_cuda_fft_resample().
pad : str
The type of padding to use. Supports all :func:`np.pad` ``mode``
options. Can also be "reflect_limited" (default), which pads with a
reflected version of each vector mirrored on the first and last values
of the vector, followed by zeros.
.. versionadded:: 0.15
Returns
-------
x : 1-d array
Filtered version of x.
"""
cuda_dict = dict(use_cuda=False) if cuda_dict is None else cuda_dict
# add some padding at beginning and end to make this work a little cleaner
if x.dtype != np.float64:
x = x.astype(np.float64)
x = _smart_pad(x, npads, pad)
old_len = len(x)
shorter = new_len < old_len
use_len = new_len if shorter else old_len
x_fft = cuda_dict['rfft'](x, None)
if use_len % 2 == 0:
nyq = use_len // 2
x_fft[nyq:nyq + 1] *= 2 if shorter else 0.5
x_fft *= cuda_dict['W']
y = cuda_dict['irfft'](x_fft, new_len)
# now let's trim it back to the correct size (if there was padding)
if (to_removes > 0).any():
y = y[to_removes[0]:y.shape[0] - to_removes[1]]
return y
###############################################################################
# Misc
# this has to go in mne.cuda instead of mne.filter to avoid import errors
def _smart_pad(x, n_pad, pad='reflect_limited'):
"""Pad vector x."""
n_pad = np.asarray(n_pad)
assert n_pad.shape == (2,)
if (n_pad == 0).all():
return x
elif (n_pad < 0).any():
raise RuntimeError('n_pad must be non-negative')
if pad == 'reflect_limited':
# need to pad with zeros if len(x) <= npad
l_z_pad = np.zeros(max(n_pad[0] - len(x) + 1, 0), dtype=x.dtype)
r_z_pad = np.zeros(max(n_pad[1] - len(x) + 1, 0), dtype=x.dtype)
return np.concatenate([l_z_pad, 2 * x[0] - x[n_pad[0]:0:-1], x,
2 * x[-1] - x[-2:-n_pad[1] - 2:-1], r_z_pad])
else:
return np.pad(x, (tuple(n_pad),), pad)
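# Illustrative worked example (not part of the original module) of the default
# 'reflect_limited' padding above: the signal is point-reflected around its
# first and last values, and zeros are only used when the requested pad is
# longer than the signal itself.
#
# _smart_pad(np.array([1., 2., 3.]), (2, 2))
# # -> array([-1., 0., 1., 2., 3., 4., 5.])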
|
import functools
import itertools
import logging
import random
from collections import defaultdict, deque
logger = logging.getLogger(__name__)
def blockedSample(sampler, sample_size, predicates, *args):
blocked_sample = set()
remaining_sample = sample_size - len(blocked_sample)
previous_sample_size = 0
while remaining_sample and predicates:
random.shuffle(predicates)
new_sample = list(sampler(remaining_sample,
predicates,
*args))
filtered_sample = (subsample for subsample
in new_sample if subsample)
blocked_sample.update(itertools.chain.from_iterable(filtered_sample))
growth = len(blocked_sample) - previous_sample_size
growth_rate = growth / remaining_sample
remaining_sample = sample_size - len(blocked_sample)
previous_sample_size = len(blocked_sample)
if growth_rate < 0.001:
logger.debug("%s blocked samples were requested, "
"but only able to sample %s"
% (sample_size, len(blocked_sample)))
break
predicates = [pred for pred, pred_sample
in zip(predicates, new_sample)
if pred_sample or pred_sample is None]
return blocked_sample
def dedupeSamplePredicates(sample_size, predicates, items):
n_items = len(items)
for subsample_size, predicate in subsample(sample_size, predicates):
if not subsample_size:
yield None
continue
items.rotate(random.randrange(n_items))
items.reverse()
yield dedupeSamplePredicate(subsample_size,
predicate,
items)
def dedupeSamplePredicate(subsample_size, predicate, items):
sample = []
block_dict = {}
predicate_function = predicate.func
field = predicate.field
for pivot, (index, record) in enumerate(items):
column = record[field]
if not column:
continue
if pivot == 10000:
if len(block_dict) + len(sample) < 10:
return sample
block_keys = predicate_function(column)
for block_key in block_keys:
if block_key not in block_dict:
block_dict[block_key] = index
else:
pair = sort_pair(block_dict.pop(block_key), index)
sample.append(pair)
subsample_size -= 1
if subsample_size:
break
else:
return sample
else:
return sample
def linkSamplePredicates(sample_size, predicates, items1, items2):
n_1 = len(items1)
n_2 = len(items2)
for subsample_size, predicate in subsample(sample_size, predicates):
if not subsample_size:
yield None
continue
try:
items1.rotate(random.randrange(n_1))
items2.rotate(random.randrange(n_2))
except ValueError:
raise ValueError("Empty itemset.")
try:
items1.reverse()
items2.reverse()
except AttributeError:
items1 = deque(reversed(items1))
items2 = deque(reversed(items2))
yield linkSamplePredicate(subsample_size, predicate, items1, items2)
def linkSamplePredicate(subsample_size, predicate, items1, items2):
sample = []
predicate_function = predicate.func
field = predicate.field
red = defaultdict(list)
blue = defaultdict(list)
for i, (index, record) in enumerate(interleave(items1, items2)):
if i == 20000:
if min(len(red), len(blue)) + len(sample) < 10:
return sample
column = record[field]
if not column:
red, blue = blue, red
continue
block_keys = predicate_function(column)
for block_key in block_keys:
if blue.get(block_key):
pair = sort_pair(blue[block_key].pop(), index)
sample.append(pair)
subsample_size -= 1
if subsample_size:
break
else:
return sample
else:
red[block_key].append(index)
red, blue = blue, red
for index, record in itertools.islice(items2, len(items1)):
column = record[field]
if not column:
continue
block_keys = predicate_function(column)
for block_key in block_keys:
if red.get(block_key):
pair = sort_pair(red[block_key].pop(), index)
sample.append(pair)
subsample_size -= 1
if subsample_size:
break
else:
return sample
return sample
def evenSplits(total_size, num_splits):
avg = total_size / num_splits
split = 0
for _ in range(num_splits):
split += avg - int(split)
yield int(split)
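# Illustrative worked example (not part of the original module): evenSplits
# spreads total_size over num_splits near-equal integer chunks that sum back
# to total_size exactly, e.g.
#
# list(evenSplits(10, 3))  # -> [3, 3, 4]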
def subsample(total_size, predicates):
splits = evenSplits(total_size, len(predicates))
for split, predicate in zip(splits, predicates):
yield split, predicate
def interleave(*iterables):
return itertools.chain.from_iterable(zip(*iterables))
def sort_pair(a, b):
if a > b:
return (b, a)
else:
return (a, b)
def randomDeque(data):
data_q = deque(random.sample(list(data.items()), len(data)))
return data_q
dedupeBlockedSample = functools.partial(blockedSample, dedupeSamplePredicates)
linkBlockedSample = functools.partial(blockedSample, linkSamplePredicates)
|
from collections import OrderedDict
import datetime
import time
from typing import MutableMapping, Optional, cast
import attr
from homeassistant.core import callback
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.loader import bind_hass
from .typing import ZhaDeviceType
DATA_REGISTRY = "zha_storage"
STORAGE_KEY = "zha.storage"
STORAGE_VERSION = 1
SAVE_DELAY = 10
TOMBSTONE_LIFETIME = datetime.timedelta(days=60).total_seconds()
@attr.s(slots=True, frozen=True)
class ZhaDeviceEntry:
"""Zha Device storage Entry."""
name: Optional[str] = attr.ib(default=None)
ieee: Optional[str] = attr.ib(default=None)
last_seen: Optional[float] = attr.ib(default=None)
class ZhaStorage:
"""Class to hold a registry of zha devices."""
def __init__(self, hass: HomeAssistantType) -> None:
"""Initialize the zha device storage."""
self.hass: HomeAssistantType = hass
self.devices: MutableMapping[str, ZhaDeviceEntry] = {}
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
@callback
def async_create_device(self, device: ZhaDeviceType) -> ZhaDeviceEntry:
"""Create a new ZhaDeviceEntry."""
device_entry: ZhaDeviceEntry = ZhaDeviceEntry(
name=device.name, ieee=str(device.ieee), last_seen=device.last_seen
)
self.devices[device_entry.ieee] = device_entry
self.async_schedule_save()
return device_entry
@callback
def async_get_or_create_device(self, device: ZhaDeviceType) -> ZhaDeviceEntry:
"""Create a new ZhaDeviceEntry."""
ieee_str: str = str(device.ieee)
if ieee_str in self.devices:
return self.devices[ieee_str]
return self.async_create_device(device)
@callback
def async_create_or_update_device(self, device: ZhaDeviceType) -> ZhaDeviceEntry:
"""Create or update a ZhaDeviceEntry."""
if str(device.ieee) in self.devices:
return self.async_update_device(device)
return self.async_create_device(device)
@callback
def async_delete_device(self, device: ZhaDeviceType) -> None:
"""Delete ZhaDeviceEntry."""
ieee_str: str = str(device.ieee)
if ieee_str in self.devices:
del self.devices[ieee_str]
self.async_schedule_save()
@callback
def async_update_device(self, device: ZhaDeviceType) -> ZhaDeviceEntry:
"""Update name of ZhaDeviceEntry."""
ieee_str: str = str(device.ieee)
old = self.devices[ieee_str]
if old is not None and device.last_seen is None:
return
changes = {}
changes["last_seen"] = device.last_seen
new = self.devices[ieee_str] = attr.evolve(old, **changes)
self.async_schedule_save()
return new
async def async_load(self) -> None:
"""Load the registry of zha device entries."""
data = await self._store.async_load()
devices: "OrderedDict[str, ZhaDeviceEntry]" = OrderedDict()
if data is not None:
for device in data["devices"]:
devices[device["ieee"]] = ZhaDeviceEntry(
name=device["name"],
ieee=device["ieee"],
last_seen=device.get("last_seen"),
)
self.devices = devices
@callback
def async_schedule_save(self) -> None:
"""Schedule saving the registry of zha devices."""
self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
async def async_save(self) -> None:
"""Save the registry of zha devices."""
await self._store.async_save(self._data_to_save())
@callback
def _data_to_save(self) -> dict:
"""Return data for the registry of zha devices to store in a file."""
data = {}
data["devices"] = [
{"name": entry.name, "ieee": entry.ieee, "last_seen": entry.last_seen}
for entry in self.devices.values()
if entry.last_seen and (time.time() - entry.last_seen) < TOMBSTONE_LIFETIME
]
return data
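# Illustrative sketch (not part of the original module): the persisted payload
# roughly looks like
#
# {"devices": [{"name": "kitchen light", "ieee": "<ieee address>", "last_seen": 1600000000.0}]}
#
# where devices whose last_seen is older than TOMBSTONE_LIFETIME are dropped
# at save time.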
@bind_hass
async def async_get_registry(hass: HomeAssistantType) -> ZhaStorage:
"""Return zha device storage instance."""
task = hass.data.get(DATA_REGISTRY)
if task is None:
async def _load_reg() -> ZhaStorage:
registry = ZhaStorage(hass)
await registry.async_load()
return registry
task = hass.data[DATA_REGISTRY] = hass.async_create_task(_load_reg())
return cast(ZhaStorage, await task)
|
import voluptuous as vol
from homeassistant.const import (
CONF_API_KEY,
CONF_DEVICE_ID,
CONF_DEVICES,
CONF_NAME,
CONF_SCAN_INTERVAL,
CONF_TYPE,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.event import async_track_time_interval
from .api_data import KaiterraApiData
from .const import (
AVAILABLE_AQI_STANDARDS,
AVAILABLE_DEVICE_TYPES,
AVAILABLE_UNITS,
CONF_AQI_STANDARD,
CONF_PREFERRED_UNITS,
DEFAULT_AQI_STANDARD,
DEFAULT_PREFERRED_UNIT,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
KAITERRA_COMPONENTS,
)
KAITERRA_DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_DEVICE_ID): cv.string,
vol.Required(CONF_TYPE): vol.In(AVAILABLE_DEVICE_TYPES),
vol.Optional(CONF_NAME): cv.string,
}
)
KAITERRA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_DEVICES): vol.All(cv.ensure_list, [KAITERRA_DEVICE_SCHEMA]),
vol.Optional(CONF_AQI_STANDARD, default=DEFAULT_AQI_STANDARD): vol.In(
AVAILABLE_AQI_STANDARDS
),
vol.Optional(CONF_PREFERRED_UNITS, default=DEFAULT_PREFERRED_UNIT): vol.All(
cv.ensure_list, [vol.In(AVAILABLE_UNITS)]
),
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL): cv.time_period,
}
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: KAITERRA_SCHEMA}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
"""Set up the Kaiterra components."""
conf = config[DOMAIN]
scan_interval = conf[CONF_SCAN_INTERVAL]
devices = conf[CONF_DEVICES]
session = async_get_clientsession(hass)
api = hass.data[DOMAIN] = KaiterraApiData(hass, conf, session)
await api.async_update()
async def _update(now=None):
"""Periodic update."""
await api.async_update()
async_track_time_interval(hass, _update, scan_interval)
# Load platforms for each device
for device in devices:
device_name, device_id = (
device.get(CONF_NAME) or device[CONF_TYPE],
device[CONF_DEVICE_ID],
)
for component in KAITERRA_COMPONENTS:
hass.async_create_task(
async_load_platform(
hass,
component,
DOMAIN,
{CONF_NAME: device_name, CONF_DEVICE_ID: device_id},
config,
)
)
return True
|
from __future__ import division
from datetime import datetime, timedelta
import logging
import os
from guessit import guessit
from rebulk.loose import ensure_list
from subliminal.utils import matches_title
logger = logging.getLogger(__name__)
#: Video extensions
VIDEO_EXTENSIONS = ('.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx', '.avchd', '.avi', '.bik',
'.bix', '.box', '.cam', '.dat', '.divx', '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli',
'.flic', '.flv', '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', '.m4e',
'.m4v', '.mjp', '.mjpeg', '.mjpg', '.mk3d', '.mkv', '.moov', '.mov', '.movhd', '.movie', '.movx',
'.mp4', '.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', '.ogv',
'.omf', '.ps', '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', '.vfw', '.vid', '.video', '.viv',
'.vivo', '.vob', '.vro', '.webm', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')
class Video(object):
"""Base class for videos.
Represent a video, existing or not.
:param str name: name or path of the video.
:param str source: source of the video (HDTV, Web, Blu-ray, ...).
:param str release_group: release group of the video.
:param str streaming_service: streaming_service of the video.
:param str resolution: resolution of the video stream (480p, 720p, 1080p or 1080i).
:param str video_codec: codec of the video stream.
:param str audio_codec: codec of the main audio stream.
:param str imdb_id: IMDb id of the video.
:param dict hashes: hashes of the video file by provider names.
:param int size: size of the video file in bytes.
:param set subtitle_languages: existing subtitle languages.
"""
def __init__(self, name, source=None, release_group=None, resolution=None, streaming_service=None,
video_codec=None, audio_codec=None, imdb_id=None, hashes=None, size=None, subtitle_languages=None):
#: Name or path of the video
self.name = name
#: Source of the video (HDTV, Web, Blu-ray, ...)
self.source = source
#: Release group of the video
self.release_group = release_group
#: Streaming service of the video
self.streaming_service = streaming_service
#: Resolution of the video stream (480p, 720p, 1080p or 1080i)
self.resolution = resolution
#: Codec of the video stream
self.video_codec = video_codec
#: Codec of the main audio stream
self.audio_codec = audio_codec
#: IMDb id of the video
self.imdb_id = imdb_id
#: Hashes of the video file by provider names
self.hashes = hashes or {}
#: Size of the video file in bytes
self.size = size
#: Existing subtitle languages
self.subtitle_languages = subtitle_languages or set()
@property
def exists(self):
"""Test whether the video exists"""
return os.path.exists(self.name)
@property
def age(self):
"""Age of the video"""
if self.exists:
return datetime.utcnow() - datetime.utcfromtimestamp(os.path.getmtime(self.name))
return timedelta()
@classmethod
def fromguess(cls, name, guess):
"""Create an :class:`Episode` or a :class:`Movie` with the given `name` based on the `guess`.
:param str name: name of the video.
:param dict guess: guessed data.
:raise: :class:`ValueError` if the `type` of the `guess` is invalid
"""
if guess['type'] == 'episode':
return Episode.fromguess(name, guess)
if guess['type'] == 'movie':
return Movie.fromguess(name, guess)
raise ValueError('The guess must be an episode or a movie guess')
@classmethod
def fromname(cls, name):
"""Shortcut for :meth:`fromguess` with a `guess` guessed from the `name`.
:param str name: name of the video.
"""
return cls.fromguess(name, guessit(name))
def __repr__(self):
return '<%s [%r]>' % (self.__class__.__name__, self.name)
def __hash__(self):
return hash(self.name)
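# Illustrative sketch (not part of the original module): fromname() runs guessit
# on the file name and dispatches to Episode.fromguess or Movie.fromguess based
# on the guessed type; the exact attribute values depend on the guessit version.
#
# video = Video.fromname('The.Show.S01E02.720p.HDTV.x264-GRP.mkv')
# # -> an Episode with series='The Show', season=1, episodes=[2],
# #    resolution='720p', source='HDTV', release_group='GRP'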
class Episode(Video):
"""Episode :class:`Video`.
:param str series: series of the episode.
:param int season: season number of the episode.
:param int or list episodes: episode numbers of the episode.
:param str title: title of the episode.
:param int year: year of the series.
:param country: Country of the series.
:type country: :class:`~babelfish.country.Country`
:param bool original_series: whether the series is the first with this name.
:param int tvdb_id: TVDB id of the episode.
:param list alternative_series: alternative names of the series
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
"""
def __init__(self, name, series, season, episodes, title=None, year=None, country=None, original_series=True,
tvdb_id=None, series_tvdb_id=None, series_imdb_id=None, alternative_series=None, **kwargs):
super(Episode, self).__init__(name, **kwargs)
#: Series of the episode
self.series = series
#: Season number of the episode
self.season = season
#: Episode numbers of the episode
self.episodes = ensure_list(episodes)
#: Title of the episode
self.title = title
#: Year of series
self.year = year
#: The series is the first with this name
self.original_series = original_series
#: Country of the series
self.country = country
#: TVDB id of the episode
self.tvdb_id = tvdb_id
#: TVDB id of the series
self.series_tvdb_id = series_tvdb_id
#: IMDb id of the series
self.series_imdb_id = series_imdb_id
#: Alternative names of the series
self.alternative_series = alternative_series or []
@property
def episode(self):
return min(self.episodes) if self.episodes else None
def matches(self, series):
return matches_title(series, self.series, self.alternative_series)
@classmethod
def fromguess(cls, name, guess):
if guess['type'] != 'episode':
raise ValueError('The guess must be an episode guess')
if 'title' not in guess or 'episode' not in guess:
raise ValueError('Insufficient data to process the guess')
return cls(name, guess['title'], guess.get('season', 1), guess.get('episode'), title=guess.get('episode_title'),
year=guess.get('year'), country=guess.get('country'),
original_series='year' not in guess and 'country' not in guess,
source=guess.get('source'), alternative_series=ensure_list(guess.get('alternative_title')),
release_group=guess.get('release_group'), streaming_service=guess.get('streaming_service'),
resolution=guess.get('screen_size'),
video_codec=guess.get('video_codec'), audio_codec=guess.get('audio_codec'))
@classmethod
def fromname(cls, name):
return cls.fromguess(name, guessit(name, {'type': 'episode'}))
def __repr__(self):
return '<{cn} [{series}{open}{country}{sep}{year}{close} s{season:02d}e{episodes}]>'.format(
cn=self.__class__.__name__, series=self.series, year=self.year or '', country=self.country or '',
season=self.season, episodes='-'.join(map(lambda v: '{:02d}'.format(v), self.episodes)),
open=' (' if not self.original_series else '',
sep=') (' if self.year and self.country else '',
close=')' if not self.original_series else ''
)
class Movie(Video):
"""Movie :class:`Video`.
:param str title: title of the movie.
:param int year: year of the movie.
:param country: Country of the movie.
:type country: :class:`~babelfish.country.Country`
:param list alternative_titles: alternative titles of the movie
:param \*\*kwargs: additional parameters for the :class:`Video` constructor.
"""
def __init__(self, name, title, year=None, country=None, alternative_titles=None, **kwargs):
super(Movie, self).__init__(name, **kwargs)
#: Title of the movie
self.title = title
#: Year of the movie
self.year = year
#: Country of the movie
self.country = country
#: Alternative titles of the movie
self.alternative_titles = alternative_titles or []
def matches(self, title):
return matches_title(title, self.title, self.alternative_titles)
@classmethod
def fromguess(cls, name, guess):
if guess['type'] != 'movie':
raise ValueError('The guess must be a movie guess')
if 'title' not in guess:
raise ValueError('Insufficient data to process the guess')
return cls(name, guess['title'], source=guess.get('source'), release_group=guess.get('release_group'),
streaming_service=guess.get('streaming_service'),
resolution=guess.get('screen_size'), video_codec=guess.get('video_codec'),
alternative_titles=ensure_list(guess.get('alternative_title')),
audio_codec=guess.get('audio_codec'), year=guess.get('year'), country=guess.get('country'))
@classmethod
def fromname(cls, name):
return cls.fromguess(name, guessit(name, {'type': 'movie'}))
def __repr__(self):
return '<{cn} [{title}{open}{country}{sep}{year}{close}]>'.format(
cn=self.__class__.__name__, title=self.title, year=self.year or '', country=self.country or '',
open=' (' if self.year or self.country else '',
sep=') (' if self.year and self.country else '',
close=')' if self.year or self.country else ''
)
|
import functools
import inspect
import itertools
import logging
import typing as T
import warnings
import pkg_resources
from .cfgrib_ import cfgrib_backend
from .common import BackendEntrypoint
from .h5netcdf_ import h5netcdf_backend
from .netCDF4_ import netcdf4_backend
from .pseudonetcdf_ import pseudonetcdf_backend
from .pydap_ import pydap_backend
from .pynio_ import pynio_backend
from .scipy_ import scipy_backend
from .store import store_backend
from .zarr import zarr_backend
BACKEND_ENTRYPOINTS: T.Dict[str, BackendEntrypoint] = {
"store": store_backend,
"netcdf4": netcdf4_backend,
"h5netcdf": h5netcdf_backend,
"scipy": scipy_backend,
"pseudonetcdf": pseudonetcdf_backend,
"zarr": zarr_backend,
"cfgrib": cfgrib_backend,
"pydap": pydap_backend,
"pynio": pynio_backend,
}
def remove_duplicates(backend_entrypoints):
# sort and group entrypoints by name
backend_entrypoints = sorted(backend_entrypoints, key=lambda ep: ep.name)
backend_entrypoints_grouped = itertools.groupby(
backend_entrypoints, key=lambda ep: ep.name
)
# check if there are multiple entrypoints for the same name
unique_backend_entrypoints = []
for name, matches in backend_entrypoints_grouped:
matches = list(matches)
unique_backend_entrypoints.append(matches[0])
matches_len = len(matches)
if matches_len > 1:
selected_module_name = matches[0].module_name
all_module_names = [e.module_name for e in matches]
warnings.warn(
f"Found {matches_len} entrypoints for the engine name {name}:"
f"\n {all_module_names}.\n It will be used: {selected_module_name}.",
RuntimeWarning,
)
return unique_backend_entrypoints
def detect_parameters(open_dataset):
signature = inspect.signature(open_dataset)
parameters = signature.parameters
for name, param in parameters.items():
if param.kind in (
inspect.Parameter.VAR_KEYWORD,
inspect.Parameter.VAR_POSITIONAL,
):
raise TypeError(
f"All the parameters in {open_dataset!r} signature should be explicit. "
"*args and **kwargs is not supported"
)
return tuple(parameters)
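# Illustrative sketch (not part of the original module): detect_parameters just
# reads the explicit signature of a backend's open_dataset, e.g.
#
# def open_dataset(filename_or_obj, mask_and_scale=True, decode_times=True):
#     ...
#
# detect_parameters(open_dataset)  # -> ('filename_or_obj', 'mask_and_scale', 'decode_times')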
def create_engines_dict(backend_entrypoints):
engines = {}
for backend_ep in backend_entrypoints:
name = backend_ep.name
backend = backend_ep.load()
engines[name] = backend
return engines
def set_missing_parameters(engines):
for name, backend in engines.items():
if backend.open_dataset_parameters is None:
open_dataset = backend.open_dataset
backend.open_dataset_parameters = detect_parameters(open_dataset)
def build_engines(entrypoints):
backend_entrypoints = BACKEND_ENTRYPOINTS.copy()
pkg_entrypoints = remove_duplicates(entrypoints)
external_backend_entrypoints = create_engines_dict(pkg_entrypoints)
backend_entrypoints.update(external_backend_entrypoints)
set_missing_parameters(backend_entrypoints)
return backend_entrypoints
@functools.lru_cache(maxsize=1)
def list_engines():
entrypoints = pkg_resources.iter_entry_points("xarray.backends")
return build_engines(entrypoints)
def guess_engine(store_spec):
engines = list_engines()
# use the pre-defined selection order for netCDF files
for engine in ["netcdf4", "h5netcdf", "scipy"]:
if engine in engines and engines[engine].guess_can_open(store_spec):
return engine
for engine, backend in engines.items():
try:
if backend.guess_can_open and backend.guess_can_open(store_spec):
return engine
except Exception:
logging.exception(f"{engine!r} fails while guessing")
raise ValueError("cannot guess the engine, try passing one explicitly")
def get_backend(engine):
"""Select open_dataset method based on current engine"""
engines = list_engines()
if engine not in engines:
raise ValueError(
f"unrecognized engine {engine} must be one of: {list(engines)}"
)
return engines[engine]
|
from pscript import window
from ... import event, app
from .._widget import Widget, create_element
# todo: some form of autocompletion?
class BaseDropdown(Widget):
""" Base class for drop-down-like widgets.
"""
DEFAULT_MIN_SIZE = 50, 28
CSS = """
.flx-BaseDropdown {
display: inline-block;
overflow: visible;
margin: 2px;
border-radius: 3px;
padding: 2px;
border: 1px solid #aaa;
max-height: 28px; /* overridden by maxsize */
white-space: nowrap; /* keep label and button on one line */
background: #e8e8e8
}
.flx-BaseDropdown:focus {
outline: none;
box-shadow: 0px 0px 3px 1px rgba(0, 100, 200, 0.7);
}
.flx-BaseDropdown > .flx-dd-edit {
display: none;
max-width: 2em; /* reset silly lineedit sizing */
min-width: calc(100% - 1.5em - 2px);
min-height: 1em;
margin: 0;
padding: 0;
border: none;
}
.flx-BaseDropdown > .flx-dd-label {
display: inline-block;
min-width: calc(100% - 1.5em - 2px);
min-height: 1em;
user-select: none;
-moz-user-select: none;
-webkit-user-select: none;
-ms-user-select: none;
}
.flx-BaseDropdown.editable-true {
background: #fff;
}
.flx-BaseDropdown.editable-true > .flx-dd-label {
display: none;
}
.flx-BaseDropdown.editable-true > .flx-dd-edit {
display: inline-block;
}
.flx-BaseDropdown > .flx-dd-button {
display: inline-block;
position: static;
min-width: 1.5em;
max-width: 1.5em;
text-align: center;
margin: 0;
}
.flx-BaseDropdown > .flx-dd-button:hover {
background: rgba(128, 128, 128, 0.1);
}
.flx-BaseDropdown > .flx-dd-button::after {
content: '\\25BE'; /* 2228 1F847 1F83F */
}
.flx-BaseDropdown .flx-dd-space {
display: inline-block;
min-width: 1em;
}
.flx-BaseDropdown > .flx-dd-strud {
/* The strud gives the box a natural minimum size,
but it should not affect the height. */
visibility: hidden;
overflow: hidden;
max-height: 0;
}
"""
def init(self):
if self.tabindex == -2:
self.set_tabindex(-1)
@event.action
def expand(self):
""" Expand the dropdown and give it focus, so that it can be used
with the up/down keys.
"""
self._expand()
self.node.focus()
def _create_dom(self):
return window.document.createElement('span')
def _render_dom(self):
# Render more or less this:
# <span class='flx-dd-label'></span>
# <input type='text' class='flx-dd-edit'></input>
# <span></span>
# <span class='flx-dd-button'></span>
# <div class='flx-dd-strud'> </div>
f2 = lambda e: self._submit_text() if e.which == 13 else None
return [create_element('span',
{'className': 'flx-dd-label',
'onclick': self._but_click},
self.text + '\u00A0'),
create_element('input',
{'className': 'flx-dd-edit',
'onkeypress': f2,
'onblur': self._submit_text,
'value': self.text}),
create_element('span'),
create_element('span', {'className': 'flx-dd-button',
'onclick': self._but_click}),
create_element('div', {'className': 'flx-dd-strud'}, '\u00A0'),
]
def _but_click(self):
if self.node.classList.contains('expanded'):
self._collapse()
else:
self._expand()
def _submit_text(self):
edit_node = self.outernode.childNodes[1] # not pretty but we need to get value
self.set_text(edit_node.value)
def _expand(self):
# Expand
self.node.classList.add('expanded')
# Collapse when the node changes position (e.g. scroll or layout change)
rect = self.node.getBoundingClientRect()
self._rect_to_check = rect
window.setTimeout(self._check_expanded_pos, 100)
# Collapse when the mouse is used outside the combobox (or its children)
self._addEventListener(window.document, 'mousedown', self._collapse_maybe, 1)
# Return rect so subclasses can use it
return rect
def _collapse_maybe(self, e):
# Collapse if the given pointer event is outside the combobox.
# Better version of blur event, sort of. Don't use mouseup, as then
# there's mouse capturing (the event will come from the main widget).
t = e.target
while t is not window.document.body:
if t is self.outernode:
return
t = t.parentElement
window.document.removeEventListener('mousedown', self._collapse_maybe, 1)
self._collapse()
def _collapse(self):
self.node.classList.remove('expanded')
def _check_expanded_pos(self):
if self.node.classList.contains('expanded'):
rect = self.node.getBoundingClientRect()
if not (rect.top == self._rect_to_check.top and
rect.left == self._rect_to_check.left):
self._collapse()
else:
window.setTimeout(self._check_expanded_pos, 100)
class ComboBox(BaseDropdown):
"""
The Combobox is a combination of a button and a popup list, optionally
with an editable text. It can be used to select among a set of
options in a more compact manner than a TreeWidget would.
Optionally, the text of the combobox can be edited.
It is generally good practice to react to ``user_selected`` to detect user
interaction, and react to ``text``, ``selected_key`` or ``selected_index``
to keep track of all kinds of (incl. programmatic) interaction.
When the combobox is expanded, the arrow keys can be used to select
an item, and it can be made current by pressing Enter or spacebar.
Escape can be used to collapse the combobox.
The ``node`` of this widget is a
`<span> <https://developer.mozilla.org/docs/Web/HTML/Element/span>`_
with some child elements and quite a bit of CSS for rendering.
"""
CSS = """
.flx-ComboBox {
}
.flx-ComboBox > ul {
list-style-type: none;
box-sizing: border-box;
border: 1px solid #333;
border-radius: 3px;
margin: 0;
padding: 2px;
position: fixed; /* because all our widgets are overflow:hidden */
background: white;
z-index: 9999;
display: none;
}
.flx-ComboBox.expanded > ul {
display: block;
max-height: 220px;
overflow-y: auto;
}
.flx-ComboBox.expanded > ul > li:hover {
background: rgba(0, 128, 255, 0.2);
}
.flx-ComboBox.expanded > ul > li.highlighted-true {
box-shadow: inset 0 0 3px 1px rgba(0, 0, 255, 0.4);
}
"""
# Note: we don't define text on the base class, because it would be
# the only common prop, plus we want a different docstring.
text = event.StringProp('', settable=True, doc="""
The text displayed on the widget. This property is set
when an item is selected from the dropdown menu. When editable,
the ``text`` is also set when the text is edited by the user.
This property is settable programmatically regardless of the
value of ``editable``.
""")
selected_index = event.IntProp(-1, settable=True, doc="""
The currently selected item index. Can be -1 if no item has
been selected or when the text was changed manually (if editable).
Can also be set programmatically.
""")
selected_key = event.StringProp('', settable=True, doc="""
The currently selected item key. Can be '' if no item has
been selected or when the text was changed manually (if editable).
Can also be set programmatically.
""")
placeholder_text = event.StringProp('', settable=True, doc="""
The placeholder text to display in editable mode.
""")
editable = event.BoolProp(False, settable=True, doc="""
Whether the combobox's text is editable.
""")
options = event.TupleProp((), settable=True, doc="""
A list of tuples (key, text) representing the options. Both
keys and texts are converted to strings if they are not already.
For items that are given as a string, the key and text are the same.
If a dict is given, it is transformed to key-text pairs.
""")
_highlighted = app.LocalProperty(-1, settable=True, doc="""
The index of the currently highlighted item.
""")
@event.action
def set_options(self, options):
# If dict ...
if isinstance(options, dict):
keys = options.keys()
keys = sorted(keys) # Sort dict by key
options = [(k, options[k]) for k in keys]
# Parse
options2 = []
for opt in options:
if isinstance(opt, (tuple, list)):
opt = str(opt[0]), str(opt[1])
else:
opt = str(opt), str(opt)
options2.append(opt)
self._mutate_options(tuple(options2))
# Be smart about maintaining item selection
keys = [key_text[0] for key_text in self.options]
if self.selected_key and self.selected_key in keys:
key = self.selected_key
self.set_selected_key('')
self.set_selected_key(key) # also changes text
elif 0 <= self.selected_index < len(self.options):
index = self.selected_index
self.set_selected_index(-1)
self.set_selected_index(index) # also changes text
elif self.selected_key:
self.set_selected_key('') # also changes text
else:
pass # no selection, leave text alone
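# Illustrative sketch (not part of the original widget): the option shapes that
# set_options accepts and the (key, text) tuples they normalize to, e.g.
#
# combo.set_options(['one', 'two'])                 # -> (('one', 'one'), ('two', 'two'))
# combo.set_options([(1, 'First'), (2, 'Second')])  # -> (('1', 'First'), ('2', 'Second'))
# combo.set_options({'b': 'Option B', 'a': 'Option A'})  # dict keys are sorted first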
@event.action
def set_selected_index(self, index):
if index == self.selected_index:
return
elif 0 <= index < len(self.options):
key, text = self.options[index]
self._mutate('selected_index', index)
self._mutate('selected_key', key)
self.set_text(text)
else:
self._mutate('selected_index', -1)
self._mutate('selected_key', '')
self.set_text('')
@event.action
def set_selected_key(self, key):
if key == self.selected_key:
return
elif key:
if key == self.selected_key:
return # early exit
for index, option in enumerate(self.options):
if option[0] == key:
self._mutate('selected_index', index)
self._mutate('selected_key', key)
self.set_text(option[1])
return
# else
self._mutate('selected_index', -1)
self._mutate('selected_key', '')
self.set_text('')
@event.emitter
def user_selected(self, index):
""" Event emitted when the user selects an item using the mouse or
keyboard. The event has attributes ``index``, ``key`` and ``text``.
"""
options = self.options
if index >= 0 and index < len(options):
key, text = options[index]
self.set_selected_index(index)
self.set_selected_key(key)
self.set_text(text)
return dict(index=index, key=key, text=text)
def _create_dom(self):
node = super()._create_dom()
node.onkeydown = self._key_down
return node
def _render_dom(self):
# Create a virtual node for each option
options = self.options
option_nodes = []
strud = []
for i in range(len(options)):
key, text = options[i]
clsname = 'highlighted-true' if self._highlighted == i else ''
li = create_element('li',
dict(index=i, className=clsname),
text if len(text.strip()) else '\u00A0')
strud += [text + '\u00A0',
create_element('span', {'class': "flx-dd-space"}),
create_element('br')]
option_nodes.append(li)
# Update the list of nodes created by superclass
nodes = super()._render_dom()
nodes[1].props.placeholder = self.placeholder_text # the line edit
nodes[-1].children = strud # set strud
nodes.append(create_element('ul',
dict(onmousedown=self._ul_click),
option_nodes))
return nodes
@event.reaction
def __track_editable(self):
if self.editable:
self.node.classList.remove('editable-false')
self.node.classList.add('editable-true')
else:
self.node.classList.add('editable-false')
self.node.classList.remove('editable-true')
def _ul_click(self, e):
if hasattr(e.target, 'index'): # not when scrollbar is clicked
self._select_from_ul(e.target.index)
def _select_from_ul(self, index):
self.user_selected(index)
self._collapse()
def _key_down(self, e):
# Get key
key = e.key
if not key and e.code:
key = e.code
# If collapsed, we may want to expand. Otherwise, do nothing.
# In this case, only consume events that dont sit in the way with
# the line edit of an editable combobox.
if not self.node.classList.contains('expanded'):
if key in ['ArrowUp', 'ArrowDown']:
e.stopPropagation()
self.expand()
return
# Early exit, be specific about the keys that we want to accept
if key not in ['Escape', 'ArrowUp', 'ArrowDown', ' ', 'Enter']:
return
# Consume the keys
e.preventDefault()
e.stopPropagation()
if key == 'Escape':
self._set_highlighted(-1)
self._collapse()
elif key == 'ArrowUp' or key == 'ArrowDown':
if key == 'ArrowDown':
hl = self._highlighted + 1
else:
hl = self._highlighted - 1
self._set_highlighted(min(max(hl, 0), len(self.options)-1))
elif key == 'Enter' or key == ' ':
if self._highlighted >= 0 and self._highlighted < len(self.options):
self._select_from_ul(self._highlighted)
def _expand(self):
rect = super()._expand()
ul = self.outernode.children[len(self.outernode.children) - 1]
ul.style.left = rect.left + 'px'
ul.style.width = rect.width + 'px'
ul.style.top = (rect.bottom - 1) + 'px'
# Correct position (above, below) as needed
space_below = window.innerHeight - rect.bottom
if space_below < ul.clientHeight:
space_above = rect.top
if space_above > space_below:
ul.style.top = (rect.top - 1 - ul.clientHeight) + 'px'
def _submit_text(self):
super()._submit_text()
# todo: should this select option if text happens to match it?
self.set_selected_index(-1)
self.set_selected_key('')
class DropdownContainer(BaseDropdown):
"""
A dropdown widget that shows its children when expanded. This can be
used to e.g. make a collapsable tree widget. Some styling may be required
for the child widget to be sized appropriately.
*Note: This widget is currently broken, because pointer events do not work in the
contained widget (at least on Firefox).*
"""
CSS = """
.flx-DropdownContainer {
min-width: 50px;
}
.flx-DropdownContainer > .flx-Widget {
position: fixed;
min-height: 100px;
max-height: 300px;
width: 200px;
background: white;
z-index: 10001;
display: none;
}
.flx-DropdownContainer.expanded > .flx-Widget {
display: initial;
}
"""
text = event.StringProp('', settable=True, doc="""
The text displayed on the dropdown widget.
""")
def _render_dom(self):
nodes = super()._render_dom()
for widget in self.children:
nodes.append(widget.outernode)
return nodes
def _expand(self):
rect = super()._expand()
node = self.children[0].outernode
node.style.left = rect.left + 'px'
node.style.top = (rect.bottom - 1) + 'px'
# node.style.width = (rect.width - 6) + 'px'
|
import json
import voluptuous as vol
from homeassistant.components.mqtt import valid_publish_topic, valid_subscribe_topic
from homeassistant.const import (
ATTR_SERVICE_DATA,
EVENT_CALL_SERVICE,
EVENT_STATE_CHANGED,
EVENT_TIME_CHANGED,
MATCH_ALL,
)
from homeassistant.core import EventOrigin, State, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.json import JSONEncoder
DOMAIN = "mqtt_eventstream"
CONF_PUBLISH_TOPIC = "publish_topic"
CONF_SUBSCRIBE_TOPIC = "subscribe_topic"
CONF_PUBLISH_EVENTSTREAM_RECEIVED = "publish_eventstream_received"
CONF_IGNORE_EVENT = "ignore_event"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_PUBLISH_TOPIC): valid_publish_topic,
vol.Optional(CONF_SUBSCRIBE_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_PUBLISH_EVENTSTREAM_RECEIVED, default=False
): cv.boolean,
vol.Optional(CONF_IGNORE_EVENT, default=[]): cv.ensure_list,
}
)
},
extra=vol.ALLOW_EXTRA,
)
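# Illustrative configuration sketch (not part of the original component; the
# topic names are placeholders, the event types are standard Home Assistant
# event names):
#
# mqtt_eventstream:
#   publish_topic: master/topic
#   subscribe_topic: slaves/topic
#   ignore_event:
#     - call_service
#     - state_changed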
async def async_setup(hass, config):
"""Set up the MQTT eventstream component."""
mqtt = hass.components.mqtt
conf = config.get(DOMAIN, {})
pub_topic = conf.get(CONF_PUBLISH_TOPIC)
sub_topic = conf.get(CONF_SUBSCRIBE_TOPIC)
ignore_event = conf.get(CONF_IGNORE_EVENT)
@callback
def _event_publisher(event):
"""Handle events by publishing them on the MQTT queue."""
if event.origin != EventOrigin.local:
return
if event.event_type == EVENT_TIME_CHANGED:
return
# User-defined events to ignore
if event.event_type in ignore_event:
return
# Filter out the events that were triggered by publishing
# to the MQTT topic, or you will end up in an infinite loop.
if event.event_type == EVENT_CALL_SERVICE:
if (
event.data.get("domain") == mqtt.DOMAIN
and event.data.get("service") == mqtt.SERVICE_PUBLISH
and event.data[ATTR_SERVICE_DATA].get("topic") == pub_topic
):
return
event_info = {"event_type": event.event_type, "event_data": event.data}
msg = json.dumps(event_info, cls=JSONEncoder)
mqtt.async_publish(pub_topic, msg)
# Only listen for local events if you are going to publish them.
if pub_topic:
hass.bus.async_listen(MATCH_ALL, _event_publisher)
# Process events from a remote server that are received on a queue.
@callback
def _event_receiver(msg):
"""Receive events published by and fire them on this hass instance."""
event = json.loads(msg.payload)
event_type = event.get("event_type")
event_data = event.get("event_data")
# Special case handling for event STATE_CHANGED
# We will try to convert state dicts back to State objects
# Copied over from the _handle_api_post_events_event method
# of the api component.
if event_type == EVENT_STATE_CHANGED and event_data:
for key in ("old_state", "new_state"):
state = State.from_dict(event_data.get(key))
if state:
event_data[key] = state
hass.bus.async_fire(
event_type, event_data=event_data, origin=EventOrigin.remote
)
# Only subscribe if you specified a topic.
if sub_topic:
await mqtt.async_subscribe(sub_topic, _event_receiver)
return True
|
import types
import dbus
import dbus.service
def copy_func(function_reference, name=None):
"""
Copy function
:param function_reference: Function
:type function_reference: func
:param name: Name of function
:type name: str
:return: Copy of function
:rtype: func
"""
if hasattr(function_reference, 'func_code'):
# Python 2 style function attributes
return types.FunctionType(function_reference.func_code, function_reference.func_globals, name or function_reference.func_name, function_reference.func_defaults, function_reference.func_closure)
else:
# Python 3 style function attributes
return types.FunctionType(function_reference.__code__, function_reference.__globals__, name or function_reference.__name__, function_reference.__defaults__, function_reference.__closure__)
class DBusServiceFactory(object):
"""
Factory object to create different service objects.
Different service objects are useful, as the DBus service table is stored in the class which would be shared
between all class instances.
"""
service_number = 0
@staticmethod
def get_service(bus_name, object_path):
"""
Get an instance of the service history
:param bus_name: DBus bus name
:type bus_name: str
:param object_path: DBus Object name
:type object_path: str
:return: New object
:rtype: DBusService
"""
new_service = type("DBUSService{0:04}".format(DBusServiceFactory.service_number), (DBusService,), {})
DBusServiceFactory.service_number += 1
return new_service(bus_name, object_path)
class DBusService(dbus.service.Object):
"""
DBus Service object
Allows for dynamic method adding
"""
BUS_TYPE = 'session'
def __init__(self, bus_name, object_path):
"""
Init the object
:param bus_name: DBus bus name
:type bus_name: str
        :param object_path: DBus object path
:type object_path: str
"""
self.bus_name = bus_name
self.object_path = object_path
if DBusService.BUS_TYPE == 'session':
bus_object = dbus.service.BusName(bus_name, bus=dbus.SessionBus())
else:
bus_object = dbus.service.BusName(bus_name, bus=dbus.SystemBus())
super(DBusService, self).__init__(bus_object, object_path)
def add_dbus_method(self, interface_name, function_name, function, in_signature=None, out_signature=None, byte_arrays=False):
"""
Add method to DBus Object
:param interface_name: DBus interface name
:type interface_name: str
:param function_name: DBus function name
:type function_name: str
:param function: Function reference
:type function: object
:param in_signature: DBus function signature
:type in_signature: str
:param out_signature: DBus function signature
:type out_signature: str
:param byte_arrays: Is byte array
:type byte_arrays: bool
"""
# Get class key for use in the DBus introspection table
class_key = [key for key in self._dbus_class_table.keys() if key.endswith(self.__class__.__name__)][0]
        # Create a copy of the function so that registering the same callable multiple times under different names does not affect other instances
function_deepcopy = copy_func(function, function_name)
func = dbus.service.method(interface_name, in_signature=in_signature, out_signature=out_signature, byte_arrays=byte_arrays)(function_deepcopy)
# Add method to DBus tables
try:
self._dbus_class_table[class_key][interface_name][function_name] = func
except KeyError:
self._dbus_class_table[class_key][interface_name] = {function_name: func}
# Add method to class as DBus expects it to be there.
setattr(self.__class__, function_name, func)
def del_dbus_method(self, interface_name, function_name):
"""
Remove method from DBus Object
:param interface_name: DBus interface name
:type interface_name: str
:param function_name: DBus function name
:type function_name: str
"""
# Get class key for use in the DBus introspection table
class_key = [key for key in self._dbus_class_table.keys() if key.endswith(self.__class__.__name__)][0]
# Remove method from DBus tables
# Remove method from class
try:
del self._dbus_class_table[class_key][interface_name][function_name]
delattr(DBusService, function_name)
except (KeyError, AttributeError):
pass
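# Minimal usage sketch (bus name, object path and interface name are
# hypothetical, and a running session bus is assumed):
#
#   service = DBusServiceFactory.get_service('org.example.Daemon', '/org/example/Daemon')
#
#   def ping(self):
#       return 'pong'
#
#   service.add_dbus_method('org.example.Interface', 'ping', ping, out_signature='s')
#   # The method can later be removed again:
#   service.del_dbus_method('org.example.Interface', 'ping')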
|
from datetime import timedelta
import pytest
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
)
from homeassistant.components.group.cover import DEFAULT_NAME
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_STOP_COVER_TILT,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import assert_setup_component, async_fire_time_changed
COVER_GROUP = "cover.cover_group"
DEMO_COVER = "cover.kitchen_window"
DEMO_COVER_POS = "cover.hall_window"
DEMO_COVER_TILT = "cover.living_room_window"
DEMO_TILT = "cover.tilt_demo"
CONFIG_ALL = {
DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
},
]
}
CONFIG_POS = {
DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
},
]
}
CONFIG_ATTRIBUTES = {
DOMAIN: {
"platform": "group",
CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
}
}
@pytest.fixture
async def setup_comp(hass, config_count):
"""Set up group cover component."""
config, count = config_count
with assert_setup_component(count, DOMAIN):
await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
async def test_attributes(hass, setup_comp):
"""Test handling of state attributes."""
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_CLOSED
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert state.attributes[ATTR_ENTITY_ID] == [
DEMO_COVER,
DEMO_COVER_POS,
DEMO_COVER_TILT,
DEMO_TILT,
]
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
assert ATTR_CURRENT_POSITION not in state.attributes
assert ATTR_CURRENT_TILT_POSITION not in state.attributes
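    # Supported-feature bitmask values used below (from the cover component):
    # OPEN=1, CLOSE=2, SET_POSITION=4, STOP=8, OPEN_TILT=16, CLOSE_TILT=32,
    # STOP_TILT=64, SET_TILT_POSITION=128; e.g. 11 = OPEN | CLOSE | STOP.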
# Add Entity that supports open / close / stop
hass.states.async_set(DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11})
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11
assert ATTR_CURRENT_POSITION not in state.attributes
assert ATTR_CURRENT_TILT_POSITION not in state.attributes
# Add Entity that supports set_cover_position
hass.states.async_set(
DEMO_COVER_POS,
STATE_OPEN,
{ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70},
)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15
assert state.attributes[ATTR_CURRENT_POSITION] == 70
assert ATTR_CURRENT_TILT_POSITION not in state.attributes
# Add Entity that supports open tilt / close tilt / stop tilt
hass.states.async_set(DEMO_TILT, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 112})
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 127
assert state.attributes[ATTR_CURRENT_POSITION] == 70
assert ATTR_CURRENT_TILT_POSITION not in state.attributes
# Add Entity that supports set_tilt_position
hass.states.async_set(
DEMO_COVER_TILT,
STATE_OPEN,
{ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60},
)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255
assert state.attributes[ATTR_CURRENT_POSITION] == 70
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
# ### Test assumed state ###
# ##########################
# For covers
hass.states.async_set(
DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100}
)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_ASSUMED_STATE] is True
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 244
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
hass.states.async_remove(DEMO_COVER)
hass.states.async_remove(DEMO_COVER_POS)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 240
assert ATTR_CURRENT_POSITION not in state.attributes
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
# For tilts
hass.states.async_set(
DEMO_TILT,
STATE_OPEN,
{ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 100},
)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_ASSUMED_STATE] is True
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 128
assert ATTR_CURRENT_POSITION not in state.attributes
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
hass.states.async_remove(DEMO_COVER_TILT)
hass.states.async_set(DEMO_TILT, STATE_CLOSED)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_CLOSED
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
assert ATTR_CURRENT_POSITION not in state.attributes
assert ATTR_CURRENT_TILT_POSITION not in state.attributes
hass.states.async_set(DEMO_TILT, STATE_CLOSED, {ATTR_ASSUMED_STATE: True})
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.attributes[ATTR_ASSUMED_STATE] is True
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_open_covers(hass, setup_comp):
"""Test open cover function."""
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_COVER).state == STATE_OPEN
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_close_covers(hass, setup_comp):
"""Test close cover function."""
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_toggle_covers(hass, setup_comp):
"""Test toggle cover function."""
# Start covers in open state
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
# Toggle will close covers
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0
# Toggle again will open covers
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_COVER).state == STATE_OPEN
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_stop_covers(hass, setup_comp):
"""Test stop cover function."""
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_COVER).state == STATE_OPEN
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 20
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 80
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_set_cover_position(hass, setup_comp):
"""Test set cover position function."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: COVER_GROUP, ATTR_POSITION: 50},
blocking=True,
)
for _ in range(4):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 50
assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 50
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 50
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_open_tilts(hass, setup_comp):
"""Test open tilt function."""
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(5):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
assert (
hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 100
)
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_close_tilts(hass, setup_comp):
"""Test close tilt function."""
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(5):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_toggle_tilts(hass, setup_comp):
"""Test toggle tilt function."""
# Start tilted open
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
assert (
hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 100
)
# Toggle will tilt closed
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0
# Toggle again will tilt open
await hass.services.async_call(
DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
assert (
hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 100
)
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_stop_tilts(hass, setup_comp):
"""Test stop tilts function."""
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 60
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_set_tilt_positions(hass, setup_comp):
"""Test set tilt position function."""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
{ATTR_ENTITY_ID: COVER_GROUP, ATTR_TILT_POSITION: 80},
blocking=True,
)
for _ in range(3):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(COVER_GROUP)
assert state.state == STATE_OPEN
assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 80
assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 80
@pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)])
async def test_is_opening_closing(hass, setup_comp):
"""Test is_opening property."""
await hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
await hass.async_block_till_done()
assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING
assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING
assert hass.states.get(COVER_GROUP).state == STATE_OPENING
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
)
assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING
assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING
assert hass.states.get(COVER_GROUP).state == STATE_CLOSING
hass.states.async_set(DEMO_COVER_POS, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11})
await hass.async_block_till_done()
assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING
assert hass.states.get(COVER_GROUP).state == STATE_OPENING
hass.states.async_set(DEMO_COVER_POS, STATE_CLOSING, {ATTR_SUPPORTED_FEATURES: 11})
await hass.async_block_till_done()
assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING
assert hass.states.get(COVER_GROUP).state == STATE_CLOSING
|
from datetime import timedelta
import logging
from agent import AgentError
from homeassistant.components.camera import SUPPORT_ON_OFF
from homeassistant.components.mjpeg.camera import (
CONF_MJPEG_URL,
CONF_STILL_IMAGE_URL,
MjpegCamera,
filter_urllib3_logging,
)
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME
from homeassistant.helpers import entity_platform
from .const import (
ATTRIBUTION,
CAMERA_SCAN_INTERVAL_SECS,
CONNECTION,
DOMAIN as AGENT_DOMAIN,
)
SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)
_LOGGER = logging.getLogger(__name__)
_DEV_EN_ALT = "enable_alerts"
_DEV_DS_ALT = "disable_alerts"
_DEV_EN_REC = "start_recording"
_DEV_DS_REC = "stop_recording"
_DEV_SNAP = "snapshot"
CAMERA_SERVICES = {
_DEV_EN_ALT: "async_enable_alerts",
_DEV_DS_ALT: "async_disable_alerts",
_DEV_EN_REC: "async_start_recording",
_DEV_DS_REC: "async_stop_recording",
_DEV_SNAP: "async_snapshot",
}
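# Each entry above is registered as an entity service during setup, so a service
# call such as the following sketch (entity id hypothetical, domain taken from
# the integration's DOMAIN constant) invokes the mapped coroutine on the
# matching AgentCamera:
#
#   agent_dvr.start_recording
#   entity_id: camera.agent_garage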
async def async_setup_entry(
hass, config_entry, async_add_entities, discovery_info=None
):
"""Set up the Agent cameras."""
filter_urllib3_logging()
cameras = []
server = hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION]
if not server.devices:
_LOGGER.warning("Could not fetch cameras from Agent server")
return
for device in server.devices:
if device.typeID == 2:
camera = AgentCamera(device)
cameras.append(camera)
async_add_entities(cameras)
platform = entity_platform.current_platform.get()
for service, method in CAMERA_SERVICES.items():
platform.async_register_entity_service(service, {}, method)
class AgentCamera(MjpegCamera):
"""Representation of an Agent Device Stream."""
def __init__(self, device):
"""Initialize as a subclass of MjpegCamera."""
self._servername = device.client.name
self.server_url = device.client._server_url
device_info = {
CONF_NAME: device.name,
CONF_MJPEG_URL: f"{self.server_url}{device.mjpeg_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
CONF_STILL_IMAGE_URL: f"{self.server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",
}
self.device = device
self._removed = False
self._name = f"{self._servername} {device.name}"
self._unique_id = f"{device._client.unique}_{device.typeID}_{device.id}"
super().__init__(device_info)
@property
def device_info(self):
"""Return the device info for adding the entity to the agent object."""
return {
"identifiers": {(AGENT_DOMAIN, self._unique_id)},
"name": self._name,
"manufacturer": "Agent",
"model": "Camera",
"sw_version": self.device.client.version,
}
async def async_update(self):
"""Update our state from the Agent API."""
try:
await self.device.update()
if self._removed:
_LOGGER.debug("%s reacquired", self._name)
self._removed = False
except AgentError:
if self.device.client.is_available: # server still available - camera error
if not self._removed:
_LOGGER.error("%s lost", self._name)
self._removed = True
@property
def device_state_attributes(self):
"""Return the Agent DVR camera state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
"editable": False,
"enabled": self.is_on,
"connected": self.connected,
"detected": self.is_detected,
"alerted": self.is_alerted,
"has_ptz": self.device.has_ptz,
"alerts_enabled": self.device.alerts_active,
}
@property
def should_poll(self) -> bool:
"""Update the state periodically."""
return True
@property
def is_recording(self) -> bool:
"""Return whether the monitor is recording."""
return self.device.recording
@property
def is_alerted(self) -> bool:
"""Return whether the monitor has alerted."""
return self.device.alerted
@property
def is_detected(self) -> bool:
"""Return whether the monitor has alerted."""
return self.device.detected
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.device.client.is_available
@property
def connected(self) -> bool:
"""Return True if entity is connected."""
return self.device.connected
@property
def supported_features(self) -> int:
"""Return supported features."""
return SUPPORT_ON_OFF
@property
def is_on(self) -> bool:
"""Return true if on."""
return self.device.online
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
if self.is_on:
return "mdi:camcorder"
return "mdi:camcorder-off"
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self.device.detector_active
@property
def unique_id(self) -> str:
"""Return a unique identifier for this agent object."""
return self._unique_id
async def async_enable_alerts(self):
"""Enable alerts."""
await self.device.alerts_on()
async def async_disable_alerts(self):
"""Disable alerts."""
await self.device.alerts_off()
async def async_enable_motion_detection(self):
"""Enable motion detection."""
await self.device.detector_on()
async def async_disable_motion_detection(self):
"""Disable motion detection."""
await self.device.detector_off()
async def async_start_recording(self):
"""Start recording."""
await self.device.record()
async def async_stop_recording(self):
"""Stop recording."""
await self.device.record_stop()
async def async_turn_on(self):
"""Enable the camera."""
await self.device.enable()
async def async_snapshot(self):
"""Take a snapshot."""
await self.device.snapshot()
async def async_turn_off(self):
"""Disable the camera."""
await self.device.disable()
|
from datetime import timedelta
import logging
import socialbladeclient
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CHANNEL_ID = "channel_id"
DEFAULT_NAME = "Social Blade"
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=2)
SUBSCRIBERS = "subscribers"
TOTAL_VIEWS = "total_views"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CHANNEL_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
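# Illustrative configuration.yaml entry matching the schema above (the platform
# key and the channel id are assumptions, shown only as an example):
#
#   sensor:
#     - platform: socialblade
#       channel_id: UC1234567890abcdefghijkl
#       name: Social Blade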
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Social Blade sensor."""
social_blade = SocialBladeSensor(config[CHANNEL_ID], config[CONF_NAME])
social_blade.update()
if social_blade.valid_channel_id is False:
return
add_entities([social_blade])
class SocialBladeSensor(Entity):
"""Representation of a Social Blade Sensor."""
def __init__(self, case, name):
"""Initialize the Social Blade sensor."""
self._state = None
self.channel_id = case
self._attributes = None
self.valid_channel_id = None
self._name = name
@property
def name(self):
"""Return the name."""
return self._name
@property
def state(self):
"""Return the state."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._attributes:
return self._attributes
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Social Blade."""
try:
data = socialbladeclient.get_data(self.channel_id)
self._attributes = {TOTAL_VIEWS: data[TOTAL_VIEWS]}
self._state = data[SUBSCRIBERS]
self.valid_channel_id = True
except (ValueError, IndexError):
_LOGGER.error("Unable to find valid channel ID")
self.valid_channel_id = False
self._attributes = None
|
from homeassistant.const import TEMP_CELSIUS, VOLUME_CUBIC_METERS
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import RainMachineEntity
from .const import (
DATA_CLIENT,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_UNIVERSAL,
DOMAIN as RAINMACHINE_DOMAIN,
SENSOR_UPDATE_TOPIC,
)
TYPE_FLOW_SENSOR_CLICK_M3 = "flow_sensor_clicks_cubic_meter"
TYPE_FLOW_SENSOR_CONSUMED_LITERS = "flow_sensor_consumed_liters"
TYPE_FLOW_SENSOR_START_INDEX = "flow_sensor_start_index"
TYPE_FLOW_SENSOR_WATERING_CLICKS = "flow_sensor_watering_clicks"
TYPE_FREEZE_TEMP = "freeze_protect_temp"
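# Each SENSORS entry maps a sensor type to
# (name, icon, unit, device_class, enabled_by_default, api_category);
# the tuple is unpacked in async_setup_entry below.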
SENSORS = {
TYPE_FLOW_SENSOR_CLICK_M3: (
"Flow Sensor Clicks",
"mdi:water-pump",
f"clicks/{VOLUME_CUBIC_METERS}",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_CONSUMED_LITERS: (
"Flow Sensor Consumed Liters",
"mdi:water-pump",
"liter",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_START_INDEX: (
"Flow Sensor Start Index",
"mdi:water-pump",
"index",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FLOW_SENSOR_WATERING_CLICKS: (
"Flow Sensor Clicks",
"mdi:water-pump",
"clicks",
None,
False,
DATA_PROVISION_SETTINGS,
),
TYPE_FREEZE_TEMP: (
"Freeze Protect Temperature",
"mdi:thermometer",
TEMP_CELSIUS,
"temperature",
True,
DATA_RESTRICTIONS_UNIVERSAL,
),
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up RainMachine sensors based on a config entry."""
rainmachine = hass.data[RAINMACHINE_DOMAIN][DATA_CLIENT][entry.entry_id]
async_add_entities(
[
RainMachineSensor(
rainmachine,
sensor_type,
name,
icon,
unit,
device_class,
enabled_by_default,
api_category,
)
for (
sensor_type,
(name, icon, unit, device_class, enabled_by_default, api_category),
) in SENSORS.items()
]
)
class RainMachineSensor(RainMachineEntity):
"""A sensor implementation for raincloud device."""
def __init__(
self,
rainmachine,
sensor_type,
name,
icon,
unit,
device_class,
enabled_by_default,
api_category,
):
"""Initialize."""
super().__init__(rainmachine)
self._api_category = api_category
self._device_class = device_class
self._enabled_by_default = enabled_by_default
self._icon = icon
self._name = name
self._sensor_type = sensor_type
self._state = None
self._unit = unit
@property
def entity_registry_enabled_default(self):
"""Determine whether an entity is enabled by default."""
return self._enabled_by_default
@property
def icon(self) -> str:
"""Return the icon."""
return self._icon
@property
def state(self) -> str:
"""Return the name of the entity."""
return self._state
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return "{}_{}".format(
self.rainmachine.device_mac.replace(":", ""), self._sensor_type
)
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(self.hass, SENSOR_UPDATE_TOPIC, self._update_state)
)
await self.rainmachine.async_register_sensor_api_interest(self._api_category)
self.update_from_latest_data()
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listeners and deregister API interest."""
super().async_will_remove_from_hass()
self.rainmachine.async_deregister_sensor_api_interest(self._api_category)
@callback
def update_from_latest_data(self):
"""Update the sensor's state."""
if self._sensor_type == TYPE_FLOW_SENSOR_CLICK_M3:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorClicksPerCubicMeter"
)
elif self._sensor_type == TYPE_FLOW_SENSOR_CONSUMED_LITERS:
clicks = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorWateringClicks"
)
clicks_per_m3 = self.rainmachine.data[DATA_PROVISION_SETTINGS][
"system"
].get("flowSensorClicksPerCubicMeter")
if clicks and clicks_per_m3:
self._state = (clicks * 1000) / clicks_per_m3
else:
self._state = None
elif self._sensor_type == TYPE_FLOW_SENSOR_START_INDEX:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorStartIndex"
)
elif self._sensor_type == TYPE_FLOW_SENSOR_WATERING_CLICKS:
self._state = self.rainmachine.data[DATA_PROVISION_SETTINGS]["system"].get(
"flowSensorWateringClicks"
)
elif self._sensor_type == TYPE_FREEZE_TEMP:
self._state = self.rainmachine.data[DATA_RESTRICTIONS_UNIVERSAL][
"freezeProtectTemp"
]
|
from mock import patch
from paasta_tools.adhoc_tools import AdhocJobConfig
from paasta_tools.adhoc_tools import load_adhoc_job_config
from paasta_tools.marathon_tools import load_marathon_service_config
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.paasta_service_config_loader import PaastaServiceConfigLoader
from paasta_tools.utils import DeploymentsJsonV2
TEST_SERVICE_NAME = "example_happyhour"
TEST_SOA_DIR = "fake_soa_dir"
TEST_CLUSTER_NAME = "cluster"
def create_test_service():
return PaastaServiceConfigLoader(
service=TEST_SERVICE_NAME, soa_dir=TEST_SOA_DIR, load_deployments=True
)
def deployment_json():
return DeploymentsJsonV2(
service="test-service",
config_dict={
"deployments": {
"cluster.non_canary": {
"docker_image": "some_image",
"git_sha": "some_sha",
},
"cluster.canary": {"docker_image": "some_image", "git_sha": "some_sha"},
},
"controls": {
"example_happyhour:%s.sample_batch"
% TEST_CLUSTER_NAME: {"desired_state": "start", "force_bounce": None},
"example_happyhour:%s.interactive"
% TEST_CLUSTER_NAME: {"desired_state": "start", "force_bounce": None},
f"{TEST_SERVICE_NAME}:{TEST_CLUSTER_NAME}.main": {
"desired_state": "start",
"force_bounce": None,
},
f"{TEST_SERVICE_NAME}:{TEST_CLUSTER_NAME}.canary": {
"desired_state": "start",
"force_bounce": None,
},
f"{TEST_SERVICE_NAME}:{TEST_CLUSTER_NAME}.example_child_job": {
"desired_state": "start",
"force_bounce": None,
},
f"{TEST_SERVICE_NAME}:{TEST_CLUSTER_NAME}.sample_batch": {
"desired_state": "start",
"force_bounce": None,
},
f"{TEST_SERVICE_NAME}:{TEST_CLUSTER_NAME}.interactive": {
"desired_state": "start",
"force_bounce": None,
},
},
},
)
def marathon_cluster_config():
"""Return a sample dict to mock paasta_tools.utils.load_service_instance_configs"""
return {
"main": {
"instances": 3,
"deploy_group": "{cluster}.non_canary",
"cpus": 0.1,
"mem": 1000,
},
"canary": {
"instances": 1,
"deploy_group": "{cluster}.canary",
"cpus": 0.1,
"mem": 1000,
},
"not_deployed": {
"instances": 1,
"deploy_group": "not_deployed",
"cpus": 0.1,
"mem": 1000,
},
}
def adhoc_cluster_config():
return {
"sample_batch": {
"deploy_group": "{cluster}.non_canary",
"cpus": 0.1,
"mem": 1000,
"cmd": "/bin/sleep 5s",
},
"interactive": {"deploy_group": "{cluster}.non_canary", "mem": 1000},
"not_deployed": {"deploy_group": "not_deployed"},
}
@patch(
"paasta_tools.paasta_service_config_loader.load_service_instance_configs",
autospec=True,
)
def test_marathon_instances(mock_load_service_instance_configs):
mock_load_service_instance_configs.return_value = marathon_cluster_config()
s = create_test_service()
assert list(s.instances(TEST_CLUSTER_NAME, MarathonServiceConfig)) == [
"main",
"canary",
"not_deployed",
]
mock_load_service_instance_configs.assert_called_once_with(
service=TEST_SERVICE_NAME,
instance_type="marathon",
cluster=TEST_CLUSTER_NAME,
soa_dir=TEST_SOA_DIR,
)
@patch(
"paasta_tools.paasta_service_config_loader.load_v2_deployments_json", autospec=True
)
@patch(
"paasta_tools.paasta_service_config_loader.load_service_instance_configs",
autospec=True,
)
def test_marathon_instances_configs(
mock_load_service_instance_configs, mock_load_deployments_json
):
mock_load_service_instance_configs.return_value = marathon_cluster_config()
mock_load_deployments_json.return_value = deployment_json()
s = create_test_service()
expected = [
MarathonServiceConfig(
service=TEST_SERVICE_NAME,
cluster=TEST_CLUSTER_NAME,
instance="main",
config_dict={
"port": None,
"monitoring": {},
"deploy": {},
"data": {},
"smartstack": {},
"dependencies": {},
"instances": 3,
"deploy_group": f"{TEST_CLUSTER_NAME}.non_canary",
"cpus": 0.1,
"mem": 1000,
},
branch_dict={
"docker_image": "some_image",
"desired_state": "start",
"force_bounce": None,
"git_sha": "some_sha",
},
soa_dir=TEST_SOA_DIR,
),
MarathonServiceConfig(
service=TEST_SERVICE_NAME,
cluster=TEST_CLUSTER_NAME,
instance="canary",
config_dict={
"port": None,
"monitoring": {},
"deploy": {},
"data": {},
"smartstack": {},
"dependencies": {},
"instances": 1,
"deploy_group": f"{TEST_CLUSTER_NAME}.canary",
"cpus": 0.1,
"mem": 1000,
},
branch_dict={
"docker_image": "some_image",
"desired_state": "start",
"force_bounce": None,
"git_sha": "some_sha",
},
soa_dir=TEST_SOA_DIR,
),
]
assert (
list(s.instance_configs(TEST_CLUSTER_NAME, MarathonServiceConfig)) == expected
)
mock_load_service_instance_configs.assert_called_once_with(
service=TEST_SERVICE_NAME,
instance_type="marathon",
cluster=TEST_CLUSTER_NAME,
soa_dir=TEST_SOA_DIR,
)
mock_load_deployments_json.assert_called_once_with(
TEST_SERVICE_NAME, soa_dir=TEST_SOA_DIR
)
@patch(
"paasta_tools.paasta_service_config_loader.load_v2_deployments_json", autospec=True
)
@patch(
"paasta_tools.paasta_service_config_loader.load_service_instance_configs",
autospec=True,
)
def test_adhoc_instances_configs(
mock_load_service_instance_configs, mock_load_deployments_json
):
mock_load_service_instance_configs.return_value = adhoc_cluster_config()
mock_load_deployments_json.return_value = deployment_json()
s = create_test_service()
expected = [
AdhocJobConfig(
service=TEST_SERVICE_NAME,
cluster=TEST_CLUSTER_NAME,
instance="sample_batch",
config_dict={
"port": None,
"monitoring": {},
"deploy": {},
"data": {},
"smartstack": {},
"dependencies": {},
"cmd": "/bin/sleep 5s",
"deploy_group": "cluster.non_canary",
"cpus": 0.1,
"mem": 1000,
},
branch_dict={
"docker_image": "some_image",
"desired_state": "start",
"force_bounce": None,
"git_sha": "some_sha",
},
soa_dir=TEST_SOA_DIR,
),
AdhocJobConfig(
service=TEST_SERVICE_NAME,
cluster=TEST_CLUSTER_NAME,
instance="interactive",
config_dict={
"port": None,
"monitoring": {},
"deploy": {},
"data": {},
"smartstack": {},
"dependencies": {},
"deploy_group": "cluster.non_canary",
"mem": 1000,
},
branch_dict={
"docker_image": "some_image",
"desired_state": "start",
"force_bounce": None,
"git_sha": "some_sha",
},
soa_dir=TEST_SOA_DIR,
),
]
for i in s.instance_configs(TEST_CLUSTER_NAME, AdhocJobConfig):
print(i, i.cluster)
assert list(s.instance_configs(TEST_CLUSTER_NAME, AdhocJobConfig)) == expected
mock_load_service_instance_configs.assert_called_once_with(
service=TEST_SERVICE_NAME,
instance_type="adhoc",
cluster=TEST_CLUSTER_NAME,
soa_dir=TEST_SOA_DIR,
)
mock_load_deployments_json.assert_called_once_with(
TEST_SERVICE_NAME, soa_dir=TEST_SOA_DIR
)
@patch(
"paasta_tools.paasta_service_config_loader.load_v2_deployments_json", autospec=True
)
@patch("paasta_tools.marathon_tools.load_v2_deployments_json", autospec=True)
@patch(
"paasta_tools.paasta_service_config_loader.load_service_instance_configs",
autospec=True,
)
@patch(
"paasta_tools.marathon_tools.load_service_instance_config", autospec=True,
)
def test_old_and_new_ways_load_the_same_marathon_configs(
mock_marathon_tools_load_service_instance_config,
mock_load_service_instance_configs,
mock_marathon_tools_load_deployments_json,
mock_load_deployments_json,
):
mock_load_service_instance_configs.return_value = marathon_cluster_config()
mock_marathon_tools_load_service_instance_config.side_effect = [
marathon_cluster_config().get("main"),
marathon_cluster_config().get("canary"),
]
mock_load_deployments_json.return_value = deployment_json()
mock_marathon_tools_load_deployments_json.return_value = deployment_json()
s = create_test_service()
expected = [
load_marathon_service_config(
service=TEST_SERVICE_NAME,
instance="main",
cluster=TEST_CLUSTER_NAME,
load_deployments=True,
soa_dir=TEST_SOA_DIR,
),
load_marathon_service_config(
service=TEST_SERVICE_NAME,
instance="canary",
cluster=TEST_CLUSTER_NAME,
load_deployments=True,
soa_dir=TEST_SOA_DIR,
),
]
assert (
list(s.instance_configs(TEST_CLUSTER_NAME, MarathonServiceConfig)) == expected
)
@patch(
"paasta_tools.paasta_service_config_loader.load_v2_deployments_json", autospec=True
)
@patch("paasta_tools.adhoc_tools.load_v2_deployments_json", autospec=True)
@patch(
"paasta_tools.paasta_service_config_loader.load_service_instance_configs",
autospec=True,
)
@patch(
"paasta_tools.adhoc_tools.load_service_instance_config", autospec=True,
)
def test_old_and_new_ways_load_the_same_adhoc_configs(
mock_adhoc_tools_load_service_instance_config,
mock_load_service_instance_configs,
mock_adhoc_tools_load_deployments_json,
mock_load_deployments_json,
):
mock_load_service_instance_configs.return_value = adhoc_cluster_config()
mock_adhoc_tools_load_service_instance_config.side_effect = [
adhoc_cluster_config().get("sample_batch"),
adhoc_cluster_config().get("interactive"),
]
mock_load_deployments_json.return_value = deployment_json()
mock_adhoc_tools_load_deployments_json.return_value = deployment_json()
s = create_test_service()
expected = [
load_adhoc_job_config(
service=TEST_SERVICE_NAME,
instance="sample_batch",
cluster=TEST_CLUSTER_NAME,
load_deployments=True,
soa_dir=TEST_SOA_DIR,
),
load_adhoc_job_config(
service=TEST_SERVICE_NAME,
instance="interactive",
cluster=TEST_CLUSTER_NAME,
load_deployments=True,
soa_dir=TEST_SOA_DIR,
),
]
assert list(s.instance_configs(TEST_CLUSTER_NAME, AdhocJobConfig)) == expected
|
import pandas as pd
from scipy.stats import rankdata
from scattertext.Scalers import scale
from scattertext.frequencyreaders.DefaultBackgroundFrequencies import DefaultBackgroundFrequencies
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termscoring.RankDifference import RankDifference
class CharacteristicScorer(object):
def __init__(self,
term_ranker=AbsoluteFrequencyRanker,
background_frequencies=DefaultBackgroundFrequencies,
rerank_ranks=False):
'''
Parameters
----------
        term_ranker : TermRanker, default is AbsoluteFrequencyRanker
background_frequencies : BackgroundFrequencies
rerank_ranks : bool, False by default
orders scores from 0 to 1 by their dense rank
'''
self.term_ranker = term_ranker
self.background_frequencies = background_frequencies
self.rerank_ranks = rerank_ranks
def get_scores(self, corpus):
        raise NotImplementedError
def _rerank_scores(self, scores):
ranks = rankdata(scores, 'dense')
ranks = ranks / ranks.max()
return ranks, 0.5
class DenseRankCharacteristicness(CharacteristicScorer):
def get_scores(self, corpus):
'''
Parameters
----------
corpus
Returns
-------
float, pd.Series
float: point on x-axis at even characteristicness
pd.Series: term -> value between 0 and 1, sorted by score in a descending manner
Background scores from corpus
'''
term_ranks = self.term_ranker(corpus).get_ranks()
freq_df = pd.DataFrame({
'corpus': term_ranks.sum(axis=1),
'standard': self.background_frequencies.get_background_frequency_df()[
'background'
]
})
freq_df = freq_df.loc[freq_df['corpus'].dropna().index].fillna(0)
corpus_rank = rankdata(freq_df.corpus, 'dense')
standard_rank = rankdata(freq_df.standard, 'dense')
scores = corpus_rank/corpus_rank.max() - standard_rank/standard_rank.max()
if self.rerank_ranks:
rank_scores, zero_marker = self._rerank_scores(scores)
freq_df['score'] = pd.Series(rank_scores, index=freq_df.index)
else:
if scores.min() < 0 and scores.max() > 0:
zero_marker = -scores.min() / (scores.max() - scores.min())
elif scores.min() > 0:
zero_marker = 0
else:
zero_marker = 1
freq_df['score'] = scale(scores)
return zero_marker, freq_df.sort_values(by='score', ascending=False)['score']
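# Minimal usage sketch (assumes an existing scattertext corpus object named
# `corpus`):
#
#   scorer = DenseRankCharacteristicness()
#   zero_marker, scores = scorer.get_scores(corpus)
#   # `scores` is a pd.Series of values in [0, 1] indexed by term and sorted
#   # descending; `zero_marker` marks the x-axis point of even characteristicness.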
|
import numpy as np
def rotate_bbox(bbox, angle, size):
"""Rotate bounding boxes by degrees.
Args:
bbox (~numpy.ndarray): See the table below.
angle (float): Counter clock-wise rotation angle (degree).
image is rotated by 90 degrees.
size (tuple): A tuple of length 2. The height and the width
of the image.
.. csv-table::
:header: name, shape, dtype, format
:obj:`bbox`, ":math:`(R, 4)`", :obj:`float32`, \
":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
Returns:
~numpy.ndarray:
Bounding boxes rescaled according to the given :obj:`k`.
"""
    if angle % 90 != 0:
        raise ValueError(
            'Only angles which satisfy angle % 90 == 0 are supported: {}'
            .format(angle))
H, W = size
if angle % 360 == 0:
return bbox
if angle % 360 == 90:
rotated_bbox = np.concatenate(
(W - bbox[:, 3:4], bbox[:, 0:1],
W - bbox[:, 1:2], bbox[:, 2:3]), axis=1)
elif angle % 360 == 180:
rotated_bbox = np.concatenate(
(H - bbox[:, 2:3], W - bbox[:, 3:4],
H - bbox[:, 0:1], W - bbox[:, 1:2]), axis=1)
elif angle % 360 == 270:
rotated_bbox = np.concatenate(
(bbox[:, 1:2], H - bbox[:, 2:3],
bbox[:, 3:4], H - bbox[:, 0:1]), axis=1)
rotated_bbox = rotated_bbox.astype(bbox.dtype)
return rotated_bbox
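# Minimal usage sketch (hypothetical values): rotating one box 90 degrees
# counter clockwise inside an image of size (H, W) = (100, 200).
#
#   bbox = np.array([[10., 20., 30., 60.]], dtype=np.float32)
#   rotate_bbox(bbox, 90, (100, 200))
#   # -> array([[140.,  10., 180.,  30.]], dtype=float32)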
|
import sys
import tempfile
import itertools
import textwrap
import unittest.mock
import types
import mimetypes
import os.path
import attr
import pytest
import py.path # pylint: disable=no-name-in-module
from PyQt5.QtCore import QSize, Qt
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout
from PyQt5.QtNetwork import QNetworkCookieJar
import helpers.stubs as stubsmod
from qutebrowser.config import (config, configdata, configtypes, configexc,
configfiles, configcache, stylesheet)
from qutebrowser.api import config as configapi
from qutebrowser.utils import objreg, standarddir, utils, usertypes
from qutebrowser.browser import greasemonkey, history, qutescheme
from qutebrowser.browser.webkit import cookies, cache
from qutebrowser.misc import savemanager, sql, objects, sessions
from qutebrowser.keyinput import modeman
from qutebrowser.qt import sip
_qute_scheme_handler = None
class WidgetContainer(QWidget):
"""Container for another widget."""
def __init__(self, qtbot, parent=None):
super().__init__(parent)
self._qtbot = qtbot
self.vbox = QVBoxLayout(self)
qtbot.add_widget(self)
self._widget = None
def set_widget(self, widget):
self.vbox.addWidget(widget)
widget.container = self
self._widget = widget
def expose(self):
with self._qtbot.waitExposed(self):
self.show()
self._widget.setFocus()
@pytest.fixture
def widget_container(qtbot):
return WidgetContainer(qtbot)
class WinRegistryHelper:
"""Helper class for win_registry."""
@attr.s
class FakeWindow:
"""A fake window object for the registry."""
registry = attr.ib()
def windowTitle(self):
return 'window title - qutebrowser'
@property
def tabbed_browser(self):
return self.registry['tabbed-browser']
def __init__(self):
self._ids = []
def add_window(self, win_id):
assert win_id not in objreg.window_registry
registry = objreg.ObjectRegistry()
window = self.FakeWindow(registry)
objreg.window_registry[win_id] = window
self._ids.append(win_id)
def cleanup(self):
for win_id in self._ids:
del objreg.window_registry[win_id]
class FakeStatusBar(QWidget):
"""Fake statusbar to test progressbar sizing."""
def __init__(self, parent=None):
super().__init__(parent)
self.hbox = QHBoxLayout(self)
self.hbox.addStretch()
self.hbox.setContentsMargins(0, 0, 0, 0)
self.setAttribute(Qt.WA_StyledBackground, True)
self.setStyleSheet('background-color: red;')
def minimumSizeHint(self):
return QSize(1, self.fontMetrics().height())
@pytest.fixture
def fake_statusbar(widget_container):
"""Fixture providing a statusbar in a container window."""
widget_container.vbox.addStretch()
statusbar = FakeStatusBar(widget_container)
widget_container.set_widget(statusbar)
return statusbar
@pytest.fixture
def win_registry():
"""Fixture providing a window registry for win_id 0 and 1."""
helper = WinRegistryHelper()
helper.add_window(0)
yield helper
helper.cleanup()
@pytest.fixture
def tab_registry(win_registry):
"""Fixture providing a tab registry for win_id 0."""
registry = objreg.ObjectRegistry()
objreg.register('tab-registry', registry, scope='window', window=0)
yield registry
objreg.delete('tab-registry', scope='window', window=0)
@pytest.fixture
def fake_web_tab(stubs, tab_registry, mode_manager, qapp):
"""Fixture providing the FakeWebTab *class*."""
return stubs.FakeWebTab
@pytest.fixture
def greasemonkey_manager(monkeypatch, data_tmpdir):
gm_manager = greasemonkey.GreasemonkeyManager()
monkeypatch.setattr(greasemonkey, 'gm_manager', gm_manager)
@pytest.fixture(scope='session')
def testdata_scheme(qapp):
try:
global _qute_scheme_handler
from qutebrowser.browser.webengine import webenginequtescheme
from PyQt5.QtWebEngineWidgets import QWebEngineProfile
webenginequtescheme.init()
_qute_scheme_handler = webenginequtescheme.QuteSchemeHandler(
parent=qapp)
_qute_scheme_handler.install(QWebEngineProfile.defaultProfile())
except ImportError:
pass
@qutescheme.add_handler('testdata')
def handler(url): # pylint: disable=unused-variable
file_abs = os.path.abspath(os.path.dirname(__file__))
filename = os.path.join(file_abs, os.pardir, 'end2end',
url.path().lstrip('/'))
with open(filename, 'rb') as f:
data = f.read()
mimetype, _encoding = mimetypes.guess_type(filename)
return mimetype, data
@pytest.fixture
def web_tab_setup(qtbot, tab_registry, session_manager_stub,
greasemonkey_manager, fake_args, config_stub,
testdata_scheme):
"""Shared setup for webkit_tab/webengine_tab."""
# Make sure error logging via JS fails tests
config_stub.val.content.javascript.log = {
'info': 'info',
'error': 'error',
'unknown': 'error',
'warning': 'error',
}
@pytest.fixture
def webkit_tab(web_tab_setup, qtbot, cookiejar_and_cache, mode_manager,
widget_container, download_stub, webpage, monkeypatch):
webkittab = pytest.importorskip('qutebrowser.browser.webkit.webkittab')
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebKit)
tab = webkittab.WebKitTab(win_id=0, mode_manager=mode_manager,
private=False)
tab.backend = usertypes.Backend.QtWebKit
widget_container.set_widget(tab)
yield tab
# Make sure the tab shuts itself down properly
tab.private_api.shutdown()
@pytest.fixture
def webengine_tab(web_tab_setup, qtbot, redirect_webengine_data,
tabbed_browser_stubs, mode_manager, widget_container,
monkeypatch):
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebEngine)
tabwidget = tabbed_browser_stubs[0].widget
tabwidget.current_index = 0
tabwidget.index_of = 0
webenginetab = pytest.importorskip(
'qutebrowser.browser.webengine.webenginetab')
tab = webenginetab.WebEngineTab(win_id=0, mode_manager=mode_manager,
private=False)
tab.backend = usertypes.Backend.QtWebEngine
widget_container.set_widget(tab)
yield tab
# If a page is still loading here, _on_load_finished could get called
# during teardown when session_manager_stub is already deleted.
tab.stop()
# Make sure the tab shuts itself down properly
tab.private_api.shutdown()
# If we wait for the GC to clean things up, there's a segfault inside
# QtWebEngine sometimes (e.g. if we only run
# tests/unit/browser/test_caret.py).
sip.delete(tab._widget)
@pytest.fixture(params=['webkit', 'webengine'])
def web_tab(request):
"""A WebKitTab/WebEngineTab."""
if request.param == 'webkit':
pytest.importorskip('qutebrowser.browser.webkit.webkittab')
return request.getfixturevalue('webkit_tab')
elif request.param == 'webengine':
pytest.importorskip('qutebrowser.browser.webengine.webenginetab')
return request.getfixturevalue('webengine_tab')
else:
raise utils.Unreachable
def _generate_cmdline_tests():
"""Generate testcases for test_split_binding."""
@attr.s
class TestCase:
cmd = attr.ib()
valid = attr.ib()
separators = [';;', ' ;; ', ';; ', ' ;;']
invalid = ['foo', '']
valid = ['leave-mode', 'hint all']
# Valid command only -> valid
for item in valid:
yield TestCase(''.join(item), True)
# Invalid command only -> invalid
for item in invalid:
yield TestCase(''.join(item), False)
# Invalid command combined with invalid command -> invalid
for item in itertools.product(invalid, separators, invalid):
yield TestCase(''.join(item), False)
# Valid command combined with valid command -> valid
for item in itertools.product(valid, separators, valid):
yield TestCase(''.join(item), True)
# Valid command combined with invalid command -> invalid
for item in itertools.product(valid, separators, invalid):
yield TestCase(''.join(item), False)
# Invalid command combined with valid command -> invalid
for item in itertools.product(invalid, separators, valid):
yield TestCase(''.join(item), False)
# Command with no_cmd_split combined with an "invalid" command -> valid
for item in itertools.product(['bind x open'], separators, invalid):
yield TestCase(''.join(item), True)
# Partial command
yield TestCase('message-i', False)
@pytest.fixture(params=_generate_cmdline_tests(), ids=lambda e: e.cmd)
def cmdline_test(request):
"""Fixture which generates tests for things validating commandlines."""
return request.param
@pytest.fixture(scope='session')
def configdata_init():
"""Initialize configdata if needed."""
if configdata.DATA is None:
configdata.init()
@pytest.fixture
def yaml_config_stub(config_tmpdir):
"""Fixture which provides a YamlConfig object."""
return configfiles.YamlConfig()
@pytest.fixture
def config_stub(stubs, monkeypatch, configdata_init, yaml_config_stub, qapp):
"""Fixture which provides a fake config object."""
conf = config.Config(yaml_config=yaml_config_stub)
monkeypatch.setattr(config, 'instance', conf)
container = config.ConfigContainer(conf)
monkeypatch.setattr(config, 'val', container)
monkeypatch.setattr(configapi, 'val', container)
cache = configcache.ConfigCache()
monkeypatch.setattr(config, 'cache', cache)
try:
configtypes.FontBase.set_defaults(None, '10pt')
except configexc.NoOptionError:
# Completion tests patch configdata so fonts.default_family is
# unavailable.
pass
conf.val = container # For easier use in tests
stylesheet.init()
return conf
@pytest.fixture
def key_config_stub(config_stub, monkeypatch):
"""Fixture which provides a fake key config object."""
keyconf = config.KeyConfig(config_stub)
monkeypatch.setattr(config, 'key_instance', keyconf)
return keyconf
@pytest.fixture
def quickmark_manager_stub(stubs):
"""Fixture which provides a fake quickmark manager object."""
stub = stubs.QuickmarkManagerStub()
objreg.register('quickmark-manager', stub)
yield stub
objreg.delete('quickmark-manager')
@pytest.fixture
def bookmark_manager_stub(stubs):
"""Fixture which provides a fake bookmark manager object."""
stub = stubs.BookmarkManagerStub()
objreg.register('bookmark-manager', stub)
yield stub
objreg.delete('bookmark-manager')
@pytest.fixture
def session_manager_stub(stubs, monkeypatch):
"""Fixture which provides a fake session-manager object."""
stub = stubs.SessionManagerStub()
monkeypatch.setattr(sessions, 'session_manager', stub)
return stub
@pytest.fixture
def tabbed_browser_stubs(qapp, stubs, win_registry):
"""Fixture providing a fake tabbed-browser object on win_id 0 and 1."""
win_registry.add_window(1)
stubs = [stubs.TabbedBrowserStub(), stubs.TabbedBrowserStub()]
objreg.register('tabbed-browser', stubs[0], scope='window', window=0)
objreg.register('tabbed-browser', stubs[1], scope='window', window=1)
yield stubs
objreg.delete('tabbed-browser', scope='window', window=0)
objreg.delete('tabbed-browser', scope='window', window=1)
@pytest.fixture
def status_command_stub(stubs, qtbot, win_registry):
"""Fixture which provides a fake status-command object."""
cmd = stubs.StatusBarCommandStub()
objreg.register('status-command', cmd, scope='window', window=0)
qtbot.addWidget(cmd)
yield cmd
objreg.delete('status-command', scope='window', window=0)
@pytest.fixture(scope='session')
def stubs():
"""Provide access to stub objects useful for testing."""
return stubsmod
@pytest.fixture(scope='session')
def unicode_encode_err():
"""Provide a fake UnicodeEncodeError exception."""
return UnicodeEncodeError('ascii', # codec
'', # object
0, # start
2, # end
'fake exception') # reason
@pytest.fixture(scope='session')
def qnam(qapp):
"""Session-wide QNetworkAccessManager."""
from PyQt5.QtNetwork import QNetworkAccessManager
nam = QNetworkAccessManager()
nam.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
return nam
@pytest.fixture
def webengineview(qtbot, monkeypatch, web_tab_setup):
"""Get a QWebEngineView if QtWebEngine is available."""
QtWebEngineWidgets = pytest.importorskip('PyQt5.QtWebEngineWidgets')
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebEngine)
view = QtWebEngineWidgets.QWebEngineView()
qtbot.add_widget(view)
return view
@pytest.fixture
def webpage(qnam, monkeypatch):
"""Get a new QWebPage object."""
QtWebKitWidgets = pytest.importorskip('PyQt5.QtWebKitWidgets')
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebKit)
class WebPageStub(QtWebKitWidgets.QWebPage):
"""QWebPage with default error pages disabled."""
def supportsExtension(self, _ext):
"""No extensions needed."""
return False
page = WebPageStub()
page.networkAccessManager().deleteLater()
page.setNetworkAccessManager(qnam)
from qutebrowser.browser.webkit import webkitsettings
webkitsettings._init_user_agent()
return page
@pytest.fixture
def webview(qtbot, webpage):
"""Get a new QWebView object."""
QtWebKitWidgets = pytest.importorskip('PyQt5.QtWebKitWidgets')
view = QtWebKitWidgets.QWebView()
qtbot.add_widget(view)
view.page().deleteLater()
view.setPage(webpage)
view.resize(640, 480)
return view
@pytest.fixture
def webframe(webpage):
"""Convenience fixture to get a mainFrame of a QWebPage."""
return webpage.mainFrame()
@pytest.fixture
def cookiejar_and_cache(stubs, monkeypatch):
"""Fixture providing a fake cookie jar and cache."""
monkeypatch.setattr(cookies, 'cookie_jar', QNetworkCookieJar())
monkeypatch.setattr(cookies, 'ram_cookie_jar', cookies.RAMCookieJar())
monkeypatch.setattr(cache, 'diskcache', stubs.FakeNetworkCache())
@pytest.fixture
def py_proc():
"""Get a python executable and args list which executes the given code."""
if getattr(sys, 'frozen', False):
pytest.skip("Can't be run when frozen")
def func(code):
return (sys.executable, ['-c', textwrap.dedent(code.strip('\n'))])
return func
@pytest.fixture
def fake_save_manager():
"""Create a mock of save-manager and register it into objreg."""
fake_save_manager = unittest.mock.Mock(spec=savemanager.SaveManager)
objreg.register('save-manager', fake_save_manager)
yield fake_save_manager
objreg.delete('save-manager')
@pytest.fixture
def fake_args(request, monkeypatch):
ns = types.SimpleNamespace()
ns.backend = 'webengine' if request.config.webengine else 'webkit'
ns.debug_flags = []
monkeypatch.setattr(objects, 'args', ns)
return ns
@pytest.fixture
def mode_manager(win_registry, config_stub, key_config_stub, qapp):
mm = modeman.init(win_id=0, parent=qapp)
yield mm
objreg.delete('mode-manager', scope='window', window=0)
def standarddir_tmpdir(folder, monkeypatch, tmpdir):
    """Set tmpdir/<folder> as the directory for the given standarddir entry.
    Use this to avoid creating a 'real' directory (such as ~/.config/qute_test).
    """
confdir = tmpdir / folder
confdir.ensure(dir=True)
if hasattr(standarddir, folder):
monkeypatch.setattr(standarddir, folder,
lambda **_kwargs: str(confdir))
return confdir
@pytest.fixture
def download_tmpdir(monkeypatch, tmpdir):
"""Set tmpdir/download as the downloaddir.
Use this to avoid creating a 'real' download dir (~/.config/qute_test).
"""
return standarddir_tmpdir('download', monkeypatch, tmpdir)
@pytest.fixture
def config_tmpdir(monkeypatch, tmpdir):
"""Set tmpdir/config as the configdir.
Use this to avoid creating a 'real' config dir (~/.config/qute_test).
"""
monkeypatch.setattr(
standarddir, 'config_py',
lambda **_kwargs: str(tmpdir / 'config' / 'config.py'))
return standarddir_tmpdir('config', monkeypatch, tmpdir)
@pytest.fixture
def config_py_arg(tmpdir, monkeypatch):
"""Set the config_py arg with a custom value for init."""
f = tmpdir / 'temp_config.py'
monkeypatch.setattr(
standarddir, 'config_py',
lambda **_kwargs: str(f))
return f
@pytest.fixture
def data_tmpdir(monkeypatch, tmpdir):
"""Set tmpdir/data as the datadir.
Use this to avoid creating a 'real' data dir (~/.local/share/qute_test).
"""
return standarddir_tmpdir('data', monkeypatch, tmpdir)
@pytest.fixture
def runtime_tmpdir(monkeypatch, tmpdir):
"""Set tmpdir/runtime as the runtime dir.
Use this to avoid creating a 'real' runtime dir.
"""
return standarddir_tmpdir('runtime', monkeypatch, tmpdir)
@pytest.fixture
def cache_tmpdir(monkeypatch, tmpdir):
"""Set tmpdir/cache as the cachedir.
Use this to avoid creating a 'real' cache dir (~/.cache/qute_test).
"""
return standarddir_tmpdir('cache', monkeypatch, tmpdir)
@pytest.fixture
def redirect_webengine_data(data_tmpdir, monkeypatch):
"""Set XDG_DATA_HOME and HOME to a temp location.
While data_tmpdir covers most cases by redirecting standarddir.data(), this
is not enough for places QtWebEngine references the data dir internally.
For these, we need to set the environment variable to redirect data access.
We also set HOME as in some places, the home directory is used directly...
"""
monkeypatch.setenv('XDG_DATA_HOME', str(data_tmpdir))
monkeypatch.setenv('HOME', str(data_tmpdir))
@pytest.fixture()
def short_tmpdir():
"""A short temporary directory for a XDG_RUNTIME_DIR."""
with tempfile.TemporaryDirectory() as tdir:
yield py.path.local(tdir) # pylint: disable=no-member
@pytest.fixture
def init_sql(data_tmpdir):
"""Initialize the SQL module, and shut it down after the test."""
path = str(data_tmpdir / 'test.db')
sql.init(path)
yield
sql.close()
class ModelValidator:
"""Validates completion models."""
def __init__(self, modeltester):
self._model = None
self._modeltester = modeltester
def set_model(self, model):
self._model = model
self._modeltester.check(model)
def validate(self, expected):
assert self._model.rowCount() == len(expected)
for row, items in enumerate(expected):
for col, item in enumerate(items):
assert self._model.data(self._model.index(row, col)) == item
@pytest.fixture
def model_validator(qtmodeltester):
    """Provide a ModelValidator backed by pytest-qt's qtmodeltester."""
    return ModelValidator(qtmodeltester)
@pytest.fixture
def download_stub(win_registry, tmpdir, stubs):
"""Register a FakeDownloadManager."""
stub = stubs.FakeDownloadManager(tmpdir)
objreg.register('qtnetwork-download-manager', stub)
yield stub
objreg.delete('qtnetwork-download-manager')
@pytest.fixture
def web_history(fake_save_manager, tmpdir, init_sql, config_stub, stubs,
monkeypatch):
"""Create a WebHistory object."""
config_stub.val.completion.timestamp_format = '%Y-%m-%d'
config_stub.val.completion.web_history.max_items = -1
web_history = history.WebHistory(stubs.FakeHistoryProgress())
monkeypatch.setattr(history, 'web_history', web_history)
return web_history
@pytest.fixture
def blue_widget(qtbot):
    """Provide a blue-colored widget for visual tests."""
    widget = QWidget()
widget.setStyleSheet('background-color: blue;')
qtbot.add_widget(widget)
return widget
@pytest.fixture
def red_widget(qtbot):
    """Provide a red-colored widget for visual tests."""
    widget = QWidget()
widget.setStyleSheet('background-color: red;')
qtbot.add_widget(widget)
return widget
@pytest.fixture
def state_config(data_tmpdir, monkeypatch):
    """Provide a StateConfig object monkeypatched into configfiles.state."""
    state = configfiles.StateConfig()
monkeypatch.setattr(configfiles, 'state', state)
return state
@pytest.fixture
def unwritable_tmp_path(tmp_path):
    """Provide a temporary path which is made non-writable."""
    tmp_path.chmod(0)
if os.access(str(tmp_path), os.W_OK):
# Docker container or similar
pytest.skip("Directory was still writable")
yield tmp_path
# Make sure pytest can clean up the tmp_path
tmp_path.chmod(0o755)
|
import asyncio
from datetime import timedelta
import json
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, CONF_OFFSET
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
_RESOURCE = "https://hourlypricing.comed.com/api"
SCAN_INTERVAL = timedelta(minutes=5)
ATTRIBUTION = "Data provided by ComEd Hourly Pricing service"
CONF_CURRENT_HOUR_AVERAGE = "current_hour_average"
CONF_FIVE_MINUTE = "five_minute"
CONF_MONITORED_FEEDS = "monitored_feeds"
CONF_SENSOR_TYPE = "type"
SENSOR_TYPES = {
CONF_FIVE_MINUTE: ["ComEd 5 Minute Price", "c"],
CONF_CURRENT_HOUR_AVERAGE: ["ComEd Current Hour Average Price", "c"],
}
TYPES_SCHEMA = vol.In(SENSOR_TYPES)
SENSORS_SCHEMA = vol.Schema(
{
vol.Required(CONF_SENSOR_TYPE): TYPES_SCHEMA,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_OFFSET, default=0.0): vol.Coerce(float),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_MONITORED_FEEDS): [SENSORS_SCHEMA]}
)
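# A small validation sketch (not part of the original integration): the
# SENSORS_SCHEMA above can be exercised directly to see how a monitored feed
# entry is normalized; the feed values below are purely illustrative.
if __name__ == "__main__":
    example_feed = SENSORS_SCHEMA(
        {CONF_SENSOR_TYPE: CONF_FIVE_MINUTE, CONF_NAME: "ComEd 5 minute price"}
    )
    # CONF_OFFSET gets its default of 0.0 applied by the schema.
    print(example_feed)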
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the ComEd Hourly Pricing sensor."""
websession = async_get_clientsession(hass)
dev = []
for variable in config[CONF_MONITORED_FEEDS]:
dev.append(
ComedHourlyPricingSensor(
hass.loop,
websession,
variable[CONF_SENSOR_TYPE],
variable[CONF_OFFSET],
variable.get(CONF_NAME),
)
)
async_add_entities(dev, True)
class ComedHourlyPricingSensor(Entity):
"""Implementation of a ComEd Hourly Pricing sensor."""
def __init__(self, loop, websession, sensor_type, offset, name):
"""Initialize the sensor."""
self.loop = loop
self.websession = websession
if name:
self._name = name
else:
self._name = SENSOR_TYPES[sensor_type][0]
self.type = sensor_type
self.offset = offset
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
async def async_update(self):
"""Get the ComEd Hourly Pricing data from the web service."""
try:
            if self.type in (CONF_FIVE_MINUTE, CONF_CURRENT_HOUR_AVERAGE):
url_string = _RESOURCE
if self.type == CONF_FIVE_MINUTE:
url_string += "?type=5minutefeed"
else:
url_string += "?type=currenthouraverage"
with async_timeout.timeout(60):
response = await self.websession.get(url_string)
# The API responds with MIME type 'text/html'
text = await response.text()
data = json.loads(text)
self._state = round(float(data[0]["price"]) + self.offset, 2)
else:
self._state = None
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
_LOGGER.error("Could not get data from ComEd API: %s", err)
except (ValueError, KeyError):
_LOGGER.warning("Could not update status for %s", self.name)
|
import contextlib
import socket
import threading
import queue
import webbrowser
import six
import pkg_resources
class UserAgentRequestHandler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(self):
"""Serve a GET request."""
self.do_HEAD()
template = pkg_resources.resource_string(__name__, "static/splash.html")
page = template.decode('utf-8').format(self.headers.get("User-Agent"), self.cache)
self.wfile.write(page.encode('utf-8'))
def do_HEAD(self):
"""Serve a HEAD request."""
self.queue.put(self.headers.get("User-Agent"))
self.send_response(six.moves.BaseHTTPServer.HTTPStatus.OK)
self.send_header("Location", self.path)
self.end_headers()
def log_message(self, format, *args):
pass # silence the server
def get_free_port():
with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(('', 0))
return s.getsockname()[1]
def get_user_agent(port=None, cache=None):
    # Set up the request handler's shared queue and cache (class attributes
    # visible to every request)
    UserAgentRequestHandler.queue = queue.Queue()
    UserAgentRequestHandler.cache = cache
    # The queue lets us block below until the user agent has been received.
# Use the given port or get a free one and create the HTTP server
port = port or get_free_port()
server = six.moves.BaseHTTPServer.HTTPServer(
("localhost", port),
UserAgentRequestHandler,
)
# Launch the server thread in the background
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
# Use webbrowser to connect to the server with the default browser
webbrowser.open("http://localhost:{}/".format(port))
# Wait for the request handler to get the request from the browser
user_agent = UserAgentRequestHandler.queue.get()
# Close the server
server.shutdown()
server.server_close()
# Return the obtained user agent
return user_agent
if __name__ == "__main__":
print(get_user_agent())
|
def get_context_first_matching_object(context, context_lookups):
"""
    Return the first matching (key, object) pair found in the context,
    looking up the given keys in order.
"""
for key in context_lookups:
context_object = context.get(key)
if context_object:
return key, context_object
return None, None
def get_context_first_object(context, context_lookups):
"""
Return the first object found in the context,
from a list of keys.
"""
return get_context_first_matching_object(
context, context_lookups)[1]
def get_context_loop_positions(context):
"""
Return the paginated current position within a loop,
and the non-paginated position.
"""
try:
loop_counter = context['forloop']['counter']
except KeyError:
return 0, 0
try:
page = context['page_obj']
except KeyError:
return loop_counter, loop_counter
total_loop_counter = ((page.number - 1) * page.paginator.per_page +
loop_counter)
return total_loop_counter, loop_counter
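# A minimal usage sketch (not part of the original module) showing how the
# helpers above resolve objects and loop positions from a template context.
# The fake classes below only mimic the attributes of Django's Page/Paginator
# that the code actually reads; they are assumptions made for this demo.
if __name__ == "__main__":
    class _FakePaginator:
        per_page = 10
    class _FakePage:
        number = 3
        paginator = _FakePaginator()
    context = {
        "entry": "first entry",
        "object": "generic object",
        "forloop": {"counter": 2},
        "page_obj": _FakePage(),
    }
    # 'entry' is looked up first, so its key/object pair is returned.
    print(get_context_first_matching_object(context, ["entry", "object"]))
    # Page 3 with 10 items per page and a loop counter of 2 -> (22, 2).
    print(get_context_loop_positions(context))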
|
import cherrypy
from cherrypy import _json as json
from cherrypy._cpcompat import text_or_bytes, ntou
def json_processor(entity):
"""Read application/json data into request.json."""
if not entity.headers.get(ntou('Content-Length'), ntou('')):
raise cherrypy.HTTPError(411)
body = entity.fp.read()
with cherrypy.HTTPError.handle(ValueError, 400, 'Invalid JSON document'):
cherrypy.serving.request.json = json.decode(body.decode('utf-8'))
def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
force=True, debug=False, processor=json_processor):
"""Add a processor to parse JSON request entities:
The default processor places the parsed data into request.json.
Incoming request entities which match the given content_type(s) will
be deserialized from JSON to the Python equivalent, and the result
stored at cherrypy.request.json. The 'content_type' argument may
be a Content-Type string or a list of allowable Content-Type strings.
If the 'force' argument is True (the default), then entities of other
content types will not be allowed; "415 Unsupported Media Type" is
raised instead.
Supply your own processor to use a custom decoder, or to handle the parsed
data differently. The processor can be configured via
tools.json_in.processor or via the decorator method.
Note that the deserializer requires the client send a Content-Length
request header, or it will raise "411 Length Required". If for any
other reason the request entity cannot be deserialized from JSON,
it will raise "400 Bad Request: Invalid JSON document".
"""
request = cherrypy.serving.request
if isinstance(content_type, text_or_bytes):
content_type = [content_type]
if force:
if debug:
cherrypy.log('Removing body processors %s' %
repr(request.body.processors.keys()), 'TOOLS.JSON_IN')
request.body.processors.clear()
request.body.default_proc = cherrypy.HTTPError(
415, 'Expected an entity of content type %s' %
', '.join(content_type))
for ct in content_type:
if debug:
cherrypy.log('Adding body processor for %s' % ct, 'TOOLS.JSON_IN')
request.body.processors[ct] = processor
def json_handler(*args, **kwargs):
value = cherrypy.serving.request._json_inner_handler(*args, **kwargs)
return json.encode(value)
def json_out(content_type='application/json', debug=False,
handler=json_handler):
"""Wrap request.handler to serialize its output to JSON. Sets Content-Type.
If the given content_type is None, the Content-Type response header
is not set.
Provide your own handler to use a custom encoder. For example
cherrypy.config['tools.json_out.handler'] = <function>, or
@json_out(handler=function).
"""
request = cherrypy.serving.request
# request.handler may be set to None by e.g. the caching tool
# to signal to all components that a response body has already
# been attached, in which case we don't need to wrap anything.
if request.handler is None:
return
if debug:
cherrypy.log('Replacing %s with JSON handler' % request.handler,
'TOOLS.JSON_OUT')
request._json_inner_handler = request.handler
request.handler = handler
if content_type is not None:
if debug:
cherrypy.log('Setting Content-Type to %s' %
content_type, 'TOOLS.JSON_OUT')
cherrypy.serving.response.headers['Content-Type'] = content_type
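# Usage sketch (not part of the original module): with the standard
# cherrypy.tools.json_in / cherrypy.tools.json_out tools enabled, a handler
# reads the decoded body from cherrypy.request.json and may return plain
# Python objects which are serialized back to JSON. The demo only defines a
# handler class; it does not start a server.
if __name__ == '__main__':
    class _EchoDemo:
        @cherrypy.expose
        @cherrypy.tools.json_in()
        @cherrypy.tools.json_out()
        def echo(self):
            # The request body was parsed by json_processor into request.json.
            return {'received': cherrypy.request.json}
    # To try it out: cherrypy.quickstart(_EchoDemo())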
|
import pytest
from arctic.tickstore import tickstore
from arctic.tickstore import toplevel
def pytest_generate_tests(metafunc):
if 'tickstore_lib' in metafunc.fixturenames:
metafunc.parametrize("tickstore_lib", ['tickstore'], indirect=True)
@pytest.fixture(scope='function')
def tickstore_lib(arctic, request):
if request.param == "tickstore":
store = tickstore
arctic.initialize_library('test.tickstore', store.TICK_STORE_TYPE)
return arctic['test.tickstore']
@pytest.fixture(scope='function')
def toplevel_tickstore(arctic):
arctic.initialize_library('test.toplevel_tickstore', toplevel.TICK_STORE_TYPE)
return arctic['test.toplevel_tickstore']
|
from __future__ import absolute_import, unicode_literals
import os
import pytest
import kaptan
from tmuxp import config
from .fixtures import config_teamocil as fixtures
TMUXP_DIR = os.path.join(os.path.dirname(__file__), '.tmuxp')
@pytest.mark.parametrize(
"teamocil_yaml,teamocil_dict,tmuxp_dict",
[
(
fixtures.test1.teamocil_yaml,
fixtures.test1.teamocil_conf,
fixtures.test1.expected,
),
(
fixtures.test2.teamocil_yaml,
fixtures.test2.teamocil_dict,
fixtures.test2.expected,
),
(
fixtures.test3.teamocil_yaml,
fixtures.test3.teamocil_dict,
fixtures.test3.expected,
),
(
fixtures.test4.teamocil_yaml,
fixtures.test4.teamocil_dict,
fixtures.test4.expected,
),
],
)
def test_config_to_dict(teamocil_yaml, teamocil_dict, tmuxp_dict):
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(teamocil_yaml)
yaml_to_dict = test_config.get()
assert yaml_to_dict == teamocil_dict
assert config.import_teamocil(teamocil_dict) == tmuxp_dict
config.validate_schema(config.import_teamocil(teamocil_dict))
@pytest.fixture(scope='module')
def multisession_config():
"""Return loaded multisession teamocil config as a dictionary.
    Also prevents re-running the assertion that loads the yaml, since the
    ordering of deep list items like panes can be inconsistent."""
teamocil_yaml = fixtures.layouts.teamocil_yaml
configparser = kaptan.Kaptan(handler='yaml')
test_config = configparser.import_config(teamocil_yaml)
teamocil_dict = fixtures.layouts.teamocil_dict
assert test_config.get() == teamocil_dict
return teamocil_dict
@pytest.mark.parametrize(
"session_name,expected",
[
('two-windows', fixtures.layouts.two_windows),
('two-windows-with-filters', fixtures.layouts.two_windows_with_filters),
(
'two-windows-with-custom-command-options',
fixtures.layouts.two_windows_with_custom_command_options,
),
(
'three-windows-within-a-session',
fixtures.layouts.three_windows_within_a_session,
),
],
)
def test_multisession_config(session_name, expected, multisession_config):
# teamocil can fit multiple sessions in a config
assert config.import_teamocil(multisession_config[session_name]) == expected
config.validate_schema(config.import_teamocil(multisession_config[session_name]))
|
import pickle
from mne.utils import BunchConstNamed
from mne.utils._bunch import NamedInt, NamedFloat
def test_pickle():
"""Test if BunchConstNamed object can be pickled."""
b1 = BunchConstNamed()
b1.x = 1
b1.y = 2.12
assert isinstance(b1.x, int)
assert isinstance(b1.x, NamedInt)
assert repr(b1.x) == '1 (x)'
assert isinstance(b1.y, float)
assert isinstance(b1.y, NamedFloat)
assert repr(b1.y) == '2.12 (y)'
b2 = pickle.loads(pickle.dumps(b1))
assert b1 == b2
|
import logging
from miio import DeviceException, gateway
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class ConnectXiaomiGateway:
"""Class to async connect to a Xiaomi Gateway."""
def __init__(self, hass):
"""Initialize the entity."""
self._hass = hass
self._gateway_device = None
self._gateway_info = None
@property
def gateway_device(self):
"""Return the class containing all connections to the gateway."""
return self._gateway_device
@property
def gateway_info(self):
"""Return the class containing gateway info."""
return self._gateway_info
async def async_connect_gateway(self, host, token):
"""Connect to the Xiaomi Gateway."""
_LOGGER.debug("Initializing with host %s (token %s...)", host, token[:5])
try:
self._gateway_device = gateway.Gateway(host, token)
# get the gateway info
self._gateway_info = await self._hass.async_add_executor_job(
self._gateway_device.info
)
# get the connected sub devices
await self._hass.async_add_executor_job(
self._gateway_device.discover_devices
)
except DeviceException:
_LOGGER.error(
"DeviceException during setup of xiaomi gateway with host %s", host
)
return False
_LOGGER.debug(
"%s %s %s detected",
self._gateway_info.model,
self._gateway_info.firmware_version,
self._gateway_info.hardware_version,
)
return True
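# Illustrative usage sketch (an assumption, not part of the integration): a
# config entry setup could use the connection helper above roughly like this
# before creating XiaomiGatewayDevice entities for the discovered sub devices.
async def _example_connect_gateway(hass, host, token):
    """Connect to a gateway and return it, or None on failure (demo only)."""
    connect_helper = ConnectXiaomiGateway(hass)
    if not await connect_helper.async_connect_gateway(host, token):
        return None
    return connect_helper.gateway_device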
class XiaomiGatewayDevice(Entity):
"""Representation of a base Xiaomi Gateway Device."""
def __init__(self, sub_device, entry):
"""Initialize the Xiaomi Gateway Device."""
self._sub_device = sub_device
self._entry = entry
self._unique_id = sub_device.sid
self._name = f"{sub_device.name} ({sub_device.sid})"
self._available = False
@property
def unique_id(self):
"""Return an unique ID."""
return self._unique_id
@property
def name(self):
"""Return the name of this entity, if any."""
return self._name
@property
def device_info(self):
"""Return the device info of the gateway."""
return {
"identifiers": {(DOMAIN, self._sub_device.sid)},
"via_device": (DOMAIN, self._entry.unique_id),
"manufacturer": "Xiaomi",
"name": self._sub_device.name,
"model": self._sub_device.model,
"sw_version": self._sub_device.firmware_version,
}
@property
def available(self):
"""Return true when state is known."""
return self._available
async def async_update(self):
"""Fetch state from the sub device."""
try:
await self.hass.async_add_executor_job(self._sub_device.update)
self._available = True
except gateway.GatewayException as ex:
if self._available:
self._available = False
_LOGGER.error("Got exception while fetching the state: %s", ex)
|
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
class AdvantageAirEntity(CoordinatorEntity):
"""Parent class for Advantage Air Entities."""
def __init__(self, instance, ac_key, zone_key=None):
"""Initialize common aspects of an Advantage Air sensor."""
super().__init__(instance["coordinator"])
self.async_change = instance["async_change"]
self.ac_key = ac_key
self.zone_key = zone_key
@property
def _ac(self):
return self.coordinator.data["aircons"][self.ac_key]["info"]
@property
def _zone(self):
return self.coordinator.data["aircons"][self.ac_key]["zones"][self.zone_key]
@property
def device_info(self):
"""Return parent device information."""
return {
"identifiers": {(DOMAIN, self.coordinator.data["system"]["rid"])},
"name": self.coordinator.data["system"]["name"],
"manufacturer": "Advantage Air",
"model": self.coordinator.data["system"]["sysType"],
"sw_version": self.coordinator.data["system"]["myAppRev"],
}
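# Illustrative sketch (hypothetical, not part of this module): a child entity
# typically combines the coordinator data exposed by the properties above with
# its own state logic. The "name" key used below is an assumption about the
# zone payload made only for this demo.
class ExampleAdvantageAirZoneName(AdvantageAirEntity):
    """Expose a zone's configured name (demo only)."""
    @property
    def name(self):
        return self._zone["name"]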
|
from asyncio import gather
from typing import Any, Optional
from async_timeout import timeout
from python_awair import Awair
from python_awair.exceptions import AuthError
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import API_TIMEOUT, DOMAIN, LOGGER, UPDATE_INTERVAL, AwairResult
PLATFORMS = ["sensor"]
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up Awair integration."""
return True
async def async_setup_entry(hass, config_entry) -> bool:
"""Set up Awair integration from a config entry."""
session = async_get_clientsession(hass)
coordinator = AwairDataUpdateCoordinator(hass, config_entry, session)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = coordinator
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True
async def async_unload_entry(hass, config_entry) -> bool:
"""Unload Awair configuration."""
tasks = []
for platform in PLATFORMS:
tasks.append(
hass.config_entries.async_forward_entry_unload(config_entry, platform)
)
unload_ok = all(await gather(*tasks))
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
class AwairDataUpdateCoordinator(DataUpdateCoordinator):
"""Define a wrapper class to update Awair data."""
def __init__(self, hass, config_entry, session) -> None:
"""Set up the AwairDataUpdateCoordinator class."""
access_token = config_entry.data[CONF_ACCESS_TOKEN]
self._awair = Awair(access_token=access_token, session=session)
self._config_entry = config_entry
super().__init__(hass, LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL)
async def _async_update_data(self) -> Optional[Any]:
"""Update data via Awair client library."""
with timeout(API_TIMEOUT):
try:
LOGGER.debug("Fetching users and devices")
user = await self._awair.user()
devices = await user.devices()
results = await gather(
*[self._fetch_air_data(device) for device in devices]
)
return {result.device.uuid: result for result in results}
except AuthError as err:
flow_context = {
"source": "reauth",
"unique_id": self._config_entry.unique_id,
}
matching_flows = [
flow
for flow in self.hass.config_entries.flow.async_progress()
if flow["context"] == flow_context
]
if not matching_flows:
self.hass.async_create_task(
self.hass.config_entries.flow.async_init(
DOMAIN,
context=flow_context,
data=self._config_entry.data,
)
)
raise UpdateFailed(err) from err
except Exception as err:
raise UpdateFailed(err) from err
async def _fetch_air_data(self, device):
"""Fetch latest air quality data."""
LOGGER.debug("Fetching data for %s", device.uuid)
air_data = await device.air_data_latest()
LOGGER.debug(air_data)
return AwairResult(device=device, air_data=air_data)
|
from datetime import timedelta
import logging
from clearpasspy import ClearPass
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_API_KEY, CONF_CLIENT_ID, CONF_HOST
import homeassistant.helpers.config_validation as cv
SCAN_INTERVAL = timedelta(seconds=120)
GRANT_TYPE = "client_credentials"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_API_KEY): cv.string,
}
)
_LOGGER = logging.getLogger(__name__)
def get_scanner(hass, config):
"""Initialize Scanner."""
data = {
"server": config[DOMAIN][CONF_HOST],
"grant_type": GRANT_TYPE,
"secret": config[DOMAIN][CONF_API_KEY],
"client": config[DOMAIN][CONF_CLIENT_ID],
}
cppm = ClearPass(data)
if cppm.access_token is None:
return None
_LOGGER.debug("Successfully received Access Token")
return CPPMDeviceScanner(cppm)
class CPPMDeviceScanner(DeviceScanner):
"""Initialize class."""
def __init__(self, cppm):
"""Initialize class."""
self._cppm = cppm
self.results = None
def scan_devices(self):
"""Initialize scanner."""
self.get_cppm_data()
return [device["mac"] for device in self.results]
def get_device_name(self, device):
"""Retrieve device name."""
name = next(
(result["name"] for result in self.results if result["mac"] == device), None
)
return name
def get_cppm_data(self):
"""Retrieve data from Aruba Clearpass and return parsed result."""
endpoints = self._cppm.get_endpoints(100)["_embedded"]["items"]
devices = []
for item in endpoints:
if self._cppm.online_status(item["mac_address"]):
device = {"mac": item["mac_address"], "name": item["mac_address"]}
devices.append(device)
_LOGGER.debug("Devices: %s", devices)
self.results = devices
|
import logging
from scsgate.messages import ScenarioTriggeredMessage, StateMessage
from scsgate.tasks import ToggleStatusTask
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import ATTR_ENTITY_ID, ATTR_STATE, CONF_DEVICES, CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import CONF_SCS_ID, DOMAIN, SCSGATE_SCHEMA
ATTR_SCENARIO_ID = "scenario_id"
CONF_TRADITIONAL = "traditional"
CONF_SCENARIO = "scenario"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_DEVICES): cv.schema_with_slug_keys(SCSGATE_SCHEMA)}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SCSGate switches."""
logger = logging.getLogger(__name__)
scsgate = hass.data[DOMAIN]
_setup_traditional_switches(
logger=logger,
config=config,
scsgate=scsgate,
add_entities_callback=add_entities,
)
_setup_scenario_switches(logger=logger, config=config, scsgate=scsgate, hass=hass)
def _setup_traditional_switches(logger, config, scsgate, add_entities_callback):
"""Add traditional SCSGate switches."""
traditional = config.get(CONF_TRADITIONAL)
switches = []
if traditional:
for entity_info in traditional.values():
if entity_info[CONF_SCS_ID] in scsgate.devices:
continue
name = entity_info[CONF_NAME]
scs_id = entity_info[CONF_SCS_ID]
logger.info("Adding %s scsgate.traditional_switch", name)
switch = SCSGateSwitch(
name=name, scs_id=scs_id, logger=logger, scsgate=scsgate
)
switches.append(switch)
add_entities_callback(switches)
scsgate.add_devices_to_register(switches)
def _setup_scenario_switches(logger, config, scsgate, hass):
"""Add only SCSGate scenario switches."""
scenario = config.get(CONF_SCENARIO)
if scenario:
for entity_info in scenario.values():
if entity_info[CONF_SCS_ID] in scsgate.devices:
continue
name = entity_info[CONF_NAME]
scs_id = entity_info[CONF_SCS_ID]
logger.info("Adding %s scsgate.scenario_switch", name)
switch = SCSGateScenarioSwitch(
name=name, scs_id=scs_id, logger=logger, hass=hass
)
scsgate.add_device(switch)
class SCSGateSwitch(SwitchEntity):
"""Representation of a SCSGate switch."""
def __init__(self, scs_id, name, logger, scsgate):
"""Initialize the switch."""
self._name = name
self._scs_id = scs_id
self._toggled = False
self._logger = logger
self._scsgate = scsgate
@property
def scs_id(self):
"""Return the SCS ID."""
return self._scs_id
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if switch is on."""
return self._toggled
def turn_on(self, **kwargs):
"""Turn the device on."""
self._scsgate.append_task(ToggleStatusTask(target=self._scs_id, toggled=True))
self._toggled = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
self._scsgate.append_task(ToggleStatusTask(target=self._scs_id, toggled=False))
self._toggled = False
self.schedule_update_ha_state()
def process_event(self, message):
"""Handle a SCSGate message related with this switch."""
if self._toggled == message.toggled:
self._logger.info(
"Switch %s, ignoring message %s because state already active",
self._scs_id,
message,
)
# Nothing changed, ignoring
return
self._toggled = message.toggled
self.schedule_update_ha_state()
command = "off"
if self._toggled:
command = "on"
self.hass.bus.fire(
"button_pressed", {ATTR_ENTITY_ID: self._scs_id, ATTR_STATE: command}
)
class SCSGateScenarioSwitch:
"""Provides a SCSGate scenario switch.
    This switch is always in an 'off' state; when toggled, it's used to
    trigger events.
"""
def __init__(self, scs_id, name, logger, hass):
"""Initialize the scenario."""
self._name = name
self._scs_id = scs_id
self._logger = logger
self._hass = hass
@property
def scs_id(self):
"""Return the SCS ID."""
return self._scs_id
@property
def name(self):
"""Return the name of the device if any."""
return self._name
def process_event(self, message):
"""Handle a SCSGate message related with this switch."""
if isinstance(message, StateMessage):
scenario_id = message.bytes[4]
elif isinstance(message, ScenarioTriggeredMessage):
scenario_id = message.scenario
else:
            self._logger.warning(
                "Scenario switch: received unknown message %s", message
            )
return
self._hass.bus.fire(
"scenario_switch_triggered",
{ATTR_ENTITY_ID: int(self._scs_id), ATTR_SCENARIO_ID: int(scenario_id, 16)},
)
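# Illustrative sketch (not part of the original platform): since the scenario
# switch only fires bus events, a consumer can subscribe to them like this.
# The helper below is a hypothetical addition and is not used by setup_platform.
def _example_scenario_listener(hass, logger):
    """Log every scenario_switch_triggered event (demo only)."""
    def _handle(event):
        logger.info(
            "Scenario %s triggered by SCS device %s",
            event.data[ATTR_SCENARIO_ID],
            event.data[ATTR_ENTITY_ID],
        )
    hass.bus.listen("scenario_switch_triggered", _handle)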
|
from abc import abstractmethod
from datetime import timedelta
import functools as ft
import logging
from typing import Any, Dict, List, Optional
import voluptuous as vol
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_TENTHS,
PRECISION_WHOLE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceDataType
from homeassistant.util.temperature import convert as convert_temperature
from .const import (
ATTR_AUX_HEAT,
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_HUMIDITY,
ATTR_MAX_TEMP,
ATTR_MIN_HUMIDITY,
ATTR_MIN_TEMP,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
ATTR_SWING_MODE,
ATTR_SWING_MODES,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ATTR_TARGET_TEMP_STEP,
DOMAIN,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_FAN_MODE,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
DEFAULT_MIN_TEMP = 7
DEFAULT_MAX_TEMP = 35
DEFAULT_MIN_HUMIDITY = 30
DEFAULT_MAX_HUMIDITY = 99
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=60)
CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE, ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH]
_LOGGER = logging.getLogger(__name__)
SET_TEMPERATURE_SCHEMA = vol.All(
cv.has_at_least_one_key(
ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW
),
make_entity_service_schema(
{
vol.Exclusive(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_HIGH, "temperature"): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_LOW, "temperature"): vol.Coerce(float),
vol.Optional(ATTR_HVAC_MODE): vol.In(HVAC_MODES),
}
),
)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up climate entities."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on")
component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off")
component.async_register_entity_service(
SERVICE_SET_HVAC_MODE,
{vol.Required(ATTR_HVAC_MODE): vol.In(HVAC_MODES)},
"async_set_hvac_mode",
)
component.async_register_entity_service(
SERVICE_SET_PRESET_MODE,
{vol.Required(ATTR_PRESET_MODE): cv.string},
"async_set_preset_mode",
[SUPPORT_PRESET_MODE],
)
component.async_register_entity_service(
SERVICE_SET_AUX_HEAT,
{vol.Required(ATTR_AUX_HEAT): cv.boolean},
async_service_aux_heat,
[SUPPORT_AUX_HEAT],
)
component.async_register_entity_service(
SERVICE_SET_TEMPERATURE,
SET_TEMPERATURE_SCHEMA,
async_service_temperature_set,
[SUPPORT_TARGET_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE_RANGE],
)
component.async_register_entity_service(
SERVICE_SET_HUMIDITY,
{vol.Required(ATTR_HUMIDITY): vol.Coerce(float)},
"async_set_humidity",
[SUPPORT_TARGET_HUMIDITY],
)
component.async_register_entity_service(
SERVICE_SET_FAN_MODE,
{vol.Required(ATTR_FAN_MODE): cv.string},
"async_set_fan_mode",
[SUPPORT_FAN_MODE],
)
component.async_register_entity_service(
SERVICE_SET_SWING_MODE,
{vol.Required(ATTR_SWING_MODE): cv.string},
"async_set_swing_mode",
[SUPPORT_SWING_MODE],
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass: HomeAssistantType, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class ClimateEntity(Entity):
"""Representation of a climate entity."""
@property
def state(self) -> str:
"""Return the current state."""
return self.hvac_mode
@property
def precision(self) -> float:
"""Return the precision of the system."""
if self.hass.config.units.temperature_unit == TEMP_CELSIUS:
return PRECISION_TENTHS
return PRECISION_WHOLE
@property
def capability_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the capability attributes."""
supported_features = self.supported_features
data = {
ATTR_HVAC_MODES: self.hvac_modes,
ATTR_MIN_TEMP: show_temp(
self.hass, self.min_temp, self.temperature_unit, self.precision
),
ATTR_MAX_TEMP: show_temp(
self.hass, self.max_temp, self.temperature_unit, self.precision
),
}
if self.target_temperature_step:
data[ATTR_TARGET_TEMP_STEP] = self.target_temperature_step
if supported_features & SUPPORT_TARGET_HUMIDITY:
data[ATTR_MIN_HUMIDITY] = self.min_humidity
data[ATTR_MAX_HUMIDITY] = self.max_humidity
if supported_features & SUPPORT_FAN_MODE:
data[ATTR_FAN_MODES] = self.fan_modes
if supported_features & SUPPORT_PRESET_MODE:
data[ATTR_PRESET_MODES] = self.preset_modes
if supported_features & SUPPORT_SWING_MODE:
data[ATTR_SWING_MODES] = self.swing_modes
return data
@property
def state_attributes(self) -> Dict[str, Any]:
"""Return the optional state attributes."""
supported_features = self.supported_features
data = {
ATTR_CURRENT_TEMPERATURE: show_temp(
self.hass,
self.current_temperature,
self.temperature_unit,
self.precision,
),
}
if supported_features & SUPPORT_TARGET_TEMPERATURE:
data[ATTR_TEMPERATURE] = show_temp(
self.hass,
self.target_temperature,
self.temperature_unit,
self.precision,
)
if supported_features & SUPPORT_TARGET_TEMPERATURE_RANGE:
data[ATTR_TARGET_TEMP_HIGH] = show_temp(
self.hass,
self.target_temperature_high,
self.temperature_unit,
self.precision,
)
data[ATTR_TARGET_TEMP_LOW] = show_temp(
self.hass,
self.target_temperature_low,
self.temperature_unit,
self.precision,
)
if self.current_humidity is not None:
data[ATTR_CURRENT_HUMIDITY] = self.current_humidity
if supported_features & SUPPORT_TARGET_HUMIDITY:
data[ATTR_HUMIDITY] = self.target_humidity
if supported_features & SUPPORT_FAN_MODE:
data[ATTR_FAN_MODE] = self.fan_mode
if self.hvac_action:
data[ATTR_HVAC_ACTION] = self.hvac_action
if supported_features & SUPPORT_PRESET_MODE:
data[ATTR_PRESET_MODE] = self.preset_mode
if supported_features & SUPPORT_SWING_MODE:
data[ATTR_SWING_MODE] = self.swing_mode
if supported_features & SUPPORT_AUX_HEAT:
data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF
return data
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement used by the platform."""
raise NotImplementedError()
@property
def current_humidity(self) -> Optional[int]:
"""Return the current humidity."""
return None
@property
def target_humidity(self) -> Optional[int]:
"""Return the humidity we try to reach."""
return None
@property
@abstractmethod
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
@property
@abstractmethod
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
return None
@property
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
return None
@property
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
return None
@property
def target_temperature_step(self) -> Optional[float]:
"""Return the supported step of target temperature."""
return None
@property
def target_temperature_high(self) -> Optional[float]:
"""Return the highbound target temperature we try to reach.
Requires SUPPORT_TARGET_TEMPERATURE_RANGE.
"""
raise NotImplementedError
@property
def target_temperature_low(self) -> Optional[float]:
"""Return the lowbound target temperature we try to reach.
Requires SUPPORT_TARGET_TEMPERATURE_RANGE.
"""
raise NotImplementedError
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
raise NotImplementedError
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
raise NotImplementedError
@property
def is_aux_heat(self) -> Optional[bool]:
"""Return true if aux heater.
Requires SUPPORT_AUX_HEAT.
"""
raise NotImplementedError
@property
def fan_mode(self) -> Optional[str]:
"""Return the fan setting.
Requires SUPPORT_FAN_MODE.
"""
raise NotImplementedError
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return the list of available fan modes.
Requires SUPPORT_FAN_MODE.
"""
raise NotImplementedError
@property
def swing_mode(self) -> Optional[str]:
"""Return the swing setting.
Requires SUPPORT_SWING_MODE.
"""
raise NotImplementedError
@property
def swing_modes(self) -> Optional[List[str]]:
"""Return the list of available swing modes.
Requires SUPPORT_SWING_MODE.
"""
raise NotImplementedError
def set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
raise NotImplementedError()
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
await self.hass.async_add_executor_job(
ft.partial(self.set_temperature, **kwargs)
)
def set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
raise NotImplementedError()
async def async_set_humidity(self, humidity: int) -> None:
"""Set new target humidity."""
await self.hass.async_add_executor_job(self.set_humidity, humidity)
def set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
raise NotImplementedError()
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
await self.hass.async_add_executor_job(self.set_fan_mode, fan_mode)
def set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
raise NotImplementedError()
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
await self.hass.async_add_executor_job(self.set_hvac_mode, hvac_mode)
def set_swing_mode(self, swing_mode: str) -> None:
"""Set new target swing operation."""
raise NotImplementedError()
async def async_set_swing_mode(self, swing_mode: str) -> None:
"""Set new target swing operation."""
await self.hass.async_add_executor_job(self.set_swing_mode, swing_mode)
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
raise NotImplementedError()
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self.hass.async_add_executor_job(self.set_preset_mode, preset_mode)
def turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
raise NotImplementedError()
async def async_turn_aux_heat_on(self) -> None:
"""Turn auxiliary heater on."""
await self.hass.async_add_executor_job(self.turn_aux_heat_on)
def turn_aux_heat_off(self) -> None:
"""Turn auxiliary heater off."""
raise NotImplementedError()
async def async_turn_aux_heat_off(self) -> None:
"""Turn auxiliary heater off."""
await self.hass.async_add_executor_job(self.turn_aux_heat_off)
async def async_turn_on(self) -> None:
"""Turn the entity on."""
if hasattr(self, "turn_on"):
# pylint: disable=no-member
await self.hass.async_add_executor_job(self.turn_on)
return
# Fake turn on
for mode in (HVAC_MODE_HEAT_COOL, HVAC_MODE_HEAT, HVAC_MODE_COOL):
if mode not in self.hvac_modes:
continue
await self.async_set_hvac_mode(mode)
break
async def async_turn_off(self) -> None:
"""Turn the entity off."""
if hasattr(self, "turn_off"):
# pylint: disable=no-member
await self.hass.async_add_executor_job(self.turn_off)
return
# Fake turn off
if HVAC_MODE_OFF in self.hvac_modes:
await self.async_set_hvac_mode(HVAC_MODE_OFF)
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
raise NotImplementedError()
@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return convert_temperature(
DEFAULT_MIN_TEMP, TEMP_CELSIUS, self.temperature_unit
)
@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return convert_temperature(
DEFAULT_MAX_TEMP, TEMP_CELSIUS, self.temperature_unit
)
@property
def min_humidity(self) -> int:
"""Return the minimum humidity."""
return DEFAULT_MIN_HUMIDITY
@property
def max_humidity(self) -> int:
"""Return the maximum humidity."""
return DEFAULT_MAX_HUMIDITY
async def async_service_aux_heat(
entity: ClimateEntity, service: ServiceDataType
) -> None:
"""Handle aux heat service."""
if service.data[ATTR_AUX_HEAT]:
await entity.async_turn_aux_heat_on()
else:
await entity.async_turn_aux_heat_off()
async def async_service_temperature_set(
entity: ClimateEntity, service: ServiceDataType
) -> None:
"""Handle set temperature service."""
hass = entity.hass
kwargs = {}
for value, temp in service.data.items():
if value in CONVERTIBLE_ATTRIBUTE:
kwargs[value] = convert_temperature(
temp, hass.config.units.temperature_unit, entity.temperature_unit
)
else:
kwargs[value] = temp
await entity.async_set_temperature(**kwargs)
class ClimateDevice(ClimateEntity):
"""Representation of a climate entity (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"ClimateDevice is deprecated, modify %s to extend ClimateEntity",
cls.__name__,
)
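# Minimal illustrative subclass (not part of this module): it shows roughly
# the smallest set of members a platform implements on top of ClimateEntity.
# The class and its fixed values are hypothetical and exist only as a sketch.
class ExampleThermostat(ClimateEntity):
    """A fixed-setpoint demo thermostat."""
    def __init__(self):
        """Initialize the demo thermostat with a 21 °C heating setpoint."""
        self._target = 21.0
        self._mode = HVAC_MODE_HEAT
    @property
    def temperature_unit(self) -> str:
        """Return Celsius as the native unit."""
        return TEMP_CELSIUS
    @property
    def hvac_mode(self) -> str:
        """Return the current mode (heat or off)."""
        return self._mode
    @property
    def hvac_modes(self) -> List[str]:
        """Return the supported modes."""
        return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
    @property
    def target_temperature(self) -> Optional[float]:
        """Return the heating setpoint."""
        return self._target
    @property
    def supported_features(self) -> int:
        """Only a single target temperature is supported."""
        return SUPPORT_TARGET_TEMPERATURE
    def set_hvac_mode(self, hvac_mode: str) -> None:
        """Switch between heat and off."""
        self._mode = hvac_mode
    def set_temperature(self, **kwargs) -> None:
        """Update the setpoint from the service call data."""
        if ATTR_TEMPERATURE in kwargs:
            self._target = kwargs[ATTR_TEMPERATURE]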
|
import pytest
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.x509 import UniformResourceIdentifier
from lemur.certificates.verify import verify_string, crl_verify
from lemur.utils import mktempfile
from .vectors import INTERMEDIATE_CERT_STR
def test_verify_simple_cert():
"""Simple certificate without CRL or OCSP."""
# Verification returns None if there are no means to verify a cert
assert verify_string(INTERMEDIATE_CERT_STR, "") is None
def test_verify_crl_unknown_scheme(cert_builder, private_key):
"""Unknown distribution point URI schemes should be ignored."""
ldap_uri = "ldap://ldap.example.org/cn=Example%20Certificate%20Authority?certificateRevocationList;binary"
crl_dp = x509.DistributionPoint(
[UniformResourceIdentifier(ldap_uri)],
relative_name=None,
reasons=None,
crl_issuer=None,
)
cert = cert_builder.add_extension(
x509.CRLDistributionPoints([crl_dp]), critical=False
).sign(private_key, hashes.SHA256(), default_backend())
with mktempfile() as cert_tmp:
with open(cert_tmp, "wb") as f:
f.write(cert.public_bytes(serialization.Encoding.PEM))
# Must not raise exception
crl_verify(cert, cert_tmp)
def test_verify_crl_unreachable(cert_builder, private_key):
"""Unreachable CRL distribution point results in error."""
    crl_uri = "http://invalid.example.org/crl/foobar.crl"
    crl_dp = x509.DistributionPoint(
        [UniformResourceIdentifier(crl_uri)],
relative_name=None,
reasons=None,
crl_issuer=None,
)
cert = cert_builder.add_extension(
x509.CRLDistributionPoints([crl_dp]), critical=False
).sign(private_key, hashes.SHA256(), default_backend())
with mktempfile() as cert_tmp:
with open(cert_tmp, "wb") as f:
f.write(cert.public_bytes(serialization.Encoding.PEM))
with pytest.raises(Exception, match="Unable to retrieve CRL:"):
crl_verify(cert, cert_tmp)
|
import os
from os import path as op
import shutil
import zipfile
import sys
import pytest
from mne import datasets, read_labels_from_annot, write_labels_to_annot
from mne.datasets import testing
from mne.datasets._fsaverage.base import _set_montage_coreg_path
from mne.datasets.utils import _manifest_check_download
from mne.utils import (run_tests_if_main, requires_good_network, modified_env,
get_subjects_dir, ArgvSetter, _pl, use_log_level,
catch_logging, hashfunc)
subjects_dir = op.join(testing.data_path(download=False), 'subjects')
def test_datasets_basic(tmpdir):
"""Test simple dataset functions."""
# XXX 'hf_sef' and 'misc' do not conform to these standards
for dname in ('sample', 'somato', 'spm_face', 'testing', 'opm',
'bst_raw', 'bst_auditory', 'bst_resting', 'multimodal',
'bst_phantom_ctf', 'bst_phantom_elekta', 'kiloword',
'mtrf', 'phantom_4dbti',
'visual_92_categories', 'fieldtrip_cmc'):
if dname.startswith('bst'):
dataset = getattr(datasets.brainstorm, dname)
check_name = 'brainstorm.%s' % (dname,)
else:
dataset = getattr(datasets, dname)
check_name = dname
if dataset.data_path(download=False) != '':
assert isinstance(dataset.get_version(), str)
assert datasets.utils.has_dataset(check_name)
else:
assert dataset.get_version() is None
assert not datasets.utils.has_dataset(check_name)
print('%s: %s' % (dname, datasets.utils.has_dataset(check_name)))
tempdir = str(tmpdir)
# don't let it read from the config file to get the directory,
# force it to look for the default
with modified_env(**{'_MNE_FAKE_HOME_DIR': tempdir, 'SUBJECTS_DIR': None}):
assert (datasets.utils._get_path(None, 'foo', 'bar') ==
op.join(tempdir, 'mne_data'))
assert get_subjects_dir(None) is None
_set_montage_coreg_path()
sd = get_subjects_dir()
assert sd.endswith('MNE-fsaverage-data')
def _fake_fetch_file(url, destination, print_destination=False):
with open(destination, 'w') as fid:
fid.write(url)
@requires_good_network
def test_downloads(tmpdir):
"""Test dataset URL handling."""
# Try actually downloading a dataset
path = datasets._fake.data_path(path=str(tmpdir), update_path=False)
assert op.isfile(op.join(path, 'bar'))
assert datasets._fake.get_version() is None
@pytest.mark.slowtest
@testing.requires_testing_data
@requires_good_network
def test_fetch_parcellations(tmpdir):
"""Test fetching parcellations."""
this_subjects_dir = str(tmpdir)
os.mkdir(op.join(this_subjects_dir, 'fsaverage'))
os.mkdir(op.join(this_subjects_dir, 'fsaverage', 'label'))
os.mkdir(op.join(this_subjects_dir, 'fsaverage', 'surf'))
for hemi in ('lh', 'rh'):
shutil.copyfile(
op.join(subjects_dir, 'fsaverage', 'surf', '%s.white' % hemi),
op.join(this_subjects_dir, 'fsaverage', 'surf', '%s.white' % hemi))
    # speed up by pretending we have one of them
with open(op.join(this_subjects_dir, 'fsaverage', 'label',
'lh.aparc_sub.annot'), 'wb'):
pass
datasets.fetch_aparc_sub_parcellation(subjects_dir=this_subjects_dir)
with ArgvSetter(('--accept-hcpmmp-license',)):
datasets.fetch_hcp_mmp_parcellation(subjects_dir=this_subjects_dir)
for hemi in ('lh', 'rh'):
assert op.isfile(op.join(this_subjects_dir, 'fsaverage', 'label',
'%s.aparc_sub.annot' % hemi))
# test our annot round-trips here
kwargs = dict(subject='fsaverage', hemi='both', sort=False,
subjects_dir=this_subjects_dir)
labels = read_labels_from_annot(parc='HCPMMP1', **kwargs)
write_labels_to_annot(
labels, parc='HCPMMP1_round',
table_name='./left.fsaverage164.label.gii', **kwargs)
orig = op.join(this_subjects_dir, 'fsaverage', 'label', 'lh.HCPMMP1.annot')
first = hashfunc(orig)
new = orig[:-6] + '_round.annot'
second = hashfunc(new)
assert first == second
_zip_fnames = ['foo/foo.txt', 'foo/bar.txt', 'foo/baz.txt']
def _fake_zip_fetch(url, fname, hash_):
with zipfile.ZipFile(fname, 'w') as zipf:
with zipf.open('foo/', 'w'):
pass
for fname in _zip_fnames:
with zipf.open(fname, 'w'):
pass
@pytest.mark.skipif(sys.version_info < (3, 6),
reason="writing zip files requires python3.6 or higher")
@pytest.mark.parametrize('n_have', range(len(_zip_fnames)))
def test_manifest_check_download(tmpdir, n_have, monkeypatch):
"""Test our manifest downloader."""
monkeypatch.setattr(datasets.utils, '_fetch_file', _fake_zip_fetch)
destination = op.join(str(tmpdir), 'empty')
manifest_path = op.join(str(tmpdir), 'manifest.txt')
with open(manifest_path, 'w') as fid:
for fname in _zip_fnames:
fid.write('%s\n' % fname)
assert n_have in range(len(_zip_fnames) + 1)
assert not op.isdir(destination)
if n_have > 0:
os.makedirs(op.join(destination, 'foo'))
assert op.isdir(op.join(destination, 'foo'))
for fname in _zip_fnames:
assert not op.isfile(op.join(destination, fname))
for fname in _zip_fnames[:n_have]:
with open(op.join(destination, fname), 'w'):
pass
with catch_logging() as log:
with use_log_level(True):
url = hash_ = '' # we mock the _fetch_file so these are not used
_manifest_check_download(manifest_path, destination, url, hash_)
log = log.getvalue()
n_missing = 3 - n_have
assert ('%d file%s missing from' % (n_missing, _pl(n_missing))) in log
for want in ('Extracting missing', 'Successfully '):
if n_missing > 0:
assert want in log
else:
assert want not in log
assert op.isdir(destination)
for fname in _zip_fnames:
assert op.isfile(op.join(destination, fname))
run_tests_if_main()
|
import logging
import requests
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_POWER,
BinarySensorEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
from . import (
DOMAIN as VICARE_DOMAIN,
PYVICARE_ERROR,
VICARE_API,
VICARE_HEATING_TYPE,
VICARE_NAME,
HeatingType,
)
_LOGGER = logging.getLogger(__name__)
CONF_GETTER = "getter"
SENSOR_CIRCULATION_PUMP_ACTIVE = "circulationpump_active"
SENSOR_BURNER_ACTIVE = "burner_active"
SENSOR_COMPRESSOR_ACTIVE = "compressor_active"
SENSOR_TYPES = {
SENSOR_CIRCULATION_PUMP_ACTIVE: {
CONF_NAME: "Circulation pump active",
CONF_DEVICE_CLASS: DEVICE_CLASS_POWER,
CONF_GETTER: lambda api: api.getCirculationPumpActive(),
},
# gas sensors
SENSOR_BURNER_ACTIVE: {
CONF_NAME: "Burner active",
CONF_DEVICE_CLASS: DEVICE_CLASS_POWER,
CONF_GETTER: lambda api: api.getBurnerActive(),
},
# heatpump sensors
SENSOR_COMPRESSOR_ACTIVE: {
CONF_NAME: "Compressor active",
CONF_DEVICE_CLASS: DEVICE_CLASS_POWER,
CONF_GETTER: lambda api: api.getCompressorActive(),
},
}
SENSORS_GENERIC = [SENSOR_CIRCULATION_PUMP_ACTIVE]
SENSORS_BY_HEATINGTYPE = {
HeatingType.gas: [SENSOR_BURNER_ACTIVE],
HeatingType.heatpump: [SENSOR_COMPRESSOR_ACTIVE],
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the ViCare sensor devices."""
if discovery_info is None:
return
vicare_api = hass.data[VICARE_DOMAIN][VICARE_API]
heating_type = hass.data[VICARE_DOMAIN][VICARE_HEATING_TYPE]
sensors = SENSORS_GENERIC.copy()
if heating_type != HeatingType.generic:
sensors.extend(SENSORS_BY_HEATINGTYPE[heating_type])
add_entities(
[
ViCareBinarySensor(
hass.data[VICARE_DOMAIN][VICARE_NAME], vicare_api, sensor
)
for sensor in sensors
]
)
class ViCareBinarySensor(BinarySensorEntity):
"""Representation of a ViCare sensor."""
def __init__(self, name, api, sensor_type):
"""Initialize the sensor."""
self._sensor = SENSOR_TYPES[sensor_type]
self._name = f"{name} {self._sensor[CONF_NAME]}"
self._api = api
self._sensor_type = sensor_type
self._state = None
@property
def available(self):
"""Return True if entity is available."""
return self._state is not None and self._state != PYVICARE_ERROR
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._api.service.id}-{self._sensor_type}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the sensor."""
return self._state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return self._sensor[CONF_DEVICE_CLASS]
def update(self):
"""Update state of sensor."""
try:
self._state = self._sensor[CONF_GETTER](self._api)
except requests.exceptions.ConnectionError:
_LOGGER.error("Unable to retrieve data from ViCare server")
except ValueError:
_LOGGER.error("Unable to decode data from ViCare server")
|
import argparse
import sys
import time
from kazoo.client import KazooClient
from service_configuration_lib import DEFAULT_SOA_DIR
from paasta_tools.deployd.common import ServiceInstance
from paasta_tools.deployd.queue import ZKDelayDeadlineQueue
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import NoConfigurationForServiceError
from paasta_tools.utils import PaastaColors
from paasta_tools.utils import validate_service_instance
def parse_args(default_bounce_by_delay_secs):
parser = argparse.ArgumentParser(
description="Add a service instance to the deploy queue",
)
parser.add_argument(
"--bounce-by-delay-secs",
help="Number of seconds to wait before considering this entry late. Default: %(default)s",
dest="bounce_by_delay_secs",
type=float,
default=default_bounce_by_delay_secs,
)
parser.add_argument(
"service_instance",
help="The service.instance to add to the deploy queue",
type=str,
)
return parser.parse_args()
def main():
system_paasta_config = load_system_paasta_config()
args = parse_args(system_paasta_config.get_deployd_startup_bounce_deadline())
service, instance = args.service_instance.split(".", 1)
try:
validate_service_instance(
service,
instance,
cluster=system_paasta_config.get_cluster(),
soa_dir=DEFAULT_SOA_DIR,
)
except NoConfigurationForServiceError as e:
print(PaastaColors.red(str(e)))
sys.exit(1)
service_instance = ServiceInstance(
service=service,
instance=instance,
bounce_by=time.time() + args.bounce_by_delay_secs,
wait_until=time.time(),
watcher="manually_added",
failures=0,
enqueue_time=time.time(),
bounce_start_time=time.time(),
)
zk_client = KazooClient(hosts=system_paasta_config.get_zk_hosts())
zk_client.start()
queue = ZKDelayDeadlineQueue(client=zk_client)
queue.put(service_instance)
if __name__ == "__main__":
sys.exit(main())
|
import hangups
from common import run_example
async def retrieve_suggested_contacts(client, _):
request = hangups.hangouts_pb2.GetSuggestedEntitiesRequest(
request_header=client.get_request_header(),
max_count=100,
)
res = await client.get_suggested_entities(request)
# Print the list of entities in the response.
for entity in res.entity:
print('{} ({})'.format(
entity.properties.display_name, entity.id.gaia_id
))
if __name__ == '__main__':
run_example(retrieve_suggested_contacts)
|