import json
from django.urls import reverse
import weblate.machinery
from weblate.trans.tests.test_views import FixtureTestCase
from weblate.utils.classloader import load_class
class JSViewsTest(FixtureTestCase):
"""Testing of AJAX/JS views."""
@staticmethod
def ensure_dummy_mt():
"""Ensure we have dummy mt installed."""
if "dummy" in weblate.machinery.MACHINE_TRANSLATION_SERVICES:
return
name = "weblate.machinery.dummy.DummyTranslation"
service = load_class(name, "TEST")()
weblate.machinery.MACHINE_TRANSLATION_SERVICES[service.mtid] = service
def test_translate(self):
self.ensure_dummy_mt()
unit = self.get_unit()
response = self.client.post(
reverse("js-translate", kwargs={"unit_id": unit.id, "service": "dummy"})
)
self.assertContains(response, "Ahoj")
data = json.loads(response.content.decode())
self.assertEqual(
data["translations"],
[
{
"quality": 100,
"service": "Dummy",
"text": "Nazdar světe!",
"source": "Hello, world!\n",
},
{
"quality": 100,
"service": "Dummy",
"text": "Ahoj světe!",
"source": "Hello, world!\n",
},
],
)
# Invalid service
response = self.client.post(
reverse("js-translate", kwargs={"unit_id": unit.id, "service": "invalid"})
)
self.assertEqual(response.status_code, 404)
def test_memory(self):
unit = self.get_unit()
url = reverse("js-memory", kwargs={"unit_id": unit.id})
# Missing param
response = self.client.post(url)
self.assertEqual(response.status_code, 400)
# Valid query
response = self.client.post(url, {"q": "a"})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content.decode())
self.assertEqual(data["service"], "Weblate Translation Memory")
def test_get_unit_translations(self):
unit = self.get_unit()
response = self.client.get(
reverse("js-unit-translations", kwargs={"unit_id": unit.id})
)
self.assertContains(response, 'href="/translate/')
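As background, load_class above resolves a dotted path such as "weblate.machinery.dummy.DummyTranslation" into a class object; a minimal sketch of that pattern (hypothetical helper, not Weblate's actual implementation; the real helper's second argument is assumed here to matter only for error reporting):

from importlib import import_module

def load_class_sketch(name, setting):
    """Resolve 'pkg.module.ClassName' into the class object it names."""
    module_name, _, class_name = name.rpartition(".")
    module = import_module(module_name)  # import 'pkg.module'
    return getattr(module, class_name)   # look up 'ClassName' on it

# cls = load_class_sketch("weblate.machinery.dummy.DummyTranslation", "TEST")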
|
import re
from haffmpeg.tools import FFVersion
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONTENT_TYPE_MULTIPART,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
DOMAIN = "ffmpeg"
SERVICE_START = "start"
SERVICE_STOP = "stop"
SERVICE_RESTART = "restart"
SIGNAL_FFMPEG_START = "ffmpeg.start"
SIGNAL_FFMPEG_STOP = "ffmpeg.stop"
SIGNAL_FFMPEG_RESTART = "ffmpeg.restart"
DATA_FFMPEG = "ffmpeg"
CONF_INITIAL_STATE = "initial_state"
CONF_INPUT = "input"
CONF_FFMPEG_BIN = "ffmpeg_bin"
CONF_EXTRA_ARGUMENTS = "extra_arguments"
CONF_OUTPUT = "output"
DEFAULT_BINARY = "ffmpeg"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Optional(CONF_FFMPEG_BIN, default=DEFAULT_BINARY): cv.string}
)
},
extra=vol.ALLOW_EXTRA,
)
SERVICE_FFMPEG_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})
async def async_setup(hass, config):
"""Set up the FFmpeg component."""
conf = config.get(DOMAIN, {})
manager = FFmpegManager(hass, conf.get(CONF_FFMPEG_BIN, DEFAULT_BINARY))
await manager.async_get_version()
# Register service
async def async_service_handle(service):
"""Handle service ffmpeg process."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
if service.service == SERVICE_START:
async_dispatcher_send(hass, SIGNAL_FFMPEG_START, entity_ids)
elif service.service == SERVICE_STOP:
async_dispatcher_send(hass, SIGNAL_FFMPEG_STOP, entity_ids)
else:
async_dispatcher_send(hass, SIGNAL_FFMPEG_RESTART, entity_ids)
hass.services.async_register(
DOMAIN, SERVICE_START, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_STOP, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_RESTART, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
)
hass.data[DATA_FFMPEG] = manager
return True
class FFmpegManager:
"""Helper for ha-ffmpeg."""
def __init__(self, hass, ffmpeg_bin):
"""Initialize helper."""
self.hass = hass
self._cache = {}
self._bin = ffmpeg_bin
self._version = None
self._major_version = None
@property
def binary(self):
"""Return ffmpeg binary from config."""
return self._bin
async def async_get_version(self):
"""Return ffmpeg version."""
ffversion = FFVersion(self._bin, self.hass.loop)
self._version = await ffversion.get_version()
self._major_version = None
if self._version is not None:
result = re.search(r"(\d+)\.", self._version)
if result is not None:
self._major_version = int(result.group(1))
return self._version, self._major_version
@property
def ffmpeg_stream_content_type(self):
"""Return HTTP content type for ffmpeg stream."""
if self._major_version is not None and self._major_version > 3:
return CONTENT_TYPE_MULTIPART.format("ffmpeg")
return CONTENT_TYPE_MULTIPART.format("ffserver")
class FFmpegBase(Entity):
"""Interface object for FFmpeg."""
def __init__(self, initial_state=True):
"""Initialize ffmpeg base object."""
self.ffmpeg = None
self.initial_state = initial_state
async def async_added_to_hass(self):
"""Register dispatcher & events.
This method is a coroutine.
"""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_FFMPEG_START, self._async_start_ffmpeg
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_FFMPEG_STOP, self._async_stop_ffmpeg
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_FFMPEG_RESTART, self._async_restart_ffmpeg
)
)
# register start/stop
self._async_register_events()
@property
def available(self):
"""Return True if entity is available."""
return self.ffmpeg.is_running
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
async def _async_start_ffmpeg(self, entity_ids):
"""Start a FFmpeg process.
This method is a coroutine.
"""
raise NotImplementedError()
async def _async_stop_ffmpeg(self, entity_ids):
"""Stop a FFmpeg process.
This method is a coroutine.
"""
if entity_ids is None or self.entity_id in entity_ids:
await self.ffmpeg.close()
async def _async_restart_ffmpeg(self, entity_ids):
"""Stop a FFmpeg process.
This method is a coroutine.
"""
if entity_ids is None or self.entity_id in entity_ids:
await self._async_stop_ffmpeg(None)
await self._async_start_ffmpeg(None)
@callback
def _async_register_events(self):
"""Register a FFmpeg process/device."""
async def async_shutdown_handle(event):
"""Stop FFmpeg process."""
await self._async_stop_ffmpeg(None)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown_handle)
# start on startup
if not self.initial_state:
return
async def async_start_handle(event):
"""Start FFmpeg process."""
await self._async_start_ffmpeg(None)
self.async_write_ha_state()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, async_start_handle)
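The major-version parsing in FFmpegManager.async_get_version can be exercised in isolation; a minimal sketch of the same regex logic (standalone, not part of the component):

import re

def parse_major_version(version):
    """Extract the leading major version from a string like '4.2.1'."""
    if version is None:
        return None
    match = re.search(r"(\d+)\.", version)
    return int(match.group(1)) if match else None

assert parse_major_version("4.2.1") == 4
assert parse_major_version(None) is None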
|
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
from .const import ATTR_MANUFACTURER, DEFAULT_NAME, DOMAIN
CONF_SOURCES = "sources"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_SOURCES): vol.Schema({cv.string: cv.string}),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
DUNEHD_PLAYER_SUPPORT = (
SUPPORT_PAUSE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_PLAY
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Dune HD media player platform."""
host = config.get(CONF_HOST)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: host}
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add Dune HD entities from a config_entry."""
unique_id = config_entry.entry_id
player = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([DuneHDPlayerEntity(player, DEFAULT_NAME, unique_id)], True)
class DuneHDPlayerEntity(MediaPlayerEntity):
"""Implementation of the Dune HD player."""
def __init__(self, player, name, unique_id):
"""Initialize entity to control Dune HD."""
self._player = player
self._name = name
self._media_title = None
self._state = None
self._unique_id = unique_id
def update(self):
"""Update internal status of the entity."""
self._state = self._player.update_state()
self.__update_title()
return True
@property
def state(self):
"""Return player state."""
state = STATE_OFF
if "playback_position" in self._state:
state = STATE_PLAYING
if self._state.get("player_state") in ("playing", "buffering", "photo_viewer"):
state = STATE_PLAYING
if int(self._state.get("playback_speed", 1234)) == 0:
state = STATE_PAUSED
if self._state.get("player_state") == "navigator":
state = STATE_ON
return state
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def available(self):
"""Return True if entity is available."""
return bool(self._state)
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self._unique_id)},
"name": DEFAULT_NAME,
"manufacturer": ATTR_MANUFACTURER,
}
@property
def volume_level(self):
"""Return the volume level of the media player (0..1)."""
return int(self._state.get("playback_volume", 0)) / 100
@property
def is_volume_muted(self):
"""Return a boolean if volume is currently muted."""
return int(self._state.get("playback_mute", 0)) == 1
@property
def supported_features(self):
"""Flag media player features that are supported."""
return DUNEHD_PLAYER_SUPPORT
def volume_up(self):
"""Volume up media player."""
self._state = self._player.volume_up()
def volume_down(self):
"""Volume down media player."""
self._state = self._player.volume_down()
def mute_volume(self, mute):
"""Mute/unmute player volume."""
self._state = self._player.mute(mute)
def turn_off(self):
"""Turn off media player."""
self._media_title = None
self._state = self._player.turn_off()
def turn_on(self):
"""Turn off media player."""
self._state = self._player.turn_on()
def media_play(self):
"""Play media player."""
self._state = self._player.play()
def media_pause(self):
"""Pause media player."""
self._state = self._player.pause()
@property
def media_title(self):
"""Return the current media source."""
self.__update_title()
if self._media_title:
return self._media_title
def __update_title(self):
if self._state.get("player_state") == "bluray_playback":
self._media_title = "Blu-Ray"
elif self._state.get("player_state") == "photo_viewer":
self._media_title = "Photo Viewer"
elif self._state.get("playback_url"):
self._media_title = self._state["playback_url"].split("/")[-1]
else:
self._media_title = None
def media_previous_track(self):
"""Send previous track command."""
self._state = self._player.previous_track()
def media_next_track(self):
"""Send next track command."""
self._state = self._player.next_track()
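The state property above derives the player state purely from the status dictionary, with later checks taking precedence; a standalone sketch of the same ladder with illustrative payloads:

def derive_state(status):
    """Replicate the precedence: playing < paused < navigator/on."""
    state = "off"
    if "playback_position" in status:
        state = "playing"
    if status.get("player_state") in ("playing", "buffering", "photo_viewer"):
        state = "playing"
    if int(status.get("playback_speed", 1234)) == 0:
        state = "paused"
    if status.get("player_state") == "navigator":
        state = "on"
    return state

assert derive_state({}) == "off"
assert derive_state({"playback_position": 10, "playback_speed": 0}) == "paused"
assert derive_state({"player_state": "navigator"}) == "on"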
|
from app.database.model import WebHook, Collaborator
from sqlalchemy.sql.expression import false
# Whether the user has read-only access (admin or collaborator)
def has_readonly_auth(user_id, webhook_id):
return has_admin_auth(user_id, webhook_id) or \
has_collaborator_auth(user_id, webhook_id)
# Whether the user has creator (admin) permission
def has_admin_auth(user_id, webhook_id):
return WebHook.query.filter_by(user_id=user_id,
id=webhook_id).first()
# Whether the user has collaborator (observer) permission
def has_collaborator_auth(user_id, webhook_id):
return Collaborator.query.filter_by(user_id=user_id,
webhook_id=webhook_id).first()
def has_auth_webhooks(user_id):
"""获取所有我有权访问的Webhooks"""
# create webhooks
created_webhooks = WebHook.query.filter_by(
user_id=user_id, deleted=False).all()
# collaborator webhooks
collaborated_webhooks = \
WebHook.query.join(Collaborator,
Collaborator.webhook_id == WebHook.id) \
.filter(Collaborator.user_id == user_id) \
.filter(WebHook.deleted == false()).all()
webhooks = created_webhooks + collaborated_webhooks
# Deduplicate and sort by id, newest first
webhooks = list(sorted(set(webhooks), key=lambda x: x.id, reverse=True))
return webhooks
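The final dedupe-and-sort relies on SQLAlchemy's session identity map making a row fetched twice the same object, so set() drops duplicates; the same pattern on plain value objects (illustrative stand-in):

from collections import namedtuple

Row = namedtuple("Row", "id")
rows = [Row(3), Row(1), Row(3), Row(2)]
# set() drops the duplicate id=3 row; reverse sort puts newest ids first.
unique_sorted = sorted(set(rows), key=lambda x: x.id, reverse=True)
assert [r.id for r in unique_sorted] == [3, 2, 1]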
|
from django.contrib import admin
from weblate.fonts.models import FontOverride
from weblate.wladmin.models import WeblateModelAdmin
class FontAdmin(WeblateModelAdmin):
list_display = ["family", "style", "project", "user"]
search_fields = ["family", "style"]
list_filter = [("project", admin.RelatedOnlyFieldListFilter)]
ordering = ["family", "style"]
class InlineFontOverrideAdmin(admin.TabularInline):
model = FontOverride
extra = 0
class FontGroupAdmin(WeblateModelAdmin):
list_display = ["name", "font", "project"]
search_fields = ["name", "font__family"]
list_filter = [("project", admin.RelatedOnlyFieldListFilter)]
ordering = ["name"]
inlines = [InlineFontOverrideAdmin]
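These ModelAdmin classes still need registering against an admin site; a hedged registration sketch (hypothetical wiring, since Weblate registers them through its own admin site in weblate.wladmin):

from django.contrib import admin
from weblate.fonts.models import Font, FontGroup

admin.site.register(Font, FontAdmin)
admin.site.register(FontGroup, FontGroupAdmin)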
|
from mongoengine import Document
from mongoengine import EmbeddedDocument
from mongoengine.fields import EmbeddedDocumentListField
from mongoengine.fields import ListField
from mongoengine.fields import StringField
class EntityValue(EmbeddedDocument):
value = StringField(required=True)
    synonyms = ListField(required=True, default=list)  # callable default avoids a shared mutable list
class Entity(Document):
name = StringField(max_length=100, required=True, unique=True)
entity_values = EmbeddedDocumentListField(EntityValue)
meta = {
'indexes': [
{
'fields': ['$name'],
'default_language': 'english'
}
]}
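A usage sketch for these documents, assuming a MongoDB connection is registered first (database name and data are illustrative):

from mongoengine import connect

connect("chatbot_db")  # hypothetical database name
entity = Entity(
    name="city",
    entity_values=[
        EntityValue(value="new york", synonyms=["nyc", "big apple"]),
    ],
)
entity.save()  # persists the document and enforces the unique 'name' index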
|
import json
import logging
import plexapi.exceptions
import requests.exceptions
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
MEDIA_TYPE_VIDEO,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_PAUSED, STATE_PLAYING
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.util import dt as dt_util
from .const import (
COMMON_PLAYERS,
CONF_SERVER_IDENTIFIER,
DISPATCHERS,
DOMAIN as PLEX_DOMAIN,
NAME_FORMAT,
PLEX_NEW_MP_SIGNAL,
PLEX_UPDATE_MEDIA_PLAYER_SIGNAL,
SERVERS,
)
from .media_browser import browse_media
LIVE_TV_SECTION = "-4"
PLAYLISTS_BROWSE_PAYLOAD = {
"title": "Playlists",
"media_content_id": "all",
"media_content_type": "playlists",
"can_play": False,
"can_expand": True,
}
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Plex media_player from a config entry."""
server_id = config_entry.data[CONF_SERVER_IDENTIFIER]
registry = await async_get_registry(hass)
@callback
def async_new_media_players(new_entities):
_async_add_entities(
hass, registry, config_entry, async_add_entities, server_id, new_entities
)
unsub = async_dispatcher_connect(
hass, PLEX_NEW_MP_SIGNAL.format(server_id), async_new_media_players
)
hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
_LOGGER.debug("New entity listener created")
@callback
def _async_add_entities(
hass, registry, config_entry, async_add_entities, server_id, new_entities
):
"""Set up Plex media_player entities."""
_LOGGER.debug("New entities: %s", new_entities)
entities = []
plexserver = hass.data[PLEX_DOMAIN][SERVERS][server_id]
for entity_params in new_entities:
plex_mp = PlexMediaPlayer(plexserver, **entity_params)
entities.append(plex_mp)
# Migration to per-server unique_ids
old_entity_id = registry.async_get_entity_id(
MP_DOMAIN, PLEX_DOMAIN, plex_mp.machine_identifier
)
if old_entity_id is not None:
new_unique_id = f"{server_id}:{plex_mp.machine_identifier}"
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
plex_mp.machine_identifier,
new_unique_id,
)
registry.async_update_entity(old_entity_id, new_unique_id=new_unique_id)
async_add_entities(entities, True)
class PlexMediaPlayer(MediaPlayerEntity):
"""Representation of a Plex device."""
def __init__(self, plex_server, device, player_source, session=None):
"""Initialize the Plex device."""
self.plex_server = plex_server
self.device = device
self.session = session
self.player_source = player_source
self._app_name = ""
self._available = False
self._device_protocol_capabilities = None
self._is_player_active = False
self._machine_identifier = device.machineIdentifier
self._make = ""
self._device_platform = None
self._device_product = None
self._device_title = None
self._device_version = None
self._name = None
self._player_state = "idle"
self._previous_volume_level = 1 # Used in fake muting
self._session_type = None
self._session_username = None
self._state = STATE_IDLE
self._volume_level = 1 # since we can't retrieve remotely
self._volume_muted = False # since we can't retrieve remotely
# General
self._media_content_id = None
self._media_content_rating = None
self._media_content_type = None
self._media_duration = None
self._media_image_url = None
self._media_summary = None
self._media_title = None
self._media_position = None
self._media_position_updated_at = None
# Music
self._media_album_artist = None
self._media_album_name = None
self._media_artist = None
self._media_track = None
# TV Show
self._media_episode = None
self._media_season = None
self._media_series_title = None
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
server_id = self.plex_server.machine_identifier
_LOGGER.debug("Added %s [%s]", self.entity_id, self.unique_id)
unsub = async_dispatcher_connect(
self.hass,
PLEX_UPDATE_MEDIA_PLAYER_SIGNAL.format(self.unique_id),
self.async_refresh_media_player,
)
self.hass.data[PLEX_DOMAIN][DISPATCHERS][server_id].append(unsub)
@callback
def async_refresh_media_player(self, device, session):
"""Set instance objects and trigger an entity state update."""
_LOGGER.debug("Refreshing %s [%s / %s]", self.entity_id, device, session)
self.device = device
self.session = session
self.async_schedule_update_ha_state(True)
def _clear_media_details(self):
"""Set all Media Items to None."""
# General
self._media_content_id = None
self._media_content_rating = None
self._media_content_type = None
self._media_duration = None
self._media_image_url = None
self._media_summary = None
self._media_title = None
# Music
self._media_album_artist = None
self._media_album_name = None
self._media_artist = None
self._media_track = None
# TV Show
self._media_episode = None
self._media_season = None
self._media_series_title = None
# Clear library Name
self._app_name = ""
def update(self):
"""Refresh key device data."""
self._clear_media_details()
        self._available = bool(self.device or self.session)
if self.device:
try:
device_url = self.device.url("/")
except plexapi.exceptions.BadRequest:
device_url = "127.0.0.1"
if "127.0.0.1" in device_url:
self.device.proxyThroughServer()
self._device_platform = self.device.platform
self._device_product = self.device.product
self._device_title = self.device.title
self._device_version = self.device.version
self._device_protocol_capabilities = self.device.protocolCapabilities
self._player_state = self.device.state
if not self.session:
self.force_idle()
else:
session_device = next(
(
p
for p in self.session.players
if p.machineIdentifier == self.device.machineIdentifier
),
None,
)
if session_device:
self._make = session_device.device or ""
self._player_state = session_device.state
self._device_platform = self._device_platform or session_device.platform
self._device_product = self._device_product or session_device.product
self._device_title = self._device_title or session_device.title
self._device_version = self._device_version or session_device.version
else:
_LOGGER.warning("No player associated with active session")
if self.session.usernames:
self._session_username = self.session.usernames[0]
# Calculate throttled position for proper progress display.
position = int(self.session.viewOffset / 1000)
now = dt_util.utcnow()
if self._media_position is not None:
pos_diff = position - self._media_position
time_diff = now - self._media_position_updated_at
if pos_diff != 0 and abs(time_diff.total_seconds() - pos_diff) > 5:
self._media_position_updated_at = now
self._media_position = position
else:
self._media_position_updated_at = now
self._media_position = position
self._media_content_id = self.session.ratingKey
self._media_content_rating = getattr(self.session, "contentRating", None)
name_parts = [self._device_product, self._device_title or self._device_platform]
if (self._device_product in COMMON_PLAYERS) and self.make:
# Add more context in name for likely duplicates
name_parts.append(self.make)
if self.username and self.username != self.plex_server.owner:
# Prepend username for shared/managed clients
name_parts.insert(0, self.username)
self._name = NAME_FORMAT.format(" - ".join(name_parts))
self._set_player_state()
if self._is_player_active and self.session is not None:
self._session_type = self.session.type
if self.session.duration:
self._media_duration = int(self.session.duration / 1000)
else:
self._media_duration = None
# title (movie name, tv episode name, music song name)
self._media_summary = self.session.summary
self._media_title = self.session.title
# media type
self._set_media_type()
if self.session.librarySectionID == LIVE_TV_SECTION:
self._app_name = "Live TV"
else:
self._app_name = (
self.session.section().title
if self.session.section() is not None
else ""
)
self._set_media_image()
else:
self._session_type = None
def _set_media_image(self):
thumb_url = self.session.thumbUrl
if (
self.media_content_type is MEDIA_TYPE_TVSHOW
and not self.plex_server.option_use_episode_art
):
if self.session.librarySectionID == LIVE_TV_SECTION:
thumb_url = self.session.grandparentThumb
else:
thumb_url = self.session.url(self.session.grandparentThumb)
if thumb_url is None:
_LOGGER.debug(
"Using media art because media thumb was not found: %s", self.name
)
thumb_url = self.session.url(self.session.art)
self._media_image_url = thumb_url
def _set_player_state(self):
if self._player_state == "playing":
self._is_player_active = True
self._state = STATE_PLAYING
elif self._player_state == "paused":
self._is_player_active = True
self._state = STATE_PAUSED
elif self.device:
self._is_player_active = False
self._state = STATE_IDLE
else:
self._is_player_active = False
self._state = STATE_OFF
def _set_media_type(self):
if self._session_type == "episode":
self._media_content_type = MEDIA_TYPE_TVSHOW
# season number (00)
self._media_season = self.session.seasonNumber
# show name
self._media_series_title = self.session.grandparentTitle
# episode number (00)
if self.session.index is not None:
self._media_episode = self.session.index
elif self._session_type == "movie":
self._media_content_type = MEDIA_TYPE_MOVIE
if self.session.year is not None and self._media_title is not None:
self._media_title += f" ({self.session.year!s})"
elif self._session_type == "track":
self._media_content_type = MEDIA_TYPE_MUSIC
self._media_album_name = self.session.parentTitle
self._media_album_artist = self.session.grandparentTitle
self._media_track = self.session.index
self._media_artist = self.session.originalTitle
# use album artist if track artist is missing
if self._media_artist is None:
_LOGGER.debug(
"Using album artist because track artist was not found: %s",
self.name,
)
self._media_artist = self._media_album_artist
elif self._session_type == "clip":
_LOGGER.debug(
"Clip content type detected, compatibility may vary: %s", self.name
)
self._media_content_type = MEDIA_TYPE_VIDEO
def force_idle(self):
"""Force client to idle."""
self._player_state = STATE_IDLE
self._state = STATE_IDLE
self.session = None
self._clear_media_details()
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def unique_id(self):
"""Return the id of this plex client."""
return f"{self.plex_server.machine_identifier}:{self._machine_identifier}"
@property
def machine_identifier(self):
"""Return the Plex-provided identifier of this plex client."""
return self._machine_identifier
@property
def available(self):
"""Return the availability of the client."""
return self._available
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def username(self):
"""Return the username of the client owner."""
return self._session_username
@property
def app_name(self):
"""Return the library name of playing media."""
return self._app_name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def _active_media_plexapi_type(self):
"""Get the active media type required by PlexAPI commands."""
if self.media_content_type is MEDIA_TYPE_MUSIC:
return "music"
return "video"
@property
def media_content_id(self):
"""Return the content ID of current playing media."""
return self._media_content_id
@property
def media_content_type(self):
"""Return the content type of current playing media."""
return self._media_content_type
@property
def media_artist(self):
"""Return the artist of current playing media, music track only."""
return self._media_artist
@property
def media_album_name(self):
"""Return the album name of current playing media, music track only."""
return self._media_album_name
@property
def media_album_artist(self):
"""Return the album artist of current playing media, music only."""
return self._media_album_artist
@property
def media_track(self):
"""Return the track number of current playing media, music only."""
return self._media_track
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return self._media_duration
@property
def media_position(self):
"""Return the duration of current playing media in seconds."""
return self._media_position
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
return self._media_position_updated_at
@property
def media_image_url(self):
"""Return the image URL of current playing media."""
return self._media_image_url
@property
def media_summary(self):
"""Return the summary of current playing media."""
return self._media_summary
@property
def media_title(self):
"""Return the title of current playing media."""
return self._media_title
@property
def media_season(self):
"""Return the season of current playing media (TV Show only)."""
return self._media_season
@property
def media_series_title(self):
"""Return the title of the series of current playing media."""
return self._media_series_title
@property
def media_episode(self):
"""Return the episode of current playing media (TV Show only)."""
return self._media_episode
@property
def make(self):
"""Return the make of the device (ex. SHIELD Android TV)."""
return self._make
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self.device and "playback" in self._device_protocol_capabilities:
return (
SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_STOP
| SUPPORT_VOLUME_SET
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_VOLUME_MUTE
| SUPPORT_BROWSE_MEDIA
)
return SUPPORT_BROWSE_MEDIA | SUPPORT_PLAY_MEDIA
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.setVolume(int(volume * 100), self._active_media_plexapi_type)
self._volume_level = volume # store since we can't retrieve
@property
def volume_level(self):
"""Return the volume level of the client (0..1)."""
if (
self._is_player_active
and self.device
and "playback" in self._device_protocol_capabilities
):
return self._volume_level
@property
def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
if self._is_player_active and self.device:
return self._volume_muted
def mute_volume(self, mute):
"""Mute the volume.
Since we can't actually mute, we'll:
- On mute, store volume and set volume to 0
- On unmute, set volume to previously stored volume
"""
if not (self.device and "playback" in self._device_protocol_capabilities):
return
self._volume_muted = mute
if mute:
self._previous_volume_level = self._volume_level
self.set_volume_level(0)
else:
self.set_volume_level(self._previous_volume_level)
def media_play(self):
"""Send play command."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.play(self._active_media_plexapi_type)
def media_pause(self):
"""Send pause command."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.pause(self._active_media_plexapi_type)
def media_stop(self):
"""Send stop command."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.stop(self._active_media_plexapi_type)
def media_next_track(self):
"""Send next track command."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.skipNext(self._active_media_plexapi_type)
def media_previous_track(self):
"""Send previous track command."""
if self.device and "playback" in self._device_protocol_capabilities:
self.device.skipPrevious(self._active_media_plexapi_type)
def play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
if not (self.device and "playback" in self._device_protocol_capabilities):
_LOGGER.debug(
"Client is not currently accepting playback controls: %s", self.name
)
return
src = json.loads(media_id)
if isinstance(src, int):
src = {"plex_key": src}
shuffle = src.pop("shuffle", 0)
media = self.plex_server.lookup_media(media_type, **src)
if media is None:
_LOGGER.error("Media could not be found: %s", media_id)
return
_LOGGER.debug("Attempting to play %s on %s", media, self.name)
playqueue = self.plex_server.create_playqueue(media, shuffle=shuffle)
try:
self.device.playMedia(playqueue)
except requests.exceptions.ConnectTimeout:
_LOGGER.error("Timed out playing on %s", self.name)
@property
def device_state_attributes(self):
"""Return the scene state attributes."""
return {
"media_content_rating": self._media_content_rating,
"session_username": self.username,
"media_library_name": self._app_name,
"summary": self.media_summary,
"player_source": self.player_source,
}
@property
def device_info(self):
"""Return a device description for device registry."""
if self.machine_identifier is None:
return None
return {
"identifiers": {(PLEX_DOMAIN, self.machine_identifier)},
"manufacturer": self._device_platform or "Plex",
"model": self._device_product or self.make,
"name": self.name,
"sw_version": self._device_version,
"via_device": (PLEX_DOMAIN, self.plex_server.machine_identifier),
}
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
return await self.hass.async_add_executor_job(
browse_media,
self.entity_id,
self.plex_server,
media_content_type,
media_content_id,
)
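The position-throttling guard in update() only moves media_position_updated_at when playback genuinely jumps; a standalone sketch of that check with injected values (simplified, timestamps expressed as elapsed seconds):

def should_refresh(position, last_position, elapsed_seconds):
    """Mirror the guard: refresh on the first report or on a real seek."""
    if last_position is None:
        return True
    pos_diff = position - last_position
    # A jump is a position change that elapsed wall-clock time cannot explain.
    return pos_diff != 0 and abs(elapsed_seconds - pos_diff) > 5

assert should_refresh(10, None, 0)    # first report
assert not should_refresh(15, 10, 5)  # normal playback drift
assert should_refresh(300, 10, 5)     # user seeked forward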
|
from homeassistant.components.cover import CoverEntity
from . import (
ATTR_DISCOVER_CONFIG,
ATTR_DISCOVER_DEVICES,
DATA_TELLSTICK,
DEFAULT_SIGNAL_REPETITIONS,
TellstickDevice,
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tellstick covers."""
if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
return
signal_repetitions = discovery_info.get(
ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS
)
add_entities(
[
TellstickCover(hass.data[DATA_TELLSTICK][tellcore_id], signal_repetitions)
for tellcore_id in discovery_info[ATTR_DISCOVER_DEVICES]
],
True,
)
class TellstickCover(TellstickDevice, CoverEntity):
"""Representation of a Tellstick cover."""
@property
def is_closed(self):
"""Return the current position of the cover is not possible."""
return None
@property
def assumed_state(self):
"""Return True if unable to access real state of the entity."""
return True
def close_cover(self, **kwargs):
"""Close the cover."""
self._tellcore_device.down()
def open_cover(self, **kwargs):
"""Open the cover."""
self._tellcore_device.up()
def stop_cover(self, **kwargs):
"""Stop the cover."""
self._tellcore_device.stop()
def _parse_tellcore_data(self, tellcore_data):
"""Turn the value received from tellcore into something useful."""
def _parse_ha_data(self, kwargs):
"""Turn the value from HA into something useful."""
def _update_model(self, new_state, data):
"""Update the device entity state to match the arguments."""
|
from __future__ import unicode_literals
import os
from lib.fun.fun import cool
from lib.fun.decorator import magic
from lib.data.data import pyoptions
def handler_magic(*args):
"""[file]"""
args = list(args[0])
if len(args) >= 2:
path = args[1]
else:
exit(pyoptions.CRLF + cool.fuchsia("[!] Usage: {} {}".format(args[0], pyoptions.tools_info.get(args[0]))))
if not os.path.isfile(path):
exit(cool.red("[-] File don't exits" + pyoptions.CRLF))
@magic
def handler():
with open(path, 'r') as f:
for item in f.readlines():
yield item.strip()
|
from tqdm import tqdm
from matchzoo.engine.base_preprocessor import BasePreprocessor
from matchzoo import DataPack
from .chain_transform import chain_transform
from .build_vocab_unit import build_vocab_unit
from . import units
tqdm.pandas()
class NaivePreprocessor(BasePreprocessor):
"""
Naive preprocessor.
Example:
>>> import matchzoo as mz
>>> train_data = mz.datasets.toy.load_data()
>>> test_data = mz.datasets.toy.load_data(stage='test')
>>> preprocessor = mz.preprocessors.NaivePreprocessor()
>>> train_data_processed = preprocessor.fit_transform(train_data,
... verbose=0)
>>> type(train_data_processed)
<class 'matchzoo.data_pack.data_pack.DataPack'>
>>> test_data_transformed = preprocessor.transform(test_data,
... verbose=0)
>>> type(test_data_transformed)
<class 'matchzoo.data_pack.data_pack.DataPack'>
"""
def fit(self, data_pack: DataPack, verbose: int = 1):
"""
Fit pre-processing context for transformation.
:param data_pack: data_pack to be preprocessed.
:param verbose: Verbosity.
:return: :class:`NaivePreprocessor` instance.
"""
func = chain_transform(self._default_units())
data_pack = data_pack.apply_on_text(func, verbose=verbose)
vocab_unit = build_vocab_unit(data_pack, verbose=verbose)
self._context['vocab_unit'] = vocab_unit
return self
def transform(self, data_pack: DataPack, verbose: int = 1) -> DataPack:
"""
Apply transformation on data, adding vocabulary lookup and fixed-length padding.
:param data_pack: Inputs to be preprocessed.
:param verbose: Verbosity.
:return: Transformed data as :class:`DataPack` object.
"""
units_ = self._default_units()
units_.append(self._context['vocab_unit'])
units_.append(units.FixedLength(text_length=30, pad_mode='post'))
func = chain_transform(units_)
return data_pack.apply_on_text(func, verbose=verbose)
|
from io import open
import subprocess
from flask import current_app
from lemur.utils import mktempfile, mktemppath
from lemur.plugins.bases import ExportPlugin
from lemur.plugins import lemur_openssl as openssl
from lemur.common.utils import get_psuedo_random_string, parse_certificate
from lemur.common.defaults import common_name
def run_process(command):
"""
Runs a given command with Popen and wraps some
error handling around it.
:param command:
:return:
"""
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
current_app.logger.debug(command)
stdout, stderr = p.communicate()
if p.returncode != 0:
current_app.logger.debug(" ".join(command))
current_app.logger.error(stderr)
raise Exception(stderr)
def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase):
"""
Creates a PKCS#12 formatted file.
:param cert:
:param chain:
:param p12_tmp:
:param key:
:param alias:
:param passphrase:
"""
assert isinstance(cert, str)
assert isinstance(chain, str)
assert isinstance(key, str)
with mktempfile() as key_tmp:
with open(key_tmp, "w") as f:
f.write(key)
# Create PKCS12 keystore from private key and public certificate
with mktempfile() as cert_tmp:
with open(cert_tmp, "w") as f:
if chain:
f.writelines([cert.strip() + "\n", chain.strip() + "\n"])
else:
f.writelines([cert.strip() + "\n"])
run_process(
[
"openssl",
"pkcs12",
"-export",
"-name",
alias,
"-in",
cert_tmp,
"-inkey",
key_tmp,
"-out",
p12_tmp,
"-password",
"pass:{}".format(passphrase),
]
)
class OpenSSLExportPlugin(ExportPlugin):
title = "OpenSSL"
slug = "openssl-export"
description = "Is a loose interface to openssl and support various formats"
version = openssl.VERSION
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur"
options = [
{
"name": "type",
"type": "select",
"required": True,
"available": ["PKCS12 (.p12)"],
"helpMessage": "Choose the format you wish to export",
},
{
"name": "passphrase",
"type": "str",
"required": False,
"helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
"validation": "",
},
{
"name": "alias",
"type": "str",
"required": False,
"helpMessage": "Enter the alias you wish to use for the keystore.",
},
]
def export(self, body, chain, key, options, **kwargs):
"""
Generates a PKCS#12 archive.
:param key:
:param chain:
:param body:
:param options:
:param kwargs:
"""
if self.get_option("passphrase", options):
passphrase = self.get_option("passphrase", options)
else:
passphrase = get_psuedo_random_string()
if self.get_option("alias", options):
alias = self.get_option("alias", options)
else:
alias = common_name(parse_certificate(body))
type = self.get_option("type", options)
with mktemppath() as output_tmp:
if type == "PKCS12 (.p12)":
if not key:
raise Exception("Private Key required by {0}".format(type))
create_pkcs12(body, chain, output_tmp, key, alias, passphrase)
extension = "p12"
else:
raise Exception("Unable to export, unsupported type: {0}".format(type))
with open(output_tmp, "rb") as f:
raw = f.read()
return extension, passphrase, raw
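For orientation, a minimal driver sketch for create_pkcs12 (certificate and key contents are placeholders and must be real PEM data; run_process logs via current_app, so this assumes an active Flask application context):

with mktempfile() as p12_tmp:
    create_pkcs12(
        cert="-----BEGIN CERTIFICATE-----...",     # placeholder PEM
        chain="",                                  # empty string: no chain
        p12_tmp=p12_tmp,
        key="-----BEGIN RSA PRIVATE KEY-----...",  # placeholder PEM
        alias="example",
        passphrase="changeit",
    )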
|
import datetime
from homeassistant.components import geonetnz_quakes
from homeassistant.components.geo_location import ATTR_SOURCE
from homeassistant.components.geonetnz_quakes import DEFAULT_SCAN_INTERVAL, DOMAIN, FEED
from homeassistant.components.geonetnz_quakes.geo_location import (
ATTR_DEPTH,
ATTR_EXTERNAL_ID,
ATTR_LOCALITY,
ATTR_MAGNITUDE,
ATTR_MMI,
ATTR_QUALITY,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_FRIENDLY_NAME,
ATTR_ICON,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_TIME,
ATTR_UNIT_OF_MEASUREMENT,
CONF_RADIUS,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.geonetnz_quakes import _generate_mock_feed_entry
CONFIG = {geonetnz_quakes.DOMAIN: {CONF_RADIUS: 200}}
async def test_setup(hass):
"""Test the general setup of the integration."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Title 1",
15.5,
(38.0, -3.0),
locality="Locality 1",
attribution="Attribution 1",
time=datetime.datetime(2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc),
magnitude=5.7,
mmi=5,
depth=10.5,
quality="best",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345", "Title 2", 20.5, (38.1, -3.1), magnitude=4.6
)
mock_entry_3 = _generate_mock_feed_entry(
"3456", "Title 3", 25.5, (38.2, -3.2), locality="Locality 3"
)
mock_entry_4 = _generate_mock_feed_entry("4567", "Title 4", 12.5, (38.3, -3.3))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
assert await async_setup_component(hass, geonetnz_quakes.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
# 3 geolocation and 1 sensor entities
assert len(all_states) == 4
entity_registry = await async_get_registry(hass)
assert len(entity_registry.entities) == 4
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_LOCALITY: "Locality 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_TIME: datetime.datetime(
2018, 9, 22, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_MAGNITUDE: 5.7,
ATTR_DEPTH: 10.5,
ATTR_MMI: 5,
ATTR_QUALITY: "best",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.title_2")
assert state is not None
assert state.name == "Title 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "Title 2",
ATTR_MAGNITUDE: 4.6,
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.title_3")
assert state is not None
assert state.name == "Title 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Title 3",
ATTR_LOCALITY: "Locality 3",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
assert float(state.state) == 25.5
# Simulate an update - two existing, one new entry, one outdated entry
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_4, mock_entry_3]
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - error response, removes all entities
mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(entity_registry.entities) == 1
async def test_setup_imperial(hass):
"""Test the setup of the integration using imperial unit system."""
hass.config.units = IMPERIAL_SYSTEM
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry("1234", "Title 1", 15.5, (38.0, -3.0))
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_geojson_client.feed.GeoJsonFeed.update"
) as mock_feed_update, patch(
"aio_geojson_client.feed.GeoJsonFeed.last_timestamp", create=True
):
mock_feed_update.return_value = "OK", [mock_entry_1]
assert await async_setup_component(hass, geonetnz_quakes.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 2
# Test conversion of 200 miles to kilometers.
feeds = hass.data[DOMAIN][FEED]
assert feeds is not None
assert len(feeds) == 1
manager = list(feeds.values())[0]
# Ensure that the filter value in km is correctly set.
assert manager._feed_manager._feed._filter_radius == 321.8688
state = hass.states.get("geo_location.title_1")
assert state is not None
assert state.name == "Title 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Title 1",
ATTR_UNIT_OF_MEASUREMENT: "mi",
ATTR_SOURCE: "geonetnz_quakes",
ATTR_ICON: "mdi:pulse",
}
# 15.5km (as defined in mock entry) has been converted to 9.6mi.
assert float(state.state) == 9.6
|
import os
import re
import sys
import fnmatch
import os.path
# for command line options and supported environment variables, please
# see the end of 'setupinfo.py'
if (2, 7) != sys.version_info[:2] < (3, 5):
print("This lxml version requires Python 2.7, 3.5 or later.")
sys.exit(1)
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# make sure Cython finds include files in the project directory and not outside
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
import versioninfo
import setupinfo
# override these and pass --static for a static build. See
# doc/build.txt for more information. If you do not pass --static
# changing this will have no effect.
STATIC_INCLUDE_DIRS = []
STATIC_LIBRARY_DIRS = []
STATIC_CFLAGS = []
STATIC_BINARIES = []
# create lxml-version.h file
versioninfo.create_version_h()
lxml_version = versioninfo.version()
print("Building lxml version %s." % lxml_version)
OPTION_RUN_TESTS = setupinfo.has_option('run-tests')
branch_link = """
After an official release of a new stable series, bug fixes may become
available at
https://github.com/lxml/lxml/tree/lxml-%(branch_version)s .
Running ``easy_install lxml==%(branch_version)sbugfix`` will install
the unreleased branch state from
https://github.com/lxml/lxml/tarball/lxml-%(branch_version)s#egg=lxml-%(branch_version)sbugfix
as soon as a maintenance branch has been established. Note that this
requires Cython to be installed at an appropriate version for the build.
"""
if versioninfo.is_pre_release():
branch_link = ""
extra_options = {}
if 'setuptools' in sys.modules:
extra_options['zip_safe'] = False
extra_options['python_requires'] = (
# NOTE: keep in sync with Trove classifier list below.
'>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*')
try:
import pkg_resources
except ImportError:
pass
else:
f = open("requirements.txt", "r")
try:
deps = [str(req) for req in pkg_resources.parse_requirements(f)]
finally:
f.close()
extra_options['extras_require'] = {
'source': deps,
'cssselect': 'cssselect>=0.7',
'html5': 'html5lib',
'htmlsoup': 'BeautifulSoup4',
}
extra_options.update(setupinfo.extra_setup_args())
extra_options['package_data'] = {
'lxml': [
'etree.h',
'etree_api.h',
'lxml.etree.h',
'lxml.etree_api.h',
],
'lxml.includes': [
'*.pxd', '*.h'
],
'lxml.isoschematron': [
'resources/rng/iso-schematron.rng',
'resources/xsl/*.xsl',
'resources/xsl/iso-schematron-xslt1/*.xsl',
'resources/xsl/iso-schematron-xslt1/readme.txt'
],
}
extra_options['package_dir'] = {
'': 'src'
}
extra_options['packages'] = [
'lxml', 'lxml.includes', 'lxml.html', 'lxml.isoschematron'
]
def setup_extra_options():
is_interesting_package = re.compile('^(libxml|libxslt|libexslt)$').match
def extract_files(directories, pattern='*'):
def get_files(root, dir_path, files):
return [ (root, dir_path, filename)
for filename in fnmatch.filter(files, pattern) ]
file_list = []
for dir_path in directories:
dir_path = os.path.realpath(dir_path)
for root, dirs, files in os.walk(dir_path):
rel_dir = root[len(dir_path)+1:]
if is_interesting_package(rel_dir):
file_list.extend(get_files(root, rel_dir, files))
return file_list
def build_packages(files):
packages = {}
seen = set()
for root_path, rel_path, filename in files:
if filename in seen:
# libxml2/libxslt header filenames are unique
continue
seen.add(filename)
package_path = '.'.join(rel_path.split(os.sep))
if package_path in packages:
root, package_files = packages[package_path]
if root != root_path:
print("conflicting directories found for include package '%s': %s and %s"
% (package_path, root_path, root))
continue
else:
package_files = []
packages[package_path] = (root_path, package_files)
package_files.append(filename)
return packages
# Copy Global Extra Options
extra_opts = dict(extra_options)
# Build ext modules
ext_modules = setupinfo.ext_modules(
STATIC_INCLUDE_DIRS, STATIC_LIBRARY_DIRS,
STATIC_CFLAGS, STATIC_BINARIES)
extra_opts['ext_modules'] = ext_modules
packages = extra_opts.get('packages', list())
package_dir = extra_opts.get('package_dir', dict())
package_data = extra_opts.get('package_data', dict())
# Add lxml.include with (lxml, libxslt headers...)
# python setup.py build --static --static-deps install
# python setup.py bdist_wininst --static
if setupinfo.OPTION_STATIC:
include_dirs = [] # keep them in order
for extension in ext_modules:
for inc_dir in extension.include_dirs:
if inc_dir not in include_dirs:
include_dirs.append(inc_dir)
header_packages = build_packages(extract_files(include_dirs))
for package_path, (root_path, filenames) in header_packages.items():
if package_path:
package = 'lxml.includes.' + package_path
packages.append(package)
else:
package = 'lxml.includes'
package_data[package] = filenames
package_dir[package] = root_path
return extra_opts
setup(
name = "lxml",
version = lxml_version,
author="lxml dev team",
author_email="[email protected]",
maintainer="lxml dev team",
maintainer_email="[email protected]",
license="BSD",
url="https://lxml.de/",
# Commented out because this causes distutils to emit warnings
# `Unknown distribution option: 'bugtrack_url'`
# which distract folks from real causes of problems when troubleshooting
# bugtrack_url="https://bugs.launchpad.net/lxml",
description=(
"Powerful and Pythonic XML processing library"
" combining libxml2/libxslt with the ElementTree API."
),
long_description=((("""\
lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It
provides safe and convenient access to these libraries using the ElementTree
API.
It extends the ElementTree API significantly to offer support for XPath,
RelaxNG, XML Schema, XSLT, C14N and much more.
To contact the project, go to the `project home page
<https://lxml.de/>`_ or see our bug tracker at
https://launchpad.net/lxml
In case you want to use the current in-development version of lxml,
you can get it from the github repository at
https://github.com/lxml/lxml . Note that this requires Cython to
build the sources, see the build instructions on the project home
page. To the same end, running ``easy_install lxml==dev`` will
install lxml from
https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have
an appropriate version of Cython installed.
""" + branch_link) % {"branch_version": versioninfo.branch_version()}) +
versioninfo.changes()),
classifiers=[
versioninfo.dev_status(),
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Programming Language :: Cython',
# NOTE: keep in sync with 'python_requires' list above.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: C',
'Operating System :: OS Independent',
'Topic :: Text Processing :: Markup :: HTML',
'Topic :: Text Processing :: Markup :: XML',
'Topic :: Software Development :: Libraries :: Python Modules'
],
**setup_extra_options()
)
if OPTION_RUN_TESTS:
print("Running tests.")
import test
sys.exit( test.main(sys.argv[:1]) )
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import unittest
from absl import flags
from absl.testing import absltest
FLAGS = flags.FLAGS
flags.DEFINE_integer('test_id', 0, 'Which test to run.')
class HelperTest(absltest.TestCase):
def test_flags(self):
if FLAGS.test_id == 1:
self.assertEqual(FLAGS.test_random_seed, 301)
if os.name == 'nt':
# On Windows, it's always in the temp dir, which doesn't start with '/'.
expected_prefix = tempfile.gettempdir()
else:
expected_prefix = '/'
self.assertTrue(
FLAGS.test_tmpdir.startswith(expected_prefix),
'--test_tmpdir={} does not start with {}'.format(
FLAGS.test_tmpdir, expected_prefix))
self.assertTrue(os.access(FLAGS.test_tmpdir, os.W_OK))
elif FLAGS.test_id == 2:
self.assertEqual(FLAGS.test_random_seed, 321)
self.assertEqual(
FLAGS.test_srcdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_SRCDIR'])
self.assertEqual(
FLAGS.test_tmpdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_TMPDIR'])
elif FLAGS.test_id == 3:
self.assertEqual(FLAGS.test_random_seed, 123)
self.assertEqual(
FLAGS.test_srcdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_SRCDIR'])
self.assertEqual(
FLAGS.test_tmpdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_TMPDIR'])
elif FLAGS.test_id == 4:
self.assertEqual(FLAGS.test_random_seed, 221)
self.assertEqual(
FLAGS.test_srcdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_SRCDIR'])
self.assertEqual(
FLAGS.test_tmpdir,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_TEST_TMPDIR'])
else:
raise unittest.SkipTest(
'Not asked to run: --test_id={}'.format(FLAGS.test_id))
@unittest.expectedFailure
def test_expected_failure(self):
if FLAGS.test_id == 5:
self.assertEqual(1, 1) # Expected failure, got success.
else:
self.assertEqual(1, 2) # The expected failure.
def test_xml_env_vars(self):
if FLAGS.test_id == 6:
self.assertEqual(
FLAGS.xml_output_file,
os.environ['ABSLTEST_TEST_HELPER_EXPECTED_XML_OUTPUT_FILE'])
else:
raise unittest.SkipTest(
'Not asked to run: --test_id={}'.format(FLAGS.test_id))
class TempFileHelperTest(absltest.TestCase):
tempfile_cleanup = absltest.TempFileCleanup[os.environ.get(
'ABSLTEST_TEST_HELPER_TEMPFILE_CLEANUP', 'SUCCESS')]
def test_failure(self):
self.create_tempfile('failure')
self.fail('expected failure')
def test_success(self):
self.create_tempfile('success')
if __name__ == '__main__':
absltest.main()
|
from __future__ import print_function
import logging
import re
import urllib2
import kerberos as krb
class GssapiAuthError(Exception):
    """Raised on error during the authentication process."""
RGX = re.compile(r'(?:.*,)*\s*Negotiate\s*([^,]*),?', re.I)
def get_negotiate_value(headers):
for authreq in headers.getheaders('www-authenticate'):
match = RGX.search(authreq)
if match:
return match.group(1)
class HTTPGssapiAuthHandler(urllib2.BaseHandler):
"""Negotiate HTTP authentication using context from GSSAPI"""
handler_order = 400 # before Digest Auth
def __init__(self):
self._reset()
def _reset(self):
self._retried = 0
self._context = None
def clean_context(self):
if self._context is not None:
krb.authGSSClientClean(self._context)
def http_error_401(self, req, fp, code, msg, headers):
try:
if self._retried > 5:
raise urllib2.HTTPError(req.get_full_url(), 401,
"negotiate auth failed", headers, None)
self._retried += 1
logging.debug('gssapi handler, try %s' % self._retried)
negotiate = get_negotiate_value(headers)
if negotiate is None:
logging.debug('no negotiate value found in www-authenticate headers')
return None
logging.debug('HTTPGssapiAuthHandler: negotiate 1 is %r' % negotiate)
result, self._context = krb.authGSSClientInit("HTTP@%s" % req.get_host())
if result < 1:
raise GssapiAuthError("HTTPGssapiAuthHandler: init failed with %d" % result)
result = krb.authGSSClientStep(self._context, negotiate)
if result < 0:
raise GssapiAuthError("HTTPGssapiAuthHandler: step 1 failed with %d" % result)
client_response = krb.authGSSClientResponse(self._context)
logging.debug('HTTPGssapiAuthHandler: client response is %s...' % client_response[:10])
req.add_unredirected_header('Authorization', "Negotiate %s" % client_response)
server_response = self.parent.open(req)
negotiate = get_negotiate_value(server_response.info())
if negotiate is None:
logging.warning('HTTPGssapiAuthHandler: failed to authenticate server')
else:
logging.debug('HTTPGssapiAuthHandler negotiate 2: %s' % negotiate)
result = krb.authGSSClientStep(self._context, negotiate)
if result < 1:
raise GssapiAuthError("HTTPGssapiAuthHandler: step 2 failed with %d" % result)
return server_response
except GssapiAuthError as exc:
logging.error(repr(exc))
finally:
self.clean_context()
self._reset()
if __name__ == '__main__':
import sys
# debug
import httplib
httplib.HTTPConnection.debuglevel = 1
httplib.HTTPSConnection.debuglevel = 1
# debug
import logging
logging.basicConfig(level=logging.DEBUG)
# handle cookies
import cookielib
cj = cookielib.CookieJar()
ch = urllib2.HTTPCookieProcessor(cj)
# test with url sys.argv[1]
h = HTTPGssapiAuthHandler()
response = urllib2.build_opener(h, ch).open(sys.argv[1])
print('\nresponse: %s\n--------------\n' % response.code, response.info())
|
import io
import json
from typing import Any, Dict, Optional
from urllib.parse import parse_qsl
from multidict import CIMultiDict, MultiDict
from homeassistant.const import HTTP_OK
class MockStreamReader:
"""Small mock to imitate stream reader."""
def __init__(self, content: bytes) -> None:
"""Initialize mock stream reader."""
self._content = io.BytesIO(content)
async def read(self, byte_count: int = -1) -> bytes:
"""Read bytes."""
if byte_count == -1:
return self._content.read()
return self._content.read(byte_count)
class MockRequest:
"""Mock an aiohttp request."""
mock_source: Optional[str] = None
def __init__(
self,
content: bytes,
mock_source: str,
method: str = "GET",
status: int = HTTP_OK,
headers: Optional[Dict[str, str]] = None,
query_string: Optional[str] = None,
url: str = "",
) -> None:
"""Initialize a request."""
self.method = method
self.url = url
self.status = status
self.headers: CIMultiDict[str] = CIMultiDict(headers or {})
self.query_string = query_string or ""
self._content = content
self.mock_source = mock_source
@property
def query(self) -> "MultiDict[str]":
"""Return a dictionary with the query variables."""
return MultiDict(parse_qsl(self.query_string, keep_blank_values=True))
@property
def _text(self) -> str:
"""Return the body as text."""
return self._content.decode("utf-8")
@property
def content(self) -> MockStreamReader:
"""Return the body as text."""
return MockStreamReader(self._content)
async def json(self) -> Any:
"""Return the body as JSON."""
return json.loads(self._text)
async def post(self) -> "MultiDict[str]":
"""Return POST parameters."""
return MultiDict(parse_qsl(self._text, keep_blank_values=True))
async def text(self) -> str:
"""Return the body as text."""
return self._text
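# Minimal usage sketch (an illustrative addition, not part of the original
# module): exercising MockRequest's query and json helpers with made-up values.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        request = MockRequest(
            content=b'{"state": "on"}',
            mock_source="demo",
            query_string="a=1&a=2",
        )
        # query parses the query string into a MultiDict
        assert request.query.getall("a") == ["1", "2"]
        # json decodes the raw body
        assert await request.json() == {"state": "on"}

    asyncio.run(_demo())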
|
from flask_script import Manager
import sys
from lemur.constants import SUCCESS_METRIC_STATUS
from lemur.plugins.lemur_acme.acme_handlers import AcmeDnsHandler
from lemur.dns_providers.service import get_all_dns_providers, set_domains
from lemur.extensions import metrics, sentry
manager = Manager(
usage="Iterates through all DNS providers and sets DNS zones in the database."
)
@manager.command
def get_all_zones():
"""
    Retrieves all DNS providers from the database and refreshes the zones associated with each provider.
"""
print("[+] Starting dns provider zone lookup and configuration.")
dns_providers = get_all_dns_providers()
acme_dns_handler = AcmeDnsHandler()
function = f"{__name__}.{sys._getframe().f_code.co_name}"
log_data = {
"function": function,
"message": "",
}
for dns_provider in dns_providers:
try:
zones = acme_dns_handler.get_all_zones(dns_provider)
set_domains(dns_provider, zones)
except Exception as e:
print("[+] Error with DNS Provider {}: {}".format(dns_provider.name, e))
log_data["message"] = f"get all zones failed for {dns_provider} {e}."
sentry.captureException(extra=log_data)
status = SUCCESS_METRIC_STATUS
metrics.send("get_all_zones", "counter", 1, metric_tags={"status": status})
print("[+] Done with dns provider zone lookup and configuration.")
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.icon import icon_for_battery_level
from . import DATA_RAINCLOUD, ICON_MAP, SENSORS, RainCloudEntity
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All(
cv.ensure_list, [vol.In(SENSORS)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
sensors = []
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
if sensor_type == "battery":
sensors.append(RainCloudSensor(raincloud.controller.faucet, sensor_type))
else:
# create a sensor for each zone managed by a faucet
for zone in raincloud.controller.faucet.zones:
sensors.append(RainCloudSensor(zone, sensor_type))
add_entities(sensors, True)
return True
class RainCloudSensor(RainCloudEntity):
"""A sensor implementation for raincloud device."""
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Updating RainCloud sensor: %s", self._name)
if self._sensor_type == "battery":
self._state = self.data.battery
else:
self._state = getattr(self.data, self._sensor_type)
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == "battery" and self._state is not None:
return icon_for_battery_level(
battery_level=int(self._state), charging=False
)
return ICON_MAP.get(self._sensor_type)
|
from __future__ import division
from builtins import str
from builtins import range
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import proj3d
import matplotlib.animation as animation
import matplotlib.patches as patches
from .._shared.helpers import *
matplotlib.rcParams['pdf.fonttype'] = 42
def _draw(x, legend=None, title=None, labels=False,
show=True, kwargs_list=None, fmt=None, animate=False,
tail_duration=2, rotations=2, zoom=1, chemtrails=False, precog=False,
bullettime=False, frame_rate=50, elev=10, azim=-60, duration=30,
explore=False, size=None, ax=None):
"""
Draws the plot
"""
# handle static plots
def dispatch_static(x, ax=None):
shape = x[0].shape[1]
if shape==3:
opts = dict(projection='3d')
else:
opts = dict()
if not ax:
fig = plt.figure()
ax = fig.add_subplot(111, **opts)
else:
fig = ax.figure
if x[0].ndim==1 or x[0].shape[-1]==1:
return plot1D(x, fig, ax)
elif x[0].shape[-1]==2:
return plot2D(x, fig, ax)
elif x[0].shape[-1]==3:
return plot3D(x, fig, ax)
# plot data in 1D
def plot1D(data, fig, ax):
n=len(data)
for i in range(n):
ikwargs = kwargs_list[i]
if fmt is None:
ax.plot(data[i][:,0], **ikwargs)
else:
ax.plot(data[i][:,0], fmt[i], **ikwargs)
return fig, ax, data
# plot data in 2D
def plot2D(data, fig, ax):
n=len(data)
for i in range(n):
ikwargs = kwargs_list[i]
if fmt is None:
ax.plot(data[i][:,0], data[i][:,1], **ikwargs)
else:
ax.plot(data[i][:,0], data[i][:,1], fmt[i], **ikwargs)
return fig, ax, data
# plot data in 3D
def plot3D(data, fig, ax):
n=len(data)
for i in range(n):
ikwargs = kwargs_list[i]
if fmt is None:
ax.plot(data[i][:,0], data[i][:,1], data[i][:,2], **ikwargs)
else:
ax.plot(data[i][:,0], data[i][:,1], data[i][:,2], fmt[i], **ikwargs)
return fig, ax, data
def annotate_plot(data, labels):
"""Create labels in 3d chart
Args:
            data (np.array) - array of points, of shape (numPoints, 3)
labels (list) - list of labels of shape (numPoints,1)
Returns:
None
"""
global labels_and_points
labels_and_points = []
if data[0].shape[-1]>2:
proj = ax.get_proj()
for idx,x in enumerate(data):
if labels[idx] is not None:
if data[0].shape[-1]>2:
x2, y2, _ = proj3d.proj_transform(x[0], x[1], x[2], proj)
label = plt.annotate(
labels[idx],
xy = (x2, y2), xytext = (-20, 20), textcoords = 'offset points', ha = 'right', va = 'bottom',
bbox = dict(boxstyle = 'round,pad=0.5', fc = 'white', alpha = 0.5),
arrowprops = dict(arrowstyle = '-', connectionstyle = 'arc3,rad=0'),family='serif')
labels_and_points.append((label,x[0],x[1],x[2]))
elif data[0].shape[-1]==2:
x2, y2 = x[0], x[1]
label = plt.annotate(
labels[idx],
xy = (x2, y2), xytext = (-20, 20), textcoords = 'offset points', ha = 'right', va = 'bottom',
bbox = dict(boxstyle = 'round,pad=0.5', fc = 'white', alpha = 0.5),
arrowprops = dict(arrowstyle = '-', connectionstyle = 'arc3,rad=0'),family='serif')
label.draggable()
labels_and_points.append((label,x[0],x[1]))
fig.canvas.draw()
def update_position(e):
"""Update label positions in 3d chart
Args:
e (mouse event) - event handle to update on
Returns:
None
"""
proj = ax.get_proj()
for label, x, y, z in labels_and_points:
x2, y2, _ = proj3d.proj_transform(x, y, z, proj)
label.xy = x2,y2
label.update_positions(fig.canvas.renderer)
label._visible=True
fig.canvas.draw()
def hide_labels(e):
"""Hides labels on button press
Args:
e (mouse event) - event handle to update on
Returns:
None
"""
for label in labels_and_points:
label[0]._visible=False
def add_labels(x, labels, explore=False):
"""Add labels to graph if available
Args:
            x (np.ndarray) - Array containing the data points
labels (list) - List containing labels
Returns:
None
"""
# if explore mode is activated, implement the on hover behavior
if explore:
X = np.vstack(x)
if labels is not None:
if any(isinstance(el, list) for el in labels):
labels = list(itertools.chain(*labels))
fig.canvas.mpl_connect('motion_notify_event', lambda event: onMouseMotion(event, X, labels)) # on mouse motion
# fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels)) # on mouse click
else:
fig.canvas.mpl_connect('motion_notify_event', lambda event: onMouseMotion(event, X)) # on mouse motion
# fig.canvas.mpl_connect('button_press_event', lambda event: onMouseClick(event, X, labels)) # on mouse click
elif labels is not None:
X = np.vstack(x)
if any(isinstance(el, list) for el in labels):
labels = list(itertools.chain(*labels))
annotate_plot(X, labels)
fig.canvas.mpl_connect('button_press_event', hide_labels)
fig.canvas.mpl_connect('button_release_event', update_position)
##EXPLORE MODE##
def distance(point, event):
"""Return distance between mouse position and given data point
Args:
point (np.array) - np.array of shape (3,), with x,y,z in data coords
event (MouseEvent) - mouse event (which contains mouse position in .x and .xdata)
Returns:
distance (np.float64) - distance (in screen coords) between mouse pos and data point
"""
assert point.shape == (3,), "distance: point.shape is wrong: %s, must be (3,)" % point.shape
# Project 3d data space to 2d data space
x2, y2, _ = proj3d.proj_transform(point[0], point[1], point[2], plt.gca().get_proj())
# Convert 2d data space to 2d screen space
x3, y3 = ax.transData.transform((x2, y2))
        return np.sqrt((x3 - event.x)**2 + (y3 - event.y)**2)
def calcClosestDatapoint(X, event):
""""Calculate which data point is closest to the mouse position.
Args:
X (np.array) - array of points, of shape (numPoints, 3)
event (MouseEvent) - mouse event (containing mouse position)
Returns:
smallestIndex (int) - the index (into the array of points X) of the element closest to the mouse position
"""
        distances = [distance(X[i, 0:3], event) for i in range(X.shape[0])]
return np.argmin(distances)
def annotate_plot_explore(X, index, labels=False):
"""Create popover label in 3d chart
Args:
X (np.array) - array of points, of shape (numPoints, 3)
index (int) - index (into points array X) of item which should be printed
labels (list or False) - list of data point labels (default is False)
Returns:
None
"""
# save clicked points
if not hasattr(annotate_plot_explore, 'clicked'):
annotate_plot_explore.clicked = []
# If we have previously displayed another label, remove it first
if hasattr(annotate_plot_explore, 'label'):
if index not in annotate_plot_explore.clicked:
annotate_plot_explore.label.remove()
# Get data point from array of points X, at position index
x2, y2, _ = proj3d.proj_transform(X[index, 0], X[index, 1], X[index, 2], ax.get_proj())
if type(labels) is list:
label = labels[index]
else:
label = "Index " + str(index) + ": (" + "{0:.2f}, ".format(X[index, 0]) + "{0:.2f}, ".format(X[index, 1]) + "{0:.2f}".format(X[index, 2]) + ")"
annotate_plot_explore.label = plt.annotate(
label,
xy = (x2, y2), xytext = (-20, 20), textcoords = 'offset points', ha = 'right', va = 'bottom',
bbox = dict(boxstyle = 'round,pad=0.5', fc = 'yellow', alpha = 0.5),
arrowprops = dict(arrowstyle = '->', connectionstyle = 'arc3,rad=0'))
fig.canvas.draw()
def onMouseMotion(event,X,labels=False):
"""Event that is triggered when mouse is moved. Shows text annotation over data point closest to mouse
Args:
event (event) - event triggered when the mous is moved
X (np.ndarray) - coordinates by datapoints matrix
labels (list or False) - list of data labels (default is False)
Returns:
None
"""
closestIndex = calcClosestDatapoint(X, event)
if hasattr(onMouseMotion, 'first'):
pass
else:
onMouseMotion.first = False
onMouseMotion.closestIndex_prev = calcClosestDatapoint(X, event)
        if closestIndex != onMouseMotion.closestIndex_prev:
            if type(labels) is list:
                annotate_plot_explore(X, closestIndex, labels)
            else:
                annotate_plot_explore(X, closestIndex)
            onMouseMotion.closestIndex_prev = closestIndex
def plot_cube(scale):
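        # each face of the unit cube is a 2x2 meshgrid of (X, Y, Z) corner
        # coordinates in [-1, 1]; the faces are scaled and drawn as wireframes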
cube = {
"top" : ( [[-1,1],[-1,1]], [[-1,-1],[1,1]], [[1,1],[1,1]] ),
"bottom" : ( [[-1,1],[-1,1]], [[-1,-1],[1,1]], [[-1,-1],[-1,-1]] ),
"left" : ( [[-1,-1],[-1,-1]], [[-1,1],[-1,1]], [[-1,-1],[1,1]] ),
"right" : ( [[1,1],[1,1]], [[-1,1],[-1,1]], [[-1,-1],[1,1]] ),
"front" : ( [[-1,1],[-1,1]], [[-1,-1],[-1,-1]], [[-1,-1],[1,1]] ),
"back" : ( [[-1,1],[-1,1]], [[1,1],[1,1]], [[-1,-1],[1,1]] )
}
plane_list = []
for side in cube:
(Xs, Ys, Zs) = (
np.asarray(cube[side][0])*scale,
np.asarray(cube[side][1])*scale,
np.asarray(cube[side][2])*scale
)
plane_list.append(ax.plot_wireframe(Xs, Ys, Zs, rstride=1, cstride=1, color='black', linewidth=1))
return plane_list
def plot_square(ax, scale=1):
        ax.add_patch(patches.Rectangle((-scale, -scale), scale * 2, scale * 2, fill=False, edgecolor='black', linewidth=1))
def update_lines_parallel(num, data_lines, lines, trail_lines, cube_scale, tail_duration=2,
rotations=2, zoom=1, chemtrails=False, elev=10):
if hasattr(update_lines_parallel, 'planes'):
for plane in update_lines_parallel.planes:
plane.remove()
update_lines_parallel.planes = plot_cube(cube_scale)
        ax.view_init(elev=elev, azim=rotations*(360*(num/data_lines[0].shape[0])))
ax.dist=9-zoom
for line, data, trail in zip(lines, data_lines, trail_lines):
if (precog and chemtrails) or bullettime:
trail.set_data(data[:, 0:2].T)
trail.set_3d_properties(data[:, 2])
elif chemtrails:
trail.set_data(data[0:num-tail_duration + 1, 0:2].T)
trail.set_3d_properties(data[0:num-tail_duration + 1, 2])
elif precog:
trail.set_data(data[num+1:, 0:2].T)
trail.set_3d_properties(data[num+1:, 2])
if num<=tail_duration:
line.set_data(data[0:num+1, 0:2].T)
line.set_3d_properties(data[0:num+1, 2])
else:
line.set_data(data[num-tail_duration:num+1, 0:2].T)
line.set_3d_properties(data[num-tail_duration:num+1, 2])
return lines, trail_lines
def update_lines_spin(num, data_lines, lines, cube_scale, rotations=2,
zoom=1, elev=10):
if hasattr(update_lines_spin, 'planes'):
for plane in update_lines_spin.planes:
plane.remove()
update_lines_spin.planes = plot_cube(cube_scale)
ax.view_init(elev=elev, azim=rotations*(360*(num/(frame_rate*duration))))
ax.dist=9-zoom
for line, data in zip(lines, data_lines):
line.set_data(data[:, 0:2].T)
line.set_3d_properties(data[:, 2])
return lines
def dispatch_animate(x, ani_params):
        if x[0].shape[1] == 3:
return animate_plot3D(x, **ani_params)
def animate_plot3D(x, tail_duration=2, rotations=2, zoom=1, chemtrails=False,
frame_rate=50, elev=10, style='parallel'):
# initialize plot
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# create lines
if fmt is not None:
lines = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2], fmt[idx],
linewidth=1, **kwargs_list[idx])[0] for idx,dat in enumerate(x)]
if is_line(fmt):
trail = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2], fmt[idx],
alpha=.3, linewidth=1, **kwargs_list[idx])[0] for idx, dat in enumerate(x)]
else:
lines = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2],
linewidth=1, **kwargs_list[idx])[0] for idx,dat in enumerate(x)]
if is_line(fmt):
trail = [ax.plot(dat[0:1, 0], dat[0:1, 1], dat[0:1, 2],
alpha=.3, linewidth=1, **kwargs_list[idx])[0] for idx, dat in enumerate(x)]
if tail_duration==0:
tail_duration=1
else:
tail_duration = int(frame_rate*tail_duration)
# get line animation
if style in ['parallel', True]:
line_ani = animation.FuncAnimation(fig, update_lines_parallel, x[0].shape[0],
fargs=(x, lines, trail, 1, tail_duration, rotations, zoom, chemtrails, elev),
interval=1000/frame_rate, blit=False, repeat=False)
elif style == 'spin':
line_ani = animation.FuncAnimation(fig, update_lines_spin, frame_rate*duration,
fargs=(x, lines, 1, rotations, zoom, elev),
interval=1000/frame_rate, blit=False, repeat=False)
return fig, ax, x, line_ani
# if a single point, but formatted as a line, replace with a point
    if fmt is not None:
        for i, (xi, fi) in enumerate(zip(x, fmt)):
            if xi.shape[0] == 1 and fi in ('-', ':', '--'):
                fmt[i] = '.'
if not show:
# prevents the backend from rendering this plot
plt.ioff()
if animate in [True, 'parallel', 'spin']:
        assert x[0].shape[1] == 3, "Animations are currently only supported for 3d plots."
# animation params
ani_params = dict(tail_duration=tail_duration,
rotations=rotations,
zoom=zoom,
chemtrails=chemtrails,
frame_rate=frame_rate,
elev=elev,
style=animate)
# dispatch animation
fig, ax, data, line_ani = dispatch_animate(x, ani_params)
else:
# dispatch static
fig, ax, data = dispatch_static(x, ax)
# if 3d, plot the cube
    if x[0].shape[1] == 3:
# set cube scale
cube_scale = 1
# plot cube
plot_cube(cube_scale)
# set the axes properties
ax.set_xlim3d([-cube_scale, cube_scale])
ax.set_ylim3d([-cube_scale, cube_scale])
ax.set_zlim3d([-cube_scale, cube_scale])
# initialize the view
ax.view_init(elev=elev, azim=azim)
    elif x[0].shape[1] == 2:
# plot square
plot_square(ax)
# set axes
ax.set_xlim(-1.1, 1.1)
ax.set_ylim(-1.1, 1.1)
# set line_ani to empty
line_ani = None
# remove axes
ax.set_axis_off()
# add labels
add_labels(x, labels, explore=explore)
# add title
if title is not None:
ax.set_title(title)
# add legend
if legend is not None:
ax.legend()
if size is not None:
fig.set_size_inches(size)
return fig, ax, data, line_ani
|
import json
from homeassistant import config_entries
from homeassistant.components.ozw.const import DOMAIN
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
async def setup_ozw(hass, entry=None, fixture=None):
"""Set up OZW and load a dump."""
hass.config.components.add("mqtt")
if entry is None:
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave",
connection_class=config_entries.CONN_CLASS_LOCAL_PUSH,
)
entry.add_to_hass(hass)
with patch("homeassistant.components.mqtt.async_subscribe") as mock_subscribe:
mock_subscribe.return_value = Mock()
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert "ozw" in hass.config.components
assert len(mock_subscribe.mock_calls) == 1
receive_message = mock_subscribe.mock_calls[0][1][2]
if fixture is not None:
for line in fixture.split("\n"):
line = line.strip()
if not line:
continue
topic, payload = line.split(",", 1)
receive_message(Mock(topic=topic, payload=payload))
await hass.async_block_till_done()
return receive_message
class MQTTMessage:
"""Represent a mock MQTT message."""
def __init__(self, topic, payload):
"""Set up message."""
self.topic = topic
self.payload = payload
def decode(self):
"""Decode message payload from a string to a json dict."""
self.payload = json.loads(self.payload)
def encode(self):
"""Encode message payload into a string."""
self.payload = json.dumps(self.payload)
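# Minimal usage sketch (an illustrative addition, not part of the original
# helpers): round-tripping a made-up payload through MQTTMessage.
if __name__ == "__main__":
    message = MQTTMessage("OpenZWave/1/status/", '{"Status": "driverAllNodesQueried"}')
    message.decode()  # payload is now a dict
    assert message.payload["Status"] == "driverAllNodesQueried"
    message.encode()  # payload is a JSON string again
    assert json.loads(message.payload)["Status"] == "driverAllNodesQueried"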
|
import tensornetwork as tn
from tensornetwork.backend_contextmanager import _default_backend_stack
import pytest
import numpy as np
def test_contextmanager_simple():
with tn.DefaultBackend("tensorflow"):
a = tn.Node(np.ones((10,)))
b = tn.Node(np.ones((10,)))
assert a.backend.name == b.backend.name
def test_contextmanager_default_backend():
tn.set_default_backend("pytorch")
with tn.DefaultBackend("numpy"):
assert _default_backend_stack.default_backend == "pytorch"
def test_contextmanager_interruption():
tn.set_default_backend("pytorch")
with pytest.raises(AssertionError):
with tn.DefaultBackend("numpy"):
tn.set_default_backend("tensorflow")
def test_contextmanager_nested():
with tn.DefaultBackend("tensorflow"):
a = tn.Node(np.ones((10,)))
assert a.backend.name == "tensorflow"
with tn.DefaultBackend("numpy"):
b = tn.Node(np.ones((10,)))
assert b.backend.name == "numpy"
c = tn.Node(np.ones((10,)))
assert c.backend.name == "tensorflow"
d = tn.Node(np.ones((10,)))
assert d.backend.name == "numpy"
def test_contextmanager_wrong_item():
a = tn.Node(np.ones((10,)))
with pytest.raises(ValueError):
tn.DefaultBackend(a) # pytype: disable=wrong-arg-types
def test_contextmanager_BaseBackend():
tn.set_default_backend("pytorch")
a = tn.Node(np.ones((10,)))
with tn.DefaultBackend(a.backend):
b = tn.Node(np.ones((10,)))
assert b.backend.name == "pytorch"
def test_set_default_backend_value_error():
tn.set_default_backend("pytorch")
with pytest.raises(
ValueError,
match="Item passed to set_default_backend "
"must be Text or BaseBackend"):
tn.set_default_backend(-1) # pytype: disable=wrong-arg-types
|
import tarfile
import os.path as op
import os
from ...utils import _fetch_file, verbose, _check_option
from ..utils import _get_path, logger, _do_path_update
@verbose
def data_path(dataset='evoked', path=None, force_update=False,
update_path=True, verbose=None):
u"""Get path to local copy of the high frequency SEF dataset.
Gets a local copy of the high frequency SEF MEG dataset [1]_.
Parameters
----------
dataset : 'evoked' | 'raw'
Whether to get the main dataset (evoked, structural and the rest) or
the separate dataset containing raw MEG data only.
path : None | str
Where to look for the HF-SEF data storing location.
If None, the environment variable or config parameter
``MNE_DATASETS_HF_SEF_PATH`` is used. If it doesn't exist, the
"~/mne_data" directory is used. If the HF-SEF dataset
is not found under the given path, the data
will be automatically downloaded to the specified folder.
force_update : bool
Force update of the dataset even if a local copy exists.
update_path : bool | None
If True, set the MNE_DATASETS_HF_SEF_PATH in mne-python
config to the given path. If None, the user is prompted.
%(verbose)s
Returns
-------
path : str
Local path to the directory where the HF-SEF data is stored.
References
----------
.. [1] Nurminen, J., Paananen, H., Mäkelä, J. (2017): High frequency
somatosensory MEG dataset. https://doi.org/10.5281/zenodo.889234
"""
key = 'MNE_DATASETS_HF_SEF_PATH'
name = 'HF_SEF'
path = _get_path(path, key, name)
destdir = op.join(path, 'HF_SEF')
urls = {'evoked':
'https://zenodo.org/record/3523071/files/hf_sef_evoked.tar.gz',
'raw':
'https://zenodo.org/record/889296/files/hf_sef_raw.tar.gz'}
hashes = {'evoked': '13d34cb5db584e00868677d8fb0aab2b',
'raw': '33934351e558542bafa9b262ac071168'}
_check_option('dataset', dataset, sorted(urls.keys()))
url = urls[dataset]
hash_ = hashes[dataset]
fn = url.split('/')[-1] # pick the filename from the url
archive = op.join(destdir, fn)
# check for existence of evoked and raw sets
has = dict()
subjdir = op.join(destdir, 'subjects')
megdir_a = op.join(destdir, 'MEG', 'subject_a')
has['evoked'] = op.isdir(destdir) and op.isdir(subjdir)
has['raw'] = op.isdir(megdir_a) and any(['raw' in fn_ for fn_ in
os.listdir(megdir_a)])
if not has[dataset] or force_update:
if not op.isdir(destdir):
os.mkdir(destdir)
_fetch_file(url, archive, hash_=hash_)
with tarfile.open(archive) as tar:
logger.info('Decompressing %s' % archive)
for member in tar.getmembers():
# strip the leading dirname 'hf_sef/' from the archive paths
# this should be fixed when making next version of archives
member.name = member.name[7:]
try:
tar.extract(member, destdir)
except IOError:
# check whether file exists but could not be overwritten
fn_full = op.join(destdir, member.name)
if op.isfile(fn_full):
os.remove(fn_full)
tar.extract(member, destdir)
else: # some more sinister cause for IOError
raise
os.remove(archive)
_do_path_update(path, update_path, key, name)
return destdir
|
import unittest
from trashcli.fstab import FakeFstab
class TestFakeFstab(unittest.TestCase):
def setUp(self):
self.fstab = FakeFstab()
def test_default(self):
assert ["/"] == self.filter_only_mount_points("/")
def test_it_should_accept_fake_mount_points(self):
self.fstab.add_mount('/fake')
assert ['/', '/fake'] == self.filter_only_mount_points('/', '/fake')
def test_something(self):
fstab = FakeFstab()
fstab.add_mount('/fake')
assert '/fake' == fstab.volume_of('/fake/foo')
def filter_only_mount_points(self, *supposed_mount_points):
return [mp for mp in supposed_mount_points
if mp == self.fstab.volume_of(mp)]
|
import lakeside
from homeassistant.components.switch import SwitchEntity
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Eufy switches."""
if discovery_info is None:
return
add_entities([EufySwitch(discovery_info)], True)
class EufySwitch(SwitchEntity):
"""Representation of a Eufy switch."""
def __init__(self, device):
"""Initialize the light."""
self._state = None
self._name = device["name"]
self._address = device["address"]
self._code = device["code"]
self._type = device["type"]
self._switch = lakeside.switch(self._address, self._code, self._type)
self._switch.connect()
def update(self):
"""Synchronise state from the switch."""
self._switch.update()
self._state = self._switch.power
@property
def unique_id(self):
"""Return the ID of this light."""
return self._address
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the specified switch on."""
try:
self._switch.set_state(True)
except BrokenPipeError:
self._switch.connect()
self._switch.set_state(power=True)
def turn_off(self, **kwargs):
"""Turn the specified switch off."""
try:
self._switch.set_state(False)
except BrokenPipeError:
self._switch.connect()
self._switch.set_state(False)
|
from datetime import timedelta
import pytest
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.sonarr.const import DOMAIN
from homeassistant.const import (
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
DATA_GIGABYTES,
STATE_UNAVAILABLE,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
from tests.components.sonarr import mock_connection, setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
UPCOMING_ENTITY_ID = f"{SENSOR_DOMAIN}.sonarr_upcoming"
async def test_sensors(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation and values of the sensors."""
entry = await setup_integration(hass, aioclient_mock, skip_entry_setup=True)
registry = await hass.helpers.entity_registry.async_get_registry()
# Pre-create registry entries for disabled by default sensors
sensors = {
"commands": "sonarr_commands",
"diskspace": "sonarr_disk_space",
"queue": "sonarr_queue",
"series": "sonarr_shows",
"wanted": "sonarr_wanted",
}
for (unique, oid) in sensors.items():
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
f"{entry.entry_id}_{unique}",
suggested_object_id=oid,
disabled_by=None,
)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
for (unique, oid) in sensors.items():
entity = registry.async_get(f"sensor.{oid}")
assert entity
assert entity.unique_id == f"{entry.entry_id}_{unique}"
state = hass.states.get("sensor.sonarr_commands")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:code-braces"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Commands"
assert state.state == "2"
state = hass.states.get("sensor.sonarr_disk_space")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:harddisk"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == DATA_GIGABYTES
assert state.state == "263.10"
state = hass.states.get("sensor.sonarr_queue")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:download"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Episodes"
assert state.state == "1"
state = hass.states.get("sensor.sonarr_shows")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:television"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Series"
assert state.state == "1"
state = hass.states.get("sensor.sonarr_upcoming")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:television"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Episodes"
assert state.state == "1"
state = hass.states.get("sensor.sonarr_wanted")
assert state
assert state.attributes.get(ATTR_ICON) == "mdi:television"
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Episodes"
assert state.state == "2"
@pytest.mark.parametrize(
"entity_id",
(
"sensor.sonarr_commands",
"sensor.sonarr_disk_space",
"sensor.sonarr_queue",
"sensor.sonarr_shows",
"sensor.sonarr_wanted",
),
)
async def test_disabled_by_default_sensors(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker, entity_id: str
) -> None:
"""Test the disabled by default sensors."""
await setup_integration(hass, aioclient_mock)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get(entity_id)
assert state is None
entry = registry.async_get(entity_id)
assert entry
assert entry.disabled
assert entry.disabled_by == "integration"
async def test_availability(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test entity availability."""
now = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=now):
await setup_integration(hass, aioclient_mock)
assert hass.states.get(UPCOMING_ENTITY_ID).state == "1"
# state to unavailable
aioclient_mock.clear_requests()
mock_connection(aioclient_mock, error=True)
future = now + timedelta(minutes=1)
with patch("homeassistant.util.dt.utcnow", return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert hass.states.get(UPCOMING_ENTITY_ID).state == STATE_UNAVAILABLE
# state to available
aioclient_mock.clear_requests()
mock_connection(aioclient_mock)
future += timedelta(minutes=1)
with patch("homeassistant.util.dt.utcnow", return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert hass.states.get(UPCOMING_ENTITY_ID).state == "1"
# state to unavailable
aioclient_mock.clear_requests()
mock_connection(aioclient_mock, invalid_auth=True)
future += timedelta(minutes=1)
with patch("homeassistant.util.dt.utcnow", return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert hass.states.get(UPCOMING_ENTITY_ID).state == STATE_UNAVAILABLE
# state to available
aioclient_mock.clear_requests()
mock_connection(aioclient_mock)
future += timedelta(minutes=1)
with patch("homeassistant.util.dt.utcnow", return_value=future):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert hass.states.get(UPCOMING_ENTITY_ID).state == "1"
|
from pysmartthings import ATTRIBUTES, CAPABILITIES, Attribute, Capability
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES,
DOMAIN as BINARY_SENSOR_DOMAIN,
)
from homeassistant.components.smartthings import binary_sensor
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_mapping_integrity():
"""Test ensures the map dicts have proper integrity."""
# Ensure every CAPABILITY_TO_ATTRIB key is in CAPABILITIES
# Ensure every CAPABILITY_TO_ATTRIB value is in ATTRIB_TO_CLASS keys
for capability, attrib in binary_sensor.CAPABILITY_TO_ATTRIB.items():
assert capability in CAPABILITIES, capability
assert attrib in ATTRIBUTES, attrib
assert attrib in binary_sensor.ATTRIB_TO_CLASS.keys(), attrib
# Ensure every ATTRIB_TO_CLASS value is in DEVICE_CLASSES
for attrib, device_class in binary_sensor.ATTRIB_TO_CLASS.items():
assert attrib in ATTRIBUTES, attrib
assert device_class in DEVICE_CLASSES, device_class
async def test_entity_state(hass, device_factory):
"""Tests the state attributes properly match the light types."""
device = device_factory(
"Motion Sensor 1", [Capability.motion_sensor], {Attribute.motion: "inactive"}
)
await setup_platform(hass, BINARY_SENSOR_DOMAIN, devices=[device])
state = hass.states.get("binary_sensor.motion_sensor_1_motion")
assert state.state == "off"
assert state.attributes[ATTR_FRIENDLY_NAME] == f"{device.label} {Attribute.motion}"
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory(
"Motion Sensor 1", [Capability.motion_sensor], {Attribute.motion: "inactive"}
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, BINARY_SENSOR_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("binary_sensor.motion_sensor_1_motion")
assert entry
assert entry.unique_id == f"{device.device_id}.{Attribute.motion}"
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_update_from_signal(hass, device_factory):
"""Test the binary_sensor updates when receiving a signal."""
# Arrange
device = device_factory(
"Motion Sensor 1", [Capability.motion_sensor], {Attribute.motion: "inactive"}
)
await setup_platform(hass, BINARY_SENSOR_DOMAIN, devices=[device])
device.status.apply_attribute_update(
"main", Capability.motion_sensor, Attribute.motion, "active"
)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.motion_sensor_1_motion")
assert state is not None
assert state.state == "on"
async def test_unload_config_entry(hass, device_factory):
"""Test the binary_sensor is removed when the config entry is unloaded."""
# Arrange
device = device_factory(
"Motion Sensor 1", [Capability.motion_sensor], {Attribute.motion: "inactive"}
)
config_entry = await setup_platform(hass, BINARY_SENSOR_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "binary_sensor")
# Assert
assert not hass.states.get("binary_sensor.motion_sensor_1_motion")
|
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing NEW_NAME states."""
hass.states.async_set("NEW_DOMAIN.entity_off", "off", {})
hass.states.async_set("NEW_DOMAIN.entity_on", "on", {"color": "red"})
turn_on_calls = async_mock_service(hass, "NEW_DOMAIN", "turn_on")
turn_off_calls = async_mock_service(hass, "NEW_DOMAIN", "turn_off")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("NEW_DOMAIN.entity_off", "off"),
State("NEW_DOMAIN.entity_on", "on", {"color": "red"}),
],
blocking=True,
)
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("NEW_DOMAIN.entity_off", "not_supported")], blocking=True
)
assert "not_supported" in caplog.text
assert len(turn_on_calls) == 0
assert len(turn_off_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("NEW_DOMAIN.entity_on", "off"),
State("NEW_DOMAIN.entity_off", "on", {"color": "red"}),
# Should not raise
State("NEW_DOMAIN.non_existing", "on"),
],
blocking=True,
)
assert len(turn_on_calls) == 1
assert turn_on_calls[0].domain == "NEW_DOMAIN"
assert turn_on_calls[0].data == {
"entity_id": "NEW_DOMAIN.entity_off",
"color": "red",
}
assert len(turn_off_calls) == 1
assert turn_off_calls[0].domain == "NEW_DOMAIN"
assert turn_off_calls[0].data == {"entity_id": "NEW_DOMAIN.entity_on"}
|
import logging
import subprocess
import threading
from kalliope.core.NeuronModule import NeuronModule, MissingParameterException
logging.basicConfig()
logger = logging.getLogger("kalliope")
class AsyncShell(threading.Thread):
"""
    Class used to run an asynchronous shell command
    .. notes:: Impossible to get the return code of the command
"""
def __init__(self, cmd):
self.stdout = None
self.stderr = None
self.cmd = cmd
threading.Thread.__init__(self)
def run(self):
p = subprocess.Popen(self.cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.stdout, self.stderr = p.communicate()
class Shell(NeuronModule):
"""
    Run a shell command in synchronous mode
"""
def __init__(self, **kwargs):
super(Shell, self).__init__(**kwargs)
# get the command
self.cmd = kwargs.get('cmd', None)
# get if the user select a blocking command or not
self.async_mode = kwargs.get('async', False)
self.query = kwargs.get('query', None)
if self.query is not None:
self.cmd = self.cmd + "\"" + self.query +"\""
# check parameters
if self._is_parameters_ok():
# run the command
if not self.async_mode:
p = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
self.output = output.decode()
self.returncode = p.returncode
message = {
"output": self.output,
"returncode": self.returncode
}
self.say(message)
else:
async_shell = AsyncShell(cmd=self.cmd)
async_shell.start()
def _is_parameters_ok(self):
"""
Check if received parameters are ok to perform operations in the neuron
:return: true if parameters are ok, raise an exception otherwise
.. raises:: MissingParameterException
"""
if self.cmd is None:
raise MissingParameterException("cmd parameter required")
return True
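# Usage sketch (illustrative, not part of the original neuron). In a synapse the
# neuron receives its parameters as kwargs; "async" has to be passed through a
# dict here because it is a reserved word in recent Python versions:
#
#   Shell(cmd="echo hello")                      # synchronous, says the output
#   Shell(**{"cmd": "sleep 10", "async": True})  # fire-and-forget via AsyncShell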
|
from collections import defaultdict
import logging
import voluptuous as vol
from homeassistant.const import CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.util.decorator import Registry
from .const import ATTR_DEVICES, DOMAIN, FLAT_PLATFORM_TYPES, TYPE_TO_PLATFORMS
_LOGGER = logging.getLogger(__name__)
SCHEMAS = Registry()
@callback
def discover_mysensors_platform(hass, hass_config, platform, new_devices):
"""Discover a MySensors platform."""
task = hass.async_create_task(
discovery.async_load_platform(
hass,
platform,
DOMAIN,
{ATTR_DEVICES: new_devices, CONF_NAME: DOMAIN},
hass_config,
)
)
return task
def default_schema(gateway, child, value_type_name):
"""Return a default validation schema for value types."""
schema = {value_type_name: cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
@SCHEMAS.register(("light", "V_DIMMER"))
def light_dimmer_schema(gateway, child, value_type_name):
"""Return a validation schema for V_DIMMER."""
schema = {"V_DIMMER": cv.string, "V_LIGHT": cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
@SCHEMAS.register(("light", "V_PERCENTAGE"))
def light_percentage_schema(gateway, child, value_type_name):
"""Return a validation schema for V_PERCENTAGE."""
schema = {"V_PERCENTAGE": cv.string, "V_STATUS": cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
@SCHEMAS.register(("light", "V_RGB"))
def light_rgb_schema(gateway, child, value_type_name):
"""Return a validation schema for V_RGB."""
schema = {"V_RGB": cv.string, "V_STATUS": cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
@SCHEMAS.register(("light", "V_RGBW"))
def light_rgbw_schema(gateway, child, value_type_name):
"""Return a validation schema for V_RGBW."""
schema = {"V_RGBW": cv.string, "V_STATUS": cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
@SCHEMAS.register(("switch", "V_IR_SEND"))
def switch_ir_send_schema(gateway, child, value_type_name):
"""Return a validation schema for V_IR_SEND."""
schema = {"V_IR_SEND": cv.string, "V_LIGHT": cv.string}
return get_child_schema(gateway, child, value_type_name, schema)
def get_child_schema(gateway, child, value_type_name, schema):
"""Return a child schema."""
set_req = gateway.const.SetReq
child_schema = child.get_schema(gateway.protocol_version)
schema = child_schema.extend(
{
vol.Required(
set_req[name].value, msg=invalid_msg(gateway, child, name)
): child_schema.schema.get(set_req[name].value, valid)
for name, valid in schema.items()
},
extra=vol.ALLOW_EXTRA,
)
return schema
def invalid_msg(gateway, child, value_type_name):
"""Return a message for an invalid child during schema validation."""
pres = gateway.const.Presentation
set_req = gateway.const.SetReq
return (
f"{pres(child.type).name} requires value_type {set_req[value_type_name].name}"
)
def validate_set_msg(msg):
"""Validate a set message."""
if not validate_node(msg.gateway, msg.node_id):
return {}
child = msg.gateway.sensors[msg.node_id].children[msg.child_id]
return validate_child(msg.gateway, msg.node_id, child, msg.sub_type)
def validate_node(gateway, node_id):
"""Validate a node."""
if gateway.sensors[node_id].sketch_name is None:
_LOGGER.debug("Node %s is missing sketch name", node_id)
return False
return True
def validate_child(gateway, node_id, child, value_type=None):
"""Validate a child."""
validated = defaultdict(list)
pres = gateway.const.Presentation
set_req = gateway.const.SetReq
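    # Strategy: map the child's presentation type to candidate platforms, then
    # intersect each platform's supported value types with those the child
    # reports; every surviving (platform, value type) pair is schema-validated.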
child_type_name = next(
(member.name for member in pres if member.value == child.type), None
)
value_types = {value_type} if value_type else {*child.values}
value_type_names = {
member.name for member in set_req if member.value in value_types
}
platforms = TYPE_TO_PLATFORMS.get(child_type_name, [])
if not platforms:
_LOGGER.warning("Child type %s is not supported", child.type)
return validated
for platform in platforms:
platform_v_names = FLAT_PLATFORM_TYPES[platform, child_type_name]
v_names = platform_v_names & value_type_names
if not v_names:
child_value_names = {
member.name for member in set_req if member.value in child.values
}
v_names = platform_v_names & child_value_names
for v_name in v_names:
child_schema_gen = SCHEMAS.get((platform, v_name), default_schema)
child_schema = child_schema_gen(gateway, child, v_name)
try:
child_schema(child.values)
except vol.Invalid as exc:
_LOGGER.warning(
"Invalid %s on node %s, %s platform: %s",
child,
node_id,
platform,
exc,
)
continue
dev_id = id(gateway), node_id, child.id, set_req[v_name].value
validated[platform].append(dev_id)
return validated
|
import unittest
import tensorflow as tf
import numpy as np
from kerastuner.tuners import RandomSearch
class TestKerasTuner(unittest.TestCase):
def test_search(self):
        # data must live outside build_model so tuner.search below can see it
        x_train = np.random.random((100, 28, 28))
        y_train = np.random.randint(10, size=(100, 1))
        x_test = np.random.random((20, 28, 28))
        y_test = np.random.randint(10, size=(20, 1))
        def build_model(hp):
model = tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dropout(hp.Choice('dropout_rate', values=[0.2, 0.4])),
tf.keras.layers.Dense(10, activation='softmax')
])
model.compile(
optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
return model
tuner = RandomSearch(build_model, objective='accuracy', max_trials=1, executions_per_trial=1, seed=1)
tuner.search(x_train, y_train, epochs=1)
self.assertEqual(0.4, tuner.get_best_hyperparameters(1)[0].get('dropout_rate'))
|
import functools
from io import BytesIO
from tornado import httputil
from tornado.httpclient import HTTPResponse
from vcr.errors import CannotOverwriteExistingCassetteException
from vcr.request import Request
def vcr_fetch_impl(cassette, real_fetch_impl):
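    """Return a ``fetch_impl`` wrapper that plays responses back from the given
    cassette when possible, and records live responses into it otherwise."""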
@functools.wraps(real_fetch_impl)
def new_fetch_impl(self, request, callback):
headers = request.headers.copy()
if request.user_agent:
headers.setdefault("User-Agent", request.user_agent)
# TODO body_producer, header_callback, and streaming_callback are not
# yet supported.
unsupported_call = (
getattr(request, "body_producer", None) is not None
or request.header_callback is not None
or request.streaming_callback is not None
)
if unsupported_call:
response = HTTPResponse(
request,
599,
error=Exception(
"The request (%s) uses AsyncHTTPClient functionality "
"that is not yet supported by VCR.py. Please make the "
"request outside a VCR.py context." % repr(request)
),
request_time=self.io_loop.time() - request.start_time,
)
return callback(response)
vcr_request = Request(request.method, request.url, request.body, headers)
if cassette.can_play_response_for(vcr_request):
vcr_response = cassette.play_response(vcr_request)
headers = httputil.HTTPHeaders()
recorded_headers = vcr_response["headers"]
if isinstance(recorded_headers, dict):
recorded_headers = recorded_headers.items()
for k, vs in recorded_headers:
for v in vs:
headers.add(k, v)
response = HTTPResponse(
request,
code=vcr_response["status"]["code"],
reason=vcr_response["status"]["message"],
headers=headers,
buffer=BytesIO(vcr_response["body"]["string"]),
effective_url=vcr_response.get("url"),
request_time=self.io_loop.time() - request.start_time,
)
return callback(response)
else:
if cassette.write_protected and cassette.filter_request(vcr_request):
response = HTTPResponse(
request,
599,
error=CannotOverwriteExistingCassetteException(
cassette=cassette, failed_request=vcr_request
),
request_time=self.io_loop.time() - request.start_time,
)
return callback(response)
def new_callback(response):
headers = [(k, response.headers.get_list(k)) for k in response.headers.keys()]
vcr_response = {
"status": {"code": response.code, "message": response.reason},
"headers": headers,
"body": {"string": response.body},
"url": response.effective_url,
}
cassette.append(vcr_request, vcr_response)
return callback(response)
real_fetch_impl(self, request, new_callback)
return new_fetch_impl
|
from django.conf.urls import url
from django.contrib import admin
from django.db.models.fields import Field, FieldDoesNotExist
from django.forms import widgets
from django.http import HttpResponse
from django.template.loader import select_template
from django.urls import reverse, NoReverseMatch
from django.utils.html import format_html
from django.utils.translation import pgettext_lazy
from fsm_admin.mixins import FSMTransitionMixin
from shop.conf import app_settings
from shop.models.customer import CustomerModel
from shop.models.order import OrderItemModel, OrderPayment
from shop.modifiers.pool import cart_modifiers_pool
from shop.serializers.order import OrderDetailSerializer
from shop.transition import transition_change_notification
class OrderPaymentInline(admin.TabularInline):
model = OrderPayment
extra = 0
def get_formset(self, request, obj=None, **kwargs):
"""
Convert the field `payment_method` into a select box with all possible payment methods.
"""
choices = [pm.get_choice() for pm in cart_modifiers_pool.get_payment_modifiers()]
kwargs.update(widgets={'payment_method': widgets.Select(choices=choices)})
formset = super().get_formset(request, obj, **kwargs)
return formset
def has_add_permission(self, request, obj=None):
        assert obj is not None, "An Order object cannot be added through the Django-Admin"
return obj.status in ['awaiting_payment', 'refund_payment']
def has_delete_permission(self, request, obj=None):
return False
def get_max_num(self, request, obj=None, **kwargs):
qs = self.model.objects.filter(order=obj)
if self.has_add_permission(request, obj):
return qs.count() + 1
return qs.count()
def get_fields(self, request, obj=None):
if self.has_add_permission(request, obj):
return ['amount', 'transaction_id', 'payment_method', 'created_at']
return ['get_amount', 'transaction_id', 'payment_method', 'created_at']
def get_readonly_fields(self, request, obj=None):
if self.has_add_permission(request, obj):
return ['created_at', 'get_amount']
# unless we expect a payment, set all fields as readonly
return self.get_fields(request, obj)
def get_amount(self, obj):
"""Return amount using correct local format"""
return obj.amount
get_amount.short_description = pgettext_lazy('admin', "Amount Paid")
class OrderItemInline(admin.StackedInline):
model = OrderItemModel
extra = 0
fields = [
('product_code', 'unit_price', 'line_total',),
('quantity',),
'render_as_html_extra',
]
readonly_fields = ['product_code', 'quantity', 'unit_price', 'line_total', 'render_as_html_extra']
template = 'shop/admin/edit_inline/stacked-order.html'
def has_add_permission(self, request, obj=None):
return False
def has_delete_permission(self, request, obj=None):
return False
def get_max_num(self, request, obj=None, **kwargs):
return self.model.objects.filter(order=obj).count()
def render_as_html_extra(self, obj):
item_extra_template = select_template([
'{0}/admin/orderitem-{1}-extra.html'.format(app_settings.APP_LABEL, obj.product.product_model),
'{0}/admin/orderitem-product-extra.html'.format(app_settings.APP_LABEL),
'shop/admin/orderitem-product-extra.html',
])
return item_extra_template.render(obj.extra)
render_as_html_extra.short_description = pgettext_lazy('admin', "Extra data")
class StatusListFilter(admin.SimpleListFilter):
title = pgettext_lazy('admin', "Status")
parameter_name = 'status'
def lookups(self, request, model_admin):
lookups = dict(model_admin.model._transition_targets)
lookups.pop('new')
lookups.pop('created')
return lookups.items()
def queryset(self, request, queryset):
if self.value():
return queryset.filter(status=self.value())
return queryset
class BaseOrderAdmin(FSMTransitionMixin, admin.ModelAdmin):
list_display = ['get_number', 'customer', 'status_name', 'get_total', 'created_at']
list_filter = [StatusListFilter]
fsm_field = ['status']
date_hierarchy = 'created_at'
inlines = [OrderItemInline]
readonly_fields = ['get_number', 'status_name', 'get_total', 'get_subtotal',
'get_customer_link', 'get_outstanding_amount', 'created_at', 'updated_at',
'render_as_html_extra', 'stored_request', 'is_fully_paid']
fields = ['get_number', 'status_name',
('created_at', 'updated_at'),
'get_customer_link',
('get_subtotal', 'get_total', 'get_outstanding_amount', 'is_fully_paid'),
'render_as_html_extra', 'stored_request']
actions = None
change_form_template = 'shop/admin/change_form.html'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.extra_template = select_template([
'{}/admin/order-extra.html'.format(app_settings.APP_LABEL),
'shop/admin/order-extra.html',
])
def get_number(self, obj):
return obj.get_number()
get_number.short_description = pgettext_lazy('admin', "Order number")
def get_total(self, obj):
return str(obj.total)
get_total.short_description = pgettext_lazy('admin', "Total")
def get_subtotal(self, obj):
return str(obj.subtotal)
get_subtotal.short_description = pgettext_lazy('admin', "Subtotal")
def get_outstanding_amount(self, obj):
return str(obj.outstanding_amount)
get_outstanding_amount.short_description = pgettext_lazy('admin', "Outstanding amount")
def is_fully_paid(self, obj):
return obj.is_fully_paid()
is_fully_paid.short_description = pgettext_lazy('admin', "Is fully paid")
is_fully_paid.boolean = True
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def render_as_html_extra(self, obj):
return self.extra_template.render(obj.extra)
render_as_html_extra.short_description = pgettext_lazy('admin', "Extra data")
def get_customer_link(self, obj):
try:
url = reverse('admin:shop_customerproxy_change', args=(obj.customer.pk,))
return format_html('<a href="{0}" target="_new">{1}</a>', url, obj.customer.get_username())
except NoReverseMatch:
return format_html('<strong>{0}</strong>', obj.customer.get_username())
get_customer_link.short_description = pgettext_lazy('admin', "Customer")
def get_search_fields(self, request):
search_fields = list(super().get_search_fields(request))
search_fields.extend(['customer__user__email', 'customer__user__last_name'])
try:
# if CustomerModel contains a number field, let search for it
if isinstance(CustomerModel._meta.get_field('number'), Field):
search_fields.append('customer__number')
except FieldDoesNotExist:
pass
return search_fields
def get_form(self, request, obj=None, **kwargs):
ModelForm = super().get_form(request, obj, **kwargs)
if obj:
# store the requested transition inside the instance, so that the model's `clean()` method can access it
obj._fsm_requested_transition = self._get_requested_transition(request)
return ModelForm
def change_view(self, request, object_id, form_url='', extra_context=None):
        assert object_id, "An Order object cannot be added through the Django-Admin"
current_status = self.get_object(request, object_id).status
response = super().change_view(request, object_id, form_url, extra_context)
order = self.get_object(request, object_id)
if current_status != order.status:
transition_change_notification(order)
return response
class PrintInvoiceAdminMixin:
"""
A customized OrderAdmin class shall inherit from this mixin class, to add
    methods for printing the invoice.
"""
def get_fields(self, request, obj=None):
fields = list(super().get_fields(request, obj))
fields.append('print_out')
return fields
def get_readonly_fields(self, request, obj=None):
readonly_fields = list(super().get_readonly_fields(request, obj))
readonly_fields.append('print_out')
return readonly_fields
def get_urls(self):
my_urls = [
url(r'^(?P<pk>\d+)/print_invoice/$', self.admin_site.admin_view(self.render_invoice),
name='print_invoice'),
]
my_urls.extend(super().get_urls())
return my_urls
def _render_content(self, request, pk, template):
order = self.get_object(request, pk)
context = {'request': request, 'render_label': 'print'}
customer_serializer = app_settings.CUSTOMER_SERIALIZER(order.customer)
order_serializer = OrderDetailSerializer(order, context=context)
return template.render(context={
'customer': customer_serializer.data,
'order': order_serializer.data,
'object': order,
}, request=request)
def render_invoice(self, request, pk=None):
template = select_template([
'{}/print/invoice.html'.format(app_settings.APP_LABEL.lower()),
'shop/print/invoice.html'
])
content = self._render_content(request, pk, template)
return HttpResponse(content)
def print_out(self, obj):
if obj.status in ['ready_for_delivery']:
link = reverse('admin:print_invoice', args=(obj.id,)), pgettext_lazy('admin', "Invoice")
return format_html(
'<span class="object-tools"><a href="{0}" class="viewsitelink" target="_new">{1}</a></span>',
*link)
return ''
print_out.short_description = pgettext_lazy('admin', "Print out")
|
import importlib
import sys
from kombu.exceptions import reraise
def symbol_by_name(name, aliases=None, imp=None, package=None,
sep='.', default=None, **kwargs):
"""Get symbol by qualified name.
The name should be the full dot-separated path to the class::
modulename.ClassName
Example::
celery.concurrency.processes.TaskPool
^- class name
or using ':' to separate module and symbol::
celery.concurrency.processes:TaskPool
If `aliases` is provided, a dict containing short name/long name
mappings, the name is looked up in the aliases first.
Examples:
>>> symbol_by_name('celery.concurrency.processes.TaskPool')
<class 'celery.concurrency.processes.TaskPool'>
>>> symbol_by_name('default', {
... 'default': 'celery.concurrency.processes.TaskPool'})
<class 'celery.concurrency.processes.TaskPool'>
# Does not try to look up non-string names.
>>> from celery.concurrency.processes import TaskPool
>>> symbol_by_name(TaskPool) is TaskPool
True
"""
aliases = {} if not aliases else aliases
if imp is None:
imp = importlib.import_module
if not isinstance(name, str):
return name # already a class
name = aliases.get(name) or name
sep = ':' if ':' in name else sep
module_name, _, cls_name = name.rpartition(sep)
if not module_name:
cls_name, module_name = None, package if package else cls_name
try:
try:
module = imp(module_name, package=package, **kwargs)
except ValueError as exc:
reraise(ValueError,
ValueError(f"Couldn't import {name!r}: {exc}"),
sys.exc_info()[2])
return getattr(module, cls_name) if cls_name else module
except (ImportError, AttributeError):
if default is None:
raise
return default
|
from datetime import datetime
import math
from random import Random
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
CONF_AMP = "amplitude"
CONF_FWHM = "spread"
CONF_MEAN = "mean"
CONF_PERIOD = "period"
CONF_PHASE = "phase"
CONF_SEED = "seed"
CONF_UNIT = "unit"
CONF_RELATIVE_TO_EPOCH = "relative_to_epoch"
DEFAULT_AMP = 1
DEFAULT_FWHM = 0
DEFAULT_MEAN = 0
DEFAULT_NAME = "simulated"
DEFAULT_PERIOD = 60
DEFAULT_PHASE = 0
DEFAULT_SEED = 999
DEFAULT_UNIT = "value"
DEFAULT_RELATIVE_TO_EPOCH = True
ICON = "mdi:chart-line"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_AMP, default=DEFAULT_AMP): vol.Coerce(float),
vol.Optional(CONF_FWHM, default=DEFAULT_FWHM): vol.Coerce(float),
vol.Optional(CONF_MEAN, default=DEFAULT_MEAN): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PERIOD, default=DEFAULT_PERIOD): cv.positive_int,
vol.Optional(CONF_PHASE, default=DEFAULT_PHASE): vol.Coerce(float),
vol.Optional(CONF_SEED, default=DEFAULT_SEED): cv.positive_int,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): cv.string,
vol.Optional(
CONF_RELATIVE_TO_EPOCH, default=DEFAULT_RELATIVE_TO_EPOCH
): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the simulated sensor."""
name = config.get(CONF_NAME)
unit = config.get(CONF_UNIT)
amp = config.get(CONF_AMP)
mean = config.get(CONF_MEAN)
period = config.get(CONF_PERIOD)
phase = config.get(CONF_PHASE)
fwhm = config.get(CONF_FWHM)
seed = config.get(CONF_SEED)
relative_to_epoch = config.get(CONF_RELATIVE_TO_EPOCH)
sensor = SimulatedSensor(
name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
)
add_entities([sensor], True)
class SimulatedSensor(Entity):
"""Class for simulated sensor."""
def __init__(
self, name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
):
"""Init the class."""
self._name = name
self._unit = unit
self._amp = amp
self._mean = mean
self._period = period
self._phase = phase # phase in degrees
self._fwhm = fwhm
self._seed = seed
self._random = Random(seed) # A local seeded Random
self._start_time = (
datetime(1970, 1, 1, tzinfo=dt_util.UTC)
if relative_to_epoch
else dt_util.utcnow()
)
self._relative_to_epoch = relative_to_epoch
self._state = None
def time_delta(self):
"""Return the time delta."""
dt0 = self._start_time
dt1 = dt_util.utcnow()
return dt1 - dt0
def signal_calc(self):
"""Calculate the signal."""
mean = self._mean
amp = self._amp
        time_delta = self.time_delta().total_seconds() * 1e6  # to microseconds
        period = self._period * 1e6  # to microseconds
fwhm = self._fwhm / 2
phase = math.radians(self._phase)
if period == 0:
periodic = 0
else:
periodic = amp * (math.sin((2 * math.pi * time_delta / period) + phase))
noise = self._random.gauss(mu=0, sigma=fwhm)
return round(mean + periodic + noise, 3)
async def async_update(self):
"""Update the sensor."""
self._state = self.signal_calc()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
return {
"amplitude": self._amp,
"mean": self._mean,
"period": self._period,
"phase": self._phase,
"spread": self._fwhm,
"seed": self._seed,
"relative_to_epoch": self._relative_to_epoch,
}
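# A minimal sketch of the signal model used above (illustrative only, and
# runnable only where the Home Assistant imports resolve): the state is
# mean + sinusoid + Gaussian noise, with ``spread``/2 as the noise sigma.
# The microsecond conversion in signal_calc cancels out of the sine ratio,
# so plain seconds give the same value.
if __name__ == "__main__":
    rng = Random(DEFAULT_SEED)
    for t in (0.0, 15.0, 30.0, 45.0):  # seconds since the start time
        periodic = DEFAULT_AMP * math.sin(2 * math.pi * t / DEFAULT_PERIOD)
        noise = rng.gauss(mu=0, sigma=DEFAULT_FWHM / 2)
        print(t, round(DEFAULT_MEAN + periodic + noise, 3))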
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.testing import parameterized
from compare_gan import datasets
from compare_gan import test_utils
from compare_gan.gans import consts as c
from compare_gan.gans import loss_lib
from compare_gan.gans import penalty_lib
from compare_gan.gans.ssgan import SSGAN
import gin
import tensorflow as tf
FLAGS = flags.FLAGS
TEST_ARCHITECTURES = [c.RESNET_CIFAR_ARCH, c.SNDCGAN_ARCH, c.RESNET5_ARCH]
TEST_LOSSES = [loss_lib.non_saturating, loss_lib.hinge]
TEST_PENALTIES = [penalty_lib.no_penalty, penalty_lib.wgangp_penalty]
class SSGANTest(parameterized.TestCase, test_utils.CompareGanTestCase):
def _runSingleTrainingStep(self, architecture, loss_fn, penalty_fn):
parameters = {
"architecture": architecture,
"lambda": 1,
"z_dim": 128,
}
with gin.unlock_config():
gin.bind_parameter("penalty.fn", penalty_fn)
gin.bind_parameter("loss.fn", loss_fn)
model_dir = self._get_empty_model_dir()
run_config = tf.contrib.tpu.RunConfig(
model_dir=model_dir,
tpu_config=tf.contrib.tpu.TPUConfig(iterations_per_loop=1))
dataset = datasets.get_dataset("cifar10")
gan = SSGAN(
dataset=dataset,
parameters=parameters,
model_dir=model_dir,
g_optimizer_fn=tf.train.AdamOptimizer,
g_lr=0.0002,
rotated_batch_size=4)
estimator = gan.as_estimator(run_config, batch_size=2, use_tpu=False)
estimator.train(gan.input_fn, steps=1)
@parameterized.parameters(TEST_ARCHITECTURES)
def testSingleTrainingStepArchitectures(self, architecture):
self._runSingleTrainingStep(architecture, loss_lib.hinge,
penalty_lib.no_penalty)
@parameterized.parameters(TEST_LOSSES)
def testSingleTrainingStepLosses(self, loss_fn):
self._runSingleTrainingStep(c.RESNET_CIFAR_ARCH, loss_fn,
penalty_lib.no_penalty)
@parameterized.parameters(TEST_PENALTIES)
def testSingleTrainingStepPenalties(self, penalty_fn):
self._runSingleTrainingStep(c.RESNET_CIFAR_ARCH, loss_lib.hinge, penalty_fn)
if __name__ == "__main__":
tf.test.main()
|
from homeassistant.const import (
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_CUSTOM_BYPASS,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
SERVICE_ALARM_TRIGGER,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Alarm control panel states."""
hass.states.async_set(
"alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY, {}
)
hass.states.async_set(
"alarm_control_panel.entity_armed_custom_bypass",
STATE_ALARM_ARMED_CUSTOM_BYPASS,
{},
)
hass.states.async_set(
"alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME, {}
)
hass.states.async_set(
"alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT, {}
)
hass.states.async_set(
"alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED, {}
)
hass.states.async_set(
"alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED, {}
)
arm_away_calls = async_mock_service(
hass, "alarm_control_panel", SERVICE_ALARM_ARM_AWAY
)
arm_custom_bypass_calls = async_mock_service(
hass, "alarm_control_panel", SERVICE_ALARM_ARM_CUSTOM_BYPASS
)
arm_home_calls = async_mock_service(
hass, "alarm_control_panel", SERVICE_ALARM_ARM_HOME
)
arm_night_calls = async_mock_service(
hass, "alarm_control_panel", SERVICE_ALARM_ARM_NIGHT
)
disarm_calls = async_mock_service(hass, "alarm_control_panel", SERVICE_ALARM_DISARM)
trigger_calls = async_mock_service(
hass, "alarm_control_panel", SERVICE_ALARM_TRIGGER
)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("alarm_control_panel.entity_armed_away", STATE_ALARM_ARMED_AWAY),
State(
"alarm_control_panel.entity_armed_custom_bypass",
STATE_ALARM_ARMED_CUSTOM_BYPASS,
),
State("alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_HOME),
State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_NIGHT),
State("alarm_control_panel.entity_disarmed", STATE_ALARM_DISARMED),
State("alarm_control_panel.entity_triggered", STATE_ALARM_TRIGGERED),
]
)
assert len(arm_away_calls) == 0
assert len(arm_custom_bypass_calls) == 0
assert len(arm_home_calls) == 0
assert len(arm_night_calls) == 0
assert len(disarm_calls) == 0
assert len(trigger_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[State("alarm_control_panel.entity_triggered", "not_supported")]
)
assert "not_supported" in caplog.text
assert len(arm_away_calls) == 0
assert len(arm_custom_bypass_calls) == 0
assert len(arm_home_calls) == 0
assert len(arm_night_calls) == 0
assert len(disarm_calls) == 0
assert len(trigger_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("alarm_control_panel.entity_armed_away", STATE_ALARM_TRIGGERED),
State(
"alarm_control_panel.entity_armed_custom_bypass", STATE_ALARM_ARMED_AWAY
),
State(
"alarm_control_panel.entity_armed_home", STATE_ALARM_ARMED_CUSTOM_BYPASS
),
State("alarm_control_panel.entity_armed_night", STATE_ALARM_ARMED_HOME),
State("alarm_control_panel.entity_disarmed", STATE_ALARM_ARMED_NIGHT),
State("alarm_control_panel.entity_triggered", STATE_ALARM_DISARMED),
# Should not raise
State("alarm_control_panel.non_existing", "on"),
]
)
assert len(arm_away_calls) == 1
assert arm_away_calls[0].domain == "alarm_control_panel"
assert arm_away_calls[0].data == {
"entity_id": "alarm_control_panel.entity_armed_custom_bypass"
}
assert len(arm_custom_bypass_calls) == 1
assert arm_custom_bypass_calls[0].domain == "alarm_control_panel"
assert arm_custom_bypass_calls[0].data == {
"entity_id": "alarm_control_panel.entity_armed_home"
}
assert len(arm_home_calls) == 1
assert arm_home_calls[0].domain == "alarm_control_panel"
assert arm_home_calls[0].data == {
"entity_id": "alarm_control_panel.entity_armed_night"
}
assert len(arm_night_calls) == 1
assert arm_night_calls[0].domain == "alarm_control_panel"
assert arm_night_calls[0].data == {
"entity_id": "alarm_control_panel.entity_disarmed"
}
assert len(disarm_calls) == 1
assert disarm_calls[0].domain == "alarm_control_panel"
assert disarm_calls[0].data == {"entity_id": "alarm_control_panel.entity_triggered"}
assert len(trigger_calls) == 1
assert trigger_calls[0].domain == "alarm_control_panel"
assert trigger_calls[0].data == {
"entity_id": "alarm_control_panel.entity_armed_away"
}
|
import hashlib
import os
import random
import string
import unittest
from docker_registry.core import compat
import docker_registry.wsgi as wsgi
data_dir = os.path.join(os.path.dirname(__file__), "data")
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
wsgi.app.testing = True
self.http_client = wsgi.app.test_client()
# Override the method so we can set headers for every single call
orig_open = self.http_client.open
def _open(*args, **kwargs):
if 'headers' not in kwargs:
kwargs['headers'] = {}
if 'User-Agent' not in kwargs['headers']:
ua = ('docker/0.10.1 go/go1.2.1 git-commit/3600720 '
'kernel/3.8.0-19-generic os/linux arch/amd64')
kwargs['headers']['User-Agent'] = ua
return orig_open(*args, **kwargs)
self.http_client.open = _open
def gen_random_string(self, length=16):
return ''.join([random.choice(string.ascii_uppercase + string.digits)
for x in range(length)]).lower()
def gen_hex_string(self, length=16):
return ''.join([random.choice(string.hexdigits)
for x in range(length)]).lower()
def set_image_checksum(self, image_id, checksum):
headers = {'X-Docker-Checksum-Payload': checksum}
url = '/v1/images/{0}/checksum'.format(image_id)
resp = self.http_client.put(url, headers=headers)
self.assertEqual(resp.status_code, 200, resp.data)
# Once the checksum test passed, the image is "locked"
resp = self.http_client.put(url, headers=headers)
self.assertEqual(resp.status_code, 409, resp.data)
        # Cannot set the checksum on a non-existing image
url = '/v1/images/{0}/checksum'.format(self.gen_random_string())
resp = self.http_client.put(url, headers=headers)
self.assertEqual(resp.status_code, 404, resp.data)
def upload_image(self, image_id, parent_id, layer):
json_obj = {
'id': image_id
}
if parent_id:
json_obj['parent'] = parent_id
json_data = compat.json.dumps(json_obj)
h = hashlib.sha256(json_data + '\n')
h.update(layer)
layer_checksum = 'sha256:{0}'.format(h.hexdigest())
headers = {'X-Docker-Payload-Checksum': layer_checksum}
resp = self.http_client.put('/v1/images/{0}/json'.format(image_id),
headers=headers,
data=json_data)
self.assertEqual(resp.status_code, 200, resp.data)
# Make sure I cannot download the image before push is complete
resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
self.assertEqual(resp.status_code, 400, resp.data)
layer_file = compat.StringIO(layer)
resp = self.http_client.put('/v1/images/{0}/layer'.format(image_id),
input_stream=layer_file)
layer_file.close()
self.assertEqual(resp.status_code, 200, resp.data)
self.set_image_checksum(image_id, layer_checksum)
# Push done, test reading the image
resp = self.http_client.get('/v1/images/{0}/json'.format(image_id))
self.assertEqual(resp.status_code, 200, resp.data)
self.assertEqual(resp.headers.get('x-docker-size'), str(len(layer)))
self.assertEqual(resp.headers['x-docker-checksum-payload'],
layer_checksum)
|
import pytest
import rumps
# Reach into the private _notifications module so the tests can exercise its
# internals directly.
notifications = rumps._notifications
notify = notifications.notify
_clicked = notifications._clicked
on_notification = notifications.on_notification
Notification = notifications.Notification
class NSUserNotificationCenterMock:
def __init__(self):
self.ns_user_notification = None
def scheduleNotification_(self, ns_user_notification):
self.ns_user_notification = ns_user_notification
def removeDeliveredNotification_(self, ns_user_notification):
assert ns_user_notification is self.ns_user_notification
self.ns_user_notification = None
class TestNotify:
path = 'rumps._notifications._default_user_notification_center'
def test_simple(self, mocker):
"""Simple notification is created and scheduled. The internal callback
handler does not raise any exceptions when processing the notification.
"""
ns_user_notification_center_mock = NSUserNotificationCenterMock()
mocker.patch(self.path, new=lambda: ns_user_notification_center_mock)
assert ns_user_notification_center_mock.ns_user_notification is None
notify(
'a',
'b',
'c'
)
assert ns_user_notification_center_mock.ns_user_notification is not None
_clicked(
ns_user_notification_center_mock,
ns_user_notification_center_mock.ns_user_notification
)
assert ns_user_notification_center_mock.ns_user_notification is None
def test_with_data(self, mocker):
"""Notification that contains serializable data."""
ns_user_notification_center_mock = NSUserNotificationCenterMock()
mocker.patch(self.path, new=lambda: ns_user_notification_center_mock)
@on_notification
def user_defined_notification_callback(notification):
assert notification.data == ['any', {'pickable': 'object'}, 'by', 'default']
assert ns_user_notification_center_mock.ns_user_notification is None
notify(
'a',
'b',
'c',
data=['any', {'pickable': 'object'}, 'by', 'default']
)
assert ns_user_notification_center_mock.ns_user_notification is not None
_clicked(
ns_user_notification_center_mock,
ns_user_notification_center_mock.ns_user_notification
)
assert ns_user_notification_center_mock.ns_user_notification is None
class TestNotification:
def test_can_access_data(self):
n = Notification(None, 'some test data')
assert n.data == 'some test data'
def test_can_use_data_as_mapping(self):
n = Notification(None, {2: 22, 3: 333})
assert n[2] == 22
assert 3 in n
assert len(n) == 2
assert list(n) == [2, 3]
def test_raises_typeerror_when_no_mapping(self):
n = Notification(None, [4, 55, 666])
with pytest.raises(TypeError) as excinfo:
n[2]
assert 'cannot be used as a mapping' in str(excinfo.value)
class TestDefaultUserNotificationCenter:
def test_basic(self):
"""Ensure we can obtain a PyObjC default notification center object."""
ns_user_notification_center = notifications._default_user_notification_center()
assert type(ns_user_notification_center).__name__ == '_NSConcreteUserNotificationCenter'
class TestInitNSApp:
def test_calls(self, mocker):
"""Is the method called as expected?"""
path = 'rumps._notifications._default_user_notification_center'
mock_func = mocker.patch(path)
ns_app_fake = object()
notifications._init_nsapp(ns_app_fake)
mock_func().setDelegate_.assert_called_once_with(ns_app_fake)
def test_exists(self):
"""Does the method exist in the framework?"""
ns_user_notification_center = notifications._default_user_notification_center()
ns_app_fake = object()
ns_user_notification_center.setDelegate_(ns_app_fake)
|
import io
import logging
import subprocess
import urllib.parse
from smart_open import utils
logger = logging.getLogger(__name__)
SCHEME = 'hdfs'
URI_EXAMPLES = (
'hdfs:///path/file',
'hdfs://path/file',
)
def parse_uri(uri_as_string):
split_uri = urllib.parse.urlsplit(uri_as_string)
assert split_uri.scheme == SCHEME
uri_path = split_uri.netloc + split_uri.path
uri_path = "/" + uri_path.lstrip("/")
if not uri_path:
raise RuntimeError("invalid HDFS URI: %r" % uri_as_string)
return dict(scheme=SCHEME, uri_path=uri_path)
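# For illustration (not part of the original module):
#   parse_uri('hdfs:///user/alice/data.txt')
#   -> {'scheme': 'hdfs', 'uri_path': '/user/alice/data.txt'}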
def open_uri(uri, mode, transport_params):
utils.check_kwargs(open, transport_params)
parsed_uri = parse_uri(uri)
fobj = open(parsed_uri['uri_path'], mode)
fobj.name = parsed_uri['uri_path'].split('/')[-1]
return fobj
def open(uri, mode):
if mode == 'rb':
return CliRawInputBase(uri)
elif mode == 'wb':
return CliRawOutputBase(uri)
else:
raise NotImplementedError('hdfs support for mode %r not implemented' % mode)
class CliRawInputBase(io.RawIOBase):
"""Reads bytes from HDFS via the "hdfs dfs" command-line interface.
Implements the io.RawIOBase interface of the standard library.
"""
def __init__(self, uri):
self._uri = uri
self._sub = subprocess.Popen(["hdfs", "dfs", '-cat', self._uri], stdout=subprocess.PIPE)
#
# This member is part of the io.BufferedIOBase interface.
#
self.raw = None
#
# Override some methods from io.IOBase.
#
def close(self):
"""Flush and close this stream."""
logger.debug("close: called")
self._sub.terminate()
self._sub = None
def readable(self):
"""Return True if the stream can be read from."""
return self._sub is not None
def seekable(self):
"""If False, seek(), tell() and truncate() will raise IOError."""
return False
#
# io.RawIOBase methods.
#
def detach(self):
"""Unsupported."""
raise io.UnsupportedOperation
def read(self, size=-1):
"""Read up to size bytes from the object and return them."""
return self._sub.stdout.read(size)
def read1(self, size=-1):
"""This is the same as read()."""
return self.read(size=size)
def readinto(self, b):
"""Read up to len(b) bytes into b, and return the number of bytes
read."""
data = self.read(len(b))
if not data:
return 0
b[:len(data)] = data
return len(data)
class CliRawOutputBase(io.RawIOBase):
"""Writes bytes to HDFS via the "hdfs dfs" command-line interface.
Implements the io.RawIOBase interface of the standard library.
"""
def __init__(self, uri):
self._uri = uri
self._sub = subprocess.Popen(["hdfs", "dfs", '-put', '-f', '-', self._uri],
stdin=subprocess.PIPE)
#
        # This member is part of the io.BufferedIOBase interface.
#
self.raw = None
def close(self):
self.flush()
self._sub.stdin.close()
self._sub.wait()
def flush(self):
self._sub.stdin.flush()
    def writable(self):
        """Return True if this object is writable."""
return self._sub is not None
def seekable(self):
"""If False, seek(), tell() and truncate() will raise IOError."""
return False
def write(self, b):
self._sub.stdin.write(b)
#
# io.IOBase methods.
#
def detach(self):
raise io.UnsupportedOperation("detach() not supported")
|
import numpy as np
from chainer import cuda
from chainercv.links.model.faster_rcnn.utils.bbox2loc import bbox2loc
from chainercv.transforms.image.resize import resize
from chainercv.utils.bbox.bbox_iou import bbox_iou
class ProposalTargetCreator(object):
"""Assign ground truth classes, bounding boxes and masks to given RoIs.
The :meth:`__call__` of this class generates training targets
for each object proposal.
This is used to train FCIS [#FCIS]_.
.. [#FCIS] Yi Li, Haozhi Qi, Jifeng Dai, Xiangyang Ji, Yichen Wei. \
Fully Convolutional Instance-aware Semantic Segmentation. CVPR 2017.
Args:
n_sample (int): The number of sampled regions.
pos_ratio (float): Fraction of regions that is labeled as a
foreground.
pos_iou_thresh (float): IoU threshold for a RoI to be considered as a
foreground.
        neg_iou_thresh_hi (float): RoI is considered to be the background
            if IoU is in
            [:obj:`neg_iou_thresh_lo`, :obj:`neg_iou_thresh_hi`).
        neg_iou_thresh_lo (float): See above.
binary_thresh (float): Threshold for resized mask.
"""
def __init__(
self, n_sample=128,
pos_ratio=0.25, pos_iou_thresh=0.5,
neg_iou_thresh_hi=0.5, neg_iou_thresh_lo=0.1,
binary_thresh=0.4):
self.n_sample = n_sample
self.pos_ratio = pos_ratio
self.pos_iou_thresh = pos_iou_thresh
self.neg_iou_thresh_hi = neg_iou_thresh_hi
self.neg_iou_thresh_lo = neg_iou_thresh_lo
self.binary_thresh = binary_thresh
def __call__(
self, roi, mask, label, bbox,
loc_normalize_mean=(0., 0., 0., 0.),
loc_normalize_std=(0.2, 0.2, 0.5, 0.5),
mask_size=(21, 21),
):
"""Assigns ground truth to sampled proposals.
This function samples total of :obj:`self.n_sample` RoIs
from the combination of :obj:`roi`, :obj:`mask`, :obj:`label`
        and :obj:`bbox`. The RoIs are assigned with the ground truth class
labels as well as bounding box offsets and scales to match the ground
truth bounding boxes. As many as :obj:`pos_ratio * self.n_sample` RoIs
are sampled as foregrounds.
Offsets and scales of bounding boxes are calculated using
:func:`chainercv.links.model.faster_rcnn.bbox2loc`.
        Also, the types of the input arrays and output arrays are the same.
        Here are the notations.
* :math:`S` is the total number of sampled RoIs, which equals \
:obj:`self.n_sample`.
* :math:`L` is number of object classes possibly including the \
background.
* :math:`H` is the image height.
* :math:`W` is the image width.
* :math:`RH` is the mask height.
* :math:`RW` is the mask width.
Args:
roi (array): Region of Interests (RoIs) from which we sample.
Its shape is :math:`(R, 4)`
            mask (array): The ground truth masks.
Its shape is :math:`(R', H, W)`.
label (array): Ground truth bounding box labels. Its shape
is :math:`(R',)`. Its range is :math:`[0, L - 1]`, where
:math:`L` is the number of foreground classes.
bbox (array): The coordinates of ground truth bounding boxes.
Its shape is :math:`(R', 4)`.
loc_normalize_mean (tuple of four floats): Mean values to normalize
coordinates of bounding boxes.
loc_normalize_std (tuple of four floats): Standard deviation of
the coordinates of bounding boxes.
mask_size (tuple of int or int): Generated mask size, which is
equal to :math:`(RH, RW)`.
Returns:
(array, array, array, array):
* **sample_roi**: Regions of interests that are sampled. \
Its shape is :math:`(S, 4)`.
* **gt_roi_mask**: Masks assigned to sampled RoIs. Its shape is \
:math:`(S, RH, RW)`.
* **gt_roi_label**: Labels assigned to sampled RoIs. Its shape is \
:math:`(S,)`. Its range is :math:`[0, L]`. The label with \
value 0 is the background.
* **gt_roi_loc**: Offsets and scales to match \
the sampled RoIs to the ground truth bounding boxes. \
Its shape is :math:`(S, 4)`.
"""
xp = cuda.get_array_module(roi)
roi = cuda.to_cpu(roi)
mask = cuda.to_cpu(mask)
label = cuda.to_cpu(label)
bbox = cuda.to_cpu(bbox)
if not isinstance(mask_size, tuple):
mask_size = (mask_size, mask_size)
n_bbox, _ = bbox.shape
roi = np.concatenate((roi, bbox), axis=0)
if self.n_sample is None:
n_sample = roi.shape[0]
else:
n_sample = self.n_sample
pos_roi_per_image = np.round(n_sample * self.pos_ratio)
iou = bbox_iou(roi, bbox)
gt_assignment = iou.argmax(axis=1)
max_iou = iou.max(axis=1)
# Offset range of classes from [0, n_fg_class - 1] to [1, n_fg_class].
# The label with value 0 is the background.
gt_roi_label = label[gt_assignment] + 1
# Select foreground RoIs as those with >= pos_iou_thresh IoU.
pos_index = np.where(max_iou >= self.pos_iou_thresh)[0]
pos_roi_per_this_image = int(min(pos_roi_per_image, pos_index.size))
if pos_index.size > 0:
pos_index = np.random.choice(
pos_index, size=pos_roi_per_this_image, replace=False)
# Select background RoIs as those within
# [neg_iou_thresh_lo, neg_iou_thresh_hi).
neg_index = np.where((max_iou < self.neg_iou_thresh_hi) &
(max_iou >= self.neg_iou_thresh_lo))[0]
        neg_roi_per_this_image = n_sample - pos_roi_per_this_image
neg_roi_per_this_image = int(min(neg_roi_per_this_image,
neg_index.size))
if neg_index.size > 0:
neg_index = np.random.choice(
neg_index, size=neg_roi_per_this_image, replace=False)
# The indices that we're selecting (both foreground and background).
keep_index = np.append(pos_index, neg_index)
gt_roi_label = gt_roi_label[keep_index]
gt_roi_label[pos_roi_per_this_image:] = 0 # negative labels --> 0
sample_roi = roi[keep_index]
# locs
# Compute offsets and scales to match sampled RoIs to the GTs.
loc_normalize_mean = np.array(loc_normalize_mean, np.float32)
loc_normalize_std = np.array(loc_normalize_std, np.float32)
gt_roi_loc = bbox2loc(sample_roi, bbox[gt_assignment[keep_index]])
gt_roi_loc = gt_roi_loc - loc_normalize_mean
gt_roi_loc = gt_roi_loc / loc_normalize_std
# masks
gt_roi_mask = -1 * np.ones(
(len(keep_index), mask_size[0], mask_size[1]),
dtype=np.int32)
for i, pos_ind in enumerate(pos_index):
            bb = np.round(sample_roi[i]).astype(np.int32)
gt_msk = mask[gt_assignment[pos_ind]]
gt_roi_msk = gt_msk[bb[0]:bb[2], bb[1]:bb[3]]
gt_roi_msk = resize(
gt_roi_msk.astype(np.float32)[None], mask_size)[0]
            gt_roi_msk = (gt_roi_msk >= self.binary_thresh).astype(np.int32)
gt_roi_mask[i] = gt_roi_msk
if xp != np:
sample_roi = cuda.to_gpu(sample_roi)
gt_roi_mask = cuda.to_gpu(gt_roi_mask)
gt_roi_label = cuda.to_gpu(gt_roi_label)
gt_roi_loc = cuda.to_gpu(gt_roi_loc)
return sample_roi, gt_roi_mask, gt_roi_label, gt_roi_loc
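# A plain-NumPy sketch of the sampling rule implemented above (illustrative
# only, with made-up IoU values; running the module still needs chainer and
# chainercv installed): RoIs with IoU >= pos_iou_thresh become foreground,
# RoIs with IoU in [neg_iou_thresh_lo, neg_iou_thresh_hi) become background,
# and everything else is discarded.
if __name__ == '__main__':
    max_iou = np.array([0.9, 0.6, 0.45, 0.3, 0.05])
    pos_index = np.where(max_iou >= 0.5)[0]
    neg_index = np.where((max_iou < 0.5) & (max_iou >= 0.1))[0]
    print('foreground:', pos_index)  # -> [0 1]
    print('background:', neg_index)  # -> [2 3]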
|
import asyncio
import base64
import collections
from contextlib import suppress
from datetime import timedelta
import hashlib
import logging
import os
from random import SystemRandom
from aiohttp import web
import async_timeout
import attr
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_EXTRA,
DOMAIN as DOMAIN_MP,
SERVICE_PLAY_MEDIA,
)
from homeassistant.components.stream import request_stream
from homeassistant.components.stream.const import (
CONF_DURATION,
CONF_LOOKBACK,
CONF_STREAM_SOURCE,
DOMAIN as DOMAIN_STREAM,
FORMAT_CONTENT_TYPE,
OUTPUT_FORMATS,
SERVICE_RECORD,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_FILENAME,
CONTENT_TYPE_MULTIPART,
EVENT_HOMEASSISTANT_START,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
)
from homeassistant.helpers.entity import Entity, entity_sources
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.network import get_url
from homeassistant.loader import bind_hass
from .const import DATA_CAMERA_PREFS, DOMAIN
from .prefs import CameraPreferences
# mypy: allow-untyped-calls, allow-untyped-defs
_LOGGER = logging.getLogger(__name__)
SERVICE_ENABLE_MOTION = "enable_motion_detection"
SERVICE_DISABLE_MOTION = "disable_motion_detection"
SERVICE_SNAPSHOT = "snapshot"
SERVICE_PLAY_STREAM = "play_stream"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
ATTR_FILENAME = "filename"
ATTR_MEDIA_PLAYER = "media_player"
ATTR_FORMAT = "format"
STATE_RECORDING = "recording"
STATE_STREAMING = "streaming"
STATE_IDLE = "idle"
# Bitfield of features supported by the camera entity
SUPPORT_ON_OFF = 1
SUPPORT_STREAM = 2
DEFAULT_CONTENT_TYPE = "image/jpeg"
ENTITY_IMAGE_URL = "/api/camera_proxy/{0}?token={1}"
TOKEN_CHANGE_INTERVAL = timedelta(minutes=5)
_RND = SystemRandom()
MIN_STREAM_INTERVAL = 0.5 # seconds
CAMERA_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids})
CAMERA_SERVICE_SNAPSHOT = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(ATTR_FILENAME): cv.template}
)
CAMERA_SERVICE_PLAY_STREAM = CAMERA_SERVICE_SCHEMA.extend(
{
vol.Required(ATTR_MEDIA_PLAYER): cv.entities_domain(DOMAIN_MP),
vol.Optional(ATTR_FORMAT, default="hls"): vol.In(OUTPUT_FORMATS),
}
)
CAMERA_SERVICE_RECORD = CAMERA_SERVICE_SCHEMA.extend(
{
vol.Required(CONF_FILENAME): cv.template,
vol.Optional(CONF_DURATION, default=30): vol.Coerce(int),
vol.Optional(CONF_LOOKBACK, default=0): vol.Coerce(int),
}
)
WS_TYPE_CAMERA_THUMBNAIL = "camera_thumbnail"
SCHEMA_WS_CAMERA_THUMBNAIL = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{
vol.Required("type"): WS_TYPE_CAMERA_THUMBNAIL,
vol.Required("entity_id"): cv.entity_id,
}
)
@attr.s
class Image:
"""Represent an image."""
content_type: str = attr.ib()
content: bytes = attr.ib()
@bind_hass
async def async_request_stream(hass, entity_id, fmt):
"""Request a stream for a camera entity."""
camera = _get_camera_from_entity_id(hass, entity_id)
camera_prefs = hass.data[DATA_CAMERA_PREFS].get(entity_id)
async with async_timeout.timeout(10):
source = await camera.stream_source()
if not source:
raise HomeAssistantError(
f"{camera.entity_id} does not support play stream service"
)
return request_stream(
hass,
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.stream_options,
)
@bind_hass
async def async_get_image(hass, entity_id, timeout=10):
"""Fetch an image from a camera entity."""
camera = _get_camera_from_entity_id(hass, entity_id)
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
async with async_timeout.timeout(timeout):
image = await camera.async_camera_image()
if image:
return Image(camera.content_type, image)
raise HomeAssistantError("Unable to get image")
@bind_hass
async def async_get_stream_source(hass, entity_id):
"""Fetch the stream source for a camera entity."""
camera = _get_camera_from_entity_id(hass, entity_id)
return await camera.stream_source()
@bind_hass
async def async_get_mjpeg_stream(hass, request, entity_id):
"""Fetch an mjpeg stream from a camera entity."""
camera = _get_camera_from_entity_id(hass, entity_id)
return await camera.handle_async_mjpeg_stream(request)
async def async_get_still_stream(request, image_cb, content_type, interval):
"""Generate an HTTP MJPEG stream from camera images.
This method must be run in the event loop.
"""
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_MULTIPART.format("--frameboundary")
await response.prepare(request)
async def write_to_mjpeg_stream(img_bytes):
"""Write image to stream."""
await response.write(
bytes(
"--frameboundary\r\n"
"Content-Type: {}\r\n"
"Content-Length: {}\r\n\r\n".format(content_type, len(img_bytes)),
"utf-8",
)
+ img_bytes
+ b"\r\n"
)
last_image = None
while True:
img_bytes = await image_cb()
if not img_bytes:
break
if img_bytes != last_image:
await write_to_mjpeg_stream(img_bytes)
            # Chrome seems to always ignore the first image,
            # so send it twice.
if last_image is None:
await write_to_mjpeg_stream(img_bytes)
last_image = img_bytes
await asyncio.sleep(interval)
return response
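# For illustration, each frame written by the loop above looks like:
#   --frameboundary\r\n
#   Content-Type: <content_type>\r\n
#   Content-Length: <len(img_bytes)>\r\n\r\n
#   <img_bytes>\r\n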
def _get_camera_from_entity_id(hass, entity_id):
"""Get camera component from entity_id."""
component = hass.data.get(DOMAIN)
if component is None:
raise HomeAssistantError("Camera integration not set up")
camera = component.get_entity(entity_id)
if camera is None:
raise HomeAssistantError("Camera not found")
if not camera.is_on:
raise HomeAssistantError("Camera is off")
return camera
async def async_setup(hass, config):
"""Set up the camera component."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
prefs = CameraPreferences(hass)
await prefs.async_initialize()
hass.data[DATA_CAMERA_PREFS] = prefs
hass.http.register_view(CameraImageView(component))
hass.http.register_view(CameraMjpegStream(component))
hass.components.websocket_api.async_register_command(
WS_TYPE_CAMERA_THUMBNAIL, websocket_camera_thumbnail, SCHEMA_WS_CAMERA_THUMBNAIL
)
hass.components.websocket_api.async_register_command(ws_camera_stream)
hass.components.websocket_api.async_register_command(websocket_get_prefs)
hass.components.websocket_api.async_register_command(websocket_update_prefs)
await component.async_setup(config)
async def preload_stream(_):
for camera in component.entities:
camera_prefs = prefs.get(camera.entity_id)
if not camera_prefs.preload_stream:
continue
async with async_timeout.timeout(10):
source = await camera.stream_source()
if not source:
continue
request_stream(hass, source, keepalive=True, options=camera.stream_options)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, preload_stream)
@callback
def update_tokens(time):
"""Update tokens of the entities."""
for entity in component.entities:
entity.async_update_token()
entity.async_write_ha_state()
hass.helpers.event.async_track_time_interval(update_tokens, TOKEN_CHANGE_INTERVAL)
component.async_register_entity_service(
SERVICE_ENABLE_MOTION, CAMERA_SERVICE_SCHEMA, "async_enable_motion_detection"
)
component.async_register_entity_service(
SERVICE_DISABLE_MOTION, CAMERA_SERVICE_SCHEMA, "async_disable_motion_detection"
)
component.async_register_entity_service(
SERVICE_TURN_OFF, CAMERA_SERVICE_SCHEMA, "async_turn_off"
)
component.async_register_entity_service(
SERVICE_TURN_ON, CAMERA_SERVICE_SCHEMA, "async_turn_on"
)
component.async_register_entity_service(
SERVICE_SNAPSHOT, CAMERA_SERVICE_SNAPSHOT, async_handle_snapshot_service
)
component.async_register_entity_service(
SERVICE_PLAY_STREAM,
CAMERA_SERVICE_PLAY_STREAM,
async_handle_play_stream_service,
)
component.async_register_entity_service(
SERVICE_RECORD, CAMERA_SERVICE_RECORD, async_handle_record_service
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class Camera(Entity):
"""The base class for camera entities."""
def __init__(self):
"""Initialize a camera."""
self.is_streaming = False
self.stream_options = {}
self.content_type = DEFAULT_CONTENT_TYPE
self.access_tokens: collections.deque = collections.deque([], 2)
self.async_update_token()
@property
def should_poll(self):
"""No need to poll cameras."""
return False
@property
def entity_picture(self):
"""Return a link to the camera feed as entity picture."""
return ENTITY_IMAGE_URL.format(self.entity_id, self.access_tokens[-1])
@property
def supported_features(self):
"""Flag supported features."""
return 0
@property
def is_recording(self):
"""Return true if the device is recording."""
return False
@property
def brand(self):
"""Return the camera brand."""
return None
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return None
@property
def model(self):
"""Return the camera model."""
return None
@property
def frame_interval(self):
"""Return the interval between frames of the mjpeg stream."""
return 0.5
async def stream_source(self):
"""Return the source of the stream."""
return None
def camera_image(self):
"""Return bytes of camera image."""
raise NotImplementedError()
async def async_camera_image(self):
"""Return bytes of camera image."""
return await self.hass.async_add_executor_job(self.camera_image)
async def handle_async_still_stream(self, request, interval):
"""Generate an HTTP MJPEG stream from camera images."""
return await async_get_still_stream(
request, self.async_camera_image, self.content_type, interval
)
async def handle_async_mjpeg_stream(self, request):
"""Serve an HTTP MJPEG stream from the camera.
This method can be overridden by camera platforms to proxy
a direct stream from the camera.
"""
return await self.handle_async_still_stream(request, self.frame_interval)
@property
def state(self):
"""Return the camera state."""
if self.is_recording:
return STATE_RECORDING
if self.is_streaming:
return STATE_STREAMING
return STATE_IDLE
@property
def is_on(self):
"""Return true if on."""
return True
def turn_off(self):
"""Turn off camera."""
raise NotImplementedError()
async def async_turn_off(self):
"""Turn off camera."""
await self.hass.async_add_executor_job(self.turn_off)
def turn_on(self):
"""Turn off camera."""
raise NotImplementedError()
async def async_turn_on(self):
"""Turn off camera."""
await self.hass.async_add_executor_job(self.turn_on)
def enable_motion_detection(self):
"""Enable motion detection in the camera."""
raise NotImplementedError()
async def async_enable_motion_detection(self):
"""Call the job and enable motion detection."""
await self.hass.async_add_executor_job(self.enable_motion_detection)
def disable_motion_detection(self):
"""Disable motion detection in camera."""
raise NotImplementedError()
async def async_disable_motion_detection(self):
"""Call the job and disable motion detection."""
await self.hass.async_add_executor_job(self.disable_motion_detection)
@property
def state_attributes(self):
"""Return the camera state attributes."""
attrs = {"access_token": self.access_tokens[-1]}
if self.model:
attrs["model_name"] = self.model
if self.brand:
attrs["brand"] = self.brand
if self.motion_detection_enabled:
attrs["motion_detection"] = self.motion_detection_enabled
return attrs
@callback
def async_update_token(self):
"""Update the used token."""
self.access_tokens.append(
hashlib.sha256(_RND.getrandbits(256).to_bytes(32, "little")).hexdigest()
)
class CameraView(HomeAssistantView):
"""Base CameraView."""
requires_auth = False
def __init__(self, component: EntityComponent) -> None:
"""Initialize a basic camera view."""
self.component = component
async def get(self, request: web.Request, entity_id: str) -> web.Response:
"""Start a GET request."""
camera = self.component.get_entity(entity_id)
if camera is None:
raise web.HTTPNotFound()
authenticated = (
request[KEY_AUTHENTICATED]
or request.query.get("token") in camera.access_tokens
)
if not authenticated:
raise web.HTTPUnauthorized()
if not camera.is_on:
_LOGGER.debug("Camera is off")
raise web.HTTPServiceUnavailable()
return await self.handle(request, camera)
async def handle(self, request, camera):
"""Handle the camera request."""
raise NotImplementedError()
class CameraImageView(CameraView):
"""Camera view to serve an image."""
url = "/api/camera_proxy/{entity_id}"
name = "api:camera:image"
async def handle(self, request: web.Request, camera: Camera) -> web.Response:
"""Serve camera image."""
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
async with async_timeout.timeout(10):
image = await camera.async_camera_image()
if image:
return web.Response(body=image, content_type=camera.content_type)
raise web.HTTPInternalServerError()
class CameraMjpegStream(CameraView):
"""Camera View to serve an MJPEG stream."""
url = "/api/camera_proxy_stream/{entity_id}"
name = "api:camera:stream"
async def handle(self, request: web.Request, camera: Camera) -> web.Response:
"""Serve camera stream, possibly with interval."""
interval = request.query.get("interval")
if interval is None:
return await camera.handle_async_mjpeg_stream(request)
try:
# Compose camera stream from stills
interval = float(request.query.get("interval"))
if interval < MIN_STREAM_INTERVAL:
raise ValueError(f"Stream interval must be be > {MIN_STREAM_INTERVAL}")
return await camera.handle_async_still_stream(request, interval)
except ValueError as err:
raise web.HTTPBadRequest() from err
@websocket_api.async_response
async def websocket_camera_thumbnail(hass, connection, msg):
"""Handle get camera thumbnail websocket command.
Async friendly.
"""
_LOGGER.warning("The websocket command 'camera_thumbnail' has been deprecated")
try:
image = await async_get_image(hass, msg["entity_id"])
await connection.send_big_result(
msg["id"],
{
"content_type": image.content_type,
"content": base64.b64encode(image.content).decode("utf-8"),
},
)
except HomeAssistantError:
connection.send_message(
websocket_api.error_message(
msg["id"], "image_fetch_failed", "Unable to fetch image"
)
)
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required("type"): "camera/stream",
vol.Required("entity_id"): cv.entity_id,
vol.Optional("format", default="hls"): vol.In(OUTPUT_FORMATS),
}
)
async def ws_camera_stream(hass, connection, msg):
"""Handle get camera stream websocket command.
Async friendly.
"""
try:
entity_id = msg["entity_id"]
camera = _get_camera_from_entity_id(hass, entity_id)
camera_prefs = hass.data[DATA_CAMERA_PREFS].get(entity_id)
async with async_timeout.timeout(10):
source = await camera.stream_source()
if not source:
raise HomeAssistantError(
f"{camera.entity_id} does not support play stream service"
)
fmt = msg["format"]
url = request_stream(
hass,
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.stream_options,
)
connection.send_result(msg["id"], {"url": url})
except HomeAssistantError as ex:
_LOGGER.error("Error requesting stream: %s", ex)
connection.send_error(msg["id"], "start_stream_failed", str(ex))
except asyncio.TimeoutError:
_LOGGER.error("Timeout getting stream source")
connection.send_error(
msg["id"], "start_stream_failed", "Timeout getting stream source"
)
@websocket_api.async_response
@websocket_api.websocket_command(
{vol.Required("type"): "camera/get_prefs", vol.Required("entity_id"): cv.entity_id}
)
async def websocket_get_prefs(hass, connection, msg):
"""Handle request for account info."""
prefs = hass.data[DATA_CAMERA_PREFS].get(msg["entity_id"])
connection.send_result(msg["id"], prefs.as_dict())
@websocket_api.async_response
@websocket_api.websocket_command(
{
vol.Required("type"): "camera/update_prefs",
vol.Required("entity_id"): cv.entity_id,
vol.Optional("preload_stream"): bool,
}
)
async def websocket_update_prefs(hass, connection, msg):
"""Handle request for account info."""
prefs = hass.data[DATA_CAMERA_PREFS]
changes = dict(msg)
changes.pop("id")
changes.pop("type")
entity_id = changes.pop("entity_id")
await prefs.async_update(entity_id, **changes)
connection.send_result(msg["id"], prefs.get(entity_id).as_dict())
async def async_handle_snapshot_service(camera, service):
"""Handle snapshot services calls."""
hass = camera.hass
filename = service.data[ATTR_FILENAME]
filename.hass = hass
snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera})
    # check if we are allowed to access that file
if not hass.config.is_allowed_path(snapshot_file):
_LOGGER.error("Can't write %s, no access to path!", snapshot_file)
return
image = await camera.async_camera_image()
def _write_image(to_file, image_data):
"""Executor helper to write image."""
if not os.path.exists(os.path.dirname(to_file)):
os.makedirs(os.path.dirname(to_file), exist_ok=True)
with open(to_file, "wb") as img_file:
img_file.write(image_data)
try:
await hass.async_add_executor_job(_write_image, snapshot_file, image)
except OSError as err:
_LOGGER.error("Can't write image to file: %s", err)
async def async_handle_play_stream_service(camera, service_call):
"""Handle play stream services calls."""
async with async_timeout.timeout(10):
source = await camera.stream_source()
if not source:
raise HomeAssistantError(
f"{camera.entity_id} does not support play stream service"
)
hass = camera.hass
camera_prefs = hass.data[DATA_CAMERA_PREFS].get(camera.entity_id)
fmt = service_call.data[ATTR_FORMAT]
entity_ids = service_call.data[ATTR_MEDIA_PLAYER]
url = request_stream(
hass,
source,
fmt=fmt,
keepalive=camera_prefs.preload_stream,
options=camera.stream_options,
)
data = {
ATTR_MEDIA_CONTENT_ID: f"{get_url(hass)}{url}",
ATTR_MEDIA_CONTENT_TYPE: FORMAT_CONTENT_TYPE[fmt],
}
# It is required to send a different payload for cast media players
cast_entity_ids = [
entity
for entity, source in entity_sources(hass).items()
if entity in entity_ids and source["domain"] == "cast"
]
other_entity_ids = list(set(entity_ids) - set(cast_entity_ids))
if cast_entity_ids:
await hass.services.async_call(
DOMAIN_MP,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: cast_entity_ids,
**data,
ATTR_MEDIA_EXTRA: {
"stream_type": "LIVE",
"media_info": {
"hlsVideoSegmentFormat": "fmp4",
},
},
},
blocking=True,
context=service_call.context,
)
if other_entity_ids:
await hass.services.async_call(
DOMAIN_MP,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: other_entity_ids,
**data,
},
blocking=True,
context=service_call.context,
)
async def async_handle_record_service(camera, call):
"""Handle stream recording service calls."""
async with async_timeout.timeout(10):
source = await camera.stream_source()
if not source:
raise HomeAssistantError(f"{camera.entity_id} does not support record service")
hass = camera.hass
filename = call.data[CONF_FILENAME]
filename.hass = hass
video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera})
data = {
CONF_STREAM_SOURCE: source,
CONF_FILENAME: video_path,
CONF_DURATION: call.data[CONF_DURATION],
CONF_LOOKBACK: call.data[CONF_LOOKBACK],
}
await hass.services.async_call(
DOMAIN_STREAM, SERVICE_RECORD, data, blocking=True, context=call.context
)
|
import pathlib
import pytest
from qutebrowser.browser.webengine import spell
from qutebrowser.config import configdata
from scripts import dictcli
def afrikaans():
return dictcli.Language(
code='af-ZA',
name='Afrikaans (South Africa)',
remote_filename='af-ZA-3-0.bdic')
def english():
return dictcli.Language(
code='en-US',
name='English (United States)',
remote_filename='en-US-7-1.bdic')
def polish():
return dictcli.Language(
code='pl-PL',
name='Polish (Poland)',
remote_filename='pl-PL-3-0.bdic')
def langs():
return [afrikaans(), english(), polish()]
@pytest.fixture(autouse=True)
def configdata_init():
if configdata.DATA is None:
configdata.init()
@pytest.fixture(autouse=True)
def dict_tmp_path(tmp_path, monkeypatch):
monkeypatch.setattr(spell, 'dictionary_dir', lambda: str(tmp_path))
return tmp_path
def test_language(dict_tmp_path):
(dict_tmp_path / 'pl-PL-2-0.bdic').touch()
assert english().local_filename is None
    assert polish().local_filename == 'pl-PL-2-0.bdic'
def test_parse_entry():
assert (dictcli.parse_entry({'name': 'en-US-7-1.bdic'}) ==
('en-US', 'en-US-7-1.bdic'))
def test_latest_yet():
code2file = {'en-US': 'en-US-7-1.bdic'}
assert not dictcli.latest_yet(code2file, 'en-US', 'en-US-7-0.bdic')
assert not dictcli.latest_yet(code2file, 'en-US', 'en-US-7-1.bdic')
assert dictcli.latest_yet(code2file, 'en-US', 'en-US-8-0.bdic')
def test_available_languages(dict_tmp_path, monkeypatch):
for f in ['pl-PL-2-0.bdic', english().remote_filename]:
(dict_tmp_path / f).touch()
monkeypatch.setattr(dictcli, 'language_list_from_api', lambda: [
(lang.code, lang.remote_filename) for lang in langs()
])
assert sorted(dictcli.available_languages()) == [
dictcli.Language(
code='af-ZA',
name='Afrikaans (South Africa)',
remote_filename='af-ZA-3-0.bdic',
local_filename=None),
dictcli.Language(
code='en-US',
name='English (United States)',
remote_filename='en-US-7-1.bdic',
local_filename=None),
dictcli.Language(
code='pl-PL',
name='Polish (Poland)',
remote_filename='pl-PL-3-0.bdic',
local_filename='pl-PL-2-0.bdic'),
]
def test_filter_languages():
filtered_langs = dictcli.filter_languages(langs(), ['af-ZA'])
assert filtered_langs == [afrikaans()]
filtered_langs = dictcli.filter_languages(langs(), ['pl-PL', 'en-US'])
assert filtered_langs == [english(), polish()]
with pytest.raises(dictcli.InvalidLanguageError):
dictcli.filter_languages(langs(), ['pl-PL', 'en-GB'])
def test_install(dict_tmp_path, monkeypatch):
# given
monkeypatch.setattr(
dictcli, 'download_dictionary',
lambda _url, dest: pathlib.Path(dest).touch())
# when
dictcli.install(langs())
# then
installed_files = [f.name for f in dict_tmp_path.glob('*')]
expected_files = [lang.remote_filename for lang in langs()]
assert sorted(installed_files) == sorted(expected_files)
def test_update(dict_tmp_path, monkeypatch):
# given
monkeypatch.setattr(
dictcli, 'download_dictionary',
lambda _url, dest: pathlib.Path(dest).touch())
(dict_tmp_path / 'pl-PL-2-0.bdic').touch()
assert polish().local_version < polish().remote_version
# when
dictcli.update(langs())
# then
assert polish().local_version == polish().remote_version
def test_remove_old(dict_tmp_path, monkeypatch):
# given
monkeypatch.setattr(
dictcli, 'download_dictionary',
lambda _url, dest: pathlib.Path(dest).touch())
for f in ['pl-PL-2-0.bdic',
polish().remote_filename,
english().remote_filename]:
(dict_tmp_path / f).touch()
# when
dictcli.remove_old(langs())
# then
installed_files = [f.name for f in dict_tmp_path.glob('*')]
expected_files = [polish().remote_filename, english().remote_filename]
assert sorted(installed_files) == sorted(expected_files)
|
from .trash import TopTrashDirRules
from .trash import TrashDirs
from .trash import Harvester
from .trash import EX_OK
from .trash import Parser
from .trash import PrintHelp
from .trash import PrintVersion
from .trash import EX_USAGE
from .trash import ParseTrashInfo
import os
import sys
def main(argv = sys.argv,
stdout = sys.stdout,
stderr = sys.stderr,
environ = os.environ):
from trashcli.list_mount_points import os_mount_points
from datetime import datetime
from trashcli.fs import FileSystemReader
from trashcli.fs import FileRemover
from trashcli.trash import version
return EmptyCmd(
out = stdout,
err = stderr,
environ = environ,
list_volumes = os_mount_points,
now = datetime.now,
file_reader = FileSystemReader(),
getuid = os.getuid,
file_remover = FileRemover(),
version = version,
).run(*argv)
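# For illustration (not part of the original module): invoking the command
# as ``trash-empty 7`` reaches this code as main(argv=['trash-empty', '7']),
# which keeps anything trashed within the last 7 days and deletes the rest.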
class EmptyCmd:
def __init__(self,
out,
err,
environ,
list_volumes,
now,
file_reader,
getuid,
file_remover,
version):
self.out = out
self.err = err
self.file_reader = file_reader
self.environ = environ
self.getuid = getuid
self.list_volumes = list_volumes
self.version = version
self._now = now
self.file_remover = file_remover
self._dustman = DeleteAnything()
def run(self, *argv):
self.program_name = os.path.basename(argv[0])
self.exit_code = EX_OK
parse = Parser()
parse.on_help(PrintHelp(self.description, self.println))
parse.on_version(PrintVersion(self.println, self.version))
parse.on_argument(self.set_max_age_in_days)
parse.as_default(self.empty_all_trashdirs)
parse.on_invalid_option(self.report_invalid_option_usage)
parse.add_option('trash-dir=', self.empty_trashdir)
parse(argv)
return self.exit_code
def set_max_age_in_days(self, arg):
max_age_in_days = int(arg)
self._dustman = DeleteAccordingDate(self.file_reader.contents_of,
self._now,
max_age_in_days)
def report_invalid_option_usage(self, program_name, option):
self.println_err("{program_name}: invalid option -- '{option}'"
.format(**locals()))
self.exit_code |= EX_USAGE
def println_err(self, msg):
self.err.write("{}\n".format(msg))
def description(self, program_name, printer):
printer.usage('Usage: %s [days]' % program_name)
printer.summary('Purge trashed files.')
printer.options(
" --version show program's version number and exit",
" -h, --help show this help message and exit")
printer.bug_reporting()
def empty_trashdir(self, specific_dir):
self.delete_all_things_under_trash_dir(specific_dir, None)
def empty_all_trashdirs(self):
trashdirs = TrashDirs(self.environ,
self.getuid,
self.list_volumes,
TopTrashDirRules(self.file_reader))
trashdirs.on_trash_dir_found = self.delete_all_things_under_trash_dir
trashdirs.list_trashdirs()
def delete_all_things_under_trash_dir(self, trash_dir_path, volume_path):
harvester = Harvester(self.file_reader)
harvester.on_trashinfo_found = self.delete_trashinfo_and_backup_copy
harvester.on_orphan_found = self.delete_orphan
harvester.analize_trash_directory(trash_dir_path, volume_path)
def delete_trashinfo_and_backup_copy(self, trashinfo_path):
trashcan = self.make_trashcan()
self._dustman.delete_if_ok(trashinfo_path, trashcan)
def delete_orphan(self, path_to_backup_copy):
trashcan = self.make_trashcan()
trashcan.delete_orphan(path_to_backup_copy)
def make_trashcan(self):
file_remover_with_error = FileRemoveWithErrorHandling(self.file_remover,
self.print_cannot_remove_error)
trashcan = CleanableTrashcan(file_remover_with_error)
return trashcan
def print_cannot_remove_error(self, exc, path):
error_message = "cannot remove {path}".format(path=path)
self.println_err("{program_name}: {msg}".format(
program_name=self.program_name,
msg=error_message))
def println(self, line):
self.out.write(line + '\n')
class FileRemoveWithErrorHandling:
def __init__(self, file_remover, on_error):
self.file_remover = file_remover
self.on_error = on_error
def remove_file(self, path):
try:
return self.file_remover.remove_file(path)
except OSError as e:
self.on_error(e, path)
def remove_file_if_exists(self, path):
try:
return self.file_remover.remove_file_if_exists(path)
except OSError as e:
self.on_error(e, path)
class DeleteAccordingDate:
def __init__(self, contents_of, now, max_age_in_days):
self._contents_of = contents_of
self._now = now
self.max_age_in_days = max_age_in_days
def delete_if_ok(self, trashinfo_path, trashcan):
contents = self._contents_of(trashinfo_path)
ParseTrashInfo(
on_deletion_date=IfDate(
OlderThan(self.max_age_in_days, self._now),
lambda: trashcan.delete_trashinfo_and_backup_copy(trashinfo_path)
),
)(contents)
class DeleteAnything:
def delete_if_ok(self, trashinfo_path, trashcan):
trashcan.delete_trashinfo_and_backup_copy(trashinfo_path)
class IfDate:
def __init__(self, date_criteria, then):
self.date_criteria = date_criteria
self.then = then
def __call__(self, date2):
if self.date_criteria(date2):
self.then()
class OlderThan:
def __init__(self, days_ago, now):
from datetime import timedelta
self.limit_date = now() - timedelta(days=days_ago)
def __call__(self, deletion_date):
return deletion_date < self.limit_date
class CleanableTrashcan:
def __init__(self, file_remover):
self._file_remover = file_remover
def delete_orphan(self, path_to_backup_copy):
self._file_remover.remove_file(path_to_backup_copy)
def delete_trashinfo_and_backup_copy(self, trashinfo_path):
backup_copy = self._path_of_backup_copy(trashinfo_path)
self._file_remover.remove_file_if_exists(backup_copy)
self._file_remover.remove_file(trashinfo_path)
def _path_of_backup_copy(self, path_to_trashinfo):
from os.path import dirname, join, basename
trash_dir = dirname(dirname(path_to_trashinfo))
return join(trash_dir, 'files', basename(path_to_trashinfo)[:-len('.trashinfo')])
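# For illustration (not part of the original module):
#   _path_of_backup_copy('/vol/.Trash-1000/info/foo.trashinfo')
#   -> '/vol/.Trash-1000/files/foo'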
|
SNOWSQL_INSTALL_LOCATION = '~/bin'
SNOWSQL_VERSION = '1.2.5'
SNOWSQL_DOWNLOAD_URL = 'https://sfc-repo.snowflakecomputing.com/snowsql/bootstrap/1.2/linux_x86_64/snowsql-%s-linux_x86_64.bash' % SNOWSQL_VERSION
def AptInstall(vm):
"""Installs snowsql on the Debian VM."""
vm.Install('curl')
vm.Install('unzip')
vm.RemoteCommand('curl -O %s' % SNOWSQL_DOWNLOAD_URL)
vm.RemoteCommand(
'SNOWSQL_DEST=%s SNOWSQL_LOGIN_SHELL=~/.profile '
'bash snowsql-%s-linux_x86_64.bash'
% (SNOWSQL_INSTALL_LOCATION, SNOWSQL_VERSION))
vm.RemoteCommand('chmod +x %s/snowsql' % SNOWSQL_INSTALL_LOCATION)
def YumInstall(vm):
"""Raises exception when trying to install on yum-based VMs."""
del vm
raise NotImplementedError(
'PKB currently only supports the installation of snowsql on '
'Debian-based VMs')
def AptUninstall(vm):
"""Removes snowsql from the Debian VM."""
vm.RemoteCommand('rm -rf ~/bin/snowsql')
vm.RemoteCommand('rm -rf ~/.snowsql')
vm.RemoteCommand('rm -rf snowsql-%s-linux_x86_64.bash' % SNOWSQL_VERSION)
|
import os
import pytest
from molecule import state
from molecule import util
@pytest.fixture
def _instance(config_instance):
return state.State(config_instance)
def test_state_file_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'state.yml')
assert x == _instance.state_file
def test_converged(_instance):
assert not _instance.converged
def test_created(_instance):
assert not _instance.created
def test_driver(_instance):
assert not _instance.driver
def test_prepared(_instance):
assert not _instance.prepared
def test_reset(_instance):
assert not _instance.converged
_instance.change_state('converged', True)
assert _instance.converged
_instance.reset()
assert not _instance.converged
def test_reset_persists(_instance):
assert not _instance.converged
_instance.change_state('converged', True)
assert _instance.converged
_instance.reset()
assert not _instance.converged
d = util.safe_load_file(_instance.state_file)
assert not d.get('converged')
def test_change_state_converged(_instance):
_instance.change_state('converged', True)
assert _instance.converged
def test_change_state_created(_instance):
_instance.change_state('created', True)
assert _instance.created
def test_change_state_driver(_instance):
_instance.change_state('driver', 'foo')
assert 'foo' == _instance.driver
def test_change_state_prepared(_instance):
_instance.change_state('prepared', True)
assert _instance.prepared
def test_change_state_raises(_instance):
with pytest.raises(state.InvalidState):
_instance.change_state('invalid-state', True)
def test_get_data_loads_existing_state_file(_instance, molecule_data,
config_instance):
data = {
'converged': False,
'created': True,
'driver': None,
'prepared': None,
}
util.write_file(_instance._state_file, util.safe_dump(data))
s = state.State(config_instance)
assert not s.converged
assert s.created
assert not s.driver
assert not s.prepared
|
import numpy as np
import unittest
import chainer
from chainer import initializers
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.ssd import Normalize
@testing.parameterize(*testing.product({
'shape': [(5, 5), (25, 25), (5, 25)],
'n_channel': [1, 10],
'eps': [1e-5, 1],
}))
class TestNormalize(unittest.TestCase):
def setUp(self):
self.link = Normalize(
self.n_channel, initializers.Normal(), eps=self.eps)
self.x = np.random.uniform(size=(1, self.n_channel) + self.shape) \
.astype(np.float32)
def _check_forward(self, x):
y = self.link(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, type(x))
self.assertEqual(y.shape, x.shape)
self.assertEqual(y.dtype, x.dtype)
x = chainer.backends.cuda.to_cpu(x)
y = chainer.backends.cuda.to_cpu(y.array)
scale = chainer.backends.cuda.to_cpu(self.link.scale.array)
norm = np.linalg.norm(x, axis=1, keepdims=True) + self.eps
expect = x / norm * scale[:, np.newaxis, np.newaxis]
np.testing.assert_almost_equal(y, expect)
def test_forward_cpu(self):
self._check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.link.to_gpu()
self._check_forward(chainer.backends.cuda.to_gpu(self.x))
def test_forward_zero_cpu(self):
self._check_forward(np.zeros_like(self.x))
@attr.gpu
def test_forward_zero_gpu(self):
self.link.to_gpu()
self._check_forward(
chainer.backends.cuda.to_gpu(np.zeros_like(self.x)))
testing.run_module(__name__, __file__)
|
import re
class Warnings:
"""Extract warnings from GCC's output
Analyzes compiler output and classifies warnings.
"""
_warning_pattern_map = {
'antiquated': ' antiquated',
'deprecated': ' deprecated',
'unused_func': ' defined but not used',
'isoc': ' ISO C',
'missing_init': ' missing initializer',
'out_of_bounds': ' subscript .*? bounds',
'unused_var': ' unused variable'
}
def __init__(self, console_output):
self.warning_lines = [x for x in console_output.splitlines() if x.find(' warning:') > 0]
def byType(self, warntype):
"""Extract warning messages corresponding to warntype.
        Valid warntypes are the keys of the _warning_pattern_map dictionary.
@param warntype: The type of warning message that should be extracted.
@type warntype: str
@return a list of warning messages
@rtype list
"""
return [x for x in self.warning_lines if re.search(self._warning_pattern_map[warntype], x)]
def analyze(self):
"""Get dictionary of classified warnings.
@return A dictionary of lists of warning messages indexed by the warning type
@rtype {str:[str]}
"""
        return {t: self.byType(t) for t in self._warning_pattern_map}
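# A minimal usage sketch (not part of the original class; the sample GCC
# output below is hypothetical): feed compiler output in, get warnings
# classified by type out.
if __name__ == '__main__':
    _sample = ("foo.c:3:5: warning: unused variable 'x'\n"
               "foo.c:9:1: warning: 'bar' defined but not used\n"
               "foo.c:12:2: note: expected 'int' here\n")
    classified = Warnings(_sample).analyze()
    for warntype, lines in sorted(classified.items()):
        print('%s: %d' % (warntype, len(lines)))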
|
import numpy
from dedupe import predicates
from .base import FieldType
class PriceType(FieldType):
_predicate_functions = [predicates.orderOfMagnitude,
predicates.wholeFieldPredicate,
predicates.roundTo1]
type = "Price"
@staticmethod
def comparator(price_1, price_2):
        if price_1 <= 0 or price_2 <= 0:
            return numpy.nan
        return abs(numpy.log10(price_1) - numpy.log10(price_2))
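# A brief illustrative sketch (not part of dedupe): prices are compared on
# a log10 scale, so a hundredfold difference scores 2.0, and non-positive
# prices yield NaN, i.e. a missing comparison.
def _demo_comparator():
    assert numpy.isclose(PriceType.comparator(10.0, 1000.0), 2.0)
    assert numpy.isnan(PriceType.comparator(0, 10.0))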
|
import os
import os.path
import cherrypy
localDir = os.path.dirname(__file__)
curpath = os.path.normpath(os.path.join(os.getcwd(), localDir))
class HTTPErrorDemo(object):
# Set a custom response for 403 errors.
_cp_config = {'error_page.403':
os.path.join(curpath, 'custom_error.html')}
@cherrypy.expose
def index(self):
# display some links that will result in errors
tracebacks = cherrypy.request.show_tracebacks
if tracebacks:
trace = 'off'
else:
trace = 'on'
return """
<html><body>
<p>Toggle tracebacks <a href="toggleTracebacks">%s</a></p>
<p><a href="/doesNotExist">Click me; I'm a broken link!</a></p>
<p>
<a href="/error?code=403">
Use a custom error page from a file.
</a>
</p>
<p>These errors are explicitly raised by the application:</p>
<ul>
<li><a href="/error?code=400">400</a></li>
<li><a href="/error?code=401">401</a></li>
<li><a href="/error?code=402">402</a></li>
<li><a href="/error?code=500">500</a></li>
</ul>
<p><a href="/messageArg">You can also set the response body
when you raise an error.</a></p>
</body></html>
""" % trace
@cherrypy.expose
def toggleTracebacks(self):
# simple function to toggle tracebacks on and off
tracebacks = cherrypy.request.show_tracebacks
cherrypy.config.update({'request.show_tracebacks': not tracebacks})
# redirect back to the index
raise cherrypy.HTTPRedirect('/')
@cherrypy.expose
def error(self, code):
# raise an error based on the get query
raise cherrypy.HTTPError(status=code)
@cherrypy.expose
def messageArg(self):
message = ("If you construct an HTTPError with a 'message' "
                   'argument, it will be placed on the error page '
'(underneath the status line by default).')
raise cherrypy.HTTPError(500, message=message)
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
# CherryPy always starts with app.root when trying to map request URIs
# to objects, so we need to mount a request handler root. A request
    # to '/' will be mapped to HTTPErrorDemo().index().
cherrypy.quickstart(HTTPErrorDemo(), config=tutconf)
|
import os
import sys
from types import ModuleType
from flexx import ui, app, event
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
DOC_DIR = os.path.abspath(os.path.join(THIS_DIR, '..'))
OUTPUT_DIR = os.path.join(DOC_DIR, 'ui')
created_files = []
def main():
pages = {}
class_names = []
layouts = set()
# Get all pages and class names
    namespace = {}
    namespace.update(ui.__dict__)
    namespace.update(ui.layouts.__dict__)
    namespace.update(ui.widgets.__dict__)
for mod in namespace.values():
if isinstance(mod, ModuleType):
classes = []
for w in mod.__dict__.values():
if isinstance(w, type) and issubclass(w, (app.PyComponent, app.JsComponent)):
if w.__module__ == mod.__name__:
classes.append(w)
if issubclass(w, ui.Layout):
layouts.add(w.__name__)
if classes:
classes.sort(key=lambda x: x.__name__)
classes.sort(key=lambda x: len(x.mro()))
class_names.extend([w.__name__ for w in classes])
pages[mod.__name__] = classes
# Create page for each module
for module_name, classes in sorted(pages.items()):
# Get page name and title
page_name = page_title = module_name.split('.')[-1].strip('_').capitalize()
mdoc = sys.modules[module_name].__doc__
if mdoc and 0 < len(mdoc.split('\n', 1)[0].strip()) <= 24:
page_title = mdoc.split('\n', 1)[0].strip()
sys.modules[module_name].__doc__ = sys.modules[module_name].__doc__.split('\n', 1)[-1]
docs = '%s\n%s\n\n' % (page_title, '-' * len(page_title))
docs += '.. automodule:: %s\n\n' % module_name
docs += '----\n\n'
# Include more docs?
if module_name.endswith('_widget'):
docs += '.. autofunction:: flexx.ui.create_element\n\n'
for cls in classes:
assert issubclass(cls, app.JsComponent)
name = cls.__name__
            # Insert info on base classes
if 'Inherits from' not in cls.__doc__:
bases = []
for bcls in cls.__bases__:
if getattr(ui, bcls.__name__, None):
bases.append(':class:`%s <flexx.ui.%s>`' % (bcls.__name__, bcls.__name__))
elif getattr(app, bcls.__name__, None):
bases.append(':class:`%s <flexx.app.%s>`' % (bcls.__name__, bcls.__name__))
else:
bases.append(':class:`%s <%s.%s>`' % (bcls.__name__, bcls.__module__, bcls.__name__))
line = ' *Inherits from:* ' + ', '.join(bases)
cls.__doc__ = line + '\n\n ' + (cls.__doc__ or '').lstrip()
members = {}
include = '_create_dom', '_render_dom'
exclude = 'CODE', 'CSS', 'DEFAULT_MIN_SIZE'
# Collect all stuff that's on the class.
for n in list(cls.JS.__dict__):
val = getattr(cls.JS, n)
if n in exclude or not val.__doc__:
pass
elif n.startswith('_') and n not in include:
pass
elif isinstance(val, event._action.BaseDescriptor):
for tname, tclass in (('attributes', event._attribute.Attribute),
('properties', event._property.Property),
('actions', event._action.ActionDescriptor),
('reactions', event._reaction.ReactionDescriptor),
('emitters', event._emitter.EmitterDescriptor)):
if isinstance(val, tclass):
members.setdefault(tname, []).append(n)
break
else:
                        assert False, 'unknown descriptor type: %r' % val
elif getattr(val, '__doc__', None):
members.setdefault('methods', []).append(n)
# Get canonical name
full_name = '%s.%s' % (module_name, name)
if getattr(ui, name, None):
full_name = 'flexx.ui.%s' % name
# Sort and combine
order = 'attributes', 'properties', 'emitters', 'actions', 'reactions', 'methods'
member_str = ' :members:'
toc_str = '\n'
for key in members:
members[key].sort()
assert not set(members).difference(order)
for key in order:
if key in members:
# Add to member string and toc
toc_str = toc_str.rstrip(',') + '\n\n *{}*:'.format(key)
for n in members[key]:
member_str += ' {},'.format(n)
toc_str += ' `{} <#{}.{}>`__,'.format(n, full_name, n) # __ means anonymous hyperlink
# Hack: put members back on Python class to have them documented
for n in members[key]:
if n not in cls.__dict__:
setattr(cls, n, cls.JS.__dict__[n])
cls.__doc__ += toc_str.rstrip(',') + '\n\n'
# Create rst for class
docs += '.. autoclass:: %s\n' % full_name
docs += member_str.rstrip(',') + '\n :member-order: alphabetical\n\n'
# Write doc page
filename = os.path.join(OUTPUT_DIR, page_name.lower() + '.rst')
created_files.append(filename)
        with open(filename, 'wt', encoding='utf-8') as f:
            f.write(docs)
# Create overview doc page
docs = 'Widgets reference'
docs += '\n' + '=' * len(docs) + '\n\n'
docs += 'This is a list of all widget classes provided by ``flexx.ui``. '
docs += 'The :class:`Widget <flexx.ui.Widget>` class is the base class of all widgets. '
docs += '\n\n'
docs += '\nBase widget:\n\n'
if True:
docs += '* :class:`%s <flexx.ui.%s>`\n' % ('Widget', 'Widget')
docs += '\nLayouts:\n\n'
for name in [n for n in sorted(class_names) if n in layouts if getattr(ui, n, None)]:
docs += '* :class:`%s <flexx.ui.%s>`\n' % (name, name)
docs += '\nWidgets:\n\n'
for name in [n for n in sorted(class_names) if n not in layouts if getattr(ui, n, None)]:
docs += '* :class:`%s <flexx.ui.%s>`\n' % (name, name)
docs += '\n.. toctree::\n :maxdepth: 1\n :hidden:\n\n'
for module_name in sorted(pages.keys()):
docs += ' %s\n' % module_name.split('.')[-1].strip('_').lower()
# Write overview doc page
filename = os.path.join(OUTPUT_DIR, 'api.rst')
created_files.append(filename)
    with open(filename, 'wt', encoding='utf-8') as f:
        f.write(docs)
print(' generated widget docs with %i pages and %i widgets' % (len(pages), len(class_names)))
def clean():
while created_files:
filename = created_files.pop()
if os.path.isfile(filename):
os.remove(filename)
|
import tablib
from core.models import Book
from django.test import TestCase
from import_export import instance_loaders, resources
class CachedInstanceLoaderTest(TestCase):
def setUp(self):
self.resource = resources.modelresource_factory(Book)()
self.dataset = tablib.Dataset(headers=['id', 'name', 'author_email'])
self.book = Book.objects.create(name="Some book")
self.book2 = Book.objects.create(name="Some other book")
row = [str(self.book.pk), 'Some book', '[email protected]']
self.dataset.append(row)
self.instance_loader = instance_loaders.CachedInstanceLoader(
self.resource, self.dataset)
def test_all_instances(self):
self.assertTrue(self.instance_loader.all_instances)
self.assertEqual(len(self.instance_loader.all_instances), 1)
self.assertEqual(list(self.instance_loader.all_instances),
[self.book.pk])
def test_get_instance(self):
obj = self.instance_loader.get_instance(self.dataset.dict[0])
self.assertEqual(obj, self.book)
|
from datetime import date
import pytest
import voluptuous as vol
import homeassistant.components.workday.binary_sensor as binary_sensor
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import assert_setup_component, get_test_home_assistant
FUNCTION_PATH = "homeassistant.components.workday.binary_sensor.get_date"
class TestWorkdaySetup:
"""Test class for workday sensor."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
# Set valid default config for test
self.config_province = {
"binary_sensor": {"platform": "workday", "country": "DE", "province": "BW"}
}
self.config_noprovince = {
"binary_sensor": {"platform": "workday", "country": "DE"}
}
self.config_invalidprovince = {
"binary_sensor": {
"platform": "workday",
"country": "DE",
"province": "invalid",
}
}
self.config_state = {
"binary_sensor": {"platform": "workday", "country": "US", "province": "CA"}
}
self.config_nostate = {
"binary_sensor": {"platform": "workday", "country": "US"}
}
self.config_includeholiday = {
"binary_sensor": {
"platform": "workday",
"country": "DE",
"province": "BW",
"workdays": ["holiday"],
"excludes": ["sat", "sun"],
}
}
self.config_example1 = {
"binary_sensor": {
"platform": "workday",
"country": "US",
"workdays": ["mon", "tue", "wed", "thu", "fri"],
"excludes": ["sat", "sun"],
}
}
self.config_example2 = {
"binary_sensor": {
"platform": "workday",
"country": "DE",
"province": "BW",
"workdays": ["mon", "wed", "fri"],
"excludes": ["sat", "sun", "holiday"],
"add_holidays": ["2020-02-24"],
}
}
self.config_tomorrow = {
"binary_sensor": {"platform": "workday", "country": "DE", "days_offset": 1}
}
self.config_day_after_tomorrow = {
"binary_sensor": {"platform": "workday", "country": "DE", "days_offset": 2}
}
self.config_yesterday = {
"binary_sensor": {"platform": "workday", "country": "DE", "days_offset": -1}
}
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_valid_country(self):
"""Test topic name/filter validation."""
# Invalid UTF-8, must not contain U+D800 to U+DFFF
with pytest.raises(vol.Invalid):
binary_sensor.valid_country("\ud800")
with pytest.raises(vol.Invalid):
binary_sensor.valid_country("\udfff")
# Country MUST NOT be empty
with pytest.raises(vol.Invalid):
binary_sensor.valid_country("")
# Country must be supported by holidays
with pytest.raises(vol.Invalid):
binary_sensor.valid_country("HomeAssistantLand")
def test_setup_component_province(self):
"""Set up workday component."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_province)
self.hass.block_till_done()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity is not None
# Freeze time to a workday - Mar 15th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 3, 15))
def test_workday_province(self, mock_date):
"""Test if workdays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_province)
self.hass.block_till_done()
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
# Freeze time to a weekend - Mar 12th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 3, 12))
def test_weekend_province(self, mock_date):
"""Test if weekends are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_province)
self.hass.block_till_done()
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
# Freeze time to a public holiday in province BW - Jan 6th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 1, 6))
def test_public_holiday_province(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_province)
self.hass.block_till_done()
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
def test_setup_component_noprovince(self):
"""Set up workday component."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_noprovince)
self.hass.block_till_done()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity is not None
# Freeze time to a public holiday in province BW - Jan 6th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 1, 6))
def test_public_holiday_noprovince(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_noprovince)
self.hass.block_till_done()
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
# Freeze time to a public holiday in state CA - Mar 31st, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 3, 31))
def test_public_holiday_state(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_state)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
# Freeze time to a public holiday in state CA - Mar 31st, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 3, 31))
def test_public_holiday_nostate(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_nostate)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
def test_setup_component_invalidprovince(self):
"""Set up workday component."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_invalidprovince)
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity is None
# Freeze time to a public holiday in province BW - Jan 6th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 1, 6))
def test_public_holiday_includeholiday(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_includeholiday)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
# Freeze time to a saturday to test offset - Aug 5th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 8, 5))
def test_tomorrow(self, mock_date):
"""Test if tomorrow are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_tomorrow)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
# Freeze time to a saturday to test offset - Aug 5th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 8, 5))
def test_day_after_tomorrow(self, mock_date):
"""Test if the day after tomorrow are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_day_after_tomorrow)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
# Freeze time to a saturday to test offset - Aug 5th, 2017
@patch(FUNCTION_PATH, return_value=date(2017, 8, 5))
def test_yesterday(self, mock_date):
"""Test if yesterday are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_yesterday)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
    # Freeze time to Presidents Day to test a holiday on a workday - Feb 17th, 2020
    # Presidents Day Feb 17th, 2020 is a Monday.
@patch(FUNCTION_PATH, return_value=date(2020, 2, 17))
def test_config_example1_holiday(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_example1)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "on"
# Freeze time to test tue - Feb 18th, 2020
@patch(FUNCTION_PATH, return_value=date(2020, 2, 18))
def test_config_example2_tue(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_example2)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
# Freeze time to test mon, but added as holiday - Feb 24th, 2020
@patch(FUNCTION_PATH, return_value=date(2020, 2, 24))
def test_config_example2_add_holiday(self, mock_date):
"""Test if public holidays are reported correctly."""
with assert_setup_component(1, "binary_sensor"):
setup_component(self.hass, "binary_sensor", self.config_example2)
self.hass.start()
entity = self.hass.states.get("binary_sensor.workday_sensor")
assert entity.state == "off"
def test_day_to_string(self):
"""Test if day_to_string is behaving correctly."""
assert binary_sensor.day_to_string(0) == "mon"
assert binary_sensor.day_to_string(1) == "tue"
assert binary_sensor.day_to_string(7) == "holiday"
assert binary_sensor.day_to_string(8) is None
|
import threading
import copy
from mlpatches import base
class ThreadLocalVar(object):
"""creates a proxy to a thread-local version of passee var."""
# todo: maybe add lock?
def __init__(self, var):
self.__var = var
self.__local = threading.local()
        self.__setattr__ = self.__setattr_  # install __setattr__ here
def __getattr__(self, name):
try:
v = self.__local.var
except AttributeError:
v = self.__local.var = copy.deepcopy(self.__var)
return getattr(v, name)
    def __setattr_(self, name, value):  # the missing trailing "_" is intentional
try:
v = self.__local.var
except AttributeError:
v = self.__local.var = copy.deepcopy(self.__var)
return setattr(v, name, value)
def __delattr__(self, name):
try:
v = self.__local.var
except AttributeError:
v = self.__local.var = copy.deepcopy(self.__var)
return delattr(v, name)
def __del__(self):
try:
del self.__local.var
except AttributeError:
pass
# define patches
class ThreadLocalArgv(base.FunctionPatch):
"""Patches sys.argv to be thread-local."""
PY2 = True
PY3 = True
module = "sys"
function = "argv"
replacement = ThreadLocalVar([])
# create patch instances
TL_ARGV_PATCH = ThreadLocalArgv()
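# A minimal sketch (hypothetical, not part of mlpatches) of the proxy's
# behaviour: each thread lazily deep-copies the wrapped object on first
# access, so mutations never leak across threads.
def _demo_thread_local_isolation():
    proxy = ThreadLocalVar(['shared'])
    def worker():
        proxy.append('worker')  # mutates this thread's private copy
        assert proxy.count('worker') == 1
    t = threading.Thread(target=worker)
    t.start()
    t.join()
    assert proxy.count('worker') == 0  # main thread's copy is untouched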
|
import logging
from homeassistant.components.cover import CoverEntity
from homeassistant.const import CONF_DEVICES, STATE_OPEN
from homeassistant.core import callback
from . import (
CONF_AUTOMATIC_ADD,
CONF_DATA_BITS,
CONF_SIGNAL_REPETITIONS,
DEFAULT_SIGNAL_REPETITIONS,
SIGNAL_EVENT,
RfxtrxCommandEntity,
get_device_id,
get_rfx_object,
)
from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST
_LOGGER = logging.getLogger(__name__)
def supported(event):
"""Return whether an event supports cover."""
return event.device.known_to_be_rollershutter
async def async_setup_entry(
hass,
config_entry,
async_add_entities,
):
"""Set up config entry."""
discovery_info = config_entry.data
device_ids = set()
entities = []
for packet_id, entity_info in discovery_info[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
if not supported(event):
continue
device_id = get_device_id(
event.device, data_bits=entity_info.get(CONF_DATA_BITS)
)
if device_id in device_ids:
continue
device_ids.add(device_id)
entity = RfxtrxCover(
event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS]
)
entities.append(entity)
async_add_entities(entities)
@callback
def cover_update(event, device_id):
"""Handle cover updates from the RFXtrx gateway."""
if not supported(event):
return
if device_id in device_ids:
return
device_ids.add(device_id)
_LOGGER.info(
"Added cover (Device ID: %s Class: %s Sub: %s, Event: %s)",
event.device.id_string.lower(),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
entity = RfxtrxCover(
event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event
)
async_add_entities([entity])
# Subscribe to main RFXtrx events
if discovery_info[CONF_AUTOMATIC_ADD]:
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, cover_update)
class RfxtrxCover(RfxtrxCommandEntity, CoverEntity):
"""Representation of a RFXtrx cover."""
async def async_added_to_hass(self):
"""Restore device state."""
await super().async_added_to_hass()
if self._event is None:
old_state = await self.async_get_last_state()
if old_state is not None:
self._state = old_state.state == STATE_OPEN
@property
def is_closed(self):
"""Return if the cover is closed."""
return not self._state
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
await self._async_send(self._device.send_open)
self._state = True
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
await self._async_send(self._device.send_close)
self._state = False
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self._async_send(self._device.send_stop)
        self._state = True  # assume open after a stop; the real position is unknown
self.async_write_ha_state()
def _apply_event(self, event):
"""Apply command from rfxtrx."""
super()._apply_event(event)
if event.values["Command"] in COMMAND_ON_LIST:
self._state = True
elif event.values["Command"] in COMMAND_OFF_LIST:
self._state = False
@callback
def _handle_event(self, event, device_id):
"""Check if event applies to me and update."""
if device_id != self._device_id:
return
self._apply_event(event)
self.async_write_ha_state()
|
import sys
import pathlib
import yaml
import astroid
from pylint import interfaces, checkers
from pylint.checkers import utils
OPTIONS = None
FAILED_LOAD = False
class ConfigChecker(checkers.BaseChecker):
"""Custom astroid checker for config calls."""
__implements__ = interfaces.IAstroidChecker
name = 'config'
msgs = {
'E9998': ('%s is no valid config option.', # flake8: disable=S001
'bad-config-option',
None),
}
priority = -1
printed_warning = False
@utils.check_messages('bad-config-option')
def visit_attribute(self, node):
"""Visit a getattr node."""
# At the end of a config.val.foo.bar chain
if not isinstance(node.parent, astroid.Attribute):
# FIXME:conf do some proper check for this...
node_str = node.as_string()
prefix = 'config.val.'
if node_str.startswith(prefix):
self._check_config(node, node_str[len(prefix):])
def _check_config(self, node, name):
"""Check that we're accessing proper config options."""
if FAILED_LOAD:
if not ConfigChecker.printed_warning:
print("[WARN] Could not find configdata.yml. Please run "
"pylint from qutebrowser root.", file=sys.stderr)
print("Skipping some checks...", file=sys.stderr)
ConfigChecker.printed_warning = True
return
if name not in OPTIONS:
self.add_message('bad-config-option', node=node, args=name)
def register(linter):
"""Register this checker."""
linter.register_checker(ConfigChecker(linter))
global OPTIONS
global FAILED_LOAD
yaml_file = pathlib.Path('qutebrowser') / 'config' / 'configdata.yml'
if not yaml_file.exists():
OPTIONS = None
FAILED_LOAD = True
return
with yaml_file.open(mode='r', encoding='utf-8') as f:
OPTIONS = list(yaml.safe_load(f))
|
import sys
from subprocess import check_call as sh
def convert_nb(nbname):
# Execute the notebook
sh(["jupyter", "nbconvert", "--to", "notebook",
"--execute", "--inplace", "--ExecutePreprocessor.timeout=60", nbname + ".ipynb"])
# Convert to .rst for Sphinx
sh(["jupyter", "nbconvert", "--to", "rst", nbname + ".ipynb"])
# Clear notebook output
sh(["jupyter", "nbconvert", "--to", "notebook", "--inplace",
"--ClearOutputPreprocessor.enabled=True", nbname + ".ipynb"])
if __name__ == "__main__":
for nbname in sys.argv[1:]:
convert_nb(nbname)
|
import copy
import unittest
from absl import flags
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.aws import snowflake
from tests import pkb_common_test_case
_TEST_RUN_URI = 'fakeru'
_AWS_ZONE_US_EAST_1A = 'us-east-1a'
_BASE_SNOWFLAKE_SPEC = {'type': 'snowflake_aws'}
FLAGS = flags.FLAGS
class FakeRemoteVMCreateLambdaRole(object):
def Install(self, package_name):
if package_name != 'snowsql':
raise RuntimeError
def PushFile(self, file_to_push, push_destination):
del push_destination
if file_to_push != 'snowflake_snowsql_config_override_file':
raise RuntimeError
class SnowflakeTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(SnowflakeTestCase, self).setUp()
FLAGS.cloud = 'AWS'
FLAGS.run_uri = _TEST_RUN_URI
FLAGS.zones = [_AWS_ZONE_US_EAST_1A]
FLAGS.snowflake_snowsql_config_override_file = 'snowflake_snowsql_config_override_file'
FLAGS.snowflake_connection = 'fake_connection'
def testCreateRequestError(self):
kwargs = copy.copy(_BASE_SNOWFLAKE_SPEC)
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
snowflake_local = snowflake.Snowflake(spec)
with self.assertRaises(NotImplementedError):
snowflake_local._Create()
def testIsAlwaysUserManaged(self):
kwargs = copy.copy(_BASE_SNOWFLAKE_SPEC)
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
snowflake_local = snowflake.Snowflake(spec)
self.assertTrue(snowflake_local.IsUserManaged(spec))
def testAlwaysExists(self):
kwargs = copy.copy(_BASE_SNOWFLAKE_SPEC)
spec = benchmark_config_spec._EdwServiceSpec('NAME', **kwargs)
snowflake_local = snowflake.Snowflake(spec)
self.assertTrue(snowflake_local._Exists())
if __name__ == '__main__':
unittest.main()
|
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from tests.async_mock import patch
async def test_delete_script(hass, hass_client):
"""Test deleting a script."""
with patch.object(config, "SECTIONS", ["script"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = {"one": {}, "two": {}}
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
):
resp = await client.delete("/api/config/script/config/two")
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
assert len(written) == 1
assert written[0] == {"one": {}}
|
import configparser
import logging
import os
import re
import sqlite3
import sys
from threading import local
from acdcli.utils.conf import get_conf
from .cursors import *
from .format import FormatterMixin
from .query import QueryMixin
from .schema import SchemaMixin
from .sync import SyncMixin
logger = logging.getLogger(__name__)
_ROOT_ID_SQL = 'SELECT id FROM nodes WHERE name IS NULL AND type == "folder" ORDER BY created'
_SETTINGS_FILENAME = 'cache.ini'
_def_conf = configparser.ConfigParser()
_def_conf['sqlite'] = dict(filename='nodes.db', busy_timeout=30000, journal_mode='wal')
_def_conf['blacklist'] = dict(folders=[])
class IntegrityError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
def _create_conn(path: str) -> sqlite3.Connection:
c = sqlite3.connect(path)
c.row_factory = sqlite3.Row # allow dict-like access on rows with col name
return c
def _regex_match(pattern: str, cell: str) -> bool:
if cell is None:
return False
return re.match(pattern, cell, re.IGNORECASE) is not None
class NodeCache(SchemaMixin, QueryMixin, SyncMixin, FormatterMixin):
IntegrityCheckType = dict(full=0, quick=1, none=2)
"""types of SQLite integrity checks"""
def __init__(self, cache_path: str='', settings_path='', check=IntegrityCheckType['full']):
self._conf = get_conf(settings_path, _SETTINGS_FILENAME, _def_conf)
self.db_path = os.path.join(cache_path, self._conf['sqlite']['filename'])
self.tl = local()
self.integrity_check(check)
try:
self.init()
except sqlite3.DatabaseError as e:
raise IntegrityError(e)
self._conn.create_function('REGEXP', _regex_match.__code__.co_argcount, _regex_match)
with cursor(self._conn) as c:
c.execute(_ROOT_ID_SQL)
row = c.fetchone()
if not row:
self.root_id = ''
return
first_id = row['id']
if c.fetchone():
raise IntegrityError('Could not uniquely identify root node.')
self.root_id = first_id
self._execute_pragma('busy_timeout', self._conf['sqlite']['busy_timeout'])
if sys.version_info[:3] != (3, 6, 0):
self._execute_pragma('journal_mode', self._conf['sqlite']['journal_mode'])
@property
def _conn(self) -> sqlite3.Connection:
if not hasattr(self.tl, '_conn'):
self.tl._conn = _create_conn(self.db_path)
return self.tl._conn
def _execute_pragma(self, key, value) -> str:
with cursor(self._conn) as c:
c.execute('PRAGMA %s=%s;' % (key, value))
r = c.fetchone()
if r:
logger.debug('Set %s to %s. Result: %s.' % (key, value, r[0]))
return r[0]
@classmethod
def remove_db_file(cls, cache_path='', settings_path='') -> bool:
"""Removes database file."""
import os
import random
import string
import tempfile
conf = get_conf(settings_path, _SETTINGS_FILENAME, _def_conf)
db_path = os.path.join(cache_path, conf['sqlite']['filename'])
tmp_name = ''.join(random.choice(string.ascii_lowercase) for _ in range(16))
tmp_name = os.path.join(tempfile.gettempdir(), tmp_name)
try:
os.rename(db_path, tmp_name)
except OSError:
logger.critical('Error renaming/removing database file "%s".' % db_path)
return False
else:
try:
os.remove(tmp_name)
except OSError:
logger.info('Database file was moved, but not deleted.')
return True
    def integrity_check(self, type_: int):
"""Performs a `self-integrity check
<https://www.sqlite.org/pragma.html#pragma_integrity_check>`_ on the database."""
with cursor(self._conn) as c:
if type_ == NodeCache.IntegrityCheckType['full']:
r = c.execute('PRAGMA integrity_check;')
elif type_ == NodeCache.IntegrityCheckType['quick']:
r = c.execute('PRAGMA quick_check;')
else:
return
r = c.fetchone()
if not r or r[0] != 'ok':
                logger.warning('Sqlite database integrity check failed. '
                               'You may need to clear the cache if you encounter any errors.')
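# Hypothetical helper (not part of the original module) illustrating the
# REGEXP function registered in __init__: SQLite rewrites `a REGEXP b` as
# `REGEXP(b, a)`, which dispatches to _regex_match() above, so node names
# can be filtered with case-insensitive regular expressions.
def _find_ids_by_regex(cache: NodeCache, pattern: str) -> list:
    with cursor(cache._conn) as c:
        c.execute('SELECT id FROM nodes WHERE name REGEXP ?', (pattern,))
        return [row['id'] for row in c.fetchall()]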
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from phpfpm import PhpFpmCollector
##########################################################################
class TestPhpFpmCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PhpFpmCollector', {
'interval': 10
})
self.collector = PhpFpmCollector(config, None)
def test_import(self):
self.assertTrue(PhpFpmCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('stats')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'accepted_conn': 777335,
'listen_queue': 0,
'max_listen_queue': 0,
'listen_queue_len': 0,
'idle_processes': 502,
'active_processes': 14,
'total_processes': 516,
'max_active_processes': 515,
'max_children_reached': 0,
'slow_requests': 0,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('stats_blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import io
import operator
import contextlib
from typing import TYPE_CHECKING, BinaryIO, IO, Iterator, Optional, Union, cast
import pkg_resources
from PyQt5.QtCore import (qVersion, QEventLoop, QDataStream, QByteArray,
QIODevice, QFileDevice, QSaveFile, QT_VERSION_STR,
PYQT_VERSION_STR, QObject, QUrl)
from PyQt5.QtGui import QColor
from PyQt5.QtWidgets import QApplication
try:
from PyQt5.QtWebKit import qWebKitVersion
except ImportError: # pragma: no cover
qWebKitVersion = None # type: ignore[assignment] # noqa: N816
if TYPE_CHECKING:
from PyQt5.QtWebKit import QWebHistory
from PyQt5.QtWebEngineWidgets import QWebEngineHistory
from qutebrowser.misc import objects
from qutebrowser.utils import usertypes
MAXVALS = {
'int': 2 ** 31 - 1,
'int64': 2 ** 63 - 1,
}
MINVALS = {
'int': -(2 ** 31),
'int64': -(2 ** 63),
}
class QtOSError(OSError):
"""An OSError triggered by a QIODevice.
Attributes:
qt_errno: The error attribute of the given QFileDevice, if applicable.
"""
    def __init__(self, dev: QIODevice, msg: Optional[str] = None) -> None:
if msg is None:
msg = dev.errorString()
self.qt_errno: Optional[QFileDevice.FileError] = None
if isinstance(dev, QFileDevice):
msg = self._init_filedev(dev, msg)
super().__init__(msg)
def _init_filedev(self, dev: QFileDevice, msg: str) -> str:
self.qt_errno = dev.error()
filename = dev.fileName()
msg += ": {!r}".format(filename)
return msg
def version_check(version: str,
exact: bool = False,
compiled: bool = True) -> bool:
"""Check if the Qt runtime version is the version supplied or newer.
Args:
version: The version to check against.
exact: if given, check with == instead of >=
compiled: Set to False to not check the compiled version.
"""
if compiled and exact:
raise ValueError("Can't use compiled=True with exact=True!")
parsed = pkg_resources.parse_version(version)
op = operator.eq if exact else operator.ge
result = op(pkg_resources.parse_version(qVersion()), parsed)
if compiled and result:
# qVersion() ==/>= parsed, now check if QT_VERSION_STR ==/>= parsed.
result = op(pkg_resources.parse_version(QT_VERSION_STR), parsed)
if compiled and result:
# Finally, check PYQT_VERSION_STR as well.
result = op(pkg_resources.parse_version(PYQT_VERSION_STR), parsed)
return result
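# A hypothetical sketch (not part of qutebrowser) of how version_check()
# composes: by default both the runtime qVersion() and the compiled
# QT_VERSION_STR/PYQT_VERSION_STR must satisfy the comparison.
def _demo_version_check() -> None:
    assert version_check('1.0')  # any Qt 5/6 install satisfies ">= 1.0"
    # exact comparison is only meaningful against the runtime version
    assert version_check(qVersion(), exact=True, compiled=False)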
MAX_WORLD_ID = 256
def is_new_qtwebkit() -> bool:
"""Check if the given version is a new QtWebKit."""
assert qWebKitVersion is not None
return (pkg_resources.parse_version(qWebKitVersion()) >
pkg_resources.parse_version('538.1'))
def is_single_process() -> bool:
"""Check whether QtWebEngine is running in single-process mode."""
if objects.backend == usertypes.Backend.QtWebKit:
return False
assert objects.backend == usertypes.Backend.QtWebEngine, objects.backend
args = QApplication.instance().arguments()
return '--single-process' in args
def check_overflow(arg: int, ctype: str, fatal: bool = True) -> int:
"""Check if the given argument is in bounds for the given type.
Args:
arg: The argument to check
ctype: The C/Qt type to check as a string.
fatal: Whether to raise exceptions (True) or truncate values (False)
    Return:
        The truncated argument if fatal=False.
        The original argument if it's in bounds.
"""
maxval = MAXVALS[ctype]
minval = MINVALS[ctype]
if arg > maxval:
if fatal:
raise OverflowError(arg)
return maxval
elif arg < minval:
if fatal:
raise OverflowError(arg)
return minval
else:
return arg
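# A brief illustrative sketch (not part of qutebrowser): with fatal=False,
# out-of-range values are clamped to the C type's bounds instead of
# raising OverflowError.
def _demo_check_overflow() -> None:
    assert check_overflow(2 ** 40, 'int', fatal=False) == MAXVALS['int']
    assert check_overflow(-2 ** 40, 'int', fatal=False) == MINVALS['int']
    assert check_overflow(42, 'int') == 42  # in bounds, returned unchanged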
if TYPE_CHECKING:
# Protocol was added in Python 3.8
from typing import Protocol
class Validatable(Protocol):
"""An object with an isValid() method (e.g. QUrl)."""
def isValid(self) -> bool:
...
def ensure_valid(obj: 'Validatable') -> None:
"""Ensure a Qt object with an .isValid() method is valid."""
if not obj.isValid():
raise QtValueError(obj)
def check_qdatastream(stream: QDataStream) -> None:
"""Check the status of a QDataStream and raise OSError if it's not ok."""
status_to_str = {
QDataStream.Ok: "The data stream is operating normally.",
QDataStream.ReadPastEnd: ("The data stream has read past the end of "
"the data in the underlying device."),
QDataStream.ReadCorruptData: "The data stream has read corrupt data.",
QDataStream.WriteFailed: ("The data stream cannot write to the "
"underlying device."),
}
if stream.status() != QDataStream.Ok:
raise OSError(status_to_str[stream.status()])
_QtSerializableType = Union[
QObject,
QByteArray,
QUrl,
'QWebEngineHistory',
'QWebHistory'
]
def serialize(obj: _QtSerializableType) -> QByteArray:
"""Serialize an object into a QByteArray."""
data = QByteArray()
stream = QDataStream(data, QIODevice.WriteOnly)
serialize_stream(stream, obj)
return data
def deserialize(data: QByteArray, obj: _QtSerializableType) -> None:
"""Deserialize an object from a QByteArray."""
stream = QDataStream(data, QIODevice.ReadOnly)
deserialize_stream(stream, obj)
def serialize_stream(stream: QDataStream, obj: _QtSerializableType) -> None:
"""Serialize an object into a QDataStream."""
# pylint: disable=pointless-statement
check_qdatastream(stream)
stream << obj # type: ignore[operator]
check_qdatastream(stream)
def deserialize_stream(stream: QDataStream, obj: _QtSerializableType) -> None:
"""Deserialize a QDataStream into an object."""
# pylint: disable=pointless-statement
check_qdatastream(stream)
stream >> obj # type: ignore[operator]
check_qdatastream(stream)
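# A minimal sketch (not part of qutebrowser) of the serialize/deserialize
# pair: any type streamable via QDataStream, e.g. QUrl per the
# _QtSerializableType alias above, round-trips through a QByteArray.
def _demo_serialize_roundtrip() -> None:
    url = QUrl('https://example.com/')
    data = serialize(url)
    restored = QUrl()
    deserialize(data, restored)
    assert restored == url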
@contextlib.contextmanager
def savefile_open(
filename: str,
binary: bool = False,
encoding: str = 'utf-8'
) -> Iterator[IO]:
"""Context manager to easily use a QSaveFile."""
f = QSaveFile(filename)
cancelled = False
try:
open_ok = f.open(QIODevice.WriteOnly)
if not open_ok:
raise QtOSError(f)
dev = cast(BinaryIO, PyQIODevice(f))
if binary:
new_f: IO = dev
else:
new_f = io.TextIOWrapper(dev, encoding=encoding)
yield new_f
new_f.flush()
except:
f.cancelWriting()
cancelled = True
raise
finally:
commit_ok = f.commit()
if not commit_ok and not cancelled:
raise QtOSError(f, msg="Commit failed!")
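# A hypothetical usage sketch (not part of qutebrowser): QSaveFile writes
# to a temporary file and commit() atomically replaces the target, so a
# failed write never leaves a half-written file behind.
def _demo_savefile_open(path: str) -> None:
    with savefile_open(path) as f:
        f.write('hello world\n')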
def qcolor_to_qsscolor(c: QColor) -> str:
"""Convert a QColor to a string that can be used in a QStyleSheet."""
ensure_valid(c)
return "rgba({}, {}, {}, {})".format(
c.red(), c.green(), c.blue(), c.alpha())
class PyQIODevice(io.BufferedIOBase):
"""Wrapper for a QIODevice which provides a python interface.
Attributes:
dev: The underlying QIODevice.
"""
def __init__(self, dev: QIODevice) -> None:
super().__init__()
self.dev = dev
def __len__(self) -> int:
return self.dev.size()
def _check_open(self) -> None:
"""Check if the device is open, raise ValueError if not."""
if not self.dev.isOpen():
raise ValueError("IO operation on closed device!")
def _check_random(self) -> None:
"""Check if the device supports random access, raise OSError if not."""
if not self.seekable():
raise OSError("Random access not allowed!")
def _check_readable(self) -> None:
"""Check if the device is readable, raise OSError if not."""
if not self.dev.isReadable():
raise OSError("Trying to read unreadable file!")
def _check_writable(self) -> None:
"""Check if the device is writable, raise OSError if not."""
if not self.writable():
raise OSError("Trying to write to unwritable file!")
def open(self, mode: QIODevice.OpenMode) -> contextlib.closing:
"""Open the underlying device and ensure opening succeeded.
Raises OSError if opening failed.
Args:
mode: QIODevice::OpenMode flags.
Return:
A contextlib.closing() object so this can be used as
contextmanager.
"""
ok = self.dev.open(mode)
if not ok:
raise QtOSError(self.dev)
return contextlib.closing(self)
def close(self) -> None:
"""Close the underlying device."""
self.dev.close()
def fileno(self) -> int:
raise io.UnsupportedOperation
def seek(self, offset: int, whence: int = io.SEEK_SET) -> int:
self._check_open()
self._check_random()
if whence == io.SEEK_SET:
ok = self.dev.seek(offset)
elif whence == io.SEEK_CUR:
ok = self.dev.seek(self.tell() + offset)
elif whence == io.SEEK_END:
ok = self.dev.seek(len(self) + offset)
else:
raise io.UnsupportedOperation("whence = {} is not "
"supported!".format(whence))
if not ok:
raise QtOSError(self.dev, msg="seek failed!")
return self.dev.pos()
    def truncate(self, size: Optional[int] = None) -> int:
raise io.UnsupportedOperation
@property
def closed(self) -> bool:
return not self.dev.isOpen()
def flush(self) -> None:
self._check_open()
self.dev.waitForBytesWritten(-1)
def isatty(self) -> bool:
self._check_open()
return False
def readable(self) -> bool:
return self.dev.isReadable()
def readline(self, size: Optional[int] = -1) -> bytes:
self._check_open()
self._check_readable()
if size is None or size < 0:
qt_size = 0 # no maximum size
elif size == 0:
return b''
else:
qt_size = size + 1 # Qt also counts the NUL byte
buf: Union[QByteArray, bytes, None] = None
if self.dev.canReadLine():
buf = self.dev.readLine(qt_size)
elif size is None or size < 0:
buf = self.dev.readAll()
else:
buf = self.dev.read(size)
if buf is None:
raise QtOSError(self.dev)
if isinstance(buf, QByteArray):
# The type (bytes or QByteArray) seems to depend on what data we
# feed in...
buf = buf.data()
return buf
def seekable(self) -> bool:
return not self.dev.isSequential()
def tell(self) -> int:
self._check_open()
self._check_random()
return self.dev.pos()
def writable(self) -> bool:
return self.dev.isWritable()
def write( # type: ignore[override]
self,
data: Union[bytes, bytearray]
) -> int:
self._check_open()
self._check_writable()
num = self.dev.write(data)
if num == -1 or num < len(data):
raise QtOSError(self.dev)
return num
def read(self, size: Optional[int] = None) -> bytes:
self._check_open()
self._check_readable()
buf: Union[QByteArray, bytes, None] = None
if size in [None, -1]:
buf = self.dev.readAll()
else:
assert size is not None
buf = self.dev.read(size)
if buf is None:
raise QtOSError(self.dev)
if isinstance(buf, QByteArray):
# The type (bytes or QByteArray) seems to depend on what data we
# feed in...
buf = buf.data()
return buf
class QtValueError(ValueError):
"""Exception which gets raised by ensure_valid."""
def __init__(self, obj: 'Validatable') -> None:
try:
self.reason = obj.errorString() # type: ignore[attr-defined]
except AttributeError:
self.reason = None
err = "{} is not valid".format(obj)
if self.reason:
err += ": {}".format(self.reason)
super().__init__(err)
class EventLoop(QEventLoop):
"""A thin wrapper around QEventLoop.
Raises an exception when doing exec_() multiple times.
"""
    def __init__(self, parent: Optional[QObject] = None) -> None:
super().__init__(parent)
self._executing = False
def exec_(
self,
flags: QEventLoop.ProcessEventsFlags =
cast(QEventLoop.ProcessEventsFlags, QEventLoop.AllEvents)
) -> int:
"""Override exec_ to raise an exception when re-running."""
if self._executing:
raise AssertionError("Eventloop is already running!")
self._executing = True
status = super().exec_(flags)
self._executing = False
return status
|
from pytest import mark
from cerberus import errors
from cerberus.tests import assert_fail, assert_normalized, assert_success
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'a_nullable_integer': None}),
(assert_success, {'a_nullable_integer': 3}),
(assert_success, {'a_nullable_field_without_type': None}),
(assert_fail, {'a_nullable_integer': "foo"}),
(assert_fail, {'an_integer': None}),
(assert_fail, {'a_not_nullable_field_without_type': None}),
],
)
def test_nullable(test_function, document):
test_function(document=document)
def test_nullable_does_not_fail_coerce():
assert_normalized(
schema={'foo': {'coerce': int, 'nullable': True}},
document={'foo': None},
expected={'foo': None},
)
def test_nullables_fail_coerce_on_non_null_values(validator):
def failing_coercion(value):
raise Exception("expected to fail")
schema = {'foo': {'coerce': failing_coercion, 'nullable': True, 'type': 'integer'}}
assert_normalized(document={'foo': None}, expected={'foo': None}, schema=schema)
assert_fail(document={'foo': 2}, schema=schema, validator=validator)
assert errors.COERCION_FAILED in validator._errors
def test_nullable_skips_allowed():
assert_success(
schema={'role': {'allowed': ['agent', 'client', 'supplier'], 'nullable': True}},
document={'role': None},
)
def test_nullable_skips_type(validator):
assert_fail({'an_integer': None}, validator=validator)
assert validator.errors == {'an_integer': ['null value not allowed']}
|
import json
from homeassistant.components.brother.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_TYPE
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
async def init_integration(hass) -> MockConfigEntry:
"""Set up the Brother integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
title="HL-L2340DW 0123456789",
unique_id="0123456789",
data={CONF_HOST: "localhost", CONF_TYPE: "laser"},
)
with patch(
"brother.Brother._get_data",
return_value=json.loads(load_fixture("brother_printer_data.json")),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import logging
import pyzerproc
from homeassistant import config_entries
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def _async_has_devices(hass) -> bool:
"""Return if there are devices that can be discovered."""
try:
devices = await hass.async_add_executor_job(pyzerproc.discover)
return len(devices) > 0
except pyzerproc.ZerprocException:
_LOGGER.error("Unable to discover nearby Zerproc devices", exc_info=True)
return False
config_entry_flow.register_discovery_flow(
DOMAIN, "Zerproc", _async_has_devices, config_entries.CONN_CLASS_LOCAL_POLL
)
|
from pyinsteon.groups import (
CO_SENSOR,
DOOR_SENSOR,
HEARTBEAT,
LEAK_SENSOR_WET,
LIGHT_SENSOR,
LOW_BATTERY,
MOTION_SENSOR,
OPEN_CLOSE_SENSOR,
SENSOR_MALFUNCTION,
SMOKE_SENSOR,
TEST_SENSOR,
)
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_DOOR,
DEVICE_CLASS_GAS,
DEVICE_CLASS_LIGHT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PROBLEM,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorEntity,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import SIGNAL_ADD_ENTITIES
from .insteon_entity import InsteonEntity
from .utils import async_add_insteon_entities
SENSOR_TYPES = {
OPEN_CLOSE_SENSOR: DEVICE_CLASS_OPENING,
MOTION_SENSOR: DEVICE_CLASS_MOTION,
DOOR_SENSOR: DEVICE_CLASS_DOOR,
LEAK_SENSOR_WET: DEVICE_CLASS_MOISTURE,
LIGHT_SENSOR: DEVICE_CLASS_LIGHT,
LOW_BATTERY: DEVICE_CLASS_BATTERY,
CO_SENSOR: DEVICE_CLASS_GAS,
SMOKE_SENSOR: DEVICE_CLASS_SMOKE,
TEST_SENSOR: DEVICE_CLASS_SAFETY,
SENSOR_MALFUNCTION: DEVICE_CLASS_PROBLEM,
HEARTBEAT: DEVICE_CLASS_PROBLEM,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Insteon binary sensors from a config entry."""
def add_entities(discovery_info=None):
"""Add the Insteon entities for the platform."""
async_add_insteon_entities(
hass,
BINARY_SENSOR_DOMAIN,
InsteonBinarySensorEntity,
async_add_entities,
discovery_info,
)
signal = f"{SIGNAL_ADD_ENTITIES}_{BINARY_SENSOR_DOMAIN}"
async_dispatcher_connect(hass, signal, add_entities)
add_entities()
class InsteonBinarySensorEntity(InsteonEntity, BinarySensorEntity):
"""A Class for an Insteon binary sensor entity."""
def __init__(self, device, group):
"""Initialize the INSTEON binary sensor."""
super().__init__(device, group)
self._sensor_type = SENSOR_TYPES.get(self._insteon_device_group.name)
@property
def device_class(self):
"""Return the class of this sensor."""
return self._sensor_type
@property
def is_on(self):
"""Return the boolean response if the node is on."""
return bool(self._insteon_device_group.value)
|
import os
import xml.dom.minidom as dom
import roslib.exceptions
# stack.xml and manifest.xml have the same internal tags right now
REQUIRED = ['author', 'license']
ALLOWXHTML = ['description']
OPTIONAL = ['logo', 'url', 'brief', 'description', 'status',
'notes', 'depend', 'rosdep', 'export', 'review',
'versioncontrol', 'platform', 'version', 'rosbuild2',
'catkin']
VALID = REQUIRED + OPTIONAL
class ManifestException(roslib.exceptions.ROSLibException):
pass
def get_nodes_by_name(n, name):
return [t for t in n.childNodes if t.nodeType == t.ELEMENT_NODE and t.tagName == name]
def check_optional(name, allowXHTML=False, merge_multiple=False):
"""
Validator for optional elements.
@raise ManifestException: if validation fails
"""
def check(n, filename):
n = get_nodes_by_name(n, name)
if len(n) > 1 and not merge_multiple:
raise ManifestException("Invalid manifest file: must have a single '%s' element" % name)
if n:
values = []
for child in n:
if allowXHTML:
values.append(''.join([x.toxml() for x in child.childNodes]))
else:
values.append(_get_text(child.childNodes).strip())
return ', '.join(values)
return check
def check_required(name, allowXHTML=False, merge_multiple=False):
"""
Validator for required elements.
@raise ManifestException: if validation fails
"""
def check(n, filename):
n = get_nodes_by_name(n, name)
if not n:
# print >> sys.stderr, "Invalid manifest file[%s]: missing required '%s' element"%(filename, name)
return ''
if len(n) != 1 and not merge_multiple:
raise ManifestException("Invalid manifest file: must have only one '%s' element" % name)
values = []
for child in n:
if allowXHTML:
values.append(''.join([x.toxml() for x in child.childNodes]))
else:
values.append(_get_text(child.childNodes).strip())
return ', '.join(values)
return check
def check_platform(name):
"""
Validator for manifest platform.
@raise ManifestException: if validation fails
"""
def check(n, filename):
platforms = get_nodes_by_name(n, name)
try:
vals = [(p.attributes['os'].value, p.attributes['version'].value, p.getAttribute('notes')) for p in platforms]
except KeyError as e:
raise ManifestException("<platform> tag is missing required '%s' attribute" % str(e))
return [Platform(*v) for v in vals]
return check
def check_depends(name):
"""
Validator for manifest depends.
@raise ManifestException: if validation fails
"""
def check(n, filename):
nodes = get_nodes_by_name(n, name)
# TDS 20110419: this is a hack.
# rosbuild2 has a <depend thirdparty="depname"/> tag,
# which is confusing this subroutine with
# KeyError: 'package'
# for now, explicitly don't consider thirdparty depends
depends = [e.attributes for e in nodes if 'thirdparty' not in e.attributes.keys()]
try:
packages = [d['package'].value for d in depends]
except KeyError:
raise ManifestException("Invalid manifest file: depends is missing 'package' attribute")
return [Depend(p) for p in packages]
return check
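# A brief illustrative sketch (not part of roslib): validators are built
# per tag name and applied to a parsed DOM node plus the filename.
def _demo_check_depends():
    n = dom.parseString('<package><depend package="roscpp"/></package>')
    depends = check_depends('depend')(n.documentElement, 'manifest.xml')
    assert [d.package for d in depends] == ['roscpp']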
def check_stack_depends(name):
"""
Validator for stack depends.
@raise ManifestException: if validation fails
"""
def check(n, filename):
nodes = get_nodes_by_name(n, name)
depends = [e.attributes for e in nodes]
packages = [d['stack'].value for d in depends]
return [StackDepend(p) for p in packages]
return check
def check_rosdeps(name):
"""
Validator for stack rosdeps.
@raise ManifestException: if validation fails
"""
def check(n, filename):
nodes = get_nodes_by_name(n, name)
rosdeps = [e.attributes for e in nodes]
names = [d['name'].value for d in rosdeps]
return [ROSDep(n) for n in names]
return check
def _attrs(node):
attrs = {}
for k in node.attributes.keys():
attrs[k] = node.attributes.get(k).value
return attrs
def check_exports(name):
def check(n, filename):
ret_val = []
for e in get_nodes_by_name(n, name):
elements = [c for c in e.childNodes if c.nodeType == c.ELEMENT_NODE]
ret_val.extend([Export(t.tagName, _attrs(t), _get_text(t.childNodes)) for t in elements])
return ret_val
return check
def check_versioncontrol(name):
def check(n, filename):
e = get_nodes_by_name(n, name)
if not e:
return None
# note: 'url' isn't actually required, but as we only support type=svn it implicitly is for now
return VersionControl(e[0].attributes['type'].value, e[0].attributes['url'].value)
return check
def check(name, merge_multiple=False):
if name == 'depend':
return check_depends('depend')
elif name == 'export':
return check_exports('export')
elif name == 'versioncontrol':
return check_versioncontrol('versioncontrol')
elif name == 'rosdep':
return check_rosdeps('rosdep')
elif name == 'platform':
return check_platform('platform')
elif name in REQUIRED:
if name in ALLOWXHTML:
return check_required(name, True, merge_multiple)
return check_required(name, merge_multiple=merge_multiple)
elif name in OPTIONAL:
if name in ALLOWXHTML:
return check_optional(name, True, merge_multiple)
return check_optional(name, merge_multiple=merge_multiple)
class Export(object):
"""
Manifest 'export' tag
"""
def __init__(self, tag, attrs, str):
"""
Create new export instance.
@param tag: name of the XML tag
@type tag: str
@param attrs: dictionary of XML attributes for this export tag
@type attrs: dict
@param str: string value contained by tag, if any
@type str: str
"""
self.tag = tag
self.attrs = attrs
self.str = str
def get(self, attr):
"""
@return: value of attribute or None if attribute not set
@rtype: str
"""
return self.attrs.get(attr, None)
def xml(self):
"""
@return: export instance represented as manifest XML
@rtype: str
"""
attrs = ' '.join([' %s="%s"' % (k, v) for k, v in self.attrs.items()]) # py3k
if self.str:
return '<%s%s>%s</%s>' % (self.tag, attrs, self.str, self.tag)
else:
return '<%s%s />' % (self.tag, attrs)
class Platform(object):
"""
Manifest 'platform' tag
"""
__slots__ = ['os', 'version', 'notes']
def __init__(self, os, version, notes=None):
"""
Create new depend instance.
@param os: OS name. must be non-empty
@type os: str
@param version: OS version. must be non-empty
@type version: str
@param notes: (optional) notes about platform support
@type notes: str
"""
if not os:
raise ValueError("bad 'os' attribute")
if not version:
raise ValueError("bad 'version' attribute")
self.os = os
self.version = version
self.notes = notes
def __str__(self):
return '%s %s' % (self.os, self.version)
def __repr__(self):
return '%s %s' % (self.os, self.version)
def __eq__(self, obj):
"""
Override equality test. notes *are* considered in the equality test.
"""
if not isinstance(obj, Platform):
return False
return self.os == obj.os and self.version == obj.version and self.notes == obj.notes
def xml(self):
"""
@return: instance represented as manifest XML
@rtype: str
"""
if self.notes is not None:
return '<platform os="%s" version="%s" notes="%s"/>' % (self.os, self.version, self.notes)
else:
return '<platform os="%s" version="%s"/>' % (self.os, self.version)
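# A minimal sketch (not part of roslib): round-tripping a platform entry
# back to its manifest XML representation.
def _demo_platform_xml():
    p = Platform('ubuntu', '12.04', notes='tested')
    assert p.xml() == '<platform os="ubuntu" version="12.04" notes="tested"/>'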
class Depend(object):
"""
Manifest 'depend' tag
"""
__slots__ = ['package']
def __init__(self, package):
"""
Create new depend instance.
@param package: package name. must be non-empty
@type package: str
"""
if not package:
raise ValueError("bad 'package' attribute")
self.package = package
def __str__(self):
return self.package
def __repr__(self):
return self.package
def __eq__(self, obj):
if not isinstance(obj, Depend):
return False
return self.package == obj.package
def xml(self):
"""
@return: depend instance represented as manifest XML
@rtype: str
"""
return '<depend package="%s" />' % self.package
class StackDepend(object):
"""
Stack Manifest 'depend' tag
"""
__slots__ = ['stack', 'annotation']
def __init__(self, stack):
"""
@param stack: stack name. must be non-empty
@type stack: str
"""
if not stack:
raise ValueError("bad 'stack' attribute")
self.stack = stack
self.annotation = None
def __str__(self):
return self.stack
def __repr__(self):
return self.stack
def __eq__(self, obj):
if not isinstance(obj, StackDepend):
return False
return self.stack == obj.stack
def xml(self):
"""
@return: stack depend instance represented as stack manifest XML
@rtype: str
"""
if self.annotation:
return '<depend stack="%s" /> <!-- %s -->' % (self.stack, self.annotation)
else:
return '<depend stack="%s" />' % self.stack
class ROSDep(object):
"""
Manifest 'rosdep' tag
"""
__slots__ = ['name', ]
def __init__(self, name):
"""
Create new rosdep instance.
@param name: dependency name. Must be non-empty.
@type name: str
"""
if not name:
raise ValueError("bad 'name' attribute")
self.name = name
def xml(self):
"""
@return: rosdep instance represented as manifest XML
@rtype: str
"""
return '<rosdep name="%s" />' % self.name
class VersionControl(object):
"""
Manifest 'versioncontrol' tag
"""
__slots__ = ['type', 'url']
def __init__(self, type_, url):
"""
@param type_: version control type (e.g. 'svn'). must be non empty
@type type_: str
@param url: URL associated with version control. must be non empty
@type url: str
"""
def is_string_type(obj):
try:
return isinstance(obj, basestring)
except NameError:
return isinstance(obj, str)
if not type_ or not is_string_type(type_):
raise ValueError("bad 'type' attribute")
if url is not None and not is_string_type(url):
raise ValueError("bad 'url' attribute")
self.type = type_
self.url = url
def xml(self):
"""
@return: versioncontrol instance represented as manifest XML
@rtype: str
"""
if self.url:
return '<versioncontrol type="%s" url="%s" />' % (self.type, self.url)
else:
return '<versioncontrol type="%s" />' % self.type
class _Manifest(object):
"""
Object representation of a ROS manifest file
"""
__slots__ = ['description', 'brief',
'author', 'license', 'license_url', 'url',
'depends', 'rosdeps', 'platforms',
'logo', 'exports', 'version',
'versioncontrol', 'status', 'notes',
'unknown_tags',
'_type']
def __init__(self, _type='package'):
self.description = self.brief = self.author = \
self.license = self.license_url = \
self.url = self.logo = self.status = \
self.version = self.notes = ''
self.depends = []
self.rosdeps = []
self.exports = []
self.platforms = []
self._type = _type
# store unrecognized tags during parsing
self.unknown_tags = []
def __str__(self):
return self.xml()
def get_export(self, tag, attr):
"""
@return: values of the given attribute for all exports with the specified tag, e.g. get_export('python', 'path')
@rtype: [str]
"""
return [e.get(attr) for e in self.exports if e.tag == tag if e.get(attr) is not None]
def xml(self):
"""
@return: Manifest instance as ROS XML manifest
@rtype: str
"""
if not self.brief:
desc = ' <description>%s</description>' % self.description
else:
desc = ' <description brief="%s">%s</description>' % (self.brief, self.description)
author = ' <author>%s</author>' % self.author
if self.license_url:
license = ' <license url="%s">%s</license>' % (self.license_url, self.license)
else:
license = ' <license>%s</license>' % self.license
versioncontrol = url = logo = exports = version = review = ''
if self.url:
url = ' <url>%s</url>' % self.url
if self.version:
version = ' <version>%s</version>' % self.version
if self.logo:
logo = ' <logo>%s</logo>' % self.logo
depends = '\n'.join([' %s' % d.xml() for d in self.depends])
rosdeps = '\n'.join([' %s' % rd.xml() for rd in self.rosdeps])
platforms = '\n'.join([' %s' % p.xml() for p in self.platforms])
if self.exports:
exports = ' <export>\n' + '\n'.join([' %s' % e.xml() for e in self.exports]) + ' </export>'
if self.versioncontrol:
versioncontrol = ' %s' % self.versioncontrol.xml()
if self.status or self.notes:
review = ' <review status="%s" notes="%s" />' % (self.status, self.notes)
fields = filter(lambda x: x,
[desc, author, license, review, url, logo, depends,
rosdeps, platforms, exports, versioncontrol, version])
return '<%s>\n' % self._type + '\n'.join(fields) + '\n</%s>' % self._type
def _get_text(nodes):
"""
DOM utility routine for getting contents of text nodes
"""
return ''.join([n.data for n in nodes if n.nodeType == n.TEXT_NODE])
def parse_file(m, file):
"""
Parse manifest file (package, stack)
@param m: manifest object to populate
@type m: L{_Manifest}
@param file: manifest.xml file path
@type file: str
@return: return m, populated with parsed fields
@rtype: L{_Manifest}
"""
if not file:
raise ValueError('Missing manifest file argument')
if not os.path.isfile(file):
raise ValueError('Invalid/non-existent manifest file: %s' % file)
with open(file, 'r') as f:
text = f.read()
try:
return parse(m, text, file)
except ManifestException as e:
raise ManifestException('Invalid manifest file [%s]: %s' % (os.path.abspath(file), e))
def parse(m, string, filename='string'):
"""
Parse manifest.xml string contents
@param string: manifest.xml contents
@type string: str
@param m: manifest object to populate
@type m: L{_Manifest}
@return: return m, populated with parsed fields
@rtype: L{_Manifest}
"""
try:
d = dom.parseString(string)
except Exception as e:
raise ManifestException('invalid XML: %s' % e)
p = get_nodes_by_name(d, m._type)
if len(p) != 1:
raise ManifestException("manifest must have a single '%s' element" % m._type)
p = p[0]
m.description = check('description')(p, filename)
m.brief = ''
try:
tag = get_nodes_by_name(p, 'description')[0]
m.brief = tag.getAttribute('brief') or ''
except Exception:
# means that 'description' tag is missing
pass
# TODO: figure out how to multiplex
if m._type == 'package':
m.depends = check_depends('depend')(p, filename)
elif m._type == 'stack':
m.depends = check_stack_depends('depend')(p, filename)
elif m._type == 'app':
# not implemented yet
pass
m.rosdeps = check('rosdep')(p, filename)
m.platforms = check('platform')(p, filename)
m.exports = check('export')(p, filename)
m.versioncontrol = check('versioncontrol')(p, filename)
m.license = check('license')(p, filename)
m.license_url = ''
try:
tag = get_nodes_by_name(p, 'license')[0]
m.license_url = tag.getAttribute('url') or ''
except Exception:
pass # manifest is missing required 'license' tag
m.status = 'unreviewed'
try:
tag = get_nodes_by_name(p, 'review')[0]
m.status = tag.getAttribute('status') or ''
except Exception:
pass # manifest is missing optional 'review status' tag
m.notes = ''
try:
tag = get_nodes_by_name(p, 'review')[0]
m.notes = tag.getAttribute('notes') or ''
except Exception:
pass # manifest is missing optional 'review notes' tag
m.author = check('author', True)(p, filename)
m.url = check('url')(p, filename)
m.version = check('version')(p, filename)
m.logo = check('logo')(p, filename)
# do some validation on what we just parsed
if m._type == 'stack':
if m.exports:
raise ManifestException('stack manifests are not allowed to have exports')
if m.rosdeps:
raise ManifestException('stack manifests are not allowed to have rosdeps')
# store unrecognized tags
m.unknown_tags = [e for e in p.childNodes if e.nodeType == e.ELEMENT_NODE and e.tagName not in VALID]
return m
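# Illustrative usage sketch (not part of the library): parse a minimal,
# hypothetical package manifest string with the helpers above and
# re-serialize it. Assumes the REQUIRED/OPTIONAL tables defined earlier
# in this module cover 'description', 'author' and 'license'.
_EXAMPLE_MANIFEST = """<package>
  <description brief="demo">An example package.</description>
  <author>Jane Doe</author>
  <license>BSD</license>
  <depend package="roscpp" />
  <rosdep name="boost" />
</package>
"""
def _demo_roundtrip():
    m = parse(_Manifest('package'), _EXAMPLE_MANIFEST)
    assert [d.package for d in m.depends] == ['roscpp']
    assert [r.name for r in m.rosdeps] == ['boost']
    return m.xml()  # serialize back to manifest XML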
|
import pyfnip
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_NAME, CONF_PORT
import homeassistant.helpers.config_validation as cv
CONF_DRIVER = "driver"
CONF_DRIVER_FNIP6X10AD = "FNIP6x10ad"
CONF_DRIVER_FNIP8X10A = "FNIP8x10a"
CONF_DRIVER_TYPES = [CONF_DRIVER_FNIP6X10AD, CONF_DRIVER_FNIP8X10A]
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional("dimmable", default=False): cv.boolean,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DRIVER): vol.In(CONF_DRIVER_TYPES),
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_DEVICES): {cv.string: DEVICE_SCHEMA},
}
)
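# Example configuration.yaml entry for this platform (host, port and
# channel numbers are placeholders):
#
# light:
#   - platform: futurenow
#     driver: FNIP6x10ad
#     host: 192.168.1.50
#     port: 7078
#     devices:
#       "1":
#         name: Hallway
#         dimmable: true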
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the light platform for each FutureNow unit."""
lights = []
for channel, device_config in config[CONF_DEVICES].items():
device = {}
device["name"] = device_config[CONF_NAME]
device["dimmable"] = device_config["dimmable"]
device["channel"] = channel
device["driver"] = config[CONF_DRIVER]
device["host"] = config[CONF_HOST]
device["port"] = config[CONF_PORT]
lights.append(FutureNowLight(device))
add_entities(lights, True)
def to_futurenow_level(level):
"""Convert the given Home Assistant light level (0-255) to FutureNow (0-100)."""
return int((level * 100) / 255)
def to_hass_level(level):
"""Convert the given FutureNow (0-100) light level to Home Assistant (0-255)."""
return int((level * 255) / 100)
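# Round-trip example: integer truncation makes the mapping lossy, e.g.
# to_futurenow_level(128) == 50 and to_hass_level(50) == 127, not 128.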
class FutureNowLight(LightEntity):
"""Representation of an FutureNow light."""
def __init__(self, device):
"""Initialize the light."""
self._name = device["name"]
self._dimmable = device["dimmable"]
self._channel = device["channel"]
self._brightness = None
self._last_brightness = 255
self._state = None
if device["driver"] == CONF_DRIVER_FNIP6X10AD:
self._light = pyfnip.FNIP6x2adOutput(
device["host"], device["port"], self._channel
)
if device["driver"] == CONF_DRIVER_FNIP8X10A:
self._light = pyfnip.FNIP8x10aOutput(
device["host"], device["port"], self._channel
)
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
if self._dimmable:
return SUPPORT_BRIGHTNESS
return 0
def turn_on(self, **kwargs):
"""Turn the light on."""
if self._dimmable:
level = kwargs.get(ATTR_BRIGHTNESS, self._last_brightness)
else:
level = 255
self._light.turn_on(to_futurenow_level(level))
def turn_off(self, **kwargs):
"""Turn the light off."""
self._light.turn_off()
if self._brightness:
self._last_brightness = self._brightness
def update(self):
"""Fetch new state data for this light."""
state = int(self._light.is_on())
self._state = bool(state)
self._brightness = to_hass_level(state)
|
import io
import logging
import os.path
import urllib.parse
try:
import requests
except ImportError:
MISSING_DEPS = True
from smart_open import bytebuffer, constants
import smart_open.utils
DEFAULT_BUFFER_SIZE = 128 * 1024
SCHEMES = ('http', 'https')
logger = logging.getLogger(__name__)
_HEADERS = {'Accept-Encoding': 'identity'}
"""The headers we send to the server with every HTTP request.
For now, we ask the server to send us the files as they are.
Sometimes, servers compress the file for more efficient transfer, in which case
the client (us) has to decompress them with the appropriate algorithm.
"""
def parse_uri(uri_as_string):
split_uri = urllib.parse.urlsplit(uri_as_string)
assert split_uri.scheme in SCHEMES
uri_path = split_uri.netloc + split_uri.path
uri_path = "/" + uri_path.lstrip("/")
return dict(scheme=split_uri.scheme, uri_path=uri_path)
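# Example: parse_uri('https://example.com/a/b.txt') returns
# {'scheme': 'https', 'uri_path': '/example.com/a/b.txt'} -- the netloc
# is deliberately folded into uri_path.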
def open_uri(uri, mode, transport_params):
kwargs = smart_open.utils.check_kwargs(open, transport_params)
return open(uri, mode, **kwargs)
def open(uri, mode, kerberos=False, user=None, password=None, headers=None):
"""Implement streamed reader from a web site.
Supports Kerberos and Basic HTTP authentication.
Parameters
----------
uri: str
The URL to open.
mode: str
The mode to open using.
kerberos: boolean, optional
If True, will attempt to use the local Kerberos credentials
user: str, optional
The username for authenticating over HTTP
password: str, optional
The password for authenticating over HTTP
headers: dict, optional
Any headers to send in the request. If ``None``, the default headers are sent:
``{'Accept-Encoding': 'identity'}``. To use no headers at all,
set this variable to an empty dict, ``{}``.
Note
----
If neither kerberos nor (user, password) is set, the connection is
unauthenticated, unless credentials are set separately in headers.
"""
if mode == constants.READ_BINARY:
fobj = SeekableBufferedInputBase(
uri, mode, kerberos=kerberos,
user=user, password=password, headers=headers
)
fobj.name = os.path.basename(urllib.parse.urlparse(uri).path)
return fobj
else:
raise NotImplementedError('http support for mode %r not implemented' % mode)
class BufferedInputBase(io.BufferedIOBase):
def __init__(self, url, mode='r', buffer_size=DEFAULT_BUFFER_SIZE,
kerberos=False, user=None, password=None, headers=None):
if kerberos:
import requests_kerberos
auth = requests_kerberos.HTTPKerberosAuth()
elif user is not None and password is not None:
auth = (user, password)
else:
auth = None
self.buffer_size = buffer_size
self.mode = mode
if headers is None:
self.headers = _HEADERS.copy()
else:
self.headers = headers
self.response = requests.get(url, auth=auth, stream=True, headers=self.headers)
if not self.response.ok:
self.response.raise_for_status()
self._read_iter = self.response.iter_content(self.buffer_size)
self._read_buffer = bytebuffer.ByteBuffer(buffer_size)
self._current_pos = 0
#
# This member is part of the io.BufferedIOBase interface.
#
self.raw = None
#
# Override some methods from io.IOBase.
#
def close(self):
"""Flush and close this stream."""
logger.debug("close: called")
self.response = None
self._read_iter = None
def readable(self):
"""Return True if the stream can be read from."""
return True
def seekable(self):
return False
#
# io.BufferedIOBase methods.
#
def detach(self):
"""Unsupported."""
raise io.UnsupportedOperation
def read(self, size=-1):
"""
Mimics the read call to a filehandle object.
"""
logger.debug("reading with size: %d", size)
if self.response is None:
return b''
if size == 0:
return b''
elif size < 0 and len(self._read_buffer) == 0:
retval = self.response.raw.read()
elif size < 0:
retval = self._read_buffer.read() + self.response.raw.read()
else:
while len(self._read_buffer) < size:
logger.debug(
"http reading more content at current_pos: %d with size: %d",
self._current_pos, size,
)
bytes_read = self._read_buffer.fill(self._read_iter)
if bytes_read == 0:
# Oops, ran out of data early.
retval = self._read_buffer.read()
self._current_pos += len(retval)
return retval
# If we got here, it means we have enough data in the buffer
# to return to the caller.
retval = self._read_buffer.read(size)
self._current_pos += len(retval)
return retval
def read1(self, size=-1):
"""This is the same as read()."""
return self.read(size=size)
def readinto(self, b):
"""Read up to len(b) bytes into b, and return the number of bytes
read."""
data = self.read(len(b))
if not data:
return 0
b[:len(data)] = data
return len(data)
class SeekableBufferedInputBase(BufferedInputBase):
"""
Implement seekable streamed reader from a web site.
Supports Kerberos and Basic HTTP authentication.
"""
def __init__(self, url, mode='r', buffer_size=DEFAULT_BUFFER_SIZE,
kerberos=False, user=None, password=None, headers=None):
"""
If Kerberos is True, will attempt to use the local Kerberos credentials.
Otherwise, will try to use "basic" HTTP authentication via username/password.
If none of those are set, will connect unauthenticated.
"""
self.url = url
if kerberos:
import requests_kerberos
self.auth = requests_kerberos.HTTPKerberosAuth()
elif user is not None and password is not None:
self.auth = (user, password)
else:
self.auth = None
if headers is None:
self.headers = _HEADERS.copy()
else:
self.headers = headers
self.buffer_size = buffer_size
self.mode = mode
self.response = self._partial_request()
if not self.response.ok:
self.response.raise_for_status()
logger.debug('self.response: %r, raw: %r', self.response, self.response.raw)
self._seekable = True
self.content_length = int(self.response.headers.get("Content-Length", -1))
if self.content_length < 0:
self._seekable = False
if self.response.headers.get("Accept-Ranges", "none").lower() != "bytes":
self._seekable = False
self._read_iter = self.response.iter_content(self.buffer_size)
self._read_buffer = bytebuffer.ByteBuffer(buffer_size)
self._current_pos = 0
#
# This member is part of the io.BufferedIOBase interface.
#
self.raw = None
def seek(self, offset, whence=0):
"""Seek to the specified position.
:param int offset: The offset in bytes.
:param int whence: Where the offset is from.
Returns the position after seeking."""
logger.debug('seeking to offset: %r whence: %r', offset, whence)
if whence not in constants.WHENCE_CHOICES:
raise ValueError('invalid whence, expected one of %r' % constants.WHENCE_CHOICES)
if not self.seekable():
raise OSError
if whence == constants.WHENCE_START:
new_pos = offset
elif whence == constants.WHENCE_CURRENT:
new_pos = self._current_pos + offset
elif whence == constants.WHENCE_END:
new_pos = self.content_length + offset
new_pos = smart_open.utils.clamp(new_pos, 0, self.content_length)
if self._current_pos == new_pos:
return self._current_pos
logger.debug("http seeking from current_pos: %d to new_pos: %d", self._current_pos, new_pos)
self._current_pos = new_pos
if new_pos == self.content_length:
self.response = None
self._read_iter = None
self._read_buffer.empty()
else:
response = self._partial_request(new_pos)
if response.ok:
self.response = response
self._read_iter = self.response.iter_content(self.buffer_size)
self._read_buffer.empty()
else:
self.response = None
return self._current_pos
def tell(self):
return self._current_pos
def seekable(self, *args, **kwargs):
return self._seekable
def truncate(self, size=None):
"""Unsupported."""
raise io.UnsupportedOperation
def _partial_request(self, start_pos=None):
if start_pos is not None:
self.headers.update({"range": smart_open.utils.make_range_string(start_pos)})
response = requests.get(self.url, auth=self.auth, stream=True, headers=self.headers)
return response
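# Usage sketch (illustrative; the URL is a placeholder). read() streams
# the response body, while seek() issues a fresh request with a Range
# header when the server advertises 'Accept-Ranges: bytes':
#
# reader = SeekableBufferedInputBase('https://example.com/big.bin', 'rb')
# if reader.seekable():
#     reader.seek(1024)      # re-request starting at byte 1024
# chunk = reader.read(4096)  # read the next 4 KiB from the stream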
|
import pytest
import xarray
from xarray import concat, merge
from xarray.backends.file_manager import FILE_CACHE
from xarray.core.options import OPTIONS, _get_keep_attrs
from xarray.tests.test_dataset import create_test_data
def test_invalid_option_raises():
with pytest.raises(ValueError):
xarray.set_options(not_a_valid_options=True)
def test_display_width():
with pytest.raises(ValueError):
xarray.set_options(display_width=0)
with pytest.raises(ValueError):
xarray.set_options(display_width=-10)
with pytest.raises(ValueError):
xarray.set_options(display_width=3.5)
def test_arithmetic_join():
with pytest.raises(ValueError):
xarray.set_options(arithmetic_join="invalid")
with xarray.set_options(arithmetic_join="exact"):
assert OPTIONS["arithmetic_join"] == "exact"
def test_enable_cftimeindex():
with pytest.raises(ValueError):
xarray.set_options(enable_cftimeindex=None)
with pytest.warns(FutureWarning, match="no-op"):
with xarray.set_options(enable_cftimeindex=True):
assert OPTIONS["enable_cftimeindex"]
def test_file_cache_maxsize():
with pytest.raises(ValueError):
xarray.set_options(file_cache_maxsize=0)
original_size = FILE_CACHE.maxsize
with xarray.set_options(file_cache_maxsize=123):
assert FILE_CACHE.maxsize == 123
assert FILE_CACHE.maxsize == original_size
def test_keep_attrs():
with pytest.raises(ValueError):
xarray.set_options(keep_attrs="invalid_str")
with xarray.set_options(keep_attrs=True):
assert OPTIONS["keep_attrs"]
with xarray.set_options(keep_attrs=False):
assert not OPTIONS["keep_attrs"]
with xarray.set_options(keep_attrs="default"):
assert _get_keep_attrs(default=True)
assert not _get_keep_attrs(default=False)
def test_nested_options():
original = OPTIONS["display_width"]
with xarray.set_options(display_width=1):
assert OPTIONS["display_width"] == 1
with xarray.set_options(display_width=2):
assert OPTIONS["display_width"] == 2
assert OPTIONS["display_width"] == 1
assert OPTIONS["display_width"] == original
def test_display_style():
original = "html"
assert OPTIONS["display_style"] == original
with pytest.raises(ValueError):
xarray.set_options(display_style="invalid_str")
with xarray.set_options(display_style="text"):
assert OPTIONS["display_style"] == "text"
assert OPTIONS["display_style"] == original
def create_test_dataset_attrs(seed=0):
ds = create_test_data(seed)
ds.attrs = {"attr1": 5, "attr2": "history", "attr3": {"nested": "more_info"}}
return ds
def create_test_dataarray_attrs(seed=0, var="var1"):
da = create_test_data(seed)[var]
da.attrs = {"attr1": 5, "attr2": "history", "attr3": {"nested": "more_info"}}
return da
class TestAttrRetention:
def test_dataset_attr_retention(self):
# Use .mean() for all tests: a typical reduction operation
ds = create_test_dataset_attrs()
original_attrs = ds.attrs
# Test default behaviour
result = ds.mean()
assert result.attrs == {}
with xarray.set_options(keep_attrs="default"):
result = ds.mean()
assert result.attrs == {}
with xarray.set_options(keep_attrs=True):
result = ds.mean()
assert result.attrs == original_attrs
with xarray.set_options(keep_attrs=False):
result = ds.mean()
assert result.attrs == {}
def test_dataarray_attr_retention(self):
# Use .mean() for all tests: a typical reduction operation
da = create_test_dataarray_attrs()
original_attrs = da.attrs
# Test default behaviour
result = da.mean()
assert result.attrs == {}
with xarray.set_options(keep_attrs="default"):
result = da.mean()
assert result.attrs == {}
with xarray.set_options(keep_attrs=True):
result = da.mean()
assert result.attrs == original_attrs
with xarray.set_options(keep_attrs=False):
result = da.mean()
assert result.attrs == {}
def test_groupby_attr_retention(self):
da = xarray.DataArray([1, 2, 3], [("x", [1, 1, 2])])
da.attrs = {"attr1": 5, "attr2": "history", "attr3": {"nested": "more_info"}}
original_attrs = da.attrs
# Test default behaviour
result = da.groupby("x").sum(keep_attrs=True)
assert result.attrs == original_attrs
with xarray.set_options(keep_attrs="default"):
result = da.groupby("x").sum(keep_attrs=True)
assert result.attrs == original_attrs
with xarray.set_options(keep_attrs=True):
result1 = da.groupby("x")
result = result1.sum()
assert result.attrs == original_attrs
with xarray.set_options(keep_attrs=False):
result = da.groupby("x").sum()
assert result.attrs == {}
def test_concat_attr_retention(self):
ds1 = create_test_dataset_attrs()
ds2 = create_test_dataset_attrs()
ds2.attrs = {"wrong": "attributes"}
original_attrs = ds1.attrs
# Test default behaviour of keeping the attrs of the first
# dataset in the supplied list
# global keep_attrs option currently doesn't affect concat
result = concat([ds1, ds2], dim="dim1")
assert result.attrs == original_attrs
@pytest.mark.xfail
def test_merge_attr_retention(self):
da1 = create_test_dataarray_attrs(var="var1")
da2 = create_test_dataarray_attrs(var="var2")
da2.attrs = {"wrong": "attributes"}
original_attrs = da1.attrs
# merge currently discards attrs, and the global keep_attrs
# option doesn't affect this
result = merge([da1, da2])
assert result.attrs == original_attrs
def test_display_style_text(self):
ds = create_test_dataset_attrs()
with xarray.set_options(display_style="text"):
text = ds._repr_html_()
assert text.startswith("<pre>")
assert "'nested'" in text
assert "<xarray.Dataset>" in text
def test_display_style_html(self):
ds = create_test_dataset_attrs()
with xarray.set_options(display_style="html"):
html = ds._repr_html_()
assert html.startswith("<div>")
assert "'nested'" in html
def test_display_dataarray_style_text(self):
da = create_test_dataarray_attrs()
with xarray.set_options(display_style="text"):
text = da._repr_html_()
assert text.startswith("<pre>")
assert "<xarray.DataArray 'var1'" in text
def test_display_dataarray_style_html(self):
da = create_test_dataarray_attrs()
with xarray.set_options(display_style="html"):
html = da._repr_html_()
assert html.startswith("<div>")
assert "#x27;nested'" in html
|
from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW
from pymelcloud.atw_device import Zone
from homeassistant.const import (
DEVICE_CLASS_TEMPERATURE,
ENERGY_KILO_WATT_HOUR,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from . import MelCloudDevice
from .const import DOMAIN
ATTR_MEASUREMENT_NAME = "measurement_name"
ATTR_ICON = "icon"
ATTR_UNIT = "unit"
ATTR_DEVICE_CLASS = "device_class"
ATTR_VALUE_FN = "value_fn"
ATTR_ENABLED_FN = "enabled"
ATA_SENSORS = {
"room_temperature": {
ATTR_MEASUREMENT_NAME: "Room Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda x: x.device.room_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
"energy": {
ATTR_MEASUREMENT_NAME: "Energy",
ATTR_ICON: "mdi:factory",
ATTR_UNIT: ENERGY_KILO_WATT_HOUR,
ATTR_DEVICE_CLASS: None,
ATTR_VALUE_FN: lambda x: x.device.total_energy_consumed,
ATTR_ENABLED_FN: lambda x: x.device.has_energy_consumed_meter,
},
}
ATW_SENSORS = {
"outside_temperature": {
ATTR_MEASUREMENT_NAME: "Outside Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda x: x.device.outside_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
"tank_temperature": {
ATTR_MEASUREMENT_NAME: "Tank Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda x: x.device.tank_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
}
ATW_ZONE_SENSORS = {
"room_temperature": {
ATTR_MEASUREMENT_NAME: "Room Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda zone: zone.room_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
"flow_temperature": {
ATTR_MEASUREMENT_NAME: "Flow Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda zone: zone.flow_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
"return_temperature": {
ATTR_MEASUREMENT_NAME: "Flow Return Temperature",
ATTR_ICON: "mdi:thermometer",
ATTR_UNIT: TEMP_CELSIUS,
ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
ATTR_VALUE_FN: lambda zone: zone.return_temperature,
ATTR_ENABLED_FN: lambda x: True,
},
}
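# The tables above make the platform data-driven: adding a measurement is
# a single dict entry. Hypothetical sketch (the device attribute name is
# an assumption, not a documented pymelcloud API):
#
# ATA_SENSORS["wifi_signal"] = {
#     ATTR_MEASUREMENT_NAME: "WiFi Signal",
#     ATTR_ICON: "mdi:wifi",
#     ATTR_UNIT: "dBm",
#     ATTR_DEVICE_CLASS: None,
#     ATTR_VALUE_FN: lambda x: x.device.wifi_signal,
#     ATTR_ENABLED_FN: lambda x: True,
# }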
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up MELCloud device sensors based on config_entry."""
mel_devices = hass.data[DOMAIN].get(entry.entry_id)
async_add_entities(
[
MelDeviceSensor(mel_device, measurement, definition)
for measurement, definition in ATA_SENSORS.items()
for mel_device in mel_devices[DEVICE_TYPE_ATA]
if definition[ATTR_ENABLED_FN](mel_device)
]
+ [
MelDeviceSensor(mel_device, measurement, definition)
for measurement, definition in ATW_SENSORS.items()
for mel_device in mel_devices[DEVICE_TYPE_ATW]
if definition[ATTR_ENABLED_FN](mel_device)
]
+ [
AtwZoneSensor(mel_device, zone, measurement, definition)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
for zone in mel_device.device.zones
for measurement, definition in ATW_ZONE_SENSORS.items()
if definition[ATTR_ENABLED_FN](zone)
],
True,
)
class MelDeviceSensor(Entity):
"""Representation of a Sensor."""
def __init__(self, api: MelCloudDevice, measurement, definition):
"""Initialize the sensor."""
self._api = api
self._name_slug = api.name
self._measurement = measurement
self._def = definition
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._api.device.serial}-{self._api.device.mac}-{self._measurement}"
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._def[ATTR_ICON]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name_slug} {self._def[ATTR_MEASUREMENT_NAME]}"
@property
def state(self):
"""Return the state of the sensor."""
return self._def[ATTR_VALUE_FN](self._api)
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._def[ATTR_UNIT]
@property
def device_class(self):
"""Return device class."""
return self._def[ATTR_DEVICE_CLASS]
async def async_update(self):
"""Retrieve latest state."""
await self._api.async_update()
@property
def device_info(self):
"""Return a device description for device registry."""
return self._api.device_info
class AtwZoneSensor(MelDeviceSensor):
"""Air-to-Air device sensor."""
def __init__(self, api: MelCloudDevice, zone: Zone, measurement, definition):
"""Initialize the sensor."""
super().__init__(api, measurement, definition)
self._zone = zone
self._name_slug = f"{api.name} {zone.name}"
@property
def state(self):
"""Return zone based state."""
return self._def[ATTR_VALUE_FN](self._zone)
|
import logging
from pysignalclirestapi import SignalCliRestApi, SignalCliRestApiError
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
PLATFORM_SCHEMA,
BaseNotificationService,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_SENDER_NR = "number"
CONF_RECP_NR = "recipients"
CONF_SIGNAL_CLI_REST_API = "url"
ATTR_FILENAME = "attachment"
ATTR_FILENAMES = "attachments"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_SENDER_NR): cv.string,
vol.Required(CONF_SIGNAL_CLI_REST_API): cv.string,
vol.Required(CONF_RECP_NR): vol.All(cv.ensure_list, [cv.string]),
}
)
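# Example configuration.yaml entry (URL and phone numbers are
# placeholders):
#
# notify:
#   - name: signal
#     platform: signal_messenger
#     url: "http://127.0.0.1:8080"
#     number: "+430000000000"
#     recipients:
#       - "+431111111111"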
def get_service(hass, config, discovery_info=None):
"""Get the SignalMessenger notification service."""
sender_nr = config[CONF_SENDER_NR]
recp_nrs = config[CONF_RECP_NR]
signal_cli_rest_api_url = config[CONF_SIGNAL_CLI_REST_API]
signal_cli_rest_api = SignalCliRestApi(signal_cli_rest_api_url, sender_nr)
return SignalNotificationService(recp_nrs, signal_cli_rest_api)
class SignalNotificationService(BaseNotificationService):
"""Implement the notification service for SignalMessenger."""
def __init__(self, recp_nrs, signal_cli_rest_api):
"""Initialize the service."""
self._recp_nrs = recp_nrs
self._signal_cli_rest_api = signal_cli_rest_api
def send_message(self, message="", **kwargs):
"""Send a message to a one or more recipients.
Additionally a file can be attached.
"""
_LOGGER.debug("Sending signal message")
data = kwargs.get(ATTR_DATA)
filenames = None
if data is not None:
if ATTR_FILENAMES in data:
filenames = data[ATTR_FILENAMES]
if ATTR_FILENAME in data:
_LOGGER.warning(
"The 'attachment' option is deprecated, please replace it with 'attachments'. This option will become invalid in version 0.108"
)
if filenames is None:
filenames = [data[ATTR_FILENAME]]
else:
filenames.append(data[ATTR_FILENAME])
try:
self._signal_cli_rest_api.send_message(message, self._recp_nrs, filenames)
except SignalCliRestApiError as ex:
_LOGGER.error("%s", ex)
raise ex
|
from mlpatches import base
class Popen2Patch(base.ModulePatch):
"""the patch for the popen2 module."""
PY2 = True
PY3 = False
relpath = "popen2.py"
name = "popen2"
class SubprocessPatch(base.ModulePatch):
"""the patch for the subprocess module."""
PY2 = True
PY3 = False # uses unicode
relpath = "subprocess.py"
name = "subprocess"
# create instances
POPEN2PATCH = Popen2Patch()
SUBPROCESSPATCH = SubprocessPatch()
# name -> ModulePatch()
MODULE_PATCHES = {
"popen2": POPEN2PATCH,
"subprocess": SUBPROCESSPATCH,
}
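# Lookup sketch: a consumer resolves a patch object by module name, e.g.
# MODULE_PATCHES["subprocess"] is SUBPROCESSPATCH (both patches here are
# Python-2-only, as flagged by their PY2/PY3 attributes).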
|
from homeassistant.components.nws.const import DOMAIN
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
from tests.common import MockConfigEntry
from tests.components.nws.const import NWS_CONFIG
async def test_unload_entry(hass, mock_simple_nws):
"""Test that nws setup with config yaml."""
entry = MockConfigEntry(
domain=DOMAIN,
data=NWS_CONFIG,
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(WEATHER_DOMAIN)) == 1
assert DOMAIN in hass.data
assert len(hass.data[DOMAIN]) == 1
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert await hass.config_entries.async_unload(entries[0].entry_id)
assert len(hass.states.async_entity_ids(WEATHER_DOMAIN)) == 0
assert DOMAIN not in hass.data
|
import os
from absl import flags
from perfkitbenchmarker import beam_benchmark_helper
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import gcp
flags.DEFINE_string('dpb_dataflow_staging_location', None,
'Google Cloud Storage bucket for Dataflow to stage the '
'binary and any temporary files. You must create this '
'bucket ahead of time, before running your pipeline.')
flags.DEFINE_string('dpb_dataflow_runner', 'DataflowRunner',
'Flag to specify the pipeline runner at runtime.')
flags.DEFINE_string('dpb_dataflow_sdk', None,
'SDK used to build the Dataflow executable.')
FLAGS = flags.FLAGS
GCP_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
DATAFLOW_WC_INPUT = 'gs://dataflow-samples/shakespeare/kinglear.txt'
class GcpDpbDataflow(dpb_service.BaseDpbService):
"""Object representing GCP Dataflow Service."""
CLOUD = gcp.CLOUD
SERVICE_TYPE = 'dataflow'
def __init__(self, dpb_service_spec):
super(GcpDpbDataflow, self).__init__(dpb_service_spec)
self.project = None
@staticmethod
def _GetStats(stdout):
"""
TODO(saksena): Hook up the metrics API of dataflow to retrieve performance
metrics when available
"""
pass
@staticmethod
def CheckPrerequisites(benchmark_config):
del benchmark_config # Unused
if not FLAGS.dpb_job_jarfile or not os.path.exists(FLAGS.dpb_job_jarfile):
raise errors.Config.InvalidValue('Job jar missing.')
if not FLAGS.dpb_dataflow_sdk:
raise errors.Config.InvalidValue('Dataflow SDK version missing.')
def Create(self):
"""See base class."""
pass
def Delete(self):
"""See base class."""
pass
# TODO(saksena): Make this actually follow the contract or better yet delete
# this class.
def SubmitJob(
self,
jarfile='',
classname=None,
job_poll_interval=None,
job_arguments=None,
job_stdout_file=None,
job_type=None):
"""See base class."""
if job_type == self.BEAM_JOB_TYPE:
full_cmd, base_dir = beam_benchmark_helper.BuildBeamCommand(
self.spec, classname, job_arguments)
stdout, _, retcode = vm_util.IssueCommand(full_cmd, cwd=base_dir,
timeout=FLAGS.beam_it_timeout,
raise_on_failure=False)
assert retcode == 0, "Integration Test Failed."
return
worker_machine_type = self.spec.worker_group.vm_spec.machine_type
num_workers = self.spec.worker_count
max_num_workers = self.spec.worker_count
if self.spec.worker_group.disk_spec and \
self.spec.worker_group.disk_spec.disk_size:
disk_size_gb = self.spec.worker_group.disk_spec.disk_size
elif self.spec.worker_group.vm_spec.boot_disk_size:
disk_size_gb = self.spec.worker_group.vm_spec.boot_disk_size
else:
disk_size_gb = None
cmd = []
# Needed to verify java executable is on the path
dataflow_executable = 'java'
if not vm_util.ExecutableOnPath(dataflow_executable):
raise errors.Setup.MissingExecutableError(
'Could not find required executable "%s"' % dataflow_executable)
cmd.append(dataflow_executable)
cmd.append('-cp')
cmd.append(jarfile)
cmd.append(classname)
cmd += job_arguments
cmd.append('--workerMachineType={}'.format(worker_machine_type))
cmd.append('--numWorkers={}'.format(num_workers))
cmd.append('--maxNumWorkers={}'.format(max_num_workers))
if disk_size_gb:
cmd.append('--diskSizeGb={}'.format(disk_size_gb))
cmd.append('--defaultWorkerLogLevel={}'.format(FLAGS.dpb_log_level))
stdout, _, _ = vm_util.IssueCommand(cmd)
def SetClusterProperty(self):
pass
def GetMetadata(self):
"""Return a dictionary of the metadata for this cluster."""
basic_data = super(GcpDpbDataflow, self).GetMetadata()
basic_data['dpb_dataflow_runner'] = FLAGS.dpb_dataflow_runner
basic_data['dpb_dataflow_sdk'] = FLAGS.dpb_dataflow_sdk
return basic_data
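# For reference, a non-Beam SubmitJob call assembles a command of roughly
# this shape (values illustrative; --runner is expected to arrive via
# job_arguments):
#
#   java -cp wordcount.jar org.example.WordCount \
#       --runner=DataflowRunner \
#       --workerMachineType=n1-standard-1 \
#       --numWorkers=2 --maxNumWorkers=2 \
#       --diskSizeGb=100 --defaultWorkerLogLevel=INFO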
|
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from nikola.utils import makedirs
from .helper import append_config, cd
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def get_last_folder_as_id(value):
"""Use the last part of the directories as test identifier."""
if isinstance(value, tuple):
return value[-1]
return value
@pytest.mark.parametrize(
"dirs, expected_file",
[
(("pages",), "page0"),
(("pages", "subdir1"), "page1"),
(("pages", "subdir1"), "page2"),
(("pages", "subdir2"), "page3"),
(("pages", "subdir3"), "page4"),
],
ids=get_last_folder_as_id,
)
def test_page_index(build, output_dir, dirs, expected_file, output_path_func):
"""Test PAGE_INDEX - Do all files exist?"""
path_func = output_path_func
checkdir = os.path.join(output_dir, *dirs)
assert os.path.isfile(path_func(checkdir, expected_file))
@pytest.mark.parametrize(
"dirs, expected_index_file",
[
(("pages",), "index.html"),
(("pages", "subdir1"), "index.html"),
(("pages", "subdir2"), "index.html"),
(("pages", "subdir3"), "index.php"),
],
ids=get_last_folder_as_id,
)
def test_page_index_in_subdir(build, output_dir, dirs, expected_index_file):
"""Test PAGE_INDEX - Do index files in subdir exist?"""
checkdir = os.path.join(output_dir, *dirs)
assert os.path.isfile(os.path.join(checkdir, expected_index_file))
if expected_index_file == "index.php":
assert not os.path.isfile(os.path.join(checkdir, "index.html"))
@pytest.fixture(scope="module")
def output_path_func():
def output_path(dir, name):
"""Make a file path to the output."""
return os.path.join(dir, name + ".html")
return output_path
def test_page_index_content_in_pages(build, output_dir):
"""Do the indexes only contain the pages the should?"""
pages = os.path.join(output_dir, "pages")
with io.open(os.path.join(pages, "index.html"), "r", encoding="utf-8") as fh:
pages_index = fh.read()
assert "Page 0" in pages_index
assert "Page 1" not in pages_index
assert "Page 2" not in pages_index
assert "Page 3" not in pages_index
assert "Page 4" not in pages_index
assert "This is not the page index" not in pages_index
def test_page_index_content_in_subdir1(build, output_dir):
"""Do the indexes only contain the pages the should?"""
subdir1 = os.path.join(output_dir, "pages", "subdir1")
with io.open(os.path.join(subdir1, "index.html"), "r", encoding="utf-8") as fh:
subdir1_index = fh.read()
assert "Page 0" not in subdir1_index
assert "Page 1" in subdir1_index
assert "Page 2" in subdir1_index
assert "Page 3" not in subdir1_index
assert "Page 4" not in subdir1_index
assert "This is not the page index" not in subdir1_index
def test_page_index_content_in_subdir2(build, output_dir):
"""Do the indexes only contain the pages the should?"""
subdir2 = os.path.join(output_dir, "pages", "subdir2")
with io.open(os.path.join(subdir2, "index.html"), "r", encoding="utf-8") as fh:
subdir2_index = fh.read()
assert "Page 0" not in subdir2_index
assert "Page 1" not in subdir2_index
assert "Page 2" not in subdir2_index
assert "Page 3" not in subdir2_index
assert "Page 4" not in subdir2_index
assert "This is not the page index." in subdir2_index
def test_page_index_content_in_subdir3(build, output_dir):
"""Do the indexes only contain the pages the should?"""
subdir3 = os.path.join(output_dir, "pages", "subdir3")
with io.open(os.path.join(subdir3, "index.php"), "r", encoding="utf-8") as fh:
subdir3_index = fh.read()
assert "Page 0" not in subdir3_index
assert "Page 1" not in subdir3_index
assert "Page 2" not in subdir3_index
assert "Page 3" not in subdir3_index
assert "Page 4" not in subdir3_index
assert "This is not the page index either." in subdir3_index
@pytest.fixture(scope="module")
def build(target_dir):
"""Build the site."""
init_command = nikola.plugins.command.init.CommandInit()
init_command.create_empty_site(target_dir)
init_command.create_configuration(target_dir)
create_pages(target_dir)
append_config(
target_dir,
"""
PAGE_INDEX = True
PRETTY_URLS = False
PAGES = PAGES + (('pages/*.php', 'pages', 'page.tmpl'),)
""",
)
with cd(target_dir):
__main__.main(["build"])
def create_pages(target_dir):
pages = os.path.join(target_dir, "pages")
subdir1 = os.path.join(target_dir, "pages", "subdir1")
subdir2 = os.path.join(target_dir, "pages", "subdir2")
subdir3 = os.path.join(target_dir, "pages", "subdir3")
makedirs(subdir1)
makedirs(subdir2)
makedirs(subdir3)
with io.open(os.path.join(pages, "page0.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Page 0
.. slug: page0
This is page 0.
"""
)
with io.open(os.path.join(subdir1, "page1.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Page 1
.. slug: page1
This is page 1.
"""
)
with io.open(os.path.join(subdir1, "page2.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Page 2
.. slug: page2
This is page 2.
"""
)
with io.open(os.path.join(subdir2, "page3.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Page 3
.. slug: page3
This is page 3.
"""
)
with io.open(os.path.join(subdir2, "foo.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Not the page index
.. slug: index
This is not the page index.
"""
)
with io.open(os.path.join(subdir3, "page4.txt"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Page 4
.. slug: page4
This is page 4.
"""
)
with io.open(os.path.join(subdir3, "bar.php"), "w+", encoding="utf8") as outf:
outf.write(
"""\
.. title: Still not the page index
.. slug: index
This is not the page index either.
"""
)
|
import time
from time import sleep
import random
import logging
from threading import Lock, local
from requests.exceptions import RequestException
from .common import *  # provides requests, OK_CODES, RequestError, catch_conn_exception
logger = logging.getLogger(__name__)
class BackOffRequest(object):
"""Wrapper for requests that implements timed back-off algorithm
https://developer.amazon.com/public/apis/experience/cloud-drive/content/best-practices
Caution: this catches all connection errors and may stall for a long time.
It is necessary to init this module before use."""
def __init__(self, auth_callback: 'requests.auth.AuthBase', timeout: 'Tuple[int, int]', proxies: dict={}):
""":arg auth_callback: callable object that attaches auth info to a request
:arg timeout: tuple of connection timeout and idle timeout \
(http://docs.python-requests.org/en/latest/user/advanced/#timeouts)
:arg proxies: dict of protocol to proxy, \
see http://docs.python-requests.org/en/master/user/advanced/#proxies
"""
self.auth_callback = auth_callback
self.timeout = timeout if requests.__version__ >= '2.4.0' else timeout[1]
self.proxies = proxies
self.__session = requests.session()
self.__thr_local = local()
self.__lock = Lock()
self.__retries = 0
self.__next_req = time.time()
random.seed()
def _succeeded(self):
    with self.__lock:
        self.__retries = 0
        self.__calc_next()
def _failed(self):
    with self.__lock:
        self.__retries += 1
        self.__calc_next()
def __calc_next(self):
    """Calculates the minimal acceptable time for the next request.
    Back-off time is in a range of seconds, depending on the number of failed
    previous tries (r): [0, 2^r], with the exponent capped so the interval
    never exceeds [0, 256].
    The caller must already hold self.__lock; threading.Lock is not
    reentrant, so re-acquiring it here would deadlock."""
    duration = random.random() * 2 ** min(self.__retries, 8)
    self.__next_req = time.time() + duration
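# Worked example of the bound above: after r = 3 consecutive failures the
# wait is drawn uniformly from [0, 2**3) = [0, 8) seconds; the exponent is
# capped at 8, so the interval never exceeds [0, 256) seconds.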
def _wait(self):
with self.__lock:
duration = self.__next_req - time.time()
if duration > 5:
logger.warning('Waiting %fs because of error(s).' % duration)
logger.debug('Retry %i, waiting %fs' % (self.__retries, duration))
if duration > 0:
sleep(duration)
@catch_conn_exception
def _request(self, type_: str, url: str, acc_codes: 'List[int]', **kwargs) -> requests.Response:
"""Performs a HTTP request
:param type_: the type of HTTP request to perform
:param acc_codes: list of HTTP status codes that indicate a successful request
:param kwargs: may include additional header: dict and timeout: int"""
self._wait()
headers = {}
if 'headers' in kwargs:
headers = dict(**(kwargs['headers']))
del kwargs['headers']
last_url = getattr(self.__thr_local, 'last_req_url', None)
if url == last_url:
logger.debug('%s "%s"' % (type_, url))
else:
logger.info('%s "%s"' % (type_, url))
if 'data' in kwargs.keys():
logger.debug(kwargs['data'])
self.__thr_local.last_req_url = url
if 'timeout' in kwargs:
timeout = kwargs['timeout']
del kwargs['timeout']
else:
timeout = self.timeout
r = None
exc = False
try:
try:
r = self.__session.request(type_, url, auth=self.auth_callback,
proxies=self.proxies, headers=headers, timeout=timeout,
**kwargs)
except RequestException as e:
r = e.request
raise
except:
exc = True
self._failed()
raise
finally:
if r and 'x-amzn-RequestId' in r.headers:
if (exc or r.status_code not in acc_codes):
logger.info('Failed x-amzn-RequestId: %s' % r.headers['x-amzn-RequestId'])
else:
logger.debug('x-amzn-RequestId: %s' % r.headers['x-amzn-RequestId'])
if r.status_code in acc_codes:
    self._succeeded()
else:
    self._failed()
return r
# HTTP verbs
def get(self, url, acc_codes=OK_CODES, **kwargs) -> requests.Response:
return self._request('GET', url, acc_codes, **kwargs)
def post(self, url, acc_codes=OK_CODES, **kwargs) -> requests.Response:
return self._request('POST', url, acc_codes, **kwargs)
def patch(self, url, acc_codes=OK_CODES, **kwargs) -> requests.Response:
return self._request('PATCH', url, acc_codes, **kwargs)
def put(self, url, acc_codes=OK_CODES, **kwargs) -> requests.Response:
return self._request('PUT', url, acc_codes, **kwargs)
def delete(self, url, acc_codes=OK_CODES, **kwargs) -> requests.Response:
return self._request('DELETE', url, acc_codes, **kwargs)
def paginated_get(self, url: str, params: dict = None) -> 'List[dict]':
"""Gets node list in segments of 200."""
if params is None:
params = {}
node_list = []
while True:
r = self.get(url, params=params)
if r.status_code not in OK_CODES:
logger.error("Error getting node list.")
raise RequestError(r.status_code, r.text)
ret = r.json()
node_list.extend(ret['data'])
if 'nextToken' in ret.keys():
params['startToken'] = ret['nextToken']
else:
if ret['count'] != len(node_list):
logger.warning(
'Expected %i items in page, received %i.' % (ret['count'], len(node_list)))
break
return node_list
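# Usage sketch (illustrative; TokenAuth is a hypothetical AuthBase that
# attaches an OAuth bearer token, and the URLs are placeholders):
#
# acd = BackOffRequest(TokenAuth(), timeout=(30, 60))
# nodes = acd.paginated_get('https://drive.example.com/account/nodes')
# r = acd.get('https://drive.example.com/account/quota')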
|
from smart_open import open
def read_bytes(url, limit):
bytes_ = []
with open(url, 'rb') as fin:
for i in range(limit):
bytes_.append(fin.read(1))
return bytes_
def test(benchmark):
#
# This file is around 850MB.
#
url = (
's3://commoncrawl/crawl-data/CC-MAIN-2019-51/segments/1575541319511.97'
'/warc/CC-MAIN-20191216093448-20191216121448-00559.warc.gz'
)
limit = 1000000
bytes_ = benchmark(read_bytes, url, limit)
assert len(bytes_) == limit
|
import pytest
from redbot.pytest.economy import *
@pytest.mark.asyncio
async def test_bank_register(bank, ctx):
default_bal = await bank.get_default_balance(ctx.guild)
assert default_bal == (await bank.get_account(ctx.author)).balance
async def has_account(member, bank):
balance = await bank.get_balance(member)
if balance == 0:
balance = 1
await bank.set_balance(member, balance)
@pytest.mark.asyncio
async def test_bank_transfer(bank, member_factory):
mbr1 = member_factory.get()
mbr2 = member_factory.get()
bal1 = (await bank.get_account(mbr1)).balance
bal2 = (await bank.get_account(mbr2)).balance
await bank.transfer_credits(mbr1, mbr2, 50)
newbal1 = (await bank.get_account(mbr1)).balance
newbal2 = (await bank.get_account(mbr2)).balance
assert bal1 - 50 == newbal1
assert bal2 + 50 == newbal2
@pytest.mark.asyncio
async def test_bank_set(bank, member_factory):
mbr = member_factory.get()
await bank.set_balance(mbr, 250)
acc = await bank.get_account(mbr)
assert acc.balance == 250
@pytest.mark.asyncio
async def test_bank_can_spend(bank, member_factory):
mbr = member_factory.get()
canspend = await bank.can_spend(mbr, 50)
assert canspend == (50 < await bank.get_default_balance(mbr.guild))
await bank.set_balance(mbr, 200)
acc = await bank.get_account(mbr)
canspendnow = await bank.can_spend(mbr, 100)
assert canspendnow
@pytest.mark.asyncio
async def test_set_bank_name(bank, guild_factory):
guild = guild_factory.get()
await bank.set_bank_name("Test Bank", guild)
name = await bank.get_bank_name(guild)
assert name == "Test Bank"
@pytest.mark.asyncio
async def test_set_currency_name(bank, guild_factory):
guild = guild_factory.get()
await bank.set_currency_name("Coins", guild)
name = await bank.get_currency_name(guild)
assert name == "Coins"
@pytest.mark.asyncio
async def test_set_default_balance(bank, guild_factory):
guild = guild_factory.get()
await bank.set_default_balance(500, guild)
default_bal = await bank.get_default_balance(guild)
assert default_bal == 500
@pytest.mark.asyncio
async def test_nonint_transaction_amount(bank, member_factory):
mbr1 = member_factory.get()
mbr2 = member_factory.get()
with pytest.raises(TypeError):
await bank.deposit_credits(mbr1, 1.0)
with pytest.raises(TypeError):
await bank.withdraw_credits(mbr1, 1.0)
with pytest.raises(TypeError):
await bank.transfer_credits(mbr1, mbr2, 1.0)
|
from homeassistant.components.climate.const import SUPPORT_TARGET_TEMPERATURE
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_lennox_e30_setup(hass):
"""Test that a Lennox E30 can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "lennox_e30.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
climate = entity_registry.async_get("climate.lennox")
assert climate.unique_id == "homekit-XXXXXXXX-100"
climate_helper = Helper(
hass, "climate.lennox", pairing, accessories[0], config_entry
)
climate_state = await climate_helper.poll_and_get_state()
assert climate_state.attributes["friendly_name"] == "Lennox"
assert climate_state.attributes["supported_features"] == (
SUPPORT_TARGET_TEMPERATURE
)
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(climate.device_id)
assert device.manufacturer == "Lennox"
assert device.name == "Lennox"
assert device.model == "E30 2B"
assert device.sw_version == "3.40.XX"
# The fixture contains a single accessory - so its a single device
# and no bridge
assert device.via_device_id is None
|
import pytest
from homeassistant.components import owntracks
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, mock_component
MINIMAL_LOCATION_MESSAGE = {
"_type": "location",
"lon": 45,
"lat": 90,
"p": 101.3977584838867,
"tid": "test",
"tst": 1,
}
LOCATION_MESSAGE = {
"_type": "location",
"acc": 60,
"alt": 27,
"batt": 92,
"cog": 248,
"lon": 45,
"lat": 90,
"p": 101.3977584838867,
"tid": "test",
"t": "u",
"tst": 1,
"vac": 4,
"vel": 0,
}
@pytest.fixture(autouse=True)
def mock_dev_track(mock_device_tracker_conf):
"""Mock device tracker config loading."""
pass
@pytest.fixture
def mock_client(hass, aiohttp_client):
"""Start the Home Assistant HTTP component."""
mock_component(hass, "group")
mock_component(hass, "zone")
mock_component(hass, "device_tracker")
MockConfigEntry(
domain="owntracks", data={"webhook_id": "owntracks_test", "secret": "abcd"}
).add_to_hass(hass)
hass.loop.run_until_complete(async_setup_component(hass, "owntracks", {}))
return hass.loop.run_until_complete(aiohttp_client(hass.http.app))
async def test_handle_valid_message(mock_client):
"""Test that we forward messages correctly to OwnTracks."""
resp = await mock_client.post(
"/api/webhook/owntracks_test",
json=LOCATION_MESSAGE,
headers={"X-Limit-u": "Paulus", "X-Limit-d": "Pixel"},
)
assert resp.status == 200
json = await resp.json()
assert json == []
async def test_handle_valid_minimal_message(mock_client):
"""Test that we forward messages correctly to OwnTracks."""
resp = await mock_client.post(
"/api/webhook/owntracks_test",
json=MINIMAL_LOCATION_MESSAGE,
headers={"X-Limit-u": "Paulus", "X-Limit-d": "Pixel"},
)
assert resp.status == 200
json = await resp.json()
assert json == []
async def test_handle_value_error(mock_client):
"""Test we don't disclose that this is a valid webhook."""
resp = await mock_client.post(
"/api/webhook/owntracks_test",
json="",
headers={"X-Limit-u": "Paulus", "X-Limit-d": "Pixel"},
)
assert resp.status == 200
json = await resp.text()
assert json == ""
async def test_returns_error_missing_username(mock_client, caplog):
"""Test that an error is returned when username is missing."""
resp = await mock_client.post(
"/api/webhook/owntracks_test",
json=LOCATION_MESSAGE,
headers={"X-Limit-d": "Pixel"},
)
# Needs to be 200 or OwnTracks keeps retrying bad packet.
assert resp.status == 200
json = await resp.json()
assert json == []
assert "No topic or user found" in caplog.text
async def test_returns_error_incorrect_json(mock_client, caplog):
"""Test that an error is returned when username is missing."""
resp = await mock_client.post(
"/api/webhook/owntracks_test", data="not json", headers={"X-Limit-d": "Pixel"}
)
# Needs to be 200 or OwnTracks keeps retrying bad packet.
assert resp.status == 200
json = await resp.json()
assert json == []
assert "invalid JSON" in caplog.text
async def test_returns_error_missing_device(mock_client):
"""Test that an error is returned when device name is missing."""
resp = await mock_client.post(
"/api/webhook/owntracks_test",
json=LOCATION_MESSAGE,
headers={"X-Limit-u": "Paulus"},
)
assert resp.status == 200
json = await resp.json()
assert json == []
def test_context_delivers_pending_msg():
"""Test that context is able to hold pending messages while being init."""
context = owntracks.OwnTracksContext(None, None, None, None, None, None, None, None)
context.async_see(hello="world")
context.async_see(world="hello")
received = []
context.set_async_see(lambda **data: received.append(data))
assert len(received) == 2
assert received[0] == {"hello": "world"}
assert received[1] == {"world": "hello"}
received.clear()
context.set_async_see(lambda **data: received.append(data))
assert len(received) == 0
|
import logging
from typing import List, Optional
import voluptuous as vol
from homeassistant.components.media_player import (
DEVICE_CLASS_RECEIVER,
DEVICE_CLASS_TV,
MediaPlayerEntity,
)
from homeassistant.components.media_player.const import (
MEDIA_TYPE_APP,
MEDIA_TYPE_CHANNEL,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.const import (
STATE_HOME,
STATE_IDLE,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
)
from homeassistant.helpers import entity_platform
from . import RokuDataUpdateCoordinator, RokuEntity, roku_exception_handler
from .browse_media import build_item_response, library_payload
from .const import ATTR_KEYWORD, DOMAIN, SERVICE_SEARCH
_LOGGER = logging.getLogger(__name__)
SUPPORT_ROKU = (
SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_BROWSE_MEDIA
)
SEARCH_SCHEMA = {vol.Required(ATTR_KEYWORD): str}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Roku config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
unique_id = coordinator.data.info.serial_number
async_add_entities([RokuMediaPlayer(unique_id, coordinator)], True)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_SEARCH,
SEARCH_SCHEMA,
"search",
)
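# Service call sketch: the service registered above can be invoked from
# automations (the entity_id is a placeholder):
#
# service: roku.search
# data:
#   keyword: Space
# target:
#   entity_id: media_player.my_roku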
class RokuMediaPlayer(RokuEntity, MediaPlayerEntity):
"""Representation of a Roku media player on the network."""
def __init__(self, unique_id: str, coordinator: RokuDataUpdateCoordinator) -> None:
"""Initialize the Roku device."""
super().__init__(
coordinator=coordinator,
name=coordinator.data.info.name,
device_id=unique_id,
)
self._unique_id = unique_id
def _media_playback_trackable(self) -> bool:
"""Detect if we have enough media data to track playback."""
if self.coordinator.data.media is None or self.coordinator.data.media.live:
return False
return self.coordinator.data.media.duration > 0
@property
def unique_id(self) -> str:
"""Return the unique ID for this entity."""
return self._unique_id
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device."""
if self.coordinator.data.info.device_type == "tv":
return DEVICE_CLASS_TV
return DEVICE_CLASS_RECEIVER
@property
def state(self) -> str:
"""Return the state of the device."""
if self.coordinator.data.state.standby:
return STATE_STANDBY
if self.coordinator.data.app is None:
return None
if (
self.coordinator.data.app.name == "Power Saver"
or self.coordinator.data.app.screensaver
):
return STATE_IDLE
if self.coordinator.data.app.name == "Roku":
return STATE_HOME
if self.coordinator.data.media:
if self.coordinator.data.media.paused:
return STATE_PAUSED
return STATE_PLAYING
if self.coordinator.data.app.name:
return STATE_ON
return None
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_ROKU
@property
    def media_content_type(self) -> Optional[str]:
"""Content type of current playing media."""
if self.app_id is None or self.app_name in ("Power Saver", "Roku"):
return None
if self.app_id == "tvinput.dtv" and self.coordinator.data.channel is not None:
return MEDIA_TYPE_CHANNEL
return MEDIA_TYPE_APP
@property
    def media_image_url(self) -> Optional[str]:
"""Image url of current playing media."""
if self.app_id is None or self.app_name in ("Power Saver", "Roku"):
return None
return self.coordinator.roku.app_icon_url(self.app_id)
@property
    def app_name(self) -> Optional[str]:
"""Name of the current running app."""
if self.coordinator.data.app is not None:
return self.coordinator.data.app.name
return None
@property
    def app_id(self) -> Optional[str]:
"""Return the ID of the current running app."""
if self.coordinator.data.app is not None:
return self.coordinator.data.app.app_id
return None
@property
def media_channel(self):
"""Return the TV channel currently tuned."""
if self.app_id != "tvinput.dtv" or self.coordinator.data.channel is None:
return None
if self.coordinator.data.channel.name is not None:
return f"{self.coordinator.data.channel.name} ({self.coordinator.data.channel.number})"
return self.coordinator.data.channel.number
@property
def media_title(self):
"""Return the title of current playing media."""
if self.app_id != "tvinput.dtv" or self.coordinator.data.channel is None:
return None
if self.coordinator.data.channel.program_title is not None:
return self.coordinator.data.channel.program_title
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
if self._media_playback_trackable():
return self.coordinator.data.media.duration
return None
@property
def media_position(self):
"""Position of current playing media in seconds."""
if self._media_playback_trackable():
return self.coordinator.data.media.position
return None
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid."""
if self._media_playback_trackable():
return self.coordinator.data.media.at
return None
@property
    def source(self) -> Optional[str]:
"""Return the current input source."""
if self.coordinator.data.app is not None:
return self.coordinator.data.app.name
return None
@property
    def source_list(self) -> List[str]:
"""List of available input sources."""
return ["Home"] + sorted(app.name for app in self.coordinator.data.apps)
@roku_exception_handler
async def search(self, keyword):
"""Emulate opening the search screen and entering the search keyword."""
await self.coordinator.roku.search(keyword)
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if media_content_type in [None, "library"]:
return library_payload(self.coordinator)
payload = {
"search_type": media_content_type,
"search_id": media_content_id,
}
response = build_item_response(self.coordinator, payload)
if response is None:
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
)
return response
@roku_exception_handler
async def async_turn_on(self) -> None:
"""Turn on the Roku."""
await self.coordinator.roku.remote("poweron")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_turn_off(self) -> None:
"""Turn off the Roku."""
await self.coordinator.roku.remote("poweroff")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_media_pause(self) -> None:
"""Send pause command."""
if self.state not in (STATE_STANDBY, STATE_PAUSED):
await self.coordinator.roku.remote("play")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_media_play(self) -> None:
"""Send play command."""
if self.state not in (STATE_STANDBY, STATE_PLAYING):
await self.coordinator.roku.remote("play")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_media_play_pause(self) -> None:
"""Send play/pause command."""
if self.state != STATE_STANDBY:
await self.coordinator.roku.remote("play")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_media_previous_track(self) -> None:
"""Send previous track command."""
await self.coordinator.roku.remote("reverse")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_media_next_track(self) -> None:
"""Send next track command."""
await self.coordinator.roku.remote("forward")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_mute_volume(self, mute) -> None:
"""Mute the volume."""
await self.coordinator.roku.remote("volume_mute")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_volume_up(self) -> None:
"""Volume up media player."""
await self.coordinator.roku.remote("volume_up")
@roku_exception_handler
async def async_volume_down(self) -> None:
"""Volume down media player."""
await self.coordinator.roku.remote("volume_down")
@roku_exception_handler
async def async_play_media(self, media_type: str, media_id: str, **kwargs) -> None:
"""Tune to channel."""
if media_type not in (MEDIA_TYPE_APP, MEDIA_TYPE_CHANNEL):
_LOGGER.error(
"Invalid media type %s. Only %s and %s are supported",
media_type,
MEDIA_TYPE_APP,
MEDIA_TYPE_CHANNEL,
)
return
if media_type == MEDIA_TYPE_APP:
await self.coordinator.roku.launch(media_id)
elif media_type == MEDIA_TYPE_CHANNEL:
await self.coordinator.roku.tune(media_id)
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_select_source(self, source: str) -> None:
"""Select input source."""
if source == "Home":
await self.coordinator.roku.remote("home")
appl = next(
(
app
for app in self.coordinator.data.apps
if source in (app.name, app.app_id)
),
None,
)
if appl is not None:
await self.coordinator.roku.launch(appl.app_id)
await self.coordinator.async_request_refresh()
|
from .utils import STRING_TYPE, logger, NO_VALUE
###{standalone
class LarkError(Exception):
pass
class ConfigurationError(LarkError, ValueError):
pass
def assert_config(value, options, msg='Got %r, expected one of %s'):
if value not in options:
raise ConfigurationError(msg % (value, options))
class GrammarError(LarkError):
pass
class ParseError(LarkError):
pass
class LexError(LarkError):
pass
class UnexpectedInput(LarkError):
"""UnexpectedInput Error.
Used as a base class for the following exceptions:
- ``UnexpectedToken``: The parser received an unexpected token
- ``UnexpectedCharacters``: The lexer encountered an unexpected string
After catching one of these exceptions, you may call the following helper methods to create a nicer error message.
"""
pos_in_stream = None
_terminals_by_name = None
def get_context(self, text, span=40):
"""Returns a pretty string pinpointing the error in the text,
with span amount of context characters around it.
Note:
The parser doesn't hold a copy of the text it has to parse,
so you have to provide it again
"""
assert self.pos_in_stream is not None, self
pos = self.pos_in_stream
start = max(pos - span, 0)
end = pos + span
if not isinstance(text, bytes):
before = text[start:pos].rsplit('\n', 1)[-1]
after = text[pos:end].split('\n', 1)[0]
return before + after + '\n' + ' ' * len(before.expandtabs()) + '^\n'
else:
before = text[start:pos].rsplit(b'\n', 1)[-1]
after = text[pos:end].split(b'\n', 1)[0]
return (before + after + b'\n' + b' ' * len(before.expandtabs()) + b'^\n').decode("ascii", "backslashreplace")
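    # A minimal usage sketch (``parser`` and ``text`` are hypothetical names,
    # not part of this module):
    #
    #     try:
    #         parser.parse(text)
    #     except UnexpectedInput as e:
    #         print(e.get_context(text))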
def match_examples(self, parse_fn, examples, token_type_match_fallback=False, use_accepts=False):
"""Allows you to detect what's wrong in the input text by matching
against example errors.
Given a parser instance and a dictionary mapping some label with
some malformed syntax examples, it'll return the label for the
example that bests matches the current error. The function will
iterate the dictionary until it finds a matching error, and
return the corresponding value.
For an example usage, see `examples/error_reporting_lalr.py`
Parameters:
parse_fn: parse function (usually ``lark_instance.parse``)
examples: dictionary of ``{'example_string': value}``.
use_accepts: Recommended to call this with ``use_accepts=True``.
The default is ``False`` for backwards compatibility.
"""
assert self.state is not None, "Not supported for this exception"
if isinstance(examples, dict):
examples = examples.items()
candidate = (None, False)
for i, (label, example) in enumerate(examples):
assert not isinstance(example, STRING_TYPE)
for j, malformed in enumerate(example):
try:
parse_fn(malformed)
except UnexpectedInput as ut:
if ut.state == self.state:
if use_accepts and hasattr(self, 'accepts') and ut.accepts != self.accepts:
logger.debug("Different accepts with same state[%d]: %s != %s at example [%s][%s]" %
(self.state, self.accepts, ut.accepts, i, j))
continue
try:
if ut.token == self.token: # Try exact match first
logger.debug("Exact Match at example [%s][%s]" % (i, j))
return label
if token_type_match_fallback:
# Fallback to token types match
if (ut.token.type == self.token.type) and not candidate[-1]:
logger.debug("Token Type Fallback at example [%s][%s]" % (i, j))
candidate = label, True
except AttributeError:
pass
if candidate[0] is None:
logger.debug("Same State match at example [%s][%s]" % (i, j))
candidate = label, False
return candidate[0]
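    # A minimal usage sketch (``parser`` and the example strings are
    # hypothetical, not part of this module):
    #
    #     try:
    #         parser.parse("f(x,)")
    #     except UnexpectedInput as e:
    #         label = e.match_examples(parser.parse, {
    #             "trailing comma": ["f(a,)", "[1, 2,]"],
    #             "unclosed paren": ["f(a", "(1 + 2"],
    #         }, use_accepts=True)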
def _format_expected(self, expected):
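        # Produces a message like (illustrative):
        #     Expected one of:
        #         * COMMA
        #         * RPAR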
if self._terminals_by_name:
d = self._terminals_by_name
expected = [d[t_name].user_repr() if t_name in d else t_name for t_name in expected]
return "Expected one of: \n\t* %s\n" % '\n\t* '.join(expected)
class UnexpectedEOF(ParseError, UnexpectedInput):
def __init__(self, expected, state=None, terminals_by_name=None):
self.expected = expected
self.state = state
from .lexer import Token
self.token = Token("<EOF>", "") # , line=-1, column=-1, pos_in_stream=-1)
self.pos_in_stream = -1
self.line = -1
self.column = -1
self._terminals_by_name = terminals_by_name
super(UnexpectedEOF, self).__init__()
def __str__(self):
message = "Unexpected end-of-input. "
message += self._format_expected(self.expected)
return message
class UnexpectedCharacters(LexError, UnexpectedInput):
def __init__(self, seq, lex_pos, line, column, allowed=None, considered_tokens=None, state=None, token_history=None,
terminals_by_name=None):
# TODO considered_tokens and allowed can be figured out using state
self.line = line
self.column = column
self.pos_in_stream = lex_pos
self.state = state
self._terminals_by_name = terminals_by_name
self.allowed = allowed
self.considered_tokens = considered_tokens
self.token_history = token_history
if isinstance(seq, bytes):
self.char = seq[lex_pos:lex_pos + 1].decode("ascii", "backslashreplace")
else:
self.char = seq[lex_pos]
self._context = self.get_context(seq)
super(UnexpectedCharacters, self).__init__()
def __str__(self):
message = "No terminal defined for '%s' at line %d col %d" % (self.char, self.line, self.column)
message += '\n\n' + self._context
if self.allowed:
message += self._format_expected(self.allowed)
if self.token_history:
message += '\nPrevious tokens: %s\n' % ', '.join(repr(t) for t in self.token_history)
return message
class UnexpectedToken(ParseError, UnexpectedInput):
"""When the parser throws UnexpectedToken, it instantiates a puppet
    with its internal state. Users can then interactively set the puppet
    to the desired state and resume regular parsing.
see: :ref:`ParserPuppet`.
"""
def __init__(self, token, expected, considered_rules=None, state=None, puppet=None, terminals_by_name=None, token_history=None):
# TODO considered_rules and expected can be figured out using state
self.line = getattr(token, 'line', '?')
self.column = getattr(token, 'column', '?')
self.pos_in_stream = getattr(token, 'pos_in_stream', None)
self.state = state
self.token = token
self.expected = expected # XXX deprecate? `accepts` is better
self._accepts = NO_VALUE
self.considered_rules = considered_rules
self.puppet = puppet
self._terminals_by_name = terminals_by_name
self.token_history = token_history
super(UnexpectedToken, self).__init__()
@property
def accepts(self):
if self._accepts is NO_VALUE:
self._accepts = self.puppet and self.puppet.accepts()
return self._accepts
def __str__(self):
message = ("Unexpected token %r at line %s, column %s.\n%s"
% (self.token, self.line, self.column, self._format_expected(self.accepts or self.expected)))
if self.token_history:
message += "Previous tokens: %r\n" % self.token_history
return message
class VisitError(LarkError):
"""VisitError is raised when visitors are interrupted by an exception
It provides the following attributes for inspection:
- obj: the tree node or token it was processing when the exception was raised
- orig_exc: the exception that cause it to fail
"""
def __init__(self, rule, obj, orig_exc):
self.obj = obj
self.orig_exc = orig_exc
message = 'Error trying to process rule "%s":\n\n%s' % (rule, orig_exc)
super(VisitError, self).__init__(message)
###}
|
from abc import ABC, abstractmethod
from asyncio import gather
from collections.abc import Mapping
import logging
import pprint
from typing import List, Optional
from aiohttp.web import json_response
from homeassistant.components import webhook
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_SUPPORTED_FEATURES,
CLOUD_NEVER_EXPOSED_ENTITIES,
CONF_NAME,
STATE_UNAVAILABLE,
)
from homeassistant.core import Context, HomeAssistant, State, callback
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.network import get_url
from homeassistant.helpers.storage import Store
from . import trait
from .const import (
CONF_ALIASES,
CONF_ROOM_HINT,
DEVICE_CLASS_TO_GOOGLE_TYPES,
DOMAIN,
DOMAIN_TO_GOOGLE_TYPES,
ERR_FUNCTION_NOT_SUPPORTED,
NOT_EXPOSE_LOCAL,
SOURCE_LOCAL,
STORE_AGENT_USER_IDS,
)
from .error import SmartHomeError
SYNC_DELAY = 15
_LOGGER = logging.getLogger(__name__)
class AbstractConfig(ABC):
"""Hold the configuration for Google Assistant."""
_unsub_report_state = None
def __init__(self, hass):
"""Initialize abstract config."""
self.hass = hass
self._store = None
self._google_sync_unsub = {}
self._local_sdk_active = False
async def async_initialize(self):
"""Perform async initialization of config."""
self._store = GoogleConfigStore(self.hass)
await self._store.async_load()
@property
def enabled(self):
"""Return if Google is enabled."""
return False
@property
def entity_config(self):
"""Return entity config."""
return {}
@property
def secure_devices_pin(self):
"""Return entity config."""
return None
@property
def is_reporting_state(self):
"""Return if we're actively reporting states."""
return self._unsub_report_state is not None
@property
def is_local_sdk_active(self):
"""Return if we're actively accepting local messages."""
return self._local_sdk_active
@property
def should_report_state(self):
"""Return if states should be proactively reported."""
return False
@property
def local_sdk_webhook_id(self):
"""Return the local SDK webhook ID.
Return None to disable the local SDK.
"""
return None
@property
def local_sdk_user_id(self):
"""Return the user ID to be used for actions received via the local SDK."""
raise NotImplementedError
@abstractmethod
def get_agent_user_id(self, context):
"""Get agent user ID from context."""
@abstractmethod
def should_expose(self, state) -> bool:
"""Return if entity should be exposed."""
def should_2fa(self, state):
"""If an entity should have 2FA checked."""
# pylint: disable=no-self-use
return True
async def async_report_state(self, message, agent_user_id: str):
"""Send a state report to Google."""
raise NotImplementedError
async def async_report_state_all(self, message):
"""Send a state report to Google for all previously synced users."""
jobs = [
self.async_report_state(message, agent_user_id)
for agent_user_id in self._store.agent_user_ids
]
await gather(*jobs)
@callback
def async_enable_report_state(self):
"""Enable proactive mode."""
# Circular dep
# pylint: disable=import-outside-toplevel
from .report_state import async_enable_report_state
if self._unsub_report_state is None:
self._unsub_report_state = async_enable_report_state(self.hass, self)
@callback
def async_disable_report_state(self):
"""Disable report state."""
if self._unsub_report_state is not None:
self._unsub_report_state()
self._unsub_report_state = None
async def async_sync_entities(self, agent_user_id: str):
"""Sync all entities to Google."""
# Remove any pending sync
self._google_sync_unsub.pop(agent_user_id, lambda: None)()
return await self._async_request_sync_devices(agent_user_id)
async def async_sync_entities_all(self):
"""Sync all entities to Google for all registered agents."""
res = await gather(
*[
self.async_sync_entities(agent_user_id)
for agent_user_id in self._store.agent_user_ids
]
)
return max(res, default=204)
@callback
def async_schedule_google_sync(self, agent_user_id: str):
"""Schedule a sync."""
async def _schedule_callback(_now):
"""Handle a scheduled sync callback."""
self._google_sync_unsub.pop(agent_user_id, None)
await self.async_sync_entities(agent_user_id)
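        # Cancel any sync that is already pending before scheduling a new one.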
self._google_sync_unsub.pop(agent_user_id, lambda: None)()
self._google_sync_unsub[agent_user_id] = async_call_later(
self.hass, SYNC_DELAY, _schedule_callback
)
@callback
def async_schedule_google_sync_all(self):
"""Schedule a sync for all registered agents."""
for agent_user_id in self._store.agent_user_ids:
self.async_schedule_google_sync(agent_user_id)
async def _async_request_sync_devices(self, agent_user_id: str) -> int:
"""Trigger a sync with Google.
Return value is the HTTP status code of the sync request.
"""
raise NotImplementedError
async def async_connect_agent_user(self, agent_user_id: str):
"""Add an synced and known agent_user_id.
Called when a completed sync response have been sent to Google.
"""
self._store.add_agent_user_id(agent_user_id)
async def async_disconnect_agent_user(self, agent_user_id: str):
"""Turn off report state and disable further state reporting.
Called when the user disconnects their account from Google.
"""
self._store.pop_agent_user_id(agent_user_id)
@callback
def async_enable_local_sdk(self):
"""Enable the local SDK."""
webhook_id = self.local_sdk_webhook_id
if webhook_id is None:
return
webhook.async_register(
self.hass,
DOMAIN,
"Local Support",
webhook_id,
self._handle_local_webhook,
)
self._local_sdk_active = True
@callback
def async_disable_local_sdk(self):
"""Disable the local SDK."""
if not self._local_sdk_active:
return
webhook.async_unregister(self.hass, self.local_sdk_webhook_id)
self._local_sdk_active = False
async def _handle_local_webhook(self, hass, webhook_id, request):
"""Handle an incoming local SDK message."""
# Circular dep
# pylint: disable=import-outside-toplevel
from . import smart_home
payload = await request.json()
if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug("Received local message:\n%s\n", pprint.pformat(payload))
if not self.enabled:
return json_response(smart_home.turned_off_response(payload))
result = await smart_home.async_handle_message(
self.hass, self, self.local_sdk_user_id, payload, SOURCE_LOCAL
)
if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug("Responding to local message:\n%s\n", pprint.pformat(result))
return json_response(result)
class GoogleConfigStore:
"""A configuration store for google assistant."""
_STORAGE_VERSION = 1
_STORAGE_KEY = DOMAIN
def __init__(self, hass):
"""Initialize a configuration store."""
self._hass = hass
self._store = Store(hass, self._STORAGE_VERSION, self._STORAGE_KEY)
self._data = {STORE_AGENT_USER_IDS: {}}
@property
def agent_user_ids(self):
"""Return a list of connected agent user_ids."""
return self._data[STORE_AGENT_USER_IDS]
@callback
def add_agent_user_id(self, agent_user_id):
"""Add an agent user id to store."""
if agent_user_id not in self._data[STORE_AGENT_USER_IDS]:
self._data[STORE_AGENT_USER_IDS][agent_user_id] = {}
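            # Delay the write briefly so bursts of changes collapse into one save.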
self._store.async_delay_save(lambda: self._data, 1.0)
@callback
def pop_agent_user_id(self, agent_user_id):
"""Remove agent user id from store."""
if agent_user_id in self._data[STORE_AGENT_USER_IDS]:
self._data[STORE_AGENT_USER_IDS].pop(agent_user_id, None)
self._store.async_delay_save(lambda: self._data, 1.0)
async def async_load(self):
"""Store current configuration to disk."""
data = await self._store.async_load()
if data:
self._data = data
class RequestData:
"""Hold data associated with a particular request."""
def __init__(
self,
config: AbstractConfig,
user_id: str,
source: str,
request_id: str,
devices: Optional[List[dict]],
):
"""Initialize the request data."""
self.config = config
self.source = source
self.request_id = request_id
self.context = Context(user_id=user_id)
self.devices = devices
@property
def is_local_request(self):
"""Return if this is a local request."""
return self.source == SOURCE_LOCAL
def get_google_type(domain, device_class):
"""Google type based on domain and device class."""
typ = DEVICE_CLASS_TO_GOOGLE_TYPES.get((domain, device_class))
return typ if typ is not None else DOMAIN_TO_GOOGLE_TYPES[domain]
class GoogleEntity:
"""Adaptation of Entity expressed in Google's terms."""
def __init__(self, hass: HomeAssistant, config: AbstractConfig, state: State):
"""Initialize a Google entity."""
self.hass = hass
self.config = config
self.state = state
self._traits = None
@property
def entity_id(self):
"""Return entity ID."""
return self.state.entity_id
@callback
def traits(self):
"""Return traits for entity."""
if self._traits is not None:
return self._traits
state = self.state
domain = state.domain
features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if not isinstance(features, int):
_LOGGER.warning(
"Entity %s contains invalid supported_features value %s",
self.entity_id,
features,
)
return []
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
self._traits = [
Trait(self.hass, state, self.config)
for Trait in trait.TRAITS
if Trait.supported(domain, features, device_class)
]
return self._traits
@callback
def should_expose(self):
"""If entity should be exposed."""
return self.config.should_expose(self.state)
@callback
def should_expose_local(self) -> bool:
"""Return if the entity should be exposed locally."""
return (
self.should_expose()
and get_google_type(
self.state.domain, self.state.attributes.get(ATTR_DEVICE_CLASS)
)
not in NOT_EXPOSE_LOCAL
and not self.might_2fa()
)
@callback
def is_supported(self) -> bool:
"""Return if the entity is supported by Google."""
return bool(self.traits())
@callback
def might_2fa(self) -> bool:
"""Return if the entity might encounter 2FA."""
if not self.config.should_2fa(self.state):
return False
return self.might_2fa_traits()
@callback
def might_2fa_traits(self) -> bool:
"""Return if the entity might encounter 2FA based on just traits."""
state = self.state
domain = state.domain
features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
return any(
trait.might_2fa(domain, features, device_class) for trait in self.traits()
)
async def sync_serialize(self, agent_user_id):
"""Serialize entity for a SYNC response.
https://developers.google.com/actions/smarthome/create-app#actiondevicessync
"""
state = self.state
entity_config = self.config.entity_config.get(state.entity_id, {})
name = (entity_config.get(CONF_NAME) or state.name).strip()
domain = state.domain
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
traits = self.traits()
device_type = get_google_type(domain, device_class)
device = {
"id": state.entity_id,
"name": {"name": name},
"attributes": {},
"traits": [trait.name for trait in traits],
"willReportState": self.config.should_report_state,
"type": device_type,
}
# use aliases
aliases = entity_config.get(CONF_ALIASES)
if aliases:
device["name"]["nicknames"] = [name] + aliases
if self.config.is_local_sdk_active and self.should_expose_local():
device["otherDeviceIds"] = [{"deviceId": self.entity_id}]
device["customData"] = {
"webhookId": self.config.local_sdk_webhook_id,
"httpPort": self.hass.http.server_port,
"httpSSL": self.hass.config.api.use_ssl,
"uuid": await self.hass.helpers.instance_id.async_get(),
"baseUrl": get_url(self.hass, prefer_external=True),
"proxyDeviceId": agent_user_id,
}
for trt in traits:
device["attributes"].update(trt.sync_attributes())
room = entity_config.get(CONF_ROOM_HINT)
if room:
device["roomHint"] = room
return device
dev_reg, ent_reg, area_reg = await gather(
self.hass.helpers.device_registry.async_get_registry(),
self.hass.helpers.entity_registry.async_get_registry(),
self.hass.helpers.area_registry.async_get_registry(),
)
entity_entry = ent_reg.async_get(state.entity_id)
if not (entity_entry and entity_entry.device_id):
return device
device_entry = dev_reg.devices.get(entity_entry.device_id)
if not (device_entry and device_entry.area_id):
return device
area_entry = area_reg.areas.get(device_entry.area_id)
if area_entry and area_entry.name:
device["roomHint"] = area_entry.name
return device
@callback
def query_serialize(self):
"""Serialize entity for a QUERY response.
https://developers.google.com/actions/smarthome/create-app#actiondevicesquery
"""
state = self.state
if state.state == STATE_UNAVAILABLE:
return {"online": False}
attrs = {"online": True}
for trt in self.traits():
deep_update(attrs, trt.query_attributes())
return attrs
@callback
def reachable_device_serialize(self):
"""Serialize entity for a REACHABLE_DEVICE response."""
return {"verificationId": self.entity_id}
async def execute(self, data, command_payload):
"""Execute a command.
https://developers.google.com/actions/smarthome/create-app#actiondevicesexecute
"""
command = command_payload["command"]
params = command_payload.get("params", {})
challenge = command_payload.get("challenge", {})
executed = False
for trt in self.traits():
if trt.can_execute(command, params):
await trt.execute(command, data, params, challenge)
executed = True
break
if not executed:
raise SmartHomeError(
ERR_FUNCTION_NOT_SUPPORTED,
f"Unable to execute {command} for {self.state.entity_id}",
)
@callback
def async_update(self):
"""Update the entity with latest info from Home Assistant."""
self.state = self.hass.states.get(self.entity_id)
if self._traits is None:
return
for trt in self._traits:
trt.state = self.state
def deep_update(target, source):
"""Update a nested dictionary with another nested dictionary."""
for key, value in source.items():
if isinstance(value, Mapping):
target[key] = deep_update(target.get(key, {}), value)
else:
target[key] = value
return target
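# Illustrative example:
#     deep_update({"a": {"b": 1}}, {"a": {"c": 2}})  ->  {"a": {"b": 1, "c": 2}}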
@callback
def async_get_entities(hass, config) -> List[GoogleEntity]:
"""Return all entities that are supported by Google."""
entities = []
for state in hass.states.async_all():
if state.entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
continue
entity = GoogleEntity(hass, config, state)
if entity.is_supported():
entities.append(entity)
return entities
|
import logging
from pybotvac import Account, Neato, Vorwerk
from pybotvac.exceptions import NeatoLoginException, NeatoRobotException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
# pylint: disable=unused-import
from .const import CONF_VENDOR, NEATO_DOMAIN, VALID_VENDORS
DOCS_URL = "https://www.home-assistant.io/integrations/neato"
DEFAULT_VENDOR = "neato"
_LOGGER = logging.getLogger(__name__)
class NeatoConfigFlow(config_entries.ConfigFlow, domain=NEATO_DOMAIN):
"""Neato integration config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize flow."""
self._username = vol.UNDEFINED
self._password = vol.UNDEFINED
self._vendor = vol.UNDEFINED
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if self._async_current_entries():
return self.async_abort(reason="already_configured")
if user_input is not None:
self._username = user_input["username"]
self._password = user_input["password"]
self._vendor = user_input["vendor"]
error = await self.hass.async_add_executor_job(
self.try_login, self._username, self._password, self._vendor
)
if error:
errors["base"] = error
else:
return self.async_create_entry(
title=user_input[CONF_USERNAME],
data=user_input,
description_placeholders={"docs_url": DOCS_URL},
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_VENDOR, default="neato"): vol.In(VALID_VENDORS),
}
),
description_placeholders={"docs_url": DOCS_URL},
errors=errors,
)
async def async_step_import(self, user_input):
"""Import a config flow from configuration."""
if self._async_current_entries():
return self.async_abort(reason="already_configured")
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
vendor = user_input[CONF_VENDOR]
error = await self.hass.async_add_executor_job(
self.try_login, username, password, vendor
)
if error is not None:
_LOGGER.error(error)
return self.async_abort(reason=error)
return self.async_create_entry(
title=f"{username} (from configuration)",
data={
CONF_USERNAME: username,
CONF_PASSWORD: password,
CONF_VENDOR: vendor,
},
)
@staticmethod
def try_login(username, password, vendor):
"""Try logging in to device and return any errors."""
        this_vendor = Vorwerk() if vendor == "vorwerk" else Neato()
try:
Account(username, password, this_vendor)
except NeatoLoginException:
return "invalid_auth"
except NeatoRobotException:
return "unknown"
return None
|
from functools import reduce
from typing import Set
from django.db.models import Q
from weblate.machinery.base import BatchStringMachineTranslation, get_machinery_language
from weblate.trans.models import Unit
from weblate.utils.state import STATE_TRANSLATED
class WeblateTranslation(BatchStringMachineTranslation):
"""Translation service using strings already translated in Weblate."""
name = "Weblate"
rank_boost = 1
cache_translations = False
def convert_language(self, language):
"""No conversion of language object."""
return get_machinery_language(language)
def is_supported(self, source, language):
"""Any language is supported."""
return True
def is_rate_limited(self):
"""This service has no rate limiting."""
return False
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
if user:
base = Unit.objects.filter_access(user)
else:
base = Unit.objects.all()
matching_units = base.filter(
source__search=text,
translation__component__source_language=source,
translation__language=language,
state__gte=STATE_TRANSLATED,
)
for munit in matching_units:
source = munit.source_string
quality = self.comparer.similarity(text, source)
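            # Always skip matches below quality 10; outside of search mode,
            # also enforce the configured threshold.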
if quality < 10 or (quality < threshold and not search):
continue
yield {
"text": munit.get_target_plurals()[0],
"quality": quality,
"service": self.name,
"origin": str(munit.translation.component),
"origin_url": munit.get_absolute_url(),
"source": source,
}
def download_batch_strings(
self, source, language, units, texts: Set[str], user=None, threshold: int = 75
):
if user:
base = Unit.objects.filter_access(user)
else:
base = Unit.objects.all()
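        # OR together one full-text search lookup per requested source string.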
query = reduce(lambda x, y: x | Q(source__search=y), texts, Q())
matching_units = base.filter(
query,
translation__component__source_language=source,
translation__language=language,
state__gte=STATE_TRANSLATED,
).only("source", "target")
for unit in matching_units:
yield unit.source_string, unit.get_target_plurals()[0]
|
from typing import Any, Dict
from twentemilieu import (
WASTE_TYPE_NON_RECYCLABLE,
WASTE_TYPE_ORGANIC,
WASTE_TYPE_PAPER,
WASTE_TYPE_PLASTIC,
TwenteMilieu,
TwenteMilieuConnectionError,
)
from homeassistant.components.twentemilieu.const import DATA_UPDATE, DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up Twente Milieu sensor based on a config entry."""
twentemilieu = hass.data[DOMAIN][entry.data[CONF_ID]]
try:
await twentemilieu.update()
except TwenteMilieuConnectionError as exception:
raise PlatformNotReady from exception
sensors = [
TwenteMilieuSensor(
twentemilieu,
unique_id=entry.data[CONF_ID],
name=f"{WASTE_TYPE_NON_RECYCLABLE} Waste Pickup",
waste_type=WASTE_TYPE_NON_RECYCLABLE,
icon="mdi:delete-empty",
),
TwenteMilieuSensor(
twentemilieu,
unique_id=entry.data[CONF_ID],
name=f"{WASTE_TYPE_ORGANIC} Waste Pickup",
waste_type=WASTE_TYPE_ORGANIC,
icon="mdi:delete-empty",
),
TwenteMilieuSensor(
twentemilieu,
unique_id=entry.data[CONF_ID],
name=f"{WASTE_TYPE_PAPER} Waste Pickup",
waste_type=WASTE_TYPE_PAPER,
icon="mdi:delete-empty",
),
TwenteMilieuSensor(
twentemilieu,
unique_id=entry.data[CONF_ID],
name=f"{WASTE_TYPE_PLASTIC} Waste Pickup",
waste_type=WASTE_TYPE_PLASTIC,
icon="mdi:delete-empty",
),
]
async_add_entities(sensors, True)
class TwenteMilieuSensor(Entity):
"""Defines a Twente Milieu sensor."""
def __init__(
self,
twentemilieu: TwenteMilieu,
unique_id: str,
name: str,
waste_type: str,
icon: str,
) -> None:
"""Initialize the Twente Milieu entity."""
self._available = True
self._unique_id = unique_id
self._icon = icon
self._name = name
self._twentemilieu = twentemilieu
self._waste_type = waste_type
self._unsub_dispatcher = None
self._state = None
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def icon(self) -> str:
"""Return the mdi icon of the entity."""
return self._icon
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return f"{DOMAIN}_{self._unique_id}_{self._waste_type}"
@property
def should_poll(self) -> bool:
"""Return the polling requirement of the entity."""
return False
async def async_added_to_hass(self) -> None:
"""Connect to dispatcher listening for entity data notifications."""
self._unsub_dispatcher = async_dispatcher_connect(
self.hass, DATA_UPDATE, self._schedule_immediate_update
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect from update signal."""
self._unsub_dispatcher()
@callback
def _schedule_immediate_update(self, unique_id: str) -> None:
"""Schedule an immediate update of the entity."""
if unique_id == self._unique_id:
self.async_schedule_update_ha_state(True)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
async def async_update(self) -> None:
"""Update Twente Milieu entity."""
next_pickup = await self._twentemilieu.next_pickup(self._waste_type)
if next_pickup is not None:
self._state = next_pickup.date().isoformat()
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about Twente Milieu."""
return {
"identifiers": {(DOMAIN, self._unique_id)},
"name": "Twente Milieu",
"manufacturer": "Twente Milieu",
}
|
import unittest
import logging
import numpy as np # for arrays, array broadcasting etc.
from gensim.models import ldaseqmodel
from gensim.corpora import Dictionary
from gensim.test.utils import datapath
class TestLdaSeq(unittest.TestCase):
    # Set up a DTM model, fit it, and check topic-word and doc-topic results.
def setUp(self):
texts = [
[u'senior', u'studios', u'studios', u'studios', u'creators', u'award', u'mobile', u'currently',
u'challenges', u'senior', u'summary', u'senior', u'motivated', u'creative', u'senior'],
[u'performs', u'engineering', u'tasks', u'infrastructure', u'focusing', u'primarily',
u'programming', u'interaction', u'designers', u'engineers', u'leadership', u'teams',
u'teams', u'crews', u'responsibilities', u'engineering', u'quality', u'functional',
u'functional', u'teams', u'organizing', u'prioritizing', u'technical', u'decisions',
u'engineering', u'participates', u'participates', u'reviews', u'participates',
u'hiring', u'conducting', u'interviews'],
[u'feedback', u'departments', u'define', u'focusing', u'engineering', u'teams', u'crews',
u'facilitate', u'engineering', u'departments', u'deadlines', u'milestones', u'typically',
u'spends', u'designing', u'developing', u'updating', u'bugs', u'mentoring', u'engineers',
u'define', u'schedules', u'milestones', u'participating'],
[u'reviews', u'interviews', u'sized', u'teams', u'interacts', u'disciplines', u'knowledge',
u'skills', u'knowledge', u'knowledge', u'xcode', u'scripting', u'debugging', u'skills',
u'skills', u'knowledge', u'disciplines', u'animation', u'networking', u'expertise',
u'competencies', u'oral', u'skills', u'management', u'skills', u'proven', u'effectively',
u'teams', u'deadline', u'environment', u'bachelor', u'minimum', u'shipped', u'leadership',
u'teams', u'location', u'resumes', u'jobs', u'candidates', u'openings', u'jobs'],
[u'maryland', u'client', u'producers', u'electricity', u'operates', u'storage', u'utility',
u'retail', u'customers', u'engineering', u'consultant', u'maryland', u'summary', u'technical',
u'technology', u'departments', u'expertise', u'maximizing', u'output', u'reduces', u'operating',
u'participates', u'areas', u'engineering', u'conducts', u'testing', u'solve', u'supports',
u'environmental', u'understands', u'objectives', u'operates', u'responsibilities', u'handles',
u'complex', u'engineering', u'aspects', u'monitors', u'quality', u'proficiency', u'optimization',
u'recommendations', u'supports', u'personnel', u'troubleshooting', u'commissioning', u'startup',
u'shutdown', u'supports', u'procedure', u'operating', u'units', u'develops', u'simulations',
u'troubleshooting', u'tests', u'enhancing', u'solving', u'develops', u'estimates', u'schedules',
u'scopes', u'understands', u'technical', u'management', u'utilize', u'routine', u'conducts',
u'hazards', u'utilizing', u'hazard', u'operability', u'methodologies', u'participates', u'startup',
u'reviews', u'pssr', u'participate', u'teams', u'participate', u'regulatory', u'audits', u'define',
u'scopes', u'budgets', u'schedules', u'technical', u'management', u'environmental', u'awareness',
u'interfacing', u'personnel', u'interacts', u'regulatory', u'departments', u'input', u'objectives',
u'identifying', u'introducing', u'concepts', u'solutions', u'peers', u'customers', u'coworkers',
u'knowledge', u'skills', u'engineering', u'quality', u'engineering'],
[u'commissioning', u'startup', u'knowledge', u'simulators', u'technologies', u'knowledge',
u'engineering', u'techniques', u'disciplines', u'leadership', u'skills', u'proven',
u'engineers', u'oral', u'skills', u'technical', u'skills', u'analytically', u'solve',
u'complex', u'interpret', u'proficiency', u'simulation', u'knowledge', u'applications',
u'manipulate', u'applications', u'engineering'],
[u'calculations', u'programs', u'matlab', u'excel', u'independently', u'environment',
u'proven', u'skills', u'effectively', u'multiple', u'tasks', u'planning', u'organizational',
u'management', u'skills', u'rigzone', u'jobs', u'developer', u'exceptional', u'strategies',
u'junction', u'exceptional', u'strategies', u'solutions', u'solutions', u'biggest',
u'insurers', u'operates', u'investment'],
[u'vegas', u'tasks', u'electrical', u'contracting', u'expertise', u'virtually', u'electrical',
u'developments', u'institutional', u'utilities', u'technical', u'experts', u'relationships',
u'credibility', u'contractors', u'utility', u'customers', u'customer', u'relationships',
u'consistently', u'innovations', u'profile', u'construct', u'envision', u'dynamic', u'complex',
u'electrical', u'management', u'grad', u'internship', u'electrical', u'engineering',
u'infrastructures', u'engineers', u'documented', u'management', u'engineering',
u'quality', u'engineering', u'electrical', u'engineers', u'complex', u'distribution',
u'grounding', u'estimation', u'testing', u'procedures', u'voltage', u'engineering'],
[u'troubleshooting', u'installation', u'documentation', u'bsee', u'certification',
u'electrical', u'voltage', u'cabling', u'electrical', u'engineering', u'candidates',
u'electrical', u'internships', u'oral', u'skills', u'organizational', u'prioritization',
u'skills', u'skills', u'excel', u'cadd', u'calculation', u'autocad', u'mathcad',
u'skills', u'skills', u'customer', u'relationships', u'solving', u'ethic', u'motivation',
u'tasks', u'budget', u'affirmative', u'diversity', u'workforce', u'gender', u'orientation',
u'disability', u'disabled', u'veteran', u'vietnam', u'veteran', u'qualifying', u'veteran',
u'diverse', u'candidates', u'respond', u'developing', u'workplace', u'reflects', u'diversity',
u'communities', u'reviews', u'electrical', u'contracting', u'southwest', u'electrical', u'contractors'],
[u'intern', u'electrical', u'engineering', u'idexx', u'laboratories', u'validating', u'idexx',
u'integrated', u'hardware', u'entails', u'planning', u'debug', u'validation', u'engineers',
u'validation', u'methodologies', u'healthcare', u'platforms', u'brightest', u'solve',
u'challenges', u'innovation', u'technology', u'idexx', u'intern', u'idexx', u'interns',
u'supplement', u'interns', u'teams', u'roles', u'competitive', u'interns', u'idexx',
u'interns', u'participate', u'internships', u'mentors', u'seminars', u'topics', u'leadership',
u'workshops', u'relevant', u'planning', u'topics', u'intern', u'presentations', u'mixers',
u'applicants', u'ineligible', u'laboratory', u'compliant', u'idexx', u'laboratories', u'healthcare',
u'innovation', u'practicing', u'veterinarians', u'diagnostic', u'technology', u'idexx', u'enhance',
u'veterinarians', u'efficiency', u'economically', u'idexx', u'worldwide', u'diagnostic', u'tests',
u'tests', u'quality', u'headquartered', u'idexx', u'laboratories', u'employs', u'customers',
u'qualifications', u'applicants', u'idexx', u'interns', u'potential', u'demonstrated', u'portfolio',
u'recommendation', u'resumes', u'marketing', u'location', u'americas', u'verification', u'validation',
u'schedule', u'overtime', u'idexx', u'laboratories', u'reviews', u'idexx', u'laboratories',
u'nasdaq', u'healthcare', u'innovation', u'practicing', u'veterinarians'],
[u'location', u'duration', u'temp', u'verification', u'validation', u'tester', u'verification',
u'validation', u'middleware', u'specifically', u'testing', u'applications', u'clinical',
u'laboratory', u'regulated', u'environment', u'responsibilities', u'complex', u'hardware',
u'testing', u'clinical', u'analyzers', u'laboratory', u'graphical', u'interfaces', u'complex',
u'sample', u'sequencing', u'protocols', u'developers', u'correction', u'tracking',
u'tool', u'timely', u'troubleshoot', u'testing', u'functional', u'manual',
u'automated', u'participate', u'ongoing'],
[u'testing', u'coverage', u'planning', u'documentation', u'testing', u'validation',
u'corrections', u'monitor', u'implementation', u'recurrence', u'operating', u'statistical',
u'quality', u'testing', u'global', u'multi', u'teams', u'travel', u'skills', u'concepts',
u'waterfall', u'agile', u'methodologies', u'debugging', u'skills', u'complex', u'automated',
u'instrumentation', u'environment', u'hardware', u'mechanical', u'components', u'tracking',
u'lifecycle', u'management', u'quality', u'organize', u'define', u'priorities', u'organize',
u'supervision', u'aggressive', u'deadlines', u'ambiguity', u'analyze', u'complex', u'situations',
u'concepts', u'technologies', u'verbal', u'skills', u'effectively', u'technical', u'clinical',
u'diverse', u'strategy', u'clinical', u'chemistry', u'analyzer', u'laboratory', u'middleware',
u'basic', u'automated', u'testing', u'biomedical', u'engineering', u'technologists',
u'laboratory', u'technology', u'availability', u'click', u'attach'],
[u'scientist', u'linux', u'asrc', u'scientist', u'linux', u'asrc', u'technology',
u'solutions', u'subsidiary', u'asrc', u'engineering', u'technology', u'contracts'],
[u'multiple', u'agencies', u'scientists', u'engineers', u'management', u'personnel',
u'allows', u'solutions', u'complex', u'aeronautics', u'aviation', u'management', u'aviation',
u'engineering', u'hughes', u'technical', u'technical', u'aviation', u'evaluation',
u'engineering', u'management', u'technical', u'terminal', u'surveillance', u'programs',
u'currently', u'scientist', u'travel', u'responsibilities', u'develops', u'technology',
u'modifies', u'technical', u'complex', u'reviews', u'draft', u'conformity', u'completeness',
u'testing', u'interface', u'hardware', u'regression', u'impact', u'reliability',
u'maintainability', u'factors', u'standardization', u'skills', u'travel', u'programming',
u'linux', u'environment', u'cisco', u'knowledge', u'terminal', u'environment', u'clearance',
u'clearance', u'input', u'output', u'digital', u'automatic', u'terminal', u'management',
u'controller', u'termination', u'testing', u'evaluating', u'policies', u'procedure', u'interface',
u'installation', u'verification', u'certification', u'core', u'avionic', u'programs', u'knowledge',
u'procedural', u'testing', u'interfacing', u'hardware', u'regression', u'impact',
u'reliability', u'maintainability', u'factors', u'standardization', u'missions', u'asrc', u'subsidiaries',
u'affirmative', u'employers', u'applicants', u'disability', u'veteran', u'technology', u'location',
u'airport', u'bachelor', u'schedule', u'travel', u'contributor', u'management', u'asrc', u'reviews'],
[u'technical', u'solarcity', u'niche', u'vegas', u'overview', u'resolving', u'customer',
u'clients', u'expanding', u'engineers', u'developers', u'responsibilities', u'knowledge',
u'planning', u'adapt', u'dynamic', u'environment', u'inventive', u'creative', u'solarcity',
u'lifecycle', u'responsibilities', u'technical', u'analyzing', u'diagnosing', u'troubleshooting',
u'customers', u'ticketing', u'console', u'escalate', u'knowledge', u'engineering', u'timely',
u'basic', u'phone', u'functionality', u'customer', u'tracking', u'knowledgebase', u'rotation',
u'configure', u'deployment', u'sccm', u'technical', u'deployment', u'deploy', u'hardware',
u'solarcity', u'bachelor', u'knowledge', u'dell', u'laptops', u'analytical', u'troubleshooting',
u'solving', u'skills', u'knowledge', u'databases', u'preferably', u'server', u'preferably',
u'monitoring', u'suites', u'documentation', u'procedures', u'knowledge', u'entries', u'verbal',
u'skills', u'customer', u'skills', u'competitive', u'solar', u'package', u'insurance', u'vacation',
u'savings', u'referral', u'eligibility', u'equity', u'performers', u'solarcity', u'affirmative',
u'diversity', u'workplace', u'applicants', u'orientation', u'disability', u'veteran', u'careerrookie'],
[u'embedded', u'exelis', u'junction', u'exelis', u'embedded', u'acquisition', u'networking',
u'capabilities', u'classified', u'customer', u'motivated', u'develops', u'tests',
u'innovative', u'solutions', u'minimal', u'supervision', u'paced', u'environment', u'enjoys',
u'assignments', u'interact', u'multi', u'disciplined', u'challenging', u'focused', u'embedded',
u'developments', u'spanning', u'engineering', u'lifecycle', u'specification', u'enhancement',
u'applications', u'embedded', u'freescale', u'applications', u'android', u'platforms',
u'interface', u'customers', u'developers', u'refine', u'specifications', u'architectures'],
[u'java', u'programming', u'scripts', u'python', u'debug', u'debugging', u'emulators',
u'regression', u'revisions', u'specialized', u'setups', u'capabilities', u'subversion',
u'technical', u'documentation', u'multiple', u'engineering', u'techexpousa', u'reviews'],
[u'modeler', u'semantic', u'modeling', u'models', u'skills', u'ontology', u'resource',
u'framework', u'schema', u'technologies', u'hadoop', u'warehouse', u'oracle', u'relational',
u'artifacts', u'models', u'dictionaries', u'models', u'interface', u'specifications',
u'documentation', u'harmonization', u'mappings', u'aligned', u'coordinate', u'technical',
u'peer', u'reviews', u'stakeholder', u'communities', u'impact', u'domains', u'relationships',
u'interdependencies', u'models', u'define', u'analyze', u'legacy', u'models', u'corporate',
u'databases', u'architectural', u'alignment', u'customer', u'expertise', u'harmonization',
u'modeling', u'modeling', u'consulting', u'stakeholders', u'quality', u'models', u'storage',
u'agile', u'specifically', u'focus', u'modeling', u'qualifications', u'bachelors', u'accredited',
u'modeler', u'encompass', u'evaluation', u'skills', u'knowledge', u'modeling', u'techniques',
u'resource', u'framework', u'schema', u'technologies', u'unified', u'modeling', u'technologies',
u'schemas', u'ontologies', u'sybase', u'knowledge', u'skills', u'interpersonal', u'skills',
u'customers', u'clearance', u'applicants', u'eligibility', u'classified', u'clearance',
u'polygraph', u'techexpousa', u'solutions', u'partnership', u'solutions', u'integration'],
[u'technologies', u'junction', u'develops', u'maintains', u'enhances', u'complex', u'diverse',
u'intensive', u'analytics', u'algorithm', u'manipulation', u'management', u'documented',
u'individually', u'reviews', u'tests', u'components', u'adherence', u'resolves', u'utilizes',
u'methodologies', u'environment', u'input', u'components', u'hardware', u'offs', u'reuse', u'cots',
u'gots', u'synthesis', u'components', u'tasks', u'individually', u'analyzes', u'modifies',
u'debugs', u'corrects', u'integrates', u'operating', u'environments', u'develops', u'queries',
u'databases', u'repositories', u'recommendations', u'improving', u'documentation', u'develops',
u'implements', u'algorithms', u'functional', u'assists', u'developing', u'executing', u'procedures',
u'components', u'reviews', u'documentation', u'solutions', u'analyzing', u'conferring',
u'users', u'engineers', u'analyzing', u'investigating', u'areas', u'adapt', u'hardware',
u'mathematical', u'models', u'predict', u'outcome', u'implement', u'complex', u'database',
u'repository', u'interfaces', u'queries', u'bachelors', u'accredited', u'substituted',
u'bachelors', u'firewalls', u'ipsec', u'vpns', u'technology', u'administering', u'servers',
u'apache', u'jboss', u'tomcat', u'developing', u'interfaces', u'firefox', u'internet',
u'explorer', u'operating', u'mainframe', u'linux', u'solaris', u'virtual', u'scripting',
u'programming', u'oriented', u'programming', u'ajax', u'script', u'procedures', u'cobol',
u'cognos', u'fusion', u'focus', u'html', u'java', u'java', u'script', u'jquery', u'perl',
u'visual', u'basic', u'powershell', u'cots', u'cots', u'oracle', u'apex', u'integration',
u'competitive', u'package', u'bonus', u'corporate', u'equity', u'tuition', u'reimbursement',
u'referral', u'bonus', u'holidays', u'insurance', u'flexible', u'disability', u'insurance'],
[u'technologies', u'disability', u'accommodation', u'recruiter', u'techexpousa'],
['bank', 'river', 'shore', 'water'],
['river', 'water', 'flow', 'fast', 'tree'],
['bank', 'water', 'fall', 'flow'],
['bank', 'bank', 'water', 'rain', 'river'],
['river', 'water', 'mud', 'tree'],
['money', 'transaction', 'bank', 'finance'],
['bank', 'borrow', 'money'],
['bank', 'finance'],
['finance', 'money', 'sell', 'bank'],
['borrow', 'sell'],
['bank', 'loan', 'sell']
]
        # Initialize using our own LDA sufficient statistics so that we get the same results each time.
sstats = np.loadtxt(datapath('DTM/sstats_test.txt'))
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
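        # time_slice below partitions the 31 documents chronologically into
        # slices of 10, 10 and 11 documents.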
self.ldaseq = ldaseqmodel.LdaSeqModel(
corpus=corpus, id2word=dictionary, num_topics=2,
time_slice=[10, 10, 11], initialize='own', sstats=sstats,
passes=2, lda_inference_max_iter=10, em_min_iter=1, em_max_iter=4
)
# testing topic word proportions
def testTopicWord(self):
topics = self.ldaseq.print_topics(0)
expected_topic_word = [('skills', 0.035999999999999997)]
self.assertEqual(topics[0][0][0], expected_topic_word[0][0])
self.assertAlmostEqual(topics[0][0][1], expected_topic_word[0][1], places=2)
# testing document-topic proportions
def testDocTopic(self):
doc_topic = self.ldaseq.doc_topics(0)
expected_doc_topic = 0.00066577896138482028
self.assertAlmostEqual(doc_topic[0], expected_doc_topic, places=2)
def testDtypeBackwardCompatibility(self):
ldaseq_3_0_1_fname = datapath('DTM/ldaseq_3_0_1_model')
test_doc = [(547, 1), (549, 1), (552, 1), (555, 1)]
expected_topics = [0.99751244, 0.00248756]
# save model to use in test
# self.ldaseq.save(ldaseq_3_0_1_fname)
# load a model saved using a 3.0.1 version of Gensim
model = ldaseqmodel.LdaSeqModel.load(ldaseq_3_0_1_fname)
# and test it on a predefined document
topics = model[test_doc]
self.assertTrue(np.allclose(expected_topics, topics))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
import requests.exceptions
from homeassistant import config_entries, setup
from homeassistant.components.flume.const import DOMAIN
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_PASSWORD,
CONF_USERNAME,
)
from tests.async_mock import MagicMock, patch
def _get_mocked_flume_device_list():
flume_device_list_mock = MagicMock()
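    # These tests only need device_list to be present and truthy, so a single
    # placeholder entry suffices.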
type(flume_device_list_mock).device_list = ["mock"]
return flume_device_list_mock
async def test_form(hass):
"""Test we get the form and can setup from user input."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mock_flume_device_list = _get_mocked_flume_device_list()
with patch(
"homeassistant.components.flume.config_flow.FlumeAuth",
return_value=True,
), patch(
"homeassistant.components.flume.config_flow.FlumeDeviceList",
return_value=mock_flume_device_list,
), patch(
"homeassistant.components.flume.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.flume.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "test-username"
assert result2["data"] == {
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import(hass):
"""Test we can import the sensor platform config."""
await setup.async_setup_component(hass, "persistent_notification", {})
mock_flume_device_list = _get_mocked_flume_device_list()
with patch(
"homeassistant.components.flume.config_flow.FlumeAuth",
return_value=True,
), patch(
"homeassistant.components.flume.config_flow.FlumeDeviceList",
return_value=mock_flume_device_list,
), patch(
"homeassistant.components.flume.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.flume.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "test-username"
assert result["data"] == {
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.flume.config_flow.FlumeAuth",
return_value=True,
), patch(
"homeassistant.components.flume.config_flow.FlumeDeviceList",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.flume.config_flow.FlumeAuth",
return_value=True,
), patch(
"homeassistant.components.flume.config_flow.FlumeDeviceList",
side_effect=requests.exceptions.ConnectionError(),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_CLIENT_ID: "client_id",
CONF_CLIENT_SECRET: "client_secret",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
|
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api.decorators import (
async_response,
require_admin,
)
from homeassistant.core import callback
from homeassistant.helpers.area_registry import async_get_registry
WS_TYPE_LIST = "config/area_registry/list"
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_LIST}
)
WS_TYPE_CREATE = "config/area_registry/create"
SCHEMA_WS_CREATE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_CREATE, vol.Required("name"): str}
)
WS_TYPE_DELETE = "config/area_registry/delete"
SCHEMA_WS_DELETE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_DELETE, vol.Required("area_id"): str}
)
WS_TYPE_UPDATE = "config/area_registry/update"
SCHEMA_WS_UPDATE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{
vol.Required("type"): WS_TYPE_UPDATE,
vol.Required("area_id"): str,
vol.Required("name"): str,
}
)
async def async_setup(hass):
"""Enable the Area Registry views."""
hass.components.websocket_api.async_register_command(
WS_TYPE_LIST, websocket_list_areas, SCHEMA_WS_LIST
)
hass.components.websocket_api.async_register_command(
WS_TYPE_CREATE, websocket_create_area, SCHEMA_WS_CREATE
)
hass.components.websocket_api.async_register_command(
WS_TYPE_DELETE, websocket_delete_area, SCHEMA_WS_DELETE
)
hass.components.websocket_api.async_register_command(
WS_TYPE_UPDATE, websocket_update_area, SCHEMA_WS_UPDATE
)
return True
@async_response
async def websocket_list_areas(hass, connection, msg):
"""Handle list areas command."""
registry = await async_get_registry(hass)
connection.send_message(
websocket_api.result_message(
msg["id"],
[
{"name": entry.name, "area_id": entry.id}
for entry in registry.async_list_areas()
],
)
)
@require_admin
@async_response
async def websocket_create_area(hass, connection, msg):
"""Create area command."""
registry = await async_get_registry(hass)
try:
entry = registry.async_create(msg["name"])
except ValueError as err:
connection.send_message(
websocket_api.error_message(msg["id"], "invalid_info", str(err))
)
else:
connection.send_message(
websocket_api.result_message(msg["id"], _entry_dict(entry))
)
@require_admin
@async_response
async def websocket_delete_area(hass, connection, msg):
"""Delete area command."""
registry = await async_get_registry(hass)
try:
await registry.async_delete(msg["area_id"])
except KeyError:
connection.send_message(
websocket_api.error_message(
msg["id"], "invalid_info", "Area ID doesn't exist"
)
)
else:
connection.send_message(websocket_api.result_message(msg["id"], "success"))
@require_admin
@async_response
async def websocket_update_area(hass, connection, msg):
"""Handle update area websocket command."""
registry = await async_get_registry(hass)
try:
entry = registry.async_update(msg["area_id"], msg["name"])
except ValueError as err:
connection.send_message(
websocket_api.error_message(msg["id"], "invalid_info", str(err))
)
else:
connection.send_message(
websocket_api.result_message(msg["id"], _entry_dict(entry))
)
@callback
def _entry_dict(entry):
"""Convert entry to API format."""
return {"area_id": entry.id, "name": entry.name}
|
from datetime import timedelta
from unittest import mock
from requests.exceptions import HTTPError
from homeassistant.components.binary_sensor import DOMAIN
from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_FRIENDLY_NAME,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import MOCK_CONFIG, FritzDeviceBinarySensorMock
from tests.async_mock import Mock
from tests.common import async_fire_time_changed
ENTITY_ID = f"{DOMAIN}.fake_name"
async def setup_fritzbox(hass: HomeAssistantType, config: dict):
"""Set up mock AVM Fritz!Box."""
assert await async_setup_component(hass, FB_DOMAIN, config)
await hass.async_block_till_done()
async def test_setup(hass: HomeAssistantType, fritz: Mock):
"""Test setup of platform."""
device = FritzDeviceBinarySensorMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.state == STATE_ON
assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name"
assert state.attributes[ATTR_DEVICE_CLASS] == "window"
async def test_is_off(hass: HomeAssistantType, fritz: Mock):
"""Test state of platform."""
device = FritzDeviceBinarySensorMock()
device.present = False
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.state == STATE_OFF
async def test_update(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceBinarySensorMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert device.update.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert device.update.call_count == 2
assert fritz().login.call_count == 1
async def test_update_error(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceBinarySensorMock()
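    # mock.DEFAULT as the first side_effect item means "return the normal
    # mocked value" on the first call; the second call then raises HTTPError,
    # which should force a re-login (asserted below via login.call_count).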
device.update.side_effect = [mock.DEFAULT, HTTPError("Boom")]
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert device.update.call_count == 1
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert device.update.call_count == 2
assert fritz().login.call_count == 2
|
import unittest
import numpy as np
import numpy.testing as np_test
from scipy.special import beta
from scipy.stats import multivariate_normal
from pgmpy.factors.distributions import CustomDistribution
class TestCustomDistribution(unittest.TestCase):
def pdf1(self, x, y):
return np.power(x, 1) * np.power(y, 2) / beta(x, y)
def pdf2(self, *args):
return multivariate_normal.pdf(args, [0, 0], [[1, 0], [0, 1]])
def pdf3(self, x, y, z):
return z * (np.power(x, 1) * np.power(y, 2)) / beta(x, y)
def test_class_init(self):
phi1 = CustomDistribution(["x", "y"], self.pdf1)
self.assertEqual(phi1.variables, ["x", "y"])
self.assertEqual(phi1._pdf, self.pdf1)
phi2 = CustomDistribution(["x1", "x2"], self.pdf2)
self.assertEqual(phi2.variables, ["x1", "x2"])
self.assertEqual(phi2._pdf, self.pdf2)
phi3 = CustomDistribution(["x", "y", "z"], self.pdf3)
self.assertEqual(phi3.variables, ["x", "y", "z"])
self.assertEqual(phi3._pdf, self.pdf3)
def test_class_init_typeerror(self):
self.assertRaises(TypeError, CustomDistribution, "x y", self.pdf1)
self.assertRaises(TypeError, CustomDistribution, "x", self.pdf1)
self.assertRaises(TypeError, CustomDistribution, "x1 x2", self.pdf2)
self.assertRaises(TypeError, CustomDistribution, "x1", self.pdf1)
self.assertRaises(TypeError, CustomDistribution, "x y z", self.pdf3)
self.assertRaises(TypeError, CustomDistribution, "x", self.pdf3)
self.assertRaises(TypeError, CustomDistribution, set(["x", "y"]), self.pdf1)
self.assertRaises(TypeError, CustomDistribution, {"x": 1, "y": 2}, self.pdf1)
self.assertRaises(TypeError, CustomDistribution, set(["x1", "x2"]), self.pdf2)
self.assertRaises(TypeError, CustomDistribution, {"x1": 1, "x2": 2}, self.pdf1)
self.assertRaises(
TypeError, CustomDistribution, set(["x", "y", "z"]), self.pdf3
)
self.assertRaises(
TypeError, CustomDistribution, {"x": 1, "y": 2, "z": 3}, self.pdf3
)
def test_class_init_valueerror(self):
self.assertRaises(ValueError, CustomDistribution, ["x", "x"], self.pdf1)
self.assertRaises(ValueError, CustomDistribution, ["x", "y", "y"], self.pdf1)
self.assertRaises(ValueError, CustomDistribution, ["x1", "x1"], self.pdf2)
self.assertRaises(ValueError, CustomDistribution, ["x1", "x2", "x2"], self.pdf2)
self.assertRaises(ValueError, CustomDistribution, ["x", "x"], self.pdf1)
self.assertRaises(
ValueError, CustomDistribution, ["x", "y", "y", "z", "z"], self.pdf1
)
class TestCustomDistributionMethods(unittest.TestCase):
def pdf1(self, x, y):
return np.power(x, 1) * np.power(y, 2) / beta(x, y)
def pdf2(self, x1, x2):
return multivariate_normal.pdf([x1, x2], [0, 0], [[1, 0], [0, 1]])
def pdf3(self, x, y, z):
return z * (np.power(x, 1) * np.power(y, 2)) / beta(x, y)
def pdf4(self, x1, x2, x3):
return multivariate_normal.pdf(
[x1, x2, x3], [0, 0, 0], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
)
def setUp(self):
self.phi1 = CustomDistribution(["x", "y"], self.pdf1)
self.phi2 = CustomDistribution(["x1", "x2"], self.pdf2)
self.phi3 = CustomDistribution(["x", "y", "z"], self.pdf3)
self.phi4 = CustomDistribution(["x1", "x2", "x3"], self.pdf4)
def test_variables(self):
self.assertEqual(self.phi1.variables, self.phi1._variables)
self.assertEqual(self.phi2.variables, self.phi2._variables)
self.assertEqual(self.phi3.variables, self.phi3._variables)
def test_assignment(self):
self.assertEqual(self.phi1.assignment(1.212, 2), self.pdf1(1.212, 2))
self.assertEqual(self.phi2.assignment(1, -2.231), self.pdf2(1, -2.231))
self.assertEqual(
self.phi3.assignment(1.212, 2.213, -3), self.pdf3(1.212, 2.213, -3)
)
def test_reduce(self):
phi1 = self.phi1.copy()
phi1.reduce([("x", 1)])
def reduced_pdf1(y):
return (np.power(1, 1) * np.power(y, 2)) / beta(1, y)
self.assertEqual(phi1.variables, ["y"])
for inp in np.random.rand(4):
self.assertEqual(phi1._pdf(inp), reduced_pdf1(inp))
self.assertEqual(phi1._pdf(y=inp), reduced_pdf1(inp))
phi1 = self.phi1.reduce([("x", 1)], inplace=False)
self.assertEqual(phi1.variables, ["y"])
for inp in np.random.rand(4):
self.assertEqual(phi1._pdf(inp), reduced_pdf1(inp))
self.assertEqual(phi1._pdf(y=inp), reduced_pdf1(inp))
phi2 = self.phi2.copy()
phi2.reduce([("x2", 7.213)])
def reduced_pdf2(x1):
return multivariate_normal.pdf([x1, 7.213], [0, 0], [[1, 0], [0, 1]])
self.assertEqual(phi2.variables, ["x1"])
for inp in np.random.rand(4):
self.assertEqual(phi2._pdf(inp), reduced_pdf2(inp))
self.assertEqual(phi2._pdf(x1=inp), reduced_pdf2(inp))
phi2 = self.phi2.reduce([("x2", 7.213)], inplace=False)
self.assertEqual(phi2.variables, ["x1"])
for inp in np.random.rand(4):
self.assertEqual(phi2._pdf(inp), reduced_pdf2(inp))
self.assertEqual(phi2._pdf(x1=inp), reduced_pdf2(inp))
phi3 = self.phi3.copy()
phi3.reduce([("y", 0.112), ("z", 23)])
def reduced_pdf4(x):
return 23 * (np.power(x, 1) * np.power(0.112, 2)) / beta(x, 0.112)
self.assertEqual(phi3.variables, ["x"])
for inp in np.random.rand(4):
self.assertEqual(phi3._pdf(inp), reduced_pdf4(inp))
self.assertEqual(phi3._pdf(x=inp), reduced_pdf4(inp))
phi3 = self.phi3.copy()
phi3.reduce([("y", 0.112)])
def reduced_pdf3(x, z):
return z * (np.power(x, 1) * np.power(0.112, 2)) / beta(x, 0.112)
self.assertEqual(phi3.variables, ["x", "z"])
for inp in np.random.rand(4, 2):
self.assertEqual(phi3._pdf(inp[0], inp[1]), reduced_pdf3(inp[0], inp[1]))
self.assertEqual(
phi3._pdf(x=inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1])
)
phi3 = self.phi3.reduce([("y", 0.112)], inplace=False)
self.assertEqual(phi3.variables, ["x", "z"])
for inp in np.random.rand(4, 2):
self.assertEqual(phi3._pdf(inp[0], inp[1]), reduced_pdf3(inp[0], inp[1]))
self.assertEqual(
phi3._pdf(x=inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1])
)
self.assertEqual(phi3._pdf(inp[0], z=inp[1]), reduced_pdf3(inp[0], inp[1]))
phi3 = self.phi3.reduce([("y", 0.112), ("z", 23)], inplace=False)
self.assertEqual(phi3.variables, ["x"])
for inp in np.random.rand(4):
self.assertEqual(phi3._pdf(inp), reduced_pdf4(inp))
self.assertEqual(phi3._pdf(x=inp), reduced_pdf4(inp))
def test_reduce_error(self):
self.assertRaises(TypeError, self.phi1.reduce, "x1")
self.assertRaises(TypeError, self.phi1.reduce, set(["x", "y"]))
self.assertRaises(TypeError, self.phi1.reduce, {"x": 1, "y": 1})
self.assertRaises(TypeError, self.phi4.reduce, "x4")
self.assertRaises(TypeError, self.phi4.reduce, set(["x1", "x2", "x3"]))
self.assertRaises(TypeError, self.phi4.reduce, {"x1": 1, "x2": 1, "x3": 1})
self.assertRaises(ValueError, self.phi1.reduce, [("z", 3)])
self.assertRaises(ValueError, self.phi1.reduce, [("x", 0), ("y", 1), ("z", 4)])
self.assertRaises(ValueError, self.phi4.reduce, [("x4", 7)])
self.assertRaises(
ValueError, self.phi4.reduce, [("x1", 1), ("x2", 2), ("x3", 3), ("x4", 4)]
)
def test_marginalize(self):
phi2 = self.phi2.copy()
phi2.marginalize(["x2"])
self.assertEqual(phi2.variables, ["x1"])
for inp in np.random.rand(4):
np_test.assert_almost_equal(
phi2._pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi2 = self.phi2.marginalize(["x2"], inplace=False)
self.assertEqual(phi2.variables, ["x1"])
for inp in np.random.rand(4):
np_test.assert_almost_equal(
phi2._pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi4 = self.phi4.copy()
phi4.marginalize(["x2"])
self.assertEqual(phi4.variables, ["x1", "x3"])
for inp in np.random.rand(4, 2):
np_test.assert_almost_equal(
phi4._pdf(inp[0], inp[1]),
multivariate_normal.pdf([inp[0], inp[1]], [0, 0], [[1, 0], [0, 1]]),
)
phi4.marginalize(["x3"])
self.assertEqual(phi4.variables, ["x1"])
for inp in np.random.rand(1):
np_test.assert_almost_equal(
phi4._pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
phi4 = self.phi4.marginalize(["x2"], inplace=False)
self.assertEqual(phi4.variables, ["x1", "x3"])
for inp in np.random.rand(4, 2):
np_test.assert_almost_equal(
phi4._pdf(inp[0], inp[1]),
multivariate_normal.pdf([inp[0], inp[1]], [0, 0], [[1, 0], [0, 1]]),
)
phi4 = phi4.marginalize(["x3"], inplace=False)
self.assertEqual(phi4.variables, ["x1"])
for inp in np.random.rand(1):
np_test.assert_almost_equal(
phi4._pdf(inp), multivariate_normal.pdf([inp], [0], [[1]])
)
def test_marginalize_error(self):
self.assertRaises(TypeError, self.phi1.marginalize, "x1")
self.assertRaises(TypeError, self.phi1.marginalize, set(["x", "y"]))
self.assertRaises(TypeError, self.phi1.marginalize, {"x": 1, "y": 1})
self.assertRaises(TypeError, self.phi4.marginalize, "x4")
self.assertRaises(TypeError, self.phi4.marginalize, set(["x1", "x2", "x3"]))
self.assertRaises(TypeError, self.phi4.marginalize, {"x1": 1, "x2": 1, "x3": 1})
self.assertRaises(ValueError, self.phi1.marginalize, ["z"])
self.assertRaises(ValueError, self.phi1.marginalize, ["x", "y", "z"])
self.assertRaises(ValueError, self.phi4.marginalize, ["x4"])
self.assertRaises(ValueError, self.phi4.marginalize, ["x1", "x2", "x3", "x4"])
def test_normalize(self):
def pdf2(x1, x2):
return 2 * self.pdf2(x1, x2)
phi2 = CustomDistribution(["x1", "x2"], pdf2)
phi4 = phi2.copy()
phi4.normalize()
self.assertEqual(phi4.variables, phi2.variables)
for inp in np.random.rand(1, 2):
np_test.assert_almost_equal(
phi4._pdf(inp[0], inp[1]), self.pdf2(inp[0], inp[1])
)
phi4 = phi2.normalize(inplace=False)
        self.assertEqual(phi4.variables, phi2.variables)
for inp in np.random.rand(1, 2):
np_test.assert_almost_equal(
phi4._pdf(inp[0], inp[1]), self.pdf2(inp[0], inp[1])
)
def test_operate(self):
phi1 = self.phi1.copy()
phi1._operate(self.phi2, "product")
self.assertEqual(phi1.variables, ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1._pdf(*inp),
self.phi1._pdf(inp[0], inp[1]) * self.phi2._pdf(inp[2], inp[3]),
)
phi1 = self.phi1._operate(self.phi2, "product", inplace=False)
self.assertEqual(phi1.variables, ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1._pdf(*inp),
self.phi1._pdf(inp[0], inp[1]) * self.phi2._pdf(inp[2], inp[3]),
)
phi1 = self.phi1 * self.phi2
self.assertEqual(phi1.variables, ["x", "y", "x1", "x2"])
for inp in np.random.rand(4, 4):
self.assertEqual(
phi1._pdf(*inp),
self.phi1._pdf(inp[0], inp[1]) * self.phi2._pdf(inp[2], inp[3]),
)
phi3 = self.phi3.copy()
phi3._operate(self.phi1, "product")
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) * self.phi1._pdf(inp[0], inp[1])
)
phi3 = self.phi3._operate(self.phi1, "product", inplace=False)
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) * self.phi1._pdf(inp[0], inp[1])
)
phi3 = self.phi3 * self.phi1
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) * self.phi1._pdf(inp[0], inp[1])
)
phi3 = self.phi3.copy()
phi3._operate(self.phi1, "divide")
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) / self.phi1._pdf(inp[0], inp[1])
)
phi3 = self.phi3._operate(self.phi1, "divide", inplace=False)
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) / self.phi1._pdf(inp[0], inp[1])
)
phi3 = self.phi3 / self.phi1
self.assertEqual(phi3.variables, ["x", "y", "z"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi3._pdf(*inp), self.phi3._pdf(*inp) / self.phi1._pdf(inp[0], inp[1])
)
phi4 = self.phi4.copy()
phi4._operate(self.phi2, "product")
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) * self.phi2._pdf(inp[0], inp[1])
)
phi4 = self.phi4._operate(self.phi2, "product", inplace=False)
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) * self.phi2._pdf(inp[0], inp[1])
)
phi4 = self.phi4 * self.phi2
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) * self.phi2._pdf(inp[0], inp[1])
)
phi4 = self.phi4.copy()
phi4._operate(self.phi2, "divide")
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) / self.phi2._pdf(inp[0], inp[1])
)
phi4 = self.phi4._operate(self.phi2, "divide", inplace=False)
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) / self.phi2._pdf(inp[0], inp[1])
)
phi4 = self.phi4 / self.phi2
self.assertEqual(phi4.variables, ["x1", "x2", "x3"])
for inp in np.random.rand(4, 3):
self.assertEqual(
phi4._pdf(*inp), self.phi4._pdf(*inp) / self.phi2._pdf(inp[0], inp[1])
)
def test_operate_error(self):
self.assertRaises(TypeError, self.phi1._operate, 1, "product")
self.assertRaises(TypeError, self.phi1._operate, 1, "divide")
self.assertRaises(TypeError, self.phi1._operate, "1", "product")
self.assertRaises(TypeError, self.phi1._operate, "1", "divide")
self.assertRaises(TypeError, self.phi1._operate, self.phi2._pdf, "product")
self.assertRaises(TypeError, self.phi1._operate, self.phi2._pdf, "divide")
self.assertRaises(TypeError, self.phi1._operate, [1], "product")
self.assertRaises(TypeError, self.phi1._operate, [1], "divide")
self.assertRaises(TypeError, self.phi4._operate, 1, "product")
self.assertRaises(TypeError, self.phi4._operate, 1, "divide")
self.assertRaises(TypeError, self.phi4._operate, "1", "product")
self.assertRaises(TypeError, self.phi4._operate, "1", "divide")
self.assertRaises(TypeError, self.phi4._operate, self.phi2._pdf, "product")
self.assertRaises(TypeError, self.phi4._operate, self.phi2._pdf, "divide")
self.assertRaises(TypeError, self.phi4._operate, [1], "product")
self.assertRaises(TypeError, self.phi4._operate, [1], "divide")
self.assertRaises(TypeError, self.phi1._operate, 1, "product", False)
self.assertRaises(TypeError, self.phi1._operate, 1, "divide", False)
self.assertRaises(TypeError, self.phi1._operate, "1", "product", False)
self.assertRaises(TypeError, self.phi1._operate, "1", "divide", False)
self.assertRaises(
TypeError, self.phi1._operate, self.phi2._pdf, "product", False
)
self.assertRaises(
TypeError, self.phi1._operate, self.phi2._pdf, "divide", False
)
self.assertRaises(TypeError, self.phi1._operate, [1], "product", False)
self.assertRaises(TypeError, self.phi1._operate, [1], "divide", False)
self.assertRaises(TypeError, self.phi4._operate, 1, "product", False)
self.assertRaises(TypeError, self.phi4._operate, 1, "divide", False)
self.assertRaises(TypeError, self.phi4._operate, "1", "product", False)
self.assertRaises(TypeError, self.phi4._operate, "1", "divide", False)
self.assertRaises(
TypeError, self.phi4._operate, self.phi2._pdf, "product", False
)
self.assertRaises(
TypeError, self.phi4._operate, self.phi2._pdf, "divide", False
)
self.assertRaises(TypeError, self.phi4._operate, [1], "product", False)
self.assertRaises(TypeError, self.phi4._operate, [1], "divide", False)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi2)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi3)
self.assertRaises(ValueError, self.phi1.__truediv__, self.phi4)
self.assertRaises(ValueError, self.phi2.__truediv__, self.phi3)
self.assertRaises(ValueError, self.phi2.__truediv__, self.phi4)
def test_copy(self):
copy1 = self.phi1.copy()
copy2 = self.phi3.copy()
copy4 = copy1.copy()
copy5 = copy2.copy()
self.assertEqual(copy1.variables, copy4.variables)
self.assertEqual(copy1._pdf, copy4._pdf)
self.assertEqual(copy2.variables, copy5.variables)
self.assertEqual(copy2._pdf, copy5._pdf)
copy1.variables = ["A", "B"]
self.assertEqual(copy4.variables, self.phi1.variables)
def pdf(a, b):
return (a + b) / (a * a + b * b)
copy1._pdf = pdf
copy1_pdf = pdf
self.assertEqual(copy4._pdf, self.phi1._pdf)
copy4.variables = ["X", "Y"]
self.assertEqual(copy1.variables, ["A", "B"])
copy4._pdf = lambda a, b: a + b
for inp in np.random.rand(4, 2):
self.assertEqual(copy1._pdf(inp[0], inp[1]), copy1_pdf(inp[0], inp[1]))
copy2.reduce([("x", 7.7)])
def reduced_pdf(y, z):
return z * (np.power(7.7, 1) * np.power(y, 2)) / beta(7.7, y)
self.assertEqual(copy5.variables, self.phi3.variables)
self.assertEqual(copy5._pdf, self.phi3._pdf)
copy5.reduce([("x", 11), ("z", 13)])
self.assertEqual(copy2.variables, ["y", "z"])
for inp in np.random.rand(4, 2):
self.assertEqual(copy2._pdf(inp[0], inp[1]), reduced_pdf(inp[0], inp[1]))
def tearDown(self):
del self.phi1
del self.phi2
        del self.phi3
        del self.phi4
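# Conceptually, CustomDistribution.reduce fixes a subset of arguments of the
# wrapped pdf, loosely like functools.partial: phi3.reduce([("y", 0.112)])
# behaves like partial(pdf3, y=0.112) over the remaining variables ["x", "z"],
# which is exactly what test_reduce verifies above.
if __name__ == "__main__":
    unittest.main()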
|
import json
from datetime import date, datetime
from decimal import Decimal
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.utils import datetime_safe, timezone
from django.utils.dateparse import parse_duration
from django.utils.encoding import force_str, smart_str
class Widget:
"""
A Widget takes care of converting between import and export representations.
This is achieved by the two methods,
:meth:`~import_export.widgets.Widget.clean` and
:meth:`~import_export.widgets.Widget.render`.
"""
def clean(self, value, row=None, *args, **kwargs):
"""
Returns an appropriate Python object for an imported value.
For example, if you import a value from a spreadsheet,
:meth:`~import_export.widgets.Widget.clean` handles conversion
of this value into the corresponding Python object.
Numbers or dates can be *cleaned* to their respective data types and
don't have to be imported as Strings.
"""
return value
def render(self, value, obj=None):
"""
Returns an export representation of a Python value.
For example, if you have an object you want to export,
:meth:`~import_export.widgets.Widget.render` takes care of converting
the object's field to a value that can be written to a spreadsheet.
"""
return force_str(value)
class NumberWidget(Widget):
"""
"""
def is_empty(self, value):
if isinstance(value, str):
value = value.strip()
# 0 is not empty
return value is None or value == ""
def render(self, value, obj=None):
return value
class FloatWidget(NumberWidget):
"""
Widget for converting floats fields.
"""
def clean(self, value, row=None, *args, **kwargs):
if self.is_empty(value):
return None
return float(value)
class IntegerWidget(NumberWidget):
"""
Widget for converting integer fields.
"""
def clean(self, value, row=None, *args, **kwargs):
if self.is_empty(value):
return None
return int(float(value))
class DecimalWidget(NumberWidget):
"""
Widget for converting decimal fields.
"""
def clean(self, value, row=None, *args, **kwargs):
if self.is_empty(value):
return None
return Decimal(force_str(value))
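# Illustrative round trips for the numeric widgets above (a sketch, not part
# of the documented API):
#
#   IntegerWidget().clean("3.0")   -> 3                (via int(float(...)))
#   FloatWidget().clean("   ")     -> None             (blank strings are empty)
#   FloatWidget().clean(0)         -> 0.0              (zero is not empty)
#   DecimalWidget().clean("1.10")  -> Decimal('1.10')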
class CharWidget(Widget):
"""
Widget for converting text fields.
"""
def render(self, value, obj=None):
return force_str(value)
class BooleanWidget(Widget):
"""
Widget for converting boolean fields.
The widget assumes that ``True``, ``False``, and ``None`` are all valid
values, as to match Django's `BooleanField
<https://docs.djangoproject.com/en/dev/ref/models/fields/#booleanfield>`_.
That said, whether the database/Django will actually accept NULL values
will depend on if you have set ``null=True`` on that Django field.
While the BooleanWidget is set up to accept as input common variations of
"True" and "False" (and "None"), you may need to munge less common values
to ``True``/``False``/``None``. Probably the easiest way to do this is to
override the :func:`~import_export.resources.Resource.before_import_row`
function of your Resource class. A short example::
from import_export import fields, resources, widgets
class BooleanExample(resources.ModelResource):
warn = fields.Field(widget=widgets.BooleanWidget())
            def before_import_row(self, row, **kwargs):
if "warn" in row.keys():
# munge "warn" to "True"
if row["warn"] in ["warn", "WARN"]:
row["warn"] = True
return super().before_import_row(row, **kwargs)
"""
TRUE_VALUES = ["1", 1, True, "true", "TRUE", "True"]
FALSE_VALUES = ["0", 0, False, "false", "FALSE", "False"]
NULL_VALUES = ["", None, "null", "NULL", "none", "NONE", "None"]
def render(self, value, obj=None):
"""
On export, ``True`` is represented as ``1``, ``False`` as ``0``, and
        ``None``/NULL as an empty string.
Note that these values are also used on the import confirmation view.
"""
if value in self.NULL_VALUES:
return ""
return self.TRUE_VALUES[0] if value else self.FALSE_VALUES[0]
def clean(self, value, row=None, *args, **kwargs):
if value in self.NULL_VALUES:
return None
        return value in self.TRUE_VALUES
class DateWidget(Widget):
"""
Widget for converting date fields.
Takes optional ``format`` parameter.
"""
def __init__(self, format=None):
if format is None:
if not settings.DATE_INPUT_FORMATS:
formats = ("%Y-%m-%d",)
else:
formats = settings.DATE_INPUT_FORMATS
else:
formats = (format,)
self.formats = formats
def clean(self, value, row=None, *args, **kwargs):
if not value:
return None
if isinstance(value, date):
return value
for format in self.formats:
try:
return datetime.strptime(value, format).date()
except (ValueError, TypeError):
continue
raise ValueError("Enter a valid date.")
def render(self, value, obj=None):
if not value:
return ""
try:
return value.strftime(self.formats[0])
        except ValueError:
            # strftime historically fails for dates before 1900; fall back
            # to django.utils.datetime_safe for those.
            return datetime_safe.new_date(value).strftime(self.formats[0])
class DateTimeWidget(Widget):
"""
    Widget for converting datetime fields.
Takes optional ``format`` parameter. If none is set, either
``settings.DATETIME_INPUT_FORMATS`` or ``"%Y-%m-%d %H:%M:%S"`` is used.
"""
def __init__(self, format=None):
if format is None:
if not settings.DATETIME_INPUT_FORMATS:
formats = ("%Y-%m-%d %H:%M:%S",)
else:
formats = settings.DATETIME_INPUT_FORMATS
else:
formats = (format,)
self.formats = formats
def clean(self, value, row=None, *args, **kwargs):
if not value:
return None
if isinstance(value, datetime):
return value
for format in self.formats:
try:
dt = datetime.strptime(value, format)
if settings.USE_TZ:
# make datetime timezone aware so we don't compare
# naive datetime to an aware one
dt = timezone.make_aware(dt,
timezone.get_default_timezone())
return dt
except (ValueError, TypeError):
continue
raise ValueError("Enter a valid date/time.")
def render(self, value, obj=None):
if not value:
return ""
if settings.USE_TZ:
value = timezone.localtime(value)
return value.strftime(self.formats[0])
class TimeWidget(Widget):
"""
Widget for converting time fields.
Takes optional ``format`` parameter.
"""
def __init__(self, format=None):
if format is None:
if not settings.TIME_INPUT_FORMATS:
formats = ("%H:%M:%S",)
else:
formats = settings.TIME_INPUT_FORMATS
else:
formats = (format,)
self.formats = formats
def clean(self, value, row=None, *args, **kwargs):
if not value:
return None
for format in self.formats:
try:
return datetime.strptime(value, format).time()
except (ValueError, TypeError):
continue
raise ValueError("Enter a valid time.")
def render(self, value, obj=None):
if not value:
return ""
return value.strftime(self.formats[0])
class DurationWidget(Widget):
"""
Widget for converting time duration fields.
"""
def clean(self, value, row=None, *args, **kwargs):
if not value:
return None
try:
return parse_duration(value)
except (ValueError, TypeError):
raise ValueError("Enter a valid duration.")
def render(self, value, obj=None):
if value is None:
return ""
return str(value)
class SimpleArrayWidget(Widget):
"""
    Widget for an Array field. Can be used for Postgres' ``ArrayField``.
:param separator: Defaults to ``','``
"""
def __init__(self, separator=None):
if separator is None:
separator = ','
self.separator = separator
super().__init__()
def clean(self, value, row=None, *args, **kwargs):
return value.split(self.separator) if value else []
def render(self, value, obj=None):
return self.separator.join(str(v) for v in value)
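# Sketch of the array round trip (separator defaults to ','):
#
#   w = SimpleArrayWidget()
#   w.clean("a,b,c")     -> ["a", "b", "c"]
#   w.clean("")          -> []
#   w.render([1, 2, 3])  -> "1,2,3"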
class JSONWidget(Widget):
"""
    Widget for a JSON object (especially useful for ``jsonb`` fields in
    PostgreSQL).
    The widget covers two cases: a proper JSON string with double quotes is
    parsed as-is; otherwise single quotes are replaced with double quotes
    before parsing.
"""
def clean(self, value, row=None, *args, **kwargs):
val = super().clean(value)
if val:
try:
return json.loads(val)
except json.decoder.JSONDecodeError:
return json.loads(val.replace("'", "\""))
def render(self, value, obj=None):
if value:
return json.dumps(value)
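# The double-quote fallback in clean() at work (sketch):
#
#   JSONWidget().clean('{"a": 1}')  -> {'a': 1}   # valid JSON, parsed as-is
#   JSONWidget().clean("{'a': 1}")  -> {'a': 1}   # quotes rewritten, then parsed
#
# Note the rewrite is naive: string values containing apostrophes will still
# fail to parse.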
class ForeignKeyWidget(Widget):
"""
Widget for a ``ForeignKey`` field which looks up a related model using
"natural keys" in both export and import.
The lookup field defaults to using the primary key (``pk``) as lookup
criterion but can be customised to use any field on the related model.
Unlike specifying a related field in your resource like so…
::
class Meta:
fields = ('author__name',)
…using a :class:`~import_export.widgets.ForeignKeyWidget` has the
advantage that it can not only be used for exporting, but also importing
data with foreign key relationships.
Here's an example on how to use
:class:`~import_export.widgets.ForeignKeyWidget` to lookup related objects
using ``Author.name`` instead of ``Author.pk``::
from import_export import fields, resources
from import_export.widgets import ForeignKeyWidget
class BookResource(resources.ModelResource):
author = fields.Field(
column_name='author',
attribute='author',
widget=ForeignKeyWidget(Author, 'name'))
class Meta:
fields = ('author',)
:param model: The Model the ForeignKey refers to (required).
:param field: A field on the related model used for looking up a particular object.
"""
def __init__(self, model, field='pk', *args, **kwargs):
self.model = model
self.field = field
super().__init__(*args, **kwargs)
def get_queryset(self, value, row, *args, **kwargs):
"""
Returns a queryset of all objects for this Model.
Overwrite this method if you want to limit the pool of objects from
which the related object is retrieved.
:param value: The field's value in the datasource.
:param row: The datasource's current row.
        As an example, if you'd like to have ForeignKeyWidget look up a Person
        by their first **and** last name columns, you could subclass the widget
like so::
class FullNameForeignKeyWidget(ForeignKeyWidget):
def get_queryset(self, value, row):
return self.model.objects.filter(
first_name__iexact=row["first_name"],
last_name__iexact=row["last_name"]
)
"""
return self.model.objects.all()
def clean(self, value, row=None, *args, **kwargs):
val = super().clean(value)
if val:
return self.get_queryset(value, row, *args, **kwargs).get(**{self.field: val})
else:
return None
def render(self, value, obj=None):
if value is None:
return ""
attrs = self.field.split('__')
for attr in attrs:
try:
value = getattr(value, attr, None)
except (ValueError, ObjectDoesNotExist):
# needs to have a primary key value before a many-to-many
# relationship can be used.
return None
if value is None:
return None
return value
class ManyToManyWidget(Widget):
"""
    Widget that converts between representations of a ManyToMany relationship
    as a list and an actual ManyToMany field.
:param model: The model the ManyToMany field refers to (required).
:param separator: Defaults to ``','``.
:param field: A field on the related model. Default is ``pk``.
"""
def __init__(self, model, separator=None, field=None, *args, **kwargs):
if separator is None:
separator = ','
if field is None:
field = 'pk'
self.model = model
self.separator = separator
self.field = field
super().__init__(*args, **kwargs)
def clean(self, value, row=None, *args, **kwargs):
if not value:
return self.model.objects.none()
if isinstance(value, (float, int)):
ids = [int(value)]
else:
ids = value.split(self.separator)
ids = filter(None, [i.strip() for i in ids])
return self.model.objects.filter(**{
'%s__in' % self.field: ids
})
def render(self, value, obj=None):
ids = [smart_str(getattr(obj, self.field)) for obj in value.all()]
return self.separator.join(ids)
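# Usage sketch for ManyToManyWidget, assuming hypothetical ``Book`` and
# ``Category`` models with ``Book.categories`` as the ManyToMany field:
#
#   from import_export import fields, resources
#
#   class BookResource(resources.ModelResource):
#       categories = fields.Field(
#           column_name='categories',
#           attribute='categories',
#           widget=ManyToManyWidget(Category, separator='|', field='name'))
#
#       class Meta:
#           model = Book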
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import unittest
from absl import flags
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_packages import memtier
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
TEST_OUTPUT = """
4 Threads
50 Connections per thread
20 Seconds
Type Ops/sec Hits/sec Misses/sec Latency KB/sec
------------------------------------------------------------------------
Sets 4005.50 --- --- 4.50600 308.00
Gets 40001.05 0.00 40001.05 4.54300 1519.00
Totals 44006.55 0.00 40001.05 4.54000 1828.00
Request Latency Distribution
Type <= msec Percent
------------------------------------------------------------------------
SET 0 5.00
SET 1 10.00
SET 2 15.00
SET 3 30.00
SET 4 50.00
SET 5 70.00
SET 6 90.00
SET 7 95.00
SET 8 99.00
SET 9 100.00
---
GET 0 50.0
GET 2 100.00
"""
METADATA = {'test': 'foobar'}
class MemtierTestCase(unittest.TestCase, test_util.SamplesTestMixin):
def testParseResults(self):
get_metadata = {
'histogram': json.dumps([
{'microsec': 0.0, 'count': 4500},
{'microsec': 2000.0, 'count': 4500}])
}
get_metadata.update(METADATA)
set_metadata = {
'histogram': json.dumps([
{'microsec': 0.0, 'count': 50},
{'microsec': 1000.0, 'count': 50},
{'microsec': 2000.0, 'count': 50},
{'microsec': 3000.0, 'count': 150},
{'microsec': 4000.0, 'count': 200},
{'microsec': 5000.0, 'count': 200},
{'microsec': 6000.0, 'count': 200},
{'microsec': 7000.0, 'count': 50},
{'microsec': 8000.0, 'count': 40},
{'microsec': 9000.0, 'count': 10}])
}
set_metadata.update(METADATA)
expected_result = [
sample.Sample(
metric='Ops Throughput',
value=44006.55, unit='ops/s',
metadata=METADATA),
sample.Sample(
metric='KB Throughput',
value=1828.0,
unit='KB/s',
metadata=METADATA),
sample.Sample(
metric='get latency histogram',
value=0,
unit='',
metadata=get_metadata),
sample.Sample(
metric='set latency histogram',
value=0,
unit='',
metadata=set_metadata),
]
samples = []
samples.extend(memtier.ParseResults(TEST_OUTPUT, METADATA))
self.assertSampleListsEqualUpToTimestamp(samples, expected_result)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import sys
import argparse
import time
def main(args):
ap = argparse.ArgumentParser()
    ap.add_argument('job_ids', nargs='+', type=int, help='IDs of running jobs')
ns = ap.parse_args(args)
_stash = globals()['_stash']
""":type : StaSh"""
for job_id in ns.job_ids:
if job_id in _stash.runtime.worker_registry:
print('killing job {} ...'.format(job_id))
worker = _stash.runtime.worker_registry.get_worker(job_id)
worker.kill()
time.sleep(1)
else:
print('error: no such job with id: {}'.format(job_id))
break
if __name__ == '__main__':
main(sys.argv[1:])
|
from __future__ import absolute_import
import unittest
# These tests check that error handling in the Pyrex code is
# complete.
# It is likely that if there are errors, instead of failing the code
# will simply crash.
import sys, gc, os.path
from lxml import etree
from .common_imports import HelperTestCase
class ErrorTestCase(HelperTestCase):
etree = etree
def test_bad_element(self):
# attrib argument of Element() should be a dictionary, so if
# we pass a string we should get an error.
self.assertRaises(TypeError, self.etree.Element, 'a', 'b')
def test_empty_parse(self):
self.assertRaises(etree.XMLSyntaxError, etree.fromstring, '')
def test_element_cyclic_gc_none(self):
# test if cyclic reference can crash etree
Element = self.etree.Element
getrefcount = sys.getrefcount
# must disable tracing as it could change the refcounts
trace_func = sys.gettrace()
try:
sys.settrace(None)
gc.collect()
count = getrefcount(None)
l = [Element('name'), Element('name')]
l.append(l)
del l
gc.collect()
count = getrefcount(None) - count
self.assertEqual(count, 0)
finally:
sys.settrace(trace_func)
def test_xmlsyntaxerror_has_info(self):
broken_xml_name = 'test_broken.xml'
broken_xml_path = os.path.join(os.path.dirname(__file__), broken_xml_name)
fail_msg = 'test_broken.xml should raise an etree.XMLSyntaxError'
try:
etree.parse(broken_xml_path)
except etree.XMLSyntaxError as e:
# invariant
self.assertEqual(e.position, (e.lineno, e.offset + 1), 'position and lineno/offset out of sync')
# SyntaxError info derived from file & contents
self.assertTrue(e.filename.endswith(broken_xml_name), 'filename must be preserved')
self.assertEqual(e.lineno, 1)
self.assertEqual(e.offset, 10)
except Exception as e:
self.fail('{0}, not {1}'.format(fail_msg, type(e)))
else:
            self.fail(fail_msg)
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.makeSuite(ErrorTestCase)])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
from __future__ import print_function, unicode_literals, division
import sys
import codecs
import io
import argparse
from collections import defaultdict
# hack for python2/3 compatibility
from io import open
argparse.open = open
def create_parser():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="learn BPE-based word segmentation")
parser.add_argument(
'--ref', '-r', type=argparse.FileType('r'), required=True,
metavar='PATH',
help="Reference file")
parser.add_argument(
'--hyp', type=argparse.FileType('r'), metavar='PATH',
default=sys.stdin,
help="Hypothesis file (default: stdin).")
parser.add_argument(
'--beta', '-b', type=float, default=3,
metavar='FLOAT',
help="beta parameter (default: '%(default)s')")
parser.add_argument(
'--ngram', '-n', type=int, default=6,
metavar='INT',
help="ngram order (default: '%(default)s')")
parser.add_argument(
'--space', '-s', action='store_true',
help="take spaces into account (default: '%(default)s')")
parser.add_argument(
'--precision', action='store_true',
help="report precision (default: '%(default)s')")
parser.add_argument(
'--recall', action='store_true',
help="report recall (default: '%(default)s')")
return parser
def extract_ngrams(words, max_length=4, spaces=False):
if not spaces:
words = ''.join(words.split())
else:
words = words.strip()
results = defaultdict(lambda: defaultdict(int))
for length in range(max_length):
for start_pos in range(len(words)):
end_pos = start_pos + length + 1
if end_pos <= len(words):
results[length][tuple(words[start_pos: end_pos])] += 1
return results
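# Worked example (sketch): extract_ngrams("ab c", max_length=2, spaces=False)
# first strips the space ("abc") and then counts, per n-gram rank:
#
#   results[0] = {('a',): 1, ('b',): 1, ('c',): 1}
#   results[1] = {('a', 'b'): 1, ('b', 'c'): 1}
#
# Keys are tuples of characters because each slice is wrapped in tuple().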
def get_correct(ngrams_ref, ngrams_test, correct, total):
for rank in ngrams_test:
for chain in ngrams_test[rank]:
total[rank] += ngrams_test[rank][chain]
if chain in ngrams_ref[rank]:
correct[rank] += min(ngrams_test[rank][chain], ngrams_ref[rank][chain])
return correct, total
def f1(correct, total_hyp, total_ref, max_length, beta=3, smooth=0):
precision = 0
recall = 0
for i in range(max_length):
if total_hyp[i] + smooth and total_ref[i] + smooth:
precision += (correct[i] + smooth) / (total_hyp[i] + smooth)
recall += (correct[i] + smooth) / (total_ref[i] + smooth)
precision /= max_length
recall /= max_length
return (1 + beta**2) * (precision*recall) / ((beta**2 * precision) + recall), precision, recall
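# f1 averages precision/recall over the n-gram ranks and combines them with
# the usual weighted F-score:
#
#   chrF_beta = (1 + beta**2) * P * R / (beta**2 * P + R)
#
# The default beta=3 weights recall three times as heavily as precision,
# hence the "chrF3" label printed in main(). Note the division raises
# ZeroDivisionError when both P and R are zero.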
def main(args):
correct = [0]*args.ngram
total = [0]*args.ngram
total_ref = [0]*args.ngram
for line in args.ref:
line2 = args.hyp.readline()
ngrams_ref = extract_ngrams(line, max_length=args.ngram, spaces=args.space)
ngrams_test = extract_ngrams(line2, max_length=args.ngram, spaces=args.space)
get_correct(ngrams_ref, ngrams_test, correct, total)
for rank in ngrams_ref:
for chain in ngrams_ref[rank]:
total_ref[rank] += ngrams_ref[rank][chain]
chrf, precision, recall = f1(correct, total, total_ref, args.ngram, args.beta)
print('chrF3: {0:.4f}'.format(chrf))
if args.precision:
print('chrPrec: {0:.4f}'.format(precision))
if args.recall:
print('chrRec: {0:.4f}'.format(recall))
if __name__ == '__main__':
# python 2/3 compatibility
if sys.version_info < (3, 0):
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
else:
sys.stdin = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', write_through=True, line_buffering=True)
parser = create_parser()
args = parser.parse_args()
main(args)
|
from datetime import timedelta
from requests.exceptions import HTTPError
from homeassistant.components.climate.const import (
ATTR_CURRENT_TEMPERATURE,
ATTR_HVAC_MODE,
ATTR_HVAC_MODES,
ATTR_MAX_TEMP,
ATTR_MIN_TEMP,
ATTR_PRESET_MODE,
ATTR_PRESET_MODES,
DOMAIN,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_COMFORT,
PRESET_ECO,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.components.fritzbox.const import (
ATTR_STATE_BATTERY_LOW,
ATTR_STATE_DEVICE_LOCKED,
ATTR_STATE_HOLIDAY_MODE,
ATTR_STATE_LOCKED,
ATTR_STATE_SUMMER_MODE,
ATTR_STATE_WINDOW_OPEN,
DOMAIN as FB_DOMAIN,
)
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_TEMPERATURE,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import MOCK_CONFIG, FritzDeviceClimateMock
from tests.async_mock import Mock, call
from tests.common import async_fire_time_changed
ENTITY_ID = f"{DOMAIN}.fake_name"
async def setup_fritzbox(hass: HomeAssistantType, config: dict):
"""Set up mock AVM Fritz!Box."""
assert await async_setup_component(hass, FB_DOMAIN, config) is True
await hass.async_block_till_done()
async def test_setup(hass: HomeAssistantType, fritz: Mock):
"""Test setup of platform."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.attributes[ATTR_BATTERY_LEVEL] == 23
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18
assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name"
assert state.attributes[ATTR_HVAC_MODES] == [HVAC_MODE_HEAT, HVAC_MODE_OFF]
assert state.attributes[ATTR_MAX_TEMP] == 28
assert state.attributes[ATTR_MIN_TEMP] == 8
assert state.attributes[ATTR_PRESET_MODE] is None
assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT]
assert state.attributes[ATTR_STATE_BATTERY_LOW] is True
assert state.attributes[ATTR_STATE_DEVICE_LOCKED] == "fake_locked_device"
assert state.attributes[ATTR_STATE_HOLIDAY_MODE] == "fake_holiday"
assert state.attributes[ATTR_STATE_LOCKED] == "fake_locked"
assert state.attributes[ATTR_STATE_SUMMER_MODE] == "fake_summer"
assert state.attributes[ATTR_STATE_WINDOW_OPEN] == "fake_window"
assert state.attributes[ATTR_TEMPERATURE] == 19.5
assert state.state == HVAC_MODE_HEAT
async def test_target_temperature_on(hass: HomeAssistantType, fritz: Mock):
"""Test turn device on."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
device.target_temperature = 127.0
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.attributes[ATTR_TEMPERATURE] == 30
async def test_target_temperature_off(hass: HomeAssistantType, fritz: Mock):
"""Test turn device on."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
device.target_temperature = 126.5
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.attributes[ATTR_TEMPERATURE] == 0
async def test_update(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18
assert state.attributes[ATTR_MAX_TEMP] == 28
assert state.attributes[ATTR_MIN_TEMP] == 8
assert state.attributes[ATTR_TEMPERATURE] == 19.5
device.actual_temperature = 19
device.target_temperature = 20
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert device.update.call_count == 1
assert state
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 19
assert state.attributes[ATTR_TEMPERATURE] == 20
async def test_update_error(hass: HomeAssistantType, fritz: Mock):
"""Test update with error."""
device = FritzDeviceClimateMock()
device.update.side_effect = HTTPError("Boom")
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert device.update.call_count == 0
assert fritz().login.call_count == 1
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
assert device.update.call_count == 1
assert fritz().login.call_count == 2
async def test_set_temperature_temperature(hass: HomeAssistantType, fritz: Mock):
"""Test setting temperature by temperature."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 123},
True,
)
assert device.set_target_temperature.call_args_list == [call(123)]
async def test_set_temperature_mode_off(hass: HomeAssistantType, fritz: Mock):
"""Test setting temperature by mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_HVAC_MODE: HVAC_MODE_OFF,
ATTR_TEMPERATURE: 123,
},
True,
)
assert device.set_target_temperature.call_args_list == [call(0)]
async def test_set_temperature_mode_heat(hass: HomeAssistantType, fritz: Mock):
"""Test setting temperature by mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_TEMPERATURE,
{
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_HVAC_MODE: HVAC_MODE_HEAT,
ATTR_TEMPERATURE: 123,
},
True,
)
assert device.set_target_temperature.call_args_list == [call(22)]
async def test_set_hvac_mode_off(hass: HomeAssistantType, fritz: Mock):
"""Test setting hvac mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_OFF},
True,
)
assert device.set_target_temperature.call_args_list == [call(0)]
async def test_set_hvac_mode_heat(hass: HomeAssistantType, fritz: Mock):
"""Test setting hvac mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
True,
)
assert device.set_target_temperature.call_args_list == [call(22)]
async def test_set_preset_mode_comfort(hass: HomeAssistantType, fritz: Mock):
"""Test setting preset mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_COMFORT},
True,
)
assert device.set_target_temperature.call_args_list == [call(22)]
async def test_set_preset_mode_eco(hass: HomeAssistantType, fritz: Mock):
"""Test setting preset mode."""
device = FritzDeviceClimateMock()
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
assert await hass.services.async_call(
DOMAIN,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO},
True,
)
assert device.set_target_temperature.call_args_list == [call(16)]
async def test_preset_mode_update(hass: HomeAssistantType, fritz: Mock):
"""Test preset mode."""
device = FritzDeviceClimateMock()
device.comfort_temperature = 98
device.eco_temperature = 99
fritz().get_devices.return_value = [device]
await setup_fritzbox(hass, MOCK_CONFIG)
state = hass.states.get(ENTITY_ID)
assert state
assert state.attributes[ATTR_PRESET_MODE] is None
device.target_temperature = 98
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert device.update.call_count == 1
assert state
assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT
device.target_temperature = 99
next_update = dt_util.utcnow() + timedelta(seconds=200)
async_fire_time_changed(hass, next_update)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert device.update.call_count == 2
assert state
assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO
|
import io
import logging
import os
import re
import sys
from more_itertools import always_iterable
import cherrypy
from cherrypy._cperror import format_exc, bare_error
from cherrypy.lib import httputil
# ------------------------------ Request-handling ----------------------- #
def setup(req):
from mod_python import apache
# Run any setup functions defined by a "PythonOption cherrypy.setup"
# directive.
options = req.get_options()
if 'cherrypy.setup' in options:
for function in options['cherrypy.setup'].split():
atoms = function.split('::', 1)
if len(atoms) == 1:
mod = __import__(atoms[0], globals(), locals())
else:
modname, fname = atoms
mod = __import__(modname, globals(), locals(), [fname])
func = getattr(mod, fname)
func()
cherrypy.config.update({'log.screen': False,
'tools.ignore_headers.on': True,
'tools.ignore_headers.headers': ['Range'],
})
engine = cherrypy.engine
if hasattr(engine, 'signal_handler'):
engine.signal_handler.unsubscribe()
if hasattr(engine, 'console_control_handler'):
engine.console_control_handler.unsubscribe()
engine.autoreload.unsubscribe()
cherrypy.server.unsubscribe()
@engine.subscribe('log')
def _log(msg, level):
newlevel = apache.APLOG_ERR
if logging.DEBUG >= level:
newlevel = apache.APLOG_DEBUG
elif logging.INFO >= level:
newlevel = apache.APLOG_INFO
elif logging.WARNING >= level:
newlevel = apache.APLOG_WARNING
# On Windows, req.server is required or the msg will vanish. See
# http://www.modpython.org/pipermail/mod_python/2003-October/014291.html
# Also, "When server is not specified...LogLevel does not apply..."
apache.log_error(msg, newlevel, req.server)
engine.start()
def cherrypy_cleanup(data):
engine.exit()
try:
# apache.register_cleanup wasn't available until 3.1.4.
apache.register_cleanup(cherrypy_cleanup)
except AttributeError:
req.server.register_cleanup(req, cherrypy_cleanup)
class _ReadOnlyRequest:
expose = ('read', 'readline', 'readlines')
def __init__(self, req):
for method in self.expose:
self.__dict__[method] = getattr(req, method)
recursive = False
_isSetUp = False
def handler(req):
from mod_python import apache
try:
global _isSetUp
if not _isSetUp:
setup(req)
_isSetUp = True
# Obtain a Request object from CherryPy
local = req.connection.local_addr
local = httputil.Host(
local[0], local[1], req.connection.local_host or '')
remote = req.connection.remote_addr
remote = httputil.Host(
remote[0], remote[1], req.connection.remote_host or '')
scheme = req.parsed_uri[0] or 'http'
req.get_basic_auth_pw()
try:
# apache.mpm_query only became available in mod_python 3.1
q = apache.mpm_query
threaded = q(apache.AP_MPMQ_IS_THREADED)
forked = q(apache.AP_MPMQ_IS_FORKED)
except AttributeError:
bad_value = ("You must provide a PythonOption '%s', "
"either 'on' or 'off', when running a version "
'of mod_python < 3.1')
options = req.get_options()
threaded = options.get('multithread', '').lower()
if threaded == 'on':
threaded = True
elif threaded == 'off':
threaded = False
else:
raise ValueError(bad_value % 'multithread')
forked = options.get('multiprocess', '').lower()
if forked == 'on':
forked = True
elif forked == 'off':
forked = False
else:
raise ValueError(bad_value % 'multiprocess')
sn = cherrypy.tree.script_name(req.uri or '/')
if sn is None:
send_response(req, '404 Not Found', [], '')
else:
app = cherrypy.tree.apps[sn]
method = req.method
path = req.uri
qs = req.args or ''
reqproto = req.protocol
headers = list(req.headers_in.copy().items())
rfile = _ReadOnlyRequest(req)
prev = None
try:
redirections = []
while True:
request, response = app.get_serving(local, remote, scheme,
'HTTP/1.1')
request.login = req.user
request.multithread = bool(threaded)
request.multiprocess = bool(forked)
request.app = app
request.prev = prev
# Run the CherryPy Request object and obtain the response
try:
request.run(method, path, qs, reqproto, headers, rfile)
break
except cherrypy.InternalRedirect:
ir = sys.exc_info()[1]
app.release_serving()
prev = request
if not recursive:
if ir.path in redirections:
raise RuntimeError(
'InternalRedirector visited the same URL '
'twice: %r' % ir.path)
else:
# Add the *previous* path_info + qs to
# redirections.
if qs:
qs = '?' + qs
redirections.append(sn + path + qs)
# Munge environment and try again.
method = 'GET'
path = ir.path
qs = ir.query_string
rfile = io.BytesIO()
send_response(
req, response.output_status, response.header_list,
response.body, response.stream)
finally:
app.release_serving()
except Exception:
tb = format_exc()
cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR)
s, h, b = bare_error()
send_response(req, s, h, b)
return apache.OK
def send_response(req, status, headers, body, stream=False):
# Set response status
req.status = int(status[:3])
# Set response headers
req.content_type = 'text/plain'
for header, value in headers:
if header.lower() == 'content-type':
req.content_type = value
continue
req.headers_out.add(header, value)
if stream:
# Flush now so the status and headers are sent immediately.
req.flush()
# Set response body
for seg in always_iterable(body):
req.write(seg)
# --------------- Startup tools for CherryPy + mod_python --------------- #
try:
import subprocess
def popen(fullcmd):
p = subprocess.Popen(fullcmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
close_fds=True)
return p.stdout
except ImportError:
def popen(fullcmd):
pipein, pipeout = os.popen4(fullcmd)
return pipeout
def read_process(cmd, args=''):
fullcmd = '%s %s' % (cmd, args)
pipeout = popen(fullcmd)
try:
firstline = pipeout.readline()
cmd_not_found = re.search(
b'(not recognized|No such file|not found)',
firstline,
re.IGNORECASE
)
if cmd_not_found:
raise IOError('%s must be on your system path.' % cmd)
output = firstline + pipeout.read()
finally:
pipeout.close()
return output
class ModPythonServer(object):
template = """
# Apache2 server configuration file for running CherryPy with mod_python.
DocumentRoot "/"
Listen %(port)s
LoadModule python_module modules/mod_python.so
<Location %(loc)s>
SetHandler python-program
PythonHandler %(handler)s
PythonDebug On
%(opts)s
</Location>
"""
def __init__(self, loc='/', port=80, opts=None, apache_path='apache',
handler='cherrypy._cpmodpy::handler'):
self.loc = loc
self.port = port
        self.opts = opts or []  # start() iterates over (name, value) pairs
self.apache_path = apache_path
self.handler = handler
def start(self):
opts = ''.join([' PythonOption %s %s\n' % (k, v)
for k, v in self.opts])
conf_data = self.template % {'port': self.port,
'loc': self.loc,
'opts': opts,
'handler': self.handler,
}
mpconf = os.path.join(os.path.dirname(__file__), 'cpmodpy.conf')
        # conf_data is a str, so open the file in text mode; writing str to a
        # file opened with 'wb' raises TypeError on Python 3.
        with open(mpconf, 'w') as f:
            f.write(conf_data)
response = read_process(self.apache_path, '-k start -f %s' % mpconf)
self.ready = True
return response
    def stop(self):
        # Use the configured binary rather than a hard-coded 'apache'.
        read_process(self.apache_path, '-k stop')
        self.ready = False
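# A minimal usage sketch, assuming Apache with mod_python is installed and the
# binary is on the PATH (port, options, and the binary name are illustrative):
#
#   from cherrypy import _cpmodpy
#   server = _cpmodpy.ModPythonServer(
#       loc='/', port=8080,
#       opts=[('multithread', 'off'), ('multiprocess', 'on')])
#   server.start()   # writes cpmodpy.conf, then runs 'apache -k start -f ...'
#   ...
#   server.stop()    # runs 'apache -k stop'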
|
from datetime import date, datetime, timedelta
import logging
from pytrafikverket import TrafikverketTrain
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_NAME,
CONF_WEEKDAY,
DEVICE_CLASS_TIMESTAMP,
WEEKDAYS,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_TRAINS = "trains"
CONF_FROM = "from"
CONF_TO = "to"
CONF_TIME = "time"
ATTR_DEPARTURE_STATE = "departure_state"
ATTR_CANCELED = "canceled"
ATTR_DELAY_TIME = "number_of_minutes_delayed"
ATTR_PLANNED_TIME = "planned_time"
ATTR_ESTIMATED_TIME = "estimated_time"
ATTR_ACTUAL_TIME = "actual_time"
ATTR_OTHER_INFORMATION = "other_information"
ATTR_DEVIATIONS = "deviations"
ICON = "mdi:train"
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_TRAINS): [
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Required(CONF_FROM): cv.string,
vol.Optional(CONF_TIME): cv.time,
vol.Optional(CONF_WEEKDAY, default=WEEKDAYS): vol.All(
cv.ensure_list, [vol.In(WEEKDAYS)]
),
}
],
}
)
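# A hypothetical configuration.yaml entry matching the schema above (the
# platform name, station names, time, and API key are placeholders):
#
#   sensor:
#     - platform: trafikverket_train
#       api_key: !secret trafikverket_key
#       trains:
#         - name: Morning train
#           from: Stockholm C
#           to: Uppsala C
#           time: "07:45:00"
#           weekday: [mon, tue, wed, thu, fri]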
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the departure sensor."""
httpsession = async_get_clientsession(hass)
train_api = TrafikverketTrain(httpsession, config[CONF_API_KEY])
sensors = []
station_cache = {}
for train in config[CONF_TRAINS]:
try:
trainstops = [train[CONF_FROM], train[CONF_TO]]
for station in trainstops:
if station not in station_cache:
station_cache[station] = await train_api.async_get_train_station(
station
)
except ValueError as station_error:
if "Invalid authentication" in station_error.args[0]:
_LOGGER.error("Unable to set up up component: %s", station_error)
return
            _LOGGER.error(
                "Failed to look up stations %s to %s: %s",
                train[CONF_FROM],
                train[CONF_TO],
                station_error,
            )
continue
sensor = TrainSensor(
train_api,
train[CONF_NAME],
station_cache[train[CONF_FROM]],
station_cache[train[CONF_TO]],
train[CONF_WEEKDAY],
train.get(CONF_TIME),
)
sensors.append(sensor)
async_add_entities(sensors, update_before_add=True)
def next_weekday(fromdate, weekday):
    """Return the date when a given weekday next occurs after fromdate."""
    days_ahead = weekday - fromdate.weekday()
    if days_ahead <= 0:
        days_ahead += 7
    return fromdate + timedelta(days=days_ahead)
def next_departuredate(departure):
    """Calculate the next departure date from a list of weekday names."""
    today_date = date.today()
    today_weekday = today_date.weekday()
if WEEKDAYS[today_weekday] in departure:
return today_date
for day in departure:
next_departure = WEEKDAYS.index(day)
if next_departure > today_weekday:
return next_weekday(today_date, next_departure)
return next_weekday(today_date, WEEKDAYS.index(departure[0]))
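# Worked example, assuming today is a Wednesday (WEEKDAYS index 2):
#   next_departuredate(["mon", "fri"]) -> the coming Friday (index 4 > 2)
#   next_departuredate(["wed"])        -> today's date
#   next_departuredate(["mon", "tue"]) -> next Monday, via the wrap-around
#                                         next_weekday() fallback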
class TrainSensor(Entity):
"""Contains data about a train depature."""
def __init__(self, train_api, name, from_station, to_station, weekday, time):
"""Initialize the sensor."""
self._train_api = train_api
self._name = name
self._from_station = from_station
self._to_station = to_station
self._weekday = weekday
self._time = time
self._state = None
self._departure_state = None
self._delay_in_minutes = None
async def async_update(self):
"""Retrieve latest state."""
if self._time is not None:
departure_day = next_departuredate(self._weekday)
when = datetime.combine(departure_day, self._time)
try:
self._state = await self._train_api.async_get_train_stop(
self._from_station, self._to_station, when
)
            except ValueError as output_error:
                _LOGGER.error(
                    "Departure %s encountered a problem: %s", when, output_error
                )
                # No fresh state was fetched; skip the attribute updates below,
                # which would fail if self._state is still None.
                return
else:
when = datetime.now()
self._state = await self._train_api.async_get_next_train_stop(
self._from_station, self._to_station, when
)
self._departure_state = self._state.get_state().name
self._delay_in_minutes = self._state.get_delay_time()
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._state is None:
return None
state = self._state
other_information = None
if state.other_information is not None:
other_information = ", ".join(state.other_information)
deviations = None
if state.deviations is not None:
deviations = ", ".join(state.deviations)
        delay_in_minutes = self._delay_in_minutes
        if delay_in_minutes is not None:
            # Convert locally; mutating self._delay_in_minutes here would make
            # a second read of this property raise AttributeError (a float has
            # no total_seconds()).
            delay_in_minutes = delay_in_minutes.total_seconds() / 60
        return {
            ATTR_DEPARTURE_STATE: self._departure_state,
            ATTR_CANCELED: state.canceled,
            ATTR_DELAY_TIME: delay_in_minutes,
ATTR_PLANNED_TIME: state.advertised_time_at_location,
ATTR_ESTIMATED_TIME: state.estimated_time_at_location,
ATTR_ACTUAL_TIME: state.time_at_location,
ATTR_OTHER_INFORMATION: other_information,
ATTR_DEVIATIONS: deviations,
}
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon for the frontend."""
return ICON
@property
def state(self):
"""Return the departure state."""
state = self._state
if state is not None:
if state.time_at_location is not None:
return state.time_at_location
if state.estimated_time_at_location is not None:
return state.estimated_time_at_location
return state.advertised_time_at_location
return None
|
import importlib
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
LightEntity,
)
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
SUPPORT_BLINKT = SUPPORT_BRIGHTNESS | SUPPORT_COLOR
DEFAULT_NAME = "blinkt"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
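# A hypothetical configuration.yaml entry matching the schema above (the
# platform name is assumed):
#
#   light:
#     - platform: blinkt
#       name: blinkt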
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Blinkt Light platform."""
# pylint: disable=no-member
blinkt = importlib.import_module("blinkt")
# ensure that the lights are off when exiting
blinkt.set_clear_on_exit()
name = config[CONF_NAME]
add_entities(
[BlinktLight(blinkt, name, index) for index in range(blinkt.NUM_PIXELS)]
)
class BlinktLight(LightEntity):
"""Representation of a Blinkt! Light."""
def __init__(self, blinkt, name, index):
"""Initialize a Blinkt Light.
Default brightness and white color.
"""
self._blinkt = blinkt
self._name = f"{name}_{index}"
self._index = index
self._is_on = False
self._brightness = 255
self._hs_color = [0, 0]
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def brightness(self):
"""Read back the brightness of the light.
Returns integer in the range of 1-255.
"""
return self._brightness
@property
def hs_color(self):
"""Read back the color of the light."""
return self._hs_color
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BLINKT
@property
def is_on(self):
"""Return true if light is on."""
return self._is_on
@property
def should_poll(self):
"""Return if we should poll this device."""
return False
@property
def assumed_state(self) -> bool:
"""Return True if unable to access real state of the entity."""
return True
def turn_on(self, **kwargs):
"""Instruct the light to turn on and set correct brightness & color."""
if ATTR_HS_COLOR in kwargs:
self._hs_color = kwargs[ATTR_HS_COLOR]
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
percent_bright = self._brightness / 255
rgb_color = color_util.color_hs_to_RGB(*self._hs_color)
self._blinkt.set_pixel(
self._index, rgb_color[0], rgb_color[1], rgb_color[2], percent_bright
)
self._blinkt.show()
self._is_on = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
self._blinkt.set_pixel(self._index, 0, 0, 0, 0)
self._blinkt.show()
self._is_on = False
self.schedule_update_ha_state()
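# Example of the conversions applied in turn_on() (values illustrative):
#   color_util.color_hs_to_RGB(0, 0) -> (255, 255, 255)            # white
#   brightness 128 -> 128 / 255 ~= 0.5, on the 0.0-1.0 scale that
#   blinkt.set_pixel() expects for its brightness argument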
|