import codecs
import logging
import os
import chardet
import pysrt
from six import text_type
logger = logging.getLogger(__name__)
#: Subtitle extensions
SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl')
class Subtitle(object):
"""Base class for subtitle.
:param language: language of the subtitle.
:type language: :class:`~babelfish.language.Language`
:param bool hearing_impaired: whether or not the subtitle is hearing impaired.
:param page_link: URL of the web page from which the subtitle can be downloaded.
:type page_link: str
:param encoding: Text encoding of the subtitle.
:type encoding: str
"""
#: Name of the provider that returns that class of subtitle
provider_name = ''
def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None):
#: Language of the subtitle
self.language = language
#: Whether or not the subtitle is hearing impaired
self.hearing_impaired = hearing_impaired
#: URL of the web page from which the subtitle can be downloaded
self.page_link = page_link
#: Content as bytes
self.content = None
#: Encoding to decode with when accessing :attr:`text`
self.encoding = None
# validate the encoding
if encoding:
try:
self.encoding = codecs.lookup(encoding).name
except (TypeError, LookupError):
logger.debug('Unsupported encoding %s', encoding)
@property
def id(self):
"""Unique identifier of the subtitle"""
raise NotImplementedError
@property
def info(self):
"""Info of the subtitle, human readable. Usually the subtitle name for GUI rendering"""
raise NotImplementedError
@property
def text(self):
"""Content as string
If :attr:`encoding` is None, the encoding is guessed with :meth:`guess_encoding`
"""
if not self.content:
return
if not isinstance(self.content, text_type):
if self.encoding:
return self.content.decode(self.encoding, errors='replace')
guessed_encoding = self.guess_encoding()
if guessed_encoding:
return self.content.decode(guessed_encoding, errors='replace')
return None
return self.content
def is_valid(self):
"""Check if a :attr:`text` is a valid SubRip format.
:return: whether or not the subtitle is valid.
:rtype: bool
"""
if not self.text:
return False
try:
pysrt.from_string(self.text, error_handling=pysrt.ERROR_RAISE)
except pysrt.Error as e:
if e.args[0] < 80:
return False
return True
def guess_encoding(self):
"""Guess encoding using the language, falling back on chardet.
:return: the guessed encoding.
:rtype: str
"""
logger.info('Guessing encoding for language %s', self.language)
# always try utf-8 first
encodings = ['utf-8']
# add language-specific encodings
if self.language.alpha3 == 'zho':
encodings.extend(['gb18030', 'big5'])
elif self.language.alpha3 == 'jpn':
encodings.append('shift-jis')
elif self.language.alpha3 == 'ara':
encodings.append('windows-1256')
elif self.language.alpha3 == 'heb':
encodings.append('windows-1255')
elif self.language.alpha3 == 'tur':
encodings.extend(['iso-8859-9', 'windows-1254'])
elif self.language.alpha3 == 'pol':
# Eastern European Group 1
encodings.extend(['windows-1250'])
elif self.language.alpha3 == 'bul':
# Eastern European Group 2
encodings.extend(['windows-1251'])
else:
# Western European (windows-1252)
encodings.append('latin-1')
# try to decode
logger.debug('Trying encodings %r', encodings)
for encoding in encodings:
try:
self.content.decode(encoding)
except UnicodeDecodeError:
pass
else:
logger.info('Guessed encoding %s', encoding)
return encoding
logger.warning('Could not guess encoding from language')
# fallback on chardet
encoding = chardet.detect(self.content)['encoding']
logger.info('Chardet found encoding %s', encoding)
return encoding
def get_path(self, video, single=False):
"""Get the subtitle path using the `video`, `language` and `extension`.
:param video: path to the video.
:type video: :class:`~subliminal.video.Video`
:param bool single: save a single subtitle, default is to save one subtitle per language.
:return: path of the subtitle.
:rtype: str
"""
return get_subtitle_path(video.name, None if single else self.language)
def get_matches(self, video):
"""Get the matches against the `video`.
:param video: the video to get the matches with.
:type video: :class:`~subliminal.video.Video`
:return: matches of the subtitle.
:rtype: set
"""
raise NotImplementedError
def __hash__(self):
return hash(self.provider_name + '-' + self.id)
def __repr__(self):
return '<%s %r [%s]>' % (self.__class__.__name__, self.id, self.language)
def get_subtitle_path(video_path, language=None, extension='.srt'):
"""Get the subtitle path using the `video_path` and `language`.
:param str video_path: path to the video.
:param language: language of the subtitle to put in the path.
:type language: :class:`~babelfish.language.Language`
:param str extension: extension of the subtitle.
:return: path of the subtitle.
:rtype: str
"""
subtitle_root = os.path.splitext(video_path)[0]
if language:
subtitle_root += '.' + str(language)
return subtitle_root + extension
def fix_line_ending(content):
"""Fix line ending of `content` by changing it to \n.
:param bytes content: content of the subtitle.
:return: the content with fixed line endings.
:rtype: bytes
"""
return content.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
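# A minimal usage sketch (not part of the library; hypothetical paths) illustrating the
# two module-level helpers above. The language-specific suffix produced by
# get_subtitle_path depends on str(language) for the given babelfish Language object,
# so only the language-less case is asserted here.
if __name__ == '__main__':
    # CRLF and lone CR are both normalized to LF
    assert fix_line_ending(b'1\r\n00:00:01 --> 00:00:02\r\nHello\r') == b'1\n00:00:01 --> 00:00:02\nHello\n'
    # Without a language, the subtitle path is the video path with a .srt extension
    assert get_subtitle_path('/videos/movie.mkv', None) == '/videos/movie.srt'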
|
class DefaultSettings:
def _setting(self, name, default=None):
from django.conf import settings
return getattr(settings, name, default)
@property
def SHOP_APP_LABEL(self):
"""
The name of the project implementing the shop, for instance ``myshop``.
This is required to assign the abstract shop models to a project. There is no default.
"""
from django.core.exceptions import ImproperlyConfigured
result = self._setting('SHOP_APP_LABEL')
if not result:
raise ImproperlyConfigured("SHOP_APP_LABEL setting must be set")
return result
@property
def SHOP_DEFAULT_CURRENCY(self):
"""
The default currency this shop is working with. The default is ``EUR``.
.. note:: All model and form input fields can be specified for any other currency; this
setting is only used if the supplied currency is missing.
"""
return self._setting('SHOP_DEFAULT_CURRENCY', 'EUR')
@property
def SHOP_VENDOR_EMAIL(self):
"""
The vendor's email addresses, unless specified through the ``Order`` object.
"""
try:
default_email = self._setting('ADMINS')[0][1]
except IndexError:
default_email = None
return self._setting('SHOP_VENDOR_EMAIL', default_email)
@property
def SHOP_MONEY_FORMAT(self):
"""
When rendering an amount of type Money, use this format.
Possible placeholders are:
* ``{symbol}``: This is replaced by €, $, £, etc.
* ``{currency}``: This is replaced by Euro, US Dollar, Pound Sterling, etc.
* ``{code}``: This is replaced by EUR, USD, GBP, etc.
* ``{amount}``: The localized amount.
* ``{minus}``: Only for negative amounts, where to put the ``-`` sign.
For further information about formatting currency amounts, please refer to
https://docs.microsoft.com/en-us/globalization/locale/currency-formatting
"""
return self._setting('SHOP_MONEY_FORMAT', '{minus}{symbol} {amount}')
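# Example (hypothetical amounts): with the default format '{minus}{symbol} {amount}',
# an amount of minus one euro and fifty cents renders roughly as '-€ 1.50', whereas a
# format of '{minus}{amount} {code}' would render it as '-1.50 EUR'.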
@property
def SHOP_DECIMAL_PLACES(self):
"""
Number of decimal places for the internal representation of a price.
This is purely used by the Django admin and is not the number of digits
visible to the customer.
Defaults to 2.
"""
return self._setting('SHOP_DECIMAL_PLACES', 2)
@property
def SHOP_CUSTOMER_SERIALIZER(self):
"""
Depending on the materialized customer model, use this directive to configure the
customer serializer.
Defaults to :class:`shop.serializers.defaults.customer.CustomerSerializer`.
"""
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
from shop.serializers.bases import BaseCustomerSerializer
s = self._setting('SHOP_CUSTOMER_SERIALIZER', 'shop.serializers.defaults.customer.CustomerSerializer')
CustomerSerializer = import_string(s)
if not issubclass(CustomerSerializer, BaseCustomerSerializer):
raise ImproperlyConfigured(
"Serializer class must inherit from 'BaseCustomerSerializer'.")
return CustomerSerializer
@property
def SHOP_PRODUCT_SUMMARY_SERIALIZER(self):
"""
Serialize the smallest common denominator of all Product models available in this shop.
This serialized data is then used for Catalog List Views, Cart List Views and Order List
Views.
Defaults to :class:`shop.serializers.defaults.product_summary.ProductSummarySerializer`.
"""
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
from shop.serializers.bases import ProductSerializer
s = self._setting('SHOP_PRODUCT_SUMMARY_SERIALIZER',
'shop.serializers.defaults.product_summary.ProductSummarySerializer')
ProductSummarySerializer = import_string(s)
if not issubclass(ProductSummarySerializer, ProductSerializer):
msg = "class {} specified in SHOP_PRODUCT_SUMMARY_SERIALIZER must inherit from 'ProductSerializer'."
raise ImproperlyConfigured(msg.format(s))
return ProductSummarySerializer
@property
def SHOP_PRODUCT_SELECT_SERIALIZER(self):
"""
This serializer is only used by the plugin editors, when selecting a product using a
drop down menu with auto-completion.
Defaults to :class:`shop.serializers.defaults.product_select.ProductSelectSerializer`.
"""
from django.utils.module_loading import import_string
s = self._setting('SHOP_PRODUCT_SELECT_SERIALIZER',
'shop.serializers.defaults.product_select.ProductSelectSerializer')
ProductSelectSerializer = import_string(s)
return ProductSelectSerializer
@property
def SHOP_MAX_PURCHASE_QUANTITY(self):
"""
The default maximum number of items a customer can add to his cart per product type.
"""
return self._setting('SHOP_MAX_PURCHASE_QUANTITY', 99)
@property
def SHOP_SELL_SHORT_PERIOD(self):
"""
The time period (in seconds or as a timedelta) from the current timestamp during which a
product is considered available even though it is not in stock, because it is scheduled
to be added to the inventory.
"""
from datetime import timedelta
from django.core.exceptions import ImproperlyConfigured
period = self._setting('SHOP_SELL_SHORT_PERIOD', 7 * 24 * 3600)
if isinstance(period, int):
period = timedelta(seconds=period)
elif not isinstance(period, timedelta):
raise ImproperlyConfigured("'SHOP_SELL_SHORT_PERIOD' contains an invalid property.")
return period
@property
def SHOP_LIMITED_OFFER_PERIOD(self):
"""
The time period (in seconds or as a timedelta) from the current timestamp during which a
product is marked as a limited-time offer.
"""
from datetime import timedelta
from django.core.exceptions import ImproperlyConfigured
period = self._setting('SHOP_LIMITED_OFFER_PERIOD', 7 * 24 * 3600)
if isinstance(period, int):
period = timedelta(seconds=period)
elif not isinstance(period, timedelta):
raise ImproperlyConfigured("'SHOP_LIMITED_OFFER_PERIOD' contains an invalid property.")
return period
@property
def SHOP_LINK_TO_EMPTY_CART(self):
"""
If ``True``, the link on the cart icon pointing to the cart is enabled, even if there
are no items in the cart.
"""
return self._setting('SHOP_LINK_TO_EMPTY_CART', True)
@property
def SHOP_ORDER_ITEM_SERIALIZER(self):
"""
Depending on the materialized OrderItem model, use this directive to configure the
serializer.
Defaults to :class:`shop.serializers.defaults.order_item.OrderItemSerializer`.
"""
from django.core.exceptions import ImproperlyConfigured
from django.utils.module_loading import import_string
from shop.serializers.bases import BaseOrderItemSerializer
s = self._setting('SHOP_ORDER_ITEM_SERIALIZER',
'shop.serializers.defaults.order_item.OrderItemSerializer')
OrderItemSerializer = import_string(s)
if not issubclass(OrderItemSerializer, BaseOrderItemSerializer):
raise ImproperlyConfigured(
"Serializer class must inherit from 'BaseOrderItemSerializer'.")
return OrderItemSerializer
@property
def SHOP_CART_MODIFIERS(self):
"""
Specifies the list of :ref:`reference/cart-modifiers`. They are applied to each cart item
and to the cart's final sums.
This list typically starts with ``'shop.modifiers.defaults.DefaultCartModifier'`` as its first entry,
followed by other cart modifiers.
"""
from django.utils.module_loading import import_string
cart_modifiers = self._setting('SHOP_CART_MODIFIERS', ['shop.modifiers.defaults.DefaultCartModifier'])
return [import_string(mc) for mc in cart_modifiers]
@property
def SHOP_VALUE_ADDED_TAX(self):
"""
Use this convenience setting if you can apply the same tax rate to all products
and you use one of the default tax modifiers :class:`shop.modifiers.taxes.CartIncludeTaxModifier`
or :class:`shop.modifiers.taxes.CartExcludedTaxModifier`.
If your products require individual tax rates or you ship into states with different tax rates,
then you must provide your own tax modifier.
"""
from decimal import Decimal
return self._setting('SHOP_VALUE_ADDED_TAX', Decimal('20'))
@property
def SHOP_ORDER_WORKFLOWS(self):
"""
Specifies a list of :ref:`reference/order-workflows`. Order workflows are applied after
an order has been created and conduct the vendor through the steps of receiving the payments
until fulfilling the shipment.
"""
from django.utils.module_loading import import_string
order_workflows = self._setting('SHOP_ORDER_WORKFLOWS', [])
return [import_string(mc) for mc in order_workflows]
@property
def SHOP_ADD2CART_NG_MODEL_OPTIONS(self):
"""
Used to configure the update behavior when changing the quantity of a product, in the product's
detail view after adding it to the cart. For more information refer to the documentation of the
NgModelOptions_ directive in the AngularJS reference.
.. _NgModelOptions: https://code.angularjs.org/1.5.9/docs/api/ng/directive/ngModelOptions
"""
return self._setting('SHOP_ADD2CART_NG_MODEL_OPTIONS',
"{updateOn: 'default blur', debounce: {'default': 500, 'blur': 0}}")
@property
def SHOP_EDITCART_NG_MODEL_OPTIONS(self):
"""
Used to configure the update behavior when changing the quantity of a cart item, in the cart's
edit view. For more information refer to the documentation of the
NgModelOptions_ directive in the AngularJS reference.
"""
return self._setting('SHOP_EDITCART_NG_MODEL_OPTIONS',
"{updateOn: 'default blur', debounce: {'default': 500, 'blur': 0}}")
@property
def SHOP_GUEST_IS_ACTIVE_USER(self):
"""
If this directive is ``True``, customers who declared themselves as guests may request
a password reset, so that they can log into their account at a later time. In that case
it also makes sense to make the ``email`` field in model ``email_auth.User`` unique.
The default is ``False``.
"""
return self._setting('SHOP_GUEST_IS_ACTIVE_USER', False)
@property
def SHOP_OVERRIDE_SHIPPING_METHOD(self):
"""
If this directive is ``True``, the merchant is allowed to override the shipping method the
customer has chosen while performing the checkout.
Note that if the alternative shipping method is more expensive, the merchant usually has
to cover the additional costs.
The default is ``False``.
"""
return self._setting('SHOP_OVERRIDE_SHIPPING_METHOD', False)
@property
def SHOP_CACHE_DURATIONS(self):
"""
In the product's list views, HTML snippets are created for the summary representation of
each product.
By default these snippets are cached for one day.
"""
result = self._setting('SHOP_CACHE_DURATIONS') or {}
result.setdefault('product_html_snippet', 86400)
return result
@property
def SHOP_DIALOG_FORMS(self):
"""
Specify a list of dialog forms available in our :class:`shop.views.checkout.CheckoutViewSet`.
This allows the usage of the endpoint ``resolve('shop:checkout-upload')`` in a generic way.
If Cascade plugins are used for the forms in the checkout view, this list can be empty.
"""
return self._setting('SHOP_DIALOG_FORMS', [])
@property
def SHOP_CASCADE_FORMS(self):
"""
Specify a map of Django Form classes to be used by the Cascade plugins for the checkout
view. Override this map if the Cascade plugins should use forms other than the ones
provided.
"""
cascade_forms = {
'CustomerForm': 'shop.forms.checkout.CustomerForm',
'GuestForm': 'shop.forms.checkout.GuestForm',
'ShippingAddressForm': 'shop.forms.checkout.ShippingAddressForm',
'BillingAddressForm': 'shop.forms.checkout.BillingAddressForm',
'PaymentMethodForm': 'shop.forms.checkout.PaymentMethodForm',
'ShippingMethodForm': 'shop.forms.checkout.ShippingMethodForm',
'ExtraAnnotationForm': 'shop.forms.checkout.ExtraAnnotationForm',
'AcceptConditionForm': 'shop.forms.checkout.AcceptConditionForm',
'RegisterUserForm': 'shop.forms.auth.RegisterUserForm',
}
cascade_forms.update(self._setting('SHOP_CASCADE_FORMS', {}))
return cascade_forms
def __getattr__(self, key):
if not key.startswith('SHOP_'):
key = 'SHOP_' + key
return self.__getattribute__(key)
app_settings = DefaultSettings()
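# Usage sketch (assuming a configured Django settings module): attribute access is routed
# through __getattr__, so the 'SHOP_' prefix may be omitted and both lines below resolve
# to the same property.
#
#     app_settings.SHOP_DEFAULT_CURRENCY  # 'EUR' unless overridden in the project settings
#     app_settings.DEFAULT_CURRENCY       # same value; 'SHOP_' is prepended automatically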
|
from unittest import mock
import pytest
from gi.repository import Gtk
@pytest.mark.parametrize("text, ignored_ranges, expected_text", [
# 0123456789012345678901234567890123456789012345678901234567890123456789
# Matching without groups
(
"# asdasdasdasdsad",
[(0, 17)],
"",
),
# Matching with single group
(
"asdasdasdasdsab",
[(1, 14)],
"ab",
),
# Matching with multiple groups
(
"xasdyasdz",
[(1, 4), (5, 8)],
"xyz",
),
# Matching with multiple partially overlapping filters
(
"qaqxqbyqzq",
[(2, 6), (7, 8)],
"qayzq",
),
# Matching with multiple fully overlapping filters
(
"qaqxqybqzq",
[(2, 8)],
"qazq",
),
# Matching with and without groups, with single dominated match
(
"# asdasdasdasdsab",
[(0, 17)],
"",
),
# Matching with and without groups, with partially overlapping filters
(
"/*a*/ub",
[(0, 6)],
"b",
),
# Non-matching with groups
(
"xasdyasdx",
[],
"xasdyasdx",
),
# Multiple lines with non-overlapping filters
(
"#ab\na2b",
[(0, 3), (5, 6)],
"\nab",
),
# CVS keyword
(
"$Author: John Doe $",
[(8, 18)],
"$Author:$",
),
])
def test_filter_text(text, ignored_ranges, expected_text):
from meld.filediff import FileDiff
from meld.filters import FilterEntry
filter_patterns = [
'#.*',
r'/\*.*\*/',
'a(.*)b',
'x(.*)y(.*)z',
r'\$\w+:([^\n$]+)\$'
]
filters = [
FilterEntry.new_from_gsetting(("name", True, f), FilterEntry.REGEX)
for f in filter_patterns
]
filediff = mock.MagicMock()
filediff.text_filters = filters
filter_text = FileDiff._filter_text
buf = Gtk.TextBuffer()
buf.create_tag("inline")
buf.create_tag("dimmed")
buf.set_text(text)
start, end = buf.get_bounds()
text = filter_text(
filediff, buf.get_text(start, end, False), buf, start, end)
# Find ignored ranges
tag = buf.get_tag_table().lookup("dimmed")
toggles = []
it = start.copy()
if it.toggles_tag(tag):
toggles.append(it.get_offset())
while it.forward_to_tag_toggle(tag):
toggles.append(it.get_offset())
toggles = list(zip(toggles[::2], toggles[1::2]))
print("Text:", text)
print("Toggles:", toggles)
assert toggles == ignored_ranges
assert text == expected_text
|
import logging
import os
import voluptuous as vol
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_FOLDER = "folder"
CONF_PATTERNS = "patterns"
DEFAULT_PATTERN = "*"
DOMAIN = "folder_watcher"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_FOLDER): cv.isdir,
vol.Optional(CONF_PATTERNS, default=[DEFAULT_PATTERN]): vol.All(
cv.ensure_list, [cv.string]
),
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
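# Example configuration.yaml entry matching the schema above (hypothetical folder path):
#
#   folder_watcher:
#     - folder: /config/watched
#       patterns:
#         - "*.yaml"
#         - "*.txt"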
def setup(hass, config):
"""Set up the folder watcher."""
conf = config[DOMAIN]
for watcher in conf:
path = watcher[CONF_FOLDER]
patterns = watcher[CONF_PATTERNS]
if not hass.config.is_allowed_path(path):
_LOGGER.error("folder %s is not valid or allowed", path)
return False
Watcher(path, patterns, hass)
return True
def create_event_handler(patterns, hass):
"""Return the Watchdog EventHandler object."""
class EventHandler(PatternMatchingEventHandler):
"""Class for handling Watcher events."""
def __init__(self, patterns, hass):
"""Initialise the EventHandler."""
super().__init__(patterns)
self.hass = hass
def process(self, event):
"""On Watcher event, fire HA event."""
_LOGGER.debug("process(%s)", event)
if not event.is_directory:
folder, file_name = os.path.split(event.src_path)
self.hass.bus.fire(
DOMAIN,
{
"event_type": event.event_type,
"path": event.src_path,
"file": file_name,
"folder": folder,
},
)
def on_modified(self, event):
"""File modified."""
self.process(event)
def on_moved(self, event):
"""File moved."""
self.process(event)
def on_created(self, event):
"""File created."""
self.process(event)
def on_deleted(self, event):
"""File deleted."""
self.process(event)
return EventHandler(patterns, hass)
class Watcher:
"""Class for starting Watchdog."""
def __init__(self, path, patterns, hass):
"""Initialise the watchdog observer."""
self._observer = Observer()
self._observer.schedule(
create_event_handler(patterns, hass), path, recursive=True
)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, self.startup)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self.shutdown)
def startup(self, event):
"""Start the watcher."""
self._observer.start()
def shutdown(self, event):
"""Shutdown the watcher."""
self._observer.stop()
self._observer.join()
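# Sketch of consuming the events fired by the handler above (assuming a running hass
# instance); event.data carries the keys set in EventHandler.process().
#
#     def handle_folder_event(event):
#         _LOGGER.info("Saw %s on %s", event.data["event_type"], event.data["path"])
#
#     hass.bus.listen(DOMAIN, handle_folder_event)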
|
import unittest
import geopandas
class TestGeopandas(unittest.TestCase):
def test_read(self):
df = geopandas.read_file(geopandas.datasets.get_path('nybb'))
self.assertTrue(df.size > 1)
def test_spatial_join(self):
cities = geopandas.read_file(geopandas.datasets.get_path('naturalearth_cities'))
world = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'))
countries = world[['geometry', 'name']]
countries = countries.rename(columns={'name':'country'})
cities_with_country = geopandas.sjoin(cities, countries, how="inner", op='intersects')
self.assertTrue(cities_with_country.size > 1)
|
from homeassistant.components import speedtestdotnet
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.speedtestdotnet.const import DEFAULT_NAME, SENSOR_TYPES
from . import MOCK_RESULTS, MOCK_SERVERS, MOCK_STATES
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_speedtestdotnet_sensors(hass):
"""Test sensors created for speedtestdotnet integration."""
entry = MockConfigEntry(domain=speedtestdotnet.DOMAIN, data={})
entry.add_to_hass(hass)
with patch("speedtest.Speedtest") as mock_api:
mock_api.return_value.get_best_server.return_value = MOCK_SERVERS[1][0]
mock_api.return_value.results.dict.return_value = MOCK_RESULTS
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3
for sensor_type in SENSOR_TYPES:
sensor = hass.states.get(
f"sensor.{DEFAULT_NAME}_{SENSOR_TYPES[sensor_type][0]}"
)
assert sensor.state == MOCK_STATES[sensor_type]
|
import asyncio
import datetime
import itertools
import logging
from pathlib import Path
from typing import Optional
import lavalink
from redbot.core.data_manager import cog_data_path
from redbot.core.i18n import Translator
from redbot.core.utils._internal_utils import send_to_owners_with_prefix_replaced
from redbot.core.utils.dbtools import APSWConnectionWrapper
from ...apis.interface import AudioAPIInterface
from ...apis.playlist_wrapper import PlaylistWrapper
from ...audio_logging import debug_exc_log
from ...utils import task_callback
from ..abc import MixinMeta
from ..cog_utils import _OWNER_NOTIFICATION, _SCHEMA_VERSION, CompositeMetaClass
log = logging.getLogger("red.cogs.Audio.cog.Tasks.startup")
_ = Translator("Audio", Path(__file__))
class StartUpTasks(MixinMeta, metaclass=CompositeMetaClass):
def start_up_task(self):
# There has to be a task since this requires the bot to be ready
# If it waits for ready in startup, we cause a deadlock during initial load
# as initial load happens before the bot can ever be ready.
self.cog_init_task = self.bot.loop.create_task(self.initialize())
self.cog_init_task.add_done_callback(task_callback)
async def initialize(self) -> None:
await self.bot.wait_until_red_ready()
# Unlike most cases, we want the cache to exist before migration.
try:
await self.maybe_message_all_owners()
self.db_conn = APSWConnectionWrapper(
str(cog_data_path(self.bot.get_cog("Audio")) / "Audio.db")
)
self.api_interface = AudioAPIInterface(
self.bot, self.config, self.session, self.db_conn, self.bot.get_cog("Audio")
)
self.playlist_api = PlaylistWrapper(self.bot, self.config, self.db_conn)
await self.playlist_api.init()
await self.api_interface.initialize()
self.global_api_user = await self.api_interface.global_cache_api.get_perms()
await self.data_schema_migration(
from_version=await self.config.schema_version(), to_version=_SCHEMA_VERSION
)
await self.playlist_api.delete_scheduled()
await self.api_interface.persistent_queue_api.delete_scheduled()
self.lavalink_restart_connect()
self.player_automated_timer_task = self.bot.loop.create_task(
self.player_automated_timer()
)
self.player_automated_timer_task.add_done_callback(task_callback)
except Exception as err:
log.exception("Audio failed to start up, please report this issue.", exc_info=err)
raise err
self.cog_ready_event.set()
async def restore_players(self):
tries = 0
tracks_to_restore = await self.api_interface.persistent_queue_api.fetch_all()
await asyncio.sleep(10)
for guild_id, track_data in itertools.groupby(tracks_to_restore, key=lambda x: x.guild_id):
await asyncio.sleep(0)
try:
player: Optional[lavalink.Player]
track_data = list(track_data)
guild = self.bot.get_guild(guild_id)
persist_cache = self._persist_queue_cache.setdefault(
guild_id, await self.config.guild(guild).persist_queue()
)
if not persist_cache:
await self.api_interface.persistent_queue_api.drop(guild_id)
continue
if self.lavalink_connection_aborted:
player = None
else:
try:
player = lavalink.get_player(guild_id)
except IndexError:
player = None
except KeyError:
player = None
vc = 0
if player is None:
while tries < 25 and vc is not None:
try:
vc = guild.get_channel(track_data[-1].room_id)
if not vc:
break
perms = vc.permissions_for(guild.me)
if not (perms.connect and perms.speak):
vc = None
break
await lavalink.connect(vc)
player = lavalink.get_player(guild.id)
player.store("connect", datetime.datetime.utcnow())
player.store("guild", guild_id)
await self.self_deafen(player)
break
except IndexError:
await asyncio.sleep(5)
tries += 1
except Exception as exc:
debug_exc_log(log, exc, "Failed to restore music voice channel")
if vc is None:
break
if tries >= 25 or guild is None or vc is None:
await self.api_interface.persistent_queue_api.drop(guild_id)
continue
shuffle = await self.config.guild(guild).shuffle()
repeat = await self.config.guild(guild).repeat()
volume = await self.config.guild(guild).volume()
shuffle_bumped = await self.config.guild(guild).shuffle_bumped()
player.repeat = repeat
player.shuffle = shuffle
player.shuffle_bumped = shuffle_bumped
if player.volume != volume:
await player.set_volume(volume)
for track in track_data:
track = track.track_object
player.add(guild.get_member(track.extras.get("requester")) or guild.me, track)
player.maybe_shuffle()
if not player.is_playing:
await player.play()
except Exception as err:
debug_exc_log(log, err, f"Error restoring player in {guild_id}")
await self.api_interface.persistent_queue_api.drop(guild_id)
async def maybe_message_all_owners(self):
current_notification = await self.config.owner_notification()
if current_notification == _OWNER_NOTIFICATION:
return
if current_notification < 1 <= _OWNER_NOTIFICATION:
msg = _(
"""Hello, this message brings you an important update regarding the core Audio cog:
Starting from Audio v2.3.0+ you can take advantage of the **Global Audio API**, a new service offered by the Cog-Creators organization that allows your bot to greatly reduce the amount of requests done to YouTube / Spotify. This reduces the likelihood of YouTube rate-limiting your bot for making requests too often.
See `[p]help audioset globalapi` for more information.
Access to this service is disabled by default and **requires you to explicitly opt-in** to start using it.
An access token is **required** to use this API. To obtain this token you may join <https://discord.gg/red> and run `?audioapi register` in the #testing channel.
Note: by using this service you accept that your bot's IP address will be disclosed to the Cog-Creators organization and used only for the purpose of providing the Global API service.
On a related note, it is highly recommended that you enable your local cache if you haven't yet.
To do so, run `[p]audioset cache 5`. This cache, which stores only metadata, will make repeated audio requests faster and further reduce the likelihood of YouTube rate-limiting your bot. Since it's only metadata the required disk space for this cache is expected to be negligible."""
)
await send_to_owners_with_prefix_replaced(self.bot, msg)
await self.config.owner_notification.set(1)
|
import datetime
import json
import os.path
import sys
import requests
URL = 'https://api.github.com/repos/RaRe-Technologies/gensim'
def summarize_prs(since_version):
"""Go through all closed PRs, summarize those merged after the previous release.
Yields one-line summaries of each relevant PR as a string.
"""
releases = requests.get(URL + '/releases').json()
most_recent_release = releases[0]['tag_name']
assert most_recent_release == since_version, 'unexpected most_recent_release: %r' % most_recent_release
published_at = releases[0]['published_at']
pulls = requests.get(URL + '/pulls', params={'state': 'closed'}).json()
for pr in pulls:
merged_at = pr['merged_at']
if merged_at is None or merged_at < published_at:
continue
summary = "* {msg} (__[{author}]({author_url})__, [#{pr}]({purl}))".format(
msg=pr['title'],
author=pr['user']['login'],
author_url=pr['user']['html_url'],
pr=pr['number'],
purl=pr['html_url'],
)
print(summary)
yield summary
def main():
root = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
previous_version, new_version = sys.argv[1:3]
path = os.path.join(root, 'CHANGELOG.md')
with open(path) as fin:
contents = fin.read().split('\n')
header, contents = contents[:2], contents[2:]
header.append('## %s, %s\n' % (new_version, datetime.date.today().isoformat()))
header.append("""
### :star2: New Features
### :red_circle: Bug fixes
### :books: Tutorial and doc improvements
### :+1: Improvements
### :warning: Deprecations (will be removed in the next major release)
**COPY-PASTE DEPRECATIONS FROM THE PREVIOUS RELEASE HERE**
Please organize the PR summaries from below into the above sections
You may remove empty sections. Be sure to include all deprecations.
""")
header += list(summarize_prs(previous_version))
with open(path, 'w') as fout:
fout.write('\n'.join(header + contents))
if __name__ == '__main__':
main()
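# Example invocation (hypothetical script name and version numbers):
#
#     python release/update_changelog.py 3.8.2 3.8.3
#
# where the first argument is the tag of the most recently published release and the
# second is the version being prepared; summarize_prs() asserts that the first argument
# matches the latest GitHub release.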
|
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ID,
CONF_NAME,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
PERCENTAGE,
POWER_WATT,
STATE_CLOSED,
STATE_OPEN,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from .device import EnOceanEntity
CONF_MAX_TEMP = "max_temp"
CONF_MIN_TEMP = "min_temp"
CONF_RANGE_FROM = "range_from"
CONF_RANGE_TO = "range_to"
DEFAULT_NAME = "EnOcean sensor"
SENSOR_TYPE_HUMIDITY = "humidity"
SENSOR_TYPE_POWER = "powersensor"
SENSOR_TYPE_TEMPERATURE = "temperature"
SENSOR_TYPE_WINDOWHANDLE = "windowhandle"
SENSOR_TYPES = {
SENSOR_TYPE_HUMIDITY: {
"name": "Humidity",
"unit": PERCENTAGE,
"icon": "mdi:water-percent",
"class": DEVICE_CLASS_HUMIDITY,
},
SENSOR_TYPE_POWER: {
"name": "Power",
"unit": POWER_WATT,
"icon": "mdi:power-plug",
"class": DEVICE_CLASS_POWER,
},
SENSOR_TYPE_TEMPERATURE: {
"name": "Temperature",
"unit": TEMP_CELSIUS,
"icon": "mdi:thermometer",
"class": DEVICE_CLASS_TEMPERATURE,
},
SENSOR_TYPE_WINDOWHANDLE: {
"name": "WindowHandle",
"unit": None,
"icon": "mdi:window",
"class": None,
},
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE_CLASS, default=SENSOR_TYPE_POWER): cv.string,
vol.Optional(CONF_MAX_TEMP, default=40): vol.Coerce(int),
vol.Optional(CONF_MIN_TEMP, default=0): vol.Coerce(int),
vol.Optional(CONF_RANGE_FROM, default=255): cv.positive_int,
vol.Optional(CONF_RANGE_TO, default=0): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an EnOcean sensor device."""
dev_id = config.get(CONF_ID)
dev_name = config.get(CONF_NAME)
sensor_type = config.get(CONF_DEVICE_CLASS)
if sensor_type == SENSOR_TYPE_TEMPERATURE:
temp_min = config.get(CONF_MIN_TEMP)
temp_max = config.get(CONF_MAX_TEMP)
range_from = config.get(CONF_RANGE_FROM)
range_to = config.get(CONF_RANGE_TO)
add_entities(
[
EnOceanTemperatureSensor(
dev_id, dev_name, temp_min, temp_max, range_from, range_to
)
]
)
elif sensor_type == SENSOR_TYPE_HUMIDITY:
add_entities([EnOceanHumiditySensor(dev_id, dev_name)])
elif sensor_type == SENSOR_TYPE_POWER:
add_entities([EnOceanPowerSensor(dev_id, dev_name)])
elif sensor_type == SENSOR_TYPE_WINDOWHANDLE:
add_entities([EnOceanWindowHandle(dev_id, dev_name)])
class EnOceanSensor(EnOceanEntity, RestoreEntity):
"""Representation of an EnOcean sensor device such as a power meter."""
def __init__(self, dev_id, dev_name, sensor_type):
"""Initialize the EnOcean sensor device."""
super().__init__(dev_id, dev_name)
self._sensor_type = sensor_type
self._device_class = SENSOR_TYPES[self._sensor_type]["class"]
self._dev_name = f"{SENSOR_TYPES[self._sensor_type]['name']} {dev_name}"
self._unit_of_measurement = SENSOR_TYPES[self._sensor_type]["unit"]
self._icon = SENSOR_TYPES[self._sensor_type]["icon"]
self._state = None
@property
def name(self):
"""Return the name of the device."""
return self._dev_name
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
async def async_added_to_hass(self):
"""Call when entity about to be added to hass."""
# If not None, we got an initial value.
await super().async_added_to_hass()
if self._state is not None:
return
state = await self.async_get_last_state()
if state is not None:
self._state = state.state
def value_changed(self, packet):
"""Update the internal state of the sensor."""
class EnOceanPowerSensor(EnOceanSensor):
"""Representation of an EnOcean power sensor.
EEPs (EnOcean Equipment Profiles):
- A5-12-01 (Automated Meter Reading, Electricity)
"""
def __init__(self, dev_id, dev_name):
"""Initialize the EnOcean power sensor device."""
super().__init__(dev_id, dev_name, SENSOR_TYPE_POWER)
def value_changed(self, packet):
"""Update the internal state of the sensor."""
if packet.rorg != 0xA5:
return
packet.parse_eep(0x12, 0x01)
if packet.parsed["DT"]["raw_value"] == 1:
# this packet reports the current value
raw_val = packet.parsed["MR"]["raw_value"]
divisor = packet.parsed["DIV"]["raw_value"]
self._state = raw_val / (10 ** divisor)
self.schedule_update_ha_state()
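# Worked example (hypothetical packet values): an A5-12-01 telegram reporting the current
# value with MR raw_value=1500 and DIV raw_value=1 yields 1500 / 10**1 = 150.0 W.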
class EnOceanTemperatureSensor(EnOceanSensor):
"""Representation of an EnOcean temperature sensor device.
EEPs (EnOcean Equipment Profiles):
- A5-02-01 to A5-02-1B All 8 Bit Temperature Sensors of A5-02
- A5-10-01 to A5-10-14 (Room Operating Panels)
- A5-04-01 (Temp. and Humidity Sensor, Range 0°C to +40°C and 0% to 100%)
- A5-04-02 (Temp. and Humidity Sensor, Range -20°C to +60°C and 0% to 100%)
- A5-10-10 (Temp. and Humidity Sensor and Set Point)
- A5-10-12 (Temp. and Humidity Sensor, Set Point and Occupancy Control)
- 10 Bit Temp. Sensors are not supported (A5-02-20, A5-02-30)
For the following EEPs the scales must be set to "0 to 250":
- A5-04-01
- A5-04-02
- A5-10-10 to A5-10-14
"""
def __init__(self, dev_id, dev_name, scale_min, scale_max, range_from, range_to):
"""Initialize the EnOcean temperature sensor device."""
super().__init__(dev_id, dev_name, SENSOR_TYPE_TEMPERATURE)
self._scale_min = scale_min
self._scale_max = scale_max
self.range_from = range_from
self.range_to = range_to
def value_changed(self, packet):
"""Update the internal state of the sensor."""
if packet.data[0] != 0xA5:
return
temp_scale = self._scale_max - self._scale_min
temp_range = self.range_to - self.range_from
raw_val = packet.data[3]
temperature = temp_scale / temp_range * (raw_val - self.range_from)
temperature += self._scale_min
self._state = round(temperature, 1)
self.schedule_update_ha_state()
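# Worked example (hypothetical raw value): with the platform defaults range_from=255,
# range_to=0, min_temp=0 and max_temp=40, a raw value of 128 maps to
# 40 / (0 - 255) * (128 - 255) + 0 ≈ 19.9 °C.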
class EnOceanHumiditySensor(EnOceanSensor):
"""Representation of an EnOcean humidity sensor device.
EEPs (EnOcean Equipment Profiles):
- A5-04-01 (Temp. and Humidity Sensor, Range 0°C to +40°C and 0% to 100%)
- A5-04-02 (Temp. and Humidity Sensor, Range -20°C to +60°C and 0% to 100%)
- A5-10-10 to A5-10-14 (Room Operating Panels)
"""
def __init__(self, dev_id, dev_name):
"""Initialize the EnOcean humidity sensor device."""
super().__init__(dev_id, dev_name, SENSOR_TYPE_HUMIDITY)
def value_changed(self, packet):
"""Update the internal state of the sensor."""
if packet.rorg != 0xA5:
return
humidity = packet.data[2] * 100 / 250
self._state = round(humidity, 1)
self.schedule_update_ha_state()
class EnOceanWindowHandle(EnOceanSensor):
"""Representation of an EnOcean window handle device.
EEPs (EnOcean Equipment Profiles):
- F6-10-00 (Mechanical handle / Hoppe AG)
"""
def __init__(self, dev_id, dev_name):
"""Initialize the EnOcean window handle sensor device."""
super().__init__(dev_id, dev_name, SENSOR_TYPE_WINDOWHANDLE)
def value_changed(self, packet):
"""Update the internal state of the sensor."""
action = (packet.data[1] & 0x70) >> 4
if action == 0x07:
self._state = STATE_CLOSED
if action in (0x04, 0x06):
self._state = STATE_OPEN
if action == 0x05:
self._state = "tilt"
self.schedule_update_ha_state()
|
import json
import os
import pathlib
import re
import subprocess
from typing import Dict, List, Union
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .util import get_lokalise_token
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
def run_download_docker():
"""Run the Docker image to download the translations."""
print("Running Docker to download latest translations.")
run = subprocess.run(
[
"docker",
"run",
"-v",
f"{DOWNLOAD_DIR}:/opt/dest/locale",
"--rm",
f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
# Lokalise command
"lokalise2",
"--token",
get_lokalise_token(),
"--project-id",
CORE_PROJECT_ID,
"file",
"download",
CORE_PROJECT_ID,
"--original-filenames=false",
"--replace-breaks=false",
"--export-empty-as",
"skip",
"--format",
"json",
"--unzip-to",
"/opt/dest",
]
)
print()
if run.returncode != 0:
raise ExitApp("Failed to download translations")
def save_json(filename: str, data: Union[List, Dict]):
"""Save JSON data to a file.
Returns True on success.
"""
data = json.dumps(data, sort_keys=True, indent=4)
with open(filename, "w", encoding="utf-8") as fdesc:
fdesc.write(data)
return True
def get_component_path(lang, component):
"""Get the component translation path."""
if os.path.isdir(os.path.join("homeassistant", "components", component)):
return os.path.join(
"homeassistant", "components", component, "translations", f"{lang}.json"
)
raise ExitApp(f"Integration {component} not found under homeassistant/components/")
def get_platform_path(lang, component, platform):
"""Get the platform translation path."""
return os.path.join(
"homeassistant",
"components",
component,
"translations",
f"{platform}.{lang}.json",
)
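# Examples (hypothetical integration and platform names):
#   get_component_path("en", "hue") -> homeassistant/components/hue/translations/en.json
#     (assuming the integration directory exists)
#   get_platform_path("en", "hue", "sensor") -> homeassistant/components/hue/translations/sensor.en.json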
def get_component_translations(translations):
"""Get the component level translations."""
translations = translations.copy()
translations.pop("platform", None)
return translations
def save_language_translations(lang, translations):
"""Distribute the translations for this language."""
components = translations.get("component", {})
for component, component_translations in components.items():
base_translations = get_component_translations(component_translations)
if base_translations:
path = get_component_path(lang, component)
os.makedirs(os.path.dirname(path), exist_ok=True)
save_json(path, base_translations)
if "platform" not in component_translations:
continue
for platform, platform_translations in component_translations[
"platform"
].items():
path = get_platform_path(lang, component, platform)
os.makedirs(os.path.dirname(path), exist_ok=True)
save_json(path, platform_translations)
def write_integration_translations():
"""Write integration translations."""
for lang_file in DOWNLOAD_DIR.glob("*.json"):
lang = lang_file.stem
translations = json.loads(lang_file.read_text())
save_language_translations(lang, translations)
def delete_old_translations():
"""Delete old translations."""
for fil in INTEGRATIONS_DIR.glob("*/translations/*"):
fil.unlink()
def run():
"""Run the script."""
DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True)
run_download_docker()
delete_old_translations()
write_integration_translations()
return 0
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import re
from absl import flags
from perfkitbenchmarker import context
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
import six
AZURE_PATH = azure.AZURE_PATH
AZURE_SUFFIX = ['--output', 'json']
FLAGS = flags.FLAGS
def GetAzureStorageConnectionString(storage_account_name, resource_group_args):
"""Get connection string."""
stdout, _ = vm_util.IssueRetryableCommand(
[AZURE_PATH, 'storage', 'account', 'show-connection-string',
'--name', storage_account_name] + resource_group_args + AZURE_SUFFIX)
response = json.loads(stdout)
return response['connectionString']
def GetAzureStorageConnectionArgs(storage_account_name, resource_group_args):
"""Get connection CLI arguments."""
return ['--connection-string',
GetAzureStorageConnectionString(storage_account_name,
resource_group_args)]
def GetAzureStorageAccountKey(storage_account_name, resource_group_args):
"""Get storage account key."""
stdout, _ = vm_util.IssueRetryableCommand(
[AZURE_PATH, 'storage', 'account', 'keys', 'list',
'--account-name', storage_account_name] +
resource_group_args + AZURE_SUFFIX)
response = json.loads(stdout)
# A new storage account comes with two keys, but we only need one.
assert response[0]['permissions'].lower() == 'full'
return response[0]['value']
def FormatTag(key, value):
"""Format an individual tag for use with the --tags param of Azure CLI."""
return '{0}={1}'.format(key, value)
def FormatTags(tags_dict):
"""Format a dict of tags into arguments for 'tag' parameter.
Args:
tags_dict: Tags to be formatted.
Returns:
A list of tags formatted as arguments for 'tag' parameter.
"""
return [FormatTag(k, v) for k, v in sorted(six.iteritems(tags_dict))]
def GetResourceTags(timeout_minutes):
"""Gets a dict of tags.
Args:
timeout_minutes: int, Timeout used for setting the timeout_utc tag.
Returns:
A dict contains formatted tags.
"""
benchmark_spec = context.GetThreadBenchmarkSpec()
return benchmark_spec.GetResourceTags(timeout_minutes)
def GetTags(timeout_minutes):
"""Gets a list of tags to be used with the --tags param of Azure CLI.
Args:
timeout_minutes: int, Timeout used for setting the timeout_utc tag.
Returns:
A string contains formatted tags.
"""
return FormatTags(GetResourceTags(timeout_minutes))
def GetTagsJson(timeout_minutes):
"""Gets a JSON string of tags to be used with the --set param of Azure CLI.
Args:
timeout_minutes: int, Timeout used for setting the timeout_utc tag.
Returns:
A string contains json formatted tags.
"""
return 'tags={}'.format(json.dumps(GetResourceTags(timeout_minutes)))
def _IsLocation(zone_or_location):
"""Returns whether "zone_or_location" is a location."""
return re.match(r'[a-z]+[0-9]?$', zone_or_location)
def IsZone(zone_or_location):
"""Returns whether "zone_or_location" is a zone.
Args:
zone_or_location: string, Azure zone or location. Format for Azure
availability
zone support is "location-availability_zone". Example: eastus2-1 specifies
Azure location eastus2 with availability zone 1.
"""
return re.match(r'[a-z]+[0-9]?-[0-9]$', zone_or_location)
def GetLocationFromZone(zone_or_location):
"""Returns the location a zone is in (or "zone_or_location" if it's a location)."""
if _IsLocation(zone_or_location):
return zone_or_location
if IsZone(zone_or_location):
return zone_or_location[:-2]
raise ValueError('%s is not a valid Azure zone or location name' %
zone_or_location)
def GetAvailabilityZoneFromZone(zone_or_location):
"""Returns the Availability Zone from a zone."""
if IsZone(zone_or_location):
return zone_or_location[-1]
if _IsLocation(zone_or_location):
return None
raise ValueError('%s is not a valid Azure zone' % zone_or_location)
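# Examples (from the docstrings above): 'eastus2-1' is a zone, so
# GetLocationFromZone('eastus2-1') == 'eastus2' and
# GetAvailabilityZoneFromZone('eastus2-1') == '1'; a plain location such as 'eastus2' is
# returned unchanged by GetLocationFromZone and has no availability zone (None).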
|
import requests
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from nikola.plugin_categories import RestExtension
class Plugin(RestExtension):
"""Plugin for gist directive."""
name = "rest_gist"
def set_site(self, site):
"""Set Nikola site."""
self.site = site
directives.register_directive('gist', GitHubGist)
return super().set_site(site)
class GitHubGist(Directive):
"""Embed GitHub Gist.
Usage:
.. gist:: GIST_ID
or
.. gist:: GIST_URL
"""
required_arguments = 1
optional_arguments = 1
option_spec = {'file': directives.unchanged}
final_argument_whitespace = True
has_content = False
def get_raw_gist_with_filename(self, gistID, filename):
"""Get raw gist text for a filename."""
url = '/'.join(("https://gist.github.com/raw", gistID, filename))
return requests.get(url).text
def get_raw_gist(self, gistID):
"""Get raw gist text."""
url = "https://gist.github.com/raw/{0}".format(gistID)
try:
return requests.get(url).text
except requests.exceptions.RequestException:
raise self.error('Cannot get gist for url={0}'.format(url))
def run(self):
"""Run the gist directive."""
if 'https://' in self.arguments[0]:
gistID = self.arguments[0].split('/')[-1].strip()
else:
gistID = self.arguments[0].strip()
embedHTML = ""
rawGist = ""
if 'file' in self.options:
filename = self.options['file']
rawGist = (self.get_raw_gist_with_filename(gistID, filename))
embedHTML = ('<script src="https://gist.github.com/{0}.js'
'?file={1}"></script>').format(gistID, filename)
else:
rawGist = (self.get_raw_gist(gistID))
embedHTML = ('<script src="https://gist.github.com/{0}.js">'
'</script>').format(gistID)
reqnode = nodes.literal_block('', rawGist)
return [nodes.raw('', embedHTML, format='html'),
nodes.raw('', '<noscript>', format='html'),
reqnode,
nodes.raw('', '</noscript>', format='html')]
|
import logging
import pywink
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_ECO,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.helpers.temperature import display_temp as show_temp
from . import DOMAIN, WinkDevice
_LOGGER = logging.getLogger(__name__)
ATTR_ECO_TARGET = "eco_target"
ATTR_EXTERNAL_TEMPERATURE = "external_temperature"
ATTR_OCCUPIED = "occupied"
ATTR_SCHEDULE_ENABLED = "schedule_enabled"
ATTR_SMART_TEMPERATURE = "smart_temperature"
ATTR_TOTAL_CONSUMPTION = "total_consumption"
HA_HVAC_TO_WINK = {
HVAC_MODE_AUTO: "auto",
HVAC_MODE_COOL: "cool_only",
HVAC_MODE_FAN_ONLY: "fan_only",
HVAC_MODE_HEAT: "heat_only",
HVAC_MODE_OFF: "off",
}
WINK_HVAC_TO_HA = {value: key for key, value in HA_HVAC_TO_WINK.items()}
SUPPORT_FLAGS_THERMOSTAT = (
SUPPORT_TARGET_TEMPERATURE
| SUPPORT_TARGET_TEMPERATURE_RANGE
| SUPPORT_FAN_MODE
| SUPPORT_AUX_HEAT
)
SUPPORT_FAN_THERMOSTAT = [FAN_AUTO, FAN_ON]
SUPPORT_PRESET_THERMOSTAT = [PRESET_AWAY, PRESET_ECO]
SUPPORT_FLAGS_AC = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
SUPPORT_FAN_AC = [FAN_HIGH, FAN_LOW, FAN_MEDIUM]
SUPPORT_PRESET_AC = [PRESET_NONE, PRESET_ECO]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink climate devices."""
for climate in pywink.get_thermostats():
_id = climate.object_id() + climate.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkThermostat(climate, hass)])
for climate in pywink.get_air_conditioners():
_id = climate.object_id() + climate.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkAC(climate, hass)])
class WinkThermostat(WinkDevice, ClimateEntity):
"""Representation of a Wink thermostat."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_THERMOSTAT
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["climate"].append(self)
@property
def temperature_unit(self):
"""Return the unit of measurement."""
# The Wink API always returns temp in Celsius
return TEMP_CELSIUS
@property
def device_state_attributes(self):
"""Return the optional device state attributes."""
data = {}
if self.external_temperature is not None:
data[ATTR_EXTERNAL_TEMPERATURE] = show_temp(
self.hass,
self.external_temperature,
self.temperature_unit,
PRECISION_TENTHS,
)
if self.smart_temperature:
data[ATTR_SMART_TEMPERATURE] = self.smart_temperature
if self.occupied is not None:
data[ATTR_OCCUPIED] = self.occupied
if self.eco_target is not None:
data[ATTR_ECO_TARGET] = self.eco_target
return data
@property
def current_temperature(self):
"""Return the current temperature."""
return self.wink.current_temperature()
@property
def current_humidity(self):
"""Return the current humidity."""
if self.wink.current_humidity() is not None:
# The API states humidity will be a float 0-1
# the only example API response with humidity listed shows an int
# This will address both possibilities
if self.wink.current_humidity() < 1:
return self.wink.current_humidity() * 100
return self.wink.current_humidity()
return None
@property
def external_temperature(self):
"""Return the current external temperature."""
return self.wink.current_external_temperature()
@property
def smart_temperature(self):
"""Return the current average temp of all remote sensor."""
return self.wink.current_smart_temperature()
@property
def eco_target(self):
"""Return status of eco target (Is the thermostat in eco mode)."""
return self.wink.eco_target()
@property
def occupied(self):
"""Return status of if the thermostat has detected occupancy."""
return self.wink.occupied()
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
mode = self.wink.current_hvac_mode()
if mode == "eco":
return PRESET_ECO
if self.wink.away():
return PRESET_AWAY
return None
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET_THERMOSTAT
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
target_hum = None
if self.wink.current_humidifier_mode() == "on":
if self.wink.current_humidifier_set_point() is not None:
target_hum = self.wink.current_humidifier_set_point() * 100
elif self.wink.current_dehumidifier_mode() == "on":
if self.wink.current_dehumidifier_set_point() is not None:
target_hum = self.wink.current_dehumidifier_set_point() * 100
else:
target_hum = None
return target_hum
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self.hvac_mode != HVAC_MODE_AUTO and not self.wink.away():
if self.hvac_mode == HVAC_MODE_COOL:
return self.wink.current_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
return self.wink.current_min_set_point()
return None
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
if self.hvac_mode == HVAC_MODE_AUTO:
return self.wink.current_min_set_point()
return None
@property
def target_temperature_high(self):
"""Return the higher bound temperature we try to reach."""
if self.hvac_mode == HVAC_MODE_AUTO:
return self.wink.current_max_set_point()
return None
@property
def is_aux_heat(self):
"""Return true if aux heater."""
if "aux" not in self.wink.hvac_modes():
return None
if self.wink.current_hvac_mode() == "aux":
return True
return False
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if not self.wink.is_on():
return HVAC_MODE_OFF
wink_mode = self.wink.current_hvac_mode()
if wink_mode == "aux":
return HVAC_MODE_HEAT
if wink_mode == "eco":
return HVAC_MODE_AUTO
return WINK_HVAC_TO_HA.get(wink_mode)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
hvac_list = [HVAC_MODE_OFF]
modes = self.wink.hvac_modes()
for mode in modes:
if mode in ("eco", "aux"):
continue
try:
ha_mode = WINK_HVAC_TO_HA[mode]
hvac_list.append(ha_mode)
except KeyError:
_LOGGER.error(
"Invalid operation mode mapping. %s doesn't map. "
"Please report this",
mode,
)
return hvac_list
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
if not self.wink.is_on():
return CURRENT_HVAC_OFF
if self.wink.cool_on():
return CURRENT_HVAC_COOL
if self.wink.heat_on():
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
if target_temp is not None:
if self.hvac_mode == HVAC_MODE_COOL:
target_temp_high = target_temp
if self.hvac_mode == HVAC_MODE_HEAT:
target_temp_low = target_temp
self.wink.set_temperature(target_temp_low, target_temp_high)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
hvac_mode_to_set = HA_HVAC_TO_WINK.get(hvac_mode)
self.wink.set_operation_mode(hvac_mode_to_set)
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
# Away
if preset_mode != PRESET_AWAY and self.wink.away():
self.wink.set_away_mode(False)
elif preset_mode == PRESET_AWAY:
self.wink.set_away_mode()
if preset_mode == PRESET_ECO:
self.wink.set_operation_mode("eco")
@property
def fan_mode(self):
"""Return whether the fan is on."""
if self.wink.current_fan_mode() == "on":
return FAN_ON
if self.wink.current_fan_mode() == "auto":
return FAN_AUTO
# No Fan available so disable slider
return None
@property
def fan_modes(self):
"""List of available fan modes."""
if self.wink.has_fan():
return SUPPORT_FAN_THERMOSTAT
return None
def set_fan_mode(self, fan_mode):
"""Turn fan on/off."""
self.wink.set_fan_mode(fan_mode.lower())
def turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
self.wink.set_operation_mode("aux")
def turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
self.wink.set_operation_mode("heat_only")
@property
def min_temp(self):
"""Return the minimum temperature."""
minimum = 7 # Default minimum
min_min = self.wink.min_min_set_point()
min_max = self.wink.min_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
if min_min:
return_value = min_min
else:
return_value = minimum
elif self.hvac_mode == HVAC_MODE_COOL:
if min_max:
return_value = min_max
else:
return_value = minimum
elif self.hvac_mode == HVAC_MODE_AUTO:
if min_min and min_max:
return_value = min(min_min, min_max)
else:
return_value = minimum
else:
return_value = minimum
return return_value
@property
def max_temp(self):
"""Return the maximum temperature."""
maximum = 35 # Default maximum
max_min = self.wink.max_min_set_point()
max_max = self.wink.max_max_set_point()
if self.hvac_mode == HVAC_MODE_HEAT:
if max_min:
return_value = max_min
else:
return_value = maximum
elif self.hvac_mode == HVAC_MODE_COOL:
if max_max:
return_value = max_max
else:
return_value = maximum
elif self.hvac_mode == HVAC_MODE_AUTO:
if max_min and max_max:
return_value = min(max_min, max_max)
else:
return_value = maximum
else:
return_value = maximum
return return_value
class WinkAC(WinkDevice, ClimateEntity):
"""Representation of a Wink air conditioner."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_AC
@property
def temperature_unit(self):
"""Return the unit of measurement."""
# The Wink API always returns temp in Celsius
return TEMP_CELSIUS
@property
def device_state_attributes(self):
"""Return the optional device state attributes."""
data = {}
data[ATTR_TOTAL_CONSUMPTION] = self.wink.total_consumption()
data[ATTR_SCHEDULE_ENABLED] = self.wink.schedule_enabled()
return data
@property
def current_temperature(self):
"""Return the current temperature."""
return self.wink.current_temperature()
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
if not self.wink.is_on():
return PRESET_NONE
mode = self.wink.current_mode()
if mode == "auto_eco":
return PRESET_ECO
return PRESET_NONE
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET_AC
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if not self.wink.is_on():
return HVAC_MODE_OFF
wink_mode = self.wink.current_mode()
if wink_mode == "auto_eco":
return HVAC_MODE_COOL
return WINK_HVAC_TO_HA.get(wink_mode)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
hvac_list = [HVAC_MODE_OFF]
modes = self.wink.modes()
for mode in modes:
if mode == "auto_eco":
continue
try:
ha_mode = WINK_HVAC_TO_HA[mode]
hvac_list.append(ha_mode)
except KeyError:
_LOGGER.error(
"Invalid operation mode mapping. %s doesn't map. "
"Please report this",
mode,
)
return hvac_list
def set_temperature(self, **kwargs):
"""Set new target temperature."""
target_temp = kwargs.get(ATTR_TEMPERATURE)
self.wink.set_temperature(target_temp)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
hvac_mode_to_set = HA_HVAC_TO_WINK.get(hvac_mode)
self.wink.set_operation_mode(hvac_mode_to_set)
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
if preset_mode == PRESET_ECO:
self.wink.set_operation_mode("auto_eco")
elif self.hvac_mode == HVAC_MODE_COOL and preset_mode == PRESET_NONE:
self.set_hvac_mode(HVAC_MODE_COOL)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self.wink.current_max_set_point()
@property
def fan_mode(self):
"""
Return the current fan mode.
The official Wink app only supports 3 modes [low, medium, high]
which are equal to [0.33, 0.66, 1.0] respectively.
"""
speed = self.wink.current_fan_speed()
if speed <= 0.33:
return FAN_LOW
if speed <= 0.66:
return FAN_MEDIUM
return FAN_HIGH
@property
def fan_modes(self):
"""Return a list of available fan modes."""
return SUPPORT_FAN_AC
def set_fan_mode(self, fan_mode):
"""
Set fan speed.
The official Wink app only supports 3 modes [low, medium, high]
which are equal to [0.33, 0.66, 1.0] respectively.
"""
if fan_mode == FAN_LOW:
speed = 0.33
elif fan_mode == FAN_MEDIUM:
speed = 0.66
elif fan_mode == FAN_HIGH:
speed = 1.0
self.wink.set_ac_fan_speed(speed)
|
from bisect import bisect
import logging
import re
from typing import Callable, Dict, List, NamedTuple, Optional, Pattern, Tuple, Union
import attr
from homeassistant.components.sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_SIGNAL_STRENGTH,
DOMAIN as SENSOR_DOMAIN,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_URL,
DATA_BYTES,
DATA_RATE_BYTES_PER_SECOND,
PERCENTAGE,
STATE_UNKNOWN,
TIME_SECONDS,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType, StateType
from . import HuaweiLteBaseEntity
from .const import (
DOMAIN,
KEY_DEVICE_INFORMATION,
KEY_DEVICE_SIGNAL,
KEY_MONITORING_CHECK_NOTIFICATIONS,
KEY_MONITORING_MONTH_STATISTICS,
KEY_MONITORING_STATUS,
KEY_MONITORING_TRAFFIC_STATISTICS,
KEY_NET_CURRENT_PLMN,
KEY_NET_NET_MODE,
KEY_SMS_SMS_COUNT,
SENSOR_KEYS,
)
_LOGGER = logging.getLogger(__name__)
class SensorMeta(NamedTuple):
"""Metadata for defining sensors."""
name: Optional[str] = None
device_class: Optional[str] = None
icon: Union[str, Callable[[StateType], str], None] = None
unit: Optional[str] = None
enabled_default: bool = False
include: Optional[Pattern[str]] = None
exclude: Optional[Pattern[str]] = None
formatter: Optional[Callable[[str], Tuple[StateType, Optional[str]]]] = None
SENSOR_META: Dict[Union[str, Tuple[str, str]], SensorMeta] = {
KEY_DEVICE_INFORMATION: SensorMeta(
include=re.compile(r"^WanIP.*Address$", re.IGNORECASE)
),
(KEY_DEVICE_INFORMATION, "WanIPAddress"): SensorMeta(
name="WAN IP address", icon="mdi:ip", enabled_default=True
),
(KEY_DEVICE_INFORMATION, "WanIPv6Address"): SensorMeta(
name="WAN IPv6 address", icon="mdi:ip"
),
(KEY_DEVICE_SIGNAL, "band"): SensorMeta(name="Band"),
(KEY_DEVICE_SIGNAL, "cell_id"): SensorMeta(name="Cell ID"),
(KEY_DEVICE_SIGNAL, "dl_mcs"): SensorMeta(name="Downlink MCS"),
(KEY_DEVICE_SIGNAL, "dlbandwidth"): SensorMeta(
name="Downlink bandwidth",
icon=lambda x: (
"mdi:speedometer-slow",
"mdi:speedometer-medium",
"mdi:speedometer",
)[bisect((8, 15), x if x is not None else -1000)],
),
(KEY_DEVICE_SIGNAL, "earfcn"): SensorMeta(name="EARFCN"),
(KEY_DEVICE_SIGNAL, "lac"): SensorMeta(name="LAC", icon="mdi:map-marker"),
(KEY_DEVICE_SIGNAL, "plmn"): SensorMeta(name="PLMN"),
(KEY_DEVICE_SIGNAL, "rac"): SensorMeta(name="RAC", icon="mdi:map-marker"),
(KEY_DEVICE_SIGNAL, "rrc_status"): SensorMeta(name="RRC status"),
(KEY_DEVICE_SIGNAL, "tac"): SensorMeta(name="TAC", icon="mdi:map-marker"),
(KEY_DEVICE_SIGNAL, "tdd"): SensorMeta(name="TDD"),
(KEY_DEVICE_SIGNAL, "txpower"): SensorMeta(
name="Transmit power",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
),
(KEY_DEVICE_SIGNAL, "ul_mcs"): SensorMeta(name="Uplink MCS"),
(KEY_DEVICE_SIGNAL, "ulbandwidth"): SensorMeta(
name="Uplink bandwidth",
icon=lambda x: (
"mdi:speedometer-slow",
"mdi:speedometer-medium",
"mdi:speedometer",
)[bisect((8, 15), x if x is not None else -1000)],
),
(KEY_DEVICE_SIGNAL, "mode"): SensorMeta(
name="Mode",
formatter=lambda x: ({"0": "2G", "2": "3G", "7": "4G"}.get(x, "Unknown"), None),
),
(KEY_DEVICE_SIGNAL, "pci"): SensorMeta(name="PCI"),
(KEY_DEVICE_SIGNAL, "rsrq"): SensorMeta(
name="RSRQ",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/rsrq.php
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((-11, -8, -5), x if x is not None else -1000)],
enabled_default=True,
),
(KEY_DEVICE_SIGNAL, "rsrp"): SensorMeta(
name="RSRP",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/rsrp.php
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((-110, -95, -80), x if x is not None else -1000)],
enabled_default=True,
),
(KEY_DEVICE_SIGNAL, "rssi"): SensorMeta(
name="RSSI",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# https://eyesaas.com/wi-fi-signal-strength/
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((-80, -70, -60), x if x is not None else -1000)],
enabled_default=True,
),
(KEY_DEVICE_SIGNAL, "sinr"): SensorMeta(
name="SINR",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/sinr.php
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((0, 5, 10), x if x is not None else -1000)],
enabled_default=True,
),
(KEY_DEVICE_SIGNAL, "rscp"): SensorMeta(
name="RSCP",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# https://wiki.teltonika.lt/view/RSCP
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((-95, -85, -75), x if x is not None else -1000)],
),
(KEY_DEVICE_SIGNAL, "ecio"): SensorMeta(
name="EC/IO",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
# https://wiki.teltonika.lt/view/EC/IO
icon=lambda x: (
"mdi:signal-cellular-outline",
"mdi:signal-cellular-1",
"mdi:signal-cellular-2",
"mdi:signal-cellular-3",
)[bisect((-20, -10, -6), x if x is not None else -1000)],
),
KEY_MONITORING_CHECK_NOTIFICATIONS: SensorMeta(
exclude=re.compile(
r"^(onlineupdatestatus|smsstoragefull)$",
re.IGNORECASE,
)
),
(KEY_MONITORING_CHECK_NOTIFICATIONS, "UnreadMessage"): SensorMeta(
name="SMS unread", icon="mdi:email-receive"
),
KEY_MONITORING_MONTH_STATISTICS: SensorMeta(
exclude=re.compile(r"^month(duration|lastcleartime)$", re.IGNORECASE)
),
(KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthDownload"): SensorMeta(
name="Current month download", unit=DATA_BYTES, icon="mdi:download"
),
(KEY_MONITORING_MONTH_STATISTICS, "CurrentMonthUpload"): SensorMeta(
name="Current month upload", unit=DATA_BYTES, icon="mdi:upload"
),
KEY_MONITORING_STATUS: SensorMeta(
include=re.compile(
r"^(batterypercent|currentwifiuser|(primary|secondary).*dns)$",
re.IGNORECASE,
)
),
(KEY_MONITORING_STATUS, "BatteryPercent"): SensorMeta(
name="Battery",
device_class=DEVICE_CLASS_BATTERY,
unit=PERCENTAGE,
),
(KEY_MONITORING_STATUS, "CurrentWifiUser"): SensorMeta(
name="WiFi clients connected", icon="mdi:wifi"
),
(KEY_MONITORING_STATUS, "PrimaryDns"): SensorMeta(
name="Primary DNS server", icon="mdi:ip"
),
(KEY_MONITORING_STATUS, "SecondaryDns"): SensorMeta(
name="Secondary DNS server", icon="mdi:ip"
),
(KEY_MONITORING_STATUS, "PrimaryIPv6Dns"): SensorMeta(
name="Primary IPv6 DNS server", icon="mdi:ip"
),
(KEY_MONITORING_STATUS, "SecondaryIPv6Dns"): SensorMeta(
name="Secondary IPv6 DNS server", icon="mdi:ip"
),
KEY_MONITORING_TRAFFIC_STATISTICS: SensorMeta(
exclude=re.compile(r"^showtraffic$", re.IGNORECASE)
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentConnectTime"): SensorMeta(
name="Current connection duration", unit=TIME_SECONDS, icon="mdi:timer-outline"
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownload"): SensorMeta(
name="Current connection download", unit=DATA_BYTES, icon="mdi:download"
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentDownloadRate"): SensorMeta(
name="Current download rate",
unit=DATA_RATE_BYTES_PER_SECOND,
icon="mdi:download",
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUpload"): SensorMeta(
name="Current connection upload", unit=DATA_BYTES, icon="mdi:upload"
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "CurrentUploadRate"): SensorMeta(
name="Current upload rate",
unit=DATA_RATE_BYTES_PER_SECOND,
icon="mdi:upload",
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "TotalConnectTime"): SensorMeta(
name="Total connected duration", unit=TIME_SECONDS, icon="mdi:timer-outline"
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "TotalDownload"): SensorMeta(
name="Total download", unit=DATA_BYTES, icon="mdi:download"
),
(KEY_MONITORING_TRAFFIC_STATISTICS, "TotalUpload"): SensorMeta(
name="Total upload", unit=DATA_BYTES, icon="mdi:upload"
),
KEY_NET_CURRENT_PLMN: SensorMeta(
exclude=re.compile(r"^(Rat|ShortName|Spn)$", re.IGNORECASE)
),
(KEY_NET_CURRENT_PLMN, "State"): SensorMeta(
name="Operator search mode",
formatter=lambda x: ({"0": "Auto", "1": "Manual"}.get(x, "Unknown"), None),
),
(KEY_NET_CURRENT_PLMN, "FullName"): SensorMeta(
name="Operator name",
),
(KEY_NET_CURRENT_PLMN, "Numeric"): SensorMeta(
name="Operator code",
),
KEY_NET_NET_MODE: SensorMeta(include=re.compile(r"^NetworkMode$", re.IGNORECASE)),
(KEY_NET_NET_MODE, "NetworkMode"): SensorMeta(
name="Preferred mode",
formatter=lambda x: (
{
"00": "4G/3G/2G",
"01": "2G",
"02": "3G",
"03": "4G",
"0301": "4G/2G",
"0302": "4G/3G",
"0201": "3G/2G",
}.get(x, "Unknown"),
None,
),
),
(KEY_SMS_SMS_COUNT, "LocalDeleted"): SensorMeta(
name="SMS deleted (device)",
icon="mdi:email-minus",
),
(KEY_SMS_SMS_COUNT, "LocalDraft"): SensorMeta(
name="SMS drafts (device)",
icon="mdi:email-send-outline",
),
(KEY_SMS_SMS_COUNT, "LocalInbox"): SensorMeta(
name="SMS inbox (device)",
icon="mdi:email",
),
(KEY_SMS_SMS_COUNT, "LocalMax"): SensorMeta(
name="SMS capacity (device)",
icon="mdi:email",
),
(KEY_SMS_SMS_COUNT, "LocalOutbox"): SensorMeta(
name="SMS outbox (device)",
icon="mdi:email-send",
),
(KEY_SMS_SMS_COUNT, "LocalUnread"): SensorMeta(
name="SMS unread (device)",
icon="mdi:email-receive",
),
(KEY_SMS_SMS_COUNT, "SimDraft"): SensorMeta(
name="SMS drafts (SIM)",
icon="mdi:email-send-outline",
),
(KEY_SMS_SMS_COUNT, "SimInbox"): SensorMeta(
name="SMS inbox (SIM)",
icon="mdi:email",
),
(KEY_SMS_SMS_COUNT, "SimMax"): SensorMeta(
name="SMS capacity (SIM)",
icon="mdi:email",
),
(KEY_SMS_SMS_COUNT, "SimOutbox"): SensorMeta(
name="SMS outbox (SIM)",
icon="mdi:email-send",
),
(KEY_SMS_SMS_COUNT, "SimUnread"): SensorMeta(
name="SMS unread (SIM)",
icon="mdi:email-receive",
),
(KEY_SMS_SMS_COUNT, "SimUsed"): SensorMeta(
name="SMS messages (SIM)",
icon="mdi:email-receive",
),
}
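# Illustrative note (not part of the original module): the icon lambdas above use
# bisect() to map a numeric reading onto a tuple of icons. Using the RSRQ
# thresholds (-11, -8, -5) as an example:
#
#     bisect((-11, -8, -5), -12)  -> 0  -> "mdi:signal-cellular-outline"
#     bisect((-11, -8, -5), -9)   -> 1  -> "mdi:signal-cellular-1"
#     bisect((-11, -8, -5), -4)   -> 3  -> "mdi:signal-cellular-3"
#
# A missing reading falls back to -1000, which always selects index 0.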
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up from config entry."""
router = hass.data[DOMAIN].routers[config_entry.data[CONF_URL]]
sensors: List[Entity] = []
for key in SENSOR_KEYS:
items = router.data.get(key)
if not items:
continue
key_meta = SENSOR_META.get(key)
if key_meta:
if key_meta.include:
items = filter(key_meta.include.search, items)
if key_meta.exclude:
items = [x for x in items if not key_meta.exclude.search(x)]
for item in items:
sensors.append(
HuaweiLteSensor(
router, key, item, SENSOR_META.get((key, item), SensorMeta())
)
)
async_add_entities(sensors, True)
def format_default(value: StateType) -> Tuple[StateType, Optional[str]]:
"""Format value."""
unit = None
if value is not None:
# Clean up value and infer unit, e.g. -71dBm, 15 dB
match = re.match(
r"([>=<]*)(?P<value>.+?)\s*(?P<unit>[a-zA-Z]+)\s*$", str(value)
)
if match:
try:
value = float(match.group("value"))
unit = match.group("unit")
except ValueError:
pass
return value, unit
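# Illustrative sketch (not part of the original module): format_default() splits a
# raw string such as "-71dBm" into a numeric state and a unit. With the regular
# expression above, the following would hold:
#
#     format_default("-71dBm")  ->  (-71.0, "dBm")
#     format_default("15 dB")   ->  (15.0, "dB")
#     format_default("LTE")     ->  ("LTE", None)   # float() fails, value kept as-is
#     format_default(None)      ->  (None, None)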
@attr.s
class HuaweiLteSensor(HuaweiLteBaseEntity):
"""Huawei LTE sensor entity."""
key: str = attr.ib()
item: str = attr.ib()
meta: SensorMeta = attr.ib()
_state: StateType = attr.ib(init=False, default=STATE_UNKNOWN)
_unit: Optional[str] = attr.ib(init=False)
async def async_added_to_hass(self) -> None:
"""Subscribe to needed data on add."""
await super().async_added_to_hass()
self.router.subscriptions[self.key].add(f"{SENSOR_DOMAIN}/{self.item}")
async def async_will_remove_from_hass(self) -> None:
"""Unsubscribe from needed data on remove."""
await super().async_will_remove_from_hass()
self.router.subscriptions[self.key].remove(f"{SENSOR_DOMAIN}/{self.item}")
@property
def _entity_name(self) -> str:
return self.meta.name or self.item
@property
def _device_unique_id(self) -> str:
return f"{self.key}.{self.item}"
@property
def state(self) -> StateType:
"""Return sensor state."""
return self._state
@property
def device_class(self) -> Optional[str]:
"""Return sensor device class."""
return self.meta.device_class
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return sensor's unit of measurement."""
return self.meta.unit or self._unit
@property
def icon(self) -> Optional[str]:
"""Return icon for sensor."""
icon = self.meta.icon
if callable(icon):
return icon(self.state)
return icon
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self.meta.enabled_default
async def async_update(self) -> None:
"""Update state."""
try:
value = self.router.data[self.key][self.item]
except KeyError:
_LOGGER.debug("%s[%s] not in data", self.key, self.item)
value = None
formatter = self.meta.formatter
if not callable(formatter):
formatter = format_default
self._state, self._unit = formatter(value)
self._available = value is not None
|
import socket
import ssl
from homeassistant.util import dt
from .const import TIMEOUT
from .errors import (
ConnectionRefused,
ConnectionTimeout,
ResolveFailed,
ValidationFailure,
)
def get_cert(host, port):
"""Get the certificate for the host and port combination."""
ctx = ssl.create_default_context()
address = (host, port)
with socket.create_connection(address, timeout=TIMEOUT) as sock:
with ctx.wrap_socket(sock, server_hostname=address[0]) as ssock:
# pylint disable: https://github.com/PyCQA/pylint/issues/3166
cert = ssock.getpeercert() # pylint: disable=no-member
return cert
async def get_cert_expiry_timestamp(hass, hostname, port):
"""Return the certificate's expiration timestamp."""
try:
cert = await hass.async_add_executor_job(get_cert, hostname, port)
except socket.gaierror as err:
raise ResolveFailed(f"Cannot resolve hostname: {hostname}") from err
except socket.timeout as err:
raise ConnectionTimeout(
f"Connection timeout with server: {hostname}:{port}"
) from err
except ConnectionRefusedError as err:
raise ConnectionRefused(
f"Connection refused by server: {hostname}:{port}"
) from err
except ssl.CertificateError as err:
raise ValidationFailure(err.verify_message) from err
except ssl.SSLError as err:
raise ValidationFailure(err.args[0]) from err
ts_seconds = ssl.cert_time_to_seconds(cert["notAfter"])
return dt.utc_from_timestamp(ts_seconds)
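# Illustrative usage sketch (outside Home Assistant, host name made up): the
# helpers above can be combined manually by fetching the certificate and
# converting its "notAfter" field, e.g.:
#
#     cert = get_cert("example.com", 443)
#     expires = dt.utc_from_timestamp(ssl.cert_time_to_seconds(cert["notAfter"]))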
|
import lark
from pathlib import Path
examples_path = Path(__file__).parent
lark_path = Path(lark.__file__).parent
parser = lark.Lark.open(lark_path / 'grammars/lark.lark', rel_to=__file__, parser="lalr")
grammar_files = [
examples_path / 'advanced/python2.lark',
examples_path / 'advanced/python3.lark',
examples_path / 'relative-imports/multiples.lark',
examples_path / 'relative-imports/multiple2.lark',
examples_path / 'relative-imports/multiple3.lark',
examples_path / 'tests/no_newline_at_end.lark',
examples_path / 'tests/negative_priority.lark',
examples_path / 'standalone/json.lark',
lark_path / 'grammars/common.lark',
lark_path / 'grammars/lark.lark',
lark_path / 'grammars/unicode.lark',
lark_path / 'grammars/python.lark',
]
def test():
for grammar_file in grammar_files:
        parser.parse(grammar_file.read_text())
print("All grammars parsed successfully")
if __name__ == '__main__':
test()
|
import abc
import enum
from typing import Tuple, Dict, Any, Union, List, AsyncIterator, Type
__all__ = ["BaseDriver", "IdentifierData", "ConfigCategory"]
class ConfigCategory(str, enum.Enum):
"""Represents config category."""
#: Global category.
GLOBAL = "GLOBAL"
#: Guild category.
GUILD = "GUILD"
#: Channel category.
CHANNEL = "TEXTCHANNEL"
#: Role category.
ROLE = "ROLE"
#: User category.
USER = "USER"
#: Member category.
MEMBER = "MEMBER"
@classmethod
def get_pkey_info(
cls, category: Union[str, "ConfigCategory"], custom_group_data: Dict[str, int]
) -> Tuple[int, bool]:
"""Get the full primary key length for the given category,
and whether or not the category is a custom category.
"""
try:
# noinspection PyArgumentList
category_obj = cls(category)
except ValueError:
return custom_group_data[category], True
else:
return _CATEGORY_PKEY_COUNTS[category_obj], False
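# Illustrative sketch: built-in categories take their primary key length from the
# mapping below, while unknown names are treated as custom groups, e.g. (the
# "mycustom" group is made up):
#
#     ConfigCategory.get_pkey_info("MEMBER", {})                 -> (2, False)
#     ConfigCategory.get_pkey_info("mycustom", {"mycustom": 3})  -> (3, True)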
_CATEGORY_PKEY_COUNTS = {
ConfigCategory.GLOBAL: 0,
ConfigCategory.GUILD: 1,
ConfigCategory.CHANNEL: 1,
ConfigCategory.ROLE: 1,
ConfigCategory.USER: 1,
ConfigCategory.MEMBER: 2,
}
class IdentifierData:
def __init__(
self,
cog_name: str,
uuid: str,
category: str,
primary_key: Tuple[str, ...],
identifiers: Tuple[str, ...],
primary_key_len: int,
is_custom: bool = False,
):
self._cog_name = cog_name
self._uuid = uuid
self._category = category
self._primary_key = primary_key
self._identifiers = identifiers
self.primary_key_len = primary_key_len
self._is_custom = is_custom
@property
def cog_name(self) -> str:
return self._cog_name
@property
def uuid(self) -> str:
return self._uuid
@property
def category(self) -> str:
return self._category
@property
def primary_key(self) -> Tuple[str, ...]:
return self._primary_key
@property
def identifiers(self) -> Tuple[str, ...]:
return self._identifiers
@property
def is_custom(self) -> bool:
return self._is_custom
def __repr__(self) -> str:
return (
f"<IdentifierData cog_name={self.cog_name} uuid={self.uuid} category={self.category} "
f"primary_key={self.primary_key} identifiers={self.identifiers}>"
)
def __eq__(self, other) -> bool:
if not isinstance(other, IdentifierData):
return False
return (
self.uuid == other.uuid
and self.category == other.category
and self.primary_key == other.primary_key
and self.identifiers == other.identifiers
)
def __hash__(self) -> int:
return hash((self.uuid, self.category, self.primary_key, self.identifiers))
def get_child(self, *keys: str) -> "IdentifierData":
if not all(isinstance(i, str) for i in keys):
raise ValueError("Identifiers must be strings.")
primary_keys = self.primary_key
identifiers = self.identifiers
num_missing_pkeys = self.primary_key_len - len(self.primary_key)
if num_missing_pkeys > 0:
primary_keys += keys[:num_missing_pkeys]
if len(keys) > num_missing_pkeys:
identifiers += keys[num_missing_pkeys:]
return IdentifierData(
self.cog_name,
self.uuid,
self.category,
primary_keys,
identifiers,
self.primary_key_len,
self.is_custom,
)
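    # Illustrative sketch (values made up): with primary_key_len=2 and one primary
    # key already present, the first extra key completes the primary key and the
    # remainder become identifiers:
    #
    #     ident = IdentifierData("MyCog", "0", "MEMBER", ("123",), (), 2)
    #     child = ident.get_child("456", "foo")
    #     child.primary_key  -> ("123", "456")
    #     child.identifiers  -> ("foo",)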
def add_identifier(self, *identifier: str) -> "IdentifierData":
if not all(isinstance(i, str) for i in identifier):
raise ValueError("Identifiers must be strings.")
return IdentifierData(
self.cog_name,
self.uuid,
self.category,
self.primary_key,
self.identifiers + identifier,
self.primary_key_len,
is_custom=self.is_custom,
)
def to_tuple(self) -> Tuple[str, ...]:
return tuple(
filter(
None,
(self.cog_name, self.uuid, self.category, *self.primary_key, *self.identifiers),
)
)
class BaseDriver(abc.ABC):
def __init__(self, cog_name: str, identifier: str, **kwargs):
self.cog_name = cog_name
self.unique_cog_identifier = identifier
@classmethod
@abc.abstractmethod
async def initialize(cls, **storage_details) -> None:
"""
Initialize this driver.
Parameters
----------
**storage_details
The storage details required to initialize this driver.
Should be the same as :func:`data_manager.storage_details`
Raises
------
MissingExtraRequirements
If initializing the driver requires an extra which isn't
installed.
"""
raise NotImplementedError
@classmethod
@abc.abstractmethod
async def teardown(cls) -> None:
"""
Tear down this driver.
"""
raise NotImplementedError
@staticmethod
@abc.abstractmethod
def get_config_details() -> Dict[str, Any]:
"""
Asks users for additional configuration information necessary
to use this config driver.
Returns
-------
Dict[str, Any]
Dictionary of configuration details.
"""
raise NotImplementedError
@abc.abstractmethod
async def get(self, identifier_data: IdentifierData) -> Any:
"""
        Finds the value indicated by the given identifiers.
Parameters
----------
identifier_data
Returns
-------
Any
Stored value.
"""
raise NotImplementedError
@abc.abstractmethod
async def set(self, identifier_data: IdentifierData, value=None) -> None:
"""
Sets the value of the key indicated by the given identifiers.
Parameters
----------
identifier_data
value
Any JSON serializable python object.
"""
raise NotImplementedError
@abc.abstractmethod
async def clear(self, identifier_data: IdentifierData) -> None:
"""
Clears out the value specified by the given identifiers.
Equivalent to using ``del`` on a dict.
Parameters
----------
identifier_data
"""
raise NotImplementedError
@classmethod
@abc.abstractmethod
def aiter_cogs(cls) -> AsyncIterator[Tuple[str, str]]:
"""Get info for cogs which have data stored on this backend.
Yields
------
Tuple[str, str]
Asynchronously yields (cog_name, cog_identifier) tuples.
"""
raise NotImplementedError
@classmethod
async def migrate_to(
cls,
new_driver_cls: Type["BaseDriver"],
all_custom_group_data: Dict[str, Dict[str, Dict[str, int]]],
) -> None:
"""Migrate data from this backend to another.
Both drivers must be initialized beforehand.
This will only move the data - no instance metadata is modified
as a result of this operation.
Parameters
----------
new_driver_cls
Subclass of `BaseDriver`.
all_custom_group_data : Dict[str, Dict[str, Dict[str, int]]]
Dict mapping cog names, to cog IDs, to custom groups, to
primary key lengths.
"""
# Backend-agnostic method of migrating from one driver to another.
async for cog_name, cog_id in cls.aiter_cogs():
this_driver = cls(cog_name, cog_id)
other_driver = new_driver_cls(cog_name, cog_id)
custom_group_data = all_custom_group_data.get(cog_name, {}).get(cog_id, {})
exported_data = await this_driver.export_data(custom_group_data)
await other_driver.import_data(exported_data, custom_group_data)
@classmethod
async def delete_all_data(cls, **kwargs) -> None:
"""Delete all data being stored by this driver.
The driver must be initialized before this operation.
The BaseDriver provides a generic method which may be overridden
by subclasses.
Parameters
----------
**kwargs
Driver-specific kwargs to change the way this method
operates.
"""
async for cog_name, cog_id in cls.aiter_cogs():
driver = cls(cog_name, cog_id)
await driver.clear(IdentifierData(cog_name, cog_id, "", (), (), 0))
@staticmethod
def _split_primary_key(
category: Union[ConfigCategory, str],
custom_group_data: Dict[str, int],
data: Dict[str, Any],
) -> List[Tuple[Tuple[str, ...], Dict[str, Any]]]:
pkey_len = ConfigCategory.get_pkey_info(category, custom_group_data)[0]
if pkey_len == 0:
return [((), data)]
def flatten(levels_remaining, currdata, parent_key=()):
items = []
for _k, _v in currdata.items():
new_key = parent_key + (_k,)
if levels_remaining > 1:
items.extend(flatten(levels_remaining - 1, _v, new_key).items())
else:
items.append((new_key, _v))
return dict(items)
ret = []
for k, v in flatten(pkey_len, data).items():
ret.append((k, v))
return ret
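    # Illustrative sketch: with a primary key length of 2 (the MEMBER category),
    # _split_primary_key() flattens two levels of nesting into primary-key tuples
    # (IDs made up):
    #
    #     data = {"123": {"456": {"foo": 1}}}
    #     BaseDriver._split_primary_key(ConfigCategory.MEMBER, {}, data)
    #     -> [(("123", "456"), {"foo": 1})]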
async def export_data(
self, custom_group_data: Dict[str, int]
) -> List[Tuple[str, Dict[str, Any]]]:
categories = [c.value for c in ConfigCategory]
categories.extend(custom_group_data.keys())
ret = []
for c in categories:
ident_data = IdentifierData(
self.cog_name,
self.unique_cog_identifier,
c,
(),
(),
*ConfigCategory.get_pkey_info(c, custom_group_data),
)
try:
data = await self.get(ident_data)
except KeyError:
continue
ret.append((c, data))
return ret
async def import_data(
self, cog_data: List[Tuple[str, Dict[str, Any]]], custom_group_data: Dict[str, int]
) -> None:
for category, all_data in cog_data:
splitted_pkey = self._split_primary_key(category, custom_group_data, all_data)
for pkey, data in splitted_pkey:
ident_data = IdentifierData(
self.cog_name,
self.unique_cog_identifier,
category,
pkey,
(),
*ConfigCategory.get_pkey_info(category, custom_group_data),
)
await self.set(ident_data, data)
|
import pytest
from homeassistant.components.device_tracker.config_entry import (
BaseTrackerEntity,
ScannerEntity,
)
from homeassistant.components.device_tracker.const import (
ATTR_SOURCE_TYPE,
DOMAIN,
SOURCE_TYPE_ROUTER,
)
from homeassistant.const import ATTR_BATTERY_LEVEL, STATE_HOME, STATE_NOT_HOME
from tests.common import MockConfigEntry
async def test_scanner_entity_device_tracker(hass):
"""Test ScannerEntity based device tracker."""
config_entry = MockConfigEntry(domain="test")
config_entry.add_to_hass(hass)
await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN)
await hass.async_block_till_done()
entity_id = "device_tracker.unnamed_device"
entity_state = hass.states.get(entity_id)
assert entity_state.attributes == {
ATTR_SOURCE_TYPE: SOURCE_TYPE_ROUTER,
ATTR_BATTERY_LEVEL: 100,
}
assert entity_state.state == STATE_NOT_HOME
entity = hass.data[DOMAIN].get_entity(entity_id)
entity.set_connected()
await hass.async_block_till_done()
entity_state = hass.states.get(entity_id)
assert entity_state.state == STATE_HOME
def test_scanner_entity():
"""Test coverage for base ScannerEntity entity class."""
entity = ScannerEntity()
with pytest.raises(NotImplementedError):
assert entity.source_type is None
with pytest.raises(NotImplementedError):
assert entity.is_connected is None
with pytest.raises(NotImplementedError):
assert entity.state == STATE_NOT_HOME
assert entity.battery_level is None
def test_base_tracker_entity():
"""Test coverage for base BaseTrackerEntity entity class."""
entity = BaseTrackerEntity()
with pytest.raises(NotImplementedError):
assert entity.source_type is None
assert entity.battery_level is None
with pytest.raises(NotImplementedError):
assert entity.state_attributes is None
|
from collections import namedtuple
import unittest
import logging
import numpy as np
from scipy.spatial.distance import cosine
from gensim.models.doc2vec import Doc2Vec
from gensim import utils
from gensim.models import translation_matrix
from gensim.models import KeyedVectors
from gensim.test.utils import datapath, get_tmpfile
class TestTranslationMatrix(unittest.TestCase):
def setUp(self):
self.source_word_vec_file = datapath("EN.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")
self.target_word_vec_file = datapath("IT.1-10.cbow1_wind5_hs0_neg10_size300_smpl1e-05.txt")
self.word_pairs = [
("one", "uno"), ("two", "due"), ("three", "tre"),
("four", "quattro"), ("five", "cinque"), ("seven", "sette"), ("eight", "otto"),
("dog", "cane"), ("pig", "maiale"), ("fish", "cavallo"), ("birds", "uccelli"),
("apple", "mela"), ("orange", "arancione"), ("grape", "acino"), ("banana", "banana"),
]
self.test_word_pairs = [("ten", "dieci"), ("cat", "gatto")]
self.source_word_vec = KeyedVectors.load_word2vec_format(self.source_word_vec_file, binary=False)
self.target_word_vec = KeyedVectors.load_word2vec_format(self.target_word_vec_file, binary=False)
def test_translation_matrix(self):
model = translation_matrix.TranslationMatrix(self.source_word_vec, self.target_word_vec, self.word_pairs)
model.train(self.word_pairs)
self.assertEqual(model.translation_matrix.shape, (300, 300))
def testPersistence(self):
"""Test storing/loading the entire model."""
tmpf = get_tmpfile('transmat-en-it.pkl')
model = translation_matrix.TranslationMatrix(self.source_word_vec, self.target_word_vec, self.word_pairs)
model.train(self.word_pairs)
model.save(tmpf)
loaded_model = translation_matrix.TranslationMatrix.load(tmpf)
self.assertTrue(np.allclose(model.translation_matrix, loaded_model.translation_matrix))
def test_translate_nn(self):
# Test the nearest neighbor retrieval method
model = translation_matrix.TranslationMatrix(self.source_word_vec, self.target_word_vec, self.word_pairs)
model.train(self.word_pairs)
test_source_word, test_target_word = zip(*self.test_word_pairs)
translated_words = model.translate(
test_source_word, topn=5, source_lang_vec=self.source_word_vec, target_lang_vec=self.target_word_vec,
)
for idx, item in enumerate(self.test_word_pairs):
self.assertTrue(item[1] in translated_words[item[0]])
def test_translate_gc(self):
# Test globally corrected neighbour retrieval method
model = translation_matrix.TranslationMatrix(self.source_word_vec, self.target_word_vec, self.word_pairs)
model.train(self.word_pairs)
test_source_word, test_target_word = zip(*self.test_word_pairs)
translated_words = model.translate(
test_source_word, topn=5, gc=1, sample_num=3,
source_lang_vec=self.source_word_vec, target_lang_vec=self.target_word_vec
)
for idx, item in enumerate(self.test_word_pairs):
self.assertTrue(item[1] in translated_words[item[0]])
def read_sentiment_docs(filename):
sentiment_document = namedtuple('SentimentDocument', 'words tags')
alldocs = [] # will hold all docs in original order
with utils.open(filename, mode='rb', encoding='utf-8') as alldata:
for line_no, line in enumerate(alldata):
tokens = utils.to_unicode(line).split()
words = tokens
tags = str(line_no)
alldocs.append(sentiment_document(words, tags))
return alldocs
class TestBackMappingTranslationMatrix(unittest.TestCase):
def setUp(self):
filename = datapath("alldata-id-10.txt")
train_docs = read_sentiment_docs(filename)
self.train_docs = train_docs
self.source_doc_vec = Doc2Vec(documents=train_docs[:5], vector_size=8, epochs=50, seed=1)
self.target_doc_vec = Doc2Vec(documents=train_docs, vector_size=8, epochs=50, seed=2)
def test_translation_matrix(self):
model = translation_matrix.BackMappingTranslationMatrix(
self.source_doc_vec, self.target_doc_vec, self.train_docs[:5],
)
transmat = model.train(self.train_docs[:5])
self.assertEqual(transmat.shape, (8, 8))
@unittest.skip(
"flaky test likely to be discarded when <https://github.com/RaRe-Technologies/gensim/issues/2977> "
"is addressed"
)
def test_infer_vector(self):
"""Test that translation gives similar results to traditional inference.
This may not be completely sensible/salient with such tiny data, but
replaces what seemed to me to be an ever-more-nonsensical test.
See <https://github.com/RaRe-Technologies/gensim/issues/2977> for discussion
of whether the class this supposedly tested even survives when the
TranslationMatrix functionality is better documented.
"""
model = translation_matrix.BackMappingTranslationMatrix(
self.source_doc_vec, self.target_doc_vec, self.train_docs[:5],
)
model.train(self.train_docs[:5])
backmapped_vec = model.infer_vector(self.target_doc_vec.dv[self.train_docs[5].tags[0]])
self.assertEqual(backmapped_vec.shape, (8, ))
d2v_inferred_vector = self.source_doc_vec.infer_vector(self.train_docs[5].words)
distance = cosine(backmapped_vec, d2v_inferred_vector)
self.assertLessEqual(distance, 0.1)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
import argparse
import json
from django.core.exceptions import ValidationError
from django.core.management.base import CommandError
from django.utils.text import slugify
from weblate.trans.models import Component, Project
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
"""Command for mass importing of repositories into Weblate based on JSON data."""
help = "imports projects based on JSON data"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--project", default=None, required=True, help=("Project where to operate")
)
parser.add_argument(
"--ignore",
default=False,
action="store_true",
help=("Ignore already existing entries"),
)
parser.add_argument(
"--update",
default=False,
action="store_true",
help=("Update already existing entries"),
)
parser.add_argument(
"--main-component",
default=None,
help=(
"Define which component will be used as main for the" " VCS repository"
),
)
parser.add_argument(
"json-file",
type=argparse.FileType("r"),
help="JSON file containing component defintion",
)
def handle(self, *args, **options): # noqa: C901
"""Automatic import of components."""
# Get project
try:
project = Project.objects.get(slug=options["project"])
except Project.DoesNotExist:
raise CommandError("Project does not exist!")
# Get main component
main_component = None
if options["main_component"]:
try:
main_component = Component.objects.get(
project=project, slug=options["main_component"]
)
except Component.DoesNotExist:
raise CommandError("Main component does not exist!")
try:
data = json.load(options["json-file"])
except ValueError:
raise CommandError("Failed to parse JSON file!")
finally:
options["json-file"].close()
allfields = {
field.name
for field in Component._meta.get_fields()
if field.editable and not field.is_relation
}
# Handle dumps from API
if "results" in data:
data = data["results"]
for item in data:
if "filemask" not in item or "name" not in item:
raise CommandError("Missing required fields in JSON!")
if "slug" not in item:
item["slug"] = slugify(item["name"])
if "repo" not in item:
if main_component is None:
raise CommandError("No main component and no repository URL!")
item["repo"] = main_component.get_repo_link_url()
try:
component = Component.objects.get(slug=item["slug"], project=project)
self.stderr.write(f"Component {component} already exists")
if options["ignore"]:
continue
if options["update"]:
for key in item:
if key not in allfields or key == "slug":
continue
setattr(component, key, item[key])
component.save()
continue
raise CommandError(
"Component already exists, use --ignore or --update!"
)
except Component.DoesNotExist:
params = {key: item[key] for key in allfields if key in item}
component = Component(project=project, **params)
try:
component.full_clean()
except ValidationError as error:
for key, value in error.message_dict.items():
self.stderr.write(
"Error in {}: {}".format(key, ", ".join(value))
)
raise CommandError("Component failed validation!")
component.save(force_insert=True)
self.stdout.write(
"Imported {} with {} translations".format(
component, component.translation_set.count()
)
)
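# Illustrative sketch of the JSON this command consumes (field names taken from
# the checks above, values made up):
#
#     [
#         {
#             "name": "Application",
#             "slug": "application",                    # optional, slugified from name if missing
#             "filemask": "po/*.po",                    # required
#             "repo": "https://example.com/repo.git"    # optional when --main-component is given
#         }
#     ]
#
# A dump from the Weblate API wrapped as {"results": [...]} is accepted as well.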
|
import pytest
from datetime import datetime
from unittest.mock import Mock, patch
from kombu.asynchronous.timer import Entry, Timer, to_timestamp
class test_to_timestamp:
def test_timestamp(self):
assert to_timestamp(3.13) == 3.13
def test_datetime(self):
assert to_timestamp(datetime.utcnow())
class test_Entry:
def test_call(self):
fun = Mock(name='fun')
tref = Entry(fun, (4, 4), {'moo': 'baz'})
tref()
fun.assert_called_with(4, 4, moo='baz')
def test_cancel(self):
tref = Entry(lambda x: x, (1,), {})
assert not tref.canceled
assert not tref.cancelled
tref.cancel()
assert tref.canceled
assert tref.cancelled
def test_repr(self):
        tref = Entry(lambda x: x, (1,), {})
assert repr(tref)
def test_hash(self):
assert hash(Entry(lambda: None))
def test_ordering(self):
# we don't care about results, just that it's possible
Entry(lambda x: 1) < Entry(lambda x: 2)
Entry(lambda x: 1) > Entry(lambda x: 2)
Entry(lambda x: 1) >= Entry(lambda x: 2)
Entry(lambda x: 1) <= Entry(lambda x: 2)
def test_eq(self):
x = Entry(lambda x: 1)
y = Entry(lambda x: 1)
assert x == x
assert x != y
class test_Timer:
def test_enter_exit(self):
x = Timer()
x.stop = Mock(name='timer.stop')
with x:
pass
x.stop.assert_called_with()
def test_supports_Timer_interface(self):
x = Timer()
x.stop()
tref = Mock()
x.cancel(tref)
tref.cancel.assert_called_with()
assert x.schedule is x
def test_handle_error(self):
from datetime import datetime
on_error = Mock(name='on_error')
s = Timer(on_error=on_error)
with patch('kombu.asynchronous.timer.to_timestamp') as tot:
tot.side_effect = OverflowError()
s.enter_at(Entry(lambda: None, (), {}),
eta=datetime.now())
s.enter_at(Entry(lambda: None, (), {}), eta=None)
s.on_error = None
with pytest.raises(OverflowError):
s.enter_at(Entry(lambda: None, (), {}),
eta=datetime.now())
on_error.assert_called_once()
exc = on_error.call_args[0][0]
assert isinstance(exc, OverflowError)
def test_call_repeatedly(self):
t = Timer()
try:
t.schedule.enter_after = Mock()
myfun = Mock()
myfun.__name__ = 'myfun'
t.call_repeatedly(0.03, myfun)
assert t.schedule.enter_after.call_count == 1
args1, _ = t.schedule.enter_after.call_args_list[0]
sec1, tref1, _ = args1
assert sec1 == 0.03
tref1()
assert t.schedule.enter_after.call_count == 2
args2, _ = t.schedule.enter_after.call_args_list[1]
sec2, tref2, _ = args2
assert sec2 == 0.03
tref2.canceled = True
tref2()
assert t.schedule.enter_after.call_count == 2
finally:
t.stop()
@patch('kombu.asynchronous.timer.logger')
def test_apply_entry_error_handled(self, logger):
t = Timer()
t.schedule.on_error = None
fun = Mock()
fun.side_effect = ValueError()
t.schedule.apply_entry(fun)
logger.error.assert_called()
def test_apply_entry_error_not_handled(self, stdouts):
t = Timer()
t.schedule.on_error = Mock()
fun = Mock()
fun.side_effect = ValueError()
t.schedule.apply_entry(fun)
fun.assert_called_with()
assert not stdouts.stderr.getvalue()
def test_enter_after(self):
t = Timer()
t._enter = Mock()
fun = Mock(name='fun')
time = Mock(name='time')
time.return_value = 10
t.enter_after(10, fun, time=time)
time.assert_called_with()
t._enter.assert_called_with(20, 0, fun)
def test_cancel(self):
t = Timer()
tref = Mock()
t.cancel(tref)
tref.cancel.assert_called_with()
|
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import RMSprop
from keras.utils import np_utils
from elephas.spark_model import SparkMLlibModel, load_spark_model
from elephas.utils.rdd_utils import to_labeled_point
import pytest
pytestmark = pytest.mark.usefixtures("spark_context")
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 3
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)[:1000]
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
# Compile model
rms = RMSprop()
model.compile(rms, 'categorical_crossentropy', ['acc'])
def test_serialization():
spark_model = SparkMLlibModel(
model, frequency='epoch', mode='synchronous', num_workers=2)
spark_model.save("test.h5")
load_spark_model("test.h5")
def test_mllib_model(spark_context):
# Build RDD from numpy features and labels
lp_rdd = to_labeled_point(spark_context, x_train,
y_train, categorical=True)
# Initialize SparkModel from Keras model and Spark context
spark_model = SparkMLlibModel(
model=model, frequency='epoch', mode='synchronous')
# Train Spark model
spark_model.fit(lp_rdd, epochs=5, batch_size=32, verbose=0,
validation_split=0.1, categorical=True, nb_classes=nb_classes)
# Evaluate Spark model by evaluating the underlying model
score = spark_model.master_network.evaluate(x_test, y_test, verbose=2)
print('Test accuracy:', score[1])
|
import logging
import secrets
from aiohttp.web import Request, Response
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.const import HTTP_OK
from homeassistant.core import callback
from homeassistant.helpers.network import get_url
from homeassistant.loader import bind_hass
from homeassistant.util.aiohttp import MockRequest
_LOGGER = logging.getLogger(__name__)
DOMAIN = "webhook"
URL_WEBHOOK_PATH = "/api/webhook/{webhook_id}"
WS_TYPE_LIST = "webhook/list"
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_LIST}
)
@callback
@bind_hass
def async_register(hass, domain, name, webhook_id, handler):
"""Register a webhook."""
handlers = hass.data.setdefault(DOMAIN, {})
if webhook_id in handlers:
raise ValueError("Handler is already defined!")
handlers[webhook_id] = {"domain": domain, "name": name, "handler": handler}
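# Illustrative usage sketch (handler and IDs made up): an integration registers a
# coroutine that receives (hass, webhook_id, request) and may return an aiohttp
# response, or None for a plain 200 OK:
#
#     async def handle_my_hook(hass, webhook_id, request):
#         return None
#
#     webhook_id = async_generate_id()
#     async_register(hass, "my_integration", "My hook", webhook_id, handle_my_hook)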
@callback
@bind_hass
def async_unregister(hass, webhook_id):
"""Remove a webhook."""
handlers = hass.data.setdefault(DOMAIN, {})
handlers.pop(webhook_id, None)
@callback
def async_generate_id():
"""Generate a webhook_id."""
return secrets.token_hex(32)
@callback
@bind_hass
def async_generate_url(hass, webhook_id):
"""Generate the full URL for a webhook_id."""
return "{}{}".format(
get_url(hass, prefer_external=True, allow_cloud=False),
async_generate_path(webhook_id),
)
@callback
def async_generate_path(webhook_id):
"""Generate the path component for a webhook_id."""
return URL_WEBHOOK_PATH.format(webhook_id=webhook_id)
@bind_hass
async def async_handle_webhook(hass, webhook_id, request):
"""Handle a webhook."""
handlers = hass.data.setdefault(DOMAIN, {})
webhook = handlers.get(webhook_id)
    # Always respond successfully so we do not reveal whether a hook exists.
if webhook is None:
if isinstance(request, MockRequest):
received_from = request.mock_source
else:
received_from = request.remote
_LOGGER.warning(
"Received message for unregistered webhook %s from %s",
webhook_id,
received_from,
)
# Look at content to provide some context for received webhook
# Limit to 64 chars to avoid flooding the log
content = await request.content.read(64)
_LOGGER.debug("%s...", content)
return Response(status=HTTP_OK)
try:
response = await webhook["handler"](hass, webhook_id, request)
if response is None:
response = Response(status=HTTP_OK)
return response
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error processing webhook %s", webhook_id)
return Response(status=HTTP_OK)
async def async_setup(hass, config):
"""Initialize the webhook component."""
hass.http.register_view(WebhookView)
hass.components.websocket_api.async_register_command(
WS_TYPE_LIST, websocket_list, SCHEMA_WS_LIST
)
return True
class WebhookView(HomeAssistantView):
"""Handle incoming webhook requests."""
url = URL_WEBHOOK_PATH
name = "api:webhook"
requires_auth = False
cors_allowed = True
async def _handle(self, request: Request, webhook_id):
"""Handle webhook call."""
_LOGGER.debug("Handling webhook %s payload for %s", request.method, webhook_id)
hass = request.app["hass"]
return await async_handle_webhook(hass, webhook_id, request)
head = _handle
post = _handle
put = _handle
@callback
def websocket_list(hass, connection, msg):
"""Return a list of webhooks."""
handlers = hass.data.setdefault(DOMAIN, {})
result = [
{"webhook_id": webhook_id, "domain": info["domain"], "name": info["name"]}
for webhook_id, info in handlers.items()
]
connection.send_message(websocket_api.result_message(msg["id"], result))
|
from random import choice
from string import ascii_lowercase
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
def test_minlength_and_maxlength_with_list(schema):
field = 'a_list_length'
min_length = schema[field]['minlength']
max_length = schema[field]['maxlength']
assert_fail(
{field: [1] * (min_length - 1)},
error=(
field,
(field, 'minlength'),
errors.MIN_LENGTH,
min_length,
(min_length - 1,),
),
)
for i in range(min_length, max_length):
assert_success({field: [1] * i})
assert_fail(
{field: [1] * (max_length + 1)},
error=(
field,
(field, 'maxlength'),
errors.MAX_LENGTH,
max_length,
(max_length + 1,),
),
)
def test_maxlength_fails(schema):
field = 'a_string'
max_length = schema[field]['maxlength']
value = "".join(choice(ascii_lowercase) for i in range(max_length + 1))
assert_fail(
document={field: value},
error=(
field,
(field, 'maxlength'),
errors.MAX_LENGTH,
max_length,
(len(value),),
),
)
def test_maxlength_with_bytestring_fails(schema):
field = 'a_bytestring'
max_length = schema[field]['maxlength']
value = b'\x00' * (max_length + 1)
assert_fail(
document={field: value},
error=(
field,
(field, 'maxlength'),
errors.MAX_LENGTH,
max_length,
(len(value),),
),
)
def test_minlength_fails(schema):
field = 'a_string'
min_length = schema[field]['minlength']
value = "".join(choice(ascii_lowercase) for i in range(min_length - 1))
assert_fail(
document={field: value},
error=(
field,
(field, 'minlength'),
errors.MIN_LENGTH,
min_length,
(len(value),),
),
)
def test_minlength_with_bytestring_fails(schema):
field = 'a_bytestring'
min_length = schema[field]['minlength']
value = b'\x00' * (min_length - 1)
assert_fail(
document={field: value},
error=(
field,
(field, 'minlength'),
errors.MIN_LENGTH,
min_length,
(len(value),),
),
)
def test_minlength_with_dict():
schema = {'dict': {'minlength': 1}}
assert_fail(document={'dict': {}}, schema=schema)
assert_success(document={'dict': {'foo': 'bar'}}, schema=schema)
|
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.forms import fields, widgets
from django.template import engines
from django.template.loader import select_template
from django.utils.html import format_html
from django.utils.module_loading import import_string
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from cms.plugin_pool import plugin_pool
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from cmsplugin_cascade.bootstrap4.buttons import ButtonFormMixin
from cmsplugin_cascade.strides import strides_plugin_map, strides_element_map, TextStridePlugin, TextStrideElement
from cmsplugin_cascade.icon.forms import IconFormMixin
from cmsplugin_cascade.link.config import LinkPluginBase, LinkFormMixin
from cmsplugin_cascade.link.plugin_base import LinkElementMixin
from cmsplugin_cascade.plugin_base import TransparentContainer
from cmsplugin_cascade.bootstrap4.buttons import BootstrapButtonMixin
from shop.cascade.plugin_base import ShopPluginBase, DialogFormPluginBase, DialogPluginBaseForm
from shop.conf import app_settings
from shop.models.cart import CartModel
from shop.modifiers.pool import cart_modifiers_pool
class ProceedButtonFormMixin(LinkFormMixin, IconFormMixin, ButtonFormMixin):
require_icon = False
LINK_TYPE_CHOICES = [
('cmspage', _("CMS Page")),
('NEXT_STEP', _("Next Step")),
('RELOAD_PAGE', _("Reload Page")),
('PURCHASE_NOW', _("Purchase Now")),
('DO_NOTHING', _("Do nothing")),
]
disable_invalid = fields.BooleanField(
label=_("Disable if invalid"),
required=False,
help_text=_("Disable button if any form in this set is invalid."),
)
class Meta:
entangled_fields = {'glossary': ['disable_invalid']}
class ShopProceedButton(BootstrapButtonMixin, LinkPluginBase):
"""
This button is used to proceed from one checkout step to the next one.
"""
name = _("Proceed Button")
parent_classes = ['BootstrapColumnPlugin', 'ProcessStepPlugin', 'ValidateSetOfFormsPlugin']
form = ProceedButtonFormMixin
model_mixins = (LinkElementMixin,)
ring_plugin = 'ProceedButtonPlugin'
class Media:
js = ['admin/js/jquery.init.js', 'shop/js/admin/proceedbuttonplugin.js']
@classmethod
def get_identifier(cls, instance):
return mark_safe(instance.glossary.get('link_content', ''))
def get_render_template(self, context, instance, placeholder):
if instance.link == 'NEXT_STEP':
button_template = 'next-step-button'
elif instance.link == 'RELOAD_PAGE':
button_template = 'reload-button'
elif instance.link == 'PURCHASE_NOW':
button_template = 'purchase-button'
elif instance.link == 'DO_NOTHING':
button_template = 'noop-button'
else:
button_template = 'proceed-button'
template_names = [
'{}/checkout/{}.html'.format(app_settings.APP_LABEL, button_template),
'shop/checkout/{}.html'.format(button_template),
]
return select_template(template_names)
plugin_pool.register_plugin(ShopProceedButton)
class CustomerFormPluginBase(DialogFormPluginBase):
"""
Base class for CustomerFormPlugin and GuestFormPlugin to share common methods.
"""
template_leaf_name = 'customer-{}.html'
cache = False
def get_form_data(self, context, instance, placeholder):
form_data = self.super(CustomerFormPluginBase, self).get_form_data(context, instance, placeholder)
form_data.update(instance=context['request'].customer)
return form_data
def get_render_template(self, context, instance, placeholder):
if 'error_message' in context:
return engines['django'].from_string('<p class="text-danger">{{ error_message }}</p>')
return self.super(CustomerFormPluginBase, self).get_render_template(context, instance, placeholder)
class CustomerFormPlugin(CustomerFormPluginBase):
"""
Provides the form to edit specific data stored in :class:`shop.model.customer.CustomerModel`,
if customer declared himself as registered.
"""
name = _("Customer Form")
form_class = app_settings.SHOP_CASCADE_FORMS['CustomerForm']
def render(self, context, instance, placeholder):
if not context['request'].customer.is_registered:
context['error_message'] = _("Only registered customers can access this form.")
return context
return self.super(CustomerFormPlugin, self).render(context, instance, placeholder)
DialogFormPluginBase.register_plugin(CustomerFormPlugin)
class GuestFormPlugin(CustomerFormPluginBase):
"""
Provides the form to edit specific data stored in model `Customer`, if customer declared
himself as guest.
"""
name = _("Guest Form")
form_class = app_settings.SHOP_CASCADE_FORMS['GuestForm']
def render(self, context, instance, placeholder):
        assert 'customer' in context, "Please add 'shop.context_processors.customer' to your TEMPLATES 'context_processors' settings."
if not context['customer'].is_guest:
context['error_message'] = _("Only guest customers can access this form.")
return context
return self.super(GuestFormPlugin, self).render(context, instance, placeholder)
DialogFormPluginBase.register_plugin(GuestFormPlugin)
class CheckoutAddressPluginForm(DialogPluginBaseForm):
ADDRESS_CHOICES = [
('shipping', _("Shipping")),
('billing', _("Billing")),
]
address_form = fields.ChoiceField(
choices=ADDRESS_CHOICES,
widget=widgets.RadioSelect,
label=_("Address Form"),
initial=ADDRESS_CHOICES[0][0]
)
allow_multiple = fields.BooleanField(
label=_("Multiple Addresses"),
initial=False,
required=False,
help_text=_("Allow the customer to add and edit multiple addresses."),
)
allow_use_primary = fields.BooleanField(
label=_("Use primary address"),
initial=False,
required=False,
help_text=_("Allow the customer to use the primary address, if this is the secondary form."),
)
class Meta:
entangled_fields = {'glossary': ['address_form', 'allow_multiple', 'allow_use_primary']}
class CheckoutAddressPlugin(DialogFormPluginBase):
name = _("Checkout Address Form")
form = CheckoutAddressPluginForm
# glossary_field_order = ['address_form', 'render_type', 'allow_multiple', 'allow_use_primary', 'headline_legend']
form_classes = [app_settings.SHOP_CASCADE_FORMS['ShippingAddressForm'], app_settings.SHOP_CASCADE_FORMS['BillingAddressForm']]
def get_form_class(self, instance):
if instance.glossary.get('address_form') == 'shipping':
return import_string(self.form_classes[0])
else: # address_form == billing
return import_string(self.form_classes[1])
def get_address(self, cart, instance):
if instance.glossary.get('address_form') == 'shipping':
if cart.shipping_address:
address = cart.shipping_address
else:
# fallback to another existing shipping address
FormClass = self.get_form_class(instance)
address = FormClass.get_model().objects.get_fallback(customer=cart.customer)
else: # address_form == billing
if cart.billing_address:
address = cart.billing_address
else:
# fallback to another existing billing address
FormClass = self.get_form_class(instance)
address = FormClass.get_model().objects.get_fallback(customer=cart.customer)
return address
def get_form_data(self, context, instance, placeholder):
form_data = self.super(CheckoutAddressPlugin, self).get_form_data(context, instance, placeholder)
if form_data.get('cart') is None:
raise PermissionDenied("Can not proceed to checkout without cart")
address = self.get_address(form_data['cart'], instance)
if instance.glossary.get('allow_multiple'):
form_data.update(multi_addr=True)
else:
form_data.update(multi_addr=False)
form_data.update(
instance=address,
initial={'active_priority': address.priority if address else 'add'},
allow_use_primary=instance.glossary.get('allow_use_primary', False)
)
return form_data
@classmethod
def get_identifier(cls, instance):
identifier = super().get_identifier(instance)
address_form = instance.glossary.get('address_form')
address_form = dict(cls.form.ADDRESS_CHOICES).get(address_form, '')
return format_html(pgettext_lazy('get_identifier', "for {} {}"), address_form, identifier)
def get_render_template(self, context, instance, placeholder):
addr_form = instance.glossary.get('address_form')
if addr_form not in ['shipping', 'billing']: # validate
addr_form = 'shipping'
render_type = instance.glossary.get('render_type')
if render_type not in ['form', 'summary']: # validate
render_type = 'form'
template_names = [
'{0}/checkout/{1}-address-{2}.html'.format(app_settings.APP_LABEL, addr_form, render_type),
'shop/checkout/{0}-address-{1}.html'.format(addr_form, render_type),
]
return select_template(template_names)
DialogFormPluginBase.register_plugin(CheckoutAddressPlugin)
class MethodPluginForm(DialogPluginBaseForm):
show_additional_charge = fields.BooleanField(
label=_("Show additional charge"),
initial=True,
required=False,
help_text=_("Add an extra line showing the additional charge depending on the chosen payment/shipping method."),
)
class Meta:
entangled_fields = {'glossary': ['show_additional_charge']}
class PaymentMethodFormPlugin(DialogFormPluginBase):
name = _("Payment Method Form")
form = MethodPluginForm
form_class = app_settings.SHOP_CASCADE_FORMS['PaymentMethodForm']
template_leaf_name = 'payment-method-{}.html'
def get_form_data(self, context, instance, placeholder):
form_data = self.super(PaymentMethodFormPlugin, self).get_form_data(context, instance, placeholder)
cart = form_data.get('cart')
if cart:
form_data.update(initial={'payment_modifier': cart.extra.get('payment_modifier')})
return form_data
def render(self, context, instance, placeholder):
self.super(PaymentMethodFormPlugin, self).render(context, instance, placeholder)
for payment_modifier in cart_modifiers_pool.get_payment_modifiers():
payment_modifier.update_render_context(context)
context['show_additional_charge'] = instance.glossary.get('show_additional_charge', False)
return context
if cart_modifiers_pool.get_payment_modifiers():
# Plugin is registered only if at least one payment modifier exists
DialogFormPluginBase.register_plugin(PaymentMethodFormPlugin)
class ShippingMethodFormPlugin(DialogFormPluginBase):
name = _("Shipping Method Form")
form = MethodPluginForm
form_class = app_settings.SHOP_CASCADE_FORMS['ShippingMethodForm']
template_leaf_name = 'shipping-method-{}.html'
def get_form_data(self, context, instance, placeholder):
form_data = self.super(ShippingMethodFormPlugin, self).get_form_data(context, instance, placeholder)
cart = form_data.get('cart')
if cart:
form_data.update(initial={'shipping_modifier': cart.extra.get('shipping_modifier')})
return form_data
def render(self, context, instance, placeholder):
self.super(ShippingMethodFormPlugin, self).render(context, instance, placeholder)
for shipping_modifier in cart_modifiers_pool.get_shipping_modifiers():
shipping_modifier.update_render_context(context)
context['show_additional_charge'] = instance.glossary.get('show_additional_charge', False)
return context
if cart_modifiers_pool.get_shipping_modifiers():
# Plugin is registered only if at least one shipping modifier exists
DialogFormPluginBase.register_plugin(ShippingMethodFormPlugin)
class ExtraAnnotationFormPlugin(DialogFormPluginBase):
name = _("Extra Annotation Form")
form_class = app_settings.SHOP_CASCADE_FORMS['ExtraAnnotationForm']
template_leaf_name = 'extra-annotation-{}.html'
def get_form_data(self, context, instance, placeholder):
form_data = self.super(ExtraAnnotationFormPlugin, self).get_form_data(context, instance, placeholder)
cart = form_data.get('cart')
if cart:
form_data.update(initial={'annotation': cart.extra.get('annotation', '')})
return form_data
DialogFormPluginBase.register_plugin(ExtraAnnotationFormPlugin)
class AcceptConditionMixin:
render_template = 'shop/checkout/accept-condition.html'
def render(self, context, instance, placeholder):
"""
Return the context to render a checkbox used to accept the terms and conditions
"""
request = context['request']
try:
cart = CartModel.objects.get_from_request(request)
cart.update(request)
except CartModel.DoesNotExist:
cart = None
request._plugin_order = getattr(request, '_plugin_order', 0) + 1
try:
FormClass = import_string(app_settings.SHOP_CASCADE_FORMS['AcceptConditionForm'])
except ImportError:
msg = "Can not import Form class. Please check your settings directive SHOP_CASCADE_FORMS['AcceptConditionForm']."
raise ImproperlyConfigured(msg)
form_data = {'cart': cart, 'initial': dict(plugin_id=instance.pk, plugin_order=request._plugin_order)}
bound_form = FormClass(**form_data)
context[bound_form.form_name] = bound_form
super().render(context, instance, placeholder)
accept_condition_form = context['accept_condition_form.plugin_{}'.format(instance.pk)]
# transfer the stored HTML content into the widget's label
accept_condition_form['accept'].field.label = mark_safe(context['body'])
accept_condition_form['accept'].field.widget.choice_label = accept_condition_form['accept'].field.label # Django < 1.11
context['accept_condition_form'] = accept_condition_form
return context
class AcceptConditionPlugin(AcceptConditionMixin, TextPlugin):
name = _("Accept Condition")
module = "Shop"
def get_admin_url_name(self, name):
model_name = 'acceptcondition'
url_name = "%s_%s_%s" % ('shop', model_name, name)
return url_name
class AcceptConditionMinion(AcceptConditionMixin, TextStridePlugin):
pass
plugin_pool.register_plugin(AcceptConditionPlugin)
strides_plugin_map['AcceptConditionPlugin'] = AcceptConditionMinion
strides_element_map['AcceptConditionPlugin'] = TextStrideElement
class RequiredFormFieldsPlugin(ShopPluginBase):
"""
This plugin renders a short text message, emphasizing that fields with a star are required.
"""
name = _("Required Form Fields")
template_leaf_name = 'required-form-fields.html'
parent_classes = ('BootstrapColumnPlugin',)
def get_render_template(self, context, instance, placeholder):
template_names = [
'{0}/checkout/{1}'.format(app_settings.APP_LABEL, self.template_leaf_name),
'shop/checkout/{}'.format(self.template_leaf_name),
]
return select_template(template_names)
plugin_pool.register_plugin(RequiredFormFieldsPlugin)
class ValidateSetOfFormsPlugin(TransparentContainer, ShopPluginBase):
"""
    This plugin wraps arbitrary forms into the Angular directive shopFormsSet.
    This is required to validate all enclosed forms, so that the proceed button
    remains disabled until every form is valid.
"""
name = _("Manage Set of Forms")
allow_children = True
alien_child_classes = True
def get_render_template(self, context, instance, placeholder):
return select_template([
'{}/checkout/forms-set.html'.format(app_settings.APP_LABEL),
'shop/checkout/forms-set.html',
])
plugin_pool.register_plugin(ValidateSetOfFormsPlugin)
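# Editor's note (illustrative only, not part of the original module): the
# get_render_template() methods above resolve templates by convention. For a
# CheckoutAddressPlugin configured with address_form='billing' and
# render_type='summary', the candidate templates are tried in this order:
#   '<APP_LABEL>/checkout/billing-address-summary.html'
#   'shop/checkout/billing-address-summary.html'
# where <APP_LABEL> stands for app_settings.APP_LABEL of the merchant project.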
|
try:
# Prefer simplejson
import simplejson as json
except ImportError:
import json
__all__ = ['json', 'encode', 'decode']
decode = json.JSONDecoder().decode
_encode = json.JSONEncoder().iterencode
def encode(value):
"""Encode to bytes."""
for chunk in _encode(value):
yield chunk.encode('utf-8')
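# Minimal usage sketch (editor's addition, not part of the original module):
# encode() yields UTF-8 byte chunks suitable for streaming a response, while
# decode() parses a complete JSON string back into Python objects.
if __name__ == '__main__':
    payload = {'status': 'ok', 'count': 3}
    raw = b''.join(encode(payload))
    assert decode(raw.decode('utf-8')) == payload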
|
import os
from typing import Dict, List, Text
from absl import flags
from perfkitbenchmarker import resource
flags.DEFINE_integer('edw_service_cluster_concurrency', 5,
'Number of queries to run concurrently on the cluster.')
flags.DEFINE_string('edw_service_cluster_snapshot', None,
'If set, the snapshot to restore as cluster.')
flags.DEFINE_string('edw_service_cluster_identifier', None,
'If set, the preprovisioned edw cluster.')
flags.DEFINE_string('edw_service_endpoint', None,
'If set, the preprovisioned edw cluster endpoint.')
flags.DEFINE_string('edw_service_cluster_db', None,
'If set, the db on cluster to use during the benchmark ('
'only applicable when using snapshots).')
flags.DEFINE_string('edw_service_cluster_user', None,
'If set, the user authorized on cluster (only applicable '
'when using snapshots).')
flags.DEFINE_string('edw_service_cluster_password', None,
'If set, the password authorized on cluster (only '
'applicable when using snapshots).')
flags.DEFINE_string('snowflake_snowsql_config_override_file', None,
                    'The SnowSQL configuration to use. '
'https://docs.snowflake.net/manuals/user-guide/snowsql-config.html#snowsql-config-file') # pylint: disable=line-too-long
flags.DEFINE_string('snowflake_connection', None,
                    'Named Snowflake connection defined in SnowSQL config file. '
'https://docs.snowflake.net/manuals/user-guide/snowsql-start.html#using-named-connections') # pylint: disable=line-too-long
flags.DEFINE_integer('edw_suite_iterations', 1, 'Number of suite iterations to perform.')
# TODO(user): Revisit flags for accepting query lists.
flags.DEFINE_string('edw_simultaneous_queries',
None, 'CSV list of simultaneous queries to benchmark.')
flags.DEFINE_integer('edw_simultaneous_query_submission_interval', 0,
'Simultaneous query submission interval in milliseconds.')
flags.DEFINE_string('edw_power_queries', None,
'CSV list of power queries to benchmark.')
flags.DEFINE_multi_string(
'concurrency_streams', [], 'List of all query streams to execute. Each '
'stream should be passed in separately and the queries should be comma '
'separated, e.g. --concurrency_streams=1,2,3 --concurrency_streams=3,2,1')
flags.DEFINE_string('snowflake_warehouse', None,
                    'A virtual warehouse, often referred to simply as a '
'warehouse, is a cluster of compute in Snowflake. '
'https://docs.snowflake.com/en/user-guide/warehouses.html') # pylint: disable=line-too-long
flags.DEFINE_string(
'snowflake_database', None,
'The hosted snowflake database to use during the benchmark.')
flags.DEFINE_string(
'snowflake_schema', None,
'The schema of the hosted snowflake database to use during the benchmark.')
flags.DEFINE_enum(
'snowflake_client_interface', 'JDBC', ['JDBC'],
'The Runtime Interface used when interacting with Snowflake.')
FLAGS = flags.FLAGS
TYPE_2_PROVIDER = dict([('athena', 'aws'), ('redshift', 'aws'),
('spectrum', 'aws'), ('snowflake_aws', 'aws'),
('bigquery', 'gcp'), ('endor', 'gcp'),
('bqfederated', 'gcp'),
('azuresqldatawarehouse', 'azure')])
TYPE_2_MODULE = dict([
('athena', 'perfkitbenchmarker.providers.aws.athena'),
('redshift', 'perfkitbenchmarker.providers.aws.redshift'),
('spectrum', 'perfkitbenchmarker.providers.aws.spectrum'),
('snowflake_aws', 'perfkitbenchmarker.providers.aws.snowflake'),
('bigquery', 'perfkitbenchmarker.providers.gcp.bigquery'),
('endor', 'perfkitbenchmarker.providers.gcp.bigquery'),
('bqfederated', 'perfkitbenchmarker.providers.gcp.bigquery'),
('azuresqldatawarehouse', 'perfkitbenchmarker.providers.azure.'
'azure_sql_data_warehouse')
])
DEFAULT_NUMBER_OF_NODES = 1
# The order of stages is important to the successful lifecycle completion.
EDW_SERVICE_LIFECYCLE_STAGES = ['create', 'load', 'query', 'delete']
SAMPLE_QUERY_PATH = '/tmp/sample.sql'
SAMPLE_QUERY = 'select * from INFORMATION_SCHEMA.TABLES;'
class EdwExecutionError(Exception):
"""Encapsulates errors encountered during execution of a query."""
class EdwClientInterface(object):
"""Defines the interface for EDW service clients.
Attributes:
client_vm: An instance of virtual_machine.BaseVirtualMachine used to
interface with the edw service.
"""
def __init__(self):
self.client_vm = None
def SetProvisionedAttributes(self, benchmark_spec):
"""Sets any attributes that were unknown during initialization."""
self.client_vm = benchmark_spec.vms[0]
self.client_vm.RemoteCommand('echo "\nMaxSessions 100" | '
'sudo tee -a /etc/ssh/sshd_config')
def Prepare(self, package_name: Text) -> None:
"""Prepares the client vm to execute query.
The default implementation raises an Error, to ensure client specific
installation and authentication of runner utilities.
Args:
package_name: String name of the package defining the preprovisioned data
(certificates, etc.) to extract and use during client vm preparation.
"""
raise NotImplementedError
def ExecuteQuery(self, query_name: Text) -> (float, Dict[str, str]):
"""Executes a query and returns performance details.
Args:
query_name: String name of the query to execute
Returns:
A tuple of (execution_time, execution details)
execution_time: A Float variable set to the query's completion time in
secs. -1.0 is used as a sentinel value implying the query failed. For a
successful query the value is expected to be positive.
performance_details: A dictionary of query execution attributes eg. job_id
"""
raise NotImplementedError
def ExecuteSimultaneous(self, submission_interval: int,
queries: List[str]) -> str:
"""Executes queries simultaneously on client and return performance details.
Simultaneous app expects queries as white space separated query file names.
Response format:
{"simultaneous_end":1601145943197,"simultaneous_start":1601145940113,
"stream_performance_array":[{"query_wall_time_in_secs":2.079,
"query_end":1601145942208,"job_id":"914682d9-4f64-4323-bad2-554267cbbd8d",
"query":"1","query_start":1601145940129},{"query_wall_time_in_secs":2.572,
"query_end":1601145943192,"job_id":"efbf93a1-614c-4645-a268-e3801ae994f1",
"query":"2","query_start":1601145940620}],
"simultaneous_wall_time_in_secs":3.084}
Args:
submission_interval: Simultaneous query submission interval in milliseconds.
queries: List of string names of the queries to execute simultaneously.
Returns:
performance_details: A serialized dictionary of execution details.
"""
raise NotImplementedError
def ExecuteThroughput(self, concurrency_streams: List[List[str]]) -> str:
"""Executes a throughput test and returns performance details.
Response format:
{"throughput_start":1601666911596,"throughput_end":1601666916139,
"throughput_wall_time_in_secs":4.543,
"all_streams_performance_array":[
{"stream_start":1601666911597,"stream_end":1601666916139,
"stream_wall_time_in_secs":4.542,
"stream_performance_array":[
{"query_wall_time_in_secs":2.238,"query_end":1601666913849,
"query":"1","query_start":1601666911611,
"details":{"job_id":"438170b0-b0cb-4185-b733-94dd05b46b05"}},
{"query_wall_time_in_secs":2.285,"query_end":1601666916139,
"query":"2","query_start":1601666913854,
"details":{"job_id":"371902c7-5964-46f6-9f90-1dd00137d0c8"}}
]},
{"stream_start":1601666911597,"stream_end":1601666916018,
"stream_wall_time_in_secs":4.421,
"stream_performance_array":[
{"query_wall_time_in_secs":2.552,"query_end":1601666914163,
"query":"2","query_start":1601666911611,
"details":{"job_id":"5dcba418-d1a2-4a73-be70-acc20c1f03e6"}},
{"query_wall_time_in_secs":1.855,"query_end":1601666916018,
"query":"1","query_start":1601666914163,
"details":{"job_id":"568c4526-ae26-4e9d-842c-03459c3a216d"}}
]}
]}
Args:
concurrency_streams: List of streams to execute simultaneously, each of
which is a list of string names of queries.
Returns:
A serialized dictionary of execution details.
"""
raise NotImplementedError
def WarmUpQuery(self):
"""Executes a service-agnostic query that can detect cold start issues."""
with open(SAMPLE_QUERY_PATH, 'w+') as f:
f.write(SAMPLE_QUERY)
self.client_vm.PushFile(SAMPLE_QUERY_PATH)
query_name = os.path.basename(SAMPLE_QUERY_PATH)
self.ExecuteQuery(query_name)
def GetMetadata(self) -> Dict[str, str]:
"""Returns the client interface metadata."""
raise NotImplementedError
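# Illustrative sketch only (editor's addition, not part of the original
# module): a minimal EdwClientInterface subclass showing the contract defined
# by the abstract methods above. The runner command used here is a made-up
# placeholder, not a real PKB package.
class _ExampleClientInterface(EdwClientInterface):
  """Toy client interface illustrating the expected structure."""
  def Prepare(self, package_name: Text) -> None:
    # A real client installs drivers and credentials on the client VM here.
    self.client_vm.Install(package_name)
  def ExecuteQuery(self, query_name: Text) -> (float, Dict[str, str]):
    # 'hypothetical_edw_runner' is a placeholder command, not a real tool.
    stdout, _ = self.client_vm.RemoteCommand(
        'hypothetical_edw_runner --query_file=%s' % query_name)
    runtime = float(stdout.strip()) if stdout.strip() else -1.0
    return runtime, {'raw_output': stdout}
  def GetMetadata(self) -> Dict[str, str]:
    return {'client': 'example'}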
class EdwService(resource.BaseResource):
"""Object representing a EDW Service."""
def __init__(self, edw_service_spec):
"""Initialize the edw service object.
Args:
edw_service_spec: spec of the edw service.
"""
# Hand over the actual creation to the resource module, which assumes the
# resource is pkb managed by default
is_user_managed = self.IsUserManaged(edw_service_spec)
# edw_service attribute
self.cluster_identifier = self.GetClusterIdentifier(edw_service_spec)
super(EdwService, self).__init__(user_managed=is_user_managed)
# Provision related attributes
if edw_service_spec.snapshot:
self.snapshot = edw_service_spec.snapshot
else:
self.snapshot = None
# Cluster related attributes
self.concurrency = edw_service_spec.concurrency
self.node_type = edw_service_spec.node_type
if edw_service_spec.node_count:
self.node_count = edw_service_spec.node_count
else:
self.node_count = DEFAULT_NUMBER_OF_NODES
# Interaction related attributes
if edw_service_spec.endpoint:
self.endpoint = edw_service_spec.endpoint
else:
self.endpoint = ''
self.db = edw_service_spec.db
self.user = edw_service_spec.user
self.password = edw_service_spec.password
# resource config attribute
self.spec = edw_service_spec
# resource workflow management
self.supports_wait_on_delete = True
self.client_interface = None
def GetClientInterface(self) -> EdwClientInterface:
"""Gets the active Client Interface."""
return self.client_interface
def IsUserManaged(self, edw_service_spec):
"""Indicates if the edw service instance is user managed.
Args:
edw_service_spec: spec of the edw service.
Returns:
A boolean, set to True if the edw service instance is user managed, False
otherwise.
"""
return edw_service_spec.cluster_identifier is not None
def GetClusterIdentifier(self, edw_service_spec):
"""Returns a string name of the Cluster Identifier.
Args:
edw_service_spec: spec of the edw service.
Returns:
A string, set to the name of the cluster identifier.
"""
if self.IsUserManaged(edw_service_spec):
return edw_service_spec.cluster_identifier
else:
return 'pkb-' + FLAGS.run_uri
def GetMetadata(self):
"""Return a dictionary of the metadata for this edw service."""
basic_data = {'edw_service_type': self.spec.type,
'edw_cluster_identifier': self.cluster_identifier,
'edw_cluster_node_type': self.node_type,
'edw_cluster_node_count': self.node_count}
return basic_data
def GenerateLifecycleStageScriptName(self, lifecycle_stage):
"""Computes the default name for script implementing an edw lifecycle stage.
Args:
lifecycle_stage: Stage for which the corresponding sql script is desired.
Returns:
script name for implementing the argument lifecycle_stage.
"""
return os.path.basename(
os.path.normpath('database_%s.sql' % lifecycle_stage))
def Cleanup(self):
"""Cleans up any temporary resources created for the service."""
pass
def GetDatasetLastUpdatedTime(self, dataset=None):
"""Get the formatted last modified timestamp of the dataset."""
raise NotImplementedError
def ExtractDataset(self,
dest_bucket,
dataset=None,
tables=None,
dest_format='CSV'):
"""Extract all tables in a dataset to object storage.
Args:
dest_bucket: Name of the bucket to extract the data to. Should already
exist.
dataset: Optional name of the dataset. If none, will be determined by the
service.
tables: Optional list of table names to extract. If none, all tables in
the dataset will be extracted.
dest_format: Format to extract data in.
"""
raise NotImplementedError
def RemoveDataset(self, dataset=None):
"""Removes a dataset.
Args:
dataset: Optional name of the dataset. If none, will be determined by the
service.
"""
raise NotImplementedError
def CreateDataset(self, dataset=None, description=None):
"""Creates a new dataset.
Args:
dataset: Optional name of the dataset. If none, will be determined by the
service.
description: Optional description of the dataset.
"""
raise NotImplementedError
def LoadDataset(self, source_bucket, tables, dataset=None):
"""Load all tables in a dataset to a database from object storage.
Args:
source_bucket: Name of the bucket to load the data from. Should already
exist. Each table must have its own subfolder in the bucket named after
the table, containing one or more csv files that make up the table data.
tables: List of table names to load.
dataset: Optional name of the dataset. If none, will be determined by the
service.
"""
raise NotImplementedError
def RequiresWarmUpSuite(self) -> bool:
"""Verifies if the edw_service requires a warm up suite execution.
Currently enabled for all service types, for parity.
Returns:
      A boolean value (True) if the warm-up suite is recommended.
"""
return True
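# Editor's note (usage sketch, not part of the original module): TYPE_2_PROVIDER
# and TYPE_2_MODULE defined above let callers resolve the cloud provider and
# the implementing module for a configured edw service type, e.g.
#   import importlib
#   provider = TYPE_2_PROVIDER['bigquery']                  # -> 'gcp'
#   edw_module = importlib.import_module(TYPE_2_MODULE['bigquery'])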
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import deque
import contextlib
import unittest
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker import requirements
import pkg_resources
import six
from six.moves import map
_PATH = 'dir/file'
class _MockOpenRequirementsFile(object):
def __init__(self, *args):
self._io = deque(six.StringIO(a) for a in args)
def __enter__(self):
return self._io.popleft()
def __exit__(self, *unused_args, **unused_kwargs):
pass
class CheckRequirementsTestCase(unittest.TestCase):
@contextlib.contextmanager
def _MockOpen(self, *args):
mocked_file = _MockOpenRequirementsFile(*args)
with mock.patch.object(requirements, 'open', return_value=mocked_file) as m:
yield m
def testFulfilledRequirement(self):
requirements_content = """
# Comment line, blank line, and a fulfilled requirement.
absl-py
"""
with self._MockOpen(requirements_content) as mocked_open:
requirements._CheckRequirements(_PATH)
mocked_open.assert_called_once_with('dir/file', 'r')
def testMissingPackage(self):
requirements_content = """
# This is not a real package.
perfkitbenchmarker-fake-package>=1.2
"""
with self._MockOpen(requirements_content) as mocked_open:
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
requirements._CheckRequirements(_PATH)
mocked_open.assert_called_once_with('dir/file', 'r')
def testInstalledVersionLowerThanRequirement(self):
requirements_content = """
# The version of the installed absl-py will be less than 42.
absl-py>=42
"""
with self._MockOpen(requirements_content) as mocked_open:
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
requirements._CheckRequirements(_PATH)
mocked_open.assert_called_once_with('dir/file', 'r')
def testInstalledVersionGreaterThanRequirement(self):
requirements_content = """
# The version of the installed absl-py will be greater than 0.0.1.
absl-py==0.0.1
"""
with self._MockOpen(requirements_content) as mocked_open:
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
requirements._CheckRequirements(_PATH)
mocked_open.assert_called_once_with('dir/file', 'r')
def testIncludedFiles(self):
top_file = """
package-0
-rsubfile0
package-1>=2.0
-rsubfile1
package-2
"""
subfile0 = """
package-3
-rsubdir/subfile2
package-4
-r../subfile3
package-5
"""
subfile1 = """
package-6
"""
with self._MockOpen(top_file, subfile0, '', '', subfile1) as mocked_open:
with mock.patch.object(pkg_resources, 'require') as mocked_require:
requirements._CheckRequirements(_PATH)
mocked_open.assert_has_calls((
mock.call('dir/file', 'r'), mock.call('dir/subfile0', 'r'),
mock.call('dir/subdir/subfile2', 'r'),
mock.call('dir/../subfile3', 'r'), mock.call('dir/subfile1', 'r')))
mocked_require.assert_has_calls(
list(
map(mock.call, ('package-0', 'package-3', 'package-4', 'package-5',
'package-1>=2.0', 'package-6', 'package-2'))))
class CheckBasicRequirementsTestCase(unittest.TestCase):
def testAllRequirementsFulfilled(self):
requirements.CheckBasicRequirements()
class CheckProviderRequirementsTestCase(unittest.TestCase):
def testNoRequirementsFile(self):
# If a provider does not have a requirements file, then there can be no
# unfulfilled requirement.
requirements.CheckProviderRequirements('fakeprovider')
def testUnfulfilledRequirements(self):
# AWS does have a requirements file, but it contains packages that are not
# installed as part of the test environment.
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
requirements.CheckProviderRequirements('aws')
if __name__ == '__main__':
unittest.main()
|
from tqdm import tqdm
from . import units
from matchzoo import DataPack
from matchzoo.engine.base_preprocessor import BasePreprocessor
from .build_vocab_unit import build_vocab_unit
from .build_unit_from_data_pack import build_unit_from_data_pack
from .chain_transform import chain_transform
tqdm.pandas()
class BasicPreprocessor(BasePreprocessor):
"""
    Basic preprocessor helper.
    :param fixed_length_left: Integer, maximum length of :attr:`left` in the
        data_pack.
    :param fixed_length_right: Integer, maximum length of :attr:`right` in the
        data_pack.
    :param filter_mode: String, mode used by :class:`FrequencyFilter`, can
        be 'df', 'cf', or 'idf'.
    :param filter_low_freq: Float, lower bound value used by
        :class:`FrequencyFilter`.
    :param filter_high_freq: Float, upper bound value used by
        :class:`FrequencyFilter`.
    :param remove_stop_words: Bool, whether to use the :class:`StopRemoval`
        unit.
Example:
>>> import matchzoo as mz
>>> train_data = mz.datasets.toy.load_data('train')
>>> test_data = mz.datasets.toy.load_data('test')
>>> preprocessor = mz.preprocessors.BasicPreprocessor(
... fixed_length_left=10,
... fixed_length_right=20,
... filter_mode='df',
... filter_low_freq=2,
... filter_high_freq=1000,
... remove_stop_words=True
... )
>>> preprocessor = preprocessor.fit(train_data, verbose=0)
>>> preprocessor.context['input_shapes']
[(10,), (20,)]
>>> preprocessor.context['vocab_size']
228
>>> processed_train_data = preprocessor.transform(train_data,
... verbose=0)
>>> type(processed_train_data)
<class 'matchzoo.data_pack.data_pack.DataPack'>
>>> test_data_transformed = preprocessor.transform(test_data,
... verbose=0)
>>> type(test_data_transformed)
<class 'matchzoo.data_pack.data_pack.DataPack'>
"""
def __init__(self, fixed_length_left: int = 30,
fixed_length_right: int = 30,
filter_mode: str = 'df',
filter_low_freq: float = 2,
filter_high_freq: float = float('inf'),
remove_stop_words: bool = False):
"""Initialization."""
super().__init__()
self._fixed_length_left = fixed_length_left
self._fixed_length_right = fixed_length_right
self._left_fixedlength_unit = units.FixedLength(
self._fixed_length_left,
pad_mode='post'
)
self._right_fixedlength_unit = units.FixedLength(
self._fixed_length_right,
pad_mode='post'
)
self._filter_unit = units.FrequencyFilter(
low=filter_low_freq,
high=filter_high_freq,
mode=filter_mode
)
self._units = self._default_units()
if remove_stop_words:
self._units.append(units.stop_removal.StopRemoval())
def fit(self, data_pack: DataPack, verbose: int = 1):
"""
Fit pre-processing context for transformation.
:param data_pack: data_pack to be preprocessed.
:param verbose: Verbosity.
:return: class:`BasicPreprocessor` instance.
"""
data_pack = data_pack.apply_on_text(chain_transform(self._units),
verbose=verbose)
fitted_filter_unit = build_unit_from_data_pack(self._filter_unit,
data_pack,
flatten=False,
mode='right',
verbose=verbose)
data_pack = data_pack.apply_on_text(fitted_filter_unit.transform,
mode='right', verbose=verbose)
self._context['filter_unit'] = fitted_filter_unit
vocab_unit = build_vocab_unit(data_pack, verbose=verbose)
self._context['vocab_unit'] = vocab_unit
vocab_size = len(vocab_unit.state['term_index'])
self._context['vocab_size'] = vocab_size
self._context['embedding_input_dim'] = vocab_size
self._context['input_shapes'] = [(self._fixed_length_left,),
(self._fixed_length_right,)]
return self
def transform(self, data_pack: DataPack, verbose: int = 1) -> DataPack:
"""
Apply transformation on data, create fixed length representation.
:param data_pack: Inputs to be preprocessed.
:param verbose: Verbosity.
:return: Transformed data as :class:`DataPack` object.
"""
data_pack = data_pack.copy()
data_pack.apply_on_text(chain_transform(self._units), inplace=True,
verbose=verbose)
data_pack.apply_on_text(self._context['filter_unit'].transform,
mode='right', inplace=True, verbose=verbose)
data_pack.apply_on_text(self._context['vocab_unit'].transform,
mode='both', inplace=True, verbose=verbose)
data_pack.append_text_length(inplace=True, verbose=verbose)
data_pack.apply_on_text(self._left_fixedlength_unit.transform,
mode='left', inplace=True, verbose=verbose)
data_pack.apply_on_text(self._right_fixedlength_unit.transform,
mode='right', inplace=True, verbose=verbose)
max_len_left = self._fixed_length_left
max_len_right = self._fixed_length_right
data_pack.left['length_left'] = \
data_pack.left['length_left'].apply(
lambda val: min(val, max_len_left))
data_pack.right['length_right'] = \
data_pack.right['length_right'].apply(
lambda val: min(val, max_len_right))
return data_pack
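# Editor's note: after fit(), self._context (exposed as preprocessor.context)
# carries everything needed to build a model from this preprocessing run, e.g.
#   preprocessor.context['input_shapes']        -> [(fixed_length_left,), (fixed_length_right,)]
#   preprocessor.context['vocab_size']          -> number of terms in the fitted vocabulary
#   preprocessor.context['embedding_input_dim'] -> same value, fed to embedding layers
# as exercised by the doctest in the class docstring above.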
|
from datetime import datetime as dt
import pytest
from mock import patch
from arctic.date import datetime_to_ms, ms_to_datetime, mktz, to_pandas_closed_closed, DateRange, OPEN_OPEN, \
CLOSED_CLOSED
from arctic.date._util import to_dt, utc_dt_to_local_dt
@pytest.mark.parametrize('pdt', [
dt(2007, 3, 25, 1, tzinfo=mktz('Europe/London')),
dt(2004, 10, 31, 23, 3, tzinfo=mktz('Europe/London')),
dt(1990, 4, 5, 0, 0, tzinfo=mktz('Europe/London')),
dt(2007, 3, 25, 1, tzinfo=mktz('EST')),
dt(2004, 10, 31, 23, 3, tzinfo=mktz('EST')),
dt(1990, 4, 5, 0, 0, tzinfo=mktz('EST')),
]
)
@pytest.mark.parametrize('local_tz', [
mktz('EST'),
mktz('CET'),
mktz('Asia/Tokyo'),
mktz('Cuba'),
mktz('US/Alaska'),
mktz(),
mktz('Europe/London'),
]
)
def test_datetime_to_ms_and_back(pdt, local_tz):
i = datetime_to_ms(pdt)
pdt = pdt.astimezone(local_tz)
pdt2 = ms_to_datetime(i, tzinfo=local_tz)
assert datetime_to_ms(pdt) == datetime_to_ms(pdt2)
def test_datetime_to_ms_and_back_microseconds():
pdt = dt(2012, 8, 1, 12, 34, 56, 999999, tzinfo=mktz())
i = datetime_to_ms(pdt)
pdt2 = ms_to_datetime(i)
assert pdt != pdt2
assert pdt.year == pdt2.year
assert pdt.month == pdt2.month
assert pdt.day == pdt2.day
assert pdt.hour == pdt2.hour
assert pdt.minute == pdt2.minute
assert pdt.second == pdt2.second
# Microsecond precision loss inevitable.
assert pdt.microsecond // 1000 == pdt2.microsecond // 1000
def test_daterange_closedclosed_None():
assert to_pandas_closed_closed(None) is None
def test_daterange_closedclosed():
date_range = DateRange(dt(2013, 1, 1, tzinfo=mktz('Europe/London')),
dt(2014, 2, 1, tzinfo=mktz('Europe/London')), OPEN_OPEN)
expected = DateRange(dt(2013, 1, 1, 0, 0, 0, 1000, tzinfo=mktz('Europe/London')),
dt(2014, 1, 31, 23, 59, 59, 999000, tzinfo=mktz('Europe/London')),
CLOSED_CLOSED)
act = to_pandas_closed_closed(date_range)
assert act == expected
def test_daterange_closedclosed_no_tz():
date_range = DateRange(dt(2013, 1, 1),
dt(2014, 2, 1), OPEN_OPEN)
expected = DateRange(dt(2013, 1, 1, 0, 0, 0, 1000, tzinfo=mktz()),
dt(2014, 1, 31, 23, 59, 59, 999000, tzinfo=mktz()),
CLOSED_CLOSED)
act = to_pandas_closed_closed(date_range)
assert act == expected
def test_to_dt_0():
assert to_dt(0) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_0_default():
assert to_dt(0, mktz('UTC')) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_dt_no_tz():
with pytest.raises(ValueError):
assert to_dt(dt(1970, 1, 1)) == dt(1970, 1, 1, tzinfo=mktz())
def test_to_dt_dt_no_tz_default():
assert to_dt(dt(1970, 1, 1), mktz('UTC')) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_dt_tz():
assert to_dt(dt(1970, 1, 1, tzinfo=mktz('UTC'))) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_dt_tz_default():
assert to_dt(dt(1970, 1, 1, tzinfo=mktz('UTC')), mktz('Europe/London')) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_daterange_raises():
with pytest.raises(ValueError):
assert(DateRange(dt(2013, 1, 1), dt(2000, 1, 1)))
def test_daterange_eq():
dr = DateRange(dt(2013, 1, 1))
assert((dr == None) == False)
assert(dr == dr)
def test_daterange_lt():
dr = DateRange(dt(2013, 1, 1))
dr2 = DateRange(dt(2001, 1, 1))
assert(dr2 < dr)
dr.start = None
assert((dr2 < dr) == False)
@patch("arctic.date._util.mktz", lambda zone="Asia/Shanghai": mktz(zone))
def test_utc_dt_to_local_dt():
with pytest.raises(ValueError):
assert(utc_dt_to_local_dt(
dt(2000, 1, 1, 0, 0, 0, tzinfo=mktz("Asia/Shanghai"))
))
utc_time = dt(2000, 1, 1, 10, 0, 0)
pek_time = utc_dt_to_local_dt(utc_time) # GMT +0800
assert(pek_time.hour - utc_time.hour == 8)
|
import math
import numpy as np
from matchzoo.engine.base_metric import BaseMetric, sort_and_couple
class DiscountedCumulativeGain(BaseMetric):
"""Disconunted cumulative gain metric."""
ALIAS = ['discounted_cumulative_gain', 'dcg']
def __init__(self, k: int = 1, threshold: float = 0.):
"""
:class:`DiscountedCumulativeGain` constructor.
:param k: Number of results to consider.
:param threshold: the label threshold of relevance degree.
"""
self._k = k
self._threshold = threshold
def __repr__(self) -> str:
""":return: Formated string representation of the metric."""
return f"{self.ALIAS[0]}@{self._k}({self._threshold})"
def __call__(self, y_true: np.array, y_pred: np.array) -> float:
"""
Calculate discounted cumulative gain (dcg).
        Relevance labels are positive real values or binary values.
Example:
>>> y_true = [0, 1, 2, 0]
>>> y_pred = [0.4, 0.2, 0.5, 0.7]
>>> DiscountedCumulativeGain(1)(y_true, y_pred)
0.0
>>> round(DiscountedCumulativeGain(k=-1)(y_true, y_pred), 2)
0.0
>>> round(DiscountedCumulativeGain(k=2)(y_true, y_pred), 2)
2.73
>>> round(DiscountedCumulativeGain(k=3)(y_true, y_pred), 2)
2.73
>>> type(DiscountedCumulativeGain(k=1)(y_true, y_pred))
<class 'float'>
:param y_true: The ground true label of each document.
:param y_pred: The predicted scores of each document.
:return: Discounted cumulative gain.
"""
if self._k <= 0:
return 0.
coupled_pair = sort_and_couple(y_true, y_pred)
result = 0.
for i, (label, score) in enumerate(coupled_pair):
if i >= self._k:
break
if label > self._threshold:
result += (math.pow(2., label) - 1.) / math.log(2. + i)
return result
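# Worked example (editor's addition): with y_true = [0, 1, 2, 0] and
# y_pred = [0.4, 0.2, 0.5, 0.7], sorting by prediction score gives labels
# [0, 2, 0, 1]. For k=2 only the second ranked item exceeds the default
# threshold, so DCG = (2**2 - 1) / log(2 + 1) = 3 / 1.0986 ~ 2.73, which is
# the value shown in the doctest above.
if __name__ == '__main__':
    dcg = DiscountedCumulativeGain(k=2)
    print(round(dcg([0, 1, 2, 0], [0.4, 0.2, 0.5, 0.7]), 2))  # 2.73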
|
from test import CollectorTestCase, get_collector_config
from mock import MagicMock, patch
from pgq import PgQCollector
class TestPgQCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PgQCollector', {})
self.collector = PgQCollector(config, None)
def test_import(self):
self.assertTrue(PgQCollector)
@patch.object(PgQCollector, 'publish')
@patch.object(PgQCollector, 'get_consumer_info')
@patch.object(PgQCollector, 'get_queue_info')
def test_collect(self, get_queue_info, get_consumer_info, publish):
get_queue_info.return_value = iter([
('q1', {
'ticker_lag': 1,
'ev_per_sec': 2,
}),
('q2', {
'ticker_lag': 3,
'ev_per_sec': 4,
}),
])
get_consumer_info.return_value = iter([
('q1', 'c1', {
'lag': 1,
'pending_events': 2,
'last_seen': 3,
}),
('q2', 'c1', {
'lag': 4,
'pending_events': 5,
'last_seen': 6,
}),
])
self.collector._collect_for_instance('db1', connection=MagicMock())
self.assertPublished(publish, 'db1.q1.ticker_lag', 1)
self.assertPublished(publish, 'db1.q1.ev_per_sec', 2)
self.assertPublished(publish, 'db1.q2.ticker_lag', 3)
self.assertPublished(publish, 'db1.q2.ev_per_sec', 4)
self.assertPublished(publish, 'db1.q1.consumers.c1.lag', 1)
self.assertPublished(publish, 'db1.q1.consumers.c1.pending_events', 2)
self.assertPublished(publish, 'db1.q1.consumers.c1.last_seen', 3)
self.assertPublished(publish, 'db1.q2.consumers.c1.lag', 4)
self.assertPublished(publish, 'db1.q2.consumers.c1.pending_events', 5)
self.assertPublished(publish, 'db1.q2.consumers.c1.last_seen', 6)
|
import unittest
import numpy.testing as np_test
import pandas as pd
import numpy as np
from pgmpy.factors.continuous import LinearGaussianCPD
class TestLGCPD(unittest.TestCase):
# @unittest.skip("TODO")
def test_class_init(self):
mu = np.array([7, 13])
sigma = np.array([[4, 3], [3, 6]])
cpd1 = LinearGaussianCPD(
"Y", evidence_mean=mu, evidence_variance=sigma, evidence=["X1", "X2"]
)
self.assertEqual(cpd1.variable, "Y")
self.assertEqual(cpd1.evidence, ["X1", "X2"])
def test_maximum_likelihood_estimator(self):
# Obtain the X and Y which are jointly gaussian from the distribution
# beta = [2, 0.7, 0.3]
sigma_c = 4
x_df = pd.read_csv("pgmpy/tests/test_factors/test_continuous/gbn_values_1.csv")
mu = np.array([7, 13])
sigma = np.array([[4, 3], [3, 6]])
cpd1 = LinearGaussianCPD(
"Y", evidence_mean=mu, evidence_variance=sigma, evidence=["X1", "X2"]
)
mean, variance = cpd1.fit(x_df, states=["(Y|X)", "X1", "X2"], estimator="MLE")
np_test.assert_allclose(mean, [2.361152, 0.693147, 0.276383], rtol=1e-03)
np_test.assert_allclose(variance, sigma_c, rtol=1e-1)
@unittest.skip("TODO")
def test_pdf(self):
cpd1 = LinearGaussianCPD("x", [0.23], 0.56)
cpd2 = LinearGaussianCPD("y", [0.67, 1, 4.56, 8], 2, ["x1", "x2", "x3"])
np_test.assert_almost_equal(cpd1.assignment(1), 0.3139868)
np_test.assert_almost_equal(cpd2.assignment(1, 1.2, 2.3, 3.4), 1.076e-162)
@unittest.skip("TODO")
def test_copy(self):
cpd = LinearGaussianCPD("y", [0.67, 1, 4.56, 8], 2, ["x1", "x2", "x3"])
copy = cpd.copy()
self.assertEqual(cpd.variable, copy.variable)
self.assertEqual(cpd.beta_0, copy.beta_0)
self.assertEqual(cpd.variance, copy.variance)
np_test.assert_array_equal(cpd.beta_vector, copy.beta_vector)
self.assertEqual(cpd.evidence, copy.evidence)
cpd.variable = "z"
self.assertEqual(copy.variable, "y")
cpd.variance = 0
self.assertEqual(copy.variance, 2)
cpd.beta_0 = 1
self.assertEqual(copy.beta_0, 0.67)
cpd.evidence = ["p", "q", "r"]
self.assertEqual(copy.evidence, ["x1", "x2", "x3"])
cpd.beta_vector = [2, 2, 2]
np_test.assert_array_equal(copy.beta_vector, [1, 4.56, 8])
copy = cpd.copy()
copy.variable = "k"
self.assertEqual(cpd.variable, "z")
copy.variance = 0.3
self.assertEqual(cpd.variance, 0)
copy.beta_0 = 1.5
self.assertEqual(cpd.beta_0, 1)
copy.evidence = ["a", "b", "c"]
self.assertEqual(cpd.evidence, ["p", "q", "r"])
copy.beta_vector = [2.2, 2.2, 2.2]
np_test.assert_array_equal(cpd.beta_vector, [2, 2, 2])
@unittest.skip("TODO")
def test_str(self):
cpd1 = LinearGaussianCPD("x", [0.23], 0.56)
cpd2 = LinearGaussianCPD("y", [0.67, 1, 4.56, 8], 2, ["x1", "x2", "x3"])
self.assertEqual(cpd1.__str__(), "P(x) = N(0.23; 0.56)")
self.assertEqual(
cpd2.__str__(),
"P(y | x1, x2, x3) = N(1.0*x1 + " "4.56*x2 + 8.0*x3 + 0.67; 2)",
)
# def test_mle_fit(self):
# cpd = LinearGaussianCPD('Y', [0.2, -2, 3, 7], 9.6, ['X1', 'X2', 'X3'])
# gbn_values = pd.read_csv('gbn_values.csv')
# cpd.fit(gbn_values)
|
import asyncio
from aiohttp.client_exceptions import ClientResponseError
import async_timeout
from logi_circle import LogiCircle
from logi_circle.exception import AuthorizationFailed
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.camera import ATTR_FILENAME, CAMERA_SERVICE_SCHEMA
from homeassistant.const import (
ATTR_MODE,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_MONITORED_CONDITIONS,
CONF_SENSORS,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from . import config_flow
from .const import (
CONF_API_KEY,
CONF_REDIRECT_URI,
DATA_LOGI,
DEFAULT_CACHEDB,
DOMAIN,
LED_MODE_KEY,
LOGI_SENSORS,
RECORDING_MODE_KEY,
SIGNAL_LOGI_CIRCLE_RECONFIGURE,
SIGNAL_LOGI_CIRCLE_RECORD,
SIGNAL_LOGI_CIRCLE_SNAPSHOT,
)
NOTIFICATION_ID = "logi_circle_notification"
NOTIFICATION_TITLE = "Logi Circle Setup"
_TIMEOUT = 15 # seconds
SERVICE_SET_CONFIG = "set_config"
SERVICE_LIVESTREAM_SNAPSHOT = "livestream_snapshot"
SERVICE_LIVESTREAM_RECORD = "livestream_record"
ATTR_VALUE = "value"
ATTR_DURATION = "duration"
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(LOGI_SENSORS)): vol.All(
cv.ensure_list, [vol.In(LOGI_SENSORS)]
)
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_REDIRECT_URI): cv.string,
vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA,
}
)
},
extra=vol.ALLOW_EXTRA,
)
LOGI_CIRCLE_SERVICE_SET_CONFIG = CAMERA_SERVICE_SCHEMA.extend(
{
vol.Required(ATTR_MODE): vol.In([LED_MODE_KEY, RECORDING_MODE_KEY]),
vol.Required(ATTR_VALUE): cv.boolean,
}
)
LOGI_CIRCLE_SERVICE_SNAPSHOT = CAMERA_SERVICE_SCHEMA.extend(
{vol.Required(ATTR_FILENAME): cv.template}
)
LOGI_CIRCLE_SERVICE_RECORD = CAMERA_SERVICE_SCHEMA.extend(
{
vol.Required(ATTR_FILENAME): cv.template,
vol.Required(ATTR_DURATION): cv.positive_int,
}
)
async def async_setup(hass, config):
"""Set up configured Logi Circle component."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
config_flow.register_flow_implementation(
hass,
DOMAIN,
client_id=conf[CONF_CLIENT_ID],
client_secret=conf[CONF_CLIENT_SECRET],
api_key=conf[CONF_API_KEY],
redirect_uri=conf[CONF_REDIRECT_URI],
sensors=conf[CONF_SENSORS],
)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up Logi Circle from a config entry."""
logi_circle = LogiCircle(
client_id=entry.data[CONF_CLIENT_ID],
client_secret=entry.data[CONF_CLIENT_SECRET],
api_key=entry.data[CONF_API_KEY],
redirect_uri=entry.data[CONF_REDIRECT_URI],
cache_file=hass.config.path(DEFAULT_CACHEDB),
)
if not logi_circle.authorized:
hass.components.persistent_notification.create(
(
f"Error: The cached access tokens are missing from {DEFAULT_CACHEDB}.<br />"
f"Please unload then re-add the Logi Circle integration to resolve."
),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
try:
with async_timeout.timeout(_TIMEOUT):
# Ensure the cameras property returns the same Camera objects for
# all devices. Performs implicit login and session validation.
await logi_circle.synchronize_cameras()
except AuthorizationFailed:
hass.components.persistent_notification.create(
"Error: Failed to obtain an access token from the cached "
"refresh token.<br />"
"Token may have expired or been revoked.<br />"
"Please unload then re-add the Logi Circle integration to resolve",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
except asyncio.TimeoutError:
        # The TimeoutError exception object returns nothing when cast to a
        # string, so we handle it separately.
err = f"{_TIMEOUT}s timeout exceeded when connecting to Logi Circle API"
hass.components.persistent_notification.create(
f"Error: {err}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
except ClientResponseError as ex:
hass.components.persistent_notification.create(
f"Error: {ex}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
hass.data[DATA_LOGI] = logi_circle
for component in "camera", "sensor":
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
async def service_handler(service):
"""Dispatch service calls to target entities."""
params = dict(service.data)
if service.service == SERVICE_SET_CONFIG:
async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECONFIGURE, params)
if service.service == SERVICE_LIVESTREAM_SNAPSHOT:
async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_SNAPSHOT, params)
if service.service == SERVICE_LIVESTREAM_RECORD:
async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECORD, params)
hass.services.async_register(
DOMAIN,
SERVICE_SET_CONFIG,
service_handler,
schema=LOGI_CIRCLE_SERVICE_SET_CONFIG,
)
hass.services.async_register(
DOMAIN,
SERVICE_LIVESTREAM_SNAPSHOT,
service_handler,
schema=LOGI_CIRCLE_SERVICE_SNAPSHOT,
)
hass.services.async_register(
DOMAIN,
SERVICE_LIVESTREAM_RECORD,
service_handler,
schema=LOGI_CIRCLE_SERVICE_RECORD,
)
async def shut_down(event=None):
"""Close Logi Circle aiohttp session."""
await logi_circle.auth_provider.close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
for component in "camera", "sensor":
await hass.config_entries.async_forward_entry_unload(entry, component)
logi_circle = hass.data.pop(DATA_LOGI)
# Tell API wrapper to close all aiohttp sessions, invalidate WS connections
# and clear all locally cached tokens
await logi_circle.auth_provider.clear_authorization()
return True
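# Editor's note (illustrative only): after async_setup_entry() registers the
# services above, an automation could invoke them with data matching the
# schemas, for example (hypothetical entity id and path, assuming DOMAIN
# resolves to 'logi_circle'):
#   service: logi_circle.livestream_snapshot
#   data:
#     entity_id: camera.front_door
#     filename: /tmp/front_door_snapshot.jpg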
|
import numpy as np
import tensorflow as tf
from keras import layers, backend as K
from keras.losses import Loss
from keras.utils import losses_utils
class RankHingeLoss(Loss):
"""
Rank hinge loss.
Examples:
>>> from keras import backend as K
>>> x_pred = K.variable(np.array([[1.0], [1.2], [0.8], [1.4]]))
>>> x_true = K.variable(np.array([[1], [0], [1], [0]]))
>>> expect = ((1.0 + 1.2 - 1.0) + (1.0 + 1.4 - 0.8)) / 2
>>> expect
1.4
>>> loss = K.eval(RankHingeLoss(num_neg=1, margin=1.0)(x_true, x_pred))
>>> np.isclose(loss, expect)
True
"""
def __init__(self, num_neg: int = 1, margin: float = 1.0):
"""
:class:`RankHingeLoss` constructor.
:param num_neg: number of negative instances in hinge loss.
:param margin: the margin between positive and negative scores.
"""
super().__init__(reduction=losses_utils.Reduction.SUM_OVER_BATCH_SIZE,
name="rank_hinge")
self._num_neg = num_neg
self._margin = margin
def call(self, y_true: np.array, y_pred: np.array,
sample_weight=None) -> np.array:
"""
Calculate rank hinge loss.
:param y_true: Label.
:param y_pred: Predicted result.
:return: Hinge loss computed by user-defined margin.
"""
y_pos = layers.Lambda(lambda a: a[::(self._num_neg + 1), :],
output_shape=(1,))(y_pred)
y_neg = []
for neg_idx in range(self._num_neg):
y_neg.append(
layers.Lambda(
lambda a: a[(neg_idx + 1)::(self._num_neg + 1), :],
output_shape=(1,))(y_pred))
y_neg = tf.concat(y_neg, axis=-1)
y_neg = tf.reduce_mean(y_neg, axis=-1, keepdims=True)
loss = tf.maximum(0., self._margin + y_neg - y_pos)
return losses_utils.compute_weighted_loss(
loss, sample_weight, reduction=self.reduction)
@property
def num_neg(self):
"""`num_neg` getter."""
return self._num_neg
@property
def margin(self):
"""`margin` getter."""
return self._margin
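# Worked example (editor's addition): with num_neg=1 the predictions are read
# as alternating (positive, negative) pairs, so for the doctest above the two
# hinge terms are max(0, 1.0 + 1.2 - 1.0) = 1.2 and max(0, 1.0 + 1.4 - 0.8) = 1.6,
# whose mean is the expected loss of 1.4.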
|
import pytest
import numpy as np
from numpy.testing import assert_almost_equal
from scipy import stats
from scipy import linalg
from mne.preprocessing.infomax_ import infomax
from mne.utils import requires_sklearn, run_tests_if_main
def center_and_norm(x, axis=-1):
"""Center and norm x in place.
Parameters
----------
x: ndarray
Array with an axis of observations (statistical units) measured on
random variables.
axis: int, optional
Axis along which the mean and variance are calculated.
"""
x = np.rollaxis(x, axis)
x -= x.mean(axis=0)
x /= x.std(axis=0)
@requires_sklearn
def test_infomax_blowup():
"""Test the infomax algorithm blowup condition."""
# scipy.stats uses the global RNG:
np.random.seed(0)
n_samples = 100
# Generate two sources:
s1 = (2 * np.sin(np.linspace(0, 100, n_samples)) > 0) - 1
s2 = stats.t.rvs(1, size=n_samples)
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing angle
phi = 0.6
mixing = np.array([[np.cos(phi), np.sin(phi)], # noqa: E241
[np.sin(phi), -np.cos(phi)]])
m = np.dot(mixing, s)
center_and_norm(m)
X = _get_pca().fit_transform(m.T)
k_ = infomax(X, extended=True, l_rate=0.1)
s_ = np.dot(k_, X.T)
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2)
@requires_sklearn
def test_infomax_simple():
"""Test the infomax algorithm on very simple data."""
rng = np.random.RandomState(0)
# scipy.stats uses the global RNG:
np.random.seed(0)
n_samples = 500
# Generate two sources:
s1 = (2 * np.sin(np.linspace(0, 100, n_samples)) > 0) - 1
s2 = stats.t.rvs(1, size=n_samples)
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing angle
phi = 0.6
mixing = np.array([[np.cos(phi), np.sin(phi)], # noqa: E241
[np.sin(phi), -np.cos(phi)]])
for add_noise in (False, True):
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(2, n_samples)
center_and_norm(m)
algos = [True, False]
for algo in algos:
X = _get_pca().fit_transform(m.T)
k_ = infomax(X, extended=algo)
s_ = np.dot(k_, X.T)
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2)
else:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=1)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=1)
def test_infomax_weights_ini():
"""Test the infomax algorithm w/initial weights matrix."""
X = np.random.random((3, 100))
weights = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float64)
w1 = infomax(X, max_iter=0, weights=weights, extended=True)
w2 = infomax(X, max_iter=0, weights=weights, extended=False)
assert_almost_equal(w1, weights)
assert_almost_equal(w2, weights)
@requires_sklearn
def test_non_square_infomax():
"""Test non-square infomax."""
rng = np.random.RandomState(0)
n_samples = 200
# Generate two sources:
t = np.linspace(0, 100, n_samples)
s1 = np.sin(t)
s2 = np.ceil(np.sin(np.pi * t))
s = np.c_[s1, s2].T
center_and_norm(s)
s1, s2 = s
# Mixing matrix
n_observed = 6
mixing = rng.randn(n_observed, 2)
for add_noise in (False, True):
m = np.dot(mixing, s)
if add_noise:
m += 0.1 * rng.randn(n_observed, n_samples)
center_and_norm(m)
m = m.T
m = _get_pca(rng).fit_transform(m)
# we need extended since input signals are sub-gaussian
unmixing_ = infomax(m, random_state=rng, extended=True)
s_ = np.dot(unmixing_, m.T)
# Check that the mixing model described in the docstring holds:
mixing_ = linalg.pinv(unmixing_.T)
assert_almost_equal(m, s_.T.dot(mixing_))
center_and_norm(s_)
s1_, s2_ = s_
# Check to see if the sources have been estimated
# in the wrong order
if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)):
s2_, s1_ = s_
s1_ *= np.sign(np.dot(s1_, s1))
s2_ *= np.sign(np.dot(s2_, s2))
# Check that we have estimated the original sources
if not add_noise:
assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2)
assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2)
@pytest.mark.parametrize("return_n_iter", [True, False])
def test_infomax_n_iter(return_n_iter):
"""Test the return_n_iter kwarg."""
X = np.random.random((3, 100))
max_iter = 1
r = infomax(X, max_iter=max_iter, extended=True,
return_n_iter=return_n_iter)
if return_n_iter:
assert isinstance(r, tuple)
assert r[1] == max_iter
else:
assert isinstance(r, np.ndarray)
def _get_pca(rng=None):
from sklearn.decomposition import PCA
return PCA(n_components=2, whiten=True, svd_solver='randomized',
random_state=rng)
run_tests_if_main()
|
__author__ = "VMware, Inc."
from pyVmomi import VmomiSupport, SoapAdapter, vmodl
from .SoapAdapter import SoapStubAdapterBase, SerializeToUnicode, Deserialize
## ManagedMethodExecutor soap stub adapter
#
class MMESoapStubAdapter(SoapStubAdapterBase):
""" Managed method executor stub adapter """
## Constructor
#
# The endpoint can be specified individually as either a host/port
# combination, or with a URL (using a url= keyword).
#
# @param self self
# @param mme managed method executor
def __init__(self, mme):
stub = mme._stub
SoapStubAdapterBase.__init__(self, version=stub.version)
self.mme = mme
## Compute the version information for the specified namespace
#
# @param ns the namespace
def ComputeVersionInfo(self, version):
SoapStubAdapterBase.ComputeVersionInfo(self, version)
self.versionId = self.versionId[1:-1]
## Invoke a managed method, with _ExecuteSoap. Wohooo!
#
# @param self self
# @param mo the 'this'
# @param info method info
# @param args arguments
def InvokeMethod(self, mo, info, args):
# Serialize parameters to soap parameters
methodArgs = None
if info.params:
methodArgs = vmodl.Reflect.ManagedMethodExecutor.SoapArgument.Array()
for param, arg in zip(info.params, args):
if arg is not None:
# Serialize parameters to soap snippets
soapVal = SerializeToUnicode(val=arg, info=param, version=self.version)
# Insert argument
soapArg = vmodl.Reflect.ManagedMethodExecutor.SoapArgument(
name=param.name, val=soapVal)
methodArgs.append(soapArg)
moid = mo._GetMoId()
version = self.versionId
methodName = VmomiSupport.GetVmodlName(info.type) + "." + info.name
# Execute method
result = self.mme.ExecuteSoap(moid=moid,
version=version,
method=methodName,
argument=methodArgs)
return self._DeserializeExecutorResult(result, info.result)
## Invoke a managed property accessor
#
# @param self self
# @param mo the 'this'
# @param info property info
def InvokeAccessor(self, mo, info):
moid = mo._GetMoId()
version = self.versionId
prop = info.name
# Fetch property
result = self.mme.FetchSoap(moid=moid, version=version, prop=prop)
return self._DeserializeExecutorResult(result, info.type)
## Deserialize result from ExecuteSoap / FetchSoap
#
# @param self self
# @param result result from ExecuteSoap / FetchSoap
# @param resultType Expected result type
def _DeserializeExecutorResult(self, result, resultType):
obj = None
if result:
# Parse the return soap snippet. If fault, raise exception
if result.response:
# Deserialize back to result
obj = Deserialize(result.response, resultType, stub=self)
elif result.fault:
# Deserialize back to fault (or vmomi fault)
fault = Deserialize(result.fault.faultDetail,
object,
stub=self)
# Silent pylint
raise fault # pylint: disable-msg=E0702
else:
# Unexpected: result should have either response or fault
msg = "Unexpected execute/fetchSoap error"
reason = "execute/fetchSoap did not return response or fault"
raise vmodl.Fault.SystemError(msg=msg, reason=reason)
return obj
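# Editor's note (illustrative sketch, not part of the original module): once a
# vmodl.Reflect.ManagedMethodExecutor managed object has been obtained from a
# connected session stub, wrapping it in MMESoapStubAdapter lets ordinary
# pyVmomi managed-object calls be routed through ExecuteSoap/FetchSoap, e.g.
#   mmeStub = MMESoapStubAdapter(mme)
#   host = vim.HostSystem('ha-host', mmeStub)  # moId 'ha-host' is an example
#   build = host.config.product.build          # property fetch via FetchSoap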
|
from __future__ import unicode_literals
import json
import pytest
from schema import Use
from flask import Flask
from app.utils.validator import Validator
from app.utils.ResponseUtil import standard_response
from . import success, load_data
v = Validator()
@v.register('even')
def even():
return Use(int, lambda value: value % 2)
SCHEMA = {
'name': v.str(),
'age': v.int(min=18, max=99),
'sex': v.enum('男', '女'),
'number': v.even(),
v.optional('email'): v.email()
}
EXPECT = {
'user_id': 123,
'name': 'kk',
'age': 21,
'sex': '男',
'number': 2,
'email': '[email protected]'
}
DATA = {
'name': 'kk',
'age': '21',
'sex': '男',
'number': 2,
'email': '[email protected]'
}
JSONDATA = json.dumps(DATA)
app = Flask(__name__)
@app.route('/<int:user_id>', methods=['GET', 'POST'])
@v.param(SCHEMA)
def index(**kwargs):
return standard_response(1, kwargs)
@pytest.fixture
def client():
with app.test_client() as c:
yield c
# def load_data(resp):
# if resp.status_code != 200:
# print(resp.data)
# assert resp.status_code == 200
# return json.loads(resp.data)
def test_form_ok(client):
resp = client.post('/123', data=DATA)
assert load_data(resp) == EXPECT
def test_json_ok(client):
headers = {'Content-Type': 'application/json'}
resp = client.post('/123', data=JSONDATA, headers=headers)
assert load_data(resp) == EXPECT
def test_args_ok(client):
resp = client.get('/123', query_string=DATA)
assert load_data(resp) == EXPECT
def test_optional(client):
data = DATA.copy()
data.pop('email')
resp = client.post('/123', data=data)
expect = EXPECT.copy()
expect['email'] = None
    assert load_data(resp) == expect
def test_required(client):
data = DATA.copy()
data.pop('name')
resp = client.post('/123', data=data)
assert not success(resp)
def test_error(client):
data = DATA.copy()
data['age'] = '17'
resp = client.post('/123', data=data)
assert not success(resp)
|
import argparse
import sys
from a_sync import block
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.metrics.metastatus_lib import assert_no_duplicate_frameworks
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--check",
"-C",
dest="check",
type=str,
default="",
help="Comma separated list of frameworks to check for duplicates",
)
return parser.parse_args()
def check_mesos_no_duplicate_frameworks() -> None:
options = parse_args()
check = options.check.split(",")
master = get_mesos_master()
try:
state = block(master.state)
except MasterNotAvailableException as e:
print("CRITICAL: %s" % e.args[0])
sys.exit(2)
result = assert_no_duplicate_frameworks(state, check)
if result.healthy:
print("OK: " + result.message)
sys.exit(0)
else:
print(result.message)
sys.exit(2)
if __name__ == "__main__":
check_mesos_no_duplicate_frameworks()
|
from aioeafm import get_stations
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.helpers.aiohttp_client import async_get_clientsession
# pylint: disable=unused-import
from .const import DOMAIN
class UKFloodsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a UK Environment Agency flood monitoring config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Handle a UK Floods config flow."""
self.stations = {}
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
errors = {}
if user_input is not None:
station = self.stations[user_input["station"]]
await self.async_set_unique_id(station, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input["station"],
data={"station": station},
)
session = async_get_clientsession(hass=self.hass)
stations = await get_stations(session)
self.stations = {}
for station in stations:
label = station["label"]
            # API annoyingly sometimes returns a list and sometimes returns a string
# E.g. L3121 has a label of ['Scurf Dyke', 'Scurf Dyke Dyke Level']
if isinstance(label, list):
label = label[-1]
self.stations[label] = station["stationReference"]
if not self.stations:
return self.async_abort(reason="no_stations")
return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{vol.Required("station"): vol.In(sorted(self.stations))}
),
)
|
import logging
import operator
import requests
from .. import __short_version__
from ..cache import REFINER_EXPIRATION_TIME, region
from ..video import Episode, Movie
logger = logging.getLogger(__name__)
class OMDBClient(object):
base_url = 'http://www.omdbapi.com'
def __init__(self, version=1, session=None, headers=None, timeout=10):
#: Session for the requests
self.session = session or requests.Session()
self.session.timeout = timeout
self.session.headers.update(headers or {})
self.session.params['r'] = 'json'
self.session.params['v'] = version
def get(self, id=None, title=None, type=None, year=None, plot='short', tomatoes=False):
# build the params
params = {}
if id:
params['i'] = id
if title:
params['t'] = title
if not params:
raise ValueError('At least id or title is required')
params['type'] = type
params['y'] = year
params['plot'] = plot
params['tomatoes'] = tomatoes
# perform the request
r = self.session.get(self.base_url, params=params)
r.raise_for_status()
# get the response as json
j = r.json()
# check response status
if j['Response'] == 'False':
return None
return j
def search(self, title, type=None, year=None, page=1):
# build the params
params = {'s': title, 'type': type, 'y': year, 'page': page}
# perform the request
r = self.session.get(self.base_url, params=params)
r.raise_for_status()
# get the response as json
j = r.json()
# check response status
if j['Response'] == 'False':
return None
return j
user_agent = 'Subliminal/%s' % __short_version__
omdb_client = OMDBClient(headers={'User-Agent': user_agent})
@region.cache_on_arguments(expiration_time=REFINER_EXPIRATION_TIME)
def search(title, type, year):
results = omdb_client.search(title, type, year)
if not results:
return None
# fetch all paginated results
all_results = results['Search']
total_results = int(results['totalResults'])
page = 1
while total_results > page * 10:
page += 1
results = omdb_client.search(title, type, year, page=page)
all_results.extend(results['Search'])
return all_results
def refine(video, apikey=None, **kwargs):
"""Refine a video by searching `OMDb API <http://omdbapi.com/>`_.
Several :class:`~subliminal.video.Episode` attributes can be found:
* :attr:`~subliminal.video.Episode.series`
* :attr:`~subliminal.video.Episode.year`
* :attr:`~subliminal.video.Episode.series_imdb_id`
Similarly, for a :class:`~subliminal.video.Movie`:
* :attr:`~subliminal.video.Movie.title`
* :attr:`~subliminal.video.Movie.year`
* :attr:`~subliminal.video.Video.imdb_id`
"""
if not apikey:
logger.warning('No apikey. Skipping omdb refiner.')
return
omdb_client.session.params['apikey'] = apikey
if isinstance(video, Episode):
# exit if the information is complete
if video.series_imdb_id:
logger.debug('No need to search')
return
# search the series
results = search(video.series, 'series', video.year)
if not results:
logger.warning('No results for series')
return
logger.debug('Found %d results', len(results))
# filter the results
results = [r for r in results if video.matches(r['Title'])]
if not results:
logger.warning('No matching series found')
return
# process the results
found = False
for result in sorted(results, key=operator.itemgetter('Year')):
if video.original_series and video.year is None:
logger.debug('Found result for original series without year')
found = True
break
if video.year == int(result['Year'].split(u'\u2013')[0]):
logger.debug('Found result with matching year')
found = True
break
if not found:
logger.warning('No matching series found')
return
# add series information
logger.debug('Found series %r', result)
video.series = result['Title']
video.year = int(result['Year'].split(u'\u2013')[0])
video.series_imdb_id = result['imdbID']
elif isinstance(video, Movie):
# exit if the information is complete
if video.imdb_id:
return
# search the movie
results = search(video.title, 'movie', video.year)
if not results:
logger.warning('No results for movie')
return
logger.debug('Found %d results', len(results))
# filter the results
results = [r for r in results if video.matches(r['Title'])]
if not results:
logger.warning('No matching movie found')
return
# process the results
found = False
for result in results:
if video.year is None:
logger.debug('Found result for movie without year')
found = True
break
if video.year == int(result['Year']):
logger.debug('Found result with matching year')
found = True
break
if not found:
logger.warning('No matching movie found')
return
# add movie information
logger.debug('Found movie %r', result)
video.title = result['Title']
video.year = int(result['Year'].split(u'\u2013')[0])
video.imdb_id = result['imdbID']
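# A minimal usage sketch (not part of the original refiner), assuming a valid
# OMDb API key; the placeholder key below is hypothetical.
if __name__ == '__main__':
    client = OMDBClient(headers={'User-Agent': user_agent})
    client.session.params['apikey'] = 'YOUR_API_KEY'  # hypothetical placeholder
    # single title lookup; returns the parsed JSON dict, or None when the API
    # answers with 'Response': 'False'
    movie = client.get(title='The Matrix', type='movie', year=1999)
    if movie:
        print(movie['Title'], movie['Year'], movie['imdbID'])
    # paginated search; each page carries up to 10 entries under 'Search'
    page = client.search('Matrix', type='movie', page=1)
    if page:
        print(page['totalResults'])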
|
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.websocket_api.auth import TYPE_AUTH_REQUIRED
from homeassistant.components.websocket_api.http import URL
from .test_auth import test_auth_active_with_token
async def test_websocket_api(hass, aiohttp_client, hass_access_token, legacy_auth):
"""Test API streams."""
await async_setup_component(
hass, "sensor", {"sensor": {"platform": "websocket_api"}}
)
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
ws = await client.ws_connect(URL)
auth_ok = await ws.receive_json()
assert auth_ok["type"] == TYPE_AUTH_REQUIRED
ws.client = client
state = hass.states.get("sensor.connected_clients")
assert state.state == "0"
await test_auth_active_with_token(hass, ws, hass_access_token)
state = hass.states.get("sensor.connected_clients")
assert state.state == "1"
await ws.close()
await hass.async_block_till_done()
state = hass.states.get("sensor.connected_clients")
assert state.state == "0"
|
from weblate.trans.management.commands import WeblateComponentCommand
from weblate.trans.models.change import Change
class Command(WeblateComponentCommand):
help = "List translators for a component"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--language-code",
action="store_true",
dest="code",
default=False,
help="Use language code instead of language name",
)
def handle(self, *args, **options):
data = []
for component in self.get_components(*args, **options):
for translation in component.translation_set.iterator():
authors = Change.objects.filter(translation=translation).authors_list()
if not authors:
continue
if options["code"]:
key = translation.language.code
else:
key = translation.language.name
data.append({key: sorted(set(authors))})
for language in data:
name, translators = language.popitem()
self.stdout.write(f"[{name}]\n")
for translator in translators:
self.stdout.write("{1} <{0}>\n".format(*translator))
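# Expected output shape (illustrative), assuming authors_list() yields
# (email, full name) pairs:
#
#   [Czech]
#   Jane Doe <jane.doe@example.com>
#   John Smith <john.smith@example.com>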
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from copy import deepcopy
import numpy as np
from numpy import array, zeros, eye, dot
from numpy.random import multivariate_normal
from filterpy.common import pretty_str, outer_product_sum
class EnsembleKalmanFilter(object):
"""
This implements the ensemble Kalman filter (EnKF). The EnKF uses
an ensemble of hundreds to thousands of state vectors that are randomly
sampled around the estimate, and adds perturbations at each update and
predict step. It is useful for extremely large systems such as found
in hydrophysics. As such, this class is admittedly a toy as it is far
too slow with large N.
    There are many versions of this sort of filter. This formulation is
due to Crassidis and Junkins [1]. It works with both linear and nonlinear
systems.
Parameters
----------
x : np.array(dim_x)
state mean
P : np.array((dim_x, dim_x))
covariance of the state
dim_z : int
        Number of measurement inputs. For example, if the sensor
provides you with position in (x,y), dim_z would be 2.
dt : float
time step in seconds
N : int
number of sigma points (ensembles). Must be greater than 1.
K : np.array
Kalman gain
hx : function hx(x)
Measurement function. May be linear or nonlinear - converts state
x into a measurement. Return must be an np.array of the same
dimensionality as the measurement vector.
fx : function fx(x, dt)
State transition function. May be linear or nonlinear. Projects
state x into the next time period. Returns the projected state x.
Attributes
----------
x : numpy.array(dim_x, 1)
State estimate
P : numpy.array(dim_x, dim_x)
State covariance matrix
x_prior : numpy.array(dim_x, 1)
Prior (predicted) state estimate. The *_prior and *_post attributes
        are for convenience; they store the prior and posterior of the
current epoch. Read Only.
P_prior : numpy.array(dim_x, dim_x)
Prior (predicted) state covariance matrix. Read Only.
x_post : numpy.array(dim_x, 1)
Posterior (updated) state estimate. Read Only.
P_post : numpy.array(dim_x, dim_x)
Posterior (updated) state covariance matrix. Read Only.
z : numpy.array
Last measurement used in update(). Read only.
R : numpy.array(dim_z, dim_z)
Measurement noise matrix
Q : numpy.array(dim_x, dim_x)
Process noise matrix
fx : callable (x, dt)
State transition function
hx : callable (x)
Measurement function. Convert state `x` into a measurement
K : numpy.array(dim_x, dim_z)
Kalman gain of the update step. Read only.
inv : function, default numpy.linalg.inv
If you prefer another inverse function, such as the Moore-Penrose
pseudo inverse, set it to that instead: kf.inv = np.linalg.pinv
Examples
--------
.. code-block:: Python
def hx(x):
return np.array([x[0]])
F = np.array([[1., 1.],
[0., 1.]])
def fx(x, dt):
return np.dot(F, x)
x = np.array([0., 1.])
P = np.eye(2) * 100.
dt = 0.1
f = EnsembleKalmanFilter(x=x, P=P, dim_z=1, dt=dt,
N=8, hx=hx, fx=fx)
std_noise = 3.
f.R *= std_noise**2
f.Q = Q_discrete_white_noise(2, dt, .01)
while True:
z = read_sensor()
f.predict()
f.update(np.asarray([z]))
See my book Kalman and Bayesian Filters in Python
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python
References
----------
    - [1] John L. Crassidis and John L. Junkins. "Optimal Estimation of
      Dynamic Systems." CRC Press, second edition. 2012. pp. 257-9.
"""
def __init__(self, x, P, dim_z, dt, N, hx, fx):
if dim_z <= 0:
raise ValueError('dim_z must be greater than zero')
if N <= 0:
raise ValueError('N must be greater than zero')
dim_x = len(x)
self.dim_x = dim_x
self.dim_z = dim_z
self.dt = dt
self.N = N
self.hx = hx
self.fx = fx
self.K = zeros((dim_x, dim_z))
self.z = array([[None] * self.dim_z]).T
self.S = zeros((dim_z, dim_z)) # system uncertainty
self.SI = zeros((dim_z, dim_z)) # inverse system uncertainty
self.initialize(x, P)
self.Q = eye(dim_x) # process uncertainty
self.R = eye(dim_z) # state uncertainty
self.inv = np.linalg.inv
# used to create error terms centered at 0 mean for
# state and measurement
self._mean = zeros(dim_x)
self._mean_z = zeros(dim_z)
def initialize(self, x, P):
"""
Initializes the filter with the specified mean and
covariance. Only need to call this if you are using the filter
to filter more than one set of data; this is called by __init__
Parameters
----------
        x : np.array(dim_x)
state mean
P : np.array((dim_x, dim_x))
covariance of the state
"""
if x.ndim != 1:
raise ValueError('x must be a 1D array')
self.sigmas = multivariate_normal(mean=x, cov=P, size=self.N)
self.x = x
self.P = P
# these will always be a copy of x,P after predict() is called
self.x_prior = self.x.copy()
self.P_prior = self.P.copy()
# these will always be a copy of x,P after update() is called
self.x_post = self.x.copy()
self.P_post = self.P.copy()
def update(self, z, R=None):
"""
Add a new measurement (z) to the kalman filter. If z is None, nothing
is changed.
Parameters
----------
z : np.array
measurement for this update.
R : np.array, scalar, or None
Optionally provide R to override the measurement noise for this
one call, otherwise self.R will be used.
"""
if z is None:
self.z = array([[None]*self.dim_z]).T
self.x_post = self.x.copy()
self.P_post = self.P.copy()
return
if R is None:
R = self.R
if np.isscalar(R):
R = eye(self.dim_z) * R
N = self.N
dim_z = len(z)
sigmas_h = zeros((N, dim_z))
# transform sigma points into measurement space
for i in range(N):
sigmas_h[i] = self.hx(self.sigmas[i])
z_mean = np.mean(sigmas_h, axis=0)
P_zz = (outer_product_sum(sigmas_h - z_mean) / (N-1)) + R
P_xz = outer_product_sum(
self.sigmas - self.x, sigmas_h - z_mean) / (N - 1)
self.S = P_zz
self.SI = self.inv(self.S)
self.K = dot(P_xz, self.SI)
e_r = multivariate_normal(self._mean_z, R, N)
for i in range(N):
self.sigmas[i] += dot(self.K, z + e_r[i] - sigmas_h[i])
self.x = np.mean(self.sigmas, axis=0)
self.P = self.P - dot(dot(self.K, self.S), self.K.T)
# save measurement and posterior state
self.z = deepcopy(z)
self.x_post = self.x.copy()
self.P_post = self.P.copy()
def predict(self):
""" Predict next position. """
N = self.N
for i, s in enumerate(self.sigmas):
self.sigmas[i] = self.fx(s, self.dt)
e = multivariate_normal(self._mean, self.Q, N)
self.sigmas += e
self.x = np.mean(self.sigmas, axis=0)
self.P = outer_product_sum(self.sigmas - self.x) / (N - 1)
# save prior
self.x_prior = np.copy(self.x)
self.P_prior = np.copy(self.P)
def __repr__(self):
return '\n'.join([
'EnsembleKalmanFilter object',
pretty_str('dim_x', self.dim_x),
pretty_str('dim_z', self.dim_z),
pretty_str('dt', self.dt),
pretty_str('x', self.x),
pretty_str('P', self.P),
pretty_str('x_prior', self.x_prior),
pretty_str('P_prior', self.P_prior),
pretty_str('Q', self.Q),
pretty_str('R', self.R),
pretty_str('K', self.K),
pretty_str('S', self.S),
pretty_str('sigmas', self.sigmas),
pretty_str('hx', self.hx),
pretty_str('fx', self.fx)
])
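# A self-contained smoke test (not part of the original module): track a 1D
# constant-velocity target with synthetic measurements. The noise levels,
# ensemble size and number of steps below are illustrative assumptions only.
if __name__ == '__main__':
    np.random.seed(0)

    def _fx(x, dt):
        # constant-velocity transition: position += velocity * dt
        return dot(array([[1., dt], [0., 1.]]), x)

    def _hx(x):
        # only the position is measured
        return array([x[0]])

    enkf = EnsembleKalmanFilter(x=array([0., 1.]), P=eye(2) * 100.,
                                dim_z=1, dt=1., N=20, hx=_hx, fx=_fx)
    enkf.R *= 0.5
    enkf.Q *= 0.001
    for step in range(1, 11):
        z = step + np.random.randn() * 0.5  # true position is `step`
        enkf.predict()
        enkf.update(array([z]))
    print('final state estimate:', enkf.x)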
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
import xmltodict
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, POWER_WATT, VOLT
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "ted"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=80): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ted5000 sensor."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
name = config.get(CONF_NAME)
url = f"http://{host}:{port}/api/LiveData.xml"
gateway = Ted5000Gateway(url)
    # Get MTU information to create the sensors.
gateway.update()
dev = []
for mtu in gateway.data:
dev.append(Ted5000Sensor(gateway, name, mtu, POWER_WATT))
dev.append(Ted5000Sensor(gateway, name, mtu, VOLT))
add_entities(dev)
return True
class Ted5000Sensor(Entity):
"""Implementation of a Ted5000 sensor."""
def __init__(self, gateway, name, mtu, unit):
"""Initialize the sensor."""
units = {POWER_WATT: "power", VOLT: "voltage"}
self._gateway = gateway
self._name = "{} mtu{} {}".format(name, mtu, units[unit])
self._mtu = mtu
self._unit = unit
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def state(self):
"""Return the state of the resources."""
try:
return self._gateway.data[self._mtu][self._unit]
except KeyError:
pass
def update(self):
"""Get the latest data from REST API."""
self._gateway.update()
class Ted5000Gateway:
"""The class for handling the data retrieval."""
def __init__(self, url):
"""Initialize the data object."""
self.url = url
self.data = {}
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from the Ted5000 XML API."""
try:
request = requests.get(self.url, timeout=10)
except requests.exceptions.RequestException as err:
_LOGGER.error("No connection to endpoint: %s", err)
else:
doc = xmltodict.parse(request.text)
mtus = int(doc["LiveData"]["System"]["NumberMTU"])
for mtu in range(1, mtus + 1):
power = int(doc["LiveData"]["Power"]["MTU%d" % mtu]["PowerNow"])
voltage = int(doc["LiveData"]["Voltage"]["MTU%d" % mtu]["VoltageNow"])
self.data[mtu] = {POWER_WATT: power, VOLT: voltage / 10}
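# For reference, a minimal LiveData.xml payload that the parser above handles
# (illustrative values only; the real gateway returns many more fields):
#
#   <LiveData>
#     <System><NumberMTU>1</NumberMTU></System>
#     <Power><MTU1><PowerNow>1234</PowerNow></MTU1></Power>
#     <Voltage><MTU1><VoltageNow>1198</VoltageNow></MTU1></Voltage>
#   </LiveData>
#
# With this payload, self.data becomes {1: {POWER_WATT: 1234, VOLT: 119.8}}.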
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import posixpath
import re
from absl import flags
from perfkitbenchmarker.linux_packages import nvidia_driver
# There is no way to tell the apt-get installation
# method what dir to install the cuda toolkit to
CUDA_HOME = '/usr/local/cuda'
flags.DEFINE_enum(
'cuda_toolkit_version',
'9.0', ['9.0', '10.0', '10.1', '10.2', '11.0', 'None', ''],
'Version of CUDA Toolkit to install. '
'Input "None" or empty string to skip installation',
module_name=__name__)
FLAGS = flags.FLAGS
CUDA_PIN = 'https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/cuda-ubuntu1604.pin'
CUDA_11_0_TOOLKIT = 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda-repo-ubuntu1604-11-0-local_11.0.2-450.51.05-1_amd64.deb'
CUDA_10_2_TOOLKIT = 'http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda-repo-ubuntu1604-10-2-local-10.2.89-440.33.01_1.0-1_amd64.deb'
CUDA_10_1_TOOLKIT = 'https://developer.download.nvidia.com/compute/cuda/10.1/Prod/local_installers/cuda-repo-ubuntu1604-10-1-local-10.1.243-418.87.00_1.0-1_amd64.deb'
CUDA_10_0_TOOLKIT = 'https://developer.nvidia.com/compute/cuda/10.0/Prod/local_installers/cuda-repo-ubuntu1604-10-0-local-10.0.130-410.48_1.0-1_amd64'
CUDA_9_0_TOOLKIT = 'https://developer.nvidia.com/compute/cuda/9.0/Prod/local_installers/cuda-repo-ubuntu1604-9-0-local_9.0.176-1_amd64-deb'
CUDA_9_0_PATCH = 'https://developer.nvidia.com/compute/cuda/9.0/Prod/patches/1/cuda-repo-ubuntu1604-9-0-local-cublas-performance-update_1.0-1_amd64-deb'
class UnsupportedCudaVersionError(Exception):
pass
class NvccParseOutputError(Exception):
pass
def GetMetadata(vm):
"""Returns gpu-specific metadata as a dict.
Args:
vm: virtual machine to operate on
Returns:
    A dict of GPU- and CUDA-specific metadata.
"""
metadata = nvidia_driver.GetMetadata(vm)
metadata['cuda_toolkit_version'] = FLAGS.cuda_toolkit_version
metadata['cuda_toolkit_home'] = CUDA_HOME
return metadata
def DoPostInstallActions(vm):
"""Perform post NVIDIA driver install action on the vm.
Args:
vm: the virtual machine to operate on
"""
nvidia_driver.DoPostInstallActions(vm)
def GetCudaToolkitVersion(vm):
"""Get the CUDA toolkit version on the vm, based on nvcc.
Args:
vm: the virtual machine to query
Returns:
A string containing the active CUDA toolkit version,
None if nvcc could not be found
Raises:
    NvccParseOutputError: If the nvcc version output cannot be parsed.
"""
stdout, _ = vm.RemoteCommand(
posixpath.join(CUDA_HOME, 'bin/nvcc') + ' --version',
ignore_failure=True, suppress_warning=True)
if bool(stdout.rstrip()):
regex = r'release (\S+),'
match = re.search(regex, stdout)
if match:
return str(match.group(1))
raise NvccParseOutputError('Unable to parse nvcc version output from {}'
.format(stdout))
else:
return None
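# For reference, the regex in GetCudaToolkitVersion expects the usual
# `nvcc --version` banner, which typically ends with a line such as
# (illustrative):
#   Cuda compilation tools, release 10.1, V10.1.243
# from which the captured group is '10.1'.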
def _InstallCudaPatch(vm, patch_url):
"""Installs CUDA Toolkit patch from NVIDIA.
Args:
vm: VM to install patch on
patch_url: url of the CUDA patch to install
"""
# Need to append .deb to package name because the file downloaded from
# NVIDIA is missing the .deb extension.
basename = posixpath.basename(patch_url) + '.deb'
vm.RemoteCommand('wget -q %s -O %s' % (patch_url,
basename))
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-get update')
# Need to be extra careful on the command below because without these
  # precautions, it was bringing up a menu option about grub's menu.lst
# on AWS Ubuntu16.04 and thus causing the RemoteCommand to hang and fail.
vm.RemoteCommand(
'sudo DEBIAN_FRONTEND=noninteractive apt-get upgrade -yq cuda')
def _InstallCuda9Point0(vm):
"""Installs CUDA Toolkit 9.0 from NVIDIA.
Args:
vm: VM to install CUDA on
"""
basename = posixpath.basename(CUDA_9_0_TOOLKIT) + '.deb'
vm.RemoteCommand('wget -q %s -O %s' % (CUDA_9_0_TOOLKIT,
basename))
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-key add /var/cuda-repo-9-0-local/7fa2af80.pub')
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get install -y cuda')
_InstallCudaPatch(vm, CUDA_9_0_PATCH)
def _InstallCuda10Point0(vm):
"""Installs CUDA Toolkit 10.0 from NVIDIA.
Args:
vm: VM to install CUDA on
"""
basename = posixpath.basename(CUDA_10_0_TOOLKIT) + '.deb'
vm.RemoteCommand('wget -q %s -O %s' % (CUDA_10_0_TOOLKIT,
basename))
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-key add '
'/var/cuda-repo-10-0-local-10.0.130-410.48/7fa2af80.pub')
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get install -y cuda-toolkit-10-0 cuda-tools-10-0 '
'cuda-libraries-10-0 cuda-libraries-dev-10-0')
def _InstallCuda10Point1(vm):
"""Installs CUDA Toolkit 10.1 from NVIDIA.
Args:
vm: VM to install CUDA on
"""
basename = posixpath.basename(CUDA_10_1_TOOLKIT)
vm.RemoteCommand('wget -q %s' % CUDA_PIN)
vm.RemoteCommand('sudo mv cuda-ubuntu1604.pin '
'/etc/apt/preferences.d/cuda-repository-pin-600')
vm.RemoteCommand('wget -q %s' % CUDA_10_1_TOOLKIT)
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-key add '
'/var/cuda-repo-10-1-local-10.1.243-418.87.00/7fa2af80.pub')
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get install -y cuda-toolkit-10-1 cuda-tools-10-1 '
'cuda-libraries-10-1 cuda-libraries-dev-10-1')
def _InstallCuda10Point2(vm):
"""Installs CUDA Toolkit 10.2 from NVIDIA.
Args:
vm: VM to install CUDA on
"""
basename = posixpath.basename(CUDA_10_2_TOOLKIT)
vm.RemoteCommand('wget -q %s' % CUDA_PIN)
vm.RemoteCommand('sudo mv cuda-ubuntu1604.pin '
'/etc/apt/preferences.d/cuda-repository-pin-600')
vm.RemoteCommand('wget -q %s' % CUDA_10_2_TOOLKIT)
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-key add '
'/var/cuda-repo-10-2-local-10.2.89-440.33.01/7fa2af80.pub')
vm.RemoteCommand('sudo apt-get update')
vm.RemoteCommand('sudo apt-get install -y cuda-toolkit-10-2 cuda-tools-10-2 '
'cuda-libraries-10-2 cuda-libraries-dev-10-2')
def _InstallCuda11Point0(vm):
"""Installs CUDA Toolkit 11.0 from NVIDIA.
Args:
vm: VM to install CUDA on
"""
basename = posixpath.basename(CUDA_11_0_TOOLKIT)
vm.RemoteCommand('wget -q %s' % CUDA_PIN)
vm.RemoteCommand('sudo mv cuda-ubuntu1604.pin '
'/etc/apt/preferences.d/cuda-repository-pin-600')
vm.RemoteCommand('wget -q %s' % CUDA_11_0_TOOLKIT)
vm.RemoteCommand('sudo dpkg -i %s' % basename)
vm.RemoteCommand('sudo apt-key add '
'/var/cuda-repo-ubuntu1604-11-0-local/7fa2af80.pub')
vm.RemoteCommand('sudo apt-get update')
vm.InstallPackages('cuda-toolkit-11-0 cuda-tools-11-0 '
'cuda-libraries-11-0 cuda-libraries-dev-11-0')
def AptInstall(vm):
"""Installs CUDA toolkit on the VM if not already installed."""
version_to_install = FLAGS.cuda_toolkit_version
if (version_to_install == 'None' or not version_to_install):
return
current_version = GetCudaToolkitVersion(vm)
if current_version == version_to_install:
return
cuda_path = f'/usr/local/cuda-{FLAGS.cuda_toolkit_version}'
if vm.TryRemoteCommand(f'stat {cuda_path}'):
vm.RemoteCommand('sudo rm -rf /usr/local/cuda', ignore_failure=True)
vm.RemoteCommand(f'sudo ln -s {cuda_path} /usr/local/cuda')
return
vm.Install('build_tools')
vm.Install('wget')
vm.Install('nvidia_driver')
if version_to_install == '9.0':
_InstallCuda9Point0(vm)
elif version_to_install == '10.0':
_InstallCuda10Point0(vm)
elif version_to_install == '10.1':
_InstallCuda10Point1(vm)
elif version_to_install == '10.2':
_InstallCuda10Point2(vm)
elif version_to_install == '11.0':
_InstallCuda11Point0(vm)
else:
raise UnsupportedCudaVersionError()
DoPostInstallActions(vm)
  # NVIDIA CUDA Profiling Tools Interface (CUPTI).
# This library provides advanced profiling support
if version_to_install in ('9.0', '10.0'):
# cupti is part of cuda>=10.1, and installed as cuda-cupti-10-1/2
vm.RemoteCommand('sudo apt-get install -y libcupti-dev')
def YumInstall(vm):
"""Installs CUDA toolkit on the VM if not already installed.
TODO: PKB currently only supports the installation of CUDA toolkit on Ubuntu.
Args:
vm: VM to install CUDA on
"""
del vm # unused
raise NotImplementedError()
def CheckPrerequisites():
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
pass
def Uninstall(vm):
"""Removes the CUDA toolkit.
Args:
vm: VM that installed CUDA
Note that reinstallation does not work correctly, i.e. you cannot reinstall
CUDA by calling _Install() again.
"""
vm.RemoteCommand('rm -f cuda-repo-ubuntu1604*')
vm.RemoteCommand('sudo rm -rf {cuda_home}'.format(cuda_home=CUDA_HOME))
|
import logging
import requests
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from . import (
CONF_REPORT_SERVER_CODES,
CONF_REPORT_SERVER_ENABLED,
CONF_REPORT_SERVER_PORT,
EGARDIA_DEVICE,
EGARDIA_SERVER,
REPORT_SERVER_CODES_IGNORE,
)
_LOGGER = logging.getLogger(__name__)
STATES = {
"ARM": STATE_ALARM_ARMED_AWAY,
"DAY HOME": STATE_ALARM_ARMED_HOME,
"DISARM": STATE_ALARM_DISARMED,
"ARMHOME": STATE_ALARM_ARMED_HOME,
"HOME": STATE_ALARM_ARMED_HOME,
"NIGHT HOME": STATE_ALARM_ARMED_NIGHT,
"TRIGGERED": STATE_ALARM_TRIGGERED,
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Egardia Alarm Control Panael platform."""
if discovery_info is None:
return
device = EgardiaAlarm(
discovery_info["name"],
hass.data[EGARDIA_DEVICE],
discovery_info[CONF_REPORT_SERVER_ENABLED],
discovery_info.get(CONF_REPORT_SERVER_CODES),
discovery_info[CONF_REPORT_SERVER_PORT],
)
add_entities([device], True)
class EgardiaAlarm(alarm.AlarmControlPanelEntity):
"""Representation of a Egardia alarm."""
def __init__(
self, name, egardiasystem, rs_enabled=False, rs_codes=None, rs_port=52010
):
"""Initialize the Egardia alarm."""
self._name = name
self._egardiasystem = egardiasystem
self._status = None
self._rs_enabled = rs_enabled
self._rs_codes = rs_codes
self._rs_port = rs_port
async def async_added_to_hass(self):
"""Add Egardiaserver callback if enabled."""
if self._rs_enabled:
_LOGGER.debug("Registering callback to Egardiaserver")
self.hass.data[EGARDIA_SERVER].register_callback(self.handle_status_event)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._status
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY
@property
def should_poll(self):
"""Poll if no report server is enabled."""
if not self._rs_enabled:
return True
return False
def handle_status_event(self, event):
"""Handle the Egardia system status event."""
statuscode = event.get("status")
if statuscode is not None:
status = self.lookupstatusfromcode(statuscode)
self.parsestatus(status)
self.schedule_update_ha_state()
def lookupstatusfromcode(self, statuscode):
"""Look at the rs_codes and returns the status from the code."""
status = next(
(
status_group.upper()
for status_group, codes in self._rs_codes.items()
for code in codes
if statuscode == code
),
"UNKNOWN",
)
return status
def parsestatus(self, status):
"""Parse the status."""
_LOGGER.debug("Parsing status %s", status)
# Ignore the statuscode if it is IGNORE
if status.lower().strip() != REPORT_SERVER_CODES_IGNORE:
_LOGGER.debug("Not ignoring status %s", status)
newstatus = STATES.get(status.upper())
_LOGGER.debug("newstatus %s", newstatus)
self._status = newstatus
else:
_LOGGER.error("Ignoring status")
def update(self):
"""Update the alarm status."""
status = self._egardiasystem.getstate()
self.parsestatus(status)
def alarm_disarm(self, code=None):
"""Send disarm command."""
try:
self._egardiasystem.alarm_disarm()
except requests.exceptions.RequestException as err:
_LOGGER.error(
"Egardia device exception occurred when sending disarm command: %s",
err,
)
def alarm_arm_home(self, code=None):
"""Send arm home command."""
try:
self._egardiasystem.alarm_arm_home()
except requests.exceptions.RequestException as err:
_LOGGER.error(
"Egardia device exception occurred when "
"sending arm home command: %s",
err,
)
def alarm_arm_away(self, code=None):
"""Send arm away command."""
try:
self._egardiasystem.alarm_arm_away()
except requests.exceptions.RequestException as err:
_LOGGER.error(
"Egardia device exception occurred when "
"sending arm away command: %s",
err,
)
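# For reference (not part of the original platform): `rs_codes`
# (CONF_REPORT_SERVER_CODES) is expected to be a mapping of status-group names
# to lists of raw report codes, e.g. something like (illustrative codes only):
#
#   {"arm": ["0A01", "0A02"], "disarm": ["0B01"], "ignore": ["0C01"]}
#
# lookupstatusfromcode() upper-cases the matching group name so it can be
# resolved through STATES, and parsestatus() drops anything matching
# REPORT_SERVER_CODES_IGNORE.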
|
import socket
import unittest
from unittest import mock
import homeassistant.components.graphite as graphite
from homeassistant.const import (
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
from homeassistant.setup import setup_component
from tests.async_mock import patch
from tests.common import get_test_home_assistant
class TestGraphite(unittest.TestCase):
"""Test the Graphite component."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.gf = graphite.GraphiteFeeder(self.hass, "foo", 123, "ha")
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
@patch("socket.socket")
def test_setup(self, mock_socket):
"""Test setup."""
assert setup_component(self.hass, graphite.DOMAIN, {"graphite": {}})
assert mock_socket.call_count == 1
assert mock_socket.call_args == mock.call(socket.AF_INET, socket.SOCK_STREAM)
@patch("socket.socket")
@patch("homeassistant.components.graphite.GraphiteFeeder")
def test_full_config(self, mock_gf, mock_socket):
"""Test setup with full configuration."""
config = {"graphite": {"host": "foo", "port": 123, "prefix": "me"}}
assert setup_component(self.hass, graphite.DOMAIN, config)
assert mock_gf.call_count == 1
assert mock_gf.call_args == mock.call(self.hass, "foo", 123, "me")
assert mock_socket.call_count == 1
assert mock_socket.call_args == mock.call(socket.AF_INET, socket.SOCK_STREAM)
@patch("socket.socket")
@patch("homeassistant.components.graphite.GraphiteFeeder")
def test_config_port(self, mock_gf, mock_socket):
"""Test setup with invalid port."""
config = {"graphite": {"host": "foo", "port": 2003}}
assert setup_component(self.hass, graphite.DOMAIN, config)
assert mock_gf.called
assert mock_socket.call_count == 1
assert mock_socket.call_args == mock.call(socket.AF_INET, socket.SOCK_STREAM)
def test_subscribe(self):
"""Test the subscription."""
fake_hass = mock.MagicMock()
gf = graphite.GraphiteFeeder(fake_hass, "foo", 123, "ha")
        fake_hass.bus.listen_once.assert_has_calls(
[
mock.call(EVENT_HOMEASSISTANT_START, gf.start_listen),
mock.call(EVENT_HOMEASSISTANT_STOP, gf.shutdown),
]
)
assert fake_hass.bus.listen.call_count == 1
assert fake_hass.bus.listen.call_args == mock.call(
EVENT_STATE_CHANGED, gf.event_listener
)
def test_start(self):
"""Test the start."""
with mock.patch.object(self.gf, "start") as mock_start:
self.gf.start_listen("event")
assert mock_start.call_count == 1
assert mock_start.call_args == mock.call()
def test_shutdown(self):
"""Test the shutdown."""
with mock.patch.object(self.gf, "_queue") as mock_queue:
self.gf.shutdown("event")
assert mock_queue.put.call_count == 1
assert mock_queue.put.call_args == mock.call(self.gf._quit_object)
def test_event_listener(self):
"""Test the event listener."""
with mock.patch.object(self.gf, "_queue") as mock_queue:
self.gf.event_listener("foo")
assert mock_queue.put.call_count == 1
assert mock_queue.put.call_args == mock.call("foo")
@patch("time.time")
def test_report_attributes(self, mock_time):
"""Test the reporting with attributes."""
mock_time.return_value = 12345
attrs = {"foo": 1, "bar": 2.0, "baz": True, "bat": "NaN"}
expected = [
"ha.entity.state 0.000000 12345",
"ha.entity.foo 1.000000 12345",
"ha.entity.bar 2.000000 12345",
"ha.entity.baz 1.000000 12345",
]
state = mock.MagicMock(state=0, attributes=attrs)
with mock.patch.object(self.gf, "_send_to_graphite") as mock_send:
self.gf._report_attributes("entity", state)
actual = mock_send.call_args_list[0][0][0].split("\n")
assert sorted(expected) == sorted(actual)
@patch("time.time")
def test_report_with_string_state(self, mock_time):
"""Test the reporting with strings."""
mock_time.return_value = 12345
expected = ["ha.entity.foo 1.000000 12345", "ha.entity.state 1.000000 12345"]
state = mock.MagicMock(state="above_horizon", attributes={"foo": 1.0})
with mock.patch.object(self.gf, "_send_to_graphite") as mock_send:
self.gf._report_attributes("entity", state)
actual = mock_send.call_args_list[0][0][0].split("\n")
assert sorted(expected) == sorted(actual)
@patch("time.time")
def test_report_with_binary_state(self, mock_time):
"""Test the reporting with binary state."""
mock_time.return_value = 12345
state = ha.State("domain.entity", STATE_ON, {"foo": 1.0})
with mock.patch.object(self.gf, "_send_to_graphite") as mock_send:
self.gf._report_attributes("entity", state)
expected = [
"ha.entity.foo 1.000000 12345",
"ha.entity.state 1.000000 12345",
]
actual = mock_send.call_args_list[0][0][0].split("\n")
assert sorted(expected) == sorted(actual)
state.state = STATE_OFF
with mock.patch.object(self.gf, "_send_to_graphite") as mock_send:
self.gf._report_attributes("entity", state)
expected = [
"ha.entity.foo 1.000000 12345",
"ha.entity.state 0.000000 12345",
]
actual = mock_send.call_args_list[0][0][0].split("\n")
assert sorted(expected) == sorted(actual)
@patch("time.time")
def test_send_to_graphite_errors(self, mock_time):
"""Test the sending with errors."""
mock_time.return_value = 12345
state = ha.State("domain.entity", STATE_ON, {"foo": 1.0})
with mock.patch.object(self.gf, "_send_to_graphite") as mock_send:
mock_send.side_effect = socket.error
self.gf._report_attributes("entity", state)
mock_send.side_effect = socket.gaierror
self.gf._report_attributes("entity", state)
@patch("socket.socket")
def test_send_to_graphite(self, mock_socket):
"""Test the sending of data."""
self.gf._send_to_graphite("foo")
assert mock_socket.call_count == 1
assert mock_socket.call_args == mock.call(socket.AF_INET, socket.SOCK_STREAM)
sock = mock_socket.return_value
assert sock.connect.call_count == 1
assert sock.connect.call_args == mock.call(("foo", 123))
assert sock.sendall.call_count == 1
assert sock.sendall.call_args == mock.call(b"foo")
assert sock.send.call_count == 1
assert sock.send.call_args == mock.call(b"\n")
assert sock.close.call_count == 1
assert sock.close.call_args == mock.call()
def test_run_stops(self):
"""Test the stops."""
with mock.patch.object(self.gf, "_queue") as mock_queue:
mock_queue.get.return_value = self.gf._quit_object
assert self.gf.run() is None
assert mock_queue.get.call_count == 1
assert mock_queue.get.call_args == mock.call()
assert mock_queue.task_done.call_count == 1
assert mock_queue.task_done.call_args == mock.call()
def test_run(self):
"""Test the running."""
runs = []
event = mock.MagicMock(
event_type=EVENT_STATE_CHANGED,
data={"entity_id": "entity", "new_state": mock.MagicMock()},
)
def fake_get():
if len(runs) >= 2:
return self.gf._quit_object
if runs:
runs.append(1)
return mock.MagicMock(
event_type="somethingelse", data={"new_event": None}
)
runs.append(1)
return event
with mock.patch.object(self.gf, "_queue") as mock_queue:
with mock.patch.object(self.gf, "_report_attributes") as mock_r:
mock_queue.get.side_effect = fake_get
self.gf.run()
# Twice for two events, once for the stop
assert mock_queue.task_done.call_count == 3
assert mock_r.call_count == 1
assert mock_r.call_args == mock.call("entity", event.data["new_state"])
|
import warnings
from typing import Optional, Sequence, Tuple, Any, Union, Type, Callable, List
from typing import Text
import numpy as np
from tensornetwork.backends import abstract_backend
#pylint: disable=line-too-long
from tensornetwork.network_components import AbstractNode, Node, outer_product_final_nodes
from tensornetwork import backend_contextmanager
from tensornetwork import backends
from tensornetwork import network_components
Tensor = Any
BaseBackend = abstract_backend.AbstractBackend
# INITIALIZATION
def initialize_node(fname: Text,
*fargs: Any,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None,
**fkwargs: Any) -> Tensor:
"""Return a Node wrapping data obtained by an initialization function
implemented in a backend. The Node will have the same shape as the
underlying array that function generates, with all Edges dangling.
This function is not intended to be called directly, but doing so should
be safe enough.
Args:
fname: Name of the method of backend to call (a string).
*fargs: Positional arguments to the initialization method.
name: Optional name of the Node.
axis_names: Optional names of the Node's dangling edges.
backend: The backend or its name.
**fkwargs: Keyword arguments to the initialization method.
Returns:
node: A Node wrapping data generated by
(the_backend).fname(*fargs, **fkwargs), with one dangling edge per
axis of data.
"""
if backend is None:
backend_obj = backend_contextmanager.get_default_backend()
else:
backend_obj = backends.backend_factory.get_backend(backend)
func = getattr(backend_obj, fname)
data = func(*fargs, **fkwargs)
node = Node(data, name=name, axis_names=axis_names, backend=backend)
return node
def eye(N: int,
dtype: Optional[Type[np.number]] = None,
M: Optional[int] = None,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None) -> Tensor:
"""Return a Node representing a 2D array with ones on the diagonal and
zeros elsewhere. The Node has two dangling Edges.
Args:
N (int): The first dimension of the returned matrix.
dtype, optional: dtype of array (default np.float64).
M (int, optional): The second dimension of the returned matrix.
name (text, optional): Name of the Node.
axis_names (optional): List of names of the edges.
backend (optional): The backend or its name.
Returns:
I : Node of shape (N, M)
      Represents an array with ones on the main diagonal and zeros elsewhere.
"""
the_node = initialize_node(
"eye",
N,
name=name,
axis_names=axis_names,
backend=backend,
dtype=dtype,
M=M)
return the_node
def zeros(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None) -> Tensor:
"""Return a Node of shape `shape` of all zeros.
The Node has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
name (text, optional): Name of the Node.
axis_names (optional): List of names of the edges.
backend (optional): The backend or its name.
Returns:
the_node : Node of shape `shape`. Represents an array of all zeros.
"""
the_node = initialize_node(
"zeros",
shape,
name=name,
axis_names=axis_names,
backend=backend,
dtype=dtype)
return the_node
def ones(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None) -> Tensor:
"""Return a Node of shape `shape` of all ones.
The Node has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
name (text, optional): Name of the Node.
axis_names (optional): List of names of the edges.
backend (optional): The backend or its name.
Returns:
the_node : Node of shape `shape`
Represents an array of all ones.
"""
the_node = initialize_node(
"ones",
shape,
name=name,
axis_names=axis_names,
backend=backend,
dtype=dtype)
return the_node
def randn(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None) -> Tensor:
"""Return a Node of shape `shape` of Gaussian random floats.
The Node has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
name (text, optional): Name of the Node.
axis_names (optional): List of names of the edges.
backend (optional): The backend or its name.
Returns:
the_node : Node of shape `shape` filled with Gaussian random data.
"""
the_node = initialize_node(
"randn",
shape,
name=name,
axis_names=axis_names,
backend=backend,
seed=seed,
dtype=dtype)
return the_node
def random_uniform(
shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
boundaries: Optional[Tuple[float, float]] = (0.0, 1.0),
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None,
backend: Optional[Union[Text, BaseBackend]] = None) -> Tensor:
"""Return a Node of shape `shape` of uniform random floats.
The Node has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
boundaries : Values lie in [boundaries[0], boundaries[1]).
name (text, optional): Name of the Node.
axis_names (optional): List of names of the edges.
backend (optional): The backend or its name.
Returns:
the_node : Node of shape `shape` filled with uniform random data.
"""
the_node = initialize_node(
"random_uniform",
shape,
name=name,
axis_names=axis_names,
backend=backend,
seed=seed,
boundaries=boundaries,
dtype=dtype)
return the_node
def norm(node: AbstractNode) -> Tensor:
"""The L2 norm of `node`
Args:
node: A `AbstractNode`.
Returns:
The L2 norm.
Raises:
AttributeError: If `node` has no `backend` attribute.
"""
if not hasattr(node, 'backend'):
raise AttributeError('Node {} of type {} has no `backend`'.format(
node, type(node)))
return node.backend.norm(node.tensor)
def conj(node: AbstractNode,
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None) -> AbstractNode:
"""Conjugate a `node`.
Args:
node: A `AbstractNode`.
name: Optional name to give the new node.
axis_names: Optional list of names for the axis.
Returns:
A new node. The complex conjugate of `node`.
Raises:
AttributeError: If `node` has no `backend` attribute.
"""
if not hasattr(node, 'backend'):
raise AttributeError('Node {} of type {} has no `backend`'.format(
node, type(node)))
backend = node.backend
if not axis_names:
axis_names = node.axis_names
return Node(
backend.conj(node.tensor),
name=name,
axis_names=axis_names,
backend=backend)
def transpose(node: AbstractNode,
permutation: Sequence[Union[Text, int]],
name: Optional[Text] = None,
axis_names: Optional[List[Text]] = None) -> AbstractNode:
"""Transpose `node`
Args:
node: A `AbstractNode`.
permutation: A list of int or str. The permutation of the axis.
name: Optional name to give the new node.
axis_names: Optional list of names for the axis.
Returns:
A new node. The transpose of `node`.
Raises:
AttributeError: If `node` has no `backend` attribute, or if
`node` has no tensor.
ValueError: If either `permutation` is not the same as expected or
if you try to permute with a trace edge.
"""
if not hasattr(node, 'backend'):
raise AttributeError('Node {} of type {} has no `backend`'.format(
node, type(node)))
perm = [node.get_axis_number(p) for p in permutation]
if not axis_names:
axis_names = node.axis_names
new_node = Node(
node.tensor, name=name, axis_names=node.axis_names, backend=node.backend)
return new_node.reorder_axes(perm)
def kron(nodes: Sequence[AbstractNode]) -> AbstractNode:
"""Kronecker product of the given nodes.
  The Kronecker product of nodes is the same as the outer product, but the
  order of the axes is different. The first half of the edges of all of the
  nodes will appear in the first half of the edges of the resulting node, and
  the second half of the edges of each node will be in the second half of the
  resulting node.
For example, if I had two nodes :math:`X_{ab}`, :math:`Y_{cdef}`, and
:math:`Z_{gh}`, then the resulting node would have the edges ordered
:math:`R_{acdgbefh}`.
The kronecker product is designed such that the kron of many operators is
itself an operator.
Args:
nodes: A sequence of `AbstractNode` objects.
Returns:
A `Node` that is the kronecker product of the given inputs. The first
half of the edges of this node would represent the "input" edges of the
operator and the last half of edges are the "output" edges of the
operator.
"""
input_edges = []
output_edges = []
for node in nodes:
order = len(node.shape)
if order % 2 != 0:
raise ValueError(f"All operator tensors must have an even order. "
f"Found tensor with order {order}")
input_edges += node.edges[:order // 2]
output_edges += node.edges[order // 2:]
result = outer_product_final_nodes(nodes, input_edges + output_edges)
return result
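# A small usage sketch (illustrative only, assuming the default numpy backend):
# the kron of two 2x2 operators X_{ab} and Z_{cd} yields a node with edges
# ordered (a, c, b, d), i.e. all "input" edges first, then all "output" edges.
if __name__ == '__main__':
  _x = Node(np.array([[0., 1.], [1., 0.]]))
  _z = Node(np.array([[1., 0.], [0., -1.]]))
  _xz = kron([_x, _z])
  print(_xz.shape)  # expected: (2, 2, 2, 2)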
|
import json
import os
import threading
import unittest
from http.server import BaseHTTPRequestHandler, HTTPServer
from test.support import EnvironmentVarGuard
from urllib.parse import urlparse
from kaggle_session import UserSessionClient
from kaggle_web_client import (_KAGGLE_URL_BASE_ENV_VAR_NAME,
_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME,
CredentialError, BackendError)
class UserSessionHTTPHandler(BaseHTTPRequestHandler):
def set_request(self):
raise NotImplementedError()
def get_response(self):
raise NotImplementedError()
def do_HEAD(s):
s.send_response(200)
def do_POST(s):
s.set_request()
s.send_response(200)
s.send_header("Content-type", "application/json")
s.end_headers()
s.wfile.write(json.dumps(s.get_response()).encode("utf-8"))
class TestUserSessionClient(unittest.TestCase):
SERVER_ADDRESS = urlparse(os.getenv(_KAGGLE_URL_BASE_ENV_VAR_NAME, default="http://127.0.0.1:8001"))
TEST_JWT = 'test-secrets-key'
def _test_client(self, client_func, expected_path, expected_body, source=None, success=True):
_request = {}
class GetKernelRunSourceForCaipHandler(UserSessionHTTPHandler):
def set_request(self):
_request['path'] = self.path
content_len = int(self.headers.get('Content-Length'))
_request['body'] = json.loads(self.rfile.read(content_len))
_request['headers'] = self.headers
def get_response(self):
if success:
return {'result': {'source': source}, 'wasSuccessful': 'true'}
return {'wasSuccessful': 'false'}
env = EnvironmentVarGuard()
env.set(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME, self.TEST_JWT)
with env:
with HTTPServer((self.SERVER_ADDRESS.hostname, self.SERVER_ADDRESS.port), GetKernelRunSourceForCaipHandler) as httpd:
threading.Thread(target=httpd.serve_forever).start()
try:
client_func()
finally:
httpd.shutdown()
path, headers, body = _request['path'], _request['headers'], _request['body']
self.assertEqual(
path,
expected_path,
msg="Fake server did not receive the right request from UserSessionClient.")
self.assertEqual(
body,
expected_body,
msg="Fake server did not receive the right body from UserSessionClient.")
def test_no_token_fails(self):
env = EnvironmentVarGuard()
env.unset(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME)
with env:
with self.assertRaises(CredentialError):
client = UserSessionClient()
def test_get_exportable_ipynb_succeeds(self):
source = "import foo"
def call_get_ipynb():
client = UserSessionClient()
response = client.get_exportable_ipynb()
self.assertEqual(source, response['source'])
self._test_client(
call_get_ipynb,
'/requests/GetKernelRunSourceForCaipRequest',
{'UseDraft': True},
source=source,
success=True)
def test_get_exportable_ipynb_fails(self):
def call_get_ipynb():
client = UserSessionClient()
with self.assertRaises(BackendError):
client.get_exportable_ipynb()
self._test_client(
call_get_ipynb,
'/requests/GetKernelRunSourceForCaipRequest',
{'UseDraft': True},
success=False)
|
import os
import unittest
import mock
from perfkitbenchmarker.linux_benchmarks import tensorflow_serving_benchmark
SAMPLE_CLIENT_OUTPUT = '../data/tensorflow_serving_client_workload_stdout.txt'
class TensorflowServingBenchmarkTestCase(unittest.TestCase):
def setUp(self):
flag_values = {
'tf_serving_client_thread_count': 12,
'tf_serving_runtime': 60,
}
p = mock.patch(tensorflow_serving_benchmark.__name__ + '.FLAGS')
flags_mock = p.start()
flags_mock.configure_mock(**flag_values)
self.addCleanup(p.stop)
path = os.path.join(os.path.dirname(__file__), SAMPLE_CLIENT_OUTPUT)
with open(path) as fp:
self.test_output = fp.read()
def testParseStdoutFromClientScript(self):
benchmark_spec = mock.MagicMock()
num_client_threads = 12
samples = tensorflow_serving_benchmark._MakeSamplesFromClientOutput(
benchmark_spec, self.test_output, num_client_threads)
expected_metadata = {
'client_thread_count': num_client_threads,
'scheduled_runtime': 60,
}
self.assertEqual(len(samples), 5)
self.assertEqual(samples[0].metric, 'Completed requests')
self.assertEqual(samples[0].unit, 'requests')
self.assertEqual(samples[0].value, 1)
self.assertDictEqual(samples[0].metadata, expected_metadata)
self.assertEqual(samples[1].metric, 'Failed requests')
self.assertEqual(samples[1].unit, 'requests')
self.assertEqual(samples[1].value, 2)
self.assertDictEqual(samples[1].metadata, expected_metadata)
self.assertEqual(samples[2].metric, 'Throughput')
self.assertEqual(samples[2].unit, 'images_per_second')
self.assertEqual(samples[2].value, 5.2)
self.assertDictEqual(samples[2].metadata, expected_metadata)
self.assertEqual(samples[3].metric, 'Runtime')
self.assertEqual(samples[3].unit, 'seconds')
self.assertEqual(samples[3].value, 3.3)
self.assertDictEqual(samples[3].metadata, expected_metadata)
expected_metadata.update({'latency_array': [1.1, 2.2, 3.3]})
self.assertEqual(samples[4].metric, 'Latency')
self.assertEqual(samples[4].unit, 'seconds')
self.assertEqual(samples[4].value, -1)
self.assertDictEqual(samples[4].metadata, expected_metadata)
if __name__ == '__main__':
unittest.main()
|
import json
from homeassistant.components.alexa import DOMAIN, smart_home_http
from homeassistant.const import CONTENT_TYPE_JSON, HTTP_NOT_FOUND
from homeassistant.setup import async_setup_component
from . import get_new_request
async def do_http_discovery(config, hass, hass_client):
"""Submit a request to the Smart Home HTTP API."""
await async_setup_component(hass, DOMAIN, config)
http_client = await hass_client()
request = get_new_request("Alexa.Discovery", "Discover")
response = await http_client.post(
smart_home_http.SMART_HOME_HTTP_ENDPOINT,
data=json.dumps(request),
headers={"content-type": CONTENT_TYPE_JSON},
)
return response
async def test_http_api(hass, hass_client):
"""With `smart_home:` HTTP API is exposed."""
config = {"alexa": {"smart_home": None}}
response = await do_http_discovery(config, hass, hass_client)
response_data = await response.json()
# Here we're testing just the HTTP view glue -- details of discovery are
# covered in other tests.
assert response_data["event"]["header"]["name"] == "Discover.Response"
async def test_http_api_disabled(hass, hass_client):
"""Without `smart_home:`, the HTTP API is disabled."""
config = {"alexa": {}}
response = await do_http_discovery(config, hass, hass_client)
assert response.status == HTTP_NOT_FOUND
|
import imp
import logging
import time
from contextlib import contextmanager
from random import randint
import requests
from queue import Full
from ...common.interfaces import AbstractPlugin
logger = logging.getLogger(__name__)
requests_logger = logging.getLogger('requests')
requests_logger.setLevel(logging.WARNING)
requests.packages.urllib3.disable_warnings()
class GunConfigError(Exception):
pass
class AbstractGun(AbstractPlugin):
def __init__(self, core, cfg):
super(AbstractGun, self).__init__(core, cfg, 'bfg_gun')
self.results = None
@contextmanager
def measure(self, marker):
start_time = time.time()
data_item = {
"send_ts": start_time,
"tag": marker,
"interval_real": None,
"connect_time": 0,
"send_time": 0,
"latency": 0,
"receive_time": 0,
"interval_event": 0,
"size_out": 0,
"size_in": 0,
"net_code": 0,
"proto_code": 200,
}
try:
yield data_item
except Exception as e:
logger.warning("%s failed while measuring with %s", marker, e)
if data_item["proto_code"] == 200:
data_item["proto_code"] = 500
if data_item["net_code"] == 0:
data_item["net_code"] == 1
raise
finally:
if data_item.get("interval_real") is None:
data_item["interval_real"] = int(
(time.time() - start_time) * 1e6)
try:
self.results.put(data_item, block=False)
except Full:
logger.error("Results full. Data corrupted")
def setup(self):
pass
def shoot(self, missile, marker):
raise NotImplementedError(
"Gun should implement 'shoot(self, missile, marker)' method")
def teardown(self):
pass
def get_option(self, key, default_value=None):
try:
return super(AbstractGun, self).get_option(key, default_value)
except KeyError:
if default_value is not None:
return default_value
else:
raise GunConfigError('Missing key: %s' % key)
class LogGun(AbstractGun):
SECTION = 'log_gun'
def __init__(self, core, cfg):
super(LogGun, self).__init__(core, cfg)
param = self.get_option("param")
        logger.info('Initialized log gun for BFG with param = %s', param)
def shoot(self, missile, marker):
logger.info("Missile: %s\n%s", marker, missile)
rt = randint(2, 30000) * 1000
with self.measure(marker) as di:
di["interval_real"] = rt
class HttpGun(AbstractGun):
SECTION = 'http_gun'
def __init__(self, core, cfg):
super(HttpGun, self).__init__(core, cfg)
self.base_address = cfg["base_address"]
def shoot(self, missile, marker):
logger.debug("Missile: %s\n%s", marker, missile)
logger.debug("Sending request: %s", self.base_address + missile)
with self.measure(marker) as di:
try:
r = requests.get(self.base_address + missile, verify=False)
di["proto_code"] = r.status_code
except requests.ConnectionError:
logger.debug("Connection error", exc_info=True)
di["net_code"] = 1
di["proto_code"] = 500
class SqlGun(AbstractGun):
SECTION = 'sql_gun'
def __init__(self, core):
super(SqlGun, self).__init__(core)
from sqlalchemy import create_engine
from sqlalchemy import exc
self.exc = exc
self.engine = create_engine(self.get_option("db"))
def shoot(self, missile, marker):
logger.debug("Missile: %s\n%s", marker, missile)
with self.measure(marker) as di:
errno = 0
proto_code = 200
try:
cursor = self.engine.execute(missile.replace('%', '%%'))
cursor.fetchall()
cursor.close()
except self.exc.TimeoutError as e:
logger.debug("Timeout: %s", e)
errno = 110
except self.exc.ResourceClosedError as e:
logger.debug(e)
except self.exc.SQLAlchemyError as e:
proto_code = 500
logger.debug(e.orig.args)
except self.exc.SAWarning as e:
proto_code = 400
logger.debug(e)
except Exception as e:
proto_code = 500
logger.debug(e)
di["proto_code"] = proto_code
di["net_code"] = errno
class CustomGun(AbstractGun):
"""
This gun is deprecated! Use UltimateGun
"""
SECTION = 'custom_gun'
def __init__(self, core, cfg):
super(CustomGun, self).__init__(core, cfg)
logger.warning("Custom gun is deprecated. Use Ultimate gun instead")
module_path = cfg["module_path"].split()
module_name = cfg["module_name"]
fp, pathname, description = imp.find_module(module_name, module_path)
try:
self.module = imp.load_module(
module_name, fp, pathname, description)
finally:
if fp:
fp.close()
def shoot(self, missile, marker):
try:
self.module.shoot(missile, marker, self.measure)
except Exception as e:
logger.warning("CustomGun %s failed with %s", marker, e)
def setup(self):
if hasattr(self.module, 'init'):
self.module.init(self)
class ScenarioGun(AbstractGun):
"""
This gun is deprecated! Use UltimateGun
"""
SECTION = 'scenario_gun'
def __init__(self, core, cfg):
super(ScenarioGun, self).__init__(core, cfg)
logger.warning("Scenario gun is deprecated. Use Ultimate gun instead")
module_path = cfg["module_path"]
if module_path:
module_path = module_path.split()
else:
module_path = None
module_name = cfg["module_name"]
fp, pathname, description = imp.find_module(module_name, module_path)
try:
self.module = imp.load_module(
module_name, fp, pathname, description)
finally:
if fp:
fp.close()
self.scenarios = self.module.SCENARIOS
def shoot(self, missile, marker):
marker = marker.rsplit("#", 1)[0] # support enum_ammo
if not marker:
marker = "default"
scenario = self.scenarios.get(marker, None)
if scenario:
try:
scenario(missile, marker, self.measure)
except Exception as e:
logger.warning("Scenario %s failed with %s", marker, e)
else:
logger.warning("Scenario not found: %s", marker)
def setup(self):
if hasattr(self.module, 'init'):
self.module.init(self)
class UltimateGun(AbstractGun):
SECTION = "ultimate_gun"
def __init__(self, core, cfg):
super(UltimateGun, self).__init__(core, cfg)
class_name = self.get_option("class_name")
module_path = self.get_option("module_path")
if module_path:
module_path = module_path.split()
else:
module_path = None
module_name = self.get_option("module_name")
self.init_param = self.get_option("init_param")
fp, pathname, description = imp.find_module(module_name, module_path)
#
# Dirty Hack
#
# we will add current unix timestamp to the name of a module each time
# it is imported to be sure Python won't be able to cache it
#
try:
self.module = imp.load_module(
"%s_%d" % (module_name, time.time()), fp, pathname, description)
finally:
if fp:
fp.close()
test_class = getattr(self.module, class_name, None)
if not isinstance(test_class, type):
raise NotImplementedError(
"Class definition for '%s' was not found in '%s' module" %
(class_name, module_name))
self.load_test = test_class(self)
def setup(self):
if callable(getattr(self.load_test, "setup", None)):
self.load_test.setup(self.init_param)
def teardown(self):
if callable(getattr(self.load_test, "teardown", None)):
self.load_test.teardown()
def shoot(self, missile, marker):
marker = marker.rsplit("#", 1)[0] # support enum_ammo
if not marker:
marker = "default"
scenario = getattr(self.load_test, marker, None)
if callable(scenario):
try:
scenario(missile)
except Exception as e:
logger.warning(
"Scenario %s failed with %s",
marker, e, exc_info=True)
else:
logger.warning("Scenario not found: %s", marker)
|
from datetime import timedelta
from homeassistant.components.switch import (
DOMAIN as SENSOR_DOMAIN,
ENTITY_ID_FORMAT,
SwitchEntity,
)
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import TuyaDevice
from .const import DOMAIN, TUYA_DATA, TUYA_DISCOVERY_NEW
SCAN_INTERVAL = timedelta(seconds=15)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tuya sensors dynamically through tuya discovery."""
platform = config_entry.data[CONF_PLATFORM]
async def async_discover_sensor(dev_ids):
"""Discover and add a discovered tuya sensor."""
if not dev_ids:
return
entities = await hass.async_add_executor_job(
_setup_entities,
hass,
dev_ids,
platform,
)
async_add_entities(entities)
async_dispatcher_connect(
hass, TUYA_DISCOVERY_NEW.format(SENSOR_DOMAIN), async_discover_sensor
)
devices_ids = hass.data[DOMAIN]["pending"].pop(SENSOR_DOMAIN)
await async_discover_sensor(devices_ids)
def _setup_entities(hass, dev_ids, platform):
"""Set up Tuya Switch device."""
tuya = hass.data[DOMAIN][TUYA_DATA]
entities = []
for dev_id in dev_ids:
device = tuya.get_device_by_id(dev_id)
if device is None:
continue
entities.append(TuyaSwitch(device, platform))
return entities
class TuyaSwitch(TuyaDevice, SwitchEntity):
"""Tuya Switch Device."""
def __init__(self, tuya, platform):
"""Init Tuya switch device."""
super().__init__(tuya, platform)
self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id())
@property
def is_on(self):
"""Return true if switch is on."""
return self._tuya.state()
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._tuya.turn_on()
def turn_off(self, **kwargs):
"""Turn the device off."""
self._tuya.turn_off()
|
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOVING,
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import CONF_ADS_VAR, DATA_ADS, STATE_KEY_STATE, AdsEntity
DEFAULT_NAME = "ADS binary sensor"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Binary Sensor platform for ADS."""
ads_hub = hass.data.get(DATA_ADS)
ads_var = config[CONF_ADS_VAR]
name = config[CONF_NAME]
device_class = config.get(CONF_DEVICE_CLASS)
ads_sensor = AdsBinarySensor(ads_hub, name, ads_var, device_class)
add_entities([ads_sensor])
class AdsBinarySensor(AdsEntity, BinarySensorEntity):
"""Representation of ADS binary sensors."""
def __init__(self, ads_hub, name, ads_var, device_class):
"""Initialize ADS binary sensor."""
super().__init__(ads_hub, name, ads_var)
self._device_class = device_class or DEVICE_CLASS_MOVING
async def async_added_to_hass(self):
"""Register device notification."""
await self.async_initialize_device(self._ads_var, self._ads_hub.PLCTYPE_BOOL)
@property
def is_on(self):
"""Return True if the entity is on."""
return self._state_dict[STATE_KEY_STATE]
@property
def device_class(self):
"""Return the device class."""
return self._device_class
|
import keras
import keras.backend as K
import tensorflow as tf
import matchzoo as mz
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
class ESIM(BaseModel):
"""
ESIM model.
Examples:
>>> model = ESIM()
>>> task = classification_task = mz.tasks.Classification(num_classes=2)
>>> model.params['task'] = task
>>> model.params['input_shapes'] = [(20, ), (40, )]
>>> model.params['lstm_dim'] = 300
>>> model.params['mlp_num_units'] = 300
>>> model.params['embedding_input_dim'] = 5000
>>> model.params['embedding_output_dim'] = 10
>>> model.params['embedding_trainable'] = False
>>> model.params['mlp_num_layers'] = 0
>>> model.params['mlp_num_fan_out'] = 300
>>> model.params['mlp_activation_func'] = 'tanh'
>>> model.params['mask_value'] = 0
>>> model.params['dropout_rate'] = 0.5
>>> model.params['optimizer'] = keras.optimizers.Adam(lr=4e-4)
>>> model.guess_and_fill_missing_params()
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
"""Get default parameters."""
params = super().get_default_params(with_embedding=True,
with_multi_layer_perceptron=True)
params.add(Param(
name='dropout_rate',
value=0.5,
desc="The dropout rate for all fully-connected layer"
))
params.add(Param(
name='lstm_dim',
value=8,
desc="The dimension of LSTM layer."
))
params.add(Param(
name='mask_value',
value=0,
desc="The value would be regarded as pad"
))
return params
def _expand_dim(self, inp: tf.Tensor, axis: int) -> keras.layers.Layer:
"""
Wrap keras.backend.expand_dims into a Lambda layer.
:param inp: input tensor to expand the dimension
:param axis: the axis of new dimension
"""
return keras.layers.Lambda(lambda x: tf.expand_dims(x, axis=axis))(inp)
def _make_atten_mask_layer(self) -> keras.layers.Layer:
"""
Make mask layer for attention weight matrix so that
each word won't pay attention to <PAD> timestep.
"""
return keras.layers.Lambda(
lambda weight_mask: weight_mask[0] + (1.0 - weight_mask[1]) * -1e7,
name="atten_mask")
def _make_bilstm_layer(self, lstm_dim: int) -> keras.layers.Layer:
"""
Bidirectional LSTM layer in ESIM.
:param lstm_dim: int, dimension of LSTM layer
:return: `keras.layers.Layer`.
"""
return keras.layers.Bidirectional(
layer=keras.layers.LSTM(lstm_dim, return_sequences=True),
merge_mode='concat')
def _max(self, texts: tf.Tensor, mask: tf.Tensor) -> tf.Tensor:
"""
Compute the max of each text according to their real length
        :param texts: tensor with shape [B, T, H]
        :param mask: tensor with shape [B, T],
            where 1 means valid, 0 means pad
"""
mask = self._expand_dim(mask, axis=2)
new_texts = keras.layers.Multiply()([texts, mask])
text_max = keras.layers.Lambda(
lambda x: tf.reduce_max(x, axis=1),
)(new_texts)
return text_max
def _avg(self, texts: tf.Tensor, mask: tf.Tensor) -> tf.Tensor:
"""
Compute the mean of each text according to their real length
        :param texts: tensor with shape [B, T, H]
        :param mask: tensor with shape [B, T],
            where 1 means valid, 0 means pad
"""
mask = self._expand_dim(mask, axis=2)
new_texts = keras.layers.Multiply()([texts, mask])
# timestep-wise division, exclude the PAD number when calc avg
text_avg = keras.layers.Lambda(
lambda text_mask:
tf.reduce_sum(text_mask[0], axis=1) / tf.reduce_sum(text_mask[1], axis=1),
)([new_texts, mask])
return text_avg
def build(self):
"""Build model."""
# parameters
lstm_dim = self._params['lstm_dim']
dropout_rate = self._params['dropout_rate']
# layers
create_mask = keras.layers.Lambda(
lambda x:
tf.cast(tf.not_equal(x, self._params['mask_value']), K.floatx())
)
embedding = self._make_embedding_layer()
lstm_compare = self._make_bilstm_layer(lstm_dim)
lstm_compose = self._make_bilstm_layer(lstm_dim)
dense_compare = keras.layers.Dense(units=lstm_dim,
activation='relu',
use_bias=True)
dropout = keras.layers.Dropout(dropout_rate)
# model
a, b = self._make_inputs() # [B, T_a], [B, T_b]
a_mask = create_mask(a) # [B, T_a]
b_mask = create_mask(b) # [B, T_b]
# encoding
a_emb = dropout(embedding(a)) # [B, T_a, E_dim]
b_emb = dropout(embedding(b)) # [B, T_b, E_dim]
a_ = lstm_compare(a_emb) # [B, T_a, H*2]
b_ = lstm_compare(b_emb) # [B, T_b, H*2]
# mask a_ and b_, since the <pad> position is no more zero
a_ = keras.layers.Multiply()([a_, self._expand_dim(a_mask, axis=2)])
b_ = keras.layers.Multiply()([b_, self._expand_dim(b_mask, axis=2)])
# local inference
e = keras.layers.Dot(axes=-1)([a_, b_]) # [B, T_a, T_b]
_ab_mask = keras.layers.Multiply()( # _ab_mask: [B, T_a, T_b]
[self._expand_dim(a_mask, axis=2), # [B, T_a, 1]
self._expand_dim(b_mask, axis=1)]) # [B, 1, T_b]
pm = keras.layers.Permute((2, 1))
mask_layer = self._make_atten_mask_layer()
softmax_layer = keras.layers.Softmax(axis=-1)
e_a = softmax_layer(mask_layer([e, _ab_mask])) # [B, T_a, T_b]
e_b = softmax_layer(mask_layer([pm(e), pm(_ab_mask)])) # [B, T_b, T_a]
# alignment (a_t = a~, b_t = b~ )
a_t = keras.layers.Dot(axes=(2, 1))([e_a, b_]) # [B, T_a, H*2]
b_t = keras.layers.Dot(axes=(2, 1))([e_b, a_]) # [B, T_b, H*2]
# local inference info enhancement
m_a = keras.layers.Concatenate(axis=-1)([
a_,
a_t,
keras.layers.Subtract()([a_, a_t]),
keras.layers.Multiply()([a_, a_t])]) # [B, T_a, H*2*4]
m_b = keras.layers.Concatenate(axis=-1)([
b_,
b_t,
keras.layers.Subtract()([b_, b_t]),
keras.layers.Multiply()([b_, b_t])]) # [B, T_b, H*2*4]
# project m_a and m_b from 4*H*2 dim to H dim
m_a = dropout(dense_compare(m_a)) # [B, T_a, H]
        m_b = dropout(dense_compare(m_b))  # [B, T_b, H]
# inference composition
v_a = lstm_compose(m_a) # [B, T_a, H*2]
v_b = lstm_compose(m_b) # [B, T_b, H*2]
# pooling
v_a = keras.layers.Concatenate(axis=-1)(
[self._avg(v_a, a_mask), self._max(v_a, a_mask)]) # [B, H*4]
v_b = keras.layers.Concatenate(axis=-1)(
[self._avg(v_b, b_mask), self._max(v_b, b_mask)]) # [B, H*4]
v = keras.layers.Concatenate(axis=-1)([v_a, v_b]) # [B, H*8]
# mlp (multilayer perceptron) classifier
output = self._make_multi_layer_perceptron_layer()(v) # [B, H]
output = dropout(output)
output = self._make_output_layer()(output) # [B, #classes]
self._backend = keras.Model(inputs=[a, b], outputs=output)
|
from homeassistant.components.switch import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ENTITY_MATCH_ALL,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.loader import bind_hass
@bind_hass
def turn_on(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified switch on."""
hass.add_job(async_turn_on, hass, entity_id)
async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified switch on."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else None
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
@bind_hass
def turn_off(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified switch off."""
hass.add_job(async_turn_off, hass, entity_id)
async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL):
"""Turn all or specified switch off."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else None
await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
|
from abc import abstractmethod
from datetime import timedelta
import logging
from homeassistant.const import ATTR_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from .const import (
ATTR_ADDRESS,
ATTR_CHANNEL,
ATTR_INTERFACE,
ATTR_PARAM,
ATTR_UNIQUE_ID,
DATA_HOMEMATIC,
DOMAIN,
HM_ATTRIBUTE_SUPPORT,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL_HUB = timedelta(seconds=300)
SCAN_INTERVAL_VARIABLES = timedelta(seconds=30)
class HMDevice(Entity):
"""The HomeMatic device base object."""
def __init__(self, config):
"""Initialize a generic HomeMatic device."""
self._name = config.get(ATTR_NAME)
self._address = config.get(ATTR_ADDRESS)
self._interface = config.get(ATTR_INTERFACE)
self._channel = config.get(ATTR_CHANNEL)
self._state = config.get(ATTR_PARAM)
self._unique_id = config.get(ATTR_UNIQUE_ID)
self._data = {}
self._homematic = None
self._hmdevice = None
self._connected = False
self._available = False
self._channel_map = set()
# Set parameter to uppercase
if self._state:
self._state = self._state.upper()
async def async_added_to_hass(self):
"""Load data init callbacks."""
self._subscribe_homematic_events()
@property
def unique_id(self):
"""Return unique ID. HomeMatic entity IDs are unique by default."""
return self._unique_id.replace(" ", "_")
@property
def should_poll(self):
"""Return false. HomeMatic states are pushed by the XML-RPC Server."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def available(self):
"""Return true if device is available."""
return self._available
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
# Static attributes
attr = {
"id": self._hmdevice.ADDRESS,
"interface": self._interface,
}
# Generate a dictionary with attributes
for node, data in HM_ATTRIBUTE_SUPPORT.items():
# Is an attribute and exists for this object
if node in self._data:
value = data[1].get(self._data[node], self._data[node])
attr[data[0]] = value
return attr
def update(self):
"""Connect to HomeMatic init values."""
if self._connected:
return True
# Initialize
self._homematic = self.hass.data[DATA_HOMEMATIC]
self._hmdevice = self._homematic.devices[self._interface][self._address]
self._connected = True
try:
# Initialize datapoints of this object
self._init_data()
self._load_data_from_hm()
# Link events from pyhomematic
self._available = not self._hmdevice.UNREACH
except Exception as err: # pylint: disable=broad-except
self._connected = False
_LOGGER.error("Exception while linking %s: %s", self._address, str(err))
def _hm_event_callback(self, device, caller, attribute, value):
"""Handle all pyhomematic device events."""
has_changed = False
# Is data needed for this instance?
if f"{attribute}:{device.partition(':')[2]}" in self._channel_map:
self._data[attribute] = value
has_changed = True
# Availability has changed
if self.available != (not self._hmdevice.UNREACH):
self._available = not self._hmdevice.UNREACH
has_changed = True
# If it has changed data point, update Home Assistant
if has_changed:
self.schedule_update_ha_state()
def _subscribe_homematic_events(self):
"""Subscribe all required events to handle job."""
for metadata in (
self._hmdevice.SENSORNODE,
self._hmdevice.BINARYNODE,
self._hmdevice.ATTRIBUTENODE,
self._hmdevice.WRITENODE,
self._hmdevice.EVENTNODE,
self._hmdevice.ACTIONNODE,
):
for node, channels in metadata.items():
# Data is needed for this instance
if node in self._data:
# chan is current channel
if len(channels) == 1:
channel = channels[0]
else:
channel = self._channel
# Remember the channel for this attribute to ignore invalid events later
self._channel_map.add(f"{node}:{channel!s}")
# Set callbacks
self._hmdevice.setEventCallback(callback=self._hm_event_callback, bequeath=True)
def _load_data_from_hm(self):
"""Load first value from pyhomematic."""
if not self._connected:
return False
# Read data from pyhomematic
for metadata, funct in (
(self._hmdevice.ATTRIBUTENODE, self._hmdevice.getAttributeData),
(self._hmdevice.WRITENODE, self._hmdevice.getWriteData),
(self._hmdevice.SENSORNODE, self._hmdevice.getSensorData),
(self._hmdevice.BINARYNODE, self._hmdevice.getBinaryData),
):
for node in metadata:
if metadata[node] and node in self._data:
self._data[node] = funct(name=node, channel=self._channel)
return True
def _hm_set_state(self, value):
"""Set data to main datapoint."""
if self._state in self._data:
self._data[self._state] = value
def _hm_get_state(self):
"""Get data from main datapoint."""
if self._state in self._data:
return self._data[self._state]
return None
def _init_data(self):
"""Generate a data dict (self._data) from the HomeMatic metadata."""
# Add all attributes to data dictionary
for data_note in self._hmdevice.ATTRIBUTENODE:
self._data.update({data_note: None})
# Initialize device specific data
self._init_data_struct()
@abstractmethod
def _init_data_struct(self):
"""Generate a data dictionary from the HomeMatic device metadata."""
class HMHub(Entity):
"""The HomeMatic hub. (CCU2/HomeGear)."""
def __init__(self, hass, homematic, name):
"""Initialize HomeMatic hub."""
self.hass = hass
self.entity_id = f"{DOMAIN}.{name.lower()}"
self._homematic = homematic
self._variables = {}
self._name = name
self._state = None
# Load data
self.hass.helpers.event.track_time_interval(self._update_hub, SCAN_INTERVAL_HUB)
self.hass.add_job(self._update_hub, None)
self.hass.helpers.event.track_time_interval(
self._update_variables, SCAN_INTERVAL_VARIABLES
)
self.hass.add_job(self._update_variables, None)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""Return false. HomeMatic Hub object updates variables."""
return False
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def state_attributes(self):
"""Return the state attributes."""
return self._variables.copy()
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return "mdi:gradient"
def _update_hub(self, now):
"""Retrieve latest state."""
service_message = self._homematic.getServiceMessages(self._name)
state = None if service_message is None else len(service_message)
        # Has the state changed?
if self._state != state:
self._state = state
self.schedule_update_ha_state()
def _update_variables(self, now):
"""Retrieve all variable data and update hmvariable states."""
variables = self._homematic.getAllSystemVariables(self._name)
if variables is None:
return
state_change = False
for key, value in variables.items():
if key in self._variables and value == self._variables[key]:
continue
state_change = True
self._variables.update({key: value})
if state_change:
self.schedule_update_ha_state()
def hm_set_variable(self, name, value):
"""Set variable value on CCU/Homegear."""
if name not in self._variables:
_LOGGER.error("Variable %s not found on %s", name, self.name)
return
old_value = self._variables.get(name)
if isinstance(old_value, bool):
value = cv.boolean(value)
else:
value = float(value)
self._homematic.setSystemVariable(self.name, name, value)
self._variables.update({name: value})
self.schedule_update_ha_state()
|
from .base_classes import LatexObject, Environment, Command, Options, Container
from .package import Package
import re
import math
class TikZOptions(Options):
"""Options class, do not escape."""
escape = False
def append_positional(self, option):
"""Add a new positional option."""
self._positional_args.append(option)
class TikZ(Environment):
"""Basic TikZ container class."""
_latex_name = 'tikzpicture'
packages = [Package('tikz')]
class Axis(Environment):
"""PGFPlots axis container class, this contains plots."""
packages = [Package('pgfplots'), Command('pgfplotsset', 'compat=newest')]
def __init__(self, options=None, *, data=None):
"""
Args
----
options: str, list or `~.Options`
Options to format the axis environment.
"""
super().__init__(options=options, data=data)
class TikZScope(Environment):
"""TikZ Scope Environment."""
_latex_name = 'scope'
class TikZCoordinate(LatexObject):
"""A General Purpose Coordinate Class."""
_coordinate_str_regex = re.compile(r'(\+\+)?\(\s*(-?[0-9]+(\.[0-9]+)?)\s*'
r',\s*(-?[0-9]+(\.[0-9]+)?)\s*\)')
def __init__(self, x, y, relative=False):
"""
Args
----
x: float or int
X coordinate
y: float or int
Y coordinate
relative: bool
Coordinate is relative or absolute
"""
self._x = float(x)
self._y = float(y)
self.relative = relative
def __repr__(self):
if self.relative:
ret_str = '++'
else:
ret_str = ''
return ret_str + '({},{})'.format(self._x, self._y)
def dumps(self):
"""Return representation."""
return self.__repr__()
@classmethod
def from_str(cls, coordinate):
"""Build a TikZCoordinate object from a string."""
m = cls._coordinate_str_regex.match(coordinate)
if m is None:
raise ValueError('invalid coordinate string')
if m.group(1) == '++':
relative = True
else:
relative = False
return TikZCoordinate(
float(m.group(2)), float(m.group(4)), relative=relative)
def __eq__(self, other):
if isinstance(other, tuple):
# if comparing to a tuple, assume it to be an absolute coordinate.
other_relative = False
other_x = float(other[0])
other_y = float(other[1])
elif isinstance(other, TikZCoordinate):
other_relative = other.relative
other_x = other._x
other_y = other._y
else:
raise TypeError('can only compare tuple and TiKZCoordinate types')
# prevent comparison between relative and non relative
# by returning False
if (other_relative != self.relative):
return False
# return comparison result
return (other_x == self._x and other_y == self._y)
def _arith_check(self, other):
if isinstance(other, tuple):
other_coord = TikZCoordinate(*other)
elif isinstance(other, TikZCoordinate):
if other.relative is True or self.relative is True:
raise ValueError('refusing to add relative coordinates')
other_coord = other
else:
raise TypeError('can only add tuple or TiKZCoordinate types')
return other_coord
def __add__(self, other):
other_coord = self._arith_check(other)
return TikZCoordinate(self._x + other_coord._x,
self._y + other_coord._y)
    def __radd__(self, other):
        return self.__add__(other)
def __sub__(self, other):
other_coord = self._arith_check(other)
        return TikZCoordinate(self._x - other_coord._x,
self._y - other_coord._y)
def distance_to(self, other):
"""Euclidean distance between two coordinates."""
other_coord = self._arith_check(other)
return math.sqrt(math.pow(self._x - other_coord._x, 2) +
math.pow(self._y - other_coord._y, 2))
class TikZObject(Container):
"""Abstract Class that most TikZ Objects inherits from."""
def __init__(self, options=None):
"""
Args
----
options: list
Options pertaining to the object
"""
super(TikZObject, self).__init__()
self.options = options
class TikZNodeAnchor(LatexObject):
"""Representation of a node's anchor point."""
def __init__(self, node_handle, anchor_name):
"""
Args
----
node_handle: str
Node's identifier
anchor_name: str
Name of the anchor
"""
self.handle = node_handle
self.anchor = anchor_name
def __repr__(self):
return '({}.{})'.format(self.handle, self.anchor)
def dumps(self):
"""Return a representation. Alias for consistency."""
return self.__repr__()
class TikZNode(TikZObject):
"""A class that represents a TiKZ node."""
_possible_anchors = ['north', 'south', 'east', 'west']
def __init__(self, handle=None, options=None, at=None, text=None):
"""
Args
----
handle: str
Node identifier
options: list
List of options
at: TikZCoordinate
Coordinate where node is placed
text: str
Body text of the node
"""
super(TikZNode, self).__init__(options=options)
self.handle = handle
if isinstance(at, (TikZCoordinate, type(None))):
self._node_position = at
else:
            raise TypeError(
                'at parameter must be an object of the '
                'TikZCoordinate class')
self._node_text = text
def dumps(self):
"""Return string representation of the node."""
ret_str = []
ret_str.append(Command('node', options=self.options).dumps())
if self.handle is not None:
ret_str.append('({})'.format(self.handle))
if self._node_position is not None:
ret_str.append('at {}'.format(str(self._node_position)))
if self._node_text is not None:
ret_str.append('{{{text}}};'.format(text=self._node_text))
else:
ret_str.append('{};')
return ' '.join(ret_str)
def get_anchor_point(self, anchor_name):
"""Return an anchor point of the node, if it exists."""
if anchor_name in self._possible_anchors:
return TikZNodeAnchor(self.handle, anchor_name)
else:
try:
anchor = int(anchor_name.split('_')[1])
            except (IndexError, ValueError):
anchor = None
if anchor is not None:
return TikZNodeAnchor(self.handle, str(anchor))
raise ValueError('Invalid anchor name: "{}"'.format(anchor_name))
def __getattr__(self, attr_name):
try:
point = self.get_anchor_point(attr_name)
return point
except ValueError:
pass
# raise AttributeError(
# 'Invalid attribute requested: "{}"'.format(attr_name))
class TikZUserPath(LatexObject):
"""Represents a possible TikZ path."""
def __init__(self, path_type, options=None):
"""
Args
----
path_type: str
Type of path used
options: Options
List of options to add
"""
super(TikZUserPath, self).__init__()
self.path_type = path_type
self.options = options
def dumps(self):
"""Return path command representation."""
ret_str = self.path_type
if self.options is not None:
ret_str += self.options.dumps()
return ret_str
class TikZPathList(LatexObject):
"""Represents a path drawing."""
_legal_path_types = ['--', '-|', '|-', 'to',
'rectangle', 'circle',
'arc', 'edge']
def __init__(self, *args):
"""
Args
----
args: list
A list of path elements
"""
self._last_item_type = None
self._arg_list = []
# parse list and verify legality
self._parse_arg_list(args)
def append(self, item):
"""Add a new element to the current path."""
self._parse_next_item(item)
def _parse_next_item(self, item):
# assume first item is a point
if self._last_item_type is None:
try:
self._add_point(item)
except (TypeError, ValueError):
# not a point, do something
raise TypeError(
'First element of path list must be a node identifier'
' or coordinate'
)
elif self._last_item_type == 'point':
            # point after point is permitted, doesn't draw
try:
self._add_point(item)
return
except (ValueError, TypeError):
# not a point, try path
pass
# will raise typeerror if wrong
self._add_path(item)
elif self._last_item_type == 'path':
# only point allowed after path
original_exception = None
try:
self._add_point(item)
return
except (TypeError, ValueError) as ex:
# check if trying to insert path after path
try:
self._add_path(item, parse_only=True)
not_a_path = False
original_exception = ex
except (TypeError, ValueError) as ex:
# not a path either!
not_a_path = True
original_exception = ex
# disentangle exceptions
if not_a_path is False:
raise ValueError('only a point descriptor can come'
' after a path descriptor')
if original_exception is not None:
raise original_exception
def _parse_arg_list(self, args):
for item in args:
self._parse_next_item(item)
def _add_path(self, path, parse_only=False):
if isinstance(path, str):
if path in self._legal_path_types:
_path = TikZUserPath(path)
else:
raise ValueError('Illegal user path type: "{}"'.format(path))
elif isinstance(path, TikZUserPath):
_path = path
else:
raise TypeError('Only string or TikZUserPath types are allowed')
# add
if parse_only is False:
self._arg_list.append(_path)
self._last_item_type = 'path'
else:
return _path
def _add_point(self, point, parse_only=False):
if isinstance(point, str):
try:
_item = TikZCoordinate.from_str(point)
except ValueError:
raise ValueError('Illegal point string: "{}"'.format(point))
elif isinstance(point, TikZCoordinate):
_item = point
elif isinstance(point, tuple):
_item = TikZCoordinate(*point)
elif isinstance(point, TikZNode):
_item = '({})'.format(point.handle)
elif isinstance(point, TikZNodeAnchor):
_item = point.dumps()
else:
            raise TypeError('Only str, tuple, TikZCoordinate, '
'TikZNode or TikZNodeAnchor types are allowed,'
' got: {}'.format(type(point)))
# add, finally
if parse_only is False:
self._arg_list.append(_item)
self._last_item_type = 'point'
else:
return _item
def dumps(self):
"""Return representation of the path command."""
ret_str = []
for item in self._arg_list:
if isinstance(item, TikZUserPath):
ret_str.append(item.dumps())
elif isinstance(item, TikZCoordinate):
ret_str.append(item.dumps())
elif isinstance(item, str):
ret_str.append(item)
return ' '.join(ret_str)
class TikZPath(TikZObject):
r"""The TikZ \path command."""
def __init__(self, path=None, options=None):
"""
Args
----
path: TikZPathList
A list of the nodes, path types in the path
options: TikZOptions
A list of options for the command
"""
super(TikZPath, self).__init__(options=options)
if isinstance(path, TikZPathList):
self.path = path
elif isinstance(path, list):
self.path = TikZPathList(*path)
elif path is None:
self.path = TikZPathList()
else:
raise TypeError(
'argument "path" can only be of types list or TikZPathList')
def append(self, element):
"""Append a path element to the current list."""
self.path.append(element)
def dumps(self):
"""Return a representation for the command."""
ret_str = [Command('path', options=self.options).dumps()]
ret_str.append(self.path.dumps())
return ' '.join(ret_str) + ';'
class TikZDraw(TikZPath):
"""A draw command is just a path command with the draw option."""
def __init__(self, path=None, options=None):
"""
Args
----
path: TikZPathList
A list of the nodes, path types in the path
options: TikZOptions
A list of options for the command
"""
super(TikZDraw, self).__init__(path=path, options=options)
# append option
if self.options is not None:
self.options.append_positional('draw')
else:
self.options = TikZOptions('draw')
class Plot(LatexObject):
"""A class representing a PGFPlot."""
packages = [Package('pgfplots'), Command('pgfplotsset', 'compat=newest')]
def __init__(self,
name=None,
func=None,
coordinates=None,
error_bar=None,
options=None):
"""
Args
----
name: str
Name of the plot.
func: str
A function that should be plotted.
coordinates: list
            A list of exact coordinates that should be plotted.
options: str, list or `~.Options`
"""
self.name = name
self.func = func
self.coordinates = coordinates
self.error_bar = error_bar
self.options = options
super().__init__()
def dumps(self):
"""Represent the plot as a string in LaTeX syntax.
Returns
-------
str
"""
string = Command('addplot', options=self.options).dumps()
if self.coordinates is not None:
string += ' coordinates {%\n'
if self.error_bar is None:
for x, y in self.coordinates:
# ie: "(x,y)"
string += '(' + str(x) + ',' + str(y) + ')%\n'
else:
for (x, y), (e_x, e_y) in zip(self.coordinates,
self.error_bar):
# ie: "(x,y) +- (e_x,e_y)"
string += '(' + str(x) + ',' + str(y) + \
') +- (' + str(e_x) + ',' + str(e_y) + ')%\n'
string += '};%\n%\n'
elif self.func is not None:
string += '{' + self.func + '};%\n%\n'
if self.name is not None:
string += Command('addlegendentry', self.name).dumps()
super().dumps()
return string
|
import os
from django.conf import settings
from django.test import SimpleTestCase
from weblate.utils.files import remove_tree
from weblate.utils.unittest import tempdir_setting
class FilesTestCase(SimpleTestCase):
@tempdir_setting("DATA_DIR")
def test_remove(self, callback=None):
target = os.path.join(settings.DATA_DIR, "test")
nested = os.path.join(target, "nested")
filename = os.path.join(target, "file")
os.makedirs(target)
os.makedirs(nested)
with open(filename, "w") as handle:
handle.write("test")
if callback:
callback(target, nested, filename)
remove_tree(target)
self.assertFalse(os.path.exists(target))
def test_remove_readonly(self):
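        # remove_tree() must also cope with directories whose permissions have
        # been stripped, so revoke all access on the target before removal.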
def callback_readonly(target, nested, filename):
os.chmod(target, 0)
self.test_remove(callback_readonly)
def test_remove_nested(self):
def callback_readonly(target, nested, filename):
os.chmod(nested, 0)
self.test_remove(callback_readonly)
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import BINARY_SENSORS, DOMAIN
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Nextcloud sensors."""
if discovery_info is None:
return
binary_sensors = []
for name in hass.data[DOMAIN]:
if name in BINARY_SENSORS:
binary_sensors.append(NextcloudBinarySensor(name))
add_entities(binary_sensors, True)
class NextcloudBinarySensor(BinarySensorEntity):
"""Represents a Nextcloud binary sensor."""
def __init__(self, item):
"""Initialize the Nextcloud binary sensor."""
self._name = item
self._is_on = None
@property
def icon(self):
"""Return the icon for this binary sensor."""
return "mdi:cloud"
@property
def name(self):
"""Return the name for this binary sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._is_on == "yes"
@property
def unique_id(self):
"""Return the unique ID for this binary sensor."""
return f"{self.hass.data[DOMAIN]['instance']}#{self._name}"
def update(self):
"""Update the binary sensor."""
self._is_on = self.hass.data[DOMAIN][self._name]
|
import logging
import re
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import flag_util
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
flag_util.DEFINE_integerlist(
'iperf_sending_thread_count',
    flag_util.IntegerList([1]),
    'Number of connections to make to the server for sending traffic. Iperf '
    'will run once for each value in the list.',
module_name=__name__)
flags.DEFINE_integer(
'iperf_runtime_in_seconds',
60,
'Number of seconds to run iperf.',
lower_bound=1)
flags.DEFINE_integer(
'iperf_timeout',
None, 'Number of seconds to wait in '
'addition to iperf runtime before '
'killing iperf client command.',
lower_bound=1)
flags.DEFINE_float(
'iperf_udp_per_stream_bandwidth', None,
'In Mbits. Iperf will attempt to send at this bandwidth for UDP tests. '
'If using multiple streams, each stream will '
'attempt to send at this bandwidth')
flags.DEFINE_float(
'iperf_tcp_per_stream_bandwidth', None,
'In Mbits. Iperf will attempt to send at this bandwidth for TCP tests. '
'If using multiple streams, each stream will '
'attempt to send at this bandwidth')
TCP = 'TCP'
UDP = 'UDP'
IPERF_BENCHMARKS = [TCP, UDP]
flags.DEFINE_list('iperf_benchmarks', [TCP], 'Run TCP, UDP or both')
flags.register_validator(
'iperf_benchmarks',
lambda benchmarks: benchmarks and set(benchmarks).issubset(IPERF_BENCHMARKS)
)
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'iperf'
BENCHMARK_CONFIG = """
iperf:
description: Run iperf
vm_groups:
vm_1:
vm_spec: *default_single_core
vm_2:
vm_spec: *default_single_core
"""
IPERF_PORT = 20000
IPERF_UDP_PORT = 25000
IPERF_RETRIES = 5
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
"""Install iperf and start the server on all machines.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
if len(vms) != 2:
raise ValueError(
f'iperf benchmark requires exactly two machines, found {len(vms)}')
for vm in vms:
vm.Install('iperf')
if vm_util.ShouldRunOnExternalIpAddress():
if TCP in FLAGS.iperf_benchmarks:
vm.AllowPort(IPERF_PORT)
if UDP in FLAGS.iperf_benchmarks:
vm.AllowPort(IPERF_UDP_PORT)
if TCP in FLAGS.iperf_benchmarks:
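      # Start the iperf server in the background and echo $! so the server's
      # PID can be captured here and used to kill it during Cleanup.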
stdout, _ = vm.RemoteCommand(f'nohup iperf --server --port {IPERF_PORT}'
' &> /dev/null & echo $!')
# TODO(ssabhaya): store this in a better place once we have a better place
vm.iperf_tcp_server_pid = stdout.strip()
if UDP in FLAGS.iperf_benchmarks:
stdout, _ = vm.RemoteCommand(
f'nohup iperf --server --udp --port {IPERF_UDP_PORT}'
' &> /dev/null & echo $!')
# TODO(ssabhaya): store this in a better place once we have a better place
vm.iperf_udp_server_pid = stdout.strip()
@vm_util.Retry(max_retries=IPERF_RETRIES)
def _RunIperf(sending_vm, receiving_vm, receiving_ip_address, thread_count,
ip_type, protocol):
"""Run iperf using sending 'vm' to connect to 'ip_address'.
Args:
sending_vm: The VM sending traffic.
receiving_vm: The VM receiving traffic.
receiving_ip_address: The IP address of the iperf server (ie the receiver).
    thread_count: The number of parallel client threads (iperf --parallel) to use.
ip_type: The IP type of 'ip_address' (e.g. 'internal', 'external')
protocol: The protocol for Iperf to use. Either 'TCP' or 'UDP'
Returns:
A Sample.
"""
metadata = {
# The meta data defining the environment
'receiving_machine_type': receiving_vm.machine_type,
'receiving_zone': receiving_vm.zone,
'sending_machine_type': sending_vm.machine_type,
'sending_thread_count': thread_count,
'sending_zone': sending_vm.zone,
'runtime_in_seconds': FLAGS.iperf_runtime_in_seconds,
'ip_type': ip_type,
}
if protocol == TCP:
iperf_cmd = (
f'iperf --enhancedreports --client {receiving_ip_address} --port '
f'{IPERF_PORT} --format m --time {FLAGS.iperf_runtime_in_seconds} '
f'--parallel {thread_count}')
if FLAGS.iperf_tcp_per_stream_bandwidth:
iperf_cmd += f' --bandwidth {FLAGS.iperf_tcp_per_stream_bandwidth}M'
# the additional time on top of the iperf runtime is to account for the
# time it takes for the iperf process to start and exit
    timeout_buffer = FLAGS.iperf_timeout or (30 + thread_count)
stdout, _ = sending_vm.RemoteCommand(
iperf_cmd,
should_log=True,
timeout=FLAGS.iperf_runtime_in_seconds + timeout_buffer)
window_size_match = re.search(
r'TCP window size: (?P<size>\d+\.?\d+) (?P<units>\S+)', stdout)
window_size = float(window_size_match.group('size'))
buffer_size = float(
re.search(r'Write buffer size: (?P<buffer_size>\d+\.\d+) \S+',
stdout).group('buffer_size'))
multi_thread = re.search((
r'\[SUM\]\s+\d+\.\d+-\d+\.\d+\s\w+\s+(?P<transfer>\d+)\s\w+\s+(?P<throughput>\d+)'
r'\s\w+/\w+\s+(?P<write>\d+)/(?P<err>\d+)\s+(?P<retry>\d+)\s*'), stdout)
# Iperf output is formatted differently when running with multiple threads
# vs a single thread
if multi_thread:
# Write, error, retry
write = int(multi_thread.group('write'))
err = int(multi_thread.group('err'))
retry = int(multi_thread.group('retry'))
# if single thread
else:
# Write, error, retry
match = re.search(
r'\d+ Mbits/sec\s+(?P<write>\d+)/(?P<err>\d+)\s+(?P<retry>\d+)',
stdout)
write = int(match.group('write'))
err = int(match.group('err'))
retry = int(match.group('retry'))
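    # Extract per-stream congestion window, RTT and netpwr values from the
    # enhanced-report output and average them across all streams.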
r = re.compile((
r'\d+ Mbits\/sec\s+ \d+\/\d+\s+\d+\s+(?P<cwnd>-*\d+)(?P<cwnd_unit>\w+)\/(?P<rtt>\d+)'
r'\s+(?P<rtt_unit>\w+)\s+(?P<netpwr>\d+\.\d+)'))
match = [m.groupdict() for m in r.finditer(stdout)]
cwnd = sum(float(i['cwnd']) for i in match) / len(match)
rtt = round(sum(float(i['rtt']) for i in match) / len(match), 2)
netpwr = round(sum(float(i['netpwr']) for i in match) / len(match), 2)
rtt_unit = match[0]['rtt_unit']
thread_values = re.findall(r'\[SUM].*\s+(\d+\.?\d*).Mbits/sec', stdout)
if not thread_values:
      # If there is no [SUM] line (which happens when threads start at
      # different times) we have to estimate the total ourselves. The code
# below will tend to overestimate a bit.
thread_values = re.findall(r'\[.*\d+\].*\s+(\d+\.?\d*).Mbits/sec', stdout)
if len(thread_values) != thread_count:
raise ValueError(f'Only {len(thread_values)} out of {thread_count}'
' iperf threads reported a throughput value.')
total_throughput = sum(float(value) for value in thread_values)
tcp_metadata = {
'buffer_size': buffer_size,
'tcp_window_size': window_size,
'write_packet_count': write,
'err_packet_count': err,
'retry_packet_count': retry,
'congestion_window': cwnd,
'rtt': rtt,
'rtt_unit': rtt_unit,
'netpwr': netpwr
}
metadata.update(tcp_metadata)
return sample.Sample('Throughput', total_throughput, 'Mbits/sec', metadata)
elif protocol == UDP:
iperf_cmd = (
f'iperf --enhancedreports --udp --client {receiving_ip_address} --port'
f' {IPERF_UDP_PORT} --format m --time {FLAGS.iperf_runtime_in_seconds}'
f' --parallel {thread_count}')
if FLAGS.iperf_udp_per_stream_bandwidth:
iperf_cmd += f' --bandwidth {FLAGS.iperf_udp_per_stream_bandwidth}M'
# the additional time on top of the iperf runtime is to account for the
# time it takes for the iperf process to start and exit
    timeout_buffer = FLAGS.iperf_timeout or (30 + thread_count)
stdout, _ = sending_vm.RemoteCommand(
iperf_cmd,
should_log=True,
timeout=FLAGS.iperf_runtime_in_seconds + timeout_buffer)
match = re.search(
r'UDP buffer size: (?P<buffer_size>\d+\.\d+)\s+(?P<buffer_unit>\w+)',
stdout)
buffer_size = float(match.group('buffer_size'))
datagram_size = int(
re.findall(r'(?P<datagram_size>\d+)\sbyte\sdatagrams', stdout)[0])
ipg_target = float(re.findall(r'IPG\starget:\s(\d+.?\d+)', stdout)[0])
ipg_target_unit = str(
re.findall(r'IPG\starget:\s\d+.?\d+\s(\S+)\s', stdout)[0])
multi_thread = re.search(
(r'\[SUM\]\s\d+\.?\d+-\d+\.?\d+\ssec\s+\d+\.?\d+\s+MBytes\s+\d+\.?\d+'
r'\s+Mbits/sec\s+(?P<write>\d+)/(?P<err>\d+)\s+(?P<pps>\d+)\s+pps'),
stdout)
if multi_thread:
# Write, Err, PPS
write = int(multi_thread.group('write'))
err = int(multi_thread.group('err'))
pps = int(multi_thread.group('pps'))
else:
# Write, Err, PPS
match = re.search(
r'\d+\s+Mbits/sec\s+(?P<write>\d+)/(?P<err>\d+)\s+(?P<pps>\d+)\s+pps',
stdout)
write = int(match.group('write'))
err = int(match.group('err'))
pps = int(match.group('pps'))
# Jitter
jitter_array = re.findall(r'Mbits/sec\s+(?P<jitter>\d+\.?\d+)\s+[a-zA-Z]+',
stdout)
jitter_avg = sum(float(x) for x in jitter_array) / len(jitter_array)
jitter_unit = str(
re.search(r'Mbits/sec\s+\d+\.?\d+\s+(?P<jitter_unit>[a-zA-Z]+)',
stdout).group('jitter_unit'))
# total and lost datagrams
match = re.findall(
r'(?P<lost_datagrams>\d+)/\s*(?P<total_datagrams>\d+)\s+\(', stdout)
lost_datagrams_sum = sum(float(i[0]) for i in match)
total_datagrams_sum = sum(float(i[1]) for i in match)
# out of order datagrams
out_of_order_array = re.findall(
r'(\d+)\s+datagrams\sreceived\sout-of-order', stdout)
out_of_order_sum = sum(int(x) for x in out_of_order_array)
thread_values = re.findall(r'\[SUM].*\s+(\d+\.?\d*).Mbits/sec', stdout)
if not thread_values:
      # If there is no [SUM] line (which happens when threads start at
      # different times) we have to estimate the total ourselves. The code
# below will tend to overestimate a bit.
thread_values = re.findall(
r'\[.*\d+\].*\s+(\d+\.?\d*).Mbits/sec\s+\d+/\d+', stdout)
if len(thread_values) != thread_count:
raise ValueError(
f'Only {len(thread_values)} out of {thread_count} iperf threads reported a'
' throughput value.')
total_throughput = sum(float(value) for value in thread_values)
udp_metadata = {
'buffer_size': buffer_size,
'datagram_size_bytes': datagram_size,
'write_packet_count': write,
'err_packet_count': err,
'pps': pps,
'ipg_target': ipg_target,
'ipg_target_unit': ipg_target_unit,
'jitter': jitter_avg,
'jitter_unit': jitter_unit,
'lost_datagrams': lost_datagrams_sum,
'total_datagrams': total_datagrams_sum,
'out_of_order_datagrams': out_of_order_sum
}
metadata.update(udp_metadata)
return sample.Sample('UDP Throughput', total_throughput, 'Mbits/sec',
metadata)
def Run(benchmark_spec):
"""Run iperf on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
results = []
logging.info('Iperf Results:')
for protocol in FLAGS.iperf_benchmarks:
for thread_count in FLAGS.iperf_sending_thread_count:
# Send traffic in both directions
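      # Iterating over the tuple (vms, reversed(vms)) unpacks the two-VM list
      # in both orders, so each VM acts once as sender and once as receiver.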
for sending_vm, receiving_vm in vms, reversed(vms):
# Send using external IP addresses
if vm_util.ShouldRunOnExternalIpAddress():
results.append(
_RunIperf(sending_vm,
receiving_vm,
receiving_vm.ip_address,
thread_count,
vm_util.IpAddressMetadata.EXTERNAL,
protocol))
# Send using internal IP addresses
if vm_util.ShouldRunOnInternalIpAddress(sending_vm, receiving_vm):
results.append(
_RunIperf(sending_vm,
receiving_vm,
receiving_vm.internal_ip,
thread_count,
vm_util.IpAddressMetadata.INTERNAL,
protocol))
return results
def Cleanup(benchmark_spec):
"""Cleanup iperf on the target vm (by uninstalling).
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
for vm in vms:
if TCP in FLAGS.iperf_benchmarks:
vm.RemoteCommand(
f'kill -9 {vm.iperf_tcp_server_pid}', ignore_failure=True)
if UDP in FLAGS.iperf_benchmarks:
vm.RemoteCommand(
f'kill -9 {vm.iperf_udp_server_pid}', ignore_failure=True)
|
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.sites.models import Site
from django.db.models import Q
from django.urls import NoReverseMatch
from django.urls import reverse
from django.utils import timezone
from django.utils.html import conditional_escape
from django.utils.html import format_html
from django.utils.html import format_html_join
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from zinnia import settings
from zinnia.admin.filters import AuthorListFilter
from zinnia.admin.filters import CategoryListFilter
from zinnia.admin.forms import EntryAdminForm
from zinnia.comparison import EntryPublishedVectorBuilder
from zinnia.managers import HIDDEN
from zinnia.managers import PUBLISHED
from zinnia.models.author import Author
from zinnia.ping import DirectoryPinger
class EntryAdmin(admin.ModelAdmin):
"""
Admin for Entry model.
"""
form = EntryAdminForm
date_hierarchy = 'publication_date'
fieldsets = (
(_('Content'), {
'fields': (('title', 'status'), 'lead', 'content',)}),
(_('Illustration'), {
'fields': ('image', 'image_caption'),
'classes': ('collapse', 'collapse-closed')}),
(_('Publication'), {
'fields': ('publication_date', 'sites',
('start_publication', 'end_publication')),
'classes': ('collapse', 'collapse-closed')}),
(_('Discussions'), {
'fields': ('comment_enabled', 'pingback_enabled',
'trackback_enabled'),
'classes': ('collapse', 'collapse-closed')}),
(_('Privacy'), {
'fields': ('login_required', 'password'),
'classes': ('collapse', 'collapse-closed')}),
(_('Templates'), {
'fields': ('content_template', 'detail_template'),
'classes': ('collapse', 'collapse-closed')}),
(_('Metadatas'), {
'fields': ('featured', 'excerpt', 'authors', 'related'),
'classes': ('collapse', 'collapse-closed')}),
(None, {'fields': ('categories', 'tags', 'slug')}))
list_filter = (CategoryListFilter, AuthorListFilter,
'publication_date', 'sites', 'status')
list_display = ('get_title', 'get_authors', 'get_categories',
'get_tags', 'get_sites', 'get_is_visible', 'featured',
'get_short_url', 'publication_date')
sortable_by = ('publication_date', 'featured')
radio_fields = {'content_template': admin.VERTICAL,
'detail_template': admin.VERTICAL}
filter_horizontal = ('categories', 'authors', 'related')
prepopulated_fields = {'slug': ('title', )}
search_fields = ('title', 'excerpt', 'content', 'tags')
actions = ['make_mine', 'make_published', 'make_hidden',
'close_comments', 'close_pingbacks', 'close_trackbacks',
'ping_directories', 'put_on_top',
'mark_featured', 'unmark_featured']
actions_on_top = True
actions_on_bottom = True
def __init__(self, model, admin_site):
self.form.admin_site = admin_site
super(EntryAdmin, self).__init__(model, admin_site)
# Custom Display
def get_title(self, entry):
"""
Return the title with word count and number of comments.
"""
title = _('%(title)s (%(word_count)i words)') % \
{'title': entry.title, 'word_count': entry.word_count}
reaction_count = int(entry.comment_count +
entry.pingback_count +
entry.trackback_count)
if reaction_count:
return ngettext_lazy(
'%(title)s (%(reactions)i reaction)',
'%(title)s (%(reactions)i reactions)', reaction_count) % \
{'title': title,
'reactions': reaction_count}
return title
get_title.short_description = _('title')
def get_authors(self, entry):
"""
Return the authors in HTML.
"""
try:
return format_html_join(
', ', '<a href="{}" target="blank">{}</a>',
[(author.get_absolute_url(),
getattr(author, author.USERNAME_FIELD))
for author in entry.authors.all()])
except NoReverseMatch:
return ', '.join(
[conditional_escape(getattr(author, author.USERNAME_FIELD))
for author in entry.authors.all()])
get_authors.short_description = _('author(s)')
def get_categories(self, entry):
"""
Return the categories linked in HTML.
"""
try:
return format_html_join(
', ', '<a href="{}" target="blank">{}</a>',
[(category.get_absolute_url(), category.title)
for category in entry.categories.all()])
except NoReverseMatch:
return ', '.join([conditional_escape(category.title)
for category in entry.categories.all()])
get_categories.short_description = _('category(s)')
def get_tags(self, entry):
"""
Return the tags linked in HTML.
"""
try:
return format_html_join(
', ', '<a href="{}" target="blank">{}</a>',
[(reverse('zinnia:tag_detail', args=[tag]), tag)
for tag in entry.tags_list])
except NoReverseMatch:
return conditional_escape(entry.tags)
get_tags.short_description = _('tag(s)')
def get_sites(self, entry):
"""
Return the sites linked in HTML.
"""
try:
index_url = reverse('zinnia:entry_archive_index')
except NoReverseMatch:
index_url = ''
return format_html_join(
', ', '<a href="{}://{}{}" target="blank">{}</a>',
[(settings.PROTOCOL, site.domain, index_url,
conditional_escape(site.name)) for site in entry.sites.all()])
get_sites.short_description = _('site(s)')
def get_short_url(self, entry):
"""
Return the short url in HTML.
"""
try:
short_url = entry.short_url
except NoReverseMatch:
short_url = entry.get_absolute_url()
return format_html('<a href="{url}" target="blank">{url}</a>',
url=short_url)
get_short_url.short_description = _('short url')
def get_is_visible(self, entry):
"""
Admin wrapper for entry.is_visible.
"""
return entry.is_visible
get_is_visible.boolean = True
get_is_visible.short_description = _('is visible')
# Custom Methods
def get_queryset(self, request):
"""
Make special filtering by user's permissions.
"""
if not request.user.has_perm('zinnia.can_view_all'):
queryset = self.model.objects.filter(authors__pk=request.user.pk)
else:
queryset = super(EntryAdmin, self).get_queryset(request)
return queryset.prefetch_related('categories', 'authors', 'sites')
def get_changeform_initial_data(self, request):
"""
        Provide initial data when creating an entry.
"""
get_data = super(EntryAdmin, self).get_changeform_initial_data(request)
return get_data or {
'sites': [Site.objects.get_current().pk],
'authors': [request.user.pk]
}
def formfield_for_manytomany(self, db_field, request, **kwargs):
"""
Filter the disposable authors.
"""
if db_field.name == 'authors':
kwargs['queryset'] = Author.objects.filter(
Q(is_staff=True) | Q(entries__isnull=False)
).distinct()
return super(EntryAdmin, self).formfield_for_manytomany(
db_field, request, **kwargs)
def get_readonly_fields(self, request, obj=None):
"""
Return readonly fields by user's permissions.
"""
readonly_fields = list(super(EntryAdmin, self).get_readonly_fields(
request, obj))
if not request.user.has_perm('zinnia.can_change_status'):
readonly_fields.append('status')
if not request.user.has_perm('zinnia.can_change_author'):
readonly_fields.append('authors')
return readonly_fields
def get_actions(self, request):
"""
Define actions by user's permissions.
"""
actions = super(EntryAdmin, self).get_actions(request)
if not actions:
return actions
if (not request.user.has_perm('zinnia.can_change_author') or
not request.user.has_perm('zinnia.can_view_all')):
del actions['make_mine']
if not request.user.has_perm('zinnia.can_change_status'):
del actions['make_hidden']
del actions['make_published']
if not settings.PING_DIRECTORIES:
del actions['ping_directories']
return actions
# Custom Actions
def make_mine(self, request, queryset):
"""
Set the entries to the current user.
"""
author = Author.objects.get(pk=request.user.pk)
for entry in queryset:
if author not in entry.authors.all():
entry.authors.add(author)
self.message_user(
request, _('The selected entries now belong to you.'))
make_mine.short_description = _('Set the entries to the user')
def make_published(self, request, queryset):
"""
Set entries selected as published.
"""
queryset.update(status=PUBLISHED)
EntryPublishedVectorBuilder().cache_flush()
self.ping_directories(request, queryset, messages=False)
self.message_user(
request, _('The selected entries are now marked as published.'))
make_published.short_description = _('Set entries selected as published')
def make_hidden(self, request, queryset):
"""
Set entries selected as hidden.
"""
queryset.update(status=HIDDEN)
EntryPublishedVectorBuilder().cache_flush()
self.message_user(
request, _('The selected entries are now marked as hidden.'))
make_hidden.short_description = _('Set entries selected as hidden')
def close_comments(self, request, queryset):
"""
Close the comments for selected entries.
"""
queryset.update(comment_enabled=False)
self.message_user(
request, _('Comments are now closed for selected entries.'))
close_comments.short_description = _('Close the comments for '
'selected entries')
def close_pingbacks(self, request, queryset):
"""
Close the pingbacks for selected entries.
"""
queryset.update(pingback_enabled=False)
self.message_user(
request, _('Pingbacks are now closed for selected entries.'))
close_pingbacks.short_description = _(
'Close the pingbacks for selected entries')
def close_trackbacks(self, request, queryset):
"""
Close the trackbacks for selected entries.
"""
queryset.update(trackback_enabled=False)
self.message_user(
request, _('Trackbacks are now closed for selected entries.'))
close_trackbacks.short_description = _(
'Close the trackbacks for selected entries')
def put_on_top(self, request, queryset):
"""
Put the selected entries on top at the current date.
"""
queryset.update(publication_date=timezone.now())
self.ping_directories(request, queryset, messages=False)
self.message_user(request, _(
'The selected entries are now set at the current date.'))
put_on_top.short_description = _(
'Put the selected entries on top at the current date')
def mark_featured(self, request, queryset):
"""
Mark selected as featured post.
"""
queryset.update(featured=True)
self.message_user(
request, _('Selected entries are now marked as featured.'))
mark_featured.short_description = _('Mark selected entries as featured')
def unmark_featured(self, request, queryset):
"""
        Unmark selected featured posts.
"""
queryset.update(featured=False)
self.message_user(
request, _('Selected entries are no longer marked as featured.'))
unmark_featured.short_description = _(
'Unmark selected entries as featured')
def ping_directories(self, request, queryset, messages=True):
"""
Ping web directories for selected entries.
"""
for directory in settings.PING_DIRECTORIES:
pinger = DirectoryPinger(directory, queryset)
pinger.join()
if messages:
success = 0
for result in pinger.results:
if not result.get('flerror', True):
success += 1
else:
self.message_user(request,
'%s : %s' % (directory,
result['message']))
if success:
self.message_user(
request,
                    _('%(directory)s directory successfully '
'pinged %(success)d entries.') %
{'directory': directory, 'success': success})
ping_directories.short_description = _(
'Ping Directories for selected entries')
|
import json
import os
import re
import sys
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from paasta_tools.secret_providers import SecretProvider
SECRET_REGEX = r"^(SHARED_)?SECRET\([A-Za-z0-9_-]*\)$"
SHARED_SECRET_SERVICE = "_shared"
def is_secret_ref(env_var_val: str) -> bool:
pattern = re.compile(SECRET_REGEX)
try:
match = pattern.match(env_var_val)
except TypeError:
# it can't be a secret ref if it isn't a string
return False
return match is not None
def is_shared_secret(env_var_val: str) -> bool:
return env_var_val.startswith("SHARED_")
def get_hmac_for_secret(
env_var_val: str, service: str, soa_dir: str, secret_environment: str
) -> Optional[str]:
secret_name = get_secret_name_from_ref(env_var_val)
if is_shared_secret(env_var_val):
service = SHARED_SECRET_SERVICE
secret_path = os.path.join(soa_dir, service, "secrets", f"{secret_name}.json")
try:
with open(secret_path, "r") as json_secret_file:
secret_file = json.load(json_secret_file)
try:
return secret_file["environments"][secret_environment]["signature"]
except KeyError:
print(
"Failed to get secret signature at environments:{}:signature in json"
" file".format(secret_environment),
file=sys.stderr,
)
return None
except IOError:
print(f"Failed to open json secret at {secret_path}", file=sys.stderr)
return None
except json.decoder.JSONDecodeError:
print(f"Failed to deserialise json secret at {secret_path}", file=sys.stderr)
return None
def get_secret_name_from_ref(env_var_val: str) -> str:
return env_var_val.split("(")[1][:-1]
def get_secret_provider(
secret_provider_name: str,
soa_dir: str,
service_name: str,
cluster_names: List[str],
secret_provider_kwargs: Dict[str, Any],
) -> SecretProvider:
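    # Dynamically import the configured provider module and pull out its
    # SecretProvider class; the local name shadows the imported SecretProvider type.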
SecretProvider = __import__(
secret_provider_name, fromlist=["SecretProvider"]
).SecretProvider
return SecretProvider(
soa_dir=soa_dir,
service_name=service_name,
cluster_names=cluster_names,
**secret_provider_kwargs,
)
def get_secret_hashes(
environment_variables: Dict[str, str],
secret_environment: str,
service: str,
soa_dir: str,
) -> Dict[str, str]:
secret_hashes = {}
for env_var_val in environment_variables.values():
if is_secret_ref(env_var_val):
secret_hashes[env_var_val] = get_hmac_for_secret(
env_var_val=env_var_val,
service=service,
soa_dir=soa_dir,
secret_environment=secret_environment,
)
return secret_hashes
def decrypt_secret_environment_for_service(
secret_env_vars: Dict[str, str],
service_name: str,
secret_provider_name: str,
soa_dir: str,
cluster_name: str,
secret_provider_kwargs: Dict[str, Any],
) -> Dict[str, str]:
if not secret_env_vars:
return {}
secret_provider = get_secret_provider(
secret_provider_name=secret_provider_name,
soa_dir=soa_dir,
service_name=service_name,
cluster_names=[cluster_name],
secret_provider_kwargs=secret_provider_kwargs,
)
return secret_provider.decrypt_environment(secret_env_vars)
def decrypt_secret_environment_variables(
secret_provider_name: str,
environment: Dict[str, str],
soa_dir: str,
service_name: str,
cluster_name: str,
secret_provider_kwargs: Dict[str, Any],
) -> Dict[str, str]:
decrypted_secrets = {}
service_secret_env = {}
shared_secret_env = {}
for k, v in environment.items():
if is_secret_ref(v):
if is_shared_secret(v):
shared_secret_env[k] = v
else:
service_secret_env[k] = v
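    # Size vault_num_uses to cover every secret that will be decrypted
    # (service-specific plus shared), presumably so the Vault token is limited
    # to exactly the required number of uses.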
secret_provider_kwargs["vault_num_uses"] = len(service_secret_env) + len(
shared_secret_env
)
decrypted_secrets.update(
decrypt_secret_environment_for_service(
service_secret_env,
service_name,
secret_provider_name,
soa_dir,
cluster_name,
secret_provider_kwargs,
)
)
decrypted_secrets.update(
decrypt_secret_environment_for_service(
shared_secret_env,
SHARED_SECRET_SERVICE,
secret_provider_name,
soa_dir,
cluster_name,
secret_provider_kwargs,
)
)
return decrypted_secrets
|
from __future__ import absolute_import, print_function, division
import os
import sys
import inspect
import pytest
PACKAGE_NAME = __name__.split('.')[0]
# Get project root dir
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = THIS_DIR
for i in range(9):
ROOT_DIR = os.path.dirname(ROOT_DIR)
if os.path.basename(ROOT_DIR) == PACKAGE_NAME:
ROOT_DIR = os.path.dirname(ROOT_DIR)
break
else:
print('testing.py could not find project root dir, '
'using testing.py directory instead.')
ROOT_DIR = THIS_DIR
# Inject some function names so they can be obtained with one import
raises = pytest.raises
skipif = pytest.mark.skipif
skip = pytest.skip
def run_tests_if_main(show_coverage=False):
""" Run tests in a given file if it is run as a script
Coverage is reported for running this single test. Set show_coverage to
launch the report in the web browser.
"""
local_vars = inspect.currentframe().f_back.f_locals
if not local_vars.get('__name__', '') == '__main__':
return
# we are in a "__main__"
os.chdir(ROOT_DIR)
fname = str(local_vars['__file__'])
_clear_our_modules()
_enable_faulthandler()
pytest.main(['-v', '-x', '--color=yes', '--cov', PACKAGE_NAME,
'--cov-config', '.coveragerc', '--cov-report', 'html', fname])
if show_coverage:
import webbrowser
fname = os.path.join(ROOT_DIR, 'htmlcov', 'index.html')
webbrowser.open_new_tab(fname)
def _enable_faulthandler():
""" Enable faulthandler (if we can), so that we get tracebacks
on segfaults.
"""
try:
import faulthandler
faulthandler.enable()
print('Faulthandler enabled')
except Exception:
print('Could not enable faulthandler')
def _clear_our_modules():
""" Remove ourselves from sys.modules to force an import.
"""
for key in list(sys.modules.keys()):
if key.startswith(PACKAGE_NAME) and 'testing' not in key:
del sys.modules[key]
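# Usage sketch (hypothetical test module; the import path is an assumption based on
# PACKAGE_NAME): a test file can run itself under pytest with coverage by calling
# run_tests_if_main() at module level, e.g.
#
#   from mypackage.testing import run_tests_if_main, raises
#
#   def test_parsing_rejects_garbage():
#       with raises(ValueError):
#           int("not a number")
#
#   run_tests_if_main()  # no-op on import, runs pytest when executed directly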
|
from pylatex import Document, Section, Subsection, Command
from pylatex.utils import italic, NoEscape
class MyDocument(Document):
def __init__(self):
super().__init__()
self.preamble.append(Command('title', 'Awesome Title'))
self.preamble.append(Command('author', 'Anonymous author'))
self.preamble.append(Command('date', NoEscape(r'\today')))
self.append(NoEscape(r'\maketitle'))
def fill_document(self):
"""Add a section, a subsection and some text to the document."""
with self.create(Section('A section')):
self.append('Some regular text and some ')
self.append(italic('italic text. '))
with self.create(Subsection('A subsection')):
self.append('Also some crazy characters: $&#{}')
if __name__ == '__main__':
# Document
doc = MyDocument()
# Call function to add text
doc.fill_document()
# Add stuff to the document
with doc.create(Section('A second section')):
doc.append('Some text.')
doc.generate_pdf('basic_inheritance', clean_tex=False)
tex = doc.dumps() # The document as string in LaTeX syntax
|
import asyncio
from datetime import timedelta
import logging
from typing import Any, Dict
from pyipp import IPP, IPPError, Printer as IPPPrinter
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_NAME,
CONF_HOST,
CONF_PORT,
CONF_SSL,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SOFTWARE_VERSION,
CONF_BASE_PATH,
DOMAIN,
)
PLATFORMS = [SENSOR_DOMAIN]
SCAN_INTERVAL = timedelta(seconds=60)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Dict) -> bool:
"""Set up the IPP component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up IPP from a config entry."""
# Create IPP instance for this entry
coordinator = IPPDataUpdateCoordinator(
hass,
host=entry.data[CONF_HOST],
port=entry.data[CONF_PORT],
base_path=entry.data[CONF_BASE_PATH],
tls=entry.data[CONF_SSL],
verify_ssl=entry.data[CONF_VERIFY_SSL],
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data[DOMAIN][entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class IPPDataUpdateCoordinator(DataUpdateCoordinator[IPPPrinter]):
"""Class to manage fetching IPP data from single endpoint."""
def __init__(
self,
hass: HomeAssistant,
*,
host: str,
port: int,
base_path: str,
tls: bool,
verify_ssl: bool,
):
"""Initialize global IPP data updater."""
self.ipp = IPP(
host=host,
port=port,
base_path=base_path,
tls=tls,
verify_ssl=verify_ssl,
session=async_get_clientsession(hass, verify_ssl),
)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> IPPPrinter:
"""Fetch data from IPP."""
try:
return await self.ipp.printer()
except IPPError as error:
raise UpdateFailed(f"Invalid response from API: {error}") from error
class IPPEntity(CoordinatorEntity):
"""Defines a base IPP entity."""
def __init__(
self,
*,
entry_id: str,
device_id: str,
coordinator: IPPDataUpdateCoordinator,
name: str,
icon: str,
enabled_default: bool = True,
) -> None:
"""Initialize the IPP entity."""
super().__init__(coordinator)
self._device_id = device_id
self._enabled_default = enabled_default
self._entry_id = entry_id
self._icon = icon
self._name = name
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def icon(self) -> str:
"""Return the mdi icon of the entity."""
return self._icon
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self._enabled_default
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about this IPP device."""
if self._device_id is None:
return None
return {
ATTR_IDENTIFIERS: {(DOMAIN, self._device_id)},
ATTR_NAME: self.coordinator.data.info.name,
ATTR_MANUFACTURER: self.coordinator.data.info.manufacturer,
ATTR_MODEL: self.coordinator.data.info.model,
ATTR_SOFTWARE_VERSION: self.coordinator.data.info.version,
}
|
import pytest
from .aiohttp import AiohttpClientMocker
async def test_matching_url():
"""Test we can match urls."""
mocker = AiohttpClientMocker()
mocker.get("http://example.com")
await mocker.match_request("get", "http://example.com/")
mocker.clear_requests()
with pytest.raises(AssertionError):
await mocker.match_request("get", "http://example.com/")
mocker.clear_requests()
mocker.get("http://example.com?a=1")
await mocker.match_request("get", "http://example.com/", params={"a": 1, "b": 2})
|
from datetime import datetime
from typing import Hashable
import numpy as np
import pandas as pd
import pytest
from xarray.coding.cftimeindex import CFTimeIndex
from xarray.core import duck_array_ops, utils
from xarray.core.utils import either_dict_or_kwargs
from . import assert_array_equal, raises_regex, requires_cftime, requires_dask
from .test_coding_times import _all_cftime_date_types
class TestAlias:
def test(self):
def new_method():
pass
old_method = utils.alias(new_method, "old_method")
assert "deprecated" in old_method.__doc__
with pytest.warns(Warning, match="deprecated"):
old_method()
def test_safe_cast_to_index():
dates = pd.date_range("2000-01-01", periods=10)
x = np.arange(5)
td = x * np.timedelta64(1, "D")
for expected, array in [
(dates, dates.values),
(pd.Index(x, dtype=object), x.astype(object)),
(pd.Index(td), td),
(pd.Index(td, dtype=object), td.astype(object)),
]:
actual = utils.safe_cast_to_index(array)
assert_array_equal(expected, actual)
assert expected.dtype == actual.dtype
@requires_cftime
def test_safe_cast_to_index_cftimeindex():
date_types = _all_cftime_date_types()
for date_type in date_types.values():
dates = [date_type(1, 1, day) for day in range(1, 20)]
expected = CFTimeIndex(dates)
actual = utils.safe_cast_to_index(np.array(dates))
assert_array_equal(expected, actual)
assert expected.dtype == actual.dtype
assert isinstance(actual, type(expected))
# Test that datetime.datetime objects are never used in a CFTimeIndex
@requires_cftime
def test_safe_cast_to_index_datetime_datetime():
dates = [datetime(1, 1, day) for day in range(1, 20)]
expected = pd.Index(dates)
actual = utils.safe_cast_to_index(np.array(dates))
assert_array_equal(expected, actual)
assert isinstance(actual, pd.Index)
def test_multiindex_from_product_levels():
result = utils.multiindex_from_product_levels(
[pd.Index(["b", "a"]), pd.Index([1, 3, 2])]
)
np.testing.assert_array_equal(
result.codes, [[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]]
)
np.testing.assert_array_equal(result.levels[0], ["b", "a"])
np.testing.assert_array_equal(result.levels[1], [1, 3, 2])
other = pd.MultiIndex.from_product([["b", "a"], [1, 3, 2]])
np.testing.assert_array_equal(result.values, other.values)
def test_multiindex_from_product_levels_non_unique():
result = utils.multiindex_from_product_levels(
[pd.Index(["b", "a"]), pd.Index([1, 1, 2])]
)
np.testing.assert_array_equal(
result.codes, [[0, 0, 0, 1, 1, 1], [0, 0, 1, 0, 0, 1]]
)
np.testing.assert_array_equal(result.levels[0], ["b", "a"])
np.testing.assert_array_equal(result.levels[1], [1, 2])
class TestArrayEquiv:
def test_0d(self):
# verify our work around for pd.isnull not working for 0-dimensional
# object arrays
assert duck_array_ops.array_equiv(0, np.array(0, dtype=object))
assert duck_array_ops.array_equiv(np.nan, np.array(np.nan, dtype=object))
assert not duck_array_ops.array_equiv(0, np.array(1, dtype=object))
class TestDictionaries:
@pytest.fixture(autouse=True)
def setup(self):
self.x = {"a": "A", "b": "B"}
self.y = {"c": "C", "b": "B"}
self.z = {"a": "Z"}
def test_equivalent(self):
assert utils.equivalent(0, 0)
assert utils.equivalent(np.nan, np.nan)
assert utils.equivalent(0, np.array(0.0))
assert utils.equivalent([0], np.array([0]))
assert utils.equivalent(np.array([0]), [0])
assert utils.equivalent(np.arange(3), 1.0 * np.arange(3))
assert not utils.equivalent(0, np.zeros(3))
def test_safe(self):
# should not raise exception:
utils.update_safety_check(self.x, self.y)
def test_unsafe(self):
with pytest.raises(ValueError):
utils.update_safety_check(self.x, self.z)
def test_compat_dict_intersection(self):
assert {"b": "B"} == utils.compat_dict_intersection(self.x, self.y)
assert {} == utils.compat_dict_intersection(self.x, self.z)
def test_compat_dict_union(self):
assert {"a": "A", "b": "B", "c": "C"} == utils.compat_dict_union(self.x, self.y)
with raises_regex(
ValueError,
"unsafe to merge dictionaries without "
"overriding values; conflicting key",
):
utils.compat_dict_union(self.x, self.z)
def test_dict_equiv(self):
x = {}
x["a"] = 3
x["b"] = np.array([1, 2, 3])
y = {}
y["b"] = np.array([1.0, 2.0, 3.0])
y["a"] = 3
assert utils.dict_equiv(x, y) # two nparrays are equal
y["b"] = [1, 2, 3] # np.array not the same as a list
assert utils.dict_equiv(x, y) # nparray == list
x["b"] = [1.0, 2.0, 3.0]
assert utils.dict_equiv(x, y) # list vs. list
x["c"] = None
assert not utils.dict_equiv(x, y) # new key in x
x["c"] = np.nan
y["c"] = np.nan
assert utils.dict_equiv(x, y) # as intended, nan is nan
x["c"] = np.inf
y["c"] = np.inf
assert utils.dict_equiv(x, y) # inf == inf
y = dict(y)
assert utils.dict_equiv(x, y) # different dictionary types are fine
y["b"] = 3 * np.arange(3)
assert not utils.dict_equiv(x, y) # not equal when arrays differ
def test_frozen(self):
x = utils.Frozen(self.x)
with pytest.raises(TypeError):
x["foo"] = "bar"
with pytest.raises(TypeError):
del x["a"]
with pytest.raises(AttributeError):
x.update(self.y)
assert x.mapping == self.x
assert repr(x) in (
"Frozen({'a': 'A', 'b': 'B'})",
"Frozen({'b': 'B', 'a': 'A'})",
)
def test_sorted_keys_dict(self):
x = {"a": 1, "b": 2, "c": 3}
y = utils.SortedKeysDict(x)
assert list(y) == ["a", "b", "c"]
assert repr(utils.SortedKeysDict()) == "SortedKeysDict({})"
def test_repr_object():
obj = utils.ReprObject("foo")
assert repr(obj) == "foo"
assert isinstance(obj, Hashable)
assert not isinstance(obj, str)
def test_repr_object_magic_methods():
o1 = utils.ReprObject("foo")
o2 = utils.ReprObject("foo")
o3 = utils.ReprObject("bar")
o4 = "foo"
assert o1 == o2
assert o1 != o3
assert o1 != o4
assert hash(o1) == hash(o2)
assert hash(o1) != hash(o3)
assert hash(o1) != hash(o4)
def test_is_remote_uri():
assert utils.is_remote_uri("http://example.com")
assert utils.is_remote_uri("https://example.com")
assert not utils.is_remote_uri(" http://example.com")
assert not utils.is_remote_uri("example.nc")
def test_is_grib_path():
assert not utils.is_grib_path("example.nc")
assert not utils.is_grib_path("example.grib ")
assert utils.is_grib_path("example.grib")
assert utils.is_grib_path("example.grib2")
assert utils.is_grib_path("example.grb")
assert utils.is_grib_path("example.grb2")
class Test_is_uniform_and_sorted:
def test_sorted_uniform(self):
assert utils.is_uniform_spaced(np.arange(5))
def test_sorted_not_uniform(self):
assert not utils.is_uniform_spaced([-2, 1, 89])
def test_not_sorted_uniform(self):
assert not utils.is_uniform_spaced([1, -1, 3])
def test_not_sorted_not_uniform(self):
assert not utils.is_uniform_spaced([4, 1, 89])
def test_two_numbers(self):
assert utils.is_uniform_spaced([0, 1.7])
def test_relative_tolerance(self):
assert utils.is_uniform_spaced([0, 0.97, 2], rtol=0.1)
class Test_hashable:
def test_hashable(self):
for v in [False, 1, (2,), (3, 4), "four"]:
assert utils.hashable(v)
for v in [[5, 6], ["seven", "8"], {9: "ten"}]:
assert not utils.hashable(v)
@requires_dask
def test_dask_array_is_scalar():
# regression test for GH1684
import dask.array as da
y = da.arange(8, chunks=4)
assert not utils.is_scalar(y)
def test_hidden_key_dict():
hidden_key = "_hidden_key"
data = {"a": 1, "b": 2, hidden_key: 3}
data_expected = {"a": 1, "b": 2}
hkd = utils.HiddenKeyDict(data, [hidden_key])
assert len(hkd) == 2
assert hidden_key not in hkd
for k, v in data_expected.items():
assert hkd[k] == v
with pytest.raises(KeyError):
hkd[hidden_key]
with pytest.raises(KeyError):
del hkd[hidden_key]
def test_either_dict_or_kwargs():
result = either_dict_or_kwargs(dict(a=1), None, "foo")
expected = dict(a=1)
assert result == expected
result = either_dict_or_kwargs(None, dict(a=1), "foo")
expected = dict(a=1)
assert result == expected
with pytest.raises(ValueError, match=r"foo"):
result = either_dict_or_kwargs(dict(a=1), dict(a=1), "foo")
@pytest.mark.parametrize(
["supplied", "all_", "expected"],
[
(list("abc"), list("abc"), list("abc")),
(["a", ..., "c"], list("abc"), list("abc")),
(["a", ...], list("abc"), list("abc")),
(["c", ...], list("abc"), list("cab")),
([..., "b"], list("abc"), list("acb")),
([...], list("abc"), list("abc")),
],
)
def test_infix_dims(supplied, all_, expected):
result = list(utils.infix_dims(supplied, all_))
assert result == expected
@pytest.mark.parametrize(
["supplied", "all_"], [([..., ...], list("abc")), ([...], list("aac"))]
)
def test_infix_dims_errors(supplied, all_):
with pytest.raises(ValueError):
list(utils.infix_dims(supplied, all_))
|
import asyncio
from os import path
import httpx
import respx
from homeassistant import config as hass_config
import homeassistant.components.binary_sensor as binary_sensor
from homeassistant.const import (
ATTR_ENTITY_ID,
CONTENT_TYPE_JSON,
SERVICE_RELOAD,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import Mock, patch
async def test_setup_missing_basic_config(hass):
"""Test setup with configuration missing required entries."""
assert await async_setup_component(
hass, binary_sensor.DOMAIN, {"binary_sensor": {"platform": "rest"}}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
async def test_setup_missing_config(hass):
"""Test setup with configuration missing required entries."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource": "localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_failed_connect(hass):
"""Test setup when connection error occurs."""
respx.get(
"http://localhost", content=httpx.RequestError(message="any", request=Mock())
)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_timeout(hass):
"""Test setup when connection timeout occurs."""
respx.get("http://localhost", content=asyncio.TimeoutError())
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource": "localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_minimum(hass):
"""Test setup with minimum configuration."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_minimum_resource_template(hass):
"""Test setup with minimum configuration (resource_template)."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource_template": "http://localhost",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_duplicate_resource_template(hass):
"""Test setup with duplicate resources."""
respx.get("http://localhost", status_code=200)
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"resource_template": "http://localhost",
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 0
@respx.mock
async def test_setup_get(hass):
"""Test setup with valid configuration."""
respx.get("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
"authentication": "basic",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_get_digest_auth(hass):
"""Test setup with valid configuration."""
respx.get("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.key }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
"authentication": "digest",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_post(hass):
"""Test setup with valid configuration."""
respx.post("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "POST",
"value_template": "{{ value_json.key }}",
"payload": '{ "device": "toaster"}',
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
"authentication": "basic",
"username": "my username",
"password": "my password",
"headers": {"Accept": CONTENT_TYPE_JSON},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
@respx.mock
async def test_setup_get_off(hass):
"""Test setup with valid off configuration."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/json"},
content='{"dog": false}',
)
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.dog }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("binary_sensor.foo")
assert state.state == STATE_OFF
@respx.mock
async def test_setup_get_on(hass):
"""Test setup with valid on configuration."""
respx.get(
"http://localhost",
status_code=200,
headers={"content-type": "text/json"},
content='{"dog": true}',
)
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.dog }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("binary_sensor.foo")
assert state.state == STATE_ON
@respx.mock
async def test_setup_with_exception(hass):
"""Test setup with exception."""
respx.get("http://localhost", status_code=200, content="{}")
assert await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"resource": "http://localhost",
"method": "GET",
"value_template": "{{ value_json.dog }}",
"name": "foo",
"verify_ssl": "true",
"timeout": 30,
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
state = hass.states.get("binary_sensor.foo")
assert state.state == STATE_OFF
await async_setup_component(hass, "homeassistant", {})
await hass.async_block_till_done()
respx.clear()
respx.get(
"http://localhost", content=httpx.RequestError(message="any", request=Mock())
)
await hass.services.async_call(
"homeassistant",
"update_entity",
{ATTR_ENTITY_ID: ["binary_sensor.foo"]},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.foo")
assert state.state == STATE_UNAVAILABLE
@respx.mock
async def test_reload(hass):
"""Verify we can reload reset sensors."""
respx.get("http://localhost", status_code=200)
await async_setup_component(
hass,
"binary_sensor",
{
"binary_sensor": {
"platform": "rest",
"method": "GET",
"name": "mockrest",
"resource": "http://localhost",
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("binary_sensor.mockrest")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"rest/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
"rest",
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.mockreset") is None
assert hass.states.get("binary_sensor.rollout")
def _get_fixtures_base_path():
return path.dirname(path.dirname(path.dirname(__file__)))
|
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
from . import DOMAIN
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the demo switches."""
async_add_entities(
[
DemoSwitch("swith1", "Decorative Lights", True, None, True),
DemoSwitch(
"swith2",
"AC",
False,
"mdi:air-conditioner",
False,
device_class="outlet",
),
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
class DemoSwitch(SwitchEntity):
"""Representation of a demo switch."""
def __init__(self, unique_id, name, state, icon, assumed, device_class=None):
"""Initialize the Demo switch."""
self._unique_id = unique_id
self._name = name or DEVICE_DEFAULT_NAME
self._state = state
self._icon = icon
self._assumed = assumed
self._device_class = device_class
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {
# Serial numbers are unique identifiers within a specific domain
(DOMAIN, self.unique_id)
},
"name": self.name,
}
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def should_poll(self):
"""No polling needed for a demo switch."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def icon(self):
"""Return the icon to use for device if any."""
return self._icon
@property
def assumed_state(self):
"""Return if the state is based on assumptions."""
return self._assumed
@property
def current_power_w(self):
"""Return the current power usage in W."""
if self._state:
return 100
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return 15
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
@property
def device_class(self):
"""Return device of entity."""
return self._device_class
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
self._state = False
self.schedule_update_ha_state()
|
import io
from typing import Callable
from aiohttp import web
from homeassistant.core import callback
from .const import FORMAT_CONTENT_TYPE
from .core import PROVIDERS, StreamOutput, StreamView
from .fmp4utils import get_codec_string, get_init, get_m4s
@callback
def async_setup_hls(hass):
"""Set up api endpoints."""
hass.http.register_view(HlsPlaylistView())
hass.http.register_view(HlsSegmentView())
hass.http.register_view(HlsInitView())
hass.http.register_view(HlsMasterPlaylistView())
return "/api/hls/{}/master_playlist.m3u8"
class HlsMasterPlaylistView(StreamView):
"""Stream view used only for Chromecast compatibility."""
url = r"/api/hls/{token:[a-f0-9]+}/master_playlist.m3u8"
name = "api:stream:hls:master_playlist"
cors_allowed = True
@staticmethod
def render(track):
"""Render M3U8 file."""
# Need to calculate max bandwidth as input_container.bit_rate doesn't seem to work
# Calculate file size / duration and use a small multiplier to account for variation
# hls spec already allows for 25% variation
segment = track.get_segment(track.segments[-1])
bandwidth = round(
segment.segment.seek(0, io.SEEK_END) * 8 / segment.duration * 1.2
)
codecs = get_codec_string(segment.segment)
lines = [
"#EXTM3U",
f'#EXT-X-STREAM-INF:BANDWIDTH={bandwidth},CODECS="{codecs}"',
"playlist.m3u8",
]
return "\n".join(lines) + "\n"
async def handle(self, request, stream, sequence):
"""Return m3u8 playlist."""
track = stream.add_provider("hls")
stream.start()
# Wait for a segment to be ready
if not track.segments:
await track.recv()
headers = {"Content-Type": FORMAT_CONTENT_TYPE["hls"]}
return web.Response(body=self.render(track).encode("utf-8"), headers=headers)
class HlsPlaylistView(StreamView):
"""Stream view to serve a M3U8 stream."""
url = r"/api/hls/{token:[a-f0-9]+}/playlist.m3u8"
name = "api:stream:hls:playlist"
cors_allowed = True
@staticmethod
def render_preamble(track):
"""Render preamble."""
return [
"#EXT-X-VERSION:7",
f"#EXT-X-TARGETDURATION:{track.target_duration}",
'#EXT-X-MAP:URI="init.mp4"',
]
@staticmethod
def render_playlist(track):
"""Render playlist."""
segments = track.segments
if not segments:
return []
playlist = ["#EXT-X-MEDIA-SEQUENCE:{}".format(segments[0])]
for sequence in segments:
segment = track.get_segment(sequence)
playlist.extend(
[
"#EXTINF:{:.04f},".format(float(segment.duration)),
f"./segment/{segment.sequence}.m4s",
]
)
return playlist
def render(self, track):
"""Render M3U8 file."""
lines = ["#EXTM3U"] + self.render_preamble(track) + self.render_playlist(track)
return "\n".join(lines) + "\n"
async def handle(self, request, stream, sequence):
"""Return m3u8 playlist."""
track = stream.add_provider("hls")
stream.start()
# Wait for a segment to be ready
if not track.segments:
await track.recv()
headers = {"Content-Type": FORMAT_CONTENT_TYPE["hls"]}
return web.Response(body=self.render(track).encode("utf-8"), headers=headers)
class HlsInitView(StreamView):
"""Stream view to serve HLS init.mp4."""
url = r"/api/hls/{token:[a-f0-9]+}/init.mp4"
name = "api:stream:hls:init"
cors_allowed = True
async def handle(self, request, stream, sequence):
"""Return init.mp4."""
track = stream.add_provider("hls")
segments = track.get_segment()
if not segments:
return web.HTTPNotFound()
headers = {"Content-Type": "video/mp4"}
return web.Response(body=get_init(segments[0].segment), headers=headers)
class HlsSegmentView(StreamView):
"""Stream view to serve a HLS fmp4 segment."""
url = r"/api/hls/{token:[a-f0-9]+}/segment/{sequence:\d+}.m4s"
name = "api:stream:hls:segment"
cors_allowed = True
async def handle(self, request, stream, sequence):
"""Return fmp4 segment."""
track = stream.add_provider("hls")
segment = track.get_segment(int(sequence))
if not segment:
return web.HTTPNotFound()
headers = {"Content-Type": "video/iso.segment"}
return web.Response(
body=get_m4s(segment.segment, int(sequence)),
headers=headers,
)
@PROVIDERS.register("hls")
class HlsStreamOutput(StreamOutput):
"""Represents HLS Output formats."""
@property
def name(self) -> str:
"""Return provider name."""
return "hls"
@property
def format(self) -> str:
"""Return container format."""
return "mp4"
@property
def audio_codecs(self) -> set:
"""Return desired audio codecs."""
return {"aac", "mp3"}
@property
def video_codecs(self) -> set:
"""Return desired video codecs."""
return {"hevc", "h264"}
@property
def container_options(self) -> Callable[[int], dict]:
"""Return Callable which takes a sequence number and returns container options."""
return lambda sequence: {
# Removed skip_sidx - see https://github.com/home-assistant/core/pull/39970
"movflags": "frag_custom+empty_moov+default_base_moof+frag_discont",
"avoid_negative_ts": "make_non_negative",
"fragment_index": str(sequence),
}
|
import io
import os
from urllib.parse import urljoin, urlparse
from nikola.plugin_categories import LateTask
from nikola import utils
class RobotsFile(LateTask):
"""Generate a robots.txt file."""
name = "robots_file"
def gen_tasks(self):
"""Generate a robots.txt file."""
kw = {
"base_url": self.site.config["BASE_URL"],
"site_url": self.site.config["SITE_URL"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"files_folders": self.site.config['FILES_FOLDERS'],
"robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"],
"filters": self.site.config["FILTERS"],
}
sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml")
robots_path = os.path.join(kw['output_folder'], "robots.txt")
def write_robots():
if kw["site_url"] != urljoin(kw["site_url"], "/"):
utils.LOGGER.warning('robots.txt not ending up in server root, will be useless')
utils.LOGGER.info('Add "robots" to DISABLED_PLUGINS to disable this warning and robots.txt generation.')
with io.open(robots_path, 'w+', encoding='utf8') as outf:
outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
outf.write("User-Agent: *\n")
if kw["robots_exclusions"]:
for loc in kw["robots_exclusions"]:
outf.write("Disallow: {0}\n".format(loc))
outf.write("Host: {0}\n".format(urlparse(kw["base_url"]).netloc))
yield self.group_task()
if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"], output_dir=False):
yield utils.apply_filters({
"basename": self.name,
"name": robots_path,
"targets": [robots_path],
"actions": [(write_robots)],
"uptodate": [utils.config_changed(kw, 'nikola.plugins.task.robots')],
"clean": True,
"task_dep": ["sitemap"]
}, kw["filters"])
elif kw["robots_exclusions"]:
utils.LOGGER.warning('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
else:
utils.LOGGER.debug('Did not generate robots.txt as one already exists in FILES_FOLDERS.')
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.components.air_quality import AirQualityEntity
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
AIR_QUALITY_TEXT = {
0: "unknown",
1: "excellent",
2: "good",
3: "fair",
4: "inferior",
5: "poor",
}
class HomeAirQualitySensor(HomeKitEntity, AirQualityEntity):
"""Representation of a HomeKit Controller Air Quality sensor."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.AIR_QUALITY,
CharacteristicsTypes.DENSITY_PM25,
CharacteristicsTypes.DENSITY_PM10,
CharacteristicsTypes.DENSITY_OZONE,
CharacteristicsTypes.DENSITY_NO2,
CharacteristicsTypes.DENSITY_SO2,
CharacteristicsTypes.DENSITY_VOC,
]
@property
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self.service.value(CharacteristicsTypes.DENSITY_PM25)
@property
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self.service.value(CharacteristicsTypes.DENSITY_PM10)
@property
def ozone(self):
"""Return the O3 (ozone) level."""
return self.service.value(CharacteristicsTypes.DENSITY_OZONE)
@property
def sulphur_dioxide(self):
"""Return the SO2 (sulphur dioxide) level."""
return self.service.value(CharacteristicsTypes.DENSITY_SO2)
@property
def nitrogen_dioxide(self):
"""Return the NO2 (nitrogen dioxide) level."""
return self.service.value(CharacteristicsTypes.DENSITY_NO2)
@property
def air_quality_text(self):
"""Return the Air Quality Index (AQI)."""
air_quality = self.service.value(CharacteristicsTypes.AIR_QUALITY)
return AIR_QUALITY_TEXT.get(air_quality, "unknown")
@property
def volatile_organic_compounds(self):
"""Return the volatile organic compounds (VOC) level."""
return self.service.value(CharacteristicsTypes.DENSITY_VOC)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
data = {"air_quality_text": self.air_quality_text}
voc = self.volatile_organic_compounds
if voc:
data["volatile_organic_compounds"] = voc
return data
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit air quality sensor."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
if service["stype"] != "air-quality":
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([HomeAirQualitySensor(conn, info)], True)
return True
conn.add_listener(async_add_service)
|
from datetime import datetime, timedelta
import logging
from operator import itemgetter
import rjpl
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_NAME, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_STOP_ID = "stop_id"
ATTR_STOP_NAME = "stop"
ATTR_ROUTE = "route"
ATTR_TYPE = "type"
ATTR_DIRECTION = "direction"
ATTR_FINAL_STOP = "final_stop"
ATTR_DUE_IN = "due_in"
ATTR_DUE_AT = "due_at"
ATTR_SCHEDULED_AT = "scheduled_at"
ATTR_REAL_TIME_AT = "real_time_at"
ATTR_TRACK = "track"
ATTR_NEXT_UP = "next_departures"
ATTRIBUTION = "Data provided by rejseplanen.dk"
CONF_STOP_ID = "stop_id"
CONF_ROUTE = "route"
CONF_DIRECTION = "direction"
CONF_DEPARTURE_TYPE = "departure_type"
DEFAULT_NAME = "Next departure"
ICON = "mdi:bus"
SCAN_INTERVAL = timedelta(minutes=1)
BUS_TYPES = ["BUS", "EXB", "TB"]
TRAIN_TYPES = ["LET", "S", "REG", "IC", "LYN", "TOG"]
METRO_TYPES = ["M"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ROUTE, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_DIRECTION, default=[]): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_DEPARTURE_TYPE, default=[]): vol.All(
cv.ensure_list, [vol.In([*BUS_TYPES, *TRAIN_TYPES, *METRO_TYPES])]
),
}
)
def due_in_minutes(timestamp):
"""Get the time in minutes from a timestamp.
The timestamp should be in the format day.month.year hour:minute
"""
diff = datetime.strptime(timestamp, "%d.%m.%y %H:%M") - dt_util.now().replace(
tzinfo=None
)
return int(diff.total_seconds() // 60)
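# Illustrative example (note the two-digit year implied by "%d.%m.%y"): if
# dt_util.now() is 12:00 on 24 September 2020, due_in_minutes("24.09.20 12:30")
# returns 30; a timestamp in the past yields a negative value.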
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Rejseplanen transport sensor."""
name = config[CONF_NAME]
stop_id = config[CONF_STOP_ID]
route = config.get(CONF_ROUTE)
direction = config[CONF_DIRECTION]
departure_type = config[CONF_DEPARTURE_TYPE]
data = PublicTransportData(stop_id, route, direction, departure_type)
add_devices(
[RejseplanenTransportSensor(data, stop_id, route, direction, name)], True
)
class RejseplanenTransportSensor(Entity):
"""Implementation of Rejseplanen transport sensor."""
def __init__(self, data, stop_id, route, direction, name):
"""Initialize the sensor."""
self.data = data
self._name = name
self._stop_id = stop_id
self._route = route
self._direction = direction
self._times = self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if not self._times:
return {ATTR_STOP_ID: self._stop_id, ATTR_ATTRIBUTION: ATTRIBUTION}
next_up = []
if len(self._times) > 1:
next_up = self._times[1:]
attributes = {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_NEXT_UP: next_up,
ATTR_STOP_ID: self._stop_id,
}
if self._times[0] is not None:
attributes.update(self._times[0])
return attributes
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_MINUTES
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data from rejseplanen.dk and update the states."""
self.data.update()
self._times = self.data.info
if not self._times:
self._state = None
else:
try:
self._state = self._times[0][ATTR_DUE_IN]
except TypeError:
pass
class PublicTransportData:
"""The Class for handling the data retrieval."""
def __init__(self, stop_id, route, direction, departure_type):
"""Initialize the data object."""
self.stop_id = stop_id
self.route = route
self.direction = direction
self.departure_type = departure_type
self.info = []
def update(self):
"""Get the latest data from rejseplanen."""
self.info = []
def intersection(lst1, lst2):
"""Return items contained in both lists."""
return list(set(lst1) & set(lst2))
# Limit the query to the selected departure types; this leaves more room in the result set for relevant departures
all_types = not bool(self.departure_type)
use_train = all_types or bool(intersection(TRAIN_TYPES, self.departure_type))
use_bus = all_types or bool(intersection(BUS_TYPES, self.departure_type))
use_metro = all_types or bool(intersection(METRO_TYPES, self.departure_type))
try:
results = rjpl.departureBoard(
int(self.stop_id),
timeout=5,
useTrain=use_train,
useBus=use_bus,
useMetro=use_metro,
)
except rjpl.rjplAPIError as error:
_LOGGER.debug("API returned error: %s", error)
return
except (rjpl.rjplConnectionError, rjpl.rjplHTTPError):
_LOGGER.debug("Error occurred while connecting to the API")
return
# Filter result
results = [d for d in results if "cancelled" not in d]
if self.route:
results = [d for d in results if d["name"] in self.route]
if self.direction:
results = [d for d in results if d["direction"] in self.direction]
if self.departure_type:
results = [d for d in results if d["type"] in self.departure_type]
for item in results:
route = item.get("name")
scheduled_date = item.get("date")
scheduled_time = item.get("time")
real_time_date = due_at_date = item.get("rtDate")
real_time_time = due_at_time = item.get("rtTime")
if due_at_date is None:
due_at_date = scheduled_date
if due_at_time is None:
due_at_time = scheduled_time
if (
due_at_date is not None
and due_at_time is not None
and route is not None
):
due_at = f"{due_at_date} {due_at_time}"
scheduled_at = f"{scheduled_date} {scheduled_time}"
departure_data = {
ATTR_DIRECTION: item.get("direction"),
ATTR_DUE_IN: due_in_minutes(due_at),
ATTR_DUE_AT: due_at,
ATTR_FINAL_STOP: item.get("finalStop"),
ATTR_ROUTE: route,
ATTR_SCHEDULED_AT: scheduled_at,
ATTR_STOP_NAME: item.get("stop"),
ATTR_TYPE: item.get("type"),
}
if real_time_date is not None and real_time_time is not None:
departure_data[
ATTR_REAL_TIME_AT
] = f"{real_time_date} {real_time_time}"
if item.get("rtTrack") is not None:
departure_data[ATTR_TRACK] = item.get("rtTrack")
self.info.append(departure_data)
if not self.info:
_LOGGER.debug("No departures with given parameters")
# Sort the data by time
self.info = sorted(self.info, key=itemgetter(ATTR_DUE_IN))
|
import re
import os
import os.path
import sys
import shutil
import pstats
import operator
import pathlib
import pytest
from PyQt5.QtCore import PYQT_VERSION, QCoreApplication
pytest.register_assert_rewrite('end2end.fixtures')
from end2end.fixtures.webserver import server, server_per_test, ssl_server
from end2end.fixtures.quteprocess import (quteproc_process, quteproc,
quteproc_new)
from end2end.fixtures.testprocess import pytest_runtest_makereport
from qutebrowser.utils import qtutils, utils
from qutebrowser.browser.webengine import spell
def pytest_configure(config):
"""Remove old profile files."""
if config.getoption('--qute-profile-subprocs'):
try:
shutil.rmtree('prof')
except FileNotFoundError:
pass
def pytest_unconfigure(config):
"""Combine profiles."""
if config.getoption('--qute-profile-subprocs'):
stats = pstats.Stats()
for fn in os.listdir('prof'):
stats.add(os.path.join('prof', fn))
stats.dump_stats(os.path.join('prof', 'combined.pstats'))
def _get_version_tag(tag):
"""Handle tags like pyqt>=5.3.1 for BDD tests.
This transforms e.g. pyqt>=5.3.1 into an appropriate @pytest.mark.skip
marker, and falls back to pytest-bdd's implementation for all other
cases.
"""
version_re = re.compile(r"""
(?P<package>qt|pyqt)
(?P<operator>==|>=|!=|<)
(?P<version>\d+\.\d+(\.\d+)?)
""", re.VERBOSE)
match = version_re.fullmatch(tag)
if not match:
return None
package = match.group('package')
version = match.group('version')
if package == 'qt':
op = match.group('operator')
do_skip = {
'==': not qtutils.version_check(version, exact=True,
compiled=False),
'>=': not qtutils.version_check(version, compiled=False),
'<': qtutils.version_check(version, compiled=False),
'!=': qtutils.version_check(version, exact=True, compiled=False),
}
return pytest.mark.skipif(do_skip[op], reason='Needs ' + tag)
elif package == 'pyqt':
operators = {
'==': operator.eq,
'>=': operator.ge,
'!=': operator.ne,
}
op = operators[match.group('operator')]
major, minor, patch = [int(e) for e in version.split('.')]
hex_version = (major << 16) | (minor << 8) | patch
return pytest.mark.skipif(not op(PYQT_VERSION, hex_version),
reason='Needs ' + tag)
else:
raise ValueError("Invalid package {!r}".format(package))
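# Worked example for the pyqt branch above: for the tag "pyqt>=5.3.1", version 5.3.1
# is packed as (5 << 16) | (3 << 8) | 1 == 0x050301, the same encoding PyQt uses for
# PYQT_VERSION, so a plain integer comparison implements the version check.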
def _get_backend_tag(tag):
"""Handle a @qtwebengine_*/@qtwebkit_skip tag."""
pytest_marks = {
'qtwebengine_todo': pytest.mark.qtwebengine_todo,
'qtwebengine_skip': pytest.mark.qtwebengine_skip,
'qtwebengine_notifications': pytest.mark.qtwebengine_notifications,
'qtwebkit_skip': pytest.mark.qtwebkit_skip,
}
if not any(tag.startswith(t + ':') for t in pytest_marks):
return None
name, desc = tag.split(':', maxsplit=1)
return pytest_marks[name](desc)
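# Illustrative example: a tag such as "qtwebengine_skip:flaky on Wayland" is split on
# the first ':' into name and description and becomes
# pytest.mark.qtwebengine_skip("flaky on Wayland") applied to the scenario.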
if not getattr(sys, 'frozen', False):
def pytest_bdd_apply_tag(tag, function):
"""Handle custom tags for BDD tests.
This tries various functions, and if none knows how to handle this tag,
it returns None so it falls back to pytest-bdd's implementation.
"""
funcs = [_get_version_tag, _get_backend_tag]
for func in funcs:
mark = func(tag)
if mark is not None:
mark(function)
return True
return None
def pytest_collection_modifyitems(config, items):
"""Apply @qtwebengine_* markers; skip unittests with QUTE_BDD_WEBENGINE."""
# WORKAROUND for https://bugreports.qt.io/browse/QTBUG-75884
# (note this isn't actually fixed properly before Qt 5.15)
header_bug_fixed = qtutils.version_check('5.15', compiled=False)
lib_path = pathlib.Path(QCoreApplication.libraryPaths()[0])
qpdf_image_plugin = lib_path / 'imageformats' / 'libqpdf.so'
markers = [
('qtwebengine_todo', 'QtWebEngine TODO', pytest.mark.xfail,
config.webengine),
('qtwebengine_skip', 'Skipped with QtWebEngine', pytest.mark.skipif,
config.webengine),
('qtwebengine_notifications',
'Skipped with QtWebEngine < 5.13',
pytest.mark.skipif,
config.webengine and not qtutils.version_check('5.13')),
('qtwebkit_skip', 'Skipped with QtWebKit', pytest.mark.skipif,
not config.webengine),
('qtwebengine_flaky', 'Flaky with QtWebEngine', pytest.mark.skipif,
config.webengine),
('qtwebengine_mac_xfail', 'Fails on macOS with QtWebEngine',
pytest.mark.xfail, config.webengine and utils.is_mac),
('js_headers', 'Sets headers dynamically via JS',
pytest.mark.skipif,
config.webengine and not header_bug_fixed),
('qtwebkit_pdf_imageformat_skip',
'Skipped with QtWebKit if PDF image plugin is available',
pytest.mark.skipif,
not config.webengine and qpdf_image_plugin.exists()),
]
for item in items:
for name, prefix, pytest_mark, condition in markers:
marker = item.get_closest_marker(name)
if marker and condition:
if marker.args:
text = '{}: {}'.format(prefix, marker.args[0])
else:
text = prefix
item.add_marker(pytest_mark(condition, reason=text,
**marker.kwargs))
|
from canary.api import Api
from pytest import fixture
from tests.async_mock import MagicMock, patch
@fixture
def canary(hass):
"""Mock the CanaryApi for easier testing."""
with patch.object(Api, "login", return_value=True), patch(
"homeassistant.components.canary.Api"
) as mock_canary:
instance = mock_canary.return_value = Api(
"test-username",
"test-password",
1,
)
instance.login = MagicMock(return_value=True)
instance.get_entries = MagicMock(return_value=[])
instance.get_locations = MagicMock(return_value=[])
instance.get_location = MagicMock(return_value=None)
instance.get_modes = MagicMock(return_value=[])
instance.get_readings = MagicMock(return_value=[])
instance.get_latest_readings = MagicMock(return_value=[])
instance.set_location_mode = MagicMock(return_value=None)
yield mock_canary
@fixture
def canary_config_flow(hass):
"""Mock the CanaryApi for easier config flow testing."""
with patch.object(Api, "login", return_value=True), patch(
"homeassistant.components.canary.config_flow.Api"
) as mock_canary:
instance = mock_canary.return_value = Api(
"test-username",
"test-password",
1,
)
instance.login = MagicMock(return_value=True)
instance.get_entries = MagicMock(return_value=[])
instance.get_locations = MagicMock(return_value=[])
instance.get_location = MagicMock(return_value=None)
instance.get_modes = MagicMock(return_value=[])
instance.get_readings = MagicMock(return_value=[])
instance.get_latest_readings = MagicMock(return_value=[])
instance.set_location_mode = MagicMock(return_value=None)
yield mock_canary
|
from django.db import migrations
from django.db.models import F
def update_source_unit(apps, schema_editor):
Unit = apps.get_model("trans", "Unit")
db_alias = schema_editor.connection.alias
Unit.objects.using(db_alias).filter(
translation__language=F("translation__component__source_language")
).update(source_unit_id=F("id"))
class Migration(migrations.Migration):
dependencies = [
("trans", "0103_update_source_unit"),
]
operations = [migrations.RunPython(update_source_unit, elidable=True)]
|
import homeassistant.components.mqtt_statestream as statestream
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from tests.async_mock import ANY, call
from tests.common import mock_state_change_event
async def add_statestream(
hass,
base_topic=None,
publish_attributes=None,
publish_timestamps=None,
publish_include=None,
publish_exclude=None,
):
"""Add a mqtt_statestream component."""
config = {}
if base_topic:
config["base_topic"] = base_topic
if publish_attributes:
config["publish_attributes"] = publish_attributes
if publish_timestamps:
config["publish_timestamps"] = publish_timestamps
if publish_include:
config["include"] = publish_include
if publish_exclude:
config["exclude"] = publish_exclude
return await async_setup_component(
hass, statestream.DOMAIN, {statestream.DOMAIN: config}
)
async def test_fails_with_no_base(hass, mqtt_mock):
"""Setup should fail if no base_topic is set."""
assert await add_statestream(hass) is False
async def test_setup_succeeds_without_attributes(hass, mqtt_mock):
"""Test the success of the setup with a valid base_topic."""
assert await add_statestream(hass, base_topic="pub")
async def test_setup_succeeds_with_attributes(hass, mqtt_mock):
"""Test setup with a valid base_topic and publish_attributes."""
assert await add_statestream(hass, base_topic="pub", publish_attributes=True)
async def test_state_changed_event_sends_message(hass, mqtt_mock):
"""Test the sending of a new message if event changed."""
e_id = "fake.entity"
base_topic = "pub"
# Add the statestream component for publishing state updates
assert await add_statestream(hass, base_topic=base_topic)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
async def test_state_changed_event_sends_message_and_timestamp(hass, mqtt_mock):
"""Test the sending of a message and timestamps if event changed."""
e_id = "another.entity"
base_topic = "pub"
# Add the statestream component for publishing state updates
assert await add_statestream(
hass, base_topic=base_topic, publish_attributes=None, publish_timestamps=True
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' and both timestamps were published to pub/another/entity/*
calls = [
call.async_publish("pub/another/entity/state", "on", 1, True),
call.async_publish("pub/another/entity/last_changed", ANY, 1, True),
call.async_publish("pub/another/entity/last_updated", ANY, 1, True),
]
mqtt_mock.async_publish.assert_has_calls(calls, any_order=True)
assert mqtt_mock.async_publish.called
async def test_state_changed_attr_sends_message(hass, mqtt_mock):
"""Test the sending of a new message if attribute changed."""
e_id = "fake.entity"
base_topic = "pub"
# Add the statestream component for publishing state updates
assert await add_statestream(hass, base_topic=base_topic, publish_attributes=True)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
test_attributes = {"testing": "YES", "list": ["a", "b", "c"], "bool": False}
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "off", attributes=test_attributes))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'off' and each attribute were published to pub/fake/entity/*
calls = [
call.async_publish("pub/fake/entity/state", "off", 1, True),
call.async_publish("pub/fake/entity/testing", '"YES"', 1, True),
call.async_publish("pub/fake/entity/list", '["a", "b", "c"]', 1, True),
call.async_publish("pub/fake/entity/bool", "false", 1, True),
]
mqtt_mock.async_publish.assert_has_calls(calls, any_order=True)
assert mqtt_mock.async_publish.called
async def test_state_changed_event_include_domain(hass, mqtt_mock):
"""Test that filtering on included domain works as expected."""
base_topic = "pub"
incl = {"domains": ["fake"]}
excl = {}
# Add the statestream component for publishing state updates
# Set the filter to allow fake.* items
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake2.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_include_entity(hass, mqtt_mock):
"""Test that filtering on included entity works as expected."""
base_topic = "pub"
incl = {"entities": ["fake.entity"]}
excl = {}
# Add the statestream component for publishing state updates
# Set the filter to allow only fake.entity
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_exclude_domain(hass, mqtt_mock):
"""Test that filtering on excluded domain works as expected."""
base_topic = "pub"
incl = {}
excl = {"domains": ["fake2"]}
# Add the statestream component for publishing state updates
# Set the filter to exclude fake2.* items
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake2.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_exclude_entity(hass, mqtt_mock):
"""Test that filtering on excluded entity works as expected."""
base_topic = "pub"
incl = {}
excl = {"entities": ["fake.entity2"]}
# Add the statestream component for publishing state updates
# Set the filter to exclude fake.entity2
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_exclude_domain_include_entity(hass, mqtt_mock):
"""Test filtering with excluded domain and included entity."""
base_topic = "pub"
incl = {"entities": ["fake.entity"]}
excl = {"domains": ["fake"]}
# Add the statestream component for publishing state updates
# Include fake.entity while excluding the rest of the fake domain
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_include_domain_exclude_entity(hass, mqtt_mock):
"""Test filtering with included domain and excluded entity."""
base_topic = "pub"
incl = {"domains": ["fake"]}
excl = {"entities": ["fake.entity2"]}
# Add the statestream component for publishing state updates
# Set the filter to include the fake domain but exclude fake.entity2
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_include_globs(hass, mqtt_mock):
"""Test that filtering on included glob works as expected."""
base_topic = "pub"
incl = {"entity_globs": ["*.included_*"]}
excl = {}
# Add the statestream component for publishing state updates
# Set the filter to allow *.included_* items
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity with included glob
mock_state_change_event(hass, State("fake2.included_entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake2/included_entity/state
mqtt_mock.async_publish.assert_called_with(
"pub/fake2/included_entity/state", "on", 1, True
)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake2.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_exclude_globs(hass, mqtt_mock):
"""Test that filtering on excluded globs works as expected."""
base_topic = "pub"
incl = {}
excl = {"entity_globs": ["*.excluded_*"]}
# Add the statestream component for publishing state updates
# Set the filter to exclude *.excluded_* items
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included by glob
mock_state_change_event(hass, State("fake.excluded_entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_exclude_domain_globs_include_entity(hass, mqtt_mock):
"""Test filtering with excluded domain and glob and included entity."""
base_topic = "pub"
incl = {"entities": ["fake.entity"]}
excl = {"domains": ["fake"], "entity_globs": ["*.excluded_*"]}
# Add the statestream component for publishing state updates
# Set the filter to exclude the fake domain and the *.excluded_* glob, but include fake.entity
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that doesn't match any filters
mock_state_change_event(hass, State("fake2.included_entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake2/included_entity/state
mqtt_mock.async_publish.assert_called_with(
"pub/fake2/included_entity/state", "on", 1, True
)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included by domain
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included by glob
mock_state_change_event(hass, State("fake.excluded_entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_state_changed_event_include_domain_globs_exclude_entity(hass, mqtt_mock):
"""Test filtering with included domain and glob and excluded entity."""
base_topic = "pub"
incl = {"domains": ["fake"], "entity_globs": ["*.included_*"]}
excl = {"entities": ["fake.entity2"]}
# Add the statestream component for publishing state updates
# Set the filter to include the fake domain and the *.included_* glob, but exclude fake.entity2
assert await add_statestream(
hass, base_topic=base_topic, publish_include=incl, publish_exclude=excl
)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_statestream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity included by domain
mock_state_change_event(hass, State("fake.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/entity/state
mqtt_mock.async_publish.assert_called_with("pub/fake/entity/state", "on", 1, True)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity included by glob
mock_state_change_event(hass, State("fake.included_entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# Make sure 'on' was published to pub/fake/included_entity/state
mqtt_mock.async_publish.assert_called_with(
"pub/fake/included_entity/state", "on", 1, True
)
assert mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that shouldn't be included
mock_state_change_event(hass, State("fake.entity2", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity that doesn't match any filters
mock_state_change_event(hass, State("fake2.entity", "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
|
from datetime import timedelta
import json
from homeassistant.components.airly.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import STATE_UNAVAILABLE
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
from tests.components.airly import init_integration
async def test_async_setup_entry(hass):
"""Test a successful setup entry."""
await init_integration(hass)
state = hass.states.get("air_quality.home")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "14"
async def test_config_not_ready(hass):
"""Test for setup failure if connection to Airly is missing."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id="55.55-122.12",
data={
"api_key": "foo",
"latitude": 55.55,
"longitude": 122.12,
"name": "Home",
},
)
with patch("airly._private._RequestsHandler.get", side_effect=ConnectionError()):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_config_without_unique_id(hass):
"""Test for setup entry without unique_id."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
data={
"api_key": "foo",
"latitude": 55.55,
"longitude": 122.12,
"name": "Home",
},
)
with patch(
"airly._private._RequestsHandler.get",
return_value=json.loads(load_fixture("airly_valid_station.json")),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ENTRY_STATE_LOADED
assert entry.unique_id == "55.55-122.12"
async def test_config_with_turned_off_station(hass):
"""Test for setup entry for a turned off measuring station."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id="55.55-122.12",
data={
"api_key": "foo",
"latitude": 55.55,
"longitude": 122.12,
"name": "Home",
},
)
with patch(
"airly._private._RequestsHandler.get",
return_value=json.loads(load_fixture("airly_no_station.json")),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_update_interval(hass):
"""Test correct update interval when the number of configured instances changes."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
for instance in hass.data[DOMAIN].values():
assert instance.update_interval == timedelta(minutes=15)
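# Add a second Airly config entry; with two instances configured, the update
# interval of every instance is expected to grow to 30 minutes (asserted below).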
entry = MockConfigEntry(
domain=DOMAIN,
title="Work",
unique_id="66.66-111.11",
data={
"api_key": "foo",
"latitude": 66.66,
"longitude": 111.11,
"name": "Work",
},
)
with patch(
"airly._private._RequestsHandler.get",
return_value=json.loads(load_fixture("airly_valid_station.json")),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(DOMAIN)) == 2
assert entry.state == ENTRY_STATE_LOADED
for instance in hass.data[DOMAIN].values():
assert instance.update_interval == timedelta(minutes=30)
async def test_unload_entry(hass):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf
from nets import alexnet
from nets import cifarnet
from nets import inception
from nets import lenet
from nets import overfeat
from nets import resnet_v1
from nets import resnet_v2
from nets import vgg
from nets import mobilenet
from nets import mobilenetdet
slim = tf.contrib.slim
networks_map = {'alexnet_v2': alexnet.alexnet_v2,
'cifarnet': cifarnet.cifarnet,
'overfeat': overfeat.overfeat,
'vgg_a': vgg.vgg_a,
'vgg_16': vgg.vgg_16,
'vgg_19': vgg.vgg_19,
'inception_v1': inception.inception_v1,
'inception_v2': inception.inception_v2,
'inception_v3': inception.inception_v3,
'inception_v4': inception.inception_v4,
'inception_resnet_v2': inception.inception_resnet_v2,
'lenet': lenet.lenet,
'resnet_v1_50': resnet_v1.resnet_v1_50,
'resnet_v1_101': resnet_v1.resnet_v1_101,
'resnet_v1_152': resnet_v1.resnet_v1_152,
'resnet_v1_200': resnet_v1.resnet_v1_200,
'resnet_v2_50': resnet_v2.resnet_v2_50,
'resnet_v2_101': resnet_v2.resnet_v2_101,
'resnet_v2_152': resnet_v2.resnet_v2_152,
'resnet_v2_200': resnet_v2.resnet_v2_200,
'mobilenet': mobilenet.mobilenet,
'mobilenetdet': mobilenetdet.mobilenet
}
arg_scopes_map = {'alexnet_v2': alexnet.alexnet_v2_arg_scope,
'cifarnet': cifarnet.cifarnet_arg_scope,
'overfeat': overfeat.overfeat_arg_scope,
'vgg_a': vgg.vgg_arg_scope,
'vgg_16': vgg.vgg_arg_scope,
'vgg_19': vgg.vgg_arg_scope,
'inception_v1': inception.inception_v3_arg_scope,
'inception_v2': inception.inception_v3_arg_scope,
'inception_v3': inception.inception_v3_arg_scope,
'inception_v4': inception.inception_v4_arg_scope,
'inception_resnet_v2':
inception.inception_resnet_v2_arg_scope,
'lenet': lenet.lenet_arg_scope,
'resnet_v1_50': resnet_v1.resnet_arg_scope,
'resnet_v1_101': resnet_v1.resnet_arg_scope,
'resnet_v1_152': resnet_v1.resnet_arg_scope,
'resnet_v1_200': resnet_v1.resnet_arg_scope,
'resnet_v2_50': resnet_v2.resnet_arg_scope,
'resnet_v2_101': resnet_v2.resnet_arg_scope,
'resnet_v2_152': resnet_v2.resnet_arg_scope,
'resnet_v2_200': resnet_v2.resnet_arg_scope,
'mobilenet': mobilenet.mobilenet_arg_scope,
'mobilenetdet': mobilenetdet.mobilenet_arg_scope
}
def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False, width_multiplier=1):
"""Returns a network_fn such as `logits, end_points = network_fn(images)`.
Args:
name: The name of the network.
num_classes: The number of classes to use for classification.
weight_decay: The l2 coefficient for the model weights.
is_training: `True` if the model is being used for training and `False`
otherwise.
width_multiplier: Width multiplier forwarded to the mobilenet and
mobilenetdet networks; ignored by the other networks.
Returns:
network_fn: A function that applies the model to a batch of images. It has
the following signature:
logits, end_points = network_fn(images)
Raises:
ValueError: If network `name` is not recognized.
"""
if name not in networks_map:
raise ValueError('Unknown network name: %s' % name)
func = networks_map[name]
@functools.wraps(func)
def network_fn(images):
arg_scope = arg_scopes_map[name](weight_decay=weight_decay)
with slim.arg_scope(arg_scope):
if name == 'mobilenet':
return func(images, num_classes, is_training=is_training, width_multiplier=width_multiplier)
elif name == 'mobilenetdet':
return func(images, is_training=is_training, width_multiplier=width_multiplier)
else:
return func(images, num_classes, is_training=is_training)
if hasattr(func, 'default_image_size'):
network_fn.default_image_size = func.default_image_size
return network_fn
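# Usage sketch (illustrative only; assumes `images` is an existing 4-D float
# tensor of shape [batch, height, width, 3] and that num_classes matches the
# dataset being used):
#   network_fn = get_network_fn('mobilenet', num_classes=1001,
#                               weight_decay=4e-5, is_training=True,
#                               width_multiplier=1.0)
#   logits, end_points = network_fn(images)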
|
import diamond.collector
import socket
from struct import pack
import re
import time
class ApcupsdCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(ApcupsdCollector, self).get_default_config_help()
config_help.update({
'hostname': 'Hostname to collect from',
'port': 'port to collect from. defaults to 3551',
'metrics':
'List of metrics. Valid metric keys can be found [here]' +
'(http://www.apcupsd.com/manual/' +
'manual.html#status-report-fields)'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ApcupsdCollector, self).get_default_config()
config.update({
'path': 'apcupsd',
'hostname': 'localhost',
'port': 3551,
'metrics': ['LINEV', 'LOADPCT', 'BCHARGE', 'TIMELEFT', 'BATTV',
'NUMXFERS', 'TONBATT', 'MAXLINEV', 'MINLINEV',
'OUTPUTV', 'ITEMP', 'LINEFREQ', 'CUMONBATT', ],
})
return config
def getData(self):
# Get the data via TCP stream
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.config['hostname'], int(self.config['port'])))
# Packet is pad byte, size byte, and command
s.send(pack('xb6s', 6, 'status'))
# Ditch the header
s.recv(1024)
time.sleep(.25)
data = s.recv(4096)
# We're done. Close the socket
s.close()
return data
def collect(self):
metrics = {}
raw = {}
data = self.getData()
data = data.split('\n\x00')
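# Each record in the apcupsd status output looks like "LINEV    : 122.0 Volts";
# the regex below captures the key and the raw value, and the numeric portion
# is then extracted for publishing.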
for d in data:
matches = re.search(r"([A-Z]+)\s+:\s+(.*)$", d)
if matches:
value = matches.group(2).strip()
raw[matches.group(1)] = value
vmatch = re.search(r"([0-9.]+)", value)
if not vmatch:
continue
try:
value = float(vmatch.group(1))
except ValueError:
continue
metrics[matches.group(1)] = value
for metric in self.config['metrics']:
if metric not in metrics:
continue
metric_name = "%s.%s" % (raw['UPSNAME'], metric)
value = metrics[metric]
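# Counter-style fields are published as a rate of change via derivative().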
if metric in ['TONBATT', 'CUMONBATT', 'NUMXFERS']:
value = self.derivative(metric_name, metrics[metric])
self.publish(metric_name, value)
return True
|
from copy import deepcopy
from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
from .test_device import (
API_DISCOVERY_PORT_MANAGEMENT,
API_DISCOVERY_RESPONSE,
NAME,
setup_axis_integration,
)
from tests.async_mock import AsyncMock, patch
EVENTS = [
{
"operation": "Initialized",
"topic": "tns1:Device/Trigger/Relay",
"source": "RelayToken",
"source_idx": "0",
"type": "LogicalState",
"value": "inactive",
},
{
"operation": "Initialized",
"topic": "tns1:Device/Trigger/Relay",
"source": "RelayToken",
"source_idx": "1",
"type": "LogicalState",
"value": "active",
},
]
async def test_platform_manually_configured(hass):
"""Test that nothing happens when platform is manually configured."""
assert await async_setup_component(
hass, SWITCH_DOMAIN, {SWITCH_DOMAIN: {"platform": AXIS_DOMAIN}}
)
assert AXIS_DOMAIN not in hass.data
async def test_no_switches(hass):
"""Test that no output events in Axis results in no switch entities."""
await setup_axis_integration(hass)
assert not hass.states.async_entity_ids(SWITCH_DOMAIN)
async def test_switches_with_port_cgi(hass):
"""Test that switches are loaded properly using port.cgi."""
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
device.api.vapix.ports = {"0": AsyncMock(), "1": AsyncMock()}
device.api.vapix.ports["0"].name = "Doorbell"
device.api.vapix.ports["0"].open = AsyncMock()
device.api.vapix.ports["0"].close = AsyncMock()
device.api.vapix.ports["1"].name = ""
for event in EVENTS:
device.api.event.process_event(event)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1")
assert relay_1.state == STATE_ON
assert relay_1.name == f"{NAME} Relay 1"
entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell"
relay_0 = hass.states.get(entity_id)
assert relay_0.state == STATE_OFF
assert relay_0.name == f"{NAME} Doorbell"
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
device.api.vapix.ports["0"].close.assert_called_once()
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
device.api.vapix.ports["0"].open.assert_called_once()
async def test_switches_with_port_management(hass):
"""Test that switches are loaded properly using port management."""
api_discovery = deepcopy(API_DISCOVERY_RESPONSE)
api_discovery["data"]["apiList"].append(API_DISCOVERY_PORT_MANAGEMENT)
with patch.dict(API_DISCOVERY_RESPONSE, api_discovery):
config_entry = await setup_axis_integration(hass)
device = hass.data[AXIS_DOMAIN][config_entry.unique_id]
device.api.vapix.ports = {"0": AsyncMock(), "1": AsyncMock()}
device.api.vapix.ports["0"].name = "Doorbell"
device.api.vapix.ports["0"].open = AsyncMock()
device.api.vapix.ports["0"].close = AsyncMock()
device.api.vapix.ports["1"].name = ""
for event in EVENTS:
device.api.event.process_event(event)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2
relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1")
assert relay_1.state == STATE_ON
assert relay_1.name == f"{NAME} Relay 1"
entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell"
relay_0 = hass.states.get(entity_id)
assert relay_0.state == STATE_OFF
assert relay_0.name == f"{NAME} Doorbell"
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
device.api.vapix.ports["0"].close.assert_called_once()
await hass.services.async_call(
SWITCH_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
device.api.vapix.ports["0"].open.assert_called_once()
|
from ... import event
from . import Widget
class LineEdit(Widget):
""" An input widget to edit a line of text.
The ``node`` of this widget is a text
`<input> <https://developer.mozilla.org/docs/Web/HTML/Element/input>`_.
"""
DEFAULT_MIN_SIZE = 100, 28
CSS = """
.flx-LineEdit {
color: #333;
padding: 0.2em 0.4em;
border-radius: 3px;
border: 1px solid #aaa;
margin: 2px;
}
.flx-LineEdit:focus {
outline: none;
box-shadow: 0px 0px 3px 1px rgba(0, 100, 200, 0.7);
}
"""
## Properties
text = event.StringProp(settable=True, doc="""
The current text of the line edit. Settable. If this is an empty
string, the placeholder_text is displayed instead.
""")
password_mode = event.BoolProp(False, settable=True, doc="""
Whether the entered text should be hidden.
""")
placeholder_text = event.StringProp(settable=True, doc="""
The placeholder text (shown when the text is an empty string).
""")
autocomp = event.TupleProp(settable=True, doc="""
A tuple/list of strings for autocompletion. Might not work in all browsers.
""")
disabled = event.BoolProp(False, settable=True, doc="""
Whether the line edit is disabled.
""")
## Methods, actions, emitters
def _create_dom(self):
global window
# Create node element
node = window.document.createElement('input')
node.setAttribute('type', 'input')
node.setAttribute('list', self.id)
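# The 'list' attribute ties this <input> to the <datalist> created below,
# which holds the autocompletion options (kept in sync by __autocomp_changed).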
self._autocomp = window.document.createElement('datalist')
self._autocomp.id = self.id
node.appendChild(self._autocomp)
f1 = lambda: self.user_text(self.node.value)
self._addEventListener(node, 'input', f1, False)
self._addEventListener(node, 'blur', self.user_done, False)
#if IE10:
# self._addEventListener(self.node, 'change', f1, False)
return node
@event.emitter
def user_text(self, text):
""" Event emitted when the user edits the text. Has ``old_value``
and ``new_value`` attributes.
"""
d = {'old_value': self.text, 'new_value': text}
self.set_text(text)
return d
@event.emitter
def user_done(self):
""" Event emitted when the user is done editing the text, either by
moving the focus elsewhere, or by hitting enter.
Has ``old_value`` and ``new_value`` attributes (which are the same).
"""
d = {'old_value': self.text, 'new_value': self.text}
return d
@event.emitter
def submit(self):
""" Event emitted when the user strikes the enter or return key
(but not when losing focus). Has ``old_value`` and ``new_value``
attributes (which are the same).
"""
self.user_done()
d = {'old_value': self.text, 'new_value': self.text}
return d
@event.emitter
def key_down(self, e):
# Prevent propagating the key
ev = super().key_down(e)
pkeys = 'Escape', # keys to propagate
if (ev.modifiers and ev.modifiers != ('Shift', )) or ev.key in pkeys:
pass
else:
e.stopPropagation()
if ev.key in ('Enter', 'Return'):
self.submit()
# Nice to blur on mobile, since it hides keyboard, but less nice on desktop
# self.node.blur()
elif ev.key == 'Escape':
self.node.blur()
return ev
## Reactions
@event.reaction
def __text_changed(self):
self.node.value = self.text
@event.reaction
def __password_mode_changed(self):
self.node.type = ['text', 'password'][int(bool(self.password_mode))]
@event.reaction
def __placeholder_text_changed(self):
self.node.placeholder = self.placeholder_text
# note: this works in the browser but not in e.g. firefox-app
@event.reaction
def __autocomp_changed(self):
global window
autocomp = self.autocomp
# Clear
for op in self._autocomp:
self._autocomp.removeChild(op)
# Add new options
for option in autocomp:
op = window.document.createElement('option')
op.value = option
self._autocomp.appendChild(op)
@event.reaction
def __disabled_changed(self):
if self.disabled:
self.node.setAttribute("disabled", "disabled")
else:
self.node.removeAttribute("disabled")
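# Minimal usage sketch (hypothetical example; assumes it runs inside a Flexx
# app where child widgets are created in a parent widget's init()):
#   class Example(Widget):
#       def init(self):
#           self.edit = LineEdit(placeholder_text='Search...',
#                                autocomp=('foo', 'bar'))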
|
from homeassistant.setup import async_setup_component
from tests.common import load_fixture
async def test_setup(hass, requests_mock):
"""Test for successfully setting up the platform."""
config = {
"sensor": {
"platform": "openhardwaremonitor",
"host": "localhost",
"port": 8085,
}
}
requests_mock.get(
"http://localhost:8085/data.json",
text=load_fixture("openhardwaremonitor.json"),
)
await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
entities = hass.states.async_entity_ids("sensor")
assert len(entities) == 38
state = hass.states.get("sensor.test_pc_intel_core_i7_7700_temperatures_cpu_core_1")
assert state is not None
assert state.state == "31.0"
state = hass.states.get("sensor.test_pc_intel_core_i7_7700_temperatures_cpu_core_2")
assert state is not None
assert state.state == "30.0"
|
from socket import gaierror as SocketGIAError
from homeassistant.components.sonarr.const import (
CONF_BASE_PATH,
CONF_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS,
DEFAULT_UPCOMING_DAYS,
DEFAULT_WANTED_MAX_ITEMS,
DOMAIN,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SSL,
CONF_VERIFY_SSL,
CONTENT_TYPE_JSON,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
HOST = "192.168.1.189"
PORT = 8989
BASE_PATH = "/api"
API_KEY = "MOCK_API_KEY"
MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"}
MOCK_USER_INPUT = {
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_BASE_PATH: BASE_PATH,
CONF_SSL: False,
CONF_API_KEY: API_KEY,
}
def mock_connection(
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
base_path: str = BASE_PATH,
error: bool = False,
invalid_auth: bool = False,
server_error: bool = False,
) -> None:
"""Mock Sonarr connection."""
if error:
mock_connection_error(
aioclient_mock,
host=host,
port=port,
base_path=base_path,
)
return
if invalid_auth:
mock_connection_invalid_auth(
aioclient_mock,
host=host,
port=port,
base_path=base_path,
)
return
if server_error:
mock_connection_server_error(
aioclient_mock,
host=host,
port=port,
base_path=base_path,
)
return
sonarr_url = f"http://{host}:{port}{base_path}"
aioclient_mock.get(
f"{sonarr_url}/system/status",
text=load_fixture("sonarr/system-status.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/diskspace",
text=load_fixture("sonarr/diskspace.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/calendar",
text=load_fixture("sonarr/calendar.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/command",
text=load_fixture("sonarr/command.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/queue",
text=load_fixture("sonarr/queue.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/series",
text=load_fixture("sonarr/series.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
f"{sonarr_url}/wanted/missing",
text=load_fixture("sonarr/wanted-missing.json"),
headers={"Content-Type": CONTENT_TYPE_JSON},
)
def mock_connection_error(
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
base_path: str = BASE_PATH,
) -> None:
"""Mock Sonarr connection errors."""
sonarr_url = f"http://{host}:{port}{base_path}"
aioclient_mock.get(f"{sonarr_url}/system/status", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/diskspace", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/calendar", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/command", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/queue", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/series", exc=SocketGIAError)
aioclient_mock.get(f"{sonarr_url}/missing/wanted", exc=SocketGIAError)
def mock_connection_invalid_auth(
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
base_path: str = BASE_PATH,
) -> None:
"""Mock Sonarr invalid auth errors."""
sonarr_url = f"http://{host}:{port}{base_path}"
aioclient_mock.get(f"{sonarr_url}/system/status", status=403)
aioclient_mock.get(f"{sonarr_url}/diskspace", status=403)
aioclient_mock.get(f"{sonarr_url}/calendar", status=403)
aioclient_mock.get(f"{sonarr_url}/command", status=403)
aioclient_mock.get(f"{sonarr_url}/queue", status=403)
aioclient_mock.get(f"{sonarr_url}/series", status=403)
aioclient_mock.get(f"{sonarr_url}/missing/wanted", status=403)
def mock_connection_server_error(
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
base_path: str = BASE_PATH,
) -> None:
"""Mock Sonarr server errors."""
sonarr_url = f"http://{host}:{port}{base_path}"
aioclient_mock.get(f"{sonarr_url}/system/status", status=500)
aioclient_mock.get(f"{sonarr_url}/diskspace", status=500)
aioclient_mock.get(f"{sonarr_url}/calendar", status=500)
aioclient_mock.get(f"{sonarr_url}/command", status=500)
aioclient_mock.get(f"{sonarr_url}/queue", status=500)
aioclient_mock.get(f"{sonarr_url}/series", status=500)
aioclient_mock.get(f"{sonarr_url}/missing/wanted", status=500)
async def setup_integration(
hass: HomeAssistantType,
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
base_path: str = BASE_PATH,
api_key: str = API_KEY,
unique_id: str = None,
skip_entry_setup: bool = False,
connection_error: bool = False,
invalid_auth: bool = False,
server_error: bool = False,
) -> MockConfigEntry:
"""Set up the Sonarr integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=unique_id,
data={
CONF_HOST: host,
CONF_PORT: port,
CONF_BASE_PATH: base_path,
CONF_SSL: False,
CONF_VERIFY_SSL: False,
CONF_API_KEY: api_key,
CONF_UPCOMING_DAYS: DEFAULT_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS: DEFAULT_WANTED_MAX_ITEMS,
},
options={
CONF_UPCOMING_DAYS: DEFAULT_UPCOMING_DAYS,
CONF_WANTED_MAX_ITEMS: DEFAULT_WANTED_MAX_ITEMS,
},
)
entry.add_to_hass(hass)
mock_connection(
aioclient_mock,
host=host,
port=port,
base_path=base_path,
error=connection_error,
invalid_auth=invalid_auth,
server_error=server_error,
)
if not skip_entry_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
def _patch_async_setup(return_value=True):
"""Patch the async setup of sonarr."""
return patch(
"homeassistant.components.sonarr.async_setup", return_value=return_value
)
def _patch_async_setup_entry(return_value=True):
"""Patch the async entry setup of sonarr."""
return patch(
"homeassistant.components.sonarr.async_setup_entry",
return_value=return_value,
)
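# Illustrative sketch of how these helpers are combined in a test module
# (the test body below is hypothetical, not part of this file):
#   async def test_example_setup(hass, aioclient_mock):
#       entry = await setup_integration(hass, aioclient_mock)
#       assert entry.data[CONF_HOST] == HOST
#       assert len(hass.config_entries.async_entries(DOMAIN)) == 1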
|
import unittest
from trashcli.put import TrashDirectoryForPut
from trashcli.put import TopDirRelativePaths
from mock import Mock
from mock import ANY
class TestTrashing(unittest.TestCase):
def setUp(self):
self.now = Mock()
self.fs = Mock()
self.trashdir = TrashDirectoryForPut('~/.Trash', '/', self.fs)
self.trashdir.path_maker = TopDirRelativePaths('/')
path_maker = Mock()
path_maker.calc_parent_path.return_value = ''
self.trashdir.path_maker = path_maker
self.logger = Mock(['debug'])
def test_the_file_should_be_moved_in_trash_dir(self):
self.trashdir.trash2('foo', self.now, self.logger)
self.fs.move.assert_called_with('foo', '~/.Trash/files/foo')
self.logger.debug.assert_called_with('.trashinfo created as ~/.Trash/info/foo.trashinfo.')
def test_should_create_a_trashinfo(self):
self.trashdir.trash2('foo', self.now, self.logger)
self.fs.atomic_write.assert_called_with('~/.Trash/info/foo.trashinfo', ANY)
self.logger.debug.assert_called_with('.trashinfo created as ~/.Trash/info/foo.trashinfo.')
def test_trashinfo_should_contains_original_location_and_deletion_date(self):
from datetime import datetime
self.now.return_value = datetime(2012, 9, 25, 21, 47, 39)
self.trashdir.trash2('foo', self.now, self.logger)
self.fs.atomic_write.assert_called_with(ANY,
b'[Trash Info]\n'
b'Path=foo\n'
b'DeletionDate=2012-09-25T21:47:39\n')
self.logger.debug.assert_called_with('.trashinfo created as ~/.Trash/info/foo.trashinfo.')
def test_should_rollback_trashinfo_creation_on_problems(self):
self.fs.move.side_effect = IOError
try: self.trashdir.trash2('foo', self.now, self.logger)
except IOError: pass
self.fs.remove_file.assert_called_with('~/.Trash/info/foo.trashinfo')
self.logger.debug.assert_called_with('.trashinfo created as ~/.Trash/info/foo.trashinfo.')
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker.linux_packages import hbase
from tests import pkb_common_test_case
from six.moves import urllib
FLAGS = flags.FLAGS
class HbaseTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(HbaseTest, self).setUp()
FLAGS['hbase_version'].parse('1.3.2.1')
p = mock.patch.object(urllib.request, 'urlopen')
self.mock_url_open = p.start()
self.addCleanup(p.stop)
def testGetUrlVersion(self):
url = hbase._GetHBaseURL()
self.assertRegexpMatches(url, 'hbase-1.3.2.1-bin.tar.gz$')
def MakeHbaseUrl(version):
return ('<a href="hbase-{version}-bin.tar.gz">'
'hbase-{version}-bin.tar.gz</a>').format(version=version)
if __name__ == '__main__':
unittest.main()
|
from tqdm import tqdm
from . import units
from .chain_transform import chain_transform
from matchzoo import DataPack
from matchzoo.engine.base_preprocessor import BasePreprocessor
from .build_vocab_unit import built_bert_vocab_unit
from .build_unit_from_data_pack import build_unit_from_data_pack
tqdm.pandas()
class BertPreprocessor(BasePreprocessor):
"""Bert-base Model preprocessor."""
def __init__(self, bert_vocab_path: str,
fixed_length_left: int = 30,
fixed_length_right: int = 30,
filter_mode: str = 'df',
filter_low_freq: float = 2,
filter_high_freq: float = float('inf'),
remove_stop_words: bool = False,
lower_case: bool = True,
chinese_version: bool = False,
):
"""
Bert-base Model preprocessor.
Example:
>>> import matchzoo as mz
>>> train_data = mz.datasets.toy.load_data()
>>> test_data = mz.datasets.toy.load_data(stage='test')
>>> # The argument 'bert_vocab_path' must feed the bert vocab path
>>> bert_preprocessor = mz.preprocessors.BertPreprocessor(
... bert_vocab_path=
... 'matchzoo/datasets/bert_resources/uncased_vocab_100.txt')
>>> train_data_processed = bert_preprocessor.fit_transform(
... train_data)
>>> test_data_processed = bert_preprocessor.transform(test_data)
"""
super().__init__()
self._fixed_length_left = fixed_length_left
self._fixed_length_right = fixed_length_right
self._bert_vocab_path = bert_vocab_path
self._left_fixedlength_unit = units.FixedLength(
self._fixed_length_left,
pad_mode='post'
)
self._right_fixedlength_unit = units.FixedLength(
self._fixed_length_right,
pad_mode='post'
)
self._filter_unit = units.FrequencyFilter(
low=filter_low_freq,
high=filter_high_freq,
mode=filter_mode
)
self._units = self._default_units()
self._vocab_unit = built_bert_vocab_unit(self._bert_vocab_path)
if chinese_version:
self._units.insert(1, units.ChineseTokenize())
if lower_case:
self._units.append(units.Lowercase())
self._units.append(units.StripAccent())
self._units.append(units.WordPieceTokenize(
self._vocab_unit.state['term_index']))
if remove_stop_words:
self._units.append(units.StopRemoval())
def fit(self, data_pack: DataPack, verbose: int = 1):
"""
Fit pre-processing context for transformation.
:param verbose: Verbosity.
:param data_pack: Data_pack to be preprocessed.
:return: class:`BertPreprocessor` instance.
"""
data_pack = data_pack.apply_on_text(chain_transform(self._units),
verbose=verbose)
fitted_filter_unit = build_unit_from_data_pack(self._filter_unit,
data_pack,
flatten=False,
mode='right',
verbose=verbose)
self._context['filter_unit'] = fitted_filter_unit
self._context['vocab_unit'] = self._vocab_unit
vocab_size = len(self._vocab_unit.state['term_index'])
self._context['vocab_size'] = vocab_size
self._context['embedding_input_dim'] = vocab_size
self._context['input_shapes'] = [(self._fixed_length_left,),
(self._fixed_length_right,)]
return self
def transform(self, data_pack: DataPack, verbose: int = 1) -> DataPack:
"""
Apply transformation on data, create fixed length representation.
:param data_pack: Inputs to be preprocessed.
:param verbose: Verbosity.
:return: Transformed data as :class:`DataPack` object.
"""
data_pack = data_pack.copy()
data_pack.apply_on_text(chain_transform(self._units), inplace=True,
verbose=verbose)
data_pack.apply_on_text(self._context['filter_unit'].transform,
mode='right', inplace=True, verbose=verbose)
data_pack.apply_on_text(self._context['vocab_unit'].transform,
mode='both', inplace=True, verbose=verbose)
data_pack.append_text_length(inplace=True, verbose=verbose)
data_pack.apply_on_text(self._left_fixedlength_unit.transform,
mode='left', inplace=True, verbose=verbose)
data_pack.apply_on_text(self._right_fixedlength_unit.transform,
mode='right', inplace=True, verbose=verbose)
max_len_left = self._fixed_length_left
max_len_right = self._fixed_length_right
data_pack.left['length_left'] = \
data_pack.left['length_left'].apply(
lambda val: min(val, max_len_left))
data_pack.right['length_right'] = \
data_pack.right['length_right'].apply(
lambda val: min(val, max_len_right))
return data_pack
@classmethod
def _default_units(cls) -> list:
"""Prepare needed process units."""
return [
units.BertClean(),
units.BasicTokenize()
]
|
import os
import shutil
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import cd
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir, test_dir):
"""Build the site."""
init_command = nikola.plugins.command.init.CommandInit()
init_command.create_empty_site(target_dir)
init_command.create_configuration(target_dir)
src = os.path.join(test_dir, "..", "data", "translated_titles")
for root, dirs, files in os.walk(src):
for src_name in files:
if src_name == "1.txt": # English post
continue
rel_dir = os.path.relpath(root, src)
dst_file = os.path.join(target_dir, rel_dir, src_name)
src_file = os.path.join(root, src_name)
shutil.copy2(src_file, dst_file)
with cd(target_dir):
__main__.main(["build"])
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.faster_rcnn import FasterRCNNTrainChain
from chainercv.utils import generate_random_bbox
from tests.links_tests.model_tests.faster_rcnn_tests.dummy_faster_rcnn \
import DummyFasterRCNN
def _random_array(shape):
return np.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
class TestFasterRCNNTrainChain(unittest.TestCase):
def setUp(self):
self.n_anchor_base = 6
self.feat_stride = 4
self.n_fg_class = 3
self.n_roi = 24
self.n_bbox = 3
self.link = FasterRCNNTrainChain(DummyFasterRCNN(
n_anchor_base=self.n_anchor_base,
feat_stride=self.feat_stride,
n_fg_class=self.n_fg_class,
n_roi=self.n_roi,
min_size=600,
max_size=800,
))
self.bboxes = chainer.Variable(
generate_random_bbox(self.n_bbox, (600, 800), 16, 350)[np.newaxis])
_labels = np.random.randint(
0, self.n_fg_class, size=(1, self.n_bbox)).astype(np.int32)
self.labels = chainer.Variable(_labels)
self.imgs = chainer.Variable(_random_array((1, 3, 600, 800)))
self.scales = chainer.Variable(np.array([1.]))
def check_call(self):
loss = self.link(self.imgs, self.bboxes, self.labels, self.scales)
self.assertEqual(loss.shape, ())
def test_call_cpu(self):
self.check_call()
@attr.gpu
def test_call_gpu(self):
self.link.to_gpu()
self.imgs.to_gpu()
self.bboxes.to_gpu()
self.labels.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
|
from collections import namedtuple
import pytest
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
)
from homeassistant.components.homekit.const import (
ATTR_OBSTRUCTION_DETECTED,
ATTR_VALUE,
CONF_LINKED_OBSTRUCTION_SENSOR,
HK_DOOR_CLOSED,
HK_DOOR_CLOSING,
HK_DOOR_OPEN,
HK_DOOR_OPENING,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
EVENT_HOMEASSISTANT_START,
SERVICE_SET_COVER_TILT_POSITION,
STATE_CLOSED,
STATE_CLOSING,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_OPENING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import CoreState
from homeassistant.helpers import entity_registry
from tests.common import async_mock_service
from tests.components.homekit.common import patch_debounce
@pytest.fixture(scope="module")
def cls():
"""Patch debounce decorator during import of type_covers."""
patcher = patch_debounce()
patcher.start()
_import = __import__(
"homeassistant.components.homekit.type_covers",
fromlist=["GarageDoorOpener", "WindowCovering", "WindowCoveringBasic"],
)
patcher_tuple = namedtuple(
"Cls", ["window", "windowcovering", "windowcovering_basic", "garage"]
)
yield patcher_tuple(
window=_import.Window,
windowcovering=_import.WindowCovering,
windowcovering_basic=_import.WindowCoveringBasic,
garage=_import.GarageDoorOpener,
)
patcher.stop()
async def test_garage_door_open_close(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "cover.garage_door"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = cls.garage(hass, hk_driver, "Garage Door", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 4 # GarageDoorOpener
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
hass.states.async_set(entity_id, STATE_CLOSED, {ATTR_OBSTRUCTION_DETECTED: False})
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_CLOSED
assert acc.char_target_state.value == HK_DOOR_CLOSED
assert acc.char_obstruction_detected.value is False
hass.states.async_set(entity_id, STATE_OPEN, {ATTR_OBSTRUCTION_DETECTED: True})
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
assert acc.char_obstruction_detected.value is True
hass.states.async_set(
entity_id, STATE_UNAVAILABLE, {ATTR_OBSTRUCTION_DETECTED: False}
)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
assert acc.char_obstruction_detected.value is False
hass.states.async_set(entity_id, STATE_UNKNOWN)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
# Set from HomeKit
call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)
await hass.async_block_till_done()
assert call_close_cover
assert call_close_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_state.value == HK_DOOR_CLOSING
assert acc.char_target_state.value == HK_DOOR_CLOSED
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
hass.states.async_set(entity_id, STATE_CLOSED)
await hass.async_block_till_done()
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_CLOSED
assert acc.char_target_state.value == HK_DOOR_CLOSED
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 0)
await hass.async_block_till_done()
assert call_open_cover
assert call_open_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_state.value == HK_DOOR_OPENING
assert acc.char_target_state.value == HK_DOOR_OPEN
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
hass.states.async_set(entity_id, STATE_OPEN)
await hass.async_block_till_done()
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 0)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
assert len(events) == 4
assert events[-1].data[ATTR_VALUE] is None
async def test_windowcovering_set_cover_position(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "cover.window"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = cls.windowcovering(hass, hk_driver, "Cover", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 14 # WindowCovering
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_CURRENT_POSITION: None})
await hass.async_block_till_done()
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
hass.states.async_set(entity_id, STATE_OPENING, {ATTR_CURRENT_POSITION: 60})
await hass.async_block_till_done()
assert acc.char_current_position.value == 60
assert acc.char_target_position.value == 60
assert acc.char_position_state.value == 1
hass.states.async_set(entity_id, STATE_OPENING, {ATTR_CURRENT_POSITION: 70.0})
await hass.async_block_till_done()
assert acc.char_current_position.value == 70
assert acc.char_target_position.value == 70
assert acc.char_position_state.value == 1
hass.states.async_set(entity_id, STATE_CLOSING, {ATTR_CURRENT_POSITION: 50})
await hass.async_block_till_done()
assert acc.char_current_position.value == 50
assert acc.char_target_position.value == 50
assert acc.char_position_state.value == 0
hass.states.async_set(entity_id, STATE_OPEN, {ATTR_CURRENT_POSITION: 50})
await hass.async_block_till_done()
assert acc.char_current_position.value == 50
assert acc.char_target_position.value == 50
assert acc.char_position_state.value == 2
# Set from HomeKit
call_set_cover_position = async_mock_service(hass, DOMAIN, "set_cover_position")
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 25)
await hass.async_block_till_done()
assert call_set_cover_position[0]
assert call_set_cover_position[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_cover_position[0].data[ATTR_POSITION] == 25
assert acc.char_current_position.value == 50
assert acc.char_target_position.value == 25
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == 25
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 75)
await hass.async_block_till_done()
assert call_set_cover_position[1]
assert call_set_cover_position[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_cover_position[1].data[ATTR_POSITION] == 75
assert acc.char_current_position.value == 50
assert acc.char_target_position.value == 75
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == 75
async def test_window_instantiate(hass, hk_driver, cls, events):
"""Test if Window accessory is instantiated correctly."""
entity_id = "cover.window"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = cls.window(hass, hk_driver, "Window", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 13 # Window
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
async def test_windowcovering_cover_set_tilt(hass, hk_driver, cls, events):
"""Test if accessory and HA update slat tilt accordingly."""
entity_id = "cover.window"
hass.states.async_set(
entity_id, STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: SUPPORT_SET_TILT_POSITION}
)
await hass.async_block_till_done()
acc = cls.windowcovering(hass, hk_driver, "Cover", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 14 # CATEGORY_WINDOW_COVERING
assert acc.char_current_tilt.value == 0
assert acc.char_target_tilt.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_CURRENT_TILT_POSITION: None})
await hass.async_block_till_done()
assert acc.char_current_tilt.value == 0
assert acc.char_target_tilt.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_CURRENT_TILT_POSITION: 100})
await hass.async_block_till_done()
assert acc.char_current_tilt.value == 90
assert acc.char_target_tilt.value == 90
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_CURRENT_TILT_POSITION: 50})
await hass.async_block_till_done()
assert acc.char_current_tilt.value == 0
assert acc.char_target_tilt.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_CURRENT_TILT_POSITION: 0})
await hass.async_block_till_done()
assert acc.char_current_tilt.value == -90
assert acc.char_target_tilt.value == -90
# set from HomeKit
call_set_tilt_position = async_mock_service(
hass, DOMAIN, SERVICE_SET_COVER_TILT_POSITION
)
# HomeKit sets tilts between -90 and 90 (degrees), whereas
# Home Assistant expects a % between 0 and 100. Keep that in mind
# when comparing
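# (The linear mapping is percent = (degrees + 90) / 180 * 100, so 90 deg ->
# 100 %, 45 deg -> 75 % and -90 deg -> 0 %, matching the assertions below.)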
await hass.async_add_executor_job(acc.char_target_tilt.client_update_value, 90)
await hass.async_block_till_done()
assert call_set_tilt_position[0]
assert call_set_tilt_position[0].data[ATTR_ENTITY_ID] == entity_id
assert call_set_tilt_position[0].data[ATTR_TILT_POSITION] == 100
assert acc.char_current_tilt.value == -90
assert acc.char_target_tilt.value == 90
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] == 100
await hass.async_add_executor_job(acc.char_target_tilt.client_update_value, 45)
await hass.async_block_till_done()
assert call_set_tilt_position[1]
assert call_set_tilt_position[1].data[ATTR_ENTITY_ID] == entity_id
assert call_set_tilt_position[1].data[ATTR_TILT_POSITION] == 75
assert acc.char_current_tilt.value == -90
assert acc.char_target_tilt.value == 45
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] == 75
async def test_windowcovering_open_close(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "cover.window"
hass.states.async_set(entity_id, STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: 0})
acc = cls.windowcovering_basic(hass, hk_driver, "Cover", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 14 # WindowCovering
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
hass.states.async_set(entity_id, STATE_UNKNOWN)
await hass.async_block_till_done()
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
hass.states.async_set(entity_id, STATE_OPENING)
await hass.async_block_till_done()
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 1
hass.states.async_set(entity_id, STATE_OPEN)
await hass.async_block_till_done()
assert acc.char_current_position.value == 100
assert acc.char_target_position.value == 100
assert acc.char_position_state.value == 2
hass.states.async_set(entity_id, STATE_CLOSING)
await hass.async_block_till_done()
assert acc.char_current_position.value == 100
assert acc.char_target_position.value == 100
assert acc.char_position_state.value == 0
hass.states.async_set(entity_id, STATE_CLOSED)
await hass.async_block_till_done()
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
# Set from HomeKit
call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 25)
await hass.async_block_till_done()
assert call_close_cover
assert call_close_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 90)
await hass.async_block_till_done()
assert call_open_cover[0]
assert call_open_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 100
assert acc.char_target_position.value == 100
assert acc.char_position_state.value == 2
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 55)
await hass.async_block_till_done()
assert call_open_cover[1]
assert call_open_cover[1].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 100
assert acc.char_target_position.value == 100
assert acc.char_position_state.value == 2
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
async def test_windowcovering_open_close_stop(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly."""
entity_id = "cover.window"
hass.states.async_set(
entity_id, STATE_UNKNOWN, {ATTR_SUPPORTED_FEATURES: SUPPORT_STOP}
)
acc = cls.windowcovering_basic(hass, hk_driver, "Cover", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
# Set from HomeKit
call_close_cover = async_mock_service(hass, DOMAIN, "close_cover")
call_open_cover = async_mock_service(hass, DOMAIN, "open_cover")
call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover")
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 25)
await hass.async_block_till_done()
assert call_close_cover
assert call_close_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 0
assert acc.char_target_position.value == 0
assert acc.char_position_state.value == 2
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 90)
await hass.async_block_till_done()
assert call_open_cover
assert call_open_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 100
assert acc.char_target_position.value == 100
assert acc.char_position_state.value == 2
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_position.client_update_value, 55)
await hass.async_block_till_done()
assert call_stop_cover
assert call_stop_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_current_position.value == 50
assert acc.char_target_position.value == 50
assert acc.char_position_state.value == 2
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
async def test_windowcovering_open_close_with_position_and_stop(
hass, hk_driver, cls, events
):
"""Test if accessory and HA are updated accordingly."""
entity_id = "cover.stop_window"
hass.states.async_set(
entity_id,
STATE_UNKNOWN,
{ATTR_SUPPORTED_FEATURES: SUPPORT_STOP | SUPPORT_SET_POSITION},
)
acc = cls.windowcovering(hass, hk_driver, "Cover", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
# Set from HomeKit
call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover")
await hass.async_add_executor_job(acc.char_hold_position.client_update_value, 0)
await hass.async_block_till_done()
assert not call_stop_cover
await hass.async_add_executor_job(acc.char_hold_position.client_update_value, 1)
await hass.async_block_till_done()
assert call_stop_cover
assert call_stop_cover[0].data[ATTR_ENTITY_ID] == entity_id
assert acc.char_hold_position.value == 1
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
async def test_windowcovering_basic_restore(hass, hk_driver, cls, events):
"""Test setting up an entity from state in the event registry."""
hass.state = CoreState.not_running
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
"cover",
"generic",
"1234",
suggested_object_id="simple",
)
registry.async_get_or_create(
"cover",
"generic",
"9012",
suggested_object_id="all_info_set",
capabilities={},
supported_features=SUPPORT_STOP,
device_class="mock-device-class",
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {})
await hass.async_block_till_done()
acc = cls.windowcovering_basic(hass, hk_driver, "Cover", "cover.simple", 2, None)
assert acc.category == 14
assert acc.char_current_position is not None
assert acc.char_target_position is not None
assert acc.char_position_state is not None
acc = cls.windowcovering_basic(
hass, hk_driver, "Cover", "cover.all_info_set", 2, None
)
assert acc.category == 14
assert acc.char_current_position is not None
assert acc.char_target_position is not None
assert acc.char_position_state is not None
async def test_windowcovering_restore(hass, hk_driver, cls, events):
"""Test setting up an entity from state in the event registry."""
hass.state = CoreState.not_running
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
"cover",
"generic",
"1234",
suggested_object_id="simple",
)
registry.async_get_or_create(
"cover",
"generic",
"9012",
suggested_object_id="all_info_set",
capabilities={},
supported_features=SUPPORT_STOP,
device_class="mock-device-class",
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {})
await hass.async_block_till_done()
acc = cls.windowcovering(hass, hk_driver, "Cover", "cover.simple", 2, None)
assert acc.category == 14
assert acc.char_current_position is not None
assert acc.char_target_position is not None
assert acc.char_position_state is not None
acc = cls.windowcovering(hass, hk_driver, "Cover", "cover.all_info_set", 2, None)
assert acc.category == 14
assert acc.char_current_position is not None
assert acc.char_target_position is not None
assert acc.char_position_state is not None
async def test_garage_door_with_linked_obstruction_sensor(hass, hk_driver, cls, events):
"""Test if accessory and HA are updated accordingly with a linked obstruction sensor."""
linked_obstruction_sensor_entity_id = "binary_sensor.obstruction"
entity_id = "cover.garage_door"
hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_OFF)
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = cls.garage(
hass,
hk_driver,
"Garage Door",
entity_id,
2,
{CONF_LINKED_OBSTRUCTION_SENSOR: linked_obstruction_sensor_entity_id},
)
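# char_obstruction_detected is expected to mirror the linked binary_sensor:
# STATE_ON -> True, STATE_OFF -> False (asserted below).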
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 4 # GarageDoorOpener
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
hass.states.async_set(entity_id, STATE_CLOSED)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_CLOSED
assert acc.char_target_state.value == HK_DOOR_CLOSED
assert acc.char_obstruction_detected.value is False
hass.states.async_set(entity_id, STATE_OPEN)
hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_OPEN
assert acc.char_target_state.value == HK_DOOR_OPEN
assert acc.char_obstruction_detected.value is True
hass.states.async_set(entity_id, STATE_CLOSED)
hass.states.async_set(linked_obstruction_sensor_entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_current_state.value == HK_DOOR_CLOSED
assert acc.char_target_state.value == HK_DOOR_CLOSED
assert acc.char_obstruction_detected.value is False
hass.states.async_remove(entity_id)
hass.states.async_remove(linked_obstruction_sensor_entity_id)
await hass.async_block_till_done()
|
import logging
from pyqwikswitch.qwikswitch import SENSORS
from homeassistant.core import callback
from . import DOMAIN as QWIKSWITCH, QSEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, _, add_entities, discovery_info=None):
"""Add sensor from the main Qwikswitch component."""
if discovery_info is None:
return
qsusb = hass.data[QWIKSWITCH]
_LOGGER.debug("Setup qwikswitch.sensor %s, %s", qsusb, discovery_info)
devs = [QSSensor(sensor) for sensor in discovery_info[QWIKSWITCH]]
add_entities(devs)
class QSSensor(QSEntity):
"""Sensor based on a Qwikswitch relay/dimmer module."""
_val = None
def __init__(self, sensor):
"""Initialize the sensor."""
super().__init__(sensor["id"], sensor["name"])
self.channel = sensor["channel"]
sensor_type = sensor["type"]
self._decode, self.unit = SENSORS[sensor_type]
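# SENSORS maps the Qwikswitch sensor type to a (decode_function, unit) pair; a
# unit that is a type (bool) marks a binary sensor, which is rejected just below.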
# A boolean unit should never reach this platform; such devices belong to binary_sensor instead
assert not isinstance(
self.unit, type
), f"boolean sensor id={sensor['id']} name={sensor['name']}"
@callback
def update_packet(self, packet):
"""Receive update packet from QSUSB."""
val = self._decode(packet, channel=self.channel)
_LOGGER.debug(
"Update %s (%s:%s) decoded as %s: %s",
self.entity_id,
self.qsid,
self.channel,
val,
packet,
)
if val is not None:
self._val = val
self.async_write_ha_state()
@property
def state(self):
"""Return the value of the sensor."""
return str(self._val)
@property
def unique_id(self):
"""Return a unique identifier for this sensor."""
return f"qs{self.qsid}:{self.channel}"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.unit
|
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.template.loader import render_to_string
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from weblate_language_data.ambiguous import AMBIGUOUS
from weblate_language_data.countries import DEFAULT_LANGS
from weblate.utils.fields import JSONField
ALERTS = {}
ALERTS_IMPORT = set()
def register(cls):
name = cls.__name__
ALERTS[name] = cls
if cls.on_import:
ALERTS_IMPORT.add(name)
return cls
class Alert(models.Model):
component = models.ForeignKey("Component", on_delete=models.deletion.CASCADE)
timestamp = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=150)
dismissed = models.BooleanField(default=False, db_index=True)
details = JSONField(default={})
class Meta:
unique_together = ("component", "name")
verbose_name = "component alert"
verbose_name_plural = "component alerts"
def __str__(self):
return str(self.obj.verbose)
def save(self, *args, **kwargs):
is_new = not self.id
super().save(*args, **kwargs)
if is_new:
from weblate.trans.models import Change
Change.objects.create(
action=Change.ACTION_ALERT,
component=self.component,
alert=self,
details={"alert": self.name},
)
@cached_property
def obj(self):
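# Rebuild the registered alert class for this name, passing the stored details
# as keyword arguments.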
return ALERTS[self.name](self, **self.details)
def render(self, user):
return self.obj.render(user)
class BaseAlert:
verbose = ""
on_import = False
link_wide = False
dismissable = False
doc_page = ""
doc_anchor = ""
def __init__(self, instance):
self.instance = instance
def get_analysis(self):
return {}
def get_context(self, user):
result = {
"alert": self.instance,
"component": self.instance.component,
"timestamp": self.instance.timestamp,
"details": self.instance.details,
"analysis": self.get_analysis(),
"user": user,
}
result.update(self.instance.details)
return result
def render(self, user):
return render_to_string(
f"trans/alert/{self.__class__.__name__.lower()}.html",
self.get_context(user),
)
class ErrorAlert(BaseAlert):
def __init__(self, instance, error):
super().__init__(instance)
self.error = error
class MultiAlert(BaseAlert):
def __init__(self, instance, occurrences):
super().__init__(instance)
self.occurrences = self.process_occurrences(occurrences)
def get_context(self, user):
result = super().get_context(user)
result["occurrences"] = self.occurrences
return result
def process_occurrences(self, occurrences):
from weblate.lang.models import Language
from weblate.trans.models import Unit
processors = (
("language_code", "language", Language, "code"),
("unit_pk", "unit", Unit, "pk"),
)
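# Each processor resolves a raw key stored in the occurrence (e.g. 'language_code')
# into a model instance under the target key ('language'); objects that no longer
# exist resolve to None.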
for occurrence in occurrences:
for key, target, obj, lookup in processors:
if key not in occurrence:
continue
try:
occurrence[target] = obj.objects.get(**{lookup: occurrence[key]})
except ObjectDoesNotExist:
occurrence[target] = None
return occurrences
@register
class DuplicateString(MultiAlert):
# Translators: Name of an alert
verbose = _("Duplicated string found in the file.")
on_import = True
@register
class DuplicateLanguage(MultiAlert):
# Translators: Name of an alert
verbose = _("Duplicated translation.")
on_import = True
def get_analysis(self):
result = {}
source = self.instance.component.source_language
for occurrence in self.occurrences:
if occurrence["language"] == source:
result["source_language"] = True
codes = {
code.strip().replace("-", "_").lower()
for code in occurrence["codes"].split(",")
}
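# e.g. a stored code such as 'cs-CZ' normalizes to 'cs_cz'; an overlap with
# DEFAULT_LANGS presumably indicates a country-suffixed variant of a default language.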
if codes.intersection(DEFAULT_LANGS):
result["default_country"] = True
return result
@register
class DuplicateFilemask(BaseAlert):
# Translators: Name of an alert
verbose = _("Duplicated filemask.")
link_wide = True
def __init__(self, instance, duplicates):
super().__init__(instance)
self.duplicates = duplicates
@register
class MergeFailure(ErrorAlert):
# Translators: Name of an alert
verbose = _("Could not merge the repository.")
link_wide = True
@register
class UpdateFailure(ErrorAlert):
# Translators: Name of an alert
verbose = _("Could not update the repository.")
link_wide = True
@register
class PushFailure(ErrorAlert):
# Translators: Name of an alert
verbose = _("Could not push the repository.")
link_wide = True
def get_context(self, user):
result = super().get_context(user)
result["terminal"] = "terminal prompts disabled" in result["error"]
return result
@register
class ParseError(MultiAlert):
# Translators: Name of an alert
verbose = _("Could not parse translation files.")
on_import = True
@register
class BillingLimit(BaseAlert):
# Translators: Name of an alert
verbose = _("Your billing plan has exceeded its limits.")
@register
class RepositoryOutdated(BaseAlert):
# Translators: Name of an alert
verbose = _("Repository outdated.")
link_wide = True
@register
class RepositoryChanges(BaseAlert):
# Translators: Name of an alert
verbose = _("Repository has changes.")
link_wide = True
@register
class MissingLicense(BaseAlert):
# Translators: Name of an alert
verbose = _("License info missing.")
@register
class AddonScriptError(MultiAlert):
# Translators: Name of an alert
verbose = _("Could not run addon.")
@register
class CDNAddonError(MultiAlert):
# Translators: Name of an alert
verbose = _("Could not run addon.")
@register
class MsgmergeAddonError(MultiAlert):
# Translators: Name of an alert
verbose = _("Could not run addon.")
@register
class MonolingualTranslation(BaseAlert):
# Translators: Name of an alert
verbose = _("Misconfigured monolingual translation.")
@register
class UnsupportedConfiguration(BaseAlert):
# Translators: Name of an alert
verbose = _("Unsupported component configuration")
def __init__(self, instance, vcs, file_format):
super().__init__(instance)
self.vcs = vcs
self.file_format = file_format
@register
class BrokenBrowserURL(BaseAlert):
# Translators: Name of an alert
verbose = _("Broken repository browser URL")
dismissable = True
def __init__(self, instance, link, error):
super().__init__(instance)
self.link = link
self.error = error
@register
class BrokenProjectURL(BaseAlert):
# Translators: Name of an alert
verbose = _("Broken project website URL")
dismissable = True
def __init__(self, instance, error=None):
super().__init__(instance)
self.error = error
@register
class UnusedScreenshot(BaseAlert):
# Translators: Name of an alert
verbose = _("Unused screenshot")
@register
class AmbiguousLanguage(BaseAlert):
# Translators: Name of an alert
verbose = _("Ambiguous language code.")
dismissable = True
doc_page = "admin/languages"
doc_anchor = "ambiguous-languages"
def get_context(self, user):
result = super().get_context(user)
ambiguous = self.instance.component.get_ambiguous_translations().values_list(
"language__code", flat=True
)
result["ambiguous"] = {code: AMBIGUOUS[code] for code in ambiguous}
return result
@register
class NoLibreConditions(BaseAlert):
# Translators: Name of an alert
verbose = _("Does not meet libre hosting conditions.")
|
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import ATTR_ATTRIBUTION
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_CREATED_AT,
ATTR_DROPLET_ID,
ATTR_DROPLET_NAME,
ATTR_FEATURES,
ATTR_IPV4_ADDRESS,
ATTR_IPV6_ADDRESS,
ATTR_MEMORY,
ATTR_REGION,
ATTR_VCPUS,
ATTRIBUTION,
CONF_DROPLETS,
DATA_DIGITAL_OCEAN,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Droplet"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_DROPLETS): vol.All(cv.ensure_list, [cv.string])}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Digital Ocean droplet switch."""
digital = hass.data.get(DATA_DIGITAL_OCEAN)
if not digital:
return False
droplets = config[CONF_DROPLETS]
dev = []
for droplet in droplets:
droplet_id = digital.get_droplet_id(droplet)
if droplet_id is None:
_LOGGER.error("Droplet %s is not available", droplet)
return False
dev.append(DigitalOceanSwitch(digital, droplet_id))
add_entities(dev, True)
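# The second argument (update_before_add) makes Home Assistant call update() on
# each switch before it is added, so self.data is populated.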
class DigitalOceanSwitch(SwitchEntity):
"""Representation of a Digital Ocean droplet switch."""
def __init__(self, do, droplet_id):
"""Initialize a new Digital Ocean sensor."""
self._digital_ocean = do
self._droplet_id = droplet_id
self.data = None
self._state = None
@property
def name(self):
"""Return the name of the switch."""
return self.data.name
@property
def is_on(self):
"""Return true if switch is on."""
return self.data.status == "active"
@property
def device_state_attributes(self):
"""Return the state attributes of the Digital Ocean droplet."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_CREATED_AT: self.data.created_at,
ATTR_DROPLET_ID: self.data.id,
ATTR_DROPLET_NAME: self.data.name,
ATTR_FEATURES: self.data.features,
ATTR_IPV4_ADDRESS: self.data.ip_address,
ATTR_IPV6_ADDRESS: self.data.ip_v6_address,
ATTR_MEMORY: self.data.memory,
ATTR_REGION: self.data.region["name"],
ATTR_VCPUS: self.data.vcpus,
}
def turn_on(self, **kwargs):
"""Boot-up the droplet."""
if self.data.status != "active":
self.data.power_on()
def turn_off(self, **kwargs):
"""Shutdown the droplet."""
if self.data.status == "active":
self.data.power_off()
def update(self):
"""Get the latest data from the device and update the data."""
self._digital_ocean.update()
for droplet in self._digital_ocean.data:
if droplet.id == self._droplet_id:
self.data = droplet
|
import re
from unittest import TestCase
import pandas as pd
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.linear_model import PassiveAggressiveClassifier
from scattertext import CorpusFromParsedDocuments
from scattertext import FeatsFromSpacyDoc, FeatsFromSpacyDocAndEmpath
from scattertext import TermDocMatrixFactory
from scattertext.TermDocMatrixFactory import FeatsFromDoc
from scattertext.WhitespaceNLP import whitespace_nlp, Doc, Tok
def build_hamlet_jz_term_doc_mat():
# type: () -> TermDocMatrix
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
term_doc_mat = TermDocMatrixFactory(
category_text_iter=zip(categories, documents),
clean_function=clean_function,
nlp=whitespace_nlp
).build()
return term_doc_mat
def build_hamlet_jz_corpus():
# type: () -> Corpus
df = build_hamlet_jz_df()
return CorpusFromParsedDocuments(
df=df,
category_col='category',
parsed_col='parsed'
).build()
def build_hamlet_jz_df():
# type: () -> pd.DataFrame
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
df = pd.DataFrame({
'category': categories,
'parsed': [whitespace_nlp(clean_function(doc)) for doc in documents]
})
df = df[df['parsed'].apply(lambda x: len(str(x).strip()) > 0)]
return df
def build_hamlet_jz_corpus_with_alt_text():
# type: () -> Corpus
df = build_hamlet_jz_df_with_alt_text()
return CorpusFromParsedDocuments(
df=df,
category_col='category',
parsed_col='parsed'
).build()
def build_hamlet_jz_df_with_alt_text():
# type: () -> pd.DataFrame
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
df = pd.DataFrame({
'category': categories,
'parsed': [whitespace_nlp(clean_function(doc)) for doc in documents],
'alt': [doc.upper() for doc in documents]
})
df = df[df['parsed'].apply(lambda x: len(str(x).strip()) > 0)]
return df
def build_hamlet_jz_corpus_with_meta():
# type: () -> Corpus
def empath_mock(doc, **kwargs):
toks = list(doc)
num_toks = min(3, len(toks))
return {'cat' + str(len(tok)): val for val, tok in enumerate(toks[:num_toks])}
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
df = pd.DataFrame({
'category': categories,
'parsed': [whitespace_nlp(clean_function(doc)) for doc in documents]
})
df = df[df['parsed'].apply(lambda x: len(str(x).strip()) > 0)]
return CorpusFromParsedDocuments(
df=df,
category_col='category',
parsed_col='parsed',
feats_from_spacy_doc=FeatsFromSpacyDocAndEmpath(empath_analyze_function=empath_mock)
).build()
def get_docs_categories():
documents = [u"What art thou that usurp'st this time of night,",
u'Together with that fair and warlike form',
u'In which the majesty of buried Denmark',
u'Did sometimes march? by heaven I charge thee, speak!',
u'Halt! Who goes there?',
u'[Intro]',
u'It is I sire Tone from Brooklyn.',
u'Well, speak up man what is it?',
u'News from the East sire! THE BEST OF BOTH WORLDS HAS RETURNED!'
]
categories = ['hamlet'] * 4 + ['jay-z/r. kelly'] * 5
return categories, documents
def _testing_nlp(doc):
toks = []
for tok in re.split(r"(\W)", doc):
pos = 'WORD'
ent = ''
tag = ''
if tok.strip() == '':
pos = 'SPACE'
elif re.match(r'^\W+$', tok):
pos = 'PUNCT'
if tok == 'Tone':
ent = 'PERSON'
if tok == 'Brooklyn':
ent = 'GPE'
toks.append(Tok(pos, tok[:2].lower(), tok.lower(), ent, tag))
return Doc([toks])
class TestTermDocMatrixFactory(TestCase):
def test_build(self):
term_doc_mat = build_hamlet_jz_term_doc_mat()
self.assertEqual(term_doc_mat.get_num_docs(), 8)
self.assertEqual(term_doc_mat.get_categories(), ['hamlet', 'jay-z/r. kelly'])
def test_build_censor_entities(self):
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
term_doc_mat = (
TermDocMatrixFactory(
category_text_iter=zip(categories, documents),
clean_function=clean_function,
nlp=_testing_nlp,
feats_from_spacy_doc=FeatsFromSpacyDoc(entity_types_to_censor=set(['GPE']))
).build()
)
self.assertIn('_GPE', set(term_doc_mat.get_term_freq_df().index))
self.assertNotIn('brooklyn', set(term_doc_mat.get_term_freq_df().index))
class TestFeatsFromDoc(TestCase):
def test_main(self):
categories, documents = get_docs_categories()
clean_function = lambda text: '' if text.startswith('[') else text
entity_types = set(['GPE'])
term_doc_mat = (
TermDocMatrixFactory(
category_text_iter=zip(categories, documents),
clean_function=clean_function,
nlp=_testing_nlp,
feats_from_spacy_doc=FeatsFromSpacyDoc(entity_types_to_censor=entity_types)
).build()
)
clf = PassiveAggressiveClassifier()
fdc = FeatsFromDoc(term_doc_mat._term_idx_store,
clean_function=clean_function,
feats_from_spacy_doc=FeatsFromSpacyDoc(
entity_types_to_censor=entity_types)).set_nlp(_testing_nlp)
tfidf = TfidfTransformer(norm='l1')
X = tfidf.fit_transform(term_doc_mat._X)
clf.fit(X, term_doc_mat._y)
X_to_predict = fdc.feats_from_doc('Did sometimes march UNKNOWNWORD')
pred = clf.predict(tfidf.transform(X_to_predict))
dec = clf.decision_function(X_to_predict)
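# Smoke test: the feature extraction, tf-idf and classifier pipeline runs end to
# end on unseen text; the predicted label and decision margin are not asserted.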
|
import pytest
from homeassistant.helpers import frame
from tests.async_mock import Mock, patch
async def test_extract_frame_integration(caplog):
"""Test extracting the current frame from integration context."""
correct_frame = Mock(
filename="/home/paulus/homeassistant/components/hue/light.py",
lineno="23",
line="self.light.is_on",
)
with patch(
"homeassistant.helpers.frame.extract_stack",
return_value=[
Mock(
filename="/home/paulus/homeassistant/core.py",
lineno="23",
line="do_something()",
),
correct_frame,
Mock(
filename="/home/paulus/aiohue/lights.py",
lineno="2",
line="something()",
),
],
):
found_frame, integration, path = frame.get_integration_frame()
assert integration == "hue"
assert path == "homeassistant/components/"
assert found_frame == correct_frame
async def test_extract_frame_integration_with_excluded_integration(caplog):
"""Test extracting the current frame from integration context."""
correct_frame = Mock(
filename="/home/dev/homeassistant/components/mdns/light.py",
lineno="23",
line="self.light.is_on",
)
with patch(
"homeassistant.helpers.frame.extract_stack",
return_value=[
Mock(
filename="/home/dev/homeassistant/core.py",
lineno="23",
line="do_something()",
),
correct_frame,
Mock(
filename="/home/dev/homeassistant/components/zeroconf/usage.py",
lineno="23",
line="self.light.is_on",
),
Mock(
filename="/home/dev/mdns/lights.py",
lineno="2",
line="something()",
),
],
):
found_frame, integration, path = frame.get_integration_frame(
exclude_integrations={"zeroconf"}
)
assert integration == "mdns"
assert path == "homeassistant/components/"
assert found_frame == correct_frame
async def test_extract_frame_no_integration(caplog):
"""Test extracting the current frame without integration context."""
with patch(
"homeassistant.helpers.frame.extract_stack",
return_value=[
Mock(
filename="/home/paulus/homeassistant/core.py",
lineno="23",
line="do_something()",
),
Mock(
filename="/home/paulus/aiohue/lights.py",
lineno="2",
line="something()",
),
],
), pytest.raises(frame.MissingIntegrationFrame):
frame.get_integration_frame()
|
import logging
import dbus
import dbus.exceptions
DBUS_SCREENSAVER_INTERFACES = (
'org.cinnamon.ScreenSaver',
'org.freedesktop.ScreenSaver',
'org.gnome.ScreenSaver',
'org.mate.ScreenSaver',
'org.xfce.ScreenSaver',
)
class ScreensaverMonitor(object):
"""
Simple class for monitoring signals on the Session Bus
"""
def __init__(self, parent):
self._logger = logging.getLogger('razer.screensaver')
self._logger.info("Initialising DBus Screensaver Monitor")
self._parent = parent
self._monitoring = True
self._active = None
self._dbus_instances = []
# Get session bus
bus = dbus.SessionBus()
# Loop through and monitor the signals
for screensaver_interface in DBUS_SCREENSAVER_INTERFACES:
bus.add_signal_receiver(self.signal_callback, dbus_interface=screensaver_interface, signal_name='ActiveChanged')
@property
def monitoring(self):
"""
Monitoring property; if true, suspend/resume will be actioned.
:return: If monitoring
:rtype: bool
"""
return self._monitoring
@monitoring.setter
def monitoring(self, value):
"""
Monitoring property setter; if true, suspend/resume will be actioned.
:param value: If monitoring
:type: bool
"""
self._monitoring = bool(value)
def suspend(self):
"""
Suspend the device
"""
self._logger.debug("Received screensaver active signal")
self._parent.suspend_devices()
def resume(self):
"""
Resume the device
"""
self._logger.debug("Received screensaver inactive signal")
self._parent.resume_devices()
def signal_callback(self, active):
"""
Called by DBus when a signal is found
:param active: If the screensaver is active
:type active: dbus.Boolean
"""
active = bool(active)
if self.monitoring:
if active:
# Only trigger once per state change
if self._active is None or not self._active:
self._active = active
self.suspend()
else:
if self._active is None or self._active:
self._active = active
self.resume()
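# A minimal usage sketch (hypothetical parent object; the real daemon wires this
# up elsewhere and runs its own main loop):
#
#     from dbus.mainloop.glib import DBusGMainLoop
#     DBusGMainLoop(set_as_default=True)  # must be set before SessionBus() so signals are delivered
#
#     class Parent:
#         def suspend_devices(self): print('suspend')
#         def resume_devices(self): print('resume')
#
#     monitor = ScreensaverMonitor(Parent())
#     # then run a GLib main loop so add_signal_receiver callbacks are dispatched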
|
import pytest
from jinja2.environment import Environment
from jinja2.exceptions import TemplateNotFound
from jinja2.exceptions import TemplatesNotFound
from jinja2.exceptions import TemplateSyntaxError
from jinja2.exceptions import UndefinedError
from jinja2.loaders import DictLoader
@pytest.fixture
def test_env():
env = Environment(
loader=DictLoader(
dict(
module="{% macro test() %}[{{ foo }}|{{ bar }}]{% endmacro %}",
header="[{{ foo }}|{{ 23 }}]",
o_printer="({{ o }})",
)
)
)
env.globals["bar"] = 23
return env
class TestImports:
def test_context_imports(self, test_env):
t = test_env.from_string('{% import "module" as m %}{{ m.test() }}')
assert t.render(foo=42) == "[|23]"
t = test_env.from_string(
'{% import "module" as m without context %}{{ m.test() }}'
)
assert t.render(foo=42) == "[|23]"
t = test_env.from_string(
'{% import "module" as m with context %}{{ m.test() }}'
)
assert t.render(foo=42) == "[42|23]"
t = test_env.from_string('{% from "module" import test %}{{ test() }}')
assert t.render(foo=42) == "[|23]"
t = test_env.from_string(
'{% from "module" import test without context %}{{ test() }}'
)
assert t.render(foo=42) == "[|23]"
t = test_env.from_string(
'{% from "module" import test with context %}{{ test() }}'
)
assert t.render(foo=42) == "[42|23]"
def test_import_needs_name(self, test_env):
test_env.from_string('{% from "foo" import bar %}')
test_env.from_string('{% from "foo" import bar, baz %}')
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import %}')
def test_no_trailing_comma(self, test_env):
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import bar, %}')
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import bar,, %}')
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import, %}')
def test_trailing_comma_with_context(self, test_env):
test_env.from_string('{% from "foo" import bar, baz with context %}')
test_env.from_string('{% from "foo" import bar, baz, with context %}')
test_env.from_string('{% from "foo" import bar, with context %}')
test_env.from_string('{% from "foo" import bar, with, context %}')
test_env.from_string('{% from "foo" import bar, with with context %}')
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import bar,, with context %}')
with pytest.raises(TemplateSyntaxError):
test_env.from_string('{% from "foo" import bar with context, %}')
def test_exports(self, test_env):
m = test_env.from_string(
"""
{% macro toplevel() %}...{% endmacro %}
{% macro __private() %}...{% endmacro %}
{% set variable = 42 %}
{% for item in [1] %}
{% macro notthere() %}{% endmacro %}
{% endfor %}
"""
).module
assert m.toplevel() == "..."
assert not hasattr(m, "__missing")
assert m.variable == 42
assert not hasattr(m, "notthere")
def test_not_exported(self, test_env):
t = test_env.from_string("{% from 'module' import nothing %}{{ nothing() }}")
with pytest.raises(UndefinedError, match="does not export the requested name"):
t.render()
class TestIncludes:
def test_context_include(self, test_env):
t = test_env.from_string('{% include "header" %}')
assert t.render(foo=42) == "[42|23]"
t = test_env.from_string('{% include "header" with context %}')
assert t.render(foo=42) == "[42|23]"
t = test_env.from_string('{% include "header" without context %}')
assert t.render(foo=42) == "[|23]"
def test_choice_includes(self, test_env):
t = test_env.from_string('{% include ["missing", "header"] %}')
assert t.render(foo=42) == "[42|23]"
t = test_env.from_string('{% include ["missing", "missing2"] ignore missing %}')
assert t.render(foo=42) == ""
t = test_env.from_string('{% include ["missing", "missing2"] %}')
pytest.raises(TemplateNotFound, t.render)
with pytest.raises(TemplatesNotFound) as e:
t.render()
assert e.value.templates == ["missing", "missing2"]
assert e.value.name == "missing2"
def test_includes(t, **ctx):
ctx["foo"] = 42
assert t.render(ctx) == "[42|23]"
t = test_env.from_string('{% include ["missing", "header"] %}')
test_includes(t)
t = test_env.from_string("{% include x %}")
test_includes(t, x=["missing", "header"])
t = test_env.from_string('{% include [x, "header"] %}')
test_includes(t, x="missing")
t = test_env.from_string("{% include x %}")
test_includes(t, x="header")
t = test_env.from_string("{% include [x] %}")
test_includes(t, x="header")
def test_include_ignoring_missing(self, test_env):
t = test_env.from_string('{% include "missing" %}')
pytest.raises(TemplateNotFound, t.render)
for extra in "", "with context", "without context":
t = test_env.from_string(
'{% include "missing" ignore missing ' + extra + " %}"
)
assert t.render() == ""
def test_context_include_with_overrides(self, test_env):
env = Environment(
loader=DictLoader(
dict(
main="{% for item in [1, 2, 3] %}{% include 'item' %}{% endfor %}",
item="{{ item }}",
)
)
)
assert env.get_template("main").render() == "123"
def test_unoptimized_scopes(self, test_env):
t = test_env.from_string(
"""
{% macro outer(o) %}
{% macro inner() %}
{% include "o_printer" %}
{% endmacro %}
{{ inner() }}
{% endmacro %}
{{ outer("FOO") }}
"""
)
assert t.render().strip() == "(FOO)"
def test_import_from_with_context(self):
env = Environment(
loader=DictLoader({"a": "{% macro x() %}{{ foobar }}{% endmacro %}"})
)
t = env.from_string(
"{% set foobar = 42 %}{% from 'a' import x with context %}{{ x() }}"
)
assert t.render() == "42"
|
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_NAME, CONF_TYPE
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ALIASES,
CONF_AUTOMATIC_ADD,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DATA_DEVICE_REGISTER,
DEVICE_DEFAULTS_SCHEMA,
EVENT_KEY_COMMAND,
EVENT_KEY_ID,
SwitchableRflinkDevice,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
TYPE_DIMMABLE = "dimmable"
TYPE_SWITCHABLE = "switchable"
TYPE_HYBRID = "hybrid"
TYPE_TOGGLE = "toggle"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
): DEVICE_DEFAULTS_SCHEMA,
vol.Optional(CONF_AUTOMATIC_ADD, default=True): cv.boolean,
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TYPE): vol.Any(
TYPE_DIMMABLE, TYPE_SWITCHABLE, TYPE_HYBRID, TYPE_TOGGLE
),
vol.Optional(CONF_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_FIRE_EVENT): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
vol.Optional(CONF_GROUP, default=True): cv.boolean,
}
)
},
},
extra=vol.ALLOW_EXTRA,
)
def entity_type_for_device_id(device_id):
"""Return entity class for protocol of a given device_id.
Async friendly.
"""
entity_type_mapping = {
# KlikAanKlikUit supports both dimmers and on/off switches on the same
# protocol
"newkaku": TYPE_HYBRID
}
protocol = device_id.split("_")[0]
return entity_type_mapping.get(protocol)
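# e.g. a device_id like 'newkaku_123abc_1' has protocol 'newkaku' and maps to
# TYPE_HYBRID; unknown protocols yield None, which entity_class_for_type below
# turns into the plain RflinkLight.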
def entity_class_for_type(entity_type):
"""Translate entity type to entity class.
Async friendly.
"""
entity_device_mapping = {
# sends only 'dim' commands not compatible with on/off switches
TYPE_DIMMABLE: DimmableRflinkLight,
# sends only 'on/off' commands; not advised for dimmers or with signal
# repetition
TYPE_SWITCHABLE: RflinkLight,
# sends 'dim' and 'on' command to support both dimmers and on/off
# switches. Not compatible with signal repetition.
TYPE_HYBRID: HybridRflinkLight,
# sends only 'on' commands for switches which turn on and off
# using the same 'on' command for both.
TYPE_TOGGLE: ToggleRflinkLight,
}
return entity_device_mapping.get(entity_type, RflinkLight)
def devices_from_config(domain_config):
"""Parse configuration and add Rflink light devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
# Determine which kind of entity to create
if CONF_TYPE in config:
# Remove type from config so it is not passed as an argument to entity
# instantiation
entity_type = config.pop(CONF_TYPE)
else:
entity_type = entity_type_for_device_id(device_id)
entity_class = entity_class_for_type(entity_type)
device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config)
is_hybrid = entity_class is HybridRflinkLight
# Make user aware this can cause problems
repetitions_enabled = device_config[CONF_SIGNAL_REPETITIONS] != 1
if is_hybrid and repetitions_enabled:
_LOGGER.warning(
"Hybrid type for %s not compatible with signal "
"repetitions. Please set 'dimmable' or 'switchable' "
"type explicitly in configuration",
device_id,
)
device = entity_class(device_id, **device_config)
devices.append(device)
return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Rflink light platform."""
async_add_entities(devices_from_config(config))
async def add_new_device(event):
"""Check if device is known, otherwise add to list of known devices."""
device_id = event[EVENT_KEY_ID]
entity_type = entity_type_for_device_id(event[EVENT_KEY_ID])
entity_class = entity_class_for_type(entity_type)
device_config = config[CONF_DEVICE_DEFAULTS]
device = entity_class(device_id, initial_event=event, **device_config)
async_add_entities([device])
if config[CONF_AUTOMATIC_ADD]:
hass.data[DATA_DEVICE_REGISTER][EVENT_KEY_COMMAND] = add_new_device
class RflinkLight(SwitchableRflinkDevice, LightEntity):
"""Representation of a Rflink light."""
class DimmableRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device that support dimming."""
_brightness = 255
async def async_added_to_hass(self):
"""Restore RFLink light brightness attribute."""
await super().async_added_to_hass()
old_state = await self.async_get_last_state()
if (
old_state is not None
and old_state.attributes.get(ATTR_BRIGHTNESS) is not None
):
# also restore brightness for dimmable devices
self._brightness = int(old_state.attributes[ATTR_BRIGHTNESS])
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
if ATTR_BRIGHTNESS in kwargs:
# Rflink only supports 16 brightness levels
self._brightness = int(kwargs[ATTR_BRIGHTNESS] / 17) * 17
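# e.g. a requested brightness of 128 becomes int(128 / 17) * 17 == 119, one of
# the 16 levels 0, 17, 34, ..., 255.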
# Turn on light at the requested dim level
await self._async_handle_command("dim", self._brightness)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self._brightness is None:
return {}
return {ATTR_BRIGHTNESS: self._brightness}
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
class HybridRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device that sends out both dim and on/off commands.
Used for protocols which support lights that are not exclusively on/off
style. For example KlikAanKlikUit supports both on/off and dimmable light
switches using the same protocol. This type allows unconfigured
KlikAanKlikUit devices to support dimming without breaking support for
on/off switches.
This type is not compatible with signal repetitions as the 'dim' and 'on'
commands are sent sequentially, and multiple 'on' commands to a dimmable
device can cause the dimmer to switch into a pulsating brightness mode,
which results in a nice house disco :)
"""
_brightness = 255
async def async_added_to_hass(self):
"""Restore RFLink light brightness attribute."""
await super().async_added_to_hass()
old_state = await self.async_get_last_state()
if (
old_state is not None
and old_state.attributes.get(ATTR_BRIGHTNESS) is not None
):
# also restore brightness for dimmable devices
self._brightness = int(old_state.attributes[ATTR_BRIGHTNESS])
async def async_turn_on(self, **kwargs):
"""Turn the device on and set dim level."""
if ATTR_BRIGHTNESS in kwargs:
# Rflink only supports 16 brightness levels
self._brightness = int(kwargs[ATTR_BRIGHTNESS] / 17) * 17
# if receiver supports dimming this will turn on the light
# at the requested dim level
await self._async_handle_command("dim", self._brightness)
# if the receiving device does not support dimlevel this
# will ensure it is turned on when full brightness is set
if self._brightness == 255:
await self._async_handle_command("turn_on")
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self._brightness is None:
return {}
return {ATTR_BRIGHTNESS: self._brightness}
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
class ToggleRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device which sends out only 'on' commands.
Some switches like for example Livolo light switches use the
same 'on' command to switch on and switch off the lights.
If the light is on and 'on' gets sent, the light will turn off
and if the light is off and 'on' gets sent, the light will turn on.
"""
def _handle_event(self, event):
"""Adjust state if Rflink picks up a remote command for this device."""
self.cancel_queued_send_commands()
command = event["command"]
if command == "on":
# if the state is unknown or false, it gets set as true
# if the state is true, it gets set as false
self._state = self._state in [None, False]
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._async_handle_command("toggle")
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._async_handle_command("toggle")
|