from django.contrib.sitemaps import Sitemap
from django.urls import reverse
from weblate.trans.models import Change, Component, Project, Translation
from weblate.utils.stats import prefetch_stats
class PagesSitemap(Sitemap):
def items(self):
return (
("/", 1.0, "daily"),
("/about/", 0.4, "weekly"),
("/keys/", 0.4, "weekly"),
)
def location(self, obj):
return obj[0]
def lastmod(self, item):
try:
return Change.objects.values_list("timestamp", flat=True).order_by("-timestamp")[0]
except IndexError:
return None
def priority(self, item):
return item[1]
def changefreq(self, item):
return item[2]
class WeblateSitemap(Sitemap):
priority = 0.0
changefreq = None
def items(self):
raise NotImplementedError()
def lastmod(self, item):
return item.stats.last_changed
class ProjectSitemap(WeblateSitemap):
priority = 0.8
def items(self):
return prefetch_stats(
Project.objects.filter(access_control__lt=Project.ACCESS_PRIVATE).order_by(
"id"
)
)
class ComponentSitemap(WeblateSitemap):
priority = 0.6
def items(self):
return prefetch_stats(
Component.objects.prefetch_related("project")
.filter(project__access_control__lt=Project.ACCESS_PRIVATE)
.order_by("id")
)
class TranslationSitemap(WeblateSitemap):
priority = 0.2
def items(self):
return prefetch_stats(
Translation.objects.prefetch_related(
"component",
"component__project",
"language",
)
.filter(component__project__access_control__lt=Project.ACCESS_PRIVATE)
.order_by("id")
)
class EngageSitemap(ProjectSitemap):
"""Wrapper around ProjectSitemap to point to engage page."""
priority = 1.0
def location(self, obj):
return reverse("engage", kwargs={"project": obj.slug})
class EngageLangSitemap(Sitemap):
"""Wrapper to generate sitemap for all per language engage pages."""
priority = 0.9
def items(self):
"""Return list of existing project, language tuples."""
ret = []
projects = Project.objects.filter(
access_control__lt=Project.ACCESS_PRIVATE
).order_by("id")
for project in projects:
for lang in project.languages:
ret.append((project, lang))
return ret
def location(self, obj):
return reverse("engage", kwargs={"project": obj[0].slug, "lang": obj[1].code})
SITEMAPS = {
"project": ProjectSitemap(),
"engage": EngageSitemap(),
"engagelang": EngageLangSitemap(),
"component": ComponentSitemap(),
"translation": TranslationSitemap(),
"pages": PagesSitemap(),
}
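# Usage sketch (an assumption, not necessarily Weblate's actual URL config):
# the SITEMAPS dict plugs straight into the stock django.contrib.sitemaps views.
#
#   from django.urls import path
#   from django.contrib.sitemaps import views
#
#   urlpatterns = [
#       path("sitemap.xml", views.index, {"sitemaps": SITEMAPS}),
#       path("sitemap-<section>.xml", views.sitemap, {"sitemaps": SITEMAPS},
#            name="django.contrib.sitemaps.views.sitemap"),
#   ]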
|
from typing import Dict, Sequence, Union
from gi.repository import Gtk
VIEW_ACCELERATORS: Dict[str, Union[str, Sequence[str]]] = {
'app.quit': '<Primary>Q',
'view.find': '<Primary>F',
'view.find-next': '<Primary>G',
'view.find-previous': '<Primary><Shift>G',
'view.find-replace': '<Primary>H',
'view.go-to-line': '<Primary>I',
# Overridden in CSS
'view.next-change': ('<Alt>Down', '<Alt>KP_Down', '<Primary>D'),
'view.next-pane': '<Alt>Page_Down',
# Overridden in CSS
'view.previous-change': ('<Alt>Up', '<Alt>KP_Up', '<Primary>E'),
'view.previous-pane': '<Alt>Page_Up',
'view.redo': '<Primary><Shift>Z',
'view.refresh': ('<control>R', 'F5'),
'view.save': '<Primary>S',
'view.save-all': '<Primary><Shift>L',
'view.save-as': '<Primary><Shift>S',
'view.undo': '<Primary>Z',
'win.close': '<Primary>W',
'win.new-tab': '<Primary>N',
'win.stop': 'Escape',
# File comparison actions
'view.file-previous-conflict': '<Primary>I',
'view.file-next-conflict': '<Primary>K',
'view.file-push-left': '<Alt>Left',
'view.file-push-right': '<Alt>Right',
'view.file-pull-left': '<Alt><shift>Right',
'view.file-pull-right': '<Alt><shift>Left',
'view.file-copy-left-up': '<Alt>bracketleft',
'view.file-copy-right-up': '<Alt>bracketright',
'view.file-copy-left-down': '<Alt>semicolon',
'view.file-copy-right-down': '<Alt>quoteright',
'view.file-delete': ('<Alt>Delete', '<Alt>KP_Delete'),
'view.show-overview-map': 'F9',
# Folder comparison actions
'view.folder-compare': 'Return',
'view.folder-copy-left': '<Alt>Left',
'view.folder-copy-right': '<Alt>Right',
'view.folder-delete': 'Delete',
# Version control actions
'view.vc-commit': '<Primary>M',
'view.vc-console-visible': 'F9',
}
def register_accels(app: Gtk.Application):
for name, accel in VIEW_ACCELERATORS.items():
accel = accel if isinstance(accel, tuple) else (accel,)
app.set_accels_for_action(name, accel)
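# Usage sketch (hypothetical application class, assuming the standard
# Gtk.Application startup sequence):
#
#   class App(Gtk.Application):
#       def do_startup(self):
#           Gtk.Application.do_startup(self)
#           register_accels(self)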
|
class Neuron(object):
"""
This class represents a Neuron, which corresponds to an action to perform.
.. note:: Neurons are defined in the brain file
"""
def __init__(self, name=None, parameters=None):
self.name = name
# avoid the shared mutable default-argument pitfall by creating a fresh dict
self.parameters = parameters if parameters is not None else {}
def serialize(self):
"""
Serialize this object.
:return: A dict of name and parameters
:rtype: Dict
"""
return {
'name': self.name,
'parameters': self.parameters
}
def __str__(self):
"""
Return a string that describes the neuron. If a parameter name contains the word "password",
the value of that parameter is masked so that it does not appear in clear text in the console.
:return: string description of the neuron
"""
returned_dict = {
'name': self.name,
'parameters': self.parameters
}
cleaned_parameters = dict()
for key, value in self.parameters.items():
if "password" in key:
cleaned_parameters[key] = "*****"
else:
cleaned_parameters[key] = value
returned_dict["parameters"] = cleaned_parameters
return str(returned_dict)
def __eq__(self, other):
"""
This is used to compare 2 objects
:param other:
:return:
"""
return self.__dict__ == other.__dict__
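# Minimal demonstration of the password masking in __str__ (values are made
# up for illustration):
#
#   neuron = Neuron(name="say", parameters={"msg": "hi", "api_password": "hunter2"})
#   print(neuron)
#   # {'name': 'say', 'parameters': {'msg': 'hi', 'api_password': '*****'}}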
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from pyspark.sql import SparkSession
def main():
spark = (SparkSession.builder
.appName('Setup Spark table')
.enableHiveSupport()
.getOrCreate())
table = 'warehouse'
table_dir = sys.argv[1]
# clean up previous table
spark.sql('drop table if exists ' + table)
# register new table
spark.catalog.createTable(table, table_dir, source='parquet')
if __name__ == '__main__':
main()
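# Usage sketch (assumed invocation; the script name is hypothetical): the
# table directory is the single positional argument, e.g.
#
#   spark-submit setup_table.py hdfs:///data/warehouse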
|
from weblate.auth.models import create_groups, setup_project_groups
from weblate.trans.models import Project
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "setups default user groups"
def add_arguments(self, parser):
parser.add_argument(
"--no-privs-update",
action="store_false",
dest="update",
default=True,
help="Prevents updates of privileges of existing groups",
)
parser.add_argument(
"--no-projects-update",
action="store_false",
dest="projects",
default=True,
help="Prevents updates of groups for existing projects",
)
def handle(self, *args, **options):
"""Create or update default set of groups.
It also optionally updates them and moves users around to default group.
"""
create_groups(options["update"])
if options["projects"]:
for project in Project.objects.iterator():
setup_project_groups(Project, project)
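# Usage sketch (assumed invocation via Django's manage.py):
#
#   ./manage.py setupgroups                       # create/update everything
#   ./manage.py setupgroups --no-projects-update  # skip per-project groups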
|
from typing import Callable
from homeassistant.components.cover import (
DEVICE_CLASS_SHUTTER,
DEVICE_CLASSES,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from .dynalitebase import DynaliteBase, async_setup_entry_base
DEFAULT_COVER_CLASS = DEVICE_CLASS_SHUTTER
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable
) -> None:
"""Record the async_add_entities function to add them later when received from Dynalite."""
@callback
def cover_from_device(device, bridge):
if device.has_tilt:
return DynaliteCoverWithTilt(device, bridge)
return DynaliteCover(device, bridge)
async_setup_entry_base(
hass, config_entry, async_add_entities, "cover", cover_from_device
)
class DynaliteCover(DynaliteBase, CoverEntity):
"""Representation of a Dynalite Channel as a Home Assistant Cover."""
@property
def device_class(self) -> str:
"""Return the class of the device."""
dev_cls = self._device.device_class
ret_val = DEFAULT_COVER_CLASS
if dev_cls in DEVICE_CLASSES:
ret_val = dev_cls
return ret_val
@property
def current_cover_position(self) -> int:
"""Return the position of the cover from 0 to 100."""
return self._device.current_cover_position
@property
def is_opening(self) -> bool:
"""Return true if cover is opening."""
return self._device.is_opening
@property
def is_closing(self) -> bool:
"""Return true if cover is closing."""
return self._device.is_closing
@property
def is_closed(self) -> bool:
"""Return true if cover is closed."""
return self._device.is_closed
async def async_open_cover(self, **kwargs) -> None:
"""Open the cover."""
await self._device.async_open_cover(**kwargs)
async def async_close_cover(self, **kwargs) -> None:
"""Close the cover."""
await self._device.async_close_cover(**kwargs)
async def async_set_cover_position(self, **kwargs) -> None:
"""Set the cover position."""
await self._device.async_set_cover_position(**kwargs)
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the cover."""
await self._device.async_stop_cover(**kwargs)
class DynaliteCoverWithTilt(DynaliteCover):
"""Representation of a Dynalite Channel as a Home Assistant Cover that uses up and down for tilt."""
@property
def current_cover_tilt_position(self) -> int:
"""Return the current tilt position."""
return self._device.current_cover_tilt_position
async def async_open_cover_tilt(self, **kwargs) -> None:
"""Open cover tilt."""
await self._device.async_open_cover_tilt(**kwargs)
async def async_close_cover_tilt(self, **kwargs) -> None:
"""Close cover tilt."""
await self._device.async_close_cover_tilt(**kwargs)
async def async_set_cover_tilt_position(self, **kwargs) -> None:
"""Set the cover tilt position."""
await self._device.async_set_cover_tilt_position(**kwargs)
async def async_stop_cover_tilt(self, **kwargs) -> None:
"""Stop the cover tilt."""
await self._device.async_stop_cover_tilt(**kwargs)
|
from functools import wraps
import logging
from typing import Any, Awaitable, Callable
from aiohttp import web
import voluptuous as vol
from homeassistant.const import HTTP_BAD_REQUEST
from .view import HomeAssistantView
_LOGGER = logging.getLogger(__name__)
class RequestDataValidator:
"""Decorator that will validate the incoming data.
Takes in a voluptuous schema and adds 'data' as a
keyword argument to the function call.
Will return a 400 if no JSON is provided or it doesn't match the schema.
"""
def __init__(self, schema: vol.Schema, allow_empty: bool = False) -> None:
"""Initialize the decorator."""
if isinstance(schema, dict):
schema = vol.Schema(schema)
self._schema = schema
self._allow_empty = allow_empty
def __call__(
self, method: Callable[..., Awaitable[web.StreamResponse]]
) -> Callable:
"""Decorate a function."""
@wraps(method)
async def wrapper(
view: HomeAssistantView, request: web.Request, *args: Any, **kwargs: Any
) -> web.StreamResponse:
"""Wrap a request handler with data validation."""
data = None
try:
data = await request.json()
except ValueError:
if not self._allow_empty or (await request.content.read()) != b"":
_LOGGER.error("Invalid JSON received")
return view.json_message("Invalid JSON.", HTTP_BAD_REQUEST)
data = {}
try:
kwargs["data"] = self._schema(data)
except vol.Invalid as err:
_LOGGER.error("Data does not match schema: %s", err)
return view.json_message(
f"Message format incorrect: {err}", HTTP_BAD_REQUEST
)
result = await method(view, request, *args, **kwargs)
return result
return wrapper
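# Usage sketch (hypothetical view, assuming the HomeAssistantView API shown
# above): the validated payload arrives as the `data` keyword argument.
#
#   class ExampleView(HomeAssistantView):
#       url = "/api/example"
#       name = "api:example"
#
#       @RequestDataValidator(vol.Schema({vol.Required("name"): str}))
#       async def post(self, request, data):
#           return self.json({"hello": data["name"]})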
|
from datetime import timedelta
import logging
import time
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by WorldTides"
DEFAULT_NAME = "WorldTidesInfo"
SCAN_INTERVAL = timedelta(seconds=3600)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the WorldTidesInfo sensor."""
name = config.get(CONF_NAME)
lat = config.get(CONF_LATITUDE, hass.config.latitude)
lon = config.get(CONF_LONGITUDE, hass.config.longitude)
key = config.get(CONF_API_KEY)
if None in (lat, lon):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return
tides = WorldTidesInfoSensor(name, lat, lon, key)
tides.update()
if tides.data.get("error") == "No location found":
_LOGGER.error("Location not available")
return
add_entities([tides])
class WorldTidesInfoSensor(Entity):
"""Representation of a WorldTidesInfo sensor."""
def __init__(self, name, lat, lon, key):
"""Initialize the sensor."""
self._name = name
self._lat = lat
self._lon = lon
self._key = key
self.data = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes of this device."""
attr = {ATTR_ATTRIBUTION: ATTRIBUTION}
if "High" in str(self.data["extremes"][0]["type"]):
attr["high_tide_time_utc"] = self.data["extremes"][0]["date"]
attr["high_tide_height"] = self.data["extremes"][0]["height"]
attr["low_tide_time_utc"] = self.data["extremes"][1]["date"]
attr["low_tide_height"] = self.data["extremes"][1]["height"]
elif "Low" in str(self.data["extremes"][0]["type"]):
attr["high_tide_time_utc"] = self.data["extremes"][1]["date"]
attr["high_tide_height"] = self.data["extremes"][1]["height"]
attr["low_tide_time_utc"] = self.data["extremes"][0]["date"]
attr["low_tide_height"] = self.data["extremes"][0]["height"]
return attr
@property
def state(self):
"""Return the state of the device."""
if self.data:
if "High" in str(self.data["extremes"][0]["type"]):
tidetime = time.strftime(
"%I:%M %p", time.localtime(self.data["extremes"][0]["dt"])
)
return f"High tide at {tidetime}"
if "Low" in str(self.data["extremes"][0]["type"]):
tidetime = time.strftime(
"%I:%M %p", time.localtime(self.data["extremes"][0]["dt"])
)
return f"Low tide at {tidetime}"
return None
return None
def update(self):
"""Get the latest data from WorldTidesInfo API."""
start = int(time.time())
resource = (
"https://www.worldtides.info/api?extremes&length=86400"
"&key={}&lat={}&lon={}&start={}"
).format(self._key, self._lat, self._lon, start)
try:
self.data = requests.get(resource, timeout=10).json()
_LOGGER.debug("Data: %s", self.data)
_LOGGER.debug("Tide data queried with start time set to: %s", start)
except ValueError as err:
_LOGGER.error("Error retrieving data from WorldTidesInfo: %s", err.args)
self.data = None
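# Example configuration.yaml entry (assumed values, matching PLATFORM_SCHEMA
# above):
#
#   sensor:
#     - platform: worldtidesinfo
#       api_key: YOUR_API_KEY
#       latitude: 48.5
#       longitude: -124.0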
|
import argparse
import json
from django.core.serializers.json import DjangoJSONEncoder
from weblate.accounts.models import Profile
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "dumps user data to JSON file"
def add_arguments(self, parser):
parser.add_argument(
"json-file", type=argparse.FileType("w"), help="File where to export"
)
def handle(self, *args, **options):
data = []
profiles = Profile.objects.select_related("user").prefetch_related(
"watched", "languages", "secondary_languages"
)
for profile in profiles:
if not profile.user.is_active:
continue
data.append(profile.dump_data())
json.dump(data, options["json-file"], indent=2, cls=DjangoJSONEncoder)
options["json-file"].close()
|
import logging
import pytest
from qutebrowser.config import configdata
from qutebrowser.utils import usertypes, version
from qutebrowser.browser.webengine import darkmode
from qutebrowser.misc import objects
from helpers import utils
@pytest.fixture(autouse=True)
def patch_backend(monkeypatch):
monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebEngine)
@pytest.mark.parametrize('settings, expected', [
# Disabled
({}, []),
# Enabled without customization
({'enabled': True}, [('forceDarkModeEnabled', 'true')]),
# Algorithm
(
{'enabled': True, 'algorithm': 'brightness-rgb'},
[
('forceDarkModeEnabled', 'true'),
('forceDarkModeInversionAlgorithm', '2')
],
),
])
def test_basics(config_stub, monkeypatch, settings, expected):
for k, v in settings.items():
config_stub.set_obj('colors.webpage.darkmode.' + k, v)
monkeypatch.setattr(darkmode, '_variant',
lambda: darkmode.Variant.qt_515_2)
if expected:
expected.append(('forceDarkModeImagePolicy', '2'))
assert list(darkmode.settings()) == expected
QT_514_SETTINGS = [
('darkMode', '2'),
('darkModeImagePolicy', '2'),
('darkModeGrayscale', 'true'),
]
QT_515_0_SETTINGS = [
('darkModeEnabled', 'true'),
('darkModeInversionAlgorithm', '2'),
('darkModeGrayscale', 'true'),
]
QT_515_1_SETTINGS = [
('darkModeEnabled', 'true'),
('darkModeInversionAlgorithm', '2'),
('darkModeImagePolicy', '2'),
('darkModeGrayscale', 'true'),
]
QT_515_2_SETTINGS = [
('forceDarkModeEnabled', 'true'),
('forceDarkModeInversionAlgorithm', '2'),
('forceDarkModeImagePolicy', '2'),
('forceDarkModeGrayscale', 'true'),
]
@pytest.mark.parametrize('qversion, expected', [
('5.14.0', QT_514_SETTINGS),
('5.14.1', QT_514_SETTINGS),
('5.14.2', QT_514_SETTINGS),
('5.15.0', QT_515_0_SETTINGS),
('5.15.1', QT_515_1_SETTINGS),
('5.15.2', QT_515_2_SETTINGS),
])
def test_qt_version_differences(config_stub, monkeypatch, qversion, expected):
monkeypatch.setattr(darkmode.qtutils, 'qVersion', lambda: qversion)
major, minor, patch = [int(part) for part in qversion.split('.')]
hexversion = major << 16 | minor << 8 | patch
if major > 5 or minor >= 13:
# Added in Qt 5.13
monkeypatch.setattr(darkmode, 'PYQT_WEBENGINE_VERSION', hexversion)
settings = {
'enabled': True,
'algorithm': 'brightness-rgb',
'grayscale.all': True,
}
for k, v in settings.items():
config_stub.set_obj('colors.webpage.darkmode.' + k, v)
assert list(darkmode.settings()) == expected
@utils.qt514
@pytest.mark.parametrize('setting, value, exp_key, exp_val', [
('contrast', -0.5,
'Contrast', '-0.5'),
('policy.page', 'smart',
'PagePolicy', '1'),
('policy.images', 'smart',
'ImagePolicy', '2'),
('threshold.text', 100,
'TextBrightnessThreshold', '100'),
('threshold.background', 100,
'BackgroundBrightnessThreshold', '100'),
('grayscale.all', True,
'Grayscale', 'true'),
('grayscale.images', 0.5,
'ImageGrayscale', '0.5'),
])
def test_customization(config_stub, monkeypatch, setting, value, exp_key, exp_val):
config_stub.val.colors.webpage.darkmode.enabled = True
config_stub.set_obj('colors.webpage.darkmode.' + setting, value)
monkeypatch.setattr(darkmode, '_variant', lambda: darkmode.Variant.qt_515_2)
expected = []
expected.append(('forceDarkModeEnabled', 'true'))
if exp_key != 'ImagePolicy':
expected.append(('forceDarkModeImagePolicy', '2'))
expected.append(('forceDarkMode' + exp_key, exp_val))
assert list(darkmode.settings()) == expected
@pytest.mark.parametrize('qversion, webengine_version, expected', [
# Without PYQT_WEBENGINE_VERSION
('5.12.9', None, darkmode.Variant.qt_511_to_513),
# With PYQT_WEBENGINE_VERSION
(None, 0x050d00, darkmode.Variant.qt_511_to_513),
(None, 0x050e00, darkmode.Variant.qt_514),
(None, 0x050f00, darkmode.Variant.qt_515_0),
(None, 0x050f01, darkmode.Variant.qt_515_1),
(None, 0x050f02, darkmode.Variant.qt_515_2),
(None, 0x060000, darkmode.Variant.qt_515_2), # Qt 6
])
def test_variant(monkeypatch, qversion, webengine_version, expected):
monkeypatch.setattr(darkmode.qtutils, 'qVersion', lambda: qversion)
monkeypatch.setattr(darkmode, 'PYQT_WEBENGINE_VERSION', webengine_version)
assert darkmode._variant() == expected
def test_broken_smart_images_policy(config_stub, monkeypatch, caplog):
config_stub.val.colors.webpage.darkmode.enabled = True
config_stub.val.colors.webpage.darkmode.policy.images = 'smart'
monkeypatch.setattr(darkmode, 'PYQT_WEBENGINE_VERSION', 0x050f00)
with caplog.at_level(logging.WARNING):
settings = list(darkmode.settings())
assert caplog.messages[-1] == (
'Ignoring colors.webpage.darkmode.policy.images = smart because of '
'Qt 5.15.0 bug')
expected = [
[('darkModeEnabled', 'true')], # Qt 5.15
[('darkMode', '4')], # Qt 5.14
]
assert settings in expected
def test_new_chromium():
"""Fail if we encounter an unknown Chromium version.
Dark mode in Chromium (or rather, the underlying Blink) is being changed with
almost every Chromium release.
Make this test fail deliberately with newer Chromium versions, so that
we can test whether dark mode still works manually, and adjust if not.
"""
assert version._chromium_version() in [
'unavailable', # QtWebKit
'61.0.3163.140', # Qt 5.10
'65.0.3325.230', # Qt 5.11
'69.0.3497.128', # Qt 5.12
'73.0.3683.105', # Qt 5.13
'77.0.3865.129', # Qt 5.14
'80.0.3987.163', # Qt 5.15.0
'83.0.4103.122', # Qt 5.15.2
]
def test_options(configdata_init):
"""Make sure all darkmode options have the right attributes set."""
for name, opt in configdata.DATA.items():
if not name.startswith('colors.webpage.darkmode.'):
continue
assert not opt.supports_pattern, name
assert opt.restart, name
if opt.backends:
# On older Qt versions, this is an empty list.
assert opt.backends == [usertypes.Backend.QtWebEngine], name
if opt.raw_backends is not None:
assert not opt.raw_backends['QtWebKit'], name
assert opt.raw_backends['QtWebEngine'] == 'Qt 5.14', name
|
import csv
from django.http import HttpResponse, JsonResponse
from weblate.api.serializers import StatisticsSerializer
from weblate.trans.stats import get_project_stats
from weblate.utils.views import get_component, get_project
def export_stats_project(request, project):
"""Export stats in JSON format."""
obj = get_project(request, project)
return export_response(
request,
f"stats-{obj.slug}.csv",
(
"language",
"code",
"total",
"translated",
"translated_percent",
"total_words",
"translated_words",
"translated_words_percent",
"total_chars",
"translated_chars",
"translated_chars_percent",
),
get_project_stats(obj),
)
def export_stats(request, project, component):
"""Export stats in JSON format."""
subprj = get_component(request, project, component)
translations = subprj.translation_set.order_by("language_code")
return export_response(
request,
f"stats-{subprj.project.slug}-{subprj.slug}.csv",
(
"name",
"code",
"total",
"translated",
"translated_percent",
"translated_words_percent",
"total_words",
"translated_words",
"total_chars",
"translated_chars",
"translated_chars_percent",
"failing",
"failing_percent",
"fuzzy",
"fuzzy_percent",
"url_translate",
"url",
"translate_url",
"last_change",
"last_author",
"recent_changes",
"readonly",
"readonly_percent",
"approved",
"approved_percent",
"suggestions",
"comments",
),
StatisticsSerializer(translations, many=True).data,
)
def export_response(request, filename, fields, data):
"""Generic handler for stats exports."""
output = request.GET.get("format", "json")
if output not in ("json", "csv"):
output = "json"
if output == "csv":
response = HttpResponse(content_type="text/csv; charset=utf-8")
response["Content-Disposition"] = f"attachment; filename={filename}"
writer = csv.DictWriter(response, fields)
writer.writeheader()
for row in data:
writer.writerow(row)
return response
return JsonResponse(data=data, safe=False, json_dumps_params={"indent": 2})
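# Usage sketch (URL paths are hypothetical): both views honor a ?format=
# query parameter, defaulting to JSON.
#
#   GET /exports/stats/myproject/?format=csv   -> CSV attachment
#   GET /exports/stats/myproject/?format=json  -> indented JSON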
|
from homeassistant.components.notify import BaseNotificationService
EVENT_NOTIFY = "notify"
def get_service(hass, config, discovery_info=None):
"""Get the demo notification service."""
return DemoNotificationService(hass)
class DemoNotificationService(BaseNotificationService):
"""Implement demo notification service."""
def __init__(self, hass):
"""Initialize the service."""
self.hass = hass
@property
def targets(self):
"""Return a dictionary of registered targets."""
return {"test target name": "test target id"}
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
kwargs["message"] = message
self.hass.bus.fire(EVENT_NOTIFY, kwargs)
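# Usage sketch (assumed service call): sending a notification through this
# platform just fires EVENT_NOTIFY on the event bus, so a listener can pick
# the message up.
#
#   events = []
#   hass.bus.listen(EVENT_NOTIFY, events.append)
#   hass.services.call("notify", "demo", {"message": "hello"}, blocking=True)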
|
import logging
from serial import SerialException
from homeassistant import core
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import CONF_PORT, STATE_OFF, STATE_ON
from homeassistant.helpers import config_validation as cv, entity_platform, service
from .const import (
CONF_SOURCES,
DOMAIN,
FIRST_RUN,
MONOPRICE_OBJECT,
SERVICE_RESTORE,
SERVICE_SNAPSHOT,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1
SUPPORT_MONOPRICE = (
SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
)
@core.callback
def _get_sources_from_dict(data):
sources_config = data[CONF_SOURCES]
source_id_name = {int(index): name for index, name in sources_config.items()}
source_name_id = {v: k for k, v in source_id_name.items()}
source_names = sorted(source_name_id.keys(), key=lambda v: source_name_id[v])
return [source_id_name, source_name_id, source_names]
@core.callback
def _get_sources(config_entry):
if CONF_SOURCES in config_entry.options:
data = config_entry.options
else:
data = config_entry.data
return _get_sources_from_dict(data)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Monoprice 6-zone amplifier platform."""
port = config_entry.data[CONF_PORT]
monoprice = hass.data[DOMAIN][config_entry.entry_id][MONOPRICE_OBJECT]
sources = _get_sources(config_entry)
entities = []
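# The amplifier supports up to three daisy-chained units with six zones
# each, so the valid zone ids are 11-16, 21-26 and 31-36.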
for i in range(1, 4):
for j in range(1, 7):
zone_id = (i * 10) + j
_LOGGER.info("Adding zone %d for port %s", zone_id, port)
entities.append(
MonopriceZone(monoprice, sources, config_entry.entry_id, zone_id)
)
# only call update before add if it's the first run so we can try to detect zones
first_run = hass.data[DOMAIN][config_entry.entry_id][FIRST_RUN]
async_add_entities(entities, first_run)
platform = entity_platform.current_platform.get()
def _call_service(entities, service_call):
for entity in entities:
if service_call.service == SERVICE_SNAPSHOT:
entity.snapshot()
elif service_call.service == SERVICE_RESTORE:
entity.restore()
@service.verify_domain_control(hass, DOMAIN)
async def async_service_handle(service_call):
"""Handle for services."""
entities = await platform.async_extract_from_service(service_call)
if not entities:
return
hass.async_add_executor_job(_call_service, entities, service_call)
hass.services.async_register(
DOMAIN,
SERVICE_SNAPSHOT,
async_service_handle,
schema=cv.make_entity_service_schema({}),
)
hass.services.async_register(
DOMAIN,
SERVICE_RESTORE,
async_service_handle,
schema=cv.make_entity_service_schema({}),
)
class MonopriceZone(MediaPlayerEntity):
"""Representation of a Monoprice amplifier zone."""
def __init__(self, monoprice, sources, namespace, zone_id):
"""Initialize new zone."""
self._monoprice = monoprice
# dict source_id -> source name
self._source_id_name = sources[0]
# dict source name -> source_id
self._source_name_id = sources[1]
# ordered list of all source names
self._source_names = sources[2]
self._zone_id = zone_id
self._unique_id = f"{namespace}_{self._zone_id}"
self._name = f"Zone {self._zone_id}"
self._snapshot = None
self._state = None
self._volume = None
self._source = None
self._mute = None
self._update_success = True
def update(self):
"""Retrieve latest state."""
try:
state = self._monoprice.zone_status(self._zone_id)
except SerialException:
self._update_success = False
_LOGGER.warning("Could not update zone %d", self._zone_id)
return
if not state:
self._update_success = False
return
self._state = STATE_ON if state.power else STATE_OFF
self._volume = state.volume
self._mute = state.mute
idx = state.source
if idx in self._source_id_name:
self._source = self._source_id_name[idx]
else:
self._source = None
@property
def entity_registry_enabled_default(self):
"""Return if the entity should be enabled when first added to the entity registry."""
return self._zone_id < 20 or self._update_success
@property
def device_info(self):
"""Return device info for this device."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Monoprice",
"model": "6-Zone Amplifier",
}
@property
def unique_id(self):
"""Return unique ID for this device."""
return self._unique_id
@property
def name(self):
"""Return the name of the zone."""
return self._name
@property
def state(self):
"""Return the state of the zone."""
return self._state
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
if self._volume is None:
return None
return self._volume / 38.0
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
@property
def supported_features(self):
"""Return flag of media commands that are supported."""
return SUPPORT_MONOPRICE
@property
def media_title(self):
"""Return the current source as medial title."""
return self._source
@property
def source(self):
"""Return the current input source of the device."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._source_names
def snapshot(self):
"""Save zone's current state."""
self._snapshot = self._monoprice.zone_status(self._zone_id)
def restore(self):
"""Restore saved state."""
if self._snapshot:
self._monoprice.restore_zone(self._snapshot)
self.schedule_update_ha_state(True)
def select_source(self, source):
"""Set input source."""
if source not in self._source_name_id:
return
idx = self._source_name_id[source]
self._monoprice.set_source(self._zone_id, idx)
def turn_on(self):
"""Turn the media player on."""
self._monoprice.set_power(self._zone_id, True)
def turn_off(self):
"""Turn the media player off."""
self._monoprice.set_power(self._zone_id, False)
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self._monoprice.set_mute(self._zone_id, mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._monoprice.set_volume(self._zone_id, int(volume * 38))
def volume_up(self):
"""Volume up the media player."""
if self._volume is None:
return
self._monoprice.set_volume(self._zone_id, min(self._volume + 1, 38))
def volume_down(self):
"""Volume down media player."""
if self._volume is None:
return
self._monoprice.set_volume(self._zone_id, max(self._volume - 1, 0))
|
from homeassistant.const import CONF_NAME
from . import PiHoleEntity
from .const import (
ATTR_BLOCKED_DOMAINS,
DATA_KEY_API,
DATA_KEY_COORDINATOR,
DOMAIN as PIHOLE_DOMAIN,
SENSOR_DICT,
SENSOR_LIST,
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Pi-hole sensor."""
name = entry.data[CONF_NAME]
hole_data = hass.data[PIHOLE_DOMAIN][entry.entry_id]
sensors = [
PiHoleSensor(
hole_data[DATA_KEY_API],
hole_data[DATA_KEY_COORDINATOR],
name,
sensor_name,
entry.entry_id,
)
for sensor_name in SENSOR_LIST
]
async_add_entities(sensors, True)
class PiHoleSensor(PiHoleEntity):
"""Representation of a Pi-hole sensor."""
def __init__(self, api, coordinator, name, sensor_name, server_unique_id):
"""Initialize a Pi-hole sensor."""
super().__init__(api, coordinator, name, server_unique_id)
self._condition = sensor_name
variable_info = SENSOR_DICT[sensor_name]
self._condition_name = variable_info[0]
self._unit_of_measurement = variable_info[1]
self._icon = variable_info[2]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name} {self._condition_name}"
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return f"{self._server_unique_id}/{self._condition_name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
try:
return round(self.api.data[self._condition], 2)
except TypeError:
return self.api.data[self._condition]
@property
def device_state_attributes(self):
"""Return the state attributes of the Pi-hole."""
return {ATTR_BLOCKED_DOMAINS: self.api.data["domains_being_blocked"]}
|
import logging
import pytest
from homeassistant.components.yessssms.const import CONF_PROVIDER
import homeassistant.components.yessssms.notify as yessssms
from homeassistant.const import (
CONF_PASSWORD,
CONF_RECIPIENT,
CONF_USERNAME,
HTTP_INTERNAL_SERVER_ERROR,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
@pytest.fixture(name="config")
def config_data():
"""Set valid config data."""
config = {
"notify": {
"platform": "yessssms",
"name": "sms",
CONF_USERNAME: "06641234567",
CONF_PASSWORD: "secretPassword",
CONF_RECIPIENT: "06509876543",
CONF_PROVIDER: "educom",
}
}
return config
@pytest.fixture(name="valid_settings")
def init_valid_settings(hass, config):
"""Initialize component with valid settings."""
return async_setup_component(hass, "notify", config)
@pytest.fixture(name="invalid_provider_settings")
def init_invalid_provider_settings(hass, config):
"""Set invalid provider data and initialize component."""
config["notify"][CONF_PROVIDER] = "FantasyMobile" # invalid provider
return async_setup_component(hass, "notify", config)
@pytest.fixture(name="invalid_login_data")
def mock_invalid_login_data():
"""Mock invalid login data."""
path = "homeassistant.components.yessssms.notify.YesssSMS.login_data_valid"
with patch(path, return_value=False):
yield
@pytest.fixture(name="valid_login_data")
def mock_valid_login_data():
"""Mock valid login data."""
path = "homeassistant.components.yessssms.notify.YesssSMS.login_data_valid"
with patch(path, return_value=True):
yield
@pytest.fixture(name="connection_error")
def mock_connection_error():
"""Mock a connection error."""
path = "homeassistant.components.yessssms.notify.YesssSMS.login_data_valid"
with patch(path, side_effect=yessssms.YesssSMS.ConnectionError()):
yield
async def test_unsupported_provider_error(hass, caplog, invalid_provider_settings):
"""Test for error on unsupported provider."""
await invalid_provider_settings
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert (
"Unknown provider: provider (fantasymobile) is not known to YesssSMS"
in record.message
)
assert (
"Unknown provider: provider (fantasymobile) is not known to YesssSMS"
in caplog.text
)
assert not hass.services.has_service("notify", "sms")
async def test_false_login_data_error(hass, caplog, valid_settings, invalid_login_data):
"""Test login data check error."""
await valid_settings
assert not hass.services.has_service("notify", "sms")
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert (
"Login data is not valid! Please double check your login data at"
in record.message
)
async def test_init_success(hass, caplog, valid_settings, valid_login_data):
"""Test for successful init of yessssms."""
caplog.set_level(logging.DEBUG)
await valid_settings
assert hass.services.has_service("notify", "sms")
messages = []
for record in caplog.records:
if (
record.levelname == "DEBUG"
and record.name == "homeassistant.components.yessssms.notify"
):
messages.append(record.message)
assert "Login data for 'educom' valid" in messages[0]
assert (
"initialized; library version: {}".format(yessssms.YesssSMS("", "").version())
in messages[1]
)
async def test_connection_error_on_init(hass, caplog, valid_settings, connection_error):
"""Test for connection error on init."""
caplog.set_level(logging.DEBUG)
await valid_settings
assert hass.services.has_service("notify", "sms")
for record in caplog.records:
if (
record.levelname == "WARNING"
and record.name == "homeassistant.components.yessssms.notify"
):
assert (
"Connection Error, could not verify login data for 'educom'"
in record.message
)
for record in caplog.records:
if (
record.levelname == "DEBUG"
and record.name == "homeassistant.components.yessssms.notify"
):
assert (
"initialized; library version: {}".format(
yessssms.YesssSMS("", "").version()
)
in record.message
)
@pytest.fixture(name="yessssms")
def yessssms_init():
"""Set up things to be run when tests are started."""
login = "06641234567"
passwd = "testpasswd"
recipient = "06501234567"
client = yessssms.YesssSMS(login, passwd)
return yessssms.YesssSMSNotificationService(client, recipient)
async def test_login_error(yessssms, requests_mock, caplog):
"""Test login that fails."""
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
status_code=200,
text="BlaBlaBla<strong>Login nicht erfolgreichBlaBla",
)
message = "Testing YesssSMS platform :)"
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
assert requests_mock.called is True
assert requests_mock.call_count == 1
async def test_empty_message_error(yessssms, caplog):
"""Test for an empty SMS message error."""
message = ""
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert "Cannot send empty SMS message" in record.message
async def test_error_account_suspended(yessssms, requests_mock, caplog):
"""Test login that fails after multiple attempts."""
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
status_code=200,
text="BlaBlaBla<strong>Login nicht erfolgreichBlaBla",
)
message = "Testing YesssSMS platform :)"
yessssms.send_message(message)
assert requests_mock.called is True
assert requests_mock.call_count == 1
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
status_code=200,
text="Wegen 3 ungültigen Login-Versuchen ist Ihr Account für "
"eine Stunde gesperrt.",
)
message = "Testing YesssSMS platform :)"
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
assert requests_mock.called is True
assert requests_mock.call_count == 2
async def test_error_account_suspended_2(yessssms, caplog):
"""Test login that fails after multiple attempts."""
message = "Testing YesssSMS platform :)"
# pylint: disable=protected-access
yessssms.yesss._suspended = True
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert "Account is suspended, cannot send SMS." in record.message
async def test_send_message(yessssms, requests_mock, caplog):
"""Test send message."""
message = "Testing YesssSMS platform :)"
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
status_code=302,
# pylint: disable=protected-access
headers={"location": yessssms.yesss._kontomanager},
)
# pylint: disable=protected-access
login = yessssms.yesss._logindata["login_rufnummer"]
requests_mock.get(
# pylint: disable=protected-access
yessssms.yesss._kontomanager,
status_code=200,
text=f"test...{login}</a>",
)
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._websms_url,
status_code=200,
text="<h1>Ihre SMS wurde erfolgreich verschickt!</h1>",
)
requests_mock.get(
# pylint: disable=protected-access
yessssms.yesss._logout_url,
status_code=200,
)
with caplog.at_level(logging.INFO):
yessssms.send_message(message)
for record in caplog.records:
if (
record.levelname == "INFO"
and record.name == "homeassistant.components.yessssms.notify"
):
assert "SMS sent" in record.message
assert requests_mock.called is True
assert requests_mock.call_count == 4
assert (
requests_mock.last_request.scheme
+ "://"
+ requests_mock.last_request.hostname
+ requests_mock.last_request.path
+ "?"
+ requests_mock.last_request.query
) in yessssms.yesss._logout_url # pylint: disable=protected-access
async def test_no_recipient_error(yessssms, caplog):
"""Test for missing/empty recipient."""
message = "Testing YesssSMS platform :)"
# pylint: disable=protected-access
yessssms._recipient = ""
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert (
"You need to provide a recipient for SMS notification" in record.message
)
async def test_sms_sending_error(yessssms, requests_mock, caplog):
"""Test sms sending error."""
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
status_code=302,
# pylint: disable=protected-access
headers={"location": yessssms.yesss._kontomanager},
)
# pylint: disable=protected-access
login = yessssms.yesss._logindata["login_rufnummer"]
requests_mock.get(
# pylint: disable=protected-access
yessssms.yesss._kontomanager,
status_code=200,
text=f"test...{login}</a>",
)
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._websms_url,
status_code=HTTP_INTERNAL_SERVER_ERROR,
)
message = "Testing YesssSMS platform :)"
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
assert requests_mock.called is True
assert requests_mock.call_count == 3
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert "YesssSMS: error sending SMS" in record.message
async def test_connection_error(yessssms, requests_mock, caplog):
"""Test connection error."""
requests_mock.post(
# pylint: disable=protected-access
yessssms.yesss._login_url,
exc=yessssms.yesss.ConnectionError,
)
message = "Testing YesssSMS platform :)"
with caplog.at_level(logging.ERROR):
yessssms.send_message(message)
assert requests_mock.called is True
assert requests_mock.call_count == 1
for record in caplog.records:
if (
record.levelname == "ERROR"
and record.name == "homeassistant.components.yessssms.notify"
):
assert "cannot connect to provider" in record.message
|
import base64
from kombu.message import Message
from kombu.utils.encoding import str_to_bytes
class BaseAsyncMessage(Message):
"""Base class for messages received on async client."""
class AsyncRawMessage(BaseAsyncMessage):
"""Raw Message."""
class AsyncMessage(BaseAsyncMessage):
"""Serialized message."""
def encode(self, value):
"""Encode/decode the value using Base64 encoding."""
return base64.b64encode(str_to_bytes(value)).decode()
def __getitem__(self, item):
"""Support Boto3-style access on a message."""
if item == 'ReceiptHandle':
return self.receipt_handle
elif item == 'Body':
return self.get_body()
elif item == 'queue':
return self.queue
else:
raise KeyError(item)
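# Usage sketch (assumes `msg` is an AsyncMessage delivered by the async
# channel): Boto3-style keys map onto kombu message attributes.
#
#   msg['Body']           # same as msg.get_body()
#   msg['ReceiptHandle']  # same as msg.receipt_handle
#   msg.encode('hello')   # -> 'aGVsbG8=' (Base64 text)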
|
from django.urls import reverse
from weblate.trans.tests.test_views import FixtureTestCase
class BasicViewTest(FixtureTestCase):
def test_about(self):
response = self.client.get(reverse("about"))
self.assertContains(response, "translate-toolkit")
def test_keys(self):
response = self.client.get(reverse("keys"))
self.assertContains(response, "SSH")
def test_stats(self):
response = self.client.get(reverse("stats"))
self.assertContains(response, "Weblate statistics")
def test_healthz(self):
response = self.client.get(reverse("healthz"))
self.assertContains(response, "ok")
|
import logging
import pyatmo
import requests
import voluptuous as vol
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_CAMERA_LIGHT_MODE,
ATTR_PERSON,
ATTR_PERSONS,
ATTR_PSEUDO,
CAMERA_LIGHT_MODES,
DATA_CAMERAS,
DATA_EVENTS,
DATA_HANDLER,
DATA_PERSONS,
DOMAIN,
EVENT_TYPE_LIGHT_MODE,
EVENT_TYPE_OFF,
EVENT_TYPE_ON,
MANUFACTURER,
MODELS,
SERVICE_SET_CAMERA_LIGHT,
SERVICE_SET_PERSON_AWAY,
SERVICE_SET_PERSONS_HOME,
SIGNAL_NAME,
)
from .data_handler import CAMERA_DATA_CLASS_NAME
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
DEFAULT_QUALITY = "high"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Netatmo camera platform."""
if "access_camera" not in entry.data["token"]["scope"]:
_LOGGER.info(
"Cameras are currently not supported with this authentication method"
)
return
data_handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]
async def get_entities():
"""Retrieve Netatmo entities."""
await data_handler.register_data_class(
CAMERA_DATA_CLASS_NAME, CAMERA_DATA_CLASS_NAME, None
)
data = data_handler.data
if not data.get(CAMERA_DATA_CLASS_NAME):
return []
data_class = data_handler.data[CAMERA_DATA_CLASS_NAME]
entities = []
try:
all_cameras = []
for home in data_class.cameras.values():
for camera in home.values():
all_cameras.append(camera)
for camera in all_cameras:
_LOGGER.debug("Adding camera %s %s", camera["id"], camera["name"])
entities.append(
NetatmoCamera(
data_handler,
camera["id"],
camera["type"],
camera["home_id"],
DEFAULT_QUALITY,
)
)
for person_id, person_data in data_handler.data[
CAMERA_DATA_CLASS_NAME
].persons.items():
hass.data[DOMAIN][DATA_PERSONS][person_id] = person_data.get(
ATTR_PSEUDO
)
except pyatmo.NoDevice:
_LOGGER.debug("No cameras found")
return entities
async_add_entities(await get_entities(), True)
platform = entity_platform.current_platform.get()
if data_handler.data[CAMERA_DATA_CLASS_NAME] is not None:
platform.async_register_entity_service(
SERVICE_SET_PERSONS_HOME,
{vol.Required(ATTR_PERSONS): vol.All(cv.ensure_list, [cv.string])},
"_service_set_persons_home",
)
platform.async_register_entity_service(
SERVICE_SET_PERSON_AWAY,
{vol.Optional(ATTR_PERSON): cv.string},
"_service_set_person_away",
)
platform.async_register_entity_service(
SERVICE_SET_CAMERA_LIGHT,
{vol.Required(ATTR_CAMERA_LIGHT_MODE): vol.In(CAMERA_LIGHT_MODES)},
"_service_set_camera_light",
)
class NetatmoCamera(NetatmoBase, Camera):
"""Representation of a Netatmo camera."""
def __init__(
self,
data_handler,
camera_id,
camera_type,
home_id,
quality,
):
"""Set up for access to the Netatmo camera images."""
Camera.__init__(self)
super().__init__(data_handler)
self._data_classes.append(
{"name": CAMERA_DATA_CLASS_NAME, SIGNAL_NAME: CAMERA_DATA_CLASS_NAME}
)
self._id = camera_id
self._home_id = home_id
self._device_name = self._data.get_camera(camera_id=camera_id).get("name")
self._name = f"{MANUFACTURER} {self._device_name}"
self._model = camera_type
self._unique_id = f"{self._id}-{self._model}"
self._quality = quality
self._vpnurl = None
self._localurl = None
self._status = None
self._sd_status = None
self._alim_status = None
self._is_local = None
self._light_state = None
async def async_added_to_hass(self) -> None:
"""Entity created."""
await super().async_added_to_hass()
for event_type in (EVENT_TYPE_LIGHT_MODE, EVENT_TYPE_OFF, EVENT_TYPE_ON):
self._listeners.append(
async_dispatcher_connect(
self.hass,
f"signal-{DOMAIN}-webhook-{event_type}",
self.handle_event,
)
)
self.hass.data[DOMAIN][DATA_CAMERAS][self._id] = self._device_name
@callback
def handle_event(self, event):
"""Handle webhook events."""
data = event["data"]
if not data.get("camera_id"):
return
if data["home_id"] == self._home_id and data["camera_id"] == self._id:
if data["push_type"] in ["NACamera-off", "NACamera-disconnection"]:
self.is_streaming = False
self._status = "off"
elif data["push_type"] in ["NACamera-on", "NACamera-connection"]:
self.is_streaming = True
self._status = "on"
elif data["push_type"] == "NOC-light_mode":
self._light_state = data["sub_type"]
self.async_write_ha_state()
return
def camera_image(self):
"""Return a still image response from the camera."""
try:
if self._localurl:
response = requests.get(
f"{self._localurl}/live/snapshot_720.jpg", timeout=10
)
elif self._vpnurl:
response = requests.get(
f"{self._vpnurl}/live/snapshot_720.jpg",
timeout=10,
verify=True,
)
else:
_LOGGER.error("Welcome/Presence VPN URL is None")
(self._vpnurl, self._localurl) = self._data.camera_urls(
camera_id=self._id
)
return None
except requests.exceptions.RequestException as error:
_LOGGER.info("Welcome/Presence URL changed: %s", error)
self._data.update_camera_urls(camera_id=self._id)
(self._vpnurl, self._localurl) = self._data.camera_urls(camera_id=self._id)
return None
return response.content
@property
def device_state_attributes(self):
"""Return the Netatmo-specific camera state attributes."""
return {
"id": self._id,
"status": self._status,
"sd_status": self._sd_status,
"alim_status": self._alim_status,
"is_local": self._is_local,
"vpn_url": self._vpnurl,
"local_url": self._localurl,
"light_state": self._light_state,
}
@property
def available(self):
"""Return True if entity is available."""
return bool(self._alim_status == "on" or self._status == "disconnected")
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_STREAM
@property
def brand(self):
"""Return the camera brand."""
return MANUFACTURER
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return bool(self._status == "on")
@property
def is_on(self):
"""Return true if on."""
return self.is_streaming
def turn_off(self):
"""Turn off camera."""
self._data.set_state(
home_id=self._home_id, camera_id=self._id, monitoring="off"
)
def turn_on(self):
"""Turn on camera."""
self._data.set_state(home_id=self._home_id, camera_id=self._id, monitoring="on")
async def stream_source(self):
"""Return the stream source."""
url = "{0}/live/files/{1}/index.m3u8"
if self._localurl:
return url.format(self._localurl, self._quality)
return url.format(self._vpnurl, self._quality)
@property
def model(self):
"""Return the camera model."""
return MODELS[self._model]
@callback
def async_update_callback(self):
"""Update the entity's state."""
camera = self._data.get_camera(self._id)
self._vpnurl, self._localurl = self._data.camera_urls(self._id)
self._status = camera.get("status")
self._sd_status = camera.get("sd_status")
self._alim_status = camera.get("alim_status")
self._is_local = camera.get("is_local")
self.is_streaming = bool(self._status == "on")
if self._model == "NACamera": # Smart Indoor Camera
self.hass.data[DOMAIN][DATA_EVENTS][self._id] = self.process_events(
self._data.events.get(self._id, {})
)
elif self._model == "NOC": # Smart Outdoor Camera
self.hass.data[DOMAIN][DATA_EVENTS][self._id] = self.process_events(
self._data.outdoor_events.get(self._id, {})
)
def process_events(self, events):
"""Add meta data to events."""
for event in events.values():
if "video_id" not in event:
continue
if self._is_local:
event[
"media_url"
] = f"{self._localurl}/vod/{event['video_id']}/files/{self._quality}/index.m3u8"
else:
event[
"media_url"
] = f"{self._vpnurl}/vod/{event['video_id']}/files/{self._quality}/index.m3u8"
return events
def _service_set_persons_home(self, **kwargs):
"""Service to change current home schedule."""
persons = kwargs.get(ATTR_PERSONS)
person_ids = []
for person in persons:
for pid, data in self._data.persons.items():
if data.get("pseudo") == person:
person_ids.append(pid)
self._data.set_persons_home(person_ids=person_ids, home_id=self._home_id)
_LOGGER.debug("Set %s as at home", persons)
def _service_set_person_away(self, **kwargs):
"""Service to mark a person as away or set the home as empty."""
person = kwargs.get(ATTR_PERSON)
person_id = None
if person:
for pid, data in self._data.persons.items():
if data.get("pseudo") == person:
person_id = pid
if person_id is not None:
self._data.set_persons_away(
person_id=person_id,
home_id=self._home_id,
)
_LOGGER.debug("Set %s as away", person)
else:
self._data.set_persons_away(
person_id=person_id,
home_id=self._home_id,
)
_LOGGER.debug("Set home as empty")
def _service_set_camera_light(self, **kwargs):
"""Service to set light mode."""
mode = kwargs.get(ATTR_CAMERA_LIGHT_MODE)
_LOGGER.debug("Turn camera '%s' %s", self._name, mode)
self._data.set_state(
home_id=self._home_id,
camera_id=self._id,
floodlight=mode,
)
|
import numpy as np
from hypertools.tools import text2mat
from sklearn.decomposition import LatentDirichletAllocation
data = [['i like cats alot', 'cats r pretty cool', 'cats are better than dogs'],
['dogs rule the haus', 'dogs are my jam', 'dogs are a mans best friend']]
def test_transform_text():
assert isinstance(text2mat(data)[0], np.ndarray)
def test_count_LDA():
assert isinstance(text2mat(data, vectorizer='CountVectorizer',
semantic='LatentDirichletAllocation', corpus=data)[0], np.ndarray)
def test_tfidf_LDA():
assert isinstance(text2mat(data, vectorizer='TfidfVectorizer',
semantic='LatentDirichletAllocation', corpus=data)[0], np.ndarray)
def test_count_NMF():
assert isinstance(text2mat(data, vectorizer='CountVectorizer', semantic='NMF', corpus=data)[0], np.ndarray)
def test_tfidf_NMF():
assert isinstance(text2mat(data, vectorizer='TfidfVectorizer', semantic='NMF', corpus=data)[0], np.ndarray)
def test_transform_no_text_model():
assert isinstance(text2mat(data, semantic=None, corpus=data)[0], np.ndarray)
def test_text_model_params():
assert isinstance(text2mat(data, semantic={
'model' : 'LatentDirichletAllocation',
'params' : {
'learning_method' : 'batch'
}}
, corpus=data)[0], np.ndarray)
def test_vectorizer_params():
assert text2mat(data, vectorizer={
'model' : 'CountVectorizer',
'params': {
'max_features' : 2
}}, corpus=data)[0].shape[1]==20
def test_LDA_class():
assert text2mat(data, semantic=LatentDirichletAllocation, corpus=data)[0].shape[1]==10
def test_LDA_class_instance():
user_model = LatentDirichletAllocation(n_components=15)
assert text2mat(data, semantic=user_model, corpus=data)[0].shape[1]==15
def test_corpus():
assert text2mat(data, corpus=data)[0].shape[1]==20
|
from mock import patch, Mock, call, MagicMock
from arctic.scripts.utils import do_db_auth
def test_do_db_auth():
# Create the user against the current mongo database
admin_creds = Mock()
user_creds = Mock()
connection = MagicMock()
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', autospec=True, side_effect=[admin_creds, user_creds]) as get_auth:
assert do_db_auth('hostname', connection, 'arctic_user')
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
connection.admin.authenticate.assert_called_once_with(admin_creds.user,
admin_creds.password)
# Must also ensure that we auth against the user's db too ; the user
# may well have read-only access to the admin database, but not to their user_db!
connection.__getitem__.assert_called_once_with('arctic_user')
connection.__getitem__.return_value.authenticate.assert_called_once_with(user_creds.user, user_creds.password)
assert logger.error.call_count == 0
def test_do_db_auth_no_admin():
user_creds = Mock()
connection = MagicMock()
# Create the user against the current mongo database
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', side_effect=[None, user_creds],
autospec=True) as get_auth:
connection.admin.authenticate.return_value = False
assert do_db_auth('hostname', connection, 'arctic_user')
assert logger.call_count == 0
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
connection['arctic_user'].authenticate.assert_called_once_with(user_creds.user, user_creds.password)
def test_do_db_auth_no_user_creds():
user_creds = Mock()
connection = MagicMock()
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', side_effect=[None, user_creds],
autospec=True) as get_auth:
connection['arctic_user'].authenticate.return_value = False
assert not do_db_auth('hostname', connection, 'arctic_user')
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
logger.error.assert_called_once_with("Failed to authenticate to db 'arctic_user' on 'hostname',"
" using user credentials")
def test_do_db_auth_no_admin_user_creds_fails():
connection = MagicMock()
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', side_effect=[None, None],
autospec=True) as get_auth:
connection.admin.authenticate.return_value = False
assert not do_db_auth('hostname', connection, 'arctic_user')
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
logger.error.assert_called_once_with("You need credentials for db 'arctic_user' on 'hostname',"
" or admin credentials")
def test_do_db_auth_admin_user_creds_fails():
connection = MagicMock()
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', side_effect=[Mock(), None],
autospec=True) as get_auth:
connection.admin.authenticate.return_value = False
assert not do_db_auth('hostname', connection, 'arctic_user')
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
logger.error.assert_called_once_with("Failed to authenticate to '%s' as Admin. Giving up." % ('hostname'))
def test_do_db_auth_role():
# Create the user against the current mongo database
admin_creds = Mock()
user_creds = Mock()
connection = MagicMock()
with patch('arctic.scripts.utils.logger', autospec=True) as logger, \
patch('arctic.scripts.utils.get_auth', autospec=True, side_effect=[admin_creds, user_creds]) as get_auth:
assert do_db_auth('hostname', connection, 'arctic_user')
assert get_auth.call_args_list == [call('hostname', 'admin', 'admin'),
call('hostname', 'arctic', 'arctic_user')]
connection.admin.authenticate.assert_called_once_with(admin_creds.user,
admin_creds.password)
        # Must also ensure that we auth against the user's db too; the user
        # may well have read-only access to the admin database, but not to their user_db!
connection.__getitem__.assert_called_once_with('arctic_user')
connection.__getitem__.return_value.authenticate.assert_called_once_with(user_creds.user, user_creds.password)
assert logger.error.call_count == 0
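# A minimal sketch (not arctic's actual implementation) of the control flow
# the tests above encode for do_db_auth: try admin credentials first, fall
# back to per-database credentials, and report failures via logger.error.
from arctic.scripts.utils import get_auth, logger
def _do_db_auth_sketch(host, connection, db_name):
    admin_creds = get_auth(host, 'admin', 'admin')
    user_creds = get_auth(host, 'arctic', db_name)
    if admin_creds is not None:
        if not connection.admin.authenticate(admin_creds.user, admin_creds.password):
            logger.error("Failed to authenticate to '%s' as Admin. Giving up." % host)
            return False
        if user_creds is not None:
            # Auth against the user's db as well; admin access may be read-only.
            connection[db_name].authenticate(user_creds.user, user_creds.password)
        return True
    if user_creds is None:
        logger.error("You need credentials for db '%s' on '%s', or admin credentials" % (db_name, host))
        return False
    if not connection[db_name].authenticate(user_creds.user, user_creds.password):
        logger.error("Failed to authenticate to db '%s' on '%s', using user credentials" % (db_name, host))
        return False
    return True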
|
import typing
import keras
from keras.models import Model
from matchzoo.engine.base_model import BaseModel
from matchzoo.engine.param import Param
from matchzoo.engine.param_table import ParamTable
from matchzoo import preprocessors
from matchzoo.utils import TensorType
class CDSSM(BaseModel):
"""
CDSSM Model implementation.
Learning Semantic Representations Using Convolutional Neural Networks
for Web Search. (2014a)
A Latent Semantic Model with Convolutional-Pooling Structure for
Information Retrieval. (2014b)
Examples:
>>> model = CDSSM()
>>> model.params['optimizer'] = 'adam'
>>> model.params['filters'] = 32
>>> model.params['kernel_size'] = 3
>>> model.params['conv_activation_func'] = 'relu'
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
        # set :attr:`with_multi_layer_perceptron` to True to append the
        # configurable multi-layer perceptron on top of the convolution output
params = super().get_default_params(with_multi_layer_perceptron=True)
params.add(Param(name='filters', value=32,
desc="Number of filters in the 1D convolution "
"layer."))
        params.add(Param(name='kernel_size', value=3,
                         desc="Kernel size of the 1D convolution "
                              "layer."))
params.add(Param(name='strides', value=1,
desc="Strides in the 1D convolution layer."))
        params.add(Param(name='padding', value='same',
                         desc="The padding mode in the convolution "
                              "layer. It should be one of `same`, "
                              "`valid`, and `causal`."))
params.add(Param(name='conv_activation_func', value='relu',
desc="Activation function in the convolution"
" layer."))
params.add(Param(name='w_initializer', value='glorot_normal'))
params.add(Param(name='b_initializer', value='zeros'))
params.add(Param(name='dropout_rate', value=0.3,
desc="The dropout rate."))
return params
def _create_base_network(self) -> typing.Callable:
"""
        Apply convolution and max-pooling to each letter-ngram.
        The input shape is `fixed_text_length` * `number of letter-ngrams`;
        as described in the paper, `n` is 3 and the number of letter-trigrams
        is about 30,000 according to their observation.
:return: Wrapped Keras `Layer` as CDSSM network, tensor in tensor out.
"""
def _wrapper(x: TensorType):
            # Apply a 1D convolution to each word n-gram (letter-trigram
            # representation). Input shape: (batch_size, num_tri_letters,
            # 90000), i.e. a sequence of num_tri_letters 90000-d vectors.
x = keras.layers.Conv1D(
filters=self._params['filters'],
kernel_size=self._params['kernel_size'],
strides=self._params['strides'],
padding=self._params['padding'],
activation=self._params['conv_activation_func'],
kernel_initializer=self._params['w_initializer'],
bias_initializer=self._params['b_initializer'])(x)
            # Apply max pooling by taking the max of each dimension across
            # all word-trigram features.
x = keras.layers.Dropout(self._params['dropout_rate'])(x)
x = keras.layers.GlobalMaxPool1D()(x)
            # Apply a non-linear transformation using the multi-layer perceptron layer.
x = self._make_multi_layer_perceptron_layer()(x)
return x
return _wrapper
def build(self):
"""
Build model structure.
        CDSSM uses a Siamese architecture.
"""
base_network = self._create_base_network()
# Left input and right input.
input_left, input_right = self._make_inputs()
# Process left & right input.
x = [base_network(input_left),
base_network(input_right)]
# Dot product with cosine similarity.
x = keras.layers.Dot(axes=[1, 1], normalize=True)(x)
x_out = self._make_output_layer()(x)
self._backend = Model(inputs=[input_left, input_right],
outputs=x_out)
@classmethod
def get_default_preprocessor(cls):
""":return: Default preprocessor."""
return preprocessors.CDSSMPreprocessor()
def guess_and_fill_missing_params(self, verbose: int = 1):
"""
Guess and fill missing parameters in :attr:`params`.
Use this method to automatically fill-in hyper parameters.
        This involves some guessing, so the parameters it fills in could be
        wrong. For example, the default task is `Ranking`; if we do not set
        it to `Classification` manually for data packs prepared for
        classification, then the shape of the model output and the data will
        mismatch.
:param verbose: Verbosity.
"""
self._params.get('input_shapes').set_default([(10, 30),
(10, 30)], verbose)
super().guess_and_fill_missing_params(verbose)
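# Usage sketch: the guessed default task is ranking, so classification data
# needs the task set explicitly before building. `matchzoo.tasks` here is an
# assumption based on how other matchzoo models are configured.
# >>> import matchzoo
# >>> model = CDSSM()
# >>> model.params['task'] = matchzoo.tasks.Classification(num_classes=2)
# >>> model.guess_and_fill_missing_params(verbose=0)
# >>> model.build()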
|
from stash.tests.stashtest import StashTestCase
class LibDistTests(StashTestCase):
"""
Tests for 'libdist'
"""
def test_libdist_is_loaded(self):
"""
Test that 'libdist' is loaded.
"""
loaded_libs = [an for an in dir(self.stash) if an.startswith("lib")]
self.assertIn("libdist", loaded_libs)
def test_clipboard_api_available(self):
"""
        Test that the clipboard api is provided by libdist.
"""
defs = dir(self.stash.libdist)
self.assertIn("clipboard_get", defs)
self.assertIn("clipboard_set", defs)
def test_pip_definitions_available(self):
"""
Test that the libdist provides the definitions required by 'pip'.
"""
defs = dir(self.stash.libdist)
required = ["SITE_PACKAGES_FOLDER", "SITE_PACKAGES_FOLDER_6", "BUNDLED_MODULES"]
for an in required:
self.assertIn(an, defs)
|
import datetime
from homeassistant.components.manual.alarm_control_panel import ManualAlarm
from homeassistant.const import (
CONF_ARMING_TIME,
CONF_DELAY_TIME,
CONF_TRIGGER_TIME,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Demo alarm control panel platform."""
async_add_entities(
[
ManualAlarm(
hass,
"Alarm",
"1234",
None,
True,
False,
{
STATE_ALARM_ARMED_AWAY: {
CONF_ARMING_TIME: datetime.timedelta(seconds=5),
CONF_DELAY_TIME: datetime.timedelta(seconds=0),
CONF_TRIGGER_TIME: datetime.timedelta(seconds=10),
},
STATE_ALARM_ARMED_HOME: {
CONF_ARMING_TIME: datetime.timedelta(seconds=5),
CONF_DELAY_TIME: datetime.timedelta(seconds=0),
CONF_TRIGGER_TIME: datetime.timedelta(seconds=10),
},
STATE_ALARM_ARMED_NIGHT: {
CONF_ARMING_TIME: datetime.timedelta(seconds=5),
CONF_DELAY_TIME: datetime.timedelta(seconds=0),
CONF_TRIGGER_TIME: datetime.timedelta(seconds=10),
},
STATE_ALARM_DISARMED: {
CONF_DELAY_TIME: datetime.timedelta(seconds=0),
CONF_TRIGGER_TIME: datetime.timedelta(seconds=10),
},
STATE_ALARM_ARMED_CUSTOM_BYPASS: {
CONF_ARMING_TIME: datetime.timedelta(seconds=5),
CONF_DELAY_TIME: datetime.timedelta(seconds=0),
CONF_TRIGGER_TIME: datetime.timedelta(seconds=10),
},
STATE_ALARM_TRIGGERED: {
CONF_ARMING_TIME: datetime.timedelta(seconds=5)
},
},
)
]
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Demo config entry."""
await async_setup_platform(hass, {}, async_add_entities)
|
import asyncio
from functools import wraps
from typing import Awaitable, Callable
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import Unauthorized
from . import const, messages
from .connection import ActiveConnection
# mypy: allow-untyped-calls, allow-untyped-defs
async def _handle_async_response(func, hass, connection, msg):
"""Create a response and handle exception."""
try:
await func(hass, connection, msg)
except Exception as err: # pylint: disable=broad-except
connection.async_handle_exception(msg, err)
def async_response(
func: Callable[[HomeAssistant, ActiveConnection, dict], Awaitable[None]]
) -> const.WebSocketCommandHandler:
"""Decorate an async function to handle WebSocket API messages."""
@callback
@wraps(func)
def schedule_handler(hass, connection, msg):
"""Schedule the handler."""
        # As the web server is now started before the start event,
        # we do not want to block for websocket responders
asyncio.create_task(_handle_async_response(func, hass, connection, msg))
return schedule_handler
def require_admin(func: const.WebSocketCommandHandler) -> const.WebSocketCommandHandler:
"""Websocket decorator to require user to be an admin."""
@wraps(func)
def with_admin(hass, connection, msg):
"""Check admin and call function."""
user = connection.user
if user is None or not user.is_admin:
raise Unauthorized()
func(hass, connection, msg)
return with_admin
def ws_require_user(
only_owner=False,
only_system_user=False,
allow_system_user=True,
only_active_user=True,
only_inactive_user=False,
):
"""Decorate function validating login user exist in current WS connection.
Will write out error message if not authenticated.
"""
def validator(func):
"""Decorate func."""
@wraps(func)
def check_current_user(hass, connection, msg):
"""Check current user."""
def output_error(message_id, message):
"""Output error message."""
connection.send_message(
messages.error_message(msg["id"], message_id, message)
)
if connection.user is None:
output_error("no_user", "Not authenticated as a user")
return
if only_owner and not connection.user.is_owner:
output_error("only_owner", "Only allowed as owner")
return
if only_system_user and not connection.user.system_generated:
output_error("only_system_user", "Only allowed as system user")
return
if not allow_system_user and connection.user.system_generated:
output_error("not_system_user", "Not allowed as system user")
return
if only_active_user and not connection.user.is_active:
output_error("only_active_user", "Only allowed as active user")
return
if only_inactive_user and connection.user.is_active:
output_error("only_inactive_user", "Not allowed as active user")
return
return func(hass, connection, msg)
return check_current_user
return validator
def websocket_command(
schema: dict,
) -> Callable[[const.WebSocketCommandHandler], const.WebSocketCommandHandler]:
"""Tag a function as a websocket command."""
command = schema["type"]
def decorate(func):
"""Decorate ws command function."""
# pylint: disable=protected-access
func._ws_schema = messages.BASE_COMMAND_MESSAGE_SCHEMA.extend(schema)
func._ws_command = command
return func
return decorate
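# Usage sketch (hypothetical "example/ping" command): the decorators compose
# bottom-up -- async_response schedules the coroutine off the event loop,
# require_admin guards it, and websocket_command tags it with its schema.
# `messages.result_message` is assumed to exist alongside the
# `messages.error_message` helper used above.
import voluptuous as vol
@websocket_command({vol.Required("type"): "example/ping"})
@require_admin
@async_response
async def websocket_ping(hass, connection, msg):
    """Respond to an example/ping command (illustrative only)."""
    connection.send_message(messages.result_message(msg["id"], "pong"))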
|
from binascii import hexlify
import logging
import voluptuous as vol
from xbee_helper.exceptions import ZigBeeException, ZigBeeTxFailure
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from . import DOMAIN, PLATFORM_SCHEMA, XBeeAnalogIn, XBeeAnalogInConfig, XBeeConfig
_LOGGER = logging.getLogger(__name__)
CONF_TYPE = "type"
CONF_MAX_VOLTS = "max_volts"
DEFAULT_VOLTS = 1.2
TYPES = ["analog", "temperature"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_TYPE): vol.In(TYPES),
vol.Optional(CONF_MAX_VOLTS, default=DEFAULT_VOLTS): vol.Coerce(float),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the XBee Zigbee platform.
Uses the 'type' config value to work out which type of Zigbee sensor we're
dealing with and instantiates the relevant classes to handle it.
"""
zigbee_device = hass.data[DOMAIN]
typ = config.get(CONF_TYPE)
try:
sensor_class, config_class = TYPE_CLASSES[typ]
except KeyError:
_LOGGER.exception("Unknown XBee Zigbee sensor type: %s", typ)
return
add_entities([sensor_class(config_class(config), zigbee_device)], True)
class XBeeTemperatureSensor(Entity):
"""Representation of XBee Pro temperature sensor."""
def __init__(self, config, device):
"""Initialize the sensor."""
self._config = config
self._device = device
self._temp = None
@property
def name(self):
"""Return the name of the sensor."""
return self._config.name
@property
def state(self):
"""Return the state of the sensor."""
return self._temp
@property
def unit_of_measurement(self):
"""Return the unit of measurement the value is expressed in."""
return TEMP_CELSIUS
def update(self):
"""Get the latest data."""
try:
self._temp = self._device.get_temperature(self._config.address)
except ZigBeeTxFailure:
_LOGGER.warning(
"Transmission failure when attempting to get sample from "
"Zigbee device at address: %s",
hexlify(self._config.address),
)
except ZigBeeException as exc:
_LOGGER.exception("Unable to get sample from Zigbee device: %s", exc)
# This must be below the classes to which it refers.
TYPE_CLASSES = {
"temperature": (XBeeTemperatureSensor, XBeeConfig),
"analog": (XBeeAnalogIn, XBeeAnalogInConfig),
}
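# Dispatch sketch: the same lookup setup_platform performs, in isolation.
_sensor_class, _config_class = TYPE_CLASSES["temperature"]
assert _sensor_class is XBeeTemperatureSensor and _config_class is XBeeConfig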
|
from pylatex.base_classes import Environment
from pylatex.package import Package
from pylatex import Document, Section
from pylatex.utils import NoEscape
class AllTT(Environment):
"""A class to wrap LaTeX's alltt environment."""
packages = [Package('alltt')]
escape = False
content_separator = "\n"
# Create a new document
doc = Document()
with doc.create(Section('Wrapping Latex Environments')):
    doc.append(NoEscape(
        r"""
        The following is a demonstration of wrapping an existing
        \LaTeX{} environment in a custom Python class.
        """))
# Put some data inside the AllTT environment
with doc.create(AllTT()):
verbatim = ("This is verbatim, alltt, text.\n\n\n"
"Setting \\underline{escape} to \\underline{False} "
"ensures that text in the environment is not\n"
"subject to escaping...\n\n\n"
"Setting \\underline{content_separator} "
"ensures that line endings are broken in\n"
"the latex just as they are in the input text.\n"
"alltt supports math: \\(x^2=10\\)")
doc.append(verbatim)
doc.append("This is back to normal text...")
# Generate pdf
doc.generate_pdf('environment_ex', clean_tex=False)
|
import json
from homeassistant.components.gios.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
STATIONS = [
{"id": 123, "stationName": "Test Name 1", "gegrLat": "99.99", "gegrLon": "88.88"},
{"id": 321, "stationName": "Test Name 2", "gegrLat": "77.77", "gegrLon": "66.66"},
]
async def init_integration(hass, incomplete_data=False) -> MockConfigEntry:
"""Set up the GIOS integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Home",
unique_id=123,
data={"station_id": 123, "name": "Home"},
)
indexes = json.loads(load_fixture("gios/indexes.json"))
station = json.loads(load_fixture("gios/station.json"))
sensors = json.loads(load_fixture("gios/sensors.json"))
if incomplete_data:
indexes["stIndexLevel"]["indexLevelName"] = "foo"
sensors["PM10"]["values"][0]["value"] = None
sensors["PM10"]["values"][1]["value"] = None
with patch(
"homeassistant.components.gios.Gios._get_stations", return_value=STATIONS
), patch(
"homeassistant.components.gios.Gios._get_station",
return_value=station,
), patch(
"homeassistant.components.gios.Gios._get_all_sensors",
return_value=sensors,
), patch(
"homeassistant.components.gios.Gios._get_indexes", return_value=indexes
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
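# Usage sketch (hypothetical test): the helper returns the config entry, so a
# test can exercise the incomplete-data branch and inspect the stored data.
async def _example_incomplete_data_test(hass):
    entry = await init_integration(hass, incomplete_data=True)
    assert entry.data["station_id"] == 123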
|
from homeassistant.components.atag import CLIMATE, DOMAIN
from homeassistant.components.climate import (
ATTR_HVAC_ACTION,
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
HVAC_MODE_HEAT,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_TEMPERATURE,
)
from homeassistant.components.climate.const import CURRENT_HVAC_HEAT, PRESET_AWAY
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.async_mock import PropertyMock, patch
from tests.components.atag import UID, init_integration
from tests.test_util.aiohttp import AiohttpClientMocker
CLIMATE_ID = f"{CLIMATE}.{DOMAIN}"
async def test_climate(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the creation and values of Atag climate device."""
with patch("pyatag.entities.Climate.status"):
entry = await init_integration(hass, aioclient_mock)
registry = await hass.helpers.entity_registry.async_get_registry()
assert registry.async_is_registered(CLIMATE_ID)
entry = registry.async_get(CLIMATE_ID)
assert entry.unique_id == f"{UID}-{CLIMATE}"
assert (
hass.states.get(CLIMATE_ID).attributes[ATTR_HVAC_ACTION]
== CURRENT_HVAC_HEAT
)
async def test_setting_climate(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test setting the climate device."""
await init_integration(hass, aioclient_mock)
with patch("pyatag.entities.Climate.set_temp") as mock_set_temp:
await hass.services.async_call(
CLIMATE,
SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_TEMPERATURE: 15},
blocking=True,
)
await hass.async_block_till_done()
mock_set_temp.assert_called_once_with(15)
with patch("pyatag.entities.Climate.set_preset_mode") as mock_set_preset:
await hass.services.async_call(
CLIMATE,
SERVICE_SET_PRESET_MODE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_PRESET_MODE: PRESET_AWAY},
blocking=True,
)
await hass.async_block_till_done()
mock_set_preset.assert_called_once_with(PRESET_AWAY)
with patch("pyatag.entities.Climate.set_hvac_mode") as mock_set_hvac:
await hass.services.async_call(
CLIMATE,
SERVICE_SET_HVAC_MODE,
{ATTR_ENTITY_ID: CLIMATE_ID, ATTR_HVAC_MODE: HVAC_MODE_HEAT},
blocking=True,
)
await hass.async_block_till_done()
mock_set_hvac.assert_called_once_with(HVAC_MODE_HEAT)
async def test_incorrect_modes(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test incorrect values are handled correctly."""
with patch(
"pyatag.entities.Climate.hvac_mode",
new_callable=PropertyMock(return_value="bug"),
):
await init_integration(hass, aioclient_mock)
assert hass.states.get(CLIMATE_ID).state == STATE_UNKNOWN
async def test_update_service(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the updater service is called."""
await init_integration(hass, aioclient_mock)
await async_setup_component(hass, HA_DOMAIN, {})
with patch("pyatag.AtagOne.update") as updater:
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: CLIMATE_ID},
blocking=True,
)
await hass.async_block_till_done()
updater.assert_called_once()
|
import json
from aiohttp.hdrs import AUTHORIZATION
import homeassistant.components.html5.notify as html5
from homeassistant.const import HTTP_INTERNAL_SERVER_ERROR
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, mock_open, patch
CONFIG_FILE = "file.conf"
VAPID_CONF = {
"vapid_pub_key": "BJMA2gDZEkHaXRhf1fhY_"
+ "QbKbhVIHlSJXI0bFyo0eJXnUPOjdgycCAbj-2bMKMKNKs"
+ "_rM8JoSnyKGCXAY2dbONI",
"vapid_prv_key": "ZwPgwKpESGuGLMZYU39vKgrekrWzCijo-LsBM3CZ9-c",
"vapid_email": "[email protected]",
}
SUBSCRIPTION_1 = {
"browser": "chrome",
"subscription": {
"endpoint": "https://googleapis.com",
"keys": {"auth": "auth", "p256dh": "p256dh"},
},
}
SUBSCRIPTION_2 = {
"browser": "firefox",
"subscription": {
"endpoint": "https://example.com",
"keys": {"auth": "bla", "p256dh": "bla"},
},
}
SUBSCRIPTION_3 = {
"browser": "chrome",
"subscription": {
"endpoint": "https://example.com/not_exist",
"keys": {"auth": "bla", "p256dh": "bla"},
},
}
SUBSCRIPTION_4 = {
"browser": "chrome",
"subscription": {
"endpoint": "https://googleapis.com",
"expirationTime": None,
"keys": {"auth": "auth", "p256dh": "p256dh"},
},
}
SUBSCRIPTION_5 = {
"browser": "chrome",
"subscription": {
"endpoint": "https://fcm.googleapis.com/fcm/send/LONG-RANDOM-KEY",
"expirationTime": None,
"keys": {"auth": "auth", "p256dh": "p256dh"},
},
}
REGISTER_URL = "/api/notify.html5"
PUBLISH_URL = "/api/notify.html5/callback"
async def mock_client(hass, hass_client, registrations=None):
"""Create a test client for HTML5 views."""
if registrations is None:
registrations = {}
with patch(
"homeassistant.components.html5.notify._load_config", return_value=registrations
):
await async_setup_component(hass, "notify", {"notify": {"platform": "html5"}})
await hass.async_block_till_done()
return await hass_client()
class TestHtml5Notify:
"""Tests for HTML5 notify platform."""
def test_get_service_with_no_json(self):
"""Test empty json file."""
hass = MagicMock()
m = mock_open()
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, {})
assert service is not None
@patch("homeassistant.components.html5.notify.WebPusher")
def test_dismissing_message(self, mock_wp):
"""Test dismissing message."""
hass = MagicMock()
data = {"device": SUBSCRIPTION_1}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, {"gcm_sender_id": "100"})
assert service is not None
service.dismiss(target=["device", "non_existing"], data={"tag": "test"})
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Call to send
payload = json.loads(mock_wp.mock_calls[1][1][0])
assert payload["dismiss"] is True
assert payload["tag"] == "test"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_sending_message(self, mock_wp):
"""Test sending message."""
hass = MagicMock()
data = {"device": SUBSCRIPTION_1}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, {"gcm_sender_id": "100"})
assert service is not None
service.send_message(
"Hello", target=["device", "non_existing"], data={"icon": "beer.png"}
)
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Call to send
payload = json.loads(mock_wp.mock_calls[1][1][0])
assert payload["body"] == "Hello"
assert payload["icon"] == "beer.png"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_gcm_key_include(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
data = {"chrome": SUBSCRIPTION_1, "firefox": SUBSCRIPTION_2}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(
hass, {"gcm_sender_id": "100", "gcm_api_key": "Y6i0JdZ0mj9LOaSI"}
)
assert service is not None
service.send_message("Hello", target=["chrome", "firefox"])
assert len(mock_wp.mock_calls) == 6
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
assert mock_wp.mock_calls[3][1][0] == SUBSCRIPTION_2["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
assert mock_wp.mock_calls[5][0] == "().send().status_code.__eq__"
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["gcm_key"] is not None
assert mock_wp.mock_calls[4][2]["gcm_key"] is None
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_key_include(self, mock_wp):
"""Test if the FCM header is included."""
hass = MagicMock()
data = {"chrome": SUBSCRIPTION_5}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, VAPID_CONF)
assert service is not None
service.send_message("Hello", target=["chrome"])
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["Authorization"] is not None
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_send_with_unknown_priority(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
data = {"chrome": SUBSCRIPTION_5}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, VAPID_CONF)
assert service is not None
service.send_message("Hello", target=["chrome"], priority="undefined")
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_no_targets(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
data = {"chrome": SUBSCRIPTION_5}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, VAPID_CONF)
assert service is not None
service.send_message("Hello")
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
@patch("homeassistant.components.html5.notify.WebPusher")
def test_fcm_additional_data(self, mock_wp):
"""Test if the gcm_key is only included for GCM endpoints."""
hass = MagicMock()
data = {"chrome": SUBSCRIPTION_5}
m = mock_open(read_data=json.dumps(data))
with patch("homeassistant.util.json.open", m, create=True):
service = html5.get_service(hass, VAPID_CONF)
assert service is not None
service.send_message("Hello", data={"mykey": "myvalue"})
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Get the keys passed to the WebPusher's send method
assert mock_wp.mock_calls[1][2]["headers"]["priority"] == "normal"
def test_create_vapid_withoutvapid():
"""Test creating empty vapid."""
resp = html5.create_vapid_headers(
vapid_email=None, vapid_private_key=None, subscription_info=None
)
assert resp is None
async def test_registering_new_device_view(hass, hass_client):
"""Test that the HTML view works."""
client = await mock_client(hass, hass_client)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_1))
assert resp.status == 200
assert len(mock_save.mock_calls) == 1
assert mock_save.mock_calls[0][1][1] == {"unnamed device": SUBSCRIPTION_1}
async def test_registering_new_device_view_with_name(hass, hass_client):
"""Test that the HTML view works with name attribute."""
client = await mock_client(hass, hass_client)
SUB_WITH_NAME = SUBSCRIPTION_1.copy()
SUB_WITH_NAME["name"] = "test device"
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
resp = await client.post(REGISTER_URL, data=json.dumps(SUB_WITH_NAME))
assert resp.status == 200
assert len(mock_save.mock_calls) == 1
assert mock_save.mock_calls[0][1][1] == {"test device": SUBSCRIPTION_1}
async def test_registering_new_device_expiration_view(hass, hass_client):
"""Test that the HTML view works."""
client = await mock_client(hass, hass_client)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_4))
assert resp.status == 200
assert mock_save.mock_calls[0][1][1] == {"unnamed device": SUBSCRIPTION_4}
async def test_registering_new_device_fails_view(hass, hass_client):
"""Test subs. are not altered when registering a new device fails."""
registrations = {}
client = await mock_client(hass, hass_client, registrations)
with patch(
"homeassistant.components.html5.notify.save_json",
side_effect=HomeAssistantError(),
):
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_4))
assert resp.status == HTTP_INTERNAL_SERVER_ERROR
assert registrations == {}
async def test_registering_existing_device_view(hass, hass_client):
"""Test subscription is updated when registering existing device."""
registrations = {}
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_1))
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_4))
assert resp.status == 200
assert mock_save.mock_calls[0][1][1] == {"unnamed device": SUBSCRIPTION_4}
assert registrations == {"unnamed device": SUBSCRIPTION_4}
async def test_registering_existing_device_view_with_name(hass, hass_client):
"""Test subscription is updated when reg'ing existing device with name."""
registrations = {}
client = await mock_client(hass, hass_client, registrations)
SUB_WITH_NAME = SUBSCRIPTION_1.copy()
SUB_WITH_NAME["name"] = "test device"
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
await client.post(REGISTER_URL, data=json.dumps(SUB_WITH_NAME))
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_4))
assert resp.status == 200
assert mock_save.mock_calls[0][1][1] == {"test device": SUBSCRIPTION_4}
assert registrations == {"test device": SUBSCRIPTION_4}
async def test_registering_existing_device_fails_view(hass, hass_client):
"""Test sub. is not updated when registering existing device fails."""
registrations = {}
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_1))
mock_save.side_effect = HomeAssistantError
resp = await client.post(REGISTER_URL, data=json.dumps(SUBSCRIPTION_4))
assert resp.status == HTTP_INTERNAL_SERVER_ERROR
assert registrations == {"unnamed device": SUBSCRIPTION_1}
async def test_registering_new_device_validation(hass, hass_client):
"""Test various errors when registering a new device."""
client = await mock_client(hass, hass_client)
resp = await client.post(
REGISTER_URL,
data=json.dumps({"browser": "invalid browser", "subscription": "sub info"}),
)
assert resp.status == 400
resp = await client.post(REGISTER_URL, data=json.dumps({"browser": "chrome"}))
assert resp.status == 400
with patch("homeassistant.components.html5.notify.save_json", return_value=False):
resp = await client.post(
REGISTER_URL,
data=json.dumps({"browser": "chrome", "subscription": "sub info"}),
)
assert resp.status == 400
async def test_unregistering_device_view(hass, hass_client):
"""Test that the HTML unregister view works."""
registrations = {"some device": SUBSCRIPTION_1, "other device": SUBSCRIPTION_2}
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
resp = await client.delete(
REGISTER_URL,
data=json.dumps({"subscription": SUBSCRIPTION_1["subscription"]}),
)
assert resp.status == 200
assert len(mock_save.mock_calls) == 1
assert registrations == {"other device": SUBSCRIPTION_2}
async def test_unregister_device_view_handle_unknown_subscription(hass, hass_client):
"""Test that the HTML unregister view handles unknown subscriptions."""
registrations = {}
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.save_json") as mock_save:
resp = await client.delete(
REGISTER_URL,
data=json.dumps({"subscription": SUBSCRIPTION_3["subscription"]}),
)
assert resp.status == 200, resp.response
assert registrations == {}
assert len(mock_save.mock_calls) == 0
async def test_unregistering_device_view_handles_save_error(hass, hass_client):
"""Test that the HTML unregister view handles save errors."""
registrations = {"some device": SUBSCRIPTION_1, "other device": SUBSCRIPTION_2}
client = await mock_client(hass, hass_client, registrations)
with patch(
"homeassistant.components.html5.notify.save_json",
side_effect=HomeAssistantError(),
):
resp = await client.delete(
REGISTER_URL,
data=json.dumps({"subscription": SUBSCRIPTION_1["subscription"]}),
)
assert resp.status == HTTP_INTERNAL_SERVER_ERROR, resp.response
assert registrations == {
"some device": SUBSCRIPTION_1,
"other device": SUBSCRIPTION_2,
}
async def test_callback_view_no_jwt(hass, hass_client):
"""Test that the notification callback view works without JWT."""
client = await mock_client(hass, hass_client)
resp = await client.post(
PUBLISH_URL,
data=json.dumps(
{"type": "push", "tag": "3bc28d69-0921-41f1-ac6a-7a627ba0aa72"}
),
)
assert resp.status == 401
async def test_callback_view_with_jwt(hass, hass_client):
"""Test that the notification callback view works with JWT."""
registrations = {"device": SUBSCRIPTION_1}
client = await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.WebPusher") as mock_wp:
await hass.services.async_call(
"notify",
"notify",
{"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}},
blocking=True,
)
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_1["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
# Call to send
push_payload = json.loads(mock_wp.mock_calls[1][1][0])
assert push_payload["body"] == "Hello"
assert push_payload["icon"] == "beer.png"
bearer_token = "Bearer {}".format(push_payload["data"]["jwt"])
resp = await client.post(
PUBLISH_URL, json={"type": "push"}, headers={AUTHORIZATION: bearer_token}
)
assert resp.status == 200
body = await resp.json()
assert body == {"event": "push", "status": "ok"}
async def test_send_fcm_without_targets(hass, hass_client):
"""Test that the notification is send with FCM without targets."""
registrations = {"device": SUBSCRIPTION_5}
await mock_client(hass, hass_client, registrations)
with patch("homeassistant.components.html5.notify.WebPusher") as mock_wp:
await hass.services.async_call(
"notify",
"notify",
{"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}},
blocking=True,
)
assert len(mock_wp.mock_calls) == 3
# WebPusher constructor
assert mock_wp.mock_calls[0][1][0] == SUBSCRIPTION_5["subscription"]
# Third mock_call checks the status_code of the response.
assert mock_wp.mock_calls[2][0] == "().send().status_code.__eq__"
|
import pytest
from unittest.mock import Mock, patch
from kombu import Connection, Exchange, Queue
from kombu import compat
from t.mocks import Transport, Channel
class test_misc:
def test_iterconsume(self):
class MyConnection:
drained = 0
def drain_events(self, *args, **kwargs):
self.drained += 1
return self.drained
class Consumer:
active = False
def consume(self, *args, **kwargs):
self.active = True
conn = MyConnection()
consumer = Consumer()
it = compat._iterconsume(conn, consumer)
assert next(it) == 1
assert consumer.active
it2 = compat._iterconsume(conn, consumer, limit=10)
        assert list(it2) == [2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
def test_Queue_from_dict(self):
defs = {'binding_key': 'foo.#',
'exchange': 'fooex',
'exchange_type': 'topic',
'durable': True,
'auto_delete': False}
q1 = Queue.from_dict('foo', **dict(defs))
assert q1.name == 'foo'
assert q1.routing_key == 'foo.#'
assert q1.exchange.name == 'fooex'
assert q1.exchange.type == 'topic'
assert q1.durable
assert q1.exchange.durable
assert not q1.auto_delete
assert not q1.exchange.auto_delete
q2 = Queue.from_dict('foo', **dict(defs,
exchange_durable=False))
assert q2.durable
assert not q2.exchange.durable
q3 = Queue.from_dict('foo', **dict(defs,
exchange_auto_delete=True))
assert not q3.auto_delete
assert q3.exchange.auto_delete
q4 = Queue.from_dict('foo', **dict(defs,
queue_durable=False))
assert not q4.durable
assert q4.exchange.durable
q5 = Queue.from_dict('foo', **dict(defs,
queue_auto_delete=True))
assert q5.auto_delete
assert not q5.exchange.auto_delete
assert (Queue.from_dict('foo', **dict(defs)) ==
Queue.from_dict('foo', **dict(defs)))
class test_Publisher:
def setup(self):
self.connection = Connection(transport=Transport)
def test_constructor(self):
pub = compat.Publisher(self.connection,
exchange='test_Publisher_constructor',
routing_key='rkey')
assert isinstance(pub.backend, Channel)
assert pub.exchange.name == 'test_Publisher_constructor'
assert pub.exchange.durable
assert not pub.exchange.auto_delete
assert pub.exchange.type == 'direct'
pub2 = compat.Publisher(self.connection,
exchange='test_Publisher_constructor2',
routing_key='rkey',
auto_delete=True,
durable=False)
assert pub2.exchange.auto_delete
assert not pub2.exchange.durable
explicit = Exchange('test_Publisher_constructor_explicit',
type='topic')
pub3 = compat.Publisher(self.connection,
exchange=explicit)
assert pub3.exchange == explicit
compat.Publisher(self.connection,
exchange='test_Publisher_constructor3',
channel=self.connection.default_channel)
def test_send(self):
pub = compat.Publisher(self.connection,
exchange='test_Publisher_send',
routing_key='rkey')
pub.send({'foo': 'bar'})
assert 'basic_publish' in pub.backend
pub.close()
def test__enter__exit__(self):
pub = compat.Publisher(self.connection,
exchange='test_Publisher_send',
routing_key='rkey')
x = pub.__enter__()
assert x is pub
x.__exit__()
assert pub._closed
class test_Consumer:
def setup(self):
self.connection = Connection(transport=Transport)
@patch('kombu.compat._iterconsume')
def test_iterconsume_calls__iterconsume(self, it, n='test_iterconsume'):
c = compat.Consumer(self.connection, queue=n, exchange=n)
c.iterconsume(limit=10, no_ack=True)
it.assert_called_with(c.connection, c, True, 10)
def test_constructor(self, n='test_Consumer_constructor'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
assert isinstance(c.backend, Channel)
q = c.queues[0]
assert q.durable
assert q.exchange.durable
assert not q.auto_delete
assert not q.exchange.auto_delete
assert q.name == n
assert q.exchange.name == n
c2 = compat.Consumer(self.connection, queue=n + '2',
exchange=n + '2',
routing_key='rkey', durable=False,
auto_delete=True, exclusive=True)
q2 = c2.queues[0]
assert not q2.durable
assert not q2.exchange.durable
assert q2.auto_delete
assert q2.exchange.auto_delete
def test__enter__exit__(self, n='test__enter__exit__'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
x = c.__enter__()
assert x is c
x.__exit__()
assert c._closed
def test_revive(self, n='test_revive'):
c = compat.Consumer(self.connection, queue=n, exchange=n)
with self.connection.channel() as c2:
c.revive(c2)
assert c.backend is c2
def test__iter__(self, n='test__iter__'):
c = compat.Consumer(self.connection, queue=n, exchange=n)
c.iterqueue = Mock()
c.__iter__()
c.iterqueue.assert_called_with(infinite=True)
def test_iter(self, n='test_iterqueue'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
c.close()
def test_process_next(self, n='test_process_next'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
with pytest.raises(NotImplementedError):
c.process_next()
c.close()
def test_iterconsume(self, n='test_iterconsume'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
c.close()
def test_discard_all(self, n='test_discard_all'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
c.discard_all()
assert 'queue_purge' in c.backend
def test_fetch(self, n='test_fetch'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
assert c.fetch() is None
assert c.fetch(no_ack=True) is None
assert 'basic_get' in c.backend
callback_called = [False]
def receive(payload, message):
callback_called[0] = True
c.backend.to_deliver.append('42')
payload = c.fetch().payload
assert payload == '42'
c.backend.to_deliver.append('46')
c.register_callback(receive)
assert c.fetch(enable_callbacks=True).payload == '46'
assert callback_called[0]
def test_discard_all_filterfunc_not_supported(self, n='xjf21j21'):
c = compat.Consumer(self.connection, queue=n, exchange=n,
routing_key='rkey')
with pytest.raises(NotImplementedError):
c.discard_all(filterfunc=lambda x: x)
c.close()
def test_wait(self, n='test_wait'):
class C(compat.Consumer):
def iterconsume(self, limit=None):
yield from range(limit)
c = C(self.connection,
queue=n, exchange=n, routing_key='rkey')
assert c.wait(10) == list(range(10))
c.close()
def test_iterqueue(self, n='test_iterqueue'):
i = [0]
class C(compat.Consumer):
def fetch(self, limit=None):
z = i[0]
i[0] += 1
return z
c = C(self.connection,
queue=n, exchange=n, routing_key='rkey')
assert list(c.iterqueue(limit=10)) == list(range(10))
c.close()
class test_ConsumerSet:
def setup(self):
self.connection = Connection(transport=Transport)
def test_providing_channel(self):
chan = Mock(name='channel')
cs = compat.ConsumerSet(self.connection, channel=chan)
assert cs._provided_channel
assert cs.backend is chan
cs.cancel = Mock(name='cancel')
cs.close()
chan.close.assert_not_called()
@patch('kombu.compat._iterconsume')
def test_iterconsume(self, _iterconsume, n='test_iterconsume'):
c = compat.Consumer(self.connection, queue=n, exchange=n)
cs = compat.ConsumerSet(self.connection, consumers=[c])
cs.iterconsume(limit=10, no_ack=True)
_iterconsume.assert_called_with(c.connection, cs, True, 10)
def test_revive(self, n='test_revive'):
c = compat.Consumer(self.connection, queue=n, exchange=n)
cs = compat.ConsumerSet(self.connection, consumers=[c])
with self.connection.channel() as c2:
cs.revive(c2)
assert cs.backend is c2
def test_constructor(self, prefix='0daf8h21'):
dcon = {'%s.xyx' % prefix: {'exchange': '%s.xyx' % prefix,
'routing_key': 'xyx'},
'%s.xyz' % prefix: {'exchange': '%s.xyz' % prefix,
'routing_key': 'xyz'}}
consumers = [compat.Consumer(self.connection, queue=prefix + str(i),
exchange=prefix + str(i))
for i in range(3)]
c = compat.ConsumerSet(self.connection, consumers=consumers)
c2 = compat.ConsumerSet(self.connection, from_dict=dcon)
assert len(c.queues) == 3
assert len(c2.queues) == 2
c.add_consumer(compat.Consumer(self.connection,
queue=prefix + 'xaxxxa',
exchange=prefix + 'xaxxxa'))
assert len(c.queues) == 4
for cq in c.queues:
assert cq.channel is c.channel
c2.add_consumer_from_dict(
'%s.xxx' % prefix,
exchange='%s.xxx' % prefix,
routing_key='xxx',
)
assert len(c2.queues) == 3
for c2q in c2.queues:
assert c2q.channel is c2.channel
c.discard_all()
assert c.channel.called.count('queue_purge') == 4
c.consume()
c.close()
c2.close()
assert 'basic_cancel' in c.channel
assert 'close' in c.channel
assert 'close' in c2.channel
|
parameters = {
'KMeans': {'n_clusters': 5},
'MiniBatchKMeans': {'n_clusters': 5},
'SpectralClustering': {'n_clusters': 5,
'affinity': 'nearest_neighbors',
'n_neighbors': 10},
    'AgglomerativeClustering': {'n_clusters': 5, 'linkage': 'ward'},
'FeatureAgglomeration': {'n_clusters': 5},
'Birch': {'n_clusters': 5},
'HDBSCAN': {'min_samples': 5, 'min_cluster_size': 15},
'CountVectorizer': {},
'TfidfVectorizer': {},
'LatentDirichletAllocation': {'n_components': 20, 'learning_method': 'batch'},
'NMF': {'n_components': 20}
}
def default_params(model, update_dict=None):
"""
    Load and update default parameters for a model.
Parameters
----------
model : str
The name of a model
update_dict : dict
A dict to update default parameters
Returns
    -------
params : dict
A dictionary of parameters
"""
if model in parameters:
params = parameters[model].copy()
else:
params = None
if update_dict:
if params is None:
params = {}
params.update(update_dict)
return params
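# Example: defaults are copied before updating; unknown models fall back to
# just the update dict, or None when no update is given.
assert default_params('KMeans') == {'n_clusters': 5}
assert default_params('KMeans', {'n_clusters': 8}) == {'n_clusters': 8}
assert default_params('SomeOtherModel') is None
assert default_params('SomeOtherModel', {'alpha': 0.1}) == {'alpha': 0.1}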
|
import asyncio
from pyflick.authentication import AuthException
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.flick_electric.const import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"}
async def _flow_submit(hass):
return await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=CONF,
)
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token",
return_value="123456789abcdef",
), patch(
"homeassistant.components.flick_electric.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.flick_electric.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
CONF,
)
await hass.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "Flick Electric: test-username"
assert result2["data"] == CONF
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_duplicate_login(hass):
"""Test uniqueness of username."""
entry = MockConfigEntry(
domain=DOMAIN,
data=CONF,
title="Flick Electric: test-username",
unique_id="flick_electric_test-username",
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token",
return_value="123456789abcdef",
):
result = await _flow_submit(hass)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
with patch(
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token",
side_effect=AuthException,
):
result = await _flow_submit(hass)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
with patch(
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token",
side_effect=asyncio.TimeoutError,
):
result = await _flow_submit(hass)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_form_generic_exception(hass):
"""Test we handle cannot connect error."""
with patch(
"homeassistant.components.flick_electric.config_flow.SimpleFlickAuth.async_get_access_token",
side_effect=Exception,
):
result = await _flow_submit(hass)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "unknown"}
|
import unittest.mock
import pytest
from PyQt5.QtCore import QObject
from PyQt5.QtGui import QStandardItemModel
from qutebrowser.completion import completer
from qutebrowser.commands import command
from qutebrowser.api import cmdutils
@pytest.fixture(autouse=True)
def setup_cur_tab(tabbed_browser_stubs, fake_web_tab):
# Make sure completions can access the current tab
tabbed_browser_stubs[0].widget.tabs = [fake_web_tab()]
tabbed_browser_stubs[0].widget.current_index = 0
class FakeCompletionModel(QStandardItemModel):
"""Stub for a completion model."""
def __init__(self, kind, *pos_args, info, parent=None):
super().__init__(parent)
self.kind = kind
self.pos_args = list(pos_args)
self.info = info
class CompletionWidgetStub(QObject):
"""Stub for the CompletionView."""
def __init__(self, parent=None):
super().__init__(parent)
self.hide = unittest.mock.Mock()
self.show = unittest.mock.Mock()
self.set_pattern = unittest.mock.Mock()
self.model = unittest.mock.Mock()
self.set_model = unittest.mock.Mock()
self.enabled = unittest.mock.Mock()
@pytest.fixture
def completion_widget_stub():
return CompletionWidgetStub()
@pytest.fixture
def completer_obj(qtbot, status_command_stub, config_stub, monkeypatch, stubs,
completion_widget_stub):
"""Create the completer used for testing."""
monkeypatch.setattr(completer, 'QTimer', stubs.InstaTimer)
config_stub.val.completion.show = 'auto'
return completer.Completer(cmd=status_command_stub, win_id=0,
parent=completion_widget_stub)
@pytest.fixture(autouse=True)
def miscmodels_patch(mocker):
"""Patch the miscmodels module to provide fake completion functions.
Technically some of these are not part of miscmodels, but rolling them into
one module is easier and sufficient for mocking. The only one referenced
directly by Completer is miscmodels.command.
"""
m = mocker.patch('qutebrowser.completion.completer.miscmodels',
autospec=True)
def func(name):
return lambda *args, info: FakeCompletionModel(name, *args, info=info)
m.command = func('command')
m.helptopic = func('helptopic')
m.quickmark = func('quickmark')
m.bookmark = func('bookmark')
m.session = func('session')
m.buffer = func('buffer')
m.bind = func('bind')
m.url = func('url')
m.section = func('section')
m.option = func('option')
m.value = func('value')
return m
@pytest.fixture(autouse=True)
def cmdutils_patch(monkeypatch, stubs, miscmodels_patch):
"""Patch the cmdutils module to provide fake commands."""
@cmdutils.argument('section_', completion=miscmodels_patch.section)
@cmdutils.argument('option', completion=miscmodels_patch.option)
@cmdutils.argument('value', completion=miscmodels_patch.value)
def set_command(section_=None, option=None, value=None):
"""docstring."""
@cmdutils.argument('topic', completion=miscmodels_patch.helptopic)
def show_help(tab=False, bg=False, window=False, topic=None):
"""docstring."""
@cmdutils.argument('url', completion=miscmodels_patch.url)
@cmdutils.argument('count', value=cmdutils.Value.count)
def openurl(url=None, related=False, bg=False, tab=False, window=False,
count=None):
"""docstring."""
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
@cmdutils.argument('command', completion=miscmodels_patch.command)
def bind(key, win_id, command=None, *, mode='normal'):
"""docstring."""
def tab_give():
"""docstring."""
@cmdutils.argument('option', completion=miscmodels_patch.option)
@cmdutils.argument('values', completion=miscmodels_patch.value)
def config_cycle(option, *values):
"""For testing varargs."""
commands = {
'set': command.Command(name='set', handler=set_command),
'help': command.Command(name='help', handler=show_help),
'open': command.Command(name='open', handler=openurl, maxsplit=0),
'bind': command.Command(name='bind', handler=bind),
'tab-give': command.Command(name='tab-give', handler=tab_give),
'config-cycle': command.Command(name='config-cycle',
handler=config_cycle),
}
monkeypatch.setattr(completer.objects, 'commands', commands)
def _set_cmd_prompt(cmd, txt):
"""Set the command prompt's text and cursor position.
Args:
cmd: The command prompt object.
txt: The prompt text, using | as a placeholder for the cursor position.
"""
cmd.setText(txt.replace('|', ''))
cmd.setCursorPosition(txt.index('|'))
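# For example, _set_cmd_prompt(cmd, ':set fo|nts') sets the text to
# ':set fonts' and places the cursor between 'fo' and 'nts'.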
@pytest.mark.parametrize('txt, kind, pattern, pos_args', [
(':nope|', 'command', 'nope', []),
(':nope |', None, '', []),
(':set |', 'section', '', []),
(':set gen|', 'section', 'gen', []),
(':set general |', 'option', '', ['general']),
(':set what |', 'option', '', ['what']),
(':set general editor |', 'value', '', ['general', 'editor']),
(':set general editor gv|', 'value', 'gv', ['general', 'editor']),
(':set general editor "gvim -f"|', 'value', 'gvim -f',
['general', 'editor']),
(':set general editor "gvim |', 'value', 'gvim', ['general', 'editor']),
(':set general huh |', 'value', '', ['general', 'huh']),
(':help |', 'helptopic', '', []),
(':help |', 'helptopic', '', []),
(':open |', 'url', '', []),
(':bind |', None, '', []),
(':bind <c-x> |', 'command', '', ['<c-x>']),
(':bind <c-x> foo|', 'command', 'foo', ['<c-x>']),
(':bind <c-x>| foo', None, '<c-x>', []),
(':set| general ', 'command', 'set', []),
(':|set general ', 'command', 'set', []),
(':set gene|ral ignore-case', 'section', 'general', []),
(':|', 'command', '', []),
(': |', 'command', '', []),
('/|', None, '', []),
(':open -t|', None, '', []),
(':open --tab|', None, '', []),
(':open -t |', 'url', '', []),
(':open --tab |', 'url', '', []),
(':open | -t', 'url', '', []),
(':tab-give |', None, '', []),
(':bind --mode=caret <c-x> |', 'command', '', ['<c-x>']),
pytest.param(':bind --mode caret <c-x> |', 'command', '', [],
marks=pytest.mark.xfail(reason='issue #74')),
(':set -t -p |', 'section', '', []),
(':open -- |', None, '', []),
(':gibberish nonesense |', None, '', []),
('/:help|', None, '', []),
('::bind|', 'command', ':bind', []),
(':-w open |', None, '', []),
# varargs
(':config-cycle option |', 'value', '', ['option']),
(':config-cycle option one |', 'value', '', ['option', 'one']),
(':config-cycle option one two |', 'value', '', ['option', 'one', 'two']),
])
def test_update_completion(txt, kind, pattern, pos_args, status_command_stub,
completer_obj, completion_widget_stub, config_stub,
key_config_stub):
"""Test setting the completion widget's model based on command text."""
# this test uses | as a placeholder for the current cursor position
_set_cmd_prompt(status_command_stub, txt)
completer_obj.schedule_completion_update()
if kind is None:
assert not completion_widget_stub.set_pattern.called
else:
assert completion_widget_stub.set_model.call_count == 1
model = completion_widget_stub.set_model.call_args[0][0]
assert model.kind == kind
assert model.pos_args == pos_args
assert model.info.config == config_stub
assert model.info.keyconf == key_config_stub
completion_widget_stub.set_pattern.assert_called_once_with(pattern)
@pytest.mark.parametrize('txt1, txt2, regen', [
(':config-cycle |', ':config-cycle a|', False),
(':config-cycle abc|', ':config-cycle abc |', True),
(':config-cycle abc |', ':config-cycle abc d|', False),
(':config-cycle abc def|', ':config-cycle abc def |', True),
# open has maxsplit=0, so all args just set the pattern, not the model
(':open |', ':open a|', False),
(':open abc|', ':open abc |', False),
(':open abc |', ':open abc d|', False),
(':open abc def|', ':open abc def |', False),
])
def test_regen_completion(txt1, txt2, regen, status_command_stub,
completer_obj, completion_widget_stub, config_stub,
key_config_stub):
"""Test that the completion function is only called as needed."""
# set the initial state
_set_cmd_prompt(status_command_stub, txt1)
completer_obj.schedule_completion_update()
completion_widget_stub.set_model.reset_mock()
# "move" the cursor and check if the completion function was called
_set_cmd_prompt(status_command_stub, txt2)
completer_obj.schedule_completion_update()
assert completion_widget_stub.set_model.called == regen
@pytest.mark.parametrize('before, newtxt, after', [
(':|', 'set', ':set|'),
(':| ', 'set', ':set|'),
(': |', 'set', ':set|'),
(':|set', 'set', ':set|'),
(':|set ', 'set', ':set|'),
(':|se', 'set', ':set|'),
(':|se ', 'set', ':set|'),
(':s|e', 'set', ':set|'),
(':se|', 'set', ':set|'),
(':|se fonts', 'set', ':set| fonts'),
(':set |', 'fonts', ':set fonts|'),
(':set |', 'fonts', ':set fonts|'),
(':set --temp |', 'fonts', ':set --temp fonts|'),
(':set |fo', 'fonts', ':set fonts|'),
(':set f|o', 'fonts', ':set fonts|'),
(':set fo|', 'fonts', ':set fonts|'),
(':set fonts |', 'hints', ':set fonts hints|'),
(':set fonts |nt', 'hints', ':set fonts hints|'),
(':set fonts n|t', 'hints', ':set fonts hints|'),
(':set fonts nt|', 'hints', ':set fonts hints|'),
(':set | hints', 'fonts', ':set fonts| hints'),
(':set | hints', 'fonts', ':set fonts| hints'),
(':set |fo hints', 'fonts', ':set fonts| hints'),
(':set f|o hints', 'fonts', ':set fonts| hints'),
(':set fo| hints', 'fonts', ':set fonts| hints'),
(':set fonts hints |', 'Comic Sans', ":set fonts hints 'Comic Sans'|"),
(":set fonts hints 'Comic Sans'|", '12px Hack',
":set fonts hints '12px Hack'|"),
(":set fonts hints 'Comic| Sans'", '12px Hack',
":set fonts hints '12px Hack'|"),
# Make sure " is quoted properly
(':set url.start_pages \'["https://www.|example.com"]\'',
'["https://www.example.org"]',
':set url.start_pages \'["https://www.example.org"]\'|'),
# open has maxsplit=0, so treat the last two tokens as one and don't quote
(':open foo bar|', 'baz', ':open baz|'),
(':open foo| bar', 'baz', ':open baz|'),
])
def test_on_selection_changed(before, newtxt, after, completer_obj,
config_stub, status_command_stub,
completion_widget_stub):
"""Test that on_selection_changed modifies the cmd text properly.
The | represents the current cursor position in the cmd prompt.
If quick is True and there is only 1 completion (count == 1),
then we expect a space to be appended after the current word.
"""
model = unittest.mock.Mock()
completion_widget_stub.model.return_value = model
def check(quick, count, expected_txt, expected_pos):
config_stub.val.completion.quick = quick
model.count = lambda: count
_set_cmd_prompt(status_command_stub, before)
completer_obj.on_selection_changed(newtxt)
assert status_command_stub.text() == expected_txt
assert status_command_stub.cursorPosition() == expected_pos
after_pos = after.index('|')
after_txt = after.replace('|', '')
check(False, 1, after_txt, after_pos)
check(True, 2, after_txt, after_pos)
# quick-completing a single item should move the cursor ahead by 1 and add
# a trailing space if at the end of the cmd string, unless the command has
# maxsplit < len(before) (such as :open in these tests)
if after_txt.startswith(':open'):
return
after_pos += 1
if after_pos > len(after_txt):
after_txt += ' '
check(True, 1, after_txt, after_pos)
def test_quickcomplete_flicker(status_command_stub, completer_obj,
completion_widget_stub, config_stub):
"""Validate fix for #1519: bookmark-load background highlighting quirk.
For commands like bookmark-load and open with maxsplit=0, a commandline
that looks like ':open someurl |' is considered to be completing the first
arg with pattern 'someurl ' (note trailing whitespace). As this matches the
    one completion available, it keeps the completion menu open.
This test validates that the completion model is not re-set after we
quick-complete an entry after maxsplit.
"""
model = unittest.mock.Mock()
model.count = unittest.mock.Mock(return_value=1)
completion_widget_stub.model.return_value = model
config_stub.val.completion.quick = True
_set_cmd_prompt(status_command_stub, ':open |')
completer_obj.schedule_completion_update()
assert completion_widget_stub.set_model.called
completion_widget_stub.set_model.reset_mock()
# selecting a completion should not re-set the model
completer_obj.on_selection_changed('http://example.com')
completer_obj.schedule_completion_update()
assert not completion_widget_stub.set_model.called
def test_min_chars(status_command_stub, completer_obj, completion_widget_stub,
config_stub, key_config_stub):
"""Test that an update is delayed until min_chars characters are input."""
config_stub.val.completion.min_chars = 3
# Test #3635, where min_chars could crash the first update
_set_cmd_prompt(status_command_stub, ':set c|')
completer_obj.schedule_completion_update()
assert not completion_widget_stub.set_model.called
_set_cmd_prompt(status_command_stub, ':set co|')
completer_obj.schedule_completion_update()
assert not completion_widget_stub.set_model.called
_set_cmd_prompt(status_command_stub, ':set com|')
completer_obj.schedule_completion_update()
assert completion_widget_stub.set_model.call_count == 1
|
from typing import TYPE_CHECKING, Generic, Hashable, Mapping, Optional, TypeVar
import numpy as np
from .options import _get_keep_attrs
from .pdcompat import count_not_none
from .pycompat import is_duck_dask_array
if TYPE_CHECKING:
from .dataarray import DataArray # noqa: F401
from .dataset import Dataset # noqa: F401
T_DSorDA = TypeVar("T_DSorDA", "DataArray", "Dataset")
def _get_alpha(com=None, span=None, halflife=None, alpha=None):
# pandas defines in terms of com (converting to alpha in the algo)
# so use its function to get a com and then convert to alpha
com = _get_center_of_mass(com, span, halflife, alpha)
return 1 / (1 + com)
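# Illustrative check, not part of the original module: for span=19 the
# center-of-mass conversion below gives com = (19 - 1) / 2 = 9.0, so
# _get_alpha(span=19) == 1 / (1 + 9) == 0.1, matching pandas'
# alpha = 2 / (span + 1) convention for span-based windows.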
def move_exp_nanmean(array, *, axis, alpha):
if is_duck_dask_array(array):
raise TypeError("rolling_exp is not currently support for dask-like arrays")
import numbagg
if axis == ():
return array.astype(np.float64)
else:
return numbagg.move_exp_nanmean(array, axis=axis, alpha=alpha)
def _get_center_of_mass(comass, span, halflife, alpha):
"""
Vendored from pandas.core.window.common._get_center_of_mass
See licenses/PANDAS_LICENSE for the function's license
"""
valid_count = count_not_none(comass, span, halflife, alpha)
if valid_count > 1:
raise ValueError("comass, span, halflife, and alpha are mutually exclusive")
# Convert to center of mass; domain checks ensure 0 < alpha <= 1
if comass is not None:
if comass < 0:
raise ValueError("comass must satisfy: comass >= 0")
elif span is not None:
if span < 1:
raise ValueError("span must satisfy: span >= 1")
comass = (span - 1) / 2.0
elif halflife is not None:
if halflife <= 0:
raise ValueError("halflife must satisfy: halflife > 0")
decay = 1 - np.exp(np.log(0.5) / halflife)
comass = 1 / decay - 1
elif alpha is not None:
if alpha <= 0 or alpha > 1:
raise ValueError("alpha must satisfy: 0 < alpha <= 1")
comass = (1.0 - alpha) / alpha
else:
raise ValueError("Must pass one of comass, span, halflife, or alpha")
return float(comass)
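# Worked conversions (illustrative, not from the original module):
#   span=5     -> comass = (5 - 1) / 2 = 2.0
#   halflife=1 -> decay = 1 - exp(log(0.5)) = 0.5, comass = 1 / 0.5 - 1 = 1.0
#   alpha=0.5  -> comass = (1 - 0.5) / 0.5 = 1.0
# All four window forms therefore reduce to a single center-of-mass value,
# which _get_alpha then converts to the smoothing factor alpha.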
class RollingExp(Generic[T_DSorDA]):
"""
Exponentially-weighted moving window object.
Similar to EWM in pandas
Parameters
----------
obj : Dataset or DataArray
Object to window.
windows : mapping of hashable to int
A mapping from the name of the dimension to create the rolling
exponential window along (e.g. `time`) to the size of the moving window.
window_type : {"span", "com", "halflife", "alpha"}, default: "span"
The format of the previously supplied window. Each is a simple
numerical transformation of the others. Described in detail:
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.ewm.html
Returns
-------
RollingExp : type of input argument
"""
def __init__(
self,
obj: T_DSorDA,
windows: Mapping[Hashable, int],
window_type: str = "span",
):
self.obj: T_DSorDA = obj
dim, window = next(iter(windows.items()))
self.dim = dim
self.alpha = _get_alpha(**{window_type: window})
def mean(self, keep_attrs: Optional[bool] = None) -> T_DSorDA:
"""
Exponentially weighted moving average
Parameters
----------
keep_attrs : bool, default: None
If True, the attributes (``attrs``) will be copied from the original
object to the new one. If False, the new object will be returned
without attributes. If None uses the global default.
Examples
--------
>>> da = xr.DataArray([1, 1, 2, 2, 2], dims="x")
>>> da.rolling_exp(x=2, window_type="span").mean()
<xarray.DataArray (x: 5)>
array([1. , 1. , 1.69230769, 1.9 , 1.96694215])
Dimensions without coordinates: x
"""
if keep_attrs is None:
keep_attrs = _get_keep_attrs(default=True)
return self.obj.reduce(
move_exp_nanmean, dim=self.dim, alpha=self.alpha, keep_attrs=keep_attrs
)
|
import builtins
import json
import unittest
from absl import flags as flgs
import contextlib2
import mock
from perfkitbenchmarker import os_types
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.azure import util as azure_util
from perfkitbenchmarker.providers.kubernetes import kubernetes_pod_spec
from perfkitbenchmarker.providers.kubernetes import kubernetes_virtual_machine
from tests import pkb_common_test_case
FLAGS = flgs.FLAGS
FLAGS.kubernetes_anti_affinity = False
_COMPONENT = 'test_component'
_RUN_URI = 'fake_run_uri'
_NAME = 'fake_name'
_KUBECTL = 'fake_kubectl_path'
_KUBECONFIG = 'fake_kubeconfig_path'
_EXPECTED_CALL_BODY_WITHOUT_GPUS = """
{
"spec": {
"dnsPolicy":
"ClusterFirst",
"volumes": [],
"containers": [{
"name": "fake_name",
"workingDir": "/root",
"volumeMounts": [],
"image": "test_image",
"securityContext": {
"privileged": true
}
}]
},
"kind": "Pod",
"metadata": {
"name": "fake_name",
"labels": {
"pkb": "fake_name"
}
},
"apiVersion": "v1"
}
"""
_EXPECTED_CALL_BODY_WITH_2_GPUS = """
{
"spec": {
"dnsPolicy":
"ClusterFirst",
"volumes": [],
"containers": [{
"name": "fake_name",
"volumeMounts": [],
"workingDir": "/root",
"image": "test_image",
"securityContext": {
"privileged": true
},
"resources" : {
"limits": {
"nvidia.com/gpu": "2"
},
"requests": {
"nvidia.com/gpu": "2"
}
}
}]
},
"kind": "Pod",
"metadata": {
"name": "fake_name",
"labels": {
"pkb": "fake_name"
}
},
"apiVersion": "v1"
}
"""
_EXPECTED_CALL_BODY_WITH_NVIDIA_CUDA_IMAGE = """
{
"spec": {
"dnsPolicy":
"ClusterFirst",
"volumes": [],
"containers": [{
"name": "fake_name",
"volumeMounts": [],
"workingDir": "/root",
"image": "nvidia/cuda:9.0-devel-ubuntu16.04",
"securityContext": {
"privileged": true
},
"command": [
"bash",
"-c",
"apt-get update && apt-get install -y sudo && sed -i '/env_reset/d' /etc/sudoers && sed -i '/secure_path/d' /etc/sudoers && sudo ldconfig && tail -f /dev/null"
]
}]
},
"kind": "Pod",
"metadata": {
"name": "fake_name",
"labels": {
"pkb": "fake_name"
}
},
"apiVersion": "v1"
}
"""
def get_write_mock_from_temp_file_mock(temp_file_mock):
"""Returns the write method mock from the NamedTemporaryFile mock.
  This can be used to make assertions about the calls made to write(),
  which exists on the instance returned from the NamedTemporaryFile mock.
  The __enter__() is needed because NamedTemporaryFile is used as a
  context manager inside kubernetes_helper.py.
  Args:
    temp_file_mock: mock object of the NamedTemporaryFile() context manager.
"""
return temp_file_mock().__enter__().write
@contextlib2.contextmanager
def patch_critical_objects(stdout='', stderr='', return_code=0, flags=FLAGS):
with contextlib2.ExitStack() as stack:
retval = (stdout, stderr, return_code)
flags.gcloud_path = 'gcloud'
flags.run_uri = _RUN_URI
flags.kubectl = _KUBECTL
flags.kubeconfig = _KUBECONFIG
stack.enter_context(mock.patch(builtins.__name__ + '.open'))
stack.enter_context(mock.patch(vm_util.__name__ + '.PrependTempDir'))
# Save and return the temp_file mock here so that we can access the write()
# call on the instance that the mock returned. This allows us to verify
# that the body of the file is what we expect it to be (useful for
# verifying that the pod.yml body was written correctly).
temp_file = stack.enter_context(
mock.patch(vm_util.__name__ + '.NamedTemporaryFile'))
issue_command = stack.enter_context(
mock.patch(vm_util.__name__ + '.IssueCommand', return_value=retval))
yield issue_command, temp_file
class TestKubernetesVirtualMachine(
pkb_common_test_case.TestOsMixin,
kubernetes_virtual_machine.KubernetesVirtualMachine):
pass
class BaseKubernetesVirtualMachineTestCase(
pkb_common_test_case.PkbCommonTestCase):
def assertJsonEqual(self, str1, str2):
json1 = json.loads(str1)
json2 = json.loads(str2)
self.assertEqual(
json.dumps(json1, sort_keys=True),
json.dumps(json2, sort_keys=True)
)
class KubernetesResourcesTestCase(
BaseKubernetesVirtualMachineTestCase):
@staticmethod
def create_virtual_machine_spec():
spec = kubernetes_pod_spec.KubernetesPodSpec(
_COMPONENT,
resource_limits={
'cpus': 2,
'memory': '5GiB'
},
resource_requests={
'cpus': 1.5,
'memory': '4GiB'
},
gpu_count=2,
gpu_type='k80',
)
return spec
def testPodResourceLimits(self):
spec = self.create_virtual_machine_spec()
self.assertEqual(spec.resource_limits.cpus, 2)
self.assertEqual(spec.resource_limits.memory, 5120)
def testCreatePodResourceBody(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects():
kub_vm = TestKubernetesVirtualMachine(spec)
expected = {
'limits': {
'cpu': '2',
'memory': '5120Mi',
'nvidia.com/gpu': '2'
},
'requests': {
'cpu': '1.5',
'memory': '4096Mi',
'nvidia.com/gpu': '2'
}
}
actual = kub_vm._BuildResourceBody()
self.assertDictEqual(expected, actual)
def testGetMetadata(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects():
kub_vm = TestKubernetesVirtualMachine(spec)
subset_of_expected_metadata = {
'pod_cpu_limit': 2,
'pod_memory_limit_mb': 5120,
'pod_cpu_request': 1.5,
'pod_memory_request_mb': 4096,
}
actual = kub_vm.GetResourceMetadata()
self.assertDictContainsSubset(subset_of_expected_metadata, actual)
class KubernetesVirtualMachineOsTypesTestCase(
BaseKubernetesVirtualMachineTestCase):
@staticmethod
def create_kubernetes_vm(os_type):
spec = kubernetes_pod_spec.KubernetesPodSpec(
_COMPONENT)
vm_class = virtual_machine.GetVmClass(providers.KUBERNETES,
os_type)
kub_vm = vm_class(spec)
kub_vm._WaitForPodBootCompletion = lambda: None
kub_vm._Create()
def testUbuntuImagesInstallSudo(self):
with patch_critical_objects() as (_, temp_file):
self.create_kubernetes_vm(os_types.UBUNTU1604)
write_mock = get_write_mock_from_temp_file_mock(temp_file)
create_json = json.loads(write_mock.call_args[0][0])
command = create_json['spec']['containers'][0]['command']
self.assertEqual(command,
[u'bash', u'-c',
(u'apt-get update && apt-get install -y sudo && '
'sed -i \'/env_reset/d\' /etc/sudoers && '
'sed -i \'/secure_path/d\' /etc/sudoers && '
'sudo ldconfig && tail -f /dev/null')])
def testCreateUbuntu1604(self):
with patch_critical_objects() as (_, temp_file):
self.create_kubernetes_vm(os_types.UBUNTU1604)
write_mock = get_write_mock_from_temp_file_mock(temp_file)
create_json = json.loads(write_mock.call_args[0][0])
self.assertEqual(create_json['spec']['containers'][0]['image'],
'ubuntu:16.04')
def testCreateUbuntu1710(self):
with patch_critical_objects() as (_, temp_file):
self.create_kubernetes_vm(os_types.UBUNTU1710)
write_mock = get_write_mock_from_temp_file_mock(temp_file)
create_json = json.loads(write_mock.call_args[0][0])
self.assertEqual(create_json['spec']['containers'][0]['image'],
'ubuntu:17.10')
class KubernetesVirtualMachineTestCase(
BaseKubernetesVirtualMachineTestCase):
@staticmethod
def create_virtual_machine_spec():
spec = kubernetes_pod_spec.KubernetesPodSpec(
_COMPONENT,
image='test_image',
install_packages=False,
machine_type='test_machine_type',
zone='test_zone')
return spec
def testCreate(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects() as (issue_command, _):
kub_vm = TestKubernetesVirtualMachine(spec)
kub_vm._WaitForPodBootCompletion = lambda: None # pylint: disable=invalid-name
kub_vm._Create()
command = issue_command.call_args[0][0]
command_string = ' '.join(command[:4])
self.assertEqual(issue_command.call_count, 1)
self.assertIn('{0} --kubeconfig={1} create -f'.format(
_KUBECTL, _KUBECONFIG), command_string)
def testCreatePodBodyWrittenCorrectly(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects() as (_, temp_file):
kub_vm = TestKubernetesVirtualMachine(spec)
# Need to set the name explicitly on the instance because the test
      # runner is currently using a single PKB instance, so the BaseVm
# instance counter is at an unpredictable number at this stage, and it is
# used to set the name.
kub_vm.name = _NAME
kub_vm._WaitForPodBootCompletion = lambda: None
kub_vm._Create()
write_mock = get_write_mock_from_temp_file_mock(temp_file)
self.assertJsonEqual(
write_mock.call_args[0][0],
_EXPECTED_CALL_BODY_WITHOUT_GPUS
)
def testDownloadPreprovisionedDataAws(self):
spec = self.create_virtual_machine_spec()
FLAGS.container_cluster_cloud = 'AWS'
with patch_critical_objects(flags=FLAGS) as (issue_command, _):
kub_vm = (
kubernetes_virtual_machine.Ubuntu1604BasedKubernetesVirtualMachine(
spec))
kub_vm.DownloadPreprovisionedData('path', 'name', 'filename')
command = issue_command.call_args[0][0]
command_string = ' '.join(command)
self.assertIn('s3', command_string)
def testDownloadPreprovisionedDataAzure(self):
azure_util.GetAzureStorageConnectionString = mock.Mock(return_value='')
spec = self.create_virtual_machine_spec()
FLAGS.container_cluster_cloud = 'Azure'
with patch_critical_objects() as (issue_command, _):
kub_vm = (
kubernetes_virtual_machine.Ubuntu1604BasedKubernetesVirtualMachine(
spec))
kub_vm.DownloadPreprovisionedData('path', 'name', 'filename')
command = issue_command.call_args[0][0]
command_string = ' '.join(command)
self.assertIn('az storage blob download', command_string)
self.assertIn('--connection-string', command_string)
def testDownloadPreprovisionedDataGcp(self):
spec = self.create_virtual_machine_spec()
FLAGS.container_cluster_cloud = 'GCP'
with patch_critical_objects() as (issue_command, _):
kub_vm = (
kubernetes_virtual_machine.Ubuntu1604BasedKubernetesVirtualMachine(
spec))
kub_vm.DownloadPreprovisionedData('path', 'name', 'filename')
command = issue_command.call_args[0][0]
command_string = ' '.join(command)
self.assertIn('gsutil', command_string)
class KubernetesVirtualMachineWithGpusTestCase(
BaseKubernetesVirtualMachineTestCase):
@staticmethod
def create_virtual_machine_spec():
spec = kubernetes_pod_spec.KubernetesPodSpec(
_COMPONENT,
image='test_image',
gpu_count=2,
gpu_type='k80',
install_packages=False,
machine_type='test_machine_type',
zone='test_zone')
return spec
def testCreate(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects() as (issue_command, _):
kub_vm = TestKubernetesVirtualMachine(spec)
kub_vm._WaitForPodBootCompletion = lambda: None
kub_vm._Create()
command = issue_command.call_args[0][0]
command_string = ' '.join(command[:4])
self.assertEqual(issue_command.call_count, 1)
self.assertIn('{0} --kubeconfig={1} create -f'.format(
_KUBECTL, _KUBECONFIG), command_string)
def testCreatePodBodyWrittenCorrectly(self):
spec = self.create_virtual_machine_spec()
with patch_critical_objects() as (_, temp_file):
kub_vm = TestKubernetesVirtualMachine(spec)
# Need to set the name explicitly on the instance because the test
      # runner is currently using a single PKB instance, so the BaseVm
# instance counter is at an unpredictable number at this stage, and it is
# used to set the name.
kub_vm.name = _NAME
kub_vm._WaitForPodBootCompletion = lambda: None
kub_vm._Create()
write_mock = get_write_mock_from_temp_file_mock(temp_file)
self.assertJsonEqual(
write_mock.call_args[0][0],
_EXPECTED_CALL_BODY_WITH_2_GPUS
)
class KubernetesVirtualMachineWithNvidiaCudaImage(
BaseKubernetesVirtualMachineTestCase):
@staticmethod
def create_virtual_machine_spec():
spec = kubernetes_pod_spec.KubernetesPodSpec(
_COMPONENT,
install_packages=False,
machine_type='test_machine_type',
zone='test_zone')
return spec
def testCreatePodBodyWrittenCorrectly(self):
spec = self.create_virtual_machine_spec()
vm_class = virtual_machine.GetVmClass(providers.KUBERNETES,
os_types.UBUNTU1604_CUDA9)
with patch_critical_objects() as (_, temp_file):
kub_vm = vm_class(spec)
# Need to set the name explicitly on the instance because the test
      # runner is currently using a single PKB instance, so the BaseVm
# instance counter is at an unpredictable number at this stage, and it is
# used to set the name.
kub_vm.name = _NAME
kub_vm._WaitForPodBootCompletion = lambda: None
kub_vm._Create()
write_mock = get_write_mock_from_temp_file_mock(temp_file)
self.assertJsonEqual(
write_mock.call_args[0][0],
_EXPECTED_CALL_BODY_WITH_NVIDIA_CUDA_IMAGE
)
if __name__ == '__main__':
unittest.main()
|
import json
from appconf import AppConf
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from django.utils.functional import cached_property
from weblate.utils.classloader import ClassLoader
from weblate.utils.decorators import disable_for_loaddata
class ChecksLoader(ClassLoader):
@cached_property
def source(self):
return {k: v for k, v in self.items() if v.source}
@cached_property
def target(self):
return {k: v for k, v in self.items() if v.target}
# Initialize checks list
CHECKS = ChecksLoader("CHECK_LIST")
class WeblateChecksConf(AppConf):
# List of quality checks
CHECK_LIST = (
"weblate.checks.same.SameCheck",
"weblate.checks.chars.BeginNewlineCheck",
"weblate.checks.chars.EndNewlineCheck",
"weblate.checks.chars.BeginSpaceCheck",
"weblate.checks.chars.EndSpaceCheck",
"weblate.checks.chars.DoubleSpaceCheck",
"weblate.checks.chars.EndStopCheck",
"weblate.checks.chars.EndColonCheck",
"weblate.checks.chars.EndQuestionCheck",
"weblate.checks.chars.EndExclamationCheck",
"weblate.checks.chars.EndEllipsisCheck",
"weblate.checks.chars.EndSemicolonCheck",
"weblate.checks.chars.MaxLengthCheck",
"weblate.checks.chars.KashidaCheck",
"weblate.checks.chars.PunctuationSpacingCheck",
"weblate.checks.format.PythonFormatCheck",
"weblate.checks.format.PythonBraceFormatCheck",
"weblate.checks.format.PHPFormatCheck",
"weblate.checks.format.CFormatCheck",
"weblate.checks.format.PerlFormatCheck",
"weblate.checks.format.JavaScriptFormatCheck",
"weblate.checks.format.CSharpFormatCheck",
"weblate.checks.format.JavaFormatCheck",
"weblate.checks.format.JavaMessageFormatCheck",
"weblate.checks.format.PercentPlaceholdersCheck",
"weblate.checks.format.VueFormattingCheck",
"weblate.checks.format.I18NextInterpolationCheck",
"weblate.checks.format.ESTemplateLiteralsCheck",
"weblate.checks.angularjs.AngularJSInterpolationCheck",
"weblate.checks.qt.QtFormatCheck",
"weblate.checks.qt.QtPluralCheck",
"weblate.checks.ruby.RubyFormatCheck",
"weblate.checks.consistency.PluralsCheck",
"weblate.checks.consistency.SamePluralsCheck",
"weblate.checks.consistency.ConsistencyCheck",
"weblate.checks.consistency.TranslatedCheck",
"weblate.checks.chars.EscapedNewlineCountingCheck",
"weblate.checks.chars.NewLineCountCheck",
"weblate.checks.markup.BBCodeCheck",
"weblate.checks.chars.ZeroWidthSpaceCheck",
"weblate.checks.render.MaxSizeCheck",
"weblate.checks.markup.XMLValidityCheck",
"weblate.checks.markup.XMLTagsCheck",
"weblate.checks.markup.MarkdownRefLinkCheck",
"weblate.checks.markup.MarkdownLinkCheck",
"weblate.checks.markup.MarkdownSyntaxCheck",
"weblate.checks.markup.URLCheck",
"weblate.checks.markup.SafeHTMLCheck",
"weblate.checks.placeholders.PlaceholderCheck",
"weblate.checks.placeholders.RegexCheck",
"weblate.checks.duplicate.DuplicateCheck",
"weblate.checks.source.OptionalPluralCheck",
"weblate.checks.source.EllipsisCheck",
"weblate.checks.source.MultipleFailingCheck",
"weblate.checks.source.LongUntranslatedCheck",
"weblate.checks.format.MultipleUnnamedFormatsCheck",
)
class Meta:
prefix = ""
class CheckQuerySet(models.QuerySet):
def filter_access(self, user):
if user.is_superuser:
return self
return self.filter(
Q(unit__translation__component__project_id__in=user.allowed_project_ids)
& (
Q(unit__translation__component__restricted=False)
| Q(unit__translation__component_id__in=user.component_permissions)
)
)
class Check(models.Model):
unit = models.ForeignKey("trans.Unit", on_delete=models.deletion.CASCADE)
check = models.CharField(max_length=50, choices=CHECKS.get_choices())
dismissed = models.BooleanField(db_index=True, default=False)
weblate_unsafe_delete = True
objects = CheckQuerySet.as_manager()
class Meta:
unique_together = ("unit", "check")
def __str__(self):
return str(self.get_name())
@cached_property
def check_obj(self):
try:
return CHECKS[self.check]
except KeyError:
return None
def is_enforced(self):
return self.check in self.unit.translation.component.enforced_checks
def get_description(self):
if self.check_obj:
return self.check_obj.get_description(self)
return self.check
def get_fixup(self):
if self.check_obj:
return self.check_obj.get_fixup(self.unit)
return None
def get_fixup_json(self):
fixup = self.get_fixup()
if not fixup:
return None
return json.dumps(fixup)
def get_name(self):
if self.check_obj:
return self.check_obj.name
return self.check
def get_doc_url(self, user=None):
if self.check_obj:
return self.check_obj.get_doc_url(user=user)
return ""
def set_dismiss(self, state=True):
"""Set ignore flag."""
self.dismissed = state
self.save()
@receiver(post_save, sender=Check)
@disable_for_loaddata
def check_post_save(sender, instance, created, **kwargs):
"""Handle check creation or updates."""
if not created:
instance.unit.translation.invalidate_cache()
@receiver(post_delete, sender=Check)
@disable_for_loaddata
def remove_complementary_checks(sender, instance, **kwargs):
    """Remove propagating checks from all units."""
unit = instance.unit
unit.translation.invalidate_cache()
check_obj = instance.check_obj
if not check_obj:
return
# Handle propagating checks - remove on other units
if check_obj.propagates:
Check.objects.filter(
unit__in=unit.same_source_units, check=instance.check
).delete()
for other in unit.same_source_units:
other.translation.invalidate_cache()
# Update source checks if needed
if check_obj.target:
source_unit = unit.source_unit
if unit.is_batch_update:
unit.translation.component.updated_sources[source_unit.id] = source_unit
else:
try:
source_unit.run_checks()
except ObjectDoesNotExist:
pass
def get_display_checks(unit):
for check, check_obj in CHECKS.target.items():
if check_obj.should_display(unit):
yield Check(unit=unit, dismissed=False, check=check)
|
revision = "ee827d1e1974"
down_revision = "7ead443ba911"
from alembic import op
def upgrade():
connection = op.get_bind()
connection.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
op.create_index(
"ix_certificates_cn",
"certificates",
["cn"],
unique=False,
postgresql_ops={"cn": "gin_trgm_ops"},
postgresql_using="gin",
)
op.create_index(
"ix_certificates_name",
"certificates",
["name"],
unique=False,
postgresql_ops={"name": "gin_trgm_ops"},
postgresql_using="gin",
)
op.create_index(
"ix_domains_name_gin",
"domains",
["name"],
unique=False,
postgresql_ops={"name": "gin_trgm_ops"},
postgresql_using="gin",
)
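# Illustrative note (an assumption about intent, not from the original
# migration): the gin_trgm_ops indexes created above let PostgreSQL serve
# substring and similarity searches from the index, e.g.
#   SELECT * FROM certificates WHERE name ILIKE '%example%';
# which would otherwise require a sequential scan over the table.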
def downgrade():
op.drop_index("ix_domains_name_gin", table_name="domains")
op.drop_index("ix_certificates_name", table_name="certificates")
op.drop_index("ix_certificates_cn", table_name="certificates")
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import multiprocessing
import multiprocessing.managers
import os
import signal
import threading
import unittest
from perfkitbenchmarker import background_tasks
from perfkitbenchmarker import errors
from tests import pkb_common_test_case
from six.moves import range
def _ReturnArgs(a, b=None):
return b, a
def _RaiseValueError():
raise ValueError('ValueError')
def _IncrementCounter(lock, counter):
with lock:
counter.value += 1
def _AppendLength(int_list):
int_list.append(len(int_list))
def _WaitAndAppendInt(int_list, int_to_append, event=None, timeout=None):
if event:
event.wait(timeout)
int_list.append(int_to_append)
class GetCallStringTestCase(pkb_common_test_case.PkbCommonTestCase):
def testNoArgs(self):
result = background_tasks._GetCallString((_ReturnArgs, (), {}))
self.assertEqual(result, '_ReturnArgs()')
def testArgs(self):
result = background_tasks._GetCallString((_ReturnArgs, ('blue', 5), {}))
self.assertEqual(result, '_ReturnArgs(blue, 5)')
def testKwargs(self):
result = background_tasks._GetCallString((_ReturnArgs, (), {'x': 8}))
self.assertEqual(result, '_ReturnArgs(x=8)')
def testArgsAndKwargs(self):
result = background_tasks._GetCallString((_ReturnArgs, ('blue', 5),
{'x': 8}))
self.assertEqual(result, '_ReturnArgs(blue, 5, x=8)')
def testSinglePartial(self):
_ReturnArgs2 = functools.partial(_ReturnArgs, 1, x=2)
result = background_tasks._GetCallString((_ReturnArgs2, (), {}))
self.assertEqual(result, '_ReturnArgs(1, x=2)')
result = background_tasks._GetCallString((_ReturnArgs2, ('blue', 5),
{'x': 8}))
self.assertEqual(result, '_ReturnArgs(1, blue, 5, x=8)')
def testDoublePartial(self):
_ReturnArgs2 = functools.partial(_ReturnArgs, 1, x=2)
_ReturnArgs3 = functools.partial(_ReturnArgs2, 3, x=4)
result = background_tasks._GetCallString((_ReturnArgs3, (), {}))
self.assertEqual(result, '_ReturnArgs(1, 3, x=4)')
result = background_tasks._GetCallString((_ReturnArgs3, ('blue', 5),
{'x': 8}))
self.assertEqual(result, '_ReturnArgs(1, 3, blue, 5, x=8)')
class RunParallelThreadsTestCase(pkb_common_test_case.PkbCommonTestCase):
def testFewerThreadsThanConcurrencyLimit(self):
calls = [(_ReturnArgs, ('a',), {'b': i}) for i in range(2)]
result = background_tasks.RunParallelThreads(calls, max_concurrency=4)
self.assertEqual(result, [(0, 'a'), (1, 'a')])
def testMoreThreadsThanConcurrencyLimit(self):
calls = [(_ReturnArgs, ('a',), {'b': i}) for i in range(10)]
result = background_tasks.RunParallelThreads(calls, max_concurrency=4)
self.assertEqual(result, [(i, 'a') for i in range(10)])
def testException(self):
int_list = []
calls = [(_AppendLength, (int_list,), {}), (_RaiseValueError, (), {}),
(_AppendLength, (int_list,), {})]
with self.assertRaises(errors.VmUtil.ThreadException):
background_tasks.RunParallelThreads(calls, max_concurrency=1)
self.assertEqual(int_list, [0, 1])
def testInterrupt(self):
# Uses RunParallelThreads to try to run four threads:
# 0: Waits 5 seconds and adds 0 to int_list.
# 1: Adds 1 to int_list.
# 2: Sends a SIGINT to the current process.
# 3: Waits 5 seconds and adds 3 to int_list.
    # Since max_concurrency is set to 2, thread 0 waits, thread 1 succeeds,
    # thread 2 sends the SIGINT, and then neither thread 0 nor thread 3 is
    # able to append to int_list.
int_list = []
event = threading.Event()
calls = [(_WaitAndAppendInt, (int_list, 0, event, 5), {}),
(_WaitAndAppendInt, (int_list, 1), {}),
(os.kill, (os.getpid(), signal.SIGINT), {}),
(_WaitAndAppendInt, (int_list, 3, event, 5), {})]
with self.assertRaises(KeyboardInterrupt):
background_tasks.RunParallelThreads(calls, max_concurrency=2)
self.assertEqual(int_list, [1])
class RunThreadedTestCase(pkb_common_test_case.PkbCommonTestCase):
def testNonListParams(self):
with self.assertRaises(ValueError):
background_tasks.RunThreaded(_ReturnArgs, 'blue')
def testNoParams(self):
result = background_tasks.RunThreaded(_ReturnArgs, [])
self.assertEqual(result, [])
def testInvalidTupleParams(self):
with self.assertRaises(ValueError):
background_tasks.RunThreaded(_ReturnArgs, [('blue', 'red')])
def testSimpleListParams(self):
result = background_tasks.RunThreaded(_ReturnArgs, ['blue', 'red'])
self.assertEqual(result, [(None, 'blue'), (None, 'red')])
def testListOfTupleParams(self):
result = background_tasks.RunThreaded(
_ReturnArgs, [(('red',), {}), (('green',), {'b': 'blue'})])
self.assertEqual(result, [(None, 'red'), ('blue', 'green')])
class RunParallelProcessesTestCase(pkb_common_test_case.PkbCommonTestCase):
def testFewerThreadsThanConcurrencyLimit(self):
calls = [(_ReturnArgs, ('a',), {'b': i}) for i in range(2)]
result = background_tasks.RunParallelProcesses(calls, max_concurrency=4)
self.assertEqual(result, [(0, 'a'), (1, 'a')])
def testMoreThreadsThanConcurrencyLimit(self):
calls = [(_ReturnArgs, ('a',), {'b': i}) for i in range(10)]
result = background_tasks.RunParallelProcesses(calls, max_concurrency=4)
self.assertEqual(result, [(i, 'a') for i in range(10)])
def testException(self):
manager = multiprocessing.managers.SyncManager()
manager.start()
lock = manager.Lock()
counter = manager.Value('i', 0)
calls = [(_IncrementCounter, (lock, counter), {}),
(_RaiseValueError, (), {}),
(_IncrementCounter, (lock, counter), {})]
with self.assertRaises(errors.VmUtil.CalledProcessException):
background_tasks.RunParallelProcesses(calls, max_concurrency=1)
self.assertEqual(counter.value, 2)
if __name__ == '__main__':
unittest.main()
|
from abc import ABCMeta, abstractmethod
class ExecutionAlgorithm(object):
"""
Callable which takes in a list of desired rebalance Orders
and outputs a new Order list with a particular execution
algorithm strategy.
"""
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, dt, initial_orders):
raise NotImplementedError(
"Should implement __call__()"
)
|
import pandas as pd
import time
import itertools as itt
from queue import Empty
from threading import Lock
import threading as th
import logging
logger = logging.getLogger(__name__)
def records_to_df(records):
records = pd.DataFrame.from_records(records)
records['receive_ts'] = records['send_ts'] + records['interval_real'] / 1e6
records['receive_sec'] = records.receive_ts.astype(int)
# TODO: consider configuration for the following:
records['tag'] = records.tag.str.rsplit('#', 1, expand=True)[0]
records.set_index(['receive_sec'], inplace=True)
return records
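# Illustrative note (the units are an assumption, not stated in the original
# module): interval_real is taken to be in microseconds, so a record with
# send_ts=100.0 and interval_real=250000 gets receive_ts=100.25, and the
# rsplit above strips an enumeration suffix such as 'case#7' -> 'case'.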
def _expand_steps(steps):
return list(itt.chain(
* [[rps] * int(duration) for rps, duration in steps]))
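# Illustrative example, not part of the original module: each
# (rps, duration) pair expands to `duration` copies of `rps`, one per second:
# >>> _expand_steps([(10, 2), (5, 3)])
# [10, 10, 5, 5, 5]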
class BfgReader(object):
def __init__(self, results, closed):
self.buffer = ""
self.stat_buffer = ""
self.results = results
self.closed = closed
self.records = []
self.lock = Lock()
self.thread = th.Thread(target=self._cacher)
self.thread.start()
def _cacher(self):
while True:
try:
self.records.append(
self.results.get(block=False))
except Empty:
if not self.closed.is_set():
time.sleep(0.1)
else:
break
def __next__(self):
if self.closed.is_set():
self.thread.join()
raise StopIteration
with self.lock:
records = self.records
self.records = []
if records:
return records_to_df(records)
return None
def __iter__(self):
return self
class BfgStatsReader(object):
def __init__(self, instance_counter, steps):
self.closed = False
self.last_ts = 0
self.steps = _expand_steps(steps)
self.instance_counter = instance_counter
self.start_time = int(time.time())
def __iter__(self):
while not self.closed:
cur_ts = int(time.time())
if cur_ts > self.last_ts:
offset = cur_ts - self.start_time
reqps = 0
if offset >= 0 and offset < len(self.steps):
reqps = self.steps[offset]
yield [{
'ts': cur_ts,
'metrics': {
'instances': self.instance_counter.value,
'reqps': reqps
}
}]
self.last_ts = cur_ts
else:
yield []
def close(self):
self.closed = True
|
from uuid import uuid4
from builtins import int
__test_missile = """\
POST /example/search/hello/help/us?param1=50&param2=0&param3=hello HTTP/1.1\r
Connection: close\r
Host: example.org\r
Content-length: 32\r
\r
param1=50&param2=0&param3=hello
"""
def __mark_by_uri(missile):
return '_'.join(
missile.decode('utf8').split('\n', 1)[0].split(' ', 2)[1].split('?')[0].split('/'))
class __UriMarker(object):
'''
Returns a uri marker function with requested limit
>>> marker = __UriMarker(2)
>>> marker(__test_missile)
'_example_search'
'''
def __init__(self, limit):
self.limit = limit
def __call__(self, missile):
return b'_'.join(
missile.split(b'\n', 1)[0].split(b' ', 2)[1].split(b'?')[0].split(b'/')[
0:self.limit + 1])
__markers = {
'uniq': lambda m: uuid4().hex,
'uri': __mark_by_uri,
}
class __Enumerator(object):
def __init__(self, marker):
self.marker = marker
self.number = int(0)
def __call__(self, missile):
marker = b"%s#%d" % (self.marker(missile), self.number)
self.number += 1
return marker
def get_marker(marker_type, enum_ammo=False):
'''
Returns a marker function of the requested marker_type
>>> marker = get_marker('uniq')(__test_missile)
>>> type(marker)
<type 'str'>
>>> len(marker)
32
>>> get_marker('uri')(__test_missile)
'_example_search_hello_help_us'
>>> marker = get_marker('non-existent')(__test_missile)
Traceback (most recent call last):
...
NotImplementedError: No such marker: "non-existent"
>>> get_marker('3')(__test_missile)
'_example_search_hello'
>>> marker = get_marker('3', True)
>>> marker(__test_missile)
'_example_search_hello#0'
>>> marker(__test_missile)
'_example_search_hello#1'
'''
try:
limit = int(marker_type)
if limit:
marker = __UriMarker(limit)
else:
def marker(m):
return b''
except ValueError:
if marker_type in __markers:
marker = __markers[marker_type]
else:
raise NotImplementedError('No such marker: "%s"' % marker_type)
# todo: fix u'False'
if enum_ammo:
marker = __Enumerator(marker)
return marker
|
from collections import defaultdict
from functools import reduce
import logging
import multiprocessing
from pycec.cec import CecAdapter # pylint: disable=import-error
from pycec.commands import ( # pylint: disable=import-error
CecCommand,
KeyPressCommand,
KeyReleaseCommand,
)
from pycec.const import ( # pylint: disable=import-error
ADDR_AUDIOSYSTEM,
ADDR_BROADCAST,
ADDR_UNREGISTERED,
KEY_MUTE_OFF,
KEY_MUTE_ON,
KEY_MUTE_TOGGLE,
KEY_VOLUME_DOWN,
KEY_VOLUME_UP,
POWER_OFF,
POWER_ON,
STATUS_PLAY,
STATUS_STILL,
STATUS_STOP,
)
from pycec.network import HDMINetwork, PhysicalAddress # pylint: disable=import-error
from pycec.tcp import TcpAdapter # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER
from homeassistant.components.switch import DOMAIN as SWITCH
from homeassistant.const import (
CONF_DEVICES,
CONF_HOST,
CONF_PLATFORM,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
DOMAIN = "hdmi_cec"
_LOGGER = logging.getLogger(__name__)
DEFAULT_DISPLAY_NAME = "HA"
CONF_TYPES = "types"
ICON_UNKNOWN = "mdi:help"
ICON_AUDIO = "mdi:speaker"
ICON_PLAYER = "mdi:play"
ICON_TUNER = "mdi:radio"
ICON_RECORDER = "mdi:microphone"
ICON_TV = "mdi:television"
ICONS_BY_TYPE = {
0: ICON_TV,
1: ICON_RECORDER,
3: ICON_TUNER,
4: ICON_PLAYER,
5: ICON_AUDIO,
}
CEC_DEVICES = defaultdict(list)
CMD_UP = "up"
CMD_DOWN = "down"
CMD_MUTE = "mute"
CMD_UNMUTE = "unmute"
CMD_MUTE_TOGGLE = "toggle mute"
CMD_PRESS = "press"
CMD_RELEASE = "release"
EVENT_CEC_COMMAND_RECEIVED = "cec_command_received"
EVENT_CEC_KEYPRESS_RECEIVED = "cec_keypress_received"
ATTR_PHYSICAL_ADDRESS = "physical_address"
ATTR_TYPE_ID = "type_id"
ATTR_VENDOR_NAME = "vendor_name"
ATTR_VENDOR_ID = "vendor_id"
ATTR_DEVICE = "device"
ATTR_TYPE = "type"
ATTR_KEY = "key"
ATTR_DUR = "dur"
ATTR_SRC = "src"
ATTR_DST = "dst"
ATTR_CMD = "cmd"
ATTR_ATT = "att"
ATTR_RAW = "raw"
ATTR_DIR = "dir"
ATTR_ABT = "abt"
ATTR_NEW = "new"
ATTR_ON = "on"
ATTR_OFF = "off"
ATTR_TOGGLE = "toggle"
_VOL_HEX = vol.Any(vol.Coerce(int), lambda x: int(x, 16))
SERVICE_SEND_COMMAND = "send_command"
SERVICE_SEND_COMMAND_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_CMD): _VOL_HEX,
vol.Optional(ATTR_SRC): _VOL_HEX,
vol.Optional(ATTR_DST): _VOL_HEX,
vol.Optional(ATTR_ATT): _VOL_HEX,
vol.Optional(ATTR_RAW): vol.Coerce(str),
},
extra=vol.PREVENT_EXTRA,
)
SERVICE_VOLUME = "volume"
SERVICE_VOLUME_SCHEMA = vol.Schema(
{
vol.Optional(CMD_UP): vol.Any(CMD_PRESS, CMD_RELEASE, vol.Coerce(int)),
vol.Optional(CMD_DOWN): vol.Any(CMD_PRESS, CMD_RELEASE, vol.Coerce(int)),
vol.Optional(CMD_MUTE): vol.Any(ATTR_ON, ATTR_OFF, ATTR_TOGGLE),
},
extra=vol.PREVENT_EXTRA,
)
SERVICE_UPDATE_DEVICES = "update"
SERVICE_UPDATE_DEVICES_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({})}, extra=vol.PREVENT_EXTRA
)
SERVICE_SELECT_DEVICE = "select_device"
SERVICE_POWER_ON = "power_on"
SERVICE_STANDBY = "standby"
# pylint: disable=unnecessary-lambda
DEVICE_SCHEMA = vol.Schema(
{
vol.All(cv.positive_int): vol.Any(
lambda devices: DEVICE_SCHEMA(devices), cv.string
)
}
)
CONF_DISPLAY_NAME = "osd_name"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_DEVICES): vol.Any(
DEVICE_SCHEMA, vol.Schema({vol.All(cv.string): vol.Any(cv.string)})
),
vol.Optional(CONF_PLATFORM): vol.Any(SWITCH, MEDIA_PLAYER),
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_DISPLAY_NAME): cv.string,
vol.Optional(CONF_TYPES, default={}): vol.Schema(
{cv.entity_id: vol.Any(MEDIA_PLAYER, SWITCH)}
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def pad_physical_address(addr):
"""Right-pad a physical address."""
return addr + [0] * (4 - len(addr))
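# Illustrative example, not part of the original module:
# >>> pad_physical_address([1, 2])
# [1, 2, 0, 0]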
def parse_mapping(mapping, parents=None):
"""Parse configuration device mapping."""
if parents is None:
parents = []
for addr, val in mapping.items():
if isinstance(addr, (str,)) and isinstance(val, (str,)):
yield (addr, PhysicalAddress(val))
else:
cur = parents + [addr]
if isinstance(val, dict):
yield from parse_mapping(val, cur)
elif isinstance(val, str):
yield (val, pad_physical_address(cur))
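# Illustrative sketch with hypothetical entity ids (not from the original
# module): a nested mapping {1: {1: 'media_player.tv'}} yields
# ('media_player.tv', [1, 1, 0, 0]) via pad_physical_address, while a flat
# {'TV': '1.0.0.0'} yields ('TV', PhysicalAddress('1.0.0.0')).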
def setup(hass: HomeAssistant, base_config):
"""Set up the CEC capability."""
# Parse configuration into a dict of device name to physical address
# represented as a list of four elements.
device_aliases = {}
devices = base_config[DOMAIN].get(CONF_DEVICES, {})
_LOGGER.debug("Parsing config %s", devices)
device_aliases.update(parse_mapping(devices))
_LOGGER.debug("Parsed devices: %s", device_aliases)
platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)
loop = (
# Create own thread if more than 1 CPU
hass.loop
if multiprocessing.cpu_count() < 2
else None
)
host = base_config[DOMAIN].get(CONF_HOST)
display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)
if host:
adapter = TcpAdapter(host, name=display_name, activate_source=False)
else:
adapter = CecAdapter(name=display_name[:12], activate_source=False)
hdmi_network = HDMINetwork(adapter, loop=loop)
def _volume(call):
"""Increase/decrease volume and mute/unmute system."""
mute_key_mapping = {
ATTR_TOGGLE: KEY_MUTE_TOGGLE,
ATTR_ON: KEY_MUTE_ON,
ATTR_OFF: KEY_MUTE_OFF,
}
for cmd, att in call.data.items():
if cmd == CMD_UP:
_process_volume(KEY_VOLUME_UP, att)
elif cmd == CMD_DOWN:
_process_volume(KEY_VOLUME_DOWN, att)
elif cmd == CMD_MUTE:
hdmi_network.send_command(
KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)
)
hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
_LOGGER.info("Audio muted")
else:
_LOGGER.warning("Unknown command %s", cmd)
def _process_volume(cmd, att):
if isinstance(att, (str,)):
att = att.strip()
if att == CMD_PRESS:
hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))
elif att == CMD_RELEASE:
hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
else:
att = 1 if att == "" else int(att)
for _ in range(0, att):
hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))
hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
def _tx(call):
"""Send CEC command."""
data = call.data
if ATTR_RAW in data:
command = CecCommand(data[ATTR_RAW])
else:
if ATTR_SRC in data:
src = data[ATTR_SRC]
else:
src = ADDR_UNREGISTERED
if ATTR_DST in data:
dst = data[ATTR_DST]
else:
dst = ADDR_BROADCAST
if ATTR_CMD in data:
cmd = data[ATTR_CMD]
else:
_LOGGER.error("Attribute 'cmd' is missing")
return False
if ATTR_ATT in data:
if isinstance(data[ATTR_ATT], (list,)):
att = data[ATTR_ATT]
else:
att = reduce(lambda x, y: f"{x}:{y:x}", data[ATTR_ATT])
else:
att = ""
command = CecCommand(cmd, dst, src, att)
hdmi_network.send_command(command)
def _standby(call):
hdmi_network.standby()
def _power_on(call):
hdmi_network.power_on()
def _select_device(call):
"""Select the active device."""
addr = call.data[ATTR_DEVICE]
if not addr:
_LOGGER.error("Device not found: %s", call.data[ATTR_DEVICE])
return
if addr in device_aliases:
addr = device_aliases[addr]
else:
entity = hass.states.get(addr)
_LOGGER.debug("Selecting entity %s", entity)
if entity is not None:
addr = entity.attributes["physical_address"]
_LOGGER.debug("Address acquired: %s", addr)
if addr is None:
_LOGGER.error(
"Device %s has not physical address", call.data[ATTR_DEVICE]
)
return
if not isinstance(addr, (PhysicalAddress,)):
addr = PhysicalAddress(addr)
hdmi_network.active_source(addr)
_LOGGER.info("Selected %s (%s)", call.data[ATTR_DEVICE], addr)
def _update(call):
"""
        Update devices if an update is needed.
        Called by the service; requests the CEC network to update its data.
"""
hdmi_network.scan()
def _new_device(device):
"""Handle new devices which are detected by HDMI network."""
key = f"{DOMAIN}.{device.name}"
hass.data[key] = device
ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)
discovery.load_platform(
hass,
ent_platform,
DOMAIN,
discovered={ATTR_NEW: [key]},
hass_config=base_config,
)
def _shutdown(call):
hdmi_network.stop()
def _start_cec(event):
"""Register services and start HDMI network to watch for devices."""
hass.services.register(
DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA
)
hass.services.register(
DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA
)
hass.services.register(
DOMAIN,
SERVICE_UPDATE_DEVICES,
_update,
schema=SERVICE_UPDATE_DEVICES_SCHEMA,
)
hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)
hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)
hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)
hdmi_network.set_new_device_callback(_new_device)
hdmi_network.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
return True
class CecEntity(Entity):
"""Representation of a HDMI CEC device entity."""
def __init__(self, device, logical) -> None:
"""Initialize the device."""
self._device = device
self._icon = None
self._state = None
self._logical_address = logical
self.entity_id = "%s.%d" % (DOMAIN, self._logical_address)
def update(self):
"""Update device status."""
device = self._device
if device.power_status in [POWER_OFF, 3]:
self._state = STATE_OFF
elif device.status == STATUS_PLAY:
self._state = STATE_PLAYING
elif device.status == STATUS_STOP:
self._state = STATE_IDLE
elif device.status == STATUS_STILL:
self._state = STATE_PAUSED
elif device.power_status in [POWER_ON, 4]:
self._state = STATE_ON
else:
_LOGGER.warning("Unknown state: %d", device.power_status)
async def async_added_to_hass(self):
"""Register HDMI callbacks after initialization."""
self._device.set_update_callback(self._update)
def _update(self, device=None):
"""Device status changed, schedule an update."""
self.schedule_update_ha_state(True)
@property
def should_poll(self):
"""
Return false.
CecEntity.update() is called by the HDMI network when there is new data.
"""
return False
@property
def name(self):
"""Return the name of the device."""
return (
f"{self.vendor_name} {self._device.osd_name}"
if (
self._device.osd_name is not None
and self.vendor_name is not None
and self.vendor_name != "Unknown"
)
else "%s %d" % (self._device.type_name, self._logical_address)
if self._device.osd_name is None
else "%s %d (%s)"
% (self._device.type_name, self._logical_address, self._device.osd_name)
)
@property
def vendor_id(self):
"""Return the ID of the device's vendor."""
return self._device.vendor_id
@property
def vendor_name(self):
"""Return the name of the device's vendor."""
return self._device.vendor
@property
def physical_address(self):
"""Return the physical address of device in HDMI network."""
return str(self._device.physical_address)
@property
def type(self):
"""Return a string representation of the device's type."""
return self._device.type_name
@property
def type_id(self):
"""Return the type ID of device."""
return self._device.type
@property
def icon(self):
"""Return the icon for device by its type."""
        return (
            self._icon
            if self._icon is not None
            else ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN)
        )
@property
def device_state_attributes(self):
"""Return the state attributes."""
state_attr = {}
if self.vendor_id is not None:
state_attr[ATTR_VENDOR_ID] = self.vendor_id
state_attr[ATTR_VENDOR_NAME] = self.vendor_name
if self.type_id is not None:
state_attr[ATTR_TYPE_ID] = self.type_id
state_attr[ATTR_TYPE] = self.type
if self.physical_address is not None:
state_attr[ATTR_PHYSICAL_ADDRESS] = self.physical_address
return state_attr
|
import os
from itertools import chain
from .connection import Resource
from .messaging import Producer
from .utils.collections import EqualityDict
from .utils.compat import register_after_fork
from .utils.functional import lazy
__all__ = ('ProducerPool', 'PoolGroup', 'register_group',
'connections', 'producers', 'get_limit', 'set_limit', 'reset')
_limit = [10]
_groups = []
use_global_limit = object()
disable_limit_protection = os.environ.get('KOMBU_DISABLE_LIMIT_PROTECTION')
def _after_fork_cleanup_group(group):
group.clear()
class ProducerPool(Resource):
"""Pool of :class:`kombu.Producer` instances."""
Producer = Producer
close_after_fork = True
def __init__(self, connections, *args, **kwargs):
self.connections = connections
self.Producer = kwargs.pop('Producer', None) or self.Producer
super().__init__(*args, **kwargs)
def _acquire_connection(self):
return self.connections.acquire(block=True)
def create_producer(self):
conn = self._acquire_connection()
try:
return self.Producer(conn)
except BaseException:
conn.release()
raise
def new(self):
return lazy(self.create_producer)
def setup(self):
if self.limit:
for _ in range(self.limit):
self._resource.put_nowait(self.new())
def close_resource(self, resource):
pass
def prepare(self, p):
if callable(p):
p = p()
if p._channel is None:
conn = self._acquire_connection()
try:
p.revive(conn)
except BaseException:
conn.release()
raise
return p
def release(self, resource):
if resource.__connection__:
resource.__connection__.release()
resource.channel = None
super().release(resource)
class PoolGroup(EqualityDict):
"""Collection of resource pools."""
def __init__(self, limit=None, close_after_fork=True):
self.limit = limit
self.close_after_fork = close_after_fork
if self.close_after_fork and register_after_fork is not None:
register_after_fork(self, _after_fork_cleanup_group)
def create(self, resource, limit):
raise NotImplementedError('PoolGroups must define ``create``')
def __missing__(self, resource):
limit = self.limit
if limit is use_global_limit:
limit = get_limit()
k = self[resource] = self.create(resource, limit)
return k
def register_group(group):
"""Register group (can be used as decorator)."""
_groups.append(group)
return group
class Connections(PoolGroup):
"""Collection of connection pools."""
def create(self, connection, limit):
return connection.Pool(limit=limit)
connections = register_group(Connections(limit=use_global_limit)) # noqa: E305
class Producers(PoolGroup):
"""Collection of producer pools."""
def create(self, connection, limit):
return ProducerPool(connections[connection], limit=limit)
producers = register_group(Producers(limit=use_global_limit)) # noqa: E305
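# Minimal usage sketch (assumes `connection` is a kombu.Connection created
# elsewhere; the payload and routing key are hypothetical):
#     with producers[connection].acquire(block=True) as producer:
#         producer.publish({'hello': 'world'}, routing_key='example')
# On exit the producer is returned to its pool and the underlying
# connection is released back to `connections`.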
def _all_pools():
return chain(*[(g.values() if g else iter([])) for g in _groups])
def get_limit():
"""Get current connection pool limit."""
return _limit[0]
def set_limit(limit, force=False, reset_after=False, ignore_errors=False):
"""Set new connection pool limit."""
limit = limit or 0
glimit = _limit[0] or 0
if limit != glimit:
_limit[0] = limit
for pool in _all_pools():
pool.resize(limit)
return limit
def reset(*args, **kwargs):
"""Reset all pools by closing open resources."""
for pool in _all_pools():
try:
pool.force_close_all()
except Exception:
pass
for group in _groups:
group.clear()
|
import datetime
import re
import time
def current_time():
"""Return the current datetime."""
return datetime.datetime.now()
def current_timestamp():
"""Return the current time as a timestamp."""
return to_timestamp(current_time())
def to_timestamp(time_val):
"""Generate a unix timestamp for the given datetime instance"""
return time.mktime(time_val.timetuple())
def delta_total_seconds(td):
"""Equivalent to timedelta.total_seconds() available in Python 2.7.
"""
microseconds, seconds, days = td.microseconds, td.seconds, td.days
return (microseconds + (seconds + days * 24 * 3600) * 10 ** 6) / 10 ** 6
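# Worked example (illustrative, not in the original module): for
# td = datetime.timedelta(days=1, seconds=30, microseconds=500000),
# (500000 + (30 + 1 * 24 * 3600) * 10**6) / 10**6 == 86430.5,
# which matches td.total_seconds().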
def macro_timedelta(start_date, years=0, months=0, days=0, hours=0):
"""Since datetime doesn't provide timedeltas at the year or month level,
this function generates timedeltas of the appropriate sizes.
"""
delta = datetime.timedelta(days=days, hours=hours)
new_month = start_date.month + months
while new_month > 12:
new_month -= 12
years += 1
while new_month < 1:
new_month += 12
years -= 1
end_date = datetime.datetime(
start_date.year + years, new_month, start_date.day, start_date.hour
)
delta += end_date - start_date
return delta
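# Illustrative example, not part of the original module: adding two months
# to 2012-11-20 rolls new_month past 12, so it wraps to 1 with years += 1,
# and the resulting end_date is 2013-01-20:
# >>> macro_timedelta(datetime.datetime(2012, 11, 20), months=2)
# datetime.timedelta(days=61)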
def duration(start_time, end_time=None):
"""Get a timedelta between end_time and start_time, where end_time defaults
to now().
"""
if not start_time:
return None
last_time = end_time if end_time else current_time()
return last_time - start_time
class DateArithmetic:
"""Parses a string which contains a date arithmetic pattern and returns
a date with the delta added or subtracted.
"""
DATE_TYPE_PATTERN = re.compile(r"(\w+)([+-]\d+)?")
DATE_FORMATS = {
"year": "%Y",
"month": "%m",
"day": "%d",
"hour": "%H",
"shortdate": "%Y-%m-%d",
}
@classmethod
def parse(cls, date_str, dt=None):
"""Parse a date arithmetic pattern (Ex: 'shortdate-1'). Supports
date strings: shortdate, year, month, day, unixtime, daynumber.
Supports subtraction and addition operations of integers. Time unit is
based on date format (Ex: seconds for unixtime, days for day).
"""
dt = dt or current_time()
match = cls.DATE_TYPE_PATTERN.match(date_str)
if not match:
return
attr, value = match.groups()
delta = int(value) if value else 0
if attr in ("shortdate", "year", "month", "day", "hour"):
if delta:
kwargs = {"days" if attr == "shortdate" else attr + "s": delta}
dt += macro_timedelta(dt, **kwargs)
return dt.strftime(cls.DATE_FORMATS[attr])
if attr == "unixtime":
return int(to_timestamp(dt)) + delta
if attr == "daynumber":
return dt.toordinal() + delta
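# Illustrative usage, not part of the original module ('shortdate-1'
# subtracts one day before formatting; 2020 is a leap year):
# >>> DateArithmetic.parse('shortdate-1', datetime.datetime(2020, 3, 1))
# '2020-02-29'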
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import logging
from absl import flags
from perfkitbenchmarker import resource
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.configs import spec
import six
flags.DEFINE_boolean('nfs_timeout_hard', True,
                     'Whether to use a hard or soft NFS mount.')
flags.DEFINE_integer('nfs_rsize', 1048576, 'NFS read size.')
flags.DEFINE_integer('nfs_wsize', 1048576, 'NFS write size.')
flags.DEFINE_integer('nfs_timeout', 60, 'NFS timeout.')
flags.DEFINE_integer('nfs_retries', 2, 'NFS Retries.')
flags.DEFINE_boolean('nfs_noresvport', False,
'Whether the NFS client should use a non-privileged '
                     'source port. Suggested for use with EFS.')
flags.DEFINE_boolean('nfs_managed', True,
'Use a managed NFS service if using NFS disks. Otherwise '
'start an NFS server on the first VM.')
flags.DEFINE_string('nfs_ip_address', None,
'If specified, PKB will target this ip address when '
'mounting NFS "disks" rather than provisioning an NFS '
'Service for the corresponding cloud.')
flags.DEFINE_string('nfs_directory', None,
'Directory to mount if using a StaticNfsService. This '
'corresponds to the "VOLUME_NAME" of other NfsService '
'classes.')
flags.DEFINE_string('smb_version', '3.0', 'SMB version.')
flags.DEFINE_list('mount_options', [],
'Additional arguments to supply when mounting.')
flags.DEFINE_list('fstab_options', [],
'Additional arguments to supply to fstab.')
FLAGS = flags.FLAGS
# These are the (deprecated) old disk type names
STANDARD = 'standard'
REMOTE_SSD = 'remote_ssd'
PIOPS = 'piops' # Provisioned IOPS (SSD) in AWS and Alicloud
REMOTE_ESSD = 'remote_essd' # Enhanced Cloud SSD in Alicloud
# 'local' refers to disks that come attached to VMs. It is the only
# "universal" disk type that is not associated with a provider. It
# exists because we can provision a local disk without creating a disk
# spec. The Aerospike benchmarks use this fact in their config
# methods, and they need to be able to tell when a disk is local. So
# until that changes, 'local' is a special disk type.
LOCAL = 'local'
RAM = 'ram'
# refers to disks that come from a cloud/unmanaged NFS or SMB service
NFS = 'nfs'
SMB = 'smb'
# Map old disk type names to new disk type names
DISK_TYPE_MAPS = dict()
# Standard metadata keys relating to disks
MEDIA = 'media'
REPLICATION = 'replication'
# And some possible values
HDD = 'hdd'
SSD = 'ssd'
NONE = 'none'
ZONE = 'zone'
REGION = 'region'
DEFAULT_MOUNT_OPTIONS = 'discard'
DEFAULT_FSTAB_OPTIONS = 'defaults'
# TODO(user): remove this function when we remove the deprecated
# flags and disk type names.
def RegisterDiskTypeMap(provider_name, type_map):
"""Register a map from legacy disk type names to modern ones.
The translation machinery looks here to find the map corresponding
to the chosen provider and translates the user's flags and configs
to the new naming system. This function should be removed once the
(deprecated) legacy flags are removed.
Args:
provider_name: a string. The name of the provider. Must match
the names we give to providers in benchmark_spec.py.
type_map: a dict. Maps generic disk type names (STANDARD,
REMOTE_SSD, PIOPS) to provider-specific names.
"""
DISK_TYPE_MAPS[provider_name] = type_map
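# Illustrative usage with a hypothetical provider map (not from this
# module): after
#   RegisterDiskTypeMap('GCP', {STANDARD: 'pd-standard',
#                               REMOTE_SSD: 'pd-ssd'})
# WarnAndTranslateDiskTypes('standard', 'GCP') would log a deprecation
# warning and return 'pd-standard'.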
def GetDiskSpecClass(cloud):
"""Get the DiskSpec class corresponding to 'cloud'."""
return spec.GetSpecClass(BaseDiskSpec, CLOUD=cloud)
def WarnAndTranslateDiskTypes(name, cloud):
"""Translate old disk types to new disk types, printing warnings if needed.
Args:
name: a string specifying a disk type, either new or old.
cloud: the cloud we're running on.
Returns:
The new-style disk type name (i.e. the provider's name for the type).
"""
if cloud in DISK_TYPE_MAPS:
disk_type_map = DISK_TYPE_MAPS[cloud]
if name in disk_type_map and disk_type_map[name] != name:
new_name = disk_type_map[name]
logging.warning('Disk type name %s is deprecated and will be removed. '
'Translating to %s for now.', name, new_name)
return new_name
else:
return name
else:
logging.info('No legacy->new disk type map for provider %s', cloud)
# The provider has not been updated to use new-style names. We
# need to keep benchmarks working, so we pass through the name.
return name
def WarnAndCopyFlag(old_name, new_name):
"""Copy a value from an old flag to a new one, warning the user.
Args:
old_name: old name of flag.
new_name: new name of flag.
"""
if FLAGS[old_name].present:
logging.warning('Flag --%s is deprecated and will be removed. Please '
'switch to --%s.', old_name, new_name)
if not FLAGS[new_name].present:
FLAGS[new_name].value = FLAGS[old_name].value
# Mark the new flag as present so we'll print it out in our list
# of flag values.
FLAGS[new_name].present = True
else:
logging.warning('Ignoring legacy flag %s because new flag %s is present.',
old_name, new_name)
# We keep the old flag around so that providers that haven't been
# updated yet will continue to work.
DISK_FLAGS_TO_TRANSLATE = {
'scratch_disk_type': 'data_disk_type',
'scratch_disk_iops': 'aws_provisioned_iops',
'scratch_disk_size': 'data_disk_size'
}
def WarnAndTranslateDiskFlags():
"""Translate old disk-related flags to new disk-related flags.
"""
for old, new in six.iteritems(DISK_FLAGS_TO_TRANSLATE):
WarnAndCopyFlag(old, new)
class BaseDiskSpec(spec.BaseSpec):
"""Stores the information needed to create a disk.
Attributes:
device_path: None or string. Path on the machine where the disk is located.
disk_number: None or int. Optional disk identifier unique within the
current machine.
disk_size: None or int. Size of the disk in GB.
disk_type: None or string. See cloud specific disk classes for more
information about acceptable values.
mount_point: None or string. Directory of mount point.
num_striped_disks: int. The number of disks to stripe together. If this is
1, it means no striping will occur. This must be >= 1.
"""
SPEC_TYPE = 'BaseDiskSpec'
CLOUD = None
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
"""Overrides config values with flag values.
Can be overridden by derived classes to add support for specific flags.
Args:
config_values: dict mapping config option names to provided values. Is
modified by this function.
flag_values: flags.FlagValues. Runtime flags that may override the
provided config values.
Returns:
dict mapping config option names to values derived from the config
values or flag values.
"""
super(BaseDiskSpec, cls)._ApplyFlags(config_values, flag_values)
if flag_values['data_disk_size'].present:
config_values['disk_size'] = flag_values.data_disk_size
if flag_values['data_disk_type'].present:
config_values['disk_type'] = flag_values.data_disk_type
if flag_values['num_striped_disks'].present:
config_values['num_striped_disks'] = flag_values.num_striped_disks
if flag_values['scratch_dir'].present:
config_values['mount_point'] = flag_values.scratch_dir
if flag_values['nfs_version'].present:
config_values['nfs_version'] = flag_values.nfs_version
if flag_values['nfs_timeout_hard'].present:
config_values['nfs_timeout_hard'] = flag_values.nfs_timeout_hard
if flag_values['nfs_rsize'].present:
config_values['nfs_rsize'] = flag_values.nfs_rsize
if flag_values['nfs_wsize'].present:
config_values['nfs_wsize'] = flag_values.nfs_wsize
if flag_values['nfs_timeout'].present:
config_values['nfs_timeout'] = flag_values.nfs_timeout
if flag_values['nfs_retries'].present:
config_values['nfs_retries'] = flag_values.nfs_retries
if flag_values['nfs_ip_address'].present:
config_values['nfs_ip_address'] = flag_values.nfs_ip_address
if flag_values['nfs_managed'].present:
config_values['nfs_managed'] = flag_values.nfs_managed
if flag_values['nfs_directory'].present:
config_values['nfs_directory'] = flag_values.nfs_directory
if flag_values['smb_version'].present:
config_values['smb_version'] = flag_values.smb_version
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Can be overridden by derived classes to add options or impose additional
requirements on existing options.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword
arguments to construct in order to decode the named option.
"""
result = super(BaseDiskSpec, cls)._GetOptionDecoderConstructions()
result.update({
'device_path': (option_decoders.StringDecoder, {'default': None,
'none_ok': True}),
'disk_number': (option_decoders.IntDecoder, {'default': None,
'none_ok': True}),
'disk_size': (option_decoders.IntDecoder, {'default': None,
'none_ok': True}),
'disk_type': (option_decoders.StringDecoder, {'default': None,
'none_ok': True}),
'mount_point': (option_decoders.StringDecoder, {'default': None,
'none_ok': True}),
'num_striped_disks': (option_decoders.IntDecoder, {'default': 1,
'min': 1}),
'nfs_version': (option_decoders.StringDecoder, {'default': None}),
'nfs_ip_address': (option_decoders.StringDecoder, {'default': None}),
'nfs_managed': (option_decoders.BooleanDecoder, {'default': True}),
'nfs_directory': (option_decoders.StringDecoder, {'default': None}),
'nfs_rsize': (option_decoders.IntDecoder, {'default': 1048576}),
'nfs_wsize': (option_decoders.IntDecoder, {'default': 1048576}),
'nfs_timeout': (option_decoders.IntDecoder, {'default': 60}),
'nfs_timeout_hard': (option_decoders.BooleanDecoder, {'default': True}),
'nfs_retries': (option_decoders.IntDecoder, {'default': 2}),
'smb_version': (option_decoders.StringDecoder, {'default': '3.0'}),
})
return result
class BaseDisk(resource.BaseResource):
"""Object representing a Base Disk."""
is_striped = False
def __init__(self, disk_spec):
super(BaseDisk, self).__init__()
self.disk_size = disk_spec.disk_size
self.disk_type = disk_spec.disk_type
self.mount_point = disk_spec.mount_point
self.num_striped_disks = disk_spec.num_striped_disks
self.metadata.update({
'type': self.disk_type,
'size': self.disk_size,
'num_stripes': self.num_striped_disks,
})
# Linux related attributes.
self.device_path = disk_spec.device_path
# Windows related attributes.
# The disk number corresponds to the order in which disks were attached to
# the instance. The System Disk has a disk number of 0. Any local disks
# have disk numbers ranging from 1 to the number of local disks on the
# system. Any additional disks that were attached after boot will have
# disk numbers starting at the number of local disks + 1. These disk
# numbers are used in diskpart scripts in order to identify the disks
# that we want to operate on.
self.disk_number = disk_spec.disk_number
@property
def mount_options(self):
"""Returns options to mount a disk.
The default value 'discard' is from the linux VM's MountDisk method.
See `man 8 mount` for usage. For example, returning "ro" will cause the
mount command to be "mount ... -o ro ..." mounting the disk as read only.
"""
opts = DEFAULT_MOUNT_OPTIONS
if FLAGS.mount_options:
opts = ','.join(FLAGS.mount_options)
self.metadata.update({'mount_options': opts})
return opts
@property
def fstab_options(self):
"""Returns options to use in the /etc/fstab entry for this drive.
The default value 'defaults' is from the linux VM's MountDisk method.
See `man fstab` for usage.
"""
opts = DEFAULT_FSTAB_OPTIONS
if FLAGS.fstab_options:
opts = ','.join(FLAGS.fstab_options)
self.metadata.update({'fstab_options': opts})
return opts
@abc.abstractmethod
def Attach(self, vm):
"""Attaches the disk to a VM.
Args:
vm: The BaseVirtualMachine instance to which the disk will be attached.
"""
pass
@abc.abstractmethod
def Detach(self):
"""Detaches the disk from a VM."""
pass
def GetDevicePath(self):
"""Returns the path to the device inside a Linux VM."""
if self.device_path is None:
raise ValueError('device_path is None.')
return self.device_path
def GetDeviceId(self):
"""Return the Windows DeviceId of this disk."""
if self.disk_number is None:
raise ValueError('disk_number is None.')
return r'\\.\PHYSICALDRIVE%s' % self.disk_number
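# Illustrative sketch (assumed flag values): how the mount_options and
# fstab_options properties above turn the --mount_options / --fstab_options
# list flags into the comma-separated string handed to mount(8) and /etc/fstab.
def _ExampleJoinMountOptions():
  flag_list = ['discard', 'nobarrier']  # hypothetical --mount_options value
  return ','.join(flag_list) if flag_list else DEFAULT_MOUNT_OPTIONS
  # -> 'discard,nobarrier'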
class StripedDisk(BaseDisk):
"""Object representing several disks striped together."""
is_striped = True
def __init__(self, disk_spec, disks):
"""Initializes a StripedDisk object.
Args:
disk_spec: A BaseDiskSpec containing the desired mount point.
disks: A list of BaseDisk objects that constitute the StripedDisk.
"""
super(StripedDisk, self).__init__(disk_spec)
self.disks = disks
self.metadata = disks[0].metadata.copy()
if self.disk_size:
self.metadata['size'] = self.disk_size * self.num_striped_disks
def _Create(self):
for disk in self.disks:
disk.Create()
def _Delete(self):
for disk in self.disks:
disk.Delete()
def Attach(self, vm):
for disk in self.disks:
disk.Attach(vm)
def Detach(self):
for disk in self.disks:
disk.Detach()
class RamDisk(BaseDisk):
"""Object representing a Ram Disk."""
def Attach(self, vm):
"""Attaches the disk to a VM.
Args:
vm: The BaseVirtualMachine instance to which the disk will be attached.
"""
pass
def Detach(self):
"""Detaches the disk from a VM."""
pass
def GetDevicePath(self):
"""Returns the path to the device inside a Linux VM."""
return None
def GetDeviceId(self):
"""Return the Windows DeviceId of this disk."""
return None
def _Create(self):
"""Creates the disk."""
pass
def _Delete(self):
"""Deletes the disk."""
pass
class NetworkDisk(BaseDisk):
"""Object representing a Network Disk."""
def __init__(self, disk_spec):
super(NetworkDisk, self).__init__(disk_spec)
super(NetworkDisk, self).GetResourceMetadata()
@abc.abstractmethod
def _GetNetworkDiskMountOptionsDict(self):
"""Default mount options as a dict."""
pass
@property
def mount_options(self):
opts = []
for key, value in sorted(
six.iteritems(self._GetNetworkDiskMountOptionsDict())):
opts.append('%s' % key if value is None else '%s=%s' % (key, value))
return ','.join(opts)
@property
def fstab_options(self):
return self.mount_options
def Detach(self):
self.vm.RemoteCommand('sudo umount %s' % self.mount_point)
def _Create(self):
# handled by the Network Disk service
pass
def _Delete(self):
# handled by the Network Disk service
pass
# TODO(chriswilkes): this adds to the disk.GetDiskSpecClass registry, which
# only has the cloud as the key. Look into using (cloud, disk_type) as the key
# if that causes problems.
class NfsDisk(NetworkDisk):
"""Provides options for mounting NFS drives.
NFS disk should be ready to mount at the time of creation of this disk.
  Args:
    disk_spec: The disk spec.
    remote_mount_address: The host_address:/volume path to the NFS drive.
    default_nfs_version: NFS version to use when the disk spec does not
        specify one.
    nfs_tier: The NFS tier / performance level of the server.
  """
def __init__(self, disk_spec, remote_mount_address, default_nfs_version=None,
nfs_tier=None):
super(NfsDisk, self).__init__(disk_spec)
self.nfs_version = disk_spec.nfs_version or default_nfs_version
self.nfs_timeout_hard = disk_spec.nfs_timeout_hard
self.nfs_rsize = disk_spec.nfs_rsize
self.nfs_wsize = disk_spec.nfs_wsize
self.nfs_timeout = disk_spec.nfs_timeout
self.nfs_retries = disk_spec.nfs_retries
self.device_path = remote_mount_address
for key, value in six.iteritems(self._GetNetworkDiskMountOptionsDict()):
self.metadata['nfs_{}'.format(key)] = value
if nfs_tier:
self.metadata['nfs_tier'] = nfs_tier
super(NfsDisk, self).GetResourceMetadata()
def _GetNetworkDiskMountOptionsDict(self):
"""Default NFS mount options as a dict."""
options = {
'hard' if self.nfs_timeout_hard else 'soft': None,
'rsize': self.nfs_rsize,
'wsize': self.nfs_wsize,
'timeo': self.nfs_timeout * 10, # in decaseconds
'retrans': self.nfs_retries,
}
# the client doesn't have to specify an NFS version to use (but should)
if self.nfs_version:
options['nfsvers'] = self.nfs_version
if FLAGS.nfs_noresvport:
options['noresvport'] = None
return options
def Attach(self, vm):
self.vm = vm
self.vm.Install('nfs_utils')
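# Illustrative sketch: with the BaseDiskSpec defaults above and an assumed NFS
# version of '4.1', NfsDisk's inherited mount_options property renders its
# option dict like this (flag-style options have no '=value' part).
def _ExampleNfsMountOptions():
  options = {'hard': None, 'rsize': 1048576, 'wsize': 1048576,
             'timeo': 600, 'retrans': 2, 'nfsvers': '4.1'}
  return ','.join(key if value is None else '%s=%s' % (key, value)
                  for key, value in sorted(options.items()))
  # -> 'hard,nfsvers=4.1,retrans=2,rsize=1048576,timeo=600,wsize=1048576'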
class SmbDisk(NetworkDisk):
"""Provides options for mounting SMB drives.
SMB disk should be ready to mount at the time of creation of this disk.
  Args:
    disk_spec: The disk spec.
    remote_mount_address: The host_address:/volume path to the SMB drive.
    storage_account_and_key: A dict with 'user' and 'pw' keys used to
        authenticate to the SMB server.
    default_smb_version: Fallback SMB version (currently unused; the disk
        spec's smb_version is always used).
    smb_tier: The SMB tier / performance level of the server.
  """
def __init__(self, disk_spec, remote_mount_address, storage_account_and_key,
default_smb_version=None, smb_tier=None):
super(SmbDisk, self).__init__(disk_spec)
self.smb_version = disk_spec.smb_version
self.device_path = remote_mount_address
self.storage_account_and_key = storage_account_and_key
if smb_tier:
self.metadata['smb_tier'] = smb_tier
def _GetNetworkDiskMountOptionsDict(self):
"""Default SMB mount options as a dict."""
options = {
'vers': self.smb_version,
'username': self.storage_account_and_key['user'],
'password': self.storage_account_and_key['pw'],
'dir_mode': '0777',
'file_mode': '0777',
'serverino': None,
# the following mount option is a suggestion from
# https://docs.microsoft.com/en-us/azure/storage/files/storage-troubleshooting-files-performance#throughput-on-linux-clients-is-significantly-lower-when-compared-to-windows-clients
'nostrictsync': None,
}
return options
def Attach(self, vm):
self.vm = vm
self.vm.InstallPackages('cifs-utils')
|
from pytest import mark
from cerberus.tests import assert_fail, assert_success
@mark.parametrize(
("test_function", "document"),
[(assert_success, {'user': 'alice'}), (assert_fail, {'user': 'admin'})],
)
def test_forbidden(test_function, document):
test_function(schema={'user': {'forbidden': ['root', 'admin']}}, document=document)
@mark.parametrize("document", [{'amount': 0}, {'amount': 0.0}])
def test_forbidden_number(document):
assert_fail(schema={'amount': {'forbidden': (0, 0.0)}}, document=document)
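# Illustrative sketch mirroring the tests above (schema and document values
# are assumptions): the 'forbidden' rule fails validation whenever the field's
# value appears in the forbidden collection.
def _example_forbidden_rule():
    assert_fail(schema={'user': {'forbidden': ['root', 'admin']}},
                document={'user': 'root'})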
|
import os
import time
import json
import getpass
import binascii
import hashlib
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, Qt
from PyQt5.QtNetwork import QLocalSocket, QLocalServer, QAbstractSocket
import qutebrowser
from qutebrowser.utils import log, usertypes, error, standarddir, utils
from qutebrowser.qt import sip
CONNECT_TIMEOUT = 100 # timeout for connecting/disconnecting
WRITE_TIMEOUT = 1000
READ_TIMEOUT = 5000
ATIME_INTERVAL = 5000 * 60 # 5 minutes
PROTOCOL_VERSION = 1
# The ipc server instance
server = None
def _get_socketname_windows(basedir):
"""Get a socketname to use for Windows."""
try:
username = getpass.getuser()
except ImportError:
# getpass.getuser() first tries a couple of environment variables. If
# none of those are set (i.e., USERNAME is missing), it tries to import
# the "pwd" module which is unavailable on Windows.
raise Error("Could not find username. This should only happen if "
"there is a bug in the application launching qutebrowser, "
"preventing the USERNAME environment variable from being "
"passed. If you know more about when this happens, please "
"report this to [email protected].")
parts = ['qutebrowser', username]
if basedir is not None:
md5 = hashlib.md5(basedir.encode('utf-8')).hexdigest()
parts.append(md5)
return '-'.join(parts)
def _get_socketname(basedir):
"""Get a socketname to use."""
if utils.is_windows: # pragma: no cover
return _get_socketname_windows(basedir)
parts_to_hash = [getpass.getuser()]
if basedir is not None:
parts_to_hash.append(basedir)
data_to_hash = '-'.join(parts_to_hash).encode('utf-8')
md5 = hashlib.md5(data_to_hash).hexdigest()
prefix = 'i-' if utils.is_mac else 'ipc-'
filename = '{}{}'.format(prefix, md5)
return os.path.join(standarddir.runtime(), filename)
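# Illustrative sketch (assumed username and basedir, not real output): on
# non-Windows platforms the socket name is an MD5 over 'user' or
# 'user-basedir', prefixed and joined onto the XDG runtime directory.
def _example_socketname():
    data = '-'.join(['alice', '/tmp/basedir']).encode('utf-8')
    return 'ipc-' + hashlib.md5(data).hexdigest()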
class Error(Exception):
"""Base class for IPC exceptions."""
class SocketError(Error):
"""Exception raised when there was an error with a QLocalSocket.
Args:
code: The error code.
message: The error message.
action: The action which was taken when the error happened.
"""
def __init__(self, action, socket):
"""Constructor.
Args:
action: The action which was taken when the error happened.
socket: The QLocalSocket which has the error set.
"""
super().__init__()
self.action = action
self.code = socket.error()
self.message = socket.errorString()
def __str__(self):
return "Error while {}: {} (error {})".format(
self.action, self.message, self.code)
class ListenError(Error):
"""Exception raised when there was a problem with listening to IPC.
Args:
code: The error code.
message: The error message.
"""
def __init__(self, local_server):
"""Constructor.
Args:
local_server: The QLocalServer which has the error set.
"""
super().__init__()
self.code = local_server.serverError()
self.message = local_server.errorString()
def __str__(self):
return "Error while listening to IPC server: {} (error {})".format(
self.message, self.code)
class AddressInUseError(ListenError):
"""Emitted when the server address is already in use."""
class IPCServer(QObject):
"""IPC server to which clients connect to.
Attributes:
ignored: Whether requests are ignored (in exception hook).
_timer: A timer to handle timeouts.
_server: A QLocalServer to accept new connections.
_socket: The QLocalSocket we're currently connected to.
_socketname: The socketname to use.
_atime_timer: Timer to update the atime of the socket regularly.
Signals:
got_args: Emitted when there was an IPC connection and arguments were
passed.
        got_raw: Emitted with the raw data an IPC connection got.
got_invalid_data: Emitted when there was invalid incoming data.
"""
got_args = pyqtSignal(list, str, str)
got_raw = pyqtSignal(bytes)
got_invalid_data = pyqtSignal()
def __init__(self, socketname, parent=None):
"""Start the IPC server and listen to commands.
Args:
socketname: The socketname to use.
parent: The parent to be used.
"""
super().__init__(parent)
self.ignored = False
self._socketname = socketname
self._timer = usertypes.Timer(self, 'ipc-timeout')
self._timer.setInterval(READ_TIMEOUT)
self._timer.timeout.connect(self.on_timeout)
if utils.is_windows: # pragma: no cover
self._atime_timer = None
else:
self._atime_timer = usertypes.Timer(self, 'ipc-atime')
self._atime_timer.setInterval(ATIME_INTERVAL)
self._atime_timer.timeout.connect(self.update_atime)
self._atime_timer.setTimerType(Qt.VeryCoarseTimer)
self._server = QLocalServer(self)
self._server.newConnection.connect( # type: ignore[attr-defined]
self.handle_connection)
self._socket = None
self._old_socket = None
if utils.is_windows: # pragma: no cover
# As a WORKAROUND for a Qt bug, we can't use UserAccessOption on Unix. If we
# do, we don't get an AddressInUseError anymore:
# https://bugreports.qt.io/browse/QTBUG-48635
#
# Thus, we only do so on Windows, and handle permissions manually in
# listen() on Linux.
log.ipc.debug("Calling setSocketOptions")
self._server.setSocketOptions(QLocalServer.UserAccessOption)
else: # pragma: no cover
log.ipc.debug("Not calling setSocketOptions")
def _remove_server(self):
"""Remove an existing server."""
ok = QLocalServer.removeServer(self._socketname)
if not ok:
raise Error("Error while removing server {}!".format(
self._socketname))
def listen(self):
"""Start listening on self._socketname."""
log.ipc.debug("Listening as {}".format(self._socketname))
if self._atime_timer is not None: # pragma: no branch
self._atime_timer.start()
self._remove_server()
ok = self._server.listen(self._socketname)
if not ok:
if self._server.serverError() == QAbstractSocket.AddressInUseError:
raise AddressInUseError(self._server)
raise ListenError(self._server)
if not utils.is_windows: # pragma: no cover
# WORKAROUND for QTBUG-48635, see the comment in __init__ for details.
try:
os.chmod(self._server.fullServerName(), 0o700)
except FileNotFoundError:
# https://github.com/qutebrowser/qutebrowser/issues/1530
# The server doesn't actually exist even if ok was reported as
# True, so report this as an error.
raise ListenError(self._server)
@pyqtSlot('QLocalSocket::LocalSocketError')
def on_error(self, err):
"""Raise SocketError on fatal errors."""
if self._socket is None:
# Sometimes this gets called from stale sockets.
log.ipc.debug("In on_error with None socket!")
return
self._timer.stop()
log.ipc.debug("Socket 0x{:x}: error {}: {}".format(
id(self._socket), self._socket.error(),
self._socket.errorString()))
if err != QLocalSocket.PeerClosedError:
raise SocketError("handling IPC connection", self._socket)
@pyqtSlot()
def handle_connection(self):
"""Handle a new connection to the server."""
if self.ignored:
return
if self._socket is not None:
log.ipc.debug("Got new connection but ignoring it because we're "
"still handling another one (0x{:x}).".format(
id(self._socket)))
return
socket = self._server.nextPendingConnection()
if socket is None:
log.ipc.debug( # type: ignore[unreachable]
"No new connection to handle.")
return
log.ipc.debug("Client connected (socket 0x{:x}).".format(id(socket)))
self._socket = socket
self._timer.start()
socket.readyRead.connect( # type: ignore[attr-defined]
self.on_ready_read)
if socket.canReadLine():
log.ipc.debug("We can read a line immediately.")
self.on_ready_read()
socket.error.connect(self.on_error) # type: ignore[attr-defined]
if socket.error() not in [QLocalSocket.UnknownSocketError,
QLocalSocket.PeerClosedError]:
log.ipc.debug("We got an error immediately.")
self.on_error(socket.error())
socket.disconnected.connect( # type: ignore[attr-defined]
self.on_disconnected)
if socket.state() == QLocalSocket.UnconnectedState:
log.ipc.debug("Socket was disconnected immediately.")
self.on_disconnected()
@pyqtSlot()
def on_disconnected(self):
"""Clean up socket when the client disconnected."""
log.ipc.debug("Client disconnected from socket 0x{:x}.".format(
id(self._socket)))
self._timer.stop()
if self._old_socket is not None:
self._old_socket.deleteLater()
self._old_socket = self._socket
self._socket = None
# Maybe another connection is waiting.
self.handle_connection()
def _handle_invalid_data(self):
"""Handle invalid data we got from a QLocalSocket."""
assert self._socket is not None
log.ipc.error("Ignoring invalid IPC data from socket 0x{:x}.".format(
id(self._socket)))
self.got_invalid_data.emit()
self._socket.error.connect(self.on_error)
self._socket.disconnectFromServer()
def _handle_data(self, data):
"""Handle data (as bytes) we got from on_ready_read."""
try:
decoded = data.decode('utf-8')
except UnicodeDecodeError:
log.ipc.error("invalid utf-8: {!r}".format(binascii.hexlify(data)))
self._handle_invalid_data()
return
log.ipc.debug("Processing: {}".format(decoded))
try:
json_data = json.loads(decoded)
except ValueError:
log.ipc.error("invalid json: {}".format(decoded.strip()))
self._handle_invalid_data()
return
for name in ['args', 'target_arg']:
if name not in json_data:
log.ipc.error("Missing {}: {}".format(name, decoded.strip()))
self._handle_invalid_data()
return
try:
protocol_version = int(json_data['protocol_version'])
except (KeyError, ValueError):
log.ipc.error("invalid version: {}".format(decoded.strip()))
self._handle_invalid_data()
return
if protocol_version != PROTOCOL_VERSION:
log.ipc.error("incompatible version: expected {}, got {}".format(
PROTOCOL_VERSION, protocol_version))
self._handle_invalid_data()
return
args = json_data['args']
target_arg = json_data['target_arg']
if target_arg is None:
# https://www.riverbankcomputing.com/pipermail/pyqt/2016-April/037375.html
target_arg = ''
cwd = json_data.get('cwd', '')
assert cwd is not None
self.got_args.emit(args, target_arg, cwd)
@pyqtSlot()
def on_ready_read(self):
"""Read json data from the client."""
if self._socket is None: # pragma: no cover
# This happens when doing a connection while another one is already
# active for some reason.
if self._old_socket is None:
log.ipc.warning("In on_ready_read with None socket and "
"old_socket!")
return
log.ipc.debug("In on_ready_read with None socket!")
socket = self._old_socket
else:
socket = self._socket
if sip.isdeleted(socket): # pragma: no cover
log.ipc.warning("Ignoring deleted IPC socket")
return
self._timer.stop()
while socket is not None and socket.canReadLine():
data = bytes(socket.readLine())
self.got_raw.emit(data)
log.ipc.debug("Read from socket 0x{:x}: {!r}".format(
id(socket), data))
self._handle_data(data)
if self._socket is not None:
self._timer.start()
@pyqtSlot()
def on_timeout(self):
"""Cancel the current connection if it was idle for too long."""
assert self._socket is not None
log.ipc.error("IPC connection timed out "
"(socket 0x{:x}).".format(id(self._socket)))
self._socket.disconnectFromServer()
if self._socket is not None: # pragma: no cover
# on_socket_disconnected sets it to None
self._socket.waitForDisconnected(CONNECT_TIMEOUT)
if self._socket is not None: # pragma: no cover
# on_socket_disconnected sets it to None
self._socket.abort()
@pyqtSlot()
def update_atime(self):
"""Update the atime of the socket file all few hours.
From the XDG basedir spec:
To ensure that your files are not removed, they should have their
access time timestamp modified at least once every 6 hours of monotonic
time or the 'sticky' bit should be set on the file.
"""
path = self._server.fullServerName()
if not path:
log.ipc.error("In update_atime with no server path!")
return
log.ipc.debug("Touching {}".format(path))
try:
os.utime(path)
except OSError:
log.ipc.exception("Failed to update IPC socket, trying to "
"re-listen...")
self._server.close()
self.listen()
@pyqtSlot()
def shutdown(self):
"""Shut down the IPC server cleanly."""
log.ipc.debug("Shutting down IPC (socket 0x{:x})".format(
id(self._socket)))
if self._socket is not None:
self._socket.deleteLater()
self._socket = None
self._timer.stop()
if self._atime_timer is not None: # pragma: no branch
self._atime_timer.stop()
try:
self._atime_timer.timeout.disconnect(self.update_atime)
except TypeError:
pass
self._server.close()
self._server.deleteLater()
self._remove_server()
def send_to_running_instance(socketname, command, target_arg, *, socket=None):
"""Try to send a commandline to a running instance.
Blocks for CONNECT_TIMEOUT ms.
Args:
socketname: The name which should be used for the socket.
command: The command to send to the running instance.
target_arg: --target command line argument
socket: The socket to read data from, or None.
Return:
True if connecting was successful, False if no connection was made.
"""
if socket is None:
socket = QLocalSocket()
log.ipc.debug("Connecting to {}".format(socketname))
socket.connectToServer(socketname)
connected = socket.waitForConnected(CONNECT_TIMEOUT)
if connected:
log.ipc.info("Opening in existing instance")
json_data = {'args': command, 'target_arg': target_arg,
'version': qutebrowser.__version__,
'protocol_version': PROTOCOL_VERSION}
try:
cwd = os.getcwd()
except OSError:
pass
else:
json_data['cwd'] = cwd
line = json.dumps(json_data) + '\n'
data = line.encode('utf-8')
log.ipc.debug("Writing: {!r}".format(data))
socket.writeData(data)
socket.waitForBytesWritten(WRITE_TIMEOUT)
if socket.error() != QLocalSocket.UnknownSocketError:
raise SocketError("writing to running instance", socket)
socket.disconnectFromServer()
if socket.state() != QLocalSocket.UnconnectedState:
socket.waitForDisconnected(CONNECT_TIMEOUT)
return True
else:
if socket.error() not in [QLocalSocket.ConnectionRefusedError,
QLocalSocket.ServerNotFoundError]:
raise SocketError("connecting to running instance", socket)
log.ipc.debug("No existing instance present (error {})".format(
socket.error()))
return False
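# Illustrative sketch (assumed argument values): the newline-terminated JSON
# line a client writes to the socket; the server's _handle_data() requires the
# 'args', 'target_arg' and 'protocol_version' keys to be present.
def _example_ipc_line():
    json_data = {'args': ['https://www.example.com'], 'target_arg': '',
                 'version': qutebrowser.__version__,
                 'protocol_version': PROTOCOL_VERSION,
                 'cwd': '/home/user'}
    return (json.dumps(json_data) + '\n').encode('utf-8')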
def display_error(exc, args):
"""Display a message box with an IPC error."""
error.handle_fatal_exc(
exc, "Error while connecting to running instance!",
no_err_windows=args.no_err_windows)
def send_or_listen(args):
"""Send the args to a running instance or start a new IPCServer.
Args:
args: The argparse namespace.
Return:
The IPCServer instance if no running instance was detected.
None if an instance was running and received our request.
"""
global server
try:
socketname = _get_socketname(args.basedir)
try:
sent = send_to_running_instance(socketname, args.command,
args.target)
if sent:
return None
log.init.debug("Starting IPC server...")
server = IPCServer(socketname)
server.listen()
return server
except AddressInUseError:
# This could be a race condition...
log.init.debug("Got AddressInUseError, trying again.")
time.sleep(0.5)
sent = send_to_running_instance(socketname, args.command,
args.target)
if sent:
return None
else:
raise
except Error as e:
display_error(e, args)
raise
|
from __future__ import print_function
import pytest
from plumbum import local
from plumbum.cli import Config, ConfigINI
fname = 'test_config.ini'
@pytest.mark.usefixtures('cleandir')
class TestConfig:
def test_makefile(self):
with ConfigINI(fname) as conf:
conf['value'] = 12
conf['string'] = 'ho'
with open(fname) as f:
contents = f.read()
assert 'value = 12' in contents
assert 'string = ho' in contents
def test_readfile(self):
        with open(fname, 'w') as f:
            print('''
[DEFAULT]
one = 1
two = hello''', file=f)
with ConfigINI(fname) as conf:
assert conf['one'] == '1'
assert conf['two'] == 'hello'
def test_complex_ini(self):
with Config(fname) as conf:
conf['value'] = 'normal'
conf['newer.value'] = 'other'
with Config(fname) as conf:
assert conf['value'] == 'normal'
assert conf['DEFAULT.value'] == 'normal'
assert conf['newer.value'] == 'other'
def test_nowith(self):
conf = ConfigINI(fname)
conf['something'] = 'nothing'
conf.write()
with open(fname) as f:
contents = f.read()
assert 'something = nothing' in contents
def test_home(self):
mypath = local.env.home / 'some_simple_home_rc.ini'
assert not mypath.exists()
try:
with Config('~/some_simple_home_rc.ini') as conf:
conf['a'] = 'b'
assert mypath.exists()
mypath.unlink()
with Config(mypath) as conf:
conf['a'] = 'b'
assert mypath.exists()
mypath.unlink()
        finally:
            if mypath.exists():
                mypath.delete()
def test_notouch(self):
conf = ConfigINI(fname)
assert not local.path(fname).exists()
def test_only_string(self):
conf = ConfigINI(fname)
value = conf.get('value', 2)
assert value == '2'
|
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from paasta_tools.paastaapi.model.adhoc_launch_history import AdhocLaunchHistory
globals()['AdhocLaunchHistory'] = AdhocLaunchHistory
class InstanceStatusAdhoc(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
        This must be a method because a model may have properties that are
        of type self; hence it must run after the class is loaded.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'value': ([AdhocLaunchHistory],),
}
@cached_property
def discriminator():
return None
attribute_map = {}
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""InstanceStatusAdhoc - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] ([AdhocLaunchHistory]): List of runs associated with job. # noqa: E501
Keyword Args:
value ([AdhocLaunchHistory]): List of runs associated with job. # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
                              When traveling through a discriminator, the
                              composed schema that is traveled through is
                              added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
        # Pop _path_to_item first; it's needed for the error below if the
        # required 'value' is missing.
        _path_to_item = kwargs.pop('_path_to_item', ())
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
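# Illustrative sketch (assumed usage; instances are normally built by the
# generated API client during deserialization): the model's single 'value' may
# be passed positionally or as the 'value' keyword, but not both.
def _example_instance_status_adhoc(launch_history_list):
    return InstanceStatusAdhoc(value=launch_history_list)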
|
from tests.common import assert_lists_same, async_get_device_automations
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_hue_bridge_setup(hass):
"""Test that a Hue hub can be correctly setup in HA via HomeKit."""
accessories = await setup_accessories_from_file(hass, "hue_bridge.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Check that the battery is correctly found and set up
battery_id = "sensor.hue_dimmer_switch_battery"
battery = entity_registry.async_get(battery_id)
assert battery.unique_id == "homekit-6623462389072572-644245094400"
battery_helper = Helper(
hass, "sensor.hue_dimmer_switch_battery", pairing, accessories[0], config_entry
)
battery_state = await battery_helper.poll_and_get_state()
assert battery_state.attributes["friendly_name"] == "Hue dimmer switch Battery"
assert battery_state.attributes["icon"] == "mdi:battery"
assert battery_state.state == "100"
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(battery.device_id)
assert device.manufacturer == "Philips"
assert device.name == "Hue dimmer switch"
assert device.model == "RWL021"
assert device.sw_version == "45.1.17846"
# The fixture file has 1 dimmer, which is a remote with 4 buttons
# It (incorrectly) claims to support single, double and long press events
# It also has a battery
expected = [
{
"device_id": device.id,
"domain": "sensor",
"entity_id": "sensor.hue_dimmer_switch_battery",
"platform": "device",
"type": "battery_level",
}
]
for button in ("button1", "button2", "button3", "button4"):
for subtype in ("single_press", "double_press", "long_press"):
expected.append(
{
"device_id": device.id,
"domain": "homekit_controller",
"platform": "device",
"type": button,
"subtype": subtype,
}
)
triggers = await async_get_device_automations(hass, "trigger", device.id)
assert_lists_same(triggers, expected)
|
import logging
from kalliope import Utils
from kalliope.signals.order import Order
logging.basicConfig()
logger = logging.getLogger("kalliope")
class SignalLauncher:
# keep a list of instantiated signals
list_launched_signals = list()
def __init__(self):
pass
@classmethod
def launch_signal_class_by_name(cls, signal_name, settings=None):
"""
        Load the signal class from the given name and pass the settings to the signal.
:param signal_name: name of the signal class to load
:param settings: Settings Object
"""
signal_folder = None
if settings.resources:
signal_folder = settings.resources.signal_folder
launched_signal = Utils.get_dynamic_class_instantiation(package_name="signals",
module_name=signal_name,
resources_dir=signal_folder)
cls.add_launched_signals_to_list(launched_signal)
return launched_signal
@classmethod
def add_launched_signals_to_list(cls, signal):
cls.list_launched_signals.append(signal)
@classmethod
def get_launched_signals_list(cls):
return cls.list_launched_signals
@classmethod
def get_order_instance(cls):
"""
        Return the Order instance from the list of launched signals if it exists.
:return:
"""
for signal in cls.list_launched_signals:
if isinstance(signal, Order):
return signal
return None
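# Illustrative sketch (hypothetical signal name and settings object): signals
# are loaded dynamically by name, recorded in list_launched_signals, and the
# Order instance can be fetched back afterwards.
def _example_launch_order_signal(settings):
    SignalLauncher.launch_signal_class_by_name("order", settings=settings)
    return SignalLauncher.get_order_instance()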
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import (
CONF_ACCESS_TOKEN,
HTTP_BAD_REQUEST,
HTTP_CREATED,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_PUSH_SECRET = "push_secret"
DOMAIN = "foursquare"
EVENT_CHECKIN = "foursquare.checkin"
EVENT_PUSH = "foursquare.push"
SERVICE_CHECKIN = "checkin"
CHECKIN_SERVICE_SCHEMA = vol.Schema(
{
vol.Optional("alt"): cv.string,
vol.Optional("altAcc"): cv.string,
vol.Optional("broadcast"): cv.string,
vol.Optional("eventId"): cv.string,
vol.Optional("ll"): cv.string,
vol.Optional("llAcc"): cv.string,
vol.Optional("mentions"): cv.string,
vol.Optional("shout"): cv.string,
vol.Required("venueId"): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Required(CONF_PUSH_SECRET): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Foursquare component."""
config = config[DOMAIN]
def checkin_user(call):
"""Check a user in on Swarm."""
url = f"https://api.foursquare.com/v2/checkins/add?oauth_token={config[CONF_ACCESS_TOKEN]}&v=20160802&m=swarm"
response = requests.post(url, data=call.data, timeout=10)
if response.status_code not in (HTTP_OK, HTTP_CREATED):
_LOGGER.exception(
"Error checking in user. Response %d: %s:",
response.status_code,
response.reason,
)
hass.bus.fire(EVENT_CHECKIN, response.text)
# Register our service with Home Assistant.
    hass.services.register(
        DOMAIN, SERVICE_CHECKIN, checkin_user, schema=CHECKIN_SERVICE_SCHEMA
    )
hass.http.register_view(FoursquarePushReceiver(config[CONF_PUSH_SECRET]))
return True
class FoursquarePushReceiver(HomeAssistantView):
"""Handle pushes from the Foursquare API."""
requires_auth = False
url = "/api/foursquare"
name = "foursquare"
def __init__(self, push_secret):
"""Initialize the OAuth callback view."""
self.push_secret = push_secret
async def post(self, request):
"""Accept the POST from Foursquare."""
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
secret = data.pop("secret", None)
_LOGGER.debug("Received Foursquare push: %s", data)
if self.push_secret != secret:
_LOGGER.error(
"Received Foursquare push with invalid push secret: %s", secret
)
return self.json_message("Incorrect secret", HTTP_BAD_REQUEST)
request.app["hass"].bus.async_fire(EVENT_PUSH, data)
|
from copy import copy
import socket
import unittest
from uuid import uuid4
import homeassistant.components.tcp.sensor as tcp
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.template import Template
from homeassistant.setup import setup_component
from tests.async_mock import Mock, patch
from tests.common import assert_setup_component, get_test_home_assistant
TEST_CONFIG = {
"sensor": {
"platform": "tcp",
tcp.CONF_NAME: "test_name",
tcp.CONF_HOST: "test_host",
tcp.CONF_PORT: 12345,
tcp.CONF_TIMEOUT: tcp.DEFAULT_TIMEOUT + 1,
tcp.CONF_PAYLOAD: "test_payload",
tcp.CONF_UNIT_OF_MEASUREMENT: "test_unit",
tcp.CONF_VALUE_TEMPLATE: Template("test_template"),
tcp.CONF_VALUE_ON: "test_on",
tcp.CONF_BUFFER_SIZE: tcp.DEFAULT_BUFFER_SIZE + 1,
}
}
KEYS_AND_DEFAULTS = {
tcp.CONF_TIMEOUT: tcp.DEFAULT_TIMEOUT,
tcp.CONF_UNIT_OF_MEASUREMENT: None,
tcp.CONF_VALUE_TEMPLATE: None,
tcp.CONF_VALUE_ON: None,
tcp.CONF_BUFFER_SIZE: tcp.DEFAULT_BUFFER_SIZE,
}
class TestTCPSensor(unittest.TestCase):
"""Test the TCP Sensor."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_setup_platform_valid_config(self, mock_update):
"""Check a valid configuration and call add_entities with sensor."""
with assert_setup_component(0, "sensor"):
assert setup_component(self.hass, "sensor", TEST_CONFIG)
add_entities = Mock()
tcp.setup_platform(None, TEST_CONFIG["sensor"], add_entities)
assert add_entities.called
assert isinstance(add_entities.call_args[0][0][0], tcp.TcpSensor)
def test_setup_platform_invalid_config(self):
"""Check an invalid configuration."""
with assert_setup_component(0):
assert setup_component(
self.hass, "sensor", {"sensor": {"platform": "tcp", "porrt": 1234}}
)
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_name(self, mock_update):
"""Return the name if set in the configuration."""
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert sensor.name == TEST_CONFIG["sensor"][tcp.CONF_NAME]
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_name_not_set(self, mock_update):
"""Return the superclass name property if not set in configuration."""
config = copy(TEST_CONFIG["sensor"])
del config[tcp.CONF_NAME]
entity = Entity()
sensor = tcp.TcpSensor(self.hass, config)
assert sensor.name == entity.name
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_state(self, mock_update):
"""Return the contents of _state."""
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
uuid = str(uuid4())
sensor._state = uuid
assert sensor.state == uuid
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_unit_of_measurement(self, mock_update):
"""Return the configured unit of measurement."""
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert (
sensor.unit_of_measurement
== TEST_CONFIG["sensor"][tcp.CONF_UNIT_OF_MEASUREMENT]
)
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_config_valid_keys(self, *args):
"""Store valid keys in _config."""
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
del TEST_CONFIG["sensor"]["platform"]
for key in TEST_CONFIG["sensor"]:
assert key in sensor._config
def test_validate_config_valid_keys(self):
"""Return True when provided with the correct keys."""
with assert_setup_component(0, "sensor"):
assert setup_component(self.hass, "sensor", TEST_CONFIG)
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_config_invalid_keys(self, mock_update):
"""Shouldn't store invalid keys in _config."""
config = copy(TEST_CONFIG["sensor"])
config.update({"a": "test_a", "b": "test_b", "c": "test_c"})
sensor = tcp.TcpSensor(self.hass, config)
for invalid_key in "abc":
assert invalid_key not in sensor._config
def test_validate_config_invalid_keys(self):
"""Test with invalid keys plus some extra."""
config = copy(TEST_CONFIG["sensor"])
config.update({"a": "test_a", "b": "test_b", "c": "test_c"})
with assert_setup_component(0, "sensor"):
assert setup_component(self.hass, "sensor", {"tcp": config})
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_config_uses_defaults(self, mock_update):
"""Check if defaults were set."""
config = copy(TEST_CONFIG["sensor"])
for key in KEYS_AND_DEFAULTS:
del config[key]
with assert_setup_component(1) as result_config:
assert setup_component(self.hass, "sensor", {"sensor": config})
sensor = tcp.TcpSensor(self.hass, result_config["sensor"][0])
for key, default in KEYS_AND_DEFAULTS.items():
assert sensor._config[key] == default
def test_validate_config_missing_defaults(self):
"""Return True when defaulted keys are not provided."""
config = copy(TEST_CONFIG["sensor"])
for key in KEYS_AND_DEFAULTS:
del config[key]
with assert_setup_component(0, "sensor"):
assert setup_component(self.hass, "sensor", {"tcp": config})
def test_validate_config_missing_required(self):
"""Return False when required config items are missing."""
for key in TEST_CONFIG["sensor"]:
if key in KEYS_AND_DEFAULTS:
continue
config = copy(TEST_CONFIG["sensor"])
del config[key]
with assert_setup_component(0, "sensor"):
assert setup_component(self.hass, "sensor", {"tcp": config})
@patch("homeassistant.components.tcp.sensor.TcpSensor.update")
def test_init_calls_update(self, mock_update):
"""Call update() method during __init__()."""
        tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert mock_update.called
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_connects_to_host_and_port(self, mock_select, mock_socket):
"""Connect to the configured host and port."""
tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
mock_socket = mock_socket().__enter__()
assert mock_socket.connect.mock_calls[0][1] == (
(
TEST_CONFIG["sensor"][tcp.CONF_HOST],
TEST_CONFIG["sensor"][tcp.CONF_PORT],
),
)
@patch("socket.socket.connect", side_effect=socket.error())
def test_update_returns_if_connecting_fails(self, *args):
"""Return if connecting to host fails."""
with patch("homeassistant.components.tcp.sensor.TcpSensor.update"):
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert sensor.update() is None
@patch("socket.socket.connect")
@patch("socket.socket.send", side_effect=socket.error())
def test_update_returns_if_sending_fails(self, *args):
"""Return if sending fails."""
with patch("homeassistant.components.tcp.sensor.TcpSensor.update"):
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert sensor.update() is None
@patch("socket.socket.connect")
@patch("socket.socket.send")
@patch("select.select", return_value=(False, False, False))
def test_update_returns_if_select_fails(self, *args):
"""Return if select fails to return a socket."""
with patch("homeassistant.components.tcp.sensor.TcpSensor.update"):
sensor = tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
assert sensor.update() is None
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_sends_payload(self, mock_select, mock_socket):
"""Send the configured payload as bytes."""
tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
mock_socket = mock_socket().__enter__()
mock_socket.send.assert_called_with(
TEST_CONFIG["sensor"][tcp.CONF_PAYLOAD].encode()
)
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_calls_select_with_timeout(self, mock_select, mock_socket):
"""Provide the timeout argument to select."""
tcp.TcpSensor(self.hass, TEST_CONFIG["sensor"])
mock_socket = mock_socket().__enter__()
mock_select.assert_called_with(
[mock_socket], [], [], TEST_CONFIG["sensor"][tcp.CONF_TIMEOUT]
)
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_receives_packet_and_sets_as_state(self, mock_select, mock_socket):
"""Test the response from the socket and set it as the state."""
test_value = "test_value"
mock_socket = mock_socket().__enter__()
mock_socket.recv.return_value = test_value.encode()
config = copy(TEST_CONFIG["sensor"])
del config[tcp.CONF_VALUE_TEMPLATE]
sensor = tcp.TcpSensor(self.hass, config)
assert sensor._state == test_value
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_renders_value_in_template(self, mock_select, mock_socket):
"""Render the value in the provided template."""
test_value = "test_value"
mock_socket = mock_socket().__enter__()
mock_socket.recv.return_value = test_value.encode()
config = copy(TEST_CONFIG["sensor"])
config[tcp.CONF_VALUE_TEMPLATE] = Template("{{ value }} {{ 1+1 }}")
sensor = tcp.TcpSensor(self.hass, config)
assert sensor._state == "%s 2" % test_value
@patch("socket.socket")
@patch("select.select", return_value=(True, False, False))
def test_update_returns_if_template_render_fails(self, mock_select, mock_socket):
"""Return None if rendering the template fails."""
test_value = "test_value"
mock_socket = mock_socket().__enter__()
mock_socket.recv.return_value = test_value.encode()
config = copy(TEST_CONFIG["sensor"])
config[tcp.CONF_VALUE_TEMPLATE] = Template("{{ this won't work")
sensor = tcp.TcpSensor(self.hass, config)
assert sensor.update() is None
|
from typing import Any
from xknx.devices import Scene as XknxScene
from homeassistant.components.scene import Scene
from .const import DOMAIN
from .knx_entity import KnxEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the scenes for KNX platform."""
entities = []
for device in hass.data[DOMAIN].xknx.devices:
if isinstance(device, XknxScene):
entities.append(KNXScene(device))
async_add_entities(entities)
class KNXScene(KnxEntity, Scene):
"""Representation of a KNX scene."""
def __init__(self, device: XknxScene):
"""Init KNX scene."""
super().__init__(device)
async def async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()
|
from unittest.mock import PropertyMock, patch
import homeassistant.components.image_processing as ip
import homeassistant.components.microsoft_face as mf
from homeassistant.const import ATTR_ENTITY_PICTURE, STATE_UNKNOWN
from homeassistant.core import callback
from homeassistant.setup import setup_component
from tests.common import (
assert_setup_component,
get_test_home_assistant,
load_fixture,
mock_coro,
)
from tests.components.image_processing import common
class TestMicrosoftFaceIdentifySetup:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=mock_coro(),
)
def test_setup_platform(self, store_mock):
"""Set up platform with one entity."""
config = {
ip.DOMAIN: {
"platform": "microsoft_face_identify",
"source": {"entity_id": "camera.demo_camera"},
"group": "Test Group1",
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.microsoftface_demo_camera")
@patch(
"homeassistant.components.microsoft_face.MicrosoftFace.update_store",
return_value=mock_coro(),
)
def test_setup_platform_name(self, store_mock):
"""Set up platform with one entity and set name."""
config = {
ip.DOMAIN: {
"platform": "microsoft_face_identify",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"group": "Test Group1",
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.test_local")
class TestMicrosoftFaceIdentify:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.config = {
ip.DOMAIN: {
"platform": "microsoft_face_identify",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"group": "Test Group1",
},
"camera": {"platform": "demo"},
mf.DOMAIN: {"api_key": "12345678abcdef6"},
}
self.endpoint_url = f"https://westus.{mf.FACE_API_URL}"
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch(
"homeassistant.components.microsoft_face_identify.image_processing."
"MicrosoftFaceIdentifyEntity.should_poll",
new_callable=PropertyMock(return_value=False),
)
def test_ms_identify_process_image(self, poll_mock, aioclient_mock):
"""Set up and scan a picture and test plates from event."""
aioclient_mock.get(
self.endpoint_url.format("persongroups"),
text=load_fixture("microsoft_face_persongroups.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group1/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
aioclient_mock.get(
self.endpoint_url.format("persongroups/test_group2/persons"),
text=load_fixture("microsoft_face_persons.json"),
)
setup_component(self.hass, ip.DOMAIN, self.config)
self.hass.block_till_done()
state = self.hass.states.get("camera.demo_camera")
url = f"{self.hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}"
face_events = []
@callback
def mock_face_event(event):
"""Mock event."""
face_events.append(event)
self.hass.bus.listen("image_processing.detect_face", mock_face_event)
aioclient_mock.get(url, content=b"image")
aioclient_mock.post(
self.endpoint_url.format("detect"),
text=load_fixture("microsoft_face_detect.json"),
)
aioclient_mock.post(
self.endpoint_url.format("identify"),
text=load_fixture("microsoft_face_identify.json"),
)
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
assert len(face_events) == 1
assert state.attributes.get("total_faces") == 2
assert state.state == "David"
assert face_events[0].data["name"] == "David"
assert face_events[0].data["confidence"] == float(92)
assert face_events[0].data["entity_id"] == "image_processing.test_local"
# Test that later, if a request is made that results in no face
# being detected, that this is reflected in the state object
aioclient_mock.clear_requests()
aioclient_mock.post(self.endpoint_url.format("detect"), text="[]")
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
# No more face events were fired
assert len(face_events) == 1
# Total faces and actual qualified number of faces reset to zero
assert state.attributes.get("total_faces") == 0
assert state.state == STATE_UNKNOWN
|
from collections import OrderedDict
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.forms import fields, models, widgets
from django.utils.translation import gettext_lazy as _
from django_fsm import RETURN_VALUE
from shop.models.notification import Notify, Notification, NotificationAttachment
from shop.models.order import OrderModel
class NotificationAttachmentAdmin(admin.TabularInline):
model = NotificationAttachment
extra = 0
class NotificationForm(models.ModelForm):
notify_recipient = fields.ChoiceField(label=_("Recipient"))
class Meta:
model = Notification
exclude = ['notify', 'recipient']
widgets = {
'transition_target': widgets.Select(),
'notify_recipient': widgets.Select(),
}
def __init__(self, *args, **kwargs):
if kwargs.get('instance'):
initial = kwargs.get('initial', {})
if kwargs['instance'].notify is Notify.RECIPIENT:
initial['notify_recipient'] = kwargs['instance'].recipient_id
else:
initial['notify_recipient'] = kwargs['instance'].notify.name
kwargs.update(initial=initial)
super().__init__(*args, **kwargs)
self.fields['transition_target'].widget.choices = self.get_transition_choices()
self.fields['notify_recipient'].choices = self.get_recipient_choices()
def get_transition_choices(self):
choices = OrderedDict()
for transition in OrderModel.get_all_transitions():
if isinstance(transition.target, str):
choices[transition.target] = OrderModel.get_transition_name(transition.target)
elif isinstance(transition.target, RETURN_VALUE):
for target in transition.target.allowed_states:
choices[target] = OrderModel.get_transition_name(target)
return choices.items()
def get_recipient_choices(self):
"""
        Instead of offering one field for the recipient and one for whom to
        notify, we merge staff users with the context-dependent recipients.
"""
choices = [(n.name, str(n)) for n in Notify if n is not Notify.RECIPIENT]
for user in get_user_model().objects.filter(is_staff=True):
email = '{0} <{1}>'.format(user.get_full_name(), user.email)
choices.append((user.id, email))
return choices
def save(self, commit=True):
obj = super().save(commit=commit)
try:
obj.recipient = get_user_model().objects.get(pk=self.cleaned_data['notify_recipient'])
obj.notify = Notify.RECIPIENT
except (ValueError, get_user_model().DoesNotExist):
obj.recipient = None
obj.notify = getattr(Notify, self.cleaned_data['notify_recipient'], Notify.NOBODY)
return obj
@admin.register(Notification)
class NotificationAdmin(admin.ModelAdmin):
list_display = ['name', 'transition_name', 'get_recipient', 'mail_template', 'num_attachments']
inlines = (NotificationAttachmentAdmin,)
form = NotificationForm
save_as = True
def transition_name(self, obj):
return OrderModel.get_transition_name(obj.transition_target)
transition_name.short_description = _("Event")
def num_attachments(self, obj):
return obj.notificationattachment_set.count()
num_attachments.short_description = _("Attachments")
def get_recipient(self, obj):
if obj.notify is Notify.RECIPIENT:
return '{0} <{1}>'.format(obj.recipient.get_full_name(), obj.recipient.email)
else:
return str(obj.notify)
get_recipient.short_description = _("Mail Recipient")
|
import os
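# A test function defined twice in one module silently shadows the earlier
# definition, so only the last copy would run; this meta-test guards
# against that.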
def test_that_tests_dont_have_multiple_functions_with_same_name():
dir = os.path.dirname(__file__)
for fname in os.listdir(dir):
if not (fname.startswith('test_') and fname.endswith('.py')):
continue
print(fname)
text = open(os.path.join(dir, fname), 'rb').read().decode()
func_names = set()
for line in text.splitlines():
line = line.split('(')[0].strip()
if line.startswith('def '):
func_name = line[4:]
if func_name.startswith('test_'):
print(func_name)
assert func_name not in func_names, (fname, func_name)
func_names.add(func_name)
if __name__ == '__main__':
test_that_tests_dont_have_multiple_functions_with_same_name()
|
import os.path as op
import numpy as np
from scipy.signal import welch, coherence, unit_impulse
from matplotlib import pyplot as plt
import mne
from mne.simulation import simulate_raw, add_noise
from mne.datasets import sample
from mne.minimum_norm import make_inverse_operator, apply_inverse
from mne.time_frequency import csd_morlet
from mne.beamformer import make_dics, apply_dics_csd
# We use the MEG and MRI setup from the MNE-sample dataset
data_path = sample.data_path(download=False)
subjects_dir = op.join(data_path, 'subjects')
# Filenames for various files we'll be using
meg_path = op.join(data_path, 'MEG', 'sample')
raw_fname = op.join(meg_path, 'sample_audvis_raw.fif')
fwd_fname = op.join(meg_path, 'sample_audvis-meg-eeg-oct-6-fwd.fif')
cov_fname = op.join(meg_path, 'sample_audvis-cov.fif')
fwd = mne.read_forward_solution(fwd_fname)
# Seed for the random number generator
rand = np.random.RandomState(42)
###############################################################################
# Data simulation
# ---------------
#
# The following function generates a timeseries that contains an oscillator,
# whose frequency fluctuates a little over time, but stays close to 10 Hz.
# We'll use this function to generate our two signals.
sfreq = 50. # Sampling frequency of the generated signal
n_samp = int(round(10. * sfreq))
times = np.arange(n_samp) / sfreq # 10 seconds of signal
n_times = len(times)
def coh_signal_gen():
"""Generate an oscillating signal.
Returns
-------
signal : ndarray
The generated signal.
"""
t_rand = 0.001 # Variation in the instantaneous frequency of the signal
std = 0.1 # Std-dev of the random fluctuations added to the signal
base_freq = 10. # Base frequency of the oscillators in Hertz
n_times = len(times)
# Generate an oscillator with varying frequency and phase lag.
signal = np.sin(2.0 * np.pi *
(base_freq * np.arange(n_times) / sfreq +
np.cumsum(t_rand * rand.randn(n_times))))
# Add some random fluctuations to the signal.
signal += std * rand.randn(n_times)
# Scale the signal to be in the right order of magnitude (~100 nAm)
# for MEG data.
signal *= 100e-9
return signal
###############################################################################
# Let's simulate two timeseries and plot some basic information about them.
signal1 = coh_signal_gen()
signal2 = coh_signal_gen()
fig, axes = plt.subplots(2, 2, figsize=(8, 4))
# Plot the timeseries
ax = axes[0][0]
ax.plot(times, 1e9 * signal1, lw=0.5)
ax.set(xlabel='Time (s)', xlim=times[[0, -1]], ylabel='Amplitude (nAm)',
title='Signal 1')
ax = axes[0][1]
ax.plot(times, 1e9 * signal2, lw=0.5)
ax.set(xlabel='Time (s)', xlim=times[[0, -1]], title='Signal 2')
# Power spectrum of the first timeseries
f, p = welch(signal1, fs=sfreq, nperseg=128, nfft=256)
ax = axes[1][0]
# Only plot the first 100 frequencies
ax.plot(f[:100], 20 * np.log10(p[:100]), lw=1.)
ax.set(xlabel='Frequency (Hz)', xlim=f[[0, 99]],
ylabel='Power (dB)', title='Power spectrum of signal 1')
# Compute the coherence between the two timeseries
f, coh = coherence(signal1, signal2, fs=sfreq, nperseg=100, noverlap=64)
ax = axes[1][1]
ax.plot(f[:50], coh[:50], lw=1.)
ax.set(xlabel='Frequency (Hz)', xlim=f[[0, 49]], ylabel='Coherence',
title='Coherence between the timeseries')
fig.tight_layout()
###############################################################################
# Now we put the signals at two locations on the cortex. We construct a
# :class:`mne.SourceEstimate` object to store them in.
#
# The timeseries will have a part where the signal is active and a part where
# it is not. The techniques we'll be using in this tutorial depend on being
# able to contrast data that contains the signal of interest versus data that
# does not (i.e. it contains only noise).
# The locations on the cortex where the signal will originate from. These
# locations are indicated as vertex numbers.
vertices = [[146374], [33830]]
# Construct SourceEstimates that describe the signals at the cortical level.
data = np.vstack((signal1, signal2))
stc_signal = mne.SourceEstimate(
data, vertices, tmin=0, tstep=1. / sfreq, subject='sample')
stc_noise = stc_signal * 0.
###############################################################################
# Before we simulate the sensor-level data, let's define a signal-to-noise
# ratio. You are encouraged to play with this parameter and see the effect of
# noise on our results.
snr = 1. # Signal-to-noise ratio. Decrease to add more noise.
###############################################################################
# Now we run the signal through the forward model to obtain simulated sensor
# data. To save computation time, we'll only simulate gradiometer data. You can
# try simulating other types of sensors as well.
#
# Some noise is added based on the baseline noise covariance matrix from the
# sample dataset, scaled to implement the desired SNR.
# Read the info from the sample dataset. This defines the location of the
# sensors and such.
info = mne.io.read_info(raw_fname)
info.update(sfreq=sfreq, bads=[])
# Only use gradiometers
picks = mne.pick_types(info, meg='grad', stim=True, exclude=())
mne.pick_info(info, picks, copy=False)
# Define a covariance matrix for the simulated noise. In this tutorial, we use
# a simple diagonal matrix.
cov = mne.cov.make_ad_hoc_cov(info)
cov['data'] *= (20. / snr) ** 2 # Scale the noise to achieve the desired SNR
# Simulate the raw data, with a lowpass filter on the noise
stcs = [(stc_signal, unit_impulse(n_samp, dtype=int) * 1),
(stc_noise, unit_impulse(n_samp, dtype=int) * 2)] # stacked in time
duration = (len(stc_signal.times) * 2) / sfreq
raw = simulate_raw(info, stcs, forward=fwd)
add_noise(raw, cov, iir_filter=[4, -4, 0.8], random_state=rand)
###############################################################################
# We create an :class:`mne.Epochs` object containing two trials: one with
# both noise and signal and one with just noise
events = mne.find_events(raw, initial_event=True)
tmax = (len(stc_signal.times) - 1) / sfreq
epochs = mne.Epochs(raw, events, event_id=dict(signal=1, noise=2),
tmin=0, tmax=tmax, baseline=None, preload=True)
assert len(epochs) == 2 # ensure that we got the two expected events
# Plot some of the channels of the simulated data that are situated above one
# of our simulated sources.
picks = mne.pick_channels(epochs.ch_names, mne.read_selection('Left-frontal'))
epochs.plot(picks=picks)
###############################################################################
# Power mapping
# -------------
# With our simulated dataset ready, we can now pretend to be researchers that
# have just recorded this from a real subject and are going to study what parts
# of the brain communicate with each other.
#
# First, we'll create a source estimate of the MEG data. We'll use both a
# straightforward MNE-dSPM inverse solution for this, and the DICS beamformer
# which is specifically designed to work with oscillatory data.
###############################################################################
# Computing the inverse using MNE-dSPM:
# Compute the inverse operator
fwd = mne.read_forward_solution(fwd_fname)
inv = make_inverse_operator(epochs.info, fwd, cov)
# Apply the inverse model to the trial that also contains the signal.
s = apply_inverse(epochs['signal'].average(), inv)
# Take the root-mean square along the time dimension and plot the result.
s_rms = np.sqrt((s ** 2).mean())
title = 'MNE-dSPM inverse (RMS)'
brain = s_rms.plot('sample', subjects_dir=subjects_dir, hemi='both', figure=1,
size=600, time_label=title, title=title)
# Indicate the true locations of the source activity on the plot.
brain.add_foci(vertices[0][0], coords_as_verts=True, hemi='lh')
brain.add_foci(vertices[1][0], coords_as_verts=True, hemi='rh')
# Rotate the view and add a title.
brain.show_view(view={'azimuth': 0, 'elevation': 0, 'distance': 550,
'focalpoint': [0, 0, 0]})
###############################################################################
# We will now compute the cortical power map at 10 Hz using a DICS beamformer.
# A beamformer will construct for each vertex a spatial filter that aims to
# pass activity originating from the vertex, while dampening activity from
# other sources as much as possible.
#
# The :func:`mne.beamformer.make_dics` function has many switches that offer
# precise control
# over the way the filter weights are computed. Currently, there is no clear
# consensus regarding the best approach. This is why we will demonstrate two
# approaches here:
#
# 1. The approach as described in :footcite:`vanVlietEtAl2018`, which first
# normalizes the forward solution and computes a vector beamformer.
# 2. The scalar beamforming approach based on
# :footcite:`SekiharaNagarajan2008`, which uses weight normalization
# instead of normalizing the forward solution.
# Estimate the cross-spectral density (CSD) matrix on the trial containing the
# signal.
csd_signal = csd_morlet(epochs['signal'], frequencies=[10])
# Compute the spatial filters for each vertex, using two approaches.
filters_approach1 = make_dics(
info, fwd, csd_signal, reg=0.05, pick_ori='max-power', depth=1.,
inversion='single', weight_norm=None)
print(filters_approach1)
filters_approach2 = make_dics(
info, fwd, csd_signal, reg=0.05, pick_ori='max-power', depth=None,
inversion='matrix', weight_norm='unit-noise-gain')
print(filters_approach2)
# You can save these to disk with:
# filters_approach1.save('filters_1-dics.h5')
# Compute the DICS power map by applying the spatial filters to the CSD matrix.
power_approach1, f = apply_dics_csd(csd_signal, filters_approach1)
power_approach2, f = apply_dics_csd(csd_signal, filters_approach2)
###############################################################################
# Plot the DICS power maps for both approaches, starting with the first:
def plot_approach(power, n):
"""Plot the results on a brain."""
title = 'DICS power map, approach %d' % n
    brain = power.plot(
'sample', subjects_dir=subjects_dir, hemi='both',
size=600, time_label=title, title=title)
# Indicate the true locations of the source activity on the plot.
brain.add_foci(vertices[0][0], coords_as_verts=True, hemi='lh', color='b')
brain.add_foci(vertices[1][0], coords_as_verts=True, hemi='rh', color='b')
# Rotate the view and add a title.
brain.show_view(view={'azimuth': 0, 'elevation': 0, 'distance': 550,
'focalpoint': [0, 0, 0]})
return brain
brain1 = plot_approach(power_approach1, 1)
###############################################################################
# Now the second:
brain2 = plot_approach(power_approach2, 2)
###############################################################################
# Excellent! All methods found our two simulated sources. Of course, with a
# signal-to-noise ratio (SNR) of 1, it isn't very hard to find them. You can
# try playing with the SNR and see how the MNE-dSPM and DICS approaches hold up
# in the presence of increasing noise. In the presence of more noise, you may
# need to increase the regularization parameter of the DICS beamformer.
#
# References
# ----------
# .. footbibliography::
|
from decouple import Csv
def test_csv():
csv = Csv()
assert ['127.0.0.1', '.localhost', '.herokuapp.com'] == \
csv('127.0.0.1, .localhost, .herokuapp.com')
csv = Csv(int)
assert [1, 2, 3, 4, 5] == csv('1,2,3,4,5')
csv = Csv(post_process=tuple)
assert ('HTTP_X_FORWARDED_PROTO', 'https') == \
csv('HTTP_X_FORWARDED_PROTO, https')
csv = Csv(cast=lambda s: s.upper(), delimiter='\t', strip=' %*')
assert ['VIRTUAL_ENV', 'IMPORTANT STUFF', 'TRAILING SPACES'] == \
csv('%virtual_env%\t *important stuff*\t trailing spaces ')
def test_csv_quoted_parse():
csv = Csv()
assert ['foo', 'bar, baz', 'qux'] == csv(""" foo ,'bar, baz', 'qux'""")
assert ['foo', 'bar, baz', 'qux'] == csv(''' foo ,"bar, baz", "qux"''')
assert ['foo', "'bar, baz'", "'qux"] == csv(''' foo ,"'bar, baz'", "'qux"''')
assert ['foo', '"bar, baz"', '"qux'] == csv(""" foo ,'"bar, baz"', '"qux'""")
|
from datetime import timedelta, datetime as dt
from time import strptime
import pytz
from pytz import utc
from dateutil.tz import gettz, tzoffset
DATE_FORMAT = '%m/%d/%Y'
DATETIME_FORMAT = '{0} %H:%M:%S'.format(DATE_FORMAT)
TIMEOUT = 60 * 60 * 1
HALF_DAY = 60 * 60 * 12
TODAY = dt.utcnow()
def gen_tzinfos():
for zone in pytz.common_timezones:
try:
tzdate = pytz.timezone(zone).localize(dt.utcnow(), is_dst=None)
except pytz.NonExistentTimeError:
pass
else:
tzinfo = gettz(zone)
if tzinfo:
yield tzdate.tzname(), tzinfo
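# Typical usage (a sketch; feeding dateutil's parser is an assumption here):
#   from dateutil.parser import parse
#   TZINFOS = dict(gen_tzinfos())
#   parse('2020-01-01 12:00 PST', tzinfos=TZINFOS)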
def get_date(unit, count, op):
new_month = op(TODAY.month, count) % 12 or 12
DATES = {
'seconds': op(TODAY, timedelta(seconds=count)),
'minutes': op(TODAY, timedelta(minutes=count)),
'hours': op(TODAY, timedelta(hours=count)),
'days': op(TODAY, timedelta(days=count)),
'weeks': op(TODAY, timedelta(weeks=count)),
'months': TODAY.replace(month=new_month),
'years': TODAY.replace(year=op(TODAY.year, count)),
}
return DATES[unit]
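# Illustrative usage (assumes the caller supplies operator.add or operator.sub):
#   get_date('days', 3, operator.add)    # three days from now
#   get_date('months', 1, operator.sub)  # previous month, year unchanged
# Note that the 'months' and 'years' cases use datetime.replace(), which can
# raise ValueError when the target month has fewer days than TODAY.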
def normalize_date(date):
try:
# See if date is a `time.struct_time`
# if so, convert it and account for leapseconds
tt, date = date, dt(*date[:5] + (min(date[5], 59),))
except TypeError:
pass
else:
is_dst = None if tt[8] == -1 else tt[8]
try:
tm_zone = tt.tm_zone
except AttributeError:
tm_zone = None
tm_gmtoff = None
else:
tm_gmtoff = tt.tm_gmtoff
if tm_zone:
date = pytz.timezone(tm_zone).localize(date, is_dst=is_dst)
elif tm_gmtoff:
offset = tzoffset(None, tm_gmtoff)
            date = date.replace(tzinfo=offset)
# Set timezone to UTC
try:
tzdate = date.astimezone(utc) if date.tzinfo else utc.localize(date)
except AttributeError:
tzdate = date
return tzdate
def get_tt(date):
formatted = ''.join(date.isoformat().rsplit(':', 1))
sformat = '%Y-%m-%d' if len(formatted) == 10 else '%Y-%m-%dT%H:%M:%S%z'
try:
tt = strptime(formatted, sformat)
except ValueError:
tt = strptime(formatted[:19], '%Y-%m-%dT%H:%M:%S')
return tt
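if __name__ == '__main__':
    # Minimal usage sketch of the helpers above (illustrative only).
    from time import gmtime
    print(normalize_date(gmtime()))              # struct_time -> aware UTC datetime
    print(get_tt(utc.localize(dt(2020, 1, 1))))  # aware datetime -> time.struct_time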
|
import os
import time
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import patch
from diamond.collector import Collector
from processresources import ProcessResourcesCollector
##########################################################################
def run_only_if_psutil_is_available(func):
try:
import psutil
except ImportError:
psutil = None
return run_only(func, lambda: psutil is not None)
class TestProcessResourcesCollector(CollectorTestCase):
TEST_CONFIG = {
'interval': 10,
'process': {
'postgres': {
                'exe': '^\/usr\/lib\/postgresql\/\d+\.\d+\/bin\/postgres',
'name': ['postgres', 'pg'],
},
'foo': {
'exe': '^\/usr\/bin\/foo',
},
'bar': {
'name': '^bar',
},
'barexe': {
'exe': 'bar$'
},
'noprocess': {
'name': 'noproc',
'count_workers': 'true'
},
'diamond-selfmon': {
'selfmon': 'true',
}
}
}
SELFMON_PID = 10001 # used for selfmonitoring
def setUp(self):
config = get_collector_config('ProcessResourcesCollector',
self.TEST_CONFIG)
self.collector = ProcessResourcesCollector(config, None)
def test_import(self):
self.assertTrue(ProcessResourcesCollector)
@run_only_if_psutil_is_available
@patch.object(time, 'time')
@patch.object(os, 'getpid')
@patch.object(Collector, 'publish')
def test(self, publish_mock, getpid_mock, time_mock):
process_info_list = [
# postgres processes
{
'exe': '/usr/lib/postgresql/9.1/bin/postgres',
'name': 'postgres',
'pid': 1427,
'rss': 1000000,
'vms': 1000000
},
{
'exe': '',
'name': 'postgres: writer process ',
'pid': 1445,
'rss': 100000,
'vms': 200000
},
{
'exe': '',
'name': 'postgres: wal writer process ',
'pid': 1446,
'rss': 10000,
'vms': 20000
},
{
'exe': '',
'name': 'postgres: autovacuum launcher process ',
'pid': 1447,
'rss': 1000,
'vms': 2000
},
{
'exe': '',
'name': 'postgres: stats collector process ',
'pid': 1448,
'rss': 100,
'vms': 200},
# postgres-y process
{
'exe': '',
'name': 'posgre: not really',
'pid': 9999,
'rss': 10,
'vms': 20,
},
{
'exe': '/usr/bin/foo',
'name': 'bar',
'pid': 9998,
'rss': 1,
'vms': 1
},
{
'exe': '',
'name': 'barein',
'pid': 9997,
'rss': 2,
'vms': 2
},
{
'exe': '/usr/bin/bar',
'name': '',
'pid': 9996,
'rss': 3,
'vms': 3,
},
# diamond self mon process
{
'exe': 'DUMMY',
'name': 'DUMMY',
'pid': self.SELFMON_PID,
'rss': 1234,
'vms': 4,
},
]
class ProcessMock:
def __init__(self, pid, name, rss, vms, exe=None):
self.pid = pid
self.name = name
self.rss = rss
self.vms = vms
if exe is not None:
self.exe = exe
self.cmdline = [self.exe]
self.create_time = 0
def as_dict(self, attrs=None, ad_value=None):
from collections import namedtuple
meminfo = namedtuple('meminfo', 'rss vms')
ext_meminfo = namedtuple('meminfo',
'rss vms shared text lib data dirty')
cputimes = namedtuple('cputimes', 'user system')
thread = namedtuple('thread', 'id user_time system_time')
user = namedtuple('user', 'real effective saved')
group = namedtuple('group', 'real effective saved')
ionice = namedtuple('ionice', 'ioclass value')
amount = namedtuple('amount', 'voluntary involuntary')
return {
'status': 'sleeping',
'num_ctx_switches': amount(voluntary=2243, involuntary=221),
'pid': self.pid,
'connections': None,
'cmdline': [self.exe],
'create_time': 0,
'ionice': ionice(ioclass=0, value=0),
'num_fds': 10,
'memory_maps': None,
'cpu_percent': 0.0,
'terminal': None,
'ppid': 0,
'cwd': None,
'nice': 0,
'username': 'root',
'cpu_times': cputimes(user=0.27, system=1.05),
'io_counters': None,
'memory_info_ex': ext_meminfo(rss=self.rss,
vms=self.vms,
shared=1310720,
text=188416,
lib=0,
data=868352,
dirty=0),
'threads': [thread(id=1, user_time=0.27, system_time=1.04)],
'open_files': None,
'name': self.name,
'num_threads': 1,
'exe': self.exe,
'uids': user(real=0, effective=0, saved=0),
'gids': group(real=0, effective=0, saved=0),
'cpu_affinity': [0, 1, 2, 3],
'memory_percent': 0.03254831000922748,
'memory_info': meminfo(rss=self.rss, vms=self.vms)}
process_iter_mock = (ProcessMock(
pid=x['pid'],
name=x['name'],
rss=x['rss'],
vms=x['vms'],
exe=x['exe'])
for x in process_info_list)
time_mock.return_value = 1234567890
getpid_mock.return_value = self.SELFMON_PID
patch_psutil_process_iter = patch('psutil.process_iter',
return_value=process_iter_mock)
patch_psutil_process_iter.start()
self.collector.collect()
patch_psutil_process_iter.stop()
self.assertPublished(publish_mock, 'foo.uptime', 1234567890)
self.assertPublished(publish_mock, 'foo.num_fds', 10)
self.assertPublished(publish_mock, 'postgres.memory_info_ex.rss',
1000000 + 100000 + 10000 + 1000 + 100)
self.assertPublished(publish_mock, 'foo.memory_info_ex.rss', 1)
self.assertPublished(publish_mock, 'bar.memory_info_ex.rss', 3)
self.assertPublished(publish_mock, 'barexe.memory_info_ex.rss', 3)
self.assertPublished(publish_mock,
'diamond-selfmon.memory_info_ex.rss', 1234)
self.assertPublished(publish_mock, 'noprocess.workers_count', 0)
self.assertUnpublished(publish_mock, 'noprocess.uptime', 0)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import difflib
import os
from gi.repository import Gdk, Gio, GLib, Gtk, GtkSource
from meld.conf import _
from meld.iohelpers import prompt_save_filename
from meld.misc import error_dialog
from meld.settings import get_meld_settings
from meld.sourceview import LanguageManager
@Gtk.Template(resource_path='/org/gnome/meld/ui/patch-dialog.ui')
class PatchDialog(Gtk.Dialog):
__gtype_name__ = "PatchDialog"
left_radiobutton = Gtk.Template.Child("left_radiobutton")
reverse_checkbutton = Gtk.Template.Child("reverse_checkbutton")
right_radiobutton = Gtk.Template.Child("right_radiobutton")
side_selection_box = Gtk.Template.Child("side_selection_box")
side_selection_label = Gtk.Template.Child("side_selection_label")
textview: Gtk.TextView = Gtk.Template.Child("textview")
def __init__(self, filediff):
super().__init__()
self.set_transient_for(filediff.get_toplevel())
self.filediff = filediff
buf = GtkSource.Buffer()
self.textview.set_buffer(buf)
lang = LanguageManager.get_language_from_mime_type("text/x-diff")
buf.set_language(lang)
buf.set_highlight_syntax(True)
self.index_map = {self.left_radiobutton: (0, 1),
self.right_radiobutton: (1, 2)}
self.left_patch = True
self.reverse_patch = self.reverse_checkbutton.get_active()
if self.filediff.num_panes < 3:
self.side_selection_label.hide()
self.side_selection_box.hide()
meld_settings = get_meld_settings()
self.textview.modify_font(meld_settings.font)
self.textview.set_editable(False)
meld_settings.connect('changed', self.on_setting_changed)
def on_setting_changed(self, settings, key):
if key == "font":
self.textview.modify_font(settings.font)
@Gtk.Template.Callback()
def on_buffer_selection_changed(self, radiobutton):
if not radiobutton.get_active():
return
self.left_patch = radiobutton == self.left_radiobutton
self.update_patch()
@Gtk.Template.Callback()
def on_reverse_checkbutton_toggled(self, checkbutton):
self.reverse_patch = checkbutton.get_active()
self.update_patch()
def update_patch(self):
indices = (0, 1)
if not self.left_patch:
indices = (1, 2)
if self.reverse_patch:
indices = (indices[1], indices[0])
texts = []
for b in self.filediff.textbuffer:
start, end = b.get_bounds()
text = b.get_text(start, end, False)
lines = text.splitlines(True)
# Ensure that the last line ends in a newline
barelines = text.splitlines(False)
if barelines and lines and barelines[-1] == lines[-1]:
# Final line lacks a line-break; add in a best guess
if len(lines) > 1:
previous_linebreak = lines[-2][len(barelines[-2]):]
else:
previous_linebreak = "\n"
lines[-1] += previous_linebreak
texts.append(lines)
names = [self.filediff.textbuffer[i].data.label for i in range(3)]
prefix = os.path.commonprefix(names)
names = [n[prefix.rfind("/") + 1:] for n in names]
buf = self.textview.get_buffer()
text0, text1 = texts[indices[0]], texts[indices[1]]
name0, name1 = names[indices[0]], names[indices[1]]
diff = difflib.unified_diff(text0, text1, name0, name1)
diff_text = "".join(d for d in diff)
buf.set_text(diff_text)
def save_patch(self, targetfile: Gio.File):
buf = self.textview.get_buffer()
sourcefile = GtkSource.File.new()
saver = GtkSource.FileSaver.new_with_target(
buf, sourcefile, targetfile)
saver.save_async(
GLib.PRIORITY_HIGH,
callback=self.file_saved_cb,
)
def file_saved_cb(self, saver, result, *args):
gfile = saver.get_location()
try:
saver.save_finish(result)
except GLib.Error as err:
filename = GLib.markup_escape_text(gfile.get_parse_name())
error_dialog(
primary=_("Could not save file %s.") % filename,
secondary=_("Couldn’t save file due to:\n%s") % (
GLib.markup_escape_text(str(err))),
)
def run(self):
self.update_patch()
result = super().run()
if result < 0:
self.hide()
return
# Copy patch to clipboard
if result == 1:
buf = self.textview.get_buffer()
start, end = buf.get_bounds()
clip = Gtk.Clipboard.get_default(Gdk.Display.get_default())
clip.set_text(buf.get_text(start, end, False), -1)
clip.store()
# Save patch as a file
else:
gfile = prompt_save_filename(_("Save Patch"))
if gfile:
self.save_patch(gfile)
self.hide()
|
from homeassistant import config_entries
from homeassistant.components.ozw import DOMAIN, PLATFORMS, const
from .common import setup_ozw
from tests.common import MockConfigEntry
async def test_init_entry(hass, generic_data):
"""Test setting up config entry."""
await setup_ozw(hass, fixture=generic_data)
# Verify integration + platform loaded.
assert "ozw" in hass.config.components
for platform in PLATFORMS:
assert platform in hass.config.components, platform
assert f"{platform}.{DOMAIN}" in hass.config.components, f"{platform}.{DOMAIN}"
# Verify services registered
assert hass.services.has_service(DOMAIN, const.SERVICE_ADD_NODE)
assert hass.services.has_service(DOMAIN, const.SERVICE_REMOVE_NODE)
async def test_unload_entry(hass, generic_data, switch_msg, caplog):
"""Test unload the config entry."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Z-Wave",
connection_class=config_entries.CONN_CLASS_LOCAL_PUSH,
)
entry.add_to_hass(hass)
assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
receive_message = await setup_ozw(hass, entry=entry, fixture=generic_data)
assert entry.state == config_entries.ENTRY_STATE_LOADED
assert len(hass.states.async_entity_ids("switch")) == 1
await hass.config_entries.async_unload(entry.entry_id)
assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
assert len(hass.states.async_entity_ids("switch")) == 0
# Send a message for a switch from the broker to check that
# all entity topic subscribers are unsubscribed.
receive_message(switch_msg)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("switch")) == 0
# Load the integration again and check that there are no errors when
# adding the entities.
# This asserts that we have unsubscribed the entity addition signals
# when unloading the integration previously.
await setup_ozw(hass, entry=entry, fixture=generic_data)
await hass.async_block_till_done()
assert entry.state == config_entries.ENTRY_STATE_LOADED
assert len(hass.states.async_entity_ids("switch")) == 1
for record in caplog.records:
assert record.levelname != "ERROR"
|
import collections
class LRUDict:
"""
    dict with LRU eviction and a maximum size.
    This is intended for caching; it may not behave as you expect otherwise.
    This uses collections.OrderedDict under the hood, but does not directly
    expose all of its methods (intentional).
"""
def __init__(self, *keyval_pairs, size):
self.size = size
self._dict = collections.OrderedDict(*keyval_pairs)
def __contains__(self, key):
if key in self._dict:
self._dict.move_to_end(key, last=True)
return True
return False
def __getitem__(self, key):
ret = self._dict.__getitem__(key)
self._dict.move_to_end(key, last=True)
return ret
def __setitem__(self, key, value):
if key in self._dict:
self._dict.move_to_end(key, last=True)
self._dict[key] = value
if len(self._dict) > self.size:
self._dict.popitem(last=False)
def __delitem__(self, key):
return self._dict.__delitem__(key)
def clear(self):
return self._dict.clear()
def pop(self, key):
return self._dict.pop(key)
    # All of the methods below access all items, and therefore shouldn't modify the ordering for eviction
def keys(self):
return self._dict.keys()
def items(self):
return self._dict.items()
def values(self):
return self._dict.values()
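if __name__ == "__main__":
    # Minimal usage sketch (illustrative only).
    cache = LRUDict(size=2)
    cache["a"] = 1
    cache["b"] = 2
    _ = cache["a"]  # touching "a" makes "b" the least-recently-used entry
    cache["c"] = 3  # exceeds size, so "b" is evicted
    assert "b" not in cache and "a" in cache and "c" in cache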
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import toggle_entity
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
TRIGGER_SCHEMA = toggle_entity.TRIGGER_SCHEMA.extend(
{vol.Required(CONF_DOMAIN): DOMAIN}
)
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Listen for state changes based on configuration."""
return await toggle_entity.async_attach_trigger(
hass, config, action, automation_info
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers."""
return await toggle_entity.async_get_triggers(hass, device_id, DOMAIN)
async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict:
"""List trigger capabilities."""
return await toggle_entity.async_get_trigger_capabilities(hass, config)
|
import logging
import unittest
import numpy as np
from scipy import sparse
from scipy.special import psi # gamma function utils
import gensim.matutils as matutils
# we'll define known, good (slow) version of functions here
# and compare results from these functions vs. cython ones
def logsumexp(x):
"""Log of sum of exponentials.
Parameters
----------
x : numpy.ndarray
Input 2d matrix.
Returns
-------
float
log of sum of exponentials of elements in `x`.
Warnings
--------
    For performance reasons, doesn't support NaNs or 1d, 3d, etc. arrays like :func:`scipy.special.logsumexp`.
"""
x_max = np.max(x)
x = np.log(np.sum(np.exp(x - x_max)))
x += x_max
return x
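# Worked example (a sanity check, not part of the test suite): since
# exp(log 1) + exp(log 3) == 4, logsumexp(np.log([1., 3.])) == np.log(4.)
# up to floating-point error.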
def mean_absolute_difference(a, b):
"""Mean absolute difference between two arrays.
Parameters
----------
a : numpy.ndarray
Input 1d array.
b : numpy.ndarray
Input 1d array.
Returns
-------
float
mean(abs(a - b)).
"""
return np.mean(np.abs(a - b))
def dirichlet_expectation(alpha):
r"""For a vector :math:`\theta \sim Dir(\alpha)`, compute :math:`E[log \theta]`.
Parameters
----------
alpha : numpy.ndarray
Dirichlet parameter 2d matrix or 1d vector, if 2d - each row is treated as a separate parameter vector.
Returns
-------
numpy.ndarray:
:math:`E[log \theta]`
"""
if len(alpha.shape) == 1:
result = psi(alpha) - psi(np.sum(alpha))
else:
result = psi(alpha) - psi(np.sum(alpha, 1))[:, np.newaxis]
return result.astype(alpha.dtype, copy=False) # keep the same precision as input
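# Worked example: psi(2) == psi(1) + 1, so for alpha = np.array([1., 1.])
# dirichlet_expectation returns psi(1) - psi(2) == -1. for both entries.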
dirichlet_expectation_1d = dirichlet_expectation
dirichlet_expectation_2d = dirichlet_expectation
class TestLdaModelInner(unittest.TestCase):
def setUp(self):
self.random_state = np.random.RandomState()
self.num_runs = 100 # test functions with *num_runs* random inputs
self.num_topics = 100
def testLogSumExp(self):
# test logsumexp
rs = self.random_state
for dtype in [np.float16, np.float32, np.float64]:
for i in range(self.num_runs):
                input = rs.uniform(-1000, 1000, size=(self.num_topics, 1)).astype(dtype)
known_good = logsumexp(input)
test_values = matutils.logsumexp(input)
msg = "logsumexp failed for dtype={}".format(dtype)
self.assertTrue(np.allclose(known_good, test_values), msg)
def testMeanAbsoluteDifference(self):
# test mean_absolute_difference
rs = self.random_state
for dtype in [np.float16, np.float32, np.float64]:
for i in range(self.num_runs):
                input1 = rs.uniform(-10000, 10000, size=(self.num_topics,)).astype(dtype)
                input2 = rs.uniform(-10000, 10000, size=(self.num_topics,)).astype(dtype)
known_good = mean_absolute_difference(input1, input2)
test_values = matutils.mean_absolute_difference(input1, input2)
msg = "mean_absolute_difference failed for dtype={}".format(dtype)
self.assertTrue(np.allclose(known_good, test_values), msg)
def testDirichletExpectation(self):
# test dirichlet_expectation
rs = self.random_state
for dtype in [np.float16, np.float32, np.float64]:
for i in range(self.num_runs):
# 1 dimensional case
                input_1d = rs.uniform(.01, 10000, size=(self.num_topics,)).astype(dtype)
known_good = dirichlet_expectation(input_1d)
test_values = matutils.dirichlet_expectation(input_1d)
msg = "dirichlet_expectation_1d failed for dtype={}".format(dtype)
self.assertTrue(np.allclose(known_good, test_values), msg)
# 2 dimensional case
                input_2d = rs.uniform(.01, 10000, size=(1, self.num_topics,)).astype(dtype)
known_good = dirichlet_expectation(input_2d)
test_values = matutils.dirichlet_expectation(input_2d)
msg = "dirichlet_expectation_2d failed for dtype={}".format(dtype)
self.assertTrue(np.allclose(known_good, test_values), msg)
def manual_unitvec(vec):
# manual unit vector calculation for UnitvecTestCase
    vec = vec.astype(float)
if sparse.issparse(vec):
vec_sum_of_squares = vec.multiply(vec)
unit = 1. / np.sqrt(vec_sum_of_squares.sum())
return vec.multiply(unit)
elif not sparse.issparse(vec):
sum_vec_squared = np.sum(vec ** 2)
vec /= np.sqrt(sum_vec_squared)
return vec
class UnitvecTestCase(unittest.TestCase):
# test unitvec
def test_sparse_npfloat32(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(np.float32)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_sparse_npfloat64(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(np.float64)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_sparse_npint32(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(np.int32)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_sparse_npint64(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(np.int64)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_dense_npfloat32(self):
input_vector = np.random.uniform(size=(5,)).astype(np.float32)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_dense_npfloat64(self):
input_vector = np.random.uniform(size=(5,)).astype(np.float64)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_dense_npint32(self):
input_vector = np.random.randint(10, size=5).astype(np.int32)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_dense_npint64(self):
        input_vector = np.random.randint(10, size=5).astype(np.int64)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_sparse_python_float(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(float)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_sparse_python_int(self):
input_vector = sparse.csr_matrix(np.asarray([[1, 0, 0, 0, 3], [0, 0, 4, 3, 0]])).astype(int)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector.data, man_unit_vector.data, atol=1e-3))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_dense_python_float(self):
input_vector = np.random.uniform(size=(5,)).astype(float)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertEqual(input_vector.dtype, unit_vector.dtype)
def test_dense_python_int(self):
input_vector = np.random.randint(10, size=5).astype(int)
unit_vector = matutils.unitvec(input_vector)
man_unit_vector = manual_unitvec(input_vector)
self.assertTrue(np.allclose(unit_vector, man_unit_vector))
self.assertTrue(np.issubdtype(unit_vector.dtype, np.floating))
def test_return_norm_zero_vector_scipy_sparse(self):
input_vector = sparse.csr_matrix([[]], dtype=np.int32)
return_value = matutils.unitvec(input_vector, return_norm=True)
self.assertTrue(isinstance(return_value, tuple))
norm = return_value[1]
self.assertTrue(isinstance(norm, float))
self.assertEqual(norm, 1.0)
def test_return_norm_zero_vector_numpy(self):
input_vector = np.array([], dtype=np.int32)
return_value = matutils.unitvec(input_vector, return_norm=True)
self.assertTrue(isinstance(return_value, tuple))
norm = return_value[1]
self.assertTrue(isinstance(norm, float))
self.assertEqual(norm, 1.0)
def test_return_norm_zero_vector_gensim_sparse(self):
input_vector = []
return_value = matutils.unitvec(input_vector, return_norm=True)
self.assertTrue(isinstance(return_value, tuple))
norm = return_value[1]
self.assertTrue(isinstance(norm, float))
self.assertEqual(norm, 1.0)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
|
from __future__ import division, print_function
from urwid.util import calc_width, calc_text_pos, calc_trim_text, is_wide_char, \
move_prev_char, move_next_char
from urwid.compat import bytes, PYTHON3, B, xrange
class TextLayout:
def supports_align_mode(self, align):
"""Return True if align is a supported align mode."""
return True
def supports_wrap_mode(self, wrap):
"""Return True if wrap is a supported wrap mode."""
return True
def layout(self, text, width, align, wrap ):
"""
Return a layout structure for text.
:param text: string in current encoding or unicode string
:param width: number of screen columns available
:param align: align mode for text
:param wrap: wrap mode for text
Layout structure is a list of line layouts, one per output line.
        Line layouts are lists that may contain the following tuples:
* (column width of text segment, start offset, end offset)
* (number of space characters to insert, offset or None)
* (column width of insert text, offset, "insert text")
The offset in the last two tuples is used to determine the
attribute used for the inserted spaces or text respectively.
The attribute used will be the same as the attribute at that
text offset. If the offset is None when inserting spaces
then no attribute will be used.
"""
raise NotImplementedError("This function must be overridden by a real"
" text layout class. (see StandardTextLayout)")
class CanNotDisplayText(Exception):
pass
class StandardTextLayout(TextLayout):
def __init__(self):#, tab_stops=(), tab_stop_every=8):
pass
#"""
#tab_stops -- list of screen column indexes for tab stops
#tab_stop_every -- repeated interval for following tab stops
#"""
#assert tab_stop_every is None or type(tab_stop_every)==int
#if not tab_stops and tab_stop_every:
# self.tab_stops = (tab_stop_every,)
#self.tab_stops = tab_stops
#self.tab_stop_every = tab_stop_every
def supports_align_mode(self, align):
"""Return True if align is 'left', 'center' or 'right'."""
return align in ('left', 'center', 'right')
def supports_wrap_mode(self, wrap):
"""Return True if wrap is 'any', 'space', 'clip' or 'ellipsis'."""
return wrap in ('any', 'space', 'clip', 'ellipsis')
def layout(self, text, width, align, wrap ):
"""Return a layout structure for text."""
try:
segs = self.calculate_text_segments( text, width, wrap )
return self.align_layout( text, width, segs, wrap, align )
except CanNotDisplayText:
return [[]]
def pack(self, maxcol, layout):
"""
Return a minimal maxcol value that would result in the same
number of lines for layout. layout must be a layout structure
returned by self.layout().
"""
maxwidth = 0
assert layout, "huh? empty layout?: "+repr(layout)
for l in layout:
lw = line_width(l)
if lw >= maxcol:
return maxcol
maxwidth = max(maxwidth, lw)
return maxwidth
def align_layout( self, text, width, segs, wrap, align ):
"""Convert the layout segs to an aligned layout."""
out = []
for l in segs:
sc = line_width(l)
if sc == width or align=='left':
out.append(l)
continue
if align == 'right':
out.append([(width-sc, None)] + l)
continue
assert align == 'center'
out.append([((width-sc+1) // 2, None)] + l)
return out
def calculate_text_segments(self, text, width, wrap):
"""
Calculate the segments of text to display given width screen
columns to display them.
text - unicode text or byte string to display
width - number of available screen columns
wrap - wrapping mode used
Returns a layout structure without alignment applied.
"""
nl, nl_o, sp_o = "\n", "\n", " "
if PYTHON3 and isinstance(text, bytes):
nl = B(nl) # can only find bytes in python3 bytestrings
nl_o = ord(nl_o) # + an item of a bytestring is the ordinal value
sp_o = ord(sp_o)
b = []
p = 0
if wrap in ('clip', 'ellipsis'):
# no wrapping to calculate, so it's easy.
while p<=len(text):
n_cr = text.find(nl, p)
if n_cr == -1:
n_cr = len(text)
sc = calc_width(text, p, n_cr)
# trim line to max width if needed, add ellipsis if trimmed
if wrap == 'ellipsis' and sc > width:
trimmed = True
spos, n_end, pad_left, pad_right = calc_trim_text(text, p, n_cr, 0, width-1)
# pad_left should be 0, because the start_col parameter was 0 (no trimming on the left)
# similarly spos should not be changed from p
assert pad_left == 0
assert spos == p
sc = width - 1 - pad_right
else:
trimmed = False
n_end = n_cr
pad_right = 0
l = []
if p!=n_end:
l += [(sc, p, n_end)]
if trimmed:
l += [(1, n_end, u'…'.encode("utf-8"))]
l += [(pad_right,n_end)]
b.append(l)
p = n_cr+1
return b
while p<=len(text):
# look for next eligible line break
n_cr = text.find(nl, p)
if n_cr == -1:
n_cr = len(text)
sc = calc_width(text, p, n_cr)
if sc == 0:
# removed character hint
b.append([(0,n_cr)])
p = n_cr+1
continue
if sc <= width:
# this segment fits
b.append([(sc,p,n_cr),
# removed character hint
(0,n_cr)])
p = n_cr+1
continue
pos, sc = calc_text_pos( text, p, n_cr, width )
if pos == p: # pathological width=1 double-byte case
raise CanNotDisplayText(
"Wide character will not fit in 1-column width")
if wrap == 'any':
b.append([(sc,p,pos)])
p = pos
continue
assert wrap == 'space'
if text[pos] == sp_o:
# perfect space wrap
b.append([(sc,p,pos),
# removed character hint
(0,pos)])
p = pos+1
continue
if is_wide_char(text, pos):
# perfect next wide
b.append([(sc,p,pos)])
p = pos
continue
prev = pos
while prev > p:
prev = move_prev_char(text, p, prev)
if text[prev] == sp_o:
sc = calc_width(text,p,prev)
l = [(0,prev)]
if p!=prev:
l = [(sc,p,prev)] + l
b.append(l)
p = prev+1
break
if is_wide_char(text,prev):
# wrap after wide char
next = move_next_char(text, prev, pos)
sc = calc_width(text,p,next)
b.append([(sc,p,next)])
p = next
break
else:
# unwrap previous line space if possible to
# fit more text (we're breaking a word anyway)
if b and (len(b[-1]) == 2 or ( len(b[-1])==1
and len(b[-1][0])==2 )):
# look for removed space above
if len(b[-1]) == 1:
[(h_sc, h_off)] = b[-1]
p_sc = 0
p_off = p_end = h_off
else:
[(p_sc, p_off, p_end),
(h_sc, h_off)] = b[-1]
if (p_sc < width and h_sc==0 and
text[h_off] == sp_o):
# combine with previous line
del b[-1]
p = p_off
pos, sc = calc_text_pos(
text, p, n_cr, width )
b.append([(sc,p,pos)])
# check for trailing " " or "\n"
p = pos
if p < len(text) and (
text[p] in (sp_o, nl_o)):
# removed character hint
b[-1].append((0,p))
p += 1
continue
# force any char wrap
b.append([(sc,p,pos)])
p = pos
return b
######################################
# default layout object to use
default_layout = StandardTextLayout()
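# Example (a sketch of the structure documented in TextLayout.layout):
#   default_layout.layout("hello world", 8, 'left', 'space')
# yields one line layout per output line, here
#   [[(5, 0, 5), (0, 5)], [(5, 6, 11), (0, 11)]]
# i.e. "hello" plus a removed-space hint, then "world" plus a trailing hint.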
######################################
class LayoutSegment:
def __init__(self, seg):
"""Create object from line layout segment structure"""
assert type(seg) == tuple, repr(seg)
assert len(seg) in (2,3), repr(seg)
self.sc, self.offs = seg[:2]
assert type(self.sc) == int, repr(self.sc)
if len(seg)==3:
assert type(self.offs) == int, repr(self.offs)
assert self.sc > 0, repr(seg)
t = seg[2]
if type(t) == bytes:
self.text = t
self.end = None
else:
assert type(t) == int, repr(t)
self.text = None
self.end = t
else:
assert len(seg) == 2, repr(seg)
if self.offs is not None:
assert self.sc >= 0, repr(seg)
assert type(self.offs)==int
self.text = self.end = None
def subseg(self, text, start, end):
"""
Return a "sub-segment" list containing segment structures
that make up a portion of this segment.
A list is returned to handle cases where wide characters
need to be replaced with a space character at either edge
so two or three segments will be returned.
"""
if start < 0: start = 0
if end > self.sc: end = self.sc
if start >= end:
return [] # completely gone
if self.text:
# use text stored in segment (self.text)
spos, epos, pad_left, pad_right = calc_trim_text(
self.text, 0, len(self.text), start, end )
return [ (end-start, self.offs, bytes().ljust(pad_left) +
self.text[spos:epos] + bytes().ljust(pad_right)) ]
elif self.end:
# use text passed as parameter (text)
spos, epos, pad_left, pad_right = calc_trim_text(
text, self.offs, self.end, start, end )
l = []
if pad_left:
l.append((1,spos-1))
l.append((end-start-pad_left-pad_right, spos, epos))
if pad_right:
l.append((1,epos))
return l
else:
# simple padding adjustment
return [(end-start,self.offs)]
def line_width( segs ):
"""
Return the screen column width of one line of a text layout structure.
This function ignores any existing shift applied to the line,
represented by an (amount, None) tuple at the start of the line.
"""
sc = 0
seglist = segs
    if segs and len(segs[0]) == 2 and segs[0][1] is None:
seglist = segs[1:]
for s in seglist:
sc += s[0]
return sc
def shift_line( segs, amount ):
"""
Return a shifted line from a layout structure to the left or right.
segs -- line of a layout structure
amount -- screen columns to shift right (+ve) or left (-ve)
"""
assert type(amount)==int, repr(amount)
    if segs and len(segs[0]) == 2 and segs[0][1] is None:
# existing shift
amount += segs[0][0]
if amount:
return [(amount,None)]+segs[1:]
return segs[1:]
if amount:
return [(amount,None)]+segs
return segs
def trim_line( segs, text, start, end ):
"""
Return a trimmed line of a text layout structure.
text -- text to which this layout structure applies
start -- starting screen column
end -- ending screen column
"""
l = []
x = 0
for seg in segs:
sc = seg[0]
if start or sc < 0:
if start >= sc:
start -= sc
x += sc
continue
s = LayoutSegment(seg)
if x+sc >= end:
# can all be done at once
return s.subseg( text, start, end-x )
l += s.subseg( text, start, sc )
start = 0
x += sc
continue
if x >= end:
break
if x+sc > end:
s = LayoutSegment(seg)
l += s.subseg( text, 0, end-x )
break
l.append( seg )
return l
def calc_line_pos( text, line_layout, pref_col ):
"""
Calculate the closest linear position to pref_col given a
line layout structure. Returns None if no position found.
"""
closest_sc = None
closest_pos = None
current_sc = 0
if pref_col == 'left':
for seg in line_layout:
s = LayoutSegment(seg)
if s.offs is not None:
return s.offs
return
elif pref_col == 'right':
for seg in line_layout:
s = LayoutSegment(seg)
if s.offs is not None:
closest_pos = s
s = closest_pos
if s is None:
return
if s.end is None:
return s.offs
return calc_text_pos( text, s.offs, s.end, s.sc-1)[0]
for seg in line_layout:
s = LayoutSegment(seg)
if s.offs is not None:
if s.end is not None:
if (current_sc <= pref_col and
pref_col < current_sc + s.sc):
# exact match within this segment
return calc_text_pos( text,
s.offs, s.end,
pref_col - current_sc )[0]
elif current_sc <= pref_col:
closest_sc = current_sc + s.sc - 1
closest_pos = s
if closest_sc is None or ( abs(pref_col-current_sc)
< abs(pref_col-closest_sc) ):
# this screen column is closer
closest_sc = current_sc
closest_pos = s.offs
if current_sc > closest_sc:
# we're moving past
break
current_sc += s.sc
if closest_pos is None or type(closest_pos) == int:
return closest_pos
# return the last positions in the segment "closest_pos"
s = closest_pos
return calc_text_pos( text, s.offs, s.end, s.sc-1)[0]
def calc_pos( text, layout, pref_col, row ):
"""
Calculate the closest linear position to pref_col and row given a
layout structure.
"""
if row < 0 or row >= len(layout):
raise Exception("calculate_pos: out of layout row range")
pos = calc_line_pos( text, layout[row], pref_col )
if pos is not None:
return pos
rows_above = list(xrange(row-1,-1,-1))
rows_below = list(xrange(row+1,len(layout)))
    while rows_above or rows_below:
if rows_above:
r = rows_above.pop(0)
pos = calc_line_pos(text, layout[r], pref_col)
if pos is not None: return pos
if rows_below:
r = rows_below.pop(0)
pos = calc_line_pos(text, layout[r], pref_col)
if pos is not None: return pos
return 0
def calc_coords( text, layout, pos, clamp=1 ):
"""
Calculate the coordinates closest to position pos in text with layout.
text -- raw string or unicode string
layout -- layout structure applied to text
pos -- integer position into text
clamp -- ignored right now
"""
closest = None
y = 0
for line_layout in layout:
x = 0
for seg in line_layout:
s = LayoutSegment(seg)
if s.offs is None:
x += s.sc
continue
if s.offs == pos:
return x,y
if s.end is not None and s.offs<=pos and s.end>pos:
x += calc_width( text, s.offs, pos )
return x,y
distance = abs(s.offs - pos)
if s.end is not None and s.end<pos:
distance = pos - (s.end-1)
if closest is None or distance < closest[0]:
closest = distance, (x,y)
x += s.sc
y += 1
if closest:
return closest[1]
return 0,0
|
from django.conf.urls import url, include
from django.http import JsonResponse
from rest_framework import routers
from shop.forms.checkout import ShippingAddressForm, BillingAddressForm
from shop.messages import get_messages_as_json
from shop.views.address import AddressEditView
from shop.views.cart import CartViewSet, WatchViewSet
from shop.views.checkout import CheckoutViewSet
from shop.views.catalog import ProductSelectView
router = routers.DefaultRouter() # TODO: try with trailing_slash=False
router.register(r'cart', CartViewSet, basename='cart')
router.register(r'watch', WatchViewSet, basename='watch')
router.register(r'checkout', CheckoutViewSet, basename='checkout')
def fetch_messages(request):
data = get_messages_as_json(request)
return JsonResponse({'django_messages': data})
urlpatterns = [
url(r'^select_product/?$',
ProductSelectView.as_view(),
name='select-product'),
url(r'^fetch_messages/?$',
fetch_messages,
name='fetch-messages'),
url(r'^shipping_address/(?P<priority>({{\s*\w+\s*}}|\d+|add))$',
AddressEditView.as_view(form_class=ShippingAddressForm),
name='edit-shipping-address'),
url(r'^billing_address/(?P<priority>({{\s*\w+\s*}}|\d+|add))$',
AddressEditView.as_view(form_class=BillingAddressForm),
name='edit-billing-address'),
url(r'^', include(router.urls)),
]
|
import sys
import urwid
class LineWalker(urwid.ListWalker):
"""ListWalker-compatible class for lazily reading file contents."""
def __init__(self, name):
self.file = open(name)
self.lines = []
self.focus = 0
def get_focus(self):
return self._get_at_pos(self.focus)
def set_focus(self, focus):
self.focus = focus
self._modified()
def get_next(self, start_from):
return self._get_at_pos(start_from + 1)
def get_prev(self, start_from):
return self._get_at_pos(start_from - 1)
def read_next_line(self):
"""Read another line from the file."""
next_line = self.file.readline()
if not next_line or next_line[-1:] != '\n':
# no newline on last line of file
self.file = None
else:
# trim newline characters
next_line = next_line[:-1]
expanded = next_line.expandtabs()
edit = urwid.Edit("", expanded, allow_tab=True)
edit.set_edit_pos(0)
edit.original_text = next_line
self.lines.append(edit)
return next_line
def _get_at_pos(self, pos):
"""Return a widget for the line number passed."""
if pos < 0:
# line 0 is the start of the file, no more above
return None, None
if len(self.lines) > pos:
# we have that line so return it
return self.lines[pos], pos
if self.file is None:
# file is closed, so there are no more lines
return None, None
assert pos == len(self.lines), "out of order request?"
self.read_next_line()
return self.lines[-1], pos
def split_focus(self):
"""Divide the focus edit widget at the cursor location."""
focus = self.lines[self.focus]
pos = focus.edit_pos
edit = urwid.Edit("",focus.edit_text[pos:], allow_tab=True)
edit.original_text = ""
focus.set_edit_text(focus.edit_text[:pos])
edit.set_edit_pos(0)
self.lines.insert(self.focus+1, edit)
def combine_focus_with_prev(self):
"""Combine the focus edit widget with the one above."""
above, ignore = self.get_prev(self.focus)
if above is None:
# already at the top
return
focus = self.lines[self.focus]
above.set_edit_pos(len(above.edit_text))
above.set_edit_text(above.edit_text + focus.edit_text)
del self.lines[self.focus]
self.focus -= 1
def combine_focus_with_next(self):
"""Combine the focus edit widget with the one below."""
below, ignore = self.get_next(self.focus)
if below is None:
# already at bottom
return
focus = self.lines[self.focus]
focus.set_edit_text(focus.edit_text + below.edit_text)
del self.lines[self.focus+1]
class EditDisplay:
palette = [
('body','default', 'default'),
('foot','dark cyan', 'dark blue', 'bold'),
('key','light cyan', 'dark blue', 'underline'),
]
footer_text = ('foot', [
"Text Editor ",
('key', "F5"), " save ",
('key', "F8"), " quit",
])
def __init__(self, name):
self.save_name = name
self.walker = LineWalker(name)
self.listbox = urwid.ListBox(self.walker)
self.footer = urwid.AttrWrap(urwid.Text(self.footer_text),
"foot")
self.view = urwid.Frame(urwid.AttrWrap(self.listbox, 'body'),
footer=self.footer)
def main(self):
self.loop = urwid.MainLoop(self.view, self.palette,
unhandled_input=self.unhandled_keypress)
self.loop.run()
def unhandled_keypress(self, k):
"""Last resort for keypresses."""
if k == "f5":
self.save_file()
elif k == "f8":
raise urwid.ExitMainLoop()
elif k == "delete":
# delete at end of line
self.walker.combine_focus_with_next()
elif k == "backspace":
# backspace at beginning of line
self.walker.combine_focus_with_prev()
elif k == "enter":
# start new line
self.walker.split_focus()
# move the cursor to the new line and reset pref_col
self.loop.process_input(["down", "home"])
elif k == "right":
w, pos = self.walker.get_focus()
w, pos = self.walker.get_next(pos)
if w:
self.listbox.set_focus(pos, 'above')
self.loop.process_input(["home"])
elif k == "left":
w, pos = self.walker.get_focus()
w, pos = self.walker.get_prev(pos)
if w:
self.listbox.set_focus(pos, 'below')
self.loop.process_input(["end"])
else:
return
return True
def save_file(self):
"""Write the file out to disk."""
l = []
walk = self.walker
for edit in walk.lines:
# collect the text already stored in edit widgets
if edit.original_text.expandtabs() == edit.edit_text:
l.append(edit.original_text)
else:
l.append(re_tab(edit.edit_text))
# then the rest
while walk.file is not None:
l.append(walk.read_next_line())
# write back to disk
outfile = open(self.save_name, "w")
prefix = ""
for line in l:
outfile.write(prefix + line)
prefix = "\n"
def re_tab(s):
"""Return a tabbed string from an expanded one."""
l = []
p = 0
for i in range(8, len(s), 8):
if s[i-2:i] == " ":
# collapse two or more spaces into a tab
l.append(s[p:i].rstrip() + "\t")
p = i
if p == 0:
return s
else:
l.append(s[p:])
return "".join(l)
def main():
try:
name = sys.argv[1]
assert open(name, "a")
except:
sys.stderr.write(__doc__)
return
EditDisplay(name).main()
if __name__=="__main__":
main()
|
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import DOMAIN, KEY_STATUS, VALUE_ONLINE
DEFAULT_NAME = "UPS Online Status"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an APCUPSd Online Status binary sensor."""
apcups_data = hass.data[DOMAIN]
add_entities([OnlineStatus(config, apcups_data)], True)
class OnlineStatus(BinarySensorEntity):
"""Representation of an UPS online status."""
def __init__(self, config, data):
"""Initialize the APCUPSd binary device."""
self._config = config
self._data = data
self._state = None
@property
def name(self):
"""Return the name of the UPS online status sensor."""
return self._config[CONF_NAME]
@property
def is_on(self):
"""Return true if the UPS is online, else false."""
return self._state & VALUE_ONLINE > 0
def update(self):
"""Get the status report from APCUPSd and set this entity's state."""
self._state = int(self._data.status[KEY_STATUS], 16)
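# A rough sketch of the bitmask logic above (the flag value is an assumption
# for illustration; the real constant is the imported VALUE_ONLINE):
#
#     ONLINE_BIT = 0x08                 # hypothetical stand-in for VALUE_ONLINE
#     status = int("0x05000008", 16)    # hypothetical STATFLAG report
#     assert status & ONLINE_BIT > 0    # UPS reports online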
|
from typing import Dict, Optional
import uuid
from homeassistant.core import HomeAssistant
from . import singleton, storage
DATA_KEY = "core.uuid"
DATA_VERSION = 1
LEGACY_UUID_FILE = ".uuid"
@singleton.singleton(DATA_KEY)
async def async_get(hass: HomeAssistant) -> str:
"""Get unique ID for the hass instance."""
    store = storage.Store(hass, DATA_VERSION, DATA_KEY, private=True)
data: Optional[Dict[str, str]] = await storage.async_migrator( # type: ignore
hass,
hass.config.path(LEGACY_UUID_FILE),
store,
)
if data is not None:
return data["uuid"]
data = {"uuid": uuid.uuid4().hex}
await store.async_save(data)
return data["uuid"]
|
import os
from django.test import SimpleTestCase
from weblate.utils.environment import (
get_env_bool,
get_env_int,
get_env_list,
get_env_map,
modify_env_list,
)
class EnvTest(SimpleTestCase):
def test_list(self):
os.environ["TEST_DATA"] = "foo,bar,baz"
self.assertEqual(get_env_list("TEST_DATA"), ["foo", "bar", "baz"])
os.environ["TEST_DATA"] = "foo"
self.assertEqual(get_env_list("TEST_DATA"), ["foo"])
del os.environ["TEST_DATA"]
self.assertEqual(get_env_list("TEST_DATA"), [])
self.assertEqual(get_env_list("TEST_DATA", ["x"]), ["x"])
def test_map(self):
os.environ["TEST_DATA"] = "foo:bar,baz:bag"
self.assertEqual(get_env_map("TEST_DATA"), {"foo": "bar", "baz": "bag"})
os.environ["TEST_DATA"] = "foo:bar"
self.assertEqual(get_env_map("TEST_DATA"), {"foo": "bar"})
del os.environ["TEST_DATA"]
self.assertEqual(get_env_map("TEST_DATA"), {})
self.assertEqual(get_env_map("TEST_DATA", {"x": "y"}), {"x": "y"})
def test_bool(self):
os.environ["TEST_DATA"] = "1"
self.assertEqual(get_env_bool("TEST_DATA"), True)
os.environ["TEST_DATA"] = "True"
self.assertEqual(get_env_bool("TEST_DATA"), True)
os.environ["TEST_DATA"] = "true"
self.assertEqual(get_env_bool("TEST_DATA"), True)
os.environ["TEST_DATA"] = "Yes"
self.assertEqual(get_env_bool("TEST_DATA"), True)
os.environ["TEST_DATA"] = "no"
self.assertEqual(get_env_bool("TEST_DATA"), False)
os.environ["TEST_DATA"] = "0"
self.assertEqual(get_env_bool("TEST_DATA"), False)
del os.environ["TEST_DATA"]
self.assertEqual(get_env_bool("TEST_DATA"), False)
def test_int(self):
os.environ["TEST_DATA"] = "1"
self.assertEqual(get_env_int("TEST_DATA"), 1)
del os.environ["TEST_DATA"]
self.assertEqual(get_env_int("TEST_DATA"), 0)
def test_modify_list(self):
os.environ["WEBLATE_ADD_TEST"] = "foo,bar"
os.environ["WEBLATE_REMOVE_TEST"] = "baz,bag"
setting = ["baz", "bag", "aaa"]
modify_env_list(setting, "TEST")
self.assertEqual(setting, ["foo", "bar", "aaa"])
del os.environ["WEBLATE_ADD_TEST"]
del os.environ["WEBLATE_REMOVE_TEST"]
|
from datetime import datetime, timedelta
import secrets
from typing import Dict, List, NamedTuple, Optional
import uuid
import attr
from homeassistant.util import dt as dt_util
from . import permissions as perm_mdl
from .const import GROUP_ID_ADMIN
TOKEN_TYPE_NORMAL = "normal"
TOKEN_TYPE_SYSTEM = "system"
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
@attr.s(slots=True)
class Group:
"""A group."""
name: Optional[str] = attr.ib()
policy: perm_mdl.PolicyType = attr.ib()
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
system_generated: bool = attr.ib(default=False)
@attr.s(slots=True)
class User:
"""A user."""
name: Optional[str] = attr.ib()
perm_lookup: perm_mdl.PermissionLookup = attr.ib(eq=False, order=False)
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
is_owner: bool = attr.ib(default=False)
is_active: bool = attr.ib(default=False)
system_generated: bool = attr.ib(default=False)
groups: List[Group] = attr.ib(factory=list, eq=False, order=False)
# List of credentials of a user.
credentials: List["Credentials"] = attr.ib(factory=list, eq=False, order=False)
# Tokens associated with a user.
refresh_tokens: Dict[str, "RefreshToken"] = attr.ib(
factory=dict, eq=False, order=False
)
_permissions: Optional[perm_mdl.PolicyPermissions] = attr.ib(
init=False,
eq=False,
order=False,
default=None,
)
@property
def permissions(self) -> perm_mdl.AbstractPermissions:
"""Return permissions object for user."""
if self.is_owner:
return perm_mdl.OwnerPermissions
if self._permissions is not None:
return self._permissions
self._permissions = perm_mdl.PolicyPermissions(
perm_mdl.merge_policies([group.policy for group in self.groups]),
self.perm_lookup,
)
return self._permissions
@property
def is_admin(self) -> bool:
"""Return if user is part of the admin group."""
if self.is_owner:
return True
return self.is_active and any(gr.id == GROUP_ID_ADMIN for gr in self.groups)
def invalidate_permission_cache(self) -> None:
"""Invalidate permission cache."""
self._permissions = None
@attr.s(slots=True)
class RefreshToken:
"""RefreshToken for a user to grant new access tokens."""
user: User = attr.ib()
client_id: Optional[str] = attr.ib()
access_token_expiration: timedelta = attr.ib()
client_name: Optional[str] = attr.ib(default=None)
client_icon: Optional[str] = attr.ib(default=None)
token_type: str = attr.ib(
default=TOKEN_TYPE_NORMAL,
validator=attr.validators.in_(
(TOKEN_TYPE_NORMAL, TOKEN_TYPE_SYSTEM, TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN)
),
)
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
created_at: datetime = attr.ib(factory=dt_util.utcnow)
token: str = attr.ib(factory=lambda: secrets.token_hex(64))
jwt_key: str = attr.ib(factory=lambda: secrets.token_hex(64))
last_used_at: Optional[datetime] = attr.ib(default=None)
last_used_ip: Optional[str] = attr.ib(default=None)
@attr.s(slots=True)
class Credentials:
"""Credentials for a user on an auth provider."""
auth_provider_type: str = attr.ib()
auth_provider_id: Optional[str] = attr.ib()
# Allow the auth provider to store data to represent their auth.
data: dict = attr.ib()
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
is_new: bool = attr.ib(default=True)
class UserMeta(NamedTuple):
"""User metadata."""
name: Optional[str]
is_active: bool
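# A sketch of the is_admin check above; perm_lookup is mocked here because
# building a real PermissionLookup requires entity/device registries:
#
#     from unittest import mock
#     admins = Group(name="Admins", policy={}, id=GROUP_ID_ADMIN)
#     user = User(name="eve", perm_lookup=mock.Mock(), is_active=True,
#                 groups=[admins])
#     assert user.is_admin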
|
from datetime import timedelta
import logging
import magicseaweed
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_HOURS = "hours"
CONF_SPOT_ID = "spot_id"
CONF_UNITS = "units"
DEFAULT_UNIT = "us"
DEFAULT_NAME = "MSW"
DEFAULT_ATTRIBUTION = "Data provided by magicseaweed.com"
ICON = "mdi:waves"
HOURS = ["12AM", "3AM", "6AM", "9AM", "12PM", "3PM", "6PM", "9PM"]
SENSOR_TYPES = {
"max_breaking_swell": ["Max"],
"min_breaking_swell": ["Min"],
"swell_forecast": ["Forecast"],
}
UNITS = ["eu", "uk", "us"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SPOT_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOURS, default=None): vol.All(
cv.ensure_list, [vol.In(HOURS)]
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNITS): vol.In(UNITS),
}
)
# Return cached results if last scan was less than this time ago.
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=30)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Magicseaweed sensor."""
name = config.get(CONF_NAME)
spot_id = config[CONF_SPOT_ID]
api_key = config[CONF_API_KEY]
hours = config.get(CONF_HOURS)
if CONF_UNITS in config:
units = config.get(CONF_UNITS)
elif hass.config.units.is_metric:
units = UNITS[0]
else:
units = UNITS[2]
forecast_data = MagicSeaweedData(api_key=api_key, spot_id=spot_id, units=units)
forecast_data.update()
# If connection failed don't setup platform.
if forecast_data.currently is None or forecast_data.hourly is None:
return
sensors = []
for variable in config[CONF_MONITORED_CONDITIONS]:
sensors.append(MagicSeaweedSensor(forecast_data, variable, name, units))
if "forecast" not in variable and hours is not None:
for hour in hours:
sensors.append(
MagicSeaweedSensor(forecast_data, variable, name, units, hour)
)
add_entities(sensors, True)
class MagicSeaweedSensor(Entity):
"""Implementation of a MagicSeaweed sensor."""
def __init__(self, forecast_data, sensor_type, name, unit_system, hour=None):
"""Initialize the sensor."""
self.client_name = name
self.data = forecast_data
self.hour = hour
self.type = sensor_type
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._name = SENSOR_TYPES[sensor_type][0]
self._icon = None
self._state = None
self._unit_system = unit_system
self._unit_of_measurement = None
@property
def name(self):
"""Return the name of the sensor."""
if self.hour is None and "forecast" in self.type:
return f"{self.client_name} {self._name}"
if self.hour is None:
return f"Current {self.client_name} {self._name}"
return f"{self.hour} {self.client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_system(self):
"""Return the unit system of this entity."""
return self._unit_system
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the entity weather icon, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attrs
def update(self):
"""Get the latest data from Magicseaweed and updates the states."""
self.data.update()
if self.hour is None:
forecast = self.data.currently
else:
forecast = self.data.hourly[self.hour]
self._unit_of_measurement = forecast.swell_unit
if self.type == "min_breaking_swell":
self._state = forecast.swell_minBreakingHeight
elif self.type == "max_breaking_swell":
self._state = forecast.swell_maxBreakingHeight
elif self.type == "swell_forecast":
summary = f"{forecast.swell_minBreakingHeight} - {forecast.swell_maxBreakingHeight}"
self._state = summary
if self.hour is None:
for hour, data in self.data.hourly.items():
occurs = hour
hr_summary = f"{data.swell_minBreakingHeight} - {data.swell_maxBreakingHeight} {data.swell_unit}"
self._attrs[occurs] = hr_summary
if self.type != "swell_forecast":
self._attrs.update(forecast.attrs)
class MagicSeaweedData:
"""Get the latest data from MagicSeaweed."""
def __init__(self, api_key, spot_id, units):
"""Initialize the data object."""
self._msw = magicseaweed.MSW_Forecast(api_key, spot_id, None, units)
self.currently = None
self.hourly = {}
# Apply throttling to methods using configured interval
self.update = Throttle(MIN_TIME_BETWEEN_UPDATES)(self._update)
def _update(self):
"""Get the latest data from MagicSeaweed."""
try:
forecasts = self._msw.get_future()
self.currently = forecasts.data[0]
for forecast in forecasts.data[:8]:
hour = dt_util.utc_from_timestamp(forecast.localTimestamp).strftime(
"%-I%p"
)
self.hourly[hour] = forecast
except ConnectionError:
_LOGGER.error("Unable to retrieve data from Magicseaweed")
|
import json
from unittest.mock import patch, Mock
import arrow
import pytest
from cryptography import x509
from freezegun import freeze_time
from lemur.plugins.lemur_digicert import plugin
from lemur.tests.vectors import CSR_STR
def config_mock(*args):
values = {
"DIGICERT_ORG_ID": 111111,
"DIGICERT_PRIVATE": False,
"DIGICERT_DEFAULT_SIGNING_ALGORITHM": "sha256",
"DIGICERT_CIS_PROFILE_NAMES": {"digicert": 'digicert'},
"DIGICERT_CIS_SIGNING_ALGORITHMS": {"digicert": 'digicert'},
}
return values[args[0]]
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_determine_validity_years(mock_current_app):
assert plugin.determine_validity_years(1) == 1
assert plugin.determine_validity_years(0) == 1
assert plugin.determine_validity_years(3) == 1
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_determine_end_date(mock_current_app):
mock_current_app.config.get = Mock(return_value=397) # 397 days validity
with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime):
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(0) # 397 days from (2016, 11, 3)
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(arrow.get(2017, 12, 5))
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(arrow.get(2020, 5, 7))
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_map_fields_with_validity_years(mock_current_app):
mock_current_app.config.get = Mock(side_effect=config_mock)
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
mock_signature_hash.return_value = "sha256"
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
options = {
"common_name": "example.com",
"owner": "[email protected]",
"description": "test certificate",
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
"validity_years": 1
}
expected = {
"certificate": {
"csr": CSR_STR,
"common_name": "example.com",
"dns_names": names,
"signature_hash": "sha256",
},
"organization": {"id": 111111},
"validity_years": 1,
}
assert expected == plugin.map_fields(options, CSR_STR)
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_map_fields_with_validity_end_and_start(mock_current_app):
mock_current_app.config.get = Mock(side_effect=config_mock)
plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7))
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
mock_signature_hash.return_value = "sha256"
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
options = {
"common_name": "example.com",
"owner": "[email protected]",
"description": "test certificate",
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
"validity_end": arrow.get(2017, 5, 7),
"validity_start": arrow.get(2016, 10, 30),
}
expected = {
"certificate": {
"csr": CSR_STR,
"common_name": "example.com",
"dns_names": names,
"signature_hash": "sha256",
},
"organization": {"id": 111111},
"custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"),
}
assert expected == plugin.map_fields(options, CSR_STR)
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_map_cis_fields_with_validity_years(mock_current_app, authority):
mock_current_app.config.get = Mock(side_effect=config_mock)
plugin.determine_end_date = Mock(return_value=arrow.get(2018, 11, 3))
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
mock_signature_hash.return_value = "sha256"
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
options = {
"common_name": "example.com",
"owner": "[email protected]",
"description": "test certificate",
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
"organization": "Example, Inc.",
"organizational_unit": "Example Org",
"validity_years": 2,
"authority": authority,
}
expected = {
"common_name": "example.com",
"csr": CSR_STR,
"additional_dns_names": names,
"signature_hash": "sha256",
"organization": {"name": "Example, Inc."},
"validity": {
"valid_to": arrow.get(2018, 11, 3).format("YYYY-MM-DDTHH:mm:ss") + "Z"
},
"profile_name": None,
}
assert expected == plugin.map_cis_fields(options, CSR_STR)
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_map_cis_fields_with_validity_end_and_start(mock_current_app, app, authority):
mock_current_app.config.get = Mock(side_effect=config_mock)
plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7))
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
mock_signature_hash.return_value = "sha256"
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
options = {
"common_name": "example.com",
"owner": "[email protected]",
"description": "test certificate",
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
"organization": "Example, Inc.",
"organizational_unit": "Example Org",
"validity_end": arrow.get(2017, 5, 7),
"validity_start": arrow.get(2016, 10, 30),
"authority": authority
}
expected = {
"common_name": "example.com",
"csr": CSR_STR,
"additional_dns_names": names,
"signature_hash": "sha256",
"organization": {"name": "Example, Inc."},
"validity": {
"valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:mm:ss") + "Z"
},
"profile_name": None,
}
assert expected == plugin.map_cis_fields(options, CSR_STR)
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
def test_signature_hash(mock_current_app, app):
mock_current_app.config.get = Mock(side_effect=config_mock)
assert plugin.signature_hash(None) == "sha256"
assert plugin.signature_hash("sha256WithRSA") == "sha256"
assert plugin.signature_hash("sha384WithRSA") == "sha384"
assert plugin.signature_hash("sha512WithRSA") == "sha512"
with pytest.raises(Exception):
plugin.signature_hash("sdfdsf")
def test_issuer_plugin_create_certificate(
certificate_="""\
-----BEGIN CERTIFICATE-----
abc
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
def
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
ghi
-----END CERTIFICATE-----
"""
):
import requests_mock
from lemur.plugins.lemur_digicert.plugin import DigiCertIssuerPlugin
pem_fixture = certificate_
subject = DigiCertIssuerPlugin()
adapter = requests_mock.Adapter()
adapter.register_uri(
"POST",
"mock://www.digicert.com/services/v2/order/certificate/ssl_plus",
text=json.dumps({"id": "id123"}),
)
adapter.register_uri(
"GET",
"mock://www.digicert.com/services/v2/order/certificate/id123",
text=json.dumps({"status": "issued", "certificate": {"id": "cert123"}}),
)
adapter.register_uri(
"GET",
"mock://www.digicert.com/services/v2/certificate/cert123/download/format/pem_all",
text=pem_fixture,
)
subject.session.mount("mock", adapter)
cert, intermediate, external_id = subject.create_certificate(
"", {"common_name": "test.com"}
)
assert cert == "-----BEGIN CERTIFICATE-----\nabc\n-----END CERTIFICATE-----"
assert intermediate == "-----BEGIN CERTIFICATE-----\ndef\n-----END CERTIFICATE-----"
@patch("lemur.pending_certificates.models.PendingCertificate")
def test_cancel_ordered_certificate(mock_pending_cert):
import requests_mock
from lemur.plugins.lemur_digicert.plugin import DigiCertIssuerPlugin
mock_pending_cert.external_id = 1234
subject = DigiCertIssuerPlugin()
adapter = requests_mock.Adapter()
adapter.register_uri(
"PUT",
"mock://www.digicert.com/services/v2/order/certificate/1234/status",
status_code=204,
)
adapter.register_uri(
"PUT",
"mock://www.digicert.com/services/v2/order/certificate/111/status",
status_code=404,
)
subject.session.mount("mock", adapter)
data = {"note": "Test"}
subject.cancel_ordered_certificate(mock_pending_cert, **data)
    # A non-existent order id does not raise an exception: if the order is gone, there is nothing to cancel
mock_pending_cert.external_id = 111
subject.cancel_ordered_certificate(mock_pending_cert, **data)
|
from docutils.nodes import reference
from docutils.parsers.rst.roles import set_classes
# adapted from
# https://doughellmann.com/blog/2010/05/09/defining-custom-roles-in-sphinx/
def gh_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub issue."""
try:
# issue/PR mode (issues/PR-num will redirect to pull/PR-num)
int(text)
except ValueError:
# direct link mode
slug = text
else:
slug = 'issues/' + text
text = '#' + text
ref = 'https://github.com/mne-tools/mne-python/' + slug
set_classes(options)
node = reference(rawtext, text, refuri=ref, **options)
return [node], []
def setup(app):
app.add_role('gh', gh_role)
return
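# Usage sketch in reStructuredText, assuming this extension is listed in
# conf.py: ``:gh:`1234``` renders as "#1234" and links to the issue/PR, while
# ``:gh:`some/path``` links directly to that slug under the repository URL.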
|
import collections
import errno
import functools
import os
import shutil
import subprocess
from pathlib import PurePath
from typing import (
TYPE_CHECKING,
AnyStr,
Callable,
Generator,
List,
Optional,
Pattern,
Sequence,
Tuple,
Union,
)
from gi.repository import GLib, Gtk
from meld.conf import _
if TYPE_CHECKING:
from meld.vcview import ConsoleStream
if os.name != "nt":
from select import select
else:
import time
def select(rlist, wlist, xlist, timeout):
time.sleep(timeout)
return rlist, wlist, xlist
def with_focused_pane(function):
@functools.wraps(function)
def wrap_function(*args, **kwargs):
pane = args[0]._get_focused_pane()
if pane == -1:
return
return function(args[0], pane, *args[1:], **kwargs)
return wrap_function
def get_modal_parent(widget: Optional[Gtk.Widget] = None) -> Gtk.Window:
parent: Gtk.Window
if not widget:
parent = Gtk.Application.get_default().get_active_window()
elif not isinstance(widget, Gtk.Window):
parent = widget.get_toplevel()
else:
parent = widget
return parent
def error_dialog(primary: str, secondary: str) -> Gtk.ResponseType:
"""A common error dialog handler for Meld
This should only ever be used as a last resort, and for errors that
a user is unlikely to encounter. If you're tempted to use this,
think twice.
Primary must be plain text. Secondary must be valid markup.
"""
return modal_dialog(
primary, secondary, Gtk.ButtonsType.CLOSE, parent=None,
messagetype=Gtk.MessageType.ERROR)
def modal_dialog(
primary: str,
secondary: str,
buttons: Union[Gtk.ButtonsType, Sequence[Tuple[str, int]]],
parent: Optional[Gtk.Window] = None,
messagetype: Gtk.MessageType = Gtk.MessageType.WARNING
) -> Gtk.ResponseType:
"""A common message dialog handler for Meld
This should only ever be used for interactions that must be resolved
before the application flow can continue.
Primary must be plain text. Secondary must be valid markup.
"""
custom_buttons: Sequence[Tuple[str, int]] = []
if not isinstance(buttons, Gtk.ButtonsType):
custom_buttons, buttons = buttons, Gtk.ButtonsType.NONE
dialog = Gtk.MessageDialog(
transient_for=get_modal_parent(parent),
modal=True,
destroy_with_parent=True,
message_type=messagetype,
buttons=buttons,
text=primary)
dialog.format_secondary_markup(secondary)
for label, response_id in custom_buttons:
dialog.add_button(label, response_id)
response = dialog.run()
dialog.destroy()
return response
def user_critical(
primary: str, message: str) -> Callable[[Callable], Callable]:
"""Decorator for when the user must be told about failures
The use case here is for e.g., saving a file, where even if we
don't handle errors, the user *still* needs to know that something
failed. This should be extremely sparingly used, but anything where
the user might not otherwise see a problem and data loss is a
potential side effect should be considered a candidate.
"""
def wrap(function):
@functools.wraps(function)
def wrap_function(locked, *args, **kwargs):
try:
return function(locked, *args, **kwargs)
except Exception:
error_dialog(
primary=primary,
secondary=_(
"{}\n\n"
"Meld encountered a critical error while running:\n"
"<tt>{}</tt>").format(
message, GLib.markup_escape_text(str(function))
),
)
raise
return wrap_function
return wrap
def all_same(iterable: Sequence) -> bool:
"""Return True if all elements of the list are equal"""
sample, has_no_sample = None, True
for item in iterable or ():
if has_no_sample:
sample, has_no_sample = item, False
elif sample != item:
return False
return True
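# A few illustrative cases for all_same (an empty or None iterable counts as
# uniform):
def _all_same_demo():
    assert all_same([])
    assert all_same(None)
    assert all_same(["a", "a"])
    assert not all_same(["a", "b"])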
def shorten_names(*names: str) -> List[str]:
"""Remove common parts of a list of paths
For example, `('/tmp/foo1', '/tmp/foo2')` would be summarised as
`('foo1', 'foo2')`. Paths that share a basename are distinguished
by prepending an indicator, e.g., `('/a/b/c', '/a/d/c')` would be
summarised to `['[b] c', '[d] c']`.
"""
paths = [PurePath(n) for n in names]
    # Identify the longest common path among the list of paths
common = set(paths[0].parents)
common = common.intersection(*(p.parents for p in paths))
if not common:
return list(names)
common_parent = sorted(common, key=lambda p: -len(p.parts))[0]
paths = [p.relative_to(common_parent) for p in paths]
basenames = [p.name for p in paths]
if all_same(basenames):
def firstpart(path: PurePath) -> str:
if len(path.parts) > 1 and path.parts[0]:
return "[%s] " % path.parts[0]
else:
return ""
return [firstpart(p) + p.name for p in paths]
return [name or _("[None]") for name in basenames]
SubprocessGenerator = Generator[Union[Tuple[int, str], None], None, None]
def read_pipe_iter(
command: List[str],
workdir: str,
errorstream: 'ConsoleStream',
yield_interval: float = 0.1,
) -> SubprocessGenerator:
"""Read the output of a shell command iteratively.
    Each time 'yield_interval' seconds pass without reading any data,
this function yields None.
When all the data is read, the entire string is yielded.
"""
class Sentinel:
proc: Optional[subprocess.Popen]
def __init__(self) -> None:
self.proc = None
def __del__(self) -> None:
if self.proc:
errorstream.error("killing '%s'\n" % command[0])
self.proc.terminate()
errorstream.error("killed (status was '%i')\n" %
self.proc.wait())
def __call__(self) -> SubprocessGenerator:
self.proc = subprocess.Popen(
command, cwd=workdir, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True)
self.proc.stdin.close()
childout, childerr = self.proc.stdout, self.proc.stderr
bits: List[str] = []
while len(bits) == 0 or bits[-1] != "":
state = select([childout, childerr], [], [childout, childerr],
yield_interval)
if len(state[0]) == 0:
if len(state[2]) == 0:
yield None
else:
raise Exception("Error reading pipe")
if childout in state[0]:
try:
                        # read up to 4 KiB of available output
bits.append(childout.read(4096))
except IOError:
# FIXME: ick need to fix
break
if childerr in state[0]:
try:
# how many chars?
errorstream.error(childerr.read(1))
except IOError:
# FIXME: ick need to fix
break
status = self.proc.wait()
errorstream.error(childerr.read())
self.proc = None
if status:
errorstream.error("Exit code: %i\n" % status)
yield status, "".join(bits)
return Sentinel()()
def write_pipe(
command: List[str], text: str, error: Optional[int] = None) -> int:
"""Write 'text' into a shell command and discard its stdout output.
"""
proc = subprocess.Popen(command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=error)
proc.communicate(text)
return proc.wait()
def copy2(src: str, dst: str) -> None:
"""Like shutil.copy2 but ignores chmod errors, and copies symlinks as links
See [Bug 568000] Copying to NTFS fails
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
if os.path.islink(src) and os.path.isfile(src):
if os.path.lexists(dst):
os.unlink(dst)
os.symlink(os.readlink(src), dst)
elif os.path.isfile(src):
shutil.copyfile(src, dst)
else:
raise OSError("Not a file")
try:
shutil.copystat(src, dst)
except OSError as e:
if e.errno not in (errno.EPERM, errno.ENOTSUP):
raise
def copytree(src: str, dst: str) -> None:
"""Similar to shutil.copytree, but always copies symlinks and doesn't
error out if the destination path already exists.
"""
# If the source tree is a symlink, duplicate the link and we're done.
if os.path.islink(src):
os.symlink(os.readlink(src), dst)
return
try:
os.mkdir(dst)
except OSError as e:
if e.errno != errno.EEXIST:
raise
names = os.listdir(src)
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.islink(srcname):
os.symlink(os.readlink(srcname), dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname)
else:
copy2(srcname, dstname)
try:
shutil.copystat(src, dst)
except OSError as e:
if e.errno != errno.EPERM:
raise
def merge_intervals(
interval_list: List[Tuple[int, int]]) -> List[Tuple[int, int]]:
"""Merge a list of intervals
    Returns a list of intervals as 2-tuples with all overlapping
intervals merged.
interval_list must be a list of 2-tuples of integers representing
the start and end of an interval.
"""
if len(interval_list) < 2:
return interval_list
interval_deque = collections.deque(sorted(interval_list))
merged_intervals = [interval_deque.popleft()]
current_start, current_end = merged_intervals[-1]
    while interval_deque:
        new_start, new_end = interval_deque.popleft()
        if current_end >= new_end:
            # Interval is subsumed by the current one; skip it
            continue
        if current_end < new_start:
            # Intervals do not overlap; create a new one
            merged_intervals.append((new_start, new_end))
        elif current_end < new_end:
            # Intervals overlap; extend the current one
            merged_intervals[-1] = (current_start, new_end)
        current_start, current_end = merged_intervals[-1]
return merged_intervals
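# A quick check of the merge behaviour: overlapping intervals collapse,
# disjoint ones are kept apart.
def _merge_intervals_demo():
    assert merge_intervals([(3, 7), (0, 5), (10, 12), (11, 13)]) == \
        [(0, 7), (10, 13)]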
def apply_text_filters(
txt: AnyStr,
regexes: Sequence[Pattern],
apply_fn: Optional[Callable[[int, int], None]] = None
) -> AnyStr:
"""Apply text filters
Text filters "regexes", resolved as regular expressions are applied
to "txt". "txt" may be either strings or bytes, but the supplied
regexes must match the type.
"apply_fn" is a callable run for each filtered interval
"""
empty_string = b"" if isinstance(txt, bytes) else ""
newline = b"\n" if isinstance(txt, bytes) else "\n"
filter_ranges = []
for r in regexes:
for match in r.finditer(txt):
# If there are no groups in the match, use the whole match
if not r.groups:
span = match.span()
if span[0] != span[1]:
filter_ranges.append(span)
continue
# If there are groups in the regex, include all groups that
# participated in the match
for i in range(r.groups):
span = match.span(i + 1)
if span != (-1, -1) and span[0] != span[1]:
filter_ranges.append(span)
filter_ranges = merge_intervals(filter_ranges)
if apply_fn:
for (start, end) in reversed(filter_ranges):
apply_fn(start, end)
offset = 0
result_txts = []
for (start, end) in filter_ranges:
assert txt[start:end].count(newline) == 0
result_txts.append(txt[offset:start])
offset = end
result_txts.append(txt[offset:])
return empty_string.join(result_txts)
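# A small demonstration of the filtering above: a group-less pattern removes
# its whole match, leaving surrounding text (and newlines) untouched.
def _apply_text_filters_demo():
    import re
    filtered = apply_text_filters("a = 1  # note\n", [re.compile(r"#.*")])
    assert filtered == "a = 1  \n"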
def calc_syncpoint(adj: Gtk.Adjustment) -> float:
"""Calculate a cross-pane adjustment synchronisation point
Our normal syncpoint is the middle of the screen. If the
current position is within the first half screen of a
document, we scale the sync point linearly back to 0.0 (top
    of the screen); if it's in the last half screen, we again
scale linearly to 1.0.
The overall effect of this is to make sure that the top and
bottom parts of documents with different lengths and chunk
offsets correctly scroll into view.
"""
current = adj.get_value()
half_a_screen = adj.get_page_size() / 2
syncpoint = 0.0
# How far through the first half-screen our adjustment is
top_val = adj.get_lower()
first_scale = (current - top_val) / half_a_screen
syncpoint += 0.5 * min(1, first_scale)
# How far through the last half-screen our adjustment is
bottom_val = adj.get_upper() - 1.5 * adj.get_page_size()
last_scale = (current - bottom_val) / half_a_screen
syncpoint += 0.5 * max(0, last_scale)
return syncpoint
# The functions below are Mac specific to help with shell integration
class MacShellIntegration:
alias_name = 'meld'
    re = r'\s*alias\s*meld=.*'
def __init__(self):
from Foundation import NSBundle
from pathlib import Path, PurePath
self.bashrc_file = str(PurePath(Path.home(), '.bashrc'))
bundle = NSBundle.mainBundle()
self.executable_path = bundle.executablePath().fileSystemRepresentation().decode("utf-8")
    def is_alias_found(self):
        from pathlib import Path
        import re
        if not Path(self.bashrc_file).is_file():
            return False
        pattern = re.compile(self.re)
        with open(self.bashrc_file, "r") as myfile:
            content = myfile.readlines()
        return any(pattern.match(line) for line in content)
    def create_shell_alias(self):
        import re
        if self.is_alias_found():
            pattern = re.compile(self.re)
            with open(self.bashrc_file, "r") as myfile:
                content = myfile.readlines()
            with open(self.bashrc_file, "w") as f:
                for line in content:
                    if pattern.match(line):
                        f.write("alias {}={}\n".format(
                            self.alias_name, self.executable_path))
                    else:
                        f.write(line)
        else:
            with open(self.bashrc_file, "a+") as f:
                f.write("\n# Added by Meld for OSX\n")
                f.write("alias {}={}\n".format(
                    self.alias_name, self.executable_path))
def setup_integration(self):
if self.is_alias_found():
add_shortcut = modal_dialog(
primary=_("Mac Shell Integration already exists"),
                secondary=_("An alias for meld already exists. Overwrite it?"),
buttons=[
(_("_Cancel"), Gtk.ResponseType.CANCEL),
(_("Overwrite"), Gtk.ResponseType.OK),
],
messagetype=Gtk.MessageType.QUESTION
)
else:
add_shortcut = Gtk.ResponseType.OK
if add_shortcut == Gtk.ResponseType.OK:
try:
self.create_shell_alias()
modal_dialog(
primary=_(
"Alias created"
),
secondary=_(
"You should be able to use meld from the command line.\n\n"
"New Terminals will work automatically. For Terminals that are already open, issue the command:\n\n"
"source ~/.bashrc"
),
buttons=[
(_("OK"), Gtk.ResponseType.OK),
],
messagetype=Gtk.MessageType.INFO
)
            except Exception:
modal_dialog(
primary=_(
"Failed to create/update alias"
),
secondary=_(
"Meld was unable to create the alias required for shell operation. "
"Edit your ~/.bashrc and add the line: alias meld={}".format(self.executable_path)
),
buttons=[
(_("OK"), Gtk.ResponseType.OK),
],
messagetype=Gtk.MessageType.WARNING
)
|
import logging
import pytest
import pytest_bdd as bdd
bdd.scenarios('open.feature')
@pytest.mark.parametrize('scheme', ['http://', ''])
def test_open_s(request, quteproc, ssl_server, scheme):
"""Test :open with -s."""
quteproc.set_setting('content.ssl_strict', 'false')
quteproc.send_cmd(':open -s {}localhost:{}/'
.format(scheme, ssl_server.port))
if scheme == 'http://' or not request.config.webengine:
# Error is only logged on the first error with QtWebEngine
quteproc.mark_expected(category='message',
loglevel=logging.ERROR,
message="Certificate error: *")
quteproc.wait_for_load_finished('/', port=ssl_server.port, https=True,
load_status='warn')
def test_open_s_non_http(quteproc, ssl_server):
"""Test :open with -s and a qute:// page."""
quteproc.send_cmd(':open -s qute://version')
quteproc.wait_for_load_finished('qute://version')
|
import unittest
from unittest import mock
import pytest
import requests.exceptions
import somecomfort
import voluptuous as vol
from homeassistant.components.climate.const import (
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HVAC_MODES,
)
import homeassistant.components.honeywell.climate as honeywell
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
pytestmark = pytest.mark.skip("Need to be fixed!")
class TestHoneywell(unittest.TestCase):
"""A test class for Honeywell themostats."""
@mock.patch("somecomfort.SomeComfort")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_setup_us(self, mock_ht, mock_sc):
"""Test for the US setup."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "us",
}
bad_pass_config = {CONF_USERNAME: "user", honeywell.CONF_REGION: "us"}
bad_region_config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "un",
}
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(None)
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA({})
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(bad_pass_config)
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(bad_region_config)
hass = mock.MagicMock()
add_entities = mock.MagicMock()
locations = [mock.MagicMock(), mock.MagicMock()]
devices_1 = [mock.MagicMock()]
        devices_2 = [mock.MagicMock(), mock.MagicMock()]
mock_sc.return_value.locations_by_id.values.return_value = locations
locations[0].devices_by_id.values.return_value = devices_1
locations[1].devices_by_id.values.return_value = devices_2
result = honeywell.setup_platform(hass, config, add_entities)
assert result
assert mock_sc.call_count == 1
assert mock_sc.call_args == mock.call("user", "pass")
mock_ht.assert_has_calls(
[
mock.call(mock_sc.return_value, devices_1[0], 18, 28, "user", "pass"),
mock.call(mock_sc.return_value, devices_2[0], 18, 28, "user", "pass"),
mock.call(mock_sc.return_value, devices_2[1], 18, 28, "user", "pass"),
]
)
@mock.patch("somecomfort.SomeComfort")
def test_setup_us_failures(self, mock_sc):
"""Test the US setup."""
hass = mock.MagicMock()
add_entities = mock.MagicMock()
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "us",
}
mock_sc.side_effect = somecomfort.AuthError
result = honeywell.setup_platform(hass, config, add_entities)
assert not result
assert not add_entities.called
mock_sc.side_effect = somecomfort.SomeComfortError
result = honeywell.setup_platform(hass, config, add_entities)
assert not result
assert not add_entities.called
@mock.patch("somecomfort.SomeComfort")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def _test_us_filtered_devices(self, mock_ht, mock_sc, loc=None, dev=None):
"""Test for US filtered thermostats."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "us",
"location": loc,
"thermostat": dev,
}
locations = {
1: mock.MagicMock(
locationid=mock.sentinel.loc1,
devices_by_id={
11: mock.MagicMock(deviceid=mock.sentinel.loc1dev1),
12: mock.MagicMock(deviceid=mock.sentinel.loc1dev2),
},
),
2: mock.MagicMock(
locationid=mock.sentinel.loc2,
devices_by_id={21: mock.MagicMock(deviceid=mock.sentinel.loc2dev1)},
),
3: mock.MagicMock(
locationid=mock.sentinel.loc3,
devices_by_id={31: mock.MagicMock(deviceid=mock.sentinel.loc3dev1)},
),
}
mock_sc.return_value = mock.MagicMock(locations_by_id=locations)
hass = mock.MagicMock()
add_entities = mock.MagicMock()
assert honeywell.setup_platform(hass, config, add_entities) is True
return mock_ht.call_args_list, mock_sc
def test_us_filtered_thermostat_1(self):
"""Test for US filtered thermostats."""
result, client = self._test_us_filtered_devices(dev=mock.sentinel.loc1dev1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc1dev1] == devices
def test_us_filtered_thermostat_2(self):
"""Test for US filtered location."""
result, client = self._test_us_filtered_devices(dev=mock.sentinel.loc2dev1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc2dev1] == devices
def test_us_filtered_location_1(self):
"""Test for US filtered locations."""
result, client = self._test_us_filtered_devices(loc=mock.sentinel.loc1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc1dev1, mock.sentinel.loc1dev2] == devices
def test_us_filtered_location_2(self):
"""Test for US filtered locations."""
result, client = self._test_us_filtered_devices(loc=mock.sentinel.loc2)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc2dev1] == devices
@mock.patch("evohomeclient.EvohomeClient")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_eu_setup_full_config(self, mock_round, mock_evo):
"""Test the EU setup with complete configuration."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "eu",
}
mock_evo.return_value.temperatures.return_value = [{"id": "foo"}, {"id": "bar"}]
hass = mock.MagicMock()
add_entities = mock.MagicMock()
assert honeywell.setup_platform(hass, config, add_entities)
assert mock_evo.call_count == 1
assert mock_evo.call_args == mock.call("user", "pass")
assert mock_evo.return_value.temperatures.call_count == 1
assert mock_evo.return_value.temperatures.call_args == mock.call(
force_refresh=True
)
mock_round.assert_has_calls(
[
mock.call(mock_evo.return_value, "foo", True, 20.0),
mock.call(mock_evo.return_value, "bar", False, 20.0),
]
)
assert 2 == add_entities.call_count
@mock.patch("evohomeclient.EvohomeClient")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_eu_setup_partial_config(self, mock_round, mock_evo):
"""Test the EU setup with partial configuration."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "eu",
}
mock_evo.return_value.temperatures.return_value = [{"id": "foo"}, {"id": "bar"}]
hass = mock.MagicMock()
add_entities = mock.MagicMock()
assert honeywell.setup_platform(hass, config, add_entities)
mock_round.assert_has_calls(
[
mock.call(mock_evo.return_value, "foo", True, 16),
mock.call(mock_evo.return_value, "bar", False, 16),
]
)
@mock.patch("evohomeclient.EvohomeClient")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_eu_setup_bad_temp(self, mock_round, mock_evo):
"""Test the EU setup with invalid temperature."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "eu",
}
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(config)
@mock.patch("evohomeclient.EvohomeClient")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_eu_setup_error(self, mock_round, mock_evo):
"""Test the EU setup with errors."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "eu",
}
mock_evo.return_value.temperatures.side_effect = (
requests.exceptions.RequestException
)
add_entities = mock.MagicMock()
hass = mock.MagicMock()
assert not honeywell.setup_platform(hass, config, add_entities)
class TestHoneywellRound(unittest.TestCase):
"""A test class for Honeywell Round thermostats."""
def setup_method(self, method):
"""Test the setup method."""
def fake_temperatures(force_refresh=None):
"""Create fake temperatures."""
temps = [
{
"id": "1",
"temp": 20,
"setpoint": 21,
"thermostat": "main",
"name": "House",
},
{
"id": "2",
"temp": 21,
"setpoint": 22,
"thermostat": "DOMESTIC_HOT_WATER",
},
]
return temps
self.device = mock.MagicMock()
self.device.temperatures.side_effect = fake_temperatures
self.round1 = honeywell.RoundThermostat(self.device, "1", True, 16)
self.round1.update()
self.round2 = honeywell.RoundThermostat(self.device, "2", False, 17)
self.round2.update()
def test_attributes(self):
"""Test the attributes."""
assert "House" == self.round1.name
assert TEMP_CELSIUS == self.round1.temperature_unit
assert 20 == self.round1.current_temperature
assert 21 == self.round1.target_temperature
assert not self.round1.is_away_mode_on
assert "Hot Water" == self.round2.name
assert TEMP_CELSIUS == self.round2.temperature_unit
assert 21 == self.round2.current_temperature
assert self.round2.target_temperature is None
assert not self.round2.is_away_mode_on
def test_away_mode(self):
"""Test setting the away mode."""
assert not self.round1.is_away_mode_on
self.round1.turn_away_mode_on()
assert self.round1.is_away_mode_on
assert self.device.set_temperature.call_count == 1
assert self.device.set_temperature.call_args == mock.call("House", 16)
self.device.set_temperature.reset_mock()
self.round1.turn_away_mode_off()
assert not self.round1.is_away_mode_on
assert self.device.cancel_temp_override.call_count == 1
assert self.device.cancel_temp_override.call_args == mock.call("House")
def test_set_temperature(self):
"""Test setting the temperature."""
self.round1.set_temperature(temperature=25)
assert self.device.set_temperature.call_count == 1
assert self.device.set_temperature.call_args == mock.call("House", 25)
def test_set_hvac_mode(self) -> None:
"""Test setting the system operation."""
self.round1.set_hvac_mode("cool")
assert "cool" == self.round1.current_operation
assert "cool" == self.device.system_mode
self.round1.set_hvac_mode("heat")
assert "heat" == self.round1.current_operation
assert "heat" == self.device.system_mode
class TestHoneywellUS(unittest.TestCase):
"""A test class for Honeywell US thermostats."""
def setup_method(self, method):
"""Test the setup method."""
self.client = mock.MagicMock()
self.device = mock.MagicMock()
self.cool_away_temp = 18
self.heat_away_temp = 28
self.honeywell = honeywell.HoneywellUSThermostat(
self.client,
self.device,
self.cool_away_temp,
self.heat_away_temp,
"user",
"password",
)
self.device.fan_running = True
self.device.name = "test"
self.device.temperature_unit = "F"
self.device.current_temperature = 72
self.device.setpoint_cool = 78
self.device.setpoint_heat = 65
self.device.system_mode = "heat"
self.device.fan_mode = "auto"
def test_properties(self):
"""Test the properties."""
assert self.honeywell.is_fan_on
assert "test" == self.honeywell.name
assert 72 == self.honeywell.current_temperature
def test_unit_of_measurement(self):
"""Test the unit of measurement."""
assert TEMP_FAHRENHEIT == self.honeywell.temperature_unit
self.device.temperature_unit = "C"
assert TEMP_CELSIUS == self.honeywell.temperature_unit
def test_target_temp(self):
"""Test the target temperature."""
assert 65 == self.honeywell.target_temperature
self.device.system_mode = "cool"
assert 78 == self.honeywell.target_temperature
def test_set_temp(self):
"""Test setting the temperature."""
self.honeywell.set_temperature(temperature=70)
assert 70 == self.device.setpoint_heat
assert 70 == self.honeywell.target_temperature
self.device.system_mode = "cool"
assert 78 == self.honeywell.target_temperature
self.honeywell.set_temperature(temperature=74)
assert 74 == self.device.setpoint_cool
assert 74 == self.honeywell.target_temperature
def test_set_hvac_mode(self) -> None:
"""Test setting the operation mode."""
self.honeywell.set_hvac_mode("cool")
assert "cool" == self.device.system_mode
self.honeywell.set_hvac_mode("heat")
assert "heat" == self.device.system_mode
def test_set_temp_fail(self):
"""Test if setting the temperature fails."""
self.device.setpoint_heat = mock.MagicMock(
side_effect=somecomfort.SomeComfortError
)
self.honeywell.set_temperature(temperature=123)
def test_attributes(self):
"""Test the attributes."""
expected = {
honeywell.ATTR_FAN: "running",
ATTR_FAN_MODE: "auto",
ATTR_FAN_MODES: somecomfort.FAN_MODES,
ATTR_HVAC_MODES: somecomfort.SYSTEM_MODES,
}
assert expected == self.honeywell.device_state_attributes
expected["fan"] = "idle"
self.device.fan_running = False
assert expected == self.honeywell.device_state_attributes
def test_with_no_fan(self):
"""Test if there is on fan."""
self.device.fan_running = False
self.device.fan_mode = None
expected = {
honeywell.ATTR_FAN: "idle",
ATTR_FAN_MODE: None,
ATTR_FAN_MODES: somecomfort.FAN_MODES,
ATTR_HVAC_MODES: somecomfort.SYSTEM_MODES,
}
assert expected == self.honeywell.device_state_attributes
def test_heat_away_mode(self):
"""Test setting the heat away mode."""
self.honeywell.set_hvac_mode("heat")
assert not self.honeywell.is_away_mode_on
self.honeywell.turn_away_mode_on()
assert self.honeywell.is_away_mode_on
assert self.device.setpoint_heat == self.heat_away_temp
assert self.device.hold_heat is True
self.honeywell.turn_away_mode_off()
assert not self.honeywell.is_away_mode_on
assert self.device.hold_heat is False
@mock.patch("somecomfort.SomeComfort")
def test_retry(self, test_somecomfort):
"""Test retry connection."""
old_device = self.honeywell._device
self.honeywell._retry()
assert self.honeywell._device == old_device
|
import diamond.collector
import datetime
from diamond.collector import str_to_bool
import re
import zlib
try:
import pymongo
except ImportError:
pymongo = None
try:
from pymongo import ReadPreference
except ImportError:
ReadPreference = None
class MongoDBCollector(diamond.collector.Collector):
MAX_CRC32 = 4294967295
def __init__(self, *args, **kwargs):
self.__totals = {}
super(MongoDBCollector, self).__init__(*args, **kwargs)
def get_default_config_help(self):
config_help = super(MongoDBCollector, self).get_default_config_help()
config_help.update({
            'hosts': 'Array of hostname(:port) elements to get metrics from.'
                     ' Set an alias by prefixing host:port with alias@',
'host': 'A single hostname(:port) to get metrics from'
' (can be used instead of hosts and overrides it)',
'user': 'Username for authenticated login (optional)',
'passwd': 'Password for authenticated login (optional)',
'databases': 'A regex of which databases to gather metrics for.'
' Defaults to all databases.',
'ignore_collections': 'A regex of which collections to ignore.'
' MapReduce temporary collections (tmp.mr.*)'
' are ignored by default.',
            'collection_sample_rate': 'Only send stats for a consistent '
                                      'subset of collections. This is applied '
                                      'after collections are ignored via '
                                      'ignore_collections. Sampling uses '
                                      'crc32 so it is consistent across '
                                      'replicas. Value between 0 and 1. '
                                      'Default is 1',
'network_timeout': 'Timeout for mongodb connection (in'
' milliseconds). There is no timeout by'
' default.',
'simple': 'Only collect the same metrics as mongostat.',
'translate_collections': 'Translate dot (.) to underscores (_)'
' in collection names.',
'ssl': 'True to enable SSL connections to the MongoDB server.'
' Default is False',
'replica': 'True to enable replica set logging. Reports health of'
' individual nodes as well as basic aggregate stats.'
' Default is False',
'replset_node_name': 'Identifier for reporting replset metrics. '
'Default is _id'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MongoDBCollector, self).get_default_config()
config.update({
'path': 'mongo',
'hosts': ['localhost'],
'user': None,
'passwd': None,
'databases': '.*',
            'ignore_collections': r'^tmp\.mr\.',
'network_timeout': None,
'simple': 'False',
'translate_collections': 'False',
'collection_sample_rate': 1,
'ssl': False,
'replica': False,
'replset_node_name': '_id'
})
return config
def collect(self):
"""Collect number values from db.serverStatus()"""
if pymongo is None:
self.log.error('Unable to import pymongo')
return
hosts = self.config.get('hosts')
# Convert a string config value to be an array
if isinstance(hosts, basestring):
hosts = [hosts]
# we need this for backwards compatibility
if 'host' in self.config:
hosts = [self.config['host']]
# convert network_timeout to integer
if self.config['network_timeout']:
self.config['network_timeout'] = int(
self.config['network_timeout'])
# convert collection_sample_rate to float
if self.config['collection_sample_rate']:
self.config['collection_sample_rate'] = float(
self.config['collection_sample_rate'])
# use auth if given
if 'user' in self.config:
user = self.config['user']
else:
user = None
if 'passwd' in self.config:
passwd = self.config['passwd']
else:
passwd = None
for host in hosts:
            matches = re.search(r'((.+)@)?(.+)?', host)
alias = matches.group(2)
host = matches.group(3)
if alias is None:
if len(hosts) == 1:
# one host only, no need to have a prefix
base_prefix = []
else:
                    base_prefix = [re.sub(r'[:.]', '_', host)]
else:
base_prefix = [alias]
try:
# Ensure that the SSL option is a boolean.
if type(self.config['ssl']) is str:
self.config['ssl'] = str_to_bool(self.config['ssl'])
if ReadPreference is None:
conn = pymongo.MongoClient(
host,
socketTimeoutMS=self.config['network_timeout'],
ssl=self.config['ssl'],
)
else:
conn = pymongo.MongoClient(
host,
socketTimeoutMS=self.config['network_timeout'],
ssl=self.config['ssl'],
read_preference=ReadPreference.SECONDARY,
)
except Exception as e:
                self.log.error("Couldn't connect to mongodb: %s", e)
continue
# try auth
if user:
try:
conn.admin.authenticate(user, passwd)
except Exception as e:
                    self.log.error(
                        'User auth given, but could not authenticate'
                        ' with host: %s, err: %s' % (host, e))
                    return {}
data = conn.db.command('serverStatus')
self._publish_transformed(data, base_prefix)
if str_to_bool(self.config['simple']):
data = self._extract_simple_data(data)
if str_to_bool(self.config['replica']):
try:
replset_data = conn.admin.command('replSetGetStatus')
self._publish_replset(replset_data, base_prefix)
except pymongo.errors.OperationFailure as e:
                    self.log.error('Error getting replica set status: %s', e)
self._publish_dict_with_prefix(data, base_prefix)
db_name_filter = re.compile(self.config['databases'])
ignored_collections = re.compile(self.config['ignore_collections'])
sample_threshold = self.MAX_CRC32 * self.config[
'collection_sample_rate']
for db_name in conn.database_names():
if not db_name_filter.search(db_name):
continue
db_stats = conn[db_name].command('dbStats')
db_prefix = base_prefix + ['databases', db_name]
self._publish_dict_with_prefix(db_stats, db_prefix)
for collection_name in conn[db_name].collection_names():
if ignored_collections.search(collection_name):
continue
if (self.config['collection_sample_rate'] < 1 and (
zlib.crc32(collection_name) & 0xffffffff
) > sample_threshold):
continue
collection_stats = conn[db_name].command('collstats',
collection_name)
if str_to_bool(self.config['translate_collections']):
collection_name = collection_name.replace('.', '_')
collection_prefix = db_prefix + [collection_name]
self._publish_dict_with_prefix(collection_stats,
collection_prefix)
def _publish_replset(self, data, base_prefix):
""" Given a response to replSetGetStatus, publishes all numeric values
of the instance, aggregate stats of healthy nodes vs total nodes,
and the observed statuses of all nodes in the replica set.
"""
prefix = base_prefix + ['replset']
self._publish_dict_with_prefix(data, prefix)
total_nodes = len(data['members'])
healthy_nodes = reduce(lambda value, node: value + node['health'],
data['members'], 0)
self._publish_dict_with_prefix({
'healthy_nodes': healthy_nodes,
'total_nodes': total_nodes
}, prefix)
for node in data['members']:
replset_node_name = node[self.config['replset_node_name']]
node_name = str(replset_node_name.split('.')[0])
self._publish_dict_with_prefix(node, prefix + ['node', node_name])
def _publish_transformed(self, data, base_prefix):
""" Publish values of type: counter or percent """
self._publish_dict_with_prefix(data.get('opcounters', {}),
base_prefix + ['opcounters_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('opcountersRepl', {}),
base_prefix +
['opcountersRepl_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['backgroundFlushing_per_sec'],
'flushes',
data.get('backgroundFlushing', {}),
self.publish_counter)
self._publish_dict_with_prefix(data.get('network', {}),
base_prefix + ['network_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['extra_info_per_sec'],
'page_faults',
data.get('extra_info', {}),
self.publish_counter)
def get_dotted_value(data, key_name):
key_name = key_name.split('.')
for i in key_name:
data = data.get(i, {})
if not data:
return 0
return data
def compute_interval(data, total_name):
current_total = get_dotted_value(data, total_name)
total_key = '.'.join(base_prefix + [total_name])
last_total = self.__totals.get(total_key, current_total)
interval = current_total - last_total
self.__totals[total_key] = current_total
return interval
def publish_percent(value_name, total_name, data):
value = float(get_dotted_value(data, value_name) * 100)
interval = compute_interval(data, total_name)
key = '.'.join(base_prefix + ['percent', value_name])
self.publish_counter(key, value, time_delta=bool(interval),
interval=interval)
publish_percent('globalLock.lockTime', 'globalLock.totalTime', data)
publish_percent('indexCounters.btree.misses',
'indexCounters.btree.accesses', data)
locks = data.get('locks')
if locks:
if '.' in locks:
locks['_global_'] = locks['.']
                del locks['.']
key_prefix = '.'.join(base_prefix + ['percent'])
db_name_filter = re.compile(self.config['databases'])
interval = compute_interval(data, 'uptimeMillis')
for db_name in locks:
if not db_name_filter.search(db_name):
continue
r = get_dotted_value(
locks,
'%s.timeLockedMicros.r' % db_name)
R = get_dotted_value(
locks,
'.%s.timeLockedMicros.R' % db_name)
value = float(r + R) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.read' % db_name,
value, time_delta=bool(interval),
interval=interval)
w = get_dotted_value(
locks,
'%s.timeLockedMicros.w' % db_name)
W = get_dotted_value(
locks,
'%s.timeLockedMicros.W' % db_name)
value = float(w + W) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.write' % db_name,
value, time_delta=bool(interval), interval=interval)
def _publish_dict_with_prefix(self, dict, prefix, publishfn=None):
for key in dict:
self._publish_metrics(prefix, key, dict, publishfn)
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
if key not in data:
return
value = data[key]
keys = prev_keys + [key]
keys = [x.replace(" ", "_").replace("-", ".") for x in keys]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(keys, new_key, value)
elif isinstance(value, int) or isinstance(value, float):
publishfn('.'.join(keys), value)
elif isinstance(value, long):
publishfn('.'.join(keys), float(value))
elif isinstance(value, datetime.datetime):
publishfn('.'.join(keys), long(value.strftime('%s')))
def _extract_simple_data(self, data):
return {
'connections': data.get('connections'),
'globalLock': data.get('globalLock'),
'indexCounters': data.get('indexCounters')
}
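# Sketch of the dotted lookup used inside _publish_transformed (illustrative;
# get_dotted_value is a local helper defined there):
#
#     get_dotted_value({'globalLock': {'lockTime': 5}}, 'globalLock.lockTime')
#     # -> 5; any missing path segment short-circuits to 0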
|
import sys
import json
import qutebrowser
try:
from qutebrowser.misc.checkpyver import check_python_version
except ImportError:
try:
# python2
from .misc.checkpyver import check_python_version
except (SystemError, ValueError):
# Import without module - SystemError on Python3, ValueError (?!?) on
# Python2
sys.stderr.write("Please don't run this script directly, do something "
"like python3 -m qutebrowser instead.\n")
sys.stderr.flush()
sys.exit(100)
check_python_version()
import argparse # pylint: disable=wrong-import-order
from qutebrowser.misc import earlyinit
def get_argparser():
"""Get the argparse parser."""
parser = argparse.ArgumentParser(prog='qutebrowser',
description=qutebrowser.__description__)
parser.add_argument('-B', '--basedir', help="Base directory for all "
"storage.")
parser.add_argument('-C', '--config-py', help="Path to config.py.",
metavar='CONFIG')
parser.add_argument('-V', '--version', help="Show version and quit.",
action='store_true')
parser.add_argument('-s', '--set', help="Set a temporary setting for "
"this session.", nargs=2, action='append',
dest='temp_settings', default=[],
metavar=('OPTION', 'VALUE'))
parser.add_argument('-r', '--restore', help="Restore a named session.",
dest='session')
parser.add_argument('-R', '--override-restore', help="Don't restore a "
"session even if one would be restored.",
action='store_true')
parser.add_argument('--target', choices=['auto', 'tab', 'tab-bg',
'tab-silent', 'tab-bg-silent',
'window', 'private-window'],
help="How URLs should be opened if there is already a "
"qutebrowser instance running.")
parser.add_argument('--backend', choices=['webkit', 'webengine'],
help="Which backend to use.")
parser.add_argument('--json-args', help=argparse.SUPPRESS)
parser.add_argument('--temp-basedir-restarted', help=argparse.SUPPRESS)
debug = parser.add_argument_group('debug arguments')
debug.add_argument('-l', '--loglevel', dest='loglevel',
help="Override the configured console loglevel",
choices=['critical', 'error', 'warning', 'info',
'debug', 'vdebug'])
debug.add_argument('--logfilter', type=logfilter_error,
help="Comma-separated list of things to be logged "
"to the debug log on stdout.")
debug.add_argument('--loglines',
help="How many lines of the debug log to keep in RAM "
"(-1: unlimited).",
default=2000, type=int)
debug.add_argument('-d', '--debug', help="Turn on debugging options.",
action='store_true')
debug.add_argument('--json-logging', action='store_true', help="Output log"
" lines in JSON format (one object per line).")
debug.add_argument('--nocolor', help="Turn off colored logging.",
action='store_false', dest='color')
debug.add_argument('--force-color', help="Force colored logging",
action='store_true')
debug.add_argument('--nowindow', action='store_true', help="Don't show "
"the main window.")
debug.add_argument('-T', '--temp-basedir', action='store_true', help="Use "
"a temporary basedir.")
debug.add_argument('--no-err-windows', action='store_true', help="Don't "
"show any error windows (used for tests/smoke.py).")
debug.add_argument('--qt-arg', help="Pass an argument with a value to Qt. "
"For example, you can do "
"`--qt-arg geometry 650x555+200+300` to set the window "
"geometry.", nargs=2, metavar=('NAME', 'VALUE'),
action='append')
debug.add_argument('--qt-flag', help="Pass an argument to Qt as flag.",
nargs=1, action='append')
debug.add_argument('-D', '--debug-flag', type=debug_flag_error,
default=[], help="Pass name of debugging feature to be"
" turned on.", action='append', dest='debug_flags')
parser.add_argument('command', nargs='*', help="Commands to execute on "
"startup.", metavar=':command')
# URLs will actually be in command
parser.add_argument('url', nargs='*', help="URLs to open on startup "
"(empty as a window separator).")
return parser
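# For illustration (behavior assumed from the definitions above):
# get_argparser().parse_args(['--loglevel', 'debug', ':open']) produces a
# namespace with loglevel='debug' and command=[':open'] -- the greedy
# 'command' positional consumes the remaining arguments, leaving url=[].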
def directory(arg):
"""Check that an argument is a non-empty value."""
if not arg:
raise argparse.ArgumentTypeError("Invalid empty value")
return arg
def logfilter_error(logfilter):
"""Validate logger names passed to --logfilter.
Args:
logfilter: A comma separated list of logger names.
"""
from qutebrowser.utils import log
try:
log.LogFilter.parse(logfilter)
except log.InvalidLogFilterError as e:
raise argparse.ArgumentTypeError(e)
return logfilter
def debug_flag_error(flag):
"""Validate flags passed to --debug-flag.
Available flags:
debug-exit: Turn on debugging of late exit.
pdb-postmortem: Drop into pdb on exceptions.
no-sql-history: Don't store history items.
no-scroll-filtering: Process all scrolling updates.
log-requests: Log all network requests.
log-cookies: Log cookies in cookie filter.
log-scroll-pos: Log all scrolling changes.
stack: Enable Chromium stack logging.
chromium: Enable Chromium logging.
wait-renderer-process: Wait for debugger in renderer process.
avoid-chromium-init: Enable `--version` without initializing Chromium.
werror: Turn Python warnings into errors.
"""
valid_flags = ['debug-exit', 'pdb-postmortem', 'no-sql-history',
'no-scroll-filtering', 'log-requests', 'log-cookies',
'log-scroll-pos', 'stack', 'chromium',
'wait-renderer-process', 'avoid-chromium-init', 'werror']
if flag in valid_flags:
return flag
else:
raise argparse.ArgumentTypeError("Invalid debug flag - valid flags: {}"
.format(', '.join(valid_flags)))
def main():
parser = get_argparser()
argv = sys.argv[1:]
args = parser.parse_args(argv)
if args.json_args is not None:
# Restoring after a restart.
# When restarting, we serialize the argparse namespace into json, and
# construct a "fake" argparse.Namespace here based on the data loaded
# from json.
data = json.loads(args.json_args)
args = argparse.Namespace(**data)
earlyinit.early_init(args)
# We do these imports late, as earlyinit needs to run first (because of
# version checking and other early initialization).
from qutebrowser import app
return app.run(args)
|
import logging
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_MUSIC,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOUND_MODE,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_MAC,
ENTITY_MATCH_ALL,
ENTITY_MATCH_NONE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import CONF_RECEIVER
from .config_flow import (
CONF_MANUFACTURER,
CONF_MODEL,
CONF_SERIAL_NUMBER,
CONF_TYPE,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
ATTR_SOUND_MODE_RAW = "sound_mode_raw"
SUPPORT_DENON = (
SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
| SUPPORT_VOLUME_SET
)
SUPPORT_MEDIA_MODES = (
SUPPORT_PLAY_MEDIA
| SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_VOLUME_SET
| SUPPORT_PLAY
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the DenonAVR receiver from a config entry."""
entities = []
receiver = hass.data[DOMAIN][config_entry.entry_id][CONF_RECEIVER]
for receiver_zone in receiver.zones.values():
if config_entry.data[CONF_SERIAL_NUMBER] is not None:
unique_id = f"{config_entry.unique_id}-{receiver_zone.zone}"
else:
unique_id = None
entities.append(DenonDevice(receiver_zone, unique_id, config_entry))
_LOGGER.debug(
"%s receiver at host %s initialized", receiver.manufacturer, receiver.host
)
async_add_entities(entities)
class DenonDevice(MediaPlayerEntity):
"""Representation of a Denon Media Player Device."""
def __init__(self, receiver, unique_id, config_entry):
"""Initialize the device."""
self._receiver = receiver
self._name = self._receiver.name
self._unique_id = unique_id
self._config_entry = config_entry
self._muted = self._receiver.muted
self._volume = self._receiver.volume
self._current_source = self._receiver.input_func
self._source_list = self._receiver.input_func_list
self._state = self._receiver.state
self._power = self._receiver.power
self._media_image_url = self._receiver.image_url
self._title = self._receiver.title
self._artist = self._receiver.artist
self._album = self._receiver.album
self._band = self._receiver.band
self._frequency = self._receiver.frequency
self._station = self._receiver.station
self._sound_mode_support = self._receiver.support_sound_mode
if self._sound_mode_support:
self._sound_mode = self._receiver.sound_mode
self._sound_mode_raw = self._receiver.sound_mode_raw
self._sound_mode_list = self._receiver.sound_mode_list
else:
self._sound_mode = None
self._sound_mode_raw = None
self._sound_mode_list = None
self._supported_features_base = SUPPORT_DENON
if self._sound_mode_support:
self._supported_features_base |= SUPPORT_SELECT_SOUND_MODE
async def async_added_to_hass(self):
"""Register signal handler."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.signal_handler)
)
def signal_handler(self, data):
"""Handle domain-specific signal by calling appropriate method."""
entity_ids = data[ATTR_ENTITY_ID]
if entity_ids == ENTITY_MATCH_NONE:
return
if entity_ids == ENTITY_MATCH_ALL or self.entity_id in entity_ids:
params = {
key: value
for key, value in data.items()
if key not in ["entity_id", "method"]
}
getattr(self, data["method"])(**params)
def update(self):
"""Get the latest status information from device."""
self._receiver.update()
self._name = self._receiver.name
self._muted = self._receiver.muted
self._volume = self._receiver.volume
self._current_source = self._receiver.input_func
self._source_list = self._receiver.input_func_list
self._state = self._receiver.state
self._power = self._receiver.power
self._media_image_url = self._receiver.image_url
self._title = self._receiver.title
self._artist = self._receiver.artist
self._album = self._receiver.album
self._band = self._receiver.band
self._frequency = self._receiver.frequency
self._station = self._receiver.station
if self._sound_mode_support:
self._sound_mode = self._receiver.sound_mode
self._sound_mode_raw = self._receiver.sound_mode_raw
@property
def unique_id(self):
"""Return the unique id of the zone."""
return self._unique_id
@property
def device_info(self):
"""Return the device info of the receiver."""
if self._config_entry.data[CONF_SERIAL_NUMBER] is None:
return None
device_info = {
"identifiers": {(DOMAIN, self._config_entry.unique_id)},
"manufacturer": self._config_entry.data[CONF_MANUFACTURER],
"name": self._config_entry.title,
"model": f"{self._config_entry.data[CONF_MODEL]}-{self._config_entry.data[CONF_TYPE]}",
}
if self._config_entry.data[CONF_MAC] is not None:
device_info["connections"] = {
(dr.CONNECTION_NETWORK_MAC, self._config_entry.data[CONF_MAC])
}
return device_info
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Return boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
# Volume is sent in a format like -50.0. Minimum is -80.0,
# maximum is 18.0
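# e.g. a receiver volume of -50.0 maps to (-50 + 80) / 100 = 0.3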
return (float(self._volume) + 80) / 100
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""Return a list of available input sources."""
return self._source_list
@property
def sound_mode(self):
"""Return the current matched sound mode."""
return self._sound_mode
@property
def sound_mode_list(self):
"""Return a list of available sound modes."""
return self._sound_mode_list
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._current_source in self._receiver.netaudio_func_list:
return self._supported_features_base | SUPPORT_MEDIA_MODES
return self._supported_features_base
@property
def media_content_id(self):
"""Content ID of current playing media."""
return None
@property
def media_content_type(self):
"""Content type of current playing media."""
if self._state in (STATE_PLAYING, STATE_PAUSED):
return MEDIA_TYPE_MUSIC
return MEDIA_TYPE_CHANNEL
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return None
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._current_source in self._receiver.playing_func_list:
return self._media_image_url
return None
@property
def media_title(self):
"""Title of current playing media."""
if self._current_source not in self._receiver.playing_func_list:
return self._current_source
if self._title is not None:
return self._title
return self._frequency
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
if self._artist is not None:
return self._artist
return self._band
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
if self._album is not None:
return self._album
return self._station
@property
def media_album_artist(self):
"""Album artist of current playing media, music track only."""
return None
@property
def media_track(self):
"""Track number of current playing media, music track only."""
return None
@property
def media_series_title(self):
"""Title of series of current playing media, TV show only."""
return None
@property
def media_season(self):
"""Season of current playing media, TV show only."""
return None
@property
def media_episode(self):
"""Episode of current playing media, TV show only."""
return None
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
if (
self._sound_mode_raw is not None
and self._sound_mode_support
and self._power == "ON"
):
return {ATTR_SOUND_MODE_RAW: self._sound_mode_raw}
return {}
def media_play_pause(self):
"""Play or pause the media player."""
return self._receiver.toggle_play_pause()
def media_play(self):
"""Send play command."""
return self._receiver.play()
def media_pause(self):
"""Send pause command."""
return self._receiver.pause()
def media_previous_track(self):
"""Send previous track command."""
return self._receiver.previous_track()
def media_next_track(self):
"""Send next track command."""
return self._receiver.next_track()
def select_source(self, source):
"""Select input source."""
return self._receiver.set_input_func(source)
def select_sound_mode(self, sound_mode):
"""Select sound mode."""
return self._receiver.set_sound_mode(sound_mode)
def turn_on(self):
"""Turn on media player."""
if self._receiver.power_on():
self._state = STATE_ON
def turn_off(self):
"""Turn off media player."""
if self._receiver.power_off():
self._state = STATE_OFF
def volume_up(self):
"""Volume up the media player."""
return self._receiver.volume_up()
def volume_down(self):
"""Volume down media player."""
return self._receiver.volume_down()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
# Volume has to be sent in a format like -50.0. Minimum is -80.0,
# maximum is 18.0
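# e.g. a 0.3 level maps back to (0.3 * 100) - 80 = -50.0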
volume_denon = float((volume * 100) - 80)
if volume_denon > 18:
volume_denon = float(18)
try:
if self._receiver.set_volume(volume_denon):
self._volume = volume_denon
except ValueError:
pass
def mute_volume(self, mute):
"""Send mute command."""
return self._receiver.mute(mute)
def get_command(self, command, **kwargs):
"""Send generic command."""
self._receiver.send_get_command(command)
|
import asyncio
import logging
from typing import Any, Dict
from aiohttp import ClientResponseError
from homeassistant.components.water_heater import (
DOMAIN as WATER_HEATER_DOMAIN,
WaterHeaterEntity,
)
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.dispatcher import async_dispatcher_send
from . import DOMAIN, IncomfortEntity
_LOGGER = logging.getLogger(__name__)
HEATER_ATTRS = ["display_code", "display_text", "is_burning"]
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up an InComfort/Intouch water_heater device."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
heaters = hass.data[DOMAIN]["heaters"]
async_add_entities([IncomfortWaterHeater(client, h) for h in heaters])
class IncomfortWaterHeater(IncomfortEntity, WaterHeaterEntity):
"""Representation of an InComfort/Intouch water_heater device."""
def __init__(self, client, heater) -> None:
"""Initialize the water_heater device."""
super().__init__()
self._unique_id = f"{heater.serial_no}"
self.entity_id = f"{WATER_HEATER_DOMAIN}.{DOMAIN}"
self._name = "Boiler"
self._client = client
self._heater = heater
@property
def icon(self) -> str:
"""Return the icon of the water_heater device."""
return "mdi:thermometer-lines"
@property
def device_state_attributes(self) -> Dict[str, Any]:
"""Return the device state attributes."""
return {k: v for k, v in self._heater.status.items() if k in HEATER_ATTRS}
@property
def current_temperature(self) -> float:
"""Return the current temperature."""
if self._heater.is_tapping:
return self._heater.tap_temp
if self._heater.is_pumping:
return self._heater.heater_temp
return max(self._heater.heater_temp, self._heater.tap_temp)
@property
def min_temp(self) -> float:
"""Return the minimum valid temperature that can be set."""
return 30.0
@property
def max_temp(self) -> float:
"""Return the maximum valid temperature that can be set."""
return 80.0
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return 0
@property
def current_operation(self) -> str:
"""Return the current operation mode."""
if self._heater.is_failed:
return f"Fault code: {self._heater.fault_code}"
return self._heater.display_text
async def async_update(self) -> None:
"""Get the latest state data from the gateway."""
try:
await self._heater.update()
except (ClientResponseError, asyncio.TimeoutError) as err:
_LOGGER.warning("Update failed, message is: %s", err)
else:
async_dispatcher_send(self.hass, DOMAIN)
|
import logging
from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED, Homeworks
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_ID,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.util import slugify
_LOGGER = logging.getLogger(__name__)
DOMAIN = "homeworks"
HOMEWORKS_CONTROLLER = "homeworks"
EVENT_BUTTON_PRESS = "homeworks_button_press"
EVENT_BUTTON_RELEASE = "homeworks_button_release"
CONF_DIMMERS = "dimmers"
CONF_KEYPADS = "keypads"
CONF_ADDR = "addr"
CONF_RATE = "rate"
FADE_RATE = 1.0
CV_FADE_RATE = vol.All(vol.Coerce(float), vol.Range(min=0, max=20))
DIMMER_SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDR): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_RATE, default=FADE_RATE): CV_FADE_RATE,
}
)
KEYPAD_SCHEMA = vol.Schema(
{vol.Required(CONF_ADDR): cv.string, vol.Required(CONF_NAME): cv.string}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_DIMMERS): vol.All(cv.ensure_list, [DIMMER_SCHEMA]),
vol.Optional(CONF_KEYPADS, default=[]): vol.All(
cv.ensure_list, [KEYPAD_SCHEMA]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, base_config):
"""Start Homeworks controller."""
def hw_callback(msg_type, values):
"""Dispatch state changes."""
_LOGGER.debug("callback: %s, %s", msg_type, values)
addr = values[0]
signal = f"homeworks_entity_{addr}"
dispatcher_send(hass, signal, msg_type, values)
config = base_config.get(DOMAIN)
controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback)
hass.data[HOMEWORKS_CONTROLLER] = controller
def cleanup(event):
controller.close()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)
dimmers = config[CONF_DIMMERS]
load_platform(hass, "light", DOMAIN, {CONF_DIMMERS: dimmers}, base_config)
for key_config in config[CONF_KEYPADS]:
addr = key_config[CONF_ADDR]
name = key_config[CONF_NAME]
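# The keypad object registers itself with the dispatcher in __init__,
# so no reference to it needs to be kept here.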
HomeworksKeypadEvent(hass, addr, name)
return True
class HomeworksDevice:
"""Base class of a Homeworks device."""
def __init__(self, controller, addr, name):
"""Initialize Homeworks device."""
self._addr = addr
self._name = name
self._controller = controller
@property
def unique_id(self):
"""Return a unique identifier."""
return f"homeworks.{self._addr}"
@property
def name(self):
"""Device name."""
return self._name
@property
def should_poll(self):
"""No need to poll."""
return False
class HomeworksKeypadEvent:
"""When you want signals instead of entities.
Stateless sensors such as keypads are expected to generate an event
instead of a sensor entity in hass.
"""
def __init__(self, hass, addr, name):
"""Register callback that will be used for signals."""
self._hass = hass
self._addr = addr
self._name = name
self._id = slugify(self._name)
signal = f"homeworks_entity_{self._addr}"
async_dispatcher_connect(self._hass, signal, self._update_callback)
@callback
def _update_callback(self, msg_type, values):
"""Fire events if button is pressed or released."""
if msg_type == HW_BUTTON_PRESSED:
event = EVENT_BUTTON_PRESS
elif msg_type == HW_BUTTON_RELEASED:
event = EVENT_BUTTON_RELEASE
else:
return
data = {CONF_ID: self._id, CONF_NAME: self._name, "button": values[1]}
self._hass.bus.async_fire(event, data)
|
import contextlib
import io
import threading
import warnings
from typing import Any, Dict, cast
from ..core import utils
from ..core.options import OPTIONS
from .locks import acquire
from .lru_cache import LRUCache
# Global cache for storing open files.
FILE_CACHE: LRUCache[str, io.IOBase] = LRUCache(
maxsize=cast(int, OPTIONS["file_cache_maxsize"]), on_evict=lambda k, v: v.close()
)
assert FILE_CACHE.maxsize, "file cache must be at least size one"
REF_COUNTS: Dict[Any, int] = {}
_DEFAULT_MODE = utils.ReprObject("<unused>")
class FileManager:
"""Manager for acquiring and closing a file object.
Use FileManager subclasses (CachingFileManager in particular) on backend
storage classes to automatically handle issues related to keeping track of
many open files and transferring them between multiple processes.
"""
def acquire(self, needs_lock=True):
"""Acquire the file object from this manager."""
raise NotImplementedError()
def acquire_context(self, needs_lock=True):
"""Context manager for acquiring a file. Yields a file object.
The context manager unwinds any actions taken as part of acquisition
(i.e., removes it from any cache) if an exception is raised from the
context. It *does not* automatically close the file.
"""
raise NotImplementedError()
def close(self, needs_lock=True):
"""Close the file object associated with this manager, if needed."""
raise NotImplementedError()
class CachingFileManager(FileManager):
"""Wrapper for automatically opening and closing file objects.
Unlike files, CachingFileManager objects can be safely pickled and passed
between processes. They should be explicitly closed to release resources,
but a per-process least-recently-used cache for open files ensures that you
can safely create arbitrarily large numbers of FileManager objects.
Don't directly close files acquired from a FileManager. Instead, call
FileManager.close(), which ensures that closed files are removed from the
cache as well.
Example usage:
manager = CachingFileManager(open, 'example.txt', mode='w')
f = manager.acquire()
f.write(...)
manager.close() # ensures file is closed
Note that as long as previous files are still cached, acquiring a file
multiple times from the same FileManager is essentially free:
f1 = manager.acquire()
f2 = manager.acquire()
assert f1 is f2
"""
def __init__(
self,
opener,
*args,
mode=_DEFAULT_MODE,
kwargs=None,
lock=None,
cache=None,
ref_counts=None,
):
"""Initialize a FileManager.
The cache and ref_counts arguments exist solely to facilitate
dependency injection, and should only be set for tests.
Parameters
----------
opener : callable
Function that when called like ``opener(*args, **kwargs)`` returns
an open file object. The file object must implement a ``close()``
method.
*args
Positional arguments for opener. A ``mode`` argument should be
provided as a keyword argument (see below). All arguments must be
hashable.
mode : optional
If provided, passed as a keyword argument to ``opener`` along with
``**kwargs``. ``mode='w'`` has special treatment: after the first
call it is replaced by ``mode='a'`` in all subsequent calls to
avoid overwriting the newly created file.
kwargs : dict, optional
Keyword arguments for opener, excluding ``mode``. All values must
be hashable.
lock : duck-compatible threading.Lock, optional
Lock to use when modifying the cache inside acquire() and close().
By default, uses a new threading.Lock() object. If set, this object
should be pickleable.
cache : MutableMapping, optional
Mapping to use as a cache for open files. By default, uses xarray's
global LRU file cache. Because ``cache`` typically points to a
global variable and contains non-picklable file objects, an
unpickled FileManager object will be restored with the default
cache.
ref_counts : dict, optional
Optional dict to use for keeping track of the number of references to
the same file.
"""
self._opener = opener
self._args = args
self._mode = mode
self._kwargs = {} if kwargs is None else dict(kwargs)
self._default_lock = lock is None or lock is False
self._lock = threading.Lock() if self._default_lock else lock
# cache[self._key] stores the file associated with this object.
if cache is None:
cache = FILE_CACHE
self._cache = cache
self._key = self._make_key()
# ref_counts[self._key] stores the number of CachingFileManager objects
# in memory referencing this same file. We use this to know if we can
# close a file when the manager is deallocated.
if ref_counts is None:
ref_counts = REF_COUNTS
self._ref_counter = _RefCounter(ref_counts)
self._ref_counter.increment(self._key)
def _make_key(self):
"""Make a key for caching files in the LRU cache."""
value = (
self._opener,
self._args,
"a" if self._mode == "w" else self._mode,
tuple(sorted(self._kwargs.items())),
)
return _HashedSequence(value)
@contextlib.contextmanager
def _optional_lock(self, needs_lock):
"""Context manager for optionally acquiring a lock."""
if needs_lock:
with self._lock:
yield
else:
yield
def acquire(self, needs_lock=True):
"""Acquire a file object from the manager.
A new file is only opened if it has expired from the
least-recently-used cache.
This method uses a lock, which ensures that it is thread-safe. You can
safely acquire a file in multiple threads at the same time, as long as
the underlying file object is thread-safe.
Returns
-------
file-like
An open file object, as returned by ``opener(*args, **kwargs)``.
"""
file, _ = self._acquire_with_cache_info(needs_lock)
return file
@contextlib.contextmanager
def acquire_context(self, needs_lock=True):
"""Context manager for acquiring a file."""
file, cached = self._acquire_with_cache_info(needs_lock)
try:
yield file
except Exception:
if not cached:
self.close(needs_lock)
raise
def _acquire_with_cache_info(self, needs_lock=True):
"""Acquire a file, returning the file and whether it was cached."""
with self._optional_lock(needs_lock):
try:
file = self._cache[self._key]
except KeyError:
kwargs = self._kwargs
if self._mode is not _DEFAULT_MODE:
kwargs = kwargs.copy()
kwargs["mode"] = self._mode
file = self._opener(*self._args, **kwargs)
if self._mode == "w":
# ensure file doesn't get overwritten when opened again
self._mode = "a"
self._cache[self._key] = file
return file, False
else:
return file, True
def close(self, needs_lock=True):
"""Explicitly close any associated file object (if necessary)."""
# TODO: remove needs_lock if/when we have a reentrant lock in
# dask.distributed: https://github.com/dask/dask/issues/3832
with self._optional_lock(needs_lock):
default = None
file = self._cache.pop(self._key, default)
if file is not None:
file.close()
def __del__(self):
# If we're the only CachingFileManager referencing an unclosed file, we
# should remove it from the cache upon garbage collection.
#
# Keeping our own count of file references might seem like overkill,
# but it's actually pretty common to reopen files with the same
# variable name in a notebook or command line environment, e.g., to
# fix the parameters used when opening a file:
# >>> ds = xarray.open_dataset('myfile.nc')
# >>> ds = xarray.open_dataset('myfile.nc', decode_times=False)
# This second assignment to "ds" drops CPython's ref-count on the first
# "ds" argument to zero, which can trigger garbage collections. So if
# we didn't check whether another object is referencing 'myfile.nc',
# the newly opened file would actually be immediately closed!
ref_count = self._ref_counter.decrement(self._key)
if not ref_count and self._key in self._cache:
if acquire(self._lock, blocking=False):
# Only close files if we can do so immediately.
try:
self.close(needs_lock=False)
finally:
self._lock.release()
if OPTIONS["warn_for_unclosed_files"]:
warnings.warn(
"deallocating {}, but file is not already closed. "
"This may indicate a bug.".format(self),
RuntimeWarning,
stacklevel=2,
)
def __getstate__(self):
"""State for pickling."""
# cache and ref_counts are intentionally omitted: we don't want to try
# to serialize these global objects.
lock = None if self._default_lock else self._lock
return (self._opener, self._args, self._mode, self._kwargs, lock)
def __setstate__(self, state):
"""Restore from a pickle."""
opener, args, mode, kwargs, lock = state
self.__init__(opener, *args, mode=mode, kwargs=kwargs, lock=lock)
def __repr__(self):
args_string = ", ".join(map(repr, self._args))
if self._mode is not _DEFAULT_MODE:
args_string += f", mode={self._mode!r}"
return "{}({!r}, {}, kwargs={})".format(
type(self).__name__, self._opener, args_string, self._kwargs
)
class _RefCounter:
"""Class for keeping track of reference counts."""
def __init__(self, counts):
self._counts = counts
self._lock = threading.Lock()
def increment(self, name):
with self._lock:
count = self._counts[name] = self._counts.get(name, 0) + 1
return count
def decrement(self, name):
with self._lock:
count = self._counts[name] - 1
if count:
self._counts[name] = count
else:
del self._counts[name]
return count
class _HashedSequence(list):
"""Speedup repeated look-ups by caching hash values.
Based on what Python uses internally in functools.lru_cache.
Python doesn't perform this optimization automatically:
https://bugs.python.org/issue1462796
"""
def __init__(self, tuple_value):
self[:] = tuple_value
self.hashvalue = hash(tuple_value)
def __hash__(self):
return self.hashvalue
class DummyFileManager(FileManager):
"""FileManager that simply wraps an open file in the FileManager interface."""
def __init__(self, value):
self._value = value
def acquire(self, needs_lock=True):
del needs_lock # ignored
return self._value
@contextlib.contextmanager
def acquire_context(self, needs_lock=True):
del needs_lock
yield self._value
def close(self, needs_lock=True):
del needs_lock # ignored
self._value.close()
|
from __future__ import unicode_literals
import os
import time
import string
from lib.fun.fun import cool
from collections import Counter
from lib.fun.decorator import magic
from lib.data.data import pystrs, pyoptions
def counter_operator(original_file_path, justsave, justview, vs_count=pyoptions.default_vs_items):
text = open(original_file_path, 'r').read()
for ch in string.punctuation:  # str.replace(string.punctuation, "") would only match that exact sequence
text = text.replace(ch, "")
items = Counter(text.split(pyoptions.counter_split)).most_common(vs_count)
items_length = len(items)
if vs_count > pyoptions.vs_counter_switcher:
exit(pyoptions.CRLF + cool.fuchsia("[!] view items should Leq {0}".format(pyoptions.vs_counter_switcher)))
elif items_length < vs_count:
exit(pyoptions.CRLF + cool.fuchsia("[!] max items is {0}".format(items_length)))
if justsave:
@magic
def counter():
for _ in items:
yield _[0]
elif justview:
print(pyoptions.CRLF * 2)
for item in items:
print("{0}Word:{2:20} -> {1:10} times".format(" "*5, cool.orange(item[1]), cool.orange(item[0])))
print("[+] Cost:{} seconds".format(cool.orange(str(time.time() - pystrs.startime)[:6])))
else:
print(pyoptions.CRLF * 2)
for item in items:
print("{0}Word:{2:20} -> {1:10} times".format(" "*5, cool.orange(item[1]), cool.orange(item[0])))
@magic
def counter():
for _ in items:
yield _[0]
def counter_magic(*args):
"""['v','s','vs'] [file] [view_num]"""
args = list(args[0])
# counter lack file argument
if len(args) == 2 and args[1] in pystrs.counter_cmd_range:
exit(pyoptions.CRLF + cool.red("[-] {0} need specify the file path".format(pyoptions.tool_range[2])))
# counter
elif len(args) >= 3:
if args[1] not in pystrs.counter_cmd_range:
exit(pyoptions.CRLF + cool.red("[-] Need {0}'s options, choose from '{1}' or '{2}' or '{3}'".format(
args[0], pystrs.counter_cmd_range[0], pystrs.counter_cmd_range[1], pystrs.counter_cmd_range[2])))
if os.path.isfile(args[2]):
# counter s file
if len(args) == 3 and args[1] == pystrs.just_save_counter:
counter_operator(args[2], True, False)
# counter v file
elif len(args) == 3 and args[1] == pystrs.just_view_counter:
counter_operator(args[2], False, True)
# counter vs file
elif len(args) == 3 and args[1] == pystrs.save_and_view:
counter_operator(args[2], False, False)
# counter v file 100
elif len(args) == 4 and args[1] == pystrs.just_view_counter and str(args[3]).isdigit():
counter_operator(args[2], False, True, vs_count=int(args[3]))
# counter s file 100
elif len(args) == 4 and args[1] == pystrs.just_save_counter and str(args[3]).isdigit():
counter_operator(args[2], True, False, vs_count=int(args[3]))
# counter vs file 100
elif len(args) == 4 and args[1] == pystrs.save_and_view and str(args[3]).isdigit():
counter_operator(args[2], False, False, vs_count=int(args[3]))
else:
exit(pyoptions.CRLF + cool.red("[-] Some unexpected input"))
else:
exit(pyoptions.CRLF + cool.red("[-] File: %s not exists" % args[2]))
else:
exit(pyoptions.CRLF + cool.fuchsia("[!] Usage: {} {}".format(args[0], pyoptions.tools_info.get(args[0]))))
|
import pkgutil
from io import StringIO
import pandas as pd
from scipy.stats import rankdata
class BackgroundFrequencyDataFramePreparer(object):
@staticmethod
def prep_background_frequency(df):
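# Dense-rank the raw background counts, then divide by the top rank so the
# column becomes a relative value in (0, 1].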
df['rank'] = rankdata(df.background, method='dense')
df['background'] = df['rank'] / df['rank'].max()
return df[['background']]
class BackgroundFrequenciesFromCorpus(BackgroundFrequencyDataFramePreparer):
def __init__(self, corpus, exclude_categories=None):
self.background_df = (pd.DataFrame(corpus.remove_categories(exclude_categories or [])
.get_term_freq_df().sum(axis=1))
.rename(columns={0: 'background'}))
def get_background_frequency_df(self):
return self.background_df
def get_background_rank_df(self):
return self.prep_background_frequency(self.get_background_frequency_df())
class BackgroundFrequencies(BackgroundFrequencyDataFramePreparer):
@staticmethod
def get_background_frequency_df(frequency_path=None):
raise NotImplementedError
@classmethod
def get_background_rank_df(cls, frequency_path=None):
return cls.prep_background_frequency(
cls.get_background_frequency_df(frequency_path)
)
class DefaultBackgroundFrequencies(BackgroundFrequencies):
@staticmethod
def get_background_frequency_df(frequency_path=None):
if frequency_path:
unigram_freq_table_buf = open(frequency_path)
else:
unigram_freq_table_buf = StringIO(pkgutil.get_data('scattertext', 'data/count_1w.txt')
.decode('utf-8'))
to_ret = (pd.read_csv(unigram_freq_table_buf,
sep='\t',
names=['word', 'background'])
.sort_values(ascending=False, by='background')
.drop_duplicates(['word'])
.set_index('word'))
return to_ret
|
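"""Remove the empty directories given on the command line."""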
from __future__ import print_function
import argparse, os, sys
def rmdir(dirnames, verbose=False):
for dirname in dirnames:
try:
os.rmdir(dirname)
if verbose:
print('Removed directory {!r}'.format(dirname))
except OSError as e:
print('Cannot remove directory {!r}: {}'.format(dirname, e), file=sys.stderr)
# --- main
def main(args):
parser = argparse.ArgumentParser(description=__doc__, epilog='Use "rm -r" to remove non-empty directory tree')
parser.add_argument('dir', help='directories to remove', action='store', nargs='+')
parser.add_argument('-v', '--verbose', help='display info for each processed directory', action='store_true')
ns = parser.parse_args(args)
rmdir(ns.dir, ns.verbose)
if __name__ == "__main__":
main(sys.argv[1:])
|
from collections import OrderedDict
import logging
import queue
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PAYLOAD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
)
from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_WILL_MESSAGE,
DATA_MQTT_CONFIG,
DEFAULT_BIRTH,
DEFAULT_DISCOVERY,
DEFAULT_WILL,
)
from .util import MQTT_WILL_BIRTH_SCHEMA
_LOGGER = logging.getLogger(__name__)
@config_entries.HANDLERS.register("mqtt")
class FlowHandler(config_entries.ConfigFlow):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
_hassio_discovery = None
@staticmethod
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return MQTTOptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return await self.async_step_broker()
async def async_step_broker(self, user_input=None):
"""Confirm the setup."""
errors = {}
if user_input is not None:
can_connect = await self.hass.async_add_executor_job(
try_connection,
user_input[CONF_BROKER],
user_input[CONF_PORT],
user_input.get(CONF_USERNAME),
user_input.get(CONF_PASSWORD),
)
if can_connect:
return self.async_create_entry(
title=user_input[CONF_BROKER], data=user_input
)
errors["base"] = "cannot_connect"
fields = OrderedDict()
fields[vol.Required(CONF_BROKER)] = str
fields[vol.Required(CONF_PORT, default=1883)] = vol.Coerce(int)
fields[vol.Optional(CONF_USERNAME)] = str
fields[vol.Optional(CONF_PASSWORD)] = str
fields[vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY)] = bool
return self.async_show_form(
step_id="broker", data_schema=vol.Schema(fields), errors=errors
)
async def async_step_import(self, user_input):
"""Import a config entry.
Special type of import, we're not actually going to store any data.
Instead, we're going to rely on the values that are in config file.
"""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
return self.async_create_entry(title="configuration.yaml", data={})
async def async_step_hassio(self, discovery_info):
"""Receive a Hass.io discovery."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
self._hassio_discovery = discovery_info
return await self.async_step_hassio_confirm()
async def async_step_hassio_confirm(self, user_input=None):
"""Confirm a Hass.io discovery."""
errors = {}
if user_input is not None:
data = self._hassio_discovery
can_connect = await self.hass.async_add_executor_job(
try_connection,
data[CONF_HOST],
data[CONF_PORT],
data.get(CONF_USERNAME),
data.get(CONF_PASSWORD),
data.get(CONF_PROTOCOL),
)
if can_connect:
return self.async_create_entry(
title=data["addon"],
data={
CONF_BROKER: data[CONF_HOST],
CONF_PORT: data[CONF_PORT],
CONF_USERNAME: data.get(CONF_USERNAME),
CONF_PASSWORD: data.get(CONF_PASSWORD),
CONF_PROTOCOL: data.get(CONF_PROTOCOL),
CONF_DISCOVERY: user_input[CONF_DISCOVERY],
},
)
errors["base"] = "cannot_connect"
return self.async_show_form(
step_id="hassio_confirm",
description_placeholders={"addon": self._hassio_discovery["addon"]},
data_schema=vol.Schema(
{vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): bool}
),
errors=errors,
)
class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle MQTT options."""
def __init__(self, config_entry):
"""Initialize MQTT options flow."""
self.config_entry = config_entry
self.broker_config = {}
self.options = dict(config_entry.options)
async def async_step_init(self, user_input=None):
"""Manage the MQTT options."""
return await self.async_step_broker()
async def async_step_broker(self, user_input=None):
"""Manage the MQTT options."""
errors = {}
current_config = self.config_entry.data
yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
if user_input is not None:
can_connect = await self.hass.async_add_executor_job(
try_connection,
user_input[CONF_BROKER],
user_input[CONF_PORT],
user_input.get(CONF_USERNAME),
user_input.get(CONF_PASSWORD),
)
if can_connect:
self.broker_config.update(user_input)
return await self.async_step_options()
errors["base"] = "cannot_connect"
fields = OrderedDict()
current_broker = current_config.get(CONF_BROKER, yaml_config.get(CONF_BROKER))
current_port = current_config.get(CONF_PORT, yaml_config.get(CONF_PORT))
current_user = current_config.get(CONF_USERNAME, yaml_config.get(CONF_USERNAME))
current_pass = current_config.get(CONF_PASSWORD, yaml_config.get(CONF_PASSWORD))
fields[vol.Required(CONF_BROKER, default=current_broker)] = str
fields[vol.Required(CONF_PORT, default=current_port)] = vol.Coerce(int)
fields[
vol.Optional(
CONF_USERNAME,
description={"suggested_value": current_user},
)
] = str
fields[
vol.Optional(
CONF_PASSWORD,
description={"suggested_value": current_pass},
)
] = str
return self.async_show_form(
step_id="broker",
data_schema=vol.Schema(fields),
errors=errors,
)
async def async_step_options(self, user_input=None):
"""Manage the MQTT options."""
errors = {}
current_config = self.config_entry.data
yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
options_config = {}
if user_input is not None:
bad_birth = False
bad_will = False
if "birth_topic" in user_input:
birth_message = {
ATTR_TOPIC: user_input["birth_topic"],
ATTR_PAYLOAD: user_input.get("birth_payload", ""),
ATTR_QOS: user_input["birth_qos"],
ATTR_RETAIN: user_input["birth_retain"],
}
try:
birth_message = MQTT_WILL_BIRTH_SCHEMA(birth_message)
options_config[CONF_BIRTH_MESSAGE] = birth_message
except vol.Invalid:
errors["base"] = "bad_birth"
bad_birth = True
if not user_input["birth_enable"]:
options_config[CONF_BIRTH_MESSAGE] = {}
if "will_topic" in user_input:
will_message = {
ATTR_TOPIC: user_input["will_topic"],
ATTR_PAYLOAD: user_input.get("will_payload", ""),
ATTR_QOS: user_input["will_qos"],
ATTR_RETAIN: user_input["will_retain"],
}
try:
will_message = MQTT_WILL_BIRTH_SCHEMA(will_message)
options_config[CONF_WILL_MESSAGE] = will_message
except vol.Invalid:
errors["base"] = "bad_will"
bad_will = True
if not user_input["will_enable"]:
options_config[CONF_WILL_MESSAGE] = {}
options_config[CONF_DISCOVERY] = user_input[CONF_DISCOVERY]
if not bad_birth and not bad_will:
updated_config = {}
updated_config.update(self.broker_config)
updated_config.update(options_config)
self.hass.config_entries.async_update_entry(
self.config_entry, data=updated_config
)
return self.async_create_entry(title="", data=None)
birth = {
**DEFAULT_BIRTH,
**current_config.get(
CONF_BIRTH_MESSAGE, yaml_config.get(CONF_BIRTH_MESSAGE, {})
),
}
will = {
**DEFAULT_WILL,
**current_config.get(
CONF_WILL_MESSAGE, yaml_config.get(CONF_WILL_MESSAGE, {})
),
}
discovery = current_config.get(
CONF_DISCOVERY, yaml_config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY)
)
fields = OrderedDict()
fields[vol.Optional(CONF_DISCOVERY, default=discovery)] = bool
# Birth message is disabled if CONF_BIRTH_MESSAGE = {}
fields[
vol.Optional(
"birth_enable",
default=CONF_BIRTH_MESSAGE not in current_config
or current_config[CONF_BIRTH_MESSAGE] != {},
)
] = bool
fields[
vol.Optional(
"birth_topic", description={"suggested_value": birth[ATTR_TOPIC]}
)
] = str
fields[
vol.Optional(
"birth_payload", description={"suggested_value": birth[CONF_PAYLOAD]}
)
] = str
fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = vol.In([0, 1, 2])
fields[vol.Optional("birth_retain", default=birth[ATTR_RETAIN])] = bool
# Will message is disabled if CONF_WILL_MESSAGE = {}
fields[
vol.Optional(
"will_enable",
default=CONF_WILL_MESSAGE not in current_config
or current_config[CONF_WILL_MESSAGE] != {},
)
] = bool
fields[
vol.Optional(
"will_topic", description={"suggested_value": will[ATTR_TOPIC]}
)
] = str
fields[
vol.Optional(
"will_payload", description={"suggested_value": will[CONF_PAYLOAD]}
)
] = str
fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = vol.In([0, 1, 2])
fields[vol.Optional("will_retain", default=will[ATTR_RETAIN])] = bool
return self.async_show_form(
step_id="options",
data_schema=vol.Schema(fields),
errors=errors,
)
def try_connection(broker, port, username, password, protocol="3.1"):
"""Test if we can connect to an MQTT broker."""
# pylint: disable=import-outside-toplevel
import paho.mqtt.client as mqtt
if protocol == "3.1":
proto = mqtt.MQTTv31
else:
proto = mqtt.MQTTv311
client = mqtt.Client(protocol=proto)
if username and password:
client.username_pw_set(username, password)
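# paho's network loop runs in a background thread (loop_start below); a
# one-slot queue hands the CONNACK result back to this thread, and hitting
# the 5 second timeout is treated as a failed connection.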
result = queue.Queue(maxsize=1)
def on_connect(client_, userdata, flags, result_code):
"""Handle connection result."""
result.put(result_code == mqtt.CONNACK_ACCEPTED)
client.on_connect = on_connect
client.connect_async(broker, port)
client.loop_start()
try:
return result.get(timeout=5)
except queue.Empty:
return False
finally:
client.disconnect()
client.loop_stop()
|
import os
import html
import netrc
from typing import Callable, Mapping
from PyQt5.QtCore import QUrl
from qutebrowser.config import config
from qutebrowser.utils import usertypes, message, log, objreg, jinja, utils
from qutebrowser.mainwindow import mainwindow
class CallSuper(Exception):
"""Raised when the caller should call the superclass instead."""
def custom_headers(url):
"""Get the combined custom headers."""
headers = {}
dnt_config = config.instance.get('content.headers.do_not_track', url=url)
if dnt_config is not None:
dnt = b'1' if dnt_config else b'0'
headers[b'DNT'] = dnt
conf_headers = config.instance.get('content.headers.custom', url=url)
for header, value in conf_headers.items():
headers[header.encode('ascii')] = value.encode('ascii')
accept_language = config.instance.get('content.headers.accept_language',
url=url)
if accept_language is not None:
headers[b'Accept-Language'] = accept_language.encode('ascii')
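# Sort so callers always receive the (name, value) byte pairs in a stable order.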
return sorted(headers.items())
def authentication_required(url, authenticator, abort_on):
"""Ask a prompt for an authentication question."""
realm = authenticator.realm()
if realm:
msg = '<b>{}</b> says:<br/>{}'.format(
html.escape(url.toDisplayString()), html.escape(realm))
else:
msg = '<b>{}</b> needs authentication'.format(
html.escape(url.toDisplayString()))
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
answer = message.ask(title="Authentication required", text=msg,
mode=usertypes.PromptMode.user_pwd,
abort_on=abort_on, url=urlstr)
if answer is not None:
authenticator.setUser(answer.user)
authenticator.setPassword(answer.password)
return answer
def javascript_confirm(url, js_msg, abort_on):
"""Display a javascript confirm prompt."""
log.js.debug("confirm: {}".format(js_msg))
if config.val.content.javascript.modal_dialog:
raise CallSuper
msg = 'From <b>{}</b>:<br/>{}'.format(html.escape(url.toDisplayString()),
js_msg)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
ans = message.ask('Javascript confirm', msg,
mode=usertypes.PromptMode.yesno,
abort_on=abort_on, url=urlstr)
return bool(ans)
def javascript_prompt(url, js_msg, default, abort_on):
"""Display a javascript prompt."""
log.js.debug("prompt: {}".format(js_msg))
if config.val.content.javascript.modal_dialog:
raise CallSuper
if not config.val.content.javascript.prompt:
return (False, "")
msg = '<b>{}</b> asks:<br/>{}'.format(html.escape(url.toDisplayString()),
js_msg)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
answer = message.ask('Javascript prompt', msg,
mode=usertypes.PromptMode.text,
default=default,
abort_on=abort_on, url=urlstr)
if answer is None:
return (False, "")
else:
return (True, answer)
def javascript_alert(url, js_msg, abort_on):
"""Display a javascript alert."""
log.js.debug("alert: {}".format(js_msg))
if config.val.content.javascript.modal_dialog:
raise CallSuper
if not config.val.content.javascript.alert:
return
msg = 'From <b>{}</b>:<br/>{}'.format(html.escape(url.toDisplayString()),
js_msg)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
message.ask('Javascript alert', msg, mode=usertypes.PromptMode.alert,
abort_on=abort_on, url=urlstr)
# Needs to line up with the values allowed for the
# content.javascript.log setting.
_JS_LOGMAP: Mapping[str, Callable[[str], None]] = {
'none': lambda arg: None,
'debug': log.js.debug,
'info': log.js.info,
'warning': log.js.warning,
'error': log.js.error,
}
def javascript_log_message(level, source, line, msg):
"""Display a JavaScript log message."""
logstring = "[{}:{}] {}".format(source, line, msg)
logger = _JS_LOGMAP[config.cache['content.javascript.log'][level.name]]
logger(logstring)
def ignore_certificate_errors(url, errors, abort_on):
"""Display a certificate error question.
Args:
url: The URL the errors happened in
errors: A list of QSslErrors or QWebEngineCertificateErrors
Return:
True if the error should be ignored, False otherwise.
"""
ssl_strict = config.instance.get('content.ssl_strict', url=url)
log.network.debug("Certificate errors {!r}, strict {}".format(
errors, ssl_strict))
for error in errors:
assert error.is_overridable(), repr(error)
if ssl_strict == 'ask':
err_template = jinja.environment.from_string("""
Errors while loading <b>{{url.toDisplayString()}}</b>:<br/>
<ul>
{% for err in errors %}
<li>{{err}}</li>
{% endfor %}
</ul>
""".strip())
msg = err_template.render(url=url, errors=errors)
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
ignore = message.ask(title="Certificate errors - continue?", text=msg,
mode=usertypes.PromptMode.yesno, default=False,
abort_on=abort_on, url=urlstr)
if ignore is None:
# prompt aborted
ignore = False
return ignore
elif ssl_strict is False:
log.network.debug("ssl_strict is False, only warning about errors")
for err in errors:
# FIXME we might want to use warn here (non-fatal error)
# https://github.com/qutebrowser/qutebrowser/issues/114
message.error('Certificate error: {}'.format(err))
return True
elif ssl_strict is True:
return False
else:
raise ValueError("Invalid ssl_strict value {!r}".format(ssl_strict))
raise utils.Unreachable
def feature_permission(url, option, msg, yes_action, no_action, abort_on,
blocking=False):
"""Handle a feature permission request.
Args:
url: The URL the request was done for.
option: An option name to check.
msg: A string like "show notifications"
yes_action: A callable to call if the request was approved
no_action: A callable to call if the request was denied
abort_on: A list of signals which interrupt the question.
blocking: If True, ask a blocking question.
Return:
The Question object if a question was asked (and blocking=False),
None otherwise.
"""
config_val = config.instance.get(option, url=url)
if config_val == 'ask':
if url.isValid():
urlstr = url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
text = "Allow the website at <b>{}</b> to {}?".format(
html.escape(url.toDisplayString()), msg)
else:
urlstr = None
option = None # For message.ask/confirm_async
text = "Allow the website to {}?".format(msg)
if blocking:
answer = message.ask(abort_on=abort_on, title='Permission request',
text=text, url=urlstr, option=option,
mode=usertypes.PromptMode.yesno)
if answer:
yes_action()
else:
no_action()
return None
else:
return message.confirm_async(
yes_action=yes_action, no_action=no_action,
cancel_action=no_action, abort_on=abort_on,
title='Permission request', text=text, url=urlstr,
option=option)
elif config_val:
yes_action()
return None
else:
no_action()
return None
def get_tab(win_id, target):
"""Get a tab widget for the given usertypes.ClickTarget.
Args:
win_id: The window ID to open new tabs in
target: A usertypes.ClickTarget
"""
if target == usertypes.ClickTarget.tab:
bg_tab = False
elif target == usertypes.ClickTarget.tab_bg:
bg_tab = True
elif target == usertypes.ClickTarget.window:
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
window = mainwindow.MainWindow(private=tabbed_browser.is_private)
window.show()
win_id = window.win_id
bg_tab = False
else:
raise ValueError("Invalid ClickTarget {}".format(target))
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
return tabbed_browser.tabopen(url=None, background=bg_tab)
def get_user_stylesheet(searching=False):
"""Get the combined user-stylesheet."""
css = ''
stylesheets = config.val.content.user_stylesheets
for filename in stylesheets:
with open(filename, 'r', encoding='utf-8') as f:
css += f.read()
setting = config.val.scrolling.bar
if setting == 'overlay' and not utils.is_mac:
setting = 'when-searching'
if setting == 'never' or (setting == 'when-searching' and not searching):
css += '\nhtml > ::-webkit-scrollbar { width: 0px; height: 0px; }'
return css
def netrc_authentication(url, authenticator):
"""Perform authorization using netrc.
Args:
url: The URL the request was done for.
authenticator: QAuthenticator object used to set credentials provided.
Return:
True if netrc found credentials for the URL.
False otherwise.
"""
if 'HOME' not in os.environ:
# netrc raises an OSError if 'HOME' isn't available in os.environ.
# We don't want to log that, so we prevent it altogether.
return False
user = None
password = None
authenticators = None
try:
net = netrc.netrc(config.val.content.netrc_file)
if url.port() != -1:
authenticators = net.authenticators(
"{}:{}".format(url.host(), url.port()))
if not authenticators:
authenticators = net.authenticators(url.host())
if authenticators:
user, _account, password = authenticators
except FileNotFoundError:
log.misc.debug("No .netrc file found")
except OSError as e:
log.misc.exception("Unable to read the netrc file: {}".format(e))
except netrc.NetrcParseError as e:
log.misc.exception("Error when parsing the netrc file: {}".format(e))
if user is None:
return False
authenticator.setUser(user)
authenticator.setPassword(password)
return True
|
import mock
from paasta_tools.cli.cmds.pause_service_autoscaler import MAX_PAUSE_DURATION
from paasta_tools.cli.cmds.pause_service_autoscaler import (
paasta_pause_service_autoscaler,
)
@mock.patch("paasta_tools.cli.cmds.pause_service_autoscaler._log_audit", autospec=True)
def test_pause_autoscaler_defaults(mock_log_audit):
args = mock.Mock(cluster="cluster1", duration=30, resume=False, info=False)
with mock.patch(
"paasta_tools.cli.cmds.pause_service_autoscaler.update_service_autoscale_pause_time",
autospec=True,
) as mock_exc:
mock_exc.return_value = 0
return_code = paasta_pause_service_autoscaler(args)
mock_exc.assert_called_once_with("cluster1", 30)
assert return_code == 0
mock_log_audit.assert_called_once_with(
action="pause-service-autoscaler",
action_details={"duration": 30},
cluster="cluster1",
)
@mock.patch("paasta_tools.cli.cmds.pause_service_autoscaler._log_audit", autospec=True)
def test_pause_autoscaler_long(mock_log_audit):
args = mock.Mock(
cluster="cluster1",
duration=MAX_PAUSE_DURATION + 10,
force=False,
resume=False,
info=False,
)
with mock.patch(
"paasta_tools.cli.cmds.pause_service_autoscaler.update_service_autoscale_pause_time",
autospec=True,
):
return_code = paasta_pause_service_autoscaler(args)
assert return_code == 3
assert not mock_log_audit.called
@mock.patch("paasta_tools.cli.cmds.pause_service_autoscaler._log_audit", autospec=True)
def test_pause_autoscaler_resume(mock_log_audit):
args = mock.Mock(
cluster="cluster1", duration=120, force=False, resume=True, info=False
)
with mock.patch(
"paasta_tools.cli.cmds.pause_service_autoscaler.delete_service_autoscale_pause_time",
autospec=True,
) as mock_exc:
mock_exc.return_value = 0
return_code = paasta_pause_service_autoscaler(args)
mock_exc.assert_called_once_with("cluster1")
assert return_code == 0
mock_log_audit.assert_called_once_with(
action="resume-service-autoscaler", cluster="cluster1"
)
@mock.patch("paasta_tools.cli.cmds.pause_service_autoscaler._log_audit", autospec=True)
def test_pause_autoscaler_force(mock_log_audit):
args = mock.Mock(
cluster="cluster1",
duration=MAX_PAUSE_DURATION + 10,
force=True,
resume=False,
info=False,
)
with mock.patch(
"paasta_tools.cli.cmds.pause_service_autoscaler.update_service_autoscale_pause_time",
autospec=True,
) as mock_exc:
mock_exc.return_value = 0
return_code = paasta_pause_service_autoscaler(args)
assert return_code == 0
mock_exc.assert_called_once_with("cluster1", 330)
mock_log_audit.assert_called_once_with(
action="pause-service-autoscaler",
action_details={"duration": 330},
cluster="cluster1",
)
@mock.patch("paasta_tools.cli.cmds.pause_service_autoscaler._log_audit", autospec=True)
def test_pause_autoscaler_info(mock_log_audit):
args = mock.Mock(
cluster="cluster1", duration=30, force=False, resume=False, info=True
)
with mock.patch(
"paasta_tools.cli.cmds.pause_service_autoscaler.get_service_autoscale_pause_time",
autospec=True,
) as mock_exc:
mock_exc.return_value = 0
return_code = paasta_pause_service_autoscaler(args)
mock_exc.assert_called_once_with("cluster1")
assert return_code == 0
assert not mock_log_audit.called
|
from _csv import QUOTE_NONE
import pandas as pd
import numpy as np
import logging
import json
import time
import datetime
import itertools as itt
from pandas.errors import ParserError
from yandextank.common.interfaces import StatsReader
from io import StringIO
logger = logging.getLogger(__name__)
phout_columns = [
'send_ts', 'tag', 'interval_real', 'connect_time', 'send_time', 'latency',
'receive_time', 'interval_event', 'size_out', 'size_in', 'net_code',
'proto_code'
]
dtypes = {
    'send_ts': np.float64,  # was keyed 'time', which matches no phout column and was silently ignored
    'tag': str,  # np.str was removed from numpy; plain str is equivalent
    'interval_real': np.int64,
    'connect_time': np.int64,
    'send_time': np.int64,
    'latency': np.int64,
    'receive_time': np.int64,
    'interval_event': np.int64,
    'size_out': np.int64,
    'size_in': np.int64,
    'net_code': np.int64,
    'proto_code': np.int64,
}
def string_to_df(data):
try:
chunk = pd.read_csv(StringIO(data), sep='\t', names=phout_columns, dtype=dtypes, quoting=QUOTE_NONE)
except ParserError as e:
        logger.error(e)  # exceptions have no .message attribute in Python 3
logger.error('Incorrect phout data: {}'.format(data))
return
chunk['receive_ts'] = chunk.send_ts + chunk.interval_real / 1e6
chunk['receive_sec'] = chunk.receive_ts.astype(np.int64)
# TODO: consider configuration for the following:
    chunk['tag'] = chunk.tag.str.rsplit('#', n=1, expand=True)[0]
chunk.set_index(['receive_sec'], inplace=True)
return chunk
def string_to_df_microsec(data):
# start_time = time.time()
try:
df = pd.read_csv(StringIO(data), sep='\t', names=phout_columns, na_values='', dtype=dtypes, quoting=QUOTE_NONE)
except ParserError as e:
        logger.error(e)  # exceptions have no .message attribute in Python 3
logger.error('Incorrect phout data: {}'.format(data))
return
df['ts'] = (df['send_ts'] * 1e6 + df['interval_real']).astype(int)
    df['tag'] = df.tag.str.rsplit('#', n=1, expand=True)[0]
# logger.debug("Chunk decode time: %.2fms", (time.time() - start_time) * 1000)
return df
class PhantomReader(object):
def __init__(self, fileobj, cache_size=1024 * 1024 * 50, parser=string_to_df):
self.buffer = ""
self.phout = fileobj
self.cache_size = cache_size
self.parser = parser
def __iter__(self):
return self
def __next__(self):
data = self.phout.read(self.cache_size)
if data is None:
raise StopIteration
else:
parts = data.rsplit('\n', 1)
if len(parts) > 1:
chunk = self.buffer + parts[0] + '\n'
self.buffer = parts[1]
return self.parser(chunk)
else:
self.buffer += parts[0]
return None
class PhantomStatsReader(StatsReader):
def __init__(self, filename, phantom_info, get_start_time=lambda: 0, cache_size=1024 * 1024 * 50):
self.phantom_info = phantom_info
self.stat_buffer = ""
self.stat_filename = filename
self.closed = False
self.cache_size = cache_size
self.get_start_time = get_start_time
self._start_time = None
def _decode_stat_data(self, chunk):
"""
Return all items found in this chunk
"""
for date_str, statistics in chunk.items():
date_obj = datetime.datetime.strptime(
date_str.split(".")[0], '%Y-%m-%d %H:%M:%S')
chunk_date = int(time.mktime(date_obj.timetuple()))
instances = 0
for benchmark_name, benchmark in statistics.items():
if not benchmark_name.startswith("benchmark_io"):
continue
for method, meth_obj in benchmark.items():
if "mmtasks" in meth_obj:
instances += meth_obj["mmtasks"][2]
offset = chunk_date - 1 - self.start_time
reqps = 0
if 0 <= offset < len(self.phantom_info.steps):
reqps = self.phantom_info.steps[offset][0]
yield self.stats_item(chunk_date - 1, instances, reqps)
def _read_stat_data(self, stat_file):
chunk = stat_file.read(self.cache_size)
if chunk:
self.stat_buffer += chunk
parts = self.stat_buffer.rsplit('\n},', 1)
if len(parts) > 1:
ready_chunk = parts[0]
self.stat_buffer = parts[1]
return self._format_chunk(ready_chunk)
else:
self.stat_buffer += stat_file.readline()
return None
def _format_chunk(self, chunk):
chunks = [json.loads('{%s}}' % s) for s in chunk.split('\n},')]
return list(itt.chain(*(self._decode_stat_data(chunk) for chunk in chunks)))
    @property
    def start_time(self):
        if self._start_time is None:
            self._start_time = int(self.get_start_time())
        return self._start_time
def __iter__(self):
"""
Union buffer and chunk, split using '\n},',
return splitted parts
"""
with open(self.stat_filename, 'r') as stat_file:
while not self.closed:
yield self._read_stat_data(stat_file)
# read end:
data = self._read_stat_data(stat_file)
while data:
yield data
data = self._read_stat_data(stat_file)
# buffer is always included
def close(self):
self.closed = True
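# Hedged usage sketch (not part of the original module): feeding one synthetic
# phout line through ``string_to_df``. The field values are made up; phout is
# tab-separated with the twelve ``phout_columns`` defined above.
def _phout_parse_example():
    sample = '\t'.join([
        '1503332931.718', 'uploads#42', '12345', '1230', '120', '4567',
        '890', '100', '400', '5000', '0', '200',
    ]) + '\n'
    chunk = string_to_df(sample)
    # ``receive_sec`` becomes the index; the tag is cut at the last '#',
    # so 'uploads#42' is stored as 'uploads'.
    return chunk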
|
class CallFailure(Exception):
"""Exception raised when a Google Music server responds that a call failed.
Attributes:
callname -- name of the protocol.Call that failed
"""
def __init__(self, message, callname):
Exception.__init__(self, message)
self.callname = callname
def __str__(self):
return "%s: %s" % (self.callname, Exception.__str__(self))
class ParseException(Exception):
"""Thrown by Call.parse_response on errors."""
pass
class ValidationException(Exception):
"""Thrown by Transaction.verify_res_schema on errors."""
pass
class AlreadyLoggedIn(Exception):
pass
class NotLoggedIn(Exception):
pass
class NotSubscribed(Exception):
def __init__(self, *args):
if len(args) >= 1:
args = list(args)
args[0] += " (https://goo.gl/v1wVHT)"
args = tuple(args)
else:
args = ("Subscription required. (https://goo.gl/v1wVHT)",)
self.args = args
class GmusicapiWarning(UserWarning):
pass
class InvalidDeviceId(Exception):
def __init__(self, message, ids):
if ids:
message += 'Your valid device IDs are:\n* %s' % '\n* '.join(ids)
else:
            message += ('It looks like your account does not have any '
                        'valid device IDs.')
super().__init__(message)
self.valid_device_ids = ids
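# Hedged usage sketch (not part of the original module): how the two custom
# constructors above shape their messages. The device ids are made up.
def _exception_examples():
    try:
        raise NotSubscribed("All Access required.")
    except NotSubscribed as e:
        subscription_msg = e.args[0]  # '(https://goo.gl/v1wVHT)' is appended
    try:
        raise InvalidDeviceId("Bad id. ", ["aabbccdd", "11223344"])
    except InvalidDeviceId as e:
        device_msg = str(e)  # lists the valid ids, one per '*' bullet
    return subscription_msg, device_msg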
|
import logging
from saltbox import RouterLoginException, RouterNotReachableException, SaltBox
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
def get_scanner(hass, config):
"""Return the Salt device scanner."""
scanner = SaltDeviceScanner(config[DOMAIN])
# Test whether the router is accessible.
data = scanner.get_salt_data()
return scanner if data is not None else None
class SaltDeviceScanner(DeviceScanner):
"""This class queries a Salt Fiber Box router."""
def __init__(self, config):
"""Initialize the scanner."""
host = config[CONF_HOST]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
self.saltbox = SaltBox(f"http://{host}", username, password)
self.online_clients = []
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [client["mac"] for client in self.online_clients]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
for client in self.online_clients:
if client["mac"] == device:
return client["name"]
return None
def get_salt_data(self):
"""Retrieve data from Salt router and return parsed result."""
try:
clients = self.saltbox.get_online_clients()
return clients
except (RouterLoginException, RouterNotReachableException) as error:
_LOGGER.warning(error)
return None
def _update_info(self):
"""Pull the current information from the Salt router."""
_LOGGER.debug("Loading data from Salt Fiber Box")
data = self.get_salt_data()
self.online_clients = data or []
|
import asyncio
from datetime import timedelta
from itertools import chain
import logging
from haffmpeg.camera import CameraMjpeg
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
import requests
from homeassistant.components.camera import Camera
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
from homeassistant.util import dt as dt_util
from . import ATTRIBUTION, DOMAIN
from .entity import RingEntityMixin
FORCE_REFRESH_INTERVAL = timedelta(minutes=45)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a Ring Door Bell and StickUp Camera."""
devices = hass.data[DOMAIN][config_entry.entry_id]["devices"]
cams = []
for camera in chain(
devices["doorbots"], devices["authorized_doorbots"], devices["stickup_cams"]
):
if not camera.has_subscription:
continue
cams.append(RingCam(config_entry.entry_id, hass.data[DATA_FFMPEG], camera))
async_add_entities(cams)
class RingCam(RingEntityMixin, Camera):
"""An implementation of a Ring Door Bell camera."""
def __init__(self, config_entry_id, ffmpeg, device):
"""Initialize a Ring Door Bell camera."""
super().__init__(config_entry_id, device)
self._name = self._device.name
self._ffmpeg = ffmpeg
self._last_event = None
self._last_video_id = None
self._video_url = None
self._expires_at = dt_util.utcnow() - FORCE_REFRESH_INTERVAL
async def async_added_to_hass(self):
"""Register callbacks."""
await super().async_added_to_hass()
await self.ring_objects["history_data"].async_track_device(
self._device, self._history_update_callback
)
async def async_will_remove_from_hass(self):
"""Disconnect callbacks."""
await super().async_will_remove_from_hass()
self.ring_objects["history_data"].async_untrack_device(
self._device, self._history_update_callback
)
@callback
def _history_update_callback(self, history_data):
"""Call update method."""
if history_data:
self._last_event = history_data[0]
self.async_schedule_update_ha_state(True)
else:
self._last_event = None
self._last_video_id = None
self._video_url = None
self._expires_at = dt_util.utcnow()
self.async_write_ha_state()
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def unique_id(self):
"""Return a unique ID."""
return self._device.id
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
"video_url": self._video_url,
"last_video_id": self._last_video_id,
}
async def async_camera_image(self):
"""Return a still image response from the camera."""
ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop)
if self._video_url is None:
return
image = await asyncio.shield(
ffmpeg.get_image(
self._video_url,
output_format=IMAGE_JPEG,
)
)
return image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
if self._video_url is None:
return
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
await stream.open_camera(self._video_url)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._ffmpeg.ffmpeg_stream_content_type,
)
finally:
await stream.close()
async def async_update(self):
"""Update camera entity and refresh attributes."""
if self._last_event is None:
return
if self._last_event["recording"]["status"] != "ready":
return
utcnow = dt_util.utcnow()
if self._last_video_id == self._last_event["id"] and utcnow <= self._expires_at:
return
try:
video_url = await self.hass.async_add_executor_job(
self._device.recording_url, self._last_event["id"]
)
except requests.Timeout:
_LOGGER.warning(
"Time out fetching recording url for camera %s", self.entity_id
)
video_url = None
if video_url:
self._last_video_id = self._last_event["id"]
self._video_url = video_url
            self._expires_at = utcnow + FORCE_REFRESH_INTERVAL
|
from hangups import CredentialsPrompt, GoogleAuthError, RefreshTokenCache
class Google2FAError(GoogleAuthError):
"""A Google authentication request failed."""
class HangoutsCredentials(CredentialsPrompt):
"""Google account credentials.
This implementation gets the user data as params.
"""
def __init__(self, email, password, pin=None, auth_code=None):
"""Google account credentials.
:param email: Google account email address.
:param password: Google account password.
        :param pin: Google account verification code.
        :param auth_code: Google OAuth authorization code.
        """
self._email = email
self._password = password
self._pin = pin
self._auth_code = auth_code
def get_email(self):
"""Return email.
:return: Google account email address.
"""
return self._email
def get_password(self):
"""Return password.
:return: Google account password.
"""
return self._password
def get_verification_code(self):
"""Return the verification code.
:return: Google account verification code.
"""
if self._pin is None:
raise Google2FAError()
return self._pin
def set_verification_code(self, pin):
"""Set the verification code.
:param pin: Google account verification code.
"""
self._pin = pin
def get_authorization_code(self):
"""Return the oauth authorization code.
:return: Google oauth code.
"""
return self._auth_code
def set_authorization_code(self, code):
"""Set the google oauth authorization code.
:param code: Oauth code returned after authentication with google.
"""
self._auth_code = code
class HangoutsRefreshToken(RefreshTokenCache):
"""Memory-based cache for refresh token."""
def __init__(self, token):
"""Memory-based cache for refresh token.
:param token: Initial refresh token.
"""
super().__init__("")
self._token = token
def get(self):
"""Get cached refresh token.
:return: Cached refresh token.
"""
return self._token
def set(self, refresh_token):
"""Cache a refresh token.
:param refresh_token: Refresh token to cache.
"""
self._token = refresh_token
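# Hedged usage sketch (not part of the original module): wiring the credential
# prompt and the token cache together. All values are placeholders.
def _hangouts_auth_example():
    creds = HangoutsCredentials("user@example.com", "hunter2", pin="123456")
    token_cache = HangoutsRefreshToken(None)  # no cached token yet
    token_cache.set("refresh-token-from-google")
    return creds.get_email(), token_cache.get()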
|
import unittest
from bayes_opt import BayesianOptimization
class TestBayesOpt(unittest.TestCase):
def test_optimize(self):
# Bounded region of parameter space
pbounds = {'x': (2, 4), 'y': (-3, 3)}
optimizer = BayesianOptimization(
f=black_box_function,
pbounds=pbounds,
random_state=1,
)
optimizer.maximize(
init_points=2,
n_iter=1,
)
        self.assertAlmostEqual(-7, optimizer.max['target'], places=0)  # places=0: equal once the difference rounds to 0
def black_box_function(x, y):
"""Function with unknown internals we wish to maximize.
This is just serving as an example, for all intents and
purposes think of the internals of this function, i.e.: the process
which generates its output values, as unknown.
"""
return -x ** 2 - (y - 1) ** 2 + 1
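# Hedged sketch (not part of the original test): after ``maximize`` the
# optimizer also exposes ``res``, a list with one {'target', 'params'} dict
# per evaluated point, which is handy when a test fails.
def _inspect_history(optimizer):
    return [(step['target'], step['params']) for step in optimizer.res]
if __name__ == '__main__':
    unittest.main()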
|
import blebox_uniapi
import pytest
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.blebox import config_flow
from homeassistant.setup import async_setup_component
from .conftest import mock_config, mock_only_feature, setup_product_mock
from tests.async_mock import DEFAULT, AsyncMock, PropertyMock, patch
def create_valid_feature_mock(path="homeassistant.components.blebox.Products"):
"""Return a valid, complete BleBox feature mock."""
feature = mock_only_feature(
blebox_uniapi.cover.Cover,
unique_id="BleBox-gateBox-1afe34db9437-0.position",
full_name="gateBox-0.position",
device_class="gate",
state=0,
async_update=AsyncMock(),
current=None,
)
product = setup_product_mock("covers", [feature], path)
type(product).name = PropertyMock(return_value="My gate controller")
type(product).model = PropertyMock(return_value="gateController")
type(product).type = PropertyMock(return_value="gateBox")
type(product).brand = PropertyMock(return_value="BleBox")
type(product).firmware_version = PropertyMock(return_value="1.23")
type(product).unique_id = PropertyMock(return_value="abcd0123ef5678")
return feature
@pytest.fixture(name="valid_feature_mock")
def valid_feature_mock_fixture():
"""Return a valid, complete BleBox feature mock."""
return create_valid_feature_mock()
@pytest.fixture(name="flow_feature_mock")
def flow_feature_mock_fixture():
"""Return a mocked user flow feature."""
return create_valid_feature_mock(
"homeassistant.components.blebox.config_flow.Products"
)
async def test_flow_works(hass, valid_feature_mock, flow_feature_mock):
"""Test that config flow works."""
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["type"] == "create_entry"
assert result["title"] == "My gate controller"
assert result["data"] == {
config_flow.CONF_HOST: "172.2.3.4",
config_flow.CONF_PORT: 80,
}
@pytest.fixture(name="product_class_mock")
def product_class_mock_fixture():
"""Return a mocked feature."""
path = "homeassistant.components.blebox.config_flow.Products"
patcher = patch(path, DEFAULT, blebox_uniapi.products.Products, True, True)
yield patcher
async def test_flow_with_connection_failure(hass, product_class_mock):
"""Test that config flow works."""
with product_class_mock as products_class:
products_class.async_from_host = AsyncMock(
side_effect=blebox_uniapi.error.ConnectionError
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_api_failure(hass, product_class_mock):
"""Test that config flow works."""
with product_class_mock as products_class:
products_class.async_from_host = AsyncMock(
side_effect=blebox_uniapi.error.Error
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["errors"] == {"base": "cannot_connect"}
async def test_flow_with_unknown_failure(hass, product_class_mock):
"""Test that config flow works."""
with product_class_mock as products_class:
products_class.async_from_host = AsyncMock(side_effect=RuntimeError)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["errors"] == {"base": "unknown"}
async def test_flow_with_unsupported_version(hass, product_class_mock):
"""Test that config flow works."""
with product_class_mock as products_class:
products_class.async_from_host = AsyncMock(
side_effect=blebox_uniapi.error.UnsupportedBoxVersion
)
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["errors"] == {"base": "unsupported_version"}
async def test_async_setup(hass):
"""Test async_setup (for coverage)."""
assert await async_setup_component(hass, "blebox", {"host": "172.2.3.4"})
await hass.async_block_till_done()
async def test_already_configured(hass, valid_feature_mock):
"""Test that same device cannot be added twice."""
config = mock_config("172.2.3.4")
config.add_to_hass(hass)
await hass.config_entries.async_setup(config.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_init(
config_flow.DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={config_flow.CONF_HOST: "172.2.3.4", config_flow.CONF_PORT: 80},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "address_already_configured"
async def test_async_setup_entry(hass, valid_feature_mock):
"""Test async_setup_entry (for coverage)."""
config = mock_config()
config.add_to_hass(hass)
assert await hass.config_entries.async_setup(config.entry_id)
await hass.async_block_till_done()
assert hass.config_entries.async_entries() == [config]
assert config.state == config_entries.ENTRY_STATE_LOADED
async def test_async_remove_entry(hass, valid_feature_mock):
"""Test async_setup_entry (for coverage)."""
config = mock_config()
config.add_to_hass(hass)
assert await hass.config_entries.async_setup(config.entry_id)
await hass.async_block_till_done()
assert await hass.config_entries.async_remove(config.entry_id)
await hass.async_block_till_done()
assert hass.config_entries.async_entries() == []
assert config.state == config_entries.ENTRY_STATE_NOT_LOADED
|
import logging
import os
import os.path
import re
import xml.sax # for parsing arxmliv articles
from gensim import utils
import sys
if sys.version_info[0] >= 3:
unicode = str
PAT_TAG = re.compile(r'<(.*?)>(.*)</.*?>')
logger = logging.getLogger('gensim.corpora.sources')
class ArticleSource:
"""
Objects of this class describe a single source of articles.
A source is an abstraction over where the documents reside (the findArticles()
method), how to retrieve their fulltexts, their metadata, how to tokenize the
articles and how to normalize the tokens.
What is NOT abstracted away (ie. must hold for all sources) is the idea of
article identifiers (URIs), which uniquely identify each article within
one source.
This class is just an ABC interface; see eg. DmlSource or ArxmlivSource classes
for concrete instances.
"""
def __init__(self, sourceId):
self.sourceId = sourceId
def __str__(self):
return self.sourceId
def findArticles(self):
raise NotImplementedError('Abstract Base Class')
def getContent(self, uri):
raise NotImplementedError('Abstract Base Class')
def getMeta(self, uri):
raise NotImplementedError('Abstract Base Class')
def tokenize(self, content):
raise NotImplementedError('Abstract Base Class')
def normalizeWord(self, word):
raise NotImplementedError('Abstract Base Class')
# endclass ArticleSource
class DmlSource(ArticleSource):
"""
Article source for articles in DML format (DML-CZ, Numdam):
1) articles = directories starting with '#'
2) content is stored in fulltext.txt
3) metadata are stored in meta.xml
Article URI is currently (a part of) the article's path on filesystem.
See the ArticleSource class for general info on sources.
"""
def __init__(self, sourceId, baseDir):
self.sourceId = sourceId
self.baseDir = os.path.normpath(baseDir)
def __str__(self):
return self.sourceId
@classmethod
def parseDmlMeta(cls, xmlfile):
"""
Parse out all fields from meta.xml, return them as a dictionary.
"""
result = {}
        with open(xmlfile) as xmlf:  # local name; don't shadow the xml module
            for line in xmlf:
                if line.find('<article>') >= 0:  # skip until the beginning of <article> tag
                    break
            for line in xmlf:
                if line.find('</article>') >= 0:  # end of <article>, we're done
                    break
                p = re.search(PAT_TAG, line)
                if p:
                    name, cont = p.groups()
                    name = name.split()[0]
                    name, cont = name.strip(), cont.strip()
                    if name == 'msc':
                        if len(cont) != 5:
                            logger.warning('invalid MSC=%s in %s', cont, xmlfile)
                        result.setdefault('msc', []).append(cont)
                        continue
                    if name == 'idMR':
                        cont = cont[2:]  # omit MR from MR123456
                    if name and cont:
                        result[name] = cont
        return result
def idFromDir(self, path):
assert len(path) > len(self.baseDir)
intId = path[1 + path.rfind('#'):]
pathId = path[1 + len(self.baseDir):]
return (intId, pathId)
def isArticle(self, path):
# in order to be valid, the article directory must start with '#'
if not os.path.basename(path).startswith('#'):
return False
# and contain the fulltext.txt file
if not os.path.exists(os.path.join(path, 'fulltext.txt')):
logger.info('missing fulltext in %s', path)
return False
# and also the meta.xml file
if not os.path.exists(os.path.join(path, 'meta.xml')):
logger.info('missing meta.xml in %s', path)
return False
return True
def findArticles(self):
dirTotal = artAccepted = 0
logger.info("looking for '%s' articles inside %s", self.sourceId, self.baseDir)
for root, dirs, files in os.walk(self.baseDir):
dirTotal += 1
root = os.path.normpath(root)
if self.isArticle(root):
artAccepted += 1
yield self.idFromDir(root)
logger.info('%i directories processed, found %i articles', dirTotal, artAccepted)
def getContent(self, uri):
"""
Return article content as a single large string.
"""
intId, pathId = uri
filename = os.path.join(self.baseDir, pathId, 'fulltext.txt')
        with open(filename) as fin:
            return fin.read()
def getMeta(self, uri):
"""
        Return article metadata as an attribute->value dictionary.
"""
intId, pathId = uri
filename = os.path.join(self.baseDir, pathId, 'meta.xml')
return DmlSource.parseDmlMeta(filename)
def tokenize(self, content):
return [token.encode('utf8') for token in utils.tokenize(content, errors='ignore') if not token.isdigit()]
def normalizeWord(self, word):
wordU = unicode(word, 'utf8')
return wordU.lower().encode('utf8') # lowercase and then convert back to bytestring
# endclass DmlSource
class DmlCzSource(DmlSource):
"""
Article source for articles in DML-CZ format:
1) articles = directories starting with '#'
    2) content is stored in fulltext.txt or fulltext-dspace.txt
    3) there exists a dspace_id file, containing the internal dmlcz id
    4) metadata are stored in meta.xml
See the ArticleSource class for general info on sources.
"""
def idFromDir(self, path):
assert len(path) > len(self.baseDir)
        with open(os.path.join(path, 'dspace_id')) as fin:
            dmlczId = fin.read().strip()
pathId = path[1 + len(self.baseDir):]
return (dmlczId, pathId)
def isArticle(self, path):
# in order to be valid, the article directory must start with '#'
if not os.path.basename(path).startswith('#'):
return False
# and contain a dspace_id file
if not (os.path.exists(os.path.join(path, 'dspace_id'))):
logger.info('missing dspace_id in %s', path)
return False
# and contain either fulltext.txt or fulltext_dspace.txt file
if not (os.path.exists(os.path.join(path, 'fulltext.txt'))
or os.path.exists(os.path.join(path, 'fulltext-dspace.txt'))):
logger.info('missing fulltext in %s', path)
return False
# and contain the meta.xml file
if not os.path.exists(os.path.join(path, 'meta.xml')):
logger.info('missing meta.xml in %s', path)
return False
return True
def getContent(self, uri):
"""
Return article content as a single large string.
"""
intId, pathId = uri
filename1 = os.path.join(self.baseDir, pathId, 'fulltext.txt')
filename2 = os.path.join(self.baseDir, pathId, 'fulltext-dspace.txt')
if os.path.exists(filename1) and os.path.exists(filename2):
# if both fulltext and dspace files exist, pick the larger one
if os.path.getsize(filename1) < os.path.getsize(filename2):
filename = filename2
else:
filename = filename1
elif os.path.exists(filename1):
filename = filename1
else:
assert os.path.exists(filename2)
filename = filename2
        with open(filename) as fin:
            return fin.read()
# endclass DmlCzSource
class ArxmlivSource(ArticleSource):
"""
Article source for articles in arxmliv format:
1) articles = directories starting with '#'
2) content is stored in tex.xml
3) metadata in special tags within tex.xml
Article URI is currently (a part of) the article's path on filesystem.
See the ArticleSource class for general info on sources.
"""
class ArxmlivContentHandler(xml.sax.handler.ContentHandler):
def __init__(self):
self.path = [''] # help structure for sax event parsing
self.tokens = [] # will contain tokens once parsing is finished
def startElement(self, name, attr):
# for math tokens, we only care about Math elements directly below <p>
if name == 'Math' and self.path[-1] == 'p' and attr.get('mode', '') == 'inline':
tex = attr.get('tex', '')
if tex and not tex.isdigit():
                    self.tokens.append(('$%s$' % tex).encode('utf8'))  # encode the whole token, not only tex
self.path.append(name)
def endElement(self, name):
self.path.pop()
def characters(self, text):
# for text, we only care about tokens directly within the <p> tag
if self.path[-1] == 'p':
tokens = [
token.encode('utf8') for token in utils.tokenize(text, errors='ignore') if not token.isdigit()
]
self.tokens.extend(tokens)
# endclass ArxmlivHandler
class ArxmlivErrorHandler(xml.sax.handler.ErrorHandler):
# Python2.5 implementation of xml.sax is broken -- character streams and
# byte encodings of InputSource are ignored, bad things sometimes happen
# in buffering of multi-byte files (such as utf8), characters get cut in
# the middle, resulting in invalid tokens...
# This is not really a problem with arxmliv xml files themselves, so ignore
# these errors silently.
def error(self, exception):
pass
warning = fatalError = error
# endclass ArxmlivErrorHandler
def __init__(self, sourceId, baseDir):
self.sourceId = sourceId
self.baseDir = os.path.normpath(baseDir)
def __str__(self):
return self.sourceId
def idFromDir(self, path):
assert len(path) > len(self.baseDir)
intId = path[1 + path.rfind('#'):]
pathId = path[1 + len(self.baseDir):]
return (intId, pathId)
def isArticle(self, path):
# in order to be valid, the article directory must start with '#'
if not os.path.basename(path).startswith('#'):
return False
# and contain the tex.xml file
if not os.path.exists(os.path.join(path, 'tex.xml')):
logger.warning('missing tex.xml in %s', path)
return False
return True
def findArticles(self):
dirTotal = artAccepted = 0
logger.info("looking for '%s' articles inside %s", self.sourceId, self.baseDir)
for root, dirs, files in os.walk(self.baseDir):
dirTotal += 1
root = os.path.normpath(root)
if self.isArticle(root):
artAccepted += 1
yield self.idFromDir(root)
logger.info('%i directories processed, found %i articles', dirTotal, artAccepted)
def getContent(self, uri):
"""
Return article content as a single large string.
"""
intId, pathId = uri
filename = os.path.join(self.baseDir, pathId, 'tex.xml')
        with open(filename) as fin:
            return fin.read()
def getMeta(self, uri):
"""
Return article metadata as an attribute->value dictionary.
"""
# intId, pathId = uri
# filename = os.path.join(self.baseDir, pathId, 'tex.xml')
return {'language': 'eng'} # TODO maybe parse out some meta; but currently not needed for anything...
def tokenize(self, content):
"""
Parse tokens out of xml. There are two types of token: normal text and
mathematics. Both are returned interspersed in a single list, in the same
order as they appeared in the content.
The math tokens will be returned in the form $tex_expression$, ie. with
a dollar sign prefix and suffix.
"""
handler = ArxmlivSource.ArxmlivContentHandler()
xml.sax.parseString(content, handler, ArxmlivSource.ArxmlivErrorHandler())
return handler.tokens
def normalizeWord(self, word):
if word[0] == '$': # ignore math tokens
return word
wordU = unicode(word, 'utf8')
return wordU.lower().encode('utf8') # lowercase and then convert back to bytestring
# endclass ArxmlivSource
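# Hedged usage sketch (not part of the original module): driving a DmlSource
# end to end. '/data/dml' is a hypothetical corpus directory.
def _dml_source_example(base_dir='/data/dml'):
    source = DmlSource('dmlcz', base_dir)
    for uri in source.findArticles():
        content = source.getContent(uri)
        tokens = [source.normalizeWord(t) for t in source.tokenize(content)]
        meta = source.getMeta(uri)
        yield uri, meta, tokens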
|
from mock import patch, call, ANY
from arctic.scripts.arctic_create_user import main
from ...util import run_as_main
def test_main_minimal():
with patch('arctic.scripts.arctic_create_user.logger', autospec=True) as logger, \
patch('arctic.scripts.arctic_create_user.MongoClient', autospec=True) as MC, \
patch('arctic.scripts.arctic_create_user.get_mongodb_uri', autospec=True) as get_mongodb_uri, \
patch('arctic.scripts.arctic_create_user.do_db_auth', autospec=True) as do_db_auth:
run_as_main(main, '--host', 'some_host',
'--password', 'asdf',
'user')
get_mongodb_uri.assert_called_once_with('some_host')
MC.assert_called_once_with(get_mongodb_uri.return_value)
assert do_db_auth.call_args_list == [call('some_host',
MC.return_value,
'admin')]
assert MC.return_value.__getitem__.call_args_list == [call('arctic_user')]
db = MC.return_value.__getitem__.return_value
assert [call('user', ANY, read_only=False)] == db.add_user.call_args_list
assert logger.info.call_args_list == [call('Granted: user [WRITE] to arctic_user'),
call('User creds: arctic_user/user/asdf')]
def test_main_with_db():
with patch('arctic.scripts.arctic_create_user.MongoClient', autospec=True) as MC, \
patch('arctic.scripts.arctic_create_user.get_mongodb_uri', autospec=True) as get_mongodb_uri, \
patch('arctic.scripts.arctic_create_user.do_db_auth', autospec=True) as do_db_auth:
run_as_main(main, '--host', 'some_host',
'--db', 'some_db',
'jblackburn')
get_mongodb_uri.assert_called_once_with('some_host')
MC.assert_called_once_with(get_mongodb_uri.return_value)
assert do_db_auth.call_args_list == [call('some_host',
MC.return_value,
'some_db')]
assert MC.return_value.__getitem__.call_args_list == [call('some_db')]
db = MC.return_value.__getitem__.return_value
assert [call('jblackburn', ANY, read_only=True)] == db.add_user.call_args_list
def test_main_with_db_write():
with patch('arctic.scripts.arctic_create_user.MongoClient', autospec=True) as MC, \
patch('arctic.scripts.arctic_create_user.get_mongodb_uri', autospec=True) as get_mongodb_uri, \
patch('arctic.scripts.arctic_create_user.do_db_auth', autospec=True) as do_db_auth:
run_as_main(main, '--host', 'some_host',
'--db', 'some_db',
'--write',
'jblackburn')
get_mongodb_uri.assert_called_once_with('some_host')
MC.assert_called_once_with(get_mongodb_uri.return_value)
assert do_db_auth.call_args_list == [call('some_host',
MC.return_value,
'some_db')]
assert MC.return_value.__getitem__.call_args_list == [call('some_db')]
db = MC.return_value.__getitem__.return_value
assert [call('jblackburn', ANY, read_only=False)] == db.add_user.call_args_list
def test_no_auth():
with patch('arctic.scripts.arctic_create_user.logger', autospec=True) as logger, \
patch('arctic.scripts.arctic_create_user.MongoClient', autospec=True) as MC, \
patch('arctic.scripts.arctic_create_user.get_mongodb_uri', autospec=True) as get_mongodb_uri, \
patch('arctic.scripts.arctic_create_user.do_db_auth', autospec=True,
return_value=False) as do_db_auth:
run_as_main(main, '--host', 'some_host',
'jblackburn')
assert logger.error.call_args_list == [call("Failed to authenticate to 'some_host'. Check your admin password!")]
|