import argparse
import logging
import sys
import service_configuration_lib
from kubernetes.client import V1DeleteOptions
from kubernetes.client.rest import ApiException
from paasta_tools.kubernetes_tools import KubeClient
from paasta_tools.kubernetes_tools import paasta_prefixed
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import load_system_paasta_config
log = logging.getLogger(__name__)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Removes stale kubernetes CRDs.")
parser.add_argument(
"-c",
"--cluster",
dest="cluster",
metavar="CLUSTER",
default=None,
help="Kubernetes cluster name",
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="define a different soa config directory",
)
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False
)
parser.add_argument(
"-n", "--dry-run", action="store_true", dest="dry_run", default=False
)
args = parser.parse_args()
return args
def main() -> None:
args = parse_args()
soa_dir = args.soa_dir
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARNING)
if args.cluster:
cluster = args.cluster
else:
system_paasta_config = load_system_paasta_config()
cluster = system_paasta_config.get_cluster()
kube_client = KubeClient()
success = cleanup_kube_crd(
kube_client=kube_client, cluster=cluster, soa_dir=soa_dir, dry_run=args.dry_run
)
sys.exit(0 if success else 1)
def cleanup_kube_crd(
kube_client: KubeClient,
cluster: str,
soa_dir: str = DEFAULT_SOA_DIR,
dry_run: bool = False,
) -> bool:
service_attr = paasta_prefixed("service")
existing_crds = kube_client.apiextensions.list_custom_resource_definition(
label_selector=service_attr
)
success = True
for crd in existing_crds.items:
service = crd.metadata.labels[service_attr]
if not service:
log.error(f"CRD {crd.metadata.name} has empty {service_attr} label")
continue
crd_config = service_configuration_lib.read_extra_service_information(
service, f"crd-{cluster}", soa_dir=soa_dir
)
if crd_config:
log.debug(f"CRD {crd.metadata.name} declaration found in {service}")
continue
log.info(f"CRD {crd.metadata.name} not found in {service} service")
if dry_run:
log.info("not deleting in dry-run mode")
continue
try:
kube_client.apiextensions.delete_custom_resource_definition(
name=crd.metadata.name, body=V1DeleteOptions()
)
log.info(f"deleted {crd.metadata.name} for {cluster}:{service}")
except ApiException as exc:
log.error(
f"error deploying crd for {cluster}:{service}, "
f"status: {exc.status}, reason: {exc.reason}"
)
log.debug(exc.body)
success = False
return success
if __name__ == "__main__":
main()
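# Example invocation (hypothetical cluster name): with --dry-run the script
# only logs the CRDs it would delete; the exit status is 0 if every stale
# CRD was removed (or skipped) successfully, 1 otherwise.
#
#   python cleanup_kubernetes_crd.py --cluster example-cluster --dry-run -v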
|
from __future__ import division, print_function
"""
Urwid tree view
Features:
- custom selectable widgets for trees
- custom list walker for displaying widgets in a tree fashion
"""
import urwid
from urwid.wimp import SelectableIcon
class TreeWidgetError(RuntimeError):
pass
class TreeWidget(urwid.WidgetWrap):
"""A widget representing something in a nested tree display."""
indent_cols = 3
unexpanded_icon = SelectableIcon('+', 0)
expanded_icon = SelectableIcon('-', 0)
def __init__(self, node):
self._node = node
self._innerwidget = None
self.is_leaf = not hasattr(node, 'get_first_child')
self.expanded = True
widget = self.get_indented_widget()
self.__super.__init__(widget)
def selectable(self):
"""
Allow selection of non-leaf nodes so children may be (un)expanded
"""
return not self.is_leaf
def get_indented_widget(self):
widget = self.get_inner_widget()
if not self.is_leaf:
widget = urwid.Columns([('fixed', 1,
[self.unexpanded_icon, self.expanded_icon][self.expanded]),
widget], dividechars=1)
indent_cols = self.get_indent_cols()
return urwid.Padding(widget,
width=('relative', 100), left=indent_cols)
def update_expanded_icon(self):
"""Update display widget text for parent widgets"""
# icon is first element in columns indented widget
self._w.base_widget.widget_list[0] = [
self.unexpanded_icon, self.expanded_icon][self.expanded]
def get_indent_cols(self):
return self.indent_cols * self.get_node().get_depth()
def get_inner_widget(self):
if self._innerwidget is None:
self._innerwidget = self.load_inner_widget()
return self._innerwidget
def load_inner_widget(self):
return urwid.Text(self.get_display_text())
def get_node(self):
return self._node
def get_display_text(self):
return (self.get_node().get_key() + ": " +
str(self.get_node().get_value()))
def next_inorder(self):
"""Return the next TreeWidget depth first from this one."""
# first check if there's a child widget
firstchild = self.first_child()
if firstchild is not None:
return firstchild
# now we need to hunt for the next sibling
thisnode = self.get_node()
nextnode = thisnode.next_sibling()
depth = thisnode.get_depth()
while nextnode is None and depth > 0:
# keep going up the tree until we find an ancestor next sibling
thisnode = thisnode.get_parent()
nextnode = thisnode.next_sibling()
depth -= 1
assert depth == thisnode.get_depth()
if nextnode is None:
# we're at the end of the tree
return None
else:
return nextnode.get_widget()
def prev_inorder(self):
"""Return the previous TreeWidget depth first from this one."""
thisnode = self._node
prevnode = thisnode.prev_sibling()
if prevnode is not None:
            # we need to find the last child of the previous widget if it's
            # expanded
prevwidget = prevnode.get_widget()
lastchild = prevwidget.last_child()
if lastchild is None:
return prevwidget
else:
return lastchild
else:
# need to hunt for the parent
depth = thisnode.get_depth()
if prevnode is None and depth == 0:
return None
elif prevnode is None:
prevnode = thisnode.get_parent()
return prevnode.get_widget()
def keypress(self, size, key):
"""Handle expand & collapse requests (non-leaf nodes)"""
if self.is_leaf:
return key
if key in ("+", "right"):
self.expanded = True
self.update_expanded_icon()
elif key == "-":
self.expanded = False
self.update_expanded_icon()
elif self._w.selectable():
return self.__super.keypress(size, key)
else:
return key
def mouse_event(self, size, event, button, col, row, focus):
        if self.is_leaf or event != 'mouse press' or button != 1:
return False
if row == 0 and col == self.get_indent_cols():
self.expanded = not self.expanded
self.update_expanded_icon()
return True
return False
def first_child(self):
"""Return first child if expanded."""
if self.is_leaf or not self.expanded:
return None
else:
if self._node.has_children():
firstnode = self._node.get_first_child()
return firstnode.get_widget()
else:
return None
def last_child(self):
"""Return last child if expanded."""
if self.is_leaf or not self.expanded:
return None
else:
if self._node.has_children():
lastchild = self._node.get_last_child().get_widget()
else:
return None
# recursively search down for the last descendant
lastdescendant = lastchild.last_child()
if lastdescendant is None:
return lastchild
else:
return lastdescendant
class TreeNode(object):
"""
Store tree contents and cache TreeWidget objects.
A TreeNode consists of the following elements:
* key: accessor token for parent nodes
* value: subclass-specific data
* parent: a TreeNode which contains a pointer back to this object
* widget: The widget used to render the object
"""
def __init__(self, value, parent=None, key=None, depth=None):
self._key = key
self._parent = parent
self._value = value
self._depth = depth
self._widget = None
def get_widget(self, reload=False):
""" Return the widget for this node."""
if self._widget is None or reload == True:
self._widget = self.load_widget()
return self._widget
def load_widget(self):
return TreeWidget(self)
def get_depth(self):
if self._depth is None and self._parent is None:
self._depth = 0
elif self._depth is None:
self._depth = self._parent.get_depth() + 1
return self._depth
def get_index(self):
if self.get_depth() == 0:
return None
else:
key = self.get_key()
parent = self.get_parent()
return parent.get_child_index(key)
def get_key(self):
return self._key
def set_key(self, key):
self._key = key
def change_key(self, key):
self.get_parent().change_child_key(self._key, key)
def get_parent(self):
if self._parent == None and self.get_depth() > 0:
self._parent = self.load_parent()
return self._parent
def load_parent(self):
"""Provide TreeNode with a parent for the current node. This function
is only required if the tree was instantiated from a child node
(virtual function)"""
raise TreeWidgetError("virtual function. Implement in subclass")
def get_value(self):
return self._value
def is_root(self):
return self.get_depth() == 0
def next_sibling(self):
if self.get_depth() > 0:
return self.get_parent().next_child(self.get_key())
else:
return None
def prev_sibling(self):
if self.get_depth() > 0:
return self.get_parent().prev_child(self.get_key())
else:
return None
def get_root(self):
root = self
while root.get_parent() is not None:
root = root.get_parent()
return root
class ParentNode(TreeNode):
"""Maintain sort order for TreeNodes."""
def __init__(self, value, parent=None, key=None, depth=None):
TreeNode.__init__(self, value, parent=parent, key=key, depth=depth)
self._child_keys = None
self._children = {}
def get_child_keys(self, reload=False):
"""Return a possibly ordered list of child keys"""
if self._child_keys is None or reload == True:
self._child_keys = self.load_child_keys()
return self._child_keys
def load_child_keys(self):
"""Provide ParentNode with an ordered list of child keys (virtual
function)"""
raise TreeWidgetError("virtual function. Implement in subclass")
def get_child_widget(self, key):
"""Return the widget for a given key. Create if necessary."""
child = self.get_child_node(key)
return child.get_widget()
def get_child_node(self, key, reload=False):
"""Return the child node for a given key. Create if necessary."""
if key not in self._children or reload == True:
self._children[key] = self.load_child_node(key)
return self._children[key]
def load_child_node(self, key):
"""Load the child node for a given key (virtual function)"""
raise TreeWidgetError("virtual function. Implement in subclass")
def set_child_node(self, key, node):
"""Set the child node for a given key. Useful for bottom-up, lazy
population of a tree."""
self._children[key] = node
def change_child_key(self, oldkey, newkey):
if newkey in self._children:
raise TreeWidgetError("%s is already in use" % newkey)
self._children[newkey] = self._children.pop(oldkey)
self._children[newkey].set_key(newkey)
def get_child_index(self, key):
try:
return self.get_child_keys().index(key)
except ValueError:
errorstring = ("Can't find key %s in ParentNode %s\n" +
"ParentNode items: %s")
raise TreeWidgetError(errorstring % (key, self.get_key(),
str(self.get_child_keys())))
def next_child(self, key):
"""Return the next child node in index order from the given key."""
index = self.get_child_index(key)
# the given node may have just been deleted
if index is None:
return None
index += 1
child_keys = self.get_child_keys()
if index < len(child_keys):
# get the next item at same level
return self.get_child_node(child_keys[index])
else:
return None
def prev_child(self, key):
"""Return the previous child node in index order from the given key."""
index = self.get_child_index(key)
if index is None:
return None
child_keys = self.get_child_keys()
index -= 1
if index >= 0:
# get the previous item at same level
return self.get_child_node(child_keys[index])
else:
return None
def get_first_child(self):
"""Return the first TreeNode in the directory."""
child_keys = self.get_child_keys()
return self.get_child_node(child_keys[0])
def get_last_child(self):
"""Return the last TreeNode in the directory."""
child_keys = self.get_child_keys()
return self.get_child_node(child_keys[-1])
def has_children(self):
"""Does this node have any children?"""
return len(self.get_child_keys())>0
class TreeWalker(urwid.ListWalker):
"""ListWalker-compatible class for displaying TreeWidgets
positions are TreeNodes."""
def __init__(self, start_from):
"""start_from: TreeNode with the initial focus."""
self.focus = start_from
def get_focus(self):
widget = self.focus.get_widget()
return widget, self.focus
def set_focus(self, focus):
self.focus = focus
self._modified()
def get_next(self, start_from):
widget = start_from.get_widget()
target = widget.next_inorder()
if target is None:
return None, None
else:
return target, target.get_node()
def get_prev(self, start_from):
widget = start_from.get_widget()
target = widget.prev_inorder()
if target is None:
return None, None
else:
return target, target.get_node()
class TreeListBox(urwid.ListBox):
"""A ListBox with special handling for navigation and
collapsing of TreeWidgets"""
def keypress(self, size, key):
key = self.__super.keypress(size, key)
return self.unhandled_input(size, key)
def unhandled_input(self, size, input):
"""Handle macro-navigation keys"""
if input == 'left':
self.move_focus_to_parent(size)
elif input == '-':
self.collapse_focus_parent(size)
else:
return input
def collapse_focus_parent(self, size):
"""Collapse parent directory."""
widget, pos = self.body.get_focus()
self.move_focus_to_parent(size)
pwidget, ppos = self.body.get_focus()
if pos != ppos:
self.keypress(size, "-")
def move_focus_to_parent(self, size):
"""Move focus to parent of widget in focus."""
widget, pos = self.body.get_focus()
parentpos = pos.get_parent()
if parentpos is None:
return
middle, top, bottom = self.calculate_visible( size )
row_offset, focus_widget, focus_pos, focus_rows, cursor = middle
trim_top, fill_above = top
for widget, pos, rows in fill_above:
row_offset -= rows
if pos == parentpos:
self.change_focus(size, pos, row_offset)
return
self.change_focus(size, pos.get_parent())
def _keypress_max_left(self, size):
return self.focus_home(size)
def _keypress_max_right(self, size):
return self.focus_end(size)
def focus_home(self, size):
"""Move focus to very top."""
widget, pos = self.body.get_focus()
rootnode = pos.get_root()
self.change_focus(size, rootnode)
def focus_end( self, size ):
"""Move focus to far bottom."""
maxrow, maxcol = size
widget, pos = self.body.get_focus()
rootnode = pos.get_root()
rootwidget = rootnode.get_widget()
lastwidget = rootwidget.last_child()
if lastwidget:
lastnode = lastwidget.get_node()
self.change_focus(size, lastnode, maxrow-1)
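# Minimal usage sketch (not part of urwid; DictNode/DictParentNode are
# illustrative names): subclass TreeNode/ParentNode for your data, hand the
# root node to a TreeWalker, and wrap that in a TreeListBox.
if __name__ == '__main__':
    class DictNode(TreeNode):
        """Leaf node; the default get_display_text() shows key and value."""

    class DictParentNode(ParentNode):
        """Parent node wrapping a dict; children are its items."""
        def load_child_keys(self):
            return list(self.get_value().keys())

        def load_child_node(self, key):
            value = self.get_value()[key]
            childclass = DictParentNode if isinstance(value, dict) else DictNode
            return childclass(value, parent=self, key=key,
                              depth=self.get_depth() + 1)

    root = DictParentNode({"fruit": {"apple": 1, "pear": 2}, "count": 3},
                          key="root")
    urwid.MainLoop(TreeListBox(TreeWalker(root))).run()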
|
from homeassistant.components.nuheat.const import DOMAIN
from homeassistant.setup import async_setup_component
from .mocks import _get_mock_nuheat
from tests.async_mock import patch
VALID_CONFIG = {
"nuheat": {"username": "warm", "password": "feet", "devices": "thermostat123"}
}
INVALID_CONFIG = {"nuheat": {"username": "warm", "password": "feet"}}
async def test_init_success(hass):
"""Test that we can setup with valid config."""
mock_nuheat = _get_mock_nuheat()
with patch(
"homeassistant.components.nuheat.nuheat.NuHeat",
return_value=mock_nuheat,
):
assert await async_setup_component(hass, DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
|
import json
import os
import pytest
from homeassistant.components.demo import DOMAIN
from homeassistant.components.device_tracker.legacy import YAML_DEVICES
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
@pytest.fixture(autouse=True)
def mock_history(hass):
"""Mock history component loaded."""
hass.config.components.add("history")
@pytest.fixture(autouse=True)
def demo_cleanup(hass):
"""Clean up device tracker demo file."""
yield
try:
os.remove(hass.config.path(YAML_DEVICES))
except FileNotFoundError:
pass
async def test_setting_up_demo(hass):
"""Test if we can set up the demo and dump it to JSON."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
await hass.async_start()
# This is done to make sure entity components don't accidentally store
# non-JSON-serializable data in the state machine.
try:
json.dumps(hass.states.async_all(), cls=JSONEncoder)
except Exception: # pylint: disable=broad-except
pytest.fail(
"Unable to convert all demo entities to JSON. "
"Wrong data in state machine!"
)
|
import mimetypes
from pathlib import Path
from typing import Tuple
from aiohttp import web
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.media_player.const import MEDIA_CLASS_DIRECTORY
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.components.media_source.error import Unresolvable
from homeassistant.core import HomeAssistant, callback
from homeassistant.util import sanitize_path
from .const import DOMAIN, MEDIA_CLASS_MAP, MEDIA_MIME_TYPES
from .models import BrowseMediaSource, MediaSource, MediaSourceItem, PlayMedia
@callback
def async_setup(hass: HomeAssistant):
"""Set up local media source."""
source = LocalSource(hass)
hass.data[DOMAIN][DOMAIN] = source
hass.http.register_view(LocalMediaView(hass, source))
class LocalSource(MediaSource):
"""Provide local directories as media sources."""
name: str = "Local Media"
def __init__(self, hass: HomeAssistant):
"""Initialize local source."""
super().__init__(DOMAIN)
self.hass = hass
@callback
def async_full_path(self, source_dir_id, location) -> Path:
"""Return full path."""
return Path(self.hass.config.media_dirs[source_dir_id], location)
@callback
def async_parse_identifier(self, item: MediaSourceItem) -> Tuple[str, str]:
"""Parse identifier."""
if not item.identifier:
# Empty source_dir_id and location
return "", ""
source_dir_id, location = item.identifier.split("/", 1)
if source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
if location != sanitize_path(location):
raise Unresolvable("Invalid path.")
return source_dir_id, location
    async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
source_dir_id, location = self.async_parse_identifier(item)
if source_dir_id == "" or source_dir_id not in self.hass.config.media_dirs:
raise Unresolvable("Unknown source directory.")
mime_type, _ = mimetypes.guess_type(
str(self.async_full_path(source_dir_id, location))
)
return PlayMedia(f"/media/{item.identifier}", mime_type)
async def async_browse_media(
        self, item: MediaSourceItem, media_types: Tuple[str, ...] = MEDIA_MIME_TYPES
) -> BrowseMediaSource:
"""Return media."""
try:
source_dir_id, location = self.async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
return await self.hass.async_add_executor_job(
self._browse_media, source_dir_id, location
)
def _browse_media(self, source_dir_id: str, location: Path):
"""Browse media."""
# If only one media dir is configured, use that as the local media root
if source_dir_id == "" and len(self.hass.config.media_dirs) == 1:
source_dir_id = list(self.hass.config.media_dirs)[0]
        # Multiple folders, root is requested
if source_dir_id == "":
if location:
raise BrowseError("Folder not found.")
base = BrowseMediaSource(
domain=DOMAIN,
identifier="",
media_class=MEDIA_CLASS_DIRECTORY,
media_content_type=None,
title=self.name,
can_play=False,
can_expand=True,
children_media_class=MEDIA_CLASS_DIRECTORY,
)
base.children = [
self._browse_media(source_dir_id, "")
for source_dir_id in self.hass.config.media_dirs
]
return base
full_path = Path(self.hass.config.media_dirs[source_dir_id], location)
if not full_path.exists():
if location == "":
raise BrowseError("Media directory does not exist.")
raise BrowseError("Path does not exist.")
if not full_path.is_dir():
raise BrowseError("Path is not a directory.")
return self._build_item_response(source_dir_id, full_path)
def _build_item_response(self, source_dir_id: str, path: Path, is_child=False):
mime_type, _ = mimetypes.guess_type(str(path))
is_file = path.is_file()
is_dir = path.is_dir()
# Make sure it's a file or directory
if not is_file and not is_dir:
return None
# Check that it's a media file
if is_file and (
not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES
):
return None
title = path.name
if is_dir:
title += "/"
media_class = MEDIA_CLASS_MAP.get(
mime_type and mime_type.split("/")[0], MEDIA_CLASS_DIRECTORY
)
media = BrowseMediaSource(
domain=DOMAIN,
identifier=f"{source_dir_id}/{path.relative_to(self.hass.config.media_dirs[source_dir_id])}",
media_class=media_class,
media_content_type=mime_type or "",
title=title,
can_play=is_file,
can_expand=is_dir,
)
if is_file or is_child:
return media
# Append first level children
media.children = []
for child_path in path.iterdir():
child = self._build_item_response(source_dir_id, child_path, True)
if child:
media.children.append(child)
# Sort children showing directories first, then by name
media.children.sort(key=lambda child: (child.can_play, child.title))
return media
class LocalMediaView(HomeAssistantView):
"""
Local Media Finder View.
Returns media files in config/media.
"""
url = "/media/{source_dir_id}/{location:.*}"
name = "media"
def __init__(self, hass: HomeAssistant, source: LocalSource):
"""Initialize the media view."""
self.hass = hass
self.source = source
async def get(
self, request: web.Request, source_dir_id: str, location: str
) -> web.FileResponse:
"""Start a GET request."""
if location != sanitize_path(location):
raise web.HTTPNotFound()
if source_dir_id not in self.hass.config.media_dirs:
raise web.HTTPNotFound()
media_path = self.source.async_full_path(source_dir_id, location)
# Check that the file exists
if not media_path.is_file():
raise web.HTTPNotFound()
# Check that it's a media file
mime_type, _ = mimetypes.guess_type(str(media_path))
if not mime_type or mime_type.split("/")[0] not in MEDIA_MIME_TYPES:
raise web.HTTPNotFound()
return web.FileResponse(media_path)
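# Identifier layout sketch (paths are illustrative): the item identifier
# "recordings/2021/clip.mp4" splits into source_dir_id "recordings" and
# location "2021/clip.mp4"; async_resolve_media then serves it back at
# /media/recordings/2021/clip.mp4 through LocalMediaView, which re-checks
# sanitize_path, media_dirs membership and the MEDIA_MIME_TYPES whitelist.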
|
import logging
import wave
import pyaudio
from kalliope.core.PlayerModule import PlayerModule
logging.basicConfig()
logger = logging.getLogger("kalliope")
CHUNK = 1024
class Pyaudioplayer(PlayerModule):
"""
    This class represents the player object used to play all the sounds of the system.
"""
def __init__(self, **kwargs):
super(Pyaudioplayer, self).__init__(**kwargs)
logger.debug("[Pyaudioplayer.__init__] instance")
logger.debug("[Pyaudioplayer.__init__] args : %s " % str(kwargs))
def play(self, file_path):
"""
Play the sound located in the provided file_path
:param file_path: The file path of the sound to play. Must be wav format
:type file_path: str
"""
if self.convert:
self.convert_mp3_to_wav(file_path_mp3=file_path)
# open the wave file
wf = wave.open(file_path, 'rb')
# instantiate PyAudio
p = pyaudio.PyAudio()
# open stream (2)
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
channels=wf.getnchannels(),
rate=wf.getframerate(),
# frames_per_buffer=CHUNK,
output=True)
# read data
data = wf.readframes(CHUNK)
logger.debug("Pyplayer file: %s" % str(file_path))
# play stream (3)
while len(data) > 0:
stream.write(data)
data = wf.readframes(CHUNK)
# stop stream (4)
stream.stop_stream()
stream.close()
# close PyAudio
p.terminate()
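# Usage sketch (assumes a PCM WAV file at the given path; constructor
# keyword arguments are forwarded to PlayerModule, which among other
# things sets the self.convert flag used in play()):
#   player = Pyaudioplayer()
#   player.play("/tmp/example.wav")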
|
from kombu.matcher import (
match, register, registry, unregister, fnmatch, rematch,
MatcherNotInstalled
)
import pytest
class test_Matcher:
def test_register_match_unregister_matcher(self):
register("test_matcher", rematch)
registry.matcher_pattern_first.append("test_matcher")
assert registry._matchers["test_matcher"] == rematch
assert match("data", r"d.*", "test_matcher") is not None
assert registry._default_matcher == fnmatch
registry._set_default_matcher("test_matcher")
assert registry._default_matcher == rematch
unregister("test_matcher")
assert "test_matcher" not in registry._matchers
registry._set_default_matcher("glob")
assert registry._default_matcher == fnmatch
def test_unregister_matcher_not_registered(self):
with pytest.raises(MatcherNotInstalled):
unregister('notinstalled')
def test_match_using_unregistered_matcher(self):
with pytest.raises(MatcherNotInstalled):
match("data", r"d.*", "notinstalled")
|
import logging
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from typing import Optional
from typing import Sequence
from marathon.models.task import MarathonTask
from paasta_tools import marathon_tools
from paasta_tools import monitoring_tools
from paasta_tools.check_services_replication_tools import main
from paasta_tools.long_running_service_tools import get_proxy_port_for_instance
from paasta_tools.marathon_tools import format_job_id
from paasta_tools.marathon_tools import MarathonServiceConfig
from paasta_tools.smartstack_tools import MesosSmartstackEnvoyReplicationChecker
log = logging.getLogger(__name__)
def filter_healthy_marathon_instances_for_short_app_id(all_tasks, app_id):
tasks_for_app = [
task for task in all_tasks if task.app_id.startswith("/%s" % app_id)
]
one_minute_ago = datetime.now(timezone.utc) - timedelta(minutes=1)
healthy_tasks = []
for task in tasks_for_app:
if (
marathon_tools.is_task_healthy(task, default_healthy=True)
and task.started_at is not None
and task.started_at < one_minute_ago
):
healthy_tasks.append(task)
return len(healthy_tasks)
def check_healthy_marathon_tasks_for_service_instance(
instance_config, expected_count, all_tasks
):
app_id = format_job_id(instance_config.service, instance_config.instance)
num_healthy_tasks = filter_healthy_marathon_instances_for_short_app_id(
all_tasks=all_tasks, app_id=app_id
)
log.info("Checking %s in marathon as it is not in smartstack" % app_id)
monitoring_tools.send_replication_event_if_under_replication(
instance_config=instance_config,
expected_count=expected_count,
num_available=num_healthy_tasks,
)
def check_service_replication(
instance_config: MarathonServiceConfig,
all_tasks_or_pods: Sequence[MarathonTask],
replication_checker: MesosSmartstackEnvoyReplicationChecker,
) -> Optional[bool]:
"""Checks a service's replication levels based on how the service's replication
should be monitored. (smartstack/envoy or mesos)
    :param instance_config: an instance of MarathonServiceConfig
    :param all_tasks_or_pods: all Marathon tasks in the cluster
    :param replication_checker: an instance of MesosSmartstackEnvoyReplicationChecker
"""
expected_count = instance_config.get_instances()
log.info(
"Expecting %d total tasks for %s" % (expected_count, instance_config.job_id)
)
proxy_port = get_proxy_port_for_instance(instance_config)
registrations = instance_config.get_registrations()
# if the primary registration does not match the service_instance name then
# the best we can do is check marathon for replication (for now).
if proxy_port is not None and registrations[0] == instance_config.job_id:
is_well_replicated = monitoring_tools.check_replication_for_instance(
instance_config=instance_config,
expected_count=expected_count,
replication_checker=replication_checker,
)
return is_well_replicated
else:
check_healthy_marathon_tasks_for_service_instance(
instance_config=instance_config,
expected_count=expected_count,
all_tasks=all_tasks_or_pods,
)
return None
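# Illustrative decision flow (service/instance names are made up): for
# service "myservice" instance "main", job_id is "myservice.main"; when the
# first registration equals that job_id and a proxy port is configured, the
# smartstack/envoy checker is used, otherwise healthy Marathon tasks are
# counted directly via check_healthy_marathon_tasks_for_service_instance.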
if __name__ == "__main__":
main(
instance_type_class=marathon_tools.MarathonServiceConfig,
check_service_replication=check_service_replication,
namespace=None, # not relevant for mesos
mesos=True,
)
|
import asyncio
import logging
from pysqueezebox import Server, async_discover
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
HTTP_UNAUTHORIZED,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
# pylint: disable=unused-import
from .const import DEFAULT_PORT, DOMAIN
_LOGGER = logging.getLogger(__name__)
TIMEOUT = 5
def _base_schema(discovery_info=None):
"""Generate base schema."""
base_schema = {}
if discovery_info and CONF_HOST in discovery_info:
base_schema.update(
{
vol.Required(
CONF_HOST,
description={"suggested_value": discovery_info[CONF_HOST]},
): str,
}
)
else:
base_schema.update({vol.Required(CONF_HOST): str})
if discovery_info and CONF_PORT in discovery_info:
base_schema.update(
{
vol.Required(
CONF_PORT,
default=DEFAULT_PORT,
description={"suggested_value": discovery_info[CONF_PORT]},
): int,
}
)
else:
base_schema.update({vol.Required(CONF_PORT, default=DEFAULT_PORT): int})
base_schema.update(
{vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str}
)
return vol.Schema(base_schema)
class SqueezeboxConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Logitech Squeezebox."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize an instance of the squeezebox config flow."""
self.data_schema = _base_schema()
self.discovery_info = None
async def _discover(self, uuid=None):
"""Discover an unconfigured LMS server."""
self.discovery_info = None
discovery_event = asyncio.Event()
def _discovery_callback(server):
if server.uuid:
# ignore already configured uuids
for entry in self._async_current_entries():
if entry.unique_id == server.uuid:
return
self.discovery_info = {
CONF_HOST: server.host,
CONF_PORT: server.port,
"uuid": server.uuid,
}
_LOGGER.debug("Discovered server: %s", self.discovery_info)
discovery_event.set()
discovery_task = self.hass.async_create_task(
async_discover(_discovery_callback)
)
await discovery_event.wait()
        discovery_task.cancel()  # stop searching as soon as we find a server
# update with suggested values from discovery
self.data_schema = _base_schema(self.discovery_info)
async def _validate_input(self, data):
"""
Validate the user input allows us to connect.
Retrieve unique id and abort if already configured.
"""
server = Server(
async_get_clientsession(self.hass),
data[CONF_HOST],
data[CONF_PORT],
data.get(CONF_USERNAME),
data.get(CONF_PASSWORD),
)
try:
status = await server.async_query("serverstatus")
if not status:
if server.http_status == HTTP_UNAUTHORIZED:
return "invalid_auth"
return "cannot_connect"
except Exception: # pylint: disable=broad-except
return "unknown"
if "uuid" in status:
await self.async_set_unique_id(status["uuid"])
self._abort_if_unique_id_configured()
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input and CONF_HOST in user_input:
# update with host provided by user
self.data_schema = _base_schema(user_input)
return await self.async_step_edit()
# no host specified, see if we can discover an unconfigured LMS server
try:
await asyncio.wait_for(self._discover(), timeout=TIMEOUT)
return await self.async_step_edit()
except asyncio.TimeoutError:
errors["base"] = "no_server_found"
# display the form
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Optional(CONF_HOST): str}),
errors=errors,
)
async def async_step_edit(self, user_input=None):
"""Edit a discovered or manually inputted server."""
errors = {}
if user_input:
error = await self._validate_input(user_input)
if not error:
return self.async_create_entry(
title=user_input[CONF_HOST], data=user_input
)
errors["base"] = error
return self.async_show_form(
step_id="edit", data_schema=self.data_schema, errors=errors
)
async def async_step_import(self, config):
"""Import a config flow from configuration."""
error = await self._validate_input(config)
if error:
return self.async_abort(reason=error)
return self.async_create_entry(title=config[CONF_HOST], data=config)
async def async_step_discovery(self, discovery_info):
"""Handle discovery."""
_LOGGER.debug("Reached discovery flow with info: %s", discovery_info)
if "uuid" in discovery_info:
await self.async_set_unique_id(discovery_info.pop("uuid"))
self._abort_if_unique_id_configured()
else:
# attempt to connect to server and determine uuid. will fail if password required
error = await self._validate_input(discovery_info)
if error:
await self._async_handle_discovery_without_unique_id()
# update schema with suggested values from discovery
self.data_schema = _base_schema(discovery_info)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {"host": discovery_info[CONF_HOST]}})
return await self.async_step_edit()
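# Flow sketch (host/port values are illustrative): a discovered server such
# as {CONF_HOST: "192.168.1.2", CONF_PORT: 9000, "uuid": "..."} pre-fills
# the edit form through _base_schema(); a user-initiated flow without a
# host first runs _discover() for up to TIMEOUT seconds before falling back
# to a manual host form with a "no_server_found" error.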
|
from keras.models import Model
from keras.layers import Input, Dot
from matchzoo.engine.param_table import ParamTable
from matchzoo.engine.base_model import BaseModel
from matchzoo import preprocessors
class DSSM(BaseModel):
"""
Deep structured semantic model.
Examples:
>>> model = DSSM()
>>> model.params['mlp_num_layers'] = 3
>>> model.params['mlp_num_units'] = 300
>>> model.params['mlp_num_fan_out'] = 128
>>> model.params['mlp_activation_func'] = 'relu'
>>> model.guess_and_fill_missing_params(verbose=0)
>>> model.build()
"""
@classmethod
def get_default_params(cls) -> ParamTable:
""":return: model default parameters."""
params = super().get_default_params(with_multi_layer_perceptron=True)
return params
def build(self):
"""
Build model structure.
        DSSM uses a Siamese architecture.
"""
dim_triletter = self._params['input_shapes'][0][0]
input_shape = (dim_triletter,)
base_network = self._make_multi_layer_perceptron_layer()
# Left input and right input.
input_left = Input(name='text_left', shape=input_shape)
input_right = Input(name='text_right', shape=input_shape)
# Process left & right input.
x = [base_network(input_left),
base_network(input_right)]
# Dot product with cosine similarity.
x = Dot(axes=[1, 1], normalize=True)(x)
x_out = self._make_output_layer()(x)
self._backend = Model(
inputs=[input_left, input_right],
outputs=x_out)
@classmethod
def get_default_preprocessor(cls):
""":return: Default preprocessor."""
return preprocessors.DSSMPreprocessor()
|
import argparse
import chainer
from chainer import iterators
import chainermn
from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook
from eval_instance_segmentation import models
from eval_instance_segmentation import setup
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', choices=('sbd', 'coco'))
parser.add_argument('--model', choices=sorted(models.keys()))
parser.add_argument('--pretrained-model')
parser.add_argument('--batchsize', type=int)
args = parser.parse_args()
comm = chainermn.create_communicator('pure_nccl')
device = comm.intra_rank
dataset, eval_, model, batchsize = setup(
args.dataset, args.model, args.pretrained_model, args.batchsize)
chainer.cuda.get_device_from_id(device).use()
model.to_gpu()
    if comm.rank != 0:
apply_to_iterator(model.predict, None, comm=comm)
return
iterator = iterators.MultithreadIterator(
dataset, batchsize * comm.size, repeat=False, shuffle=False)
in_values, out_values, rest_values = apply_to_iterator(
model.predict, iterator, hook=ProgressHook(len(dataset)), comm=comm)
# delete unused iterators explicitly
del in_values
eval_(out_values, rest_values)
if __name__ == '__main__':
main()
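# Example launch (command and model name are illustrative): the pure_nccl
# communicator expects one process per GPU, e.g.
#   mpiexec -n 8 python this_script.py --dataset coco \
#       --model <model-name> --pretrained-model <weights> --batchsize 1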
|
import datetime
from typing import Set
from appconf import AppConf
from django.conf import settings
from django.contrib.auth.signals import user_logged_in
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models, transaction
from django.db.models import F, Q
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from rest_framework.authtoken.models import Token
from social_django.models import UserSocialAuth
from weblate.accounts.avatar import get_user_display
from weblate.accounts.data import create_default_notifications
from weblate.accounts.notifications import FREQ_CHOICES, NOTIFICATIONS, SCOPE_CHOICES
from weblate.accounts.tasks import notify_auditlog
from weblate.auth.models import User
from weblate.lang.models import Language
from weblate.trans.defines import EMAIL_LENGTH
from weblate.utils import messages
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.fields import EmailField, JSONField
from weblate.utils.render import validate_editor
from weblate.utils.request import get_ip_address, get_user_agent
class Subscription(models.Model):
user = models.ForeignKey(User, on_delete=models.deletion.CASCADE)
notification = models.CharField(
choices=[n.get_choice() for n in NOTIFICATIONS], max_length=100
)
scope = models.IntegerField(choices=SCOPE_CHOICES)
frequency = models.IntegerField(choices=FREQ_CHOICES)
project = models.ForeignKey(
"trans.Project", on_delete=models.deletion.CASCADE, null=True
)
component = models.ForeignKey(
"trans.Component", on_delete=models.deletion.CASCADE, null=True
)
onetime = models.BooleanField(default=False)
class Meta:
unique_together = [("notification", "scope", "project", "component", "user")]
def __str__(self):
return "{}:{},{} ({},{})".format(
self.user.username,
self.get_scope_display(),
self.get_notification_display(),
self.project,
self.component,
)
ACCOUNT_ACTIVITY = {
"password": _("Password changed."),
"username": _("Username changed from {old} to {new}."),
"email": _("E-mail changed from {old} to {new}."),
"full_name": _("Full name changed from {old} to {new}."),
"reset-request": _("Password reset requested."),
"reset": _("Password reset confirmed, password turned off."),
"auth-connect": _("Configured sign in using {method} ({name})."),
"auth-disconnect": _("Removed sign in using {method} ({name})."),
"login": _("Signed in using {method} ({name})."),
"login-new": _("Signed in using {method} ({name}) from a new device."),
"register": _("Somebody has attempted to register with your e-mail."),
"connect": _("Somebody has attempted to register using your e-mail address."),
"failed-auth": _("Could not sign in using {method} ({name})."),
"locked": _("Account locked due to many failed sign in attempts."),
"removed": _("Account and all private data removed."),
"tos": _("Agreement with Terms of Service {date}."),
"invited": _("Invited to Weblate by {username}."),
"trial": _("Started trial period."),
}
# Override activity messages based on method
ACCOUNT_ACTIVITY_METHOD = {
"password": {
"auth-connect": _("Configured password to sign in."),
"login": _("Signed in using password."),
"login-new": _("Signed in using password from a new device."),
"failed-auth": _("Could not sign in using password."),
}
}
EXTRA_MESSAGES = {
"locked": _("To restore access to your account, please reset your password.")
}
NOTIFY_ACTIVITY = {
"password",
"reset",
"auth-connect",
"auth-disconnect",
"register",
"connect",
"locked",
"removed",
"login-new",
"email",
"username",
"full_name",
}
class AuditLogManager(models.Manager):
def is_new_login(self, user, address, user_agent):
"""Checks whether this login is coming from a new device.
        Currently based on the IP address and user agent.
"""
logins = self.filter(user=user, activity="login-new")
# First login
if not logins.exists():
return False
return not logins.filter(Q(address=address) | Q(user_agent=user_agent)).exists()
def create(self, user, request, activity, **params):
address = get_ip_address(request)
user_agent = get_user_agent(request)
if activity == "login" and self.is_new_login(user, address, user_agent):
activity = "login-new"
return super().create(
user=user,
activity=activity,
address=address,
user_agent=user_agent,
params=params,
)
class AuditLogQuerySet(models.QuerySet):
def get_after(self, user, after, activity):
"""Get user activites of given type after another activity.
This is mostly used for rate limiting, as it can return the number of failed
authentication attempts since last login.
"""
try:
latest_login = self.filter(user=user, activity=after).order()[0]
kwargs = {"timestamp__gte": latest_login.timestamp}
except IndexError:
kwargs = {}
return self.filter(user=user, activity=activity, **kwargs)
def get_password(self, user):
"""Get user activities with password change."""
start = timezone.now() - datetime.timedelta(days=settings.AUTH_PASSWORD_DAYS)
return self.filter(
user=user, activity__in=("reset", "password"), timestamp__gt=start
)
def order(self):
return self.order_by("-timestamp")
class AuditLog(models.Model):
"""User audit log storage."""
user = models.ForeignKey(User, on_delete=models.deletion.CASCADE)
activity = models.CharField(
max_length=20,
choices=[(a, a) for a in sorted(ACCOUNT_ACTIVITY.keys())],
db_index=True,
)
params = JSONField()
address = models.GenericIPAddressField(null=True)
user_agent = models.CharField(max_length=200, default="")
timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
objects = AuditLogManager.from_queryset(AuditLogQuerySet)()
def __str__(self):
return f"{self.activity} for {self.user.username} from {self.address}"
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
if self.should_notify():
email = self.user.email
transaction.on_commit(lambda: notify_auditlog.delay(self.pk, email))
def get_params(self):
from weblate.accounts.templatetags.authnames import get_auth_name
result = {}
result.update(self.params)
if "method" in result:
# The gettext is here for legacy entries which contained method name
result["method"] = gettext(get_auth_name(result["method"]))
return result
def get_message(self):
method = self.params.get("method")
activity = self.activity
if activity in ACCOUNT_ACTIVITY_METHOD.get(method, {}):
message = ACCOUNT_ACTIVITY_METHOD[method][activity]
else:
message = ACCOUNT_ACTIVITY[activity]
return message.format(**self.get_params())
get_message.short_description = _("Account activity")
def get_extra_message(self):
if self.activity in EXTRA_MESSAGES:
return EXTRA_MESSAGES[self.activity].format(**self.params)
return None
def should_notify(self):
return self.user.is_active and self.activity in NOTIFY_ACTIVITY
def check_rate_limit(self, request):
"""Check whether the activity should be rate limited."""
if self.activity == "failed-auth" and self.user.has_usable_password():
failures = AuditLog.objects.get_after(self.user, "login", "failed-auth")
if failures.count() >= settings.AUTH_LOCK_ATTEMPTS:
self.user.set_unusable_password()
self.user.save(update_fields=["password"])
AuditLog.objects.create(self.user, request, "locked")
return True
elif self.activity == "reset-request":
failures = AuditLog.objects.filter(
user=self.user,
timestamp__gte=timezone.now() - datetime.timedelta(days=1),
activity="reset-request",
)
if failures.count() >= settings.AUTH_LOCK_ATTEMPTS:
return True
return False
class VerifiedEmail(models.Model):
"""Storage for verified e-mails from auth backends."""
social = models.ForeignKey(UserSocialAuth, on_delete=models.deletion.CASCADE)
email = models.EmailField(max_length=EMAIL_LENGTH)
def __str__(self):
return f"{self.social.user.username} - {self.email}"
@property
def provider(self):
return self.social.provider
class Profile(models.Model):
"""User profiles storage."""
user = models.OneToOneField(
User, unique=True, editable=False, on_delete=models.deletion.CASCADE
)
language = models.CharField(
verbose_name=_("Interface Language"),
max_length=10,
blank=True,
choices=settings.LANGUAGES,
)
languages = models.ManyToManyField(
Language,
verbose_name=_("Translated languages"),
blank=True,
help_text=_(
"Choose the languages you can translate to. "
"These will be offered to you on the dashboard "
"for easier access to your chosen translations."
),
)
secondary_languages = models.ManyToManyField(
Language,
verbose_name=_("Secondary languages"),
help_text=_(
"Choose languages you can understand, strings in those languages "
"will be shown in addition to the source string."
),
related_name="secondary_profile_set",
blank=True,
)
suggested = models.IntegerField(default=0, db_index=True)
translated = models.IntegerField(default=0, db_index=True)
uploaded = models.IntegerField(default=0, db_index=True)
commented = models.IntegerField(default=0, db_index=True)
hide_completed = models.BooleanField(
verbose_name=_("Hide completed translations on the dashboard"), default=False
)
secondary_in_zen = models.BooleanField(
verbose_name=_("Show secondary translations in the Zen mode"), default=True
)
hide_source_secondary = models.BooleanField(
verbose_name=_("Hide source if a secondary translation exists"), default=False
)
editor_link = models.CharField(
default="",
blank=True,
max_length=200,
verbose_name=_("Editor link"),
help_text=_(
"Enter a custom URL to be used as link to the source code. "
"You can use {{branch}} for branch, "
"{{filename}} and {{line}} as filename and line placeholders."
),
validators=[validate_editor],
)
TRANSLATE_FULL = 0
TRANSLATE_ZEN = 1
translate_mode = models.IntegerField(
verbose_name=_("Translation editor mode"),
choices=((TRANSLATE_FULL, _("Full editor")), (TRANSLATE_ZEN, _("Zen mode"))),
default=TRANSLATE_FULL,
)
ZEN_VERTICAL = 0
ZEN_HORIZONTAL = 1
zen_mode = models.IntegerField(
verbose_name=_("Zen editor mode"),
choices=(
(ZEN_VERTICAL, _("Top to bottom")),
(ZEN_HORIZONTAL, _("Side by side")),
),
default=ZEN_VERTICAL,
)
special_chars = models.CharField(
default="",
blank=True,
max_length=30,
verbose_name=_("Special characters"),
help_text=_(
"You can specify additional special visual keyboard characters "
"to be shown while translating. It can be useful for "
"characters you use frequently, but are hard to type on your keyboard."
),
)
nearby_strings = models.SmallIntegerField(
verbose_name=_("Number of nearby strings"),
default=settings.NEARBY_MESSAGES,
validators=[MinValueValidator(1), MaxValueValidator(50)],
help_text=_(
"Number of nearby strings to show in each direction in the full editor."
),
)
DASHBOARD_WATCHED = 1
DASHBOARD_COMPONENT_LIST = 4
DASHBOARD_SUGGESTIONS = 5
DASHBOARD_COMPONENT_LISTS = 6
DASHBOARD_CHOICES = (
(DASHBOARD_WATCHED, _("Watched translations")),
(DASHBOARD_COMPONENT_LISTS, _("Component lists")),
(DASHBOARD_COMPONENT_LIST, _("Component list")),
(DASHBOARD_SUGGESTIONS, _("Suggested translations")),
)
DASHBOARD_SLUGS = {
DASHBOARD_WATCHED: "your-subscriptions",
DASHBOARD_COMPONENT_LIST: "list",
DASHBOARD_SUGGESTIONS: "suggestions",
DASHBOARD_COMPONENT_LISTS: "componentlists",
}
dashboard_view = models.IntegerField(
choices=DASHBOARD_CHOICES,
verbose_name=_("Default dashboard view"),
default=DASHBOARD_WATCHED,
)
dashboard_component_list = models.ForeignKey(
"trans.ComponentList",
verbose_name=_("Default component list"),
on_delete=models.deletion.SET_NULL,
blank=True,
null=True,
)
watched = models.ManyToManyField(
"trans.Project",
verbose_name=_("Watched projects"),
help_text=_(
"You can receive notifications for watched projects and "
"they are shown on the dashboard by default."
),
blank=True,
)
# Public profile fields
website = models.URLField(
verbose_name=_("Website URL"),
blank=True,
)
liberapay = models.SlugField(
verbose_name=_("Liberapay username"),
blank=True,
help_text=_(
"Liberapay is a platform to donate money to teams, "
"organizations and individuals."
),
)
fediverse = models.URLField(
verbose_name=_("Fediverse URL"),
blank=True,
help_text=_(
"Link to your Fediverse profile for federated services "
"like Mastodon or diaspora*."
),
)
codesite = models.URLField(
verbose_name=_("Code site URL"),
blank=True,
help_text=_("Link to your code profile for services like Codeberg or GitLab."),
)
github = models.SlugField(
verbose_name=_("GitHub username"),
blank=True,
)
twitter = models.SlugField(
verbose_name=_("Twitter username"),
blank=True,
)
linkedin = models.SlugField(
verbose_name=_("LinkedIn profile name"),
help_text=_("Your LinkedIn profile name from linkedin.com/in/profilename"),
blank=True,
)
location = models.CharField(
verbose_name=_("Location"),
max_length=100,
blank=True,
)
company = models.CharField(
verbose_name=_("Company"),
max_length=100,
blank=True,
)
public_email = EmailField(
verbose_name=_("Public e-mail"),
blank=True,
max_length=EMAIL_LENGTH,
)
def __str__(self):
return self.user.username
def get_absolute_url(self):
return self.user.get_absolute_url()
def get_user_display(self):
return get_user_display(self.user)
def get_user_display_link(self):
return get_user_display(self.user, True, True)
def get_user_name(self):
return get_user_display(self.user, False)
def increase_count(self, item: str, increase: int = 1):
"""Updates user actions counter."""
# Update our copy
setattr(self, item, getattr(self, item) + increase)
# Update database
update = {item: F(item) + increase}
Profile.objects.filter(pk=self.pk).update(**update)
@property
def full_name(self):
"""Return user's full name."""
return self.user.full_name
def clean(self):
"""Check if component list is chosen when required."""
# There is matching logic in ProfileBaseForm.add_error to ignore this
# validation on partial forms
if (
self.dashboard_view == Profile.DASHBOARD_COMPONENT_LIST
and self.dashboard_component_list is None
):
message = _(
"Please choose which component list you want to display on "
"the dashboard."
)
raise ValidationError(
{"dashboard_component_list": message, "dashboard_view": message}
)
if (
self.dashboard_view != Profile.DASHBOARD_COMPONENT_LIST
and self.dashboard_component_list is not None
):
message = _(
"Selecting component list has no effect when not shown on "
"the dashboard."
)
raise ValidationError(
{"dashboard_component_list": message, "dashboard_view": message}
)
def dump_data(self):
def dump_object(obj, *attrs):
return {attr: getattr(obj, attr) for attr in attrs}
result = {
"basic": dump_object(
self.user, "username", "full_name", "email", "date_joined"
),
"profile": dump_object(
self,
"language",
"suggested",
"translated",
"uploaded",
"hide_completed",
"secondary_in_zen",
"hide_source_secondary",
"editor_link",
"translate_mode",
"zen_mode",
"special_chars",
"dashboard_view",
"dashboard_component_list",
),
"auditlog": [
dump_object(log, "address", "user_agent", "timestamp", "activity")
for log in self.user.auditlog_set.iterator()
],
}
result["profile"]["languages"] = [
lang.code for lang in self.languages.iterator()
]
result["profile"]["secondary_languages"] = [
lang.code for lang in self.secondary_languages.iterator()
]
result["profile"]["watched"] = [
project.slug for project in self.watched.iterator()
]
return result
@cached_property
def primary_language_ids(self) -> Set[int]:
return set(self.languages.values_list("pk", flat=True))
@cached_property
def secondary_language_ids(self) -> Set[int]:
return set(self.secondary_languages.values_list("pk", flat=True))
def get_language_order(self, language: Language) -> int:
"""Returns key suitable for ordering languages based on user preferences."""
if language.pk in self.primary_language_ids:
return 0
if language.pk in self.secondary_language_ids:
return 1
return 2
@cached_property
def watched_project_ids(self):
# We do not use values_list, because we prefetch this
return {watched.id for watched in self.watched.all()}
def watches_project(self, project):
return project.id in self.watched_project_ids
def set_lang_cookie(response, profile):
"""Set session language based on user preferences."""
if profile.language:
response.set_cookie(
settings.LANGUAGE_COOKIE_NAME,
profile.language,
max_age=settings.LANGUAGE_COOKIE_AGE,
path=settings.LANGUAGE_COOKIE_PATH,
domain=settings.LANGUAGE_COOKIE_DOMAIN,
secure=settings.LANGUAGE_COOKIE_SECURE,
httponly=settings.LANGUAGE_COOKIE_HTTPONLY,
samesite=settings.LANGUAGE_COOKIE_SAMESITE,
)
@receiver(user_logged_in)
def post_login_handler(sender, request, user, **kwargs):
"""Signal handler for post login.
It sets user language and migrates profile if needed.
"""
backend_name = getattr(user, "backend", "")
is_email_auth = backend_name.endswith(".EmailAuth") or backend_name.endswith(
".WeblateUserBackend"
)
# Warning about setting password
if is_email_auth and not user.has_usable_password():
request.session["show_set_password"] = True
# Migrate django-registration based verification to python-social-auth
# and handle external authentication such as LDAP
if (
is_email_auth
and user.has_usable_password()
and user.email
and not user.social_auth.filter(provider="email").exists()
):
social = user.social_auth.create(provider="email", uid=user.email)
VerifiedEmail.objects.create(social=social, email=user.email)
# Fixup accounts with empty name
if not user.full_name:
user.full_name = user.username
user.save(update_fields=["full_name"])
# Warn about not set e-mail
if not user.email:
messages.error(
request,
_(
"You can not submit translations as "
"you do not have assigned any e-mail address."
),
)
@receiver(post_save, sender=User)
@disable_for_loaddata
def create_profile_callback(sender, instance, created=False, **kwargs):
"""Automatically create token and profile for user."""
if created:
# Create API token
Token.objects.create(user=instance, key=get_random_string(40))
# Create profile
Profile.objects.create(user=instance)
# Create subscriptions
if not instance.is_anonymous:
create_default_notifications(instance)
class WeblateAccountsConf(AppConf):
"""Accounts settings."""
# Disable avatars
ENABLE_AVATARS = True
# Avatar URL prefix
AVATAR_URL_PREFIX = "https://www.gravatar.com/"
# Avatar fallback image
# See http://en.gravatar.com/site/implement/images/ for available choices
AVATAR_DEFAULT_IMAGE = "identicon"
# Enable registrations
REGISTRATION_OPEN = True
# Allow registration from certain backends
REGISTRATION_ALLOW_BACKENDS = []
# Registration email filter
REGISTRATION_EMAIL_MATCH = ".*"
# Captcha for registrations
REGISTRATION_CAPTCHA = True
# How long to keep auditlog entries
AUDITLOG_EXPIRY = 180
# Auth0 provider default image & title on login page
SOCIAL_AUTH_AUTH0_IMAGE = "auth0.svg"
SOCIAL_AUTH_AUTH0_TITLE = "Auth0"
SOCIAL_AUTH_SAML_IMAGE = "saml.svg"
SOCIAL_AUTH_SAML_TITLE = "SAML"
# Login required URLs
LOGIN_REQUIRED_URLS = []
LOGIN_REQUIRED_URLS_EXCEPTIONS = (
r"{URL_PREFIX}/accounts/(.*)$", # Required for login
r"{URL_PREFIX}/admin/login/(.*)$", # Required for admin login
r"{URL_PREFIX}/static/(.*)$", # Required for development mode
r"{URL_PREFIX}/widgets/(.*)$", # Allowing public access to widgets
r"{URL_PREFIX}/data/(.*)$", # Allowing public access to data exports
r"{URL_PREFIX}/hooks/(.*)$", # Allowing public access to notification hooks
r"{URL_PREFIX}/healthz/$", # Allowing public access to health check
r"{URL_PREFIX}/api/(.*)$", # Allowing access to API
r"{URL_PREFIX}/js/i18n/$", # JavaScript localization
r"{URL_PREFIX}/contact/$", # Optional for contact form
r"{URL_PREFIX}/legal/(.*)$", # Optional for legal app
)
class Meta:
prefix = ""
|
from datetime import timedelta
import logging
from math import ceil
import pytankerkoenig
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_SHOW_ON_MAP,
)
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from .const import CONF_FUEL_TYPES, CONF_STATIONS, DOMAIN, FUEL_TYPES
_LOGGER = logging.getLogger(__name__)
DEFAULT_RADIUS = 2
DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
vol.Optional(CONF_FUEL_TYPES, default=FUEL_TYPES): vol.All(
cv.ensure_list, [vol.In(FUEL_TYPES)]
),
vol.Inclusive(
CONF_LATITUDE,
"coordinates",
"Latitude and longitude must exist together",
): cv.latitude,
vol.Inclusive(
CONF_LONGITUDE,
"coordinates",
"Latitude and longitude must exist together",
): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.All(
cv.positive_int, vol.Range(min=1)
),
vol.Optional(CONF_STATIONS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_SHOW_ON_MAP, default=True): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set the tankerkoenig component up."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
_LOGGER.debug("Setting up integration")
tankerkoenig = TankerkoenigData(hass, conf)
latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
radius = conf[CONF_RADIUS]
additional_stations = conf[CONF_STATIONS]
setup_ok = await hass.async_add_executor_job(
tankerkoenig.setup, latitude, longitude, radius, additional_stations
)
if not setup_ok:
_LOGGER.error("Could not setup integration")
return False
hass.data[DOMAIN] = tankerkoenig
hass.async_create_task(
async_load_platform(
hass,
SENSOR_DOMAIN,
DOMAIN,
discovered=tankerkoenig.stations,
hass_config=conf,
)
)
return True
class TankerkoenigData:
"""Get the latest data from the API."""
def __init__(self, hass, conf):
"""Initialize the data object."""
self._api_key = conf[CONF_API_KEY]
self.stations = {}
self.fuel_types = conf[CONF_FUEL_TYPES]
self.update_interval = conf[CONF_SCAN_INTERVAL]
self.show_on_map = conf[CONF_SHOW_ON_MAP]
self._hass = hass
def setup(self, latitude, longitude, radius, additional_stations):
"""Set up the tankerkoenig API.
        Read the initial data from the server to initialize the list of fuel stations to monitor.
"""
_LOGGER.debug("Fetching data for (%s, %s) rad: %s", latitude, longitude, radius)
try:
data = pytankerkoenig.getNearbyStations(
self._api_key, latitude, longitude, radius, "all", "dist"
)
except pytankerkoenig.customException as err:
data = {"ok": False, "message": err, "exception": True}
_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Setup for sensors was unsuccessful. Error occurred while fetching data from tankerkoenig.de: %s",
data["message"],
)
return False
# Add stations found via location + radius
nearby_stations = data["stations"]
if not nearby_stations:
if not additional_stations:
_LOGGER.error(
"Could not find any station in range."
"Try with a bigger radius or manually specify stations in additional_stations"
)
return False
_LOGGER.warning(
"Could not find any station in range. Will only use manually specified stations"
)
else:
for station in data["stations"]:
self.add_station(station)
# Add manually specified additional stations
for station_id in additional_stations:
try:
additional_station_data = pytankerkoenig.getStationData(
self._api_key, station_id
)
except pytankerkoenig.customException as err:
additional_station_data = {
"ok": False,
"message": err,
"exception": True,
}
if not additional_station_data["ok"]:
_LOGGER.error(
"Error when adding station %s:\n %s",
station_id,
additional_station_data["message"],
)
return False
self.add_station(additional_station_data["station"])
if len(self.stations) > 10:
_LOGGER.warning(
"Found more than 10 stations to check. "
"This might invalidate your api-key on the long run. "
"Try using a smaller radius"
)
return True
async def fetch_data(self):
"""Get the latest data from tankerkoenig.de."""
_LOGGER.debug("Fetching new data from tankerkoenig.de")
station_ids = list(self.stations)
prices = {}
# The API seems to only return at most 10 results, so split the list in chunks of 10
# and merge it together.
for index in range(ceil(len(station_ids) / 10)):
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList,
self._api_key,
station_ids[index * 10 : (index + 1) * 10],
)
_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
if "prices" not in data:
_LOGGER.error("Did not receive price information from tankerkoenig.de")
raise TankerkoenigError("No prices in data")
prices.update(data["prices"])
return prices
def add_station(self, station: dict):
"""Add fuel station to the entity list."""
station_id = station["id"]
if station_id in self.stations:
_LOGGER.warning(
"Sensor for station with id %s was already created", station_id
)
return
self.stations[station_id] = station
_LOGGER.debug("add_station called for station: %s", station)
class TankerkoenigError(HomeAssistantError):
"""An error occurred while contacting tankerkoenig.de."""
|
from datetime import datetime
import mock
from mock import Mock
from paasta_tools.autoscaling import ec2_fitness
from paasta_tools.mesos_tools import SlaveTaskCount
def test_sort_by_total_tasks():
mock_slave_1 = Mock(
task_counts=SlaveTaskCount(count=3, slave=Mock(), batch_count=0)
)
mock_slave_2 = Mock(
task_counts=SlaveTaskCount(count=2, slave=Mock(), batch_count=1)
)
mock_slave_3 = Mock(
task_counts=SlaveTaskCount(count=5, slave=Mock(), batch_count=0)
)
ret = ec2_fitness.sort_by_total_tasks([mock_slave_1, mock_slave_2, mock_slave_3])
assert ret == [mock_slave_3, mock_slave_1, mock_slave_2]
def test_sort_by_running_batch_count():
mock_slave_1 = Mock(
task_counts=SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
)
mock_slave_2 = Mock(
task_counts=SlaveTaskCount(count=2, slave=Mock(), batch_count=2)
)
mock_slave_3 = Mock(
task_counts=SlaveTaskCount(count=5, slave=Mock(), batch_count=3)
)
ret = ec2_fitness.sort_by_running_batch_count(
[mock_slave_1, mock_slave_2, mock_slave_3]
)
assert ret == [mock_slave_3, mock_slave_2, mock_slave_1]
def test_sort_by_health_system_instance_health_system_status_failed():
mock_slave_1 = Mock(name="slave1")
mock_slave_1.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_1.instance_status = {
"Events": [
{
"Code": "instance-reboot",
"Description": "string",
"NotBefore": datetime(2015, 1, 1),
"NotAfter": datetime(2015, 1, 1),
}
],
"SystemStatus": {"Status": "impaired"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_2 = Mock(name="slave2")
    mock_slave_2.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_2.instance_status = {
"Events": [
{
"Code": "instance-reboot",
"Description": "string",
"NotBefore": datetime(2015, 1, 1),
"NotAfter": datetime(2015, 1, 1),
}
],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
ret = ec2_fitness.sort_by_system_instance_health([mock_slave_1, mock_slave_2])
assert ret == [mock_slave_2, mock_slave_1]
def test_sort_by_upcoming_events():
mock_slave_1 = Mock()
mock_slave_1.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_1.instance_status = {
"Events": [],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_2 = Mock()
mock_slave_2.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_2.instance_status = {
"Events": [
{
"Code": "instance-reboot",
"Description": "string",
"NotBefore": datetime(2015, 1, 1),
"NotAfter": datetime(2015, 1, 1),
}
],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
ret = ec2_fitness.sort_by_upcoming_events([mock_slave_1, mock_slave_2])
assert ret == [mock_slave_1, mock_slave_2]
def test_sort_by_fitness_calls_all_sorting_funcs():
with mock.patch(
"paasta_tools.autoscaling.ec2_fitness.sort_by_system_instance_health",
autospec=True,
) as mock_sort_by_system_instance_health, mock.patch(
"paasta_tools.autoscaling.ec2_fitness.sort_by_upcoming_events", autospec=True
) as mock_sort_by_upcoming_events, mock.patch(
"paasta_tools.autoscaling.ec2_fitness.sort_by_running_batch_count",
autospec=True,
) as mock_sort_by_running_batch_count, mock.patch(
"paasta_tools.autoscaling.ec2_fitness.sort_by_total_tasks", autospec=True
) as mock_sort_by_total_tasks:
instances = []
ec2_fitness.sort_by_ec2_fitness(instances)
assert mock_sort_by_total_tasks.called
assert mock_sort_by_running_batch_count.called
assert mock_sort_by_upcoming_events.called
assert mock_sort_by_system_instance_health.called
def test_sort_by_fitness():
mock_slave_1 = Mock(name="slave1")
mock_slave_1.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_1.instance_status = {
"Events": [],
"SystemStatus": {"Status": "impaired"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_2 = Mock(name="slave2")
mock_slave_2.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_2.instance_status = {
"Events": [
{
"Code": "instance-reboot",
"Description": "foo",
"NotBefore": datetime(2015, 1, 1),
"NotAfter": datetime(2015, 1, 1),
}
],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_3 = Mock(name="slave3")
mock_slave_3.task_counts = SlaveTaskCount(count=2, slave=Mock(), batch_count=3)
mock_slave_3.instance_status = {
"Events": [],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_4 = Mock(name="slave4")
mock_slave_4.task_counts = SlaveTaskCount(count=3, slave=Mock(), batch_count=1)
mock_slave_4.instance_status = {
"Events": [],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
mock_slave_5 = Mock(name="slave5")
mock_slave_5.task_counts = SlaveTaskCount(count=1, slave=Mock(), batch_count=1)
mock_slave_5.instance_status = {
"Events": [],
"SystemStatus": {"Status": "ok"},
"InstanceStatus": {"Status": "ok"},
}
ret = ec2_fitness.sort_by_ec2_fitness(
[mock_slave_1, mock_slave_2, mock_slave_3, mock_slave_4, mock_slave_5]
)
# we expect this order for the following reason:
# mock_slave_1 is impaired and so should be killed asap
# mock_slave_2 has an upcoming event
# mock_slave_5 and mock_slave_4 have the fewest batch tasks, and so should be killed before
    # mock_slave_3 (we can't drain batch tasks, so try and save them)
# mock_slave_5 has fewer tasks than mock_slave_4, and so is a better candidate for killing
assert ret == [mock_slave_3, mock_slave_4, mock_slave_5, mock_slave_2, mock_slave_1]
|
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .camera_legacy import async_setup_legacy_entry
from .camera_sdm import async_setup_sdm_entry
from .const import DATA_SDM
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the cameras."""
if DATA_SDM not in entry.data:
await async_setup_legacy_entry(hass, entry, async_add_entities)
return
await async_setup_sdm_entry(hass, entry, async_add_entities)
|
from __future__ import absolute_import
import re
xpath_tokenizer_re = re.compile(
"("
"'[^']*'|\"[^\"]*\"|"
"::|"
"//?|"
r"\.\.|"
r"\(\)|"
r"[/.*:\[\]\(\)@=])|"
r"((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
r"\s+"
)
def xpath_tokenizer(pattern, namespaces=None):
# ElementTree uses '', lxml used None originally.
default_namespace = (namespaces.get(None) or namespaces.get('')) if namespaces else None
parsing_attribute = False
for token in xpath_tokenizer_re.findall(pattern):
ttype, tag = token
if tag and tag[0] != "{":
if ":" in tag:
prefix, uri = tag.split(":", 1)
try:
if not namespaces:
raise KeyError
yield ttype, "{%s}%s" % (namespaces[prefix], uri)
except KeyError:
raise SyntaxError("prefix %r not found in prefix map" % prefix)
elif default_namespace and not parsing_attribute:
yield ttype, "{%s}%s" % (default_namespace, tag)
else:
yield token
parsing_attribute = False
else:
yield token
parsing_attribute = ttype == '@'
def prepare_child(next, token):
tag = token[1]
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
yield e
return select
def prepare_star(next, token):
def select(result):
for elem in result:
for e in elem.iterchildren('*'):
yield e
return select
def prepare_self(next, token):
def select(result):
return result
return select
def prepare_descendant(next, token):
token = next()
if token[0] == "*":
tag = "*"
elif not token[0]:
tag = token[1]
else:
raise SyntaxError("invalid descendant")
def select(result):
for elem in result:
for e in elem.iterdescendants(tag):
yield e
return select
def prepare_parent(next, token):
def select(result):
for elem in result:
parent = elem.getparent()
if parent is not None:
yield parent
return select
def prepare_predicate(next, token):
# FIXME: replace with real parser!!! refs:
# http://effbot.org/zone/simple-iterator-parser.htm
# http://javascript.crockford.com/tdop/tdop.html
signature = ''
predicate = []
while 1:
token = next()
if token[0] == "]":
break
if token == ('', ''):
# ignore whitespace
continue
if token[0] and token[0][:1] in "'\"":
token = "'", token[0][1:-1]
signature += token[0] or "-"
predicate.append(token[1])
# use signature to determine predicate type
if signature == "@-":
# [@attribute] predicate
key = predicate[1]
def select(result):
for elem in result:
if elem.get(key) is not None:
yield elem
return select
if signature == "@-='":
# [@attribute='value']
key = predicate[1]
value = predicate[-1]
def select(result):
for elem in result:
if elem.get(key) == value:
yield elem
return select
if signature == "-" and not re.match(r"-?\d+$", predicate[0]):
# [tag]
tag = predicate[0]
def select(result):
for elem in result:
for _ in elem.iterchildren(tag):
yield elem
break
return select
if signature == ".='" or (signature == "-='" and not re.match(r"-?\d+$", predicate[0])):
# [.='value'] or [tag='value']
tag = predicate[0]
value = predicate[-1]
if tag:
def select(result):
for elem in result:
for e in elem.iterchildren(tag):
if "".join(e.itertext()) == value:
yield elem
break
else:
def select(result):
for elem in result:
if "".join(elem.itertext()) == value:
yield elem
return select
if signature == "-" or signature == "-()" or signature == "-()-":
# [index] or [last()] or [last()-index]
if signature == "-":
# [index]
index = int(predicate[0]) - 1
if index < 0:
if index == -1:
raise SyntaxError(
"indices in path predicates are 1-based, not 0-based")
else:
raise SyntaxError("path index >= 1 expected")
else:
if predicate[0] != "last":
raise SyntaxError("unsupported function")
if signature == "-()-":
try:
index = int(predicate[2]) - 1
except ValueError:
raise SyntaxError("unsupported expression")
else:
index = -1
def select(result):
for elem in result:
parent = elem.getparent()
if parent is None:
continue
try:
# FIXME: what if the selector is "*" ?
elems = list(parent.iterchildren(elem.tag))
if elems[index] is elem:
yield elem
except IndexError:
pass
return select
raise SyntaxError("invalid predicate")
ops = {
"": prepare_child,
"*": prepare_star,
".": prepare_self,
"..": prepare_parent,
"//": prepare_descendant,
"[": prepare_predicate,
}
# --------------------------------------------------------------------
_cache = {}
def _build_path_iterator(path, namespaces):
"""compile selector pattern"""
if path[-1:] == "/":
path += "*" # implicit all (FIXME: keep this?)
cache_key = (path,)
if namespaces:
# lxml originally used None for the default namespace but ElementTree uses the
# more convenient (all-strings-dict) empty string, so we support both here,
# preferring the more convenient '', as long as they aren't ambiguous.
if None in namespaces:
if '' in namespaces and namespaces[None] != namespaces['']:
raise ValueError("Ambiguous default namespace provided: %r versus %r" % (
namespaces[None], namespaces['']))
cache_key += (namespaces[None],) + tuple(sorted(
item for item in namespaces.items() if item[0] is not None))
else:
cache_key += tuple(sorted(namespaces.items()))
try:
return _cache[cache_key]
except KeyError:
pass
if len(_cache) > 100:
_cache.clear()
if path[:1] == "/":
raise SyntaxError("cannot use absolute path on element")
stream = iter(xpath_tokenizer(path, namespaces))
try:
_next = stream.next
except AttributeError:
# Python 3
_next = stream.__next__
try:
token = _next()
except StopIteration:
raise SyntaxError("empty path expression")
selector = []
while 1:
try:
selector.append(ops[token[0]](_next, token))
except StopIteration:
raise SyntaxError("invalid path")
try:
token = _next()
if token[0] == "/":
token = _next()
except StopIteration:
break
_cache[cache_key] = selector
return selector
##
# Iterate over the matching nodes
def iterfind(elem, path, namespaces=None):
selector = _build_path_iterator(path, namespaces)
result = iter((elem,))
for select in selector:
result = select(result)
return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
it = iterfind(elem, path, namespaces)
try:
return next(it)
except StopIteration:
return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
return list(iterfind(elem, path, namespaces))
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
el = find(elem, path, namespaces)
if el is None:
return default
else:
return el.text or ''
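# Example usage (a sketch, assuming lxml-style elements that provide
# iterchildren/iterdescendants/getparent):
#   findall(root, "section/item[@id='42']")        # children matching a predicate
#   findtext(root, ".//title", default="untitled")  # first descendant's text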
|
try:
import yaml
except ImportError:
yaml = None
import diamond.collector
class PuppetAgentCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(PuppetAgentCollector,
self).get_default_config_help()
config_help.update({
'yaml_path': "Path to last_run_summary.yaml",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(PuppetAgentCollector, self).get_default_config()
config.update({
'yaml_path': '/var/lib/puppet/state/last_run_summary.yaml',
'path': 'puppetagent',
})
return config
    def _get_summary(self):
        # Use safe_load: yaml.load without an explicit Loader is deprecated
        # and can construct arbitrary objects from the summary file.
        with open(self.config['yaml_path'], 'r') as summary_fp:
            summary = yaml.safe_load(summary_fp)
        return summary
def collect(self):
if yaml is None:
self.log.error('Unable to import yaml')
return
summary = self._get_summary()
for sect, data in summary.iteritems():
for stat, value in data.iteritems():
if value is None or isinstance(value, basestring):
continue
metric = '.'.join([sect, stat])
self.publish(metric, value)
|
import requests
from nikola.plugin_categories import MarkdownExtension
from nikola.utils import get_logger
try:
from markdown.extensions import Extension
from markdown.inlinepatterns import Pattern
from markdown.util import AtomicString
from markdown.util import etree
except ImportError:
# No need to catch this, if you try to use this without Markdown,
# the markdown compiler will fail first
Extension = Pattern = object
LOGGER = get_logger('compile_markdown.mdx_gist')
GIST_JS_URL = "https://gist.github.com/{0}.js"
GIST_FILE_JS_URL = "https://gist.github.com/{0}.js?file={1}"
GIST_RAW_URL = "https://gist.githubusercontent.com/raw/{0}"
GIST_FILE_RAW_URL = "https://gist.githubusercontent.com/raw/{0}/{1}"
GIST_MD_RE = r'\[:gist:\s*(?P<gist_id>\S+)(?:\s*(?P<filename>.+?))?\s*\]'
GIST_RST_RE = r'(?m)^\.\.\s*gist::\s*(?P<gist_id>[^\]\s]+)(?:\s*(?P<filename>.+?))?\s*$'
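# Illustrative sources matched by the two patterns above (GIST_ID and the
# filename are placeholders):
#   [:gist: GIST_ID]            (Markdown inline form)
#   [:gist: GIST_ID zen.py]     (Markdown inline form with filename)
#   .. gist:: GIST_ID zen.py    (reST-style directive on its own line)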
class GistFetchException(Exception):
"""Raised when attempt to fetch content of a Gist from github.com fails."""
def __init__(self, url, status_code):
"""Initialize the exception."""
Exception.__init__(self)
self.message = 'Received a {0} response from Gist URL: {1}'.format(
status_code, url)
class GistPattern(Pattern):
"""InlinePattern for footnote markers in a document's body text."""
def __init__(self, pattern, configs):
"""Initialize the pattern."""
Pattern.__init__(self, pattern)
def get_raw_gist_with_filename(self, gist_id, filename):
"""Get raw gist text for a filename."""
url = GIST_FILE_RAW_URL.format(gist_id, filename)
resp = requests.get(url)
if not resp.ok:
raise GistFetchException(url, resp.status_code)
return resp.text
def get_raw_gist(self, gist_id):
"""Get raw gist text."""
url = GIST_RAW_URL.format(gist_id)
resp = requests.get(url)
if not resp.ok:
raise GistFetchException(url, resp.status_code)
return resp.text
def handleMatch(self, m):
"""Handle pattern match."""
gist_id = m.group('gist_id')
gist_file = m.group('filename')
gist_elem = etree.Element('div')
gist_elem.set('class', 'gist')
script_elem = etree.SubElement(gist_elem, 'script')
noscript_elem = etree.SubElement(gist_elem, 'noscript')
try:
if gist_file:
script_elem.set('src', GIST_FILE_JS_URL.format(
gist_id, gist_file))
raw_gist = (self.get_raw_gist_with_filename(
gist_id, gist_file))
else:
script_elem.set('src', GIST_JS_URL.format(gist_id))
raw_gist = (self.get_raw_gist(gist_id))
# Insert source as <pre/> within <noscript>
pre_elem = etree.SubElement(noscript_elem, 'pre')
pre_elem.text = AtomicString(raw_gist)
except GistFetchException as e:
LOGGER.warning(e.message)
warning_comment = etree.Comment(' WARNING: {0} '.format(e.message))
noscript_elem.append(warning_comment)
return gist_elem
class GistExtension(MarkdownExtension, Extension):
"""Gist extension for Markdown."""
    def __init__(self, configs=None):
        """Initialize the extension."""
        # set extension defaults
        self.config = {}
        # Override defaults with user settings; accept a dict or a list of
        # (key, value) pairs, and tolerate the None that makeExtension passes.
        for key, value in dict(configs or {}).items():
            self.setConfig(key, value)
def extendMarkdown(self, md, md_globals=None):
"""Extend Markdown."""
gist_md_pattern = GistPattern(GIST_MD_RE, self.getConfigs())
gist_md_pattern.md = md
md.inlinePatterns.register(gist_md_pattern, 'gist', 175)
gist_rst_pattern = GistPattern(GIST_RST_RE, self.getConfigs())
gist_rst_pattern.md = md
md.inlinePatterns.register(gist_rst_pattern, 'gist-rst', 176)
md.registerExtension(self)
def makeExtension(configs=None): # pragma: no cover
"""Make Markdown extension."""
return GistExtension(configs)
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=(doctest.NORMALIZE_WHITESPACE +
doctest.REPORT_NDIFF))
|
from arctic.arctic import Arctic
from arctic.store.metadata_store import MetadataStore
from arctic.store.version_store import VersionStore
def test_arctic(arctic):
assert isinstance(arctic, Arctic)
def test_library(library):
assert isinstance(library, VersionStore)
assert library._arctic_lib.get_library_type() == 'VersionStore'
def test_ms_lib(ms_lib):
assert isinstance(ms_lib, MetadataStore)
assert ms_lib._arctic_lib.get_library_type() == 'MetadataStore'
|
import proliphix
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_PASSWORD,
CONF_USERNAME,
PRECISION_TENTHS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
ATTR_FAN = "fan"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Proliphix thermostats."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
host = config.get(CONF_HOST)
pdp = proliphix.PDP(host, username, password)
pdp.update()
add_entities([ProliphixThermostat(pdp)], True)
class ProliphixThermostat(ClimateEntity):
"""Representation a Proliphix thermostat."""
def __init__(self, pdp):
"""Initialize the thermostat."""
self._pdp = pdp
self._name = None
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE
@property
def should_poll(self):
"""Set up polling needed for thermostat."""
return True
def update(self):
"""Update the data from the thermostat."""
self._pdp.update()
self._name = self._pdp.name
@property
def name(self):
"""Return the name of the thermostat."""
return self._name
@property
def precision(self):
"""Return the precision of the system.
Proliphix temperature values are passed back and forth in the
API as tenths of degrees F (i.e. 690 for 69 degrees).
"""
return PRECISION_TENTHS
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
return {ATTR_FAN: self._pdp.fan_state}
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
return self._pdp.cur_temp
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._pdp.setback
@property
def hvac_action(self):
"""Return the current state of the thermostat."""
state = self._pdp.hvac_state
if state == 1:
return CURRENT_HVAC_OFF
if state in (3, 4, 5):
return CURRENT_HVAC_HEAT
if state in (6, 7):
return CURRENT_HVAC_COOL
return CURRENT_HVAC_IDLE
@property
def hvac_mode(self):
"""Return the current state of the thermostat."""
if self._pdp.is_heating:
return HVAC_MODE_HEAT
if self._pdp.is_cooling:
return HVAC_MODE_COOL
return HVAC_MODE_OFF
@property
def hvac_modes(self):
"""Return available HVAC modes."""
return []
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
self._pdp.setback = temperature
|
import logging
from agent import AgentConnectionError, AgentError
from agent.a import Agent
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, SERVER_URL # pylint:disable=unused-import
from .helpers import generate_url
DEFAULT_PORT = 8090
_LOGGER = logging.getLogger(__name__)
class AgentFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle an Agent config flow."""
def __init__(self):
"""Initialize the Agent config flow."""
self.device_config = {}
async def async_step_user(self, user_input=None):
"""Handle an Agent config flow."""
errors = {}
if user_input is not None:
host = user_input[CONF_HOST]
port = user_input[CONF_PORT]
server_origin = generate_url(host, port)
agent_client = Agent(server_origin, async_get_clientsession(self.hass))
try:
await agent_client.update()
            except (AgentConnectionError, AgentError):
                pass
await agent_client.close()
if agent_client.is_available:
await self.async_set_unique_id(agent_client.unique)
self._abort_if_unique_id_configured(
updates={
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
SERVER_URL: server_origin,
}
)
self.device_config = {
CONF_HOST: host,
CONF_PORT: port,
SERVER_URL: server_origin,
}
return await self._create_entry(agent_client.name)
errors["base"] = "cannot_connect"
data = {
vol.Required(CONF_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
}
return self.async_show_form(
step_id="user",
description_placeholders=self.device_config,
data_schema=vol.Schema(data),
errors=errors,
)
async def _create_entry(self, server_name):
"""Create entry for device."""
return self.async_create_entry(title=server_name, data=self.device_config)
|
import json
import os
import pathlib
import re
import subprocess
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .util import get_current_branch, get_lokalise_token
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute()
CONTAINER_FILE = "/opt/src/build/translations-upload.json"
LANG_ISO = "en"
def run_upload_docker():
"""Run the Docker image to upload the translations."""
print("Running Docker to upload latest translations.")
run = subprocess.run(
[
"docker",
"run",
"-v",
f"{LOCAL_FILE}:{CONTAINER_FILE}",
"--rm",
f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
# Lokalise command
"lokalise2",
"--token",
get_lokalise_token(),
"--project-id",
CORE_PROJECT_ID,
"file",
"upload",
"--file",
CONTAINER_FILE,
"--lang-iso",
LANG_ISO,
"--convert-placeholders=false",
"--replace-modified",
],
)
print()
if run.returncode != 0:
raise ExitApp("Failed to download translations")
def generate_upload_data():
"""Generate the data for uploading."""
translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text())
translations["component"] = {}
for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
component = path.parent.name
match = FILENAME_FORMAT.search(path.name)
platform = match.group("suffix") if match else None
parent = translations["component"].setdefault(component, {})
if platform:
platforms = parent.setdefault("platform", {})
parent = platforms.setdefault(platform, {})
parent.update(json.loads(path.read_text()))
return translations
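# The mapping returned above roughly has this shape (a sketch):
# {
#     ...top-level strings.json content...,
#     "component": {
#         "<integration>": {
#             ...the integration's strings.json content...,
#             "platform": {"<suffix>": {...strings.<suffix>.json content...}},
#         },
#     },
# }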
def run():
"""Run the script."""
if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev":
raise ExitApp(
"Please only run the translations upload script from a clean checkout of dev."
)
translations = generate_upload_data()
LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True)
LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True))
run_upload_docker()
return 0
|
revision = "932525b82f1a"
down_revision = "7f71c0cea31a"
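# Rename the certificates "active" column to "notify"; downgrade reverts it.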
from alembic import op
def upgrade():
op.alter_column("certificates", "active", new_column_name="notify")
def downgrade():
op.alter_column("certificates", "notify", new_column_name="active")
|
import sys, os
sys.path = [os.path.abspath(os.path.dirname(__file__))] + sys.path
import pandas as pd
from sklearn.datasets import load_boston
from sklearn.metrics import brier_score_loss, mean_squared_error
from sklearn.model_selection import train_test_split
from auto_ml import Predictor
def get_boston_regression_dataset():
boston = load_boston()
df_boston = pd.DataFrame(boston.data)
df_boston.columns = boston.feature_names
df_boston['MEDV'] = boston['target']
df_boston_train, df_boston_test = train_test_split(df_boston, test_size=0.33, random_state=42)
return df_boston_train, df_boston_test
def get_titanic_binary_classification_dataset(basic=True):
try:
df_titanic = pd.read_csv(os.path.join('tests', 'titanic.csv'))
except Exception as e:
print('Error')
print(e)
dataset_url = 'http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic3.csv'
df_titanic = pd.read_csv(dataset_url)
# Do not write the index that pandas automatically creates
df_titanic.to_csv(os.path.join('tests', 'titanic.csv'), index=False)
df_titanic = df_titanic.drop(['boat', 'body'], axis=1)
    if basic:
df_titanic = df_titanic.drop(['name', 'ticket', 'cabin', 'home.dest'], axis=1)
df_titanic_train, df_titanic_test = train_test_split(df_titanic, test_size=0.33, random_state=42)
return df_titanic_train, df_titanic_test
def train_basic_binary_classifier(df_titanic_train):
column_descriptions = {
'survived': 'output'
, 'sex': 'categorical'
, 'embarked': 'categorical'
, 'pclass': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_titanic_train)
return ml_predictor
def train_basic_regressor(df_boston_train):
column_descriptions = {
'MEDV': 'output'
, 'CHAS': 'categorical'
}
ml_predictor = Predictor(type_of_estimator='regressor', column_descriptions=column_descriptions)
ml_predictor.train(df_boston_train, verbose=False)
return ml_predictor
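# The two metric helpers below negate their scores so that "greater is
# better", matching the scorer convention scikit-learn uses (an assumption
# about how the surrounding tests consume them).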
def calculate_rmse(actuals, preds):
return mean_squared_error(actuals, preds)**0.5 * -1
def calculate_brier_score_loss(actuals, probas):
return -1 * brier_score_loss(actuals, probas)
def get_twitter_sentiment_multilabel_classification_dataset():
file_name = os.path.join('tests', 'twitter_sentiment.csv')
try:
        df_twitter = pd.read_csv(file_name, encoding='latin-1', engine='python')
except Exception as e:
print('Error')
print(e)
dataset_url = 'https://raw.githubusercontent.com/ClimbsRocks/sample_datasets/master/twitter_airline_sentiment.csv'
df_twitter = pd.read_csv(dataset_url, encoding='latin-1')
# Do not write the index that pandas automatically creates
df_twitter.to_csv(file_name, index=False, encoding='latin-1')
    # Grab only 10% of the dataset - runs much faster this way
df_twitter = df_twitter.sample(frac=0.1)
df_twitter['tweet_created'] = pd.to_datetime(df_twitter.tweet_created)
df_twitter_train, df_twitter_test = train_test_split(df_twitter, test_size=0.33, random_state=42)
return df_twitter_train, df_twitter_test
def train_basic_multilabel_classifier(df_twitter_train):
column_descriptions = {
'airline_sentiment': 'output'
, 'airline': 'categorical'
, 'text': 'ignore'
, 'tweet_location': 'categorical'
, 'user_timezone': 'categorical'
, 'tweet_created': 'date'
}
ml_predictor = Predictor(type_of_estimator='classifier', column_descriptions=column_descriptions)
ml_predictor.train(df_twitter_train)
return ml_predictor
|
from __future__ import unicode_literals
from rules.SDrule import SDrule
from lib.data.data import pyoptions
def SB(sname, birth):
for _ in SDrule(sname, birth):
yield _
for sn in sname:
for bd in birth:
# {sname birth SNAME}
yield sn.lower() + bd + sn.upper()
yield sn.lower() + bd[2:] + sn.upper()
yield sn.lower() + bd[:4] + bd[4:].replace('0', '') + sn.upper()
for suf in pyoptions.sedb_trick_suf:
yield sn.lower() + bd + sn.upper() + suf
yield sn.lower() + bd[2:] + sn.upper() + suf
yield sn.lower() + bd[:4] + bd[4:].replace('0', '') + sn.upper() + suf
# You can continue to add new and useful rules
#
|
import os
import os.path
import io
import re
import sys
import enum
import json
import datetime
import traceback
import functools
import contextlib
import posixpath
import shlex
import glob
import mimetypes
import ctypes
import ctypes.util
from typing import Any, Callable, IO, Iterator, Optional, Sequence, Tuple, Type, Union
from PyQt5.QtCore import QUrl
from PyQt5.QtGui import QColor, QClipboard, QDesktopServices
from PyQt5.QtWidgets import QApplication
import pkg_resources
import yaml
try:
from yaml import (CSafeLoader as YamlLoader,
CSafeDumper as YamlDumper)
YAML_C_EXT = True
except ImportError: # pragma: no cover
from yaml import (SafeLoader as YamlLoader, # type: ignore[misc]
SafeDumper as YamlDumper)
YAML_C_EXT = False
import qutebrowser
from qutebrowser.utils import qtutils, log
fake_clipboard = None
log_clipboard = False
_resource_cache = {}
is_mac = sys.platform.startswith('darwin')
is_linux = sys.platform.startswith('linux')
is_windows = sys.platform.startswith('win')
is_posix = os.name == 'posix'
class Unreachable(Exception):
"""Raised when there was unreachable code."""
class ClipboardError(Exception):
"""Raised if the clipboard contents are unavailable for some reason."""
class SelectionUnsupportedError(ClipboardError):
"""Raised if [gs]et_clipboard is used and selection=True is unsupported."""
def __init__(self) -> None:
super().__init__("Primary selection is not supported on this "
"platform!")
class ClipboardEmptyError(ClipboardError):
"""Raised if get_clipboard is used and the clipboard is empty."""
def elide(text: str, length: int) -> str:
"""Elide text so it uses a maximum of length chars."""
if length < 1:
raise ValueError("length must be >= 1!")
if len(text) <= length:
return text
else:
return text[:length - 1] + '\u2026'
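# Example: elide("hello world", 5) returns "hell…" (four chars plus the
# ellipsis), while elide("hi", 5) returns "hi" unchanged.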
def elide_filename(filename: str, length: int) -> str:
"""Elide a filename to the given length.
    The difference to elide() is that the text is removed from
the middle instead of from the end. This preserves file name extensions.
Additionally, standard ASCII dots are used ("...") instead of the unicode
"…" (U+2026) so it works regardless of the filesystem encoding.
This function does not handle path separators.
Args:
filename: The filename to elide.
length: The maximum length of the filename, must be at least 3.
Return:
The elided filename.
"""
elidestr = '...'
if length < len(elidestr):
raise ValueError('length must be greater or equal to 3')
if len(filename) <= length:
return filename
# Account for '...'
length -= len(elidestr)
left = length // 2
right = length - left
if right == 0:
return filename[:left] + elidestr
else:
return filename[:left] + elidestr + filename[-right:]
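# Example: elide_filename("my_long_document.txt", 12) returns
# "my_l...t.txt", keeping the extension intact.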
def compact_text(text: str, elidelength: Optional[int] = None) -> str:
"""Remove leading whitespace and newlines from a text and maybe elide it.
Args:
text: The text to compact.
elidelength: To how many chars to elide.
"""
lines = []
for line in text.splitlines():
lines.append(line.strip())
out = ''.join(lines)
if elidelength is not None:
out = elide(out, elidelength)
return out
def preload_resources() -> None:
"""Load resource files into the cache."""
for subdir, pattern in [('html', '*.html'), ('javascript', '*.js')]:
path = resource_filename(subdir)
for full_path in glob.glob(os.path.join(path, pattern)):
sub_path = '/'.join([subdir, os.path.basename(full_path)])
_resource_cache[sub_path] = read_file(sub_path)
# FIXME:typing Return value should be bytes/str
def read_file(filename: str, binary: bool = False) -> Any:
"""Get the contents of a file contained with qutebrowser.
Args:
filename: The filename to open as string.
binary: Whether to return a binary string.
If False, the data is UTF-8-decoded.
Return:
The file contents as string.
"""
assert not posixpath.isabs(filename), filename
assert os.path.pardir not in filename.split(posixpath.sep), filename
if not binary and filename in _resource_cache:
return _resource_cache[filename]
if hasattr(sys, 'frozen'):
# PyInstaller doesn't support pkg_resources :(
# https://github.com/pyinstaller/pyinstaller/wiki/FAQ#misc
fn = os.path.join(os.path.dirname(sys.executable), filename)
if binary:
f: IO
with open(fn, 'rb') as f:
return f.read()
else:
with open(fn, 'r', encoding='utf-8') as f:
return f.read()
else:
data = pkg_resources.resource_string(
qutebrowser.__name__, filename)
if binary:
return data
return data.decode('UTF-8')
def resource_filename(filename: str) -> str:
"""Get the absolute filename of a file contained with qutebrowser.
Args:
filename: The filename.
Return:
The absolute filename.
"""
if hasattr(sys, 'frozen'):
return os.path.join(os.path.dirname(sys.executable), filename)
return pkg_resources.resource_filename(qutebrowser.__name__, filename)
def _get_color_percentage(x1: int, y1: int, z1: int, a1: int,
x2: int, y2: int, z2: int, a2: int,
percent: int) -> Tuple[int, int, int, int]:
"""Get a color which is percent% interpolated between start and end.
Args:
x1, y1, z1, a1 : Start color components (R, G, B, A / H, S, V, A / H, S, L, A)
x2, y2, z2, a2 : End color components (R, G, B, A / H, S, V, A / H, S, L, A)
percent: Percentage to interpolate, 0-100.
0: Start color will be returned.
100: End color will be returned.
Return:
A (x, y, z, alpha) tuple with the interpolated color components.
"""
if not 0 <= percent <= 100:
raise ValueError("percent needs to be between 0 and 100!")
x = round(x1 + (x2 - x1) * percent / 100)
y = round(y1 + (y2 - y1) * percent / 100)
z = round(z1 + (z2 - z1) * percent / 100)
a = round(a1 + (a2 - a1) * percent / 100)
return (x, y, z, a)
def interpolate_color(
start: QColor,
end: QColor,
percent: int,
colorspace: Optional[QColor.Spec] = QColor.Rgb
) -> QColor:
"""Get an interpolated color value.
Args:
start: The start color.
end: The end color.
percent: Which value to get (0 - 100)
colorspace: The desired interpolation color system,
QColor::{Rgb,Hsv,Hsl} (from QColor::Spec enum)
If None, start is used except when percent is 100.
Return:
The interpolated QColor, with the same spec as the given start color.
"""
qtutils.ensure_valid(start)
qtutils.ensure_valid(end)
if colorspace is None:
if percent == 100:
return QColor(*end.getRgb())
else:
return QColor(*start.getRgb())
out = QColor()
if colorspace == QColor.Rgb:
r1, g1, b1, a1 = start.getRgb()
r2, g2, b2, a2 = end.getRgb()
components = _get_color_percentage(r1, g1, b1, a1, r2, g2, b2, a2, percent)
out.setRgb(*components)
elif colorspace == QColor.Hsv:
h1, s1, v1, a1 = start.getHsv()
h2, s2, v2, a2 = end.getHsv()
components = _get_color_percentage(h1, s1, v1, a1, h2, s2, v2, a2, percent)
out.setHsv(*components)
elif colorspace == QColor.Hsl:
h1, s1, l1, a1 = start.getHsl()
h2, s2, l2, a2 = end.getHsl()
components = _get_color_percentage(h1, s1, l1, a1, h2, s2, l2, a2, percent)
out.setHsl(*components)
else:
raise ValueError("Invalid colorspace!")
out = out.convertTo(start.spec())
qtutils.ensure_valid(out)
return out
def format_seconds(total_seconds: int) -> str:
"""Format a count of seconds to get a [H:]M:SS string."""
prefix = '-' if total_seconds < 0 else ''
hours, rem = divmod(abs(round(total_seconds)), 3600)
minutes, seconds = divmod(rem, 60)
chunks = []
if hours:
chunks.append(str(hours))
min_format = '{:02}'
else:
min_format = '{}'
chunks.append(min_format.format(minutes))
chunks.append('{:02}'.format(seconds))
return prefix + ':'.join(chunks)
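# Examples: format_seconds(3661) == "1:01:01", format_seconds(45) == "0:45",
# format_seconds(-61) == "-1:01".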
def format_size(size: Optional[float], base: int = 1024, suffix: str = '') -> str:
"""Format a byte size so it's human readable.
Inspired by http://stackoverflow.com/q/1094841
"""
prefixes = ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
if size is None:
return '?.??' + suffix
for p in prefixes:
if -base < size < base:
return '{:.02f}{}{}'.format(size, p, suffix)
size /= base
return '{:.02f}{}{}'.format(size, prefixes[-1], suffix)
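# Examples: format_size(1024) == "1.00k", format_size(None) == "?.??",
# format_size(500, suffix="B") == "500.00B".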
class FakeIOStream(io.TextIOBase):
"""A fake file-like stream which calls a function for write-calls."""
def __init__(self, write_func: Callable[[str], int]) -> None:
super().__init__()
self.write = write_func # type: ignore[assignment]
@contextlib.contextmanager
def fake_io(write_func: Callable[[str], int]) -> Iterator[None]:
"""Run code with stdout and stderr replaced by FakeIOStreams.
Args:
write_func: The function to call when write is called.
"""
old_stdout = sys.stdout
old_stderr = sys.stderr
fake_stderr = FakeIOStream(write_func)
fake_stdout = FakeIOStream(write_func)
sys.stderr = fake_stderr # type: ignore[assignment]
sys.stdout = fake_stdout # type: ignore[assignment]
try:
yield
finally:
# If the code we did run did change sys.stdout/sys.stderr, we leave it
# unchanged. Otherwise, we reset it.
if sys.stdout is fake_stdout: # type: ignore[comparison-overlap]
sys.stdout = old_stdout
if sys.stderr is fake_stderr: # type: ignore[comparison-overlap]
sys.stderr = old_stderr
@contextlib.contextmanager
def disabled_excepthook() -> Iterator[None]:
"""Run code with the exception hook temporarily disabled."""
old_excepthook = sys.excepthook
sys.excepthook = sys.__excepthook__
try:
yield
finally:
# If the code we did run did change sys.excepthook, we leave it
# unchanged. Otherwise, we reset it.
if sys.excepthook is sys.__excepthook__:
sys.excepthook = old_excepthook
class prevent_exceptions: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to ignore and log exceptions.
This needs to be used for some places where PyQt segfaults on exceptions or
silently ignores them.
We used to re-raise the exception with a single-shot QTimer in a similar
    case, but that led to a strange problem with a KeyError with some random
jinja template stuff as content. For now, we only log it, so it doesn't
pass 100% silently.
This could also be a function, but as a class (with a "wrong" name) it's
much cleaner to implement.
Attributes:
_retval: The value to return in case of an exception.
_predicate: The condition which needs to be True to prevent exceptions
"""
def __init__(self, retval: Any, predicate: bool = True) -> None:
"""Save decorator arguments.
Gets called on parse-time with the decorator arguments.
Args:
See class attributes.
"""
self._retval = retval
self._predicate = predicate
def __call__(self, func: Callable) -> Callable:
"""Called when a function should be decorated.
Args:
func: The function to be decorated.
Return:
The decorated function.
"""
if not self._predicate:
return func
retval = self._retval
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> Any:
"""Call the original function."""
try:
return func(*args, **kwargs)
except BaseException:
log.misc.exception("Error in {}".format(qualname(func)))
return retval
return wrapper
def is_enum(obj: Any) -> bool:
"""Check if a given object is an enum."""
try:
return issubclass(obj, enum.Enum)
except TypeError:
return False
def get_repr(obj: Any, constructor: bool = False, **attrs: Any) -> str:
"""Get a suitable __repr__ string for an object.
Args:
obj: The object to get a repr for.
constructor: If True, show the Foo(one=1, two=2) form instead of
<Foo one=1 two=2>.
attrs: The attributes to add.
"""
cls = qualname(obj.__class__)
parts = []
items = sorted(attrs.items())
for name, val in items:
parts.append('{}={!r}'.format(name, val))
if constructor:
return '{}({})'.format(cls, ', '.join(parts))
else:
if parts:
return '<{} {}>'.format(cls, ' '.join(parts))
else:
return '<{}>'.format(cls)
def qualname(obj: Any) -> str:
"""Get the fully qualified name of an object.
Based on twisted.python.reflect.fullyQualifiedName.
Should work with:
- functools.partial objects
- functions
- classes
- methods
- modules
"""
if isinstance(obj, functools.partial):
obj = obj.func
if hasattr(obj, '__module__'):
prefix = '{}.'.format(obj.__module__)
else:
prefix = ''
if hasattr(obj, '__qualname__'):
return '{}{}'.format(prefix, obj.__qualname__)
elif hasattr(obj, '__name__'):
return '{}{}'.format(prefix, obj.__name__)
else:
return repr(obj)
_ExceptionType = Union[Type[BaseException], Tuple[Type[BaseException], ...]]
def raises(exc: _ExceptionType, func: Callable, *args: Any) -> bool:
"""Check if a function raises a given exception.
Args:
exc: A single exception or an iterable of exceptions.
func: A function to call.
*args: The arguments to pass to the function.
Returns:
True if the exception was raised, False otherwise.
"""
try:
func(*args)
except exc:
return True
else:
return False
def force_encoding(text: str, encoding: str) -> str:
"""Make sure a given text is encodable with the given encoding.
This replaces all chars not encodable with question marks.
"""
return text.encode(encoding, errors='replace').decode(encoding)
def sanitize_filename(name: str,
replacement: Optional[str] = '_',
shorten: bool = False) -> str:
"""Replace invalid filename characters.
Note: This should be used for the basename, as it also removes the path
separator.
Args:
name: The filename.
replacement: The replacement character (or None).
shorten: Shorten the filename if it's too long for the filesystem.
"""
if replacement is None:
replacement = ''
# Remove chars which can't be encoded in the filename encoding.
# See https://github.com/qutebrowser/qutebrowser/issues/427
encoding = sys.getfilesystemencoding()
name = force_encoding(name, encoding)
# See also
# https://en.wikipedia.org/wiki/Filename#Reserved_characters_and_words
if is_windows:
bad_chars = '\\/:*?"<>|'
elif is_mac:
# Colons can be confusing in finder https://superuser.com/a/326627
bad_chars = '/:'
else:
bad_chars = '/'
for bad_char in bad_chars:
name = name.replace(bad_char, replacement)
if not shorten:
return name
# Truncate the filename if it's too long.
# Most filesystems have a maximum filename length of 255 bytes:
# https://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits
# We also want to keep some space for QtWebEngine's ".download" suffix, as
# well as deduplication counters.
max_bytes = 255 - len("(123).download")
root, ext = os.path.splitext(name)
root = root[:max_bytes - len(ext)]
excess = len(os.fsencode(root + ext)) - max_bytes
while excess > 0 and root:
# Max 4 bytes per character is assumed.
# Integer division floors to -∞, not to 0.
root = root[:(-excess // 4)]
excess = len(os.fsencode(root + ext)) - max_bytes
if not root:
# Trimming the root is not enough. We must trim the extension.
# We leave one character in the root, so that the filename
# doesn't start with a dot, which makes the file hidden.
root = name[0]
excess = len(os.fsencode(root + ext)) - max_bytes
while excess > 0 and ext:
ext = ext[:(-excess // 4)]
excess = len(os.fsencode(root + ext)) - max_bytes
assert ext, name
name = root + ext
return name
def set_clipboard(data: str, selection: bool = False) -> None:
"""Set the clipboard to some given data."""
global fake_clipboard
if selection and not supports_selection():
raise SelectionUnsupportedError
if log_clipboard:
what = 'primary selection' if selection else 'clipboard'
log.misc.debug("Setting fake {}: {}".format(what, json.dumps(data)))
fake_clipboard = data
else:
mode = QClipboard.Selection if selection else QClipboard.Clipboard
QApplication.clipboard().setText(data, mode=mode)
def get_clipboard(selection: bool = False, fallback: bool = False) -> str:
"""Get data from the clipboard.
Args:
selection: Use the primary selection.
fallback: Fall back to the clipboard if primary selection is
unavailable.
"""
global fake_clipboard
if fallback and not selection:
raise ValueError("fallback given without selection!")
if selection and not supports_selection():
if fallback:
selection = False
else:
raise SelectionUnsupportedError
if fake_clipboard is not None:
data = fake_clipboard
fake_clipboard = None
else:
mode = QClipboard.Selection if selection else QClipboard.Clipboard
data = QApplication.clipboard().text(mode=mode)
target = "Primary selection" if selection else "Clipboard"
if not data.strip():
raise ClipboardEmptyError("{} is empty.".format(target))
log.misc.debug("{} contained: {!r}".format(target, data))
return data
def supports_selection() -> bool:
"""Check if the OS supports primary selection."""
return QApplication.clipboard().supportsSelection()
def open_file(filename: str, cmdline: Optional[str] = None) -> None:
"""Open the given file.
If cmdline is not given, downloads.open_dispatcher is used.
If open_dispatcher is unset, the system's default application is used.
Args:
filename: The filename to open.
cmdline: The command to use as string. A `{}` is expanded to the
filename. None means to use the system's default application
or `downloads.open_dispatcher` if set. If no `{}` is found,
the filename is appended to the cmdline.
"""
# Import late to avoid circular imports:
# - usertypes -> utils -> guiprocess -> message -> usertypes
# - usertypes -> utils -> config -> configdata -> configtypes ->
# cmdutils -> command -> message -> usertypes
from qutebrowser.config import config
from qutebrowser.misc import guiprocess
from qutebrowser.utils import version, message
# the default program to open downloads with - will be empty string
# if we want to use the default
override = config.val.downloads.open_dispatcher
if version.is_sandboxed():
if cmdline:
message.error("Cannot spawn download dispatcher from sandbox")
return
if override:
message.warning("Ignoring download dispatcher from config in "
"sandbox environment")
override = None
# precedence order: cmdline > downloads.open_dispatcher > openUrl
if cmdline is None and not override:
log.misc.debug("Opening {} with the system application"
.format(filename))
url = QUrl.fromLocalFile(filename)
QDesktopServices.openUrl(url)
return
if cmdline is None and override:
cmdline = override
assert cmdline is not None
cmd, *args = shlex.split(cmdline)
args = [arg.replace('{}', filename) for arg in args]
if '{}' not in cmdline:
args.append(filename)
log.misc.debug("Opening {} with {}"
.format(filename, [cmd] + args))
proc = guiprocess.GUIProcess(what='open-file')
proc.start_detached(cmd, args)
def unused(_arg: Any) -> None:
"""Function which does nothing to avoid pylint complaining."""
def expand_windows_drive(path: str) -> str:
r"""Expand a drive-path like E: into E:\.
Does nothing for other paths.
Args:
path: The path to expand.
"""
# Usually, "E:" on Windows refers to the current working directory on drive
    # E:\. The correct way to specify drive E: is "E:\", but most users
# probably don't use the "multiple working directories" feature and expect
# "E:" and "E:\" to be equal.
if re.fullmatch(r'[A-Z]:', path, re.IGNORECASE):
return path + "\\"
else:
return path
def yaml_load(f: Union[str, IO[str]]) -> Any:
"""Wrapper over yaml.load using the C loader if possible."""
start = datetime.datetime.now()
# WORKAROUND for https://github.com/yaml/pyyaml/pull/181
with log.py_warning_filter(
category=DeprecationWarning,
message=r"Using or importing the ABCs from 'collections' instead "
r"of from 'collections\.abc' is deprecated.*"):
try:
data = yaml.load(f, Loader=YamlLoader)
except ValueError as e:
if str(e).startswith('could not convert string to float'):
# WORKAROUND for https://github.com/yaml/pyyaml/issues/168
raise yaml.YAMLError(e)
raise # pragma: no cover
end = datetime.datetime.now()
delta = (end - start).total_seconds()
deadline = 10 if 'CI' in os.environ else 2
if delta > deadline: # pragma: no cover
log.misc.warning(
"YAML load took unusually long, please report this at "
"https://github.com/qutebrowser/qutebrowser/issues/2777\n"
"duration: {}s\n"
"PyYAML version: {}\n"
"C extension: {}\n"
"Stack:\n\n"
"{}".format(
delta, yaml.__version__, YAML_C_EXT,
''.join(traceback.format_stack())))
return data
def yaml_dump(data: Any, f: Optional[IO[str]] = None) -> Optional[str]:
"""Wrapper over yaml.dump using the C dumper if possible.
Also returns a str instead of bytes.
"""
yaml_data = yaml.dump(data, f, Dumper=YamlDumper, default_flow_style=False,
encoding='utf-8', allow_unicode=True)
if yaml_data is None:
return None
else:
return yaml_data.decode('utf-8')
def chunk(elems: Sequence, n: int) -> Iterator[Sequence]:
"""Yield successive n-sized chunks from elems.
If elems % n != 0, the last chunk will be smaller.
"""
if n < 1:
raise ValueError("n needs to be at least 1!")
for i in range(0, len(elems), n):
yield elems[i:i + n]
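# Example: list(chunk([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]].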
def guess_mimetype(filename: str, fallback: bool = False) -> str:
"""Guess a mimetype based on a filename.
Args:
filename: The filename to check.
fallback: Fall back to application/octet-stream if unknown.
"""
mimetype, _encoding = mimetypes.guess_type(filename)
if mimetype is None:
if fallback:
return 'application/octet-stream'
else:
raise ValueError("Got None mimetype for {}".format(filename))
return mimetype
def ceil_log(number: int, base: int) -> int:
"""Compute max(1, ceil(log(number, base))).
Use only integer arithmetic in order to avoid numerical error.
"""
if number < 1 or base < 2:
raise ValueError("math domain error")
result = 1
accum = base
while accum < number:
result += 1
accum *= base
return result
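# Examples: ceil_log(100, 10) == 2, ceil_log(101, 10) == 3, and
# ceil_log(1, 2) == 1 (the result is clamped to at least 1).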
def libgl_workaround() -> None:
"""Work around QOpenGLShaderProgram issues, especially for Nvidia.
See https://bugs.launchpad.net/ubuntu/+source/python-qt4/+bug/941826
"""
if os.environ.get('QUTE_SKIP_LIBGL_WORKAROUND'):
return
libgl = ctypes.util.find_library("GL")
if libgl is not None: # pragma: no branch
ctypes.CDLL(libgl, mode=ctypes.RTLD_GLOBAL)
|
import contextlib
import os
import posixpath
import sys
import threading
from tempfile import TemporaryDirectory
if os.name == "nt":
import ctypes
import ctypes.wintypes
import msvcrt
LOCKFILE_EXCLUSIVE_LOCK = 2
if ctypes.sizeof(ctypes.c_void_p) == 4:
ULONG_PTR = ctypes.c_uint32
else:
ULONG_PTR = ctypes.c_uint64
class Overlapped(ctypes.Structure):
_fields_ = [
("internal", ULONG_PTR),
("internal_high", ULONG_PTR),
("offset", ctypes.wintypes.DWORD),
("offset_high", ctypes.wintypes.DWORD),
("h_event", ctypes.wintypes.HANDLE)]
kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
lock_file_ex = kernel32.LockFileEx
lock_file_ex.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.POINTER(Overlapped)]
lock_file_ex.restype = ctypes.wintypes.BOOL
unlock_file_ex = kernel32.UnlockFileEx
unlock_file_ex.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.POINTER(Overlapped)]
unlock_file_ex.restype = ctypes.wintypes.BOOL
elif os.name == "posix":
import fcntl
HAVE_RENAMEAT2 = False
if sys.platform == "linux":
import ctypes
RENAME_EXCHANGE = 2
try:
renameat2 = ctypes.CDLL(None, use_errno=True).renameat2
except AttributeError:
pass
else:
HAVE_RENAMEAT2 = True
renameat2.argtypes = [
ctypes.c_int, ctypes.c_char_p,
ctypes.c_int, ctypes.c_char_p,
ctypes.c_uint]
renameat2.restype = ctypes.c_int
class RwLock:
"""A readers-Writer lock that locks a file."""
def __init__(self, path):
self._path = path
self._readers = 0
self._writer = False
self._lock = threading.Lock()
@property
def locked(self):
with self._lock:
if self._readers > 0:
return "r"
if self._writer:
return "w"
return ""
@contextlib.contextmanager
def acquire(self, mode):
if mode not in "rw":
raise ValueError("Invalid mode: %r" % mode)
with open(self._path, "w+") as lock_file:
if os.name == "nt":
handle = msvcrt.get_osfhandle(lock_file.fileno())
flags = LOCKFILE_EXCLUSIVE_LOCK if mode == "w" else 0
overlapped = Overlapped()
try:
if not lock_file_ex(handle, flags, 0, 1, 0, overlapped):
raise ctypes.WinError()
except OSError as e:
raise RuntimeError("Locking the storage failed: %s" %
e) from e
elif os.name == "posix":
_cmd = fcntl.LOCK_EX if mode == "w" else fcntl.LOCK_SH
try:
fcntl.flock(lock_file.fileno(), _cmd)
except OSError as e:
raise RuntimeError("Locking the storage failed: %s" %
e) from e
else:
raise RuntimeError("Locking the storage failed: "
"Unsupported operating system")
with self._lock:
                if self._writer or (mode == "w" and self._readers != 0):
raise RuntimeError("Locking the storage failed: "
"Guarantees failed")
if mode == "r":
self._readers += 1
else:
self._writer = True
try:
yield
finally:
with self._lock:
if mode == "r":
self._readers -= 1
self._writer = False
def rename_exchange(src, dst):
"""Exchange the files or directories `src` and `dst`.
Both `src` and `dst` must exist but may be of different types.
On Linux with renameat2 the operation is atomic.
On other platforms it's not atomic.
"""
src_dir, src_base = os.path.split(src)
dst_dir, dst_base = os.path.split(dst)
src_dir = src_dir or os.curdir
dst_dir = dst_dir or os.curdir
if not src_base or not dst_base:
raise ValueError("Invalid arguments: %r -> %r" % (src, dst))
if HAVE_RENAMEAT2:
src_base_bytes = os.fsencode(src_base)
dst_base_bytes = os.fsencode(dst_base)
src_dir_fd = os.open(src_dir, 0)
try:
dst_dir_fd = os.open(dst_dir, 0)
try:
if renameat2(src_dir_fd, src_base_bytes,
dst_dir_fd, dst_base_bytes,
RENAME_EXCHANGE) != 0:
errno = ctypes.get_errno()
raise OSError(errno, os.strerror(errno))
finally:
os.close(dst_dir_fd)
finally:
os.close(src_dir_fd)
else:
with TemporaryDirectory(
prefix=".Radicale.tmp-", dir=src_dir) as tmp_dir:
os.rename(dst, os.path.join(tmp_dir, "interim"))
os.rename(src, dst)
os.rename(os.path.join(tmp_dir, "interim"), src)
def fsync(fd):
if os.name == "posix" and hasattr(fcntl, "F_FULLFSYNC"):
fcntl.fcntl(fd, fcntl.F_FULLFSYNC)
else:
os.fsync(fd)
def strip_path(path):
assert sanitize_path(path) == path
return path.strip("/")
def unstrip_path(stripped_path, trailing_slash=False):
assert strip_path(sanitize_path(stripped_path)) == stripped_path
assert stripped_path or trailing_slash
path = "/%s" % stripped_path
if trailing_slash and not path.endswith("/"):
path += "/"
return path
def sanitize_path(path):
"""Make path absolute with leading slash to prevent access to other data.
Preserve potential trailing slash.
"""
trailing_slash = "/" if path.endswith("/") else ""
path = posixpath.normpath(path)
new_path = "/"
for part in path.split("/"):
if not is_safe_path_component(part):
continue
new_path = posixpath.join(new_path, part)
trailing_slash = "" if new_path.endswith("/") else trailing_slash
return new_path + trailing_slash
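# Illustrative behaviour (not part of the original module):
#     sanitize_path("../secret") -> "/secret"  (the unsafe ".." is dropped)
#     sanitize_path("a/../b/")   -> "/b/"      (normalized, trailing slash kept)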
def is_safe_path_component(path):
"""Check if path is a single component of a path.
Check that the path is safe to join too.
"""
return path and "/" not in path and path not in (".", "..")
def is_safe_filesystem_path_component(path):
"""Check if path is a single component of a local and posix filesystem
path.
Check that the path is safe to join too.
"""
return (
path and not os.path.splitdrive(path)[0] and
not os.path.split(path)[0] and path not in (os.curdir, os.pardir) and
not path.startswith(".") and not path.endswith("~") and
is_safe_path_component(path))
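# Illustrative behaviour (not part of the original module): "event.ics" is
# accepted, while "sub/dir", "..", ".hidden" and "backup~" are all rejected.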
def path_to_filesystem(root, sane_path):
"""Convert `sane_path` to a local filesystem path relative to `root`.
    `root` must be a secure filesystem path; it will be prepended to the path.
`sane_path` must be a sanitized path without leading or trailing ``/``.
    Conversion of `sane_path` is done in a secure manner;
    a ``ValueError`` is raised if it cannot be done safely.
"""
assert sane_path == strip_path(sanitize_path(sane_path))
safe_path = root
parts = sane_path.split("/") if sane_path else []
for part in parts:
if not is_safe_filesystem_path_component(part):
raise UnsafePathError(part)
safe_path_parent = safe_path
safe_path = os.path.join(safe_path, part)
# Check for conflicting files (e.g. case-insensitive file systems
# or short names on Windows file systems)
if (os.path.lexists(safe_path) and
part not in (e.name for e in
os.scandir(safe_path_parent))):
raise CollidingPathError(part)
return safe_path
class UnsafePathError(ValueError):
def __init__(self, path):
message = "Can't translate name safely to filesystem: %r" % path
super().__init__(message)
class CollidingPathError(ValueError):
def __init__(self, path):
message = "File name collision: %r" % path
super().__init__(message)
def name_from_path(path, collection):
"""Return Radicale item name from ``path``."""
assert sanitize_path(path) == path
start = unstrip_path(collection.path, True)
if not (path + "/").startswith(start):
raise ValueError("%r doesn't start with %r" % (path, start))
name = path[len(start):]
if name and not is_safe_path_component(name):
raise ValueError("%r is not a component in collection %r" %
(name, collection.path))
return name
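# Illustrative behaviour (not part of the original module): for a collection
# whose path is "user/calendar":
#     name_from_path("/user/calendar/event.ics", collection) -> "event.ics"
#     name_from_path("/user/calendar/", collection)          -> ""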
|
import datetime
import re
import sys
import git
URL = 'https://github.com/miguelgrinberg/flask-migrate'
merges = {}
def format_message(commit):
if commit.message.startswith('Version '):
return ''
if '#nolog' in commit.message:
return ''
if commit.message.startswith('Merge pull request'):
pr = commit.message.split('#')[1].split(' ')[0]
message = ' '.join([line for line in [line.strip() for line in commit.message.split('\n')[1:]] if line])
merges[message] = pr
return ''
if commit.message.startswith('Release '):
return '\n**{message}** - {date}\n'.format(
message=commit.message.strip(),
date=datetime.datetime.fromtimestamp(commit.committed_date).strftime('%Y-%m-%d'))
message = ' '.join([line for line in [line.strip() for line in commit.message.split('\n')] if line])
if message in merges:
message += ' #' + merges[message]
message = re.sub('\\(.*(#[0-9]+)\\)', '\\1', message)
message = re.sub('Fixes (#[0-9]+)', '\\1', message)
message = re.sub('fixes (#[0-9]+)', '\\1', message)
message = re.sub('#([0-9]+)', '[#\\1]({url}/issues/\\1)'.format(url=URL), message)
message += ' ([commit]({url}/commit/{sha}))'.format(url=URL, sha=str(commit))
if commit.author.name != 'Miguel Grinberg':
message += ' (thanks **{name}**!)'.format(name=commit.author.name)
return '- ' + message
def main(all=False):
repo = git.Repo()
for commit in repo.iter_commits():
if not all and commit.message.startswith('Release '):
break
message = format_message(commit)
if message:
print(message)
if __name__ == '__main__':
main(all=len(sys.argv) > 1 and sys.argv[1] == 'all')
|
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
STATE_UNKNOWN,
TIME_DAYS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle, dt
_LOGGER = logging.getLogger(__name__)
ATTR_CURRENT_VERSION = "current_version"
ATTR_LAST_RESTART = "last_restart"
ATTR_LOCAL_IP = "local_ip"
ATTR_NEW_VERSION = "new_version"
ATTR_STATUS = "status"
ATTR_UPTIME = "uptime"
DEFAULT_HOST = "testwifi.here"
DEFAULT_NAME = "google_wifi"
ENDPOINT = "/api/v1/status"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=1)
MONITORED_CONDITIONS = {
ATTR_CURRENT_VERSION: [
["software", "softwareVersion"],
None,
"mdi:checkbox-marked-circle-outline",
],
ATTR_NEW_VERSION: [["software", "updateNewVersion"], None, "mdi:update"],
ATTR_UPTIME: [["system", "uptime"], TIME_DAYS, "mdi:timelapse"],
ATTR_LAST_RESTART: [["system", "uptime"], None, "mdi:restart"],
ATTR_LOCAL_IP: [["wan", "localIpAddress"], None, "mdi:access-point-network"],
ATTR_STATUS: [["wan", "online"], None, "mdi:google"],
}
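# Each MONITORED_CONDITIONS value is [path into the JSON status response,
# unit of measurement (or None), icon].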
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(MONITORED_CONDITIONS)
): vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Google Wifi sensor."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
conditions = config.get(CONF_MONITORED_CONDITIONS)
api = GoogleWifiAPI(host, conditions)
dev = []
for condition in conditions:
dev.append(GoogleWifiSensor(api, name, condition))
add_entities(dev, True)
class GoogleWifiSensor(Entity):
"""Representation of a Google Wifi sensor."""
def __init__(self, api, name, variable):
"""Initialize a Google Wifi sensor."""
self._api = api
self._name = name
self._state = None
variable_info = MONITORED_CONDITIONS[variable]
self._var_name = variable
self._var_units = variable_info[1]
self._var_icon = variable_info[2]
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._name}_{self._var_name}"
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._var_icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._var_units
@property
def available(self):
"""Return availability of Google Wifi API."""
return self._api.available
@property
def state(self):
"""Return the state of the device."""
return self._state
def update(self):
"""Get the latest data from the Google Wifi API."""
self._api.update()
if self.available:
self._state = self._api.data[self._var_name]
else:
self._state = None
class GoogleWifiAPI:
"""Get the latest data and update the states."""
def __init__(self, host, conditions):
"""Initialize the data object."""
uri = "http://"
resource = f"{uri}{host}{ENDPOINT}"
self._request = requests.Request("GET", resource).prepare()
self.raw_data = None
self.conditions = conditions
self.data = {
ATTR_CURRENT_VERSION: STATE_UNKNOWN,
ATTR_NEW_VERSION: STATE_UNKNOWN,
ATTR_UPTIME: STATE_UNKNOWN,
ATTR_LAST_RESTART: STATE_UNKNOWN,
ATTR_LOCAL_IP: STATE_UNKNOWN,
ATTR_STATUS: STATE_UNKNOWN,
}
self.available = True
self.update()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from the router."""
try:
with requests.Session() as sess:
response = sess.send(self._request, timeout=10)
self.raw_data = response.json()
self.data_format()
self.available = True
except (ValueError, requests.exceptions.ConnectionError):
_LOGGER.warning("Unable to fetch data from Google Wifi")
self.available = False
self.raw_data = None
def data_format(self):
"""Format raw data into easily accessible dict."""
for attr_key in self.conditions:
value = MONITORED_CONDITIONS[attr_key]
try:
primary_key = value[0][0]
sensor_key = value[0][1]
if primary_key in self.raw_data:
sensor_value = self.raw_data[primary_key][sensor_key]
# Format sensor for better readability
if attr_key == ATTR_NEW_VERSION and sensor_value == "0.0.0.0":
sensor_value = "Latest"
elif attr_key == ATTR_UPTIME:
sensor_value = round(sensor_value / (3600 * 24), 2)
elif attr_key == ATTR_LAST_RESTART:
last_restart = dt.now() - timedelta(seconds=sensor_value)
sensor_value = last_restart.strftime("%Y-%m-%d %H:%M:%S")
elif attr_key == ATTR_STATUS:
if sensor_value:
sensor_value = "Online"
else:
sensor_value = "Offline"
elif attr_key == ATTR_LOCAL_IP:
if not self.raw_data["wan"]["online"]:
sensor_value = STATE_UNKNOWN
self.data[attr_key] = sensor_value
except KeyError:
_LOGGER.error(
"Router does not support %s field. "
"Please remove %s from monitored_conditions",
sensor_key,
attr_key,
)
self.data[attr_key] = STATE_UNKNOWN
|
import pytest
from homeassistant.components.media_player.const import DOMAIN as MP_DOMAIN
from homeassistant.components.vizio.const import DOMAIN
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID
from tests.common import MockConfigEntry
async def test_setup_component(
hass: HomeAssistantType,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test component setup."""
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG}
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 1
async def test_tv_load_and_unload(
hass: HomeAssistantType,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test loading and unloading TV entry."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 1
assert DOMAIN in hass.data
assert await config_entry.async_unload(hass)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 0
assert DOMAIN not in hass.data
async def test_speaker_load_and_unload(
hass: HomeAssistantType,
vizio_connect: pytest.fixture,
vizio_update: pytest.fixture,
) -> None:
"""Test loading and unloading speaker entry."""
config_entry = MockConfigEntry(
domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 1
assert DOMAIN in hass.data
assert await config_entry.async_unload(hass)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(MP_DOMAIN)) == 0
assert DOMAIN not in hass.data
|
from dynalite_devices_lib.dynalite_devices import (
CONF_AREA as dyn_CONF_AREA,
CONF_PRESET as dyn_CONF_PRESET,
NOTIFICATION_PACKET,
NOTIFICATION_PRESET,
DynaliteNotification,
)
from homeassistant.components import dynalite
from homeassistant.components.dynalite.const import (
ATTR_AREA,
ATTR_HOST,
ATTR_PACKET,
ATTR_PRESET,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import MockConfigEntry
async def test_update_device(hass):
"""Test that update works."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
        # Not waiting, so it adds the devices before registration
update_device_func = mock_dyn_dev.mock_calls[1][2]["update_device_func"]
device = Mock()
device.unique_id = "abcdef"
wide_func = Mock()
async_dispatcher_connect(hass, f"dynalite-update-{host}", wide_func)
specific_func = Mock()
async_dispatcher_connect(
hass, f"dynalite-update-{host}-{device.unique_id}", specific_func
)
update_device_func()
await hass.async_block_till_done()
wide_func.assert_called_once()
specific_func.assert_not_called()
update_device_func(device)
await hass.async_block_till_done()
wide_func.assert_called_once()
specific_func.assert_called_once()
async def test_add_devices_then_register(hass):
"""Test that add_devices work."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
        # Not waiting, so it adds the devices before registration
new_device_func = mock_dyn_dev.mock_calls[1][2]["new_device_func"]
# Now with devices
device1 = Mock()
device1.category = "light"
device1.name = "NAME"
device1.unique_id = "unique1"
device2 = Mock()
device2.category = "switch"
device2.name = "NAME2"
device2.unique_id = "unique2"
new_device_func([device1, device2])
device3 = Mock()
device3.category = "switch"
device3.name = "NAME3"
device3.unique_id = "unique3"
new_device_func([device3])
await hass.async_block_till_done()
assert hass.states.get("light.name")
assert hass.states.get("switch.name2")
assert hass.states.get("switch.name3")
async def test_register_then_add_devices(hass):
"""Test that add_devices work after register_add_entities."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
new_device_func = mock_dyn_dev.mock_calls[1][2]["new_device_func"]
# Now with devices
device1 = Mock()
device1.category = "light"
device1.name = "NAME"
device1.unique_id = "unique1"
device2 = Mock()
device2.category = "switch"
device2.name = "NAME2"
device2.unique_id = "unique2"
new_device_func([device1, device2])
await hass.async_block_till_done()
assert hass.states.get("light.name")
assert hass.states.get("switch.name2")
async def test_notifications(hass):
"""Test that update works."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
notification_func = mock_dyn_dev.mock_calls[1][2]["notification_func"]
event_listener1 = Mock()
hass.bus.async_listen("dynalite_packet", event_listener1)
packet = [5, 4, 3, 2]
notification_func(
DynaliteNotification(NOTIFICATION_PACKET, {NOTIFICATION_PACKET: packet})
)
await hass.async_block_till_done()
event_listener1.assert_called_once()
my_event = event_listener1.mock_calls[0][1][0]
assert my_event.data[ATTR_HOST] == host
assert my_event.data[ATTR_PACKET] == packet
event_listener2 = Mock()
hass.bus.async_listen("dynalite_preset", event_listener2)
notification_func(
DynaliteNotification(
NOTIFICATION_PRESET, {dyn_CONF_AREA: 7, dyn_CONF_PRESET: 2}
)
)
await hass.async_block_till_done()
event_listener2.assert_called_once()
my_event = event_listener2.mock_calls[0][1][0]
assert my_event.data[ATTR_HOST] == host
assert my_event.data[ATTR_AREA] == 7
assert my_event.data[ATTR_PRESET] == 2
|
import xml.etree.ElementTree as ET
from http import client
from radicale import app, httputils, storage, xmlutils
def xml_delete(base_prefix, path, collection, href=None):
"""Read and answer DELETE requests.
    See RFC 4918, section 9.6 for details.
"""
collection.delete(href)
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
response = ET.Element(xmlutils.make_clark("D:response"))
multistatus.append(response)
href = ET.Element(xmlutils.make_clark("D:href"))
href.text = xmlutils.make_href(base_prefix, path)
response.append(href)
status = ET.Element(xmlutils.make_clark("D:status"))
status.text = xmlutils.make_response(200)
response.append(status)
return multistatus
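# The serialized multistatus produced above looks roughly like this
# (illustrative; namespace prefixes depend on the serializer):
#
# <D:multistatus xmlns:D="DAV:">
#   <D:response>
#     <D:href>/path/to/item</D:href>
#     <D:status>HTTP/1.1 200 OK</D:status>
#   </D:response>
# </D:multistatus>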
class ApplicationDeleteMixin:
def do_DELETE(self, environ, base_prefix, path, user):
"""Manage DELETE request."""
access = app.Access(self._rights, user, path)
if not access.check("w"):
return httputils.NOT_ALLOWED
with self._storage.acquire_lock("w", user):
item = next(self._storage.discover(path), None)
if not item:
return httputils.NOT_FOUND
if not access.check("w", item):
return httputils.NOT_ALLOWED
if_match = environ.get("HTTP_IF_MATCH", "*")
if if_match not in ("*", item.etag):
# ETag precondition not verified, do not delete item
return httputils.PRECONDITION_FAILED
if isinstance(item, storage.BaseCollection):
xml_answer = xml_delete(base_prefix, path, item)
else:
xml_answer = xml_delete(
base_prefix, path, item.collection, item.href)
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
return client.OK, headers, self._xml_response(xml_answer)
|
from datetime import timedelta
import librouteros
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import mikrotik
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
DEMO_USER_INPUT = {
CONF_NAME: "Home router",
CONF_HOST: "0.0.0.0",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 8278,
CONF_VERIFY_SSL: False,
}
DEMO_CONFIG = {
CONF_NAME: "Home router",
CONF_HOST: "0.0.0.0",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 8278,
CONF_VERIFY_SSL: False,
mikrotik.const.CONF_FORCE_DHCP: False,
mikrotik.CONF_ARP_PING: False,
mikrotik.CONF_DETECTION_TIME: timedelta(seconds=30),
}
DEMO_CONFIG_ENTRY = {
CONF_NAME: "Home router",
CONF_HOST: "0.0.0.0",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 8278,
CONF_VERIFY_SSL: False,
mikrotik.const.CONF_FORCE_DHCP: False,
mikrotik.CONF_ARP_PING: False,
mikrotik.CONF_DETECTION_TIME: 30,
}
@pytest.fixture(name="api")
def mock_mikrotik_api():
"""Mock an api."""
with patch("librouteros.connect"):
yield
@pytest.fixture(name="auth_error")
def mock_api_authentication_error():
"""Mock an api."""
with patch(
"librouteros.connect",
side_effect=librouteros.exceptions.TrapError("invalid user name or password"),
):
yield
@pytest.fixture(name="conn_error")
def mock_api_connection_error():
"""Mock an api."""
with patch(
"librouteros.connect", side_effect=librouteros.exceptions.ConnectionClosed
):
yield
async def test_import(hass, api):
"""Test import step."""
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "import"}, data=DEMO_CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Home router"
assert result["data"][CONF_NAME] == "Home router"
assert result["data"][CONF_HOST] == "0.0.0.0"
assert result["data"][CONF_USERNAME] == "username"
assert result["data"][CONF_PASSWORD] == "password"
assert result["data"][CONF_PORT] == 8278
assert result["data"][CONF_VERIFY_SSL] is False
async def test_flow_works(hass, api):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=DEMO_USER_INPUT
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Home router"
assert result["data"][CONF_NAME] == "Home router"
assert result["data"][CONF_HOST] == "0.0.0.0"
assert result["data"][CONF_USERNAME] == "username"
assert result["data"][CONF_PASSWORD] == "password"
assert result["data"][CONF_PORT] == 8278
async def test_options(hass):
"""Test updating options."""
entry = MockConfigEntry(domain=mikrotik.DOMAIN, data=DEMO_CONFIG_ENTRY)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "device_tracker"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
mikrotik.CONF_DETECTION_TIME: 30,
mikrotik.CONF_ARP_PING: True,
mikrotik.const.CONF_FORCE_DHCP: False,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
mikrotik.CONF_DETECTION_TIME: 30,
mikrotik.CONF_ARP_PING: True,
mikrotik.const.CONF_FORCE_DHCP: False,
}
async def test_host_already_configured(hass, auth_error):
"""Test host already configured."""
entry = MockConfigEntry(domain=mikrotik.DOMAIN, data=DEMO_CONFIG_ENTRY)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=DEMO_USER_INPUT
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_name_exists(hass, api):
"""Test name already configured."""
entry = MockConfigEntry(domain=mikrotik.DOMAIN, data=DEMO_CONFIG_ENTRY)
entry.add_to_hass(hass)
user_input = DEMO_USER_INPUT.copy()
user_input[CONF_HOST] = "0.0.0.1"
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=user_input
)
assert result["type"] == "form"
assert result["errors"] == {CONF_NAME: "name_exists"}
async def test_connection_error(hass, conn_error):
"""Test error when connection is unsuccessful."""
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=DEMO_USER_INPUT
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_wrong_credentials(hass, auth_error):
"""Test error when credentials are wrong."""
result = await hass.config_entries.flow.async_init(
mikrotik.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=DEMO_USER_INPUT
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {
CONF_USERNAME: "invalid_auth",
CONF_PASSWORD: "invalid_auth",
}
|
from homeassistant.components import dynalite
from homeassistant.helpers import entity_registry
from tests.async_mock import AsyncMock, Mock, call, patch
from tests.common import MockConfigEntry
ATTR_SERVICE = "service"
ATTR_METHOD = "method"
ATTR_ARGS = "args"
def create_mock_device(platform, spec):
"""Create a dynalite mock device for a platform according to a spec."""
device = Mock(spec=spec)
device.category = platform
device.unique_id = "UNIQUE"
device.name = "NAME"
device.device_class = "Device Class"
return device
async def get_entry_id_from_hass(hass):
"""Get the config entry id from hass."""
ent_reg = await entity_registry.async_get_registry(hass)
assert ent_reg
conf_entries = hass.config_entries.async_entries(dynalite.DOMAIN)
assert len(conf_entries) == 1
return conf_entries[0].entry_id
async def create_entity_from_device(hass, device):
"""Set up the component and platform and create a light based on the device provided."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
new_device_func = mock_dyn_dev.mock_calls[1][2]["new_device_func"]
new_device_func([device])
await hass.async_block_till_done()
return mock_dyn_dev.mock_calls[1][2]["update_device_func"]
async def run_service_tests(hass, device, platform, services):
"""Run a series of service calls and check that the entity and device behave correctly."""
for cur_item in services:
service = cur_item[ATTR_SERVICE]
args = cur_item.get(ATTR_ARGS, {})
service_data = {"entity_id": f"{platform}.name", **args}
await hass.services.async_call(platform, service, service_data, blocking=True)
await hass.async_block_till_done()
for check_item in services:
check_method = getattr(device, check_item[ATTR_METHOD])
if check_item[ATTR_SERVICE] == service:
check_method.assert_called_once()
assert check_method.mock_calls == [call(**args)]
check_method.reset_mock()
else:
check_method.assert_not_called()
|
from gogogate2_api import GogoGate2Api
from gogogate2_api.common import ApiError
from gogogate2_api.const import GogoGate2ApiErrorCode
from homeassistant import config_entries, setup
from homeassistant.components.gogogate2.const import (
DEVICE_TYPE_GOGOGATE2,
DEVICE_TYPE_ISMARTGATE,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import (
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, RESULT_TYPE_FORM
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry
MOCK_MAC_ADDR = "AA:BB:CC:DD:EE:FF"
@patch("homeassistant.components.gogogate2.async_setup", return_value=True)
@patch("homeassistant.components.gogogate2.async_setup_entry", return_value=True)
@patch("homeassistant.components.gogogate2.common.GogoGate2Api")
async def test_auth_fail(
gogogate2api_mock, async_setup_entry_mock, async_setup_mock, hass: HomeAssistant
) -> None:
"""Test authorization failures."""
api: GogoGate2Api = MagicMock(spec=GogoGate2Api)
gogogate2api_mock.return_value = api
api.reset_mock()
api.info.side_effect = ApiError(GogoGate2ApiErrorCode.CREDENTIALS_INCORRECT, "blah")
result = await hass.config_entries.flow.async_init(
"gogogate2", context={"source": SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.0.2",
CONF_USERNAME: "user0",
CONF_PASSWORD: "password0",
},
)
assert result
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {
"base": "invalid_auth",
}
api.reset_mock()
api.info.side_effect = Exception("Generic connection error.")
result = await hass.config_entries.flow.async_init(
"gogogate2", context={"source": SOURCE_USER}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_DEVICE: DEVICE_TYPE_GOGOGATE2,
CONF_IP_ADDRESS: "127.0.0.2",
CONF_USERNAME: "user0",
CONF_PASSWORD: "password0",
},
)
assert result
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_form_homekit_unique_id_already_setup(hass):
"""Test that we abort from homekit if gogogate2 is already setup."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"host": "1.2.3.4", "properties": {"id": MOCK_MAC_ADDR}},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {}
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == MOCK_MAC_ADDR
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_IP_ADDRESS: "1.2.3.4", CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"host": "1.2.3.4", "properties": {"id": MOCK_MAC_ADDR}},
)
assert result["type"] == RESULT_TYPE_ABORT
async def test_form_homekit_ip_address_already_setup(hass):
"""Test that we abort from homekit if gogogate2 is already setup."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_IP_ADDRESS: "1.2.3.4", CONF_USERNAME: "mock", CONF_PASSWORD: "mock"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"host": "1.2.3.4", "properties": {"id": MOCK_MAC_ADDR}},
)
assert result["type"] == RESULT_TYPE_ABORT
async def test_form_homekit_ip_address(hass):
"""Test homekit includes the defaults ip address."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HOMEKIT},
data={"host": "1.2.3.4", "properties": {"id": MOCK_MAC_ADDR}},
)
assert result["type"] == RESULT_TYPE_FORM
assert result["errors"] == {}
data_schema = result["data_schema"]
assert data_schema({CONF_USERNAME: "username", CONF_PASSWORD: "password"}) == {
CONF_DEVICE: DEVICE_TYPE_ISMARTGATE,
CONF_IP_ADDRESS: "1.2.3.4",
CONF_PASSWORD: "password",
CONF_USERNAME: "username",
}
|
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntity
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
SUPPORT_ALARM_TRIGGER,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from tests.common import MockEntity
ENTITIES = {}
def init(empty=False):
"""Initialize the platform with entities."""
global ENTITIES
ENTITIES = (
{}
if empty
else {
"arm_code": MockAlarm(
name="Alarm arm code",
code_arm_required=True,
unique_id="unique_arm_code",
),
"no_arm_code": MockAlarm(
name="Alarm no arm code",
code_arm_required=False,
unique_id="unique_no_arm_code",
),
}
)
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Return mock entities."""
async_add_entities_callback(list(ENTITIES.values()))
class MockAlarm(MockEntity, AlarmControlPanelEntity):
"""Mock Alarm control panel class."""
def __init__(self, **values):
"""Init the Mock Alarm Control Panel."""
self._state = None
MockEntity.__init__(self, **values)
@property
def code_arm_required(self):
"""Whether the code is required for arm actions."""
return self._handle("code_arm_required")
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return (
SUPPORT_ALARM_ARM_HOME
| SUPPORT_ALARM_ARM_AWAY
| SUPPORT_ALARM_ARM_NIGHT
| SUPPORT_ALARM_TRIGGER
)
def alarm_arm_away(self, code=None):
"""Send arm away command."""
self._state = STATE_ALARM_ARMED_AWAY
self.async_write_ha_state()
def alarm_arm_home(self, code=None):
"""Send arm home command."""
self._state = STATE_ALARM_ARMED_HOME
self.async_write_ha_state()
def alarm_arm_night(self, code=None):
"""Send arm night command."""
self._state = STATE_ALARM_ARMED_NIGHT
self.async_write_ha_state()
def alarm_disarm(self, code=None):
"""Send disarm command."""
if code == "1234":
self._state = STATE_ALARM_DISARMED
self.async_write_ha_state()
def alarm_trigger(self, code=None):
"""Send alarm trigger command."""
self._state = STATE_ALARM_TRIGGERED
self.async_write_ha_state()
|
from datetime import timedelta
import logging
import iperf3
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_HOSTS,
CONF_MONITORED_CONDITIONS,
CONF_PORT,
CONF_PROTOCOL,
CONF_SCAN_INTERVAL,
DATA_RATE_MEGABITS_PER_SECOND,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
DOMAIN = "iperf3"
DATA_UPDATED = f"{DOMAIN}_data_updated"
_LOGGER = logging.getLogger(__name__)
CONF_DURATION = "duration"
CONF_PARALLEL = "parallel"
CONF_MANUAL = "manual"
DEFAULT_DURATION = 10
DEFAULT_PORT = 5201
DEFAULT_PARALLEL = 1
DEFAULT_PROTOCOL = "tcp"
DEFAULT_INTERVAL = timedelta(minutes=60)
ATTR_DOWNLOAD = "download"
ATTR_UPLOAD = "upload"
ATTR_VERSION = "Version"
ATTR_HOST = "host"
SENSOR_TYPES = {
ATTR_DOWNLOAD: [ATTR_DOWNLOAD.capitalize(), DATA_RATE_MEGABITS_PER_SECOND],
ATTR_UPLOAD: [ATTR_UPLOAD.capitalize(), DATA_RATE_MEGABITS_PER_SECOND],
}
PROTOCOLS = ["tcp", "udp"]
HOST_CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_DURATION, default=DEFAULT_DURATION): vol.Range(5, 10),
vol.Optional(CONF_PARALLEL, default=DEFAULT_PARALLEL): vol.Range(1, 20),
vol.Optional(CONF_PROTOCOL, default=DEFAULT_PROTOCOL): vol.In(PROTOCOLS),
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOSTS): vol.All(cv.ensure_list, [HOST_CONFIG_SCHEMA]),
vol.Optional(
CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)
): vol.All(cv.ensure_list, [vol.In(list(SENSOR_TYPES))]),
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_MANUAL, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
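# Example configuration.yaml entry matching CONFIG_SCHEMA (the host name is
# illustrative):
#
# iperf3:
#   hosts:
#     - host: iperf.example.com
#       port: 5201
#       duration: 10
#       parallel: 1
#       protocol: tcp
#   manual: false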
SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_HOST, default=None): cv.string})
async def async_setup(hass, config):
"""Set up the iperf3 component."""
hass.data[DOMAIN] = {}
conf = config[DOMAIN]
for host in conf[CONF_HOSTS]:
data = hass.data[DOMAIN][host[CONF_HOST]] = Iperf3Data(hass, host)
if not conf[CONF_MANUAL]:
async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL])
def update(call):
"""Service call to manually update the data."""
called_host = call.data[ATTR_HOST]
if called_host in hass.data[DOMAIN]:
hass.data[DOMAIN][called_host].update()
else:
for iperf3_host in hass.data[DOMAIN].values():
iperf3_host.update()
hass.services.async_register(DOMAIN, "speedtest", update, schema=SERVICE_SCHEMA)
hass.async_create_task(
async_load_platform(
hass, SENSOR_DOMAIN, DOMAIN, conf[CONF_MONITORED_CONDITIONS], config
)
)
return True
class Iperf3Data:
"""Get the latest data from iperf3."""
def __init__(self, hass, host):
"""Initialize the data object."""
self._hass = hass
self._host = host
self.data = {ATTR_DOWNLOAD: None, ATTR_UPLOAD: None, ATTR_VERSION: None}
def create_client(self):
"""Create a new iperf3 client to use for measurement."""
client = iperf3.Client()
client.duration = self._host[CONF_DURATION]
client.server_hostname = self._host[CONF_HOST]
client.port = self._host[CONF_PORT]
client.num_streams = self._host[CONF_PARALLEL]
client.protocol = self._host[CONF_PROTOCOL]
client.verbose = False
return client
@property
def protocol(self):
"""Return the protocol used for this connection."""
return self._host[CONF_PROTOCOL]
@property
def host(self):
"""Return the host connected to."""
return self._host[CONF_HOST]
@property
def port(self):
"""Return the port on the host connected to."""
return self._host[CONF_PORT]
def update(self, now=None):
"""Get the latest data from iperf3."""
if self.protocol == "udp":
            # UDP results only report a single (one-way) throughput attribute
result = self._run_test(ATTR_DOWNLOAD)
self.data[ATTR_DOWNLOAD] = self.data[ATTR_UPLOAD] = getattr(
result, "Mbps", None
)
self.data[ATTR_VERSION] = getattr(result, "version", None)
else:
result = self._run_test(ATTR_DOWNLOAD)
self.data[ATTR_DOWNLOAD] = getattr(result, "received_Mbps", None)
self.data[ATTR_VERSION] = getattr(result, "version", None)
self.data[ATTR_UPLOAD] = getattr(
self._run_test(ATTR_UPLOAD), "sent_Mbps", None
)
dispatcher_send(self._hass, DATA_UPDATED, self.host)
def _run_test(self, test_type):
"""Run and return the iperf3 data."""
client = self.create_client()
client.reverse = test_type == ATTR_DOWNLOAD
try:
result = client.run()
except (AttributeError, OSError, ValueError) as error:
_LOGGER.error("Iperf3 error: %s", error)
return None
if result is not None and hasattr(result, "error") and result.error is not None:
_LOGGER.error("Iperf3 error: %s", result.error)
return None
return result
|
import aiohttp.web_exceptions
import voluptuous as vol
import voluptuous_serialize
from homeassistant import config_entries, data_entry_flow
from homeassistant.auth.permissions.const import CAT_CONFIG_ENTRIES, POLICY_EDIT
from homeassistant.components import websocket_api
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import HTTP_FORBIDDEN, HTTP_NOT_FOUND
from homeassistant.core import callback
from homeassistant.exceptions import Unauthorized
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.data_entry_flow import (
FlowManagerIndexView,
FlowManagerResourceView,
)
from homeassistant.loader import async_get_config_flows
async def async_setup(hass):
"""Enable the Home Assistant views."""
hass.http.register_view(ConfigManagerEntryIndexView)
hass.http.register_view(ConfigManagerEntryResourceView)
hass.http.register_view(ConfigManagerEntryResourceReloadView)
hass.http.register_view(ConfigManagerFlowIndexView(hass.config_entries.flow))
hass.http.register_view(ConfigManagerFlowResourceView(hass.config_entries.flow))
hass.http.register_view(ConfigManagerAvailableFlowView)
hass.http.register_view(OptionManagerFlowIndexView(hass.config_entries.options))
hass.http.register_view(OptionManagerFlowResourceView(hass.config_entries.options))
hass.components.websocket_api.async_register_command(config_entry_update)
hass.components.websocket_api.async_register_command(config_entries_progress)
hass.components.websocket_api.async_register_command(system_options_list)
hass.components.websocket_api.async_register_command(system_options_update)
hass.components.websocket_api.async_register_command(ignore_config_flow)
return True
def _prepare_json(result):
"""Convert result for JSON."""
if result["type"] != data_entry_flow.RESULT_TYPE_FORM:
return result
data = result.copy()
schema = data["data_schema"]
if schema is None:
data["data_schema"] = []
else:
data["data_schema"] = voluptuous_serialize.convert(
schema, custom_serializer=cv.custom_serializer
)
return data
class ConfigManagerEntryIndexView(HomeAssistantView):
"""View to get available config entries."""
url = "/api/config/config_entries/entry"
name = "api:config:config_entries:entry"
async def get(self, request):
"""List available config entries."""
hass = request.app["hass"]
return self.json(
[entry_json(entry) for entry in hass.config_entries.async_entries()]
)
class ConfigManagerEntryResourceView(HomeAssistantView):
"""View to interact with a config entry."""
url = "/api/config/config_entries/entry/{entry_id}"
name = "api:config:config_entries:entry:resource"
async def delete(self, request, entry_id):
"""Delete a config entry."""
if not request["hass_user"].is_admin:
raise Unauthorized(config_entry_id=entry_id, permission="remove")
hass = request.app["hass"]
try:
result = await hass.config_entries.async_remove(entry_id)
except config_entries.UnknownEntry:
return self.json_message("Invalid entry specified", HTTP_NOT_FOUND)
return self.json(result)
class ConfigManagerEntryResourceReloadView(HomeAssistantView):
"""View to reload a config entry."""
url = "/api/config/config_entries/entry/{entry_id}/reload"
name = "api:config:config_entries:entry:resource:reload"
async def post(self, request, entry_id):
"""Reload a config entry."""
if not request["hass_user"].is_admin:
raise Unauthorized(config_entry_id=entry_id, permission="remove")
hass = request.app["hass"]
try:
result = await hass.config_entries.async_reload(entry_id)
except config_entries.OperationNotAllowed:
return self.json_message("Entry cannot be reloaded", HTTP_FORBIDDEN)
except config_entries.UnknownEntry:
return self.json_message("Invalid entry specified", HTTP_NOT_FOUND)
return self.json({"require_restart": not result})
class ConfigManagerFlowIndexView(FlowManagerIndexView):
"""View to create config flows."""
url = "/api/config/config_entries/flow"
name = "api:config:config_entries:flow"
async def get(self, request):
"""Not implemented."""
raise aiohttp.web_exceptions.HTTPMethodNotAllowed("GET", ["POST"])
# pylint: disable=arguments-differ
async def post(self, request):
"""Handle a POST request."""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="add")
# pylint: disable=no-value-for-parameter
return await super().post(request)
def _prepare_result_json(self, result):
"""Convert result to JSON."""
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
return super()._prepare_result_json(result)
data = result.copy()
data["result"] = data["result"].entry_id
data.pop("data")
return data
class ConfigManagerFlowResourceView(FlowManagerResourceView):
"""View to interact with the flow manager."""
url = "/api/config/config_entries/flow/{flow_id}"
name = "api:config:config_entries:flow:resource"
async def get(self, request, flow_id):
"""Get the current state of a data_entry_flow."""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="add")
return await super().get(request, flow_id)
# pylint: disable=arguments-differ
async def post(self, request, flow_id):
"""Handle a POST request."""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission="add")
# pylint: disable=no-value-for-parameter
return await super().post(request, flow_id)
def _prepare_result_json(self, result):
"""Convert result to JSON."""
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
return super()._prepare_result_json(result)
data = result.copy()
data["result"] = data["result"].entry_id
data.pop("data")
return data
class ConfigManagerAvailableFlowView(HomeAssistantView):
"""View to query available flows."""
url = "/api/config/config_entries/flow_handlers"
name = "api:config:config_entries:flow_handlers"
async def get(self, request):
"""List available flow handlers."""
hass = request.app["hass"]
return self.json(await async_get_config_flows(hass))
class OptionManagerFlowIndexView(FlowManagerIndexView):
"""View to create option flows."""
url = "/api/config/config_entries/options/flow"
name = "api:config:config_entries:option:flow"
# pylint: disable=arguments-differ
async def post(self, request):
"""Handle a POST request.
handler in request is entry_id.
"""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
# pylint: disable=no-value-for-parameter
return await super().post(request)
class OptionManagerFlowResourceView(FlowManagerResourceView):
"""View to interact with the option flow manager."""
url = "/api/config/config_entries/options/flow/{flow_id}"
name = "api:config:config_entries:options:flow:resource"
async def get(self, request, flow_id):
"""Get the current state of a data_entry_flow."""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
return await super().get(request, flow_id)
# pylint: disable=arguments-differ
async def post(self, request, flow_id):
"""Handle a POST request."""
if not request["hass_user"].is_admin:
raise Unauthorized(perm_category=CAT_CONFIG_ENTRIES, permission=POLICY_EDIT)
# pylint: disable=no-value-for-parameter
return await super().post(request, flow_id)
@websocket_api.require_admin
@websocket_api.websocket_command({"type": "config_entries/flow/progress"})
def config_entries_progress(hass, connection, msg):
"""List flows that are in progress but not started by a user.
    An example of a non-user-initiated flow is a discovered Hue hub that
    requires user interaction to finish setup.
"""
connection.send_result(
msg["id"],
[
flw
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"] != config_entries.SOURCE_USER
],
)
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{"type": "config_entries/system_options/list", "entry_id": str}
)
async def system_options_list(hass, connection, msg):
"""List all system options for a config entry."""
entry_id = msg["entry_id"]
entry = hass.config_entries.async_get_entry(entry_id)
if entry:
connection.send_result(msg["id"], entry.system_options.as_dict())
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
"type": "config_entries/system_options/update",
"entry_id": str,
vol.Optional("disable_new_entities"): bool,
}
)
async def system_options_update(hass, connection, msg):
"""Update config entry system options."""
changes = dict(msg)
changes.pop("id")
changes.pop("type")
entry_id = changes.pop("entry_id")
entry = hass.config_entries.async_get_entry(entry_id)
if entry is None:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config entry not found"
)
return
hass.config_entries.async_update_entry(entry, system_options=changes)
connection.send_result(msg["id"], entry.system_options.as_dict())
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{"type": "config_entries/update", "entry_id": str, vol.Optional("title"): str}
)
async def config_entry_update(hass, connection, msg):
"""Update config entry."""
changes = dict(msg)
changes.pop("id")
changes.pop("type")
entry_id = changes.pop("entry_id")
entry = hass.config_entries.async_get_entry(entry_id)
if entry is None:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config entry not found"
)
return
hass.config_entries.async_update_entry(entry, **changes)
connection.send_result(msg["id"], entry_json(entry))
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command({"type": "config_entries/ignore_flow", "flow_id": str})
async def ignore_config_flow(hass, connection, msg):
"""Ignore a config flow."""
flow = next(
(
flw
for flw in hass.config_entries.flow.async_progress()
if flw["flow_id"] == msg["flow_id"]
),
None,
)
if flow is None:
connection.send_error(
msg["id"], websocket_api.const.ERR_NOT_FOUND, "Config entry not found"
)
return
if "unique_id" not in flow["context"]:
connection.send_error(
msg["id"], "no_unique_id", "Specified flow has no unique ID."
)
return
await hass.config_entries.flow.async_init(
flow["handler"],
context={"source": config_entries.SOURCE_IGNORE},
data={"unique_id": flow["context"]["unique_id"]},
)
connection.send_result(msg["id"])
@callback
def entry_json(entry: config_entries.ConfigEntry) -> dict:
"""Return JSON value of a config entry."""
handler = config_entries.HANDLERS.get(entry.domain)
supports_options = (
        # Guard in case handler is no longer registered (custom component, etc.)
handler is not None
# pylint: disable=comparison-with-callable
and handler.async_get_options_flow
!= config_entries.ConfigFlow.async_get_options_flow
)
return {
"entry_id": entry.entry_id,
"domain": entry.domain,
"title": entry.title,
"source": entry.source,
"state": entry.state,
"connection_class": entry.connection_class,
"supports_options": supports_options,
"supports_unload": entry.supports_unload,
}
|
from appconf import AppConf
from django.core.cache import cache
from django.db.models.signals import post_save
from django.dispatch import receiver
from weblate.trans.models import Change
from weblate.utils.decorators import disable_for_loaddata
class WeblateConf(AppConf):
WEBLATE_GPG_IDENTITY = None
WEBLATE_GPG_ALGO = "default"
RATELIMIT_ATTEMPTS = 5
RATELIMIT_WINDOW = 300
RATELIMIT_LOCKOUT = 600
RATELIMIT_SEARCH_ATTEMPTS = 30
RATELIMIT_SEARCH_WINDOW = 60
RATELIMIT_SEARCH_LOCKOUT = 60
RATELIMIT_COMMENT_ATTEMPTS = 30
RATELIMIT_COMMENT_WINDOW = 60
RATELIMIT_TRANSLATE_ATTEMPTS = 30
RATELIMIT_TRANSLATE_WINDOW = 60
RATELIMIT_GLOSSARY_ATTEMPTS = 30
RATELIMIT_GLOSSARY_WINDOW = 60
SENTRY_DSN = None
SENTRY_SECURITY = None
SENTRY_ENVIRONMENT = "devel"
SENTRY_ORGANIZATION = "weblate"
SENTRY_TOKEN = None
SENTRY_PROJECTS = ["weblate"]
SENTRY_EXTRA_ARGS = {}
CELERY_TASK_ALWAYS_EAGER = True
CELERY_BROKER_URL = "memory://"
DATABASE_BACKUP = "plain"
HIDE_VERSION = False
CSP_SCRIPT_SRC = []
CSP_IMG_SRC = []
CSP_CONNECT_SRC = []
CSP_STYLE_SRC = []
CSP_FONT_SRC = []
class Meta:
prefix = ""
@receiver(post_save, sender=Change)
@disable_for_loaddata
def update_source(sender, instance, created, **kwargs):
if (
not created
or instance.action not in Change.ACTIONS_CONTENT
or instance.translation is None
):
return
cache.set(
f"last-content-change-{instance.translation.pk}",
instance.pk,
180 * 86400,
)
|
import numpy as np
import networkx as nx
from pgmpy.models.BayesianModel import BayesianModel
from pgmpy.estimators.LinearModel import LinearEstimator
from pgmpy.utils.sets import _powerset, _variable_or_iterable_to_set
class CausalInference(object):
"""
This is an inference class for performing Causal Inference over Bayesian Networks or Structural Equation Models.
    This class accepts queries of the form P(Y | do(X)) and provides methods
    to identify an estimand via techniques such as:
    * Identification of adjustment variables
    * Backdoor Adjustment
    * Front Door Adjustment
    * Instrumental Variable Adjustment
Parameters
----------
    model: BayesianModel
The model that we'll perform inference over.
set_nodes: list[node:str] or None
A list (or set/tuple) of nodes in the Bayesian Network which have been set to a specific value per the
do-operator.
Examples
--------
Create a small Bayesian Network.
>>> from pgmpy.models.BayesianModel import BayesianModel
    >>> game = BayesianModel([('X', 'A'),
                              ('A', 'Y'),
                              ('A', 'B')])
Load the graph into the CausalInference object to make causal queries.
>>> from pgmpy.inference.causal_inference import CausalInference
>>> inference = CausalInference(game)
>>> inference.get_all_backdoor_adjustment_sets(X="X", Y="Y")
>>> inference.get_all_frontdoor_adjustment_sets(X="X", Y="Y")
References
----------
'Causality: Models, Reasoning, and Inference' - Judea Pearl (2000)
    Many thanks to @ijmbarr for their implementation of causal graphical
    models; it served as an invaluable reference. Available on GitHub:
    https://github.com/ijmbarr/causalgraphicalmodels
"""
def __init__(self, model, latent_vars=None, set_nodes=None):
if not isinstance(model, BayesianModel):
raise NotImplementedError(
"Causal Inference is only implemented for BayesianModels at this time."
)
self.dag = model
self.graph = self.dag.to_undirected()
self.latent_variables = _variable_or_iterable_to_set(latent_vars)
self.set_nodes = _variable_or_iterable_to_set(set_nodes)
self.observed_variables = frozenset(self.dag.nodes()).difference(
self.latent_variables
)
def __repr__(self):
variables = ", ".join(map(str, sorted(self.observed_variables)))
return f"{self.__class__.__name__}({variables})"
def _is_d_separated(self, X, Y, Z=None):
return not self.dag.is_active_trail(X, Y, observed=Z)
def is_valid_backdoor_adjustment_set(self, X, Y, Z=[]):
"""
Test whether Z is a valid backdoor adjustment set for estimating the causal impact of X on Y.
Parameters
----------
X: str
Intervention Variable
Y: str
Target Variable
Z: str or set[str]
Adjustment variables
Returns
-------
boolean: True if Z is a valid backdoor adjustment set.
Examples
--------
>>> game1 = BayesianModel([('X', 'A'),
('A', 'Y'),
('A', 'B')])
>>> inference = CausalInference(game1)
>>> inference.is_valid_backdoor_adjustment_set("X", "Y")
True
"""
Z_ = list(Z)
observed = [X] + Z_
parents_d_sep = []
for p in self.dag.predecessors(X):
parents_d_sep.append(self._is_d_separated(p, Y, Z=observed))
return all(parents_d_sep)
def get_all_backdoor_adjustment_sets(self, X, Y):
"""
Returns a list of all adjustment sets per the back-door criterion.
        A set of variables Z satisfies the back-door criterion relative to an ordered pair of variables (Xi, Xj) in a DAG G if:
(i) no node in Z is a descendant of Xi; and
(ii) Z blocks every path between Xi and Xj that contains an arrow into Xi.
TODO:
* Backdoors are great, but the most general things we could implement would be Ilya Shpitser's ID and
IDC algorithms. See [his Ph.D. thesis for a full explanation]
(https://ftp.cs.ucla.edu/pub/stat_ser/shpitser-thesis.pdf). After doing a little reading it is clear
            that we do not need to immediately implement this. However, in order for us to truly account for
unobserved variables, we will need not only these algorithms, but a more general implementation of a DAG.
Most DAGs do not allow for bidirected edges, but it is an important piece of notation which Pearl and
Shpitser use to denote graphs with latent variables.
Parameters
----------
        X: str
            Intervention Variable
        Y: str
            Target Variable

        Returns
        -------
        frozenset: A frozenset of frozensets
Examples
--------
>>> game1 = BayesianModel([('X', 'A'),
('A', 'Y'),
('A', 'B')])
>>> inference = CausalInference(game1)
>>> inference.get_all_backdoor_adjustment_sets("X", "Y")
frozenset([])
References
----------
"Causality: Models, Reasoning, and Inference", Judea Pearl (2000). p.79.
"""
try:
assert X in self.observed_variables
assert Y in self.observed_variables
except AssertionError:
raise AssertionError("Make sure both X and Y are observed.")
if self.is_valid_backdoor_adjustment_set(X, Y, Z=frozenset()):
return frozenset()
possible_adjustment_variables = (
set(self.observed_variables) - {X} - {Y} - set(nx.descendants(self.dag, X))
)
valid_adjustment_sets = []
for s in _powerset(possible_adjustment_variables):
super_of_complete = []
for vs in valid_adjustment_sets:
super_of_complete.append(vs.intersection(set(s)) == vs)
if any(super_of_complete):
continue
if self.is_valid_backdoor_adjustment_set(X, Y, s):
valid_adjustment_sets.append(frozenset(s))
return frozenset(valid_adjustment_sets)
def is_valid_frontdoor_adjustment_set(self, X, Y, Z=None):
"""
Test whether Z is a valid frontdoor adjustment set for estimating the causal impact of X on Y via the frontdoor
adjustment formula.
Parameters
----------
X: str
Intervention Variable
Y: str
Target Variable
Z: set
Adjustment variables
Returns
-------
boolean: True if Z is a valid frontdoor adjustment set.
"""
Z = _variable_or_iterable_to_set(Z)
# 0. Get all directed paths from X to Y. Don't check further if there aren't any.
directed_paths = list(nx.all_simple_paths(self.dag, X, Y))
if directed_paths == []:
return False
# 1. Z intercepts all directed paths from X to Y
unblocked_directed_paths = [
path for path in directed_paths if not any(zz in path for zz in Z)
]
if unblocked_directed_paths:
return False
# 2. there is no backdoor path from X to Z
unblocked_backdoor_paths_X_Z = [
zz for zz in Z if not self.is_valid_backdoor_adjustment_set(X, zz)
]
if unblocked_backdoor_paths_X_Z:
return False
# 3. All back-door paths from Z to Y are blocked by X
valid_backdoor_sets = []
for zz in Z:
valid_backdoor_sets.append(self.is_valid_backdoor_adjustment_set(zz, Y, X))
if not all(valid_backdoor_sets):
return False
return True
def get_all_frontdoor_adjustment_sets(self, X, Y):
"""
        Identify possible sets of variables, Z, which satisfy the front-door criterion relative to given X and Y.
        Z satisfies the front-door criterion if:
(i) Z intercepts all directed paths from X to Y
(ii) there is no backdoor path from X to Z
(iii) all back-door paths from Z to Y are blocked by X
Returns
-------
frozenset: a frozenset of frozensets
References
----------
Causality: Models, Reasoning, and Inference, Judea Pearl (2000). p.82.
"""
assert X in self.observed_variables
assert Y in self.observed_variables
possible_adjustment_variables = set(self.observed_variables) - {X} - {Y}
valid_adjustment_sets = frozenset(
[
frozenset(s)
for s in _powerset(possible_adjustment_variables)
if self.is_valid_frontdoor_adjustment_set(X, Y, s)
]
)
return valid_adjustment_sets
def get_distribution(self):
"""
Returns a string representing the factorized distribution implied by the CGM.
"""
products = []
for node in nx.topological_sort(self.dag):
if node in self.set_nodes:
continue
parents = list(self.dag.predecessors(node))
if not parents:
p = f"P({node})"
else:
parents = [
f"do({n})" if n in self.set_nodes else str(n) for n in parents
]
p = f"P({node}|{','.join(parents)})"
products.append(p)
return "".join(products)
def simple_decision(self, adjustment_sets=[]):
"""
Selects the smallest set from provided adjustment sets.
Parameters
----------
adjustment_sets: iterable
A frozenset or list of valid adjustment sets
Returns
-------
frozenset
"""
adjustment_list = list(adjustment_sets)
if adjustment_list == []:
return frozenset([])
        # "Smallest" means the set with the fewest variables, so compare sizes.
        return adjustment_list[np.argmin([len(s) for s in adjustment_list])]
def estimate_ate(
self,
X,
Y,
data,
estimand_strategy="smallest",
estimator_type="linear",
**kwargs,
):
"""
Estimate the average treatment effect (ATE) of X on Y.
Parameters
----------
X: str
Intervention Variable
Y: str
Target Variable
data: pandas.DataFrame
All observed data for this Bayesian Network.
estimand_strategy: str or frozenset
Either specify a specific backdoor adjustment set or a strategy.
The available options are:
smallest:
Use the smallest estimand of observed variables
all:
Estimate the ATE from each identified estimand
estimator_type: str
The type of model to be used to estimate the ATE.
All of the linear regression classes in statsmodels are available including:
* GLS: generalized least squares for arbitrary covariance
* OLS: ordinary least square of i.i.d. errors
* WLS: weighted least squares for heteroskedastic error
            Specify them with their acronym (e.g. "OLS") or simply "linear" as an alias for OLS.
**kwargs: dict
            Keyword arguments specific to the selected estimator.
linear:
missing: str
Available options are "none", "drop", or "raise"
Returns
-------
float: The average treatment effect
Examples
--------
>>> game1 = BayesianModel([('X', 'A'),
... ('A', 'Y'),
... ('A', 'B')])
>>> data = pd.DataFrame(np.random.randint(2, size=(1000, 4)), columns=['X', 'A', 'B', 'Y'])
>>> inference = CausalInference(model=game1)
>>> inference.estimate_ate("X", "Y", data=data, estimator_type="linear")
"""
        valid_estimators = ["linear"]
        if estimator_type not in valid_estimators:
            raise ValueError(
                f"{estimator_type} is not a valid estimator_type. Please select from {valid_estimators}"
            )
        if isinstance(estimand_strategy, frozenset):
            assert self.is_valid_backdoor_adjustment_set(X, Y, Z=estimand_strategy)
            adjustment_sets = frozenset({estimand_strategy})
elif estimand_strategy in ["smallest", "all"]:
adjustment_sets = self.get_all_backdoor_adjustment_sets(X, Y)
if estimand_strategy == "smallest":
adjustment_sets = frozenset({self.simple_decision(adjustment_sets)})
if estimator_type == "linear":
self.estimator = LinearEstimator(self.dag)
ate = [
self.estimator.fit(X=X, Y=Y, Z=s, data=data, **kwargs)._get_ate()
for s in adjustment_sets
]
return np.mean(ate)
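
# --- Illustrative sketch (not from the original module) ---
# The front-door check above reduces to three path conditions. This minimal,
# self-contained demo verifies condition (i) on the classic chain X -> Z -> Y
# with a hidden confounder U; the graph and variable names are assumptions for
# illustration only. Conditions (ii) and (iii) would reuse
# is_valid_backdoor_adjustment_set exactly as the method does.
if __name__ == "__main__":
    import networkx as nx

    demo_dag = nx.DiGraph([("U", "X"), ("U", "Y"), ("X", "Z"), ("Z", "Y")])
    directed_paths = list(nx.all_simple_paths(demo_dag, "X", "Y"))
    # (i) Z intercepts every directed path from X to Y.
    assert all("Z" in path for path in directed_paths)
    print("Directed X->Y paths, all through Z:", directed_paths)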
|
from cerberus import errors
from cerberus.tests import assert_fail, assert_success
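# The shared validator fixture is assumed to carry a schema along these lines
# (a readability sketch, not the exact cerberus test schema):
#     'a_dict_with_valuesrules': {'valuesrules': {'type': 'integer'}}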
def test_valuesrules_succeeds():
assert_success(
document={'a_dict_with_valuesrules': {'an integer': 99, 'another integer': 100}}
)
def test_valuesrules_fails(validator):
assert_fail(
document={'a_dict_with_valuesrules': {'a string': '99'}},
validator=validator,
error=(
'a_dict_with_valuesrules',
('a_dict_with_valuesrules', 'valuesrules'),
errors.VALUESRULES,
{'type': ('integer',)},
),
child_errors=[
(
('a_dict_with_valuesrules', 'a string'),
('a_dict_with_valuesrules', 'valuesrules', 'type'),
errors.TYPE,
('integer',),
)
],
)
assert 'valuesrules' in validator.schema_error_tree['a_dict_with_valuesrules']
error_node = validator.schema_error_tree['a_dict_with_valuesrules']['valuesrules']
assert len(error_node.descendants) == 1
|
from ipaddress import ip_address
import logging
import urllib.error
from doorbirdpy import DoorBird
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
HTTP_UNAUTHORIZED,
)
from homeassistant.core import callback
from homeassistant.util.network import is_link_local
from .const import CONF_EVENTS, DOORBIRD_OUI
from .const import DOMAIN # pylint:disable=unused-import
from .util import get_mac_address_from_doorstation_info
_LOGGER = logging.getLogger(__name__)
def _schema_with_defaults(host=None, name=None):
return vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_NAME, default=name): str,
}
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
device = DoorBird(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD])
try:
status = await hass.async_add_executor_job(device.ready)
info = await hass.async_add_executor_job(device.info)
except urllib.error.HTTPError as err:
if err.code == HTTP_UNAUTHORIZED:
raise InvalidAuth from err
raise CannotConnect from err
except OSError as err:
raise CannotConnect from err
if not status[0]:
raise CannotConnect
mac_addr = get_mac_address_from_doorstation_info(info)
# Return info that you want to store in the config entry.
return {"title": data[CONF_HOST], "mac_addr": mac_addr}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for DoorBird."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the DoorBird config flow."""
self.discovery_schema = {}
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
info, errors = await self._async_validate_or_error(user_input)
if not errors:
await self.async_set_unique_id(info["mac_addr"])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
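        # Prefer the schema pre-filled by zeroconf discovery, if any.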
data = self.discovery_schema or _schema_with_defaults()
return self.async_show_form(step_id="user", data_schema=data, errors=errors)
async def async_step_zeroconf(self, discovery_info):
"""Prepare configuration for a discovered doorbird device."""
macaddress = discovery_info["properties"]["macaddress"]
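        # The OUI (first three octets / six hex digits of the MAC) identifies
        # the vendor; bail out early for anything that is not a DoorBird.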
if macaddress[:6] != DOORBIRD_OUI:
return self.async_abort(reason="not_doorbird_device")
if is_link_local(ip_address(discovery_info[CONF_HOST])):
return self.async_abort(reason="link_local_address")
await self.async_set_unique_id(macaddress)
self._abort_if_unique_id_configured(
updates={CONF_HOST: discovery_info[CONF_HOST]}
)
chop_ending = "._axis-video._tcp.local."
friendly_hostname = discovery_info["name"]
if friendly_hostname.endswith(chop_ending):
friendly_hostname = friendly_hostname[: -len(chop_ending)]
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = {
CONF_NAME: friendly_hostname,
CONF_HOST: discovery_info[CONF_HOST],
}
self.discovery_schema = _schema_with_defaults(
host=discovery_info[CONF_HOST], name=friendly_hostname
)
return await self.async_step_user()
async def async_step_import(self, user_input):
"""Handle import."""
if user_input:
info, errors = await self._async_validate_or_error(user_input)
if not errors:
await self.async_set_unique_id(
info["mac_addr"], raise_on_progress=False
)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return await self.async_step_user(user_input)
async def _async_validate_or_error(self, user_input):
"""Validate doorbird or error."""
errors = {}
info = {}
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return info, errors
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for doorbird."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
events = [event.strip() for event in user_input[CONF_EVENTS].split(",")]
return self.async_create_entry(title="", data={CONF_EVENTS: events})
current_events = self.config_entry.options.get(CONF_EVENTS, [])
# We convert to a comma separated list for the UI
# since there really isn't anything better
options_schema = vol.Schema(
{vol.Optional(CONF_EVENTS, default=", ".join(current_events)): str}
)
return self.async_show_form(step_id="init", data_schema=options_schema)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
import unittest
import locale
import urwid
from urwid import util
from urwid.compat import B
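# B() coerces str literals to bytes, so these byte-oriented width tests behave
# the same under Python 2 and Python 3.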
class CalcWidthTest(unittest.TestCase):
def wtest(self, desc, s, exp):
s = B(s)
        result = util.calc_width(s, 0, len(s))
        assert result == exp, "%s got:%r expected:%r" % (desc, result, exp)
def test1(self):
util.set_encoding("utf-8")
self.wtest("narrow", "hello", 5)
self.wtest("wide char", '\xe6\x9b\xbf', 2)
self.wtest("invalid", '\xe6', 1)
self.wtest("zero width", '\xcc\x80', 0)
self.wtest("mixed", 'hello\xe6\x9b\xbf\xe6\x9b\xbf', 9)
def test2(self):
util.set_encoding("euc-jp")
self.wtest("narrow", "hello", 5)
self.wtest("wide", "\xA1\xA1\xA1\xA1", 4)
self.wtest("invalid", "\xA1", 1)
class ConvertDecSpecialTest(unittest.TestCase):
def ctest(self, desc, s, exp, expcs):
exp = B(exp)
util.set_encoding('ascii')
c = urwid.Text(s).render((5,))
result = c._text[0]
assert result==exp, "%s got:%r expected:%r" % (desc, result, exp)
resultcs = c._cs[0]
assert resultcs==expcs, "%s got:%r expected:%r" % (desc,
resultcs, expcs)
def test1(self):
self.ctest("no conversion", u"hello", "hello", [(None,5)])
self.ctest("only special", u"£££££", "}}}}}", [("0",5)])
self.ctest("mix left", u"££abc", "}}abc", [("0",2),(None,3)])
self.ctest("mix right", u"abc££", "abc}}", [(None,3),("0",2)])
self.ctest("mix inner", u"a££bc", "a}}bc",
[(None,1),("0",2),(None,2)] )
self.ctest("mix well", u"£a£b£", "}a}b}",
[("0",1),(None,1),("0",1),(None,1),("0",1)] )
class WithinDoubleByteTest(unittest.TestCase):
def setUp(self):
urwid.set_encoding("euc-jp")
def wtest(self, s, ls, pos, expected, desc):
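        # within_double_byte returns 0 when pos is not inside a double-byte
        # character, 1 for the first half and 2 for the second half; ls is the
        # offset where the current line starts.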
result = util.within_double_byte(B(s), ls, pos)
assert result==expected, "%s got:%r expected: %r" % (desc,
result, expected)
def test1(self):
self.wtest("mnopqr",0,2,0,'simple no high bytes')
self.wtest("mn\xA1\xA1qr",0,2,1,'simple 1st half')
self.wtest("mn\xA1\xA1qr",0,3,2,'simple 2nd half')
self.wtest("m\xA1\xA1\xA1\xA1r",0,3,1,'subsequent 1st half')
self.wtest("m\xA1\xA1\xA1\xA1r",0,4,2,'subsequent 2nd half')
self.wtest("mn\xA1@qr",0,3,2,'simple 2nd half lo')
self.wtest("mn\xA1\xA1@r",0,4,0,'subsequent not 2nd half lo')
self.wtest("m\xA1\xA1\xA1@r",0,4,2,'subsequent 2nd half lo')
def test2(self):
self.wtest("\xA1\xA1qr",0,0,1,'begin 1st half')
self.wtest("\xA1\xA1qr",0,1,2,'begin 2nd half')
self.wtest("\xA1@qr",0,1,2,'begin 2nd half lo')
self.wtest("\xA1\xA1\xA1\xA1r",0,2,1,'begin subs. 1st half')
self.wtest("\xA1\xA1\xA1\xA1r",0,3,2,'begin subs. 2nd half')
self.wtest("\xA1\xA1\xA1@r",0,3,2,'begin subs. 2nd half lo')
self.wtest("\xA1@\xA1@r",0,3,2,'begin subs. 2nd half lo lo')
self.wtest("@\xA1\xA1@r",0,3,0,'begin subs. not 2nd half lo')
def test3(self):
self.wtest("abc \xA1\xA1qr",4,4,1,'newline 1st half')
self.wtest("abc \xA1\xA1qr",4,5,2,'newline 2nd half')
self.wtest("abc \xA1@qr",4,5,2,'newline 2nd half lo')
self.wtest("abc \xA1\xA1\xA1\xA1r",4,6,1,'newl subs. 1st half')
self.wtest("abc \xA1\xA1\xA1\xA1r",4,7,2,'newl subs. 2nd half')
self.wtest("abc \xA1\xA1\xA1@r",4,7,2,'newl subs. 2nd half lo')
self.wtest("abc \xA1@\xA1@r",4,7,2,'newl subs. 2nd half lo lo')
self.wtest("abc @\xA1\xA1@r",4,7,0,'newl subs. not 2nd half lo')
class CalcTextPosTest(unittest.TestCase):
def ctptest(self, text, tests):
text = B(text)
for s,e,p, expected in tests:
            got = util.calc_text_pos(text, s, e, p)
assert got == expected, "%r got:%r expected:%r" % ((s,e,p),
got, expected)
def test1(self):
text = "hello world out there"
tests = [
(0,21,0, (0,0)),
(0,21,5, (5,5)),
(0,21,21, (21,21)),
(0,21,50, (21,21)),
(2,15,50, (15,13)),
(6,21,0, (6,0)),
(6,21,3, (9,3)),
]
self.ctptest(text, tests)
def test2_wide(self):
util.set_encoding("euc-jp")
text = "hel\xA1\xA1 world out there"
tests = [
(0,21,0, (0,0)),
(0,21,4, (3,3)),
(2,21,2, (3,1)),
(2,21,3, (5,3)),
(6,21,0, (6,0)),
]
self.ctptest(text, tests)
def test3_utf8(self):
util.set_encoding("utf-8")
text = "hel\xc4\x83 world \xe2\x81\x81 there"
tests = [
(0,21,0, (0,0)),
(0,21,4, (5,4)),
(2,21,1, (3,1)),
(2,21,2, (5,2)),
(2,21,3, (6,3)),
(6,21,7, (15,7)),
(6,21,8, (16,8)),
]
self.ctptest(text, tests)
def test4_utf8(self):
util.set_encoding("utf-8")
text = "he\xcc\x80llo \xe6\x9b\xbf world"
tests = [
(0,15,0, (0,0)),
(0,15,1, (1,1)),
(0,15,2, (4,2)),
(0,15,4, (6,4)),
(8,15,0, (8,0)),
(8,15,1, (8,0)),
(8,15,2, (11,2)),
(8,15,5, (14,5)),
]
self.ctptest(text, tests)
class TagMarkupTest(unittest.TestCase):
mytests = [
("simple one", "simple one", []),
(('blue',"john"), "john", [('blue',4)]),
(["a ","litt","le list"], "a little list", []),
(["mix",('high',[" it ",('ital',"up a")])," little"],
"mix it up a little",
[(None,3),('high',4),('ital',4)]),
([u"££", u"x££"], u"££x££", []),
([B("\xc2\x80"), B("\xc2\x80")], B("\xc2\x80\xc2\x80"), []),
]
def test(self):
for input, text, attr in self.mytests:
restext,resattr = urwid.decompose_tagmarkup( input )
assert restext == text, "got: %r expected: %r" % (restext, text)
assert resattr == attr, "got: %r expected: %r" % (resattr, attr)
def test_bad_tuple(self):
self.assertRaises(urwid.TagMarkupException, lambda:
urwid.decompose_tagmarkup((1,2,3)))
def test_bad_type(self):
self.assertRaises(urwid.TagMarkupException, lambda:
urwid.decompose_tagmarkup(5))
class RleTest(unittest.TestCase):
def test_rle_prepend(self):
rle0 = [('A', 10), ('B', 15)]
# the rle functions are mutating, so make a few copies of rle0
rle1, rle2 = rle0[:], rle0[:]
util.rle_prepend_modify(rle1, ('A', 3))
util.rle_prepend_modify(rle2, ('X', 2))
self.assertListEqual(rle1, [('A', 13), ('B', 15)])
self.assertListEqual(rle2, [('X', 2), ('A', 10), ('B', 15)])
def test_rle_append(self):
rle0 = [('A', 10), ('B', 15)]
rle3, rle4 = rle0[:], rle0[:]
util.rle_append_modify(rle3, ('B', 5))
util.rle_append_modify(rle4, ('K', 1))
self.assertListEqual(rle3, [('A', 10), ('B', 20)])
self.assertListEqual(rle4, [('A', 10), ('B', 15), ('K', 1)])
class PortabilityTest(unittest.TestCase):
def test_locale(self):
initial = locale.getlocale()
locale.setlocale(locale.LC_ALL, (None, None))
util.detect_encoding()
self.assertEqual(locale.getlocale(), (None, None))
locale.setlocale(locale.LC_ALL, ('en_US', 'UTF-8'))
util.detect_encoding()
self.assertEqual(locale.getlocale(), ('en_US', 'UTF-8'))
locale.setlocale(locale.LC_ALL, initial)
|
from . import mock_dict
from . import utils
import boto.s3.bucket
import boto.s3.connection
import boto.s3.key
import boto.s3.multipart
import six
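# Keep a reference to the original Bucket.__init__ so the monkeypatched Bucket
# class below can still invoke it.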
Bucket__init__ = boto.s3.bucket.Bucket.__init__
@six.add_metaclass(utils.monkeypatch_class)
class MultiPartUpload(boto.s3.multipart.MultiPartUpload):
def upload_part_from_file(self, io, num_part):
if num_part == 1:
self.bucket._bucket[self.bucket.name][self._tmp_key] = io.read()
else:
self.bucket._bucket[self.bucket.name][self._tmp_key] += io.read()
def complete_upload(self):
return None
@six.add_metaclass(utils.monkeypatch_class)
class S3Connection(boto.s3.connection.S3Connection):
def __init__(self, *args, **kwargs):
return None
def get_bucket(self, name, **kwargs):
# Create a bucket for testing
bucket = Bucket(connection=self, name=name, key_class=Key)
return bucket
def make_request(self, *args, **kwargs):
return 'request result'
@six.add_metaclass(utils.monkeypatch_class)
class Bucket(boto.s3.bucket.Bucket):
_bucket = mock_dict.MockDict()
_bucket.add_dict_methods()
@property
def _bucket_dict(self):
if self.name in Bucket._bucket:
return Bucket._bucket[self.name]._mock_dict
def __init__(self, *args, **kwargs):
Bucket__init__(self, *args, **kwargs)
Bucket._bucket[self.name] = mock_dict.MockDict()
Bucket._bucket[self.name].add_dict_methods()
def delete(self):
if self.name in Bucket._bucket:
Bucket._bucket[self.name] = mock_dict.MockDict()
Bucket._bucket[self.name].add_dict_methods()
def list(self, **kwargs):
return ([self.lookup(k) for k in self._bucket_dict.keys()]
if self._bucket_dict else [])
def lookup(self, key_name, **kwargs):
if self._bucket_dict and key_name in self._bucket_dict:
value = Bucket._bucket[self.name][key_name]
k = Key(self)
k.name = key_name
k.size = len(value)
return k
def initiate_multipart_upload(self, key_name, **kwargs):
# Pass key_name to MultiPartUpload
mp = MultiPartUpload(self)
mp._tmp_key = key_name
return mp
@six.add_metaclass(utils.monkeypatch_class)
class Key(boto.s3.key.Key):
def exists(self):
bucket_dict = self.bucket._bucket_dict
return self.name in bucket_dict if bucket_dict else False
def delete(self):
del self.bucket._bucket_dict[self.name]
def set_contents_from_string(self, value, **kwargs):
self.size = len(value)
self.bucket._bucket_dict[self.name] = value
def get_contents_as_string(self, *args, **kwargs):
return self.bucket._bucket_dict[self.name]
def get_contents_to_file(self, fp, **kwargs):
min_cur, max_cur = (kwargs['headers']['Range'].replace('bytes=', '')
.split('-'))
value = self.bucket._bucket_dict[self.name]
fp.write(value[int(min_cur):int(max_cur) + 1])
fp.flush()
def read(self, buffer_size):
# fetch read status
lp = getattr(self, '_last_position', 0)
self._last_position = lp + buffer_size
return self.bucket._bucket_dict[self.name][lp:lp + buffer_size]
|
from absl import flags
from perfkitbenchmarker.linux_packages import cuda_toolkit
CUDNN_7_4_9 = 'libcudnn7=7.4.2.24-1+cuda9.0'
CUDNN_7_4_10 = 'libcudnn7=7.4.2.24-1+cuda10.0'
CUDNN_7_6_1 = 'libcudnn7=7.6.1.34-1+cuda10.1'
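# Each pin follows "libcudnn7=<cuDNN version>-1+cuda<toolkit version>":
# cuDNN 7.4.2 builds for CUDA 9.0 and 10.0, and cuDNN 7.6.1 for CUDA 10.1.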
FLAGS = flags.FLAGS
def AptInstall(vm):
"""Installs the cudnn package on the VM."""
if not cuda_toolkit.CheckNvidiaSmiExists(vm):
raise Exception('CUDA Toolkit is a prerequisite for installing CUDNN.')
if FLAGS.cuda_toolkit_version == '9.0':
cudnn_version = CUDNN_7_4_9
elif FLAGS.cuda_toolkit_version == '10.0':
cudnn_version = CUDNN_7_4_10
elif FLAGS.cuda_toolkit_version == '10.1':
cudnn_version = CUDNN_7_6_1
else:
raise Exception('No CUDNN version found for given CUDA version.')
# dirmngr is needed for getting the certificate from network
vm.RemoteCommand(
'sudo apt-get install -y --no-install-recommends dirmngr',
should_log=True)
vm.RemoteCommand(
'sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/'
'compute/cuda/repos/ubuntu1604/x86_64/7fa2af80.pub', should_log=True)
vm.RemoteCommand(
'sudo bash -c \'echo "deb https://developer.download.nvidia.com/compute/'
'machine-learning/repos/ubuntu1604/x86_64 /" > /etc/apt/sources.list.d/'
'nvidia-ml.list\'', should_log=True)
vm.RemoteCommand('sudo apt-get update', should_log=True)
vm.RemoteCommand('sudo apt-get install -y --no-install-recommends '
'{}'.format(cudnn_version), should_log=True)
|
from plumbum import local, cli, FG
from plumbum.path.utils import delete
try:
from plumbum.cmd import twine
except ImportError:
twine = None
class BuildProject(cli.Application):
'Build and optionally upload. For help, see https://packaging.python.org/en/latest/distributing/#uploading-your-project-to-pypi'
    upload = cli.Flag("upload", help="If given, the artifacts will be uploaded to PyPI")
def main(self):
delete(local.cwd // "*.egg-info", "build", "dist")
local.python("setup.py", "sdist", "bdist_wheel")
delete(local.cwd // "*.egg-info", "build")
if self.upload:
if twine is None:
print("Twine not installed, cannot securely upload. Install twine.")
else:
twine['upload', 'dist/*tar.gz', 'dist/*.whl'] & FG
else:
print("Built. To upload, run:")
print(" twine upload dist/*tar.gz dist/*.whl")
if __name__ == "__main__":
BuildProject.run()
|
from datetime import timedelta
import logging
import time
from sqlalchemy.exc import OperationalError, SQLAlchemyError
import homeassistant.util.dt as dt_util
from .models import Events, RecorderRuns, States
from .util import execute, session_scope
_LOGGER = logging.getLogger(__name__)
def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
"""Purge events and states older than purge_days ago.
    Cleans up a timeframe of one hour, based on the oldest record.
"""
purge_before = dt_util.utcnow() - timedelta(days=purge_days)
_LOGGER.debug("Purging states and events before target %s", purge_before)
try:
with session_scope(session=instance.get_session()) as session:
# Purge a max of 1 hour, based on the oldest states or events record
batch_purge_before = purge_before
query = session.query(States).order_by(States.last_updated.asc()).limit(1)
states = execute(query, to_native=True, validate_entity_ids=False)
if states:
batch_purge_before = min(
batch_purge_before,
states[0].last_updated + timedelta(hours=1),
)
query = session.query(Events).order_by(Events.time_fired.asc()).limit(1)
events = execute(query, to_native=True)
if events:
batch_purge_before = min(
batch_purge_before,
events[0].time_fired + timedelta(hours=1),
)
_LOGGER.debug("Purging states and events before %s", batch_purge_before)
deleted_rows = (
session.query(States)
.filter(States.last_updated < batch_purge_before)
.delete(synchronize_session=False)
)
_LOGGER.debug("Deleted %s states", deleted_rows)
deleted_rows = (
session.query(Events)
.filter(Events.time_fired < batch_purge_before)
.delete(synchronize_session=False)
)
_LOGGER.debug("Deleted %s events", deleted_rows)
# If states or events purging isn't processing the purge_before yet,
# return false, as we are not done yet.
if batch_purge_before != purge_before:
_LOGGER.debug("Purging hasn't fully completed yet")
return False
# Recorder runs is small, no need to batch run it
deleted_rows = (
session.query(RecorderRuns)
.filter(RecorderRuns.start < purge_before)
.delete(synchronize_session=False)
)
_LOGGER.debug("Deleted %s recorder_runs", deleted_rows)
if repack:
# Execute sqlite or postgresql vacuum command to free up space on disk
if instance.engine.driver in ("pysqlite", "postgresql"):
_LOGGER.debug("Vacuuming SQL DB to free space")
instance.engine.execute("VACUUM")
# Optimize mysql / mariadb tables to free up space on disk
elif instance.engine.driver in ("mysqldb", "pymysql"):
_LOGGER.debug("Optimizing SQL DB to free space")
instance.engine.execute("OPTIMIZE TABLE states, events, recorder_runs")
except OperationalError as err:
# Retry when one of the following MySQL errors occurred:
# 1205: Lock wait timeout exceeded; try restarting transaction
# 1206: The total number of locks exceeds the lock table size
# 1213: Deadlock found when trying to get lock; try restarting transaction
if instance.engine.driver in ("mysqldb", "pymysql") and err.orig.args[0] in (
1205,
1206,
1213,
):
_LOGGER.info("%s; purge not completed, retrying", err.orig.args[1])
time.sleep(instance.db_retry_wait)
return False
_LOGGER.warning("Error purging history: %s", err)
except SQLAlchemyError as err:
_LOGGER.warning("Error purging history: %s", err)
return True
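
# Note: callers are expected to re-invoke purge_old_data until it returns True,
# since each pass deletes at most one hour's worth of the oldest rows.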
|
import os
import os.path
import sys
import bdb
import pdb # noqa: T002
import signal
import argparse
import functools
import threading
import faulthandler
from typing import TYPE_CHECKING, Optional, MutableMapping, cast
import attr
from PyQt5.QtCore import (pyqtSlot, qInstallMessageHandler, QObject,
QSocketNotifier, QTimer, QUrl)
from PyQt5.QtWidgets import QApplication
from qutebrowser.api import cmdutils
from qutebrowser.misc import earlyinit, crashdialog, ipc, objects
from qutebrowser.utils import usertypes, standarddir, log, objreg, debug, utils
from qutebrowser.qt import sip
if TYPE_CHECKING:
from qutebrowser.misc import quitter
@attr.s
class ExceptionInfo:
"""Information stored when there was an exception."""
pages = attr.ib()
cmd_history = attr.ib()
objects = attr.ib()
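
# Module-level singleton, assigned in init() below; the cast just keeps type
# checkers happy before initialization.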
crash_handler = cast('CrashHandler', None)
class CrashHandler(QObject):
"""Handler for crashes, reports and exceptions.
Attributes:
_app: The QApplication instance.
_quitter: The Quitter instance.
_args: The argparse namespace.
_crash_dialog: The CrashDialog currently being shown.
_crash_log_file: The file handle for the faulthandler crash log.
_crash_log_data: Crash data read from the previous crash log.
is_crashing: Used by mainwindow.py to skip confirm questions on
crashes.
"""
def __init__(self, *, app, quitter, args, parent=None):
super().__init__(parent)
self._app = app
self._quitter = quitter
self._args = args
self._crash_log_file = None
self._crash_log_data = None
self._crash_dialog = None
self.is_crashing = False
def activate(self):
"""Activate the exception hook."""
sys.excepthook = self.exception_hook
def init_faulthandler(self):
"""Handle a segfault from a previous run and set up faulthandler."""
logname = os.path.join(standarddir.data(), 'crash.log')
try:
# First check if an old logfile exists.
if os.path.exists(logname):
with open(logname, 'r', encoding='ascii') as f:
self._crash_log_data = f.read()
os.remove(logname)
self._init_crashlogfile()
else:
# There's no log file, so we can use this to display crashes to
# the user on the next start.
self._init_crashlogfile()
except OSError:
log.init.exception("Error while handling crash log file!")
self._init_crashlogfile()
def display_faulthandler(self):
"""If there was data in the crash log file, display a dialog."""
assert not self._args.no_err_windows
if self._crash_log_data:
# Crashlog exists and has data in it, so something crashed
# previously.
self._crash_dialog = crashdialog.FatalCrashDialog(
self._args.debug, self._crash_log_data)
self._crash_dialog.show()
self._crash_log_data = None
def _recover_pages(self, forgiving=False):
"""Try to recover all open pages.
Called from exception_hook, so as forgiving as possible.
Args:
forgiving: Whether to ignore exceptions.
Return:
A list containing a list for each window, which in turn contain the
opened URLs.
"""
pages = []
for win_id in objreg.window_registry:
win_pages = []
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
for tab in tabbed_browser.widgets():
try:
urlstr = tab.url().toString(
QUrl.RemovePassword | QUrl.FullyEncoded)
if urlstr:
win_pages.append(urlstr)
except Exception:
if forgiving:
log.destroy.exception("Error while recovering tab")
else:
raise
pages.append(win_pages)
return pages
def _init_crashlogfile(self):
"""Start a new logfile and redirect faulthandler to it."""
logname = os.path.join(standarddir.data(), 'crash.log')
try:
self._crash_log_file = open(logname, 'w', encoding='ascii')
except OSError:
log.init.exception("Error while opening crash log file!")
else:
earlyinit.init_faulthandler(self._crash_log_file)
@cmdutils.register(instance='crash-handler')
def report(self, info=None, contact=None):
"""Report a bug in qutebrowser.
Args:
info: Information about the bug report. If given, no report dialog
shows up.
contact: Contact information for the report.
"""
pages = self._recover_pages()
cmd_history = objreg.get('command-history')[-5:]
all_objects = debug.get_all_objects()
self._crash_dialog = crashdialog.ReportDialog(pages, cmd_history,
all_objects)
if info is None:
self._crash_dialog.show()
else:
self._crash_dialog.report(info=info, contact=contact)
@pyqtSlot()
def shutdown(self):
self.destroy_crashlogfile()
def destroy_crashlogfile(self):
"""Clean up the crash log file and delete it."""
if self._crash_log_file is None:
return
# We use sys.__stderr__ instead of sys.stderr here so this will still
# work when sys.stderr got replaced, e.g. by "Python Tools for Visual
# Studio".
if sys.__stderr__ is not None:
faulthandler.enable(sys.__stderr__)
else:
faulthandler.disable() # type: ignore[unreachable]
try:
self._crash_log_file.close()
os.remove(self._crash_log_file.name)
except OSError:
log.destroy.exception("Could not remove crash log!")
def _get_exception_info(self):
"""Get info needed for the exception hook/dialog.
Return:
An ExceptionInfo object.
"""
try:
pages = self._recover_pages(forgiving=True)
except Exception as e:
log.destroy.exception("Error while recovering pages: {}".format(e))
pages = []
try:
cmd_history = objreg.get('command-history')[-5:]
except Exception as e:
log.destroy.exception("Error while getting history: {}".format(e))
cmd_history = []
try:
all_objects = debug.get_all_objects()
except Exception:
log.destroy.exception("Error while getting objects")
all_objects = ""
return ExceptionInfo(pages, cmd_history, all_objects)
def _handle_early_exits(self, exc):
"""Handle some special cases for the exception hook.
Return value:
True: Exception hook should be aborted.
False: Continue handling exception.
"""
exctype, _excvalue, tb = exc
if not self._quitter.quit_status['crash']:
log.misc.error("ARGH, there was an exception while the crash "
"dialog is already shown:", exc_info=exc)
return True
log.misc.error("Uncaught exception", exc_info=exc)
is_ignored_exception = (exctype is bdb.BdbQuit or
not issubclass(exctype, Exception))
if 'pdb-postmortem' in objects.debug_flags:
if tb is None:
pdb.set_trace() # noqa: T100
else:
pdb.post_mortem(tb)
if is_ignored_exception or 'pdb-postmortem' in objects.debug_flags:
# pdb exit, KeyboardInterrupt, ...
sys.exit(usertypes.Exit.exception)
if threading.current_thread() != threading.main_thread():
log.misc.error("Ignoring exception outside of main thread... "
"Please report this as a bug.")
return True
return False
def exception_hook(self, exctype, excvalue, tb):
"""Handle uncaught python exceptions.
It'll try very hard to write all open tabs to a file, and then exit
gracefully.
"""
exc = (exctype, excvalue, tb)
if self._handle_early_exits(exc):
return
self._quitter.quit_status['crash'] = False
info = self._get_exception_info()
if ipc.server is not None:
try:
ipc.server.ignored = True
except Exception:
log.destroy.exception("Error while ignoring ipc")
try:
self._app.lastWindowClosed.disconnect(
self._quitter.on_last_window_closed)
except TypeError:
log.destroy.exception("Error while preventing shutdown")
self.is_crashing = True
self._app.closeAllWindows()
if self._args.no_err_windows:
crashdialog.dump_exception_info(exc, info.pages, info.cmd_history,
info.objects)
else:
self._crash_dialog = crashdialog.ExceptionCrashDialog(
self._args.debug, info.pages, info.cmd_history, exc,
info.objects)
ret = self._crash_dialog.exec_()
if ret == crashdialog.Result.restore:
self._quitter.restart(info.pages)
# We might risk a segfault here, but that's better than continuing to
# run in some undefined state, so we only do the most needed shutdown
# here.
qInstallMessageHandler(None)
self.destroy_crashlogfile()
sys.exit(usertypes.Exit.exception)
def raise_crashdlg(self):
"""Raise the crash dialog if one exists."""
if self._crash_dialog is not None:
self._crash_dialog.raise_()
class SignalHandler(QObject):
"""Handler responsible for handling OS signals (SIGINT, SIGTERM, etc.).
Attributes:
_app: The QApplication instance.
_quitter: The Quitter instance.
_activated: Whether activate() was called.
_notifier: A QSocketNotifier used for signals on Unix.
_timer: A QTimer used to poll for signals on Windows.
_orig_handlers: A {signal: handler} dict of original signal handlers.
_orig_wakeup_fd: The original wakeup filedescriptor.
"""
def __init__(self, *, app, quitter, parent=None):
super().__init__(parent)
self._app = app
self._quitter = quitter
self._notifier = None
self._timer = usertypes.Timer(self, 'python_hacks')
self._orig_handlers: MutableMapping[int, 'signal._HANDLER'] = {}
self._activated = False
self._orig_wakeup_fd: Optional[int] = None
def activate(self):
"""Set up signal handlers.
On Windows this uses a QTimer to periodically hand control over to
Python so it can handle signals.
On Unix, it uses a QSocketNotifier with os.set_wakeup_fd to get
notified.
"""
self._orig_handlers[signal.SIGINT] = signal.signal(
signal.SIGINT, self.interrupt)
self._orig_handlers[signal.SIGTERM] = signal.signal(
signal.SIGTERM, self.interrupt)
if utils.is_posix and hasattr(signal, 'set_wakeup_fd'):
# pylint: disable=import-error,no-member,useless-suppression
import fcntl
read_fd, write_fd = os.pipe()
for fd in [read_fd, write_fd]:
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
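            # signal.set_wakeup_fd requires non-blocking fds; Qt then watches
            # the read end and wakes the event loop whenever a signal arrives.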
self._notifier = QSocketNotifier(cast(sip.voidptr, read_fd),
QSocketNotifier.Read,
self)
self._notifier.activated.connect( # type: ignore[attr-defined]
self.handle_signal_wakeup)
self._orig_wakeup_fd = signal.set_wakeup_fd(write_fd)
# pylint: enable=import-error,no-member,useless-suppression
else:
self._timer.start(1000)
self._timer.timeout.connect(lambda: None)
self._activated = True
def deactivate(self):
"""Deactivate all signal handlers."""
if not self._activated:
return
if self._notifier is not None:
assert self._orig_wakeup_fd is not None
self._notifier.setEnabled(False)
rfd = self._notifier.socket()
wfd = signal.set_wakeup_fd(self._orig_wakeup_fd)
os.close(int(rfd))
os.close(wfd)
for sig, handler in self._orig_handlers.items():
signal.signal(sig, handler)
self._timer.stop()
self._activated = False
@pyqtSlot()
def handle_signal_wakeup(self):
"""Handle a newly arrived signal.
This gets called via self._notifier when there's a signal.
Python will get control here, so the signal will get handled.
"""
assert self._notifier is not None
log.destroy.debug("Handling signal wakeup!")
self._notifier.setEnabled(False)
read_fd = self._notifier.socket()
try:
os.read(int(read_fd), 1)
except OSError:
log.destroy.exception("Failed to read wakeup fd.")
self._notifier.setEnabled(True)
def _log_later(self, *lines):
"""Log the given text line-wise with a QTimer."""
for line in lines:
QTimer.singleShot(0, functools.partial(log.destroy.info, line))
def interrupt(self, signum, _frame):
"""Handler for signals to gracefully shutdown (SIGINT/SIGTERM).
This calls shutdown and remaps the signal to call
interrupt_forcefully the next time.
"""
signal.signal(signal.SIGINT, self.interrupt_forcefully)
signal.signal(signal.SIGTERM, self.interrupt_forcefully)
# Signals can arrive anywhere, so we do this in the main thread
self._log_later("SIGINT/SIGTERM received, shutting down!",
"Do the same again to forcefully quit.")
QTimer.singleShot(0, functools.partial(
self._quitter.shutdown, 128 + signum))
def interrupt_forcefully(self, signum, _frame):
"""Interrupt forcefully on the second SIGINT/SIGTERM request.
        This skips our shutdown routine and calls QApplication.exit instead.
It then remaps the signals to call self.interrupt_really_forcefully the
next time.
"""
signal.signal(signal.SIGINT, self.interrupt_really_forcefully)
signal.signal(signal.SIGTERM, self.interrupt_really_forcefully)
# Signals can arrive anywhere, so we do this in the main thread
self._log_later("Forceful quit requested, goodbye cruel world!",
"Do the same again to quit with even more force.")
QTimer.singleShot(0, functools.partial(self._app.exit, 128 + signum))
def interrupt_really_forcefully(self, signum, _frame):
"""Interrupt with even more force on the third SIGINT/SIGTERM request.
This doesn't run *any* Qt cleanup and simply exits via Python.
It will most likely lead to a segfault.
"""
print("WHY ARE YOU DOING THIS TO ME? :(")
sys.exit(128 + signum)
def init(q_app: QApplication,
args: argparse.Namespace,
quitter: 'quitter.Quitter') -> None:
"""Initialize crash/signal handlers."""
global crash_handler
crash_handler = CrashHandler(
app=q_app, quitter=quitter, args=args, parent=q_app)
objreg.register('crash-handler', crash_handler, command_only=True)
crash_handler.activate()
quitter.shutting_down.connect(crash_handler.shutdown)
signal_handler = SignalHandler(app=q_app, quitter=quitter, parent=q_app)
signal_handler.activate()
quitter.shutting_down.connect(signal_handler.deactivate)
|
class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
key_type=None):
""" Raises an exception for TypeErrors
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (list): a list of keys and indices to get to the
current_item
None if unset
valid_classes (tuple): the primitive classes that current item
should be an instance of
None if unset
key_type (bool): False if our value is a value in a dict
True if it is a key in a dict
False if our item is an item in a list
None if unset
"""
self.path_to_item = path_to_item
self.valid_classes = valid_classes
self.key_type = key_type
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiTypeError, self).__init__(full_msg)
class ApiValueError(OpenApiException, ValueError):
def __init__(self, msg, path_to_item=None):
"""
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (list): the path to the exception in the
received_data dict. None if unset
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiValueError, self).__init__(full_msg)
class ApiAttributeError(OpenApiException, AttributeError):
def __init__(self, msg, path_to_item=None):
"""
Raised when an attribute reference or assignment fails.
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (None/list): the path to the exception in the
received_data dict
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiAttributeError, self).__init__(full_msg)
class ApiKeyError(OpenApiException, KeyError):
def __init__(self, msg, path_to_item=None):
"""
Args:
msg (str): the exception message
Keyword Args:
            path_to_item (None/list): the path to the exception in the
received_data dict
"""
self.path_to_item = path_to_item
full_msg = msg
if path_to_item:
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
super(ApiKeyError, self).__init__(full_msg)
class ApiException(OpenApiException):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
for pth in path_to_item:
if isinstance(pth, int):
result += "[{0}]".format(pth)
else:
result += "['{0}']".format(pth)
return result
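
# --- Illustrative sketch (not part of the generated client) ---
# render_path turns a mixed key/index path into bracket notation; handy when
# reading the "... at ..." suffix the exceptions above attach to messages.
if __name__ == "__main__":
    assert render_path(["data", 0, "name"]) == "['data'][0]['name']"
    print(render_path(["data", 0, "name"]))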
|
import asyncio
from datetime import timedelta
import io
import logging
from PIL import Image
import voluptuous as vol
from homeassistant.components.camera import (
PLATFORM_SCHEMA,
Camera,
async_get_image,
async_get_mjpeg_stream,
async_get_still_stream,
)
from homeassistant.const import CONF_ENTITY_ID, CONF_MODE, CONF_NAME
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_CACHE_IMAGES = "cache_images"
CONF_FORCE_RESIZE = "force_resize"
CONF_IMAGE_QUALITY = "image_quality"
CONF_IMAGE_REFRESH_RATE = "image_refresh_rate"
CONF_MAX_IMAGE_WIDTH = "max_image_width"
CONF_MAX_IMAGE_HEIGHT = "max_image_height"
CONF_MAX_STREAM_WIDTH = "max_stream_width"
CONF_MAX_STREAM_HEIGHT = "max_stream_height"
CONF_IMAGE_TOP = "image_top"
CONF_IMAGE_LEFT = "image_left"
CONF_STREAM_QUALITY = "stream_quality"
MODE_RESIZE = "resize"
MODE_CROP = "crop"
DEFAULT_BASENAME = "Camera Proxy"
DEFAULT_QUALITY = 75
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_CACHE_IMAGES, default=False): cv.boolean,
        vol.Optional(CONF_FORCE_RESIZE, default=False): cv.boolean,
vol.Optional(CONF_MODE, default=MODE_RESIZE): vol.In([MODE_RESIZE, MODE_CROP]),
vol.Optional(CONF_IMAGE_QUALITY): int,
vol.Optional(CONF_IMAGE_REFRESH_RATE): float,
vol.Optional(CONF_MAX_IMAGE_WIDTH): int,
vol.Optional(CONF_MAX_IMAGE_HEIGHT): int,
vol.Optional(CONF_MAX_STREAM_WIDTH): int,
vol.Optional(CONF_MAX_STREAM_HEIGHT): int,
vol.Optional(CONF_IMAGE_LEFT): int,
vol.Optional(CONF_IMAGE_TOP): int,
vol.Optional(CONF_STREAM_QUALITY): int,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Proxy camera platform."""
async_add_entities([ProxyCamera(hass, config)])
def _precheck_image(image, opts):
"""Perform some pre-checks on the given image."""
if not opts:
raise ValueError()
try:
img = Image.open(io.BytesIO(image))
except OSError as err:
_LOGGER.warning("Failed to open image")
raise ValueError() from err
imgfmt = str(img.format)
if imgfmt not in ("PNG", "JPEG"):
_LOGGER.warning("Image is of unsupported type: %s", imgfmt)
raise ValueError()
if not img.mode == "RGB":
img = img.convert("RGB")
return img
def _resize_image(image, opts):
"""Resize image."""
try:
img = _precheck_image(image, opts)
except ValueError:
return image
quality = opts.quality or DEFAULT_QUALITY
new_width = opts.max_width
(old_width, old_height) = img.size
old_size = len(image)
if old_width <= new_width:
if opts.quality is None:
_LOGGER.debug("Image is smaller-than/equal-to requested width")
return image
new_width = old_width
scale = new_width / float(old_width)
new_height = int(float(old_height) * float(scale))
img = img.resize((new_width, new_height), Image.ANTIALIAS)
imgbuf = io.BytesIO()
img.save(imgbuf, "JPEG", optimize=True, quality=quality)
newimage = imgbuf.getvalue()
if not opts.force_resize and len(newimage) >= old_size:
_LOGGER.debug(
"Using original image (%d bytes) "
"because resized image (%d bytes) is not smaller",
old_size,
len(newimage),
)
return image
_LOGGER.debug(
"Resized image from (%dx%d - %d bytes) to (%dx%d - %d bytes)",
old_width,
old_height,
old_size,
new_width,
new_height,
len(newimage),
)
return newimage
def _crop_image(image, opts):
"""Crop image."""
try:
img = _precheck_image(image, opts)
except ValueError:
return image
quality = opts.quality or DEFAULT_QUALITY
(old_width, old_height) = img.size
old_size = len(image)
if opts.top is None:
opts.top = 0
if opts.left is None:
opts.left = 0
if opts.max_width is None or opts.max_width > old_width - opts.left:
opts.max_width = old_width - opts.left
if opts.max_height is None or opts.max_height > old_height - opts.top:
opts.max_height = old_height - opts.top
img = img.crop(
(opts.left, opts.top, opts.left + opts.max_width, opts.top + opts.max_height)
)
imgbuf = io.BytesIO()
img.save(imgbuf, "JPEG", optimize=True, quality=quality)
newimage = imgbuf.getvalue()
_LOGGER.debug(
"Cropped image from (%dx%d - %d bytes) to (%dx%d - %d bytes)",
old_width,
old_height,
old_size,
opts.max_width,
opts.max_height,
len(newimage),
)
return newimage
class ImageOpts:
"""The representation of image options."""
def __init__(self, max_width, max_height, left, top, quality, force_resize):
"""Initialize image options."""
self.max_width = max_width
self.max_height = max_height
self.left = left
self.top = top
self.quality = quality
self.force_resize = force_resize
def __bool__(self):
"""Bool evaluation rules."""
return bool(self.max_width or self.quality)
class ProxyCamera(Camera):
"""The representation of a Proxy camera."""
def __init__(self, hass, config):
"""Initialize a proxy camera component."""
super().__init__()
self.hass = hass
self._proxied_camera = config.get(CONF_ENTITY_ID)
self._name = (
config.get(CONF_NAME) or f"{DEFAULT_BASENAME} - {self._proxied_camera}"
)
self._image_opts = ImageOpts(
config.get(CONF_MAX_IMAGE_WIDTH),
config.get(CONF_MAX_IMAGE_HEIGHT),
config.get(CONF_IMAGE_LEFT),
config.get(CONF_IMAGE_TOP),
config.get(CONF_IMAGE_QUALITY),
config.get(CONF_FORCE_RESIZE),
)
self._stream_opts = ImageOpts(
config.get(CONF_MAX_STREAM_WIDTH),
config.get(CONF_MAX_STREAM_HEIGHT),
config.get(CONF_IMAGE_LEFT),
config.get(CONF_IMAGE_TOP),
config.get(CONF_STREAM_QUALITY),
True,
)
self._image_refresh_rate = config.get(CONF_IMAGE_REFRESH_RATE)
self._cache_images = bool(
config.get(CONF_IMAGE_REFRESH_RATE) or config.get(CONF_CACHE_IMAGES)
)
self._last_image_time = dt_util.utc_from_timestamp(0)
self._last_image = None
self._mode = config.get(CONF_MODE)
def camera_image(self):
"""Return camera image."""
return asyncio.run_coroutine_threadsafe(
self.async_camera_image(), self.hass.loop
).result()
async def async_camera_image(self):
"""Return a still image response from the camera."""
now = dt_util.utcnow()
if self._image_refresh_rate and now < self._last_image_time + timedelta(
seconds=self._image_refresh_rate
):
return self._last_image
self._last_image_time = now
image = await async_get_image(self.hass, self._proxied_camera)
if not image:
_LOGGER.error("Error getting original camera image")
return self._last_image
if self._mode == MODE_RESIZE:
job = _resize_image
else:
job = _crop_image
image = await self.hass.async_add_executor_job(
job, image.content, self._image_opts
)
if self._cache_images:
self._last_image = image
return image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from camera images."""
if not self._stream_opts:
return await async_get_mjpeg_stream(
self.hass, request, self._proxied_camera
)
return await async_get_still_stream(
request, self._async_stream_image, self.content_type, self.frame_interval
)
@property
def name(self):
"""Return the name of this camera."""
return self._name
async def _async_stream_image(self):
"""Return a still image response from the camera."""
try:
image = await async_get_image(self.hass, self._proxied_camera)
if not image:
return None
except HomeAssistantError as err:
raise asyncio.CancelledError() from err
if self._mode == MODE_RESIZE:
job = _resize_image
else:
job = _crop_image
return await self.hass.async_add_executor_job(
job, image.content, self._stream_opts
)
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv, entity_registry
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DOMAIN, STATE_CLEANING, STATE_DOCKED, STATE_RETURNING
CONDITION_TYPES = {"is_cleaning", "is_docked"}
CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(CONDITION_TYPES),
}
)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions for Vacuum devices."""
registry = await entity_registry.async_get_registry(hass)
conditions = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_cleaning",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_docked",
}
)
return conditions
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config_validation:
config = CONDITION_SCHEMA(config)
if config[CONF_TYPE] == "is_docked":
test_states = [STATE_DOCKED]
else:
test_states = [STATE_CLEANING, STATE_RETURNING]
def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
state = hass.states.get(config[ATTR_ENTITY_ID])
return state is not None and state.state in test_states
return test_is_state
|
import errno
import glob
import os
import shutil
import click
import sh
from molecule import config
from molecule import logger
from molecule import migrate
from molecule import scenario
from molecule import util
LOG = logger.get_logger(__name__)
class Convert(object):
    def __init__(self, old_molecule_file, driver_name):
        self._old_molecule_file = old_molecule_file
        self._driver_name = driver_name
if not os.path.isfile(old_molecule_file):
msg = 'Unable to find {}. Exiting.'.format(old_molecule_file)
util.sysexit_with_message(msg)
self._m = migrate.Migrate(old_molecule_file)
        self._old_role_dir = os.path.dirname(old_molecule_file)
self._old_dot_molecule_dir = scenario.ephemeral_directory(
self._old_role_dir)
self._old_test_dir = os.path.join(self._old_role_dir, 'tests')
self._old_playbook = os.path.join(self._old_role_dir, 'playbook.yml')
self._molecule_dir = config.molecule_directory(self._old_role_dir)
self._scenario_dir = os.path.join(self._molecule_dir, 'default')
self._test_dir = os.path.join(self._scenario_dir, 'tests')
self._molecule_file = config.molecule_file(self._scenario_dir)
self._role_name = os.path.basename(
os.path.normpath(self._old_role_dir))
def migrate(self):
self._create_scenario()
self._create_new_molecule_file()
self._copy_old_files()
self._cleanup_old_files()
def _create_scenario(self):
options = {
'role_name': self._role_name,
'scenario_name': 'default',
            'driver_name': self._driver_name,
}
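        # sh.molecule.bake pre-binds the arguments; run_command below executes
        # `molecule init scenario ...` inside the old role directory.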
cmd = sh.molecule.bake(
'init', 'scenario', _cwd=self._old_role_dir, **options)
util.run_command(cmd)
def _create_new_molecule_file(self):
with open(self._molecule_file, 'w') as stream:
msg = 'Writing molecule.yml to {}'.format(self._molecule_file)
LOG.info(msg)
stream.write(self._m.dump())
def _copy_old_files(self):
for f in glob.glob(r'{}/test_*.py'.format(self._old_test_dir)):
msg = 'Copying {} to {}'.format(f, self._test_dir)
LOG.info(msg)
shutil.copy(f, self._test_dir)
if os.path.isfile(self._old_playbook):
msg = 'Copying {} to {}'.format(self._old_playbook,
self._scenario_dir)
LOG.info(msg)
shutil.copy(self._old_playbook, self._scenario_dir)
def _cleanup_old_files(self):
files = [
self._old_dot_molecule_dir,
self._old_molecule_file,
self._old_playbook,
]
for f in files:
if os.path.exists(f):
msg = 'Deleting {}'.format(f)
                LOG.warning(msg)
try:
shutil.rmtree(f)
except OSError as exc:
if exc.errno == errno.ENOTDIR:
os.remove(f)
else:
raise
@click.command()
@click.argument('old_molecule_file', required=True)
@click.option(
'--driver-name',
'-d',
type=click.Choice(['vagrant']),
default='vagrant',
help='Name of driver to migrate. (vagrant)')
def main(old_molecule_file, driver_name): # pragma: no cover
""" Migrate a Molecule v1 role to the v2 format. """
c = Convert(old_molecule_file, driver_name)
c.migrate()
if __name__ == '__main__':
main()
|
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from . import edge_detect, read_input, setup_input, setup_mode
from .const import CONF_INVERT_LOGIC, CONF_PIN_MODE, CONF_PORTS, PORT_SCHEMA
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(PORT_SCHEMA)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Orange Pi GPIO platform."""
binary_sensors = []
invert_logic = config[CONF_INVERT_LOGIC]
pin_mode = config[CONF_PIN_MODE]
ports = config[CONF_PORTS]
setup_mode(pin_mode)
for port_num, port_name in ports.items():
binary_sensors.append(
OPiGPIOBinarySensor(hass, port_name, port_num, invert_logic)
)
async_add_entities(binary_sensors)
class OPiGPIOBinarySensor(BinarySensorEntity):
"""Represent a binary sensor that uses Orange Pi GPIO."""
def __init__(self, hass, name, port, invert_logic):
"""Initialize the Orange Pi binary sensor."""
self._name = name
self._port = port
self._invert_logic = invert_logic
self._state = None
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
def gpio_edge_listener(port):
"""Update GPIO when edge change is detected."""
self.schedule_update_ha_state(True)
def setup_entity():
setup_input(self._port)
edge_detect(self._port, gpio_edge_listener)
self.schedule_update_ha_state(True)
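        # GPIO setup and edge registration are blocking calls, so run them in
        # the executor instead of the event loop.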
await self.hass.async_add_executor_job(setup_entity)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state != self._invert_logic
def update(self):
"""Update state with new GPIO data."""
self._state = read_input(self._port)
|
import mock
from scrapy.settings import Settings
from scrapy_redis import defaults
from scrapy_redis.connection import (
from_settings,
get_redis,
get_redis_from_settings,
)
class TestGetRedis(object):
def test_default_instance(self):
server = get_redis()
assert isinstance(server, defaults.REDIS_CLS)
def test_custom_class(self):
client_cls = mock.Mock()
server = get_redis(param='foo', redis_cls=client_cls)
assert server is client_cls.return_value
client_cls.assert_called_with(param='foo')
def test_from_url(self):
client_cls = mock.Mock()
url = 'redis://localhost'
server = get_redis(redis_cls=client_cls, url=url, param='foo')
assert server is client_cls.from_url.return_value
client_cls.from_url.assert_called_with(url, param='foo')
class TestFromSettings(object):
def setup(self):
self.redis_cls = mock.Mock()
self.expected_params = {
'timeout': 0,
'flag': False,
}
self.settings = Settings({
'REDIS_PARAMS': dict(self.expected_params, redis_cls=self.redis_cls),
})
def test_redis_cls_default(self):
server = from_settings(Settings())
assert isinstance(server, defaults.REDIS_CLS)
def test_redis_cls_custom_path(self):
self.settings['REDIS_PARAMS']['redis_cls'] = 'mock.Mock'
server = from_settings(self.settings)
assert isinstance(server, mock.Mock)
def test_default_params(self):
server = from_settings(self.settings)
assert server is self.redis_cls.return_value
self.redis_cls.assert_called_with(**dict(defaults.REDIS_PARAMS, **self.expected_params))
def test_override_default_params(self):
for key, val in defaults.REDIS_PARAMS.items():
self.expected_params[key] = self.settings['REDIS_PARAMS'][key] = object()
server = from_settings(self.settings)
assert server is self.redis_cls.return_value
self.redis_cls.assert_called_with(**self.expected_params)
def test_get_server_from_settings_alias():
assert from_settings is get_redis_from_settings
|
import sys
from lark import Lark, Transformer, v_args
json_grammar = r"""
?start: value
?value: object
| array
| string
| SIGNED_NUMBER -> number
| "true" -> true
| "false" -> false
| "null" -> null
array : "[" [value ("," value)*] "]"
object : "{" [pair ("," pair)*] "}"
pair : string ":" value
string : ESCAPED_STRING
%import common.ESCAPED_STRING
%import common.SIGNED_NUMBER
%import common.WS
%ignore WS
"""
class TreeToJson(Transformer):
@v_args(inline=True)
def string(self, s):
return s[1:-1].replace('\\"', '"')
array = list
pair = tuple
object = dict
number = v_args(inline=True)(float)
null = lambda self, _: None
true = lambda self, _: True
false = lambda self, _: False
### Create the JSON parser with Lark, using the LALR algorithm
json_parser = Lark(json_grammar, parser='lalr',
# Using the standard lexer isn't required, and isn't usually recommended.
# But, it's good enough for JSON, and it's slightly faster.
lexer='standard',
# Disabling propagate_positions and placeholders slightly improves speed
propagate_positions=False,
maybe_placeholders=False,
# Using an internal transformer is faster and more memory efficient
transformer=TreeToJson())
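
# --- Usage sketch (assumed entry point; the `import sys` above suggests the
# original example continued by parsing a file given on the command line) ---
if __name__ == "__main__":
    sample = '{"items": [1, 2.5, null], "ok": true}'
    # With transformer=TreeToJson() the parse result is already plain Python:
    # {'items': [1.0, 2.5, None], 'ok': True}
    print(json_parser.parse(sample))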
|
from datetime import timedelta
import functools as ft
import logging
import voluptuous as vol
from homeassistant.const import (
ATTR_CODE,
ATTR_CODE_FORMAT,
SERVICE_LOCK,
SERVICE_OPEN,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_UNLOCKED,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
make_entity_service_schema,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
ATTR_CHANGED_BY = "changed_by"
DOMAIN = "lock"
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
LOCK_SERVICE_SCHEMA = make_entity_service_schema({vol.Optional(ATTR_CODE): cv.string})
# Bitfield of features supported by the lock entity
SUPPORT_OPEN = 1
PROP_TO_ATTR = {"changed_by": ATTR_CHANGED_BY, "code_format": ATTR_CODE_FORMAT}
async def async_setup(hass, config):
"""Track states and offer events for locks."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)
component.async_register_entity_service(
SERVICE_UNLOCK, LOCK_SERVICE_SCHEMA, "async_unlock"
)
component.async_register_entity_service(
SERVICE_LOCK, LOCK_SERVICE_SCHEMA, "async_lock"
)
component.async_register_entity_service(
SERVICE_OPEN, LOCK_SERVICE_SCHEMA, "async_open"
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class LockEntity(Entity):
"""Representation of a lock."""
@property
def changed_by(self):
"""Last change triggered by."""
return None
@property
def code_format(self):
"""Regex for code format or None if no code is required."""
return None
@property
def is_locked(self):
"""Return true if the lock is locked."""
return None
def lock(self, **kwargs):
"""Lock the lock."""
raise NotImplementedError()
async def async_lock(self, **kwargs):
"""Lock the lock."""
await self.hass.async_add_executor_job(ft.partial(self.lock, **kwargs))
def unlock(self, **kwargs):
"""Unlock the lock."""
raise NotImplementedError()
async def async_unlock(self, **kwargs):
"""Unlock the lock."""
await self.hass.async_add_executor_job(ft.partial(self.unlock, **kwargs))
def open(self, **kwargs):
"""Open the door latch."""
raise NotImplementedError()
async def async_open(self, **kwargs):
"""Open the door latch."""
await self.hass.async_add_executor_job(ft.partial(self.open, **kwargs))
@property
def state_attributes(self):
"""Return the state attributes."""
state_attr = {}
for prop, attr in PROP_TO_ATTR.items():
value = getattr(self, prop)
if value is not None:
state_attr[attr] = value
return state_attr
@property
def state(self):
"""Return the state."""
locked = self.is_locked
if locked is None:
return None
return STATE_LOCKED if locked else STATE_UNLOCKED
class LockDevice(LockEntity):
"""Representation of a lock (for backwards compatibility)."""
def __init_subclass__(cls, **kwargs):
"""Print deprecation warning."""
super().__init_subclass__(**kwargs)
_LOGGER.warning(
"LockDevice is deprecated, modify %s to extend LockEntity",
cls.__name__,
)
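# Minimal illustrative subclass (hypothetical DemoLock, not part of this
# module): an integration only needs to implement is_locked plus lock and
# unlock; state, state_attributes and the async_* wrappers are inherited
# from LockEntity.
class DemoLock(LockEntity):
    """In-memory lock used purely as a usage sketch."""

    def __init__(self):
        """Start out locked."""
        self._locked = True

    @property
    def is_locked(self):
        """Return true if the lock is locked."""
        return self._locked

    def lock(self, **kwargs):
        """Lock the lock."""
        self._locked = True

    def unlock(self, **kwargs):
        """Unlock the lock."""
        self._locked = False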
|
import pytest
from lemur.tests.vectors import INTERNAL_PRIVATE_KEY_A_STR, INTERNAL_CERTIFICATE_A_STR
def test_export_certificate_to_pkcs12(app):
from lemur.plugins.base import plugins
p = plugins.get("openssl-export")
options = [
{"name": "passphrase", "value": "test1234"},
{"name": "type", "value": "PKCS12 (.p12)"},
]
with pytest.raises(Exception):
p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
raw = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options)
assert raw != b""
|
import re
from homeassistant.components.aurora import binary_sensor as aurora
from tests.common import load_fixture
def test_setup_and_initial_state(hass, requests_mock):
"""Test that the component is created and initialized as expected."""
uri = re.compile(r"http://services\.swpc\.noaa\.gov/text/aurora-nowcast-map\.txt")
requests_mock.get(uri, text=load_fixture("aurora.txt"))
entities = []
def mock_add_entities(new_entities, update_before_add=False):
"""Mock add entities."""
if update_before_add:
for entity in new_entities:
entity.update()
for entity in new_entities:
entities.append(entity)
config = {"name": "Test", "forecast_threshold": 75}
aurora.setup_platform(hass, config, mock_add_entities)
aurora_component = entities[0]
assert len(entities) == 1
assert aurora_component.name == "Test"
assert aurora_component.device_state_attributes["visibility_level"] == "0"
assert aurora_component.device_state_attributes["message"] == "nothing's out"
assert not aurora_component.is_on
def test_custom_threshold_works(hass, requests_mock):
"""Test that the config can take a custom forecast threshold."""
uri = re.compile(r"http://services\.swpc\.noaa\.gov/text/aurora-nowcast-map\.txt")
requests_mock.get(uri, text=load_fixture("aurora.txt"))
entities = []
def mock_add_entities(new_entities, update_before_add=False):
"""Mock add entities."""
if update_before_add:
for entity in new_entities:
entity.update()
for entity in new_entities:
entities.append(entity)
config = {"name": "Test", "forecast_threshold": 1}
hass.config.longitude = 18.987
hass.config.latitude = 69.648
aurora.setup_platform(hass, config, mock_add_entities)
aurora_component = entities[0]
assert aurora_component.aurora_data.visibility_level == "16"
assert aurora_component.is_on
|
from weblate.checks.markup import (
BBCodeCheck,
MarkdownLinkCheck,
MarkdownRefLinkCheck,
MarkdownSyntaxCheck,
SafeHTMLCheck,
URLCheck,
XMLTagsCheck,
XMLValidityCheck,
)
from weblate.checks.tests.test_checks import CheckTestCase
from weblate.trans.models import Unit
class BBCodeCheckTest(CheckTestCase):
check = BBCodeCheck()
def setUp(self):
super().setUp()
self.test_good_matching = ("[a]string[/a]", "[a]string[/a]", "")
self.test_failure_1 = ("[a]string[/a]", "[b]string[/b]", "")
self.test_failure_2 = ("[a]string[/a]", "string", "")
self.test_highlight = ("", "[a]string[/a]", [(0, 3, "[a]"), (9, 13, "[/a]")])
class XMLValidityCheckTest(CheckTestCase):
check = XMLValidityCheck()
def setUp(self):
super().setUp()
self.test_good_matching = ("<a>string</a>", "<a>string</a>", "xml-text")
self.test_good_none = ("string", "string", "")
self.test_good_ignore = ("<http://weblate.org/>", "<http://weblate.org/>", "")
self.test_failure_1 = ("<a>string</a>", "<a>string</b>", "xml-text")
self.test_failure_2 = ("<a>string</a>", "<a>string", "")
self.test_failure_3 = ("<a>string</a>", "<b>string</a>", "xml-text")
def test_unicode(self):
self.do_test(False, ("<a>zkouška</a>", "<a>zkouška</a>", ""))
def test_not_well_formed(self):
self.do_test(
True, ("<emphasis>1st</emphasis>", "<emphasis>not</ emphasis>", "")
)
self.do_test(
True, ("<emphasis>2nd</emphasis>", "<emphasis>not< /emphasis>", "")
)
def test_root(self):
self.do_test(
False,
(
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
"",
),
)
self.do_test(
True,
(
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
'<?xml version="1.0" encoding="UTF-8"?><b>test',
"",
),
)
def test_html(self):
self.do_test(False, ("This is<br>valid HTML", "Toto je<br>platne HTML", ""))
class XMLTagsCheckTest(CheckTestCase):
check = XMLTagsCheck()
def setUp(self):
super().setUp()
self.test_good_matching = ("<a>string</a>", "<a>string</a>", "")
self.test_failure_1 = ("<a>string</a>", "<b>string</b>", "")
self.test_failure_2 = ("<a>string</a>", "string", "")
self.test_highlight = (
"",
'<b><a href="foo<">bar©</a></b>',
[
(0, 3, "<b>"),
(3, 21, '<a href="foo<">'),
(30, 34, "</a>"),
(34, 38, "</b>"),
(24, 30, "©"),
],
)
def test_unicode(self):
self.do_test(False, ("<a>zkouška</a>", "<a>zkouška</a>", ""))
def test_attributes(self):
self.do_test(False, ('<a href="#">a</a>', '<a href="other">z</a>', ""))
self.do_test(
True, ('<a href="#">a</a>', '<a href="#" onclick="alert()">z</a>', "")
)
def test_root(self):
self.do_test(
False,
(
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
"",
),
)
self.do_test(
True,
(
'<?xml version="1.0" encoding="UTF-8"?><b>test</b>',
'<?xml version="1.0" encoding="UTF-8"?><a>test</a>',
"",
),
)
class MarkdownRefLinkCheckTest(CheckTestCase):
check = MarkdownRefLinkCheck()
def setUp(self):
super().setUp()
self.test_good_matching = ("[a][a1]", "[b][a1]", "md-text")
self.test_good_none = ("string", "string", "md-text")
self.test_good_flag = ("[a][a1]", "[b][a2]", "")
self.test_failure_1 = ("[a][a1]", "[b][a2]", "md-text")
class MarkdownLinkCheckTest(CheckTestCase):
check = MarkdownLinkCheck()
def setUp(self):
super().setUp()
self.test_good_matching = (
"[Use Weblate](https://weblate.org/)",
"[Použij Weblate](https://weblate.org/)",
"md-text",
)
self.test_good_none = ("string", "string", "md-text")
self.test_failure_1 = (
"[Use Weblate](https://weblate.org/)",
"[Použij Weblate]",
"md-text",
)
self.test_failure_2 = (
"[Use Weblate](https://weblate.org/)",
"[Použij Weblate] (https://weblate.org/)",
"md-text",
)
self.test_failure_3 = (
"[Use Weblate](../demo/)",
"[Použij Weblate](https://example.com/)",
"md-text",
)
def test_template(self):
self.do_test(
False,
(
"[translate]({{ site.baseurl }}/docs/Translation/) here",
"Die [übersetzen]({{ site.baseurl }}/docs/Translation/)",
"md-text",
),
)
def test_spacing(self):
self.do_test(
True,
(
"[My Home Page](http://example.com)",
"[Moje stránka] (http://example.com)",
"md-text",
),
)
def test_fixup(self):
unit = Unit(
source="[My Home Page](http://example.com)",
target="[Moje stránka] (http://example.com)",
)
self.assertEqual(self.check.get_fixup(unit), [(r"\] +\(", "](")])
unit = Unit(
source="[My Home Page](http://example.com)",
target="[Moje stránka]",
)
self.assertEqual(self.check.get_fixup(unit), None)
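    # The single fixup above replaces the pattern "\] +\(" with "](", i.e.
    # it removes the stray space in "[Moje stránka] (http://example.com)";
    # when the translation contains no link at all, no fixup is possible.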
class MarkdownLinkCheckMultipleOrderIndependentLinksTest(CheckTestCase):
check = MarkdownLinkCheck()
def setUp(self):
super().setUp()
self.test_good_matching = (
"[Weblate](#weblate) has an [example]({{example}}) "
"for illustrating the useage of [Weblate](#weblate)",
"Ein [Beispiel]({{example}}) in [Webspät](#weblate) "
"illustriert die Verwendung von [Webspät](#weblate)",
"md-text",
)
self.test_failure_1 = (
"[Weblate](#weblate) has an [example]({{example}}) "
"for illustrating the useage of [Weblate](#weblate)",
"Ein [Beispiel]({{example}}) in [Webspät](#weblate) "
"illustriert die Verwendung von [Webspät](#Webspät)",
"md-text",
)
self.test_failure_2 = (
"[Weblate](#weblate) has an [example]({{example}}) "
"for illustrating the useage of [Weblate](#weblate)",
"Ein [Beispiel]({{example}}) in [Webspät](#weblate) "
"illustriert die Verwendung von Webspät",
"md-text",
)
class MarkdownSyntaxCheckTest(CheckTestCase):
check = MarkdownSyntaxCheck()
def setUp(self):
super().setUp()
self.test_good_matching = ("**string**", "**string**", "md-text")
self.test_good_none = ("string", "string", "md-text")
self.test_good_flag = ("**string**", "string", "")
self.test_failure_1 = ("**string**", "*string*", "md-text")
self.test_failure_2 = ("~~string~~", "*string*", "md-text")
self.test_failure_3 = ("_string_", "*string*", "md-text")
self.test_highlight = (
"md-text",
"**string** ~~strike~~ `code`",
[
(0, 2, "**"),
(8, 10, "**"),
(11, 13, "~~"),
(19, 21, "~~"),
(22, 23, "`"),
(27, 28, "`"),
],
)
class URLCheckTest(CheckTestCase):
check = URLCheck()
def setUp(self):
super().setUp()
url = "https://weblate.org/"
self.test_good_matching = (url, url, "url")
self.test_good_none = (url, url, "url")
self.test_good_flag = ("string", "string", "")
self.test_failure_1 = (url, "https:weblate.org/", "url")
self.test_failure_2 = (url, "weblate.org/", "url")
self.test_failure_3 = (url, "weblate", "url")
class SafeHTMLCheckTest(CheckTestCase):
check = SafeHTMLCheck()
def setUp(self):
super().setUp()
safe = '<a href="https://weblate.org/">link</a>'
self.test_good_matching = (safe, safe, "safe-html")
self.test_good_none = ("string", "string", "safe-html")
self.test_good_flag = ("string", "string", "")
self.test_failure_1 = (safe, '<a href="javascript:foo()">link</a>', "safe-html")
self.test_failure_2 = (safe, '<a href="#" onclick="x()">link</a>', "safe-html")
self.test_failure_3 = (safe, '<iframe src="xxx"></iframe>', "safe-html")
|
from homeassistant.components import sensor
from homeassistant.const import (
CONCENTRATION_PARTS_PER_MILLION,
DEGREE,
ELECTRICAL_CURRENT_AMPERE,
ENERGY_KILO_WATT_HOUR,
LIGHT_LUX,
PERCENTAGE,
POWER_WATT,
VOLT,
)
from .entity import (
BlockAttributeDescription,
ShellyBlockAttributeEntity,
async_setup_entry_attribute_entities,
temperature_unit,
)
SENSORS = {
("device", "battery"): BlockAttributeDescription(
name="Battery", unit=PERCENTAGE, device_class=sensor.DEVICE_CLASS_BATTERY
),
("device", "deviceTemp"): BlockAttributeDescription(
name="Device Temperature",
unit=temperature_unit,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_TEMPERATURE,
default_enabled=False,
),
("emeter", "current"): BlockAttributeDescription(
name="Current",
unit=ELECTRICAL_CURRENT_AMPERE,
value=lambda value: value,
device_class=sensor.DEVICE_CLASS_CURRENT,
),
("light", "power"): BlockAttributeDescription(
name="Power",
unit=POWER_WATT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_POWER,
default_enabled=False,
),
("device", "power"): BlockAttributeDescription(
name="Power",
unit=POWER_WATT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_POWER,
),
("emeter", "power"): BlockAttributeDescription(
name="Power",
unit=POWER_WATT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_POWER,
),
("emeter", "voltage"): BlockAttributeDescription(
name="Voltage",
unit=VOLT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_VOLTAGE,
),
("emeter", "powerFactor"): BlockAttributeDescription(
name="Power Factor",
unit=PERCENTAGE,
value=lambda value: round(value * 100, 1),
device_class=sensor.DEVICE_CLASS_POWER_FACTOR,
),
("relay", "power"): BlockAttributeDescription(
name="Power",
unit=POWER_WATT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_POWER,
),
("roller", "rollerPower"): BlockAttributeDescription(
name="Power",
unit=POWER_WATT,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_POWER,
),
("device", "energy"): BlockAttributeDescription(
name="Energy",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 60 / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
),
("emeter", "energy"): BlockAttributeDescription(
name="Energy",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
),
("emeter", "energyReturned"): BlockAttributeDescription(
name="Energy Returned",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
),
("light", "energy"): BlockAttributeDescription(
name="Energy",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 60 / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
default_enabled=False,
),
("relay", "energy"): BlockAttributeDescription(
name="Energy",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 60 / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
),
("roller", "rollerEnergy"): BlockAttributeDescription(
name="Energy",
unit=ENERGY_KILO_WATT_HOUR,
value=lambda value: round(value / 60 / 1000, 2),
device_class=sensor.DEVICE_CLASS_ENERGY,
),
("sensor", "concentration"): BlockAttributeDescription(
name="Gas Concentration",
unit=CONCENTRATION_PARTS_PER_MILLION,
value=lambda value: value,
# "sensorOp" is "normal" when the Shelly Gas is working properly and taking measurements.
available=lambda block: block.sensorOp == "normal",
),
("sensor", "extTemp"): BlockAttributeDescription(
name="Temperature",
unit=temperature_unit,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_TEMPERATURE,
),
("sensor", "humidity"): BlockAttributeDescription(
name="Humidity",
unit=PERCENTAGE,
value=lambda value: round(value, 1),
device_class=sensor.DEVICE_CLASS_HUMIDITY,
),
("sensor", "luminosity"): BlockAttributeDescription(
name="Luminosity",
unit=LIGHT_LUX,
device_class=sensor.DEVICE_CLASS_ILLUMINANCE,
),
("sensor", "tilt"): BlockAttributeDescription(name="tilt", unit=DEGREE),
}
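# For example (illustrative numbers): relay, light and roller blocks report
# energy as a watt-minute counter, so value=lambda value: round(value / 60
# / 1000, 2) maps a raw reading of 123456 Wmin to about 2.06 kWh, while
# emeter blocks already count watt-hours and only divide by 1000.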
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up sensors for device."""
await async_setup_entry_attribute_entities(
hass, config_entry, async_add_entities, SENSORS, ShellySensor
)
class ShellySensor(ShellyBlockAttributeEntity):
"""Represent a shelly sensor."""
@property
def state(self):
"""Return value of sensor."""
return self.attribute_value
|
from __future__ import absolute_import, division, print_function
import numpy.random as random
from numpy.random import randn
import numpy as np
import matplotlib.pyplot as plt
from pytest import approx
from filterpy.kalman import KalmanFilter, update, predict, batch_filter
from filterpy.common import Q_discrete_white_noise, kinematic_kf, Saver
from scipy.linalg import block_diag, norm
from scipy.spatial.distance import mahalanobis as scipy_mahalanobis
DO_PLOT = False
class PosSensor1(object):
def __init__(self, pos=(0, 0), vel=(0, 0), noise_std=1.):
self.vel = vel
self.noise_std = noise_std
self.pos = [pos[0], pos[1]]
def read(self):
self.pos[0] += self.vel[0]
self.pos[1] += self.vel[1]
return [self.pos[0] + randn() * self.noise_std,
self.pos[1] + randn() * self.noise_std]
def const_vel_filter(dt, x0=0, x_ndim=1, P_diag=(1., 1.), R_std=1.,
Q_var=0.0001):
""" helper, constructs 1d, constant velocity filter"""
f = KalmanFilter(dim_x=2, dim_z=1)
if x_ndim == 1:
f.x = np.array([x0, 0.])
else:
f.x = np.array([[x0, 0.]]).T
f.F = np.array([[1., dt],
[0., 1.]])
f.H = np.array([[1., 0.]])
f.P = np.diag(P_diag)
f.R = np.eye(1) * (R_std**2)
f.Q = Q_discrete_white_noise(2, dt, Q_var)
return f
def const_vel_filter_2d(dt, x_ndim=1, P_diag=(1., 1, 1, 1), R_std=1.,
Q_var=0.0001):
""" helper, constructs 1d, constant velocity filter"""
kf = KalmanFilter(dim_x=4, dim_z=2)
kf.x = np.array([[0., 0., 0., 0.]]).T
kf.P *= np.diag(P_diag)
kf.F = np.array([[1., dt, 0., 0.],
[0., 1., 0., 0.],
[0., 0., 1., dt],
[0., 0., 0., 1.]])
kf.H = np.array([[1., 0, 0, 0],
[0., 0, 1, 0]])
kf.R *= np.eye(2) * (R_std**2)
q = Q_discrete_white_noise(dim=2, dt=dt, var=Q_var)
kf.Q = block_diag(q, q)
return kf
def test_noisy_1d():
f = KalmanFilter(dim_x=2, dim_z=1)
f.x = np.array([[2.],
[0.]]) # initial state (location and velocity)
f.F = np.array([[1., 1.],
[0., 1.]]) # state transition matrix
f.H = np.array([[1., 0.]]) # Measurement function
f.P *= 1000. # covariance matrix
f.R = 5 # state uncertainty
f.Q = 0.0001 # process uncertainty
measurements = []
results = []
zs = []
for t in range(100):
# create measurement = t plus white noise
z = t + random.randn()*20
zs.append(z)
# perform kalman filtering
f.update(z)
f.predict()
# save data
results.append(f.x[0, 0])
measurements.append(z)
# test mahalanobis
a = np.zeros(f.y.shape)
maha = scipy_mahalanobis(a, f.y, f.SI)
assert f.mahalanobis == approx(maha)
# now do a batch run with the stored z values so we can test that
# it is working the same as the recursive implementation.
# give slightly different P so result is slightly different
f.x = np.array([[2., 0]]).T
f.P = np.eye(2) * 100.
s = Saver(f)
m, c, _, _ = f.batch_filter(zs, update_first=False, saver=s)
s.to_array()
assert len(s.x) == len(zs)
assert len(s.x) == len(s)
# plot data
if DO_PLOT:
p1, = plt.plot(measurements, 'r', alpha=0.5)
p2, = plt.plot(results, 'b')
p4, = plt.plot(m[:, 0], 'm')
p3, = plt.plot([0, 100], [0, 100], 'g') # perfect result
plt.legend([p1, p2, p3, p4],
["noisy measurement", "KF output", "ideal", "batch"], loc=4)
plt.show()
def test_1d_vel():
from scipy.linalg import inv
from numpy import dot
global ks
dt = 1.
std_z = 0.0001
x = np.array([[0.], [0.]])
F = np.array([[1., dt],
[0., 1.]])
H = np.array([[1., 0.]])
P = np.eye(2)
R = np.eye(1)*std_z**2
Q = np.eye(2)*0.001
measurements = []
xest = []
ks = []
pos = 0.
for t in range(20):
z = pos + random.randn() * std_z
pos += 100
# perform kalman filtering
x = dot(F, x)
P = dot(dot(F, P), F.T) + Q
P2 = P.copy()
P2[0, 1] = 0 # force there to be no correlation
P2[1, 0] = 0
S = dot(dot(H, P2), H.T) + R
K = dot(dot(P2, H.T), inv(S))
y = z - dot(H, x)
x = x + dot(K, y)
# save data
xest.append(x.copy())
measurements.append(z)
ks.append(K.copy())
xest = np.array(xest)
ks = np.array(ks)
# plot data
if DO_PLOT:
plt.subplot(121)
plt.plot(xest[:, 1])
plt.subplot(122)
plt.plot(ks[:, 1])
plt.show()
def test_noisy_11d():
f = KalmanFilter(dim_x=2, dim_z=1)
f.x = np.array([2., 0]) # initial state (location and velocity)
f.F = np.array([[1., 1.],
[0., 1.]]) # state transition matrix
f.H = np.array([[1., 0.]]) # Measurement function
f.P *= 1000. # covariance matrix
f.R = 5 # state uncertainty
f.Q = 0.0001 # process uncertainty
measurements = []
results = []
zs = []
for t in range(100):
# create measurement = t plus white noise
z = t + random.randn()*20
zs.append(z)
# perform kalman filtering
f.update(z)
f.predict()
# save data
results.append(f.x[0])
measurements.append(z)
# test mahalanobis
a = np.zeros(f.y.shape)
maha = scipy_mahalanobis(a, f.y, f.SI)
assert f.mahalanobis == approx(maha)
# now do a batch run with the stored z values so we can test that
# it is working the same as the recursive implementation.
# give slightly different P so result is slightly different
f.x = np.array([[2., 0]]).T
f.P = np.eye(2) * 100.
m, c, _, _ = f.batch_filter(zs, update_first=False)
# plot data
if DO_PLOT:
p1, = plt.plot(measurements, 'r', alpha=0.5)
p2, = plt.plot(results, 'b')
p4, = plt.plot(m[:, 0], 'm')
p3, = plt.plot([0, 100], [0, 100], 'g') # perfect result
plt.legend([p1, p2, p3, p4],
["noisy measurement", "KF output", "ideal", "batch"], loc=4)
plt.show()
def test_batch_filter():
f = KalmanFilter(dim_x=2, dim_z=1)
f.x = np.array([2., 0]) # initial state (location and velocity)
f.F = np.array([[1., 1.],
[0., 1.]]) # state transition matrix
f.H = np.array([[1., 0.]]) # Measurement function
f.P *= 1000. # covariance matrix
f.R = 5 # state uncertainty
f.Q = 0.0001 # process uncertainty
zs = [None, 1., 2.]
m, c, _, _ = f.batch_filter(zs, update_first=False)
m, c, _, _ = f.batch_filter(zs, update_first=True)
def test_univariate():
f = KalmanFilter(dim_x=1, dim_z=1, dim_u=1)
f.x = np.array([[0]])
f.P *= 50
f.H = np.array([[1.]])
f.F = np.array([[1.]])
f.B = np.array([[1.]])
f.Q = .02
f.R *= .1
for i in range(50):
f.predict()
f.update(i)
def test_procedure_form():
dt = 1.
std_z = 10.1
x = np.array([[0.], [0.]])
F = np.array([[1., dt], [0., 1.]])
H = np.array([[1., 0.]])
P = np.eye(2)
R = np.eye(1)*std_z**2
Q = Q_discrete_white_noise(2, dt, 5.1)
kf = KalmanFilter(2, 1)
kf.x = x.copy()
kf.F = F.copy()
kf.H = H.copy()
kf.P = P.copy()
kf.R = R.copy()
kf.Q = Q.copy()
measurements = []
xest = []
pos = 0.
for t in range(2000):
z = pos + random.randn() * std_z
pos += 100
# perform kalman filtering
x, P = predict(x, P, F, Q)
kf.predict()
assert norm(x - kf.x) < 1.e-12
x, P, _, _, _, _ = update(x, P, z, R, H, True)
kf.update(z)
assert norm(x - kf.x) < 1.e-12
# save data
xest.append(x.copy())
measurements.append(z)
xest = np.asarray(xest)
measurements = np.asarray(measurements)
# plot data
if DO_PLOT:
plt.plot(xest[:, 0])
plt.plot(xest[:, 1])
plt.plot(measurements)
def test_steadystate():
dim = 7
cv = kinematic_kf(dim=dim, order=5)
print(cv)
cv.x[1] = 1.0
for i in range(100):
cv.predict()
cv.update([i])
for i in range(100):
cv.predict_steadystate()
cv.update_steadystate([i])
# test mahalanobis
a = np.zeros(cv.y.shape)
maha = scipy_mahalanobis(a, cv.y, cv.SI)
assert cv.mahalanobis == approx(maha)
def test_procedural_batch_filter():
f = KalmanFilter(dim_x=2, dim_z=1)
f.x = np.array([2., 0])
f.F = np.array([[1., 1.],
[0., 1.]])
f.H = np.array([[1., 0.]])
f.P = np.eye(2) * 1000.
f.R = np.eye(1) * 5
f.Q = Q_discrete_white_noise(2, 1., 0.0001)
f.test_matrix_dimensions()
x = np.array([2., 0])
F = np.array([[1., 1.],
[0., 1.]])
H = np.array([[1., 0.]])
P = np.eye(2) * 1000.
R = np.eye(1) * 5
Q = Q_discrete_white_noise(2, 1., 0.0001)
zs = [13., None, 1., 2.] * 10
m, c, _, _ = f.batch_filter(zs, update_first=False)
n = len(zs)
mp, cp, _, _ = batch_filter(x, P, zs, [F]*n, [Q]*n, [H]*n, [R]*n)
for x1, x2 in zip(m, mp):
assert np.allclose(x1, x2)
for p1, p2 in zip(c, cp):
assert np.allclose(p1, p2)
def proc_form():
""" This is for me to run against the class_form() function to see which,
if either, runs faster. They within a few ms of each other on my machine
with Python 3.5.1"""
dt = 1.
std_z = 10.1
x = np.array([[0.], [0.]])
F = np.array([[1., dt], [0., 1.]])
H = np.array([[1., 0.]])
P = np.eye(2)
R = np.eye(1)*std_z**2
Q = Q_discrete_white_noise(2, dt, 5.1)
pos = 0.
for t in range(2000):
z = pos + random.randn() * std_z
pos += 100
# perform kalman filtering
x, P = predict(x, P, F, Q)
        x, P = update(x, P, z, R, H)
def class_form():
dt = 1.
std_z = 10.1
    f = const_vel_filter(dt, x0=2, R_std=std_z, Q_var=5.1)
pos = 0.
for t in range(2000):
z = pos + random.randn() * std_z
pos += 100
f.predict()
f.update(z)
def test_z_dim():
f = const_vel_filter(1.0, x0=2, R_std=1., Q_var=5.1)
f.test_matrix_dimensions()
f.update(3.)
assert f.x.shape == (2,)
f.update([3])
assert f.x.shape == (2,)
f.update(np.array([[3]]))
assert f.x.shape == (2,)
try:
f.update(np.array([[[3]]]))
assert False, "filter should have asserted that [[[3]]] is not a valid form for z"
    except Exception:
pass
f = const_vel_filter_2d(1.0, R_std=1., Q_var=5.1)
try:
f.update(3)
assert False, "filter should have asserted that 3 is not a valid form for z"
    except Exception:
pass
try:
f.update([3])
assert False, "filter should have asserted that [3] is not a valid form for z"
    except Exception:
pass
    try:
        f.update([3, 3])
        assert False, "filter should have asserted that [3, 3] is not a valid form for z"
    except Exception:
        pass
    try:
        f.update([[3, 3]])
        assert False, "filter should have asserted that [[3, 3]] is not a valid form for z"
    except Exception:
        pass
f = const_vel_filter_2d(1.0, R_std=1., Q_var=5.1)
f.update([[3], [3]])
f.update(np.array([[3], [3]]))
# now make sure test_matrix_dimensions() is working
f.test_matrix_dimensions()
try:
f.H = 3
f.test_matrix_dimensions()
assert False, "test_matrix_dimensions should have asserted on shape of H"
    except Exception:
pass
f = const_vel_filter_2d(1.0, R_std=1., Q_var=5.1)
try:
f.R = 3
f.test_matrix_dimensions()
assert False, "test_matrix_dimensions should have asserted on shape of R"
    except Exception:
pass
try:
f.R = [3]
f.test_matrix_dimensions()
assert False, "test_matrix_dimensions should have asserted on shape of R"
    except Exception:
pass
try:
f.R = [3, 4.]
f.test_matrix_dimensions()
assert False, "test_matrix_dimensions should have asserted on shape of R"
    except Exception:
pass
f.R = np.diag([3, 4.])
f.test_matrix_dimensions()
f = const_vel_filter(1.0, x0=2, R_std=1., Q_var=5.1)
    # test case where x is a 1d array
f.update([[3]])
f.test_matrix_dimensions(z=3.)
f.test_matrix_dimensions(z=[3.])
    # test case where x is a 2d array
f.x = np.array([[0., 0.]]).T
f.update([[3]])
f.test_matrix_dimensions(z=3.)
f.test_matrix_dimensions(z=[3.])
try:
f.test_matrix_dimensions(z=[[3.]])
assert False, "test_matrix_dimensions should have asserted on shape of z"
    except Exception:
pass
f = const_vel_filter_2d(1.0, R_std=1., Q_var=5.1)
# test for 1D value for x, then set to a 2D vector and try again
for i in range(2):
try:
f.test_matrix_dimensions(z=3.)
assert False, "test_matrix_dimensions should have asserted on shape of z"
        except Exception:
pass
try:
f.test_matrix_dimensions(z=[3.])
assert False, "test_matrix_dimensions should have asserted on shape of z"
        except Exception:
pass
try:
f.test_matrix_dimensions(z=[3., 3.])
assert False, "test_matrix_dimensions should have asserted on shape of z"
        except Exception:
pass
f.test_matrix_dimensions(z=[[3.], [3.]])
f.x = np.array([[1, 2, 3, 4.]]).T
def test_default_dims():
kf = KalmanFilter(dim_x=3, dim_z=1)
kf.predict()
kf.update(np.array([[1.]]).T)
def test_functions():
x, P = predict(x=10., P=3., u=1., Q=2.**2)
x, P = update(x=x, P=P, z=12., R=3.5**2)
x, P = predict(x=np.array([10.]), P=np.array([[3.]]), Q=2.**2)
x, P = update(x=x, P=P, z=12., H=np.array([[1.]]), R=np.array([[3.5**2]]))
x = np.array([1., 0])
P = np.diag([1., 1])
Q = np.diag([0., 0])
H = np.array([[1., 0]])
x, P = predict(x=x, P=P, Q=Q)
assert x.shape == (2,)
assert P.shape == (2, 2)
x, P = update(x, P, z=[1], R=np.array([[1.]]), H=H)
assert x[0] == 1 and x[1] == 0
# test velocity predictions
x, P = predict(x=x, P=P, Q=Q)
assert x[0] == 1 and x[1] == 0
x[1] = 1.
F = np.array([[1., 1], [0, 1]])
x, P = predict(x=x, F=F, P=P, Q=Q)
assert x[0] == 2 and x[1] == 1
x, P = predict(x=x, F=F, P=P, Q=Q)
assert x[0] == 3 and x[1] == 1
def test_z_checks():
kf = KalmanFilter(dim_x=3, dim_z=1)
kf.update(3.)
kf.update([3])
kf.update((3))
kf.update([[3]])
kf.update(np.array([[3]]))
try:
kf.update([[3, 3]])
assert False, "accepted bad z shape"
except ValueError:
pass
kf = KalmanFilter(dim_x=3, dim_z=2)
kf.update([3, 4])
kf.update([[3, 4]])
kf.update(np.array([[3, 4]]))
kf.update(np.array([[3, 4]]).T)
def test_update_correlated():
f = const_vel_filter(1.0, x0=2, R_std=1., Q_var=5.1)
f.M = np.array([[1], [0]])
for i in range(10):
f.predict()
f.update_correlated(3.)
if __name__ == "__main__":
DO_PLOT = False
test_steadystate()
test_functions()
test_default_dims()
test_z_checks()
test_z_dim()
test_batch_filter()
test_procedural_batch_filter()
test_univariate()
test_noisy_1d()
test_noisy_11d()
|
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, CONF_ENTITY_ID, CONF_NAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util.async_ import run_callback_threadsafe
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "image_processing"
SCAN_INTERVAL = timedelta(seconds=10)
DEVICE_CLASSES = [
"alpr", # Automatic license plate recognition
"face", # Face
"ocr", # OCR
]
SERVICE_SCAN = "scan"
EVENT_DETECT_FACE = "image_processing.detect_face"
ATTR_AGE = "age"
ATTR_CONFIDENCE = "confidence"
ATTR_FACES = "faces"
ATTR_GENDER = "gender"
ATTR_GLASSES = "glasses"
ATTR_MOTION = "motion"
ATTR_TOTAL_FACES = "total_faces"
CONF_SOURCE = "source"
CONF_CONFIDENCE = "confidence"
DEFAULT_TIMEOUT = 10
DEFAULT_CONFIDENCE = 80
SOURCE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): cv.entity_domain("camera"),
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_SOURCE): vol.All(cv.ensure_list, [SOURCE_SCHEMA]),
vol.Optional(CONF_CONFIDENCE, default=DEFAULT_CONFIDENCE): vol.All(
vol.Coerce(float), vol.Range(min=0, max=100)
),
}
)
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema)
async def async_setup(hass, config):
"""Set up the image processing."""
component = EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
async def async_scan_service(service):
"""Service handler for scan."""
image_entities = await component.async_extract_from_service(service)
update_tasks = []
for entity in image_entities:
entity.async_set_context(service.context)
update_tasks.append(entity.async_update_ha_state(True))
if update_tasks:
await asyncio.wait(update_tasks)
hass.services.async_register(
DOMAIN, SERVICE_SCAN, async_scan_service, schema=make_entity_service_schema({})
)
return True
class ImageProcessingEntity(Entity):
"""Base entity class for image processing."""
timeout = DEFAULT_TIMEOUT
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return None
@property
def confidence(self):
"""Return minimum confidence for do some things."""
return None
def process_image(self, image):
"""Process image."""
raise NotImplementedError()
async def async_process_image(self, image):
"""Process image."""
return await self.hass.async_add_executor_job(self.process_image, image)
async def async_update(self):
"""Update image and process it.
This method is a coroutine.
"""
camera = self.hass.components.camera
image = None
try:
image = await camera.async_get_image(
self.camera_entity, timeout=self.timeout
)
except HomeAssistantError as err:
_LOGGER.error("Error on receive image from entity: %s", err)
return
# process image data
await self.async_process_image(image.content)
class ImageProcessingFaceEntity(ImageProcessingEntity):
"""Base entity class for face image processing."""
def __init__(self):
"""Initialize base face identify/verify entity."""
self.faces = []
self.total_faces = 0
@property
def state(self):
"""Return the state of the entity."""
confidence = 0
state = None
# No confidence support
if not self.confidence:
return self.total_faces
# Search high confidence
for face in self.faces:
if ATTR_CONFIDENCE not in face:
continue
f_co = face[ATTR_CONFIDENCE]
if f_co > confidence:
confidence = f_co
for attr in [ATTR_NAME, ATTR_MOTION]:
if attr in face:
state = face[attr]
break
return state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "face"
@property
def state_attributes(self):
"""Return device specific state attributes."""
return {ATTR_FACES: self.faces, ATTR_TOTAL_FACES: self.total_faces}
def process_faces(self, faces, total):
"""Send event with detected faces and store data."""
run_callback_threadsafe(
self.hass.loop, self.async_process_faces, faces, total
).result()
@callback
def async_process_faces(self, faces, total):
"""Send event with detected faces and store data.
        faces is a list of dicts in the following format:
[
{
ATTR_CONFIDENCE: 80,
ATTR_NAME: 'Name',
ATTR_AGE: 12.0,
ATTR_GENDER: 'man',
ATTR_MOTION: 'smile',
ATTR_GLASSES: 'sunglasses'
},
]
This method must be run in the event loop.
"""
# Send events
for face in faces:
if ATTR_CONFIDENCE in face and self.confidence:
if face[ATTR_CONFIDENCE] < self.confidence:
continue
face.update({ATTR_ENTITY_ID: self.entity_id})
self.hass.async_add_job(self.hass.bus.async_fire, EVENT_DETECT_FACE, face)
# Update entity store
self.faces = faces
self.total_faces = total
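# Illustrative sketch (hypothetical class, not part of this module): a face
# platform implements process_image() and reports results through
# process_faces(); state and attributes then come from the base class.
class DemoFaceEntity(ImageProcessingFaceEntity):
    """Face entity used purely as a usage sketch."""

    def process_image(self, image):
        """Pretend to detect exactly one face in the given image."""
        faces = [{ATTR_CONFIDENCE: 95, ATTR_NAME: "demo"}]
        self.process_faces(faces, total=len(faces))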
|
from typing import Callable, TypeVar
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
class Registry(dict):
"""Registry of items."""
def register(self, name: str) -> Callable[[CALLABLE_T], CALLABLE_T]:
"""Return decorator to register item with a specific name."""
def decorator(func: CALLABLE_T) -> CALLABLE_T:
"""Register decorated function."""
self[name] = func
return func
return decorator
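# Example usage (illustrative only): a Registry is an ordinary dict whose
# register() decorator stores the decorated callable under a name.
HANDLERS = Registry()


@HANDLERS.register("echo")
def _echo(value):
    """Hypothetical handler registered under the name "echo"."""
    return value


# HANDLERS["echo"]("hi") -> "hi"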
|
from homeassistant.exceptions import HomeAssistantError
class CertExpiryException(HomeAssistantError):
"""Base class for cert_expiry exceptions."""
class TemporaryFailure(CertExpiryException):
"""Temporary failure has occurred."""
class ValidationFailure(CertExpiryException):
"""Certificate validation failure has occurred."""
class ResolveFailed(TemporaryFailure):
"""Name resolution failed."""
class ConnectionTimeout(TemporaryFailure):
"""Network connection timed out."""
class ConnectionRefused(TemporaryFailure):
"""Network connection refused."""
|
import logging
from typing import Optional
from homeassistant.components.geo_location import GeolocationEvent
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_UNIT_SYSTEM_IMPERIAL,
LENGTH_KILOMETERS,
LENGTH_MILES,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_registry import async_get_registry
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from .const import DEFAULT_ICON, DOMAIN, FEED
_LOGGER = logging.getLogger(__name__)
ATTR_ALERT_LEVEL = "alert_level"
ATTR_COUNTRY = "country"
ATTR_DESCRIPTION = "description"
ATTR_DURATION_IN_WEEK = "duration_in_week"
ATTR_EVENT_TYPE = "event_type"
ATTR_EXTERNAL_ID = "external_id"
ATTR_FROM_DATE = "from_date"
ATTR_POPULATION = "population"
ATTR_SEVERITY = "severity"
ATTR_TO_DATE = "to_date"
ATTR_VULNERABILITY = "vulnerability"
ICONS = {
"DR": "mdi:water-off",
"EQ": "mdi:pulse",
"FL": "mdi:home-flood",
"TC": "mdi:weather-hurricane",
"TS": "mdi:waves",
"VO": "mdi:image-filter-hdr",
}
# An update of this entity is not making a web request, but uses internal data only.
PARALLEL_UPDATES = 0
SOURCE = "gdacs"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the GDACS Feed platform."""
manager = hass.data[DOMAIN][FEED][entry.entry_id]
@callback
def async_add_geolocation(feed_manager, integration_id, external_id):
"""Add gelocation entity from feed."""
new_entity = GdacsEvent(feed_manager, integration_id, external_id)
_LOGGER.debug("Adding geolocation %s", new_entity)
async_add_entities([new_entity], True)
manager.listeners.append(
async_dispatcher_connect(
hass, manager.async_event_new_entity(), async_add_geolocation
)
)
# Do not wait for update here so that the setup can be completed and because an
# update will fetch data from the feed via HTTP and then process that data.
hass.async_create_task(manager.async_update())
_LOGGER.debug("Geolocation setup done")
class GdacsEvent(GeolocationEvent):
"""This represents an external event with GDACS feed data."""
def __init__(self, feed_manager, integration_id, external_id):
"""Initialize entity with data from feed entry."""
self._feed_manager = feed_manager
self._integration_id = integration_id
self._external_id = external_id
self._title = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._alert_level = None
self._country = None
self._description = None
self._duration_in_week = None
self._event_type_short = None
self._event_type = None
self._from_date = None
self._to_date = None
self._population = None
self._severity = None
self._vulnerability = None
self._version = None
self._remove_signal_delete = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_delete = async_dispatcher_connect(
self.hass, f"gdacs_delete_{self._external_id}", self._delete_callback
)
self._remove_signal_update = async_dispatcher_connect(
self.hass, f"gdacs_update_{self._external_id}", self._update_callback
)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity will be removed from hass."""
self._remove_signal_delete()
self._remove_signal_update()
# Remove from entity registry.
entity_registry = await async_get_registry(self.hass)
if self.entity_id in entity_registry.entities:
entity_registry.async_remove(self.entity_id)
@callback
def _delete_callback(self):
"""Remove this entity."""
self.hass.async_create_task(self.async_remove())
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for GDACS feed location events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
if feed_entry:
self._update_from_feed(feed_entry)
def _update_from_feed(self, feed_entry):
"""Update the internal state from the provided feed entry."""
event_name = feed_entry.event_name
if not event_name:
# Earthquakes usually don't have an event name.
event_name = f"{feed_entry.country} ({feed_entry.event_id})"
self._title = f"{feed_entry.event_type}: {event_name}"
# Convert distance if not metric system.
if self.hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL:
self._distance = IMPERIAL_SYSTEM.length(
feed_entry.distance_to_home, LENGTH_KILOMETERS
)
else:
self._distance = feed_entry.distance_to_home
self._latitude = feed_entry.coordinates[0]
self._longitude = feed_entry.coordinates[1]
self._attribution = feed_entry.attribution
self._alert_level = feed_entry.alert_level
self._country = feed_entry.country
self._description = feed_entry.title
self._duration_in_week = feed_entry.duration_in_week
self._event_type_short = feed_entry.event_type_short
self._event_type = feed_entry.event_type
self._from_date = feed_entry.from_date
self._to_date = feed_entry.to_date
self._population = feed_entry.population
self._severity = feed_entry.severity
self._vulnerability = feed_entry.vulnerability
# Round vulnerability value if presented as float.
if isinstance(self._vulnerability, float):
self._vulnerability = round(self._vulnerability, 1)
self._version = feed_entry.version
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID containing latitude/longitude and external id."""
return f"{self._integration_id}_{self._external_id}"
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
if self._event_type_short and self._event_type_short in ICONS:
return ICONS[self._event_type_short]
return DEFAULT_ICON
@property
def source(self) -> str:
"""Return source value of this external event."""
return SOURCE
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return self._title
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
if self.hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL:
return LENGTH_MILES
return LENGTH_KILOMETERS
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_DESCRIPTION, self._description),
(ATTR_ATTRIBUTION, self._attribution),
(ATTR_EVENT_TYPE, self._event_type),
(ATTR_ALERT_LEVEL, self._alert_level),
(ATTR_COUNTRY, self._country),
(ATTR_DURATION_IN_WEEK, self._duration_in_week),
(ATTR_FROM_DATE, self._from_date),
(ATTR_TO_DATE, self._to_date),
(ATTR_POPULATION, self._population),
(ATTR_SEVERITY, self._severity),
(ATTR_VULNERABILITY, self._vulnerability),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from xfs import XFSCollector
################################################################################
class TestXFSCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('XFSCollector', {
'interval': 1
})
self.collector = XFSCollector(config, None)
def test_import(self):
self.assertTrue(XFSCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/fs/xfs/stat')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
XFSCollector.PROC = self.getFixturePath('proc_fs_xfs_stat-1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
XFSCollector.PROC = self.getFixturePath('proc_fs_xfs_stat-2')
self.collector.collect()
metrics = {
'extent_alloc.alloc_extent': 58,
'extent_alloc.alloc_block': 928,
'extent_alloc.free_extent': 116,
'extent_alloc.free_block': 928,
'abt.lookup': 0,
'abt.compare': 0,
'abt.insrec': 0,
'abt.delrec': 0,
'blk_map.read_ops': 124647,
'blk_map.write_ops': 116,
'blk_map.unmap': 116,
'blk_map.add_exlist': 58,
'blk_map.del_exlist': 116,
'blk_map.look_exlist': 124879,
'blk_map.cmp_exlist': 0,
'bmbt.lookup': 0,
'bmbt.compare': 0,
'bmbt.insrec': 0,
'bmbt.delrec': 0,
'dir.lookup': 49652,
'dir.create': 58,
'dir.remove': 58,
'dir.getdents': 334948,
'trans.sync': 7,
'trans.async': 586,
'trans.empty': 0,
'ig.ig_attempts': 0,
'ig.ig_found': 13142,
'ig.ig_frecycle': 0,
'ig.ig_missed': 34759,
'ig.ig_dup': 0,
'ig.ig_reclaims': 110424,
'ig.ig_attrchg': 0,
'log.writes': 65,
'log.blocks': 8320,
'log.noiclogs': 0,
'log.force': 681,
'log.force_sleep': 65,
'push_ail.try_logspace': 13403,
'push_ail.sleep_logspace': 0,
'push_ail.pushes': 2521,
'push_ail.success': 41,
'push_ail.pushbuf': 144,
'push_ail.pinned': 0,
'push_ail.locked': 0,
'push_ail.flushing': 0,
'push_ail.restarts': 0,
'push_ail.flush': 0,
'xstrat.quick': 58,
'xstrat.split': 0,
'rw.write_calls': 584,
'rw.read_calls': 1909917,
'attr.get': 54995,
'attr.set': 0,
'attr.remove': 0,
'attr.list': 0,
'icluster.iflush_count': 49,
'icluster.icluster_flushcnt': 16,
'icluster.icluster_flushinode': 16,
'vnodes.vn_active': 0,
'vnodes.vn_alloc': 0,
'vnodes.vn_get': 0,
'vnodes.vn_hold': 0,
'vnodes.vn_rele': 110462,
'vnodes.vn_reclaim': 110462,
'vnodes.vn_remove': 110462,
'vnodes.vn_free': 0,
'buf.xb_get': 39013,
'buf.xb_create': 6671,
'buf.xb_get_locked': 32391,
'buf.xb_get_locked_waited': 0,
'buf.xb_busy_locked': 0,
'buf.xb_miss_locked': 6671,
'buf.xb_page_retries': 0,
'buf.xb_page_found': 13217,
'buf.xb_get_read': 6671,
'abtb2.xs_abtb_2_lookup': 203,
'abtb2.xs_abtb_2_compare': 1876,
'abtb2.xs_abtb_2_insrec': 47,
'abtb2.xs_abtb_2_delrec': 47,
'abtb2.xs_abtb_2_newroot': 0,
'abtb2.xs_abtb_2_killroot': 0,
'abtb2.xs_abtb_2_increment': 0,
'abtb2.xs_abtb_2_decrement': 0,
'abtb2.xs_abtb_2_lshift': 0,
'abtb2.xs_abtb_2_rshift': 0,
'abtb2.xs_abtb_2_split': 0,
'abtb2.xs_abtb_2_join': 0,
'abtb2.xs_abtb_2_alloc': 0,
'abtb2.xs_abtb_2_free': 0,
'abtb2.xs_abtb_2_moves': 7040,
'abtc2.xs_abtc_2_lookup': 422,
'abtc2.xs_abtc_2_compare': 4014,
'abtc2.xs_abtc_2_insrec': 203,
'abtc2.xs_abtc_2_delrec': 203,
'abtc2.xs_abtc_2_newroot': 0,
'abtc2.xs_abtc_2_killroot': 0,
'abtc2.xs_abtc_2_increment': 0,
'abtc2.xs_abtc_2_decrement': 0,
'abtc2.xs_abtc_2_lshift': 0,
'abtc2.xs_abtc_2_rshift': 0,
'abtc2.xs_abtc_2_split': 0,
'abtc2.xs_abtc_2_join': 0,
'abtc2.xs_abtc_2_alloc': 0,
'abtc2.xs_abtc_2_free': 0,
'abtc2.xs_abtc_2_moves': 34516,
'bmbt2.xs_bmbt_2_lookup': 0,
'bmbt2.xs_bmbt_2_compare': 0,
'bmbt2.xs_bmbt_2_insrec': 0,
'bmbt2.xs_bmbt_2_delrec': 0,
'bmbt2.xs_bmbt_2_newroot': 0,
'bmbt2.xs_bmbt_2_killroot': 0,
'bmbt2.xs_bmbt_2_increment': 0,
'bmbt2.xs_bmbt_2_decrement': 0,
'bmbt2.xs_bmbt_2_lshift': 0,
'bmbt2.xs_bmbt_2_rshift': 0,
'bmbt2.xs_bmbt_2_split': 0,
'bmbt2.xs_bmbt_2_join': 0,
'bmbt2.xs_bmbt_2_alloc': 0,
'bmbt2.xs_bmbt_2_free': 0,
'bmbt2.xs_bmbt_2_moves': 0,
'ibt2.lookup': 138,
'ibt2.compare': 1214,
'ibt2.insrec': 0,
'ibt2.delrec': 0,
'ibt2.newroot': 0,
'ibt2.killroot': 0,
'ibt2.increment': 0,
'ibt2.decrement': 0,
'ibt2.lshift': 0,
'ibt2.rshift': 0,
'ibt2.split': 0,
'ibt2.join': 0,
'ibt2.alloc': 0,
'ibt2.free': 0,
'ibt2.moves': 0,
'fibt2.lookup': 0,
'fibt2.compare': 0,
'fibt2.insrec': 0,
'fibt2.delrec': 0,
'fibt2.newroot': 0,
'fibt2.killroot': 0,
'fibt2.increment': 0,
'fibt2.decrement': 0,
'fibt2.lshift': 0,
'fibt2.rshift': 0,
'fibt2.split': 0,
'fibt2.join': 0,
'fibt2.alloc': 0,
'fibt2.free': 0,
'fibt2.moves': 0,
'qm.xs_qm_dquot': 0,
'qm.xs_qm_dquot_unused': 0,
'xpc.xs_xstrat_bytes': 3801088,
'xpc.xs_write_bytes': 270944,
'xpc.xs_read_bytes': 2953097143,
'debug.debug': 0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
|
import os
from nikola.plugin_categories import Task
from nikola.utils import config_changed, LOGGER
class RenderPages(Task):
"""Render pages into output."""
name = "render_pages"
def gen_tasks(self):
"""Build final pages from metadata and HTML fragments."""
kw = {
"post_pages": self.site.config["post_pages"],
"translations": self.site.config["TRANSLATIONS"],
"filters": self.site.config["FILTERS"],
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"demote_headers": self.site.config['DEMOTE_HEADERS'],
}
self.site.scan_posts()
yield self.group_task()
index_paths = {}
for lang in kw["translations"]:
index_paths[lang] = False
if not self.site.config["DISABLE_INDEXES"]:
index_paths[lang] = os.path.normpath(os.path.join(self.site.config['OUTPUT_FOLDER'],
self.site.path('index', '', lang=lang)))
for lang in kw["translations"]:
for post in self.site.timeline:
if not kw["show_untranslated_posts"] and not post.is_translation_available(lang):
continue
if post.is_post:
context = {'pagekind': ['post_page']}
else:
context = {'pagekind': ['story_page', 'page_page']}
for task in self.site.generic_page_renderer(lang, post, kw["filters"], context):
if task['name'] == index_paths[lang]:
# Issue 3022
LOGGER.error(
"Post {0!r}: output path ({1}) conflicts with the blog index ({2}). "
"Please change INDEX_PATH or disable index generation.".format(
post.source_path, task['name'], index_paths[lang]))
task['uptodate'] = task['uptodate'] + [config_changed(kw, 'nikola.plugins.task.pages')]
task['basename'] = self.name
task['task_dep'] = ['render_posts']
yield task
|
import logging
import datapoint
from .const import MODE_3HOURLY
_LOGGER = logging.getLogger(__name__)
class MetOfficeData:
"""Get current and forecast data from Datapoint.
Please note that the 'datapoint' library is not asyncio-friendly, so some
    calls have had to be wrapped with the standard Home Assistant helper
async_add_executor_job.
"""
def __init__(self, hass, api_key, latitude, longitude):
"""Initialize the data object."""
self._hass = hass
self._datapoint = datapoint.connection(api_key=api_key)
self._site = None
# Public attributes
self.latitude = latitude
self.longitude = longitude
# Holds the current data from the Met Office
self.site_id = None
self.site_name = None
self.now = None
async def async_update_site(self):
"""Async wrapper for getting the DataPoint site."""
return await self._hass.async_add_executor_job(self._update_site)
def _update_site(self):
"""Return the nearest DataPoint Site to the held latitude/longitude."""
try:
new_site = self._datapoint.get_nearest_forecast_site(
latitude=self.latitude, longitude=self.longitude
)
if self._site is None or self._site.id != new_site.id:
self._site = new_site
self.now = None
self.site_id = self._site.id
self.site_name = self._site.name
except datapoint.exceptions.APIException as err:
_LOGGER.error("Received error from Met Office Datapoint: %s", err)
self._site = None
self.site_id = None
self.site_name = None
self.now = None
return self._site
async def async_update(self):
"""Async wrapper for update method."""
return await self._hass.async_add_executor_job(self._update)
def _update(self):
"""Get the latest data from DataPoint."""
if self._site is None:
_LOGGER.error("No Met Office forecast site held, check logs for problems")
return
try:
forecast = self._datapoint.get_forecast_for_site(
self._site.id, MODE_3HOURLY
)
self.now = forecast.now()
except (ValueError, datapoint.exceptions.APIException) as err:
_LOGGER.error("Check Met Office connection: %s", err.args)
self.now = None
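# Usage sketch (illustrative; the variable names are hypothetical):
#   data = MetOfficeData(hass, api_key, latitude, longitude)
#   await data.async_update_site()  # resolve the nearest DataPoint site
#   await data.async_update()       # fetch the 3-hourly forecast
# Both awaits push the blocking datapoint calls onto the executor.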
|
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.advantage_air.const import DOMAIN
from tests.async_mock import patch
from tests.components.advantage_air import TEST_SYSTEM_DATA, TEST_SYSTEM_URL, USER_INPUT
async def test_form(hass, aioclient_mock):
"""Test that form shows up."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
result1 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result1["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result1["step_id"] == "user"
assert result1["errors"] == {}
with patch(
"homeassistant.components.advantage_air.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result1["flow_id"],
USER_INPUT,
)
assert len(aioclient_mock.mock_calls) == 1
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "testname"
assert result2["data"] == USER_INPUT
await hass.async_block_till_done()
assert len(mock_setup_entry.mock_calls) == 1
# Test Duplicate Config Flow
result3 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result4 = await hass.config_entries.flow.async_configure(
result3["flow_id"],
USER_INPUT,
)
assert result4["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_form_cannot_connect(hass, aioclient_mock):
"""Test we handle cannot connect error."""
aioclient_mock.get(
TEST_SYSTEM_URL,
exc=SyntaxError,
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "cannot_connect"}
assert len(aioclient_mock.mock_calls) == 1
|
from absl import flags
FLAGS = flags.FLAGS
CLOUD_TPU_GIT = 'https://github.com/tensorflow/tpu.git'
flags.DEFINE_string('cloud_tpu_commit_hash',
'45f33771f9d9aef6d37386d07b086d7e20be0254',
'git commit hash of desired cloud TPU models commit.')
def Install(vm):
"""Installs cloud TPU models on the VM."""
vm.InstallPackages('git')
vm.RemoteCommand('git clone {}'.format(CLOUD_TPU_GIT), should_log=True)
vm.RemoteCommand('cd tpu && git checkout {}'.format(
FLAGS.cloud_tpu_commit_hash), should_log=True)
vm.Install('pip')
vm.RemoteCommand('sudo pip install absl-py', should_log=True)
def Uninstall(vm):
"""Uninstalls cloud TPU models on the VM."""
vm.RemoteCommand('rm -rf tpu', should_log=True)
def GetCommit(vm):
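  """Returns the git commit hash of the checked-out cloud TPU models repo."""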
stdout, _ = vm.RemoteCommand('cd tpu && git rev-parse HEAD',
should_log=True)
return stdout
|
import unittest
import numpy as np
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.utils import generate_random_bbox
from chainercv.utils import non_maximum_suppression
@testing.parameterize(
{'threshold': 1, 'expect': np.array([0, 1, 2, 3])},
{'threshold': 0.5, 'expect': np.array([0, 1, 3])},
{'threshold': 0.3, 'expect': np.array([0, 2, 3])},
{'threshold': 0.2, 'expect': np.array([0, 3])},
{'threshold': 0, 'expect': np.array([0])},
)
class TestNonMaximumSuppression(unittest.TestCase):
def setUp(self):
self.bbox = np.array((
(0, 0, 4, 4),
(1, 1, 5, 5), # 9/23
(2, 1, 6, 5), # 6/26, 12/20
(4, 0, 8, 4), # 0/32, 3/29, 6/26
))
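        # The fractions in the comments above are the IoU of each box with
        # every preceding box (e.g. box 1 vs box 0: intersection 9, union
        # 23), which is what each threshold parameterization selects on.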
def check_non_maximum_suppression(self, bbox, threshold, expect):
selec = non_maximum_suppression(bbox, threshold)
self.assertIsInstance(selec, type(bbox))
self.assertEqual(selec.dtype, np.int32)
np.testing.assert_equal(
cuda.to_cpu(selec),
cuda.to_cpu(expect))
def test_non_maximum_suppression_cpu(self):
self.check_non_maximum_suppression(
self.bbox, self.threshold, self.expect)
@attr.gpu
def test_non_maximum_suppression_gpu(self):
self.check_non_maximum_suppression(
cuda.to_gpu(self.bbox),
self.threshold,
cuda.to_gpu(self.expect)
)
class TestNonMaximumSuppressionConsistency(unittest.TestCase):
@attr.gpu
def test_non_maximum_suppression_consistency(self):
bbox = generate_random_bbox(6000, (600, 800), 32, 512)
cpu_selec = non_maximum_suppression(bbox, 0.5)
gpu_selec = non_maximum_suppression(cuda.to_gpu(bbox), 0.5)
np.testing.assert_equal(cpu_selec, cuda.to_cpu(gpu_selec))
class TestNonMaximumSuppressionOptions(unittest.TestCase):
def setUp(self):
self.bbox = generate_random_bbox(6000, (600, 800), 32, 512)
self.score = np.random.uniform(0, 100, size=(len(self.bbox),))
self.limit = 100
self.threshold = 0.5
def check_non_maximum_suppression_options(
self, bbox, threshold, score, limit):
# Pass all options to the tested function
scored_selec = non_maximum_suppression(bbox, threshold, score, limit)
self.assertIsInstance(scored_selec, type(bbox))
        # Reorder inputs before passing them to the function.
# Reorder the outputs according to scores.
order = score.argsort()[::-1]
reordered_selec = non_maximum_suppression(
bbox[order], threshold, score=None, limit=None)
reordered_selec = reordered_selec[:limit]
reordered_selec = order[reordered_selec]
np.testing.assert_equal(
cuda.to_cpu(scored_selec), cuda.to_cpu(reordered_selec))
def test_non_maximum_suppression_options_cpu(self):
self.check_non_maximum_suppression_options(
self.bbox, self.threshold, self.score, self.limit)
@attr.gpu
def test_non_maximum_suppression_options_gpu(self):
self.check_non_maximum_suppression_options(
cuda.to_gpu(self.bbox),
self.threshold, cuda.to_gpu(self.score), self.limit)
class TestNonMaximumSuppressionZeroLengthBbox(unittest.TestCase):
def setUp(self):
self.bbox = np.zeros((0, 4))
    def check_non_maximum_suppression_zero_length_bbox(
self, bbox, threshold):
selec = non_maximum_suppression(bbox, threshold)
self.assertIsInstance(selec, type(bbox))
self.assertEqual(selec.shape, (0,))
def test_non_maximum_suppression_zero_length_bbox_cpu(self):
        self.check_non_maximum_suppression_zero_length_bbox(
self.bbox, 0.5)
@attr.gpu
def test_non_maximum_suppression_zero_length_bbox_gpu(self):
        self.check_non_maximum_suppression_zero_length_bbox(
cuda.to_gpu(self.bbox), 0.5)
testing.run_module(__name__, __file__)
|
import sys
import json
import textwrap
import os
import signal
import time
import pytest
import pytest_bdd as bdd
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QFileSystemWatcher
from qutebrowser.utils import utils
bdd.scenarios('editor.feature')
@bdd.when(bdd.parsers.parse('I set up a fake editor replacing "{text}" by '
'"{replacement}"'))
def set_up_editor_replacement(quteproc, server, tmpdir, text, replacement):
"""Set up editor.command to a small python script doing a replacement."""
text = text.replace('(port)', str(server.port))
script = tmpdir / 'script.py'
script.write(textwrap.dedent("""
import sys
with open(sys.argv[1], encoding='utf-8') as f:
data = f.read()
data = data.replace("{text}", "{replacement}")
with open(sys.argv[1], 'w', encoding='utf-8') as f:
f.write(data)
""".format(text=text, replacement=replacement)))
editor = json.dumps([sys.executable, str(script), '{}'])
quteproc.set_setting('editor.command', editor)
@bdd.when(bdd.parsers.parse('I set up a fake editor returning "{text}"'))
def set_up_editor(quteproc, tmpdir, text):
"""Set up editor.command to a small python script inserting a text."""
script = tmpdir / 'script.py'
script.write(textwrap.dedent("""
import sys
with open(sys.argv[1], 'w', encoding='utf-8') as f:
f.write({text!r})
""".format(text=text)))
editor = json.dumps([sys.executable, str(script), '{}'])
quteproc.set_setting('editor.command', editor)
@bdd.when(bdd.parsers.parse('I set up a fake editor returning empty text'))
def set_up_editor_empty(quteproc, tmpdir):
"""Set up editor.command to a small python script inserting empty text."""
set_up_editor(quteproc, tmpdir, "")
class EditorPidWatcher(QObject):
appeared = pyqtSignal()
def __init__(self, directory, parent=None):
super().__init__(parent)
self._pidfile = directory / 'editor_pid'
self._watcher = QFileSystemWatcher(self)
self._watcher.addPath(str(directory))
self._watcher.directoryChanged.connect(self._check_update)
self.has_pidfile = False
self._check_update()
@pyqtSlot()
def _check_update(self):
if self.has_pidfile:
return
if self._pidfile.check():
if self._pidfile.read():
self.has_pidfile = True
self.appeared.emit()
else:
self._watcher.addPath(str(self._pidfile))
def manual_check(self):
return self._pidfile.check()
@pytest.fixture
def editor_pid_watcher(tmpdir):
return EditorPidWatcher(tmpdir)
@bdd.when(bdd.parsers.parse('I set up a fake editor that writes "{text}" on '
'save'))
def set_up_editor_wait(quteproc, tmpdir, text, editor_pid_watcher):
"""Set up editor.command to a small python script inserting a text."""
assert not utils.is_windows
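    # The script below idles until it receives SIGUSR1 (write, then exit) or
    # SIGUSR2 (write, then keep running). On a signal it rewrites the file
    # until the mtime visibly changes, which guards against coarse mtime
    # resolution, so tests can reliably wait for the "save".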
pidfile = tmpdir / 'editor_pid'
script = tmpdir / 'script.py'
script.write(textwrap.dedent("""
import os
import sys
import time
import signal
def handle(sig, _frame):
filename = sys.argv[1]
old_mtime = new_mtime = os.stat(filename).st_mtime
while old_mtime == new_mtime:
time.sleep(0.1)
with open(filename, 'w', encoding='utf-8') as f:
f.write({text!r})
new_mtime = os.stat(filename).st_mtime
if sig == signal.SIGUSR1:
sys.exit(0)
signal.signal(signal.SIGUSR1, handle)
signal.signal(signal.SIGUSR2, handle)
with open(r'{pidfile}', 'w') as f:
f.write(str(os.getpid()))
time.sleep(100)
""".format(pidfile=pidfile, text=text)))
editor = json.dumps([sys.executable, str(script), '{}'])
quteproc.set_setting('editor.command', editor)
@bdd.when("I wait until the editor has started")
def wait_editor(qtbot, editor_pid_watcher):
if not editor_pid_watcher.has_pidfile:
with qtbot.wait_signal(editor_pid_watcher.appeared, raising=False):
pass
if not editor_pid_watcher.manual_check():
pytest.fail("Editor pidfile failed to appear!")
@bdd.when(bdd.parsers.parse('I kill the waiting editor'))
def kill_editor_wait(tmpdir):
"""Kill the waiting editor."""
pidfile = tmpdir / 'editor_pid'
pid = int(pidfile.read())
    # windows has no SIGUSR1, but we don't run this on windows anyway
# for posix, there IS a member so we need to ignore useless-suppression
# pylint: disable=no-member,useless-suppression
os.kill(pid, signal.SIGUSR1)
@bdd.when(bdd.parsers.parse('I save without exiting the editor'))
def save_editor_wait(tmpdir):
"""Trigger the waiting editor to write without exiting."""
pidfile = tmpdir / 'editor_pid'
# give the "editor" process time to write its pid
for _ in range(10):
if pidfile.check():
break
time.sleep(1)
pid = int(pidfile.read())
    # windows has no SIGUSR2, but we don't run this on windows anyway
# for posix, there IS a member so we need to ignore useless-suppression
# pylint: disable=no-member,useless-suppression
os.kill(pid, signal.SIGUSR2)
|
from qutebrowser.config import config
from qutebrowser.utils import utils, log
from qutebrowser.misc import objects
@utils.prevent_exceptions(False) # Runs in I/O thread
def _accept_cookie(request):
"""Check whether the given cookie should be accepted."""
url = request.firstPartyUrl
if not url.isValid():
url = None
accept = config.instance.get('content.cookies.accept',
url=url)
if 'log-cookies' in objects.debug_flags:
first_party_str = ("<unknown>" if not request.firstPartyUrl.isValid()
else request.firstPartyUrl.toDisplayString())
origin_str = ("<unknown>" if not request.origin.isValid()
else request.origin.toDisplayString())
log.network.debug('Cookie from origin {} on {} (third party: {}) '
'-> applying setting {}'
.format(origin_str, first_party_str, request.thirdParty,
accept))
if accept == 'all':
return True
elif accept in ['no-3rdparty', 'no-unknown-3rdparty']:
return not request.thirdParty
elif accept == 'never':
return False
else:
raise utils.Unreachable
def install_filter(profile):
"""Install the cookie filter on the given profile."""
profile.cookieStore().setCookieFilter(_accept_cookie)
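# Illustrative wiring sketch (an assumption, not qutebrowser's own startup
# code): attaching the filter to the default QtWebEngine profile. Requires a
# running QApplication and Qt >= 5.11, where setCookieFilter is available.
#
#     from PyQt5.QtWidgets import QApplication
#     from PyQt5.QtWebEngineWidgets import QWebEngineProfile
#     app = QApplication([])
#     install_filter(QWebEngineProfile.defaultProfile())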
|
from __future__ import division
import argparse
import functools
import multiprocessing
import numpy as np
import random
import six
import chainer
from chainer.dataset.convert import _concat_arrays
from chainer.dataset.convert import to_device
import chainer.links as L
from chainer.training import extensions
from chainercv.chainer_experimental.datasets.sliceable \
import TransformDataset
from chainercv.chainer_experimental.training.extensions import make_shift
from chainercv.datasets import coco_bbox_label_names
from chainercv.datasets import COCOBboxDataset
from chainercv.links.model.light_head_rcnn import LightHeadRCNNResNet101
from chainercv.links.model.light_head_rcnn import LightHeadRCNNTrainChain
from chainercv.links.model.ssd import GradientScaling
from chainercv import transforms
import chainermn
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
def concat_examples(batch, device=None, padding=None,
indices_concat=None, indices_to_device=None):
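    # Variant of chainer.dataset.concat_examples: only the components listed
    # in indices_concat are stacked into single arrays (the rest stay lists
    # of per-example arrays), and only those in indices_to_device are moved
    # to the device. This lets ragged data such as bboxes skip both steps.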
if len(batch) == 0:
raise ValueError('batch is empty')
first_elem = batch[0]
elem_size = len(first_elem)
if indices_concat is None:
indices_concat = range(elem_size)
if indices_to_device is None:
indices_to_device = range(elem_size)
result = []
if not isinstance(padding, tuple):
padding = [padding] * elem_size
for i in six.moves.range(elem_size):
res = [example[i] for example in batch]
if i in indices_concat:
res = _concat_arrays(res, padding[i])
if i in indices_to_device:
if i in indices_concat:
res = to_device(device, res)
else:
res = [to_device(device, r) for r in res]
result.append(res)
return tuple(result)
class Transform(object):
def __init__(self, light_head_rcnn):
self.light_head_rcnn = light_head_rcnn
def __call__(self, in_data):
img, bbox, label = in_data
_, H, W = img.shape
img = self.light_head_rcnn.prepare(img)
_, o_H, o_W = img.shape
scale = o_H / H
bbox = transforms.resize_bbox(bbox, (H, W), (o_H, o_W))
# horizontally flip
img, params = transforms.random_flip(
img, x_random=True, return_param=True)
bbox = transforms.flip_bbox(
bbox, (o_H, o_W), x_flip=params['x_flip'])
return img, bbox, label, scale
def main():
parser = argparse.ArgumentParser(
description='ChainerCV training example: LightHeadRCNN')
parser.add_argument('--out', '-o', default='result',
help='Output directory')
parser.add_argument('--seed', '-s', type=int, default=1234)
parser.add_argument('--batchsize', '-b', type=int, default=8)
parser.add_argument('--epoch', type=int, default=30)
parser.add_argument('--step-epoch', type=int, nargs='*', default=[19, 25])
args = parser.parse_args()
# https://docs.chainer.org/en/stable/chainermn/tutorial/tips_faqs.html#using-multiprocessiterator
if hasattr(multiprocessing, 'set_start_method'):
multiprocessing.set_start_method('forkserver')
p = multiprocessing.Process()
p.start()
p.join()
# chainermn
comm = chainermn.create_communicator('pure_nccl')
device = comm.intra_rank
np.random.seed(args.seed)
random.seed(args.seed)
# model
light_head_rcnn = LightHeadRCNNResNet101(
pretrained_model='imagenet',
n_fg_class=len(coco_bbox_label_names))
light_head_rcnn.use_preset('evaluate')
model = LightHeadRCNNTrainChain(light_head_rcnn)
chainer.cuda.get_device_from_id(device).use()
model.to_gpu()
# train dataset
train_dataset = COCOBboxDataset(
year='2017', split='train')
# filter non-annotated data
train_indices = np.array(
[i for i, label in enumerate(train_dataset.slice[:, ['label']])
if len(label[0]) > 0],
dtype=np.int32)
train_dataset = train_dataset.slice[train_indices]
train_dataset = TransformDataset(
train_dataset, ('img', 'bbox', 'label', 'scale'),
Transform(model.light_head_rcnn))
if comm.rank == 0:
indices = np.arange(len(train_dataset))
else:
indices = None
indices = chainermn.scatter_dataset(indices, comm, shuffle=True)
train_dataset = train_dataset.slice[indices]
train_iter = chainer.iterators.SerialIterator(
train_dataset, batch_size=args.batchsize // comm.size)
optimizer = chainermn.create_multi_node_optimizer(
chainer.optimizers.MomentumSGD(momentum=0.9), comm)
optimizer.setup(model)
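    # Following the Light-Head R-CNN training recipe, gradients of the
    # global context module's large separable convolutions get a 3x scale.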
global_context_module = model.light_head_rcnn.head.global_context_module
global_context_module.col_max.W.update_rule.add_hook(GradientScaling(3.0))
global_context_module.col_max.b.update_rule.add_hook(GradientScaling(3.0))
global_context_module.col.W.update_rule.add_hook(GradientScaling(3.0))
global_context_module.col.b.update_rule.add_hook(GradientScaling(3.0))
global_context_module.row_max.W.update_rule.add_hook(GradientScaling(3.0))
global_context_module.row_max.b.update_rule.add_hook(GradientScaling(3.0))
global_context_module.row.W.update_rule.add_hook(GradientScaling(3.0))
global_context_module.row.b.update_rule.add_hook(GradientScaling(3.0))
optimizer.add_hook(chainer.optimizer.WeightDecay(rate=0.0001))
model.light_head_rcnn.extractor.conv1.disable_update()
model.light_head_rcnn.extractor.res2.disable_update()
for link in model.links():
if isinstance(link, L.BatchNormalization):
link.disable_update()
converter = functools.partial(
concat_examples, padding=0,
# img, bboxes, labels, scales
indices_concat=[0, 2, 3], # img, _, labels, scales
indices_to_device=[0], # img
)
updater = chainer.training.updater.StandardUpdater(
train_iter, optimizer, converter=converter,
device=device)
trainer = chainer.training.Trainer(
updater, (args.epoch, 'epoch'), out=args.out)
@make_shift('lr')
def lr_scheduler(trainer):
base_lr = 0.0005 * 1.25 * args.batchsize
warm_up_duration = 500
warm_up_rate = 1 / 3
iteration = trainer.updater.iteration
epoch = trainer.updater.epoch
        if iteration < warm_up_duration:
            # Linearly warm up from warm_up_rate to 1 over the first
            # warm_up_duration iterations.
            rate = warm_up_rate \
                + (1 - warm_up_rate) * iteration / warm_up_duration
        else:
            # After warm-up, decay the rate by 10x past each milestone epoch.
            rate = 1
            for step in args.step_epoch:
                if epoch > step:
                    rate *= 0.1
        return rate * base_lr
trainer.extend(lr_scheduler)
if comm.rank == 0:
# interval
log_interval = 100, 'iteration'
plot_interval = 3000, 'iteration'
print_interval = 20, 'iteration'
# training extensions
model_name = model.light_head_rcnn.__class__.__name__
trainer.extend(
chainer.training.extensions.snapshot_object(
model.light_head_rcnn,
filename='%s_model_iter_{.updater.iteration}.npz'
% model_name),
trigger=(1, 'epoch'))
trainer.extend(
extensions.observe_lr(),
trigger=log_interval)
trainer.extend(
extensions.LogReport(log_name='log.json', trigger=log_interval))
report_items = [
'iteration', 'epoch', 'elapsed_time', 'lr',
'main/loss',
'main/rpn_loc_loss',
'main/rpn_cls_loss',
'main/roi_loc_loss',
'main/roi_cls_loss',
'validation/main/map/iou=0.50:0.95/area=all/max_dets=100',
]
trainer.extend(
extensions.PrintReport(report_items), trigger=print_interval)
trainer.extend(
extensions.ProgressBar(update_interval=10))
if extensions.PlotReport.available():
trainer.extend(
extensions.PlotReport(
['main/loss'],
file_name='loss.png', trigger=plot_interval),
trigger=plot_interval)
trainer.extend(extensions.dump_graph('main/loss'))
trainer.run()
if __name__ == '__main__':
main()
|
import pytest
from mne.viz._figure import _browse_figure, _psd_figure
def test_browse_figure_constructor():
"""Test error handling in MNEBrowseFigure constructor."""
with pytest.raises(TypeError, match='an instance of Raw, Epochs, or ICA'):
_browse_figure('foo')
def test_psd_figure_constructor():
"""Test error handling in MNELineFigure constructor."""
with pytest.raises(TypeError, match='an instance of Raw or Epochs, got'):
_psd_figure('foo', *((None,) * 19))
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import temp_dir
from perfkitbenchmarker import units
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_benchmarks import fio_benchmark
from tests import pkb_common_test_case
from six.moves import builtins
FLAGS = flags.FLAGS
class TestGenerateJobFileString(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(TestGenerateJobFileString, self).setUp()
self.filename = '/test/filename'
def testBasicGeneration(self):
expected_jobfile = """
[global]
ioengine=libaio
invalidate=1
direct=1
runtime=600
time_based
filename=/test/filename
do_verify=0
verify_fatal=0
group_reporting=1
randrepeat=0
offset_increment=1k
[sequential_read-io-depth-1-num-jobs-1]
stonewall
rw=read
blocksize=512k
iodepth=1
size=100%
numjobs=1
[sequential_read-io-depth-2-num-jobs-1]
stonewall
rw=read
blocksize=512k
iodepth=2
size=100%
numjobs=1"""
self.assertEqual(
fio_benchmark.GenerateJobFileString(
self.filename,
['sequential_read'],
[1, 2], [1],
None, None, 600, ['randrepeat=0', 'offset_increment=1k']),
expected_jobfile)
def testMultipleScenarios(self):
expected_jobfile = """
[global]
ioengine=libaio
invalidate=1
direct=1
runtime=600
time_based
filename=/test/filename
do_verify=0
verify_fatal=0
group_reporting=1
randrepeat=0
[sequential_read-io-depth-1-num-jobs-1]
stonewall
rw=read
blocksize=512k
iodepth=1
size=100%
numjobs=1
[sequential_write-io-depth-1-num-jobs-1]
stonewall
rw=write
blocksize=512k
iodepth=1
size=100%
numjobs=1"""
self.assertEqual(
fio_benchmark.GenerateJobFileString(
self.filename,
['sequential_read', 'sequential_write'],
[1], [1],
None, None, 600, ['randrepeat=0']),
expected_jobfile)
def testCustomBlocksize(self):
orig_blocksize = fio_benchmark.SCENARIOS['sequential_write']['blocksize']
job_file = fio_benchmark.GenerateJobFileString(
self.filename,
['sequential_read'],
[1], [1], None, units.Unit('megabyte') * 2, 600, {})
self.assertIn('blocksize=2000000B', job_file)
# Test that generating a job file doesn't modify the global
# SCENARIOS variable.
self.assertEqual(fio_benchmark.SCENARIOS['sequential_write']['blocksize'],
orig_blocksize)
class TestProcessedJobFileString(pkb_common_test_case.PkbCommonTestCase):
def testReplaceFilenames(self):
file_contents = """
[global]
blocksize = 4k
filename = zanzibar
ioengine=libaio
[job1]
filename = asdf
blocksize = 8k
"""
jobfile = fio_benchmark.ProcessedJobFileString(file_contents, True)
self.assertNotIn('filename', jobfile)
self.assertNotIn('zanzibar', jobfile)
self.assertNotIn('asdf', jobfile)
def doTargetModeTest(self, mode,
expect_fill_device=None,
expect_against_device=None,
expect_format_disk=None):
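    # Stub out flags, filesystem, and fio plumbing so that only the
    # mode-dependent behaviour (filling, running against a device,
    # formatting a disk) is observable through the mocks.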
fio_name = fio_benchmark.__name__
vm_name = vm_util.__name__
dir_name = temp_dir.__name__
with mock.patch(fio_name + '.FillDevice') as mock_fill_device, \
mock.patch(fio_name + '.GetOrGenerateJobFileString') as mock_get_job_string, \
mock.patch(builtins.__name__ + '.open'), \
mock.patch(vm_name + '.GetTempDir', return_value='/tmp/dir'), \
mock.patch(vm_name + '.PrependTempDir', return_value='/tmp/prepend_dir'), \
mock.patch(dir_name + '.GetRunDirPath', return_value='/tmp/run_dir'), \
mock.patch(fio_name + '.fio.ParseResults'), \
mock.patch(fio_name + '.FLAGS') as mock_fio_flags:
mock_fio_flags.fio_target_mode = mode
benchmark_spec = mock.MagicMock()
benchmark_spec.vms = [mock.MagicMock()]
benchmark_spec.vms[0].RobustRemoteCommand = (
mock.MagicMock(return_value=('"stdout"', '"stderr"')))
fio_benchmark.Prepare(benchmark_spec)
fio_benchmark.Run(benchmark_spec)
if expect_fill_device is True:
self.assertEqual(mock_fill_device.call_count, 1)
elif expect_fill_device is False:
self.assertEqual(mock_fill_device.call_count, 0)
# get_job_string.call_args[0][2] is a boolean saying whether or
# not we are testing against a device.
against_device_arg = mock_get_job_string.call_args[0][2]
if expect_against_device is True:
self.assertEqual(against_device_arg, True)
elif expect_against_device is False:
self.assertEqual(against_device_arg, False)
if expect_format_disk is True:
self.assertEqual(benchmark_spec.vms[0].FormatDisk.call_count, 1)
elif expect_format_disk is False:
self.assertEqual(benchmark_spec.vms[0].FormatDisk.call_count, 0)
def testAgainstFileWithFill(self):
self.doTargetModeTest('against_file_with_fill',
expect_fill_device=True,
expect_against_device=False,
expect_format_disk=True)
def testAgainstFileWithoutFill(self):
self.doTargetModeTest('against_file_without_fill',
expect_fill_device=False,
expect_against_device=False,
expect_format_disk=False)
def testAgainstDeviceWithFill(self):
self.doTargetModeTest('against_device_with_fill',
expect_fill_device=True,
expect_against_device=True,
expect_format_disk=False)
def testAgainstDeviceWithoutFill(self):
self.doTargetModeTest('against_device_without_fill',
expect_fill_device=False,
expect_against_device=True,
expect_format_disk=False)
if __name__ == '__main__':
unittest.main()
|
from homeassistant.components.london_air.sensor import CONF_LOCATIONS, URL
from homeassistant.const import HTTP_OK, HTTP_SERVICE_UNAVAILABLE
from homeassistant.setup import async_setup_component
from tests.common import load_fixture
VALID_CONFIG = {"sensor": {"platform": "london_air", CONF_LOCATIONS: ["Merton"]}}
async def test_valid_state(hass, requests_mock):
"""Test for operational london_air sensor with proper attributes."""
requests_mock.get(URL, text=load_fixture("london_air.json"), status_code=HTTP_OK)
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
state = hass.states.get("sensor.merton")
assert state is not None
assert state.state == "Low"
assert state.attributes["icon"] == "mdi:cloud-outline"
assert state.attributes["updated"] == "2017-08-03 03:00:00"
assert state.attributes["sites"] == 2
assert state.attributes["friendly_name"] == "Merton"
sites = state.attributes["data"]
assert sites is not None
assert len(sites) == 2
assert sites[0]["site_code"] == "ME2"
assert sites[0]["site_type"] == "Roadside"
assert sites[0]["site_name"] == "Merton Road"
assert sites[0]["pollutants_status"] == "Low"
pollutants = sites[0]["pollutants"]
assert pollutants is not None
assert len(pollutants) == 1
assert pollutants[0]["code"] == "PM10"
assert pollutants[0]["quality"] == "Low"
assert int(pollutants[0]["index"]) == 2
assert pollutants[0]["summary"] == "PM10 is Low"
async def test_api_failure(hass, requests_mock):
"""Test for failure in the API."""
requests_mock.get(URL, status_code=HTTP_SERVICE_UNAVAILABLE)
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
state = hass.states.get("sensor.merton")
assert state is not None
assert state.attributes["updated"] is None
assert state.attributes["sites"] == 0
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import context
from perfkitbenchmarker import disk
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.aws import aws_disk
from perfkitbenchmarker.providers.aws import aws_virtual_machine
from perfkitbenchmarker.providers.azure import azure_disk
from perfkitbenchmarker.providers.azure import azure_virtual_machine
from perfkitbenchmarker.providers.azure import flags as azure_flags
from perfkitbenchmarker.providers.gcp import gce_disk
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'name'
_BENCHMARK_UID = 'uid'
_COMPONENT = 'test_component'
class _DiskMetadataTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(_DiskMetadataTestCase, self).setUp()
self.addCleanup(context.SetThreadBenchmarkSpec, None)
p = mock.patch(vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir')
p.start()
self.addCleanup(p.stop)
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
_BENCHMARK_NAME, flag_values=FLAGS, vm_groups={})
self.benchmark_spec = benchmark_spec.BenchmarkSpec(
mock.MagicMock(), config_spec, _BENCHMARK_UID)
class GcpDiskMetadataTest(_DiskMetadataTestCase):
def testPDStandard(self):
disk_spec = disk.BaseDiskSpec(_COMPONENT, disk_size=2,
disk_type=gce_disk.PD_STANDARD)
disk_obj = gce_disk.GceDisk(disk_spec, 'name', 'zone', 'project')
self.assertDictContainsSubset(
{disk.MEDIA: disk.HDD, disk.REPLICATION: disk.ZONE},
disk_obj.metadata
)
class AwsDiskMetadataTest(_DiskMetadataTestCase):
def DoAwsDiskTest(self, disk_type, machine_type,
goal_media, goal_replication):
disk_spec = aws_disk.AwsDiskSpec(_COMPONENT, disk_size=2,
disk_type=disk_type)
vm_spec = aws_virtual_machine.AwsVmSpec(
'test_vm_spec.AWS', zone='us-east-1a', machine_type=machine_type)
vm = aws_virtual_machine.Ubuntu1804BasedAwsVirtualMachine(vm_spec)
vm.CreateScratchDisk(disk_spec)
self.assertDictContainsSubset(
{disk.MEDIA: goal_media, disk.REPLICATION: goal_replication},
vm.scratch_disks[0].metadata
)
def testLocalSSD(self):
self.DoAwsDiskTest(
disk.LOCAL,
'c3.2xlarge',
disk.SSD,
disk.NONE)
def testLocalHDD(self):
self.DoAwsDiskTest(
disk.LOCAL,
'd2.2xlarge',
disk.HDD,
disk.NONE)
class AzureDiskMetadataTest(_DiskMetadataTestCase):
def DoAzureDiskTest(self, storage_type, disk_type, machine_type,
goal_media, goal_replication,
goal_host_caching, disk_size=2,
goal_size=2, goal_stripes=1):
FLAGS.azure_storage_type = storage_type
FLAGS.azure_host_caching = goal_host_caching
disk_spec = disk.BaseDiskSpec(_COMPONENT, disk_size=disk_size,
disk_type=disk_type,
num_striped_disks=goal_stripes)
vm_spec = azure_virtual_machine.AzureVmSpec(
'test_vm_spec.AZURE', zone='eastus2', machine_type=machine_type)
vm = azure_virtual_machine.Ubuntu1604BasedAzureVirtualMachine(vm_spec)
azure_disk.AzureDisk.Create = mock.Mock()
azure_disk.AzureDisk.Attach = mock.Mock()
vm.StripeDisks = mock.Mock()
vm.CreateScratchDisk(disk_spec)
expected = {disk.MEDIA: goal_media,
disk.REPLICATION: goal_replication,
'num_stripes': goal_stripes,
'size': goal_size}
if goal_host_caching:
expected[azure_disk.HOST_CACHING] = goal_host_caching
self.assertDictContainsSubset(expected, vm.scratch_disks[0].metadata)
def testPremiumStorage(self):
self.DoAzureDiskTest(azure_flags.PLRS,
azure_disk.PREMIUM_STORAGE,
'Standard_D1',
disk.SSD,
disk.ZONE,
azure_flags.READ_ONLY)
def testStandardDisk(self):
self.DoAzureDiskTest(azure_flags.ZRS,
azure_disk.STANDARD_DISK,
'Standard_D1',
disk.HDD,
disk.REGION,
azure_flags.NONE)
def testLocalHDD(self):
self.DoAzureDiskTest(azure_flags.LRS,
disk.LOCAL,
'Standard_A1',
disk.HDD,
disk.NONE,
None)
def testLocalSSD(self):
self.DoAzureDiskTest(azure_flags.LRS,
disk.LOCAL,
'Standard_DS2',
disk.SSD,
disk.NONE,
None)
def testStripedDisk(self):
self.DoAzureDiskTest(azure_flags.LRS,
azure_disk.STANDARD_DISK,
'Standard_D1',
disk.HDD,
disk.ZONE,
azure_flags.READ_ONLY,
disk_size=5,
goal_size=10,
goal_stripes=2)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import argparse
import sys
_stash = globals()["_stash"]
class ConsoleQuicklook(object):
def __init__(self, args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument("file", action="store", help="file to open")
ns = p.parse_args(args)
self.filename = ns.file
def quicklook(self):
_stash.libdist.quicklook(self.filename)
if __name__ == "__main__":
ConsoleQuicklook(sys.argv[1:]).quicklook()
|
import mock
from scrapy import Spider
from scrapy.http import Request
from scrapy_redis.queue import Base
class TestBaseQueue(object):
queue_cls = Base
def setup(self):
self.server = mock.Mock()
self.spider = Spider(name='foo')
self.spider.parse_method = lambda x: x
self.key = 'key'
self.q = self.queue_cls(self.server, self.spider, self.key)
def test_encode_decode_requests(self, q=None):
if q is None:
q = self.q
req = Request('http://example.com',
callback=self.spider.parse,
meta={'foo': 'bar'})
out = q._decode_request(q._encode_request(req))
assert req.url == out.url
assert req.meta == out.meta
assert req.callback == out.callback
def test_custom_serializer(self):
serializer = mock.Mock()
serializer.dumps = mock.Mock(side_effect=lambda x: x)
serializer.loads = mock.Mock(side_effect=lambda x: x)
q = Base(self.server, self.spider, self.key, serializer=serializer)
self.test_encode_decode_requests(q)
assert serializer.dumps.call_count == 1
assert serializer.loads.call_count == 1
|
from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext.termscoring.BetaPosterior import BetaPosterior
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus
class TestBetaPosterior(TestCase):
def test_get_score_df(self):
corpus = build_hamlet_jz_corpus()
beta_posterior = BetaPosterior(corpus).set_categories('hamlet')
score_df = beta_posterior.get_score_df()
scores = beta_posterior.get_scores()
np.testing.assert_almost_equal(scores[:5], [-0.3194860824225506, 1.0294085051562822, 1.0294085051562822,
1.234664219528909, 1.0294085051562822])
def test_get_name(self):
corpus = build_hamlet_jz_corpus()
self.assertEqual(BetaPosterior(corpus).get_name(), 'Beta Posterior')
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from ping import PingCollector
##########################################################################
class TestPingCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PingCollector', {
'interval': 10,
'target_a': 'localhost',
'bin': 'true'
})
self.collector = PingCollector(config, None)
def test_import(self):
self.assertTrue(PingCollector)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_bad_gentoo(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('bad_gentoo').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 10000
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_host_gentoo(self, publish_mock):
config = get_collector_config('PingCollector', {
'interval': 10,
'target_a': 'localhost',
'target_b': 'www.example.org',
'target_c': '192.168.0.1',
'bin': 'true'
})
collector = PingCollector(config, None)
def popen_se(command, **kwargs):
popen_mock = Mock()
if command[-1] == 'localhost':
popen_mock.communicate = Mock(return_value=(
self.getFixture('host_gentoo').getvalue(),
''))
elif command[-1] == 'www.example.org':
popen_mock.communicate = Mock(return_value=(
self.getFixture('host_gentoo2').getvalue(),
''))
elif command[-1] == '192.168.0.1':
popen_mock.communicate = Mock(return_value=(
self.getFixture('host_gentoo3').getvalue(),
''))
return popen_mock
patch_popen = patch(
'subprocess.Popen',
Mock(side_effect=popen_se))
patch_popen.start()
collector.collect()
patch_popen.stop()
metrics = {
'localhost': 11,
'www_example_org': 23,
'192_168_0_1': 16
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_ip_gentoo(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('ip_gentoo').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 0
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_longhost_gentoo(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture(
'longhost_gentoo').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 10
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_timeout_gentoo(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture(
'timeout_gentoo').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 10000
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_host_osx(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('host_osx').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 38
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_ip_osx(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('ip_osx').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 0
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_longhost_osx(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('longhost_osx').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 42
})
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_timeout_osx(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('timeout_osx').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'localhost': 10000
})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from flask import Flask, jsonify, Response, request
from flasgger import Swagger, SwaggerView
swagger_config = {
"headers": [],
"openapi": "3.0.2",
"components": {
'schemas': {
'Species': {
'type': 'object',
'properties': {
'id': {
'type': 'integer',
'format': 'int64'
},
'name': {
'type': 'string'
}
}
},
'Animal': {
'type': 'object',
'properties': {
'species': {
'$ref': '#/components/schemas/Species'
},
'name': {
'type': 'string'
}
}
}
}
},
"title": "Animal Shelter API",
"version": '',
"termsOfService": "",
"static_url_path": "/characteristics/static",
"swagger_ui": True,
"description": "",
}
class Animals(SwaggerView):
def get(self):
"""
find shelter animals that fit the criteria
---
tags:
- Animals
description: Parameters can be provided in the query to search
operationId: getAnimals
parameters:
- name: species
in: query
required: true
schema:
type: string
- name: status
in: query
required: true
schema:
type: string
enum:
- adopted
- pending
- adoptable
- transferred
- sanctuary
- medical-hold
responses:
'200':
description: Animals to be returned
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/Animal'
"""
        species = request.args.get('species')
status = request.args.get('status')
return Response(status=200)
app = Flask(__name__)
swag = Swagger(app, config=swagger_config, merge=True)
app.add_url_rule(
'/animals',
view_func=Animals.as_view('animals'),
methods=['GET']
)
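# Illustrative query against the route above (Flask's default host/port are
# an assumption):
#   curl 'http://127.0.0.1:5000/animals?species=cat&status=adoptable'
# With merge=True, flasgger keeps its default spec route, so the generated
# OpenAPI document is typically served at /apispec_1.json.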
if __name__ == "__main__":
app.run(debug=True)
|
from functools import lru_cache
from aiohttp.helpers import reify
from plexapi.exceptions import NotFound
from homeassistant.components.plex.const import (
CONF_SERVER,
CONF_SERVER_IDENTIFIER,
PLEX_SERVER_CONFIG,
)
from homeassistant.const import CONF_URL
from .const import DEFAULT_DATA, MOCK_SERVERS, MOCK_USERS
GDM_SERVER_PAYLOAD = [
{
"data": {
"Content-Type": "plex/media-server",
"Name": "plextest",
"Port": "32400",
"Resource-Identifier": "1234567890123456789012345678901234567890",
"Updated-At": "157762684800",
"Version": "1.0",
},
"from": ("1.2.3.4", 32414),
}
]
GDM_CLIENT_PAYLOAD = [
{
"data": {
"Content-Type": "plex/media-player",
"Device-Class": "stb",
"Name": "plexamp",
"Port": "36000",
"Product": "Plexamp",
"Protocol": "plex",
"Protocol-Capabilities": "timeline,playback,playqueues,playqueues-creation",
"Protocol-Version": "1",
"Resource-Identifier": "client-2",
"Version": "1.1.0",
},
"from": ("1.2.3.10", 32412),
},
{
"data": {
"Content-Type": "plex/media-player",
"Device-Class": "pc",
"Name": "Chrome",
"Port": "32400",
"Product": "Plex Web",
"Protocol": "plex",
"Protocol-Capabilities": "timeline,playback,navigation,mirror,playqueues",
"Protocol-Version": "3",
"Resource-Identifier": "client-1",
"Version": "4.40.1",
},
"from": ("1.2.3.4", 32412),
},
{
"data": {
"Content-Type": "plex/media-player",
"Device-Class": "mobile",
"Name": "SHIELD Android TV",
"Port": "32500",
"Product": "Plex for Android (TV)",
"Protocol": "plex",
"Protocol-Capabilities": "timeline,playback,navigation,mirror,playqueues,provider-playback",
"Protocol-Version": "1",
"Resource-Identifier": "client-999",
"Updated-At": "1597686153",
"Version": "8.5.0.19697",
},
"from": ("1.2.3.11", 32412),
},
]
class MockGDM:
"""Mock a GDM instance."""
def __init__(self, disabled=False):
"""Initialize the object."""
self.entries = []
self.disabled = disabled
def scan(self, scan_for_clients=False):
"""Mock the scan call."""
if self.disabled:
return
if scan_for_clients:
self.entries = GDM_CLIENT_PAYLOAD
else:
self.entries = GDM_SERVER_PAYLOAD
class MockResource:
"""Mock a PlexAccount resource."""
def __init__(self, index, kind="server"):
"""Initialize the object."""
if kind == "server":
self.name = MOCK_SERVERS[index][CONF_SERVER]
self.clientIdentifier = MOCK_SERVERS[index][ # pylint: disable=invalid-name
CONF_SERVER_IDENTIFIER
]
self.provides = ["server"]
self.device = MockPlexServer(index)
else:
self.name = f"plex.tv Resource Player {index+10}"
self.clientIdentifier = f"client-{index+10}"
self.provides = ["player"]
self.device = MockPlexClient(
baseurl=f"http://192.168.0.1{index}:32500", index=index + 10
)
self.presence = index == 0
self.publicAddressMatches = True
def connect(self, timeout):
"""Mock the resource connect method."""
return self.device
class MockPlexAccount:
"""Mock a PlexAccount instance."""
def __init__(self, servers=1, players=3):
"""Initialize the object."""
self._resources = []
for index in range(servers):
self._resources.append(MockResource(index))
for index in range(players):
self._resources.append(MockResource(index, kind="player"))
def resource(self, name):
"""Mock the PlexAccount resource lookup method."""
return [x for x in self._resources if x.name == name][0]
def resources(self):
"""Mock the PlexAccount resources listing method."""
return self._resources
def sonos_speaker(self, speaker_name):
"""Mock the PlexAccount Sonos lookup method."""
return MockPlexSonosClient(speaker_name)
class MockPlexSystemAccount:
"""Mock a PlexSystemAccount instance."""
def __init__(self, index):
"""Initialize the object."""
# Start accountIDs at 1 to set proper owner.
self.name = list(MOCK_USERS)[index]
self.accountID = index + 1
class MockPlexServer:
"""Mock a PlexServer instance."""
def __init__(
self,
index=0,
config_entry=None,
num_users=len(MOCK_USERS),
session_type="video",
):
"""Initialize the object."""
if config_entry:
self._data = config_entry.data
else:
self._data = DEFAULT_DATA
self._baseurl = self._data[PLEX_SERVER_CONFIG][CONF_URL]
self.friendlyName = self._data[CONF_SERVER]
self.machineIdentifier = self._data[CONF_SERVER_IDENTIFIER]
self._systemAccounts = list(map(MockPlexSystemAccount, range(num_users)))
self._clients = []
self._session = None
self._sessions = []
self.set_clients(num_users)
self.set_sessions(num_users, session_type)
self._cache = {}
def set_clients(self, num_clients):
"""Set up mock PlexClients for this PlexServer."""
self._clients = [
MockPlexClient(baseurl=self._baseurl, index=x) for x in range(num_clients)
]
def set_sessions(self, num_sessions, session_type):
"""Set up mock PlexSessions for this PlexServer."""
self._sessions = [
MockPlexSession(self._clients[x], mediatype=session_type, index=x)
for x in range(num_sessions)
]
def clear_clients(self):
"""Clear all active PlexClients."""
self._clients = []
def clear_sessions(self):
"""Clear all active PlexSessions."""
self._sessions = []
def clients(self):
"""Mock the clients method."""
return self._clients
def createToken(self):
"""Mock the createToken method."""
return "temporary_token"
def sessions(self):
"""Mock the sessions method."""
return self._sessions
def systemAccounts(self):
"""Mock the systemAccounts lookup method."""
return self._systemAccounts
def url(self, path, includeToken=False):
"""Mock method to generate a server URL."""
return f"{self._baseurl}{path}"
@property
def accounts(self):
"""Mock the accounts property."""
return set(MOCK_USERS)
@property
def version(self):
"""Mock version of PlexServer."""
return "1.0"
@reify
def library(self):
"""Mock library object of PlexServer."""
return MockPlexLibrary(self)
def playlist(self, playlist):
"""Mock the playlist lookup method."""
return MockPlexMediaItem(playlist, mediatype="playlist")
@lru_cache()
def playlists(self):
"""Mock the playlists lookup method with a lazy init."""
return [
MockPlexPlaylist(
self.library.section("Movies").all()
+ self.library.section("TV Shows").all()
),
MockPlexPlaylist(self.library.section("Music").all()),
]
def fetchItem(self, item):
"""Mock the fetchItem method."""
for section in self.library.sections():
result = section.fetchItem(item)
if result:
return result
class MockPlexClient:
"""Mock a PlexClient instance."""
def __init__(self, server=None, baseurl=None, token=None, index=0):
"""Initialize the object."""
self.machineIdentifier = f"client-{index+1}"
self._baseurl = baseurl
self._index = index
def url(self, key):
"""Mock the url method."""
return f"{self._baseurl}{key}"
@property
def device(self):
"""Mock the device attribute."""
return "DEVICE"
@property
def platform(self):
"""Mock the platform attribute."""
return "PLATFORM"
@property
def product(self):
"""Mock the product attribute."""
if self._index == 1:
return "Plex Web"
return "PRODUCT"
@property
def protocolCapabilities(self):
"""Mock the protocolCapabilities attribute."""
return ["playback"]
@property
def state(self):
"""Mock the state attribute."""
return "playing"
@property
def title(self):
"""Mock the title attribute."""
return "TITLE"
@property
def version(self):
"""Mock the version attribute."""
return "1.0"
def proxyThroughServer(self, value=True, server=None):
"""Mock the proxyThroughServer method."""
pass
def playMedia(self, item):
"""Mock the playMedia method."""
pass
class MockPlexSession:
"""Mock a PlexServer.sessions() instance."""
def __init__(self, player, mediatype, index=0):
"""Initialize the object."""
self.TYPE = mediatype
self.usernames = [list(MOCK_USERS)[index]]
self.players = [player]
self._section = MockPlexLibrarySection("Movies")
@property
def duration(self):
"""Mock the duration attribute."""
return 10000000
@property
def librarySectionID(self):
"""Mock the librarySectionID attribute."""
return 1
@property
def ratingKey(self):
"""Mock the ratingKey attribute."""
return 123
def section(self):
"""Mock the section method."""
return self._section
@property
def summary(self):
"""Mock the summary attribute."""
return "SUMMARY"
@property
def thumbUrl(self):
"""Mock the thumbUrl attribute."""
return "http://1.2.3.4/thumb"
@property
def title(self):
"""Mock the title attribute."""
return "TITLE"
@property
def type(self):
"""Mock the type attribute."""
return "movie"
@property
def viewOffset(self):
"""Mock the viewOffset attribute."""
return 0
@property
def year(self):
"""Mock the year attribute."""
return 2020
class MockPlexLibrary:
"""Mock a Plex Library instance."""
def __init__(self, plex_server):
"""Initialize the object."""
self._plex_server = plex_server
self._sections = {}
for kind in ["Movies", "Music", "TV Shows", "Photos"]:
self._sections[kind] = MockPlexLibrarySection(kind)
def section(self, title):
"""Mock the LibrarySection lookup."""
section = self._sections.get(title)
if section:
return section
raise NotFound
def sections(self):
"""Return all available sections."""
return self._sections.values()
def sectionByID(self, section_id):
"""Mock the sectionByID lookup."""
return [x for x in self.sections() if x.key == section_id][0]
def onDeck(self):
"""Mock an empty On Deck folder."""
return []
def recentlyAdded(self):
"""Mock an empty Recently Added folder."""
return []
class MockPlexLibrarySection:
"""Mock a Plex LibrarySection instance."""
def __init__(self, library):
"""Initialize the object."""
self.title = library
if library == "Music":
self._item = MockPlexArtist("Artist")
elif library == "TV Shows":
self._item = MockPlexShow("TV Show")
else:
self._item = MockPlexMediaItem(library[:-1])
def get(self, query):
"""Mock the get lookup method."""
if self._item.title == query:
return self._item
raise NotFound
def all(self):
"""Mock the all method."""
return [self._item]
def fetchItem(self, ratingKey):
"""Return a specific item."""
for item in self.all():
if item.ratingKey == ratingKey:
return item
if item._children:
for child in item._children:
if child.ratingKey == ratingKey:
return child
def onDeck(self):
"""Mock an empty On Deck folder."""
return []
def recentlyAdded(self):
"""Mock an empty Recently Added folder."""
return self.all()
@property
def type(self):
"""Mock the library type."""
if self.title == "Movies":
return "movie"
if self.title == "Music":
return "artist"
if self.title == "TV Shows":
return "show"
if self.title == "Photos":
return "photo"
@property
def TYPE(self):
"""Return the library type."""
return self.type
@property
def key(self):
"""Mock the key identifier property."""
return str(id(self.title))
def search(self, **kwargs):
"""Mock the LibrarySection search method."""
if kwargs.get("libtype") == "movie":
return self.all()
def update(self):
"""Mock the update call."""
pass
class MockPlexMediaItem:
"""Mock a Plex Media instance."""
def __init__(self, title, mediatype="video", year=2020):
"""Initialize the object."""
self.title = str(title)
self.type = mediatype
self.thumbUrl = "http://1.2.3.4/thumb.png"
self.year = year
self._children = []
def __iter__(self):
"""Provide iterator."""
yield from self._children
@property
def ratingKey(self):
"""Mock the ratingKey property."""
return id(self.title)
class MockPlexPlaylist(MockPlexMediaItem):
"""Mock a Plex Playlist instance."""
def __init__(self, items):
"""Initialize the object."""
super().__init__(f"Playlist ({len(items)} Items)", "playlist")
for item in items:
self._children.append(item)
class MockPlexShow(MockPlexMediaItem):
"""Mock a Plex Show instance."""
def __init__(self, show):
"""Initialize the object."""
super().__init__(show, "show")
for index in range(1, 5):
self._children.append(MockPlexSeason(index))
def season(self, season):
"""Mock the season lookup method."""
return [x for x in self._children if x.title == f"Season {season}"][0]
class MockPlexSeason(MockPlexMediaItem):
"""Mock a Plex Season instance."""
def __init__(self, season):
"""Initialize the object."""
super().__init__(f"Season {season}", "season")
for index in range(1, 10):
self._children.append(MockPlexMediaItem(f"Episode {index}", "episode"))
def episode(self, episode):
"""Mock the episode lookup method."""
return self._children[episode - 1]
class MockPlexAlbum(MockPlexMediaItem):
"""Mock a Plex Album instance."""
def __init__(self, album):
"""Initialize the object."""
super().__init__(album, "album")
for index in range(1, 10):
self._children.append(MockPlexMediaTrack(index))
def track(self, track):
"""Mock the track lookup method."""
try:
return [x for x in self._children if x.title == track][0]
except IndexError:
raise NotFound
def tracks(self):
"""Mock the tracks lookup method."""
return self._children
class MockPlexArtist(MockPlexMediaItem):
"""Mock a Plex Artist instance."""
def __init__(self, artist):
"""Initialize the object."""
super().__init__(artist, "artist")
self._album = MockPlexAlbum("Album")
def album(self, album):
"""Mock the album lookup method."""
return self._album
def get(self, track):
"""Mock the track lookup method."""
return self._album.track(track)
class MockPlexMediaTrack(MockPlexMediaItem):
"""Mock a Plex Track instance."""
def __init__(self, index=1):
"""Initialize the object."""
super().__init__(f"Track {index}", "track")
self.index = index
class MockPlexSonosClient:
"""Mock a PlexSonosClient instance."""
def __init__(self, name):
"""Initialize the object."""
self.name = name
def playMedia(self, item):
"""Mock the playMedia method."""
pass
|
import pytest
import sys
import time
from contextlib import contextmanager
from plumbum.cli.terminal import ask, choose, prompt, hexdump, Progress
from plumbum.lib import StringIO
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
OrderedDict = None
needs_od = pytest.mark.skipif(OrderedDict is None,
reason="Ordered dict not available (Py 2.6)")
@contextmanager
def send_stdin(stdin = "\n"):
prevstdin = sys.stdin
sys.stdin = StringIO(stdin)
try:
yield sys.stdin
finally:
sys.stdin = prevstdin
class TestPrompt:
def test_simple(self, capsys):
with send_stdin("12"):
assert prompt("Enter a random int:", type=int) == 12
assert capsys.readouterr()[0] == "Enter a random int: "
def test_try_twice(self, capsys):
with send_stdin("\n13"):
assert prompt("Enter a random int:", type=int) == 13
assert capsys.readouterr()[0] == "Enter a random int: Enter a random int: "
def test_str(self):
with send_stdin("1234"):
assert prompt("Enter a string", type=str) == "1234"
def test_default(self, capsys):
with send_stdin(""):
assert prompt("Enter nothing", default='hi') == 'hi'
assert capsys.readouterr()[0] == "Enter nothing [hi]: "
def test_typefail(self, capsys):
with send_stdin("1.2\n13"):
assert prompt("Enter int", type=int) == 13
assert "try again" in capsys.readouterr()[0]
def test_validator(self, capsys):
with send_stdin("12\n9"):
assert prompt("Enter in range < 10", type=int, validator=lambda x: x<10) == 9
assert "try again" in capsys.readouterr()[0]
class TestTerminal:
def test_ask(self, capsys):
with send_stdin("\n"):
assert ask("Do you like cats?", default = True)
assert capsys.readouterr()[0] == "Do you like cats? [Y/n] "
with send_stdin("\nyes"):
assert ask("Do you like cats?")
assert capsys.readouterr()[0] == "Do you like cats? (y/n) Invalid response, please try again\nDo you like cats? (y/n) "
def test_choose(self, capsys):
with send_stdin("foo\n2\n"):
assert choose("What is your favorite color?", ["blue", "yellow", "green"]) == "yellow"
assert capsys.readouterr()[0] == "What is your favorite color?\n(1) blue\n(2) yellow\n(3) green\nChoice: Invalid choice, please try again\nChoice: "
with send_stdin("foo\n2\n"):
assert choose("What is your favorite color?", [("blue", 10), ("yellow", 11), ("green", 12)]) == 11
assert capsys.readouterr()[0] == "What is your favorite color?\n(1) blue\n(2) yellow\n(3) green\nChoice: Invalid choice, please try again\nChoice: "
with send_stdin("foo\n\n"):
assert choose("What is your favorite color?", ["blue", "yellow", "green"], default = "yellow") == "yellow"
assert capsys.readouterr()[0] == "What is your favorite color?\n(1) blue\n(2) yellow\n(3) green\nChoice [2]: Invalid choice, please try again\nChoice [2]: "
def test_choose_dict(self):
with send_stdin("23\n1"):
value = choose("Pick", dict(one="a",two="b"))
assert value in ("a","b")
@needs_od
def test_ordered_dict(self):
dic = OrderedDict()
dic["one"] = "a"
dic["two"] = "b"
with send_stdin("1"):
value = choose("Pick", dic)
assert value == "a"
with send_stdin("2"):
value = choose("Pick", dic)
assert value == "b"
@needs_od
def test_choose_dict_default(self, capsys):
dic = OrderedDict()
dic["one"] = "a"
dic["two"] = "b"
with send_stdin():
assert choose("Pick", dic, default="a") == "a"
assert "[1]" in capsys.readouterr()[0]
def test_hexdump(self):
data = "hello world my name is queen marry" + "A" * 66 + "foo bar"
output = """\
000000 | 68 65 6c 6c 6f 20 77 6f 72 6c 64 20 6d 79 20 6e | hello world my n
000010 | 61 6d 65 20 69 73 20 71 75 65 65 6e 20 6d 61 72 | ame is queen mar
000020 | 72 79 41 41 41 41 41 41 41 41 41 41 41 41 41 41 | ryAAAAAAAAAAAAAA
000030 | 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 41 | AAAAAAAAAAAAAAAA
*
000060 | 41 41 41 41 66 6f 6f 20 62 61 72 | AAAAfoo bar"""
assert "\n".join(hexdump(data)) == output
assert "\n".join(hexdump(StringIO(data))) == output
def test_progress(self, capsys):
for i in Progress.range(4, has_output=True, timer=False):
print('hi')
stdout, stderr = capsys.readouterr()
output = """\
0% complete
0% complete
hi
25% complete
hi
50% complete
hi
75% complete
hi
100% complete
"""
assert stdout == output
def test_progress_empty(self, capsys):
for i in Progress.range(0, has_output=True, timer=False):
print('hi')
stdout, stderr = capsys.readouterr()
output = '0/0 complete'
assert output in stdout
|
import logging
from requests import RequestException, Session
from schluter.api import Api
from schluter.authenticator import AuthenticationState, Authenticator
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
DATA_SCHLUTER_SESSION = "schluter_session"
DATA_SCHLUTER_API = "schluter_api"
SCHLUTER_CONFIG_FILE = ".schluter.conf"
API_TIMEOUT = 10
CONFIG_SCHEMA = vol.Schema(
{
vol.Required(DOMAIN): vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Schluter component."""
_LOGGER.debug("Starting setup of schluter")
conf = config[DOMAIN]
api_http_session = Session()
api = Api(timeout=API_TIMEOUT, http_session=api_http_session)
authenticator = Authenticator(
api,
conf.get(CONF_USERNAME),
conf.get(CONF_PASSWORD),
session_id_cache_file=hass.config.path(SCHLUTER_CONFIG_FILE),
)
authentication = None
try:
authentication = authenticator.authenticate()
except RequestException as ex:
_LOGGER.error("Unable to connect to Schluter service: %s", ex)
        return False
state = authentication.state
if state == AuthenticationState.AUTHENTICATED:
hass.data[DOMAIN] = {
DATA_SCHLUTER_API: api,
DATA_SCHLUTER_SESSION: authentication.session_id,
}
discovery.load_platform(hass, "climate", DOMAIN, {}, config)
return True
if state == AuthenticationState.BAD_PASSWORD:
_LOGGER.error("Invalid password provided")
return False
if state == AuthenticationState.BAD_EMAIL:
_LOGGER.error("Invalid email provided")
return False
_LOGGER.error("Unknown set up error: %s", state)
return False
|
import contextlib
import mock
import pytest
from scrapy import signals
from scrapy.exceptions import DontCloseSpider
from scrapy.settings import Settings
from scrapy_redis.spiders import (
RedisCrawlSpider,
RedisSpider,
)
@contextlib.contextmanager
def flushall(server):
try:
yield
finally:
server.flushall()
class MySpider(RedisSpider):
name = 'myspider'
class MyCrawlSpider(RedisCrawlSpider):
name = 'myspider'
def get_crawler(**kwargs):
return mock.Mock(settings=Settings(), **kwargs)
class TestRedisMixin_setup_redis(object):
def setup(self):
self.myspider = MySpider()
def test_crawler_required(self):
with pytest.raises(ValueError) as excinfo:
self.myspider.setup_redis()
assert "crawler" in str(excinfo.value)
def test_requires_redis_key(self):
self.myspider.crawler = get_crawler()
self.myspider.redis_key = ''
with pytest.raises(ValueError) as excinfo:
self.myspider.setup_redis()
assert "redis_key" in str(excinfo.value)
def test_invalid_batch_size(self):
self.myspider.redis_batch_size = 'x'
self.myspider.crawler = get_crawler()
with pytest.raises(ValueError) as excinfo:
self.myspider.setup_redis()
assert "redis_batch_size" in str(excinfo.value)
@mock.patch('scrapy_redis.spiders.connection')
def test_via_from_crawler(self, connection):
server = connection.from_settings.return_value = mock.Mock()
crawler = get_crawler()
myspider = MySpider.from_crawler(crawler)
assert myspider.server is server
connection.from_settings.assert_called_with(crawler.settings)
crawler.signals.connect.assert_called_with(myspider.spider_idle, signal=signals.spider_idle)
# Second call does nothing.
server = myspider.server
crawler.signals.connect.reset_mock()
myspider.setup_redis()
assert myspider.server is server
assert crawler.signals.connect.call_count == 0
@pytest.mark.parametrize('spider_cls', [
MySpider,
MyCrawlSpider,
])
def test_from_crawler_with_spider_arguments(spider_cls):
crawler = get_crawler()
spider = spider_cls.from_crawler(
crawler, 'foo',
redis_key='key:%(name)s',
redis_batch_size='2000',
)
assert spider.name == 'foo'
assert spider.redis_key == 'key:foo'
assert spider.redis_batch_size == 2000
class MockRequest(mock.Mock):
def __init__(self, url, **kwargs):
super(MockRequest, self).__init__()
self.url = url
def __eq__(self, other):
return self.url == other.url
def __hash__(self):
return hash(self.url)
def __repr__(self):
return '<%s(%s)>' % (self.__class__.__name__, self.url)
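# MockRequest compares and hashes by URL alone, so the order- and set-based
# assertions below hold regardless of callback identity.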
@pytest.mark.parametrize('spider_cls', [
MySpider,
MyCrawlSpider,
])
@pytest.mark.parametrize('start_urls_as_zset', [False, True])
@pytest.mark.parametrize('start_urls_as_set', [False, True])
@mock.patch('scrapy.spiders.Request', MockRequest)
def test_consume_urls_from_redis(start_urls_as_zset, start_urls_as_set, spider_cls):
batch_size = 5
redis_key = 'start:urls'
crawler = get_crawler()
crawler.settings.setdict({
'REDIS_START_URLS_KEY': redis_key,
'REDIS_START_URLS_AS_ZSET': start_urls_as_zset,
'REDIS_START_URLS_AS_SET': start_urls_as_set,
'CONCURRENT_REQUESTS': batch_size,
})
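    # scrapy_redis reads REDIS_START_URLS_BATCH_SIZE with CONCURRENT_REQUESTS
    # as the fallback, so each fetch pulls at most batch_size URLs.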
spider = spider_cls.from_crawler(crawler)
with flushall(spider.server):
urls = [
'http://example.com/%d' % i for i in range(batch_size * 2)
]
reqs = []
if start_urls_as_set:
server_put = spider.server.sadd
elif start_urls_as_zset:
server_put = lambda key, value: spider.server.zadd(key, {value: 0})
else:
server_put = spider.server.rpush
for url in urls:
server_put(redis_key, url)
reqs.append(MockRequest(url))
# First call is to start requests.
start_requests = list(spider.start_requests())
if start_urls_as_zset or start_urls_as_set:
assert len(start_requests) == batch_size
assert set(start_requests).issubset(reqs)
else:
assert start_requests == reqs[:batch_size]
# Second call is to spider idle method.
with pytest.raises(DontCloseSpider):
spider.spider_idle()
# Process remaining requests in the queue.
with pytest.raises(DontCloseSpider):
spider.spider_idle()
# Last batch was passed to crawl.
assert crawler.engine.crawl.call_count == batch_size
if start_urls_as_zset or start_urls_as_set:
crawler.engine.crawl.assert_has_calls([
mock.call(req, spider=spider) for req in reqs if req not in start_requests
], any_order=True)
else:
crawler.engine.crawl.assert_has_calls([
mock.call(req, spider=spider) for req in reqs[batch_size:]
])
|
import logging
from homeassistant.const import TIME_MINUTES, UV_INDEX
from homeassistant.core import callback
from homeassistant.util.dt import as_local, parse_datetime
from . import (
DATA_OPENUV_CLIENT,
DATA_UV,
DOMAIN,
TYPE_CURRENT_OZONE_LEVEL,
TYPE_CURRENT_UV_INDEX,
TYPE_CURRENT_UV_LEVEL,
TYPE_MAX_UV_INDEX,
TYPE_SAFE_EXPOSURE_TIME_1,
TYPE_SAFE_EXPOSURE_TIME_2,
TYPE_SAFE_EXPOSURE_TIME_3,
TYPE_SAFE_EXPOSURE_TIME_4,
TYPE_SAFE_EXPOSURE_TIME_5,
TYPE_SAFE_EXPOSURE_TIME_6,
OpenUvEntity,
)
_LOGGER = logging.getLogger(__name__)
ATTR_MAX_UV_TIME = "time"
EXPOSURE_TYPE_MAP = {
TYPE_SAFE_EXPOSURE_TIME_1: "st1",
TYPE_SAFE_EXPOSURE_TIME_2: "st2",
TYPE_SAFE_EXPOSURE_TIME_3: "st3",
TYPE_SAFE_EXPOSURE_TIME_4: "st4",
TYPE_SAFE_EXPOSURE_TIME_5: "st5",
TYPE_SAFE_EXPOSURE_TIME_6: "st6",
}
UV_LEVEL_EXTREME = "Extreme"
UV_LEVEL_VHIGH = "Very High"
UV_LEVEL_HIGH = "High"
UV_LEVEL_MODERATE = "Moderate"
UV_LEVEL_LOW = "Low"
SENSORS = {
TYPE_CURRENT_OZONE_LEVEL: ("Current Ozone Level", "mdi:vector-triangle", "du"),
TYPE_CURRENT_UV_INDEX: ("Current UV Index", "mdi:weather-sunny", UV_INDEX),
TYPE_CURRENT_UV_LEVEL: ("Current UV Level", "mdi:weather-sunny", None),
TYPE_MAX_UV_INDEX: ("Max UV Index", "mdi:weather-sunny", UV_INDEX),
TYPE_SAFE_EXPOSURE_TIME_1: (
"Skin Type 1 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
TYPE_SAFE_EXPOSURE_TIME_2: (
"Skin Type 2 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
TYPE_SAFE_EXPOSURE_TIME_3: (
"Skin Type 3 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
TYPE_SAFE_EXPOSURE_TIME_4: (
"Skin Type 4 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
TYPE_SAFE_EXPOSURE_TIME_5: (
"Skin Type 5 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
TYPE_SAFE_EXPOSURE_TIME_6: (
"Skin Type 6 Safe Exposure Time",
"mdi:timer-outline",
TIME_MINUTES,
),
}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up a Nest sensor based on a config entry."""
openuv = hass.data[DOMAIN][DATA_OPENUV_CLIENT][entry.entry_id]
sensors = []
for kind, attrs in SENSORS.items():
name, icon, unit = attrs
sensors.append(OpenUvSensor(openuv, kind, name, icon, unit, entry.entry_id))
async_add_entities(sensors, True)
class OpenUvSensor(OpenUvEntity):
"""Define a binary sensor for OpenUV."""
def __init__(self, openuv, sensor_type, name, icon, unit, entry_id):
"""Initialize the sensor."""
super().__init__(openuv)
self._async_unsub_dispatcher_connect = None
self._entry_id = entry_id
self._icon = icon
self._latitude = openuv.client.latitude
self._longitude = openuv.client.longitude
self._name = name
self._sensor_type = sensor_type
self._state = None
self._unit = unit
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def should_poll(self):
"""Disable polling."""
return False
@property
def state(self):
"""Return the status of the sensor."""
return self._state
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return f"{self._latitude}_{self._longitude}_{self._sensor_type}"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@callback
def update_from_latest_data(self):
"""Update the state."""
data = self.openuv.data[DATA_UV].get("result")
if not data:
self._available = False
return
self._available = True
if self._sensor_type == TYPE_CURRENT_OZONE_LEVEL:
self._state = data["ozone"]
elif self._sensor_type == TYPE_CURRENT_UV_INDEX:
self._state = data["uv"]
elif self._sensor_type == TYPE_CURRENT_UV_LEVEL:
if data["uv"] >= 11:
self._state = UV_LEVEL_EXTREME
elif data["uv"] >= 8:
self._state = UV_LEVEL_VHIGH
elif data["uv"] >= 6:
self._state = UV_LEVEL_HIGH
elif data["uv"] >= 3:
self._state = UV_LEVEL_MODERATE
else:
self._state = UV_LEVEL_LOW
elif self._sensor_type == TYPE_MAX_UV_INDEX:
self._state = data["uv_max"]
self._attrs.update(
{ATTR_MAX_UV_TIME: as_local(parse_datetime(data["uv_max_time"]))}
)
elif self._sensor_type in (
TYPE_SAFE_EXPOSURE_TIME_1,
TYPE_SAFE_EXPOSURE_TIME_2,
TYPE_SAFE_EXPOSURE_TIME_3,
TYPE_SAFE_EXPOSURE_TIME_4,
TYPE_SAFE_EXPOSURE_TIME_5,
TYPE_SAFE_EXPOSURE_TIME_6,
):
self._state = data["safe_exposure_time"][
EXPOSURE_TYPE_MAP[self._sensor_type]
]
|
import logging
from typing import List
from pyflexit.pyflexit import pyflexit
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_COOL,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.components.modbus.const import CONF_HUB, DEFAULT_HUB, MODBUS_DOMAIN
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_NAME,
CONF_SLAVE,
DEVICE_DEFAULT_NAME,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
vol.Required(CONF_SLAVE): vol.All(int, vol.Range(min=0, max=32)),
vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string,
}
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Flexit Platform."""
modbus_slave = config.get(CONF_SLAVE)
name = config.get(CONF_NAME)
hub = hass.data[MODBUS_DOMAIN][config.get(CONF_HUB)]
add_entities([Flexit(hub, modbus_slave, name)], True)
class Flexit(ClimateEntity):
"""Representation of a Flexit AC unit."""
def __init__(self, hub, modbus_slave, name):
"""Initialize the unit."""
self._hub = hub
self._name = name
self._slave = modbus_slave
self._target_temperature = None
self._current_temperature = None
self._current_fan_mode = None
        self._current_operation = None
        self._fan_modes = ["Off", "Low", "Medium", "High"]
self._filter_hours = None
self._filter_alarm = None
self._heat_recovery = None
self._heater_enabled = False
self._heating = None
self._cooling = None
self._alarm = False
self.unit = pyflexit(hub, modbus_slave)
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
def update(self):
"""Update unit attributes."""
if not self.unit.update():
_LOGGER.warning("Modbus read failed")
self._target_temperature = self.unit.get_target_temp
self._current_temperature = self.unit.get_temp
self._current_fan_mode = self._fan_modes[self.unit.get_fan_speed]
self._filter_hours = self.unit.get_filter_hours
# Mechanical heat recovery, 0-100%
self._heat_recovery = self.unit.get_heat_recovery
# Heater active 0-100%
self._heating = self.unit.get_heating
# Cooling active 0-100%
self._cooling = self.unit.get_cooling
# Filter alarm 0/1
self._filter_alarm = self.unit.get_filter_alarm
# Heater enabled or not. Does not mean it's necessarily heating
self._heater_enabled = self.unit.get_heater_enabled
# Current operation mode
self._current_operation = self.unit.get_operation
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return {
"filter_hours": self._filter_hours,
"filter_alarm": self._filter_alarm,
"heat_recovery": self._heat_recovery,
"heating": self._heating,
"heater_enabled": self._heater_enabled,
"cooling": self._cooling,
}
@property
def should_poll(self):
"""Return the polling state."""
return True
@property
def name(self):
"""Return the name of the climate device."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self._current_operation
@property
def hvac_modes(self) -> List[str]:
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return [HVAC_MODE_COOL]
@property
def fan_mode(self):
"""Return the fan setting."""
return self._current_fan_mode
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return self._fan_modes
def set_temperature(self, **kwargs):
"""Set new target temperature."""
if kwargs.get(ATTR_TEMPERATURE) is not None:
self._target_temperature = kwargs.get(ATTR_TEMPERATURE)
self.unit.set_temp(self._target_temperature)
def set_fan_mode(self, fan_mode):
"""Set new fan mode."""
self.unit.set_fan_speed(self._fan_modes.index(fan_mode))
|
import base64
import logging
import ntpath
import os
import time
import uuid
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker import windows_packages
import six
import timeout_decorator
import winrm
FLAGS = flags.FLAGS
flags.DEFINE_bool(
'log_windows_password', False,
'Whether to log passwords for Windows machines. This can be useful in '
'the event of needing to manually RDP to the instance.')
flags.DEFINE_bool(
'set_cpu_priority_high', False,
'Allows executables to be set to High (up from Normal) CPU priority '
'through the SetProcessPriorityToHigh function.')
SMB_PORT = 445
WINRM_PORT = 5986
RDP_PORT = 3389
# This startup script enables remote management of the instance. It does so
# by creating a WinRM listener (using a self-signed cert) and opening
# the WinRM port in the Windows firewall.
_STARTUP_SCRIPT = """
Enable-PSRemoting -Force
$cert = New-SelfSignedCertificate -DnsName hostname -CertStoreLocation `
Cert:\\LocalMachine\\My\\
New-Item WSMan:\\localhost\\Listener -Transport HTTPS -Address * `
-CertificateThumbPrint $cert.Thumbprint -Force
Set-Item -Path 'WSMan:\\localhost\\Service\\Auth\\Basic' -Value $true
netsh advfirewall firewall add rule name='Allow WinRM' dir=in action=allow `
protocol=TCP localport={winrm_port}
""".format(winrm_port=WINRM_PORT)
STARTUP_SCRIPT = 'powershell -EncodedCommand {encoded_command}'.format(
encoded_command=six.ensure_str(
base64.b64encode(_STARTUP_SCRIPT.encode('utf-16-le'))))
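# As a concrete illustration of the encoding scheme above (a hypothetical
# one-liner, not part of the startup flow): the script "echo hi" satisfies
#   base64.b64encode('echo hi'.encode('utf-16-le')) == b'ZQBjAGgAbwAgAGgAaQA='
# and so would be launched as
#   powershell -EncodedCommand ZQBjAGgAbwAgAGgAaQA=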
# Cygwin constants for installing and running commands through Cygwin.
# _CYGWIN_FORMAT provides a format string to transform a bash command into one
# that runs under Cygwin.
_CYGWIN32_URL = 'https://cygwin.com/setup-x86.exe'
_CYGWIN64_URL = 'https://cygwin.com/setup-x86_64.exe'
_CYGWIN_MIRROR = 'https://mirrors.kernel.org/sourceware/cygwin/'
_CYGWIN_ROOT = r'%PROGRAMFILES%\cygwinx86\cygwin'
_CYGWIN_FORMAT = (r"%s\bin\bash.exe -c 'export PATH=$PATH:/usr/bin && "
"{command}'" % _CYGWIN_ROOT)
class WaitTimeoutError(Exception):
"""Exception thrown if a wait operation takes too long."""
class BaseWindowsMixin(virtual_machine.BaseOsMixin):
"""Class that holds Windows related VM methods and attributes."""
OS_TYPE = os_types.WINDOWS
BASE_OS_TYPE = os_types.WINDOWS
def __init__(self):
super(BaseWindowsMixin, self).__init__()
self.winrm_port = WINRM_PORT
self.smb_port = SMB_PORT
self.remote_access_ports = [self.winrm_port, self.smb_port, RDP_PORT]
self.primary_remote_access_port = self.winrm_port
self.rdp_port_listening_time = None
self.temp_dir = None
self.home_dir = None
self.system_drive = None
self._send_remote_commands_to_cygwin = False
def RobustRemoteCommand(self, command, should_log=False, ignore_failure=False,
suppress_warning=False, timeout=None):
"""Runs a powershell command on the VM.
Should be more robust than its counterpart, RemoteCommand. In the event of
network failure, the process will continue on the VM, and we continually
reconnect to check if it has finished. The tradeoff is this is noticeably
slower than the normal RemoteCommand.
The algorithm works as follows:
1. Create a "command started" file
2. Run the command
3. Create a "command done" file
If we fail to run step 1, we raise a RemoteCommandError. If we have network
failure during step 2, the command will continue running on the VM and we
will spin inside this function waiting for the "command done" file to be
created.
Args:
command: A valid powershell command.
should_log: A boolean indicating whether the command result should be
logged at the info level. Even if it is false, the results will still be
logged at the debug level.
ignore_failure: Ignore any failure if set to true.
suppress_warning: Suppress the result logging from IssueCommand when the
return code is non-zero.
timeout: Float. A timeout in seconds for the command. If None is passed,
no timeout is applied. Timeout kills the winrm session which then kills
the process being executed.
Returns:
A tuple of stdout and stderr from running the command.
Raises:
RemoteCommandError: If there was a problem issuing the command or the
command timed out.
"""
logging.info('Running robust command on %s: %s', self, command)
command_id = uuid.uuid4()
logged_command = ('New-Item -Path %s.start -ItemType File; powershell "%s" '
'2> %s.err 1> %s.out; New-Item -Path %s.done -ItemType '
'File') % (command_id, command, command_id, command_id,
command_id)
start_command_time = time.time()
try:
self.RemoteCommand(
logged_command,
should_log=should_log,
ignore_failure=ignore_failure,
suppress_warning=suppress_warning,
timeout=timeout)
except errors.VirtualMachine.RemoteCommandError:
logging.exception(
'Exception while running %s on %s, waiting for command to finish',
command, self)
start_out, _ = self.RemoteCommand('Test-Path %s.start' % (command_id,))
if 'True' not in start_out:
raise errors.VirtualMachine.RemoteCommandError(
'RobustRemoteCommand did not start on VM.')
    end_command_time = time.time()
    # Honor the documented "None means no timeout" contract; subtracting the
    # elapsed time from None would raise a TypeError.
    remaining_timeout = (
        None if timeout is None
        else timeout - (end_command_time - start_command_time))
    @timeout_decorator.timeout(
        remaining_timeout,
        use_signals=False,
        timeout_exception=errors.VirtualMachine.RemoteCommandError)
def wait_for_done_file():
# Spin on the VM until the "done" file is created. It is better to spin
# on the VM rather than creating a new session for each test.
done_out = ''
while 'True' not in done_out:
done_out, _ = self.RemoteCommand(
'$retries=0; while ((-not (Test-Path %s.done)) -and '
'($retries -le 60)) { Start-Sleep -Seconds 1; $retries++ }; '
'Test-Path %s.done' % (command_id, command_id))
wait_for_done_file()
stdout, _ = self.RemoteCommand('Get-Content %s.out' % (command_id,))
_, stderr = self.RemoteCommand('Get-Content %s.err' % (command_id,))
return stdout, stderr
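  # To make the marker-file protocol above concrete: for a hypothetical
  # command_id of "abc" and command "hostname", logged_command expands to
  #   New-Item -Path abc.start -ItemType File; powershell "hostname"
  #       2> abc.err 1> abc.out; New-Item -Path abc.done -ItemType File
  # so after a dropped connection the caller only needs to poll
  # Test-Path abc.done to learn whether the command finished.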
def RemoteCommand(self, command, should_log=False, ignore_failure=False,
suppress_warning=False, timeout=None):
"""Runs a powershell command on the VM.
Args:
command: A valid powershell command.
should_log: A boolean indicating whether the command result should be
logged at the info level. Even if it is false, the results will
still be logged at the debug level.
ignore_failure: Ignore any failure if set to true.
suppress_warning: Suppress the result logging from IssueCommand when the
return code is non-zero.
timeout: Float. A timeout in seconds for the command. If None is passed,
no timeout is applied. Timeout kills the winrm session which then
kills the process being executed.
Returns:
A tuple of stdout and stderr from running the command.
Raises:
RemoteCommandError: If there was a problem issuing the command or the
command timed out.
"""
logging.info('Running command on %s: %s', self, command)
s = winrm.Session(
'https://%s:%s' % (self.GetConnectionIp(), self.winrm_port),
auth=(self.user_name, self.password),
server_cert_validation='ignore')
encoded_command = six.ensure_str(
base64.b64encode(command.encode('utf_16_le')))
@timeout_decorator.timeout(timeout, use_signals=False,
timeout_exception=errors.VirtualMachine.
RemoteCommandError)
def run_command():
return s.run_cmd('powershell -encodedcommand %s' % encoded_command)
r = run_command()
retcode, stdout, stderr = r.status_code, six.ensure_str(
r.std_out), six.ensure_str(r.std_err)
debug_text = ('Ran %s on %s. Return code (%s).\nSTDOUT: %s\nSTDERR: %s' %
(command, self, retcode, stdout, stderr))
if should_log or (retcode and not suppress_warning):
logging.info(debug_text)
else:
logging.debug(debug_text)
if retcode and not ignore_failure:
error_text = ('Got non-zero return code (%s) executing %s\n'
'STDOUT: %sSTDERR: %s' %
(retcode, command, stdout, stderr))
raise errors.VirtualMachine.RemoteCommandError(error_text)
return stdout, stderr
def InstallCygwin(self, bit64=True, packages=None):
"""Downloads and installs cygwin on the Windows instance.
TODO(deitz): Support installing packages via vm.Install calls where the VM
would look in Linux packages and try to find a CygwinInstall function to
call. Alternatively, consider using cyg-apt as an installation method. With
this additional change, we could use similar code to run benchmarks under
both Windows and Linux (if necessary and useful).
Args:
bit64: Whether to use 64-bit Cygwin (default) or 32-bit Cygwin.
packages: List of packages to install on Cygwin.
"""
url = _CYGWIN64_URL if bit64 else _CYGWIN32_URL
setup_exe = url.split('/')[-1]
self.DownloadFile(url, setup_exe)
self.RemoteCommand(
r'.\{setup_exe} --quiet-mode --site {mirror} --root "{cygwin_root}" '
'--packages {packages}'.format(
setup_exe=setup_exe,
mirror=_CYGWIN_MIRROR,
cygwin_root=_CYGWIN_ROOT,
packages=','.join(packages)))
def RemoteCommandCygwin(self, command, *args, **kwargs):
"""Runs a Cygwin command on the VM.
Args:
command: A valid bash command to run under Cygwin.
*args: Arguments passed directly to RemoteCommandWithReturnCode.
**kwargs: Keyword arguments passed directly to
RemoteCommandWithReturnCode.
Returns:
A tuple of stdout and stderr from running the command.
Raises:
RemoteCommandError: If there was a problem issuing the command or the
command timed out.
"""
# Wrap the command to be executed via bash.exe under Cygwin. Escape quotes
# since they are executed in a string.
cygwin_command = _CYGWIN_FORMAT.format(command=command.replace('"', r'\"'))
return self.RemoteCommand(cygwin_command, *args, **kwargs)
def RemoteCopy(self, local_path, remote_path='', copy_to=True):
"""Copies a file to or from the VM.
Args:
local_path: Local path to file.
remote_path: Optional path of where to copy file on remote host.
copy_to: True to copy to vm, False to copy from vm.
Raises:
RemoteCommandError: If there was a problem copying the file.
"""
remote_path = remote_path or '~/'
# In order to expand "~" and "~user" we use ntpath.expanduser(),
# but it relies on environment variables being set. This modifies
# the HOME environment variable in order to use that function, and then
# restores it to its previous value.
home = os.environ.get('HOME')
try:
os.environ['HOME'] = self.home_dir
remote_path = ntpath.expanduser(remote_path)
finally:
if home is None:
del os.environ['HOME']
else:
os.environ['HOME'] = home
drive, remote_path = ntpath.splitdrive(remote_path)
remote_drive = (drive or self.system_drive).rstrip(':')
network_drive = '\\\\%s\\%s$' % (self.GetConnectionIp(), remote_drive)
if vm_util.RunningOnWindows():
self._PsDriveRemoteCopy(local_path, remote_path, copy_to, network_drive)
else:
self._SmbclientRemoteCopy(local_path, remote_path, copy_to, network_drive)
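  # For example, with HOME temporarily set to C:\Users\perfkit,
  # ntpath.expanduser('~/data') yields 'C:\\Users\\perfkit/data', which
  # ntpath.splitdrive then splits into ('C:', '\\Users\\perfkit/data').
  # Note this relies on ntpath.expanduser consulting HOME, which held
  # before Python 3.8 (where HOME support was removed from that function).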
def _SmbclientRemoteCopy(self, local_path, remote_path,
copy_to, network_drive):
"""Copies a file to or from the VM using smbclient.
Args:
local_path: Local path to file.
remote_path: Optional path of where to copy file on remote host.
copy_to: True to copy to vm, False to copy from vm.
network_drive: The smb specification for the remote drive
(//{ip_address}/{share_name}).
Raises:
RemoteCommandError: If there was a problem copying the file.
"""
local_directory, local_file = os.path.split(local_path)
remote_directory, remote_file = ntpath.split(remote_path)
smb_command = 'cd %s; lcd %s; ' % (remote_directory, local_directory)
if copy_to:
smb_command += 'put %s %s' % (local_file, remote_file)
else:
smb_command += 'get %s %s' % (remote_file, local_file)
smb_copy = [
'smbclient', network_drive,
'--max-protocol', 'SMB3',
'--user', '%s%%%s' % (self.user_name, self.password),
'--port', str(self.smb_port),
'--command', smb_command
]
stdout, stderr, retcode = vm_util.IssueCommand(smb_copy,
raise_on_failure=False)
if retcode:
error_text = ('Got non-zero return code (%s) executing %s\n'
'STDOUT: %sSTDERR: %s' %
(retcode, smb_copy, stdout, stderr))
raise errors.VirtualMachine.RemoteCommandError(error_text)
def _PsDriveRemoteCopy(self, local_path, remote_path,
copy_to, network_drive):
"""Copies a file to or from the VM using New-PSDrive and Copy-Item.
Args:
local_path: Local path to file.
remote_path: Optional path of where to copy file on remote host.
copy_to: True to copy to vm, False to copy from vm.
network_drive: The smb specification for the remote drive
(//{ip_address}/{share_name}).
Raises:
RemoteCommandError: If there was a problem copying the file.
"""
set_error_pref = '$ErrorActionPreference="Stop"'
password = self.password.replace("'", "''")
create_cred = (
'$pw = convertto-securestring -AsPlainText -Force \'%s\';'
'$cred = new-object -typename System.Management.Automation'
'.PSCredential -argumentlist %s,$pw' % (password, self.user_name))
psdrive_name = self.name
create_psdrive = (
'New-PSDrive -Name %s -PSProvider filesystem -Root '
'%s -Credential $cred' % (psdrive_name, network_drive))
remote_path = '%s:%s' % (psdrive_name, remote_path)
if copy_to:
from_path, to_path = local_path, remote_path
else:
from_path, to_path = remote_path, local_path
copy_item = 'Copy-Item -Path %s -Destination %s' % (from_path, to_path)
delete_connection = 'net use %s /delete' % network_drive
cmd = ';'.join([set_error_pref, create_cred, create_psdrive,
copy_item, delete_connection])
stdout, stderr, retcode = vm_util.IssueCommand(
['powershell', '-Command', cmd], timeout=None, raise_on_failure=False)
if retcode:
error_text = ('Got non-zero return code (%s) executing %s\n'
'STDOUT: %sSTDERR: %s' %
(retcode, cmd, stdout, stderr))
raise errors.VirtualMachine.RemoteCommandError(error_text)
def WaitForBootCompletion(self):
"""Waits until VM is has booted."""
to_wait_for = [self._WaitForWinRmCommand]
if FLAGS.cluster_boot_test_rdp_port_listening:
to_wait_for.append(self._WaitForRdpPort)
vm_util.RunParallelThreads([(method, [], {}) for method in to_wait_for], 2)
@vm_util.Retry(log_errors=False, poll_interval=1, timeout=2400)
def _WaitForRdpPort(self):
self.TestConnectRemoteAccessPort(RDP_PORT)
if self.rdp_port_listening_time is None:
self.rdp_port_listening_time = time.time()
@vm_util.Retry(log_errors=False, poll_interval=1, timeout=2400)
def _WaitForWinRmCommand(self):
"""Waits for WinRM command and optionally for the WinRM port to listen."""
# Test for listening on the port first, because this will happen strictly
# first.
if (FLAGS.cluster_boot_test_port_listening and
self.port_listening_time is None):
self.TestConnectRemoteAccessPort()
self.port_listening_time = time.time()
# Always wait for remote host command to succeed, because it is necessary to
# run benchmarks.
stdout, _ = self.RemoteCommand('hostname', suppress_warning=True)
if self.bootable_time is None:
self.bootable_time = time.time()
if self.hostname is None:
self.hostname = stdout.rstrip()
if FLAGS.log_windows_password:
logging.info('Password for %s: %s', self, self.password)
@vm_util.Retry(poll_interval=1, max_retries=15)
def OnStartup(self):
# Log driver information so that the user has a record of which drivers
# were used.
# TODO(user): put the driver information in the metadata.
stdout, _ = self.RemoteCommand('dism /online /get-drivers')
logging.info(stdout)
stdout, _ = self.RemoteCommand('echo $env:TEMP')
self.temp_dir = ntpath.join(stdout.strip(), 'pkb')
stdout, _ = self.RemoteCommand('echo $env:USERPROFILE')
self.home_dir = stdout.strip()
stdout, _ = self.RemoteCommand('echo $env:SystemDrive')
self.system_drive = stdout.strip()
self.RemoteCommand('mkdir %s' % self.temp_dir)
self.DisableGuestFirewall()
def _Reboot(self):
"""OS-specific implementation of reboot command."""
self.RemoteCommand('shutdown -t 0 -r -f', ignore_failure=True)
@vm_util.Retry(log_errors=False, poll_interval=1)
def VMLastBootTime(self):
"""Returns the time the VM was last rebooted as reported by the VM."""
resp, _ = self.RemoteCommand(
'systeminfo | find /i "Boot Time"', suppress_warning=True)
return resp
def _AfterReboot(self):
"""Performs any OS-specific setup on the VM following reboot.
This will be called after every call to Reboot().
"""
pass
def Install(self, package_name):
"""Installs a PerfKit package on the VM."""
if not self.install_packages:
return
if package_name not in self._installed_packages:
package = windows_packages.PACKAGES[package_name]
package.Install(self)
self._installed_packages.add(package_name)
def Uninstall(self, package_name):
"""Uninstalls a Perfkit package on the VM."""
package = windows_packages.PACKAGES[package_name]
if hasattr(package, 'Uninstall'):
package.Uninstall()
def PackageCleanup(self):
"""Cleans up all installed packages.
Deletes the Perfkit Benchmarker temp directory on the VM
and uninstalls all PerfKit packages.
"""
for package_name in self._installed_packages:
self.Uninstall(package_name)
self.RemoteCommand('rm -recurse -force %s' % self.temp_dir)
self.EnableGuestFirewall()
def WaitForProcessRunning(self, process, timeout):
"""Blocks until either the timeout passes or the process is running.
Args:
process: string name of the process.
timeout: number of seconds to block while the process is not running.
Raises:
WaitTimeoutError: raised if the process does not run within "timeout"
seconds.
"""
command = ('$count={timeout};'
'while( (ps | select-string {process} | measure-object).Count '
'-eq 0 -and $count -gt 0) {{sleep 1; $count=$count-1}}; '
'if ($count -eq 0) {{echo "FAIL"}}').format(
timeout=timeout, process=process)
stdout, _ = self.RemoteCommand(command)
if 'FAIL' in stdout:
raise WaitTimeoutError()
def IsProcessRunning(self, process):
"""Checks if a given process is running on the system.
Args:
process: string name of the process.
Returns:
Whether the process name is in the PS output.
"""
stdout, _ = self.RemoteCommand('ps')
return process in stdout
def _GetNumCpus(self):
"""Returns the number of logical CPUs on the VM.
This method does not cache results (unlike "num_cpus").
Returns:
int. Number of logical CPUs.
"""
stdout, _ = self.RemoteCommand(
'Get-WmiObject -class Win32_processor | '
'Select-Object -ExpandProperty NumberOfLogicalProcessors')
# In the case that there are multiple Win32_processor instances, the result
# of this command can be a string like '4 4 '.
return sum(int(i) for i in stdout.split())
def _GetTotalFreeMemoryKb(self):
"""Returns the amount of free physical memory on the VM in Kilobytes."""
raise NotImplementedError()
def _GetTotalMemoryKb(self):
"""Returns the amount of physical memory on the VM in Kilobytes.
This method does not cache results (unlike "total_memory_kb").
"""
stdout, _ = self.RemoteCommand(
'Get-WmiObject -class Win32_PhysicalMemory | '
'select -exp Capacity')
result = sum(int(capacity) for capacity in stdout.split('\n') if capacity)
return result / 1024
def GetTotalMemoryMb(self):
return self._GetTotalMemoryKb() / 1024
def _TestReachable(self, ip):
"""Returns True if the VM can reach the ip address and False otherwise."""
return self.TryRemoteCommand('ping -n 1 %s' % ip)
def DownloadFile(self, url, dest):
"""Downloads the content at the url to the specified destination."""
# Allow more security protocols to make it easier to download from
# sites where we don't know the security protocol beforehand
command = ('[Net.ServicePointManager]::SecurityProtocol = '
'[System.Net.SecurityProtocolType] '
'"tls, tls11, tls12";'
'Invoke-WebRequest {url} -OutFile {dest}').format(
url=url, dest=dest)
self.RemoteCommand(command)
def UnzipFile(self, zip_file, dest):
"""Unzips the file with the given path."""
command = ('Add-Type -A System.IO.Compression.FileSystem; '
'[IO.Compression.ZipFile]::ExtractToDirectory(\'{zip_file}\', '
'\'{dest}\')').format(zip_file=zip_file, dest=dest)
self.RemoteCommand(command)
def DisableGuestFirewall(self):
"""Disables the guest firewall."""
command = 'netsh advfirewall set allprofiles state off'
self.RemoteCommand(command)
def EnableGuestFirewall(self):
"""Enables the guest firewall."""
command = 'netsh advfirewall set allprofiles state on'
self.RemoteCommand(command)
def _RunDiskpartScript(self, script):
"""Runs the supplied Diskpart script on the VM."""
with vm_util.NamedTemporaryFile(prefix='diskpart', mode='w') as tf:
tf.write(script)
tf.close()
script_path = ntpath.join(self.temp_dir, os.path.basename(tf.name))
self.RemoteCopy(tf.name, script_path)
self.RemoteCommand('diskpart /s {script_path}'.format(
script_path=script_path))
def _CreateScratchDiskFromDisks(self, disk_spec, disks):
"""Helper method to prepare data disks.
Given a list of BaseDisk objects, this will do most of the work creating,
attaching, striping, formatting, and mounting them. If multiple BaseDisk
objects are passed to this method, it will stripe them, combining them
into one 'logical' data disk (it will be treated as a single disk from a
    benchmark's perspective). This is intended to be called from within a cloud
specific VM's CreateScratchDisk method.
Args:
disk_spec: The BaseDiskSpec object corresponding to the disk.
disks: A list of the disk(s) to be created, attached, striped,
formatted, and mounted. If there is more than one disk in
the list, then they will be striped together.
"""
if len(disks) > 1:
# If the disk_spec called for a striped disk, create one.
data_disk = disk.StripedDisk(disk_spec, disks)
else:
data_disk = disks[0]
self.scratch_disks.append(data_disk)
if data_disk.disk_type != disk.LOCAL:
data_disk.Create()
data_disk.Attach(self)
# Create and then run a Diskpart script that will initialize the disks,
# create a volume, and then format and mount the volume.
script = ''
disk_numbers = [str(d.disk_number) for d in disks]
for disk_number in disk_numbers:
# For each disk, set the status to online (if it is not already),
# remove any formatting or partitioning on the disks, and convert
# it to a dynamic disk so it can be used to create a volume.
script += ('select disk %s\n'
'online disk noerr\n'
'attributes disk clear readonly\n'
'clean\n'
'convert dynamic\n' % disk_number)
# Create a volume out of the disk(s).
if data_disk.is_striped:
script += 'create volume stripe disk=%s\n' % ','.join(disk_numbers)
else:
script += 'create volume simple\n'
# If a mount point has been specified, create the directory where it will be
# mounted, format the volume, and assign the mount point to the volume.
if disk_spec.mount_point:
self.RemoteCommand('mkdir %s' % disk_spec.mount_point)
script += ('format quick\n'
'assign mount=%s\n' % disk_spec.mount_point)
self._RunDiskpartScript(script)
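  # Illustrative script for two disks (numbers 1 and 2) striped and mounted
  # at a hypothetical mount point F:\scratch:
  #   select disk 1
  #   online disk noerr
  #   attributes disk clear readonly
  #   clean
  #   convert dynamic
  #   select disk 2
  #   (the same four commands as above)
  #   create volume stripe disk=1,2
  #   format quick
  #   assign mount=F:\scratch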
def SetReadAhead(self, num_sectors, devices):
"""Set read-ahead value for block devices.
Args:
num_sectors: int. Number of sectors of read ahead.
devices: list of strings. A list of block devices.
"""
raise NotImplementedError()
def SetProcessPriorityToHighByFlag(self, executable_name):
"""Sets the CPU priority for a given executable name.
Note this only sets the CPU priority if FLAGS.set_cpu_priority_high is set.
Args:
executable_name: string. The executable name.
"""
if not FLAGS.set_cpu_priority_high:
return
command = (
"New-Item 'HKLM:\\SOFTWARE\\Microsoft\\Windows "
"NT\\CurrentVersion\\Image File Execution Options\\{exe}\\PerfOptions' "
'-Force | New-ItemProperty -Name CpuPriorityClass -Value 3 -Force'
).format(exe=executable_name)
self.RemoteCommand(command)
executables = self.os_metadata.get('high_cpu_priority')
if executables:
executables.append(executable_name)
else:
self.os_metadata['high_cpu_priority'] = [executable_name]
def _IsSmtEnabled(self):
"""Whether SMT is enabled on the vm."""
# TODO(user): find way to do this in Windows
raise NotImplementedError('SMT detection currently not implemented')
class Windows2012CoreMixin(BaseWindowsMixin):
"""Class holding Windows Server 2012 Server Core VM specifics."""
OS_TYPE = os_types.WINDOWS2012_CORE
class Windows2016CoreMixin(BaseWindowsMixin):
"""Class holding Windows Server 2016 Server Core VM specifics."""
OS_TYPE = os_types.WINDOWS2016_CORE
class Windows2019CoreMixin(BaseWindowsMixin):
"""Class holding Windows Server 2019 Server Core VM specifics."""
OS_TYPE = os_types.WINDOWS2019_CORE
class Windows2012DesktopMixin(BaseWindowsMixin):
"""Class holding Windows Server 2012 with Desktop Experience VM specifics."""
OS_TYPE = os_types.WINDOWS2012_DESKTOP
class Windows2016DesktopMixin(BaseWindowsMixin):
"""Class holding Windows Server 2016 with Desktop Experience VM specifics."""
OS_TYPE = os_types.WINDOWS2016_DESKTOP
class Windows2019DesktopMixin(BaseWindowsMixin):
"""Class holding Windows Server 2019 with Desktop Experience VM specifics."""
OS_TYPE = os_types.WINDOWS2019_DESKTOP
|
import flask
from app.utils import JsonUtil
from app.utils import RequestUtil
def standard_response(success, data):
    '''Build the standard JSON response envelope.'''
rst = {}
rst['success'] = success
rst['data'] = data
return JsonUtil.object_2_json(rst)
# Override render_template to inject a fixed set of template parameters.
def render_template(*args, **kwargs):
kwargs['loginUser'] = JsonUtil.object_2_json(RequestUtil.get_login_user())
return flask.render_template(*args, **kwargs)
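# Minimal usage sketch (hypothetical payload; assumes JsonUtil.object_2_json
# serializes plain dicts to JSON text):
#   standard_response(True, {'id': 1})
#   -> '{"success": true, "data": {"id": 1}}'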
|
import asyncio
from homeassistant import config_entries, core
from .const import DOMAIN
from .hub import PulseHub
CONF_HUBS = "hubs"
PLATFORMS = ["cover", "sensor"]
async def async_setup(hass: core.HomeAssistant, config: dict):
"""Set up the Rollease Acmeda Automate component."""
return True
async def async_setup_entry(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Set up Rollease Acmeda Automate hub from a config entry."""
hub = PulseHub(hass, config_entry)
if not await hub.async_setup():
return False
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = hub
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Unload a config entry."""
hub = hass.data[DOMAIN][config_entry.entry_id]
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if not await hub.async_reset():
return False
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
|
from typing import Optional
import zigpy.zcl.clusters.homeautomation as homeautomation
from .. import registries
from ..const import (
CHANNEL_ELECTRICAL_MEASUREMENT,
REPORT_CONFIG_DEFAULT,
SIGNAL_ATTR_UPDATED,
)
from .base import ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
homeautomation.ApplianceEventAlerts.cluster_id
)
class ApplianceEventAlerts(ZigbeeChannel):
"""Appliance Event Alerts channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
homeautomation.ApplianceIdentification.cluster_id
)
class ApplianceIdentification(ZigbeeChannel):
"""Appliance Identification channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
homeautomation.ApplianceStatistics.cluster_id
)
class ApplianceStatistics(ZigbeeChannel):
"""Appliance Statistics channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(homeautomation.Diagnostic.cluster_id)
class Diagnostic(ZigbeeChannel):
"""Diagnostic channel."""
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
homeautomation.ElectricalMeasurement.cluster_id
)
class ElectricalMeasurementChannel(ZigbeeChannel):
"""Channel that polls active power level."""
CHANNEL_NAME = CHANNEL_ELECTRICAL_MEASUREMENT
REPORT_CONFIG = ({"attr": "active_power", "config": REPORT_CONFIG_DEFAULT},)
async def async_update(self):
"""Retrieve latest state."""
self.debug("async_update")
# This is a polling channel. Don't allow cache.
result = await self.get_attribute_value("active_power", from_cache=False)
if result is not None:
self.async_send_signal(
f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",
0x050B,
"active_power",
result,
)
async def async_initialize(self, from_cache):
"""Initialize channel."""
await self.fetch_config(True)
await super().async_initialize(from_cache)
async def fetch_config(self, from_cache):
"""Fetch config from device and updates format specifier."""
# prime the cache
await self.get_attributes(
[
"ac_power_divisor",
"power_divisor",
"ac_power_multiplier",
"power_multiplier",
],
from_cache=from_cache,
)
@property
def divisor(self) -> Optional[int]:
"""Return active power divisor."""
return self.cluster.get(
"ac_power_divisor", self.cluster.get("power_divisor", 1)
)
@property
def multiplier(self) -> Optional[int]:
"""Return active power divisor."""
return self.cluster.get(
"ac_power_multiplier", self.cluster.get("power_multiplier", 1)
)
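    # Illustrative scaling (applied by the consuming sensor entity, not in
    # this channel): a reported active_power of 120 with multiplier 1 and
    # divisor 10 corresponds to 120 * 1 / 10 == 12 W.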
@registries.ZIGBEE_CHANNEL_REGISTRY.register(
homeautomation.MeterIdentification.cluster_id
)
class MeterIdentification(ZigbeeChannel):
"""Metering Identification channel."""
|
from copy import deepcopy
from homeassistant.components.deconz.deconz_event import CONF_DECONZ_EVENT
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
from tests.common import async_capture_events
SENSORS = {
"1": {
"id": "Switch 1 id",
"name": "Switch 1",
"type": "ZHASwitch",
"state": {"buttonevent": 1000},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:01-00",
},
"2": {
"id": "Switch 2 id",
"name": "Switch 2",
"type": "ZHASwitch",
"state": {"buttonevent": 1000},
"config": {"battery": 100},
"uniqueid": "00:00:00:00:00:00:00:02-00",
},
"3": {
"id": "Switch 3 id",
"name": "Switch 3",
"type": "ZHASwitch",
"state": {"buttonevent": 1000, "gesture": 1},
"config": {"battery": 100},
"uniqueid": "00:00:00:00:00:00:00:03-00",
},
"4": {
"id": "Switch 4 id",
"name": "Switch 4",
"type": "ZHASwitch",
"state": {"buttonevent": 1000, "gesture": 1},
"config": {"battery": 100},
"uniqueid": "00:00:00:00:00:00:00:04-00",
},
"5": {
"id": "ZHA remote 1 id",
"name": "ZHA remote 1",
"type": "ZHASwitch",
"state": {"angle": 0, "buttonevent": 1000, "xy": [0.0, 0.0]},
"config": {"group": "4,5,6", "reachable": True, "on": True},
"uniqueid": "00:00:00:00:00:00:00:05-00",
},
}
async def test_deconz_events(hass):
"""Test successful creation of deconz events."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 3
assert len(gateway.events) == 5
assert hass.states.get("sensor.switch_1") is None
assert hass.states.get("sensor.switch_1_battery_level") is None
assert hass.states.get("sensor.switch_2") is None
assert hass.states.get("sensor.switch_2_battery_level").state == "100"
events = async_capture_events(hass, CONF_DECONZ_EVENT)
gateway.api.sensors["1"].update({"state": {"buttonevent": 2000}})
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data == {
"id": "switch_1",
"unique_id": "00:00:00:00:00:00:00:01",
"event": 2000,
}
gateway.api.sensors["3"].update({"state": {"buttonevent": 2000}})
await hass.async_block_till_done()
assert len(events) == 2
assert events[1].data == {
"id": "switch_3",
"unique_id": "00:00:00:00:00:00:00:03",
"event": 2000,
"gesture": 1,
}
gateway.api.sensors["4"].update({"state": {"gesture": 0}})
await hass.async_block_till_done()
assert len(events) == 3
assert events[2].data == {
"id": "switch_4",
"unique_id": "00:00:00:00:00:00:00:04",
"event": 1000,
"gesture": 0,
}
gateway.api.sensors["5"].update(
{"state": {"buttonevent": 6002, "angle": 110, "xy": [0.5982, 0.3897]}}
)
await hass.async_block_till_done()
assert len(events) == 4
assert events[3].data == {
"id": "zha_remote_1",
"unique_id": "00:00:00:00:00:00:00:05",
"event": 6002,
"angle": 110,
"xy": [0.5982, 0.3897],
}
await hass.config_entries.async_unload(config_entry.entry_id)
assert len(hass.states.async_all()) == 0
assert len(gateway.events) == 0
|
import unittest
from perfkitbenchmarker import errors
from perfkitbenchmarker.configs import option_decoders
import six
_COMPONENT = 'test_component'
_FLAGS = None
_OPTION = 'test_option'
def _ReturnFive():
return 5
class _PassThroughDecoder(option_decoders.ConfigOptionDecoder):
def Decode(self, value, component_path, flag_values):
return value
class ConfigOptionDecoderTestCase(unittest.TestCase):
def testNoDefault(self):
decoder = _PassThroughDecoder(option=_OPTION)
self.assertIs(decoder.required, True)
with self.assertRaises(AssertionError) as cm:
decoder.default
self.assertEqual(str(cm.exception), (
'Attempted to get the default value of required config option '
'"test_option".'))
def testDefaultValue(self):
decoder = _PassThroughDecoder(default=None, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIsNone(decoder.default)
def testDefaultCallable(self):
decoder = _PassThroughDecoder(default=_ReturnFive, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIs(decoder.default, 5)
def testIncompleteDerivedClass(self):
class IncompleteDerivedClass(option_decoders.ConfigOptionDecoder):
pass
with self.assertRaises(TypeError):
IncompleteDerivedClass(option=_OPTION)
class TypeVerifierTestCase(unittest.TestCase):
def testRejectNone(self):
decoder = option_decoders.TypeVerifier((int, float), default=None,
option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIsNone(decoder.default)
self.assertIs(decoder.Decode(5, _COMPONENT, _FLAGS), 5)
self.assertIs(decoder.Decode(5.5, _COMPONENT, _FLAGS), 5.5)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(None, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "None" (of type '
'"NoneType"). Value must be one of the following types: int, float.'))
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode('red', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "red" (of type "str"). '
'Value must be one of the following types: int, float.'))
def testAcceptNone(self):
decoder = option_decoders.TypeVerifier((int, float), default=None,
none_ok=True, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIsNone(decoder.default)
self.assertIs(decoder.Decode(5, _COMPONENT, _FLAGS), 5)
self.assertIs(decoder.Decode(5.5, _COMPONENT, _FLAGS), 5.5)
self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode('red', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "red" (of type "str"). '
'Value must be one of the following types: NoneType, int, float.'))
class BooleanDecoderTestCase(unittest.TestCase):
def testDefault(self):
decoder = option_decoders.BooleanDecoder(default=None, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIsNone(decoder.default)
def testNone(self):
decoder = option_decoders.BooleanDecoder(none_ok=True, option=_OPTION)
self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
decoder = option_decoders.BooleanDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(None, _COMPONENT, _FLAGS)
def testNonBoolean(self):
decoder = option_decoders.BooleanDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5" (of type "int"). '
'Value must be one of the following types: bool.'))
def testValidBoolean(self):
decoder = option_decoders.BooleanDecoder(option=_OPTION)
self.assertIs(decoder.Decode(True, _COMPONENT, _FLAGS), True)
class IntDecoderTestCase(unittest.TestCase):
def testDefault(self):
decoder = option_decoders.IntDecoder(default=5, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIs(decoder.default, 5)
def testNone(self):
decoder = option_decoders.IntDecoder(none_ok=True, option=_OPTION)
self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
decoder = option_decoders.IntDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(None, _COMPONENT, _FLAGS)
def testNonInt(self):
decoder = option_decoders.IntDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode('5', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5" (of type "str"). '
'Value must be one of the following types: int.'))
def testValidInt(self):
decoder = option_decoders.IntDecoder(option=_OPTION)
self.assertEqual(decoder.Decode(5, _COMPONENT, _FLAGS), 5)
def testMax(self):
decoder = option_decoders.IntDecoder(max=2, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5". Value must be at '
'most 2.'))
self.assertIs(decoder.Decode(2, _COMPONENT, _FLAGS), 2)
self.assertIs(decoder.Decode(1, _COMPONENT, _FLAGS), 1)
def testMin(self):
decoder = option_decoders.IntDecoder(min=10, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5". Value must be at '
'least 10.'))
self.assertIs(decoder.Decode(10, _COMPONENT, _FLAGS), 10)
self.assertIs(decoder.Decode(15, _COMPONENT, _FLAGS), 15)
def testZeroMaxOrMin(self):
decoder = option_decoders.IntDecoder(max=0, min=0, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(-1, _COMPONENT, _FLAGS)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(1, _COMPONENT, _FLAGS)
self.assertEqual(decoder.Decode(0, _COMPONENT, _FLAGS), 0)
class StringDecoderTestCase(unittest.TestCase):
def testDefault(self):
decoder = option_decoders.StringDecoder(default=None, option=_OPTION)
self.assertFalse(decoder.required)
self.assertIsNone(decoder.default)
def testNone(self):
    decoder = option_decoders.StringDecoder(none_ok=True, option=_OPTION)
    self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
    decoder = option_decoders.StringDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(None, _COMPONENT, _FLAGS)
def testNonString(self):
decoder = option_decoders.StringDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5" (of type "int"). '
'Value must be one of the following types: %s.' %
six.string_types[0].__name__))
def testValidString(self):
decoder = option_decoders.StringDecoder(option=_OPTION)
self.assertEqual(decoder.Decode('red', _COMPONENT, _FLAGS), 'red')
class FloatDecoderTestCase(unittest.TestCase):
def testDefault(self):
decoder = option_decoders.FloatDecoder(default=2.5, option=_OPTION)
self.assertIs(decoder.required, False)
self.assertIs(decoder.default, 2.5)
def testNone(self):
decoder = option_decoders.FloatDecoder(none_ok=True, option=_OPTION)
self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
    decoder = option_decoders.FloatDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(None, _COMPONENT, _FLAGS)
def testNonFloat(self):
decoder = option_decoders.FloatDecoder(option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode('5', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5" (of type "str"). '
'Value must be one of the following types: float, int.'))
def testValidFloat(self):
decoder = option_decoders.FloatDecoder(option=_OPTION)
self.assertEqual(decoder.Decode(2.5, _COMPONENT, _FLAGS), 2.5)
def testValidFloatAsInt(self):
decoder = option_decoders.FloatDecoder(option=_OPTION)
self.assertEqual(decoder.Decode(2, _COMPONENT, _FLAGS), 2)
def testMaxFloat(self):
MAX = 2.0
decoder = option_decoders.FloatDecoder(max=MAX, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5". Value must be at '
'most %s.' % MAX))
self.assertIs(decoder.Decode(MAX, _COMPONENT, _FLAGS), MAX)
self.assertIs(decoder.Decode(2, _COMPONENT, _FLAGS), 2)
self.assertIs(decoder.Decode(1, _COMPONENT, _FLAGS), 1)
def testMaxInt(self):
MAX = 2
decoder = option_decoders.FloatDecoder(max=MAX, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(2.01, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "2.01". Value must be at '
'most %s.' % MAX))
self.assertIs(decoder.Decode(MAX, _COMPONENT, _FLAGS), MAX)
self.assertIs(decoder.Decode(2.0, _COMPONENT, _FLAGS), 2.0)
self.assertIs(decoder.Decode(1, _COMPONENT, _FLAGS), 1)
def testMinFloat(self):
MIN = 2.0
decoder = option_decoders.FloatDecoder(min=MIN, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(0, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "0". Value must be at '
'least %s.' % MIN))
self.assertIs(decoder.Decode(MIN, _COMPONENT, _FLAGS), MIN)
self.assertIs(decoder.Decode(2, _COMPONENT, _FLAGS), 2)
self.assertIs(decoder.Decode(5, _COMPONENT, _FLAGS), 5)
def testMinInt(self):
MIN = 2
decoder = option_decoders.FloatDecoder(min=MIN, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(0, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "0". Value must be at '
'least %s.' % MIN))
self.assertIs(decoder.Decode(MIN, _COMPONENT, _FLAGS), MIN)
self.assertIs(decoder.Decode(2.0, _COMPONENT, _FLAGS), 2.0)
self.assertIs(decoder.Decode(5, _COMPONENT, _FLAGS), 5)
def testZeroMaxOrMin(self):
decoder = option_decoders.FloatDecoder(max=0, min=0, option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(-1, _COMPONENT, _FLAGS)
with self.assertRaises(errors.Config.InvalidValue):
decoder.Decode(1, _COMPONENT, _FLAGS)
self.assertEqual(decoder.Decode(0, _COMPONENT, _FLAGS), 0)
class ListDecoderTestCase(unittest.TestCase):
def setUp(self):
super(ListDecoderTestCase, self).setUp()
self._int_decoder = option_decoders.IntDecoder()
def testNonListInputType(self):
decoder = option_decoders.ListDecoder(item_decoder=self._int_decoder,
option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode(5, _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option value: "5" (of type "int"). '
'Value must be one of the following types: list.'))
def testNone(self):
decoder = option_decoders.ListDecoder(item_decoder=self._int_decoder,
none_ok=True, option=_OPTION)
self.assertIsNone(decoder.Decode(None, _COMPONENT, _FLAGS))
def testInvalidItem(self):
decoder = option_decoders.ListDecoder(item_decoder=self._int_decoder,
option=_OPTION)
with self.assertRaises(errors.Config.InvalidValue) as cm:
decoder.Decode([5, 4, 3.5], _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.test_option[2] value: "3.5" (of type "float"). '
'Value must be one of the following types: int.'))
if __name__ == '__main__':
unittest.main()
|
import gevent.monkey
gevent.monkey.patch_all()
import logging
import os
from . import driver
from . import lru
from .exceptions import FileNotFoundError
logger = logging.getLogger(__name__)
class Base(driver.Base):
supports_bytes_range = True
def __init__(self, path=None, config=None):
self._config = config
self._root_path = path or '/test'
self._boto_conn = self.makeConnection()
self._boto_bucket = self._boto_conn.get_bucket(
self._config.boto_bucket)
logger.info("Boto based storage initialized")
def _build_connection_params(self):
kwargs = {'is_secure': (self._config.s3_secure is True)}
config_args = [
'host', 'port', 'debug',
'proxy', 'proxy_port',
'proxy_user', 'proxy_pass',
'calling_format'
]
for arg in config_args:
confkey = 'boto_' + arg
if getattr(self._config, confkey, None) is not None:
kwargs[arg] = getattr(self._config, confkey)
return kwargs
def _debug_key(self, key):
"""Used for debugging only."""
orig_meth = key.bucket.connection.make_request
def new_meth(*args, **kwargs):
print('#' * 16)
print(args)
print(kwargs)
print('#' * 16)
return orig_meth(*args, **kwargs)
key.bucket.connection.make_request = new_meth
def _init_path(self, path=None):
path = os.path.join(self._root_path, path) if path else self._root_path
if path and path[0] == '/':
return path[1:]
return path
def stream_read(self, path, bytes_range=None):
path = self._init_path(path)
headers = None
if bytes_range:
headers = {'Range': 'bytes={0}-{1}'.format(*bytes_range)}
key = self._boto_bucket.lookup(path, headers=headers)
if not key:
raise FileNotFoundError('%s is not there' % path)
while True:
buf = key.read(self.buffer_size)
if not buf:
break
yield buf
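    # Illustrative call (hypothetical driver instance and path): fetch only
    # the first 100 bytes of an object via the Range header built above:
    #   data = b''.join(store.stream_read('images/layer', (0, 99)))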
def list_directory(self, path=None):
path = self._init_path(path)
if not path.endswith('/'):
path += '/'
ln = 0
if self._root_path != '/':
ln = len(self._root_path)
exists = False
for key in self._boto_bucket.list(prefix=path, delimiter='/'):
if '%s/' % key.name == path:
continue
exists = True
name = key.name
if name.endswith('/'):
yield name[ln:-1]
else:
yield name[ln:]
if not exists:
raise FileNotFoundError('%s is not there' % path)
def get_size(self, path):
path = self._init_path(path)
# Lookup does a HEAD HTTP Request on the object
key = self._boto_bucket.lookup(path)
if not key:
raise FileNotFoundError('%s is not there' % path)
return key.size
@lru.get
def get_content(self, path):
path = self._init_path(path)
key = self.makeKey(path)
if not key.exists():
raise FileNotFoundError('%s is not there' % path)
return key.get_contents_as_string()
def exists(self, path):
path = self._init_path(path)
key = self.makeKey(path)
return key.exists()
@lru.remove
def remove(self, path):
path = self._init_path(path)
key = self.makeKey(path)
if key.exists():
# It's a file
key.delete()
return
# We assume it's a directory
if not path.endswith('/'):
path += '/'
exists = False
for key in self._boto_bucket.list(prefix=path, delimiter='/'):
if '%s/' % key.name == path:
continue
exists = True
key.delete()
if not exists:
raise FileNotFoundError('%s is not there' % path)
|
import os
import platform
import sys
# Operating systems.
WINDOWS = sys.platform == "win32"
LINUX = sys.platform.startswith("linux")
# Python versions. We amend version_info with one more value, a zero if an
# official version, or 1 if built from source beyond an official version.
PYVERSION = sys.version_info + (int(platform.python_version()[-1] == "+"),)
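# For example, an official CPython 3.8.2 build reports PYVERSION as
# (3, 8, 2, 'final', 0, 0), while a source build identifying itself as
# "3.8.2+" ends in 1, so comparisons can distinguish the two.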
PY2 = PYVERSION < (3, 0)
PY3 = PYVERSION >= (3, 0)
# Python implementations.
PYPY = (platform.python_implementation() == 'PyPy')
if PYPY:
PYPYVERSION = sys.pypy_version_info
PYPY2 = PYPY and PY2
PYPY3 = PYPY and PY3
JYTHON = (platform.python_implementation() == 'Jython')
IRONPYTHON = (platform.python_implementation() == 'IronPython')
# Python behavior
class PYBEHAVIOR(object):
"""Flags indicating this Python's behavior."""
# Is "if __debug__" optimized away?
optimize_if_debug = (not PYPY)
# Is "if not __debug__" optimized away?
optimize_if_not_debug = (not PYPY) and (PYVERSION >= (3, 7, 0, 'alpha', 4))
# Is "if not __debug__" optimized away even better?
optimize_if_not_debug2 = (not PYPY) and (PYVERSION >= (3, 8, 0, 'beta', 1))
# Do we have yield-from?
yield_from = (PYVERSION >= (3, 3))
# Do we have PEP 420 namespace packages?
namespaces_pep420 = (PYVERSION >= (3, 3))
# Do .pyc files have the source file size recorded in them?
size_in_pyc = (PYVERSION >= (3, 3))
# Do we have async and await syntax?
async_syntax = (PYVERSION >= (3, 5))
# PEP 448 defined additional unpacking generalizations
unpackings_pep448 = (PYVERSION >= (3, 5))
# Can co_lnotab have negative deltas?
negative_lnotab = (PYVERSION >= (3, 6)) and not (PYPY and PYPYVERSION < (7, 2))
# Do .pyc files conform to PEP 552? Hash-based pyc's.
hashed_pyc_pep552 = (PYVERSION >= (3, 7, 0, 'alpha', 4))
# Python 3.7.0b3 changed the behavior of the sys.path[0] entry for -m. It
# used to be an empty string (meaning the current directory). It changed
# to be the actual path to the current directory, so that os.chdir wouldn't
# affect the outcome.
actual_syspath0_dash_m = (not PYPY) and (PYVERSION >= (3, 7, 0, 'beta', 3))
# When a break/continue/return statement in a try block jumps to a finally
# block, does the finally block do the break/continue/return (pre-3.8), or
# does the finally jump back to the break/continue/return (3.8) to do the
# work?
finally_jumps_back = (PYVERSION >= (3, 8))
# When a function is decorated, does the trace function get called for the
# @-line and also the def-line (new behavior in 3.8)? Or just the @-line
# (old behavior)?
trace_decorated_def = (PYVERSION >= (3, 8))
# Are while-true loops optimized into absolute jumps with no loop setup?
nix_while_true = (PYVERSION >= (3, 8))
# Python 3.9a1 made sys.argv[0] and other reported files absolute paths.
report_absolute_files = (PYVERSION >= (3, 9))
# Coverage.py specifics.
# Are we using the C-implemented trace function?
C_TRACER = os.getenv('COVERAGE_TEST_TRACER', 'c') == 'c'
# Are we coverage-measuring ourselves?
METACOV = os.getenv('COVERAGE_COVERAGE', '') != ''
# Are we running our test suite?
# Even when running tests, you can use COVERAGE_TESTING=0 to disable the
# test-specific behavior like contracts.
TESTING = os.getenv('COVERAGE_TESTING', '') == 'True'
|
from homeassistant.components.kodi.const import CONF_WS_PORT, DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from .util import MockConnection
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def init_integration(hass) -> MockConfigEntry:
"""Set up the Kodi integration in Home Assistant."""
entry_data = {
CONF_NAME: "name",
CONF_HOST: "1.1.1.1",
CONF_PORT: 8080,
CONF_WS_PORT: 9090,
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
CONF_SSL: False,
}
entry = MockConfigEntry(domain=DOMAIN, data=entry_data, title="name")
entry.add_to_hass(hass)
with patch("homeassistant.components.kodi.Kodi.ping", return_value=True), patch(
"homeassistant.components.kodi.Kodi.get_application_properties",
return_value={"version": {"major": 1, "minor": 1}},
), patch(
"homeassistant.components.kodi.get_kodi_connection",
return_value=MockConnection(),
):
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
|
import pytest
import homeassistant.components.logentries as logentries
from homeassistant.const import EVENT_STATE_CHANGED, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, call, patch
async def test_setup_config_full(hass):
"""Test setup with all data."""
config = {"logentries": {"token": "secret"}}
hass.bus.listen = MagicMock()
assert await async_setup_component(hass, logentries.DOMAIN, config)
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
async def test_setup_config_defaults(hass):
"""Test setup with defaults."""
config = {"logentries": {"token": "token"}}
hass.bus.listen = MagicMock()
assert await async_setup_component(hass, logentries.DOMAIN, config)
assert hass.bus.listen.called
assert EVENT_STATE_CHANGED == hass.bus.listen.call_args_list[0][0][0]
@pytest.fixture
def mock_dump():
"""Mock json dumps."""
with patch("json.dumps") as mock_dump:
yield mock_dump
@pytest.fixture
def mock_requests():
"""Mock requests."""
with patch.object(logentries, "requests") as mock_requests:
yield mock_requests
async def test_event_listener(hass, mock_dump, mock_requests):
"""Test event listener."""
mock_dump.side_effect = lambda x: x
mock_post = mock_requests.post
mock_requests.exceptions.RequestException = Exception
config = {"logentries": {"token": "token"}}
hass.bus.listen = MagicMock()
assert await async_setup_component(hass, logentries.DOMAIN, config)
handler_method = hass.bus.listen.call_args_list[0][0][1]
valid = {"1": 1, "1.0": 1.0, STATE_ON: 1, STATE_OFF: 0, "foo": "foo"}
for in_, out in valid.items():
state = MagicMock(state=in_, domain="fake", object_id="entity", attributes={})
event = MagicMock(data={"new_state": state}, time_fired=12345)
body = [
{
"domain": "fake",
"entity_id": "entity",
"attributes": {},
"time": "12345",
"value": out,
}
]
payload = {
"host": "https://webhook.logentries.com/noformat/logs/token",
"event": body,
}
handler_method(event)
assert mock_post.call_count == 1
assert mock_post.call_args == call(payload["host"], data=payload, timeout=10)
mock_post.reset_mock()
|
from flask import Flask
from flexx import flx
from flexxamples.howtos.editor_cm import CodeEditor
# Define an app
class MyApp(flx.Widget):
def init(self):
with flx.HBox():
CodeEditor(flex=1)
flx.Widget(flex=1)
# Dump it to a dictionary of assets that we can serve. Make the main
# page index.html. The link=0 means to pack the whole app into a single
# html page (note that data (e.g. images) will still be separate).
flx_app = flx.App(MyApp)
assets = flx_app.dump('index.html', link=0)
# Do the flask thing
app = Flask(__name__)
@app.route('/')
def handler():
return assets['index.html'].decode()
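# A minimal sketch for serving any assets that are not inlined into the page
# (e.g. image data); the route and bytes handling here are assumptions, not
# part of the original example.
@app.route('/<path:path>')
def asset_handler(path):
    data = assets.get(path)
    if data is None:
        return 'not found', 404
    return data  # Flask serves bytes or str responses directly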
if __name__ == '__main__':
app.run(host='localhost', port=8080)
|