from __future__ import absolute_import
import logging
import time
from absl import flags
# This is the path that we SCP object_storage_interface to.
from providers import object_storage_interface
from google.cloud import storage
FLAGS = flags.FLAGS
class GcsService(object_storage_interface.ObjectStorageServiceBase):
"""An interface to Google Cloud Storage, using the python library."""
def __init__(self):
self.client = storage.Client()
def ListObjects(self, bucket_name, prefix):
bucket = storage.bucket.Bucket(self.client, bucket_name)
return [obj.name for obj in self.client.list_blobs(bucket, prefix=prefix)]
def DeleteObjects(self,
bucket_name,
objects_to_delete,
objects_deleted=None,
delay_time=0,
object_sizes=None):
start_times = []
latencies = []
sizes = []
bucket = storage.bucket.Bucket(self.client, bucket_name)
for index, object_name in enumerate(objects_to_delete):
time.sleep(delay_time)
try:
start_time = time.time()
obj = storage.blob.Blob(object_name, bucket)
obj.delete(client=self.client)
latency = time.time() - start_time
start_times.append(start_time)
latencies.append(latency)
if objects_deleted is not None:
objects_deleted.append(object_name)
if object_sizes:
sizes.append(object_sizes[index])
except Exception as e: # pylint: disable=broad-except
logging.exception('Caught exception while deleting object %s: %s',
object_name, e)
return start_times, latencies, sizes
def BulkDeleteObjects(self, bucket_name, objects_to_delete, delay_time):
bucket = storage.bucket.Bucket(self.client, bucket_name)
time.sleep(delay_time)
start_time = time.time()
bucket.delete_blobs(objects_to_delete)
latency = time.time() - start_time
return start_time, latency
def WriteObjectFromBuffer(self, bucket_name, object_name, stream, size):
stream.seek(0)
start_time = time.time()
data = str(stream.read(size))
bucket = storage.bucket.Bucket(self.client, bucket_name)
obj = storage.blob.Blob(object_name, bucket)
obj.upload_from_string(data, client=self.client)
latency = time.time() - start_time
return start_time, latency
def ReadObject(self, bucket_name, object_name):
start_time = time.time()
bucket = storage.bucket.Bucket(self.client, bucket_name)
obj = storage.blob.Blob(object_name, bucket)
obj.download_as_string(client=self.client)
latency = time.time() - start_time
return start_time, latency
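# --- Hedged usage sketch (added for illustration; not part of the original
# module). Assumes Google Cloud credentials are configured and that
# 'my-bucket' is a placeholder for an existing bucket you control.
if __name__ == '__main__':
    import io

    service = GcsService()
    start, latency = service.WriteObjectFromBuffer(
        'my-bucket', 'demo-object', io.BytesIO(b'hello world'), size=11)
    print('Upload started at %s and took %.3fs' % (start, latency))
    print(service.ListObjects('my-bucket', prefix='demo'))
    service.DeleteObjects('my-bucket', ['demo-object'])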
|
import os
import atexit
import subprocess
process = subprocess.Popen(
args=('node', os.path.join(os.path.dirname(__file__), 'test_server.js'),),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
# Ensure the process is killed on exit
atexit.register(lambda _process: _process.kill(), process)
def read_line():
return process.stdout.readline().decode('utf-8')
output = read_line()
if output.strip() == '':
output += read_line()
if 'React render server' not in output:
if 'module.js' in output:
line = read_line()
while line:
output += line + os.linesep
line = read_line()
raise Exception('Unexpected output from render server subprocess...' + os.linesep + os.linesep + output)
|
from unittest import TestCase
from unittest.mock import patch
from httpobs.scanner.utils import valid_hostname
class TestValidHostname(TestCase):
def test_valid_hostname(self):
# TODO: Try to find a site with www.site.foo but not site.foo
self.assertTrue(valid_hostname('mozilla.org'))
self.assertTrue(valid_hostname('www.mozilla.org'))
def test_invalid_hostname(self):
self.assertFalse(valid_hostname('.com'))
self.assertFalse(valid_hostname('foo'))
self.assertFalse(valid_hostname('localhost'))
self.assertFalse(valid_hostname('intranet'))
self.assertFalse(valid_hostname('_spf.google.com')) # no A records
self.assertFalse(valid_hostname('127.0.0.1'))
self.assertFalse(valid_hostname('2607:f8b0:4009:80b::200e'))
@patch('httpobs.scanner.utils.SCANNER_ALLOW_LOCALHOST', 'yes')
def test_valid_localhost(self):
self.assertTrue(valid_hostname('localhost'))
|
import pytest
from jinja2 import DictLoader
from jinja2 import Environment
from jinja2 import PrefixLoader
from jinja2 import Template
from jinja2 import TemplateAssertionError
from jinja2 import TemplateNotFound
from jinja2 import TemplateSyntaxError
class TestCorner:
def test_assigned_scoping(self, env):
t = env.from_string(
"""
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
"""
)
assert t.render(item=42) == "[1][2][3][4]42"
t = env.from_string(
"""
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{%- set item = 42 %}
{{- item -}}
"""
)
assert t.render() == "[1][2][3][4]42"
t = env.from_string(
"""
{%- set item = 42 %}
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
"""
)
assert t.render() == "[1][2][3][4]42"
def test_closure_scoping(self, env):
t = env.from_string(
"""
{%- set wrapper = "<FOO>" %}
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
"""
)
assert t.render() == "[1][2][3][4]<FOO>"
t = env.from_string(
"""
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{%- set wrapper = "<FOO>" %}
{{- wrapper -}}
"""
)
assert t.render() == "[1][2][3][4]<FOO>"
t = env.from_string(
"""
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
"""
)
assert t.render(wrapper=23) == "[1][2][3][4]23"
class TestBug:
def test_keyword_folding(self, env):
env = Environment()
env.filters["testing"] = lambda value, some: value + some
assert (
env.from_string("{{ 'test'|testing(some='stuff') }}").render()
== "teststuff"
)
def test_extends_output_bugs(self, env):
env = Environment(
loader=DictLoader({"parent.html": "(({% block title %}{% endblock %}))"})
)
t = env.from_string(
'{% if expr %}{% extends "parent.html" %}{% endif %}'
"[[{% block title %}title{% endblock %}]]"
"{% for item in [1, 2, 3] %}({{ item }}){% endfor %}"
)
assert t.render(expr=False) == "[[title]](1)(2)(3)"
assert t.render(expr=True) == "((title))"
def test_urlize_filter_escaping(self, env):
tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}')
assert (
            tmpl.render() == '<a href="http://www.example.org/&lt;foo" rel="noopener">'
            "http://www.example.org/&lt;foo</a>"
)
def test_loop_call_loop(self, env):
tmpl = env.from_string(
"""
{% macro test() %}
{{ caller() }}
{% endmacro %}
{% for num1 in range(5) %}
{% call test() %}
{% for num2 in range(10) %}
{{ loop.index }}
{% endfor %}
{% endcall %}
{% endfor %}
"""
)
assert tmpl.render().split() == [str(x) for x in range(1, 11)] * 5
def test_weird_inline_comment(self, env):
env = Environment(line_statement_prefix="%")
pytest.raises(
TemplateSyntaxError,
env.from_string,
"% for item in seq {# missing #}\n...% endfor",
)
def test_old_macro_loop_scoping_bug(self, env):
tmpl = env.from_string(
"{% for i in (1, 2) %}{{ i }}{% endfor %}"
"{% macro i() %}3{% endmacro %}{{ i() }}"
)
assert tmpl.render() == "123"
def test_partial_conditional_assignments(self, env):
tmpl = env.from_string("{% if b %}{% set a = 42 %}{% endif %}{{ a }}")
assert tmpl.render(a=23) == "23"
assert tmpl.render(b=True) == "42"
def test_stacked_locals_scoping_bug(self, env):
env = Environment(line_statement_prefix="#")
t = env.from_string(
"""\
# for j in [1, 2]:
# set x = 1
# for i in [1, 2]:
# print x
# if i % 2 == 0:
# set x = x + 1
# endif
# endfor
# endfor
# if a
# print 'A'
# elif b
# print 'B'
# elif c == d
# print 'C'
# else
# print 'D'
# endif
"""
)
assert t.render(a=0, b=False, c=42, d=42.0) == "1111C"
def test_stacked_locals_scoping_bug_twoframe(self, env):
t = Template(
"""
{% set x = 1 %}
{% for item in foo %}
{% if item == 1 %}
{% set x = 2 %}
{% endif %}
{% endfor %}
{{ x }}
"""
)
rv = t.render(foo=[1]).strip()
assert rv == "1"
def test_call_with_args(self, env):
t = Template(
"""{% macro dump_users(users) -%}
<ul>
{%- for user in users -%}
<li><p>{{ user.username|e }}</p>{{ caller(user) }}</li>
{%- endfor -%}
</ul>
{%- endmacro -%}
{% call(user) dump_users(list_of_user) -%}
<dl>
<dl>Realname</dl>
<dd>{{ user.realname|e }}</dd>
<dl>Description</dl>
<dd>{{ user.description }}</dd>
</dl>
{% endcall %}"""
)
assert [
x.strip()
for x in t.render(
list_of_user=[
{
"username": "apo",
"realname": "something else",
"description": "test",
}
]
).splitlines()
] == [
"<ul><li><p>apo</p><dl>",
"<dl>Realname</dl>",
"<dd>something else</dd>",
"<dl>Description</dl>",
"<dd>test</dd>",
"</dl>",
"</li></ul>",
]
def test_empty_if_condition_fails(self, env):
pytest.raises(TemplateSyntaxError, Template, "{% if %}....{% endif %}")
pytest.raises(
TemplateSyntaxError, Template, "{% if foo %}...{% elif %}...{% endif %}"
)
pytest.raises(TemplateSyntaxError, Template, "{% for x in %}..{% endfor %}")
def test_recursive_loop_compile(self, env):
Template(
"""
{% for p in foo recursive%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
"""
)
Template(
"""
{% for p in foo%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
"""
)
def test_else_loop_bug(self, env):
t = Template(
"""
{% for x in y %}
{{ loop.index0 }}
{% else %}
{% for i in range(3) %}{{ i }}{% endfor %}
{% endfor %}
"""
)
assert t.render(y=[]).strip() == "012"
def test_correct_prefix_loader_name(self, env):
env = Environment(loader=PrefixLoader({"foo": DictLoader({})}))
with pytest.raises(TemplateNotFound) as e:
env.get_template("foo/bar.html")
assert e.value.name == "foo/bar.html"
def test_contextfunction_callable_classes(self, env):
from jinja2.utils import contextfunction
class CallableClass:
@contextfunction
def __call__(self, ctx):
return ctx.resolve("hello")
tpl = Template("""{{ callableclass() }}""")
output = tpl.render(callableclass=CallableClass(), hello="TEST")
expected = "TEST"
assert output == expected
def test_block_set_with_extends(self):
env = Environment(
loader=DictLoader({"main": "{% block body %}[{{ x }}]{% endblock %}"})
)
t = env.from_string('{% extends "main" %}{% set x %}42{% endset %}')
assert t.render() == "[42]"
def test_nested_for_else(self, env):
tmpl = env.from_string(
"{% for x in y %}{{ loop.index0 }}{% else %}"
"{% for i in range(3) %}{{ i }}{% endfor %}"
"{% endfor %}"
)
assert tmpl.render() == "012"
def test_macro_var_bug(self, env):
tmpl = env.from_string(
"""
{% set i = 1 %}
{% macro test() %}
{% for i in range(0, 10) %}{{ i }}{% endfor %}
{% endmacro %}{{ test() }}
"""
)
assert tmpl.render().strip() == "0123456789"
def test_macro_var_bug_advanced(self, env):
tmpl = env.from_string(
"""
{% macro outer() %}
{% set i = 1 %}
{% macro test() %}
{% for i in range(0, 10) %}{{ i }}{% endfor %}
{% endmacro %}{{ test() }}
{% endmacro %}{{ outer() }}
"""
)
assert tmpl.render().strip() == "0123456789"
def test_callable_defaults(self):
env = Environment()
env.globals["get_int"] = lambda: 42
t = env.from_string(
"""
{% macro test(a, b, c=get_int()) -%}
{{ a + b + c }}
{%- endmacro %}
{{ test(1, 2) }}|{{ test(1, 2, 3) }}
"""
)
assert t.render().strip() == "45|6"
def test_macro_escaping(self):
env = Environment(
autoescape=lambda x: False, extensions=["jinja2.ext.autoescape"]
)
template = "{% macro m() %}<html>{% endmacro %}"
template += "{% autoescape true %}{{ m() }}{% endautoescape %}"
assert env.from_string(template).render()
def test_macro_scoping(self, env):
tmpl = env.from_string(
"""
{% set n=[1,2,3,4,5] %}
{% for n in [[1,2,3], [3,4,5], [5,6,7]] %}
{% macro x(l) %}
{{ l.pop() }}
{% if l %}{{ x(l) }}{% endif %}
{% endmacro %}
{{ x(n) }}
{% endfor %}
"""
)
assert list(map(int, tmpl.render().split())) == [3, 2, 1, 5, 4, 3, 7, 6, 5]
def test_scopes_and_blocks(self):
env = Environment(
loader=DictLoader(
{
"a.html": """
{%- set foo = 'bar' -%}
{% include 'x.html' -%}
""",
"b.html": """
{%- set foo = 'bar' -%}
{% block test %}{% include 'x.html' %}{% endblock -%}
""",
"c.html": """
{%- set foo = 'bar' -%}
{% block test %}{% set foo = foo
%}{% include 'x.html' %}{% endblock -%}
""",
"x.html": """{{ foo }}|{{ test }}""",
}
)
)
a = env.get_template("a.html")
b = env.get_template("b.html")
c = env.get_template("c.html")
assert a.render(test="x").strip() == "bar|x"
assert b.render(test="x").strip() == "bar|x"
assert c.render(test="x").strip() == "bar|x"
def test_scopes_and_include(self):
env = Environment(
loader=DictLoader(
{
"include.html": "{{ var }}",
"base.html": '{% include "include.html" %}',
"child.html": '{% extends "base.html" %}{% set var = 42 %}',
}
)
)
t = env.get_template("child.html")
assert t.render() == "42"
def test_caller_scoping(self, env):
t = env.from_string(
"""
{% macro detail(icon, value) -%}
{% if value -%}
<p><span class="fa fa-fw fa-{{ icon }}"></span>
{%- if caller is undefined -%}
{{ value }}
{%- else -%}
{{ caller(value, *varargs) }}
{%- endif -%}</p>
{%- endif %}
{%- endmacro %}
{% macro link_detail(icon, value, href) -%}
{% call(value, href) detail(icon, value, href) -%}
<a href="{{ href }}">{{ value }}</a>
{%- endcall %}
{%- endmacro %}
"""
)
assert t.module.link_detail("circle", "Index", "/") == (
'<p><span class="fa fa-fw fa-circle"></span><a href="/">Index</a></p>'
)
def test_variable_reuse(self, env):
t = env.from_string("{% for x in x.y %}{{ x }}{% endfor %}")
assert t.render(x={"y": [0, 1, 2]}) == "012"
t = env.from_string("{% for x in x.y %}{{ loop.index0 }}|{{ x }}{% endfor %}")
assert t.render(x={"y": [0, 1, 2]}) == "0|01|12|2"
t = env.from_string("{% for x in x.y recursive %}{{ x }}{% endfor %}")
assert t.render(x={"y": [0, 1, 2]}) == "012"
def test_double_caller(self, env):
t = env.from_string(
"{% macro x(caller=none) %}[{% if caller %}"
"{{ caller() }}{% endif %}]{% endmacro %}"
"{{ x() }}{% call x() %}aha!{% endcall %}"
)
assert t.render() == "[][aha!]"
def test_double_caller_no_default(self, env):
with pytest.raises(TemplateAssertionError) as exc_info:
env.from_string(
"{% macro x(caller) %}[{% if caller %}"
"{{ caller() }}{% endif %}]{% endmacro %}"
)
assert exc_info.match(
r'"caller" argument must be omitted or ' r"be given a default"
)
t = env.from_string(
"{% macro x(caller=none) %}[{% if caller %}"
"{{ caller() }}{% endif %}]{% endmacro %}"
)
with pytest.raises(TypeError) as exc_info:
t.module.x(None, caller=lambda: 42)
assert exc_info.match(
r"\'x\' was invoked with two values for the " r"special caller argument"
)
def test_macro_blocks(self, env):
t = env.from_string(
"{% macro x() %}{% block foo %}x{% endblock %}{% endmacro %}{{ x() }}"
)
assert t.render() == "x"
def test_scoped_block(self, env):
t = env.from_string(
"{% set x = 1 %}{% with x = 2 %}{% block y scoped %}"
"{{ x }}{% endblock %}{% endwith %}"
)
assert t.render() == "2"
def test_recursive_loop_filter(self, env):
t = env.from_string(
"""
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{%- for page in [site.root] if page.url != this recursive %}
<url><loc>{{ page.url }}</loc></url>
{{- loop(page.children) }}
{%- endfor %}
</urlset>
"""
)
sm = t.render(
this="/foo",
site={"root": {"url": "/", "children": [{"url": "/foo"}, {"url": "/bar"}]}},
)
lines = [x.strip() for x in sm.splitlines() if x.strip()]
assert lines == [
'<?xml version="1.0" encoding="UTF-8"?>',
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">',
"<url><loc>/</loc></url>",
"<url><loc>/bar</loc></url>",
"</urlset>",
]
def test_empty_if(self, env):
t = env.from_string("{% if foo %}{% else %}42{% endif %}")
assert t.render(foo=False) == "42"
def test_subproperty_if(self, env):
t = env.from_string(
"{% if object1.subproperty1 is eq object2.subproperty2 %}42{% endif %}"
)
assert (
t.render(
object1={"subproperty1": "value"}, object2={"subproperty2": "value"}
)
== "42"
)
def test_set_and_include(self):
env = Environment(
loader=DictLoader(
{
"inc": "bar",
"main": '{% set foo = "foo" %}{{ foo }}{% include "inc" %}',
}
)
)
assert env.get_template("main").render() == "foobar"
def test_loop_include(self):
env = Environment(
loader=DictLoader(
{
"inc": "{{ i }}",
"main": '{% for i in [1, 2, 3] %}{% include "inc" %}{% endfor %}',
}
)
)
assert env.get_template("main").render() == "123"
def test_grouper_repr(self):
from jinja2.filters import _GroupTuple
t = _GroupTuple("foo", [1, 2])
assert t.grouper == "foo"
assert t.list == [1, 2]
assert repr(t) == "('foo', [1, 2])"
assert str(t) == "('foo', [1, 2])"
def test_custom_context(self, env):
from jinja2.runtime import Context
class MyContext(Context):
pass
class MyEnvironment(Environment):
context_class = MyContext
loader = DictLoader({"base": "{{ foobar }}", "test": '{% extends "base" %}'})
env = MyEnvironment(loader=loader)
assert env.get_template("test").render(foobar="test") == "test"
def test_legacy_custom_context(self, env):
from jinja2.runtime import Context, missing
class MyContext(Context):
def resolve(self, name):
if name == "foo":
return 42
return super().resolve(name)
x = MyContext(env, parent={"bar": 23}, name="foo", blocks={})
assert x._legacy_resolve_mode
assert x.resolve_or_missing("foo") == 42
assert x.resolve_or_missing("bar") == 23
assert x.resolve_or_missing("baz") is missing
def test_recursive_loop_bug(self, env):
tmpl = env.from_string(
"{%- for value in values recursive %}1{% else %}0{% endfor -%}"
)
assert tmpl.render(values=[]) == "0"
def test_markup_and_chainable_undefined(self):
from jinja2 import Markup
from jinja2.runtime import ChainableUndefined
assert str(Markup(ChainableUndefined())) == ""
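# --- Added note (hedged): the tests above expect a pytest fixture named
# `env`, normally supplied by Jinja's own conftest. A minimal equivalent,
# assuming a plain Environment with default settings, would be:
@pytest.fixture
def env():
    """Return a fresh jinja2 Environment for each test."""
    return Environment()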
|
import asyncio
import logging
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import const
async def test_send_big_result(hass, websocket_client):
"""Test sending big results over the WS."""
@websocket_api.websocket_command({"type": "big_result"})
@websocket_api.async_response
async def send_big_result(hass, connection, msg):
await connection.send_big_result(msg["id"], {"big": "result"})
hass.components.websocket_api.async_register_command(send_big_result)
await websocket_client.send_json({"id": 5, "type": "big_result"})
msg = await websocket_client.receive_json()
assert msg["id"] == 5
assert msg["type"] == const.TYPE_RESULT
assert msg["success"]
assert msg["result"] == {"big": "result"}
async def test_exception_handling():
"""Test handling of exceptions."""
send_messages = []
conn = websocket_api.ActiveConnection(
logging.getLogger(__name__), None, send_messages.append, None, None
)
for (exc, code, err) in (
(exceptions.Unauthorized(), websocket_api.ERR_UNAUTHORIZED, "Unauthorized"),
(
vol.Invalid("Invalid something"),
websocket_api.ERR_INVALID_FORMAT,
"Invalid something. Got {'id': 5}",
),
(asyncio.TimeoutError(), websocket_api.ERR_TIMEOUT, "Timeout"),
(
exceptions.HomeAssistantError("Failed to do X"),
websocket_api.ERR_UNKNOWN_ERROR,
"Failed to do X",
),
(ValueError("Really bad"), websocket_api.ERR_UNKNOWN_ERROR, "Unknown error"),
):
send_messages.clear()
conn.async_handle_exception({"id": 5}, exc)
assert len(send_messages) == 1
assert send_messages[0]["error"]["code"] == code
assert send_messages[0]["error"]["message"] == err
|
import asyncio
from collections import OrderedDict
import logging
import os
from typing import Dict
import async_timeout
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.util.json import load_json
from .const import DATA_SDM, DOMAIN, SDM_SCOPES
DATA_FLOW_IMPL = "nest_flow_implementation"
_LOGGER = logging.getLogger(__name__)
@callback
def register_flow_implementation(hass, domain, name, gen_authorize_url, convert_code):
"""Register a flow implementation for legacy api.
domain: Domain of the component responsible for the implementation.
name: Name of the component.
gen_authorize_url: Coroutine function to generate the authorize url.
convert_code: Coroutine function to convert a code to an access token.
"""
if DATA_FLOW_IMPL not in hass.data:
hass.data[DATA_FLOW_IMPL] = OrderedDict()
hass.data[DATA_FLOW_IMPL][domain] = {
"domain": domain,
"name": name,
"gen_authorize_url": gen_authorize_url,
"convert_code": convert_code,
}
class NestAuthError(HomeAssistantError):
"""Base class for Nest auth errors."""
class CodeInvalid(NestAuthError):
"""Raised when invalid authorization code."""
class UnexpectedStateError(HomeAssistantError):
"""Raised when the config flow is invoked in a 'should not happen' case."""
@config_entries.HANDLERS.register(DOMAIN)
class NestFlowHandler(
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
"""Config flow to handle authentication for both APIs."""
DOMAIN = DOMAIN
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_PUSH
@classmethod
def register_sdm_api(cls, hass):
"""Configure the flow handler to use the SDM API."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_SDM] = {}
def is_sdm_api(self):
"""Return true if this flow is setup to use SDM API."""
return DOMAIN in self.hass.data and DATA_SDM in self.hass.data[DOMAIN]
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
@property
def extra_authorize_data(self) -> Dict[str, str]:
"""Extra data that needs to be appended to the authorize url."""
return {
"scope": " ".join(SDM_SCOPES),
# Add params to ensure we get back a refresh token
"access_type": "offline",
"prompt": "consent",
}
async def async_oauth_create_entry(self, data: dict) -> dict:
"""Create an entry for the SDM flow."""
data[DATA_SDM] = {}
return await super().async_oauth_create_entry(data)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
if self.is_sdm_api():
return await super().async_step_user(user_input)
return await self.async_step_init(user_input)
async def async_step_init(self, user_input=None):
"""Handle a flow start."""
if self.is_sdm_api():
raise UnexpectedStateError("Step only supported for legacy API")
flows = self.hass.data.get(DATA_FLOW_IMPL, {})
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="single_instance_allowed")
if not flows:
return self.async_abort(reason="missing_configuration")
if len(flows) == 1:
self.flow_impl = list(flows)[0]
return await self.async_step_link()
if user_input is not None:
self.flow_impl = user_input["flow_impl"]
return await self.async_step_link()
return self.async_show_form(
step_id="init",
data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}),
)
async def async_step_link(self, user_input=None):
"""Attempt to link with the Nest account.
Route the user to a website to authenticate with Nest. Depending on
implementation type we expect a pin or an external component to
deliver the authentication code.
"""
if self.is_sdm_api():
raise UnexpectedStateError("Step only supported for legacy API")
flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl]
errors = {}
if user_input is not None:
try:
with async_timeout.timeout(10):
tokens = await flow["convert_code"](user_input["code"])
return self._entry_from_tokens(
f"Nest (via {flow['name']})", flow, tokens
)
except asyncio.TimeoutError:
errors["code"] = "timeout"
except CodeInvalid:
errors["code"] = "invalid_pin"
except NestAuthError:
errors["code"] = "unknown"
except Exception: # pylint: disable=broad-except
errors["code"] = "internal_error"
_LOGGER.exception("Unexpected error resolving code")
try:
with async_timeout.timeout(10):
url = await flow["gen_authorize_url"](self.flow_id)
except asyncio.TimeoutError:
return self.async_abort(reason="authorize_url_timeout")
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected error generating auth url")
return self.async_abort(reason="authorize_url_fail")
return self.async_show_form(
step_id="link",
description_placeholders={"url": url},
data_schema=vol.Schema({vol.Required("code"): str}),
errors=errors,
)
async def async_step_import(self, info):
"""Import existing auth from Nest."""
if self.is_sdm_api():
raise UnexpectedStateError("Step only supported for legacy API")
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="single_instance_allowed")
config_path = info["nest_conf_path"]
if not await self.hass.async_add_executor_job(os.path.isfile, config_path):
self.flow_impl = DOMAIN
return await self.async_step_link()
flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN]
tokens = await self.hass.async_add_executor_job(load_json, config_path)
return self._entry_from_tokens(
"Nest (import from configuration.yaml)", flow, tokens
)
@callback
def _entry_from_tokens(self, title, flow, tokens):
"""Create an entry from tokens."""
return self.async_create_entry(
title=title, data={"tokens": tokens, "impl_domain": flow["domain"]}
)
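# --- Hedged usage sketch (added for illustration; not part of the original
# module). Shows one way a legacy flow implementation might be registered;
# the URL and token payload below are placeholders, not the real Nest API.
def _example_register_legacy_impl(hass):
    """Register a dummy legacy implementation (illustrative only)."""

    async def gen_authorize_url(flow_id):
        # Placeholder authorize URL, not Nest's real endpoint.
        return f"https://example.com/authorize?flow_id={flow_id}"

    async def convert_code(code):
        # Placeholder token payload, not a real OAuth exchange.
        return {"access_token": f"token-for-{code}"}

    register_flow_implementation(
        hass, DOMAIN, "Nest (legacy)", gen_authorize_url, convert_code
    )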
|
import gc
import sys
import weakref
from flexx.util.testing import run_tests_if_main, skipif, skip, raises
from flexx.event.both_tester import run_in_both, this_is_js
from flexx.util.logging import capture_log
from flexx import event
loop = event.loop
logger = event.logger
class Node(event.Component):
val = event.IntProp(settable=True)
parent = event.ComponentProp(settable=True)
children = event.TupleProp(settable=True)
@event.reaction('parent.val')
def handle_parent_val(self, *events):
xx = []
for ev in events:
if self.parent:
xx.append(self.parent.val)
else:
xx.append(None)
print('parent.val ' + ', '.join([str(x) for x in xx]))
@event.reaction('children*.val')
def handle_children_val(self, *events):
xx = []
for ev in events:
if isinstance(ev.new_value, (int, float)):
xx.append(ev.new_value)
else:
xx.append(None)
print('children.val ' + ', '.join([str(x) for x in xx]))
@run_in_both(Node)
def test_dynamism1():
"""
parent.val 17
parent.val 18
parent.val 29
done
"""
n = Node()
n1 = Node()
n2 = Node()
loop.iter()
    with loop:  # does not get triggered, because n1.val was not set
n.set_parent(n1)
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # does not trigger
n.set_parent(n2)
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_parent(None)
with loop:
n1.set_val(11)
n2.set_val(21)
print('done')
@run_in_both(Node)
def test_dynamism2a():
"""
parent.val 17
parent.val 18
parent.val 29
[17, 18, 29]
"""
n = Node()
n1 = Node()
n2 = Node()
res = []
def func(*events):
for ev in events:
if n.parent:
res.append(n.parent.val)
else:
res.append(None)
n.reaction(func, 'parent.val')
loop.iter()
    with loop:  # does not get triggered, because n1.val was not set
n.set_parent(n1)
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # does not trigger
n.set_parent(n2)
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_parent(None)
with loop:
n1.set_val(11)
n2.set_val(21)
print(res)
@run_in_both(Node)
def test_dynamism2b():
"""
parent.val 17
parent.val 18
parent.val 29
[None, None, 17, 18, None, 29, None]
"""
n = Node()
n1 = Node()
n2 = Node()
res = []
def func(*events):
for ev in events:
if ev.type == 'val':
res.append(n.parent.val)
else:
res.append(None)
handler = n.reaction(func, 'parent', 'parent.val') # also connect to parent
loop.iter()
    with loop:  # does not get triggered, because n1.val was not set
n.set_parent(n1)
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # does not trigger
n.set_parent(n2)
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_parent(None)
with loop:
n1.set_val(11)
n2.set_val(21)
print(res)
@run_in_both(Node)
def test_dynamism3():
"""
children.val 17, 27
children.val 18, 28
children.val 29
done
"""
n = Node()
n1 = Node()
n2 = Node()
loop.iter()
with loop: # no trigger
n.set_children((n1, n2))
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # no trigger
n.set_children((n2, ))
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_children(())
with loop:
n1.set_val(11)
n2.set_val(21)
print('done')
@run_in_both(Node)
def test_dynamism4a():
"""
children.val 17, 27
children.val 18, 28
children.val 29
[17, 27, 18, 28, 29]
"""
n = Node()
n1 = Node()
n2 = Node()
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
res.append(ev.new_value)
else:
res.append(None)
handler = n.reaction(func, 'children*.val')
loop.iter()
with loop: # no trigger
n.set_children((n1, n2))
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # no trigger
n.set_children((n2, ))
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_children(())
with loop:
n1.set_val(11)
n2.set_val(21)
print(res)
@run_in_both(Node)
def test_dynamism4b():
"""
children.val 17, 27
children.val 18, 28
children.val 29
[None, None, 17, 27, 18, 28, None, 29, None]
"""
n = Node()
n1 = Node()
n2 = Node()
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
res.append(ev.new_value)
else:
res.append(None)
handler = n.reaction(func, 'children', 'children*.val') # also connect children
loop.iter()
with loop: # no trigger
n.set_children((n1, n2))
n.set_val(42)
with loop:
n1.set_val(17)
n2.set_val(27)
with loop:
n1.set_val(18)
n2.set_val(28)
with loop: # no trigger
n.set_children((n2, ))
with loop:
n1.set_val(19)
n2.set_val(29)
with loop:
n.set_children(())
with loop:
n1.set_val(11)
n2.set_val(21)
print(res)
@run_in_both(Node)
def test_dynamism5a():
"""
[0, 17, 18, 19]
"""
# connection strings with static attributes - no reconnect
n = Node()
n1 = Node()
n.foo = n1
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
res.append(ev.new_value)
else:
res.append(None)
# because the connection is fully resolved upon connecting, and at that time
# the object is still in its init stage, the handler does get the init event
# with value 0.
handler = n.reaction(func, 'foo.val')
loop.iter()
with loop:
n.set_val(42)
with loop:
n1.set_val(17)
n1.set_val(18)
with loop:
n.foo = None # no reconnect in this case
with loop:
n1.set_val(19)
print(res)
@run_in_both(Node)
def test_dynamism5b():
"""
[17, 18, 19]
"""
# connection strings with static attributes - no reconnect
n = Node()
n1 = Node()
n.foo = n1
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
res.append(ev.new_value)
else:
res.append(None)
# But not now
loop.iter() # <-- only change
handler = n.reaction(func, 'foo.val')
loop.iter()
with loop:
n.set_val(42)
with loop:
n1.set_val(17)
n1.set_val(18)
with loop:
n.foo = None # no reconnect in this case
with loop:
n1.set_val(19)
print(res)
@run_in_both(Node)
def test_deep1():
"""
children.val 7
children.val 8
children.val 17
[7, 8, 17]
"""
# deep connectors
n = Node()
n1 = Node()
n2 = Node()
n.set_children((Node(), n1))
loop.iter()
n.children[0].set_children((Node(), n2))
loop.iter()
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
if ev.new_value:
res.append(ev.new_value)
else:
res.append(None)
handler = n.reaction(func, 'children**.val')
loop.iter()
# We want these
with loop:
n1.set_val(7)
with loop:
n2.set_val(8)
# But not these
with loop:
n.set_val(42)
with loop:
n1.set_children((Node(), Node()))
n.children[0].set_children([])
# again ...
with loop:
n1.set_val(17)
with loop:
n2.set_val(18) # n2 is no longer in the tree
print(res)
@run_in_both(Node)
def test_deep2():
"""
children.val 11
children.val 12
['id12', 'id11', 'id10', 'id11']
"""
# deep connectors - string ends in deep connector
n = Node()
n1 = Node()
n2 = Node()
n.set_children((Node(), n1))
loop.iter()
n.children[0].set_children((Node(), n2))
loop.iter()
res = []
def func(*events):
for ev in events:
if isinstance(ev.new_value, (float, int)):
res.append(ev.new_value)
elif ev.type == 'children':
if ev.source.val:
res.append('id%i' % ev.source.val)
else:
res.append(None)
handler = n.reaction(func, 'children**')
loop.iter()
    # Give vals to identify the nodes by - these should have no effect on res though
with loop:
n.set_val(10)
with loop:
n1.set_val(11)
with loop:
n2.set_val(12)
# Change children
with loop:
n2.set_children((Node(), Node(), Node()))
n1.set_children((Node(), Node()))
n.set_children((Node(), n1, Node()))
with loop:
n2.set_children([]) # no longer in the tree
n1.set_children([])
print(res)
class TestOb(event.Component):
children = event.TupleProp(settable=True)
foo = event.StringProp(settable=True)
class Tester(event.Component):
children = event.TupleProp(settable=True)
@event.reaction('children**.foo')
def track_deep(self, *events):
for ev in events:
if ev.new_value:
print(ev.new_value)
@event.action
def set_foos(self, prefix):
for i, child in enumerate(self.children):
child.set_foo(prefix + str(i))
for j, subchild in enumerate(child.children):
subchild.set_foo(prefix + str(i) + str(j))
@event.action
def make_children1(self):
t1 = TestOb()
t2 = TestOb()
t1.set_children((TestOb(), ))
t2.set_children((TestOb(), ))
self.set_children(t1, t2)
@event.action
def make_children2(self):
for i, child in enumerate(self.children):
child.set_children(child.children + (TestOb(), ))
@event.action
def make_children3(self):
# See issue #460
t = TestOb()
my_children = self.children
self.set_children(my_children + (t, ))
for i, child in enumerate(my_children):
child.set_children(child.children + (t, ))
self.set_children(my_children)
@run_in_both(TestOb, Tester)
def test_issue_460_and_more():
"""
A0
A00
A1
A10
-
B0
B00
B01
B1
B10
B11
-
C0
C00
C01
C02
C1
C10
C11
C12
"""
tester = Tester()
loop.iter()
tester.make_children1()
loop.iter()
tester.set_foos('A')
loop.iter()
print('-')
tester.make_children2()
loop.iter()
tester.set_foos('B')
loop.iter()
print('-')
tester.make_children3()
loop.iter()
tester.set_foos('C')
loop.iter()
## Python only
class MyComponent(event.Component):
a = event.AnyProp()
aa = event.TupleProp()
def test_connectors1():
""" test connectors """
x = MyComponent()
def foo(*events):
pass
# Can haz any char in label
with capture_log('warning') as log:
h = x.reaction(foo, 'a:+asdkjb&^*!')
type = h.get_connection_info()[0][1][0]
assert type.startswith('a:')
assert not log
# Warn if no known event
with capture_log('warning') as log:
h = x.reaction(foo, 'b')
assert log
x._Component__handlers.pop('b')
    # Suppress warn
with capture_log('warning') as log:
h = x.reaction(foo, '!b')
assert not log
x._Component__handlers.pop('b')
    # Suppress warn, with label
with capture_log('warning') as log:
h = x.reaction(foo, '!b:meh')
assert not log
x._Component__handlers.pop('b')
    # Suppress warn, with label - not like this
with capture_log('warning') as log:
h = x.reaction(foo, 'b:meh!')
assert log
assert 'does not exist' in log[0]
x._Component__handlers.pop('b')
# Invalid syntax - but fix and warn
with capture_log('warning') as log:
h = x.reaction(foo, 'b!:meh')
assert log
assert 'Exclamation mark' in log[0]
def test_connectors2():
""" test connectors with sub """
x = MyComponent()
y = MyComponent()
x.sub = [y]
def foo(*events):
pass
# Warn if no known event
with capture_log('warning') as log:
h = x.reaction(foo, 'sub*.b')
assert log
y._Component__handlers.pop('b')
    # Suppress warn
with capture_log('warning') as log:
h = x.reaction(foo, '!sub*.b')
assert not log
y._Component__handlers.pop('b')
    # Suppress warn, with label
with capture_log('warning') as log:
h = x.reaction(foo, '!sub*.b:meh')
assert not log
y._Component__handlers.pop('b')
# Invalid syntax - but fix and warn
with capture_log('warning') as log:
h = x.reaction(foo, 'sub*.!b:meh')
assert log
assert 'Exclamation mark' in log[0]
y._Component__handlers.pop('b')
# Position of *
with capture_log('warning') as log:
h = x.reaction(foo, 'sub*.a')
assert not log
with capture_log('warning') as log:
h = x.reaction(foo, 'sub.*.a')
assert log
with raises(ValueError):
h = x.reaction(foo, 'sub.*a') # fail
# No star, no connection, fail!
with raises(RuntimeError):
h = x.reaction(foo, 'sub.b')
# y.a is not a list, fail!
with raises(RuntimeError):
h = y.reaction(foo, 'a*.b')
# Mix it
with capture_log('warning') as log:
h = x.reaction(foo, '!aa**')
with capture_log('warning') as log:
h = x.reaction(foo, '!aa*')
assert not log
with capture_log('warning') as log:
h = y.reaction(foo, '!aa*')
assert not log
with capture_log('warning') as log:
h = x.reaction(foo, '!aa**')
assert not log
with capture_log('warning') as log:
h = x.reaction(foo, '!aa**:meh') # why not
assert not log
def test_dynamism_and_handler_reconnecting():
# Flexx' event system tries to be smart about reusing connections when
# reconnections are made. This tests checks that this works, and when
# it does not.
class Foo(event.Component):
def __init__(self):
super().__init__()
bars = event.ListProp(settable=True)
def disconnect(self, *args): # Detect disconnections
super().disconnect(*args)
disconnects.append(self)
class Bar(event.Component):
def __init__(self):
super().__init__()
spam = event.AnyProp(0, settable=True)
def disconnect(self, *args): # Detect disconnections
super().disconnect(*args)
disconnects.append(self)
f = Foo()
triggers = []
disconnects = []
@f.reaction('!bars*.spam')
def handle_foo(*events):
triggers.append(len(events))
assert len(triggers) == 0
assert len(disconnects) == 0
# Assign new bar objects
with event.loop:
f.set_bars([Bar(), Bar()])
#
assert len(triggers) == 0
assert len(disconnects) == 0
# Change values of bar.spam
with event.loop:
f.bars[0].set_spam(7)
f.bars[1].set_spam(42)
#
assert sum(triggers) == 2
assert len(disconnects) == 0
# Assign 3 new bar objects - old ones are disconnected
with event.loop:
f.set_bars([Bar(), Bar(), Bar()])
#
assert sum(triggers) == 2
assert len(disconnects) == 2
#
# Append to bars property
disconnects = []
with event.loop:
f.set_bars(f.bars + [Bar(), Bar()])
assert len(disconnects) == 0
# Append to bars property, drop one
disconnects = []
with event.loop:
f.set_bars(f.bars[:-1] + [Bar(), Bar()])
assert len(disconnects) == 1
# Append to bars property, drop one at the wrong end: Flexx can't optimize
disconnects = []
with event.loop:
f.set_bars(f.bars[1:] + [Bar(), Bar()])
assert len(disconnects) == len(f.bars) - 1
# Prepend to bars property
disconnects = []
with event.loop:
f.set_bars([Bar(), Bar()] + f.bars)
assert len(disconnects) == 0
# Prepend to bars property, drop one
disconnects = []
with event.loop:
f.set_bars([Bar(), Bar()] + f.bars[1:])
assert len(disconnects) == 1
# Prepend to bars property, drop one at the wrong end: Flexx can't optimize
disconnects = []
with event.loop:
f.set_bars([Bar(), Bar()] + f.bars[:-1])
assert len(disconnects) == len(f.bars) - 1
run_tests_if_main()
|
from collections import deque
class AssetPriceBuffers(object):
"""
Utility class to store double-ended queue ("deque")
based price buffers for usage in lookback-based
indicator calculations.
Parameters
----------
assets : `list[str]`
The list of assets to create price buffers for.
lookbacks : `list[int]`, optional
The number of lookback periods to store prices for.
"""
def __init__(self, assets, lookbacks=[12]):
self.assets = assets
self.lookbacks = lookbacks
self.prices = self._create_all_assets_prices_buffer_dict()
@staticmethod
def _asset_lookback_key(asset, lookback):
"""
Create the buffer dictionary lookup key based
on asset name and lookback period.
Parameters
----------
asset : `str`
The asset symbol name.
lookback : `int`
The lookback period.
Returns
-------
`str`
The lookup key.
"""
return '%s_%s' % (asset, lookback)
def _create_single_asset_prices_buffer_dict(self, asset):
"""
Creates a dictionary of asset-lookback pair
price buffers for a single asset.
Returns
-------
`dict{str: deque[float]}`
The price buffer dictionary.
"""
return {
AssetPriceBuffers._asset_lookback_key(
asset, lookback
): deque(maxlen=lookback)
for lookback in self.lookbacks
}
def _create_all_assets_prices_buffer_dict(self):
"""
Creates a dictionary of asset-lookback pair
price buffers for all assets.
Returns
-------
`dict{str: deque[float]}`
The price buffer dictionary.
"""
prices = {}
for asset in self.assets:
prices.update(self._create_single_asset_prices_buffer_dict(asset))
return prices
def add_asset(self, asset):
"""
Add an asset to the list of current assets. This is necessary if
the asset is part of a DynamicUniverse and isn't present at
the beginning of a backtest.
Parameters
----------
asset : `str`
The asset symbol name.
"""
if asset in self.assets:
raise ValueError(
'Unable to add asset "%s" since it already '
'exists in this price buffer.' % asset
)
else:
self.prices.update(self._create_single_asset_prices_buffer_dict(asset))
def append(self, asset, price):
"""
Append a new price onto the price deque for
the specific asset provided.
Parameters
----------
asset : `str`
The asset symbol name.
price : `float`
The new price of the asset.
"""
if price <= 0.0:
raise ValueError(
'Unable to append non-positive price of "%0.2f" '
'to metrics buffer for Asset "%s".' % (price, asset)
)
# The asset may have been added to the universe subsequent
# to the beginning of the backtest and as such needs a
# newly created pricing buffer
asset_lookback_key = AssetPriceBuffers._asset_lookback_key(asset, self.lookbacks[0])
if asset_lookback_key not in self.prices:
self.prices.update(self._create_single_asset_prices_buffer_dict(asset))
for lookback in self.lookbacks:
self.prices[
AssetPriceBuffers._asset_lookback_key(
asset, lookback
)
].append(price)
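# --- Hedged usage sketch (added for illustration; not part of the original
# module). The asset symbol and prices are made up.
if __name__ == '__main__':
    buffers = AssetPriceBuffers(['EQ:SPY'], lookbacks=[3, 5])
    for px in (100.0, 101.5, 99.8, 102.2):
        buffers.append('EQ:SPY', px)
    # The 3-period deque retains only the three most recent prices
    print(list(buffers.prices['EQ:SPY_3']))  # [101.5, 99.8, 102.2]
    print(list(buffers.prices['EQ:SPY_5']))  # [100.0, 101.5, 99.8, 102.2]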
|
import os
from gitless import core
from . import pprint
def parser(subparsers, _):
"""Adds the init parser to the given subparsers object."""
desc = (
'create an empty git repository or clone remote')
init_parser = subparsers.add_parser(
'init', help=desc, description=desc.capitalize(), aliases=['in'])
init_parser.add_argument(
'repo', nargs='?',
help=(
'an optional remote repo address from where to read to create the '
'local repo'))
init_parser.add_argument(
'-o', '--only', nargs='+',
help='use only branches given from remote repo', dest='only')
init_parser.add_argument(
'-e', '--exclude', nargs='+',
help='use everything but these branches from remote repo', dest='exclude')
init_parser.set_defaults(func=main)
def main(args, repo):
if repo:
pprint.err('You are already in a Gitless repository')
return False
core.init_repository(url=args.repo,
only=frozenset(args.only if args.only else []),
exclude=frozenset(args.exclude if args.exclude else []))
pprint.ok('Local repo created in {0}'.format(os.getcwd()))
if args.repo:
pprint.ok('Initialized from remote {0}'.format(args.repo))
return True
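# --- Added usage note (hedged): once this parser is wired into the CLI,
# typical invocations would look like the following (the remote URL and
# branch names are placeholders):
#
#   gl init                          # empty local repo in the current dir
#   gl init https://host/repo.git    # clone from a remote
#   gl init https://host/repo.git -o master develop   # only these branches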
|
from typing import TYPE_CHECKING, Hashable, Iterable, Optional, Union, overload
from . import duck_array_ops
from .computation import dot
from .options import _get_keep_attrs
from .pycompat import is_duck_dask_array
if TYPE_CHECKING:
from .dataarray import DataArray, Dataset
_WEIGHTED_REDUCE_DOCSTRING_TEMPLATE = """
Reduce this {cls}'s data by a weighted ``{fcn}`` along some dimension(s).
Parameters
----------
dim : str or sequence of str, optional
Dimension(s) over which to apply the weighted ``{fcn}``.
skipna : bool, optional
If True, skip missing values (as marked by NaN). By default, only
skips missing values for float dtypes; other dtypes either do not
have a sentinel missing value (int) or skipna=True has not been
implemented (object, datetime64 or timedelta64).
keep_attrs : bool, optional
If True, the attributes (``attrs``) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
Returns
-------
reduced : {cls}
New {cls} object with weighted ``{fcn}`` applied to its data and
the indicated dimension(s) removed.
Notes
-----
Returns {on_zero} if the ``weights`` sum to 0.0 along the reduced
dimension(s).
"""
_SUM_OF_WEIGHTS_DOCSTRING = """
Calculate the sum of weights, accounting for missing values in the data
Parameters
----------
dim : str or sequence of str, optional
Dimension(s) over which to sum the weights.
keep_attrs : bool, optional
If True, the attributes (``attrs``) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
Returns
-------
reduced : {cls}
New {cls} object with the sum of the weights over the given dimension.
"""
class Weighted:
"""An object that implements weighted operations.
You should create a Weighted object by using the ``DataArray.weighted`` or
``Dataset.weighted`` methods.
See Also
--------
Dataset.weighted
DataArray.weighted
"""
__slots__ = ("obj", "weights")
@overload
def __init__(self, obj: "DataArray", weights: "DataArray") -> None:
...
@overload
def __init__(self, obj: "Dataset", weights: "DataArray") -> None:
...
def __init__(self, obj, weights):
"""
Create a Weighted object
Parameters
----------
obj : DataArray or Dataset
Object over which the weighted reduction operation is applied.
weights : DataArray
An array of weights associated with the values in the obj.
Each value in the obj contributes to the reduction operation
according to its associated weight.
Notes
-----
``weights`` must be a ``DataArray`` and cannot contain missing values.
Missing values can be replaced by ``weights.fillna(0)``.
"""
from .dataarray import DataArray
if not isinstance(weights, DataArray):
raise ValueError("`weights` must be a DataArray")
def _weight_check(w):
# Ref https://github.com/pydata/xarray/pull/4559/files#r515968670
if duck_array_ops.isnull(w).any():
raise ValueError(
"`weights` cannot contain missing values. "
"Missing values can be replaced by `weights.fillna(0)`."
)
return w
if is_duck_dask_array(weights.data):
# assign to copy - else the check is not triggered
weights = weights.copy(
data=weights.data.map_blocks(_weight_check, dtype=weights.dtype),
deep=False,
)
else:
_weight_check(weights.data)
self.obj = obj
self.weights = weights
@staticmethod
def _reduce(
da: "DataArray",
weights: "DataArray",
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
skipna: Optional[bool] = None,
) -> "DataArray":
"""reduce using dot; equivalent to (da * weights).sum(dim, skipna)
for internal use only
"""
# need to infer dims as we use `dot`
if dim is None:
dim = ...
# need to mask invalid values in da, as `dot` does not implement skipna
if skipna or (skipna is None and da.dtype.kind in "cfO"):
da = da.fillna(0.0)
# `dot` does not broadcast arrays, so this avoids creating a large
# DataArray (if `weights` has additional dimensions)
# maybe add fasttrack (`(da * weights).sum(dims=dim, skipna=skipna)`)
return dot(da, weights, dims=dim)
def _sum_of_weights(
self, da: "DataArray", dim: Optional[Union[Hashable, Iterable[Hashable]]] = None
) -> "DataArray":
""" Calculate the sum of weights, accounting for missing values """
# we need to mask data values that are nan; else the weights are wrong
mask = da.notnull()
# bool -> int, because ``xr.dot([True, True], [True, True])`` -> True
# (and not 2); GH4074
if self.weights.dtype == bool:
sum_of_weights = self._reduce(
mask, self.weights.astype(int), dim=dim, skipna=False
)
else:
sum_of_weights = self._reduce(mask, self.weights, dim=dim, skipna=False)
# 0-weights are not valid
valid_weights = sum_of_weights != 0.0
return sum_of_weights.where(valid_weights)
def _weighted_sum(
self,
da: "DataArray",
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
skipna: Optional[bool] = None,
) -> "DataArray":
"""Reduce a DataArray by a by a weighted ``sum`` along some dimension(s)."""
return self._reduce(da, self.weights, dim=dim, skipna=skipna)
def _weighted_mean(
self,
da: "DataArray",
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
skipna: Optional[bool] = None,
) -> "DataArray":
"""Reduce a DataArray by a weighted ``mean`` along some dimension(s)."""
weighted_sum = self._weighted_sum(da, dim=dim, skipna=skipna)
sum_of_weights = self._sum_of_weights(da, dim=dim)
return weighted_sum / sum_of_weights
def _implementation(self, func, dim, **kwargs):
raise NotImplementedError("Use `Dataset.weighted` or `DataArray.weighted`")
def sum_of_weights(
self,
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
keep_attrs: Optional[bool] = None,
) -> Union["DataArray", "Dataset"]:
return self._implementation(
self._sum_of_weights, dim=dim, keep_attrs=keep_attrs
)
def sum(
self,
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
skipna: Optional[bool] = None,
keep_attrs: Optional[bool] = None,
) -> Union["DataArray", "Dataset"]:
return self._implementation(
self._weighted_sum, dim=dim, skipna=skipna, keep_attrs=keep_attrs
)
def mean(
self,
dim: Optional[Union[Hashable, Iterable[Hashable]]] = None,
skipna: Optional[bool] = None,
keep_attrs: Optional[bool] = None,
) -> Union["DataArray", "Dataset"]:
return self._implementation(
self._weighted_mean, dim=dim, skipna=skipna, keep_attrs=keep_attrs
)
def __repr__(self):
"""provide a nice str repr of our Weighted object"""
klass = self.__class__.__name__
weight_dims = ", ".join(self.weights.dims)
return f"{klass} with weights along dimensions: {weight_dims}"
class DataArrayWeighted(Weighted):
def _implementation(self, func, dim, **kwargs):
keep_attrs = kwargs.pop("keep_attrs")
if keep_attrs is None:
keep_attrs = _get_keep_attrs(default=False)
weighted = func(self.obj, dim=dim, **kwargs)
if keep_attrs:
weighted.attrs = self.obj.attrs
return weighted
class DatasetWeighted(Weighted):
def _implementation(self, func, dim, **kwargs) -> "Dataset":
return self.obj.map(func, dim=dim, **kwargs)
def _inject_docstring(cls, cls_name):
cls.sum_of_weights.__doc__ = _SUM_OF_WEIGHTS_DOCSTRING.format(cls=cls_name)
cls.sum.__doc__ = _WEIGHTED_REDUCE_DOCSTRING_TEMPLATE.format(
cls=cls_name, fcn="sum", on_zero="0"
)
cls.mean.__doc__ = _WEIGHTED_REDUCE_DOCSTRING_TEMPLATE.format(
cls=cls_name, fcn="mean", on_zero="NaN"
)
_inject_docstring(DataArrayWeighted, "DataArray")
_inject_docstring(DatasetWeighted, "Dataset")
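# --- Hedged usage sketch (added for illustration; not part of the original
# module). Values are made up; assumes xarray and numpy are installed.
if __name__ == "__main__":
    import numpy as np
    import xarray as xr

    da = xr.DataArray([1.0, 2.0, np.nan], dims="x")
    weights = xr.DataArray([0.25, 0.5, 0.25], dims="x")
    # The NaN is skipped and its weight dropped from the denominator:
    # (0.25 * 1.0 + 0.5 * 2.0) / (0.25 + 0.5) = 1.666...
    print(da.weighted(weights).mean("x"))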
|
from datetime import timedelta
import pytest
from homeassistant.components import google_domains
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from tests.common import async_fire_time_changed
DOMAIN = "test.example.com"
USERNAME = "abc123"
PASSWORD = "xyz789"
UPDATE_URL = f"https://{USERNAME}:{PASSWORD}@domains.google.com/nic/update"
@pytest.fixture
def setup_google_domains(hass, aioclient_mock):
"""Fixture that sets up NamecheapDNS."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="ok 0.0.0.0")
hass.loop.run_until_complete(
async_setup_component(
hass,
google_domains.DOMAIN,
{
"google_domains": {
"domain": DOMAIN,
"username": USERNAME,
"password": PASSWORD,
}
},
)
)
async def test_setup(hass, aioclient_mock):
"""Test setup works if update passes."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nochg 0.0.0.0")
result = await async_setup_component(
hass,
google_domains.DOMAIN,
{
"google_domains": {
"domain": DOMAIN,
"username": USERNAME,
"password": PASSWORD,
}
},
)
assert result
assert aioclient_mock.call_count == 1
async_fire_time_changed(hass, utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
assert aioclient_mock.call_count == 2
async def test_setup_fails_if_update_fails(hass, aioclient_mock):
"""Test setup fails if first update fails."""
aioclient_mock.get(UPDATE_URL, params={"hostname": DOMAIN}, text="nohost")
result = await async_setup_component(
hass,
google_domains.DOMAIN,
{
"google_domains": {
"domain": DOMAIN,
"username": USERNAME,
"password": PASSWORD,
}
},
)
assert not result
assert aioclient_mock.call_count == 1
|
import os
import os.path
import zipfile
import logging
import pytest
from PyQt5.QtCore import QUrl
from qutebrowser.components import adblock
from qutebrowser.utils import urlmatch
from helpers import utils
pytestmark = pytest.mark.usefixtures('qapp')
# TODO See ../utils/test_standarddirutils for OSError and caplog assertion
WHITELISTED_HOSTS = ('qutebrowser.org', 'mediumhost.io', 'http://*.edu')
BLOCKLIST_HOSTS = ('localhost',
'mediumhost.io',
'malware.badhost.org',
'4-verybadhost.com',
'ads.worsthostever.net')
CLEAN_HOSTS = ('goodhost.gov', 'verygoodhost.com')
URLS_TO_CHECK = ('http://localhost',
'http://mediumhost.io',
'ftp://malware.badhost.org',
'http://4-verybadhost.com',
'http://ads.worsthostever.net',
'http://goodhost.gov',
'ftp://verygoodhost.com',
'http://qutebrowser.org',
'http://veryverygoodhost.edu')
@pytest.fixture
def host_blocker_factory(config_tmpdir, data_tmpdir, download_stub,
config_stub):
def factory():
return adblock.HostBlocker(config_dir=config_tmpdir,
data_dir=data_tmpdir)
return factory
def create_zipfile(directory, files, zipname='test'):
"""Return a path to a newly created zip file.
Args:
directory: path object where to create the zip file.
files: list of filenames (relative to directory) to each file to add.
zipname: name to give to the zip file.
"""
zipfile_path = directory / zipname + '.zip'
with zipfile.ZipFile(str(zipfile_path), 'w') as new_zipfile:
for file_path in files:
new_zipfile.write(str(directory / file_path),
arcname=os.path.basename(str(file_path)))
# Removes path from file name
return str(zipname + '.zip')
def create_blocklist(directory, blocked_hosts=BLOCKLIST_HOSTS,
name='hosts', line_format='one_per_line'):
"""Return a path to a blocklist file.
Args:
directory: path object where to create the blocklist file
blocked_hosts: an iterable of string hosts to add to the blocklist
name: name to give to the blocklist file
line_format: 'etc_hosts' --> /etc/hosts format
'one_per_line' --> one host per line format
'not_correct' --> Not a correct hosts file format.
"""
blocklist_file = directory / name
with blocklist_file.open('w', encoding='UTF-8') as blocklist:
# ensure comments are ignored when processing blocklist
blocklist.write('# Blocked Hosts List #\n\n')
if line_format == 'etc_hosts': # /etc/hosts like format
for host in blocked_hosts:
blocklist.write('127.0.0.1 ' + host + '\n')
elif line_format == 'one_per_line':
for host in blocked_hosts:
blocklist.write(host + '\n')
elif line_format == 'not_correct':
for host in blocked_hosts:
blocklist.write(host + ' This is not a correct hosts file\n')
else:
raise ValueError('Incorrect line_format argument')
return name
def assert_urls(host_blocker, blocked=BLOCKLIST_HOSTS,
whitelisted=WHITELISTED_HOSTS, urls_to_check=URLS_TO_CHECK):
"""Test if Urls to check are blocked or not by HostBlocker.
Ensure URLs in 'blocked' and not in 'whitelisted' are blocked.
All other URLs must not be blocked.
localhost is an example of a special case that shouldn't be blocked.
"""
whitelisted = list(whitelisted) + ['localhost']
for str_url in urls_to_check:
url = QUrl(str_url)
host = url.host()
if host in blocked and host not in whitelisted:
assert host_blocker._is_blocked(url)
else:
assert not host_blocker._is_blocked(url)
def blocklist_to_url(filename):
"""Get an example.com-URL with the given filename as path."""
assert not os.path.isabs(filename), filename
url = QUrl('http://example.com/')
url.setPath('/' + filename)
assert url.isValid(), url.errorString()
return url
def generic_blocklists(directory):
"""Return a generic list of files to be used in hosts-block-lists option.
This list contains :
- a remote zip file with 1 hosts file and 2 useless files
- a remote zip file with only useless files
(Should raise a FileNotFoundError)
- a remote zip file with only one valid hosts file
- a local text file with valid hosts
- a remote text file without valid hosts format.
"""
# remote zip file with 1 hosts file and 2 useless files
file1 = create_blocklist(directory, blocked_hosts=CLEAN_HOSTS,
name='README', line_format='not_correct')
file2 = create_blocklist(directory, blocked_hosts=BLOCKLIST_HOSTS[:3],
name='hosts', line_format='etc_hosts')
file3 = create_blocklist(directory, blocked_hosts=CLEAN_HOSTS,
name='false_positive', line_format='one_per_line')
files_to_zip = [file1, file2, file3]
blocklist1 = blocklist_to_url(
create_zipfile(directory, files_to_zip, 'block1'))
# remote zip file without file named hosts
# (Should raise a FileNotFoundError)
file1 = create_blocklist(directory, blocked_hosts=CLEAN_HOSTS,
name='md5sum', line_format='etc_hosts')
file2 = create_blocklist(directory, blocked_hosts=CLEAN_HOSTS,
name='README', line_format='not_correct')
file3 = create_blocklist(directory, blocked_hosts=CLEAN_HOSTS,
name='false_positive', line_format='one_per_line')
files_to_zip = [file1, file2, file3]
blocklist2 = blocklist_to_url(
create_zipfile(directory, files_to_zip, 'block2'))
# remote zip file with only one valid hosts file inside
file1 = create_blocklist(directory, blocked_hosts=[BLOCKLIST_HOSTS[3]],
name='malwarelist', line_format='etc_hosts')
blocklist3 = blocklist_to_url(create_zipfile(directory, [file1], 'block3'))
# local text file with valid hosts
blocklist4 = QUrl.fromLocalFile(str(directory / create_blocklist(
directory, blocked_hosts=[BLOCKLIST_HOSTS[4]],
name='mycustomblocklist', line_format='one_per_line')))
assert blocklist4.isValid(), blocklist4.errorString()
# remote text file without valid hosts format
blocklist5 = blocklist_to_url(create_blocklist(
directory, blocked_hosts=CLEAN_HOSTS, name='notcorrectlist',
line_format='not_correct'))
return [blocklist1.toString(), blocklist2.toString(),
blocklist3.toString(), blocklist4.toString(),
blocklist5.toString()]
def test_disabled_blocking_update(config_stub, tmpdir, caplog,
host_blocker_factory):
"""Ensure no URL is blocked when host blocking is disabled."""
config_stub.val.content.host_blocking.lists = generic_blocklists(tmpdir)
config_stub.val.content.host_blocking.enabled = False
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
while host_blocker._in_progress:
current_download = host_blocker._in_progress[0]
with caplog.at_level(logging.ERROR):
current_download.successful = True
current_download.finished.emit()
host_blocker.read_hosts()
for str_url in URLS_TO_CHECK:
assert not host_blocker._is_blocked(QUrl(str_url))
def test_disabled_blocking_per_url(config_stub, host_blocker_factory):
example_com = 'https://www.example.com/'
config_stub.val.content.host_blocking.lists = []
pattern = urlmatch.UrlPattern(example_com)
config_stub.set_obj('content.host_blocking.enabled', False,
pattern=pattern)
    url = QUrl('https://blocked.example.com')
host_blocker = host_blocker_factory()
host_blocker._blocked_hosts.add(url.host())
assert host_blocker._is_blocked(url)
assert not host_blocker._is_blocked(url, first_party_url=QUrl(example_com))
def test_no_blocklist_update(config_stub, download_stub, host_blocker_factory):
"""Ensure no URL is blocked when no block list exists."""
config_stub.val.content.host_blocking.lists = None
config_stub.val.content.host_blocking.enabled = True
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
host_blocker.read_hosts()
for dl in download_stub.downloads:
dl.successful = True
for str_url in URLS_TO_CHECK:
assert not host_blocker._is_blocked(QUrl(str_url))
def test_successful_update(config_stub, tmpdir, caplog, host_blocker_factory):
"""Ensure hosts from host_blocking.lists are blocked after an update."""
config_stub.val.content.host_blocking.lists = generic_blocklists(tmpdir)
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = None
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
# Simulate download is finished
while host_blocker._in_progress:
current_download = host_blocker._in_progress[0]
with caplog.at_level(logging.ERROR):
current_download.successful = True
current_download.finished.emit()
host_blocker.read_hosts()
assert_urls(host_blocker, whitelisted=[])
def test_parsing_multiple_hosts_on_line(host_blocker_factory):
"""Ensure multiple hosts on a line get parsed correctly."""
host_blocker = host_blocker_factory()
bytes_host_line = ' '.join(BLOCKLIST_HOSTS).encode('utf-8')
parsed_hosts = host_blocker._read_hosts_line(bytes_host_line)
host_blocker._blocked_hosts |= parsed_hosts
assert_urls(host_blocker, whitelisted=[])
@pytest.mark.parametrize('ip, host', [
('127.0.0.1', 'localhost'),
    ('127.0.0.1', 'localhost.localdomain'),
    ('127.0.0.1', 'local'),
    ('255.255.255.255', 'broadcasthost'),
    ('::1', 'localhost'),
    ('::1', 'ip6-localhost'),
    ('::1', 'ip6-loopback'),
    ('fe80::1%lo0', 'localhost'),
    ('ff00::0', 'ip6-localnet'),
    ('ff00::0', 'ip6-mcastprefix'),
    ('ff02::1', 'ip6-allnodes'),
    ('ff02::2', 'ip6-allrouters'),
    ('ff02::3', 'ip6-allhosts'),
    ('0.0.0.0', '0.0.0.0'),
('127.0.1.1', 'myhostname'),
('127.0.0.53', 'myhostname'),
])
def test_whitelisted_lines(host_blocker_factory, ip, host):
"""Make sure we don't block hosts we don't want to."""
host_blocker = host_blocker_factory()
line = ('{} {}'.format(ip, host)).encode('ascii')
parsed_hosts = host_blocker._read_hosts_line(line)
assert host not in parsed_hosts
def test_failed_dl_update(config_stub, tmpdir, caplog, host_blocker_factory):
"""One blocklist fails to download.
Ensure hosts from this list are not blocked.
"""
dl_fail_blocklist = blocklist_to_url(create_blocklist(
tmpdir, blocked_hosts=CLEAN_HOSTS, name='download_will_fail',
line_format='one_per_line'))
hosts_to_block = (generic_blocklists(tmpdir) +
[dl_fail_blocklist.toString()])
config_stub.val.content.host_blocking.lists = hosts_to_block
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = None
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
while host_blocker._in_progress:
current_download = host_blocker._in_progress[0]
# if current download is the file we want to fail, make it fail
if current_download.name == dl_fail_blocklist.path():
current_download.successful = False
else:
current_download.successful = True
with caplog.at_level(logging.ERROR):
current_download.finished.emit()
host_blocker.read_hosts()
assert_urls(host_blocker, whitelisted=[])
@pytest.mark.parametrize('location', ['content', 'comment'])
def test_invalid_utf8(config_stub, tmpdir, caplog, host_blocker_factory,
location):
"""Make sure invalid UTF-8 is handled correctly.
See https://github.com/qutebrowser/qutebrowser/issues/2301
"""
blocklist = tmpdir / 'blocklist'
if location == 'comment':
blocklist.write_binary(b'# nbsp: \xa0\n')
else:
assert location == 'content'
blocklist.write_binary(b'https://www.example.org/\xa0')
for url in BLOCKLIST_HOSTS:
blocklist.write(url + '\n', mode='a')
url = blocklist_to_url('blocklist')
config_stub.val.content.host_blocking.lists = [url.toString()]
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = None
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
current_download = host_blocker._in_progress[0]
if location == 'content':
with caplog.at_level(logging.ERROR):
current_download.successful = True
current_download.finished.emit()
expected = (r"Failed to decode: "
r"b'https://www.example.org/\xa0localhost")
assert caplog.messages[-2].startswith(expected)
else:
current_download.successful = True
current_download.finished.emit()
host_blocker.read_hosts()
assert_urls(host_blocker, whitelisted=[])
def test_invalid_utf8_compiled(config_stub, config_tmpdir, data_tmpdir,
monkeypatch, caplog, host_blocker_factory):
"""Make sure invalid UTF-8 in the compiled file is handled."""
config_stub.val.content.host_blocking.lists = []
# Make sure the HostBlocker doesn't delete blocked-hosts in __init__
monkeypatch.setattr(adblock.HostBlocker, 'update_files',
lambda _self: None)
(config_tmpdir / 'blocked-hosts').write_binary(
b'https://www.example.org/\xa0')
(data_tmpdir / 'blocked-hosts').ensure()
host_blocker = host_blocker_factory()
with caplog.at_level(logging.ERROR):
host_blocker.read_hosts()
assert caplog.messages[-1] == "Failed to read host blocklist!"
def test_blocking_with_whitelist(config_stub, data_tmpdir, host_blocker_factory):
"""Ensure hosts in content.host_blocking.whitelist are never blocked."""
    # Simulate adblock_update having already been run by creating a file
    # named blocked-hosts. Exclude localhost from it, as localhost is never
    # blocked via list.
filtered_blocked_hosts = BLOCKLIST_HOSTS[1:]
blocklist = create_blocklist(data_tmpdir,
blocked_hosts=filtered_blocked_hosts,
name='blocked-hosts',
line_format='one_per_line')
config_stub.val.content.host_blocking.lists = [blocklist]
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = list(WHITELISTED_HOSTS)
host_blocker = host_blocker_factory()
host_blocker.read_hosts()
assert_urls(host_blocker)
def test_config_change_initial(config_stub, tmpdir, host_blocker_factory):
"""Test emptying host_blocking.lists with existing blocked_hosts.
- A blocklist is present in host_blocking.lists and blocked_hosts is
populated
    - User quits qutebrowser and empties host_blocking.lists in their config
- User restarts qutebrowser, does adblock-update
"""
create_blocklist(tmpdir, blocked_hosts=BLOCKLIST_HOSTS,
name='blocked-hosts', line_format='one_per_line')
config_stub.val.content.host_blocking.lists = None
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = None
host_blocker = host_blocker_factory()
host_blocker.read_hosts()
for str_url in URLS_TO_CHECK:
assert not host_blocker._is_blocked(QUrl(str_url))
def test_config_change(config_stub, tmpdir, host_blocker_factory):
"""Ensure blocked-hosts resets if host-block-list is changed to None."""
filtered_blocked_hosts = BLOCKLIST_HOSTS[1:] # Exclude localhost
blocklist = blocklist_to_url(create_blocklist(
tmpdir, blocked_hosts=filtered_blocked_hosts, name='blocked-hosts',
line_format='one_per_line'))
config_stub.val.content.host_blocking.lists = [blocklist.toString()]
config_stub.val.content.host_blocking.enabled = True
config_stub.val.content.host_blocking.whitelist = None
host_blocker = host_blocker_factory()
host_blocker.read_hosts()
config_stub.val.content.host_blocking.lists = None
host_blocker.read_hosts()
for str_url in URLS_TO_CHECK:
assert not host_blocker._is_blocked(QUrl(str_url))
def test_add_directory(config_stub, tmpdir, host_blocker_factory):
"""Ensure adblocker can import all files in a directory."""
blocklist_hosts2 = []
for i in BLOCKLIST_HOSTS[1:]:
blocklist_hosts2.append('1' + i)
create_blocklist(tmpdir, blocked_hosts=BLOCKLIST_HOSTS,
name='blocked-hosts', line_format='one_per_line')
create_blocklist(tmpdir, blocked_hosts=blocklist_hosts2,
name='blocked-hosts2', line_format='one_per_line')
config_stub.val.content.host_blocking.lists = [tmpdir.strpath]
config_stub.val.content.host_blocking.enabled = True
host_blocker = host_blocker_factory()
host_blocker.adblock_update()
assert len(host_blocker._blocked_hosts) == len(blocklist_hosts2) * 2
def test_adblock_benchmark(data_tmpdir, benchmark, host_blocker_factory):
blocked_hosts = data_tmpdir / 'blocked-hosts'
blocked_hosts.write_text('\n'.join(utils.blocked_hosts()),
encoding='utf-8')
url = QUrl('https://www.example.org/')
blocker = host_blocker_factory()
blocker.read_hosts()
assert blocker._blocked_hosts
benchmark(lambda: blocker._is_blocked(url))
|
import fnmatch
from functools import partial
import ipaddress
import logging
import socket
import voluptuous as vol
from zeroconf import (
DNSPointer,
DNSRecord,
Error as ZeroconfError,
InterfaceChoice,
IPVersion,
NonUniqueNameException,
ServiceBrowser,
ServiceInfo,
ServiceStateChange,
Zeroconf,
)
from homeassistant import util
from homeassistant.const import (
ATTR_NAME,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
__version__,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.helpers.singleton import singleton
from homeassistant.loader import async_get_homekit, async_get_zeroconf
from .usage import install_multiple_zeroconf_catcher
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ATTR_HOST = "host"
ATTR_PORT = "port"
ATTR_HOSTNAME = "hostname"
ATTR_TYPE = "type"
ATTR_PROPERTIES = "properties"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
HOMEKIT_TYPE = "_hap._tcp.local."
CONF_DEFAULT_INTERFACE = "default_interface"
CONF_IPV6 = "ipv6"
DEFAULT_DEFAULT_INTERFACE = False
DEFAULT_IPV6 = True
HOMEKIT_PROPERTIES = "properties"
HOMEKIT_PAIRED_STATUS_FLAG = "sf"
HOMEKIT_MODEL = "md"
# Property key=value has a max length of 255
# so we use 230 to leave space for key=
MAX_PROPERTY_VALUE_LEN = 230
# DNS label max length
MAX_NAME_LEN = 63
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(
CONF_DEFAULT_INTERFACE, default=DEFAULT_DEFAULT_INTERFACE
): cv.boolean,
vol.Optional(CONF_IPV6, default=DEFAULT_IPV6): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
@singleton(DOMAIN)
async def async_get_instance(hass):
"""Zeroconf instance to be shared with other integrations that use it."""
return await _async_get_instance(hass)
async def _async_get_instance(hass, **zcargs):
logging.getLogger("zeroconf").setLevel(logging.NOTSET)
zeroconf = await hass.async_add_executor_job(partial(HaZeroconf, **zcargs))
install_multiple_zeroconf_catcher(zeroconf)
def _stop_zeroconf(_):
"""Stop Zeroconf."""
zeroconf.ha_close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_zeroconf)
return zeroconf
class HaServiceBrowser(ServiceBrowser):
"""ServiceBrowser that only consumes DNSPointer records."""
def update_record(self, zc: "Zeroconf", now: float, record: DNSRecord) -> None:
"""Pre-Filter update_record to DNSPointers for the configured type."""
#
        # Each ServiceBrowser currently runs in its own thread which
# processes every A or AAAA record update per instance.
#
# As the list of zeroconf names we watch for grows, each additional
# ServiceBrowser would process all the A and AAAA updates on the network.
#
        # To avoid overwhelming the system we pre-filter here and only process
# DNSPointers for the configured record name (type)
#
if record.name not in self.types or not isinstance(record, DNSPointer):
return
super().update_record(zc, now, record)
class HaZeroconf(Zeroconf):
"""Zeroconf that cannot be closed."""
def close(self):
"""Fake method to avoid integrations closing it."""
ha_close = Zeroconf.close
async def async_setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
zc_config = config.get(DOMAIN, {})
zc_args = {}
if zc_config.get(CONF_DEFAULT_INTERFACE, DEFAULT_DEFAULT_INTERFACE):
zc_args["interfaces"] = InterfaceChoice.Default
if not zc_config.get(CONF_IPV6, DEFAULT_IPV6):
zc_args["ip_version"] = IPVersion.V4Only
zeroconf = hass.data[DOMAIN] = await _async_get_instance(hass, **zc_args)
async def _async_zeroconf_hass_start(_event):
"""Expose Home Assistant on zeroconf when it starts.
        Wait until started; otherwise HTTP is not yet up and running.
"""
uuid = await hass.helpers.instance_id.async_get()
await hass.async_add_executor_job(
_register_hass_zc_service, hass, zeroconf, uuid
)
async def _async_zeroconf_hass_started(_event):
"""Start the service browser."""
await _async_start_zeroconf_browser(hass, zeroconf)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_zeroconf_hass_start)
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STARTED, _async_zeroconf_hass_started
)
return True
def _register_hass_zc_service(hass, zeroconf, uuid):
# Get instance UUID
valid_location_name = _truncate_location_name_to_valid(hass.config.location_name)
params = {
"location_name": valid_location_name,
"uuid": uuid,
"version": __version__,
"external_url": "",
"internal_url": "",
# Old base URL, for backward compatibility
"base_url": "",
# Always needs authentication
"requires_api_password": True,
}
    # Get instance URLs
try:
params["external_url"] = get_url(hass, allow_internal=False)
except NoURLAvailableError:
pass
try:
params["internal_url"] = get_url(hass, allow_external=False)
except NoURLAvailableError:
pass
# Set old base URL based on external or internal
params["base_url"] = params["external_url"] or params["internal_url"]
host_ip = util.get_local_ip()
try:
host_ip_pton = socket.inet_pton(socket.AF_INET, host_ip)
except OSError:
host_ip_pton = socket.inet_pton(socket.AF_INET6, host_ip)
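    # Note: inet_pton(AF_INET, ...) raises OSError for IPv6 literals, so a
    # host with an IPv6-only local address is re-parsed as AF_INET6 above.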
_suppress_invalid_properties(params)
info = ServiceInfo(
ZEROCONF_TYPE,
name=f"{valid_location_name}.{ZEROCONF_TYPE}",
server=f"{uuid}.local.",
addresses=[host_ip_pton],
port=hass.http.server_port,
properties=params,
)
_LOGGER.info("Starting Zeroconf broadcast")
try:
zeroconf.register_service(info)
except NonUniqueNameException:
_LOGGER.error(
"Home Assistant instance with identical name present in the local network"
)
async def _async_start_zeroconf_browser(hass, zeroconf):
"""Start the zeroconf browser."""
zeroconf_types = await async_get_zeroconf(hass)
homekit_models = await async_get_homekit(hass)
types = list(zeroconf_types)
if HOMEKIT_TYPE not in zeroconf_types:
types.append(HOMEKIT_TYPE)
def service_update(zeroconf, service_type, name, state_change):
"""Service state changed."""
nonlocal zeroconf_types
nonlocal homekit_models
if state_change != ServiceStateChange.Added:
return
try:
service_info = zeroconf.get_service_info(service_type, name)
except ZeroconfError:
_LOGGER.exception("Failed to get info for device %s", name)
return
if not service_info:
# Prevent the browser thread from collapsing as
# service_info can be None
_LOGGER.debug("Failed to get info for device %s", name)
return
info = info_from_service(service_info)
if not info:
# Prevent the browser thread from collapsing
_LOGGER.debug("Failed to get addresses for device %s", name)
return
_LOGGER.debug("Discovered new device %s %s", name, info)
# If we can handle it as a HomeKit discovery, we do that here.
if service_type == HOMEKIT_TYPE:
discovery_was_forwarded = handle_homekit(hass, homekit_models, info)
# Continue on here as homekit_controller
# still needs to get updates on devices
# so it can see when the 'c#' field is updated.
#
# We only send updates to homekit_controller
# if the device is already paired in order to avoid
# offering a second discovery for the same device
if (
discovery_was_forwarded
and HOMEKIT_PROPERTIES in info
and HOMEKIT_PAIRED_STATUS_FLAG in info[HOMEKIT_PROPERTIES]
):
try:
                    # 0 means paired and not discoverable by iOS clients
if int(info[HOMEKIT_PROPERTIES][HOMEKIT_PAIRED_STATUS_FLAG]):
return
except ValueError:
# HomeKit pairing status unknown
# likely bad homekit data
return
for entry in zeroconf_types[service_type]:
if len(entry) > 1:
if "macaddress" in entry:
if "properties" not in info:
continue
if "macaddress" not in info["properties"]:
continue
if not fnmatch.fnmatch(
info["properties"]["macaddress"], entry["macaddress"]
):
continue
if "name" in entry:
if "name" not in info:
continue
if not fnmatch.fnmatch(info["name"], entry["name"]):
continue
hass.add_job(
hass.config_entries.flow.async_init(
entry["domain"], context={"source": DOMAIN}, data=info
)
)
_LOGGER.debug("Starting Zeroconf browser")
HaServiceBrowser(zeroconf, types, handlers=[service_update])
def handle_homekit(hass, homekit_models, info) -> bool:
"""Handle a HomeKit discovery.
Return if discovery was forwarded.
"""
model = None
props = info.get(HOMEKIT_PROPERTIES, {})
for key in props:
if key.lower() == HOMEKIT_MODEL:
model = props[key]
break
if model is None:
return False
for test_model in homekit_models:
if (
model != test_model
and not model.startswith(f"{test_model} ")
and not model.startswith(f"{test_model}-")
):
continue
hass.add_job(
hass.config_entries.flow.async_init(
homekit_models[test_model], context={"source": "homekit"}, data=info
)
)
return True
return False
def info_from_service(service):
"""Return prepared info from mDNS entries."""
properties = {"_raw": {}}
for key, value in service.properties.items():
# See https://ietf.org/rfc/rfc6763.html#section-6.4 and
# https://ietf.org/rfc/rfc6763.html#section-6.5 for expected encodings
# for property keys and values
try:
key = key.decode("ascii")
except UnicodeDecodeError:
_LOGGER.debug(
"Ignoring invalid key provided by [%s]: %s", service.name, key
)
continue
properties["_raw"][key] = value
try:
if isinstance(value, bytes):
properties[key] = value.decode("utf-8")
except UnicodeDecodeError:
pass
if not service.addresses:
return None
address = service.addresses[0]
info = {
ATTR_HOST: str(ipaddress.ip_address(address)),
ATTR_PORT: service.port,
ATTR_HOSTNAME: service.server,
ATTR_TYPE: service.type,
ATTR_NAME: service.name,
ATTR_PROPERTIES: properties,
}
return info
def _suppress_invalid_properties(properties):
"""Suppress any properties that will cause zeroconf to fail to startup."""
for prop, prop_value in properties.items():
if not isinstance(prop_value, str):
continue
if len(prop_value.encode("utf-8")) > MAX_PROPERTY_VALUE_LEN:
_LOGGER.error(
"The property '%s' was suppressed because it is longer than the maximum length of %d bytes: %s",
prop,
MAX_PROPERTY_VALUE_LEN,
prop_value,
)
properties[prop] = ""
def _truncate_location_name_to_valid(location_name):
"""Truncate or return the location name usable for zeroconf."""
if len(location_name.encode("utf-8")) < MAX_NAME_LEN:
return location_name
_LOGGER.warning(
"The location name was truncated because it is longer than the maximum length of %d bytes: %s",
MAX_NAME_LEN,
location_name,
)
return location_name.encode("utf-8")[:MAX_NAME_LEN].decode("utf-8", "ignore")
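# Example: a 40-character location name made of 2-byte UTF-8 characters is 80
# bytes long, so the slice keeps the first 63 bytes and errors="ignore" drops
# the trailing partial character, returning 31 whole characters.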
|
from django.utils.translation import pgettext_lazy
from weblate_language_data import languages
from weblate_language_data.ambiguous import AMBIGUOUS
NO_CODE_LANGUAGES = {lang[0] for lang in languages.LANGUAGES}
UNDERSCORE_EXCEPTIONS = {"nb_NO", "zh_Hant", "zh_Hans", "be_Latn", "ro_MD", "pt_BR"}
AT_EXCEPTIONS = {"ca@valencia"}
def is_basic(code):
if code in AMBIGUOUS:
return False
if "_" in code:
return code in UNDERSCORE_EXCEPTIONS
return "@" not in code or code in AT_EXCEPTIONS
BASIC_LANGUAGES = {lang for lang in NO_CODE_LANGUAGES if is_basic(lang)}
# Following variables are used to map Gettext plural formulas
# to one/few/many/other-like rules
ONE_OTHER_PLURALS = (
"n==1 || n%10==1 ? 0 : 1",
"n != 1",
"(n != 1)",
"n > 1",
"(n > 1)",
"n >= 2 && (n < 11 || n > 99)",
"n % 10 != 1 || n % 100 == 11",
"(n % 10 == 1 && n % 100 != 11) ? 0 : 1",
"n != 1 && n != 2 && n != 3 && (n % 10 == 4 || n % 10 == 6 || n % 10 == 9)",
"(n==0 || n==1)",
)
TWO_OTHER_PLURALS = ("(n==2) ? 1 : 0",)
ONE_FEW_OTHER_PLURALS = (
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2",
"(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2",
"n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2",
"n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2",
"n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2",
"(n == 1) ? 0 : ((n == 0 || n != 1 && n % 100 >= 1 && n % 100 <= 19) ? 1 : 2)",
"(n == 0 || n == 1) ? 0 : ((n >= 2 && n <= 10) ? 1 : 2)",
"(n % 10 == 1 && (n % 100 < 11 || n % 100 > 19)) ? 0 : ((n % 10 >= 2 && n % 10 <= 9 && (n % 100 < 11 || n % 100 > 19)) ? 1 : 2)",
"(n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2)",
"(n == 1) ? 0 : ((n == 0 || n % 100 >= 2 && n % 100 <= 19) ? 1 : 2)",
"(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)",
)
ZERO_ONE_OTHER_PLURALS = (
"n==0 ? 0 : n==1 ? 1 : 2",
"(n == 0) ? 0 : ((n == 1) ? 1 : 2)",
"(n % 10 == 0 || n % 100 >= 11 && n % 100 <= 19) ? 0 : ((n % 10 == 1 && n % 100 != 11) ? 1 : 2)",
)
ONE_TWO_OTHER_PLURALS = ("n==1 ? 0 : n==2 ? 1 : 2", "(n == 1) ? 0 : ((n == 2) ? 1 : 2)")
ONE_OTHER_TWO_PLURALS = ("n==1 ? 0 : n==2 ? 2 : 1",)
ONE_TWO_THREE_OTHER_PLURALS = ("(n==1) ? 0 : (n==2) ? 1 : (n == 3) ? 2 : 3",)
ONE_TWO_FEW_OTHER_PLURALS = (
"(n==1 || n==11) ? 0 : (n==2 || n==12) ? 1 : (n > 2 && n < 20) ? 2 : 3",
"n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3",
"(n % 10 == 1) ? 0 : ((n % 10 == 2) ? 1 : ((n % 100 == 0 || n % 100 == 20 || n % 100 == 40 || n % 100 == 60 || n % 100 == 80) ? 2 : 3))",
"(n % 100 == 1) ? 0 : ((n % 100 == 2) ? 1 : ((n % 100 == 3 || n % 100 == 4) ? 2 : 3))",
"(n == 1) ? 0 : ((n == 2) ? 1 : ((n > 10 && n % 10 == 0) ? 2 : 3))",
"(n == 1) ? 0 : ((n == 2) ? 1 : ((n == 10) ? 2 : 3))",
"(n==1) ? 0 : (n==2) ? 1 : (n != 8 && n != 11) ? 2 : 3",
)
OTHER_ONE_TWO_FEW_PLURALS = (
"(n%100==1 ? 1 : n%100==2 ? 2 : n%100==3 || n%100==4 ? 3 : 0)",
)
ONE_TWO_FEW_MANY_OTHER_PLURALS = (
"n==1 ? 0 : n==2 ? 1 : n<7 ? 2 : n<11 ? 3 : 4",
"n==1 ? 0 : n==2 ? 1 : (n>2 && n<7) ? 2 :(n>6 && n<11) ? 3 : 4",
"(n % 10 == 1 && n % 100 != 11 && n % 100 != 71 && n % 100 != 91) ? 0 : ((n % 10 == 2 && n % 100 != 12 && n % 100 != 72 && n % 100 != 92) ? 1 : ((((n % 10 == 3 || n % 10 == 4) || n % 10 == 9) && (n % 100 < 10 || n % 100 > 19) && (n % 100 < 70 || n % 100 > 79) && (n % 100 < 90 || n % 100 > 99)) ? 2 : ((n != 0 && n % 1000000 == 0) ? 3 : 4)))",
)
ONE_FEW_MANY_OTHER_PLURALS = (
"n==1 ? 0 : n==0 || ( n%100>1 && n%100<11) ? 1 : (n%100>10 && n%100<20 ) ? 2 : 3",
"n==1 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : n%10==0 || (n%100>10 && n%100<20) ? 2 : 3",
"n==1 ? 3 : n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2",
)
ONE_OTHER_ZERO_PLURALS = ("n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2",)
ZERO_ONE_TWO_FEW_MANY_OTHER = (
"(n==0) ? 0 : (n==1) ? 1 : (n==2) ? 2 : (n==3) ? 3 :(n==6) ? 4 : 5",
"(n == 0) ? 0 : ((n == 1) ? 1 : ((n == 2) ? 2 : ((n % 100 >= 3 && n % 100 <= 10) ? 3 : ((n % 100 >= 11 && n % 100 <= 99) ? 4 : 5))))",
"(n == 0) ? 0 : ((n == 1) ? 1 : (((n % 100 == 2 || n % 100 == 22 || n % 100 == 42 || n % 100 == 62 || n % 100 == 82) || n % 1000 == 0 && (n % 100000 >= 1000 && n % 100000 <= 20000 || n % 100000 == 40000 || n % 100000 == 60000 || n % 100000 == 80000) || n != 0 && n % 1000000 == 100000) ? 2 : ((n % 100 == 3 || n % 100 == 23 || n % 100 == 43 || n % 100 == 63 || n % 100 == 83) ? 3 : ((n != 1 && (n % 100 == 1 || n % 100 == 21 || n % 100 == 41 || n % 100 == 61 || n % 100 == 81)) ? 4 : 5))))",
)
# Plural types definition
PLURAL_NONE = 0
PLURAL_ONE_OTHER = 1
PLURAL_ONE_FEW_OTHER = 2
PLURAL_ARABIC = 3
PLURAL_ONE_TWO_OTHER = 4
PLURAL_ONE_TWO_THREE_OTHER = 5
PLURAL_ONE_TWO_FEW_OTHER = 6
PLURAL_ONE_OTHER_ZERO = 7
PLURAL_ONE_FEW_MANY_OTHER = 8
PLURAL_TWO_OTHER = 9
PLURAL_ONE_TWO_FEW_MANY_OTHER = 10
PLURAL_ZERO_ONE_OTHER = 11
PLURAL_ZERO_ONE_TWO_FEW_MANY_OTHER = 12
PLURAL_OTHER_ONE_TWO_FEW = 13
PLURAL_ONE_OTHER_TWO = 14
PLURAL_UNKNOWN = 666
def nospace_set(source):
return {item.replace(" ", "") for item in source}
# Plural formula - type mappings
PLURAL_MAPPINGS = (
(nospace_set(ONE_OTHER_PLURALS), PLURAL_ONE_OTHER),
(nospace_set(ONE_FEW_OTHER_PLURALS), PLURAL_ONE_FEW_OTHER),
(nospace_set(ONE_TWO_OTHER_PLURALS), PLURAL_ONE_TWO_OTHER),
(nospace_set(ONE_OTHER_TWO_PLURALS), PLURAL_ONE_OTHER_TWO),
(nospace_set(ZERO_ONE_OTHER_PLURALS), PLURAL_ZERO_ONE_OTHER),
(nospace_set(ONE_TWO_FEW_OTHER_PLURALS), PLURAL_ONE_TWO_FEW_OTHER),
(nospace_set(OTHER_ONE_TWO_FEW_PLURALS), PLURAL_OTHER_ONE_TWO_FEW),
(nospace_set(ONE_TWO_THREE_OTHER_PLURALS), PLURAL_ONE_TWO_THREE_OTHER),
(nospace_set(ONE_OTHER_ZERO_PLURALS), PLURAL_ONE_OTHER_ZERO),
(nospace_set(ONE_FEW_MANY_OTHER_PLURALS), PLURAL_ONE_FEW_MANY_OTHER),
(nospace_set(TWO_OTHER_PLURALS), PLURAL_TWO_OTHER),
(nospace_set(ONE_TWO_FEW_MANY_OTHER_PLURALS), PLURAL_ONE_TWO_FEW_MANY_OTHER),
(nospace_set(ZERO_ONE_TWO_FEW_MANY_OTHER), PLURAL_ZERO_ONE_TWO_FEW_MANY_OTHER),
)
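# Illustrative helper (not part of this module's API) showing how
# PLURAL_MAPPINGS is meant to be consumed: normalize whitespace, then look
# the formula up in each nospace set.
def _plural_type_example(formula):
    """Return the PLURAL_* constant matching a Gettext plural formula."""
    stripped = formula.replace(" ", "")
    for formulas, plural_type in PLURAL_MAPPINGS:
        if stripped in formulas:
            return plural_type
    return PLURAL_UNKNOWN
# _plural_type_example("n != 1") == PLURAL_ONE_OTHER
# _plural_type_example("(n==2) ? 1 : 0") == PLURAL_TWO_OTHER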
# Plural names mapping
PLURAL_NAMES = {
PLURAL_NONE: ("",),
PLURAL_ONE_OTHER: (
pgettext_lazy("Plural form description", "Singular"),
pgettext_lazy("Plural form description", "Plural"),
),
PLURAL_ONE_FEW_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ARABIC: (
pgettext_lazy("Plural form description", "Zero"),
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Many"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ZERO_ONE_OTHER: (
pgettext_lazy("Plural form description", "Zero"),
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ONE_TWO_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ONE_OTHER_TWO: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Other"),
pgettext_lazy("Plural form description", "Two"),
),
PLURAL_ONE_TWO_THREE_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Three"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ONE_TWO_FEW_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_OTHER_ONE_TWO_FEW: (
pgettext_lazy("Plural form description", "Other"),
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Few"),
),
PLURAL_ONE_OTHER_ZERO: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Other"),
pgettext_lazy("Plural form description", "Zero"),
),
PLURAL_ONE_FEW_MANY_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Many"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ONE_TWO_FEW_MANY_OTHER: (
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Many"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_TWO_OTHER: (
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Other"),
),
PLURAL_ZERO_ONE_TWO_FEW_MANY_OTHER: (
pgettext_lazy("Plural form description", "Zero"),
pgettext_lazy("Plural form description", "One"),
pgettext_lazy("Plural form description", "Two"),
pgettext_lazy("Plural form description", "Few"),
pgettext_lazy("Plural form description", "Many"),
pgettext_lazy("Plural form description", "Other"),
),
}
|
from perfkitbenchmarker import linux_packages
DOCKER_RPM_URL = ('https://get.docker.com/rpm/1.7.0/centos-6/'
'RPMS/x86_64/docker-engine-1.7.0-1.el6.x86_64.rpm')
# Docker images that VMs are allowed to install.
_IMAGES = [
'cloudsuite/data-caching:client',
'cloudsuite/data-caching:server',
'cloudsuite/data-serving:client',
'cloudsuite/data-serving:server',
'cloudsuite/graph-analytics',
'cloudsuite/in-memory-analytics',
'cloudsuite/media-streaming:client',
'cloudsuite/media-streaming:dataset',
'cloudsuite/media-streaming:server',
'cloudsuite/movielens-dataset',
'cloudsuite/spark',
'cloudsuite/twitter-dataset-graph',
'cloudsuite/web-search:client',
'cloudsuite/web-search:server',
'cloudsuite/web-serving:db_server',
'cloudsuite/web-serving:faban_client',
'cloudsuite/web-serving:memcached_server',
'cloudsuite/web-serving:web_server',
]
class _DockerImagePackage(object):
"""Facsimile of a perfkitbenchmarker.linux_packages.<name> package."""
def __init__(self, name):
"""Creates a vm-installable package from a docker image."""
self.name = name
self.__name__ = name
def Install(self, vm):
"""Installs the docker image for self.name on the VM."""
vm.Install('docker')
vm.RemoteCommand('sudo docker pull {}'.format(self.name))
def Uninstall(self, vm):
"""Removes the docker image for self.name from the VM."""
vm.RemoteCommand('sudo docker rmi {}'.format(self.name))
def CreateImagePackages():
"""Creates _DockerImagePackage objects."""
return [(name, _DockerImagePackage(name)) for name in _IMAGES]
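# Illustrative wiring (the exact registration point varies across
# perfkitbenchmarker versions; PACKAGES is a hypothetical registry name):
#   for name, pkg in CreateImagePackages():
#       linux_packages.PACKAGES[name] = pkg
# After that, vm.Install('cloudsuite/spark') behaves like any other package.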
def YumInstall(vm):
"""Installs the docker package on the VM."""
vm.RemoteHostCommand('curl -o %s/docker.rpm -sSL %s' %
(linux_packages.INSTALL_DIR, DOCKER_RPM_URL))
vm.RemoteHostCommand('sudo yum localinstall '
'--nogpgcheck %s/docker.rpm -y' %
linux_packages.INSTALL_DIR)
vm.RemoteHostCommand('sudo service docker start')
def AptInstall(vm):
"""Installs the docker package on the VM."""
vm.RemoteHostCommand('curl -sSL https://get.docker.com/ | sh')
def IsInstalled(vm):
"""Checks whether docker is installed on the VM."""
resp, _ = vm.RemoteCommand('command -v docker',
ignore_failure=True,
suppress_warning=True)
return bool(resp.rstrip())
|
from collections import OrderedDict
import logging
from life360 import Life360Error, LoginError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import CONF_AUTHORIZATION, DOMAIN
from .helpers import get_api
_LOGGER = logging.getLogger(__name__)
DOCS_URL = "https://www.home-assistant.io/integrations/life360"
@config_entries.HANDLERS.register(DOMAIN)
class Life360ConfigFlow(config_entries.ConfigFlow):
"""Life360 integration config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize."""
self._api = get_api()
self._username = vol.UNDEFINED
self._password = vol.UNDEFINED
@property
def configured_usernames(self):
"""Return tuple of configured usernames."""
entries = self.hass.config_entries.async_entries(DOMAIN)
if entries:
            return tuple(entry.data[CONF_USERNAME] for entry in entries)
return ()
async def async_step_user(self, user_input=None):
"""Handle a user initiated config flow."""
errors = {}
if user_input is not None:
self._username = user_input[CONF_USERNAME]
self._password = user_input[CONF_PASSWORD]
try:
# pylint: disable=no-value-for-parameter
vol.Email()(self._username)
authorization = await self.hass.async_add_executor_job(
self._api.get_authorization, self._username, self._password
)
except vol.Invalid:
errors[CONF_USERNAME] = "invalid_username"
except LoginError:
errors["base"] = "invalid_auth"
except Life360Error as error:
_LOGGER.error(
"Unexpected error communicating with Life360 server: %s", error
)
errors["base"] = "unknown"
else:
if self._username in self.configured_usernames:
errors["base"] = "already_configured"
else:
return self.async_create_entry(
title=self._username,
data={
CONF_USERNAME: self._username,
CONF_PASSWORD: self._password,
CONF_AUTHORIZATION: authorization,
},
description_placeholders={"docs_url": DOCS_URL},
)
data_schema = OrderedDict()
data_schema[vol.Required(CONF_USERNAME, default=self._username)] = str
data_schema[vol.Required(CONF_PASSWORD, default=self._password)] = str
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(data_schema),
errors=errors,
description_placeholders={"docs_url": DOCS_URL},
)
async def async_step_import(self, user_input):
"""Import a config flow from configuration."""
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
try:
authorization = await self.hass.async_add_executor_job(
self._api.get_authorization, username, password
)
except LoginError:
_LOGGER.error("Invalid credentials for %s", username)
return self.async_abort(reason="invalid_auth")
except Life360Error as error:
_LOGGER.error(
"Unexpected error communicating with Life360 server: %s", error
)
return self.async_abort(reason="unknown")
return self.async_create_entry(
title=f"{username} (from configuration)",
data={
CONF_USERNAME: username,
CONF_PASSWORD: password,
CONF_AUTHORIZATION: authorization,
},
)
|
import logging
import radiotherm
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_OFF,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_HOME,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
PRECISION_HALVES,
STATE_ON,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_FAN_ACTION = "fan_action"
CONF_HOLD_TEMP = "hold_temp"
PRESET_HOLIDAY = "holiday"
PRESET_ALTERNATE = "alternate"
STATE_CIRCULATE = "circulate"
PRESET_MODES = [PRESET_HOME, PRESET_ALTERNATE, PRESET_AWAY, PRESET_HOLIDAY]
OPERATION_LIST = [HVAC_MODE_AUTO, HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF]
CT30_FAN_OPERATION_LIST = [STATE_ON, HVAC_MODE_AUTO]
CT80_FAN_OPERATION_LIST = [STATE_ON, STATE_CIRCULATE, HVAC_MODE_AUTO]
# Mappings between radiotherm JSON data codes and Home Assistant state
# flags. CODE is the thermostat integer code; each dict maps it to and
# from the corresponding Home Assistant state flag.
# Programmed temperature mode of the thermostat.
CODE_TO_TEMP_MODE = {
0: HVAC_MODE_OFF,
1: HVAC_MODE_HEAT,
2: HVAC_MODE_COOL,
3: HVAC_MODE_AUTO,
}
TEMP_MODE_TO_CODE = {v: k for k, v in CODE_TO_TEMP_MODE.items()}
# Programmed fan mode (circulate is supported by CT80 models)
CODE_TO_FAN_MODE = {0: HVAC_MODE_AUTO, 1: STATE_CIRCULATE, 2: STATE_ON}
FAN_MODE_TO_CODE = {v: k for k, v in CODE_TO_FAN_MODE.items()}
# Active thermostat state (is it heating or cooling?). In the future
# this should probably be made into heat and cool binary sensors.
CODE_TO_TEMP_STATE = {0: CURRENT_HVAC_IDLE, 1: CURRENT_HVAC_HEAT, 2: CURRENT_HVAC_COOL}
# Active fan state, i.e. whether the fan is actually running. In the
# future this should probably be made into a binary sensor for the fan.
CODE_TO_FAN_STATE = {0: FAN_OFF, 1: FAN_ON}
PRESET_MODE_TO_CODE = {"home": 0, "alternate": 1, "away": 2, "holiday": 3}
CODE_TO_PRESET_MODE = {0: "home", 1: "alternate", 2: "away", 3: "holiday"}
def round_temp(temperature):
"""Round a temperature to the resolution of the thermostat.
RadioThermostats can handle 0.5 degree temps so the input
temperature is rounded to that value and returned.
"""
return round(temperature * 2.0) / 2.0
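# Illustrative values: round_temp(72.26) -> 72.5 and round_temp(71.1) -> 71.0.
# Python's round() uses banker's rounding, so an exact midpoint such as
# round_temp(72.25) rounds to the even half degree, 72.0.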
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_HOLD_TEMP, default=False): cv.boolean,
}
)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Radio Thermostat."""
    hosts = []
    if CONF_HOST in config:
        hosts = config[CONF_HOST]
    else:
        # discover_address() returns None when no thermostat answers.
        address = radiotherm.discover.discover_address()
        if address is not None:
            hosts.append(address)
    if not hosts:
        _LOGGER.error("No Radiotherm Thermostats detected")
        return False
hold_temp = config.get(CONF_HOLD_TEMP)
tstats = []
for host in hosts:
try:
tstat = radiotherm.get_thermostat(host)
tstats.append(RadioThermostat(tstat, hold_temp))
except OSError:
_LOGGER.exception("Unable to connect to Radio Thermostat: %s", host)
add_entities(tstats, True)
class RadioThermostat(ClimateEntity):
"""Representation of a Radio Thermostat."""
def __init__(self, device, hold_temp):
"""Initialize the thermostat."""
self.device = device
self._target_temperature = None
self._current_temperature = None
self._current_humidity = None
self._current_operation = HVAC_MODE_OFF
self._name = None
self._fmode = None
self._fstate = None
self._tmode = None
self._tstate = None
self._hold_temp = hold_temp
self._hold_set = False
self._prev_temp = None
self._preset_mode = None
self._program_mode = None
self._is_away = False
# Fan circulate mode is only supported by the CT80 models.
self._is_model_ct80 = isinstance(self.device, radiotherm.thermostat.CT80)
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
async def async_added_to_hass(self):
"""Register callbacks."""
# Set the time on the device. This shouldn't be in the
# constructor because it's a network call. We can't put it in
# update() because calling it will clear any temporary mode or
# temperature in the thermostat. So add it as a future job
# for the event loop to run.
self.hass.async_add_job(self.set_time)
@property
def name(self):
"""Return the name of the Radio Thermostat."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_HALVES
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
return {ATTR_FAN_ACTION: self._fstate}
@property
def fan_modes(self):
"""List of available fan modes."""
if self._is_model_ct80:
return CT80_FAN_OPERATION_LIST
return CT30_FAN_OPERATION_LIST
@property
def fan_mode(self):
"""Return whether the fan is on."""
return self._fmode
def set_fan_mode(self, fan_mode):
"""Turn fan on/off."""
code = FAN_MODE_TO_CODE.get(fan_mode)
if code is not None:
self.device.fmode = code
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def current_humidity(self):
"""Return the current temperature."""
return self._current_humidity
@property
def hvac_mode(self):
"""Return the current operation. head, cool idle."""
return self._current_operation
@property
def hvac_modes(self):
"""Return the operation modes list."""
return OPERATION_LIST
@property
def hvac_action(self):
"""Return the current running hvac operation if supported."""
if self.hvac_mode == HVAC_MODE_OFF:
return None
return self._tstate
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
if self._program_mode == 0:
return PRESET_HOME
if self._program_mode == 1:
return PRESET_ALTERNATE
if self._program_mode == 2:
return PRESET_AWAY
if self._program_mode == 3:
return PRESET_HOLIDAY
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return PRESET_MODES
def update(self):
"""Update and validate the data from the thermostat."""
# Radio thermostats are very slow, and sometimes don't respond
# very quickly. So we need to keep the number of calls to them
# to a bare minimum or we'll hit the Home Assistant 10 sec warning. We
# have to make one call to /tstat to get temps but we'll try and
# keep the other calls to a minimum. Even with this, these
# thermostats tend to time out sometimes when they're actively
# heating or cooling.
# First time - get the name from the thermostat. This is
# normally set in the radio thermostat web app.
if self._name is None:
self._name = self.device.name["raw"]
# Request the current state from the thermostat.
try:
data = self.device.tstat["raw"]
except radiotherm.validate.RadiothermTstatError:
_LOGGER.warning(
"%s (%s) was busy (invalid value returned)",
self._name,
self.device.host,
)
return
current_temp = data["temp"]
if self._is_model_ct80:
try:
humiditydata = self.device.humidity["raw"]
except radiotherm.validate.RadiothermTstatError:
_LOGGER.warning(
"%s (%s) was busy (invalid value returned)",
self._name,
self.device.host,
)
return
self._current_humidity = humiditydata
self._program_mode = data["program_mode"]
self._preset_mode = CODE_TO_PRESET_MODE[data["program_mode"]]
# Map thermostat values into various STATE_ flags.
self._current_temperature = current_temp
self._fmode = CODE_TO_FAN_MODE[data["fmode"]]
self._fstate = CODE_TO_FAN_STATE[data["fstate"]]
self._tmode = CODE_TO_TEMP_MODE[data["tmode"]]
self._tstate = CODE_TO_TEMP_STATE[data["tstate"]]
self._current_operation = self._tmode
if self._tmode == HVAC_MODE_COOL:
self._target_temperature = data["t_cool"]
elif self._tmode == HVAC_MODE_HEAT:
self._target_temperature = data["t_heat"]
elif self._tmode == HVAC_MODE_AUTO:
# This doesn't really work - tstate is only set if the HVAC is
# active. If it's idle, we don't know what to do with the target
# temperature.
if self._tstate == CURRENT_HVAC_COOL:
self._target_temperature = data["t_cool"]
elif self._tstate == CURRENT_HVAC_HEAT:
self._target_temperature = data["t_heat"]
else:
self._current_operation = HVAC_MODE_OFF
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
temperature = round_temp(temperature)
if self._current_operation == HVAC_MODE_COOL:
self.device.t_cool = temperature
elif self._current_operation == HVAC_MODE_HEAT:
self.device.t_heat = temperature
elif self._current_operation == HVAC_MODE_AUTO:
if self._tstate == CURRENT_HVAC_COOL:
self.device.t_cool = temperature
elif self._tstate == CURRENT_HVAC_HEAT:
self.device.t_heat = temperature
# Only change the hold if requested or if hold mode was turned
# on and we haven't set it yet.
if kwargs.get("hold_changed", False) or not self._hold_set:
if self._hold_temp:
self.device.hold = 1
self._hold_set = True
else:
self.device.hold = 0
def set_time(self):
"""Set device time."""
# Calling this clears any local temperature override and
# reverts to the scheduled temperature.
now = dt_util.now()
self.device.time = {
"day": now.weekday(),
"hour": now.hour,
"minute": now.minute,
}
def set_hvac_mode(self, hvac_mode):
"""Set operation mode (auto, cool, heat, off)."""
if hvac_mode in (HVAC_MODE_OFF, HVAC_MODE_AUTO):
self.device.tmode = TEMP_MODE_TO_CODE[hvac_mode]
# Setting t_cool or t_heat automatically changes tmode.
elif hvac_mode == HVAC_MODE_COOL:
self.device.t_cool = self._target_temperature
elif hvac_mode == HVAC_MODE_HEAT:
self.device.t_heat = self._target_temperature
def set_preset_mode(self, preset_mode):
"""Set Preset mode (Home, Alternate, Away, Holiday)."""
        if preset_mode in PRESET_MODES:
self.device.program_mode = PRESET_MODE_TO_CODE[preset_mode]
else:
_LOGGER.error(
"preset_mode %s not in PRESET_MODES",
preset_mode,
)
|
import numpy as np
from matchzoo.engine.base_metric import BaseMetric, sort_and_couple
class Precision(BaseMetric):
"""Precision metric."""
ALIAS = 'precision'
def __init__(self, k: int = 1, threshold: float = 0.):
"""
:class:`PrecisionMetric` constructor.
:param k: Number of results to consider.
:param threshold: the label threshold of relevance degree.
"""
self._k = k
self._threshold = threshold
def __repr__(self) -> str:
""":return: Formated string representation of the metric."""
return f"{self.ALIAS}@{self._k}({self._threshold})"
def __call__(self, y_true: np.array, y_pred: np.array) -> float:
"""
Calculate precision@k.
Example:
>>> y_true = [0, 0, 0, 1]
>>> y_pred = [0.2, 0.4, 0.3, 0.1]
>>> Precision(k=1)(y_true, y_pred)
0.0
>>> Precision(k=2)(y_true, y_pred)
0.0
>>> Precision(k=4)(y_true, y_pred)
0.25
>>> Precision(k=5)(y_true, y_pred)
0.2
        :param y_true: The ground truth label of each document.
:param y_pred: The predicted scores of each document.
:return: Precision @ k
        :raises ValueError: if k <= 0.
"""
if self._k <= 0:
raise ValueError(f"k must be greater than 0."
f"{self._k} received.")
coupled_pair = sort_and_couple(y_true, y_pred)
precision = 0.0
for idx, (label, score) in enumerate(coupled_pair):
if idx >= self._k:
break
if label > self._threshold:
precision += 1.
return precision / self._k
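# Minimal usage sketch (values chosen for illustration): the top-k documents
# are taken after sorting by predicted score, so
#   Precision(k=2)([1, 0, 1], [0.9, 0.2, 0.8])  # -> 1.0, both top-2 relevant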
|
import pytest
from hangups.ui.emoticon import _replace_words, replace_emoticons
@pytest.mark.parametrize('replacements,string,result', [
({}, '', ''),
({}, ' ', ' '),
({}, '\n', '\n'),
({}, 'foo', 'foo'),
({'foo': 'bar'}, 'foo', 'bar'),
({'foo': 'bar'}, 'foofoo', 'foofoo'),
({'foo': 'bar'}, 'foo foo', 'bar bar'),
({'foo': 'bar'}, 'foo ', 'bar '),
({'foo': 'bar'}, 'foo\nfoo', 'bar\nbar'),
({'foo': 'bar'}, 'foo\n', 'bar\n'),
])
def test_replace_words(replacements, string, result):
assert _replace_words(replacements, string) == result
@pytest.mark.parametrize('string,result', [
('this is a test:)', 'this is a test:)'),
('this is a test :)', 'this is a test \U0000263a'),
('this is a test\n:)', 'this is a test\n\U0000263a'),
])
def test_replace_emoticons(string, result):
assert replace_emoticons(string) == result
|
import logging
import boto3
from smart_open import open
#
# These are publicly available via play.min.io
#
KEY_ID = 'Q3AM3UQ867SPQQA43P2F'
SECRET_KEY = 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
ENDPOINT_URL = 'https://play.min.io:9000'
def read_boto3():
"""Read directly using boto3."""
session = get_minio_session()
s3 = session.resource('s3', endpoint_url=ENDPOINT_URL)
obj = s3.Object('smart-open-test', 'README.rst')
data = obj.get()['Body'].read()
logging.info('read %d bytes via boto3', len(data))
return data
def read_smart_open():
url = 's3://Q3AM3UQ867SPQQA43P2F:[email protected]:9000@smart-open-test/README.rst' # noqa
#
# If the default region is not us-east-1, we need to construct our own
# session. This is because smart_open will create a session in the default
# region, which _must_ be us-east-1 for minio to work.
#
tp = {}
if get_default_region() != 'us-east-1':
logging.info('injecting custom session')
tp['session'] = get_minio_session()
with open(url, transport_params=tp) as fin:
text = fin.read()
logging.info('read %d characters via smart_open', len(text))
return text
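# Anatomy of the URL used above (illustrative):
#   s3://KEY_ID:SECRET_KEY@HOST:PORT@BUCKET/KEY
# smart_open parses the credentials and custom endpoint out of the URL before
# delegating the actual reads to boto3.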
def get_minio_session():
return boto3.Session(
region_name='us-east-1',
aws_access_key_id=KEY_ID,
aws_secret_access_key=SECRET_KEY,
)
def get_default_region():
return boto3.Session().region_name
def main():
logging.basicConfig(level=logging.INFO)
from_boto3 = read_boto3()
from_smart_open = read_smart_open()
assert from_boto3.decode('utf-8') == from_smart_open
if __name__ == '__main__':
main()
|
from typing import Callable, Sequence
from qutebrowser.utils import usertypes
from qutebrowser.misc import objects
DeleteFuncType = Callable[[Sequence[str]], None]
def get_cmd_completions(info, include_hidden, include_aliases, prefix=''):
"""Get a list of completions info for commands, sorted by name.
Args:
info: The CompletionInfo.
include_hidden: Include commands which are not in normal mode.
include_aliases: True to include command aliases.
        prefix: String to prepend to the command name.
Return: A list of tuples of form (name, description, bindings).
"""
assert objects.commands
cmdlist = []
cmd_to_keys = info.keyconf.get_reverse_bindings_for('normal')
for obj in set(objects.commands.values()):
hide_debug = obj.debug and not objects.args.debug
hide_mode = (usertypes.KeyMode.normal not in obj.modes and
not include_hidden)
if not (hide_debug or hide_mode or obj.deprecated):
bindings = ', '.join(cmd_to_keys.get(obj.name, []))
cmdlist.append((prefix + obj.name, obj.desc, bindings))
if include_aliases:
for name, cmd in info.config.get('aliases').items():
bindings = ', '.join(cmd_to_keys.get(name, []))
cmdlist.append((name, "Alias for '{}'".format(cmd), bindings))
return sorted(cmdlist)
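# A returned element looks like (values illustrative, bindings come from the
# reverse keybinding map):
#   ('open', 'Open a URL in the current/[count]th tab.', 'o, go')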
|
import logging
import epson_projector as epson
from epson_projector.const import (
BACK,
BUSY,
CMODE,
CMODE_LIST,
CMODE_LIST_SET,
DEFAULT_SOURCES,
EPSON_CODES,
FAST,
INV_SOURCES,
MUTE,
PAUSE,
PLAY,
POWER,
SOURCE,
SOURCE_LIST,
TURN_OFF,
TURN_ON,
VOL_DOWN,
VOL_UP,
VOLUME,
)
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_SSL,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from .const import (
ATTR_CMODE,
DATA_EPSON,
DEFAULT_NAME,
DOMAIN,
SERVICE_SELECT_CMODE,
SUPPORT_CMODE,
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_EPSON = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
| SUPPORT_CMODE
| SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_STEP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
)
MEDIA_PLAYER_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.comp_entity_ids})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=80): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Epson media player platform."""
if DATA_EPSON not in hass.data:
hass.data[DATA_EPSON] = []
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
ssl = config[CONF_SSL]
epson_proj = EpsonProjector(
async_get_clientsession(hass, verify_ssl=False), name, host, port, ssl
)
hass.data[DATA_EPSON].append(epson_proj)
async_add_entities([epson_proj], update_before_add=True)
async def async_service_handler(service):
"""Handle for services."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
if entity_ids:
devices = [
device
for device in hass.data[DATA_EPSON]
if device.entity_id in entity_ids
]
else:
devices = hass.data[DATA_EPSON]
for device in devices:
if service.service == SERVICE_SELECT_CMODE:
cmode = service.data.get(ATTR_CMODE)
await device.select_cmode(cmode)
device.async_schedule_update_ha_state(True)
epson_schema = MEDIA_PLAYER_SCHEMA.extend(
{vol.Required(ATTR_CMODE): vol.All(cv.string, vol.Any(*CMODE_LIST_SET))}
)
hass.services.async_register(
DOMAIN, SERVICE_SELECT_CMODE, async_service_handler, schema=epson_schema
)
class EpsonProjector(MediaPlayerEntity):
"""Representation of Epson Projector Device."""
def __init__(self, websession, name, host, port, encryption):
"""Initialize entity to control Epson projector."""
self._name = name
self._projector = epson.Projector(host, websession=websession, port=port)
self._cmode = None
self._source_list = list(DEFAULT_SOURCES.values())
self._source = None
self._volume = None
self._state = None
async def async_update(self):
"""Update state of device."""
is_turned_on = await self._projector.get_property(POWER)
_LOGGER.debug("Project turn on/off status: %s", is_turned_on)
if is_turned_on and is_turned_on == EPSON_CODES[POWER]:
self._state = STATE_ON
cmode = await self._projector.get_property(CMODE)
self._cmode = CMODE_LIST.get(cmode, self._cmode)
source = await self._projector.get_property(SOURCE)
self._source = SOURCE_LIST.get(source, self._source)
volume = await self._projector.get_property(VOLUME)
if volume:
self._volume = volume
elif is_turned_on == BUSY:
self._state = STATE_ON
else:
self._state = STATE_OFF
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_EPSON
async def async_turn_on(self):
"""Turn on epson."""
if self._state == STATE_OFF:
await self._projector.send_command(TURN_ON)
async def async_turn_off(self):
"""Turn off epson."""
if self._state == STATE_ON:
await self._projector.send_command(TURN_OFF)
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
@property
def source(self):
"""Get current input sources."""
return self._source
@property
def volume_level(self):
"""Return the volume level of the media player (0..1)."""
return self._volume
async def select_cmode(self, cmode):
"""Set color mode in Epson."""
await self._projector.send_command(CMODE_LIST_SET[cmode])
async def async_select_source(self, source):
"""Select input source."""
selected_source = INV_SOURCES[source]
await self._projector.send_command(selected_source)
async def async_mute_volume(self, mute):
"""Mute (true) or unmute (false) sound."""
await self._projector.send_command(MUTE)
async def async_volume_up(self):
"""Increase volume."""
await self._projector.send_command(VOL_UP)
async def async_volume_down(self):
"""Decrease volume."""
await self._projector.send_command(VOL_DOWN)
async def async_media_play(self):
"""Play media via Epson."""
await self._projector.send_command(PLAY)
async def async_media_pause(self):
"""Pause media via Epson."""
await self._projector.send_command(PAUSE)
async def async_media_next_track(self):
"""Skip to next."""
await self._projector.send_command(FAST)
async def async_media_previous_track(self):
"""Skip to previous."""
await self._projector.send_command(BACK)
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
if self._cmode is None:
return {}
return {ATTR_CMODE: self._cmode}
|
import logging
from aiohomekit.model.characteristics import (
ActivationStateValues,
CharacteristicsTypes,
CurrentHeaterCoolerStateValues,
HeatingCoolingCurrentValues,
HeatingCoolingTargetValues,
SwingModeValues,
TargetHeaterCoolerStateValues,
)
from aiohomekit.utils import clamp_enum_to_char
from homeassistant.components.climate import (
DEFAULT_MAX_HUMIDITY,
DEFAULT_MIN_HUMIDITY,
ClimateEntity,
)
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SWING_OFF,
SWING_VERTICAL,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
_LOGGER = logging.getLogger(__name__)
# Map of Homekit operation modes to hass modes
MODE_HOMEKIT_TO_HASS = {
HeatingCoolingTargetValues.OFF: HVAC_MODE_OFF,
HeatingCoolingTargetValues.HEAT: HVAC_MODE_HEAT,
HeatingCoolingTargetValues.COOL: HVAC_MODE_COOL,
HeatingCoolingTargetValues.AUTO: HVAC_MODE_HEAT_COOL,
}
CURRENT_MODE_HOMEKIT_TO_HASS = {
HeatingCoolingCurrentValues.IDLE: CURRENT_HVAC_IDLE,
HeatingCoolingCurrentValues.HEATING: CURRENT_HVAC_HEAT,
HeatingCoolingCurrentValues.COOLING: CURRENT_HVAC_COOL,
}
SWING_MODE_HOMEKIT_TO_HASS = {
SwingModeValues.DISABLED: SWING_OFF,
SwingModeValues.ENABLED: SWING_VERTICAL,
}
CURRENT_HEATER_COOLER_STATE_HOMEKIT_TO_HASS = {
CurrentHeaterCoolerStateValues.INACTIVE: CURRENT_HVAC_OFF,
CurrentHeaterCoolerStateValues.IDLE: CURRENT_HVAC_IDLE,
CurrentHeaterCoolerStateValues.HEATING: CURRENT_HVAC_HEAT,
CurrentHeaterCoolerStateValues.COOLING: CURRENT_HVAC_COOL,
}
TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS = {
TargetHeaterCoolerStateValues.AUTOMATIC: HVAC_MODE_HEAT_COOL,
TargetHeaterCoolerStateValues.HEAT: HVAC_MODE_HEAT,
TargetHeaterCoolerStateValues.COOL: HVAC_MODE_COOL,
}
# Map of hass operation modes to homekit modes
MODE_HASS_TO_HOMEKIT = {v: k for k, v in MODE_HOMEKIT_TO_HASS.items()}
TARGET_HEATER_COOLER_STATE_HASS_TO_HOMEKIT = {
v: k for k, v in TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS.items()
}
SWING_MODE_HASS_TO_HOMEKIT = {v: k for k, v in SWING_MODE_HOMEKIT_TO_HASS.items()}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit climate."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
entity_class = ENTITY_TYPES.get(service["stype"])
if not entity_class:
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([entity_class(conn, info)], True)
return True
conn.add_listener(async_add_service)
class HomeKitHeaterCoolerEntity(HomeKitEntity, ClimateEntity):
"""Representation of a Homekit climate device."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.ACTIVE,
CharacteristicsTypes.CURRENT_HEATER_COOLER_STATE,
CharacteristicsTypes.TARGET_HEATER_COOLER_STATE,
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD,
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD,
CharacteristicsTypes.SWING_MODE,
CharacteristicsTypes.TEMPERATURE_CURRENT,
]
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
state = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
if state == TargetHeaterCoolerStateValues.COOL:
await self.async_put_characteristics(
{CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD: temp}
)
elif state == TargetHeaterCoolerStateValues.HEAT:
await self.async_put_characteristics(
{CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD: temp}
)
else:
hvac_mode = TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS.get(state)
_LOGGER.warning(
"HomeKit device %s: Setting temperature in %s mode is not supported yet."
" Consider raising a ticket if you have this device and want to help us implement this feature.",
self.entity_id,
hvac_mode,
)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
if hvac_mode == HVAC_MODE_OFF:
await self.async_put_characteristics(
{CharacteristicsTypes.ACTIVE: ActivationStateValues.INACTIVE}
)
return
if hvac_mode not in {HVAC_MODE_HEAT, HVAC_MODE_COOL}:
_LOGGER.warning(
"HomeKit device %s: Setting temperature in %s mode is not supported yet."
" Consider raising a ticket if you have this device and want to help us implement this feature.",
self.entity_id,
hvac_mode,
)
await self.async_put_characteristics(
{
CharacteristicsTypes.TARGET_HEATER_COOLER_STATE: TARGET_HEATER_COOLER_STATE_HASS_TO_HOMEKIT[
hvac_mode
],
}
)
@property
def current_temperature(self):
"""Return the current temperature."""
return self.service.value(CharacteristicsTypes.TEMPERATURE_CURRENT)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
state = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
if state == TargetHeaterCoolerStateValues.COOL:
return self.service.value(
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
)
if state == TargetHeaterCoolerStateValues.HEAT:
return self.service.value(
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
)
return None
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
state = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
if state == TargetHeaterCoolerStateValues.COOL and self.service.has(
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
].minStep
if state == TargetHeaterCoolerStateValues.HEAT and self.service.has(
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
].minStep
return None
@property
def min_temp(self):
"""Return the minimum target temp."""
state = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
if state == TargetHeaterCoolerStateValues.COOL and self.service.has(
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
].minValue
if state == TargetHeaterCoolerStateValues.HEAT and self.service.has(
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
].minValue
return super().min_temp
@property
def max_temp(self):
"""Return the maximum target temp."""
state = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
if state == TargetHeaterCoolerStateValues.COOL and self.service.has(
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD
].maxValue
if state == TargetHeaterCoolerStateValues.HEAT and self.service.has(
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
):
return self.service[
CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD
].maxValue
return super().max_temp
@property
def hvac_action(self):
"""Return the current running hvac operation."""
# This characteristic describes the current mode of a device,
# e.g. a thermostat is "heating" a room to 75 degrees Fahrenheit.
# Can be 0 - 3 (Off, Idle, Heat, Cool)
if (
self.service.value(CharacteristicsTypes.ACTIVE)
== ActivationStateValues.INACTIVE
):
return CURRENT_HVAC_OFF
value = self.service.value(CharacteristicsTypes.CURRENT_HEATER_COOLER_STATE)
return CURRENT_HEATER_COOLER_STATE_HOMEKIT_TO_HASS.get(value)
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode."""
# This characteristic describes the target mode
# E.g. should the device start heating a room if the temperature
# falls below the target temperature.
# Can be 0 - 2 (Auto, Heat, Cool)
if (
self.service.value(CharacteristicsTypes.ACTIVE)
== ActivationStateValues.INACTIVE
):
return HVAC_MODE_OFF
value = self.service.value(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE)
return TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS.get(value)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
valid_values = clamp_enum_to_char(
TargetHeaterCoolerStateValues,
self.service[CharacteristicsTypes.TARGET_HEATER_COOLER_STATE],
)
modes = [
TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS[mode] for mode in valid_values
]
modes.append(HVAC_MODE_OFF)
return modes
@property
def swing_mode(self):
"""Return the swing setting.
Requires SUPPORT_SWING_MODE.
"""
value = self.service.value(CharacteristicsTypes.SWING_MODE)
return SWING_MODE_HOMEKIT_TO_HASS[value]
@property
def swing_modes(self):
"""Return the list of available swing modes.
Requires SUPPORT_SWING_MODE.
"""
valid_values = clamp_enum_to_char(
SwingModeValues,
self.service[CharacteristicsTypes.SWING_MODE],
)
return [SWING_MODE_HOMEKIT_TO_HASS[mode] for mode in valid_values]
async def async_set_swing_mode(self, swing_mode: str) -> None:
"""Set new target swing operation."""
await self.async_put_characteristics(
{CharacteristicsTypes.SWING_MODE: SWING_MODE_HASS_TO_HOMEKIT[swing_mode]}
)
@property
def supported_features(self):
"""Return the list of supported features."""
features = 0
if self.service.has(CharacteristicsTypes.TEMPERATURE_COOLING_THRESHOLD):
features |= SUPPORT_TARGET_TEMPERATURE
if self.service.has(CharacteristicsTypes.TEMPERATURE_HEATING_THRESHOLD):
features |= SUPPORT_TARGET_TEMPERATURE
if self.service.has(CharacteristicsTypes.SWING_MODE):
features |= SUPPORT_SWING_MODE
return features
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
class HomeKitClimateEntity(HomeKitEntity, ClimateEntity):
"""Representation of a Homekit climate device."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.HEATING_COOLING_CURRENT,
CharacteristicsTypes.HEATING_COOLING_TARGET,
CharacteristicsTypes.TEMPERATURE_CURRENT,
CharacteristicsTypes.TEMPERATURE_TARGET,
CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT,
CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET,
]
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
await self.async_put_characteristics(
{CharacteristicsTypes.TEMPERATURE_TARGET: temp}
)
async def async_set_humidity(self, humidity):
"""Set new target humidity."""
await self.async_put_characteristics(
{CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET: humidity}
)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
await self.async_put_characteristics(
{
CharacteristicsTypes.HEATING_COOLING_TARGET: MODE_HASS_TO_HOMEKIT[
hvac_mode
],
}
)
@property
def current_temperature(self):
"""Return the current temperature."""
return self.service.value(CharacteristicsTypes.TEMPERATURE_CURRENT)
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self.service.value(CharacteristicsTypes.TEMPERATURE_TARGET)
@property
def min_temp(self):
"""Return the minimum target temp."""
if self.service.has(CharacteristicsTypes.TEMPERATURE_TARGET):
char = self.service[CharacteristicsTypes.TEMPERATURE_TARGET]
return char.minValue
return super().min_temp
@property
def max_temp(self):
"""Return the maximum target temp."""
if self.service.has(CharacteristicsTypes.TEMPERATURE_TARGET):
char = self.service[CharacteristicsTypes.TEMPERATURE_TARGET]
return char.maxValue
return super().max_temp
@property
def current_humidity(self):
"""Return the current humidity."""
return self.service.value(CharacteristicsTypes.RELATIVE_HUMIDITY_CURRENT)
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
return self.service.value(CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET)
@property
def min_humidity(self):
"""Return the minimum humidity."""
char = self.service[CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET]
return char.minValue or DEFAULT_MIN_HUMIDITY
@property
def max_humidity(self):
"""Return the maximum humidity."""
char = self.service[CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET]
return char.maxValue or DEFAULT_MAX_HUMIDITY
@property
def hvac_action(self):
"""Return the current running hvac operation."""
# This characteristic describes the current mode of a device,
# e.g. a thermostat is "heating" a room to 75 degrees Fahrenheit.
# Can be 0 - 2 (Off, Heat, Cool)
value = self.service.value(CharacteristicsTypes.HEATING_COOLING_CURRENT)
return CURRENT_MODE_HOMEKIT_TO_HASS.get(value)
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode."""
# This characteristic describes the target mode
# E.g. should the device start heating a room if the temperature
# falls below the target temperature.
# Can be 0 - 3 (Off, Heat, Cool, Auto)
value = self.service.value(CharacteristicsTypes.HEATING_COOLING_TARGET)
return MODE_HOMEKIT_TO_HASS.get(value)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
valid_values = clamp_enum_to_char(
HeatingCoolingTargetValues,
self.service[CharacteristicsTypes.HEATING_COOLING_TARGET],
)
return [MODE_HOMEKIT_TO_HASS[mode] for mode in valid_values]
@property
def supported_features(self):
"""Return the list of supported features."""
features = 0
if self.service.has(CharacteristicsTypes.TEMPERATURE_TARGET):
features |= SUPPORT_TARGET_TEMPERATURE
if self.service.has(CharacteristicsTypes.RELATIVE_HUMIDITY_TARGET):
features |= SUPPORT_TARGET_HUMIDITY
return features
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
ENTITY_TYPES = {
"heater-cooler": HomeKitHeaterCoolerEntity,
"thermostat": HomeKitClimateEntity,
}
|
from homeassistant.helpers.entity import Entity
from . import DEFAULT_SCAN_INTERVAL, DOMAIN, timestamp_ms_to_date
from .const import DATA
SCAN_INTERVAL = DEFAULT_SCAN_INTERVAL
ATTR_IBAN = "account"
ATTR_USABLE_BALANCE = "usable_balance"
ATTR_BANK_BALANCE = "bank_balance"
ATTR_ACC_OWNER_TITLE = "owner_title"
ATTR_ACC_OWNER_FIRST_NAME = "owner_first_name"
ATTR_ACC_OWNER_LAST_NAME = "owner_last_name"
ATTR_ACC_OWNER_GENDER = "owner_gender"
ATTR_ACC_OWNER_BIRTH_DATE = "owner_birth_date"
ATTR_ACC_OWNER_EMAIL = "owner_email"
ATTR_ACC_OWNER_PHONE_NUMBER = "owner_phone_number"
ICON_ACCOUNT = "mdi:currency-eur"
ICON_CARD = "mdi:credit-card"
ICON_SPACE = "mdi:crop-square"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the N26 sensor platform."""
if discovery_info is None:
return
api_list = hass.data[DOMAIN][DATA]
sensor_entities = []
for api_data in api_list:
sensor_entities.append(N26Account(api_data))
for card in api_data.cards:
sensor_entities.append(N26Card(api_data, card))
for space in api_data.spaces["spaces"]:
sensor_entities.append(N26Space(api_data, space))
add_entities(sensor_entities)
class N26Account(Entity):
"""Sensor for a N26 balance account.
A balance account contains an amount of money (=balance). The amount may
also be negative.
"""
def __init__(self, api_data) -> None:
"""Initialize a N26 balance account."""
self._data = api_data
self._iban = self._data.balance["iban"]
def update(self) -> None:
"""Get the current balance and currency for the account."""
self._data.update_account()
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return self._iban[-4:]
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return f"n26_{self._iban[-4:]}"
@property
def state(self) -> float:
"""Return the balance of the account as state."""
if self._data.balance is None:
return None
return self._data.balance.get("availableBalance")
@property
def unit_of_measurement(self) -> str:
"""Use the currency as unit of measurement."""
if self._data.balance is None:
return None
return self._data.balance.get("currency")
@property
def device_state_attributes(self) -> dict:
"""Additional attributes of the sensor."""
attributes = {
ATTR_IBAN: self._data.balance.get("iban"),
ATTR_BANK_BALANCE: self._data.balance.get("bankBalance"),
ATTR_USABLE_BALANCE: self._data.balance.get("usableBalance"),
ATTR_ACC_OWNER_TITLE: self._data.account_info.get("title"),
ATTR_ACC_OWNER_FIRST_NAME: self._data.account_info.get("kycFirstName"),
ATTR_ACC_OWNER_LAST_NAME: self._data.account_info.get("kycLastName"),
ATTR_ACC_OWNER_GENDER: self._data.account_info.get("gender"),
ATTR_ACC_OWNER_BIRTH_DATE: timestamp_ms_to_date(
self._data.account_info.get("birthDate")
),
ATTR_ACC_OWNER_EMAIL: self._data.account_info.get("email"),
ATTR_ACC_OWNER_PHONE_NUMBER: self._data.account_info.get(
"mobilePhoneNumber"
),
}
for limit in self._data.limits:
limit_attr_name = f"limit_{limit['limit'].lower()}"
attributes[limit_attr_name] = limit["amount"]
return attributes
@property
def icon(self) -> str:
"""Set the icon for the sensor."""
return ICON_ACCOUNT
class N26Card(Entity):
"""Sensor for a N26 card."""
def __init__(self, api_data, card) -> None:
"""Initialize a N26 card."""
self._data = api_data
self._account_name = api_data.balance["iban"][-4:]
self._card = card
def update(self) -> None:
"""Get the current balance and currency for the account."""
self._data.update_cards()
self._card = self._data.card(self._card["id"], self._card)
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return self._card["id"]
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return f"{self._account_name.lower()}_card_{self._card['id']}"
@property
    def state(self) -> str:
        """Return the status of the card as state."""
return self._card["status"]
@property
def device_state_attributes(self) -> dict:
"""Additional attributes of the sensor."""
attributes = {
"apple_pay_eligible": self._card.get("applePayEligible"),
"card_activated": timestamp_ms_to_date(self._card.get("cardActivated")),
"card_product": self._card.get("cardProduct"),
"card_product_type": self._card.get("cardProductType"),
"card_settings_id": self._card.get("cardSettingsId"),
"card_Type": self._card.get("cardType"),
"design": self._card.get("design"),
"exceet_actual_delivery_date": self._card.get("exceetActualDeliveryDate"),
"exceet_card_status": self._card.get("exceetCardStatus"),
"exceet_expected_delivery_date": self._card.get(
"exceetExpectedDeliveryDate"
),
"exceet_express_card_delivery": self._card.get("exceetExpressCardDelivery"),
"exceet_express_card_delivery_email_sent": self._card.get(
"exceetExpressCardDeliveryEmailSent"
),
"exceet_express_card_delivery_tracking_id": self._card.get(
"exceetExpressCardDeliveryTrackingId"
),
"expiration_date": timestamp_ms_to_date(self._card.get("expirationDate")),
"google_pay_eligible": self._card.get("googlePayEligible"),
"masked_pan": self._card.get("maskedPan"),
"membership": self._card.get("membership"),
"mpts_card": self._card.get("mptsCard"),
"pan": self._card.get("pan"),
"pin_defined": timestamp_ms_to_date(self._card.get("pinDefined")),
"username_on_card": self._card.get("usernameOnCard"),
}
return attributes
@property
def icon(self) -> str:
"""Set the icon for the sensor."""
return ICON_CARD
class N26Space(Entity):
"""Sensor for a N26 space."""
def __init__(self, api_data, space) -> None:
"""Initialize a N26 space."""
self._data = api_data
self._space = space
def update(self) -> None:
"""Get the current balance and currency for the account."""
self._data.update_spaces()
self._space = self._data.space(self._space["id"], self._space)
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return f"space_{self._data.balance['iban'][-4:]}_{self._space['name'].lower()}"
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return self._space["name"]
@property
def state(self) -> float:
"""Return the balance of the account as state."""
return self._space["balance"]["availableBalance"]
@property
def unit_of_measurement(self) -> str:
"""Use the currency as unit of measurement."""
return self._space["balance"]["currency"]
@property
def device_state_attributes(self) -> dict:
"""Additional attributes of the sensor."""
goal_value = ""
if "goal" in self._space:
goal_value = self._space.get("goal").get("amount")
attributes = {
"name": self._space.get("name"),
"goal": goal_value,
"background_image_url": self._space.get("backgroundImageUrl"),
"image_url": self._space.get("imageUrl"),
"is_card_attached": self._space.get("isCardAttached"),
"is_hidden_from_balance": self._space.get("isHiddenFromBalance"),
"is_locked": self._space.get("isLocked"),
"is_primary": self._space.get("isPrimary"),
}
return attributes
@property
def icon(self) -> str:
"""Set the icon for the sensor."""
return ICON_SPACE
|
import pygogo as gogo
from chardet import detect
from meza.compat import decode
try:
from twisted.internet.protocol import Protocol
except ImportError:
Protocol = object
else:
from twisted.python.reflect import prefixedMethodNames as find_method_names
logger = gogo.Gogo(__name__, monolog=True).logger
# Elements of the three-tuples in the state table.
BEGIN_HANDLER = 0
DO_HANDLER = 1
END_HANDLER = 2
IDENTCHARS = '.-_:'
LENIENT_IDENTCHARS = IDENTCHARS + ';+#/%~'
def nop(*args, **kwargs):
    return None
def zipfndict(*args):
for fndict in args:
for key in fndict:
yield (key, tuple(x.get(key, nop) for x in args))
def get_method_obj_dict(obj, prefix):
names = find_method_names(obj.__class__, prefix)
return {name: getattr(obj, prefix + name) for name in names}
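# Illustrative sketch of how the two helpers above combine (hypothetical
# handler names): for an object defining begin_foo, do_foo and end_foo,
#   begins = get_method_obj_dict(obj, 'begin_')  # {'foo': obj.begin_foo}
#   dos = get_method_obj_dict(obj, 'do_')        # {'foo': obj.do_foo}
#   ends = get_method_obj_dict(obj, 'end_')      # {'foo': obj.end_foo}
#   dict(zipfndict(begins, dos, ends))  # {'foo': (begin_foo, do_foo, end_foo)}
# A state missing one of the handlers falls back to the no-op ``nop`` above,
# so every tuple always fills all three BEGIN/DO/END slots.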
class ParseError(Exception):
def __init__(self, filename, line, col, message):
self.filename = filename
self.line = line
self.col = col
self.message = message
def __str__(self):
return "%s:%s:%s: %s" % (
self.filename, self.line, self.col, self.message)
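# Example (hypothetical values):
#   str(ParseError('page.html', 3, 14, "Invalid tag character: '@'"))
# renders as "page.html:3:14: Invalid tag character: '@'".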
class XMLParser(Protocol):
state = None
encoding = None
bom = None
attrname = ''
attrval = ''
# _leadingBodyData will sometimes be set before switching to the
# 'bodydata' state, when we "accidentally" read a byte of bodydata
# in a different state.
_leadingBodyData = None
def __init__(self, filename='unnamed', **kwargs):
self.filename = filename
self.lenient = kwargs.get('lenient')
self.strict = not self.lenient
# protocol methods
def connectionMade(self):
self.lineno = 1
self.colno = 0
def saveMark(self):
'''Get the line number and column of the last character parsed'''
# This gets replaced during dataReceived, restored afterwards
return (self.lineno, self.colno)
def _raise_parse_error(self, message):
raise ParseError(*((self.filename,) + self.saveMark() + (message,)))
def _build_state_table(self):
'''Return a dictionary of begin, do, end state function tuples'''
# _build_state_table leaves something to be desired but it does what it
# does.. probably slowly, so I'm doing some evil caching so it doesn't
# get called more than once per class.
stateTable = getattr(self.__class__, '__stateTable', None)
if stateTable is None:
prefixes = ('begin_', 'do_', 'end_')
fndicts = (get_method_obj_dict(self, p) for p in prefixes)
stateTable = dict(zipfndict(*fndicts))
self.__class__.__stateTable = stateTable
return stateTable
def check_encoding(self, data):
if self.encoding.startswith('UTF-16'):
data = data[2:]
if 'UTF-16' in self.encoding or 'UCS-2' in self.encoding:
assert not len(data) & 1, 'UTF-16 must come in pairs for now'
def maybeBodyData(self):
if self.endtag:
return 'bodydata'
# Get ready for fun! We're going to allow
# <script>if (foo < bar)</script> to work!
# We do this by making everything between <script> and
# </script> a Text
# BUT <script src="foo"> will be special-cased to do regular,
# lenient behavior, because those may not have </script>
# -radix
if (self.tagName == 'script' and 'src' not in self.tagAttributes):
# we do this ourselves rather than having begin_waitforendscript
# because that can get called multiple times and we don't want
# bodydata to get reset other than the first time.
self.begin_bodydata(None)
return 'waitforendscript'
return 'bodydata'
def dataReceived(self, data):
stateTable = self._build_state_table()
self.encoding = self.encoding or detect(data)['encoding']
self.check_encoding(data)
self.state = self.state or 'begin'
content = decode(data, self.encoding)
# bring state, lineno, colno into local scope
lineno, colno = self.lineno, self.colno
curState = self.state
# replace saveMark with a nested scope function
saveMark = lambda: (lineno, colno)
self.saveMark, _saveMark = saveMark, self.saveMark
# fetch functions from the stateTable
beginFn, doFn, endFn = stateTable[curState]
try:
for char in content:
# do newline stuff
if char == '\n':
lineno += 1
colno = 0
else:
colno += 1
newState = doFn(char)
if newState and newState != curState:
# this is the endFn from the previous state
endFn()
curState = newState
beginFn, doFn, endFn = stateTable[curState]
beginFn(char)
finally:
self.saveMark = _saveMark
self.lineno, self.colno = lineno, colno
# state doesn't make sense if there's an exception..
self.state = curState
def connectionLost(self, reason):
"""
End the last state we were in.
"""
stateTable = self._build_state_table()
stateTable[self.state][END_HANDLER]()
# state methods
def do_begin(self, byte):
if byte.isspace():
return
if byte != '<' and self.lenient:
self._leadingBodyData = byte
return 'bodydata'
elif byte != '<':
msg = "First char of document [%r] wasn't <" % (byte,)
self._raise_parse_error(msg)
return 'tagstart'
def begin_comment(self, byte):
self.commentbuf = ''
def do_comment(self, byte):
self.commentbuf += byte
if self.commentbuf.endswith('-->'):
self.gotComment(self.commentbuf[:-3])
return 'bodydata'
def begin_tagstart(self, byte):
self.tagName = '' # name of the tag
self.tagAttributes = {} # attributes of the tag
self.termtag = 0 # is the tag self-terminating
self.endtag = 0
def _get_val(self, byte):
val = None
alnum_or_ident = byte.isalnum() or byte in IDENTCHARS
is_good = alnum_or_ident or byte in '/!?[' or byte.isspace()
if byte == '-' and self.tagName == '!-':
val = 'comment'
elif byte.isspace() and self.tagName:
# properly strict thing to do here is probably to only
# accept whitespace
val = 'waitforgt' if self.endtag else 'attrs'
elif byte in '>/[':
            def_gt = 'bodydata' if self.strict else self.maybeBodyData()
switch = {
'>': 'bodydata' if self.endtag else def_gt,
                '/': 'afterslash' if self.tagName else None,
'[': 'expectcdata' if self.tagName == '!' else None}
val = switch[byte]
if not (self.lenient or val or is_good):
self._raise_parse_error('Invalid tag character: %r' % byte)
return val
def _update_tags(self, byte):
alnum_or_ident = byte.isalnum() or byte in IDENTCHARS
if (byte in '!?') or alnum_or_ident:
self.tagName += byte
elif byte == '>' and self.endtag:
self.gotTagEnd(self.tagName)
elif byte == '>':
self.gotTagStart(self.tagName, {})
elif byte == '/' and not self.tagName:
self.endtag = 1
elif byte in '!?' and not self.tagName:
self.tagName += byte
self.termtag = 1
def do_tagstart(self, byte):
if byte.isspace() and not self.tagName:
self._raise_parse_error("Whitespace before tag-name")
elif byte in '!?' and self.tagName and self.strict:
self._raise_parse_error("Invalid character in tag-name")
elif byte == '[' and not self.tagName == '!':
self._raise_parse_error("Invalid '[' in tag-name")
val = self._get_val(byte)
self._update_tags(byte)
return val
def begin_unentity(self, byte):
self.bodydata += byte
def do_unentity(self, byte):
self.bodydata += byte
return 'bodydata'
def end_unentity(self):
self.gotText(self.bodydata)
def begin_expectcdata(self, byte):
self.cdatabuf = byte
def do_expectcdata(self, byte):
self.cdatabuf += byte
cdb = self.cdatabuf
cd = '[CDATA['
if len(cd) > len(cdb):
if cd.startswith(cdb):
return
elif self.lenient:
# WHAT THE CRAP!? MSWord9 generates HTML that includes these
# bizarre <![if !foo]> <![endif]> chunks, so I've gotta ignore
# 'em as best I can. this should really be a separate parse
# state but I don't even have any idea what these _are_.
return 'waitforgt'
else:
self._raise_parse_error("Mal-formed CDATA header")
if cd == cdb:
self.cdatabuf = ''
return 'cdata'
self._raise_parse_error("Mal-formed CDATA header")
def do_cdata(self, byte):
self.cdatabuf += byte
if self.cdatabuf.endswith("]]>"):
self.cdatabuf = self.cdatabuf[:-3]
return 'bodydata'
def end_cdata(self):
self.gotCData(self.cdatabuf)
self.cdatabuf = ''
def do_attrs(self, byte):
if byte.isalnum() or byte in IDENTCHARS:
# XXX FIXME really handle !DOCTYPE at some point
if self.tagName == '!DOCTYPE':
return 'doctype'
if self.tagName[0] in '!?':
return 'waitforgt'
return 'attrname'
elif byte.isspace():
return
elif byte == '>':
self.gotTagStart(self.tagName, self.tagAttributes)
            return 'bodydata' if self.strict else self.maybeBodyData()
elif byte == '/':
return 'afterslash'
elif self.lenient:
# discard and move on? Only case I've seen of this so far was:
# <foo bar="baz"">
return
self._raise_parse_error("Unexpected character: %r" % byte)
def begin_doctype(self, byte):
self.doctype = byte
def do_doctype(self, byte):
if byte == '>':
return 'bodydata'
self.doctype += byte
def end_doctype(self):
self.gotDoctype(self.doctype)
self.doctype = None
def do_waitforgt(self, byte):
if byte == '>':
if self.endtag or self.lenient:
return 'bodydata'
return self.maybeBodyData()
def begin_attrname(self, byte):
self.attrname = byte
self._attrname_termtag = 0
def _get_attrname(self, byte):
if byte == '=':
val = 'beforeattrval'
elif byte.isspace():
val = 'beforeeq'
elif self.lenient and byte in '"\'':
val = 'attrval'
elif self.lenient and byte == '>':
val = 'bodydata' if self._attrname_termtag else None
else:
# something is really broken. let's leave this attribute where it
# is and move on to the next thing
val = None
return val
def do_attrname(self, byte):
if byte.isalnum() or byte in IDENTCHARS:
self.attrname += byte
elif self.strict and not (byte.isspace() or byte == '='):
msg = "Invalid attribute name: %r %r" % (self.attrname, byte)
self._raise_parse_error(msg)
elif byte in LENIENT_IDENTCHARS or byte.isalnum():
self.attrname += byte
elif byte == '/':
self._attrname_termtag = 1
elif byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
            if self._attrname_termtag:
                self.gotTagEnd(self.tagName)
return self._get_attrname(byte)
def do_beforeattrval(self, byte):
chars = LENIENT_IDENTCHARS
val = None
if byte in '"\'':
val = 'attrval'
elif byte.isspace():
pass
elif self.lenient and (byte in chars or byte.isalnum()):
val = 'messyattr'
elif self.lenient and byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
val = self.maybeBodyData()
elif self.lenient and byte == '\\':
# I saw this in actual HTML once:
# <font size=\"3\"><sup>SM</sup></font>
pass
else:
msg = 'Invalid initial attribute value: %r; ' % byte
msg += 'Attribute values must be quoted.'
self._raise_parse_error(msg)
return val
def begin_beforeeq(self, byte):
self._beforeeq_termtag = 0
def do_beforeeq(self, byte):
if byte == '=':
return 'beforeattrval'
elif byte.isspace():
return
elif self.lenient:
if byte.isalnum() or byte in IDENTCHARS:
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
return 'attrname'
elif byte == '>':
self.attrval = 'True'
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
if self._beforeeq_termtag:
self.gotTagEnd(self.tagName)
return 'bodydata'
return self.maybeBodyData()
elif byte == '/':
self._beforeeq_termtag = 1
return
self._raise_parse_error("Invalid attribute")
def begin_attrval(self, byte):
self.quotetype = byte
self.attrval = ''
def do_attrval(self, byte):
if byte == self.quotetype:
return 'attrs'
self.attrval += byte
def end_attrval(self):
self.tagAttributes[self.attrname] = self.attrval
self.attrname = self.attrval = ''
def begin_messyattr(self, byte):
self.attrval = byte
def do_messyattr(self, byte):
if byte.isspace():
return 'attrs'
elif byte == '>':
endTag = 0
if self.attrval.endswith('/'):
endTag = 1
self.attrval = self.attrval[:-1]
self.tagAttributes[self.attrname] = self.attrval
self.gotTagStart(self.tagName, self.tagAttributes)
if endTag:
self.gotTagEnd(self.tagName)
return 'bodydata'
return self.maybeBodyData()
else:
self.attrval += byte
def end_messyattr(self):
if self.attrval:
self.tagAttributes[self.attrname] = self.attrval
def begin_afterslash(self, byte):
self._after_slash_closed = 0
def do_afterslash(self, byte):
# this state is only after a self-terminating slash, e.g. <foo/>
if self._after_slash_closed:
self._raise_parse_error("Mal-formed") # XXX When does this happen??
if byte != '>' and self.lenient:
return
elif byte != '>':
self._raise_parse_error("No data allowed after '/'")
self._after_slash_closed = 1
self.gotTagStart(self.tagName, self.tagAttributes)
self.gotTagEnd(self.tagName)
# don't need maybeBodyData here because there better not be
# any javascript code after a <script/>... we'll see :(
return 'bodydata'
def begin_bodydata(self, byte):
if self._leadingBodyData:
self.bodydata = self._leadingBodyData
del self._leadingBodyData
else:
self.bodydata = ''
def do_bodydata(self, byte):
if byte == '<':
return 'tagstart'
if byte == '&':
return 'entityref'
self.bodydata += byte
def end_bodydata(self):
self.gotText(self.bodydata)
self.bodydata = ''
def do_waitforendscript(self, byte):
if byte == '<':
return 'waitscriptendtag'
self.bodydata += byte
def begin_waitscriptendtag(self, byte):
self.temptagdata = ''
self.tagName = ''
self.endtag = 0
def do_waitscriptendtag(self, byte):
# 1 enforce / as first byte read
# 2 enforce following bytes to be subset of "script" until
# tagName == "script"
# 2a when that happens, gotText(self.bodydata) and
# gotTagEnd(self.tagName)
# 3 spaces can happen anywhere, they're ignored
# e.g. < / script >
# 4 anything else causes all data I've read to be moved to the
# bodydata, and switch back to waitforendscript state
# If it turns out this _isn't_ a </script>, we need to
# remember all the data we've been through so we can append it
# to bodydata
self.temptagdata += byte
# 1
if byte == '/':
self.endtag = True
elif not self.endtag:
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
# 2
elif byte.isalnum() or byte in IDENTCHARS:
self.tagName += byte
if not 'script'.startswith(self.tagName):
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
elif self.tagName == 'script':
self.gotText(self.bodydata)
self.gotTagEnd(self.tagName)
return 'waitforgt'
# 3
elif byte.isspace():
return 'waitscriptendtag'
# 4
else:
self.bodydata += "<" + self.temptagdata
return 'waitforendscript'
def begin_entityref(self, byte):
self.erefbuf = ''
self.erefextra = '' # extra bit for lenient mode
def do_entityref(self, byte):
if byte.isspace() or byte == "<":
if self.lenient:
                # '&foo' probably was '&amp;foo'
if self.erefbuf and self.erefbuf != "amp":
self.erefextra = self.erefbuf
self.erefbuf = "amp"
if byte == "<":
return "tagstart"
else:
self.erefextra += byte
return 'spacebodydata'
self._raise_parse_error("Bad entity reference")
elif byte != ';':
self.erefbuf += byte
else:
return 'bodydata'
def end_entityref(self):
self.gotEntityReference(self.erefbuf)
    # Hacky support for a space after '&' in an entity reference; this
    # should only happen in lenient mode.
def begin_spacebodydata(self, byte):
self.bodydata = self.erefextra
self.erefextra = None
do_spacebodydata = do_bodydata
end_spacebodydata = end_bodydata
# Sorta SAX-ish API
def gotTagStart(self, name, attributes):
'''Encountered an opening tag.
Default behaviour is to print.'''
print('begin', name, attributes)
def gotText(self, data):
'''Encountered text
Default behaviour is to print.'''
print('text:', repr(data))
def gotEntityReference(self, entityRef):
'''Encountered mnemonic entity reference
Default behaviour is to print.'''
print('entityRef: &%s;' % entityRef)
def gotComment(self, comment):
'''Encountered comment.
Default behaviour is to ignore.'''
pass
def gotCData(self, cdata):
'''Encountered CDATA
Default behaviour is to call the gotText method'''
self.gotText(cdata)
def gotDoctype(self, doctype):
"""Encountered DOCTYPE
This is really grotty: it basically just gives you everything between
'<!DOCTYPE' and '>' as an argument.
"""
print('!DOCTYPE', repr(doctype))
def gotTagEnd(self, name):
'''Encountered closing tag
Default behaviour is to print.'''
print('end', name)
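# Minimal usage sketch (hypothetical input; the default got* callbacks above
# simply print each parse event):
#   parser = XMLParser(filename='example.html', lenient=True)
#   parser.connectionMade()
#   parser.dataReceived(b'<p class="x">hi &amp; bye</p>')
#   parser.connectionLost(None)
# This prints a begin event for <p> with its attributes, the body text,
# an entityRef event for 'amp', and an end event for </p>.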
|
import re
import sys
import os.path
import glob
import subprocess
import tempfile
import argparse
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
REPO_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..') # /scripts/dev -> /scripts -> /
REQ_DIR = os.path.join(REPO_DIR, 'misc', 'requirements')
CHANGELOG_URLS = {
'pyparsing': 'https://github.com/pyparsing/pyparsing/blob/master/CHANGES',
'cherrypy': 'https://github.com/cherrypy/cherrypy/blob/master/CHANGES.rst',
'pylint': 'http://pylint.pycqa.org/en/latest/whatsnew/changelog.html',
'setuptools': 'https://github.com/pypa/setuptools/blob/master/CHANGES.rst',
'pytest-cov': 'https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst',
'pytest-xdist': 'https://github.com/pytest-dev/pytest-xdist/blob/master/CHANGELOG.rst',
'pytest-forked': 'https://github.com/pytest-dev/pytest-forked/blob/master/CHANGELOG',
'execnet': 'https://execnet.readthedocs.io/en/latest/changelog.html',
'apipkg': 'https://github.com/pytest-dev/apipkg/blob/master/CHANGELOG',
'pytest-rerunfailures': 'https://github.com/pytest-dev/pytest-rerunfailures/blob/master/CHANGES.rst',
'pytest-repeat': 'https://github.com/pytest-dev/pytest-repeat/blob/master/CHANGES.rst',
'requests': 'https://github.com/psf/requests/blob/master/HISTORY.md',
'requests-file': 'https://github.com/dashea/requests-file/blob/master/CHANGES.rst',
'werkzeug': 'https://github.com/pallets/werkzeug/blob/master/CHANGES.rst',
'hypothesis': 'https://hypothesis.readthedocs.io/en/latest/changes.html',
'mypy': 'https://mypy-lang.blogspot.com/',
'pytest': 'https://docs.pytest.org/en/latest/changelog.html',
'iniconfig': 'https://github.com/RonnyPfannschmidt/iniconfig/blob/master/CHANGELOG',
'tox': 'https://tox.readthedocs.io/en/latest/changelog.html',
'pyyaml': 'https://github.com/yaml/pyyaml/blob/master/CHANGES',
'pytest-bdd': 'https://github.com/pytest-dev/pytest-bdd/blob/master/CHANGES.rst',
'snowballstemmer': 'https://github.com/snowballstem/snowball/blob/master/NEWS',
'virtualenv': 'https://virtualenv.pypa.io/en/latest/changelog.html',
'pip': 'https://pip.pypa.io/en/stable/news/',
'packaging': 'https://pypi.org/project/packaging/',
'build': 'https://github.com/pypa/build/commits/master',
'flake8-docstrings': 'https://pypi.org/project/flake8-docstrings/',
'attrs': 'http://www.attrs.org/en/stable/changelog.html',
'jinja2': 'https://github.com/pallets/jinja/blob/master/CHANGES.rst',
'flake8': 'https://gitlab.com/pycqa/flake8/tree/master/docs/source/release-notes',
'cffi': 'https://cffi.readthedocs.io/en/latest/whatsnew.html',
'flake8-debugger': 'https://github.com/JBKahn/flake8-debugger/',
'astroid': 'https://github.com/PyCQA/astroid/blob/2.4/ChangeLog',
'pytest-instafail': 'https://github.com/pytest-dev/pytest-instafail/blob/master/CHANGES.rst',
'coverage': 'https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst',
'colorama': 'https://github.com/tartley/colorama/blob/master/CHANGELOG.rst',
'hunter': 'https://github.com/ionelmc/python-hunter/blob/master/CHANGELOG.rst',
'uritemplate': 'https://pypi.org/project/uritemplate/',
'flake8-builtins': 'https://github.com/gforcada/flake8-builtins/blob/master/CHANGES.rst',
'flake8-bugbear': 'https://github.com/PyCQA/flake8-bugbear',
'flake8-tidy-imports': 'https://github.com/adamchainz/flake8-tidy-imports/blob/master/HISTORY.rst',
'flake8-tuple': 'https://github.com/ar4s/flake8_tuple/blob/master/HISTORY.rst',
'more-itertools': 'https://github.com/erikrose/more-itertools/blob/master/docs/versions.rst',
'pydocstyle': 'http://www.pydocstyle.org/en/latest/release_notes.html',
'sphinx': 'https://www.sphinx-doc.org/en/master/changes.html',
'jaraco.functools': 'https://github.com/jaraco/jaraco.functools/blob/master/CHANGES.rst',
'parse': 'https://github.com/r1chardj0n3s/parse#potential-gotchas',
'py': 'https://py.readthedocs.io/en/latest/changelog.html#changelog',
'Pympler': 'https://github.com/pympler/pympler/blob/master/CHANGELOG.md',
'pytest-mock': 'https://github.com/pytest-dev/pytest-mock/blob/master/CHANGELOG.rst',
'pytest-qt': 'https://github.com/pytest-dev/pytest-qt/blob/master/CHANGELOG.rst',
'wcwidth': 'https://github.com/jquast/wcwidth#history',
'pyinstaller': 'https://pyinstaller.readthedocs.io/en/stable/CHANGES.html',
'pyinstaller-hooks-contrib': 'https://github.com/pyinstaller/pyinstaller-hooks-contrib/blob/master/CHANGELOG.rst',
'pytest-benchmark': 'https://pytest-benchmark.readthedocs.io/en/stable/changelog.html',
'typed-ast': 'https://github.com/python/typed_ast/commits/master',
'docutils': 'https://docutils.sourceforge.io/RELEASE-NOTES.html',
'bump2version': 'https://github.com/c4urself/bump2version/blob/master/CHANGELOG.md',
'six': 'https://github.com/benjaminp/six/blob/master/CHANGES',
'flake8-comprehensions': 'https://github.com/adamchainz/flake8-comprehensions/blob/master/HISTORY.rst',
'altgraph': 'https://github.com/ronaldoussoren/altgraph/blob/master/doc/changelog.rst',
'urllib3': 'https://github.com/urllib3/urllib3/blob/master/CHANGES.rst',
'wheel': 'https://github.com/pypa/wheel/blob/master/docs/news.rst',
'mako': 'https://docs.makotemplates.org/en/latest/changelog.html',
'lxml': 'https://lxml.de/4.6/changes-4.6.0.html',
'jwcrypto': 'https://github.com/latchset/jwcrypto/commits/master',
'tox-pip-version': 'https://github.com/pglass/tox-pip-version/commits/master',
'wrapt': 'https://github.com/GrahamDumpleton/wrapt/blob/develop/docs/changes.rst',
'pep517': 'https://github.com/pypa/pep517/blob/master/doc/changelog.rst',
'cryptography': 'https://cryptography.io/en/latest/changelog.html',
'toml': 'https://github.com/uiri/toml/releases',
'PyQt5': 'https://www.riverbankcomputing.com/news',
'PyQtWebEngine': 'https://www.riverbankcomputing.com/news',
'PyQt-builder': 'https://www.riverbankcomputing.com/news',
'PyQt5-sip': 'https://www.riverbankcomputing.com/news',
'PyQt5_stubs': 'https://github.com/stlehmann/PyQt5-stubs/blob/master/CHANGELOG.md',
'sip': 'https://www.riverbankcomputing.com/news',
'Pygments': 'https://pygments.org/docs/changelog/',
'vulture': 'https://github.com/jendrikseipp/vulture/blob/master/CHANGELOG.md',
'distlib': 'https://bitbucket.org/pypa/distlib/src/master/CHANGES.rst',
'py-cpuinfo': 'https://github.com/workhorsy/py-cpuinfo/blob/master/ChangeLog',
'cheroot': 'https://cheroot.cherrypy.org/en/latest/history.html',
'certifi': 'https://ccadb-public.secure.force.com/mozilla/IncludedCACertificateReport',
'chardet': 'https://github.com/chardet/chardet/releases',
'idna': 'https://github.com/kjd/idna/blob/master/HISTORY.rst',
'tldextract': 'https://github.com/john-kurkowski/tldextract/blob/master/CHANGELOG.md',
'typing_extensions': 'https://github.com/python/typing/commits/master/typing_extensions',
'diff_cover': 'https://github.com/Bachmann1234/diff_cover/blob/master/CHANGELOG',
'pytest-clarity': 'https://github.com/darrenburns/pytest-clarity/commits/master',
'pytest-icdiff': 'https://github.com/hjwp/pytest-icdiff/blob/master/HISTORY.rst',
'icdiff': 'https://github.com/jeffkaufman/icdiff/blob/master/ChangeLog',
'termcolor': 'https://pypi.org/project/termcolor/',
'pprintpp': 'https://github.com/wolever/pprintpp/blob/master/CHANGELOG.txt',
'beautifulsoup4': 'https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG',
'check-manifest': 'https://github.com/mgedmin/check-manifest/blob/master/CHANGES.rst',
'yamllint': 'https://github.com/adrienverge/yamllint/blob/master/CHANGELOG.rst',
'filelock': 'https://github.com/benediktschmitt/py-filelock/commits/master',
}
def convert_line(line, comments):
"""Convert the given requirement line to place into the output."""
for pattern, repl in comments['replace'].items():
line = re.sub(pattern, repl, line)
pkgname = line.split('=')[0]
if pkgname in comments['ignore']:
line = '# ' + line
try:
line += ' # ' + comments['comment'][pkgname]
except KeyError:
pass
try:
line += ' # rq.filter: {}'.format(comments['filter'][pkgname])
except KeyError:
pass
try:
line += ' ; {}'.format(comments['markers'][pkgname])
except KeyError:
pass
return line
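# Illustrative example (hypothetical package and comment data):
#   comments = {'replace': {}, 'ignore': ['foo'],
#               'comment': {'foo': 'needs libfoo'}, 'filter': {}, 'markers': {}}
#   convert_line('foo==1.0', comments)  # -> '# foo==1.0 # needs libfoo'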
def read_comments(fobj):
"""Find special comments in the config.
Args:
fobj: A file object for the config.
Return:
A dict with the parsed comment data.
"""
comments = {
'filter': {},
'markers': {},
'comment': {},
'ignore': [],
'add': [],
'replace': {},
'pre': False,
}
for line in fobj:
if line.startswith('#@'):
if ':' in line:
command, args = line[2:].split(':', maxsplit=1)
command = command.strip()
args = args.strip()
else:
command = line[2:].strip()
args = None
if command == 'filter':
pkg, filt = args.split(' ', maxsplit=1)
comments['filter'][pkg] = filt
elif command == 'comment':
pkg, comment = args.split(' ', maxsplit=1)
comments['comment'][pkg] = comment
elif command == 'ignore':
comments['ignore'] += args.split(', ')
elif command == 'replace':
pattern, replacement = args.split(' ', maxsplit=1)
comments['replace'][pattern] = replacement
elif command == 'markers':
pkg, markers = args.split(' ', maxsplit=1)
comments['markers'][pkg] = markers
elif command == 'add':
comments['add'].append(args)
elif command == 'pre':
comments['pre'] = True
return comments
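# Example directives as they would appear in a .txt-raw file (hypothetical
# package names and values):
#   #@ filter: foo < 2.0
#   #@ comment: foo pinned, see upstream issue
#   #@ ignore: foo, bar
#   #@ markers: foo python_version < "3.8"
#   #@ pre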
def get_all_names():
"""Get all requirement names based on filenames."""
for filename in glob.glob(os.path.join(REQ_DIR, 'requirements-*.txt-raw')):
basename = os.path.basename(filename)
yield basename[len('requirements-'):-len('.txt-raw')]
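# e.g. a file named 'requirements-tests.txt-raw' (hypothetical) yields 'tests'.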
def run_pip(venv_dir, *args, **kwargs):
"""Run pip inside the virtualenv."""
arg_str = ' '.join(str(arg) for arg in args)
utils.print_col('venv$ pip {}'.format(arg_str), 'blue')
venv_python = os.path.join(venv_dir, 'bin', 'python')
return subprocess.run([venv_python, '-m', 'pip'] + list(args),
check=True, **kwargs)
def init_venv(host_python, venv_dir, requirements, pre=False):
"""Initialize a new virtualenv and install the given packages."""
with utils.gha_group('Creating virtualenv'):
utils.print_col('$ python3 -m venv {}'.format(venv_dir), 'blue')
subprocess.run([host_python, '-m', 'venv', venv_dir], check=True)
run_pip(venv_dir, 'install', '-U', 'pip')
run_pip(venv_dir, 'install', '-U', 'setuptools', 'wheel')
install_command = ['install', '-r', requirements]
if pre:
install_command.append('--pre')
with utils.gha_group('Installing requirements'):
run_pip(venv_dir, *install_command)
run_pip(venv_dir, 'check')
def parse_args():
"""Parse commandline arguments via argparse."""
parser = argparse.ArgumentParser()
parser.add_argument('names', nargs='*')
return parser.parse_args()
def git_diff(*args):
"""Run a git diff command."""
command = (['git', '--no-pager', 'diff'] + list(args) + [
'--', 'requirements.txt', 'misc/requirements/requirements-*.txt'])
proc = subprocess.run(command,
stdout=subprocess.PIPE,
encoding='utf-8',
check=True)
return proc.stdout.splitlines()
class Change:
"""A single requirements change from a git diff output."""
def __init__(self, name):
self.name = name
self.old = None
self.new = None
if name in CHANGELOG_URLS:
self.url = CHANGELOG_URLS[name]
self.link = '[{}]({})'.format(self.name, self.url)
else:
self.url = '(no changelog)'
self.link = self.name
def __str__(self):
if self.old is None:
return '- {} new: {} {}'.format(self.name, self.new, self.url)
elif self.new is None:
return '- {} removed: {} {}'.format(self.name, self.old,
self.url)
else:
return '- {} {} -> {} {}'.format(self.name, self.old, self.new,
self.url)
def table_str(self):
"""Generate a markdown table."""
if self.old is None:
return '| {} | -- | {} |'.format(self.link, self.new)
elif self.new is None:
return '| {} | {} | -- |'.format(self.link, self.old)
else:
return '| {} | {} | {} |'.format(self.link, self.old, self.new)
def _get_changed_files():
"""Get a list of changed files via git."""
changed_files = set()
filenames = git_diff('--name-only')
for filename in filenames:
filename = filename.strip()
filename = filename.replace('misc/requirements/requirements-', '')
filename = filename.replace('.txt', '')
changed_files.add(filename)
return sorted(changed_files)
def _get_changes():
"""Get a list of changed versions from git."""
changes_dict = {}
diff = git_diff()
for line in diff:
if not line.startswith('-') and not line.startswith('+'):
continue
if line.startswith('+++ ') or line.startswith('--- '):
continue
if '==' in line:
name, version = line[1:].split('==')
if ';' in version: # pip environment markers
version = version.split(';')[0].strip()
elif line[1:].startswith('-e'):
rest, name = line.split('#egg=')
version = rest.split('@')[1][:7]
else:
name = line[1:]
version = '?'
if name.startswith('#'): # duplicate requirements
name = name[1:].strip()
if name not in changes_dict:
changes_dict[name] = Change(name)
if line.startswith('-'):
changes_dict[name].old = version
elif line.startswith('+'):
changes_dict[name].new = version
return [change for _name, change in sorted(changes_dict.items())]
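# Example diff lines parsed above (hypothetical versions):
#   -attrs==20.2.0
#   +attrs==20.3.0
# yield a single Change with name='attrs', old='20.2.0', new='20.3.0'.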
def print_changed_files():
"""Output all changed files from this run."""
changed_files = _get_changed_files()
files_text = '\n'.join('- ' + line for line in changed_files)
changes = _get_changes()
diff_text = '\n'.join(str(change) for change in changes)
utils.print_title('Changed')
utils.print_subtitle('Files')
print(files_text)
print()
utils.print_subtitle('Diff')
print(diff_text)
if 'CI' in os.environ:
print()
print('::set-output name=changed::' +
files_text.replace('\n', '%0A'))
table_header = [
'| Requirement | old | new |',
'|-------------|-----|-----|',
]
diff_table = '%0A'.join(table_header +
[change.table_str() for change in changes])
print('::set-output name=diff::' + diff_table)
def get_host_python(name):
"""Get the Python to use for a given requirement name.
    pylint installs typed_ast on Python < 3.8 only.
"""
if name == 'pylint':
return 'python3.7'
else:
return sys.executable
def build_requirements(name):
"""Build a requirements file."""
utils.print_subtitle("Building")
filename = os.path.join(REQ_DIR, 'requirements-{}.txt-raw'.format(name))
host_python = get_host_python(name)
with open(filename, 'r', encoding='utf-8') as f:
comments = read_comments(f)
with tempfile.TemporaryDirectory() as tmpdir:
init_venv(host_python=host_python,
venv_dir=tmpdir,
requirements=filename,
pre=comments['pre'])
with utils.gha_group('Freezing requirements'):
proc = run_pip(tmpdir, 'freeze', stdout=subprocess.PIPE)
reqs = proc.stdout.decode('utf-8')
if utils.ON_CI:
print(reqs.strip())
if name == 'qutebrowser':
outfile = os.path.join(REPO_DIR, 'requirements.txt')
else:
outfile = os.path.join(REQ_DIR, 'requirements-{}.txt'.format(name))
with open(outfile, 'w', encoding='utf-8') as f:
f.write("# This file is automatically generated by "
"scripts/dev/recompile_requirements.py\n\n")
for line in reqs.splitlines():
if line.startswith('qutebrowser=='):
continue
f.write(convert_line(line, comments) + '\n')
for line in comments['add']:
f.write(line + '\n')
return outfile
def test_tox():
"""Test requirements via tox."""
utils.print_title('Testing via tox')
host_python = get_host_python('tox')
req_path = os.path.join(REQ_DIR, 'requirements-tox.txt')
with tempfile.TemporaryDirectory() as tmpdir:
venv_dir = os.path.join(tmpdir, 'venv')
tox_workdir = os.path.join(tmpdir, 'tox-workdir')
venv_python = os.path.join(venv_dir, 'bin', 'python')
init_venv(host_python, venv_dir, req_path)
list_proc = subprocess.run([venv_python, '-m', 'tox', '--listenvs'],
check=True,
stdout=subprocess.PIPE,
universal_newlines=True)
environments = list_proc.stdout.strip().split('\n')
for env in environments:
with utils.gha_group('tox for {}'.format(env)):
utils.print_subtitle(env)
utils.print_col('venv$ tox -e {} --notest'.format(env), 'blue')
subprocess.run([venv_python, '-m', 'tox',
'--workdir', tox_workdir,
'-e', env,
'--notest'],
check=True)
def test_requirements(name, outfile):
"""Test a resulting requirements file."""
print()
utils.print_subtitle("Testing")
host_python = get_host_python(name)
with tempfile.TemporaryDirectory() as tmpdir:
init_venv(host_python, tmpdir, outfile)
def main():
"""Re-compile the given (or all) requirement files."""
args = parse_args()
if args.names:
names = args.names
else:
names = sorted(get_all_names())
utils.print_col('Rebuilding requirements: ' + ', '.join(names), 'green')
for name in names:
utils.print_title(name)
outfile = build_requirements(name)
test_requirements(name, outfile)
if not args.names:
# If we selected a subset, let's not go through the trouble of testing
# via tox.
test_tox()
print_changed_files()
if __name__ == '__main__':
main()
|
import sys
from django.core.management import ManagementUtility
RESTRICTED_COMMANDS = {"squashmigrations", "makemigrations"}
class WeblateManagementUtility(ManagementUtility):
def __init__(self, argv=None, developer_mode: bool = False):
super().__init__(argv)
self.developer_mode = developer_mode
def fetch_command(self, subcommand):
# Block usage of some commands
if not self.developer_mode and subcommand in RESTRICTED_COMMANDS:
sys.stderr.write("Blocked command: %r\n" % subcommand)
sys.stderr.write("This command is restricted for developers only.\n")
sys.stderr.write(
"In case you really want to do this, please execute "
"using manage.py from the Weblate source code.\n"
)
sys.exit(1)
# Fetch command class
command = super().fetch_command(subcommand)
        # Monkey-patch its output
original_notice = command.style.NOTICE
def patched_notice(txt):
return original_notice(
txt.replace("python manage.py migrate", "weblate migrate")
)
command.style.NOTICE = patched_notice
return command
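# Example: a Django notice such as
#   "Run 'python manage.py migrate' to apply them."
# is rendered by the patched style above as
#   "Run 'weblate migrate' to apply them."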
|
from homeassistant.components.emulated_roku import config_flow
from tests.common import MockConfigEntry
async def test_flow_works(hass):
"""Test that config flow works."""
flow = config_flow.EmulatedRokuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(
user_input={"name": "Emulated Roku Test", "listen_port": 8060}
)
assert result["type"] == "create_entry"
assert result["title"] == "Emulated Roku Test"
assert result["data"] == {"name": "Emulated Roku Test", "listen_port": 8060}
async def test_flow_already_registered_entry(hass):
"""Test that config flow doesn't allow existing names."""
MockConfigEntry(
domain="emulated_roku", data={"name": "Emulated Roku Test", "listen_port": 8062}
).add_to_hass(hass)
flow = config_flow.EmulatedRokuFlowHandler()
flow.hass = hass
result = await flow.async_step_user(
user_input={"name": "Emulated Roku Test", "listen_port": 8062}
)
assert result["type"] == "abort"
|
import logging
import pytest
def command_expansion_base(
quteproc, send_msg, recv_msg, url="data/hello.txt"):
quteproc.open_path(url)
quteproc.send_cmd(':message-info ' + send_msg)
quteproc.mark_expected(category='message',
loglevel=logging.INFO,
message=recv_msg)
@pytest.mark.parametrize('send_msg, recv_msg', [
    # escaping by doubling the braces
('foo{{url}}bar', 'foo{url}bar'),
('foo{url}', 'foohttp://localhost:*/hello.txt'),
('foo{url:pretty}', 'foohttp://localhost:*/hello.txt'),
('foo{url:domain}', 'foohttp://localhost:*'),
# test {url:auth} on a site with no auth
('foo{url:auth}', 'foo'),
('foo{url:scheme}', 'foohttp'),
('foo{url:host}', 'foolocalhost'),
('foo{url:path}', 'foo*/hello.txt'),
])
def test_command_expansion(quteproc, send_msg, recv_msg):
command_expansion_base(quteproc, send_msg, recv_msg)
@pytest.mark.parametrize('send_msg, recv_msg, url', [
('foo{title}', 'fooTest title', 'data/title.html'),
('foo{url:query}', 'fooq=bar', 'data/hello.txt?q=bar'),
# multiple variable expansion
('{title}bar{url}', 'Test titlebarhttp://localhost:*/title.html', 'data/title.html'),
])
def test_command_expansion_complex(
quteproc, send_msg, recv_msg, url):
command_expansion_base(quteproc, send_msg, recv_msg, url)
def test_command_expansion_basic_auth(quteproc, server):
url = ('http://user1:password1@localhost:{port}/basic-auth/user1/password1'
.format(port=server.port))
quteproc.open_url(url)
quteproc.send_cmd(':message-info foo{url:auth}')
quteproc.mark_expected(
category='message',
loglevel=logging.INFO, message='foouser1:password1@')
def test_command_expansion_clipboard(quteproc):
quteproc.send_cmd(':debug-set-fake-clipboard "foo"')
command_expansion_base(
quteproc, '{clipboard}bar{url}',
"foobarhttp://localhost:*/hello.txt")
quteproc.send_cmd(':debug-set-fake-clipboard "{{url}}"')
command_expansion_base(
quteproc, '{clipboard}bar{url}',
"{url}barhttp://localhost:*/hello.txt")
|
import diamond.collector
import re
import os
from collections import defaultdict
_RE = re.compile('|'.join([
r'sockets: used (?P<used>\d+)?',
r'(TCP|TCP6): inuse (?P<tcp_inuse>\d+)' +
r'( orphan (?P<tcp_orphan>\d+) ' +
r'tw (?P<tcp_tw>\d+) ' +
r'alloc (?P<tcp_alloc>\d+) ' +
r'mem (?P<tcp_mem>\d+))?',
r'(UDP|UDP6): inuse (?P<udp_inuse>\d+)( mem (?P<udp_mem>\d+))?'
]))
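# Sample /proc/net/sockstat lines matched by the pattern above:
#   sockets: used 280
#   TCP: inuse 28 orphan 0 tw 2 alloc 30 mem 3
#   UDP: inuse 5 mem 2
# yielding counters such as used=280, tcp_inuse=28, tcp_tw=2 and udp_mem=2.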
class SockstatCollector(diamond.collector.Collector):
PROCS = ['/proc/net/sockstat', '/proc/net/sockstat6']
def get_default_config_help(self):
config_help = super(SockstatCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(SockstatCollector, self).get_default_config()
config.update({
'path': 'sockets',
})
return config
def collect(self):
result = defaultdict(int)
for path in self.PROCS:
if not os.access(path, os.R_OK):
continue
            with open(path) as f:
                self.collect_stat(result, f)
for key, value in result.items():
self.publish(key, value, metric_type='GAUGE')
def collect_stat(self, data, f):
for line in f:
match = _RE.match(line)
if match:
for key, value in match.groupdict().items():
if value:
data[key] += int(value)
|
from aiohttp.test_utils import TestClient
from homeassistant.components.withings import const
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_EXTERNAL_URL,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_METRIC,
)
from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.config_entry_oauth2_flow import AUTH_CALLBACK_PATH
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
async def test_config_non_unique_profile(hass: HomeAssistant) -> None:
"""Test setup a non-unique profile."""
config_entry = MockConfigEntry(
domain=const.DOMAIN, data={const.PROFILE: "person0"}, unique_id="0"
)
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "profile"}, data={const.PROFILE: "person0"}
)
assert result
assert result["errors"]["base"] == "already_configured"
async def test_config_reauth_profile(
hass: HomeAssistant, aiohttp_client, aioclient_mock
) -> None:
"""Test reauth an existing profile re-creates the config entry."""
hass_config = {
HA_DOMAIN: {
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC,
CONF_EXTERNAL_URL: "http://127.0.0.1:8080/",
},
const.DOMAIN: {
CONF_CLIENT_ID: "my_client_id",
CONF_CLIENT_SECRET: "my_client_secret",
const.CONF_USE_WEBHOOK: False,
},
}
await async_process_ha_core_config(hass, hass_config.get(HA_DOMAIN))
assert await async_setup_component(hass, const.DOMAIN, hass_config)
await hass.async_block_till_done()
config_entry = MockConfigEntry(
domain=const.DOMAIN, data={const.PROFILE: "person0"}, unique_id="0"
)
config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "reauth", "profile": "person0"}
)
assert result
assert result["type"] == "form"
assert result["step_id"] == "reauth"
assert result["description_placeholders"] == {const.PROFILE: "person0"}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
# pylint: disable=protected-access
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
client: TestClient = await aiohttp_client(hass.http.app)
resp = await client.get(f"{AUTH_CALLBACK_PATH}?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.clear_requests()
aioclient_mock.post(
"https://account.withings.com/oauth2/token",
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
"userid": "0",
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
entries = hass.config_entries.async_entries(const.DOMAIN)
assert entries
assert entries[0].data["token"]["refresh_token"] == "mock-refresh-token"
|
from homeassistant.components.nexia import util
from homeassistant.const import HTTP_FORBIDDEN, HTTP_NOT_FOUND, HTTP_UNAUTHORIZED
async def test_is_invalid_auth_code():
"""Test for invalid auth."""
assert util.is_invalid_auth_code(HTTP_UNAUTHORIZED) is True
assert util.is_invalid_auth_code(HTTP_FORBIDDEN) is True
assert util.is_invalid_auth_code(HTTP_NOT_FOUND) is False
async def test_percent_conv():
"""Test percentage conversion."""
assert util.percent_conv(0.12) == 12.0
assert util.percent_conv(0.123) == 12.3
|
import unittest
import mock
from kalliope.core.Models import Singleton
from kalliope.core.Models.settings.Resources import Resources
from kalliope.core.NeuronLauncher import NeuronLauncher, NeuronParameterNotAvailable
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.Models.Neuron import Neuron
class TestNeuronLauncher(unittest.TestCase):
"""
    Class to test the launcher classes (TriggerLauncher, SynapseLauncher, NeuronLauncher) and their methods
"""
def setUp(self):
# clean settings
Singleton._instances = dict()
def tearDown(self):
Singleton._instances = dict()
####
# Neurons Launcher
def test_launch_neuron(self):
"""
Test the Neuron Launcher trying to start a Neuron
"""
neuron = Neuron(name='neurone1', parameters={'var1': 'val1'})
sl = SettingLoader()
resources = Resources(neuron_folder='/var/tmp/test/resources')
sl.settings.resources = resources
with mock.patch("kalliope.core.Utils.get_dynamic_class_instantiation") as mock_get_class_instantiation:
NeuronLauncher.launch_neuron(neuron=neuron)
mock_get_class_instantiation.assert_called_once_with(package_name="neurons",
module_name=neuron.name,
parameters=neuron.parameters,
resources_dir=sl.settings.resources.neuron_folder)
mock_get_class_instantiation.reset_mock()
def test_start_neuron(self):
"""
Testing params association and starting a Neuron
"""
with mock.patch("kalliope.core.NeuronLauncher.launch_neuron") as mock_launch_neuron_method:
            # Assert the neuron is launched with no parameters coming from the order
neuron1 = Neuron(name='neurone1', parameters={'var1': 'val1'})
NeuronLauncher.start_neuron(neuron=neuron1)
mock_launch_neuron_method.assert_called_with(neuron1)
mock_launch_neuron_method.reset_mock()
# Assert the params are well passed to the neuron
neuron2 = Neuron(name='neurone2', parameters={'var2': 'val2', 'var3': "{{ var3 }}"})
params = {
'var3': 'value3'
}
NeuronLauncher.start_neuron(neuron=neuron2,
parameters_dict=params)
neuron2_params = Neuron(name='neurone2', parameters={'var2': 'val2', 'var3': 'value3'})
mock_launch_neuron_method.assert_called_with(neuron2_params)
mock_launch_neuron_method.reset_mock()
            # Assert the Neuron is not started when required args are missing
neuron3 = Neuron(name='neurone3', parameters={'var3': 'val3', 'var4': '{{val4}}'})
params = {
'not_exist': 'test'
}
NeuronLauncher.start_neuron(neuron=neuron3,
parameters_dict=params)
mock_launch_neuron_method.assert_not_called()
mock_launch_neuron_method.reset_mock()
# Assert no neuron is launched when waiting for args and none are given
neuron4 = Neuron(name='neurone4', parameters={'var5': 'val5', 'var6': '{{val6}}'})
NeuronLauncher.start_neuron(neuron=neuron4)
mock_launch_neuron_method.assert_not_called()
mock_launch_neuron_method.reset_mock()
def test_replace_brackets_by_loaded_parameter(self):
# -------------------
# test with string
# -------------------
# the target value to replace is present in the loaded parameter dict
neuron_parameters = {
"param1": "this is a value {{ replaced }}"
}
loaded_parameters = {
"replaced": "replaced successfully"
}
expected_result = {
"param1": "this is a value replaced successfully"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
# the target value with unicode to replace is present in the loaded parameter dict
neuron_parameters = {
"param1": "this is a value {{ replaced }}"
}
loaded_parameters = {
"replaced": u"rêmpläcée successfülly"
}
expected_result = {
"param1": "this is a value rêmpläcée successfülly"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
# the target value to replace is NOT present in the loaded parameter dict
neuron_parameters = {
"param1": "this is a value {{ replaced }}"
}
loaded_parameters = {
"not_exist": "replaced successfully"
}
with self.assertRaises(NeuronParameterNotAvailable):
NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters, loaded_parameters)
        # one parameter doesn't contain brackets, the other one does
neuron_parameters = {
"param1": "this is a value {{ replaced }}",
"param2": "value"
}
loaded_parameters = {
"replaced": "replaced successfully"
}
expected_result = {
"param1": "this is a value replaced successfully",
"param2": "value"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
# parameters are integer or boolean
neuron_parameters = {
"param1": 1,
"param2": True
}
loaded_parameters = {
"replaced": "replaced successfully"
}
expected_result = {
"param1": 1,
"param2": True
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
        # parameters are say_template or file_template; they should not be altered by the loader
neuron_parameters = {
"say_template": "{{output}}",
"file_template": "here is a file"
}
loaded_parameters = {
"output": "should not be used"
}
expected_result = {
"say_template": "{{output}}",
"file_template": "here is a file"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
# replacing with variable
sl = SettingLoader()
sl.settings.variables = {
"replaced": {
"name": u'replaced successfully'
}
}
neuron_parameters = {
"param1": "this is a value {{ replaced['name'] }}"
}
loaded_parameters = {
"name": "replaced successfully"
}
expected_result = {
"param1": "this is a value replaced successfully"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
        # the parameter is a reserved key, for example from_answer_link from the neurotransmitter
list_reserved_keys = ["say_template", "file_template", "kalliope_memory", "from_answer_link"]
for reserved_key in list_reserved_keys:
neuron_parameters = {
reserved_key: "this is a value with {{ 'brackets '}}"
}
loaded_parameters = dict()
expected_result = {
reserved_key: "this is a value with {{ 'brackets '}}"
}
self.assertEqual(expected_result, NeuronLauncher._replace_brackets_by_loaded_parameter(neuron_parameters,
loaded_parameters))
####
# tests with global variables
####
# 1/ only one global variable
sl = SettingLoader()
sl.settings.variables = {
"hello": "test",
"hello2": "test2",
}
parameters = {
'var1': '{{hello}}'
}
expected_parameters = {
'var1': 'test'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 2/ global variable with string after
sl.settings.variables = {
"hello": "test",
"hello2": "test2",
}
parameters = {
'var1': '{{hello}} Sispheor'
}
expected_parameters = {
'var1': 'test Sispheor'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 3/ global variable with int after
parameters = {
'var1': '{{hello}}0'
}
sl.settings.variables = {
"hello": 60,
"hello2": "test2",
}
expected_parameters = {
'var1': '600'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 4/ multiple global variables
parameters = {
'var1': '{{hello}} {{me}}'
}
sl.settings.variables = {
"hello": "hello",
"me": "LaMonf"
}
expected_parameters = {
'var1': 'hello LaMonf'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 5/ parameter value is a list
parameters = {
'var1': '[hello {{name}}, bonjour {{name}}]'
}
sl.settings.variables = {
"name": "LaMonf",
"hello2": "test2",
}
expected_parameters = {
'var1': '[hello LaMonf, bonjour LaMonf]'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 6/ parameter is a dict
parameters = {'random_dict': [{'synapse': 'synapse2', 'answers': ['absolument', '{{ name }}']},
{'synapse': 'synapse3', 'answers': ['{{ name }}']}], 'default': 'synapse4'}
sl.settings.variables = {
"name": "nico"
}
expected_parameters = {
'random_dict': [
{'synapse': 'synapse2', 'answers': ['absolument', 'nico']},
{'synapse': 'synapse3', 'answers': ['nico']}], 'default': 'synapse4'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
# 7/ parameter is a dict with a restricted word
parameters = {'from_answer_link': [{'synapse': 'synapse2', 'answers': ['absolument', '{{ name }}']},
{'synapse': 'synapse3', 'answers': ['{{ name }}']}], 'default': 'synapse4'}
sl.settings.variables = {
"name": "nico"
}
expected_parameters = {
'from_answer_link': [
{'synapse': 'synapse2', 'answers': ['absolument', '{{ name }}']},
{'synapse': 'synapse3', 'answers': ['{{ name }}']}], 'default': 'synapse4'
}
self.assertEqual(expected_parameters, NeuronLauncher._replace_brackets_by_loaded_parameter(parameters,
loaded_parameters))
def test_parameters_are_available_in_loaded_parameters(self):
# the parameter in bracket is available in the dict
string_parameters = "this is a {{ parameter1 }}"
loaded_parameters = {"parameter1": "value"}
self.assertTrue(NeuronLauncher._neuron_parameters_are_available_in_loaded_parameters(string_parameters,
loaded_parameters))
# the parameter in bracket is NOT available in the dict
string_parameters = "this is a {{ parameter1 }}"
loaded_parameters = {"parameter2": "value"}
self.assertFalse(NeuronLauncher._neuron_parameters_are_available_in_loaded_parameters(string_parameters,
loaded_parameters))
        # the string_parameters contains a single bracketed parameter that is available in the dict
string_parameters = "this is a {{ parameter1 }}"
loaded_parameters = {"parameter1": "value"}
self.assertTrue(NeuronLauncher._neuron_parameters_are_available_in_loaded_parameters(string_parameters,
loaded_parameters))
# the string_parameters contains 2 parameters available in the dict
string_parameters = "this is a {{ parameter1 }} and this is {{ parameter2 }}"
loaded_parameters = {"parameter1": "value", "parameter2": "other value"}
self.assertTrue(NeuronLauncher._neuron_parameters_are_available_in_loaded_parameters(string_parameters,
loaded_parameters))
# the string_parameters contains 2 parameters and one of them is not available in the dict
string_parameters = "this is a {{ parameter1 }} and this is {{ parameter2 }}"
loaded_parameters = {"parameter1": "value", "parameter3": "other value"}
self.assertFalse(NeuronLauncher._neuron_parameters_are_available_in_loaded_parameters(string_parameters,
loaded_parameters))
if __name__ == '__main__':
unittest.main()
# suite = unittest.TestSuite()
# suite.addTest(TestNeuronLauncher("test_replace_brackets_by_loaded_parameter"))
# runner = unittest.TextTestRunner()
# runner.run(suite)
|
import time
from six.moves import xrange
import arctic._compression as aclz4
from arctic import Arctic
from arctic.async import ASYNC_ARCTIC, async_arctic_submit, async_wait_requests
from tests.integration.chunkstore.test_utils import create_test_data
a = Arctic('localhost:27017')
library_name = 'asyncbench.test'
TEST_DATA_CACHE = {}
def get_cached_random_df(num_chunks):
    if num_chunks < 1:
        raise ValueError("num_chunks must be >= 1")
if num_chunks not in TEST_DATA_CACHE:
TEST_DATA_CACHE[num_chunks] = get_random_df(num_chunks)
return TEST_DATA_CACHE[num_chunks]
def get_random_df(num_chunks):
data_to_write = create_test_data(size=25000, index=True, multiindex=False, random_data=True, random_ids=True,
use_hours=True, date_offset=0, cols=10)
data_to_write = data_to_write.append([data_to_write] * (num_chunks - 1))
return data_to_write
def get_stats(measurements):
import numpy as np
mean = np.mean(measurements)
stdev = np.std(measurements)
min = np.min(measurements)
max = np.max(measurements)
return mean, stdev, min, max
def clean_lib():
a.delete_library(library_name)
a.initialize_library(library_name)
def async_bench(num_requests, num_chunks):
data = get_cached_random_df(num_chunks)
lib = a[library_name]
requests = [async_arctic_submit(lib, lib.write, True, symbol='sym_{}'.format(x), data=data)
for x in xrange(num_requests)]
async_wait_requests(requests, do_raise=True)
def serial_bench(num_requests, num_chunks):
data = get_cached_random_df(num_chunks)
lib = a[library_name]
for x in xrange(num_requests):
lib.write(symbol='sym_{}'.format(x), data=data)
def run_scenario(result_text, rounds, num_requests, num_chunks, parallel_lz4,
use_async, async_arctic_pool_workers=None):
aclz4.enable_parallel_lz4(parallel_lz4)
if async_arctic_pool_workers is not None:
ASYNC_ARCTIC.reset(pool_size=int(async_arctic_pool_workers), timeout=10)
measurements = []
for curr_round in xrange(rounds):
# print("Running round {}".format(curr_round))
clean_lib()
start = time.time()
if use_async:
async_bench(num_requests, num_chunks)
else:
serial_bench(num_requests, num_chunks)
measurements.append(time.time() - start)
print("{}: async={}, chunks/write={}, writes/round={}, rounds={}, "
"parallel_lz4={}, async_arctic_pool_workers={}: {}".format(
result_text, use_async, num_chunks, num_requests, rounds, parallel_lz4, async_arctic_pool_workers,
["{:.3f}".format(x) for x in get_stats(measurements[1:] if len(measurements) > 1 else measurements)]))
def main():
n_use_async = (False, True)
n_rounds = (1,)
n_num_requests = (8,)
n_num_chunks = (4,)
n_parallel_lz4 = (False,)
n_async_arctic_pool_workers = (2, 4, 8)
for num_chunks in n_num_chunks:
for use_async in n_use_async:
for async_arctic_pool_workers in (n_async_arctic_pool_workers if use_async else (4,)):
for parallel_lz4 in n_parallel_lz4:
for num_requests in n_num_requests:
for rounds in n_rounds:
run_scenario(
result_text="Experiment results",
use_async=use_async,
rounds=rounds,
num_requests=num_requests,
num_chunks=num_chunks,
parallel_lz4=parallel_lz4,
async_arctic_pool_workers=async_arctic_pool_workers)
if __name__ == '__main__':
main()
# Experiment results: async=False, chunks/write=2, writes/round=64, rounds=2, parallel_lz4=False, async_arctic_pool_workers=4: ['10.109', '0.000', '10.109', '10.109']
# Experiment results: async=True, chunks/write=2, writes/round=64, rounds=2, parallel_lz4=False, async_arctic_pool_workers=2: ['7.169', '0.000', '7.169', '7.169']
# Experiment results: async=True, chunks/write=2, writes/round=64, rounds=2, parallel_lz4=False, async_arctic_pool_workers=4: ['5.327', '0.000', '5.327', '5.327']
# Experiment results: async=True, chunks/write=2, writes/round=64, rounds=2, parallel_lz4=False, async_arctic_pool_workers=8: ['5.410', '0.000', '5.410', '5.410']
|
import json
import voluptuous as vol
from homeassistant.components import mqtt
from homeassistant.const import CONF_PAYLOAD, CONF_PLATFORM
from homeassistant.core import HassJob, callback
import homeassistant.helpers.config_validation as cv
# mypy: allow-untyped-defs
CONF_ENCODING = "encoding"
CONF_QOS = "qos"
CONF_TOPIC = "topic"
DEFAULT_ENCODING = "utf-8"
DEFAULT_QOS = 0
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): mqtt.DOMAIN,
vol.Required(CONF_TOPIC): mqtt.util.valid_subscribe_topic,
vol.Optional(CONF_PAYLOAD): cv.string,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
vol.Optional(CONF_QOS, default=DEFAULT_QOS): vol.All(
vol.Coerce(int), vol.In([0, 1, 2])
),
}
)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for state changes based on configuration."""
topic = config[CONF_TOPIC]
payload = config.get(CONF_PAYLOAD)
encoding = config[CONF_ENCODING] or None
qos = config[CONF_QOS]
job = HassJob(action)
@callback
def mqtt_automation_listener(mqttmsg):
"""Listen for MQTT messages."""
if payload is None or payload == mqttmsg.payload:
data = {
"platform": "mqtt",
"topic": mqttmsg.topic,
"payload": mqttmsg.payload,
"qos": mqttmsg.qos,
"description": f"mqtt topic {mqttmsg.topic}",
}
try:
data["payload_json"] = json.loads(mqttmsg.payload)
except ValueError:
pass
hass.async_run_hass_job(job, {"trigger": data})
remove = await mqtt.async_subscribe(
hass, topic, mqtt_automation_listener, encoding=encoding, qos=qos
)
return remove
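# A minimal automation trigger this schema would accept (an illustrative
# sketch, not taken from any real configuration):
#
#   trigger:
#     platform: mqtt
#     topic: "living_room/switch/ac"
#     payload: "on"
#     qos: 0
#
# When a matching message arrives, the action receives the topic, payload and
# qos and, if the payload parses as JSON, a "payload_json" field as well.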
|
from __future__ import print_function
import argparse
import os
import re
import sys
import six
from Crypto.Hash import SHA256
def get_hash(fileobj):
h = SHA256.new()
chunk_size = 8192
while True:
chunk = fileobj.read(chunk_size)
if len(chunk) == 0:
break
h.update(chunk)
return h.hexdigest()
def check_list(fileobj):
correct = True
for line in fileobj:
match = re.match(r'(\w+)[ \t]+(.+)', line)
try:
with open(match.group(2), 'rb') as f1:
if match.group(1) == get_hash(f1):
print(match.group(2) + ': Pass')
else:
print(match.group(2) + ': Fail')
correct = False
        except (AttributeError, IOError):
            # match is None (malformed line) or the file could not be read
            print('Invalid format.')
correct = False
return correct
def make_file(txt):
f = six.BytesIO()
if isinstance(txt, six.binary_type):
f.write(txt)
else:
f.write(txt.encode("utf-8"))
f.seek(0)
return f
ap = argparse.ArgumentParser()
ap.add_argument(
'-c',
'--check',
action='store_true',
default=False,
help='''Check a file with sha256 hashes and file names for a match. format: hash filename'''
)
ap.add_argument('file', action='store', nargs='*', help='String or file to hash.')
args = ap.parse_args(sys.argv[1:])
if args.check:
if args.file:
s = True
for arg in args.file:
            if os.path.isfile(arg):
                with open(arg) as f:
                    # verify every file; don't short-circuit on the first failure
                    s = check_list(f) and s
else:
s = check_list(make_file(sys.stdin.read()))
if s:
sys.exit(0)
else:
sys.exit(1)
else:
if args.file:
for arg in args.file:
if os.path.isfile(arg):
with open(arg, 'rb') as f:
print(get_hash(f) + ' ' + arg)
elif arg == "-":
print(get_hash(make_file(sys.stdin.read())))
else:
print(get_hash(make_file(arg)))
else:
print(get_hash(make_file(sys.stdin.read())))
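# Illustrative invocations (assuming this file is saved as sha256sum.py):
#   python sha256sum.py somefile.txt      # prints "<hash> somefile.txt"
#   python sha256sum.py -c checksums.txt  # verifies "hash filename" lines
#   python sha256sum.py -                 # hashes data read from stdin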
|
import pandas as pd
from arctic.store.versioned_item import VersionedItem
def test_versioned_item_str():
item = VersionedItem(symbol="sym",
library="ONEMINUTE",
data=pd.DataFrame(),
version=1.0,
host='myhost',
metadata={'metadata': 'foo'})
expected = "VersionedItem(symbol=sym,library=ONEMINUTE," + \
"data=<class 'pandas.core.frame.DataFrame'>,version=1.0,metadata={'metadata': 'foo'},host=myhost)"
assert str(item) == expected
assert repr(item) == expected
def test_versioned_item_default_host():
item = VersionedItem(symbol="sym",
library="ONEMINUTE",
data=[1, 2, 3],
version=1.0,
metadata={'metadata': 'foo'})
expected_item = VersionedItem(symbol="sym",
library="ONEMINUTE",
data=[1, 2, 3],
version=1.0,
host=None,
metadata={'metadata': 'foo'})
assert item == expected_item
def test_versioned_item_str_handles_none():
item = VersionedItem(symbol=None,
library=None,
data=None,
version=None,
metadata=None,
host=None)
assert str(item)
def test_versioned_item_metadata_dict():
item = VersionedItem(symbol="test",
library="test_lib",
data=None,
version=1.2,
metadata=None,
host=None)
assert(item.metadata_dict() == {'symbol': 'test', 'library': 'test_lib', 'version': 1.2})
|
import voluptuous as vol
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_LOCATION,
ATTR_STATE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_ADDRESS,
CONF_EMAIL,
CONF_ENTITY_ID,
CONF_SENSORS,
CONF_STATE,
CONF_URL,
)
import homeassistant.core as ha
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
ATTR_ADDRESS = "address"
ATTR_SPACEFED = "spacefed"
ATTR_CAM = "cam"
ATTR_STREAM = "stream"
ATTR_FEEDS = "feeds"
ATTR_CACHE = "cache"
ATTR_PROJECTS = "projects"
ATTR_RADIO_SHOW = "radio_show"
ATTR_LAT = "lat"
ATTR_LON = "lon"
ATTR_API = "api"
ATTR_CLOSE = "close"
ATTR_CONTACT = "contact"
ATTR_ISSUE_REPORT_CHANNELS = "issue_report_channels"
ATTR_LASTCHANGE = "lastchange"
ATTR_LOGO = "logo"
ATTR_NAME = "name"
ATTR_OPEN = "open"
ATTR_SENSORS = "sensors"
ATTR_SPACE = "space"
ATTR_UNIT = "unit"
ATTR_URL = "url"
ATTR_VALUE = "value"
ATTR_SENSOR_LOCATION = "location"
CONF_CONTACT = "contact"
CONF_HUMIDITY = "humidity"
CONF_ICON_CLOSED = "icon_closed"
CONF_ICON_OPEN = "icon_open"
CONF_ICONS = "icons"
CONF_IRC = "irc"
CONF_ISSUE_REPORT_CHANNELS = "issue_report_channels"
CONF_LOCATION = "location"
CONF_SPACEFED = "spacefed"
CONF_SPACENET = "spacenet"
CONF_SPACESAML = "spacesaml"
CONF_SPACEPHONE = "spacephone"
CONF_CAM = "cam"
CONF_STREAM = "stream"
CONF_M4 = "m4"
CONF_MJPEG = "mjpeg"
CONF_USTREAM = "ustream"
CONF_FEEDS = "feeds"
CONF_FEED_BLOG = "blog"
CONF_FEED_WIKI = "wiki"
CONF_FEED_CALENDAR = "calendar"
CONF_FEED_FLICKER = "flicker"
CONF_FEED_TYPE = "type"
CONF_FEED_URL = "url"
CONF_CACHE = "cache"
CONF_CACHE_SCHEDULE = "schedule"
CONF_PROJECTS = "projects"
CONF_RADIO_SHOW = "radio_show"
CONF_RADIO_SHOW_NAME = "name"
CONF_RADIO_SHOW_URL = "url"
CONF_RADIO_SHOW_TYPE = "type"
CONF_RADIO_SHOW_START = "start"
CONF_RADIO_SHOW_END = "end"
CONF_LOGO = "logo"
CONF_PHONE = "phone"
CONF_SIP = "sip"
CONF_KEYMASTERS = "keymasters"
CONF_KEYMASTER_NAME = "name"
CONF_KEYMASTER_IRC_NICK = "irc_nick"
CONF_KEYMASTER_PHONE = "phone"
CONF_KEYMASTER_EMAIL = "email"
CONF_KEYMASTER_TWITTER = "twitter"
CONF_TWITTER = "twitter"
CONF_FACEBOOK = "facebook"
CONF_IDENTICA = "identica"
CONF_FOURSQUARE = "foursquare"
CONF_ML = "ml"
CONF_JABBER = "jabber"
CONF_ISSUE_MAIL = "issue_mail"
CONF_SPACE = "space"
CONF_TEMPERATURE = "temperature"
DATA_SPACEAPI = "data_spaceapi"
DOMAIN = "spaceapi"
ISSUE_REPORT_CHANNELS = [CONF_EMAIL, CONF_ISSUE_MAIL, CONF_ML, CONF_TWITTER]
SENSOR_TYPES = [CONF_HUMIDITY, CONF_TEMPERATURE]
SPACEAPI_VERSION = "0.13"
URL_API_SPACEAPI = "/api/spaceapi"
LOCATION_SCHEMA = vol.Schema({vol.Optional(CONF_ADDRESS): cv.string})
SPACEFED_SCHEMA = vol.Schema(
{
vol.Optional(CONF_SPACENET): cv.boolean,
vol.Optional(CONF_SPACESAML): cv.boolean,
vol.Optional(CONF_SPACEPHONE): cv.boolean,
}
)
STREAM_SCHEMA = vol.Schema(
{
vol.Optional(CONF_M4): cv.url,
vol.Optional(CONF_MJPEG): cv.url,
vol.Optional(CONF_USTREAM): cv.url,
}
)
FEED_SCHEMA = vol.Schema(
{vol.Optional(CONF_FEED_TYPE): cv.string, vol.Required(CONF_FEED_URL): cv.url}
)
FEEDS_SCHEMA = vol.Schema(
{
vol.Optional(CONF_FEED_BLOG): FEED_SCHEMA,
vol.Optional(CONF_FEED_WIKI): FEED_SCHEMA,
vol.Optional(CONF_FEED_CALENDAR): FEED_SCHEMA,
vol.Optional(CONF_FEED_FLICKER): FEED_SCHEMA,
}
)
CACHE_SCHEMA = vol.Schema(
{
vol.Required(CONF_CACHE_SCHEDULE): cv.matches_regex(
r"(m.02|m.05|m.10|m.15|m.30|h.01|h.02|h.04|h.08|h.12|d.01)"
)
}
)
RADIO_SHOW_SCHEMA = vol.Schema(
{
vol.Required(CONF_RADIO_SHOW_NAME): cv.string,
vol.Required(CONF_RADIO_SHOW_URL): cv.url,
vol.Required(CONF_RADIO_SHOW_TYPE): cv.matches_regex(r"(mp3|ogg)"),
vol.Required(CONF_RADIO_SHOW_START): cv.string,
vol.Required(CONF_RADIO_SHOW_END): cv.string,
}
)
KEYMASTER_SCHEMA = vol.Schema(
{
vol.Optional(CONF_KEYMASTER_NAME): cv.string,
vol.Optional(CONF_KEYMASTER_IRC_NICK): cv.string,
vol.Optional(CONF_KEYMASTER_PHONE): cv.string,
vol.Optional(CONF_KEYMASTER_EMAIL): cv.string,
vol.Optional(CONF_KEYMASTER_TWITTER): cv.string,
}
)
CONTACT_SCHEMA = vol.Schema(
{
vol.Optional(CONF_EMAIL): cv.string,
vol.Optional(CONF_IRC): cv.string,
vol.Optional(CONF_ML): cv.string,
vol.Optional(CONF_PHONE): cv.string,
vol.Optional(CONF_TWITTER): cv.string,
vol.Optional(CONF_SIP): cv.string,
vol.Optional(CONF_FACEBOOK): cv.string,
vol.Optional(CONF_IDENTICA): cv.string,
vol.Optional(CONF_FOURSQUARE): cv.string,
vol.Optional(CONF_JABBER): cv.string,
vol.Optional(CONF_ISSUE_MAIL): cv.string,
vol.Optional(CONF_KEYMASTERS): vol.All(
cv.ensure_list, [KEYMASTER_SCHEMA], vol.Length(min=1)
),
},
required=False,
)
STATE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Inclusive(CONF_ICON_CLOSED, CONF_ICONS): cv.url,
vol.Inclusive(CONF_ICON_OPEN, CONF_ICONS): cv.url,
},
required=False,
)
SENSOR_SCHEMA = vol.Schema(
{vol.In(SENSOR_TYPES): [cv.entity_id], cv.string: [cv.entity_id]}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CONTACT): CONTACT_SCHEMA,
vol.Required(CONF_ISSUE_REPORT_CHANNELS): vol.All(
cv.ensure_list, [vol.In(ISSUE_REPORT_CHANNELS)]
),
vol.Optional(CONF_LOCATION): LOCATION_SCHEMA,
vol.Required(CONF_LOGO): cv.url,
vol.Required(CONF_SPACE): cv.string,
vol.Required(CONF_STATE): STATE_SCHEMA,
vol.Required(CONF_URL): cv.string,
vol.Optional(CONF_SENSORS): SENSOR_SCHEMA,
vol.Optional(CONF_SPACEFED): SPACEFED_SCHEMA,
vol.Optional(CONF_CAM): vol.All(
cv.ensure_list, [cv.url], vol.Length(min=1)
),
vol.Optional(CONF_STREAM): STREAM_SCHEMA,
vol.Optional(CONF_FEEDS): FEEDS_SCHEMA,
vol.Optional(CONF_CACHE): CACHE_SCHEMA,
vol.Optional(CONF_PROJECTS): vol.All(cv.ensure_list, [cv.url]),
vol.Optional(CONF_RADIO_SHOW): vol.All(
cv.ensure_list, [RADIO_SHOW_SCHEMA]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
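# An illustrative minimal configuration accepted by CONFIG_SCHEMA (all values
# are examples only):
#
#   spaceapi:
#     space: "My Hackerspace"
#     logo: "https://example.org/logo.png"
#     url: "https://example.org"
#     contact:
#       email: "hello@example.org"
#     issue_report_channels:
#       - email
#     state:
#       entity_id: binary_sensor.space_open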
def setup(hass, config):
"""Register the SpaceAPI with the HTTP interface."""
hass.data[DATA_SPACEAPI] = config[DOMAIN]
hass.http.register_view(APISpaceApiView)
return True
class APISpaceApiView(HomeAssistantView):
"""View to provide details according to the SpaceAPI."""
url = URL_API_SPACEAPI
name = "api:spaceapi"
@staticmethod
def get_sensor_data(hass, spaceapi, sensor):
"""Get data from a sensor."""
sensor_state = hass.states.get(sensor)
if not sensor_state:
return None
sensor_data = {ATTR_NAME: sensor_state.name, ATTR_VALUE: sensor_state.state}
if ATTR_SENSOR_LOCATION in sensor_state.attributes:
sensor_data[ATTR_LOCATION] = sensor_state.attributes[ATTR_SENSOR_LOCATION]
else:
sensor_data[ATTR_LOCATION] = spaceapi[CONF_SPACE]
# Some sensors don't have a unit of measurement
if ATTR_UNIT_OF_MEASUREMENT in sensor_state.attributes:
sensor_data[ATTR_UNIT] = sensor_state.attributes[ATTR_UNIT_OF_MEASUREMENT]
return sensor_data
@ha.callback
def get(self, request):
"""Get SpaceAPI data."""
hass = request.app["hass"]
spaceapi = dict(hass.data[DATA_SPACEAPI])
is_sensors = spaceapi.get("sensors")
location = {ATTR_LAT: hass.config.latitude, ATTR_LON: hass.config.longitude}
try:
location[ATTR_ADDRESS] = spaceapi[ATTR_LOCATION][CONF_ADDRESS]
        except (KeyError, TypeError):
            pass
state_entity = spaceapi["state"][ATTR_ENTITY_ID]
space_state = hass.states.get(state_entity)
if space_state is not None:
state = {
ATTR_OPEN: space_state.state != "off",
ATTR_LASTCHANGE: dt_util.as_timestamp(space_state.last_updated),
}
else:
state = {ATTR_OPEN: "null", ATTR_LASTCHANGE: 0}
try:
state[ATTR_ICON] = {
ATTR_OPEN: spaceapi["state"][CONF_ICON_OPEN],
ATTR_CLOSE: spaceapi["state"][CONF_ICON_CLOSED],
}
except KeyError:
pass
data = {
ATTR_API: SPACEAPI_VERSION,
ATTR_CONTACT: spaceapi[CONF_CONTACT],
ATTR_ISSUE_REPORT_CHANNELS: spaceapi[CONF_ISSUE_REPORT_CHANNELS],
ATTR_LOCATION: location,
ATTR_LOGO: spaceapi[CONF_LOGO],
ATTR_SPACE: spaceapi[CONF_SPACE],
ATTR_STATE: state,
ATTR_URL: spaceapi[CONF_URL],
}
try:
data[ATTR_CAM] = spaceapi[CONF_CAM]
except KeyError:
pass
try:
data[ATTR_SPACEFED] = spaceapi[CONF_SPACEFED]
except KeyError:
pass
try:
data[ATTR_STREAM] = spaceapi[CONF_STREAM]
except KeyError:
pass
try:
data[ATTR_FEEDS] = spaceapi[CONF_FEEDS]
except KeyError:
pass
try:
data[ATTR_CACHE] = spaceapi[CONF_CACHE]
except KeyError:
pass
try:
data[ATTR_PROJECTS] = spaceapi[CONF_PROJECTS]
except KeyError:
pass
try:
data[ATTR_RADIO_SHOW] = spaceapi[CONF_RADIO_SHOW]
except KeyError:
pass
if is_sensors is not None:
sensors = {}
for sensor_type in is_sensors:
sensors[sensor_type] = []
for sensor in spaceapi["sensors"][sensor_type]:
sensor_data = self.get_sensor_data(hass, spaceapi, sensor)
sensors[sensor_type].append(sensor_data)
data[ATTR_SENSORS] = sensors
return self.json(data)
|
import coverage
class Plugin(coverage.CoveragePlugin):
"""A configuring plugin for testing."""
def configure(self, config):
"""Configure all the things!"""
opt_name = "report:exclude_lines"
exclude_lines = config.get_option(opt_name)
exclude_lines.append(r"pragma: custom")
exclude_lines.append(r"pragma: or whatever")
config.set_option(opt_name, exclude_lines)
def coverage_init(reg, options): # pylint: disable=unused-argument
"""Called by coverage to initialize the plugins here."""
reg.add_configurer(Plugin())
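# A sketch of how a plugin like this is typically enabled in .coveragerc (the
# module name "configurer_plugin" is an assumption, not from the original):
#
#   [run]
#   plugins = configurer_plugin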
|
from kombu.pools import producers
from .queues import task_exchange
priority_to_routing_key = {
'high': 'hipri',
'mid': 'midpri',
'low': 'lopri',
}
def send_as_task(connection, fun, args=(), kwargs=None, priority='mid'):
    kwargs = {} if kwargs is None else kwargs  # avoid a mutable default argument
    payload = {'fun': fun, 'args': args, 'kwargs': kwargs}
routing_key = priority_to_routing_key[priority]
with producers[connection].acquire(block=True) as producer:
producer.publish(payload,
serializer='pickle',
compression='bzip2',
exchange=task_exchange,
declare=[task_exchange],
routing_key=routing_key)
if __name__ == '__main__':
from kombu import Connection
from .tasks import hello_task
connection = Connection('amqp://guest:guest@localhost:5672//')
send_as_task(connection, fun=hello_task, args=('Kombu',), kwargs={},
priority='high')
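# A minimal sketch of what the imported `queues` module might define, assuming
# one queue per priority bound to a direct exchange (names other than
# `task_exchange` are assumptions, not from the original):
#
#   from kombu import Exchange, Queue
#   task_exchange = Exchange('tasks', type='direct')
#   task_queues = [Queue('hipri', task_exchange, routing_key='hipri'),
#                  Queue('midpri', task_exchange, routing_key='midpri'),
#                  Queue('lopri', task_exchange, routing_key='lopri')]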
|
import inspect
import os.path
import coverage
from coverage import env
from coverage.context import qualname_from_frame
from coverage.data import CoverageData
from tests.coveragetest import CoverageTest
class StaticContextTest(CoverageTest):
"""Tests of the static context."""
def test_no_context(self):
self.make_file("main.py", "a = 1")
cov = coverage.Coverage()
self.start_import_stop(cov, "main")
data = cov.get_data()
self.assertCountEqual(data.measured_contexts(), [""])
def test_static_context(self):
self.make_file("main.py", "a = 1")
cov = coverage.Coverage(context="gooey")
self.start_import_stop(cov, "main")
data = cov.get_data()
self.assertCountEqual(data.measured_contexts(), ["gooey"])
SOURCE = """\
a = 1
if a > 2:
a = 3
assert a == 1
"""
LINES = [1, 2, 4]
ARCS = [(-1, 1), (1, 2), (2, 4), (4, -1)]
def run_red_blue(self, **options):
"""Run red.py and blue.py, and return their CoverageData objects."""
self.make_file("red.py", self.SOURCE)
red_cov = coverage.Coverage(context="red", data_suffix="r", source=["."], **options)
self.start_import_stop(red_cov, "red")
red_cov.save()
red_data = red_cov.get_data()
self.make_file("blue.py", self.SOURCE)
blue_cov = coverage.Coverage(context="blue", data_suffix="b", source=["."], **options)
self.start_import_stop(blue_cov, "blue")
blue_cov.save()
blue_data = blue_cov.get_data()
return red_data, blue_data
def test_combining_line_contexts(self):
red_data, blue_data = self.run_red_blue()
for datas in [[red_data, blue_data], [blue_data, red_data]]:
combined = CoverageData(suffix="combined")
for data in datas:
combined.update(data)
self.assertEqual(combined.measured_contexts(), {'red', 'blue'})
full_names = {os.path.basename(f): f for f in combined.measured_files()}
self.assertCountEqual(full_names, ['red.py', 'blue.py'])
fred = full_names['red.py']
fblue = full_names['blue.py']
def assert_combined_lines(filename, context, lines):
# pylint: disable=cell-var-from-loop
combined.set_query_context(context)
self.assertEqual(combined.lines(filename), lines)
assert_combined_lines(fred, 'red', self.LINES)
assert_combined_lines(fred, 'blue', [])
assert_combined_lines(fblue, 'red', [])
assert_combined_lines(fblue, 'blue', self.LINES)
def test_combining_arc_contexts(self):
red_data, blue_data = self.run_red_blue(branch=True)
for datas in [[red_data, blue_data], [blue_data, red_data]]:
combined = CoverageData(suffix="combined")
for data in datas:
combined.update(data)
self.assertEqual(combined.measured_contexts(), {'red', 'blue'})
full_names = {os.path.basename(f): f for f in combined.measured_files()}
self.assertCountEqual(full_names, ['red.py', 'blue.py'])
fred = full_names['red.py']
fblue = full_names['blue.py']
def assert_combined_lines(filename, context, lines):
# pylint: disable=cell-var-from-loop
combined.set_query_context(context)
self.assertEqual(combined.lines(filename), lines)
assert_combined_lines(fred, 'red', self.LINES)
assert_combined_lines(fred, 'blue', [])
assert_combined_lines(fblue, 'red', [])
assert_combined_lines(fblue, 'blue', self.LINES)
def assert_combined_arcs(filename, context, lines):
# pylint: disable=cell-var-from-loop
combined.set_query_context(context)
self.assertEqual(combined.arcs(filename), lines)
assert_combined_arcs(fred, 'red', self.ARCS)
assert_combined_arcs(fred, 'blue', [])
assert_combined_arcs(fblue, 'red', [])
assert_combined_arcs(fblue, 'blue', self.ARCS)
class DynamicContextTest(CoverageTest):
"""Tests of dynamically changing contexts."""
SOURCE = """\
def helper(lineno):
x = 2
def test_one():
a = 5
helper(6)
def test_two():
a = 9
b = 10
if a > 11:
b = 12
assert a == (13-4)
assert b == (14-4)
helper(15)
test_one()
x = 18
helper(19)
test_two()
"""
OUTER_LINES = [1, 4, 8, 17, 18, 19, 2, 20]
TEST_ONE_LINES = [5, 6, 2]
TEST_TWO_LINES = [9, 10, 11, 13, 14, 15, 2]
def test_dynamic_alone(self):
self.make_file("two_tests.py", self.SOURCE)
cov = coverage.Coverage(source=["."])
cov.set_option("run:dynamic_context", "test_function")
self.start_import_stop(cov, "two_tests")
data = cov.get_data()
full_names = {os.path.basename(f): f for f in data.measured_files()}
fname = full_names["two_tests.py"]
self.assertCountEqual(
data.measured_contexts(),
["", "two_tests.test_one", "two_tests.test_two"])
def assert_context_lines(context, lines):
data.set_query_context(context)
self.assertCountEqual(lines, data.lines(fname))
assert_context_lines("", self.OUTER_LINES)
assert_context_lines("two_tests.test_one", self.TEST_ONE_LINES)
assert_context_lines("two_tests.test_two", self.TEST_TWO_LINES)
def test_static_and_dynamic(self):
self.make_file("two_tests.py", self.SOURCE)
cov = coverage.Coverage(context="stat", source=["."])
cov.set_option("run:dynamic_context", "test_function")
self.start_import_stop(cov, "two_tests")
data = cov.get_data()
full_names = {os.path.basename(f): f for f in data.measured_files()}
fname = full_names["two_tests.py"]
self.assertCountEqual(
data.measured_contexts(),
["stat", "stat|two_tests.test_one", "stat|two_tests.test_two"])
def assert_context_lines(context, lines):
data.set_query_context(context)
self.assertCountEqual(lines, data.lines(fname))
assert_context_lines("stat", self.OUTER_LINES)
assert_context_lines("stat|two_tests.test_one", self.TEST_ONE_LINES)
assert_context_lines("stat|two_tests.test_two", self.TEST_TWO_LINES)
def get_qualname():
"""Helper to return qualname_from_frame for the caller."""
stack = inspect.stack()[1:]
if any(sinfo[0].f_code.co_name == "get_qualname" for sinfo in stack):
        # We're calling ourselves recursively, maybe because we're testing
# properties. Return an int to try to get back on track.
return 17
caller_frame = stack[0][0]
return qualname_from_frame(caller_frame)
# pylint: disable=missing-class-docstring, missing-function-docstring, unused-argument
class Parent(object):
def meth(self):
return get_qualname()
@property
def a_property(self):
return get_qualname()
class Child(Parent):
pass
class SomethingElse(object):
pass
class MultiChild(SomethingElse, Child):
pass
def no_arguments():
return get_qualname()
def plain_old_function(a, b):
return get_qualname()
def fake_out(self):
return get_qualname()
def patch_meth(self):
return get_qualname()
class OldStyle:
def meth(self):
return get_qualname()
class OldChild(OldStyle):
pass
# pylint: enable=missing-class-docstring, missing-function-docstring, unused-argument
class QualnameTest(CoverageTest):
"""Tests of qualname_from_frame."""
# Pylint gets confused about meth() below.
# pylint: disable=no-value-for-parameter
run_in_temp_dir = False
def test_method(self):
self.assertEqual(Parent().meth(), "tests.test_context.Parent.meth")
def test_inherited_method(self):
self.assertEqual(Child().meth(), "tests.test_context.Parent.meth")
def test_mi_inherited_method(self):
self.assertEqual(MultiChild().meth(), "tests.test_context.Parent.meth")
def test_no_arguments(self):
self.assertEqual(no_arguments(), "tests.test_context.no_arguments")
def test_plain_old_function(self):
self.assertEqual(
plain_old_function(0, 1), "tests.test_context.plain_old_function")
def test_fake_out(self):
self.assertEqual(fake_out(0), "tests.test_context.fake_out")
def test_property(self):
self.assertEqual(
Parent().a_property, "tests.test_context.Parent.a_property")
def test_changeling(self):
c = Child()
c.meth = patch_meth
self.assertEqual(c.meth(c), "tests.test_context.patch_meth")
def test_oldstyle(self):
if not env.PY2:
self.skipTest("Old-style classes are only in Python 2")
self.assertEqual(OldStyle().meth(), "tests.test_context.OldStyle.meth")
self.assertEqual(OldChild().meth(), "tests.test_context.OldStyle.meth")
def test_bug_829(self):
# A class with a name like a function shouldn't confuse qualname_from_frame.
class test_something(object): # pylint: disable=unused-variable
self.assertEqual(get_qualname(), None)
|
from __future__ import with_statement
import os
import sys
import logging
import threading
import tempfile
import argparse
try:
import Queue
except ImportError:
import queue as Queue
import Pyro4
from gensim.models import ldamodel
from gensim import utils
logger = logging.getLogger('gensim.models.lda_worker')
# periodically save intermediate models after every SAVE_DEBUG updates (0 for never)
SAVE_DEBUG = 0
LDA_WORKER_PREFIX = 'gensim.lda_worker'
class Worker:
"""Used as a Pyro4 class with exposed methods.
Exposes every non-private method and property of the class automatically to be available for remote access.
"""
def __init__(self):
"""Partly initialize the model."""
self.model = None
@Pyro4.expose
def initialize(self, myid, dispatcher, **model_params):
"""Fully initialize the worker.
Parameters
----------
myid : int
An ID number used to identify this worker in the dispatcher object.
dispatcher : :class:`~gensim.models.lda_dispatcher.Dispatcher`
The dispatcher responsible for scheduling this worker.
**model_params
            Keyword parameters to initialize the inner LDA model, see :class:`~gensim.models.ldamodel.LdaModel`.
"""
self.lock_update = threading.Lock()
self.jobsdone = 0 # how many jobs has this worker completed?
# id of this worker in the dispatcher; just a convenience var for easy access/logging TODO remove?
self.myid = myid
self.dispatcher = dispatcher
self.finished = False
logger.info("initializing worker #%s", myid)
self.model = ldamodel.LdaModel(**model_params)
@Pyro4.expose
@Pyro4.oneway
def requestjob(self):
"""Request jobs from the dispatcher, in a perpetual loop until :meth:`gensim.models.lda_worker.Worker.getstate`
is called.
Raises
------
RuntimeError
            If `self.model` is None (i.e. the worker is not initialized).
"""
if self.model is None:
raise RuntimeError("worker must be initialized before receiving jobs")
job = None
while job is None and not self.finished:
try:
job = self.dispatcher.getjob(self.myid)
except Queue.Empty:
# no new job: try again, unless we're finished with all work
continue
if job is not None:
logger.info("worker #%s received job #%i", self.myid, self.jobsdone)
self.processjob(job)
self.dispatcher.jobdone(self.myid)
else:
logger.info("worker #%i stopping asking for jobs", self.myid)
@utils.synchronous('lock_update')
def processjob(self, job):
"""Incrementally process the job and potentially logs progress.
Parameters
----------
job : iterable of list of (int, float)
Corpus in BoW format.
"""
logger.debug("starting to process job #%i", self.jobsdone)
self.model.do_estep(job)
self.jobsdone += 1
if SAVE_DEBUG and self.jobsdone % SAVE_DEBUG == 0:
fname = os.path.join(tempfile.gettempdir(), 'lda_worker.pkl')
self.model.save(fname)
logger.info("finished processing job #%i", self.jobsdone - 1)
@Pyro4.expose
def ping(self):
"""Test the connectivity with Worker."""
return True
@Pyro4.expose
@utils.synchronous('lock_update')
def getstate(self):
"""Log and get the LDA model's current state.
Returns
-------
result : :class:`~gensim.models.ldamodel.LdaState`
The current state.
"""
logger.info("worker #%i returning its state after %s jobs", self.myid, self.jobsdone)
result = self.model.state
assert isinstance(result, ldamodel.LdaState)
self.model.clear() # free up mem in-between two EM cycles
self.finished = True
return result
@Pyro4.expose
@utils.synchronous('lock_update')
def reset(self, state):
"""Reset the worker by setting sufficient stats to 0.
Parameters
----------
state : :class:`~gensim.models.ldamodel.LdaState`
Encapsulates information for distributed computation of LdaModel objects.
"""
assert state is not None
logger.info("resetting worker #%i", self.myid)
self.model.state = state
self.model.sync_state()
self.model.state.reset()
self.finished = False
@Pyro4.oneway
def exit(self):
"""Terminate the worker."""
logger.info("terminating worker #%i", self.myid)
os._exit(0)
def main():
parser = argparse.ArgumentParser(description=__doc__[:-130], formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--host", help="Nameserver hostname (default: %(default)s)", default=None)
parser.add_argument("--port", help="Nameserver port (default: %(default)s)", default=None, type=int)
parser.add_argument(
"--no-broadcast", help="Disable broadcast (default: %(default)s)", action='store_const',
default=True, const=False
)
parser.add_argument("--hmac", help="Nameserver hmac key (default: %(default)s)", default=None)
parser.add_argument(
'-v', '--verbose', help='Verbose flag', action='store_const', dest="loglevel",
const=logging.INFO, default=logging.WARNING
)
args = parser.parse_args()
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=args.loglevel)
logger.info("running %s", " ".join(sys.argv))
ns_conf = {
"broadcast": args.no_broadcast,
"host": args.host,
"port": args.port,
"hmac_key": args.hmac
}
utils.pyro_daemon(LDA_WORKER_PREFIX, Worker(), random_suffix=True, ns_conf=ns_conf)
logger.info("finished running %s", " ".join(sys.argv))
if __name__ == '__main__':
main()
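# Illustrative launch (a sketch; assumes a Pyro4 nameserver and an LDA
# dispatcher are already running):
#   python -m gensim.models.lda_worker --host 127.0.0.1 --verbose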
|
import logging
import time
import threading as th
import multiprocessing as mp
from queue import Empty, Full
from ...stepper import StpdReader
logger = logging.getLogger(__name__)
class BFGBase(object):
"""
A BFG load generator that manages multiple workers as processes and
threads in each of them and feeds them with tasks
"""
def __init__(self, gun, instances, stpd_filename, cached_stpd=False,
green_threads_per_instance=None):
logger.info(
"""
BFG using stpd from {stpd_filename}
Instances: {instances}
Gun: {gun.__class__.__name__}
""".format(
stpd_filename=stpd_filename,
instances=instances,
gun=gun, ))
self.instances = int(instances)
self.instance_counter = mp.Value('i')
self.results = mp.Queue(16384)
self.gun = gun
self.gun.results = self.results
self.quit = mp.Event()
self.task_queue = mp.Queue(1024)
self.cached_stpd = cached_stpd
self.stpd_filename = stpd_filename
self.pool = [
mp.Process(target=self._worker) for _ in range(self.instances)
]
self.feeder = th.Thread(target=self._feed, name="Feeder")
self.feeder.daemon = True
self.workers_finished = False
self.start_time = None
self.plan = None
self.green_threads_per_instance = green_threads_per_instance
def start(self):
self.start_time = time.time()
for process in self.pool:
process.daemon = True
process.start()
self.feeder.start()
def running(self):
"""
        True while there are alive workers out there. Tank
        will quit when this becomes False.
"""
return not self.workers_finished
def stop(self):
"""
        Tell the workers to finish their jobs and quit.
"""
self.quit.set()
        while any(process.is_alive() for process in self.pool):
            time.sleep(1)
try:
while not self.task_queue.empty():
self.task_queue.get(timeout=0.1)
self.task_queue.close()
self.feeder.join()
except Exception as ex:
logger.info(ex)
def _feed(self):
"""
        A feeder that runs in a distinct thread in the main process.
"""
self.plan = StpdReader(self.stpd_filename)
if self.cached_stpd:
self.plan = list(self.plan)
for task in self.plan:
if self.quit.is_set():
logger.info("Stop feeding: gonna quit")
return
# try putting a task to a queue unless there is a quit flag
# or all workers have exited
while True:
try:
self.task_queue.put(task, timeout=1)
break
except Full:
if self.quit.is_set() or self.workers_finished:
return
else:
continue
        workers_count = self.instances
        logger.info("Fed all data. Publishing %d killer tasks", workers_count)
retry_delay = 1
for _ in range(5):
try:
                for _ in range(workers_count):
                    self.task_queue.put(None, timeout=1)
break
except Full:
logger.debug(
"Couldn't post killer tasks"
" because queue is full. Retrying in %ss", retry_delay)
time.sleep(retry_delay)
retry_delay *= 2
try:
logger.info("Waiting for workers")
for x in self.pool:
x.join()
logger.info("All workers exited.")
self.workers_finished = True
except (KeyboardInterrupt, SystemExit):
self.task_queue.close()
self.results.close()
self.quit.set()
logger.info("Going to quit. Waiting for workers")
for x in self.pool:
x.join()
self.workers_finished = True
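# Note: each task consumed by the workers below is a (timestamp, missile,
# marker) tuple read from the stpd file; the timestamp is in milliseconds
# relative to start_time, and a None task is the "killer" signal telling a
# worker to exit.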
class BFGMultiprocessing(BFGBase):
"""
Default worker type, creates process per worker,
every process executes requests synchronously inside.
"""
def _worker(self):
"""
A worker that does actual jobs
"""
logger.debug("Init shooter process")
try:
self.gun.setup()
except Exception:
logger.exception("Couldn't initialize gun. Exit shooter process")
return
while not self.quit.is_set():
try:
task = self.task_queue.get(timeout=1)
if not task:
logger.debug("Got killer task.")
break
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile.decode('utf8'), marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
if self.quit.is_set():
logger.debug("Empty queue. Exiting process")
return
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
try:
self.gun.teardown()
except Exception:
logger.exception("Couldn't finalize gun. Exit shooter process")
return
logger.debug("Exit shooter process")
class BFGGreen(BFGBase):
"""
    Green version of the worker. Starts `self.instances` processes;
    each process has a pool of `self.green_threads_per_instance` green threads.
"""
def _worker(self):
from gevent import monkey, spawn
from gevent.queue import Queue as GreenQueue
# NOTE: Patching everything will conflict with multiprocessing
monkey.patch_all(thread=False, select=False)
logger.debug("Init shooter process")
try:
self.gun.setup()
except Exception:
logger.exception("Couldn't initialize gun. Exit shooter process")
return
self.green_queue = GreenQueue(self.green_threads_per_instance)
self.green_pool = [spawn(self._green_worker) for _ in range(0, self.green_threads_per_instance)]
# Keep track of tasks sent to greenlets. If all greenlets are busy -
# don't pull more tasks from the main queue, let other workers do that.
self._free_threads_count = self.green_threads_per_instance
while not self.quit.is_set():
while not self.task_queue.empty() and self._free_threads_count:
try:
task = self.task_queue.get_nowait()
except Empty:
continue
self._free_threads_count -= 1
if not task:
logger.debug("Got killer task.")
self.quit.set()
break
self.green_queue.put(task)
time.sleep(0.1)
for g in self.green_pool:
g.join()
try:
self.gun.teardown()
except Exception:
logger.exception("Couldn't finalize gun. Exit shooter process")
return
logger.debug("Exit shooter process")
def _green_worker(self):
"""
A worker that does actual jobs
"""
while not self.quit.is_set():
try:
task = self.green_queue.get(timeout=1)
timestamp, missile, marker = task
planned_time = self.start_time + (timestamp / 1000.0)
delay = planned_time - time.time()
if delay > 0:
time.sleep(delay)
try:
with self.instance_counter.get_lock():
self.instance_counter.value += 1
self.gun.shoot(missile.decode('utf8'), marker)
finally:
with self.instance_counter.get_lock():
self.instance_counter.value -= 1
self._free_threads_count += 1
except (KeyboardInterrupt, SystemExit):
break
except Empty:
continue
except Full:
logger.warning("Couldn't put to result queue because it's full")
except Exception:
logger.exception("Bfg shoot exception")
|
from functools import partial
from ...utils import verbose
from ..utils import (has_dataset, _data_path, _data_path_doc,
_get_version, _version_doc)
has_multimodal_data = partial(has_dataset, name='multimodal')
@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='multimodal',
download=download)
data_path.__doc__ = _data_path_doc.format(name='multimodal',
conf='MNE_DATASETS_MULTIMODAL_PATH')
def get_version(): # noqa: D103
return _get_version('multimodal')
get_version.__doc__ = _version_doc.format(name='multimodal')
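# Typical usage (a sketch; fetches the dataset on first call):
#   from mne.datasets import multimodal
#   raw_dir = multimodal.data_path()  # local path to the multimodal dataset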
|
import pytest
import numpy as np
import os.path as op
from mne import create_info, EvokedArray, events_from_annotations, Epochs
from mne.channels import make_standard_montage
from mne.datasets.testing import data_path, _pytest_param
from mne.preprocessing.nirs import optical_density, beer_lambert_law
from mne.io import read_raw_nirx
@pytest.fixture()
def fnirs_evoked():
"""Create an fnirs evoked structure."""
montage = make_standard_montage('biosemi16')
ch_names = montage.ch_names
ch_types = ['eeg'] * 16
info = create_info(ch_names=ch_names, sfreq=20, ch_types=ch_types)
evoked_data = np.random.randn(16, 30)
evoked = EvokedArray(evoked_data, info=info, tmin=-0.2, nave=4)
evoked.set_montage(montage)
evoked.set_channel_types({'Fp1': 'hbo', 'Fp2': 'hbo', 'F4': 'hbo',
'Fz': 'hbo'}, verbose='error')
return evoked
@pytest.fixture(params=[_pytest_param()])
def fnirs_epochs():
"""Create an fnirs epoch structure."""
fname = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording_w_overlap')
raw_intensity = read_raw_nirx(fname, preload=False)
raw_od = optical_density(raw_intensity)
raw_haemo = beer_lambert_law(raw_od)
evts, _ = events_from_annotations(raw_haemo, event_id={'1.0': 1})
evts_dct = {'A': 1}
tn, tx = -1, 2
epochs = Epochs(raw_haemo, evts, event_id=evts_dct, tmin=tn, tmax=tx)
return epochs
|
import datetime
from django.test import SimpleTestCase, TestCase
from django.utils import timezone
from weblate.accounts.models import Profile
from weblate.lang.models import Language
from weblate.trans.models import Component, Project, Translation, Unit
from weblate.trans.templatetags.translations import get_location_links, naturaltime
TEST_DATA = (
(0, "now"),
(1, "a second from now"),
(-1, "a second ago"),
(2, "2 seconds from now"),
(-2, "2 seconds ago"),
(60, "a minute from now"),
(-60, "a minute ago"),
(120, "2 minutes from now"),
(-120, "2 minutes ago"),
(3600, "an hour from now"),
(-3600, "an hour ago"),
(3600 * 2, "2 hours from now"),
(-3600 * 2, "2 hours ago"),
(3600 * 24, "tomorrow"),
(-3600 * 24, "yesterday"),
(3600 * 24 * 2, "2 days from now"),
(-3600 * 24 * 2, "2 days ago"),
(3600 * 24 * 7, "a week from now"),
(-3600 * 24 * 7, "a week ago"),
(3600 * 24 * 14, "2 weeks from now"),
(-3600 * 24 * 14, "2 weeks ago"),
(3600 * 24 * 30, "a month from now"),
(-3600 * 24 * 30, "a month ago"),
(3600 * 24 * 60, "2 months from now"),
(-3600 * 24 * 60, "2 months ago"),
(3600 * 24 * 365, "a year from now"),
(-3600 * 24 * 365, "a year ago"),
(3600 * 24 * 365 * 2, "2 years from now"),
(-3600 * 24 * 365 * 2, "2 years ago"),
)
class NaturalTimeTest(SimpleTestCase):
"""Testing of natural time conversion."""
def test_natural(self):
now = timezone.now()
for diff, expected in TEST_DATA:
testdate = now + datetime.timedelta(seconds=diff)
result = naturaltime(testdate, now)
expected = '<span title="{}">{}</span>'.format(
testdate.replace(microsecond=0).isoformat(), expected
)
self.assertEqual(
expected,
result,
f'naturaltime({testdate}) "{result}" != "{expected}"',
)
class LocationLinksTest(TestCase):
def setUp(self):
self.unit = Unit(
translation=Translation(
component=Component(
project=Project(slug="p", name="p"),
source_language=Language(),
slug="c",
name="c",
),
language=Language(),
)
)
self.unit.source_unit = self.unit
self.profile = Profile()
def test_empty(self):
self.assertEqual(get_location_links(self.profile, self.unit), "")
def test_numeric(self):
self.unit.location = "123"
self.assertEqual(get_location_links(self.profile, self.unit), "string ID 123")
def test_filename(self):
self.unit.location = "f&oo.bar:123"
self.assertEqual(
get_location_links(self.profile, self.unit), "f&oo.bar:123"
)
def test_filenames(self):
self.unit.location = "foo.bar:123,bar.foo:321"
self.assertEqual(
get_location_links(self.profile, self.unit), "foo.bar:123\nbar.foo:321"
)
def test_repowebs(self):
self.unit.translation.component.repoweb = (
"http://example.net/{{filename}}#L{{line}}"
)
self.unit.location = "foo.bar:123,bar.foo:321"
self.assertHTMLEqual(
get_location_links(self.profile, self.unit),
"""
<a class="wrap-text"
href="http://example.net/foo.bar#L123" target="_blank"
dir="ltr" rel="noopener noreferrer">
foo.bar:123
</a>
<a class="wrap-text"
href="http://example.net/bar.foo#L321" target="_blank"
dir="ltr" rel="noopener noreferrer">
bar.foo:321
</a>
""",
)
def test_repoweb(self):
self.unit.translation.component.repoweb = (
"http://example.net/{{filename}}#L{{line}}"
)
self.unit.location = "foo.bar:123"
self.assertHTMLEqual(
get_location_links(self.profile, self.unit),
"""
<a class="wrap-text"
href="http://example.net/foo.bar#L123" target="_blank"
dir="ltr" rel="noopener noreferrer">
foo.bar:123
</a>
""",
)
def test_user_url(self):
self.unit.translation.component.repoweb = (
"http://example.net/{{filename}}#L{{line}}"
)
self.profile.editor_link = "editor://open/?file={{filename}}&line={{line}}"
self.unit.location = "foo.bar:123"
self.assertHTMLEqual(
get_location_links(self.profile, self.unit),
"""
<a class="wrap-text"
href="editor://open/?file=foo.bar&line=123" target="_blank"
dir="ltr" rel="noopener noreferrer">
foo.bar:123
</a>
""",
)
|
from Handler import Handler
import logging
import logging.handlers
class ArchiveHandler(Handler):
"""
Implements the Handler abstract class, archiving data to a log file
"""
def __init__(self, config):
"""
Create a new instance of the ArchiveHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
# Create Archive Logger
self.archive = logging.getLogger('archive')
self.archive.setLevel(logging.DEBUG)
self.archive.propagate = self.config['propagate']
# Create Archive Log Formatter
formatter = logging.Formatter('%(message)s')
# Create Archive Log Handler
handler = logging.handlers.TimedRotatingFileHandler(
filename=self.config['log_file'],
when=self.config['when'],
interval=int(self.config['rollover_interval']),
backupCount=int(self.config['days']),
encoding=self.config['encoding']
)
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
self.archive.addHandler(handler)
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(ArchiveHandler, self).get_default_config_help()
config.update({
'log_file': 'Path to the logfile',
'when': 'type of interval; S, M, H, D, Weekday, midnight',
'days': 'How many days to store',
'rollover_interval': 'rollover interval length',
            'encoding': 'Encoding to use for the log file',
'propagate': 'Pass handled metrics to configured root logger',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(ArchiveHandler, self).get_default_config()
config.update({
'log_file': '',
'when': 'midnight',
'days': 7,
'rollover_interval': 1,
'encoding': None,
'propagate': False,
})
return config
def process(self, metric):
"""
Send a Metric to the Archive.
"""
# Archive Metric
self.archive.info(str(metric).strip())
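# A minimal usage sketch (keys mirror get_default_config above; the path
# is illustrative, and the collector framework normally builds this
# config from its own configuration file rather than a literal dict):
#
#   handler = ArchiveHandler({
#       'log_file': '/var/log/diamond/archive.log',
#       'when': 'midnight',
#       'rollover_interval': 1,
#       'days': 7,
#       'encoding': None,
#       'propagate': False,
#   })
#   handler.process(metric)  # appends str(metric) to the rotating log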
|
from __future__ import division
import logging
import os
import copy
import multiprocessing
from shutil import copyfile, rmtree
from gensim import utils
from gensim.models.keyedvectors import KeyedVectors
from gensim.scripts.glove2word2vec import glove2word2vec
logger = logging.getLogger(__name__)
class Wordrank(KeyedVectors):
    """Python wrapper around the `Wordrank implementation <https://bitbucket.org/shihaoji/wordrank/>`_.
    Communication between Wordrank and Python takes place by working with data
    files on disk and calling the Wordrank binary and glove's helper binaries
    (for preparing training data) with the subprocess module.
    Warnings
    --------
    This is **only** a Python wrapper for the `Wordrank implementation <https://bitbucket.org/shihaoji/wordrank/>`_;
    you need to install the original implementation first and pass the path to the wordrank directory via ``wr_path``.
    """
@classmethod
def train(cls, wr_path, corpus_file, out_name, size=100, window=15, symmetric=1, min_count=5, max_vocab_size=0,
sgd_num=100, lrate=0.001, period=10, iter=90, epsilon=0.75, dump_period=10, reg=0, alpha=100,
beta=99, loss='hinge', memory=4.0, np=1, cleanup_files=False, sorted_vocab=1, ensemble=0):
"""Train model.
Parameters
----------
wr_path : str
Absolute path to the Wordrank directory.
corpus_file : str
            Path to the corpus file; expected format is space-separated tokens, one sentence per line.
out_name : str
Name of the directory which will be created (in wordrank folder) to save embeddings and training data:
* ``model_word_current_<iter>.txt`` - Word Embeddings saved after every dump_period.
* ``model_context_current_<iter>.txt`` - Context Embeddings saved after every dump_period.
* ``meta/vocab.txt`` - vocab file.
            * ``meta/wiki.toy`` - word-word co-occurrence values.
size : int, optional
Dimensionality of the feature vectors.
window : int, optional
Number of context words to the left (and to the right, if `symmetric = 1`).
symmetric : {0, 1}, optional
If 1 - using symmetric windows, if 0 - will use only left context words.
min_count : int, optional
Ignore all words with total frequency lower than `min_count`.
max_vocab_size : int, optional
Upper bound on vocabulary size, i.e. keep the <int> most frequent words. If 0 - no limit.
sgd_num : int, optional
            Number of SGD steps taken for each data point.
lrate : float, optional
            Learning rate (attention: too high a value diverges and gives NaN).
period : int, optional
Period of xi variable updates.
iter : int, optional
Number of iterations (epochs) over the corpus.
epsilon : float, optional
Power scaling value for weighting function.
dump_period : int, optional
Period after which embeddings should be dumped.
reg : int, optional
Value of regularization parameter.
alpha : int, optional
Alpha parameter of gamma distribution.
beta : int, optional
Beta parameter of gamma distribution.
loss : {"logistic", "hinge"}, optional
Name of the loss function.
memory : float, optional
Soft limit for memory consumption, in GB.
np : int, optional
            Number of processes to execute (mpirun option).
cleanup_files : bool, optional
If True, delete directory and files used by this wrapper.
sorted_vocab : {0, 1}, optional
If 1 - sort the vocabulary by descending frequency before assigning word indexes, otherwise - do nothing.
ensemble : {0, 1}, optional
If 1 - use ensemble of word and context vectors.
"""
# prepare training data (cooccurrence matrix and vocab)
model_dir = os.path.join(wr_path, out_name)
meta_dir = os.path.join(model_dir, 'meta')
os.makedirs(meta_dir)
logger.info("Dumped data will be stored in '%s'", model_dir)
copyfile(corpus_file, os.path.join(meta_dir, corpus_file.split('/')[-1]))
vocab_file = os.path.join(meta_dir, 'vocab.txt')
temp_vocab_file = os.path.join(meta_dir, 'tempvocab.txt')
cooccurrence_file = os.path.join(meta_dir, 'cooccurrence')
cooccurrence_shuf_file = os.path.join(meta_dir, 'wiki.toy')
meta_file = os.path.join(meta_dir, 'meta')
cmd_vocab_count = [
os.path.join(wr_path, 'glove', 'vocab_count'),
'-min-count', str(min_count), '-max-vocab', str(max_vocab_size)
]
        cmd_cooccurrence_count = [
            os.path.join(wr_path, 'glove', 'cooccur'), '-memory', str(memory),
            '-vocab-file', temp_vocab_file, '-window-size', str(window), '-symmetric', str(symmetric)
        ]
        cmd_shuffle_cooccurrences = [os.path.join(wr_path, 'glove', 'shuffle'), '-memory', str(memory)]
        cmd_del_vocab_freq = ['cut', '-d', " ", '-f', '1', temp_vocab_file]
        commands = [cmd_vocab_count, cmd_cooccurrence_count, cmd_shuffle_cooccurrences]
input_fnames = [
os.path.join(meta_dir, os.path.split(corpus_file)[-1]),
os.path.join(meta_dir, os.path.split(corpus_file)[-1]),
cooccurrence_file
]
output_fnames = [temp_vocab_file, cooccurrence_file, cooccurrence_shuf_file]
logger.info("Prepare training data (%s) using glove code", ", ".join(input_fnames))
for command, input_fname, output_fname in zip(commands, input_fnames, output_fnames):
with utils.open(input_fname, 'rb') as r:
with utils.open(output_fname, 'wb') as w:
utils.check_output(w, args=command, stdin=r)
logger.info("Deleting frequencies from vocab file")
with utils.open(vocab_file, 'wb') as w:
utils.check_output(w, args=cmd_del_vocab_freq)
with utils.open(vocab_file, 'rb') as f:
numwords = sum(1 for _ in f)
with utils.open(cooccurrence_shuf_file, 'rb') as f:
numlines = sum(1 for _ in f)
with utils.open(meta_file, 'wb') as f:
meta_info = "{0} {1}\n{2} {3}\n{4} {5}".format(
numwords, numwords, numlines, cooccurrence_shuf_file.split('/')[-1],
numwords, vocab_file.split('/')[-1]
)
f.write(meta_info.encode('utf-8'))
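        # Wordrank only writes embedding dumps at multiples of dump_period;
        # the bump below ensures a dump exists for the requested number of
        # iterations (see the max_iter_dump computation further down).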
if iter % dump_period == 0:
iter += 1
else:
logger.warning(
"Resultant embedding will be from %d iterations rather than the input %d iterations, "
"as wordrank dumps the embedding only at dump_period intervals. "
"Input an appropriate combination of parameters (iter, dump_period) "
"such that \"iter mod dump_period\" is zero.",
iter - (iter % dump_period), iter
)
wr_args = {
'path': meta_dir,
'nthread': multiprocessing.cpu_count(),
'sgd_num': sgd_num,
'lrate': lrate,
'period': period,
'iter': iter,
'epsilon': epsilon,
'dump_prefix': 'model',
'dump_period': dump_period,
'dim': size,
'reg': reg,
'alpha': alpha,
'beta': beta,
'loss': loss
}
# run wordrank executable with wr_args
cmd = ['mpirun', '-np', str(np), os.path.join(wr_path, 'wordrank')]
for option, value in wr_args.items():
cmd.append('--%s' % option)
cmd.append(str(value))
logger.info("Running wordrank binary")
utils.check_output(args=cmd)
# use embeddings from max. iteration's dump
max_iter_dump = iter - (iter % dump_period)
os.rename('model_word_%d.txt' % max_iter_dump, os.path.join(model_dir, 'wordrank.words'))
os.rename('model_context_%d.txt' % max_iter_dump, os.path.join(model_dir, 'wordrank.contexts'))
model = cls.load_wordrank_model(
os.path.join(model_dir, 'wordrank.words'), vocab_file,
os.path.join(model_dir, 'wordrank.contexts'), sorted_vocab, ensemble
)
if cleanup_files:
rmtree(model_dir)
return model
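    # Typical training sketch (paths are placeholders; requires a compiled
    # Wordrank checkout with its glove helper binaries under wr_path):
    #
    #   model = Wordrank.train(
    #       '/opt/wordrank', 'corpus.txt', 'model_dir',
    #       iter=90, dump_period=10)
    #   model.most_similar('king')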
@classmethod
def load_wordrank_model(cls, model_file, vocab_file=None, context_file=None, sorted_vocab=1, ensemble=1):
"""Load model from `model_file`.
Parameters
----------
model_file : str
Path to model in GloVe format.
vocab_file : str, optional
Path to file with vocabulary.
context_file : str, optional
Path to file with context-embedding in word2vec_format.
sorted_vocab : {0, 1}, optional
If 1 - sort the vocabulary by descending frequency before assigning word indexes, otherwise - do nothing.
ensemble : {0, 1}, optional
If 1 - use ensemble of word and context vectors.
"""
model = cls.load_word2vec_format(model_file, binary=False, no_header=True)
if ensemble and context_file:
model.ensemble_embedding(model_file, context_file)
if sorted_vocab and vocab_file:
model.sort_embeddings(vocab_file)
return model
def sort_embeddings(self, vocab_file):
"""Sort embeddings according to word frequency.
Parameters
----------
vocab_file : str
Path to file with vocabulary.
"""
counts = {}
vocab_size = len(self.vocab)
prev_syn0 = copy.deepcopy(self.syn0)
prev_vocab = copy.deepcopy(self.vocab)
self.index2word = []
# sort embeddings using frequency sorted vocab file in wordrank
with utils.open(vocab_file, 'rb') as fin:
for index, line in enumerate(fin):
                word, count = utils.to_unicode(line).strip(), vocab_size - index
                # store word with its count in a dict
counts[word] = int(count)
# build new index2word with frequency sorted words
self.index2word.append(word)
assert len(self.index2word) == vocab_size, 'mismatch between vocab sizes'
for word_id, word in enumerate(self.index2word):
self.syn0[word_id] = prev_syn0[prev_vocab[word].index]
self.vocab[word].index = word_id
self.vocab[word].count = counts[word]
def ensemble_embedding(self, word_embedding, context_embedding):
"""Replace current syn0 with the sum of context and word embeddings.
Parameters
----------
word_embedding : str
Path to word embeddings in GloVe format.
context_embedding : str
Path to context embeddings in word2vec_format.
Returns
-------
numpy.ndarray
Matrix with new embeddings.
"""
glove2word2vec(context_embedding, context_embedding + '.w2vformat')
w_emb = KeyedVectors.load_word2vec_format('%s.w2vformat' % word_embedding)
c_emb = KeyedVectors.load_word2vec_format('%s.w2vformat' % context_embedding)
# compare vocab words using keys of dict vocab
        assert set(w_emb.vocab) == set(c_emb.vocab), 'Vocabs are not the same for both embeddings'
# sort context embedding to have words in same order as word embedding
prev_c_emb = copy.deepcopy(c_emb.syn0)
for word_id, word in enumerate(w_emb.index2word):
c_emb.syn0[word_id] = prev_c_emb[c_emb.vocab[word].index]
# add vectors of the two embeddings
new_emb = w_emb.syn0 + c_emb.syn0
self.syn0 = new_emb
return new_emb
|
from __future__ import division
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainercv.transforms import resize
from chainercv import utils
class SegNetBasic(chainer.Chain):
"""SegNet Basic for semantic segmentation.
    This is a SegNet [#]_ model for semantic segmentation. This is based on
    the SegNetBasic model found here_.
When you specify the path of a pretrained chainer model serialized as
a :obj:`.npz` file in the constructor, this chain model automatically
initializes all the parameters with it.
When a string in prespecified set is provided, a pretrained model is
loaded from weights distributed on the Internet.
The list of pretrained models supported are as follows:
* :obj:`camvid`: Loads weights trained with the train split of \
CamVid dataset.
.. [#] Vijay Badrinarayanan, Alex Kendall and Roberto Cipolla "SegNet: A \
Deep Convolutional Encoder-Decoder Architecture for Image Segmentation." \
PAMI, 2017
.. _here: http://github.com/alexgkendall/SegNet-Tutorial
Args:
        n_class (int): The number of classes. If :obj:`None`, it can
            be inferred if :obj:`pretrained_model` is given.
pretrained_model (string): The destination of the pretrained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
initialW (callable): Initializer for convolution layers.
"""
_models = {
'camvid': {
'param': {'n_class': 11},
'url': 'https://chainercv-models.preferred.jp/'
'segnet_camvid_trained_2018_12_05.npz'
}
}
def __init__(self, n_class=None, pretrained_model=None, initialW=None):
param, path = utils.prepare_pretrained_model(
{'n_class': n_class}, pretrained_model, self._models)
self.n_class = param['n_class']
if initialW is None:
initialW = chainer.initializers.HeNormal()
super(SegNetBasic, self).__init__()
with self.init_scope():
self.conv1 = L.Convolution2D(
None, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv1_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv2 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv2_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv3 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv3_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv4 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv4_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv_decode4 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv_decode4_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv_decode3 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv_decode3_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv_decode2 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv_decode2_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv_decode1 = L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=initialW)
self.conv_decode1_bn = L.BatchNormalization(64, initial_beta=0.001)
self.conv_classifier = L.Convolution2D(
64, self.n_class, 1, 1, 0, initialW=initialW)
if path:
chainer.serializers.load_npz(path, self)
def _upsampling_2d(self, x, indices):
if x.shape != indices.shape:
min_h = min(x.shape[2], indices.shape[2])
min_w = min(x.shape[3], indices.shape[3])
x = x[:, :, :min_h, :min_w]
indices = indices[:, :, :min_h, :min_w]
outsize = (x.shape[2] * 2, x.shape[3] * 2)
return F.upsampling_2d(x, indices, ksize=2, stride=2, outsize=outsize)
def forward(self, x):
"""Compute an image-wise score from a batch of images
Args:
x (chainer.Variable): A variable with 4D image array.
Returns:
chainer.Variable:
An image-wise score. Its channel size is :obj:`self.n_class`.
"""
h = F.local_response_normalization(x, 5, 1, 1e-4 / 5., 0.75)
h, indices1 = F.max_pooling_2d(
F.relu(self.conv1_bn(self.conv1(h))), 2, 2, return_indices=True)
h, indices2 = F.max_pooling_2d(
F.relu(self.conv2_bn(self.conv2(h))), 2, 2, return_indices=True)
h, indices3 = F.max_pooling_2d(
F.relu(self.conv3_bn(self.conv3(h))), 2, 2, return_indices=True)
h, indices4 = F.max_pooling_2d(
F.relu(self.conv4_bn(self.conv4(h))), 2, 2, return_indices=True)
h = self._upsampling_2d(h, indices4)
h = self.conv_decode4_bn(self.conv_decode4(h))
h = self._upsampling_2d(h, indices3)
h = self.conv_decode3_bn(self.conv_decode3(h))
h = self._upsampling_2d(h, indices2)
h = self.conv_decode2_bn(self.conv_decode2(h))
h = self._upsampling_2d(h, indices1)
h = self.conv_decode1_bn(self.conv_decode1(h))
score = self.conv_classifier(h)
return score
def predict(self, imgs):
"""Conduct semantic segmentations from images.
Args:
            imgs (iterable of numpy.ndarray): Arrays holding images.
                All images are in CHW and RGB format
                and the range of their values is :math:`[0, 255]`.
Returns:
list of numpy.ndarray:
List of integer labels predicted from each image in the input \
list.
"""
labels = []
for img in imgs:
C, H, W = img.shape
with chainer.using_config('train', False), \
chainer.function.no_backprop_mode():
x = chainer.Variable(self.xp.asarray(img[np.newaxis]))
score = self.forward(x)[0].array
score = chainer.backends.cuda.to_cpu(score)
if score.shape != (C, H, W):
dtype = score.dtype
score = resize(score, (H, W)).astype(dtype)
label = np.argmax(score, axis=0).astype(np.int32)
labels.append(label)
return labels
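# Inference sketch (the image path is a placeholder; passing
# pretrained_model='camvid' downloads the weights on first use):
#
#   from chainercv.utils import read_image
#   model = SegNetBasic(pretrained_model='camvid')
#   img = read_image('example.jpg')   # CHW, RGB, values in [0, 255]
#   label = model.predict([img])[0]   # (H, W) int32 label map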
|
import unittest
from credstash import expand_wildcard
class TestExpandingWildcard(unittest.TestCase):
secrets_set = ["a", "b", "ab", " a", " b",
"ba", "abc", "a[anyvalue]z", "a b", "aabb"]
secrets_set2 = ["QQQ", "QVQQ", "QVQVQ",
"QQ", "Q", "QQVQ", "QrEQrE", "QErQE"]
def test_start_regex(self):
self.assertEqual(expand_wildcard("a", self.secrets_set), ["a"])
def test_end_regex(self):
self.assertEqual(expand_wildcard("ba", self.secrets_set), ["ba"])
def test_exact_match_regex(self):
self.assertEqual(expand_wildcard("abc", self.secrets_set), ["abc"])
def test_one_wild_card_with_one_match(self):
self.assertEqual(expand_wildcard(
"a*z", self.secrets_set), ["a[anyvalue]z"])
def test_one_wild_card_with_many_matches(self):
self.assertEqual(expand_wildcard(
"a*b", self.secrets_set), ["ab", "a b", "aabb"])
def test_two_wild_cards_with_many_matches(self):
self.assertEqual(expand_wildcard(
"Q*Q*Q", self.secrets_set2), ["QQQ", "QVQQ", "QVQVQ", "QQVQ"])
def test_three_wild_card_with_many_matches(self):
self.assertEqual(expand_wildcard(
"Q*E*Q*E", self.secrets_set2), ["QrEQrE", "QErQE"])
|
from Handler import Handler
import logging
from collections import deque
try:
import dogapi
except ImportError:
dogapi = None
class DatadogHandler(Handler):
def __init__(self, config=None):
"""
New instance of DatadogHandler class
"""
Handler.__init__(self, config)
logging.debug("Initialized Datadog handler.")
if dogapi is None:
logging.error("Failed to load dogapi module.")
return
self.api = dogapi.dog_http_api
self.api.api_key = self.config.get('api_key', '')
self.queue_size = self.config.get('queue_size', 1)
self.queue = deque([])
def get_default_config_help(self):
"""
Help text
"""
config = super(DatadogHandler, self).get_default_config_help()
config.update({
'api_key': 'Datadog API key',
'queue_size': 'Number of metrics to queue before send',
})
return config
def get_default_config(self):
"""
Return default config for the handler
"""
config = super(DatadogHandler, self).get_default_config()
config.update({
'api_key': '',
'queue_size': '',
})
return config
def process(self, metric):
"""
Process metric by sending it to datadog api
"""
self.queue.append(metric)
if len(self.queue) >= self.queue_size:
self._send()
def flush(self):
"""
Flush metrics
"""
self._send()
def _send(self):
"""
Take metrics from queue and send it to Datadog API
"""
while len(self.queue) > 0:
metric = self.queue.popleft()
path = '%s.%s.%s' % (
metric.getPathPrefix(),
metric.getCollectorPath(),
metric.getMetricPath()
)
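            # str(metric) renders roughly as "<path> <value> <timestamp>";
            # the first token is ignored in favour of the dotted path
            # assembled above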
topic, value, timestamp = str(metric).split()
logging.debug(
"Sending.. topic[%s], value[%s], timestamp[%s]",
path,
value,
timestamp
)
self.api.metric(path, (timestamp, value), host=metric.host)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
import time
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.aws import util
# https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_version.html
_POLICY_VERSION = '2012-10-17'
_ROLE_ARN_TEMPLATE = 'arn:aws:iam::{account}:role/{role_name}'
_POLICY_ARN_TEMPLATE = 'arn:aws:iam::{account}:policy/{policy_name}'
_TRUST_RELATIONSHIP_FILE = 'service-trust-relationship.json'
_ROLE_POLICY_FILE = 'service-role-policy.json'
_ROLE_CREATION_DELAY = 30
_TRUST_RELATIONSHIP_TEMPLATE = """{{
"Version": "{version}",
"Statement": [
{{
"Effect": "Allow",
"Principal": {{
"Service": "{service}"
}},
"Action": "sts:AssumeRole"
}}
]
}}"""
_ROLE_POLICY_TEMPLATE = """{{
"Version": "{version}",
"Statement": [
{{
"Action": [
"{action}"
],
"Effect": "Allow",
"Resource": [
"{resource_arn}"
]
}}
]
}}"""
class AwsIamRole(resource.BaseResource):
"""Class representing an AWS IAM role."""
def __init__(self,
account,
role_name,
policy_name,
service,
action,
resource_arn,
policy_version=None):
super(AwsIamRole, self).__init__()
self.account = account
self.role_name = role_name
self.policy_name = policy_name
self.service = service
self.action = action
self.resource_arn = resource_arn
self.policy_version = policy_version or _POLICY_VERSION
self.role_arn = _ROLE_ARN_TEMPLATE.format(
account=self.account, role_name=self.role_name)
self.policy_arn = _POLICY_ARN_TEMPLATE.format(
account=self.account, policy_name=self.policy_name)
def _Create(self):
"""See base class."""
if not self._RoleExists():
with open(_TRUST_RELATIONSHIP_FILE, 'w+') as relationship_file:
relationship_file.write(
_TRUST_RELATIONSHIP_TEMPLATE.format(
version=self.policy_version, service=self.service))
cmd = util.AWS_PREFIX + [
'iam', 'create-role', '--role-name', self.role_name,
'--assume-role-policy-document',
'file://{}'.format(_TRUST_RELATIONSHIP_FILE)
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=True)
if retcode != 0:
        logging.warning('Failed to create role! %s', stderror)
if not self._PolicyExists():
with open(_ROLE_POLICY_FILE, 'w+') as policy_file:
policy_file.write(
_ROLE_POLICY_TEMPLATE.format(
version=self.policy_version,
action=self.action,
resource_arn=self.resource_arn))
cmd = util.AWS_PREFIX + [
'iam', 'create-policy', '--policy-name', 'PolicyFor' + self.role_name,
'--policy-document', 'file://{}'.format(_ROLE_POLICY_FILE)
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=True)
if retcode != 0:
        logging.warning('Failed to create policy! %s', stderror)
cmd = util.AWS_PREFIX + [
'iam', 'attach-role-policy', '--role-name', self.role_name,
'--policy-arn', self.policy_arn
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=True)
if retcode != 0:
      logging.warning('Failed to attach role policy! %s', stderror)
# Make sure the role is available for the downstream users (e.g., DAX).
# Without this, the step of creating DAX cluster may fail.
# TODO(user): use a more robust way to handle this.
time.sleep(_ROLE_CREATION_DELAY)
def _Delete(self):
"""See base class."""
cmd = util.AWS_PREFIX + [
'iam', 'detach-role-policy', '--role-name', self.role_name,
'--policy-arn', self.policy_arn
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode != 0:
      logging.warning('Failed to delete role policy! %s', stderror)
cmd = util.AWS_PREFIX + [
'iam', 'delete-policy', '--policy-arn', self.policy_arn
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode != 0:
      logging.warning('Failed to delete policy! %s', stderror)
cmd = util.AWS_PREFIX + [
'iam', 'delete-role', '--role-name', self.role_name
]
_, stderror, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode != 0:
      logging.warning('Failed to delete role! %s', stderror)
def GetRoleArn(self):
"""Returns the role's Amazon Resource Name (ARN)."""
return self.role_arn
def _RoleExists(self):
"""Returns true if the IAM role exists."""
cmd = util.AWS_PREFIX + ['iam', 'get-role', '--role-name', self.role_name]
stdout, _, retcode = vm_util.IssueCommand(
cmd, suppress_warning=True, raise_on_failure=False)
return retcode == 0 and stdout and json.loads(stdout)['Role']
def _PolicyExists(self):
"""Returns true if the IAM policy used by the role exists."""
cmd = util.AWS_PREFIX + [
'iam', 'get-policy', '--policy-arn', self.policy_arn
]
stdout, _, retcode = vm_util.IssueCommand(
cmd, suppress_warning=True, raise_on_failure=False)
return retcode == 0 and stdout and json.loads(stdout)['Policy']
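# Lifecycle sketch (account id and names are placeholders; Create and
# Delete are inherited from resource.BaseResource and invoke the
# _Create/_Delete hooks above):
#
#   role = AwsIamRole(
#       account='123456789012', role_name='DaxServiceRole',
#       policy_name='DaxServicePolicy', service='dax.amazonaws.com',
#       action='dynamodb:*', resource_arn='arn:aws:dynamodb:*:*:table/*')
#   role.Create()
#   arn = role.GetRoleArn()
#   role.Delete()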
|
import uuid
from collections import OrderedDict
from functools import lru_cache, partial
from html import escape
import pkg_resources
from .formatting import inline_variable_array_repr, short_data_repr
STATIC_FILES = ("static/html/icons-svg-inline.html", "static/css/style.css")
@lru_cache(None)
def _load_static_files():
"""Lazily load the resource files into memory the first time they are needed"""
return [
pkg_resources.resource_string("xarray", fname).decode("utf8")
for fname in STATIC_FILES
]
def short_data_repr_html(array):
"""Format "data" for DataArray and Variable."""
internal_data = getattr(array, "variable", array)._data
if hasattr(internal_data, "_repr_html_"):
return internal_data._repr_html_()
else:
text = escape(short_data_repr(array))
return f"<pre>{text}</pre>"
def format_dims(dims, coord_names):
if not dims:
return ""
dim_css_map = {
k: " class='xr-has-index'" if k in coord_names else "" for k, v in dims.items()
}
dims_li = "".join(
f"<li><span{dim_css_map[dim]}>" f"{escape(dim)}</span>: {size}</li>"
for dim, size in dims.items()
)
return f"<ul class='xr-dim-list'>{dims_li}</ul>"
def summarize_attrs(attrs):
attrs_dl = "".join(
f"<dt><span>{escape(k)} :</span></dt>" f"<dd>{escape(str(v))}</dd>"
for k, v in attrs.items()
)
return f"<dl class='xr-attrs'>{attrs_dl}</dl>"
def _icon(icon_name):
    # icon_name should be defined in xarray/static/html/icons-svg-inline.html
return (
"<svg class='icon xr-{0}'>"
"<use xlink:href='#{0}'>"
"</use>"
"</svg>".format(icon_name)
)
def _summarize_coord_multiindex(name, coord):
preview = f"({', '.join(escape(l) for l in coord.level_names)})"
return summarize_variable(
name, coord, is_index=True, dtype="MultiIndex", preview=preview
)
def summarize_coord(name, var):
is_index = name in var.dims
if is_index:
coord = var.variable.to_index_variable()
if coord.level_names is not None:
coords = {}
coords[name] = _summarize_coord_multiindex(name, coord)
for lname in coord.level_names:
var = coord.get_level_variable(lname)
coords[lname] = summarize_variable(lname, var)
return coords
return {name: summarize_variable(name, var, is_index)}
def summarize_coords(variables):
coords = {}
for k, v in variables.items():
coords.update(**summarize_coord(k, v))
vars_li = "".join(f"<li class='xr-var-item'>{v}</li>" for v in coords.values())
return f"<ul class='xr-var-list'>{vars_li}</ul>"
def summarize_variable(name, var, is_index=False, dtype=None, preview=None):
variable = var.variable if hasattr(var, "variable") else var
cssclass_idx = " class='xr-has-index'" if is_index else ""
dims_str = f"({', '.join(escape(dim) for dim in var.dims)})"
name = escape(str(name))
dtype = dtype or escape(str(var.dtype))
# "unique" ids required to expand/collapse subsections
attrs_id = "attrs-" + str(uuid.uuid4())
data_id = "data-" + str(uuid.uuid4())
disabled = "" if len(var.attrs) else "disabled"
preview = preview or escape(inline_variable_array_repr(variable, 35))
attrs_ul = summarize_attrs(var.attrs)
data_repr = short_data_repr_html(variable)
attrs_icon = _icon("icon-file-text2")
data_icon = _icon("icon-database")
return (
f"<div class='xr-var-name'><span{cssclass_idx}>{name}</span></div>"
f"<div class='xr-var-dims'>{dims_str}</div>"
f"<div class='xr-var-dtype'>{dtype}</div>"
f"<div class='xr-var-preview xr-preview'>{preview}</div>"
f"<input id='{attrs_id}' class='xr-var-attrs-in' "
f"type='checkbox' {disabled}>"
f"<label for='{attrs_id}' title='Show/Hide attributes'>"
f"{attrs_icon}</label>"
f"<input id='{data_id}' class='xr-var-data-in' type='checkbox'>"
f"<label for='{data_id}' title='Show/Hide data repr'>"
f"{data_icon}</label>"
f"<div class='xr-var-attrs'>{attrs_ul}</div>"
f"<div class='xr-var-data'>{data_repr}</div>"
)
def summarize_vars(variables):
vars_li = "".join(
f"<li class='xr-var-item'>{summarize_variable(k, v)}</li>"
for k, v in variables.items()
)
return f"<ul class='xr-var-list'>{vars_li}</ul>"
def collapsible_section(
name, inline_details="", details="", n_items=None, enabled=True, collapsed=False
):
# "unique" id to expand/collapse the section
data_id = "section-" + str(uuid.uuid4())
has_items = n_items is not None and n_items
n_items_span = "" if n_items is None else f" <span>({n_items})</span>"
enabled = "" if enabled and has_items else "disabled"
collapsed = "" if collapsed or not has_items else "checked"
tip = " title='Expand/collapse section'" if enabled else ""
return (
f"<input id='{data_id}' class='xr-section-summary-in' "
f"type='checkbox' {enabled} {collapsed}>"
f"<label for='{data_id}' class='xr-section-summary' {tip}>"
f"{name}:{n_items_span}</label>"
f"<div class='xr-section-inline-details'>{inline_details}</div>"
f"<div class='xr-section-details'>{details}</div>"
)
def _mapping_section(mapping, name, details_func, max_items_collapse, enabled=True):
n_items = len(mapping)
collapsed = n_items >= max_items_collapse
return collapsible_section(
name,
details=details_func(mapping),
n_items=n_items,
enabled=enabled,
collapsed=collapsed,
)
def dim_section(obj):
dim_list = format_dims(obj.dims, list(obj.coords))
return collapsible_section(
"Dimensions", inline_details=dim_list, enabled=False, collapsed=True
)
def array_section(obj):
# "unique" id to expand/collapse the section
data_id = "section-" + str(uuid.uuid4())
collapsed = "checked"
variable = getattr(obj, "variable", obj)
preview = escape(inline_variable_array_repr(variable, max_width=70))
data_repr = short_data_repr_html(obj)
data_icon = _icon("icon-database")
return (
"<div class='xr-array-wrap'>"
f"<input id='{data_id}' class='xr-array-in' type='checkbox' {collapsed}>"
f"<label for='{data_id}' title='Show/hide data repr'>{data_icon}</label>"
f"<div class='xr-array-preview xr-preview'><span>{preview}</span></div>"
f"<div class='xr-array-data'>{data_repr}</div>"
"</div>"
)
coord_section = partial(
_mapping_section,
name="Coordinates",
details_func=summarize_coords,
max_items_collapse=25,
)
datavar_section = partial(
_mapping_section,
name="Data variables",
details_func=summarize_vars,
max_items_collapse=15,
)
attr_section = partial(
_mapping_section,
name="Attributes",
details_func=summarize_attrs,
max_items_collapse=10,
)
def _obj_repr(obj, header_components, sections):
"""Return HTML repr of an xarray object.
If CSS is not injected (untrusted notebook), fallback to the plain text repr.
"""
header = f"<div class='xr-header'>{''.join(h for h in header_components)}</div>"
sections = "".join(f"<li class='xr-section-item'>{s}</li>" for s in sections)
icons_svg, css_style = _load_static_files()
return (
"<div>"
f"{icons_svg}<style>{css_style}</style>"
f"<pre class='xr-text-repr-fallback'>{escape(repr(obj))}</pre>"
"<div class='xr-wrap' hidden>"
f"{header}"
f"<ul class='xr-sections'>{sections}</ul>"
"</div>"
"</div>"
)
def array_repr(arr):
dims = OrderedDict((k, v) for k, v in zip(arr.dims, arr.shape))
obj_type = "xarray.{}".format(type(arr).__name__)
arr_name = f"'{arr.name}'" if getattr(arr, "name", None) else ""
coord_names = list(arr.coords) if hasattr(arr, "coords") else []
header_components = [
f"<div class='xr-obj-type'>{obj_type}</div>",
f"<div class='xr-array-name'>{arr_name}</div>",
format_dims(dims, coord_names),
]
sections = [array_section(arr)]
if hasattr(arr, "coords"):
sections.append(coord_section(arr.coords))
sections.append(attr_section(arr.attrs))
return _obj_repr(arr, header_components, sections)
def dataset_repr(ds):
obj_type = "xarray.{}".format(type(ds).__name__)
header_components = [f"<div class='xr-obj-type'>{escape(obj_type)}</div>"]
sections = [
dim_section(ds),
coord_section(ds.coords),
datavar_section(ds.data_vars),
attr_section(ds.attrs),
]
return _obj_repr(ds, header_components, sections)
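# These reprs are what xarray wires into Dataset._repr_html_ and
# DataArray._repr_html_; a rough standalone sketch:
#
#   import xarray as xr
#   ds = xr.Dataset({"t": ("x", [1, 2, 3])})
#   html = dataset_repr(ds)  # full, self-contained HTML string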
|
import os
import re
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert re.search(r'instance-[12]', host.check_output('hostname -s'))
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import glob
import logging
import os
import shutil
import stat
import time
from configparser import RawConfigParser, MissingSectionHeaderError
from multiprocessing import Event as ProcessEvent
from threading import Event as ThreadEvent
import yaml
from pkg_resources import resource_filename
from yandextank.common.interfaces import TankInfo
from yandextank.common.util import read_resource, TankapiLogFilter
from yandextank.config_converter.converter import convert_ini, convert_single_option
from yandextank.core import TankCore
from yandextank.core.tankcore import LockError, Lock
from yandextank.validator.validator import ValidationError
logger = logging.getLogger()
class TankWorker():
SECTION = 'core'
FINISH_FILENAME = 'finish_status.yaml'
DEFAULT_CONFIG = 'load.yaml'
def __init__(self, configs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False,
log_handlers=None, wait_lock=False, files=None, ammo_file=None, api_start=False, manager=None,
debug=False):
self.api_start = api_start
self.wait_lock = wait_lock
self.log_handlers = log_handlers if log_handlers is not None else []
self.files = [] if files is None else files
self.ammo_file = ammo_file
self.config_paths = configs
self.interrupted = ProcessEvent() if api_start else ThreadEvent()
self.info = TankInfo(manager.dict()) if api_start else TankInfo(dict())
self.config_list = self._combine_configs(configs, cli_options, cfg_patches, cli_args, no_local)
self.core = TankCore(self.config_list, self.interrupted, self.info)
self.folder = self.init_folder()
self.init_logging(debug or self.core.get_option(self.core.SECTION, 'debug'))
is_locked = Lock.is_locked(self.core.lock_dir)
if is_locked and not self.core.config.get_option(self.SECTION, 'ignore_lock'):
raise LockError(is_locked)
@staticmethod
def _combine_configs(run_cfgs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False):
if cli_options is None:
cli_options = []
if cfg_patches is None:
cfg_patches = []
if cli_args is None:
cli_args = []
run_cfgs = run_cfgs if len(run_cfgs) > 0 else [TankWorker.DEFAULT_CONFIG]
if no_local:
configs = [load_cfg(cfg) for cfg in run_cfgs] + \
parse_options(cli_options) + \
parse_and_check_patches(cfg_patches) + \
cli_args
else:
configs = [load_core_base_cfg()] + \
load_local_base_cfgs() + \
[load_cfg(cfg) for cfg in run_cfgs] + \
parse_options(cli_options) + \
parse_and_check_patches(cfg_patches) + \
cli_args
return configs
def init_folder(self):
folder = self.core.artifacts_dir
        if self.api_start:
for cfg in self.config_paths:
shutil.move(cfg, folder)
for f in self.files:
shutil.move(f, folder)
if self.ammo_file:
shutil.move(self.ammo_file, folder)
os.chdir(folder)
return folder
def stop(self):
self.interrupted.set()
logger.warning('Interrupting')
def get_status(self):
return {'status_code': self.status.decode('utf8'),
'left_time': None,
'exit_code': self.retcode,
'lunapark_id': self.get_info('uploader', 'job_no'),
'tank_msg': self.msg,
'lunapark_url': self.get_info('uploader', 'web_link'),
'luna_id': self.get_info('neuploader', 'job_no'),
'luna_url': self.get_info('neuploader', 'web_link')}
def save_finish_status(self):
with open(os.path.join(self.folder, self.FINISH_FILENAME), 'w') as f:
yaml.safe_dump(self.get_status(), f, encoding='utf-8', allow_unicode=True)
def get_info(self, section_name, key_name):
return self.info.get_value([section_name, key_name])
def init_logging(self, debug=False):
filename = os.path.join(self.core.artifacts_dir, 'tank.log')
open(filename, 'a').close()
current_file_mode = os.stat(filename).st_mode
os.chmod(filename, current_file_mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
logger.handlers = []
logger.setLevel(logging.DEBUG if debug else logging.INFO)
file_handler = logging.FileHandler(filename)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d\t%(message)s"))
file_handler.addFilter(TankapiLogFilter())
logger.addHandler(file_handler)
logger.info("Log file created")
for handler in self.log_handlers:
logger.addHandler(handler)
logger.info("Logging handler {} added".format(handler))
def get_lock(self):
while not self.interrupted.is_set():
try:
lock = Lock(self.test_id, self.folder).acquire(self.core.lock_dir,
self.core.config.get_option(self.SECTION, 'ignore_lock'))
break
except LockError as e:
self.upd_msg(e.message)
if not self.wait_lock:
raise RuntimeError("Lock file present, cannot continue")
logger.warning(
"Couldn't get lock. Will retry in 5 seconds...")
time.sleep(5)
else:
raise KeyboardInterrupt
return lock
def upd_msg(self, msg):
if msg:
self.msg = self.msg + '\n' + msg
def load_cfg(cfg_filename):
"""
:type cfg_filename: str
"""
if is_ini(cfg_filename):
return convert_ini(cfg_filename)
else:
return yaml.load(read_resource(cfg_filename), Loader=yaml.FullLoader)
def load_core_base_cfg():
return load_cfg(resource_filename(__name__, 'config/00-base.yaml'))
def load_local_base_cfgs():
return cfg_folder_loader('/etc/yandex-tank')
def parse_options(options):
"""
:type options: list of str
:rtype: list of dict
"""
if options is None:
return []
else:
return [
convert_single_option(key.strip(), value.strip())
for key, value
in [option.split('=', 1) for option in options]
]
def parse_and_check_patches(patches):
parsed = [yaml.load(p, Loader=yaml.FullLoader) for p in patches]
for patch in parsed:
if not isinstance(patch, dict):
raise ValidationError('Config patch "{}" should be a dict'.format(patch))
return parsed
def cfg_folder_loader(path):
"""
:type path: str
"""
CFG_WILDCARD = '*.yaml'
return [load_cfg(filename) for filename in sorted(glob.glob(os.path.join(path, CFG_WILDCARD)))]
def is_ini(cfg_file):
if cfg_file.endswith('.yaml') or cfg_file.endswith('.json'):
return False
try:
RawConfigParser().read(cfg_file)
return True
except MissingSectionHeaderError:
return False
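# Sketch of how the config helpers compose for a local run (file names
# are placeholders; option strings follow the "section.key=value" form
# expected by convert_single_option):
#
#   configs = TankWorker._combine_configs(
#       ['load.yaml'], cli_options=['phantom.address=localhost'])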
|
import mock
from paasta_tools import paasta_maintenance
@mock.patch("paasta_tools.mesos_maintenance.is_host_drained", autospec=True)
@mock.patch(
"paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start", autospec=True
)
def test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained):
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = []
assert not paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = []
assert paasta_maintenance.is_safe_to_kill("blah")
@mock.patch("paasta_tools.paasta_maintenance.is_hostname_local", autospec=True)
def test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,):
mock_is_hostname_local.return_value = False
assert paasta_maintenance.is_safe_to_drain("non-localhost") is False
@mock.patch("paasta_tools.paasta_maintenance.getfqdn", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.gethostname", autospec=True)
def test_is_hostname_local_works(mock_gethostname, mock_getfqdn):
mock_gethostname.return_value = "foo"
mock_getfqdn.return_value = "foo.bar"
assert paasta_maintenance.is_hostname_local("localhost") is True
assert paasta_maintenance.is_hostname_local("foo") is True
assert paasta_maintenance.is_hostname_local("foo.bar") is True
assert paasta_maintenance.is_hostname_local("something_different") is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
def test_are_local_tasks_in_danger_fails_safe_with_false(
mock_load_system_paasta_config,
):
"""If something unexpected happens that we don't know how to
interpret, we make sure that we fail with "False" so that processes
move on and don't deadlock. In general the answer to "is it safe to drain"
is "yes" if mesos can't be reached, etc"""
mock_load_system_paasta_config.side_effect = Exception
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
def test_are_local_tasks_in_danger_is_false_with_nothing_running(
mock_marathon_services_running_here, mock_load_system_paasta_config
):
mock_marathon_services_running_here.return_value = []
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
def test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service(
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = False
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is False
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.synapse_replication_is_low", autospec=True)
def test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger(
mock_synapse_replication_is_low,
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = True
mock_synapse_replication_is_low.return_value = True
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is True
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
assert mock_synapse_replication_is_low.call_count == 1
@mock.patch(
"paasta_tools.paasta_maintenance.load_marathon_service_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.load_smartstack_info_for_service", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace",
autospec=True,
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_replication_for_services", autospec=True
)
def test_synapse_replication_is_low_understands_underreplicated_services(
mock_get_replication_for_services,
mock_get_expected_instance_count_for_namespace,
mock_load_smartstack_info_for_service,
mock_load_marathon_service_config,
):
mock_load_marathon_service_config.return_value.get_registrations.return_value = (
"service.main"
)
mock_get_expected_instance_count_for_namespace.return_value = 3
mock_load_smartstack_info_for_service.return_value = {
"local_region": {"service.main": "up"}
}
mock_get_replication_for_services.return_value = {"service.main": 1}
local_backends = ["foo"]
system_paasta_config = mock.MagicMock()
assert (
paasta_maintenance.synapse_replication_is_low(
service="service",
instance="instance",
system_paasta_config=system_paasta_config,
local_backends=local_backends,
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_healthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{"status": "UP", "pxname": "service.main", "svname": "192.0.2.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{"status": "DOWN", "pxname": "service.main", "svname": "192.0.2.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{
"status": "DOWN",
"pxname": "service.main",
"svname": "192.0.2.4:666_otherhostname",
}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
|
import mne
def run():
"""Run command."""
from mne.commands.utils import get_optparser
parser = get_optparser(__file__, usage='usage: %prog fname [fname2 ...]')
options, args = parser.parse_args()
for arg in args:
print(mne.what(arg))
mne.utils.run_command_if_main()
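# Invoked from the command line, e.g. "mne what sample_audvis_raw.fif",
# which prints the inferred contents type of each file given.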
|
from decimal import Decimal
from django.core.exceptions import ValidationError
from django import forms
from django.db import models
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from shop.conf import app_settings
from shop.money.iso4217 import CURRENCIES
from shop.money.money_maker import MoneyMaker, AbstractMoney
class MoneyFieldWidget(forms.widgets.NumberInput):
"""
Replacement for NumberInput widget adding the currency suffix.
"""
def __init__(self, attrs=None):
defaults = {'style': 'width: 75px; text-align: right'}
try:
self.currency_code = attrs.pop('currency_code')
defaults.update(attrs)
except (KeyError, TypeError):
raise ValueError("MoneyFieldWidget must be instantiated with a currency_code.")
super().__init__(defaults)
def render(self, name, value, attrs=None, renderer=None):
input_field = super().render(name, value, attrs, renderer)
return format_html('{} <strong>{}</strong>', input_field, self.currency_code)
class MoneyFormField(forms.DecimalField):
"""
    Use this field type in Django Forms instead of a DecimalField, whenever an input field for
    the Money representation is required.
"""
def __init__(self, money_class=None, **kwargs):
if money_class is None:
money_class = MoneyMaker()
if not issubclass(money_class, AbstractMoney):
raise AttributeError("Given `money_class` does not declare a valid money type")
self.Money = money_class
if 'widget' not in kwargs:
kwargs['widget'] = MoneyFieldWidget(attrs={'currency_code': money_class.currency})
super().__init__(**kwargs)
def prepare_value(self, value):
if isinstance(value, AbstractMoney):
return Decimal(value)
return value
def to_python(self, value):
value = super().to_python(value)
return self.Money(value)
def validate(self, value):
if value.currency != self.Money.currency:
raise ValidationError("Can not convert different Money types.")
super().validate(Decimal(value))
return value
class MoneyField(models.DecimalField):
"""
    A MoneyField shall be used to store money-related amounts in the database, keeping track of
    the currency used. Accessing a model field of type MoneyField returns a MoneyIn<CURRENCY> type.
"""
description = _("Money in %(currency_code)s")
def __init__(self, *args, **kwargs):
self.currency_code = kwargs.pop('currency', app_settings.DEFAULT_CURRENCY)
self.Money = MoneyMaker(self.currency_code)
defaults = {
'max_digits': 30,
'decimal_places': CURRENCIES[self.currency_code][1],
}
defaults.update(kwargs)
super().__init__(*args, **defaults)
def deconstruct(self):
name, path, args, kwargs = super(MoneyField, self).deconstruct()
if kwargs['max_digits'] == 30:
kwargs.pop('max_digits')
if kwargs['decimal_places'] == CURRENCIES[self.currency_code][1]:
kwargs.pop('decimal_places')
return name, path, args, kwargs
def to_python(self, value):
if isinstance(value, AbstractMoney):
return value
if value is None:
return self.Money('NaN')
value = super().to_python(value)
return self.Money(value)
def get_prep_value(self, value):
# force to type Decimal by using grandparent super
value = super(models.DecimalField, self).get_prep_value(value)
return super().to_python(value)
def from_db_value(self, value, expression, connection):
if value is None:
return
if isinstance(value, float):
value = str(value)
return self.Money(value)
def get_db_prep_save(self, value, connection):
if isinstance(value, Decimal) and value.is_nan():
return None
return super().get_db_prep_save(value, connection)
def get_prep_lookup(self, lookup_type, value):
if isinstance(value, AbstractMoney):
if value.get_currency() != self.Money.get_currency():
msg = "This field stores money in {}, but the lookup amount is in {}"
raise ValueError(msg.format(value.get_currency(), self.Money.get_currency()))
value = value.as_decimal()
result = super().get_prep_lookup(lookup_type, value)
return result
def value_to_string(self, obj):
value = self._get_val_from_obj(obj)
# grandparent super
value = super(models.DecimalField, self).get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
widget = MoneyFieldWidget(attrs={'currency_code': self.Money.currency})
defaults = {'form_class': MoneyFormField, 'widget': widget, 'money_class': self.Money}
defaults.update(**kwargs)
formfield = super().formfield(**defaults)
return formfield
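# Minimal model sketch (the model itself is illustrative; the currency
# defaults to app_settings.DEFAULT_CURRENCY when omitted):
#
#   class Product(models.Model):
#       unit_price = MoneyField(currency='EUR')
#
# Reading product.unit_price then yields a MoneyInEUR instance, and
# to_python maps None to Money('NaN') rather than raising.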
|
from distutils.core import setup
import sys
def main():
sys.path.append('.')
from trashcli import trash
scripts.add_script('trash' , 'trashcli.put' , 'main')
scripts.add_script('trash-put' , 'trashcli.put' , 'main')
scripts.add_script('trash-list' , 'trashcli.list', 'main')
scripts.add_script('trash-restore', 'trashcli.restore', 'main')
scripts.add_script('trash-empty' , 'trashcli.empty', 'main')
scripts.add_script('trash-rm' , 'trashcli.rm' , 'main')
setup(
name = 'trash-cli' , version = trash.version ,
author = 'Andrea Francia' , author_email = '[email protected]' ,
url = 'https://github.com/andreafrancia/trash-cli',
description = 'Command line interface to FreeDesktop.org Trash.',
long_description = read_file("README.rst"),
license = 'GPL v2',
packages = ['trashcli'],
scripts = scripts.created_scripts,
data_files = [('share/man/man1', ['man/man1/trash-empty.1',
'man/man1/trash-list.1',
'man/man1/trash-restore.1',
'man/man1/trash-put.1',
'man/man1/trash-rm.1'])],
install_requires=[
'psutil',
],
)
from textwrap import dedent
class Scripts:
def __init__(self, write_file, make_file_executable):
self.write_file = write_file
self.make_file_executable = make_file_executable
self.created_scripts = []
def add_script(self, name, module, main_function):
script_contents = dedent("""\
#!/usr/bin/env python
from __future__ import absolute_import
import sys
from %(module)s import %(main_function)s as main
sys.exit(main())
""") % locals()
self.write_file(name, script_contents)
self.make_file_executable(name)
self.created_scripts.append(name)
import os, stat
def make_file_executable(path):
os.chmod(path, os.stat(path).st_mode | stat.S_IXUSR)
def write_file(name, contents):
with open(name, 'w') as f:
f.write(contents)
def read_file(name):
with open(name) as f:
return f.read()
scripts = Scripts(write_file, make_file_executable)
if __name__ == '__main__':
main()
|
import logging
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_STATE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util import decorator
from .const import CONF_INVERSE, SIGNAL_DS18B20_NEW
_LOGGER = logging.getLogger(__name__)
HANDLERS = decorator.Registry()
@HANDLERS.register("state")
async def async_handle_state_update(hass, context, msg):
"""Handle a binary sensor state update."""
_LOGGER.debug("[state handler] context: %s msg: %s", context, msg)
entity_id = context.get(ATTR_ENTITY_ID)
state = bool(int(msg.get(ATTR_STATE)))
if context.get(CONF_INVERSE):
state = not state
async_dispatcher_send(hass, f"konnected.{entity_id}.update", state)
@HANDLERS.register("temp")
async def async_handle_temp_update(hass, context, msg):
"""Handle a temperature sensor state update."""
_LOGGER.debug("[temp handler] context: %s msg: %s", context, msg)
entity_id, temp = context.get(DEVICE_CLASS_TEMPERATURE), msg.get("temp")
if entity_id:
async_dispatcher_send(hass, f"konnected.{entity_id}.update", temp)
@HANDLERS.register("humi")
async def async_handle_humi_update(hass, context, msg):
"""Handle a humidity sensor state update."""
_LOGGER.debug("[humi handler] context: %s msg: %s", context, msg)
entity_id, humi = context.get(DEVICE_CLASS_HUMIDITY), msg.get("humi")
if entity_id:
async_dispatcher_send(hass, f"konnected.{entity_id}.update", humi)
@HANDLERS.register("addr")
async def async_handle_addr_update(hass, context, msg):
"""Handle an addressable sensor update."""
_LOGGER.debug("[addr handler] context: %s msg: %s", context, msg)
addr, temp = msg.get("addr"), msg.get("temp")
entity_id = context.get(addr)
if entity_id:
async_dispatcher_send(hass, f"konnected.{entity_id}.update", temp)
else:
msg["device_id"] = context.get("device_id")
msg["temperature"] = temp
msg["addr"] = addr
async_dispatcher_send(hass, SIGNAL_DS18B20_NEW, msg)
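# Dispatch sketch: HANDLERS is a plain registry keyed by the message
# type, so the component can route device updates like (values
# illustrative):
#
#   handler = HANDLERS["temp"]
#   await handler(hass, context, {"temp": 21.5})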
|
import posixpath
from absl import flags
from perfkitbenchmarker.linux_packages import cuda_toolkit
flags.DEFINE_string('mx_version', '1.4.0', 'mxnet pip package version')
FLAGS = flags.FLAGS
def GetEnvironmentVars(vm):
"""Return a string containing MXNet-related environment variables.
Args:
    vm: vm to get environment variables from
Returns:
string of environment variables
"""
output, _ = vm.RemoteCommand('getconf LONG_BIT', should_log=True)
long_bit = output.strip()
lib_name = 'lib' if long_bit == '32' else 'lib64'
return ' '.join([
'PATH=%s${PATH:+:${PATH}}' %
posixpath.join(cuda_toolkit.CUDA_HOME, 'bin'),
'CUDA_HOME=%s' % cuda_toolkit.CUDA_HOME,
'LD_LIBRARY_PATH=%s${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}' %
posixpath.join(cuda_toolkit.CUDA_HOME, lib_name),
])
def GetMXNetVersion(vm):
"""Returns the version of MXNet installed on the vm.
Args:
vm: the target vm on which to check the MXNet version
Returns:
installed python MXNet version as a string
"""
stdout, _ = vm.RemoteCommand(
('echo -e "import mxnet\nprint(mxnet.__version__)" | {0} python'
.format(GetEnvironmentVars(vm)))
)
return stdout.strip()
def Install(vm):
"""Installs MXNet on the VM."""
vm.Install('pip')
vm.InstallPackages('libatlas-base-dev')
if FLAGS.mx_device == 'gpu':
vm.Install('cuda_toolkit')
if FLAGS.cuda_toolkit_version == '8.0':
vm.RemoteCommand('sudo pip install mxnet-cu80=={}'.format(
FLAGS.mx_version), should_log=True)
elif FLAGS.cuda_toolkit_version == '9.0':
vm.RemoteCommand('sudo pip install mxnet-cu90=={}'.format(
FLAGS.mx_version), should_log=True)
elif FLAGS.cuda_toolkit_version == '10.0':
vm.RemoteCommand('sudo pip install mxnet-cu100=={}'.format(
FLAGS.mx_version), should_log=True)
elif FLAGS.cuda_toolkit_version == '10.1':
vm.RemoteCommand('sudo pip install mxnet-cu101=={}'.format(
FLAGS.mx_version), should_log=True)
elif FLAGS.cuda_toolkit_version == '10.2':
vm.RemoteCommand('sudo pip install mxnet-cu102=={}'.format(
FLAGS.mx_version), should_log=True)
else:
raise cuda_toolkit.UnsupportedCudaVersionError()
elif FLAGS.mx_device == 'cpu':
vm.RemoteCommand('sudo pip install mxnet=={}'.format(
FLAGS.mx_version), should_log=True)
def Uninstall(vm):
"""Uninstalls MXNet on the VM."""
  vm.RemoteCommand('sudo pip uninstall -y mxnet', should_log=True)
|
import json
from homeassistant.components.nut.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_RESOURCES
from homeassistant.core import HomeAssistant
from tests.async_mock import MagicMock, patch
from tests.common import MockConfigEntry, load_fixture
def _get_mock_pynutclient(list_vars=None, list_ups=None):
pynutclient = MagicMock()
type(pynutclient).list_ups = MagicMock(return_value=list_ups)
type(pynutclient).list_vars = MagicMock(return_value=list_vars)
return pynutclient
async def async_init_integration(
hass: HomeAssistant, ups_fixture: str, resources: list
) -> MockConfigEntry:
"""Set up the nexia integration in Home Assistant."""
ups_fixture = f"nut/{ups_fixture}.json"
list_vars = json.loads(load_fixture(ups_fixture))
mock_pynut = _get_mock_pynutclient(list_ups={"ups1": "UPS 1"}, list_vars=list_vars)
with patch(
"homeassistant.components.nut.PyNUTClient",
return_value=mock_pynut,
):
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: "mock", CONF_PORT: "mock", CONF_RESOURCES: resources},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
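# Typical use inside a sensor test (fixture and resource names are
# illustrative; ups_fixture must match a JSON file under the test
# fixtures' nut/ directory):
#
#   entry = await async_init_integration(
#       hass, ups_fixture="CP1350C", resources=["battery.charge"])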
|
from collections import defaultdict
import itertools
from ..exceptions import ParseError
from ..lexer import Token
from ..tree import Tree
from ..grammar import Terminal as T, NonTerminal as NT, Symbol
try:
xrange
except NameError:
xrange = range
def match(t, s):
assert isinstance(t, T)
return t.name == s.type
class Rule(object):
"""Context-free grammar rule."""
def __init__(self, lhs, rhs, weight, alias):
super(Rule, self).__init__()
assert isinstance(lhs, NT), lhs
assert all(isinstance(x, NT) or isinstance(x, T) for x in rhs), rhs
self.lhs = lhs
self.rhs = rhs
self.weight = weight
self.alias = alias
def __str__(self):
return '%s -> %s' % (str(self.lhs), ' '.join(str(x) for x in self.rhs))
def __repr__(self):
return str(self)
def __hash__(self):
return hash((self.lhs, tuple(self.rhs)))
def __eq__(self, other):
return self.lhs == other.lhs and self.rhs == other.rhs
def __ne__(self, other):
return not (self == other)
class Grammar(object):
"""Context-free grammar."""
def __init__(self, rules):
self.rules = frozenset(rules)
def __eq__(self, other):
return self.rules == other.rules
def __str__(self):
return '\n' + '\n'.join(sorted(repr(x) for x in self.rules)) + '\n'
def __repr__(self):
return str(self)
# Parse tree data structures
class RuleNode(object):
"""A node in the parse tree, which also contains the full rhs rule."""
def __init__(self, rule, children, weight=0):
self.rule = rule
self.children = children
self.weight = weight
def __repr__(self):
return 'RuleNode(%s, [%s])' % (repr(self.rule.lhs), ', '.join(str(x) for x in self.children))
class Parser(object):
"""Parser wrapper."""
def __init__(self, rules):
super(Parser, self).__init__()
self.orig_rules = {rule: rule for rule in rules}
rules = [self._to_rule(rule) for rule in rules]
self.grammar = to_cnf(Grammar(rules))
def _to_rule(self, lark_rule):
"""Converts a lark rule, (lhs, rhs, callback, options), to a Rule."""
assert isinstance(lark_rule.origin, NT)
assert all(isinstance(x, Symbol) for x in lark_rule.expansion)
return Rule(
lark_rule.origin, lark_rule.expansion,
weight=lark_rule.options.priority if lark_rule.options.priority else 0,
alias=lark_rule)
def parse(self, tokenized, start): # pylint: disable=invalid-name
"""Parses input, which is a list of tokens."""
assert start
start = NT(start)
table, trees = _parse(tokenized, self.grammar)
# Check if the parse succeeded.
if all(r.lhs != start for r in table[(0, len(tokenized) - 1)]):
raise ParseError('Parsing failed.')
parse = trees[(0, len(tokenized) - 1)][start]
return self._to_tree(revert_cnf(parse))
def _to_tree(self, rule_node):
"""Converts a RuleNode parse tree to a lark Tree."""
orig_rule = self.orig_rules[rule_node.rule.alias]
children = []
for child in rule_node.children:
if isinstance(child, RuleNode):
children.append(self._to_tree(child))
else:
assert isinstance(child.name, Token)
children.append(child.name)
t = Tree(orig_rule.origin, children)
        t.rule = orig_rule
return t
def print_parse(node, indent=0):
if isinstance(node, RuleNode):
print(' ' * (indent * 2) + str(node.rule.lhs))
for child in node.children:
print_parse(child, indent + 1)
else:
        print(' ' * (indent * 2) + str(node.name))  # leaves are T-wrapped tokens
def _parse(s, g):
"""Parses sentence 's' using CNF grammar 'g'."""
# The CYK table. Indexed with a 2-tuple: (start pos, end pos)
table = defaultdict(set)
# Top-level structure is similar to the CYK table. Each cell is a dict from
# rule name to the best (lightest) tree for that rule.
trees = defaultdict(dict)
# Populate base case with existing terminal production rules
for i, w in enumerate(s):
for terminal, rules in g.terminal_rules.items():
if match(terminal, w):
for rule in rules:
table[(i, i)].add(rule)
if (rule.lhs not in trees[(i, i)] or
rule.weight < trees[(i, i)][rule.lhs].weight):
trees[(i, i)][rule.lhs] = RuleNode(rule, [T(w)], weight=rule.weight)
# Iterate over lengths of sub-sentences
for l in xrange(2, len(s) + 1):
# Iterate over sub-sentences with the given length
for i in xrange(len(s) - l + 1):
# Choose partition of the sub-sentence in [1, l)
for p in xrange(i + 1, i + l):
span1 = (i, p - 1)
span2 = (p, i + l - 1)
for r1, r2 in itertools.product(table[span1], table[span2]):
for rule in g.nonterminal_rules.get((r1.lhs, r2.lhs), []):
table[(i, i + l - 1)].add(rule)
r1_tree = trees[span1][r1.lhs]
r2_tree = trees[span2][r2.lhs]
rule_total_weight = rule.weight + r1_tree.weight + r2_tree.weight
if (rule.lhs not in trees[(i, i + l - 1)]
or rule_total_weight < trees[(i, i + l - 1)][rule.lhs].weight):
trees[(i, i + l - 1)][rule.lhs] = RuleNode(rule, [r1_tree, r2_tree], weight=rule_total_weight)
return table, trees
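# The triple loop above is the standard CYK recurrence, O(n^3 * |G|) for an
# n-token input; `trees` keeps only the lightest derivation per (span, lhs).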
# This section implements context-free grammar converter to Chomsky normal form.
# It also implements a conversion of parse trees from its CNF to the original
# grammar.
# Overview:
# Applies the following operations in this order:
# * TERM: Eliminates non-solitary terminals from all rules
# * BIN: Eliminates rules with more than 2 symbols on their right-hand-side.
# * UNIT: Eliminates non-terminal unit rules
#
# The following grammar characteristics aren't featured:
# * Start symbol appears on RHS
# * Empty rules (epsilon rules)
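# Illustrative example (schematic rule names): TERM rewrites S -> A 'b' C into
# S -> A __T_b C plus __T_b -> 'b'; BIN then splits the 3-symbol RHS into
# S -> A __SP_... and __SP_... -> __T_b C.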
class CnfWrapper(object):
"""CNF wrapper for grammar.
Validates that the input grammar is CNF and provides helper data structures.
"""
def __init__(self, grammar):
super(CnfWrapper, self).__init__()
self.grammar = grammar
self.rules = grammar.rules
self.terminal_rules = defaultdict(list)
self.nonterminal_rules = defaultdict(list)
for r in self.rules:
# Validate that the grammar is CNF and populate auxiliary data structures.
assert isinstance(r.lhs, NT), r
if len(r.rhs) not in [1, 2]:
raise ParseError("CYK doesn't support empty rules")
if len(r.rhs) == 1 and isinstance(r.rhs[0], T):
self.terminal_rules[r.rhs[0]].append(r)
elif len(r.rhs) == 2 and all(isinstance(x, NT) for x in r.rhs):
self.nonterminal_rules[tuple(r.rhs)].append(r)
else:
assert False, r
def __eq__(self, other):
return self.grammar == other.grammar
def __repr__(self):
return repr(self.grammar)
class UnitSkipRule(Rule):
"""A rule that records NTs that were skipped during transformation."""
def __init__(self, lhs, rhs, skipped_rules, weight, alias):
super(UnitSkipRule, self).__init__(lhs, rhs, weight, alias)
self.skipped_rules = skipped_rules
def __eq__(self, other):
return isinstance(other, type(self)) and self.skipped_rules == other.skipped_rules
__hash__ = Rule.__hash__
def build_unit_skiprule(unit_rule, target_rule):
skipped_rules = []
if isinstance(unit_rule, UnitSkipRule):
skipped_rules += unit_rule.skipped_rules
skipped_rules.append(target_rule)
if isinstance(target_rule, UnitSkipRule):
skipped_rules += target_rule.skipped_rules
return UnitSkipRule(unit_rule.lhs, target_rule.rhs, skipped_rules,
weight=unit_rule.weight + target_rule.weight, alias=unit_rule.alias)
def get_any_nt_unit_rule(g):
"""Returns a non-terminal unit rule from 'g', or None if there is none."""
for rule in g.rules:
if len(rule.rhs) == 1 and isinstance(rule.rhs[0], NT):
return rule
return None
def _remove_unit_rule(g, rule):
"""Removes 'rule' from 'g' without changing the langugage produced by 'g'."""
new_rules = [x for x in g.rules if x != rule]
refs = [x for x in g.rules if x.lhs == rule.rhs[0]]
new_rules += [build_unit_skiprule(rule, ref) for ref in refs]
return Grammar(new_rules)
def _split(rule):
"""Splits a rule whose len(rhs) > 2 into shorter rules."""
rule_str = str(rule.lhs) + '__' + '_'.join(str(x) for x in rule.rhs)
rule_name = '__SP_%s' % (rule_str) + '_%d'
yield Rule(rule.lhs, [rule.rhs[0], NT(rule_name % 1)], weight=rule.weight, alias=rule.alias)
for i in xrange(1, len(rule.rhs) - 2):
yield Rule(NT(rule_name % i), [rule.rhs[i], NT(rule_name % (i + 1))], weight=0, alias='Split')
yield Rule(NT(rule_name % (len(rule.rhs) - 2)), rule.rhs[-2:], weight=0, alias='Split')
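# Example: _split on S -> A B C D yields S -> A __SP_S__A_B_C_D_1,
# __SP_S__A_B_C_D_1 -> B __SP_S__A_B_C_D_2, and __SP_S__A_B_C_D_2 -> C D.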
def _term(g):
"""Applies the TERM rule on 'g' (see top comment)."""
all_t = {x for rule in g.rules for x in rule.rhs if isinstance(x, T)}
t_rules = {t: Rule(NT('__T_%s' % str(t)), [t], weight=0, alias='Term') for t in all_t}
new_rules = []
for rule in g.rules:
if len(rule.rhs) > 1 and any(isinstance(x, T) for x in rule.rhs):
new_rhs = [t_rules[x].lhs if isinstance(x, T) else x for x in rule.rhs]
new_rules.append(Rule(rule.lhs, new_rhs, weight=rule.weight, alias=rule.alias))
new_rules.extend(v for k, v in t_rules.items() if k in rule.rhs)
else:
new_rules.append(rule)
return Grammar(new_rules)
def _bin(g):
"""Applies the BIN rule to 'g' (see top comment)."""
new_rules = []
for rule in g.rules:
if len(rule.rhs) > 2:
new_rules += _split(rule)
else:
new_rules.append(rule)
return Grammar(new_rules)
def _unit(g):
"""Applies the UNIT rule to 'g' (see top comment)."""
nt_unit_rule = get_any_nt_unit_rule(g)
while nt_unit_rule:
g = _remove_unit_rule(g, nt_unit_rule)
nt_unit_rule = get_any_nt_unit_rule(g)
return g
def to_cnf(g):
"""Creates a CNF grammar from a general context-free grammar 'g'."""
g = _unit(_bin(_term(g)))
return CnfWrapper(g)
def unroll_unit_skiprule(lhs, orig_rhs, skipped_rules, children, weight, alias):
if not skipped_rules:
return RuleNode(Rule(lhs, orig_rhs, weight=weight, alias=alias), children, weight=weight)
else:
weight = weight - skipped_rules[0].weight
return RuleNode(
Rule(lhs, [skipped_rules[0].lhs], weight=weight, alias=alias), [
unroll_unit_skiprule(skipped_rules[0].lhs, orig_rhs,
skipped_rules[1:], children,
skipped_rules[0].weight, skipped_rules[0].alias)
], weight=weight)
def revert_cnf(node):
"""Reverts a parse tree (RuleNode) to its original non-CNF form (Node)."""
if isinstance(node, T):
return node
# Reverts TERM rule.
if node.rule.lhs.name.startswith('__T_'):
return node.children[0]
else:
children = []
for child in map(revert_cnf, node.children):
# Reverts BIN rule.
if isinstance(child, RuleNode) and child.rule.lhs.name.startswith('__SP_'):
children += child.children
else:
children.append(child)
# Reverts UNIT rule.
if isinstance(node.rule, UnitSkipRule):
return unroll_unit_skiprule(node.rule.lhs, node.rule.rhs,
node.rule.skipped_rules, children,
node.rule.weight, node.rule.alias)
else:
return RuleNode(node.rule, children)
|
import numpy as np
from .utils import is_duck_array
integer_types = (int, np.integer)
try:
import dask.array
from dask.base import is_dask_collection
# solely for isinstance checks
dask_array_type = (dask.array.Array,)
def is_duck_dask_array(x):
return is_duck_array(x) and is_dask_collection(x)
except ImportError: # pragma: no cover
dask_array_type = ()
is_duck_dask_array = lambda _: False
is_dask_collection = lambda _: False
try:
# solely for isinstance checks
import sparse
sparse_array_type = (sparse.SparseArray,)
except ImportError: # pragma: no cover
sparse_array_type = ()
try:
# solely for isinstance checks
import cupy
cupy_array_type = (cupy.ndarray,)
except ImportError: # pragma: no cover
cupy_array_type = ()
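# Illustrative check (assumes dask is installed):
#   import dask.array as da
#   is_duck_dask_array(da.zeros(3))  # True
#   is_duck_dask_array(np.zeros(3))  # False: duck array, but not a dask collection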
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from diamond.collector import Collector
from onewire import OneWireCollector
###############################################################################
class TestOneWireCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('OneWireCollector', {
'owfs': self.getFixturePath('.'),
'scan': {'temperature': 't'},
'id:28.2F702A010000': {'presure': 'p11'}})
self.collector = OneWireCollector(config, None)
def test_import(self):
self.assertTrue(OneWireCollector)
@patch.object(Collector, 'publish')
def test(self, publish_mock):
self.collector.collect()
metrics = {
'28_A76569020000.t': 22.4375,
'28_2F702A010000.p11': 999
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from __future__ import absolute_import, print_function
import sys
from unittest import TestCase, main
from lark import Lark
from lark.tree import Tree
from lark.tools import standalone
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class TestStandalone(TestCase):
def setUp(self):
pass
def _create_standalone(self, grammar, compress=False):
code_buf = StringIO()
standalone.gen_standalone(Lark(grammar, parser='lalr'), out=code_buf, compress=compress)
code = code_buf.getvalue()
context = {'__doc__': None}
exec(code, context)
return context
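    # gen_standalone writes a self-contained parser module; exec'ing it into
    # `context` lets tests grab entry points such as context['Lark_StandAlone'].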
def test_simple(self):
grammar = """
start: NUMBER WORD
%import common.NUMBER
%import common.WORD
%import common.WS
%ignore WS
"""
context = self._create_standalone(grammar)
_Lark = context['Lark_StandAlone']
l = _Lark()
x = l.parse('12 elephants')
self.assertEqual(x.children, ['12', 'elephants'])
x = l.parse('16 candles')
self.assertEqual(x.children, ['16', 'candles'])
self.assertRaises(context['UnexpectedToken'], l.parse, 'twelve monkeys')
self.assertRaises(context['UnexpectedToken'], l.parse, 'twelve')
self.assertRaises(context['UnexpectedCharacters'], l.parse, '$ talks')
context = self._create_standalone(grammar, compress=True)
_Lark = context['Lark_StandAlone']
l = _Lark()
x = l.parse('12 elephants')
def test_contextual(self):
grammar = """
start: a b
a: "A" "B"
b: "AB"
"""
context = self._create_standalone(grammar)
_Lark = context['Lark_StandAlone']
l = _Lark()
x = l.parse('ABAB')
class T(context['Transformer']):
def a(self, items):
return 'a'
def b(self, items):
return 'b'
start = list
x = T().transform(x)
self.assertEqual(x, ['a', 'b'])
l2 = _Lark(transformer=T())
x = l2.parse('ABAB')
self.assertEqual(x, ['a', 'b'])
def test_postlex(self):
from lark.indenter import Indenter
class MyIndenter(Indenter):
NL_type = '_NEWLINE'
OPEN_PAREN_types = ['LPAR', 'LSQB', 'LBRACE']
CLOSE_PAREN_types = ['RPAR', 'RSQB', 'RBRACE']
INDENT_type = '_INDENT'
DEDENT_type = '_DEDENT'
tab_len = 8
grammar = r"""
start: "(" ")" _NEWLINE
_NEWLINE: /\n/
"""
context = self._create_standalone(grammar)
_Lark = context['Lark_StandAlone']
l = _Lark(postlex=MyIndenter())
x = l.parse('()\n')
self.assertEqual(x, Tree('start', []))
l = _Lark(postlex=MyIndenter())
x = l.parse('(\n)\n')
self.assertEqual(x, Tree('start', []))
def test_transformer(self):
grammar = r"""
start: some_rule "(" SOME_TERMINAL ")"
some_rule: SOME_TERMINAL
SOME_TERMINAL: /[A-Za-z_][A-Za-z0-9_]*/
"""
context = self._create_standalone(grammar)
_Lark = context["Lark_StandAlone"]
_Token = context["Token"]
_Tree = context["Tree"]
class MyTransformer(context["Transformer"]):
def SOME_TERMINAL(self, token):
return _Token("SOME_TERMINAL", "token is transformed")
def some_rule(self, children):
return _Tree("rule_is_transformed", [])
parser = _Lark(transformer=MyTransformer())
self.assertEqual(
parser.parse("FOO(BAR)"),
_Tree("start", [
_Tree("rule_is_transformed", []),
_Token("SOME_TERMINAL", "token is transformed")
])
)
if __name__ == '__main__':
main()
|
from filterpy.discrete_bayes import predict, update, normalize
from numpy.random import randn, randint
import numpy as np
def _predict(distribution, offset, kernel):
""" explicit convolution with wraparound"""
N = len(distribution)
kN = len(kernel)
width = int((kN - 1) / 2)
prior = np.zeros(N)
for i in range(N):
        for k in range(kN):
index = (i + (width-k) - offset) % N
prior[i] += distribution[index] * kernel[k]
return prior
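# Example (hand-checked): _predict([0, 1, 0, 0], 1, [.1, .8, .1]) shifts the
# mass one cell right with wraparound, giving [0, .1, .8, .1].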
def test_predictions():
s = 0.
for k in range(3, 22, 2): # different kernel sizes
for _ in range(1000):
a = randn(100)
kernel = normalize(randn(k))
move = randint(1, 200)
s += sum(predict(a, move, kernel) - _predict(a, move, kernel))
assert s < 1.e-8, "sum of difference = {}".format(s)
|
import pytest
from qutebrowser.misc import objects
from qutebrowser.commands import runners, cmdexc
class TestCommandParser:
def test_parse_all(self, cmdline_test):
"""Test parsing of commands.
See https://github.com/qutebrowser/qutebrowser/issues/615
Args:
cmdline_test: A pytest fixture which provides testcases.
"""
parser = runners.CommandParser()
if cmdline_test.valid:
parser.parse_all(cmdline_test.cmd, aliases=False)
else:
with pytest.raises(cmdexc.NoSuchCommandError):
parser.parse_all(cmdline_test.cmd, aliases=False)
def test_parse_all_with_alias(self, cmdline_test, monkeypatch,
config_stub):
if not cmdline_test.cmd:
pytest.skip("Empty command")
config_stub.val.aliases = {'alias_name': cmdline_test.cmd}
parser = runners.CommandParser()
if cmdline_test.valid:
assert len(parser.parse_all("alias_name")) > 0
else:
with pytest.raises(cmdexc.NoSuchCommandError):
parser.parse_all("alias_name")
@pytest.mark.parametrize('command', ['', ' '])
def test_parse_empty_with_alias(self, command):
"""An empty command should not crash.
See https://github.com/qutebrowser/qutebrowser/issues/1690
and https://github.com/qutebrowser/qutebrowser/issues/1773
"""
parser = runners.CommandParser()
with pytest.raises(cmdexc.NoSuchCommandError):
parser.parse_all(command)
class TestCompletions:
"""Tests for completions.use_best_match."""
@pytest.fixture(autouse=True)
def cmdutils_stub(self, monkeypatch, stubs):
"""Patch the cmdutils module to provide fake commands."""
monkeypatch.setattr(objects, 'commands', {
'one': stubs.FakeCommand(name='one'),
'two': stubs.FakeCommand(name='two'),
'two-foo': stubs.FakeCommand(name='two-foo'),
})
def test_partial_parsing(self, config_stub):
"""Test partial parsing with a runner where it's enabled.
The same with it being disabled is tested by test_parse_all.
"""
parser = runners.CommandParser(partial_match=True)
result = parser.parse('on')
assert result.cmd.name == 'one'
def test_dont_use_best_match(self, config_stub):
"""Test multiple completion options with use_best_match set to false.
Should raise NoSuchCommandError
"""
config_stub.val.completion.use_best_match = False
parser = runners.CommandParser(partial_match=True)
with pytest.raises(cmdexc.NoSuchCommandError):
parser.parse('tw')
def test_use_best_match(self, config_stub):
"""Test multiple completion options with use_best_match set to true.
The resulting command should be the best match
"""
config_stub.val.completion.use_best_match = True
parser = runners.CommandParser(partial_match=True)
result = parser.parse('tw')
assert result.cmd.name == 'two'
|
import logging
from pymediaroom import (
COMMANDS,
PyMediaroomError,
Remote,
State,
install_mediaroom_protocol,
)
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_TIMEOUT,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
STATE_UNAVAILABLE,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
_LOGGER = logging.getLogger(__name__)
DATA_MEDIAROOM = "mediaroom_known_stb"
DEFAULT_NAME = "Mediaroom STB"
DEFAULT_TIMEOUT = 9
DISCOVERY_MEDIAROOM = "mediaroom_discovery_installed"
MEDIA_TYPE_MEDIAROOM = "mediaroom"
SIGNAL_STB_NOTIFY = "mediaroom_stb_discovered"
SUPPORT_MEDIAROOM = (
SUPPORT_PAUSE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Mediaroom platform."""
known_hosts = hass.data.get(DATA_MEDIAROOM)
if known_hosts is None:
known_hosts = hass.data[DATA_MEDIAROOM] = []
host = config.get(CONF_HOST)
if host:
async_add_entities(
[
MediaroomDevice(
host=host,
device_id=None,
optimistic=config[CONF_OPTIMISTIC],
timeout=config[CONF_TIMEOUT],
)
]
)
hass.data[DATA_MEDIAROOM].append(host)
_LOGGER.debug("Trying to discover Mediaroom STB")
def callback_notify(notify):
"""Process NOTIFY message from STB."""
if notify.ip_address in hass.data[DATA_MEDIAROOM]:
dispatcher_send(hass, SIGNAL_STB_NOTIFY, notify)
return
_LOGGER.debug("Discovered new stb %s", notify.ip_address)
hass.data[DATA_MEDIAROOM].append(notify.ip_address)
new_stb = MediaroomDevice(
host=notify.ip_address, device_id=notify.device_uuid, optimistic=False
)
async_add_entities([new_stb])
if not config[CONF_OPTIMISTIC]:
already_installed = hass.data.get(DISCOVERY_MEDIAROOM)
if not already_installed:
hass.data[DISCOVERY_MEDIAROOM] = await install_mediaroom_protocol(
responses_callback=callback_notify
)
@callback
def stop_discovery(event):
"""Stop discovery of new mediaroom STB's."""
_LOGGER.debug("Stopping internal pymediaroom discovery")
hass.data[DISCOVERY_MEDIAROOM].close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery)
_LOGGER.debug("Auto discovery installed")
class MediaroomDevice(MediaPlayerEntity):
"""Representation of a Mediaroom set-up-box on the network."""
def set_state(self, mediaroom_state):
"""Map pymediaroom state to HA state."""
state_map = {
State.OFF: STATE_OFF,
State.STANDBY: STATE_STANDBY,
State.PLAYING_LIVE_TV: STATE_PLAYING,
State.PLAYING_RECORDED_TV: STATE_PLAYING,
State.PLAYING_TIMESHIFT_TV: STATE_PLAYING,
State.STOPPED: STATE_PAUSED,
State.UNKNOWN: STATE_UNAVAILABLE,
}
self._state = state_map[mediaroom_state]
def __init__(self, host, device_id, optimistic=False, timeout=DEFAULT_TIMEOUT):
"""Initialize the device."""
self.host = host
self.stb = Remote(host)
_LOGGER.info(
"Found STB at %s%s", host, " - I'm optimistic" if optimistic else ""
)
self._channel = None
self._optimistic = optimistic
self._state = STATE_PLAYING if optimistic else STATE_STANDBY
self._name = f"Mediaroom {device_id if device_id else host}"
self._available = True
if device_id:
self._unique_id = device_id
else:
self._unique_id = None
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def available(self):
"""Return True if entity is available."""
return self._available
async def async_added_to_hass(self):
"""Retrieve latest state."""
async def async_notify_received(notify):
"""Process STB state from NOTIFY message."""
stb_state = self.stb.notify_callback(notify)
# stb_state is None in case the notify is not from the current stb
if not stb_state:
return
self.set_state(stb_state)
_LOGGER.debug("STB(%s) is [%s]", self.host, self._state)
self._available = True
self.async_write_ha_state()
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_STB_NOTIFY, async_notify_received
)
)
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play media."""
_LOGGER.debug(
"STB(%s) Play media: %s (%s)", self.stb.stb_ip, media_id, media_type
)
if media_type == MEDIA_TYPE_CHANNEL:
if not media_id.isdigit():
_LOGGER.error("Invalid media_id %s: Must be a channel number", media_id)
return
media_id = int(media_id)
elif media_type == MEDIA_TYPE_MEDIAROOM:
if media_id not in COMMANDS:
_LOGGER.error("Invalid media_id %s: Must be a command", media_id)
return
else:
_LOGGER.error("Invalid media type %s", media_type)
return
try:
await self.stb.send_cmd(media_id)
if self._optimistic:
self._state = STATE_PLAYING
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_MEDIAROOM
@property
def media_content_type(self):
"""Return the content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_channel(self):
"""Channel currently playing."""
return self._channel
async def async_turn_on(self):
"""Turn on the receiver."""
try:
self.set_state(await self.stb.turn_on())
if self._optimistic:
self._state = STATE_PLAYING
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_turn_off(self):
"""Turn off the receiver."""
try:
self.set_state(await self.stb.turn_off())
if self._optimistic:
self._state = STATE_STANDBY
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_media_play(self):
"""Send play command."""
try:
_LOGGER.debug("media_play()")
await self.stb.send_cmd("PlayPause")
if self._optimistic:
self._state = STATE_PLAYING
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_media_pause(self):
"""Send pause command."""
try:
await self.stb.send_cmd("PlayPause")
if self._optimistic:
self._state = STATE_PAUSED
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_media_stop(self):
"""Send stop command."""
try:
await self.stb.send_cmd("Stop")
if self._optimistic:
self._state = STATE_PAUSED
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_media_previous_track(self):
"""Send Program Down command."""
try:
await self.stb.send_cmd("ProgDown")
if self._optimistic:
self._state = STATE_PLAYING
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_media_next_track(self):
"""Send Program Up command."""
try:
await self.stb.send_cmd("ProgUp")
if self._optimistic:
self._state = STATE_PLAYING
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_volume_up(self):
"""Send volume up command."""
try:
await self.stb.send_cmd("VolUp")
self._available = True
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_volume_down(self):
"""Send volume up command."""
try:
await self.stb.send_cmd("VolDown")
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
async def async_mute_volume(self, mute):
"""Send mute command."""
try:
await self.stb.send_cmd("Mute")
except PyMediaroomError:
self._available = False
self.async_write_ha_state()
|
from collections import namedtuple
import logging
from tellcore import telldus
import tellcore.constants as tellcore_constants
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_ID,
CONF_NAME,
CONF_PROTOCOL,
PERCENTAGE,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DatatypeDescription = namedtuple("DatatypeDescription", ["name", "unit"])
CONF_DATATYPE_MASK = "datatype_mask"
CONF_ONLY_NAMED = "only_named"
CONF_TEMPERATURE_SCALE = "temperature_scale"
CONF_MODEL = "model"
DEFAULT_DATATYPE_MASK = 127
DEFAULT_TEMPERATURE_SCALE = TEMP_CELSIUS
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_TEMPERATURE_SCALE, default=DEFAULT_TEMPERATURE_SCALE
): cv.string,
vol.Optional(
CONF_DATATYPE_MASK, default=DEFAULT_DATATYPE_MASK
): cv.positive_int,
vol.Optional(CONF_ONLY_NAMED, default=[]): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_ID): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_PROTOCOL): cv.string,
vol.Optional(CONF_MODEL): cv.string,
}
)
],
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tellstick sensors."""
sensor_value_descriptions = {
tellcore_constants.TELLSTICK_TEMPERATURE: DatatypeDescription(
"temperature", config.get(CONF_TEMPERATURE_SCALE)
),
tellcore_constants.TELLSTICK_HUMIDITY: DatatypeDescription(
"humidity", PERCENTAGE
),
tellcore_constants.TELLSTICK_RAINRATE: DatatypeDescription("rain rate", ""),
tellcore_constants.TELLSTICK_RAINTOTAL: DatatypeDescription("rain total", ""),
tellcore_constants.TELLSTICK_WINDDIRECTION: DatatypeDescription(
"wind direction", ""
),
tellcore_constants.TELLSTICK_WINDAVERAGE: DatatypeDescription(
"wind average", ""
),
tellcore_constants.TELLSTICK_WINDGUST: DatatypeDescription("wind gust", ""),
}
try:
tellcore_lib = telldus.TelldusCore()
except OSError:
_LOGGER.exception("Could not initialize Tellstick")
return
sensors = []
datatype_mask = config.get(CONF_DATATYPE_MASK)
if config[CONF_ONLY_NAMED]:
named_sensors = {}
for named_sensor in config[CONF_ONLY_NAMED]:
name = named_sensor[CONF_NAME]
proto = named_sensor.get(CONF_PROTOCOL)
model = named_sensor.get(CONF_MODEL)
id_ = named_sensor[CONF_ID]
if proto is not None:
if model is not None:
named_sensors[f"{proto}{model}{id_}"] = name
else:
named_sensors[f"{proto}{id_}"] = name
else:
named_sensors[id_] = name
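    # Keys are "<protocol><model><id>", "<protocol><id>", or the bare id; the
    # loop below tries the bare id first, then the increasingly specific forms.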
for tellcore_sensor in tellcore_lib.sensors():
if not config[CONF_ONLY_NAMED]:
sensor_name = str(tellcore_sensor.id)
else:
proto_id = f"{tellcore_sensor.protocol}{tellcore_sensor.id}"
proto_model_id = "{}{}{}".format(
tellcore_sensor.protocol, tellcore_sensor.model, tellcore_sensor.id
)
if tellcore_sensor.id in named_sensors:
sensor_name = named_sensors[tellcore_sensor.id]
elif proto_id in named_sensors:
sensor_name = named_sensors[proto_id]
elif proto_model_id in named_sensors:
sensor_name = named_sensors[proto_model_id]
else:
continue
for datatype in sensor_value_descriptions:
if datatype & datatype_mask and tellcore_sensor.has_value(datatype):
sensor_info = sensor_value_descriptions[datatype]
sensors.append(
TellstickSensor(sensor_name, tellcore_sensor, datatype, sensor_info)
)
add_entities(sensors)
class TellstickSensor(Entity):
"""Representation of a Tellstick sensor."""
def __init__(self, name, tellcore_sensor, datatype, sensor_info):
"""Initialize the sensor."""
self._datatype = datatype
self._tellcore_sensor = tellcore_sensor
self._unit_of_measurement = sensor_info.unit or None
self._value = None
self._name = f"{name} {sensor_info.name}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def update(self):
"""Update tellstick sensor."""
self._value = self._tellcore_sensor.value(self._datatype).value
|
from collections import Counter
from flashtext import KeywordProcessor
import scattertext as st
class FlashTextExtract(st.FeatsFromSpacyDoc):
    '''Feature extractor which counts occurrences of a fixed set of phrases using FlashText.'''
def set_keyword_processor(self, keyword_processor):
'''
:param keyword_processor: set, phrases to look for
:return: self
'''
self.keyword_processor_ = keyword_processor
return self
def get_feats(self, doc):
'''
Parameters
----------
doc, Spacy Doc
Returns
-------
        Counter keyword -> count
'''
return Counter(self.keyword_processor_.extract_keywords(str(doc)))
keyword_processor = KeywordProcessor(case_sensitive=False)
for phrase in ['the president', 'presidents', 'presidential', 'barack obama', 'mitt romney', 'george bush',
               'george w. bush', 'bill clinton', 'ronald reagan', 'obama', 'romney',
'barack', 'mitt', 'bush', 'clinton', 'reagan', 'mr. president', 'united states of america']:
keyword_processor.add_keyword(phrase)
feature_extractor = FlashTextExtract().set_keyword_processor(keyword_processor)
convention_df = st.SampleCorpora.ConventionData2012.get_data()
convention_df['parse'] = convention_df['text'].apply(st.whitespace_nlp_with_sentences)
corpus = (st.CorpusFromPandas(convention_df,
category_col='party',
text_col='text',
nlp=st.whitespace_nlp_with_sentences,
feats_from_spacy_doc=feature_extractor)
.build())
print(corpus.get_term_freq_df())
html = st.produce_scattertext_explorer(
corpus,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
metadata=convention_df['speaker'],
term_scorer=st.RankDifference(),
transform=st.Scalers.dense_rank,
pmi_threshold_coefficient=0,
minimum_term_frequency=0,
minimum_not_category_term_frequency=0,
use_full_doc=True
)
file_name = 'demo_specific_phrases.html'
with open(file_name, 'wb') as f:
    f.write(html.encode('utf-8'))
print('Open %s in Chrome or Firefox.' % file_name)
|
import flask
from docker_registry.core import compat
from docker_registry.core import exceptions
json = compat.json
from . import storage
from . import toolkit
from .lib import mirroring
from .lib import signals
from .app import app # noqa
store = storage.load()
"""Those routes are loaded only when `standalone' is enabled in the config
file. The goal is to make the Registry working without the central Index
It's then possible to push images from Docker without talking to any other
entities. This module mimics the Index.
"""
def generate_headers(namespace, repository, access):
registry_endpoints = toolkit.get_endpoints()
# The token generated will be invalid against a real Index behind.
token = 'Token signature={0},repository="{1}/{2}",access={3}'.format(
toolkit.gen_random_string(), namespace, repository, access)
return {'X-Docker-Endpoints': registry_endpoints,
'WWW-Authenticate': token,
'X-Docker-Token': token}
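# Example output (token value is random): the same fake token is sent both as
# the WWW-Authenticate challenge and as X-Docker-Token for the client to echo.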
@app.route('/v1/users', methods=['GET', 'POST'])
@app.route('/v1/users/', methods=['GET', 'POST'])
def get_post_users():
if flask.request.method == 'GET':
return toolkit.response('OK', 200)
try:
# Note(dmp): unicode patch
json.loads(flask.request.data.decode('utf8'))
except ValueError:
return toolkit.api_error('Error Decoding JSON', 400)
return toolkit.response('User Created', 201)
@app.route('/v1/users/<username>/', methods=['PUT'])
def put_username(username):
return toolkit.response('', 204)
def update_index_images(namespace, repository, data_arg):
path = store.index_images_path(namespace, repository)
sender = flask.current_app._get_current_object()
try:
images = {}
# Note(dmp): unicode patch
data = json.loads(data_arg.decode('utf8')) + store.get_json(path)
for i in data:
iid = i['id']
if iid in images and 'checksum' in images[iid]:
continue
i_data = {'id': iid}
for key in ['checksum']:
if key in i:
i_data[key] = i[key]
images[iid] = i_data
data = images.values()
# Note(dmp): unicode patch
store.put_json(path, data)
signals.repository_updated.send(
sender, namespace=namespace, repository=repository, value=data)
except exceptions.FileNotFoundError:
signals.repository_created.send(
sender, namespace=namespace, repository=repository,
# Note(dmp): unicode patch
value=json.loads(data_arg.decode('utf8')))
store.put_content(path, data_arg)
@app.route('/v1/repositories/<path:repository>', methods=['PUT'])
@app.route('/v1/repositories/<path:repository>/images',
defaults={'images': True},
methods=['PUT'])
@toolkit.parse_repository_name
@toolkit.requires_auth
def put_repository(namespace, repository, images=False):
data = None
try:
# Note(dmp): unicode patch
data = json.loads(flask.request.data.decode('utf8'))
except ValueError:
return toolkit.api_error('Error Decoding JSON', 400)
if not isinstance(data, list):
return toolkit.api_error('Invalid data')
update_index_images(namespace, repository, flask.request.data)
headers = generate_headers(namespace, repository, 'write')
code = 204 if images is True else 200
return toolkit.response('', code, headers)
@app.route('/v1/repositories/<path:repository>/images', methods=['GET'])
@toolkit.parse_repository_name
@toolkit.requires_auth
@mirroring.source_lookup(index_route=True)
def get_repository_images(namespace, repository):
data = None
try:
path = store.index_images_path(namespace, repository)
data = store.get_content(path)
except exceptions.FileNotFoundError:
return toolkit.api_error('images not found', 404)
headers = generate_headers(namespace, repository, 'read')
return toolkit.response(data, 200, headers, True)
@app.route('/v1/repositories/<path:repository>/images', methods=['DELETE'])
@toolkit.parse_repository_name
@toolkit.requires_auth
def delete_repository_images(namespace, repository):
    # Does nothing; the index file is removed when the repository itself is deleted
headers = generate_headers(namespace, repository, 'delete')
return toolkit.response('', 204, headers)
@app.route('/v1/repositories/<path:repository>/auth', methods=['PUT'])
@toolkit.parse_repository_name
def put_repository_auth(namespace, repository):
return toolkit.response('OK')
|
from copy import deepcopy
from homeassistant.components.deconz.const import (
CONF_ALLOW_CLIP_SENSOR,
DOMAIN as DECONZ_DOMAIN,
)
from homeassistant.components.deconz.gateway import get_gateway_from_config_entry
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_POWER,
)
from homeassistant.setup import async_setup_component
from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration
SENSORS = {
"1": {
"id": "Light sensor id",
"name": "Light level sensor",
"type": "ZHALightLevel",
"state": {"lightlevel": 30000, "dark": False},
"config": {"on": True, "reachable": True, "temperature": 10},
"uniqueid": "00:00:00:00:00:00:00:00-00",
},
"2": {
"id": "Presence sensor id",
"name": "Presence sensor",
"type": "ZHAPresence",
"state": {"presence": False},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:01-00",
},
"3": {
"id": "Switch 1 id",
"name": "Switch 1",
"type": "ZHASwitch",
"state": {"buttonevent": 1000},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:02-00",
},
"4": {
"id": "Switch 2 id",
"name": "Switch 2",
"type": "ZHASwitch",
"state": {"buttonevent": 1000},
"config": {"battery": 100},
"uniqueid": "00:00:00:00:00:00:00:03-00",
},
"5": {
"id": "Daylight sensor id",
"name": "Daylight sensor",
"type": "Daylight",
"state": {"daylight": True, "status": 130},
"config": {},
"uniqueid": "00:00:00:00:00:00:00:04-00",
},
"6": {
"id": "Power sensor id",
"name": "Power sensor",
"type": "ZHAPower",
"state": {"current": 2, "power": 6, "voltage": 3},
"config": {"reachable": True},
"uniqueid": "00:00:00:00:00:00:00:05-00",
},
"7": {
"id": "Consumption id",
"name": "Consumption sensor",
"type": "ZHAConsumption",
"state": {"consumption": 2, "power": 6},
"config": {"reachable": True},
"uniqueid": "00:00:00:00:00:00:00:06-00",
},
"8": {
"id": "CLIP light sensor id",
"name": "CLIP light level sensor",
"type": "CLIPLightLevel",
"state": {"lightlevel": 30000},
"config": {"reachable": True},
"uniqueid": "00:00:00:00:00:00:00:07-00",
},
}
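# Note: deCONZ encodes light level as 10000 * log10(lux) + 1, so a raw
# lightlevel of 30000 reads as ~999.8 lx and 2000 as ~1.6 lx (asserted below).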
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a gateway."""
assert (
await async_setup_component(
hass, SENSOR_DOMAIN, {"sensor": {"platform": DECONZ_DOMAIN}}
)
is True
)
assert DECONZ_DOMAIN not in hass.data
async def test_no_sensors(hass):
"""Test that no sensors in deconz results in no sensor entities."""
await setup_deconz_integration(hass)
assert len(hass.states.async_all()) == 0
async def test_sensors(hass):
"""Test successful creation of sensor entities."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 5
light_level_sensor = hass.states.get("sensor.light_level_sensor")
assert light_level_sensor.state == "999.8"
assert light_level_sensor.attributes["device_class"] == DEVICE_CLASS_ILLUMINANCE
assert hass.states.get("sensor.presence_sensor") is None
assert hass.states.get("sensor.switch_1") is None
assert hass.states.get("sensor.switch_1_battery_level") is None
assert hass.states.get("sensor.switch_2") is None
switch_2_battery_level = hass.states.get("sensor.switch_2_battery_level")
assert switch_2_battery_level.state == "100"
assert switch_2_battery_level.attributes["device_class"] == DEVICE_CLASS_BATTERY
assert hass.states.get("sensor.daylight_sensor") is None
power_sensor = hass.states.get("sensor.power_sensor")
assert power_sensor.state == "6"
assert power_sensor.attributes["device_class"] == DEVICE_CLASS_POWER
consumption_sensor = hass.states.get("sensor.consumption_sensor")
assert consumption_sensor.state == "0.002"
assert "device_class" not in consumption_sensor.attributes
assert hass.states.get("sensor.clip_light_level_sensor") is None
# Event signals new light level
state_changed_event = {
"t": "event",
"e": "changed",
"r": "sensors",
"id": "1",
"state": {"lightlevel": 2000},
}
gateway.api.event_handler(state_changed_event)
assert hass.states.get("sensor.light_level_sensor").state == "1.6"
# Event signals new battery level
state_changed_event = {
"t": "event",
"e": "changed",
"r": "sensors",
"id": "4",
"config": {"battery": 75},
}
gateway.api.event_handler(state_changed_event)
await hass.async_block_till_done()
assert hass.states.get("sensor.switch_2_battery_level").state == "75"
await hass.config_entries.async_unload(config_entry.entry_id)
assert len(hass.states.async_all()) == 0
async def test_allow_clip_sensors(hass):
"""Test that CLIP sensors can be allowed."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = deepcopy(SENSORS)
config_entry = await setup_deconz_integration(
hass,
options={CONF_ALLOW_CLIP_SENSOR: True},
get_state_response=data,
)
assert len(hass.states.async_all()) == 6
assert hass.states.get("sensor.clip_light_level_sensor").state == "999.8"
# Disallow clip sensors
hass.config_entries.async_update_entry(
config_entry, options={CONF_ALLOW_CLIP_SENSOR: False}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 5
assert hass.states.get("sensor.clip_light_level_sensor") is None
# Allow clip sensors
hass.config_entries.async_update_entry(
config_entry, options={CONF_ALLOW_CLIP_SENSOR: True}
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 6
assert hass.states.get("sensor.clip_light_level_sensor")
async def test_add_new_sensor(hass):
"""Test that adding a new sensor works."""
config_entry = await setup_deconz_integration(hass)
gateway = get_gateway_from_config_entry(hass, config_entry)
assert len(hass.states.async_all()) == 0
state_added_event = {
"t": "event",
"e": "added",
"r": "sensors",
"id": "1",
"sensor": deepcopy(SENSORS["1"]),
}
gateway.api.event_handler(state_added_event)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("sensor.light_level_sensor").state == "999.8"
async def test_add_battery_later(hass):
"""Test that a sensor without an initial battery state creates a battery sensor once state exist."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["sensors"] = {"1": deepcopy(SENSORS["3"])}
config_entry = await setup_deconz_integration(hass, get_state_response=data)
gateway = get_gateway_from_config_entry(hass, config_entry)
remote = gateway.api.sensors["1"]
assert len(hass.states.async_all()) == 0
assert len(gateway.events) == 1
assert len(remote._callbacks) == 2 # Event and battery tracker
remote.update({"config": {"battery": 50}})
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert len(gateway.events) == 1
assert len(remote._callbacks) == 2 # Event and battery entity
assert hass.states.get("sensor.switch_1_battery_level")
|
import logging
from urllib.parse import urlparse
from synology_dsm import SynologyDSM
from synology_dsm.exceptions import (
SynologyDSMException,
SynologyDSMLogin2SAFailedException,
SynologyDSMLogin2SARequiredException,
SynologyDSMLoginInvalidException,
SynologyDSMRequestException,
)
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.components import ssdp
from homeassistant.const import (
CONF_DISKS,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
CONF_TIMEOUT,
CONF_USERNAME,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_VOLUMES,
DEFAULT_PORT,
DEFAULT_PORT_SSL,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SSL,
DEFAULT_TIMEOUT,
)
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
CONF_OTP_CODE = "otp_code"
def _discovery_schema_with_defaults(discovery_info):
return vol.Schema(_ordered_shared_schema(discovery_info))
def _user_schema_with_defaults(user_input):
user_schema = {
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
}
user_schema.update(_ordered_shared_schema(user_input))
return vol.Schema(user_schema)
def _ordered_shared_schema(schema_input):
return {
vol.Required(CONF_USERNAME, default=schema_input.get(CONF_USERNAME, "")): str,
vol.Required(CONF_PASSWORD, default=schema_input.get(CONF_PASSWORD, "")): str,
vol.Optional(CONF_PORT, default=schema_input.get(CONF_PORT, "")): str,
vol.Optional(CONF_SSL, default=schema_input.get(CONF_SSL, DEFAULT_SSL)): bool,
}
class SynologyDSMFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return SynologyDSMOptionsFlowHandler(config_entry)
def __init__(self):
"""Initialize the synology_dsm config flow."""
self.saved_user_input = {}
self.discovered_conf = {}
async def _show_setup_form(self, user_input=None, errors=None):
"""Show the setup form to the user."""
if not user_input:
user_input = {}
if self.discovered_conf:
user_input.update(self.discovered_conf)
step_id = "link"
data_schema = _discovery_schema_with_defaults(user_input)
else:
step_id = "user"
data_schema = _user_schema_with_defaults(user_input)
return self.async_show_form(
step_id=step_id,
data_schema=data_schema,
errors=errors or {},
description_placeholders=self.discovered_conf or {},
)
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is None:
return await self._show_setup_form(user_input, None)
if self.discovered_conf:
user_input.update(self.discovered_conf)
host = user_input[CONF_HOST]
port = user_input.get(CONF_PORT)
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
use_ssl = user_input.get(CONF_SSL, DEFAULT_SSL)
otp_code = user_input.get(CONF_OTP_CODE)
if not port:
if use_ssl is True:
port = DEFAULT_PORT_SSL
else:
port = DEFAULT_PORT
api = SynologyDSM(host, port, username, password, use_ssl, timeout=30)
try:
serial = await self.hass.async_add_executor_job(
_login_and_fetch_syno_info, api, otp_code
)
except SynologyDSMLogin2SARequiredException:
return await self.async_step_2sa(user_input)
except SynologyDSMLogin2SAFailedException:
errors[CONF_OTP_CODE] = "otp_failed"
user_input[CONF_OTP_CODE] = None
return await self.async_step_2sa(user_input, errors)
except SynologyDSMLoginInvalidException as ex:
_LOGGER.error(ex)
errors[CONF_USERNAME] = "invalid_auth"
except SynologyDSMRequestException as ex:
_LOGGER.error(ex)
errors[CONF_HOST] = "cannot_connect"
except SynologyDSMException as ex:
_LOGGER.error(ex)
errors["base"] = "unknown"
except InvalidData:
errors["base"] = "missing_data"
if errors:
return await self._show_setup_form(user_input, errors)
# Check if already configured
await self.async_set_unique_id(serial, raise_on_progress=False)
self._abort_if_unique_id_configured()
config_data = {
CONF_HOST: host,
CONF_PORT: port,
CONF_SSL: use_ssl,
CONF_USERNAME: username,
CONF_PASSWORD: password,
CONF_MAC: api.network.macs,
}
if otp_code:
config_data["device_token"] = api.device_token
if user_input.get(CONF_DISKS):
config_data[CONF_DISKS] = user_input[CONF_DISKS]
if user_input.get(CONF_VOLUMES):
config_data[CONF_VOLUMES] = user_input[CONF_VOLUMES]
return self.async_create_entry(title=host, data=config_data)
async def async_step_ssdp(self, discovery_info):
"""Handle a discovered synology_dsm."""
parsed_url = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION])
friendly_name = (
discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME].split("(", 1)[0].strip()
)
mac = discovery_info[ssdp.ATTR_UPNP_SERIAL].upper()
# Synology NAS can broadcast on multiple IP addresses, since they can be connected to multiple ethernets.
# The serial of the NAS is actually its MAC address.
if self._mac_already_configured(mac):
return self.async_abort(reason="already_configured")
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
self.discovered_conf = {
CONF_NAME: friendly_name,
CONF_HOST: parsed_url.hostname,
}
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = self.discovered_conf
return await self.async_step_user()
async def async_step_import(self, user_input=None):
"""Import a config entry."""
return await self.async_step_user(user_input)
async def async_step_link(self, user_input):
"""Link a config entry from discovery."""
return await self.async_step_user(user_input)
async def async_step_2sa(self, user_input, errors=None):
"""Enter 2SA code to anthenticate."""
if not self.saved_user_input:
self.saved_user_input = user_input
if not user_input.get(CONF_OTP_CODE):
return self.async_show_form(
step_id="2sa",
data_schema=vol.Schema({vol.Required(CONF_OTP_CODE): str}),
errors=errors or {},
)
user_input = {**self.saved_user_input, **user_input}
self.saved_user_input = {}
return await self.async_step_user(user_input)
def _mac_already_configured(self, mac):
"""See if we already have configured a NAS with this MAC address."""
existing_macs = [
mac.replace("-", "")
for entry in self._async_current_entries()
for mac in entry.data.get(CONF_MAC, [])
]
return mac in existing_macs
class SynologyDSMOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
data_schema = vol.Schema(
{
vol.Optional(
CONF_SCAN_INTERVAL,
default=self.config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
): cv.positive_int,
vol.Optional(
CONF_TIMEOUT,
default=self.config_entry.options.get(
CONF_TIMEOUT, DEFAULT_TIMEOUT
),
): cv.positive_int,
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
def _login_and_fetch_syno_info(api, otp_code):
"""Login to the NAS and fetch basic data."""
# These do i/o
api.login(otp_code)
api.utilisation.update()
api.storage.update()
api.network.update()
if (
not api.information.serial
or api.utilisation.cpu_user_load is None
or not api.storage.volumes_ids
or not api.network.macs
):
raise InvalidData
return api.information.serial
class InvalidData(exceptions.HomeAssistantError):
"""Error to indicate we get invalid data from the nas."""
|
import numpy as np
import os
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.datasets.voc import voc_utils
from chainercv.utils import read_image
from chainercv.utils import read_label
class VOCSemanticSegmentationDataset(GetterDataset):
"""Semantic segmentation dataset for PASCAL `VOC2012`_.
.. _`VOC2012`: http://host.robots.ox.ac.uk/pascal/VOC/voc2012/
Args:
data_dir (string): Path to the root of the training data. If this is
:obj:`auto`, this class will automatically download data for you
under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/voc`.
split ({'train', 'val', 'trainval'}): Select a split of the dataset.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)`", :obj:`float32`, \
"RGB, :math:`[0, 255]`"
:obj:`label`, ":math:`(H, W)`", :obj:`int32`, \
":math:`[-1, \#class - 1]`"
"""
def __init__(self, data_dir='auto', split='train'):
super(VOCSemanticSegmentationDataset, self).__init__()
if split not in ['train', 'trainval', 'val']:
raise ValueError(
'please pick split from \'train\', \'trainval\', \'val\'')
if data_dir == 'auto':
data_dir = voc_utils.get_voc('2012', split)
id_list_file = os.path.join(
data_dir, 'ImageSets/Segmentation/{0}.txt'.format(split))
        with open(id_list_file) as f:
            self.ids = [id_.strip() for id_ in f]
self.data_dir = data_dir
self.add_getter('img', self._get_image)
self.add_getter('label', self._get_label)
def __len__(self):
return len(self.ids)
def _get_image(self, i):
img_path = os.path.join(
self.data_dir, 'JPEGImages', self.ids[i] + '.jpg')
img = read_image(img_path, color=True)
return img
def _get_label(self, i):
label_path = os.path.join(
self.data_dir, 'SegmentationClass', self.ids[i] + '.png')
label = read_label(label_path, dtype=np.int32)
label[label == 255] = -1
# (1, H, W) -> (H, W)
return label
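# Illustrative usage (downloads VOC2012 when data_dir='auto'):
#   dataset = VOCSemanticSegmentationDataset(split='val')
#   img, label = dataset[0]  # img: (3, H, W) float32; label: (H, W) int32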
|
from datetime import timedelta
import logging
import urllib
from pyW215.pyW215 import SmartPlug
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_TOTAL_CONSUMPTION = "total_consumption"
CONF_USE_LEGACY_PROTOCOL = "use_legacy_protocol"
DEFAULT_NAME = "D-Link Smart Plug W215"
DEFAULT_PASSWORD = ""
DEFAULT_USERNAME = "admin"
SCAN_INTERVAL = timedelta(minutes=2)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USE_LEGACY_PROTOCOL, default=False): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a D-Link Smart Plug."""
host = config[CONF_HOST]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
use_legacy_protocol = config[CONF_USE_LEGACY_PROTOCOL]
name = config[CONF_NAME]
smartplug = SmartPlug(host, password, username, use_legacy_protocol)
data = SmartPlugData(smartplug)
add_entities([SmartPlugSwitch(hass, data, name)], True)
class SmartPlugSwitch(SwitchEntity):
"""Representation of a D-Link Smart Plug switch."""
def __init__(self, hass, data, name):
"""Initialize the switch."""
self.units = hass.config.units
self.data = data
self._name = name
@property
def name(self):
"""Return the name of the Smart Plug."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
try:
ui_temp = self.units.temperature(int(self.data.temperature), TEMP_CELSIUS)
temperature = ui_temp
except (ValueError, TypeError):
temperature = None
try:
total_consumption = float(self.data.total_consumption)
except (ValueError, TypeError):
total_consumption = None
attrs = {
ATTR_TOTAL_CONSUMPTION: total_consumption,
ATTR_TEMPERATURE: temperature,
}
return attrs
@property
def current_power_w(self):
"""Return the current power usage in Watt."""
try:
return float(self.data.current_consumption)
except (ValueError, TypeError):
return None
@property
def is_on(self):
"""Return true if switch is on."""
return self.data.state == "ON"
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.data.smartplug.state = "ON"
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.data.smartplug.state = "OFF"
def update(self):
"""Get the latest data from the smart plug and updates the states."""
self.data.update()
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.data.available
class SmartPlugData:
"""Get the latest data from smart plug."""
def __init__(self, smartplug):
"""Initialize the data object."""
self.smartplug = smartplug
self.state = None
self.temperature = None
self.current_consumption = None
self.total_consumption = None
self.available = False
self._n_tried = 0
self._last_tried = None
def update(self):
"""Get the latest data from the smart plug."""
        if self._last_tried is not None:
            last_try_min = (dt_util.now() - self._last_tried).total_seconds() / 60
            retry_min = min(self._n_tried * 2, 10) - last_try_min
            if self._n_tried > 0 and retry_min > 0:
                _LOGGER.warning("Waiting %s min to retry", retry_min)
                return
_state = "unknown"
try:
self._last_tried = dt_util.now()
_state = self.smartplug.state
except urllib.error.HTTPError:
_LOGGER.error("D-Link connection problem")
if _state == "unknown":
self._n_tried += 1
self.available = False
_LOGGER.warning("Failed to connect to D-Link switch")
return
self.state = _state
self.available = True
self.temperature = self.smartplug.temperature
self.current_consumption = self.smartplug.current_consumption
self.total_consumption = self.smartplug.total_consumption
self._n_tried = 0
|
import os
import shutil
import tempfile
from trashcli.list import ListCmd
from .files import (require_empty_dir, make_sticky_dir, make_unsticky_dir)
from .output_collector import OutputCollector
from .fake_trash_dir import (
a_trashinfo_without_date,
a_trashinfo_without_path,
a_trashinfo_with_invalid_date, FakeTrashDir)
from textwrap import dedent
from trashcli.fs import FileSystemReader
from .asserts import assert_equals_with_unidiff
import unittest
class Setup(unittest.TestCase):
def setUp(self):
self.xdg_data_home = tempfile.mkdtemp()
require_empty_dir('topdir')
self.user = TrashListUser(self.xdg_data_home)
def tearDown(self):
shutil.rmtree(self.xdg_data_home)
def sort_lines(lines):
return "".join(sorted(lines.splitlines(True)))
class Test_describe_trash_list(Setup):
def test_should_output_the_help_message(self):
self.user.run_trash_list('--help')
assert_equals_with_unidiff(dedent("""\
Usage: trash-list [OPTIONS...]
List trashed files
Options:
--version show program's version number and exit
-h, --help show this help message and exit
Report bugs to https://github.com/andreafrancia/trash-cli/issues
"""), self.user.output())
def test_should_output_nothing_when_trashcan_is_empty(self):
self.user.run_trash_list()
assert_equals_with_unidiff('', self.user.output())
def test_should_output_deletion_date_and_path(self):
        self.user.home_trashdir.add_trashinfo2('/absolute/path',
'2001-02-03T23:55:59')
self.user.run_trash_list()
assert_equals_with_unidiff("2001-02-03 23:55:59 /aboslute/path\n",
self.user.output())
def test_should_output_info_for_multiple_files(self):
self.user.home_trashdir.add_trashinfo2("/file1", "2000-01-01T00:00:01")
self.user.home_trashdir.add_trashinfo2("/file2", "2000-01-01T00:00:02")
self.user.home_trashdir.add_trashinfo2("/file3", "2000-01-01T00:00:03")
self.user.run_trash_list()
output = self.user.output()
assert_equals_with_unidiff("2000-01-01 00:00:01 /file1\n"
"2000-01-01 00:00:02 /file2\n"
"2000-01-01 00:00:03 /file3\n",
sort_lines(output))
def test_should_output_unknown_dates_with_question_marks(self):
self.user.home_trashdir.add_trashinfo(a_trashinfo_without_date())
self.user.run_trash_list()
assert_equals_with_unidiff("????-??-?? ??:??:?? /path\n",
self.user.output())
def test_should_output_invalid_dates_using_question_marks(self):
self.user.home_trashdir.add_trashinfo(a_trashinfo_with_invalid_date())
self.user.run_trash_list()
assert_equals_with_unidiff("????-??-?? ??:??:?? /path\n",
self.user.output())
def test_should_warn_about_empty_trashinfos(self):
self.user.home_trashdir.add_trashinfo('', 'empty')
self.user.run_trash_list()
assert_equals_with_unidiff(
"Parse Error: %(XDG_DATA_HOME)s/Trash/info/empty.trashinfo: "
"Unable to parse Path.\n" % {"XDG_DATA_HOME":self.xdg_data_home},
self.user.error())
def test_should_warn_about_unreadable_trashinfo(self):
self.user.home_trashdir.add_unreadable_trashinfo('unreadable')
self.user.run_trash_list()
assert_equals_with_unidiff(
"[Errno 13] Permission denied: "
"'%(XDG_DATA_HOME)s/Trash/info/unreadable.trashinfo'\n" % {
'XDG_DATA_HOME': self.xdg_data_home
},
self.user.error())
def test_should_warn_about_unexistent_path_entry(self):
self.user.home_trashdir.add_trashinfo(a_trashinfo_without_path())
self.user.run_trash_list()
assert_equals_with_unidiff(
"Parse Error: %(XDG_DATA_HOME)s/Trash/info/1.trashinfo: "
"Unable to parse Path.\n" % {'XDG_DATA_HOME': self.xdg_data_home},
self.user.error())
assert_equals_with_unidiff('', self.user.output())
class Test_with_a_top_trash_dir(Setup):
def setUp(self):
        super().setUp()
self.top_trashdir1 = FakeTrashDir('topdir/.Trash/123')
self.user.set_fake_uid(123)
self.user.add_volume('topdir')
def test_should_list_its_contents_if_parent_is_sticky(self):
make_sticky_dir('topdir/.Trash')
self.and_contains_a_valid_trashinfo()
self.user.run_trash_list()
assert_equals_with_unidiff("2000-01-01 00:00:00 topdir/file1\n",
self.user.output())
def test_and_should_warn_if_parent_is_not_sticky(self):
make_unsticky_dir('topdir/.Trash')
self.and_dir_exists('topdir/.Trash/123')
self.user.run_trash_list()
assert_equals_with_unidiff(
"TrashDir skipped because parent not sticky: topdir/.Trash/123\n",
self.user.error()
)
def test_but_it_should_not_warn_when_the_parent_is_unsticky_but_there_is_no_trashdir(self):
make_unsticky_dir('topdir/.Trash')
self.but_does_not_exists_any('topdir/.Trash/123')
self.user.run_trash_list()
assert_equals_with_unidiff("", self.user.error())
def test_should_ignore_trash_from_a_unsticky_topdir(self):
make_unsticky_dir('topdir/.Trash')
self.and_contains_a_valid_trashinfo()
self.user.run_trash_list()
assert_equals_with_unidiff('', self.user.output())
    def test_it_should_ignore_Trash_if_it_is_a_symlink(self):
self.when_is_a_symlink_to_a_dir('topdir/.Trash')
self.and_contains_a_valid_trashinfo()
self.user.run_trash_list()
assert_equals_with_unidiff('', self.user.output())
def test_and_should_warn_about_it(self):
self.when_is_a_symlink_to_a_dir('topdir/.Trash')
self.and_contains_a_valid_trashinfo()
self.user.run_trash_list()
assert_equals_with_unidiff(
'TrashDir skipped because parent not sticky: topdir/.Trash/123\n',
self.user.error()
)
def but_does_not_exists_any(self, path):
assert not os.path.exists(path)
def and_dir_exists(self, path):
os.mkdir(path)
assert os.path.isdir(path)
def and_contains_a_valid_trashinfo(self):
self.top_trashdir1.add_trashinfo2('file1', '2000-01-01T00:00:00')
def when_is_a_symlink_to_a_dir(self, path):
dest = "%s-dest" % path
os.mkdir(dest)
rel_dest = os.path.basename(dest)
os.symlink(rel_dest, path)
class Test_describe_when_a_file_is_in_alternate_top_trashdir(Setup):
def test_should_list_contents_of_alternate_trashdir(self):
self.user.set_fake_uid(123)
self.user.add_volume('topdir')
self.top_trashdir2 = FakeTrashDir('topdir/.Trash-123')
self.top_trashdir2.add_trashinfo2('file', '2000-01-01T00:00:00')
self.user.run_trash_list()
assert_equals_with_unidiff("2000-01-01 00:00:00 topdir/file\n",
self.user.output())
class TrashListUser:
def __init__(self, xdg_data_home):
self.stdout = OutputCollector()
self.stderr = OutputCollector()
self.environ = {'XDG_DATA_HOME': xdg_data_home}
self.fake_getuid = self.error
self.volumes = []
trash_dir = os.path.join(xdg_data_home, "Trash")
self.home_trashdir = FakeTrashDir(trash_dir)
def run_trash_list(self, *args):
self.run('trash-list', *args)
def run(self,*argv):
file_reader = FileSystemReader()
file_reader.list_volumes = lambda: self.volumes
ListCmd(
out = self.stdout,
err = self.stderr,
environ = self.environ,
getuid = self.fake_getuid,
file_reader = file_reader,
list_volumes = lambda: self.volumes,
).run(*argv)
def set_fake_uid(self, uid):
self.fake_getuid = lambda: uid
def add_volume(self, mount_point):
self.volumes.append(mount_point)
def error(self):
return self.stderr.getvalue()
def output(self):
return self.stdout.getvalue()
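# Usage sketch outside the unittest harness, assuming a scratch directory is
# acceptable as XDG_DATA_HOME:
#
#   user = TrashListUser(tempfile.mkdtemp())
#   user.run_trash_list()
#   assert user.output() == ''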
|
from collections import OrderedDict
import voluptuous as vol
from homeassistant import config_entries
from .const import (
CONF_NETWORK_KEY,
CONF_USB_STICK_PATH,
DEFAULT_CONF_USB_STICK_PATH,
DOMAIN,
)
@config_entries.HANDLERS.register(DOMAIN)
class ZwaveFlowHandler(config_entries.ConfigFlow):
"""Handle a Z-Wave config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the Z-Wave config flow."""
self.usb_path = CONF_USB_STICK_PATH
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
errors = {}
fields = OrderedDict()
fields[
vol.Required(CONF_USB_STICK_PATH, default=DEFAULT_CONF_USB_STICK_PATH)
] = str
fields[vol.Optional(CONF_NETWORK_KEY)] = str
if user_input is not None:
# Check if USB path is valid
from openzwave.object import ZWaveException
from openzwave.option import ZWaveOption
try:
from functools import partial
option = await self.hass.async_add_executor_job( # noqa: F841 pylint: disable=unused-variable
partial(
ZWaveOption,
user_input[CONF_USB_STICK_PATH],
user_path=self.hass.config.config_dir,
)
)
except ZWaveException:
errors["base"] = "option_error"
return self.async_show_form(
step_id="user", data_schema=vol.Schema(fields), errors=errors
)
if user_input.get(CONF_NETWORK_KEY) is None:
# Generate a random key
from random import choice
key = ""
for i in range(16):
key += "0x"
key += choice("1234567890ABCDEF")
key += choice("1234567890ABCDEF")
if i < 15:
key += ", "
user_input[CONF_NETWORK_KEY] = key
return self.async_create_entry(
title="Z-Wave",
data={
CONF_USB_STICK_PATH: user_input[CONF_USB_STICK_PATH],
CONF_NETWORK_KEY: user_input[CONF_NETWORK_KEY],
},
)
return self.async_show_form(step_id="user", data_schema=vol.Schema(fields))
async def async_step_import(self, info):
"""Import existing configuration from Z-Wave."""
if self._async_current_entries():
return self.async_abort(reason="already_setup")
return self.async_create_entry(
title="Z-Wave (import from configuration.yaml)",
data={
CONF_USB_STICK_PATH: info.get(CONF_USB_STICK_PATH),
CONF_NETWORK_KEY: info.get(CONF_NETWORK_KEY),
},
)
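# A minimal standalone sketch of the network-key format generated above,
# functionally equivalent but using the stdlib secrets module:
#
#   import secrets
#   key = ", ".join("0x{:02X}".format(b) for b in secrets.token_bytes(16))
#   # -> e.g. "0x3F, 0xA2, ..." (16 comma-separated hex bytes)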
|
import pytest
from unittest.mock import Mock
from kombu import Connection, Exchange, Queue
class SimpleBase:
def Queue(self, name, *args, **kwargs):
q = name
if not isinstance(q, Queue):
q = self.__class__.__name__
if name:
q = f'{q}.{name}'
return self._Queue(q, *args, **kwargs)
def _Queue(self, *args, **kwargs):
raise NotImplementedError()
def setup(self):
self.connection = Connection(transport='memory')
self.connection.default_channel.exchange_declare('amq.direct')
self.q = self.Queue(None, no_ack=True)
def teardown(self):
self.q.close()
self.connection.close()
self.connection = None
self.q = None
def test_produce__consume(self):
q = self.Queue('test_produce__consume', no_ack=True)
q.put({'hello': 'Simple'})
assert q.get(timeout=1).payload == {'hello': 'Simple'}
with pytest.raises(q.Empty):
q.get(timeout=0.1)
def test_produce__basic_get(self):
q = self.Queue('test_produce__basic_get', no_ack=True)
q.put({'hello': 'SimpleSync'})
assert q.get_nowait().payload == {'hello': 'SimpleSync'}
with pytest.raises(q.Empty):
q.get_nowait()
q.put({'hello': 'SimpleSync'})
assert q.get(block=False).payload == {'hello': 'SimpleSync'}
with pytest.raises(q.Empty):
q.get(block=False)
def test_clear(self):
q = self.Queue('test_clear', no_ack=True)
for i in range(10):
q.put({'hello': 'SimplePurge%d' % (i,)})
assert q.clear() == 10
def test_enter_exit(self):
q = self.Queue('test_enter_exit')
q.close = Mock()
assert q.__enter__() is q
q.__exit__()
q.close.assert_called_with()
def test_qsize(self):
        q = self.Queue('test_qsize', no_ack=True)
for i in range(10):
q.put({'hello': 'SimplePurge%d' % (i,)})
assert q.qsize() == 10
assert len(q) == 10
def test_autoclose(self):
channel = self.connection.channel()
q = self.Queue('test_autoclose', no_ack=True, channel=channel)
q.close()
def test_custom_Queue(self):
n = self.__class__.__name__
exchange = Exchange(f'{n}-test.custom.Queue')
queue = Queue(f'{n}-test.custom.Queue',
exchange,
'my.routing.key')
q = self.Queue(queue)
assert q.consumer.queues[0] == queue
q.close()
def test_bool(self):
        q = self.Queue('test_bool')
assert q
class test_SimpleQueue(SimpleBase):
def _Queue(self, *args, **kwargs):
return self.connection.SimpleQueue(*args, **kwargs)
def test_is_ack(self):
        q = self.Queue('test_is_ack')
assert not q.no_ack
def test_queue_args(self):
q = self.Queue('test_queue_args', queue_args={'x-queue-mode': 'lazy'})
assert len(q.queue.queue_arguments) == 1
assert q.queue.queue_arguments['x-queue-mode'] == 'lazy'
q = self.Queue('test_queue_args')
assert q.queue.queue_arguments == {}
def test_exchange_opts(self):
q = self.Queue('test_exchange_opts_a',
exchange_opts={'durable': True, 'type': 'fanout',
'delivery_mode': 'persistent'})
assert q.queue.exchange.type == 'fanout'
assert q.queue.exchange.durable
assert not q.queue.exchange.auto_delete
delivery_mode_code = q.queue.exchange.PERSISTENT_DELIVERY_MODE
assert q.queue.exchange.delivery_mode == delivery_mode_code
q = self.Queue('test_exchange_opts_b')
assert q.queue.exchange.type == 'direct'
assert q.queue.exchange.durable
assert not q.queue.exchange.auto_delete
def test_queue_opts(self):
q = self.Queue('test_queue_opts', queue_opts={'auto_delete': False})
assert not q.queue.auto_delete
class test_SimpleBuffer(SimpleBase):
def Queue(self, *args, **kwargs):
return self.connection.SimpleBuffer(*args, **kwargs)
def test_is_no_ack(self):
q = self.Queue('test_is_no_ack')
assert q.no_ack
def test_queue_args(self):
q = self.Queue('test_queue_args', queue_args={'x-queue-mode': 'lazy'})
assert len(q.queue.queue_arguments) == 1
assert q.queue.queue_arguments['x-queue-mode'] == 'lazy'
def test_exchange_opts(self):
q = self.Queue('test_exchange_opts_a',
exchange_opts={'durable': True, 'auto_delete': True,
'delivery_mode': 'persistent'})
assert q.queue.exchange.type == 'direct'
assert q.queue.exchange.durable
assert q.queue.exchange.auto_delete
delivery_mode_code = q.queue.exchange.PERSISTENT_DELIVERY_MODE
assert q.queue.exchange.delivery_mode == delivery_mode_code
q = self.Queue('test_exchange_opts_b')
assert q.queue.exchange.type == 'direct'
assert not q.queue.exchange.durable
assert q.queue.exchange.auto_delete
def test_queue_opts(self):
q = self.Queue('test_queue_opts', queue_opts={'auto_delete': False})
assert not q.queue.durable
assert not q.queue.auto_delete
q = self.Queue('test_queue_opts')
assert not q.queue.durable
assert q.queue.auto_delete
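# Usage sketch outside the test harness, using kombu's SimpleQueue API with
# the in-memory transport:
#
#   from kombu import Connection
#   with Connection(transport='memory') as conn:
#       q = conn.SimpleQueue('demo')
#       q.put({'hello': 'world'})
#       message = q.get(timeout=1)
#       message.ack()
#       q.close()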
|
from django.conf import settings
from django.contrib.auth import get_user_model, authenticate, login, password_validation
from django.contrib.auth.forms import PasswordResetForm
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ValidationError
from django.forms import widgets, ModelForm
from django.template.loader import get_template, select_template, render_to_string
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from djng.forms import fields, NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
from post_office import mail as post_office_mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.forms.base import UniqueEmailValidationMixin
from shop.models.customer import CustomerModel
from shop.signals import email_queued
class RegisterUserForm(NgModelFormMixin, NgFormValidationMixin, UniqueEmailValidationMixin, Bootstrap3ModelForm):
form_name = 'register_user_form'
scope_prefix = 'form_data'
field_css_classes = 'input-group has-feedback'
email = fields.EmailField(
label=_("Your e-mail address"),
widget=widgets.EmailInput(attrs={'placeholder': _("E-mail address")})
)
preset_password = fields.BooleanField(
label=_("Preset password"),
widget=widgets.CheckboxInput(attrs={'class': 'form-check-input'}),
required=False,
help_text=_("Send a randomly generated password to your e-mail address."),
)
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = fields.CharField(
label=_("New password"),
widget=widgets.PasswordInput(attrs={'placeholder': _("Password")}),
strip=False,
help_text=password_validation.password_validators_help_text_html(),
)
password2 = fields.CharField(
label=_("New password confirmation"),
strip=False,
widget=widgets.PasswordInput(attrs={'placeholder': _("Password")}),
help_text=format_html('<ul><li>{}</li></ul>', _("Confirm the password.")),
)
class Meta:
model = CustomerModel
fields = ['email', 'password1', 'password2']
def __init__(self, data=None, instance=None, *args, **kwargs):
if data and data.get('preset_password', False):
pwd_length = max(self.base_fields['password1'].min_length or 8, 8)
password = get_user_model().objects.make_random_password(pwd_length)
data['password1'] = data['password2'] = password
super().__init__(data=data, instance=instance, *args, **kwargs)
def clean(self):
cleaned_data = super().clean()
password1 = cleaned_data.get('password1')
password2 = cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
password_validation.validate_password(password2)
return cleaned_data
def save(self, request=None, commit=True):
self.instance.user.is_active = True
self.instance.user.email = self.cleaned_data['email']
self.instance.user.set_password(self.cleaned_data['password1'])
self.instance.recognize_as_registered(request, commit=False)
customer = super().save(commit)
password = self.cleaned_data['password1']
if self.cleaned_data['preset_password']:
self._send_password(request, customer.user, password)
user = authenticate(username=customer.user.username, password=password)
login(request, user)
return customer
def _send_password(self, request, user, password):
current_site = get_current_site(request)
context = {
'site_name': current_site.name,
'absolute_base_uri': request.build_absolute_uri('/'),
'email': user.email,
'password': password,
'user': user,
}
subject_template = select_template([
'{}/email/register-user-subject.txt'.format(app_settings.APP_LABEL),
'shop/email/register-user-subject.txt',
])
# Email subject *must not* contain newlines
subject = ''.join(subject_template.render(context).splitlines())
body_text_template = select_template([
'{}/email/register-user-body.txt'.format(app_settings.APP_LABEL),
'shop/email/register-user-body.txt',
])
body_html_template = select_template([
'{}/email/register-user-body.html'.format(app_settings.APP_LABEL),
'shop/email/register-user-body.html',
], using='post_office')
message = body_text_template.render(context)
html_message = body_html_template.render(context)
        from_email = settings.DEFAULT_FROM_EMAIL
user.email_user(subject, message, from_email=from_email, html_message=html_message)
email_queued()
class ContinueAsGuestForm(ModelForm):
"""
Handles Customer's decision to order as guest.
"""
form_name = 'continue_as_guest_form'
scope_prefix = 'form_data'
class Meta:
model = CustomerModel
fields = () # this form doesn't show any fields
def save(self, request=None, commit=True):
self.instance.recognize_as_guest(request, commit=False)
self.instance.user.is_active = app_settings.GUEST_IS_ACTIVE_USER
if self.instance.user.is_active:
            # set a usable password, otherwise the user cannot reset their password later
password = get_user_model().objects.make_random_password(length=30)
self.instance.user.set_password(password)
return super().save(commit)
class PasswordResetRequestForm(PasswordResetForm):
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
try:
email_template = EmailTemplate.objects.get(name='password-reset-inform')
except EmailTemplate.DoesNotExist:
subject = render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
body = render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name:
template = get_template(html_email_template_name, using='post_office')
html = template.render(context)
email_message.attach_alternative(html, 'text/html')
template.attach_related(email_message)
email_message.send()
else:
context['user'] = str(context['user'])
context['uid'] = context['uid'].decode('utf-8')
post_office_mail.send(to_email, template=email_template, context=context, render_on_delivery=True)
email_queued()
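# Usage sketch: Django's PasswordResetForm drives the send_mail() override
# above via its save() method (the save() keyword arguments are the stock
# Django API):
#
#   form = PasswordResetRequestForm(data={'email': 'user@example.com'})
#   if form.is_valid():
#       form.save(request=request, from_email=settings.DEFAULT_FROM_EMAIL)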
|
from datetime import timedelta
import logging
from typing import Any, Callable, Dict, List, Optional
from bsblan import BSBLan, BSBLanError, Info, State
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_ECO,
PRESET_NONE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_NAME,
ATTR_TEMPERATURE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_TARGET_TEMPERATURE,
DATA_BSBLAN_CLIENT,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(seconds=20)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
HVAC_MODES = [
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
]
PRESET_MODES = [
PRESET_ECO,
PRESET_NONE,
]
HA_STATE_TO_BSBLAN = {
HVAC_MODE_AUTO: "1",
HVAC_MODE_HEAT: "3",
HVAC_MODE_OFF: "0",
}
BSBLAN_TO_HA_STATE = {value: key for key, value in HA_STATE_TO_BSBLAN.items()}
HA_PRESET_TO_BSBLAN = {
PRESET_ECO: "2",
}
BSBLAN_TO_HA_PRESET = {
    "2": PRESET_ECO,
}
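# Note: the device reports the eco preset through the same numeric field as
# the HVAC mode (see async_update below), which is why "2" maps to PRESET_ECO
# rather than to an HVAC mode of its own.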
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up BSBLan device based on a config entry."""
bsblan: BSBLan = hass.data[DOMAIN][entry.entry_id][DATA_BSBLAN_CLIENT]
info = await bsblan.info()
async_add_entities([BSBLanClimate(entry.entry_id, bsblan, info)], True)
class BSBLanClimate(ClimateEntity):
"""Defines a BSBLan climate device."""
def __init__(
self,
entry_id: str,
bsblan: BSBLan,
info: Info,
):
"""Initialize BSBLan climate device."""
self._current_temperature: Optional[float] = None
self._available = True
self._hvac_mode: Optional[str] = None
self._target_temperature: Optional[float] = None
self._temperature_unit = None
self._preset_mode = None
self._store_hvac_mode = None
self._info: Info = info
self.bsblan = bsblan
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._info.device_identification
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return self._info.device_identification
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement which this thermostat uses."""
if self._temperature_unit == "°C":
return TEMP_CELSIUS
return TEMP_FAHRENHEIT
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_FLAGS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def hvac_mode(self):
"""Return the current operation mode."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return HVAC_MODES
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def preset_modes(self):
"""List of available preset modes."""
return PRESET_MODES
@property
def preset_mode(self):
"""Return the preset_mode."""
return self._preset_mode
async def async_set_preset_mode(self, preset_mode):
"""Set preset mode."""
_LOGGER.debug("Setting preset mode to: %s", preset_mode)
if preset_mode == PRESET_NONE:
# restore previous hvac mode
self._hvac_mode = self._store_hvac_mode
else:
# Store hvac mode.
self._store_hvac_mode = self._hvac_mode
await self.async_set_data(preset_mode=preset_mode)
async def async_set_hvac_mode(self, hvac_mode):
"""Set HVAC mode."""
_LOGGER.debug("Setting HVAC mode to: %s", hvac_mode)
# preset should be none when hvac mode is set
self._preset_mode = PRESET_NONE
await self.async_set_data(hvac_mode=hvac_mode)
async def async_set_temperature(self, **kwargs):
"""Set new target temperatures."""
await self.async_set_data(**kwargs)
async def async_set_data(self, **kwargs: Any) -> None:
"""Set device settings using BSBLan."""
data = {}
if ATTR_TEMPERATURE in kwargs:
data[ATTR_TARGET_TEMPERATURE] = kwargs[ATTR_TEMPERATURE]
_LOGGER.debug("Set temperature data = %s", data)
if ATTR_HVAC_MODE in kwargs:
data[ATTR_HVAC_MODE] = HA_STATE_TO_BSBLAN[kwargs[ATTR_HVAC_MODE]]
_LOGGER.debug("Set hvac mode data = %s", data)
if ATTR_PRESET_MODE in kwargs:
            # for now we set the preset as hvac_mode as the API expects this
data[ATTR_HVAC_MODE] = HA_PRESET_TO_BSBLAN[kwargs[ATTR_PRESET_MODE]]
try:
await self.bsblan.thermostat(**data)
except BSBLanError:
_LOGGER.error("An error occurred while updating the BSBLan device")
self._available = False
async def async_update(self) -> None:
"""Update BSBlan entity."""
try:
state: State = await self.bsblan.state()
except BSBLanError:
if self._available:
_LOGGER.error("An error occurred while updating the BSBLan device")
self._available = False
return
self._available = True
self._current_temperature = float(state.current_temperature.value)
self._target_temperature = float(state.target_temperature.value)
# check if preset is active else get hvac mode
_LOGGER.debug("state hvac/preset mode: %s", state.hvac_mode.value)
if state.hvac_mode.value == "2":
self._preset_mode = PRESET_ECO
else:
self._hvac_mode = BSBLAN_TO_HA_STATE[state.hvac_mode.value]
self._preset_mode = PRESET_NONE
self._temperature_unit = state.current_temperature.unit
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about this BSBLan device."""
return {
ATTR_IDENTIFIERS: {(DOMAIN, self._info.device_identification)},
ATTR_NAME: "BSBLan Device",
ATTR_MANUFACTURER: "BSBLan",
ATTR_MODEL: self._info.controller_variant,
}
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from slabinfo import SlabInfoCollector
##########################################################################
class TestSlabInfoCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('SlabInfoCollector', {
'interval': 1
})
self.collector = SlabInfoCollector(config, None)
def test_import(self):
self.assertTrue(SlabInfoCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/slabinfo', 'r')
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
SlabInfoCollector.PROC = self.getFixturePath('slabinfo')
self.collector.collect()
metrics = self.getPickledResults('expected.pkl')
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER
from paasta_tools.mesos_tools import status_mesos_tasks_verbose
from paasta_tools.utils import calculate_tail_lines
from paasta_tools.utils import compose_job_id
def perform_command(command, service, instance, cluster, verbose, soa_dir):
tail_lines = calculate_tail_lines(verbose_level=verbose)
# We have to add a spacer at the end to make sure we only return
# things for service.main and not service.main_foo
task_id_prefix = "{}{}".format(compose_job_id(service, instance), MESOS_TASK_SPACER)
if command == "status":
print(
status_mesos_tasks_verbose(
job_id=task_id_prefix,
get_short_task_id=lambda x: x,
tail_lines=tail_lines,
)
)
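# Illustration of the prefix guard above, assuming MESOS_TASK_SPACER is ".":
# the prefix "service.main." matches tasks of service.main but never tasks of
# service.main_foo.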
|
import pytest
from decouple import Choices
FRUIT_APPLE = 'apple'
FRUIT_BANANA = 'banana'
FRUIT_COCONUT = 'coconut'
ALLOWED_FRUITS = (
(FRUIT_APPLE, 'Apple'),
(FRUIT_BANANA, 'Banana'),
(FRUIT_COCONUT, 'Coconut'),
)
ZERO = 0
THREE = 3
SEVEN = 7
ALLOWED_NUMBERS = (
(ZERO, 'Zero'),
(THREE, 'Three'),
(SEVEN, 'Seven'),
)
def test_default_cast_with_flat_list():
"""Default cast with a flat list."""
choices = Choices(['a', 'b', 'c'])
assert 'a' == choices('a')
assert 'b' == choices('b')
assert 'c' == choices('c')
with pytest.raises(ValueError):
choices('d')
def test_cast_to_int_with_flat_list():
"""Cast to int with a flat list."""
choices = Choices([3, 5, 7], cast=int)
assert 3 == choices('3')
assert 5 == choices('5')
assert 7 == choices('7')
with pytest.raises(ValueError):
choices(1)
def test_default_with_django_like_choices():
"""Default cast with a Django-like choices tuple."""
choices = Choices(choices=ALLOWED_FRUITS)
assert 'apple' == choices('apple')
assert 'banana' == choices('banana')
assert 'coconut' == choices('coconut')
with pytest.raises(ValueError):
choices('strawberry')
def test_cast_to_int_with_django_like_choices():
"""Cast to int with a Django-like choices tuple."""
choices = Choices(cast=int, choices=ALLOWED_NUMBERS)
assert 0 == choices('0')
assert 3 == choices('3')
assert 7 == choices('7')
with pytest.raises(ValueError):
choices(1)
def test_default_cast_with_both_flat_list_and_django_like_choices():
    """Default cast with both a flat list and a Django-like choices tuple."""
choices = Choices(['a', 'b', 'c'], choices=ALLOWED_FRUITS)
assert 'a' == choices('a')
assert 'b' == choices('b')
assert 'c' == choices('c')
assert 'apple' == choices('apple')
assert 'banana' == choices('banana')
assert 'coconut' == choices('coconut')
with pytest.raises(ValueError):
choices('d')
with pytest.raises(ValueError):
choices('watermelon')
def test_cast_to_int_with_both_flat_list_and_django_like_choices():
    """Cast to int with both a flat list and a Django-like choices tuple."""
choices = Choices([7, 14, 42], cast=int, choices=ALLOWED_NUMBERS)
assert 7 == choices('7')
assert 14 == choices('14')
assert 42 == choices('42')
assert 0 == choices('0')
assert 3 == choices('3')
assert 7 == choices('7')
with pytest.raises(ValueError):
choices('not my fault')
with pytest.raises(ValueError):
choices('1')
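# Usage sketch with decouple's config() helper (values are illustrative):
#
#   from decouple import config, Choices
#   LOG_LEVEL = config('LOG_LEVEL', default='INFO',
#                      cast=Choices(['DEBUG', 'INFO', 'WARNING', 'ERROR']))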
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
def apply_with_random_selector(x, func, num_cases):
"""Computes func(x, sel), with sel sampled from [0...num_cases-1].
Args:
x: input Tensor.
func: Python function to apply.
num_cases: Python int32, number of cases to sample sel from.
Returns:
The result of func(x, sel), where func receives the value of the
selector as a python integer, but sel is sampled dynamically.
"""
sel = tf.random_uniform([], maxval=num_cases, dtype=tf.int32)
# Pass the real x only to one of the func calls.
return control_flow_ops.merge([
func(control_flow_ops.switch(x, tf.equal(sel, case))[1], case)
for case in range(num_cases)])[0]
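# Note on apply_with_random_selector above: control_flow_ops.switch routes the
# real tensor down only the branch where tf.equal(sel, case) holds, and merge
# returns the output of whichever branch actually ran, so func executes on
# real data for exactly one of the num_cases cases per evaluation.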
def distort_color(image, color_ordering=0, fast_mode=True, scope=None):
"""Distort the color of a Tensor image.
Each color distortion is non-commutative and thus ordering of the color ops
matters. Ideally we would randomly permute the ordering of the color ops.
  Rather than adding that level of complication, we select a distinct ordering
of color ops for each preprocessing thread.
Args:
image: 3-D Tensor containing single image in [0, 1].
color_ordering: Python int, a type of distortion (valid values: 0-3).
fast_mode: Avoids slower ops (random_hue and random_contrast)
scope: Optional scope for name_scope.
Returns:
3-D Tensor color-distorted image on range [0, 1]
Raises:
ValueError: if color_ordering not in [0, 3]
"""
with tf.name_scope(scope, 'distort_color', [image]):
if fast_mode:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
else:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
elif color_ordering == 1:
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
elif color_ordering == 2:
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
elif color_ordering == 3:
image = tf.image.random_hue(image, max_delta=0.2)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_brightness(image, max_delta=32. / 255.)
else:
raise ValueError('color_ordering must be in [0, 3]')
# The random_* ops do not necessarily clamp.
return tf.clip_by_value(image, 0.0, 1.0)
def distorted_bounding_box_crop(image,
bbox,
min_object_covered=0.1,
aspect_ratio_range=(0.75, 1.33),
area_range=(0.05, 1.0),
max_attempts=100,
scope=None):
"""Generates cropped_image using a one of the bboxes randomly distorted.
See `tf.image.sample_distorted_bounding_box` for more documentation.
Args:
image: 3-D Tensor of image (it will be converted to floats in [0, 1]).
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax]. If num_boxes is 0 then it would use the whole
image.
min_object_covered: An optional `float`. Defaults to `0.1`. The cropped
area of the image must contain at least this fraction of any bounding box
supplied.
aspect_ratio_range: An optional list of `floats`. The cropped area of the
image must have an aspect ratio = width / height within this range.
area_range: An optional list of `floats`. The cropped area of the image
      must contain a fraction of the supplied image within this range.
max_attempts: An optional `int`. Number of attempts at generating a cropped
region of the image of the specified constraints. After `max_attempts`
failures, return the entire image.
scope: Optional scope for name_scope.
Returns:
A tuple, a 3-D Tensor cropped_image and the distorted bbox
"""
with tf.name_scope(scope, 'distorted_bounding_box_crop', [image, bbox]):
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
# A large fraction of image datasets contain a human-annotated bounding
# box delineating the region of the image containing the object of interest.
# We choose to create a new bounding box for the object which is a randomly
# distorted version of the human-annotated bounding box that obeys an
# allowed range of aspect ratios, sizes and overlap with the human-annotated
# bounding box. If no box is supplied, then we assume the bounding box is
# the entire image.
sample_distorted_bounding_box = tf.image.sample_distorted_bounding_box(
tf.shape(image),
bounding_boxes=bbox,
min_object_covered=min_object_covered,
aspect_ratio_range=aspect_ratio_range,
area_range=area_range,
max_attempts=max_attempts,
use_image_if_no_bounding_boxes=True)
bbox_begin, bbox_size, distort_bbox = sample_distorted_bounding_box
# Crop the image to the specified bounding box.
cropped_image = tf.slice(image, bbox_begin, bbox_size)
return cropped_image, distort_bbox
def preprocess_for_train(image, height, width, bbox,
fast_mode=True,
scope=None):
"""Distort one image for training a network.
Distorting images provides a useful technique for augmenting the data
set during training in order to make the network invariant to aspects
  of the image that do not affect the label.
  Additionally it creates image_summaries to display the different
transformations applied to the image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
      [0, 1], otherwise it is converted to tf.float32 assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details).
height: integer
width: integer
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged
as [ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations (i.e.
bi-cubic resizing, random_hue or random_contrast).
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of distorted image used for training with range [-1, 1].
"""
with tf.name_scope(scope, 'distort_image', [image, height, width, bbox]):
if bbox is None:
bbox = tf.constant([0.0, 0.0, 1.0, 1.0],
dtype=tf.float32,
shape=[1, 1, 4])
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Each bounding box has shape [1, num_boxes, box coords] and
# the coordinates are ordered [ymin, xmin, ymax, xmax].
image_with_box = tf.image.draw_bounding_boxes(tf.expand_dims(image, 0),
bbox)
tf.summary.image('image_with_bounding_boxes', image_with_box)
distorted_image, distorted_bbox = distorted_bounding_box_crop(image, bbox)
# Restore the shape since the dynamic slice based upon the bbox_size loses
# the third dimension.
distorted_image.set_shape([None, None, 3])
image_with_distorted_box = tf.image.draw_bounding_boxes(
tf.expand_dims(image, 0), distorted_bbox)
tf.summary.image('images_with_distorted_bounding_box',
image_with_distorted_box)
# This resizing operation may distort the images because the aspect
# ratio is not respected. We select a resize method in a round robin
# fashion based on the thread number.
# Note that ResizeMethod contains 4 enumerated resizing methods.
# We select only 1 case for fast_mode bilinear.
num_resize_cases = 1 if fast_mode else 4
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, method: tf.image.resize_images(x, [height, width], method=method),
num_cases=num_resize_cases)
tf.summary.image('cropped_resized_image',
tf.expand_dims(distorted_image, 0))
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
# Randomly distort the colors. There are 4 ways to do it.
distorted_image = apply_with_random_selector(
distorted_image,
lambda x, ordering: distort_color(x, ordering, fast_mode),
num_cases=4)
tf.summary.image('final_distorted_image',
tf.expand_dims(distorted_image, 0))
distorted_image = tf.subtract(distorted_image, 0.5)
distorted_image = tf.multiply(distorted_image, 2.0)
return distorted_image
def preprocess_for_eval(image, height, width,
central_fraction=0.875, scope=None):
"""Prepare one image for evaluation.
  If height and width are specified, the image is resized to that size using
  resize_bilinear.
  If central_fraction is specified, the central fraction of the input
  image is cropped.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
      [0, 1], otherwise it is converted to tf.float32 assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details)
height: integer
width: integer
central_fraction: Optional Float, fraction of the image to crop.
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of prepared image.
"""
with tf.name_scope(scope, 'eval_image', [image, height, width]):
if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Crop the central region of the image with an area containing 87.5% of
# the original image.
if central_fraction:
image = tf.image.central_crop(image, central_fraction=central_fraction)
if height and width:
# Resize the image to the specified height and width.
image = tf.expand_dims(image, 0)
image = tf.image.resize_bilinear(image, [height, width],
align_corners=False)
image = tf.squeeze(image, [0])
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
return image
def preprocess_image(image, height, width,
is_training=False,
bbox=None,
fast_mode=True):
"""Pre-process one image for training or evaluation.
Args:
image: 3-D Tensor [height, width, channels] with the image.
height: integer, image expected height.
width: integer, image expected width.
    is_training: Boolean. If true, the image is transformed for training;
      otherwise it is transformed for evaluation.
bbox: 3-D float Tensor of bounding boxes arranged [1, num_boxes, coords]
where each coordinate is [0, 1) and the coordinates are arranged as
[ymin, xmin, ymax, xmax].
fast_mode: Optional boolean, if True avoids slower transformations.
Returns:
3-D float Tensor containing an appropriately scaled image
Raises:
ValueError: if user does not provide bounding box
"""
if is_training:
return preprocess_for_train(image, height, width, bbox, fast_mode)
else:
return preprocess_for_eval(image, height, width)
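# Usage sketch (TF1-style graph mode; 299x299 is the conventional Inception
# input size, used here purely as an illustration):
#
#   raw = tf.read_file('example.jpg')
#   image = tf.image.decode_jpeg(raw, channels=3)
#   train_image = preprocess_image(image, 299, 299, is_training=True)
#   eval_image = preprocess_image(image, 299, 299, is_training=False)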
|
import pytest
from redbot.pytest.alias import *
def test_is_valid_alias_name(alias):
assert alias.is_valid_alias_name("valid") is True
assert alias.is_valid_alias_name("not valid name") is False
@pytest.mark.asyncio
async def test_empty_guild_aliases(alias, empty_guild):
assert list(await alias._aliases.get_guild_aliases(empty_guild)) == []
@pytest.mark.asyncio
async def test_empty_global_aliases(alias):
assert list(await alias._aliases.get_global_aliases()) == []
async def create_test_guild_alias(alias, ctx):
await alias._aliases.add_alias(ctx, "test", "ping", global_=False)
async def create_test_global_alias(alias, ctx):
await alias._aliases.add_alias(ctx, "test_global", "ping", global_=True)
@pytest.mark.asyncio
async def test_add_guild_alias(alias, ctx):
await create_test_guild_alias(alias, ctx)
alias_obj = await alias._aliases.get_alias(ctx.guild, "test")
assert alias_obj.name == "test"
@pytest.mark.asyncio
async def test_delete_guild_alias(alias, ctx):
await create_test_guild_alias(alias, ctx)
alias_obj = await alias._aliases.get_alias(ctx.guild, "test")
assert alias_obj.name == "test"
did_delete = await alias._aliases.delete_alias(ctx, "test")
assert did_delete is True
alias_obj = await alias._aliases.get_alias(ctx.guild, "test")
assert alias_obj is None
@pytest.mark.asyncio
async def test_add_global_alias(alias, ctx):
await create_test_global_alias(alias, ctx)
alias_obj = await alias._aliases.get_alias(ctx.guild, "test_global")
assert alias_obj.name == "test_global"
@pytest.mark.asyncio
async def test_delete_global_alias(alias, ctx):
await create_test_global_alias(alias, ctx)
alias_obj = await alias._aliases.get_alias(ctx.guild, "test_global")
assert alias_obj.name == "test_global"
did_delete = await alias._aliases.delete_alias(ctx, alias_name="test_global", global_=True)
assert did_delete is True
alias_obj = await alias._aliases.get_alias(None, "test_global")
assert alias_obj is None
|
import ast
import pytest
from homeassistant.components import media_source
from homeassistant.components.media_source import const
from homeassistant.components.media_source.models import PlayMedia
from homeassistant.components.netatmo import DATA_CAMERAS, DATA_EVENTS, DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import load_fixture
async def test_async_browse_media(hass):
"""Test browse media."""
assert await async_setup_component(hass, DOMAIN, {})
    # Prepare cached Netatmo event data
hass.data[DOMAIN] = {}
hass.data[DOMAIN][DATA_EVENTS] = ast.literal_eval(
load_fixture("netatmo/events.txt")
)
hass.data[DOMAIN][DATA_CAMERAS] = {
"12:34:56:78:90:ab": "MyCamera",
"12:34:56:78:90:ac": "MyOutdoorCamera",
}
assert await async_setup_component(hass, const.DOMAIN, {})
await hass.async_block_till_done()
# Test camera not exists
with pytest.raises(media_source.BrowseError) as excinfo:
await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/98:76:54:32:10:ff"
)
assert str(excinfo.value) == "Camera does not exist."
# Test browse event
with pytest.raises(media_source.BrowseError) as excinfo:
await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/12:34:56:78:90:ab/12345"
)
assert str(excinfo.value) == "Event does not exist."
# Test invalid base
with pytest.raises(media_source.BrowseError) as excinfo:
await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/invalid/base"
)
assert str(excinfo.value) == "Unknown source directory."
# Test successful listing
media = await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/"
)
# Test successful events listing
media = await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/12:34:56:78:90:ab"
)
# Test successful event listing
media = await media_source.async_browse_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/12:34:56:78:90:ab/1599152672"
)
assert media
# Test successful event resolve
media = await media_source.async_resolve_media(
hass, f"{const.URI_SCHEME}{DOMAIN}/events/12:34:56:78:90:ab/1599152672"
)
assert media == PlayMedia(
url="http:///files/high/index.m3u8", mime_type="application/x-mpegURL"
)
|
import asyncio
from datetime import timedelta
import logging
import async_timeout
from iammeter import real_time_api
from iammeter.power_meter import IamMeterError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import debounce
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_PORT = 80
DEFAULT_DEVICE_NAME = "IamMeter"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_DEVICE_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
SCAN_INTERVAL = timedelta(seconds=30)
PLATFORM_TIMEOUT = 8
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Platform setup."""
config_host = config[CONF_HOST]
config_port = config[CONF_PORT]
config_name = config[CONF_NAME]
try:
        async with async_timeout.timeout(PLATFORM_TIMEOUT):
api = await real_time_api(config_host, config_port)
except (IamMeterError, asyncio.TimeoutError) as err:
_LOGGER.error("Device is not ready")
raise PlatformNotReady from err
async def async_update_data():
try:
            async with async_timeout.timeout(PLATFORM_TIMEOUT):
return await api.get_data()
except (IamMeterError, asyncio.TimeoutError) as err:
raise UpdateFailed from err
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=DEFAULT_DEVICE_NAME,
update_method=async_update_data,
update_interval=SCAN_INTERVAL,
request_refresh_debouncer=debounce.Debouncer(
hass, _LOGGER, cooldown=0.3, immediate=True
),
)
await coordinator.async_refresh()
entities = []
for sensor_name, (row, idx, unit) in api.iammeter.sensor_map().items():
serial_number = api.iammeter.serial_number
uid = f"{serial_number}-{row}-{idx}"
entities.append(IamMeter(coordinator, uid, sensor_name, unit, config_name))
async_add_entities(entities)
class IamMeter(CoordinatorEntity):
"""Class for a sensor."""
def __init__(self, coordinator, uid, sensor_name, unit, dev_name):
"""Initialize an iammeter sensor."""
super().__init__(coordinator)
self.uid = uid
self.sensor_name = sensor_name
self.unit = unit
self.dev_name = dev_name
@property
def state(self):
"""Return the state of the sensor."""
return self.coordinator.data.data[self.sensor_name]
@property
def unique_id(self):
"""Return unique id."""
return self.uid
@property
def name(self):
"""Name of this iammeter attribute."""
return f"{self.dev_name} {self.sensor_name}"
@property
def icon(self):
"""Icon for each sensor."""
return "mdi:flash"
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self.unit
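# Example configuration.yaml entry accepted by PLATFORM_SCHEMA above
# (the host value is illustrative):
#
#   sensor:
#     - platform: iammeter
#       host: 192.168.1.50
#       port: 80
#       name: IamMeter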
|
from pyairvisual.errors import InvalidKeyError, NodeProError
from homeassistant import data_entry_flow
from homeassistant.components.airvisual import (
CONF_GEOGRAPHIES,
CONF_INTEGRATION_TYPE,
DOMAIN,
INTEGRATION_TYPE_GEOGRAPHY,
INTEGRATION_TYPE_NODE_PRO,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import (
CONF_API_KEY,
CONF_IP_ADDRESS,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_SHOW_ON_MAP,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicate entries are added."""
geography_conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
MockConfigEntry(
domain=DOMAIN, unique_id="51.528308, -0.3817765", data=geography_conf
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=geography_conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
node_pro_conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "12345"}
MockConfigEntry(
domain=DOMAIN, unique_id="192.168.1.100", data=node_pro_conf
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=node_pro_conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_invalid_identifier(hass):
"""Test that an invalid API key or Node/Pro ID throws an error."""
geography_conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
with patch(
"pyairvisual.air_quality.AirQuality.nearest_city",
side_effect=InvalidKeyError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=geography_conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_API_KEY: "invalid_api_key"}
async def test_migration(hass):
"""Test migrating from version 1 to the current version."""
conf = {
CONF_API_KEY: "abcde12345",
CONF_GEOGRAPHIES: [
{CONF_LATITUDE: 51.528308, CONF_LONGITUDE: -0.3817765},
{CONF_LATITUDE: 35.48847, CONF_LONGITUDE: 137.5263065},
],
}
config_entry = MockConfigEntry(
domain=DOMAIN, version=1, unique_id="abcde12345", data=conf
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
with patch("pyairvisual.air_quality.AirQuality.nearest_city"), patch.object(
hass.config_entries, "async_forward_entry_setup"
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: conf})
await hass.async_block_till_done()
config_entries = hass.config_entries.async_entries(DOMAIN)
assert len(config_entries) == 2
assert config_entries[0].unique_id == "51.528308, -0.3817765"
assert config_entries[0].title == "Cloud API (51.528308, -0.3817765)"
assert config_entries[0].data == {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
}
assert config_entries[1].unique_id == "35.48847, 137.5263065"
assert config_entries[1].title == "Cloud API (35.48847, 137.5263065)"
assert config_entries[1].data == {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 35.48847,
CONF_LONGITUDE: 137.5263065,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
}
async def test_node_pro_error(hass):
"""Test that an invalid Node/Pro ID shows an error."""
node_pro_conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "my_password"}
with patch(
"pyairvisual.node.NodeSamba.async_connect",
side_effect=NodeProError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=node_pro_conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_IP_ADDRESS: "cannot_connect"}
async def test_options_flow(hass):
"""Test config flow options."""
geography_conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="51.528308, -0.3817765",
data=geography_conf,
options={CONF_SHOW_ON_MAP: True},
)
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={CONF_SHOW_ON_MAP: False}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {CONF_SHOW_ON_MAP: False}
async def test_step_geography(hass):
"""Test the geograph (cloud API) step."""
conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
), patch("pyairvisual.air_quality.AirQuality.nearest_city"):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Cloud API (51.528308, -0.3817765)"
assert result["data"] == {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
}
async def test_step_import(hass):
"""Test the import step for both types of configuration."""
geography_conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
), patch("pyairvisual.air_quality.AirQuality.nearest_city"):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=geography_conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Cloud API (51.528308, -0.3817765)"
assert result["data"] == {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
}
async def test_step_node_pro(hass):
"""Test the Node/Pro step."""
conf = {CONF_IP_ADDRESS: "192.168.1.100", CONF_PASSWORD: "my_password"}
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
), patch("pyairvisual.node.NodeSamba.async_connect"), patch(
"pyairvisual.node.NodeSamba.async_get_latest_measurements"
), patch(
"pyairvisual.node.NodeSamba.async_disconnect"
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data={"type": "AirVisual Node/Pro"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Node/Pro (192.168.1.100)"
assert result["data"] == {
CONF_IP_ADDRESS: "192.168.1.100",
CONF_PASSWORD: "my_password",
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
}
async def test_step_reauth(hass):
"""Test that the reauth step works."""
geography_conf = {
CONF_API_KEY: "abcde12345",
CONF_LATITUDE: 51.528308,
CONF_LONGITUDE: -0.3817765,
}
MockConfigEntry(
domain=DOMAIN, unique_id="51.528308, -0.3817765", data=geography_conf
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "reauth"}, data=geography_conf
)
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
with patch(
"homeassistant.components.airvisual.async_setup_entry", return_value=True
), patch("pyairvisual.air_quality.AirQuality.nearest_city", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={CONF_API_KEY: "defgh67890"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "reauth_successful"
assert len(hass.config_entries.async_entries()) == 1
async def test_step_user(hass):
"""Test the user ("pick the integration type") step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={"type": INTEGRATION_TYPE_GEOGRAPHY},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "geography"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={"type": INTEGRATION_TYPE_NODE_PRO},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "node_pro"
|
from os import environ, cpu_count
import configparser
import os.path
import sys
# Read the bundled default config file first; /etc/httpobs.conf and
# ~/.httpobs.conf override those defaults when present
__dirname = os.path.abspath(os.path.dirname(__file__))
_config_parser = configparser.ConfigParser()
_config_parser.read_file(open(os.path.join(__dirname, 'httpobs.conf'))) # default values
_config_parser.read(['/etc/httpobs.conf', os.path.expanduser('~/.httpobs.conf')]) # overridden values
# Return None if it's not in the config parser
def __conf(section, param, type=None, default=None):
try:
if type == str or type is None:
return _config_parser.get(section, param)
elif type == int:
return _config_parser.getint(section, param)
elif type == bool:
return _config_parser.getboolean(section, param)
elif type == float:
return _config_parser.getfloat(section, param)
else:
return None
except (KeyError, configparser.NoSectionError):
return None
    except Exception:
        if default is not None:
            return default
else:
print('Error with key {0} in section {1}'.format(param, section))
sys.exit(1)
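# Resolution order (illustrative note): an HTTPOBS_* environment variable
# always wins; otherwise the value falls back to the config files read
# above. E.g. `HTTPOBS_API_PORT=57001` overrides the [api] port setting.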
DEVELOPMENT_MODE = (environ.get('HTTPOBS_DEV') == 'yes' or
                    __conf('global', 'development', bool))
# API configuration
API_ALLOW_VERBOSE_STATS_FROM_PUBLIC = (environ.get('HTTPOBS_ALLOW_VERBOSE_STATS_FROM_PUBLIC') == 'yes' or
__conf('api', 'allow_verbose_stats_from_public', bool, True))
API_CACHED_RESULT_TIME = int(environ.get('HTTPOBS_API_CACHED_RESULT_TIME') or __conf('api', 'cached_result_time'))
API_COOLDOWN = int(environ.get('HTTPOBS_API_COOLDOWN') or __conf('api', 'cooldown', int))
API_PORT = int(environ.get('HTTPOBS_API_PORT') or __conf('api', 'port', int))
API_PROPAGATE_EXCEPTIONS = (environ.get('HTTPOBS_PROPAGATE_EXCEPTIONS') == 'yes' or
                            __conf('api', 'propagate_exceptions', bool))
API_URL = environ.get('HTTPOBS_API_URL') or __conf('api', 'url')
# Broker configuration
BROKER_URL = (environ.get('HTTPOBS_BROKER_URL') or __conf('scanner', 'broker'))
# Database configuration
DATABASE_DB = environ.get('HTTPOBS_DATABASE_DB') or __conf('database', 'database')
DATABASE_HOST = environ.get('HTTPOBS_DATABASE_HOST') or __conf('database', 'host')
DATABASE_PASSWORD = environ.get('HTTPOBS_DATABASE_PASS') or __conf('database', 'pass')
DATABASE_PORT = int(environ.get('HTTPOBS_DATABASE_PORT') or __conf('database', 'port', int))
DATABASE_USER = environ.get('HTTPOBS_DATABASE_USER') or __conf('database', 'user')
# Set some database provider specific parameters
if DATABASE_HOST.endswith('.rds.amazonaws.com'):
DATABASE_CA_CERT = os.path.join(__dirname, 'amazon-rds.pem')
DATABASE_SSL_MODE = 'verify-full'
else:
DATABASE_CA_CERT = None
DATABASE_SSL_MODE = 'prefer'
# Retriever parameters
RETRIEVER_CONNECT_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_CONNECT_TIMEOUT') or
__conf('retriever', 'connect_timeout'))
RETRIEVER_READ_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_READ_TIMEOUT') or
__conf('retriever', 'read_timeout'))
RETRIEVER_USER_AGENT = environ.get('HTTPOBS_RETRIEVER_USER_AGENT') or __conf('retriever', 'user_agent')
RETRIEVER_CORS_ORIGIN = environ.get('HTTPOBS_RETRIEVER_CORS_ORIGIN') or __conf('retriever', 'cors_origin')
# Scanner configuration
SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or
__conf('scanner', 'abort_scan_time'))
SCANNER_ALLOW_KICKSTART = (environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART') == 'yes' or
__conf('scanner', 'allow_kickstart', bool))
SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int(environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or
__conf('scanner', 'allow_kickstart_num_aborted'))
SCANNER_ALLOW_LOCALHOST = (environ.get('HTTPOBS_SCANNER_ALLOW_LOCALHOST') == 'yes' or
__conf('scanner', 'allow_localhost', bool))
SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or
__conf('scanner', 'broker_reconnection_sleep_time'))
SCANNER_CYCLE_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_CYCLE_SLEEP_TIME') or
__conf('scanner', 'cycle_sleep_time'))
SCANNER_DATABASE_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_DATABASE_RECONNECTION_SLEEP_TIME') or
__conf('scanner', 'database_reconnection_sleep_time'))
SCANNER_MAINTENANCE_CYCLE_FREQUENCY = int(environ.get('HTTPOBS_MAINTENANCE_CYCLE_FREQUENCY') or
__conf('scanner', 'maintenance_cycle_frequency'))
SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY = int(environ.get('HTTPOBS_SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY') or
__conf('scanner', 'materialized_view_refresh_frequency'))
SCANNER_MAX_CPU_UTILIZATION = int(environ.get('HTTPOBS_SCANNER_MAX_CPU_UTILIZATION') or
__conf('scanner', 'max_cpu_utilization'))
SCANNER_MAX_LOAD_RATIO = int(environ.get('HTTPOBS_SCANNER_MAX_LOAD_RATIO_PER_CPU') or
__conf('scanner', 'max_load_ratio_per_cpu'))
SCANNER_MAX_LOAD = cpu_count() * SCANNER_MAX_LOAD_RATIO
SCANNER_MOZILLA_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or
__conf('scanner', 'mozilla_domains')).split(',')]
SCANNER_PINNED_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_PINNED_DOMAINS') or
__conf('scanner', 'pinned_domains')).split(',')]
|
import re
import unittest
import forecastio
from requests.exceptions import ConnectionError
import requests_mock
from homeassistant.components import weather
from homeassistant.setup import setup_component
from homeassistant.util.unit_system import METRIC_SYSTEM
from tests.async_mock import patch
from tests.common import get_test_home_assistant, load_fixture
class TestDarkSky(unittest.TestCase):
"""Test the Dark Sky weather component."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.config.units = METRIC_SYSTEM
self.lat = self.hass.config.latitude = 37.8267
self.lon = self.hass.config.longitude = -122.423
self.addCleanup(self.tear_down_cleanup)
def tear_down_cleanup(self):
"""Stop down everything that was started."""
self.hass.stop()
@requests_mock.Mocker()
@patch("forecastio.api.get_forecast", wraps=forecastio.api.get_forecast)
def test_setup(self, mock_req, mock_get_forecast):
"""Test for successfully setting up the forecast.io platform."""
uri = (
r"https://api.(darksky.net|forecast.io)\/forecast\/(\w+)\/"
r"(-?\d+\.?\d*),(-?\d+\.?\d*)"
)
mock_req.get(re.compile(uri), text=load_fixture("darksky.json"))
assert setup_component(
self.hass,
weather.DOMAIN,
{"weather": {"name": "test", "platform": "darksky", "api_key": "foo"}},
)
self.hass.block_till_done()
assert mock_get_forecast.called
assert mock_get_forecast.call_count == 1
state = self.hass.states.get("weather.test")
assert state.state == "sunny"
@patch("forecastio.load_forecast", side_effect=ConnectionError())
def test_failed_setup(self, mock_load_forecast):
"""Test to ensure that a network error does not break component state."""
assert setup_component(
self.hass,
weather.DOMAIN,
{"weather": {"name": "test", "platform": "darksky", "api_key": "foo"}},
)
self.hass.block_till_done()
state = self.hass.states.get("weather.test")
assert state.state == "unavailable"
|
import signal
def signal_to_exception(signum, frame):
"""
Called by the timeout alarm during the collector run time
"""
if signum == signal.SIGALRM:
raise SIGALRMException()
if signum == signal.SIGHUP:
raise SIGHUPException()
if signum == signal.SIGUSR1:
raise SIGUSR1Exception()
if signum == signal.SIGUSR2:
raise SIGUSR2Exception()
raise SignalException(signum)
class SignalException(Exception):
pass
class SIGALRMException(SignalException):
pass
class SIGHUPException(SignalException):
pass
class SIGUSR1Exception(SignalException):
pass
class SIGUSR2Exception(SignalException):
pass
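# Illustrative usage (added example, not part of the original module):
# install the handler and arm an alarm so a long-running call is
# interrupted; the sleep below is a made-up stand-in for collector work.
if __name__ == '__main__':
    import time

    signal.signal(signal.SIGALRM, signal_to_exception)
    signal.alarm(1)  # deliver SIGALRM (raising SIGALRMException) in one second
    try:
        time.sleep(5)  # pretend to be a slow collector run
        print('finished (unexpected)')
    except SIGALRMException:
        print('collector run timed out')
    finally:
        signal.alarm(0)  # always cancel any pending alarm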
|
import asyncio
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_SPEED,
DATA_COORDINATOR,
DATA_UNDO_UPDATE_LISTENER,
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_SCAN_INTERVAL,
DEFAULT_SPEED_LIMIT,
DEFAULT_SSL,
DOMAIN,
SERVICE_PAUSE,
SERVICE_RESUME,
SERVICE_SET_SPEED,
)
from .coordinator import NZBGetDataUpdateCoordinator
PLATFORMS = ["sensor", "switch"]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
SPEED_LIMIT_SCHEMA = vol.Schema(
{vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.positive_int}
)
async def async_setup(hass: HomeAssistantType, config: dict) -> bool:
"""Set up the NZBGet integration."""
hass.data.setdefault(DOMAIN, {})
if hass.config_entries.async_entries(DOMAIN):
return True
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config[DOMAIN],
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up NZBGet from a config entry."""
if not entry.options:
options = {
CONF_SCAN_INTERVAL: entry.data.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
}
hass.config_entries.async_update_entry(entry, options=options)
coordinator = NZBGetDataUpdateCoordinator(
hass,
config=entry.data,
options=entry.options,
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
undo_listener = entry.add_update_listener(_async_update_listener)
hass.data[DOMAIN][entry.entry_id] = {
DATA_COORDINATOR: coordinator,
DATA_UNDO_UPDATE_LISTENER: undo_listener,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
_async_register_services(hass, coordinator)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][entry.entry_id][DATA_UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
def _async_register_services(
hass: HomeAssistantType,
coordinator: NZBGetDataUpdateCoordinator,
) -> None:
"""Register integration-level services."""
def pause(call) -> None:
"""Service call to pause downloads in NZBGet."""
coordinator.nzbget.pausedownload()
def resume(call) -> None:
"""Service call to resume downloads in NZBGet."""
coordinator.nzbget.resumedownload()
def set_speed(call) -> None:
"""Service call to rate limit speeds in NZBGet."""
coordinator.nzbget.rate(call.data[ATTR_SPEED])
hass.services.async_register(DOMAIN, SERVICE_PAUSE, pause, schema=vol.Schema({}))
hass.services.async_register(DOMAIN, SERVICE_RESUME, resume, schema=vol.Schema({}))
hass.services.async_register(
DOMAIN, SERVICE_SET_SPEED, set_speed, schema=SPEED_LIMIT_SCHEMA
)
async def _async_update_listener(hass: HomeAssistantType, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
class NZBGetEntity(CoordinatorEntity):
"""Defines a base NZBGet entity."""
def __init__(
self, *, entry_id: str, name: str, coordinator: NZBGetDataUpdateCoordinator
) -> None:
"""Initialize the NZBGet entity."""
super().__init__(coordinator)
self._name = name
self._entry_id = entry_id
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
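# Illustrative sketch (added example, not part of the original module): a
# minimal entity built on NZBGetEntity. The "status"/"UptimeSec" keys are
# hypothetical stand-ins for whatever the coordinator actually exposes.
class ExampleUptimeSensor(NZBGetEntity):
    """Example entity reading one value from the shared coordinator data."""

    @property
    def state(self):
        """Return the uptime reported by NZBGet, if present."""
        return self.coordinator.data.get("status", {}).get("UptimeSec")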
|
import time
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, workaround
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Z-Wave Switch from Config Entry."""
@callback
def async_add_switch(switch):
"""Add Z-Wave Switch."""
async_add_entities([switch])
async_dispatcher_connect(hass, "zwave_new_switch", async_add_switch)
def get_device(values, **kwargs):
"""Create zwave entity device."""
return ZwaveSwitch(values)
class ZwaveSwitch(ZWaveDeviceEntity, SwitchEntity):
"""Representation of a Z-Wave switch."""
def __init__(self, values):
"""Initialize the Z-Wave switch device."""
ZWaveDeviceEntity.__init__(self, values, DOMAIN)
self.refresh_on_update = (
workaround.get_device_mapping(values.primary)
== workaround.WORKAROUND_REFRESH_NODE_ON_UPDATE
)
self.last_update = time.perf_counter()
self._state = self.values.primary.data
def update_properties(self):
"""Handle data changes for node values."""
self._state = self.values.primary.data
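        # Some devices report stale state right after a command; for nodes
        # flagged by the workaround mapping, re-query the node at most once
        # every 30 seconds.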
if self.refresh_on_update and time.perf_counter() - self.last_update > 30:
self.last_update = time.perf_counter()
self.node.request_state()
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
self.node.set_switch(self.values.primary.value_id, True)
def turn_off(self, **kwargs):
"""Turn the device off."""
self.node.set_switch(self.values.primary.value_id, False)
|
from datetime import datetime, timedelta
import logging
import ns_api
from ns_api import RequestParametersError
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by NS"
CONF_ROUTES = "routes"
CONF_FROM = "from"
CONF_TO = "to"
CONF_VIA = "via"
CONF_TIME = "time"
ICON = "mdi:train"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)
ROUTE_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_FROM): cv.string,
vol.Required(CONF_TO): cv.string,
vol.Optional(CONF_VIA): cv.string,
vol.Optional(CONF_TIME): cv.time,
}
)
ROUTES_SCHEMA = vol.All(cv.ensure_list, [ROUTE_SCHEMA])
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_ROUTES): ROUTES_SCHEMA}
)
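# Example configuration.yaml entry (illustrative; the station codes below
# are just examples):
#
# sensor:
#   - platform: nederlandse_spoorwegen
#     api_key: YOUR_NS_API_KEY
#     routes:
#       - name: Rotterdam-Amsterdam
#         from: RTD
#         to: ASD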
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the departure sensor."""
nsapi = ns_api.NSAPI(config[CONF_API_KEY])
try:
stations = nsapi.get_stations()
except (
requests.exceptions.ConnectionError,
requests.exceptions.HTTPError,
) as error:
_LOGGER.error("Could not connect to the internet: %s", error)
raise PlatformNotReady() from error
except RequestParametersError as error:
_LOGGER.error("Could not fetch stations, please check configuration: %s", error)
return
sensors = []
for departure in config.get(CONF_ROUTES):
if not valid_stations(
stations,
[departure.get(CONF_FROM), departure.get(CONF_VIA), departure.get(CONF_TO)],
):
continue
sensors.append(
NSDepartureSensor(
nsapi,
departure.get(CONF_NAME),
departure.get(CONF_FROM),
departure.get(CONF_TO),
departure.get(CONF_VIA),
departure.get(CONF_TIME),
)
)
if sensors:
add_entities(sensors, True)
def valid_stations(stations, given_stations):
"""Verify the existence of the given station codes."""
for station in given_stations:
if station is None:
continue
if not any(s.code == station.upper() for s in stations):
_LOGGER.warning("Station '%s' is not a valid station", station)
return False
return True
class NSDepartureSensor(Entity):
"""Implementation of a NS Departure Sensor."""
def __init__(self, nsapi, name, departure, heading, via, time):
"""Initialize the sensor."""
self._nsapi = nsapi
self._name = name
self._departure = departure
self._via = via
self._heading = heading
self._time = time
self._state = None
self._trips = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon for the frontend."""
return ICON
@property
def state(self):
"""Return the next departure time."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if not self._trips:
return
        # Build the route list up front so the attributes dict below never
        # references an undefined name when there are no trip parts.
        route = []
        if self._trips[0].trip_parts:
            route = [self._trips[0].departure]
            for k in self._trips[0].trip_parts:
                route.append(k.destination)
# Static attributes
attributes = {
"going": self._trips[0].going,
"departure_time_planned": None,
"departure_time_actual": None,
"departure_delay": False,
"departure_platform_planned": self._trips[0].departure_platform_planned,
"departure_platform_actual": self._trips[0].departure_platform_actual,
"arrival_time_planned": None,
"arrival_time_actual": None,
"arrival_delay": False,
"arrival_platform_planned": self._trips[0].arrival_platform_planned,
"arrival_platform_actual": self._trips[0].arrival_platform_actual,
"next": None,
"status": self._trips[0].status.lower(),
"transfers": self._trips[0].nr_transfers,
"route": route,
"remarks": None,
ATTR_ATTRIBUTION: ATTRIBUTION,
}
# Planned departure attributes
if self._trips[0].departure_time_planned is not None:
attributes["departure_time_planned"] = self._trips[
0
].departure_time_planned.strftime("%H:%M")
# Actual departure attributes
if self._trips[0].departure_time_actual is not None:
attributes["departure_time_actual"] = self._trips[
0
].departure_time_actual.strftime("%H:%M")
# Delay departure attributes
if (
attributes["departure_time_planned"]
and attributes["departure_time_actual"]
and attributes["departure_time_planned"]
!= attributes["departure_time_actual"]
):
attributes["departure_delay"] = True
# Planned arrival attributes
if self._trips[0].arrival_time_planned is not None:
attributes["arrival_time_planned"] = self._trips[
0
].arrival_time_planned.strftime("%H:%M")
# Actual arrival attributes
if self._trips[0].arrival_time_actual is not None:
attributes["arrival_time_actual"] = self._trips[
0
].arrival_time_actual.strftime("%H:%M")
# Delay arrival attributes
if (
attributes["arrival_time_planned"]
and attributes["arrival_time_actual"]
and attributes["arrival_time_planned"] != attributes["arrival_time_actual"]
):
attributes["arrival_delay"] = True
# Next attributes
if len(self._trips) > 1:
if self._trips[1].departure_time_actual is not None:
attributes["next"] = self._trips[1].departure_time_actual.strftime(
"%H:%M"
)
elif self._trips[1].departure_time_planned is not None:
attributes["next"] = self._trips[1].departure_time_planned.strftime(
"%H:%M"
)
return attributes
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the trip information."""
# If looking for a specific trip time, update around that trip time only.
if self._time and (
(datetime.now() + timedelta(minutes=30)).time() < self._time
or (datetime.now() - timedelta(minutes=30)).time() > self._time
):
self._state = None
self._trips = None
return
# Set the search parameter to search from a specific trip time or to just search for next trip.
if self._time:
trip_time = (
datetime.today()
.replace(hour=self._time.hour, minute=self._time.minute)
.strftime("%d-%m-%Y %H:%M")
)
else:
trip_time = datetime.now().strftime("%d-%m-%Y %H:%M")
try:
self._trips = self._nsapi.get_trips(
trip_time, self._departure, self._via, self._heading, True, 0, 2
)
if self._trips:
if self._trips[0].departure_time_actual is None:
planned_time = self._trips[0].departure_time_planned
self._state = planned_time.strftime("%H:%M")
else:
actual_time = self._trips[0].departure_time_actual
self._state = actual_time.strftime("%H:%M")
except (
requests.exceptions.ConnectionError,
requests.exceptions.HTTPError,
) as error:
_LOGGER.error("Couldn't fetch trip info: %s", error)
|
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import ATTR_ATTRIBUTION, CONF_HOST, CONF_PORT
from . import CONF_CONTAINERS, CONF_NODES, CONF_VMS, PROXMOX_CLIENTS, ProxmoxItemType
ATTRIBUTION = "Data provided by Proxmox VE"
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensor platform."""
sensors = []
for entry in discovery_info["entries"]:
port = entry[CONF_PORT]
for node in entry[CONF_NODES]:
for virtual_machine in node[CONF_VMS]:
sensors.append(
ProxmoxBinarySensor(
hass.data[PROXMOX_CLIENTS][f"{entry[CONF_HOST]}:{port}"],
node["node"],
ProxmoxItemType.qemu,
virtual_machine,
)
)
for container in node[CONF_CONTAINERS]:
sensors.append(
ProxmoxBinarySensor(
hass.data[PROXMOX_CLIENTS][f"{entry[CONF_HOST]}:{port}"],
node["node"],
ProxmoxItemType.lxc,
container,
)
)
add_entities(sensors, True)
class ProxmoxBinarySensor(BinarySensorEntity):
"""A binary sensor for reading Proxmox VE data."""
def __init__(self, proxmox_client, item_node, item_type, item_id):
"""Initialize the binary sensor."""
self._proxmox_client = proxmox_client
self._item_node = item_node
self._item_type = item_type
self._item_id = item_id
self._vmname = None
self._name = None
self._state = None
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def is_on(self):
"""Return true if VM/container is running."""
return self._state
@property
def device_state_attributes(self):
"""Return device attributes of the entity."""
return {
"node": self._item_node,
"vmid": self._item_id,
"vmname": self._vmname,
"type": self._item_type.name,
ATTR_ATTRIBUTION: ATTRIBUTION,
}
def update(self):
"""Check if the VM/Container is running."""
item = self.poll_item()
if item is None:
_LOGGER.warning("Failed to poll VM/container %s", self._item_id)
return
self._state = item["status"] == "running"
def poll_item(self):
"""Find the VM/Container with the set item_id."""
items = (
self._proxmox_client.get_api_client()
.nodes(self._item_node)
.get(self._item_type.name)
)
item = next(
(item for item in items if item["vmid"] == str(self._item_id)), None
)
if item is None:
_LOGGER.warning("Couldn't find VM/Container with the ID %s", self._item_id)
return None
if self._vmname is None:
self._vmname = item["name"]
if self._name is None:
self._name = f"{self._item_node} {self._vmname} running"
return item
|
from plumbum.path.base import Path
from plumbum.lib import six
from plumbum.machines.local import local, LocalPath
import os
def delete(*paths):
"""Deletes the given paths. The arguments can be either strings,
:class:`local paths <plumbum.path.local.LocalPath>`,
:class:`remote paths <plumbum.path.remote.RemotePath>`, or iterables of such.
No error is raised if any of the paths does not exist (it is silently ignored)
"""
for p in paths:
if isinstance(p, Path):
p.delete()
elif isinstance(p, six.string_types):
local.path(p).delete()
elif hasattr(p, "__iter__"):
delete(*p)
else:
raise TypeError("Cannot delete %r" % (p, ))
def _move(src, dst):
ret = copy(src, dst)
delete(src)
return ret
def move(src, dst):
"""Moves the source path onto the destination path; ``src`` and ``dst`` can be either
strings, :class:`LocalPaths <plumbum.path.local.LocalPath>` or
:class:`RemotePath <plumbum.path.remote.RemotePath>`; any combination of the three will
work.
.. versionadded:: 1.3
``src`` can also be a list of strings/paths, in which case ``dst`` must not exist or be a directory.
"""
if not isinstance(dst, Path):
dst = local.path(dst)
if isinstance(src, (tuple, list)):
if not dst.exists():
dst.mkdir()
elif not dst.is_dir():
raise ValueError(
"When using multiple sources, dst %r must be a directory" %
(dst, ))
for src2 in src:
move(src2, dst)
return dst
elif not isinstance(src, Path):
src = local.path(src)
if isinstance(src, LocalPath):
if isinstance(dst, LocalPath):
return src.move(dst)
else:
return _move(src, dst)
elif isinstance(dst, LocalPath):
return _move(src, dst)
elif src.remote == dst.remote:
return src.move(dst)
else:
return _move(src, dst)
def copy(src, dst):
"""
Copy (recursively) the source path onto the destination path; ``src`` and ``dst`` can be
either strings, :class:`LocalPaths <plumbum.path.local.LocalPath>` or
:class:`RemotePath <plumbum.path.remote.RemotePath>`; any combination of the three will
work.
.. versionadded:: 1.3
``src`` can also be a list of strings/paths, in which case ``dst`` must not exist or be a directory.
"""
if not isinstance(dst, Path):
dst = local.path(dst)
if isinstance(src, (tuple, list)):
if not dst.exists():
dst.mkdir()
elif not dst.is_dir():
raise ValueError(
"When using multiple sources, dst %r must be a directory" %
(dst, ))
for src2 in src:
copy(src2, dst)
return dst
elif not isinstance(src, Path):
src = local.path(src)
if isinstance(src, LocalPath):
if isinstance(dst, LocalPath):
return src.copy(dst)
else:
dst.remote.upload(src, dst)
return dst
elif isinstance(dst, LocalPath):
src.remote.download(src, dst)
return dst
elif src.remote == dst.remote:
return src.copy(dst)
else:
with local.tempdir() as tmp:
copy(src, tmp)
copy(tmp / src.name, dst)
return dst
def gui_open(filename):
    """Open a file with the platform's default GUI application. A similar
    effect can be achieved with the stdlib ``webbrowser`` module, but that
    is not supported here."""
    if hasattr(os, "startfile"):
        os.startfile(filename)
    else:
        local.get('xdg-open', 'open')(filename)
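# Illustrative usage (added example, not part of the original module):
# every helper accepts strings, Path objects, or lists of either. The
# file names below are made up for the demo.
if __name__ == "__main__":
    d = local.path("plumbum_demo_dir")
    d.mkdir()
    (d / "a.txt").write("hello")
    copy(d / "a.txt", d / "b.txt")      # single-file copy
    move([d / "b.txt"], d / "moved")    # list source -> directory destination
    delete(d, "no_such_file")           # missing paths are silently ignored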
|
import numpy as np
from scipy import linalg
from scipy.linalg import LinAlgError
from scipy._lib._util import _asarray_validated
_d = np.empty(0, np.float64)
_z = np.empty(0, np.complex128)
dgemm = linalg.get_blas_funcs('gemm', (_d,))
zgemm = linalg.get_blas_funcs('gemm', (_z,))
dgemv = linalg.get_blas_funcs('gemv', (_d,))
ddot = linalg.get_blas_funcs('dot', (_d,))
_I = np.cast['F'](1j)
###############################################################################
# linalg.svd and linalg.pinv2
dgesdd, dgesdd_lwork = linalg.get_lapack_funcs(('gesdd', 'gesdd_lwork'), (_d,))
zgesdd, zgesdd_lwork = linalg.get_lapack_funcs(('gesdd', 'gesdd_lwork'), (_z,))
dgesvd, dgesvd_lwork = linalg.get_lapack_funcs(('gesvd', 'gesvd_lwork'), (_d,))
zgesvd, zgesvd_lwork = linalg.get_lapack_funcs(('gesvd', 'gesvd_lwork'), (_z,))
def _svd_lwork(shape, dtype=np.float64):
"""Set up SVD calculations on identical-shape float64/complex128 arrays."""
if dtype == np.float64:
gesdd_lwork, gesvd_lwork = dgesdd_lwork, dgesvd_lwork
else:
assert dtype == np.complex128
gesdd_lwork, gesvd_lwork = zgesdd_lwork, zgesvd_lwork
sdd_lwork = linalg.decomp_svd._compute_lwork(
gesdd_lwork, *shape, compute_uv=True, full_matrices=False)
svd_lwork = linalg.decomp_svd._compute_lwork(
gesvd_lwork, *shape, compute_uv=True, full_matrices=False)
return (sdd_lwork, svd_lwork)
def _repeated_svd(x, lwork, overwrite_a=False):
"""Mimic scipy.linalg.svd, avoid lwork and get_lapack_funcs overhead."""
if x.dtype == np.float64:
        gesdd, gesvd = dgesdd, dgesvd
else:
assert x.dtype == np.complex128
gesdd, gesvd = zgesdd, zgesvd
# this has to use overwrite_a=False in case we need to fall back to gesvd
u, s, v, info = gesdd(x, compute_uv=True, lwork=lwork[0],
full_matrices=False, overwrite_a=False)
if info > 0:
# Fall back to slower gesvd, sometimes gesdd fails
u, s, v, info = gesvd(x, compute_uv=True, lwork=lwork[1],
full_matrices=False, overwrite_a=overwrite_a)
if info > 0:
raise LinAlgError("SVD did not converge")
if info < 0:
raise ValueError('illegal value in %d-th argument of internal gesdd'
% -info)
return u, s, v
###############################################################################
# linalg.eigh
dsyevd, = linalg.get_lapack_funcs(('syevd',), (_d,))
zheevd, = linalg.get_lapack_funcs(('heevd',), (_z,))
def eigh(a, overwrite_a=False, check_finite=True):
"""Efficient wrapper for eigh.
Parameters
----------
a : ndarray, shape (n_components, n_components)
        The symmetric array to operate on.
overwrite_a : bool
If True, the contents of a can be overwritten for efficiency.
check_finite : bool
If True, check that all elements are finite.
Returns
-------
w : ndarray, shape (n_components,)
The N eigenvalues, in ascending order, each repeated according to
its multiplicity.
v : ndarray, shape (n_components, n_components)
The normalized eigenvector corresponding to the eigenvalue ``w[i]``
is the column ``v[:, i]``.
"""
# We use SYEVD, see https://github.com/scipy/scipy/issues/9212
if check_finite:
a = _asarray_validated(a, check_finite=check_finite)
if a.dtype == np.float64:
evr, driver = dsyevd, 'syevd'
else:
assert a.dtype == np.complex128
evr, driver = zheevd, 'heevd'
w, v, info = evr(a, lower=1, overwrite_a=overwrite_a)
if info == 0:
return w, v
if info < 0:
raise ValueError('illegal value in argument %d of internal %s'
% (-info, driver))
else:
raise LinAlgError("internal fortran routine failed to converge: "
"%i off-diagonal elements of an "
"intermediate tridiagonal form did not converge"
" to zero." % info)
def sqrtm_sym(A, rcond=1e-7, inv=False):
"""Compute the sqrt of a positive, semi-definite matrix (or its inverse).
Parameters
----------
A : ndarray, shape (..., n, n)
The array to take the square root of.
rcond : float
The relative condition number used during reconstruction.
inv : bool
If True, compute the inverse of the square root rather than the
square root itself.
Returns
-------
A_sqrt : ndarray, shape (..., n, n)
The (possibly inverted) square root of A.
s : ndarray, shape (..., n)
The original square root singular values (not inverted).
"""
    # Same as linalg.sqrtm(A) but faster; also yields the eigenvalues
return _sym_mat_pow(A, -0.5 if inv else 0.5, rcond, return_s=True)
def _sym_mat_pow(A, power, rcond=1e-7, reduce_rank=False, return_s=False):
"""Exponentiate Hermitian matrices with optional rank reduction."""
assert power in (-1, 0.5, -0.5) # only used internally
s, u = np.linalg.eigh(A) # eigenvalues in ascending order
    # Is it positive semi-definite? If so, keep it real
    limit = s[..., -1:] * rcond
    if not (s >= -limit).all():  # allow some tiny negative values
raise ValueError('Matrix is not positive semi-definite')
s[s <= limit] = np.inf if power < 0 else 0
if reduce_rank:
# These are ordered smallest to largest, so we set the first one
# to inf -- then the 1. / s below will turn this to zero, as needed.
s[..., 0] = np.inf
if power in (-0.5, 0.5):
np.sqrt(s, out=s)
use_s = 1. / s if power < 0 else s
out = np.matmul(u * use_s[..., np.newaxis, :], u.swapaxes(-2, -1).conj())
if return_s:
out = (out, s)
return out
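# Illustrative self-check (added example, not part of the original module):
# sqrtm_sym should reproduce the matrix square root for a positive
# semi-definite input, and eigh should agree with numpy's eigvalsh.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X = rng.randn(4, 4)
    A = X @ X.T  # symmetric positive semi-definite by construction
    A_sqrt, s = sqrtm_sym(A)
    np.testing.assert_allclose(A_sqrt @ A_sqrt, A, atol=1e-10)
    w, v = eigh(A.copy())
    np.testing.assert_allclose(w, np.linalg.eigvalsh(A), atol=1e-10)
    print('sqrtm_sym and eigh checks passed')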
|
import os
import shutil
import unittest
import subprocess
import shlex
import sqlite3
def run_cmd(app, cmd):
"""Run a command and return a tuple with (stdout, stderr, exit_code)"""
os.environ['FLASK_APP'] = app
process = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = process.communicate()
return stdout, stderr, process.wait()
class TestMigrate(unittest.TestCase):
def setUp(self):
os.chdir(os.path.split(os.path.abspath(__file__))[0])
try:
os.remove('app1.db')
os.remove('app2.db')
except OSError:
pass
try:
shutil.rmtree('migrations')
except OSError:
pass
def tearDown(self):
try:
os.remove('app1.db')
os.remove('app2.db')
except OSError:
pass
try:
shutil.rmtree('migrations')
except OSError:
pass
def test_multidb_migrate_upgrade(self):
(o, e, s) = run_cmd('app_multidb.py', 'flask db init --multidb')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('app_multidb.py', 'flask db migrate')
self.assertTrue(s == 0)
(o, e, s) = run_cmd('app_multidb.py', 'flask db upgrade')
self.assertTrue(s == 0)
# ensure the tables are in the correct databases
conn1 = sqlite3.connect('app1.db')
c = conn1.cursor()
c.execute('select name from sqlite_master')
tables = c.fetchall()
conn1.close()
self.assertIn(('alembic_version',), tables)
self.assertIn(('user',), tables)
conn2 = sqlite3.connect('app2.db')
c = conn2.cursor()
c.execute('select name from sqlite_master')
tables = c.fetchall()
conn2.close()
self.assertIn(('alembic_version',), tables)
self.assertIn(('group',), tables)
# ensure the databases can be written to
from .app_multidb import db, User, Group
db.session.add(User(name='test'))
db.session.add(Group(name='group'))
db.session.commit()
# ensure the downgrade works
(o, e, s) = run_cmd('app_multidb.py', 'flask db downgrade')
self.assertTrue(s == 0)
conn1 = sqlite3.connect('app1.db')
c = conn1.cursor()
c.execute('select name from sqlite_master')
tables = c.fetchall()
conn1.close()
self.assertIn(('alembic_version',), tables)
self.assertNotIn(('user',), tables)
conn2 = sqlite3.connect('app2.db')
c = conn2.cursor()
c.execute('select name from sqlite_master')
tables = c.fetchall()
conn2.close()
self.assertIn(('alembic_version',), tables)
self.assertNotIn(('group',), tables)
|
import hkavr
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
DEFAULT_NAME = "Harman Kardon AVR"
DEFAULT_PORT = 10025
SUPPORT_HARMAN_KARDON_AVR = (
SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
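# Example configuration.yaml entry (illustrative; host value is made up):
#
# media_player:
#   - platform: harman_kardon_avr
#     host: 192.168.1.50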
def setup_platform(hass, config, add_entities, discover_info=None):
"""Set up the AVR platform."""
name = config[CONF_NAME]
host = config[CONF_HOST]
port = config[CONF_PORT]
avr = hkavr.HkAVR(host, port, name)
avr_device = HkAvrDevice(avr)
add_entities([avr_device], True)
class HkAvrDevice(MediaPlayerEntity):
"""Representation of a Harman Kardon AVR / JBL AVR TV."""
def __init__(self, avr):
"""Initialize a new HarmanKardonAVR."""
self._avr = avr
self._name = avr.name
self._host = avr.host
self._port = avr.port
self._source_list = avr.sources
self._state = None
self._muted = avr.muted
self._current_source = avr.current_source
def update(self):
"""Update the state of this media_player."""
if self._avr.is_on():
self._state = STATE_ON
elif self._avr.is_off():
self._state = STATE_OFF
else:
self._state = None
self._muted = self._avr.muted
self._current_source = self._avr.current_source
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Muted status not available."""
return self._muted
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def source_list(self):
"""Available sources."""
return self._source_list
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_HARMAN_KARDON_AVR
def turn_on(self):
"""Turn the AVR on."""
self._avr.power_on()
def turn_off(self):
"""Turn off the AVR."""
self._avr.power_off()
def select_source(self, source):
"""Select input source."""
return self._avr.select_source(source)
def volume_up(self):
"""Volume up the AVR."""
return self._avr.volume_up()
def volume_down(self):
"""Volume down AVR."""
return self._avr.volume_down()
def mute_volume(self, mute):
"""Send mute command."""
return self._avr.mute(mute)
|