import re
from typing import TYPE_CHECKING, Any, Iterable, Sequence, Tuple
from homeassistant.const import CONF_PLATFORM
if TYPE_CHECKING:
from .typing import ConfigType
def config_per_platform(config: "ConfigType", domain: str) -> Iterable[Tuple[Any, Any]]:
"""Break a component config into different platforms.
For example, will find 'switch', 'switch 2', 'switch 3', .. etc
Async friendly.
"""
for config_key in extract_domain_configs(config, domain):
platform_config = config[config_key]
if not platform_config:
continue
if not isinstance(platform_config, list):
platform_config = [platform_config]
for item in platform_config:
try:
platform = item.get(CONF_PLATFORM)
except AttributeError:
platform = None
yield platform, item
def extract_domain_configs(config: "ConfigType", domain: str) -> Sequence[str]:
"""Extract keys from config for given domain name.
Async friendly.
"""
pattern = re.compile(fr"^{domain}(| .+)$")
return [key for key in config.keys() if pattern.match(key)]
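# A minimal, hypothetical usage sketch: config_per_platform yields one
# (platform, item) pair per platform entry, matching both the bare domain key
# ("switch") and suffixed keys ("switch 2", "switch 3", ...).
if __name__ == "__main__":
    example_config = {
        "switch": [{"platform": "demo"}],
        "switch 2": {"platform": "mqtt", "name": "gate"},
    }
    for example_platform, example_item in config_per_platform(example_config, "switch"):
        print(example_platform, example_item)
    # Expected output:
    #   demo {'platform': 'demo'}
    #   mqtt {'platform': 'mqtt', 'name': 'gate'}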
|
import re
from typing import Dict, Optional
import aiohttp
from mypy_extensions import TypedDict
from paasta_tools.utils import get_user_agent
HACHECK_TIMEOUT = aiohttp.ClientTimeout(total=45, connect=30, sock_read=10)
SpoolInfo = TypedDict(
"SpoolInfo",
{"service": str, "state": str, "since": float, "until": float, "reason": str},
total=False,
)
async def post_spool(url: str, status: str, data: Dict[str, str]) -> None:
async with aiohttp.ClientSession(timeout=HACHECK_TIMEOUT) as session:
async with session.post(
url, data=data, headers={"User-Agent": get_user_agent()}
) as resp:
resp.raise_for_status()
async def get_spool(spool_url: Optional[str]) -> Optional[SpoolInfo]:
"""Query hacheck for the state of a task, and parse the result into a dictionary."""
if spool_url is None:
return None
# TODO: aiohttp says not to create a session per request. Fix this.
async with aiohttp.ClientSession(timeout=HACHECK_TIMEOUT) as session:
async with session.get(
spool_url, headers={"User-Agent": get_user_agent()}
) as response:
if response.status == 200:
return {"state": "up"}
regex = "".join(
[
"^",
r"Service (?P<service>.+)",
r" in (?P<state>.+) state",
r"(?: since (?P<since>[0-9.]+))?",
r"(?: until (?P<until>[0-9.]+))?",
r"(?:: (?P<reason>.*))?",
"$",
]
)
response_text = await response.text()
match = re.match(regex, response_text)
groupdict = match.groupdict()
info: SpoolInfo = {}
info["service"] = groupdict["service"]
info["state"] = groupdict["state"]
if "since" in groupdict:
info["since"] = float(groupdict["since"] or 0)
if "until" in groupdict:
info["until"] = float(groupdict["until"] or 0)
if "reason" in groupdict:
info["reason"] = groupdict["reason"]
return info
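# A hypothetical usage sketch (the URL below is illustrative, not a real
# hacheck endpoint): query the spool state of one task and mark it down.
#
#     info = await get_spool("http://localhost:6666/spool/example_service/0/status")
#     if info and info.get("state") != "up":
#         print("task is drained:", info.get("reason"))
#     await post_spool(
#         "http://localhost:6666/spool/example_service/0/status",
#         "down",
#         data={"status": "down", "reason": "maintenance"},
#     )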
|
from datetime import timedelta
import logging
import threading
import time
from aqualogic.core import AquaLogic
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "aqualogic"
UPDATE_TOPIC = f"{DOMAIN}_update"
CONF_UNIT = "unit"
RECONNECT_INTERVAL = timedelta(seconds=10)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PORT): cv.port}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up AquaLogic platform."""
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
processor = AquaLogicProcessor(hass, host, port)
hass.data[DOMAIN] = processor
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, processor.start_listen)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, processor.shutdown)
_LOGGER.debug("AquaLogicProcessor %s:%i initialized", host, port)
return True
class AquaLogicProcessor(threading.Thread):
"""AquaLogic event processor thread."""
def __init__(self, hass, host, port):
"""Initialize the data object."""
super().__init__(daemon=True)
self._hass = hass
self._host = host
self._port = port
self._shutdown = False
self._panel = None
def start_listen(self, event):
"""Start event-processing thread."""
_LOGGER.debug("Event processing thread started")
self.start()
def shutdown(self, event):
"""Signal shutdown of processing event."""
_LOGGER.debug("Event processing signaled exit")
self._shutdown = True
def data_changed(self, panel):
"""Aqualogic data changed callback."""
self._hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
def run(self):
"""Event thread."""
while True:
self._panel = AquaLogic()
self._panel.connect(self._host, self._port)
self._panel.process(self.data_changed)
if self._shutdown:
return
_LOGGER.error("Connection to %s:%d lost", self._host, self._port)
time.sleep(RECONNECT_INTERVAL.seconds)
@property
def panel(self):
"""Retrieve the AquaLogic object."""
return self._panel
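# A minimal sketch (hypothetical platform code, not part of this module) of how
# an aqualogic entity platform would reach the shared processor and subscribe
# to updates after setup() has stored it in hass.data[DOMAIN]:
#
#     processor = hass.data[DOMAIN]
#     panel = processor.panel  # AquaLogic instance, or None before first connect
#     hass.helpers.dispatcher.dispatcher_connect(UPDATE_TOPIC, update_callback)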
|
revision = 'c301c59688d2'
down_revision = '434c29e40511'
from alembic import op
from sqlalchemy.sql import text
import time
import datetime
from flask import current_app
from logging import Formatter, FileHandler, getLogger
from lemur.common import utils
log = getLogger(__name__)
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
handler.setFormatter(
Formatter(
"%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]"
)
)
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.addHandler(handler)
def upgrade():
log.info("\n*** Starting new run(%s) ***\n" % datetime.datetime.now())
start_time = time.time()
# Update RSA keys using the key length information
update_key_type_rsa(1024)
update_key_type_rsa(2048)
update_key_type_rsa(4096)
    # Process the remaining certificates. The method below makes no assumptions about key type, but most of the remaining ones should be ECC certificates.
update_key_type()
log.info("--- Total %s seconds ---\n" % (time.time() - start_time))
def downgrade():
# Change key type column back to null
# Going back 32 days instead of 31 to make sure no certificates are skipped
stmt = text(
"update certificates set key_type=null where not_after > CURRENT_DATE - 32"
)
op.execute(stmt)
commit()
"""
Helper methods performing updates for RSA and the remaining key types
"""
def update_key_type_rsa(bits):
log.info("Processing certificate with key type RSA %s\n" % bits)
stmt = text(
f"update certificates set key_type='RSA{bits}' where bits={bits} and not_after > CURRENT_DATE - 31 and key_type is null"
)
log.info("Query: %s\n" % stmt)
start_time = time.time()
op.execute(stmt)
commit()
log.info("--- %s seconds ---\n" % (time.time() - start_time))
def update_key_type():
conn = op.get_bind()
start_time = time.time()
# Loop through all certificates that are valid today or expired in the last 30 days.
for cert_id, body in conn.execute(
text(
"select id, body from certificates where not_after > CURRENT_DATE - 31 and key_type is null")
):
try:
cert_key_type = utils.get_key_type_from_certificate(body)
except ValueError as e:
log.error("Error in processing certificate - ID: %s Error: %s \n" % (cert_id, str(e)))
else:
log.info("Processing certificate - ID: %s key_type: %s\n" % (cert_id, cert_key_type))
stmt = text(
"update certificates set key_type=:key_type where id=:id"
)
stmt = stmt.bindparams(key_type=cert_key_type, id=cert_id)
op.execute(stmt)
commit()
log.info("--- %s seconds ---\n" % (time.time() - start_time))
def commit():
stmt = text("commit")
op.execute(stmt)
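# Usage note (hedged): this migration is normally applied through Alembic's
# command line using the revision ids defined above, e.g.
#
#     alembic upgrade c301c59688d2
#     alembic downgrade 434c29e40511
#
# The exact entry point in a Lemur deployment may differ.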
|
import asyncio
from dsmr_parser.clients.protocol import DSMRProtocol
from dsmr_parser.obis_references import EQUIPMENT_IDENTIFIER, EQUIPMENT_IDENTIFIER_GAS
from dsmr_parser.objects import CosemObject
import pytest
from tests.async_mock import MagicMock, patch
@pytest.fixture
async def dsmr_connection_fixture(hass):
"""Fixture that mocks serial connection."""
transport = MagicMock(spec=asyncio.Transport)
protocol = MagicMock(spec=DSMRProtocol)
async def connection_factory(*args, **kwargs):
"""Return mocked out Asyncio classes."""
return (transport, protocol)
connection_factory = MagicMock(wraps=connection_factory)
with patch(
"homeassistant.components.dsmr.sensor.create_dsmr_reader", connection_factory
), patch(
"homeassistant.components.dsmr.sensor.create_tcp_dsmr_reader",
connection_factory,
):
yield (connection_factory, transport, protocol)
@pytest.fixture
async def dsmr_connection_send_validate_fixture(hass):
"""Fixture that mocks serial connection."""
transport = MagicMock(spec=asyncio.Transport)
protocol = MagicMock(spec=DSMRProtocol)
async def connection_factory(*args, **kwargs):
"""Return mocked out Asyncio classes."""
return (transport, protocol)
connection_factory = MagicMock(wraps=connection_factory)
protocol.telegram = {
EQUIPMENT_IDENTIFIER: CosemObject([{"value": "12345678", "unit": ""}]),
EQUIPMENT_IDENTIFIER_GAS: CosemObject([{"value": "123456789", "unit": ""}]),
}
async def wait_closed():
if isinstance(connection_factory.call_args_list[0][0][2], str):
# TCP
telegram_callback = connection_factory.call_args_list[0][0][3]
else:
# Serial
telegram_callback = connection_factory.call_args_list[0][0][2]
telegram_callback(protocol.telegram)
protocol.wait_closed = wait_closed
with patch(
"homeassistant.components.dsmr.config_flow.create_dsmr_reader",
connection_factory,
), patch(
"homeassistant.components.dsmr.config_flow.create_tcp_dsmr_reader",
connection_factory,
):
yield (connection_factory, transport, protocol)
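# A minimal, hypothetical test sketch showing how these fixtures are consumed:
# a test unpacks (connection_factory, transport, protocol) and can assert how
# the patched create_dsmr_reader / create_tcp_dsmr_reader factories were used.
#
#     async def test_example(hass, dsmr_connection_fixture):
#         connection_factory, transport, protocol = dsmr_connection_fixture
#         ...  # set up the dsmr integration here
#         assert connection_factory.call_count == 1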
|
import sys
from io import StringIO
from unittest import SkipTest
import requests
from django.core.management import call_command
from django.core.management.base import CommandError, SystemCheckError
from django.test import SimpleTestCase, TestCase
from weblate.accounts.models import Profile
from weblate.runner import main
from weblate.trans.models import Component, Translation
from weblate.trans.tests.test_models import RepoTestCase
from weblate.trans.tests.test_views import FixtureTestCase, ViewTestCase
from weblate.trans.tests.utils import create_test_user, get_test_file
from weblate.vcs.mercurial import HgRepository
TEST_PO = get_test_file("cs.po")
TEST_COMPONENTS = get_test_file("components.json")
TEST_COMPONENTS_INVALID = get_test_file("components-invalid.json")
class RunnerTest(SimpleTestCase):
def test_help(self):
restore = sys.stdout
try:
sys.stdout = StringIO()
main(["help"])
self.assertIn("list_versions", sys.stdout.getvalue())
finally:
sys.stdout = restore
class ImportProjectTest(RepoTestCase):
def do_import(self, path=None, **kwargs):
call_command(
"import_project",
"test",
self.git_repo_path if path is None else path,
"master",
"**/*.po",
**kwargs,
)
def test_import(self):
project = self.create_project()
self.do_import()
self.assertEqual(project.component_set.count(), 4)
def test_import_deep(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"deep/*/locales/*/LC_MESSAGES/**.po",
)
self.assertEqual(project.component_set.count(), 1)
def test_import_ignore(self):
project = self.create_project()
self.do_import()
self.do_import()
self.assertEqual(project.component_set.count(), 4)
def test_import_duplicate(self):
project = self.create_project()
self.do_import()
self.do_import(path="weblate://test/po")
self.assertEqual(project.component_set.count(), 4)
def test_import_main_1(self, name="po-mono"):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/*.po",
main_component=name,
)
non_linked = project.component_set.with_repo()
self.assertEqual(non_linked.count(), 1)
self.assertEqual(non_linked[0].slug, name)
def test_import_main_2(self):
self.test_import_main_1("second-po")
def test_import_main_invalid(self):
with self.assertRaises(CommandError):
self.test_import_main_1("x-po")
def test_import_filter(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/*.po",
language_regex="cs",
)
self.assertEqual(project.component_set.count(), 4)
for component in project.component_set.iterator():
self.assertEqual(component.translation_set.count(), 2)
def test_import_re(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
r"(?P<component>[^/-]*)/(?P<language>[^/]*)\.po",
)
self.assertEqual(project.component_set.count(), 1)
def test_import_name(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
r"(?P<component>[^/-]*)/(?P<language>[^/]*)\.po",
name_template="Test name",
)
self.assertEqual(project.component_set.count(), 1)
self.assertTrue(project.component_set.filter(name="Test name").exists())
def test_import_re_missing(self):
with self.assertRaises(CommandError):
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
r"(?P<name>[^/-]*)/.*\.po",
)
def test_import_re_wrong(self):
with self.assertRaises(CommandError):
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
r"(?P<name>[^/-]*",
)
def test_import_po(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/*.po",
file_format="po",
)
self.assertEqual(project.component_set.count(), 4)
def test_import_invalid(self):
project = self.create_project()
with self.assertRaises(CommandError):
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/*.po",
file_format="INVALID",
)
self.assertEqual(project.component_set.count(), 0)
def test_import_aresource(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/values-*/strings.xml",
file_format="aresource",
base_file_template="android/values/strings.xml",
)
self.assertEqual(project.component_set.count(), 2)
def test_import_aresource_format(self):
project = self.create_project()
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/values-*/strings.xml",
file_format="aresource",
base_file_template="%s/values/strings.xml",
)
self.assertEqual(project.component_set.count(), 2)
def test_re_import(self):
project = self.create_project()
call_command("import_project", "test", self.git_repo_path, "master", "**/*.po")
self.assertEqual(project.component_set.count(), 4)
call_command("import_project", "test", self.git_repo_path, "master", "**/*.po")
self.assertEqual(project.component_set.count(), 4)
def test_import_against_existing(self):
"""Test importing with a weblate:// URL."""
android = self.create_android()
project = android.project
self.assertEqual(project.component_set.count(), 1)
call_command(
"import_project",
project.slug,
f"weblate://{project.slug!s}/{android.slug!s}",
"master",
"**/*.po",
)
self.assertEqual(project.component_set.count(), 5)
def test_import_missing_project(self):
"""Test of correct handling of missing project."""
with self.assertRaises(CommandError):
call_command(
"import_project", "test", self.git_repo_path, "master", "**/*.po"
)
def test_import_missing_wildcard(self):
"""Test of correct handling of missing wildcard."""
self.create_project()
with self.assertRaises(CommandError):
call_command(
"import_project", "test", self.git_repo_path, "master", "*/*.po"
)
def test_import_wrong_vcs(self):
"""Test of correct handling of wrong vcs."""
self.create_project()
with self.assertRaises(CommandError):
call_command(
"import_project",
"test",
self.git_repo_path,
"master",
"**/*.po",
vcs="nonexisting",
)
def test_import_mercurial(self):
"""Test importing Mercurial project."""
if not HgRepository.is_supported():
raise SkipTest("Mercurial not available!")
project = self.create_project()
call_command(
"import_project",
"test",
self.mercurial_repo_path,
"default",
"**/*.po",
vcs="mercurial",
)
self.assertEqual(project.component_set.count(), 4)
def test_import_mercurial_mixed(self):
"""Test importing Mercurial project with mixed component/lang."""
if not HgRepository.is_supported():
raise SkipTest("Mercurial not available!")
self.create_project()
with self.assertRaises(CommandError):
call_command(
"import_project",
"test",
self.mercurial_repo_path,
"default",
"*/**.po",
vcs="mercurial",
)
class BasicCommandTest(FixtureTestCase):
def test_versions(self):
output = StringIO()
call_command("list_versions", stdout=output)
self.assertIn("Weblate", output.getvalue())
def test_check(self):
with self.assertRaises(SystemCheckError):
call_command("check", "--deploy")
class WeblateComponentCommandTestCase(ViewTestCase):
"""Base class for handling tests of WeblateComponentCommand based commands."""
command_name = "checkgit"
expected_string = "On branch master"
def do_test(self, *args, **kwargs):
output = StringIO()
call_command(self.command_name, *args, stdout=output, **kwargs)
if self.expected_string:
self.assertIn(self.expected_string, output.getvalue())
else:
self.assertEqual("", output.getvalue())
def test_all(self):
self.do_test(all=True)
def test_project(self):
self.do_test("test")
def test_component(self):
self.do_test("test/test")
def test_nonexisting_project(self):
with self.assertRaises(CommandError):
self.do_test("notest")
def test_nonexisting_component(self):
with self.assertRaises(CommandError):
self.do_test("test/notest")
class CommitPendingTest(WeblateComponentCommandTestCase):
command_name = "commit_pending"
expected_string = ""
def test_age(self):
self.do_test("test", "--age", "1")
class CommitPendingChangesTest(CommitPendingTest):
def setUp(self):
super().setUp()
self.edit_unit("Hello, world!\n", "Nazdar svete!\n")
class CommitGitTest(WeblateComponentCommandTestCase):
command_name = "commitgit"
expected_string = ""
class PushGitTest(WeblateComponentCommandTestCase):
command_name = "pushgit"
expected_string = ""
class LoadTest(WeblateComponentCommandTestCase):
command_name = "loadpo"
expected_string = ""
class UpdateChecksTest(WeblateComponentCommandTestCase):
command_name = "updatechecks"
expected_string = "Processing"
class UpdateGitTest(WeblateComponentCommandTestCase):
command_name = "updategit"
expected_string = ""
class LockTranslationTest(WeblateComponentCommandTestCase):
command_name = "lock_translation"
expected_string = ""
class UnLockTranslationTest(WeblateComponentCommandTestCase):
command_name = "unlock_translation"
expected_string = ""
class ImportDemoTestCase(TestCase):
def test_import(self):
try:
requests.get("https://github.com/")
except requests.exceptions.ConnectionError as error:
raise SkipTest(f"GitHub not reachable: {error}")
output = StringIO()
call_command("import_demo", stdout=output)
self.assertEqual(output.getvalue(), "")
self.assertEqual(Component.objects.count(), 4)
class CleanupTestCase(TestCase):
def test_cleanup(self):
output = StringIO()
call_command("cleanuptrans", stdout=output)
self.assertEqual(output.getvalue(), "")
class ListTranslatorsTest(RepoTestCase):
"""Test translators list."""
def setUp(self):
super().setUp()
self.create_component()
def test_output(self):
component = Component.objects.all()[0]
output = StringIO()
call_command(
"list_translators",
f"{component.project.slug}/{component.slug}",
stdout=output,
)
self.assertEqual(output.getvalue(), "")
class LockingCommandTest(RepoTestCase):
"""Test locking and unlocking."""
def setUp(self):
super().setUp()
self.create_component()
def test_locking(self):
component = Component.objects.all()[0]
self.assertFalse(Component.objects.filter(locked=True).exists())
call_command("lock_translation", f"{component.project.slug}/{component.slug}")
self.assertTrue(Component.objects.filter(locked=True).exists())
call_command(
"unlock_translation",
f"{component.project.slug}/{component.slug}",
)
self.assertFalse(Component.objects.filter(locked=True).exists())
class BenchmarkCommandTest(RepoTestCase):
"""Benchmarking test."""
def setUp(self):
super().setUp()
self.create_component()
def test_benchmark(self):
output = StringIO()
call_command(
"benchmark", "test", "weblate://test/test", "po/*.po", stdout=output
)
self.assertIn("function calls", output.getvalue())
class SuggestionCommandTest(RepoTestCase):
"""Test suggestion addding."""
def setUp(self):
super().setUp()
self.component = self.create_component()
def test_add_suggestions(self):
user = create_test_user()
call_command(
"add_suggestions", "test", "test", "cs", TEST_PO, author=user.email
)
translation = self.component.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.suggestions, 1)
profile = Profile.objects.get(user__email=user.email)
self.assertEqual(profile.suggested, 1)
def test_default_user(self):
call_command("add_suggestions", "test", "test", "cs", TEST_PO)
profile = Profile.objects.get(user__email="[email protected]")
self.assertEqual(profile.suggested, 1)
def test_missing_user(self):
call_command(
"add_suggestions", "test", "test", "cs", TEST_PO, author="[email protected]"
)
profile = Profile.objects.get(user__email="[email protected]")
self.assertEqual(profile.suggested, 1)
def test_missing_project(self):
with self.assertRaises(CommandError):
call_command("add_suggestions", "test", "xxx", "cs", TEST_PO)
class ImportCommandTest(RepoTestCase):
"""Import test."""
def setUp(self):
super().setUp()
self.component = self.create_component()
def test_import(self):
output = StringIO()
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
TEST_COMPONENTS,
stdout=output,
)
self.assertEqual(self.component.project.component_set.count(), 3)
self.assertEqual(Translation.objects.count(), 10)
self.assertIn("Imported Test/Gettext PO with 4 translations", output.getvalue())
def test_import_invalid(self):
with self.assertRaises(CommandError):
call_command("import_json", "--project", "test", TEST_COMPONENTS_INVALID)
def test_import_twice(self):
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
TEST_COMPONENTS,
)
with self.assertRaises(CommandError):
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
TEST_COMPONENTS,
)
def test_import_ignore(self):
output = StringIO()
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
TEST_COMPONENTS,
stdout=output,
)
self.assertIn("Imported Test/Gettext PO with 4 translations", output.getvalue())
output.truncate()
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
"--ignore",
TEST_COMPONENTS,
stderr=output,
)
self.assertIn("Component Test/Gettext PO already exists", output.getvalue())
def test_import_update(self):
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
TEST_COMPONENTS,
)
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test",
"--update",
TEST_COMPONENTS,
)
def test_invalid_file(self):
with self.assertRaises(CommandError):
call_command(
"import_json", "--main-component", "test", "--project", "test", TEST_PO
)
def test_nonexisting_project(self):
with self.assertRaises(CommandError):
call_command(
"import_json",
"--main-component",
"test",
"--project",
"test2",
"/nonexisting/dfile",
)
def test_nonexisting_component(self):
with self.assertRaises(CommandError):
call_command(
"import_json",
"--main-component",
"test2",
"--project",
"test",
"/nonexisting/dfile",
)
def test_missing_component(self):
with self.assertRaises(CommandError):
call_command("import_json", "--project", "test", "/nonexisting/dfile")
|
import unittest
import logging
import sys
import mock
from kalliope import parse_args, configure_logging, main
class TestInit(unittest.TestCase):
def test_parse_args(self):
# start option
parser = parse_args(['value'])
self.assertEqual(parser.action, "value")
# no option
with self.assertRaises(SystemExit):
parse_args([])
parser = parse_args(['start', '--run-synapse', 'run_synapse'])
self.assertEqual(parser.run_synapse, 'run_synapse')
parser = parse_args(['start', '--run-order', 'my order'])
self.assertEqual(parser.run_order, 'my order')
def test_configure_logging(self):
logger = logging.getLogger("kalliope")
# Level 10 = DEBUG
configure_logging(debug=True)
self.assertEqual(logger.getEffectiveLevel(), 10)
logger.propagate = False
# Level 20 = INFO
configure_logging(debug=False)
self.assertEqual(logger.getEffectiveLevel(), 20)
# disable after testing
logger.disabled = True
def test_main(self):
# test start kalliope
sys.argv = ['kalliope.py', 'start']
with mock.patch('kalliope.start_rest_api') as mock_rest_api:
with mock.patch('kalliope.start_kalliope') as mock_start_kalliope:
mock_rest_api.return_value = None
main()
mock_rest_api.assert_called()
mock_start_kalliope.assert_called()
# test run_synapse
sys.argv = ['kalliope.py', 'start', '--run-synapse', 'synapse_name']
with mock.patch('kalliope.core.SynapseLauncher.start_synapse_by_list_name') as mock_synapse_launcher:
mock_synapse_launcher.return_value = None
main()
mock_synapse_launcher.assert_called()
# test run order
sys.argv = ['kalliope.py', 'start', '--run-order', 'my order']
with mock.patch('kalliope.core.SynapseLauncher.run_matching_synapse_from_order') as mock_synapse_launcher:
mock_synapse_launcher.return_value = None
main()
mock_synapse_launcher.assert_called()
# action doesn't exist
sys.argv = ['kalliope.py', 'non_existing_action']
with self.assertRaises(SystemExit):
main()
# install
sys.argv = ['kalliope.py', 'install', '--git-url', 'https://my_url']
with mock.patch('kalliope.core.ResourcesManager.install') as mock_resource_manager:
mock_resource_manager.return_value = None
main()
mock_resource_manager.assert_called()
# install, no URL
sys.argv = ['kalliope.py', 'install']
with self.assertRaises(SystemExit):
main()
sys.argv = ['kalliope.py', 'install', '--git-url']
with self.assertRaises(SystemExit):
main()
# uninstall
sys.argv = ['kalliope.py', 'uninstall', '--neuron-name', 'neuron_name']
with mock.patch('kalliope.core.ResourcesManager.uninstall') as mock_resource_manager:
mock_resource_manager.return_value = None
main()
mock_resource_manager.assert_called()
sys.argv = ['kalliope.py', 'uninstall']
with self.assertRaises(SystemExit):
main()
if __name__ == '__main__':
unittest.main()
# suite = unittest.TestSuite()
# suite.addTest(TestInit("test_main"))
# runner = unittest.TextTestRunner()
# runner.run(suite)
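# For reference, the CLI invocations mocked above correspond to commands such
# as the following (synapse/order names and the git URL are illustrative):
#
#     kalliope start
#     kalliope start --run-synapse "say-hello"
#     kalliope start --run-order "what time is it"
#     kalliope install --git-url https://github.com/user/repo.git
#     kalliope uninstall --neuron-name neuron_name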
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import json
import logging
import re
import tempfile
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers.rackspace import rackspace_disk
from perfkitbenchmarker.providers.rackspace import rackspace_network
from perfkitbenchmarker.providers.rackspace import util
import six
from six.moves import range
from six.moves import zip
FLAGS = flags.FLAGS
CLOUD_CONFIG_TEMPLATE = '''#cloud-config
users:
- name: {0}
ssh-authorized-keys:
- {1}
sudo: ['ALL=(ALL) NOPASSWD:ALL']
groups: sudo
shell: /bin/bash
'''
BLOCK_DEVICE_TEMPLATE = '''
source-type=image,
source-id={0},
dest=volume,
size={1},
shutdown=remove,
bootindex=0
'''
LSBLK_REGEX = (r'NAME="(.*)"\s+MODEL="(.*)"\s+SIZE="(.*)"'
r'\s+TYPE="(.*)"\s+MOUNTPOINT="(.*)"\s+LABEL="(.*)"')
LSBLK_PATTERN = re.compile(LSBLK_REGEX)
INSTANCE_EXISTS_STATUSES = frozenset(
['BUILD', 'ACTIVE', 'PAUSED', 'SHUTOFF', 'ERROR'])
INSTANCE_DELETED_STATUSES = frozenset(
['DELETED'])
INSTANCE_KNOWN_STATUSES = INSTANCE_EXISTS_STATUSES | INSTANCE_DELETED_STATUSES
REMOTE_BOOT_DISK_SIZE_GB = 50
def RenderBlockDeviceTemplate(image, volume_size):
"""Renders template used for the block-device flag in RackCLI.
Args:
image: string. Image ID of the source image.
    volume_size: string. Size of the desired volume, in GB.
Returns:
string value for block-device parameter used when creating a VM.
"""
blk_params = BLOCK_DEVICE_TEMPLATE.replace('\n', '').format(
image, str(volume_size))
return blk_params
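# A small illustrative example (the image id is made up): rendering the
# template collapses the multi-line BLOCK_DEVICE_TEMPLATE into the single
# comma-separated parameter string expected by RackCLI, e.g.
#
#     RenderBlockDeviceTemplate('ubuntu-trusty-id', 50) returns
#     'source-type=image,source-id=ubuntu-trusty-id,dest=volume,size=50,shutdown=remove,bootindex=0'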
class RackspaceVmSpec(virtual_machine.BaseVmSpec):
"""Object containing the information needed to create a
RackspaceVirtualMachine.
Attributes:
project: None or string. Project ID, also known as Tenant ID
rackspace_region: None or string. Rackspace region to build VM resources.
rack_profile: None or string. Rack CLI profile configuration.
"""
CLOUD = providers.RACKSPACE
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
"""Modifies config options based on runtime flag values.
Args:
config_values: dict mapping config option names to provided values. May
be modified by this function.
flag_values: flags.FlagValues. Runtime flags that may override the
provided config values.
"""
super(RackspaceVmSpec, cls)._ApplyFlags(config_values, flag_values)
if flag_values['project'].present:
config_values['project'] = flag_values.project
if flag_values['rackspace_region'].present:
config_values['rackspace_region'] = flag_values.rackspace_region
if flag_values['rack_profile'].present:
config_values['rack_profile'] = flag_values.rack_profile
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword
arguments to construct in order to decode the named option.
"""
result = super(RackspaceVmSpec, cls)._GetOptionDecoderConstructions()
result.update({
'project': (option_decoders.StringDecoder, {'default': None}),
'rackspace_region': (option_decoders.StringDecoder, {'default': 'IAD'}),
'rack_profile': (option_decoders.StringDecoder, {'default': None})})
return result
class RackspaceVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a Rackspace Public Cloud Virtual Machine."""
CLOUD = providers.RACKSPACE
DEFAULT_IMAGE = None
def __init__(self, vm_spec):
"""Initialize a Rackspace Virtual Machine
Args:
vm_spec: virtual_machine.BaseVirtualMachineSpec object of the VM.
"""
super(RackspaceVirtualMachine, self).__init__(vm_spec)
self.boot_metadata = {}
self.boot_device = None
self.boot_disk_allocated = False
self.allocated_disks = set()
self.id = None
self.image = self.image or self.DEFAULT_IMAGE
self.region = vm_spec.rackspace_region
self.project = vm_spec.project
self.profile = vm_spec.rack_profile
# Isolated tenant networks are regional, not globally available.
# Security groups (firewalls) apply to a network, hence they are regional.
# TODO(meteorfox) Create tenant network if it doesn't exist in the region.
self.firewall = rackspace_network.RackspaceFirewall.GetFirewall()
def _CreateDependencies(self):
"""Create dependencies prior creating the VM."""
# TODO(meteorfox) Create security group (if applies)
self._UploadSSHPublicKey()
def _Create(self):
"""Creates a Rackspace VM instance and waits until it's ACTIVE."""
self._CreateInstance()
self._WaitForInstanceUntilActive()
@vm_util.Retry()
def _PostCreate(self):
"""Gets the VM's information."""
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, _, _ = get_cmd.Issue()
resp = json.loads(stdout)
self.internal_ip = resp['PrivateIPv4']
self.ip_address = resp['PublicIPv4']
self.AddMetadata(**self.vm_metadata)
def _Exists(self):
"""Returns true if the VM exists otherwise returns false."""
if self.id is None:
return False
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, _, _ = get_cmd.Issue(suppress_warning=True)
try:
resp = json.loads(stdout)
except ValueError:
return False
status = resp['Status']
return status in INSTANCE_EXISTS_STATUSES
def _Delete(self):
"""Deletes a Rackspace VM instance and waits until API returns 404."""
if self.id is None:
return
self._DeleteInstance()
self._WaitForInstanceUntilDeleted()
def _DeleteDependencies(self):
"""Deletes dependencies that were need for the VM after the VM has been
deleted."""
# TODO(meteorfox) Delete security group (if applies)
self._DeleteSSHPublicKey()
def _UploadSSHPublicKey(self):
"""Uploads SSH public key to the VM's region. 1 key per VM per Region."""
cmd = util.RackCLICommand(self, 'servers', 'keypair', 'upload')
cmd.flags = OrderedDict([
('name', self.name), ('file', self.ssh_public_key)])
cmd.Issue()
def _DeleteSSHPublicKey(self):
"""Deletes SSH public key used for a VM."""
cmd = util.RackCLICommand(self, 'servers', 'keypair', 'delete')
cmd.flags['name'] = self.name
cmd.Issue()
def _CreateInstance(self):
"""Generates and execute command for creating a Rackspace VM."""
with tempfile.NamedTemporaryFile(dir=vm_util.GetTempDir(),
prefix='user-data') as tf:
with open(self.ssh_public_key) as f:
public_key = f.read().rstrip('\n')
tf.write(CLOUD_CONFIG_TEMPLATE.format(self.user_name, public_key))
tf.flush()
create_cmd = self._GetCreateCommand(tf)
stdout, stderr, _ = create_cmd.Issue()
if stderr:
resp = json.loads(stderr)
raise errors.Error(''.join(
('Non-recoverable error has occurred: %s\n' % str(resp),
'Following command caused the error: %s' % repr(create_cmd),)))
resp = json.loads(stdout)
self.id = resp['ID']
def _GetCreateCommand(self, tf):
"""Generates RackCLI command for creating a Rackspace VM.
Args:
tf: file object containing cloud-config script.
Returns:
RackCLICommand containing RackCLI arguments to build a Rackspace VM.
"""
create_cmd = util.RackCLICommand(self, 'servers', 'instance', 'create')
create_cmd.flags['name'] = self.name
create_cmd.flags['keypair'] = self.name
create_cmd.flags['flavor-id'] = self.machine_type
if FLAGS.rackspace_boot_from_cbs_volume:
blk_flag = RenderBlockDeviceTemplate(self.image, REMOTE_BOOT_DISK_SIZE_GB)
create_cmd.flags['block-device'] = blk_flag
else:
create_cmd.flags['image-id'] = self.image
if FLAGS.rackspace_network_id is not None:
create_cmd.flags['networks'] = ','.join([
rackspace_network.PUBLIC_NET_ID, rackspace_network.SERVICE_NET_ID,
FLAGS.rackspace_network_id])
create_cmd.flags['user-data'] = tf.name
metadata = ['owner=%s' % FLAGS.owner]
for key, value in six.iteritems(self.boot_metadata):
metadata.append('%s=%s' % (key, value))
create_cmd.flags['metadata'] = ','.join(metadata)
return create_cmd
@vm_util.Retry(poll_interval=5, max_retries=720, log_errors=False,
retryable_exceptions=(errors.Resource.RetryableCreationError,))
def _WaitForInstanceUntilActive(self):
"""Waits until instance achieves non-transient state."""
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, stderr, _ = get_cmd.Issue()
if stdout:
instance = json.loads(stdout)
if instance['Status'] == 'ACTIVE':
logging.info('VM: %s is up and running.' % self.name)
return
elif instance['Status'] == 'ERROR':
logging.error('VM: %s failed to boot.' % self.name)
raise errors.VirtualMachine.VmStateError()
raise errors.Resource.RetryableCreationError(
'VM: %s is not running. Retrying to check status.' % self.name)
def _DeleteInstance(self):
"""Executes delete command for removing a Rackspace VM."""
cmd = util.RackCLICommand(self, 'servers', 'instance', 'delete')
cmd.flags['id'] = self.id
stdout, _, _ = cmd.Issue(suppress_warning=True)
resp = json.loads(stdout)
if 'result' not in resp or 'Deleting' not in resp['result']:
raise errors.Resource.RetryableDeletionError()
@vm_util.Retry(poll_interval=5, max_retries=-1, timeout=300,
log_errors=False,
retryable_exceptions=(errors.Resource.RetryableDeletionError,))
def _WaitForInstanceUntilDeleted(self):
"""Waits until instance has been fully removed, or deleted."""
get_cmd = util.RackCLICommand(self, 'servers', 'instance', 'get')
get_cmd.flags['id'] = self.id
stdout, stderr, _ = get_cmd.Issue()
if stderr:
resp = json.loads(stderr)
if 'error' in resp and "couldn't find" in resp['error']:
logging.info('VM: %s has been successfully deleted.' % self.name)
return
instance = json.loads(stdout)
if instance['Status'] == 'ERROR':
logging.error('VM: %s failed to delete.' % self.name)
raise errors.VirtualMachine.VmStateError()
if instance['Status'] == 'DELETED':
logging.info('VM: %s has been successfully deleted.', self.name)
else:
raise errors.Resource.RetryableDeletionError(
'VM: %s has not been deleted. Retrying to check status.' % self.name)
def AddMetadata(self, **kwargs):
"""Adds metadata to the VM via RackCLI update-metadata command."""
if not kwargs:
return
cmd = util.RackCLICommand(self, 'servers', 'instance', 'update-metadata')
cmd.flags['id'] = self.id
cmd.flags['metadata'] = ','.join('{0}={1}'.format(key, value)
for key, value in six.iteritems(kwargs))
cmd.Issue()
def OnStartup(self):
"""Executes commands on the VM immediately after it has booted."""
super(RackspaceVirtualMachine, self).OnStartup()
self.boot_device = self._GetBootDevice()
def CreateScratchDisk(self, disk_spec):
"""Creates a VM's scratch disk that will be used for a benchmark.
Given a data_disk_type it will either create a corresponding Disk object,
or raise an error that such data disk type is not supported.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
Raises:
errors.Error indicating that the requested 'data_disk_type' is
not supported.
"""
if disk_spec.disk_type == rackspace_disk.BOOT: # Ignore num_striped_disks
self._AllocateBootDisk(disk_spec)
elif disk_spec.disk_type == rackspace_disk.LOCAL:
self._AllocateLocalDisks(disk_spec)
elif disk_spec.disk_type in rackspace_disk.REMOTE_TYPES:
self._AllocateRemoteDisks(disk_spec)
else:
raise errors.Error('Unsupported data disk type: %s' % disk_spec.disk_type)
def _AllocateBootDisk(self, disk_spec):
"""Allocate the VM's boot, or system, disk as the scratch disk.
Boot disk can only be allocated once. If multiple data disks are required
it will raise an error.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
Raises:
errors.Error when boot disk has already been allocated as a data disk.
"""
if self.boot_disk_allocated:
raise errors.Error('Only one boot disk can be created per VM')
device_path = '/dev/%s' % self.boot_device['name']
scratch_disk = rackspace_disk.RackspaceBootDisk(
disk_spec, self.zone, self.project, device_path, self.image)
self.boot_disk_allocated = True
self.scratch_disks.append(scratch_disk)
scratch_disk.Create()
path = disk_spec.mount_point
mk_cmd = 'sudo mkdir -p {0}; sudo chown -R $USER:$USER {0};'.format(path)
self.RemoteCommand(mk_cmd)
def _AllocateLocalDisks(self, disk_spec):
"""Allocate the VM's local disks (included with the VM), as a data disk(s).
A local disk can only be allocated once per data disk.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
"""
block_devices = self._GetBlockDevices()
free_blk_devices = self._GetFreeBlockDevices(block_devices, disk_spec)
disks = []
for i in range(disk_spec.num_striped_disks):
local_device = free_blk_devices[i]
disk_name = '%s-local-disk-%d' % (self.name, i)
device_path = '/dev/%s' % local_device['name']
local_disk = rackspace_disk.RackspaceLocalDisk(
disk_spec, disk_name, self.zone, self.project, device_path)
self.allocated_disks.add(local_disk)
disks.append(local_disk)
self._CreateScratchDiskFromDisks(disk_spec, disks)
def _AllocateRemoteDisks(self, disk_spec):
"""Creates and allocates Rackspace Cloud Block Storage volumes as
as data disks.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
"""
scratch_disks = []
for disk_num in range(disk_spec.num_striped_disks):
volume_name = '%s-volume-%d' % (self.name, disk_num)
scratch_disk = rackspace_disk.RackspaceRemoteDisk(
disk_spec, volume_name, self.zone, self.project,
media=disk_spec.disk_type)
scratch_disks.append(scratch_disk)
self._CreateScratchDiskFromDisks(disk_spec, scratch_disks)
def _GetFreeBlockDevices(self, block_devices, disk_spec):
"""Returns available block devices that are not in used as data disk or as
a boot disk.
Args:
block_devices: list of dict containing information about all block devices
in the VM.
disk_spec: virtual_machine.BaseDiskSpec of the disk.
Returns:
list of dicts of only block devices that are not being used.
Raises:
errors.Error Whenever there are no available block devices.
"""
free_blk_devices = []
for dev in block_devices:
if self._IsDiskAvailable(dev):
free_blk_devices.append(dev)
if not free_blk_devices:
raise errors.Error(
''.join(('Machine type %s does not include' % self.machine_type,
' local disks. Please use a different disk_type,',
' or a machine_type that provides local disks.')))
elif len(free_blk_devices) < disk_spec.num_striped_disks:
raise errors.Error('Not enough local data disks. '
'Requesting %d disk(s) but only %d available.'
% (disk_spec.num_striped_disks, len(free_blk_devices)))
return free_blk_devices
def _GetBlockDevices(self):
"""Execute command on VM to gather all block devices in the VM.
Returns:
list of dicts block devices in the VM.
"""
stdout, _ = self.RemoteCommand(
'sudo lsblk -o NAME,MODEL,SIZE,TYPE,MOUNTPOINT,LABEL -n -b -P')
lines = stdout.splitlines()
groups = [LSBLK_PATTERN.match(line) for line in lines]
tuples = [g.groups() for g in groups if g]
colnames = ('name', 'model', 'size_bytes', 'type', 'mountpoint', 'label',)
blk_devices = [dict(list(zip(colnames, t))) for t in tuples]
for d in blk_devices:
d['model'] = d['model'].rstrip()
d['label'] = d['label'].rstrip()
d['size_bytes'] = int(d['size_bytes'])
return blk_devices
def _GetBootDevice(self):
"""Returns backing block device where '/' is mounted on.
Returns:
dict blk device data
Raises:
      errors.Error if no block device mounted at '/' could be found.
"""
blk_devices = self._GetBlockDevices()
boot_blk_device = None
for dev in blk_devices:
if dev['mountpoint'] == '/':
boot_blk_device = dev
break
if boot_blk_device is None: # Unlikely
raise errors.Error('Could not find disk with "/" root mount point.')
if boot_blk_device['type'] != 'part':
return boot_blk_device
return self._FindBootBlockDevice(blk_devices, boot_blk_device)
def _FindBootBlockDevice(self, blk_devices, boot_blk_device):
"""Helper method to search for backing block device of a partition."""
blk_device_name = boot_blk_device['name'].rstrip('0123456789')
for dev in blk_devices:
if dev['type'] == 'disk' and dev['name'] == blk_device_name:
boot_blk_device = dev
return boot_blk_device
def _IsDiskAvailable(self, blk_device):
"""Returns True if a block device is available.
    An available disk is one that has not previously been allocated as a
    data disk and is not being used as the boot disk.
"""
return (blk_device['type'] != 'part' and
blk_device['name'] != self.boot_device['name'] and
'config' not in blk_device['label'] and
blk_device['name'] not in self.allocated_disks)
class Rhel7BasedRackspaceVirtualMachine(RackspaceVirtualMachine,
linux_virtual_machine.Rhel7Mixin):
DEFAULT_IMAGE = '92f8a8b8-6019-4c27-949b-cf9910b84ffb'
class VersionlessRhelBasedRackspaceVirtualMachine(
linux_virtual_machine.VersionlessRhelMixin,
Rhel7BasedRackspaceVirtualMachine):
pass
|
import os
import os.path
import sys
import shutil
import inspect
import subprocess
import tempfile
import argparse
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
# We import qutebrowser.app so all @cmdutils.register decorators are run.
import qutebrowser.app
from qutebrowser import qutebrowser, commands
from qutebrowser.extensions import loader
from qutebrowser.commands import argparser
from qutebrowser.config import configdata, configtypes
from qutebrowser.utils import docutils, usertypes
from qutebrowser.misc import objects
from scripts import asciidoc2html, utils
FILE_HEADER = """
// DO NOT EDIT THIS FILE DIRECTLY!
// It is autogenerated by running:
// $ python3 scripts/dev/src2asciidoc.py
// vim: readonly:
""".lstrip()
class UsageFormatter(argparse.HelpFormatter):
"""Patched HelpFormatter to include some asciidoc markup in the usage.
This does some horrible things, but the alternative would be to reimplement
argparse.HelpFormatter while copying 99% of the code :-/
"""
def __init__(self, prog, indent_increment=2, max_help_position=24,
width=200):
"""Override __init__ to set a fixed width as default."""
super().__init__(prog, indent_increment, max_help_position, width)
def _format_usage(self, usage, actions, groups, _prefix):
"""Override _format_usage to not add the 'usage:' prefix."""
return super()._format_usage(usage, actions, groups, '')
def _get_default_metavar_for_optional(self, action):
"""Do name transforming when getting metavar."""
return argparser.arg_name(action.dest.upper())
def _get_default_metavar_for_positional(self, action):
"""Do name transforming when getting metavar."""
return argparser.arg_name(action.dest)
def _metavar_formatter(self, action, default_metavar):
"""Override _metavar_formatter to add asciidoc markup to metavars.
Most code here is copied from Python 3.4's argparse.py.
"""
if action.metavar is not None:
result = "'{}'".format(action.metavar)
elif action.choices is not None:
choice_strs = [str(choice) for choice in action.choices]
result = ('{' + ','.join('*{}*'.format(e) for e in choice_strs) +
'}')
else:
result = "'{}'".format(default_metavar)
def fmt(tuple_size):
"""Format the result according to the tuple size."""
if isinstance(result, tuple):
return result
else:
return (result, ) * tuple_size
return fmt
def _format_actions_usage(self, actions, groups):
"""Override _format_actions_usage to add asciidoc markup to flags.
Because argparse.py's _format_actions_usage is very complex, we first
monkey-patch the option strings to include the asciidoc markup, then
run the original method, then undo the patching.
"""
old_option_strings = {}
for action in actions:
old_option_strings[action] = action.option_strings[:]
action.option_strings = ['*{}*'.format(s)
for s in action.option_strings]
ret = super()._format_actions_usage(actions, groups)
for action in actions:
action.option_strings = old_option_strings[action]
return ret
def _open_file(name, mode='w'):
"""Open a file with a preset newline/encoding mode."""
return open(name, mode, newline='\n', encoding='utf-8')
def _get_cmd_syntax(_name, cmd):
"""Get the command syntax for a command.
We monkey-patch the parser's formatter_class here to use our UsageFormatter
which adds some asciidoc markup.
"""
old_fmt_class = cmd.parser.formatter_class
cmd.parser.formatter_class = UsageFormatter
usage = cmd.parser.format_usage().rstrip()
cmd.parser.formatter_class = old_fmt_class
return usage
def _get_command_quickref(cmds):
"""Generate the command quick reference."""
out = []
out.append('[options="header",width="75%",cols="25%,75%"]')
out.append('|==============')
out.append('|Command|Description')
for name, cmd in cmds:
desc = inspect.getdoc(cmd.handler).splitlines()[0]
out.append('|<<{name},{name}>>|{desc}'.format(name=name, desc=desc))
out.append('|==============')
return '\n'.join(out)
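# An illustrative sketch of the asciidoc this produces (the command name and
# description below are examples, not taken from the real command list):
#
#     [options="header",width="75%",cols="25%,75%"]
#     |==============
#     |Command|Description
#     |<<open,open>>|Open a URL in the current/[count]th tab.
#     |==============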
def _get_setting_quickref():
"""Generate the settings quick reference."""
out = []
out.append('')
out.append('[options="header",width="75%",cols="25%,75%"]')
out.append('|==============')
out.append('|Setting|Description')
for opt in sorted(configdata.DATA.values()):
desc = opt.description.splitlines()[0]
out.append('|<<{},{}>>|{}'.format(opt.name, opt.name, desc))
out.append('|==============')
return '\n'.join(out)
def _get_configtypes():
"""Get configtypes classes to document."""
predicate = lambda e: (
inspect.isclass(e) and
# pylint: disable=protected-access
e not in [configtypes.BaseType, configtypes.MappingType,
configtypes._Numeric, configtypes.FontBase] and
# pylint: enable=protected-access
issubclass(e, configtypes.BaseType))
yield from inspect.getmembers(configtypes, predicate)
def _get_setting_types_quickref():
"""Generate the setting types quick reference."""
out = []
out.append('[[types]]')
out.append('[options="header",width="75%",cols="25%,75%"]')
out.append('|==============')
out.append('|Type|Description')
for name, typ in _get_configtypes():
parser = docutils.DocstringParser(typ)
desc = parser.short_desc
if parser.long_desc:
desc += '\n\n' + parser.long_desc
out.append('|{}|{}'.format(name, desc))
out.append('|==============')
return '\n'.join(out)
def _get_command_doc(name, cmd):
"""Generate the documentation for a command."""
output = ['[[{}]]'.format(name)]
output += ['=== {}'.format(name)]
syntax = _get_cmd_syntax(name, cmd)
if syntax != name:
output.append('Syntax: +:{}+'.format(syntax))
output.append("")
parser = docutils.DocstringParser(cmd.handler)
output.append(parser.short_desc)
if parser.long_desc:
output.append("")
output.append(parser.long_desc)
output += list(_get_command_doc_args(cmd, parser))
output += list(_get_command_doc_count(cmd, parser))
output += list(_get_command_doc_notes(cmd))
output.append("")
output.append("")
return '\n'.join(output)
def _get_command_doc_args(cmd, parser):
"""Get docs for the arguments of a command.
Args:
cmd: The Command to get the docs for.
parser: The DocstringParser to use.
Yield:
Strings which should be added to the docs.
"""
if cmd.pos_args:
yield ""
yield "==== positional arguments"
for arg, name in cmd.pos_args:
try:
yield "* +'{}'+: {}".format(name, parser.arg_descs[arg])
except KeyError as e:
raise KeyError("No description for arg {} of command "
"'{}'!".format(e, cmd.name)) from e
if cmd.opt_args:
yield ""
yield "==== optional arguments"
for arg, (long_flag, short_flag) in cmd.opt_args.items():
try:
yield '* +*{}*+, +*{}*+: {}'.format(short_flag, long_flag,
parser.arg_descs[arg])
except KeyError as e:
raise KeyError("No description for arg {} of command "
"'{}'!".format(e, cmd.name)) from e
def _get_command_doc_count(cmd, parser):
"""Get docs for the count of a command.
Args:
cmd: The Command to get the docs for.
parser: The DocstringParser to use.
Yield:
Strings which should be added to the docs.
"""
for param in inspect.signature(cmd.handler).parameters.values():
if cmd.get_arg_info(param).value in cmd.COUNT_COMMAND_VALUES:
yield ""
yield "==== count"
try:
yield parser.arg_descs[param.name]
except KeyError:
try:
yield parser.arg_descs['count']
except KeyError as e:
raise KeyError("No description for count arg {!r} of "
"command {!r}!"
.format(param.name, cmd.name)) from e
def _get_command_doc_notes(cmd):
"""Get docs for the notes of a command.
Args:
cmd: The Command to get the docs for.
Yield:
Strings which should be added to the docs.
"""
if (cmd.maxsplit is not None or cmd.no_cmd_split or
cmd.no_replace_variables and cmd.name != "spawn"):
yield ""
yield "==== note"
if cmd.maxsplit is not None:
yield ("* This command does not split arguments after the last "
"argument and handles quotes literally.")
if cmd.no_cmd_split:
yield ("* With this command, +;;+ is interpreted literally "
"instead of splitting off a second command.")
if cmd.no_replace_variables and cmd.name != "spawn":
yield r"* This command does not replace variables like +\{url\}+."
def _get_action_metavar(action, nargs=1):
"""Get the metavar to display for an argparse action.
Args:
action: The argparse action to get the metavar for.
nargs: The nargs setting for the related argument.
"""
if action.metavar is not None:
if isinstance(action.metavar, str):
elems = [action.metavar] * nargs
else:
elems = action.metavar
return ' '.join("'{}'".format(e) for e in elems)
elif action.choices is not None:
choices = ','.join(str(e) for e in action.choices)
return "'{{{}}}'".format(choices)
else:
return "'{}'".format(action.dest.upper())
def _format_action_args(action):
"""Get an argument string based on an argparse action."""
if action.nargs is None:
return _get_action_metavar(action)
elif action.nargs == '?':
return '[{}]'.format(_get_action_metavar(action))
elif action.nargs == '*':
return '[{mv} [{mv} ...]]'.format(mv=_get_action_metavar(action))
elif action.nargs == '+':
return '{mv} [{mv} ...]'.format(mv=_get_action_metavar(action))
elif action.nargs == '...':
return '...'
else:
return _get_action_metavar(action, nargs=action.nargs)
def _format_action(action):
"""Get an invocation string/help from an argparse action."""
if action.help == argparse.SUPPRESS:
return None
if not action.option_strings:
invocation = '*{}*::'.format(_get_action_metavar(action))
else:
parts = []
if action.nargs == 0:
# Doesn't take a value, so the syntax is -s, --long
parts += ['*{}*'.format(s) for s in action.option_strings]
else:
# Takes a value, so the syntax is -s ARGS or --long ARGS.
args_string = _format_action_args(action)
for opt in action.option_strings:
parts.append('*{}* {}'.format(opt, args_string))
invocation = ', '.join(parts) + '::'
return '{}\n {}\n'.format(invocation, action.help)
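# Illustrative renderings (flag names are examples only): an option that takes
# a value becomes roughly
#     *-l* 'LOGLEVEL', *--loglevel* 'LOGLEVEL'::
#         followed by its help text on the indented line,
# while a flag with nargs == 0 (e.g. a store_true action) renders just its bare
# option strings, such as *--version*::, before the help text.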
def generate_commands(filename):
"""Generate the complete commands section."""
with _open_file(filename) as f:
f.write(FILE_HEADER)
f.write("= Commands\n\n")
f.write(commands.__doc__)
normal_cmds = []
other_cmds = []
debug_cmds = []
for name, cmd in objects.commands.items():
if cmd.deprecated:
continue
if usertypes.KeyMode.normal not in cmd.modes:
other_cmds.append((name, cmd))
elif cmd.debug:
debug_cmds.append((name, cmd))
else:
normal_cmds.append((name, cmd))
normal_cmds.sort()
other_cmds.sort()
debug_cmds.sort()
f.write("\n")
f.write("== Normal commands\n")
f.write(".Quick reference\n")
f.write(_get_command_quickref(normal_cmds) + '\n')
for name, cmd in normal_cmds:
f.write(_get_command_doc(name, cmd))
f.write("\n")
f.write("== Commands not usable in normal mode\n")
f.write(".Quick reference\n")
f.write(_get_command_quickref(other_cmds) + '\n')
for name, cmd in other_cmds:
f.write(_get_command_doc(name, cmd))
f.write("\n")
f.write("== Debugging commands\n")
f.write("These commands are mainly intended for debugging. They are "
"hidden if qutebrowser was started without the "
"`--debug`-flag.\n")
f.write("\n")
f.write(".Quick reference\n")
f.write(_get_command_quickref(debug_cmds) + '\n')
for name, cmd in debug_cmds:
f.write(_get_command_doc(name, cmd))
def _generate_setting_backend_info(f, opt):
"""Generate backend information for the given option."""
all_backends = [usertypes.Backend.QtWebKit, usertypes.Backend.QtWebEngine]
if opt.raw_backends is not None:
for name, conditional in sorted(opt.raw_backends.items()):
if conditional is True:
pass
elif conditional is False:
f.write("\nOn {}, this setting is unavailable.\n".format(name))
else:
f.write("\nOn {}, this setting requires {} or newer.\n"
.format(name, conditional))
elif opt.backends == all_backends:
pass
elif opt.backends == [usertypes.Backend.QtWebKit]:
f.write("\nThis setting is only available with the QtWebKit "
"backend.\n")
elif opt.backends == [usertypes.Backend.QtWebEngine]:
f.write("\nThis setting is only available with the QtWebEngine "
"backend.\n")
else:
raise ValueError("Invalid value {!r} for opt.backends"
.format(opt.backends))
def _generate_setting_option(f, opt):
"""Generate documentation for a single section."""
f.write("\n")
f.write('[[{}]]'.format(opt.name) + "\n")
f.write("=== {}".format(opt.name) + "\n")
f.write(opt.description + "\n")
if opt.restart:
f.write("\nThis setting requires a restart.\n")
if opt.supports_pattern:
f.write("\nThis setting supports URL patterns.\n")
if opt.no_autoconfig:
f.write("\nThis setting can only be set in config.py.\n")
f.write("\n")
    typ = opt.typ.get_name().replace(',', '&#44;')
f.write('Type: <<types,{typ}>>\n'.format(typ=typ))
f.write("\n")
valid_values = opt.typ.get_valid_values()
if valid_values is not None and valid_values.generate_docs:
f.write("Valid values:\n")
f.write("\n")
for val in valid_values:
try:
desc = valid_values.descriptions[val]
f.write(" * +{}+: {}".format(val, desc) + "\n")
except KeyError:
f.write(" * +{}+".format(val) + "\n")
f.write("\n")
f.write("Default: {}\n".format(opt.typ.to_doc(opt.default)))
_generate_setting_backend_info(f, opt)
def generate_settings(filename):
"""Generate the complete settings section."""
configdata.init()
with _open_file(filename) as f:
f.write(FILE_HEADER)
f.write("= Setting reference\n\n")
f.write("== All settings\n")
f.write(_get_setting_quickref() + "\n")
for opt in sorted(configdata.DATA.values()):
_generate_setting_option(f, opt)
f.write("\n== Setting types\n")
f.write(_get_setting_types_quickref() + "\n")
def _format_block(filename, what, data):
"""Format a block in a file.
The block is delimited by markers like these:
// QUTE_*_START
...
// QUTE_*_END
The * part is the part which should be given as 'what'.
Args:
filename: The file to change.
what: What to change (authors, options, etc.)
        data: A list of strings which is the new data.
"""
what = what.upper()
oshandle, tmpname = tempfile.mkstemp()
try:
with _open_file(filename, mode='r') as infile, \
_open_file(oshandle, mode='w') as temp:
found_start = False
found_end = False
for line in infile:
if line.strip() == '// QUTE_{}_START'.format(what):
temp.write(line)
temp.write(''.join(data))
found_start = True
elif line.strip() == '// QUTE_{}_END'.format(what.upper()):
temp.write(line)
found_end = True
elif (not found_start) or found_end:
temp.write(line)
if not found_start:
raise Exception("Marker '// QUTE_{}_START' not found in "
"'{}'!".format(what, filename))
if not found_end:
raise Exception("Marker '// QUTE_{}_END' not found in "
"'{}'!".format(what, filename))
except:
os.remove(tmpname)
raise
else:
os.remove(filename)
shutil.move(tmpname, filename)
def regenerate_manpage(filename):
"""Update manpage OPTIONS using an argparse parser."""
parser = qutebrowser.get_argparser()
groups = []
# positionals, optionals and user-defined groups
# pylint: disable=protected-access
for group in parser._action_groups:
groupdata = []
groupdata.append('=== {}'.format(group.title))
if group.description is not None:
groupdata.append(group.description)
for action in group._group_actions:
action_data = _format_action(action)
if action_data is not None:
groupdata.append(action_data)
groups.append('\n'.join(groupdata))
# pylint: enable=protected-access
options = '\n'.join(groups)
# epilog
if parser.epilog is not None:
options += parser.epilog
_format_block(filename, 'options', options)
def regenerate_cheatsheet():
"""Generate cheatsheet PNGs based on the SVG."""
files = [
('doc/img/cheatsheet-small.png', 300, 185),
('doc/img/cheatsheet-big.png', 3342, 2060),
]
for filename, x, y in files:
subprocess.run(['inkscape', '-e', filename, '-b', 'white',
'-w', str(x), '-h', str(y),
'misc/cheatsheet.svg'], check=True)
subprocess.run(['optipng', filename], check=True)
def main():
"""Regenerate all documentation."""
utils.change_cwd()
loader.load_components(skip_hooks=True)
print("Generating manpage...")
regenerate_manpage('doc/qutebrowser.1.asciidoc')
print("Generating settings help...")
generate_settings('doc/help/settings.asciidoc')
print("Generating command help...")
generate_commands('doc/help/commands.asciidoc')
if '--cheatsheet' in sys.argv:
print("Regenerating cheatsheet .pngs")
regenerate_cheatsheet()
if '--html' in sys.argv:
asciidoc2html.main()
if __name__ == '__main__':
main()
|
from lemur.plugins.base import Plugin, plugins
class DestinationPlugin(Plugin):
type = "destination"
requires_key = True
sync_as_source = False
sync_as_source_name = ""
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
raise NotImplementedError
class ExportDestinationPlugin(DestinationPlugin):
default_options = [
{
"name": "exportPlugin",
"type": "export-plugin",
"required": True,
"helpMessage": "Export plugin to use before sending data to destination.",
}
]
@property
def options(self):
return self.default_options + self.additional_options
def export(self, body, private_key, cert_chain, options):
export_plugin = self.get_option("exportPlugin", options)
if export_plugin:
plugin = plugins.get(export_plugin["slug"])
extension, passphrase, data = plugin.export(
body, cert_chain, private_key, export_plugin["plugin_options"]
)
return [(extension, passphrase, data)]
data = body + "\n" + cert_chain + "\n" + private_key
return [(".pem", "", data)]
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
raise NotImplementedError
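# Illustrative sketch (not part of Lemur): a minimal concrete plugin built on
# ExportDestinationPlugin. The plugin metadata, the "directory" option, and
# writing to local files are assumptions made for this example; a real plugin
# would persist the exported material to its own backing store.
class ExampleFileDestinationPlugin(ExportDestinationPlugin):
    title = "Example file destination"
    slug = "example-file-destination"
    description = "Writes exported certificate material to a local directory."
    additional_options = [
        {
            "name": "directory",
            "type": "str",
            "required": True,
            "helpMessage": "Directory to write the exported files into.",
        }
    ]
    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        directory = self.get_option("directory", options)
        for extension, passphrase, data in self.export(
            body, private_key, cert_chain, options
        ):
            # A real plugin would also record the passphrase somewhere safe.
            if not isinstance(data, bytes):
                data = data.encode("utf-8")
            with open("{}/{}{}".format(directory, name, extension), "wb") as f:
                f.write(data)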
|
from marshmallow import fields
from lemur.common.schema import LemurOutputSchema
from lemur.authorities.schemas import AuthorityNestedOutputSchema
class DefaultOutputSchema(LemurOutputSchema):
authority = fields.Nested(AuthorityNestedOutputSchema)
country = fields.String()
state = fields.String()
location = fields.String()
organization = fields.String()
organizational_unit = fields.String()
issuer_plugin = fields.String()
default_output_schema = DefaultOutputSchema()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from collections import deque
import ctypes
import functools
import logging
import os
import signal
import threading
import time
import traceback
from concurrent import futures
from perfkitbenchmarker import context
from perfkitbenchmarker import errors
from absl import flags
from perfkitbenchmarker import log_util
import six
from six.moves import queue
from six.moves import range
from six.moves import zip
# For situations where an interruptable wait is necessary, a loop of waits with
# long timeouts is used instead. This is because some of Python's built-in wait
# methods are non-interruptable without a timeout.
_LONG_TIMEOUT = 1000.
# Constants used for polling waits. See _WaitForCondition.
_WAIT_MIN_RECHECK_DELAY = 0.001 # 1 ms
_WAIT_MAX_RECHECK_DELAY = 0.050 # 50 ms
# Values sent to child threads that have special meanings.
_THREAD_STOP_PROCESSING = 0
_THREAD_WAIT_FOR_KEYBOARD_INTERRUPT = 1
# The default value for max_concurrent_threads.
MAX_CONCURRENT_THREADS = 200
# The default value is set in pkb.py. It is the greater of
# MAX_CONCURRENT_THREADS or the value passed to --num_vms. This is particularly
# important for the cluster_boot benchmark where we want to launch all of the
# VMs in parallel.
flags.DEFINE_integer(
'max_concurrent_threads', None, 'Maximum number of concurrent threads to '
'use when running a benchmark.')
FLAGS = flags.FLAGS
def _GetCallString(target_arg_tuple):
"""Returns the string representation of a function call."""
target, args, kwargs = target_arg_tuple
while isinstance(target, functools.partial):
args = target.args + args
inner_kwargs = target.keywords.copy()
inner_kwargs.update(kwargs)
kwargs = inner_kwargs
target = target.func
arg_strings = [str(a) for a in args]
arg_strings.extend(['{0}={1}'.format(k, v) for k, v in six.iteritems(kwargs)])
return '{0}({1})'.format(getattr(target, '__name__', target),
', '.join(arg_strings))
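# Illustrative example (not part of the original module): shows how
# _GetCallString unwraps functools.partial targets when building the
# human-readable call string used in log messages. The arguments are made up.
def _ExampleGetCallString():
  task = (functools.partial(print, 'a', sep='-'), ('b',), {})
  assert _GetCallString(task) == 'print(a, b, sep=-)'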
def _WaitForCondition(condition_callback, timeout=None):
"""Waits until the specified callback returns a value that evaluates True.
Similar to the threading.Condition.wait method that is the basis of most
threading class wait routines. Polls the condition, starting with frequent
checks but extending the delay between checks upon each failure.
Args:
condition_callback: Callable that returns a value that evaluates True to end
the wait or evaluates False to continue the wait.
timeout: Optional float. Number of seconds to wait before giving up. If
provided, the condition is still checked at least once before giving up.
If not provided, the wait does not time out.
Returns:
True if condition_callback returned a value that evaluated True. False if
condition_callback did not return a value that evaluated True before the
timeout.
"""
deadline = None if timeout is None else time.time() + timeout
delay = _WAIT_MIN_RECHECK_DELAY
while True:
if condition_callback():
return True
remaining_time = (_WAIT_MAX_RECHECK_DELAY if deadline is None
else deadline - time.time())
if remaining_time <= 0:
return False
time.sleep(delay)
delay = min(delay * 2, remaining_time, _WAIT_MAX_RECHECK_DELAY)
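# Illustrative example (not part of the original module): _WaitForCondition
# polls the callback with an exponentially growing delay (capped at
# _WAIT_MAX_RECHECK_DELAY) until it returns a truthy value or the timeout
# expires. The timings below are made up for the example.
def _ExampleWaitForCondition():
  start = time.time()
  # The condition becomes true after ~25 ms, well before the 1 s timeout.
  assert _WaitForCondition(lambda: time.time() - start > 0.025, timeout=1)
  # A condition that never becomes true returns False once the timeout passes.
  assert not _WaitForCondition(lambda: False, timeout=0.01)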
class _SingleReaderQueue(object):
"""Queue to which multiple threads write but from which only one thread reads.
A lightweight substitute for the Queue.Queue class that does not use
internal Locks.
Gets are interruptable but depend on polling.
"""
def __init__(self):
self._deque = deque()
def Get(self, timeout=None):
if not _WaitForCondition(lambda: self._deque, timeout):
raise queue.Empty
return self._deque.popleft()
def Put(self, item):
self._deque.append(item)
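# Illustrative example (not part of the original module): _SingleReaderQueue
# polls via _WaitForCondition, so Get raises queue.Empty when the timeout
# expires without a matching Put.
def _ExampleSingleReaderQueue():
  q = _SingleReaderQueue()
  q.Put('item')
  assert q.Get(timeout=1) == 'item'
  try:
    q.Get(timeout=0.01)
  except queue.Empty:
    pass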
class _NonPollingSingleReaderQueue(object):
"""Queue to which multiple threads write but from which only one thread reads.
Uses a threading.Lock to implement a non-interruptable Get that does not poll
and is therefore easier on CPU usage. The reader waits for items by acquiring
the Lock, and writers release the Lock to signal that items have been written.
"""
def __init__(self):
self._deque = deque()
self._lock = threading.Lock()
self._lock.acquire()
def _WaitForItem(self):
self._lock.acquire()
def _SignalAvailableItem(self):
try:
self._lock.release()
except threading.ThreadError:
pass
def Get(self):
while True:
self._WaitForItem()
if self._deque:
item = self._deque.popleft()
if self._deque:
self._SignalAvailableItem()
return item
def Put(self, item):
self._deque.append(item)
self._SignalAvailableItem()
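# Illustrative example (not part of the original module): shows the Lock-based
# signalling used by _NonPollingSingleReaderQueue. The reader blocks on
# acquiring the Lock and each Put releases it, so no polling is needed.
def _ExampleNonPollingSingleReaderQueue():
  q = _NonPollingSingleReaderQueue()
  writer = threading.Thread(target=q.Put, args=('hello',))
  writer.start()
  assert q.Get() == 'hello'  # Blocks until the writer's Put releases the Lock.
  writer.join()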
class _BackgroundTaskThreadContext(object):
"""Thread-specific information that can be inherited by a background task.
Attributes:
benchmark_spec: BenchmarkSpec of the benchmark currently being executed.
log_context: ThreadLogContext of the parent thread.
"""
def __init__(self):
self.benchmark_spec = context.GetThreadBenchmarkSpec()
self.log_context = log_util.GetThreadLogContext()
def CopyToCurrentThread(self):
"""Sets the thread context of the current thread."""
log_util.SetThreadLogContext(log_util.ThreadLogContext(self.log_context))
context.SetThreadBenchmarkSpec(self.benchmark_spec)
class _BackgroundTask(object):
"""Base class for a task executed in a child thread or process.
Attributes:
target: Function that is invoked in the child thread or process.
args: Series of unnamed arguments to be passed to the target.
kwargs: dict. Keyword arguments to be passed to the target.
context: _BackgroundTaskThreadContext. Thread-specific state to be inherited
from parent to child thread.
return_value: Return value if the call was executed successfully, or None
otherwise.
traceback: The traceback string if the call raised an exception, or None
otherwise.
"""
def __init__(self, target, args, kwargs, thread_context):
self.target = target
self.args = args
self.kwargs = kwargs
self.context = thread_context
self.return_value = None
self.traceback = None
def Run(self):
"""Sets the current thread context and executes the target."""
self.context.CopyToCurrentThread()
try:
self.return_value = self.target(*self.args, **self.kwargs)
except Exception:
self.traceback = traceback.format_exc()
class _BackgroundTaskManager(six.with_metaclass(abc.ABCMeta, object)):
"""Base class for a context manager that manages state for background tasks.
Attributes:
tasks: list of _BackgroundTask instances. Contains one _BackgroundTask per
started task, in the order that they were started.
"""
def __init__(self, max_concurrency):
self._max_concurrency = max_concurrency
self.tasks = []
def __enter__(self):
return self
def __exit__(self, *unused_args, **unused_kwargs):
pass
@abc.abstractmethod
def StartTask(self, target, args, kwargs, thread_context):
"""Creates and starts a _BackgroundTask.
The created task is appended to self.tasks.
Args:
target: Function that is invoked in the child thread or process.
args: Series of unnamed arguments to be passed to the target.
kwargs: dict. Keyword arguments to be passed to the target.
thread_context: _BackgroundTaskThreadContext. Thread-specific state to be
inherited from parent to child thread.
"""
raise NotImplementedError()
@abc.abstractmethod
def AwaitAnyTask(self):
"""Waits for any of the started tasks to complete.
Returns:
int. Index of the task that completed in self.tasks.
"""
raise NotImplementedError()
@abc.abstractmethod
def HandleKeyboardInterrupt(self):
"""Called by the parent thread if a KeyboardInterrupt occurs.
Ensures that any child thread also receives a KeyboardInterrupt, and then
waits for each child thread to stop executing.
"""
raise NotImplementedError()
def _ExecuteBackgroundThreadTasks(worker_id, task_queue, response_queue):
"""Executes tasks received on a task queue.
Executed in a child Thread by _BackgroundThreadTaskManager.
Args:
worker_id: int. Identifier for the child thread relative to other child
threads.
task_queue: _NonPollingSingleReaderQueue. Queue from which input is read.
Each value in the queue can be one of three types of values. If it is a
(task_id, _BackgroundTask) pair, the task is executed on this thread.
If it is _THREAD_STOP_PROCESSING, the thread stops executing. If it is
_THREAD_WAIT_FOR_KEYBOARD_INTERRUPT, the thread waits for a
KeyboardInterrupt.
response_queue: _SingleReaderQueue. Queue to which output is written. It
receives worker_id when this thread's bootstrap code has completed and
receives a (worker_id, task_id) pair for each task completed on this
thread.
"""
try:
response_queue.Put(worker_id)
while True:
task_tuple = task_queue.Get()
if task_tuple == _THREAD_STOP_PROCESSING:
break
elif task_tuple == _THREAD_WAIT_FOR_KEYBOARD_INTERRUPT:
while True:
time.sleep(_WAIT_MAX_RECHECK_DELAY)
task_id, task = task_tuple
task.Run()
response_queue.Put((worker_id, task_id))
except KeyboardInterrupt:
# TODO(skschneider): Detect when the log would be unhelpful (e.g. if the
# current thread was spinning in the _THREAD_WAIT_FOR_KEYBOARD_INTERRUPT
# sub-loop). Only log in helpful cases, like when the task is interrupted.
logging.debug('Child thread %s received a KeyboardInterrupt from its '
'parent.', worker_id, exc_info=True)
class _BackgroundThreadTaskManager(_BackgroundTaskManager):
"""Manages state for background tasks started in child threads."""
def __init__(self, *args, **kwargs):
super(_BackgroundThreadTaskManager, self).__init__(*args, **kwargs)
self._response_queue = _SingleReaderQueue()
self._task_queues = []
self._threads = []
self._available_worker_ids = list(range(self._max_concurrency))
uninitialized_worker_ids = set(self._available_worker_ids)
for worker_id in self._available_worker_ids:
task_queue = _NonPollingSingleReaderQueue()
self._task_queues.append(task_queue)
thread = threading.Thread(
target=_ExecuteBackgroundThreadTasks,
args=(worker_id, task_queue, self._response_queue))
thread.daemon = True
self._threads.append(thread)
thread.start()
# Wait for each Thread to finish its bootstrap code. Starting all the
# threads upfront like this and reusing them for later calls minimizes the
# risk of a KeyboardInterrupt interfering with any of the Lock interactions.
for _ in self._threads:
worker_id = self._response_queue.Get()
uninitialized_worker_ids.remove(worker_id)
assert not uninitialized_worker_ids, uninitialized_worker_ids
def __exit__(self, *unused_args, **unused_kwargs):
# Shut down worker threads.
for task_queue in self._task_queues:
task_queue.Put(_THREAD_STOP_PROCESSING)
for thread in self._threads:
_WaitForCondition(lambda: not thread.is_alive())
def StartTask(self, target, args, kwargs, thread_context):
assert self._available_worker_ids, ('StartTask called when no threads were '
'available')
task = _BackgroundTask(target, args, kwargs, thread_context)
task_id = len(self.tasks)
self.tasks.append(task)
worker_id = self._available_worker_ids.pop()
self._task_queues[worker_id].Put((task_id, task))
def AwaitAnyTask(self):
worker_id, task_id = self._response_queue.Get()
self._available_worker_ids.append(worker_id)
return task_id
def HandleKeyboardInterrupt(self):
# Raise a KeyboardInterrupt in each child thread.
for thread in self._threads:
ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(thread.ident), ctypes.py_object(KeyboardInterrupt))
# Wake threads up from possible non-interruptable wait states so they can
# actually see the KeyboardInterrupt.
for task_queue, thread in zip(self._task_queues, self._threads):
task_queue.Put(_THREAD_WAIT_FOR_KEYBOARD_INTERRUPT)
for thread in self._threads:
_WaitForCondition(lambda: not thread.is_alive())
def _ExecuteProcessTask(task):
"""Function invoked in another process by _BackgroundProcessTaskManager.
Executes a specified task function and returns the result or exception
traceback.
TODO(skschneider): Rework this helper function when moving to Python 3.5 or
when the backport of concurrent.futures.ProcessPoolExecutor is able to
preserve original traceback.
Args:
task: _BackgroundTask to execute.
Returns:
(result, traceback) tuple. The first element is the return value from the
task function, or None if the function raised an exception. The second
element is the exception traceback string, or None if the function
succeeded.
"""
def handle_sigint(signum, frame):
# Ignore any new SIGINTs since we are already tearing down.
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Execute the default SIGINT handler which throws a KeyboardInterrupt
# in the main thread of the process.
signal.default_int_handler(signum, frame)
signal.signal(signal.SIGINT, handle_sigint)
task.Run()
return task.return_value, task.traceback
class _BackgroundProcessTaskManager(_BackgroundTaskManager):
"""Manages states for background tasks started in child processes.
TODO(skschneider): This class uses futures.ProcessPoolExecutor. We have been
using this executor since before issues regarding KeyboardInterrupt were
fully explored. The only consumer of this class is RunParallelProcesses, and
currently the uses for RunParallelProcesses are limited. In the future, this
class should also be redesigned for protection against KeyboardInterrupt.
"""
def __init__(self, *args, **kwargs):
super(_BackgroundProcessTaskManager, self).__init__(*args, **kwargs)
self._active_futures = {}
self._executor = futures.ProcessPoolExecutor(self._max_concurrency)
def __enter__(self):
self._executor.__enter__()
return self
def __exit__(self, *args, **kwargs):
# Note: This invokes a non-interruptable wait.
return self._executor.__exit__(*args, **kwargs)
def StartTask(self, target, args, kwargs, thread_context):
task = _BackgroundTask(target, args, kwargs, thread_context)
task_id = len(self.tasks)
self.tasks.append(task)
future = self._executor.submit(_ExecuteProcessTask, task)
self._active_futures[future] = task_id
def AwaitAnyTask(self):
completed_tasks = None
while not completed_tasks:
completed_tasks, _ = futures.wait(
self._active_futures, timeout=_LONG_TIMEOUT,
return_when=futures.FIRST_COMPLETED)
future = completed_tasks.pop()
task_id = self._active_futures.pop(future)
task = self.tasks[task_id]
task.return_value, task.traceback = future.result()
return task_id
def HandleKeyboardInterrupt(self):
# If this thread received an interrupt signal, then processes started with
# a ProcessPoolExecutor will also have received an interrupt without any
# extra work needed from this class. Only need to wait for child processes.
# Note: This invokes a non-interruptable wait.
self._executor.shutdown(wait=True)
def _RunParallelTasks(target_arg_tuples, max_concurrency, get_task_manager,
parallel_exception_class, post_task_delay=0):
"""Executes function calls concurrently in separate threads or processes.
Args:
target_arg_tuples: list of (target, args, kwargs) tuples. Each tuple
contains the function to call and the arguments to pass it.
max_concurrency: int or None. The maximum number of concurrent new
threads or processes.
    get_task_manager: Callable that accepts an int max_concurrency arg and
      returns a _BackgroundTaskManager.
parallel_exception_class: Type of exception to raise upon an exception in
one of the called functions.
post_task_delay: Delay in seconds between parallel task invocations.
Returns:
list of function return values in the order corresponding to the order of
target_arg_tuples.
Raises:
parallel_exception_class: When an exception occurred in any of the called
functions.
"""
thread_context = _BackgroundTaskThreadContext()
max_concurrency = min(max_concurrency, len(target_arg_tuples))
error_strings = []
started_task_count = 0
active_task_count = 0
with get_task_manager(max_concurrency) as task_manager:
try:
while started_task_count < len(target_arg_tuples) or active_task_count:
if (started_task_count < len(target_arg_tuples) and
active_task_count < max_concurrency):
# Start a new task.
target, args, kwargs = target_arg_tuples[started_task_count]
task_manager.StartTask(target, args, kwargs, thread_context)
started_task_count += 1
active_task_count += 1
if post_task_delay:
time.sleep(post_task_delay)
continue
# Wait for a task to complete.
task_id = task_manager.AwaitAnyTask()
active_task_count -= 1
# If the task failed, it may still be a long time until all remaining
# tasks complete. Log the failure immediately before continuing to wait
# for other tasks.
stacktrace = task_manager.tasks[task_id].traceback
if stacktrace:
msg = ('Exception occurred while calling {0}:{1}{2}'.format(
_GetCallString(target_arg_tuples[task_id]), os.linesep,
stacktrace))
logging.error(msg)
error_strings.append(msg)
except KeyboardInterrupt:
logging.error(
'Received KeyboardInterrupt while executing parallel tasks. Waiting '
'for %s tasks to clean up.', active_task_count)
task_manager.HandleKeyboardInterrupt()
raise
if error_strings:
# TODO(skschneider): Combine errors.VmUtil.ThreadException and
# errors.VmUtil.CalledProcessException so this can be a single exception
# type.
raise parallel_exception_class(
'The following exceptions occurred during parallel execution:'
'{0}{1}'.format(os.linesep, os.linesep.join(error_strings)))
results = [task.return_value for task in task_manager.tasks]
assert len(target_arg_tuples) == len(results), (target_arg_tuples, results)
return results
def RunParallelThreads(target_arg_tuples, max_concurrency, post_task_delay=0):
"""Executes function calls concurrently in separate threads.
Args:
target_arg_tuples: list of (target, args, kwargs) tuples. Each tuple
contains the function to call and the arguments to pass it.
max_concurrency: int or None. The maximum number of concurrent new
threads.
post_task_delay: Delay in seconds between parallel task invocations.
Returns:
list of function return values in the order corresponding to the order of
target_arg_tuples.
Raises:
errors.VmUtil.ThreadException: When an exception occurred in any of the
called functions.
"""
return _RunParallelTasks(
target_arg_tuples, max_concurrency, _BackgroundThreadTaskManager,
errors.VmUtil.ThreadException, post_task_delay)
def RunThreaded(target,
thread_params,
max_concurrent_threads=None,
post_task_delay=0):
"""Runs the target method in parallel threads.
  A thread is started for each value in thread_params, and that value is
  passed to the target as its first positional argument.
Args:
target: The method to invoke in the thread.
thread_params: A thread is launched for each value in the list. The items
in the list can either be a singleton or a (args, kwargs) tuple/list.
Usually this is a list of VMs.
max_concurrent_threads: The maximum number of concurrent threads to allow.
post_task_delay: Delay in seconds between commands.
Returns:
List of the same length as thread_params. Contains the return value from
each threaded function call in the corresponding order as thread_params.
Raises:
ValueError: when thread_params is not valid.
errors.VmUtil.ThreadException: When an exception occurred in any of the
called functions.
Example 1: # no args other than list.
args = [self.CreateVm()
for x in range(0, 10)]
RunThreaded(MyThreadedTargetMethod, args)
Example 2: # using args only to pass to the thread:
args = [((self.CreateVm(), i, 'somestring'), {})
for i in range(0, 10)]
RunThreaded(MyThreadedTargetMethod, args)
Example 3: # using args & kwargs to pass to the thread:
args = [((self.CreateVm(),), {'num': i, 'name': 'somestring'})
for i in range(0, 10)]
RunThreaded(MyThreadedTargetMethod, args)
"""
if max_concurrent_threads is None:
max_concurrent_threads = (
FLAGS.max_concurrent_threads or MAX_CONCURRENT_THREADS)
if not isinstance(thread_params, list):
raise ValueError('Param "thread_params" must be a list')
if not thread_params:
# Nothing to do.
return []
if not isinstance(thread_params[0], tuple):
target_arg_tuples = [(target, (arg,), {}) for arg in thread_params]
elif (not isinstance(thread_params[0][0], tuple) or
not isinstance(thread_params[0][1], dict)):
raise ValueError('If Param is a tuple, the tuple must be (tuple, dict)')
else:
target_arg_tuples = [(target, args, kwargs)
for args, kwargs in thread_params]
return RunParallelThreads(target_arg_tuples,
max_concurrency=max_concurrent_threads,
post_task_delay=post_task_delay)
def RunParallelProcesses(target_arg_tuples, max_concurrency,
post_process_delay=0):
"""Executes function calls concurrently in separate processes.
Args:
target_arg_tuples: list of (target, args, kwargs) tuples. Each tuple
contains the function to call and the arguments to pass it.
max_concurrency: int or None. The maximum number of concurrent new
processes. If None, it will default to the number of processors on the
machine.
post_process_delay: Delay in seconds between parallel process invocations.
Returns:
list of function return values in the order corresponding to the order of
target_arg_tuples.
Raises:
errors.VmUtil.CalledProcessException: When an exception occurred in any
of the called functions.
"""
def handle_sigint(signum, frame):
    # Ignore any SIGINTs in the parent process, but let users know
# that the child processes are getting cleaned up.
logging.error('Got SIGINT while executing parallel tasks. '
'Waiting for tasks to clean up.')
old_handler = None
try:
old_handler = signal.signal(signal.SIGINT, handle_sigint)
ret_val = _RunParallelTasks(
target_arg_tuples, max_concurrency, _BackgroundProcessTaskManager,
errors.VmUtil.CalledProcessException,
post_task_delay=post_process_delay)
finally:
if old_handler:
signal.signal(signal.SIGINT, old_handler)
return ret_val
|
import asyncio
import logging
import async_timeout
from roomba import Roomba, RoombaConnectionError
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from .const import (
BLID,
COMPONENTS,
CONF_BLID,
CONF_CERT,
CONF_CONTINUOUS,
CONF_DELAY,
CONF_NAME,
DEFAULT_CERT,
DEFAULT_CONTINUOUS,
DEFAULT_DELAY,
DOMAIN,
ROOMBA_SESSION,
)
_LOGGER = logging.getLogger(__name__)
def _has_all_unique_blids(value):
    """Validate that each vacuum configured has a unique blid.
    Uniqueness is checked across the configured blid values.
    """
    blids = [device[CONF_BLID] for device in value]
    schema = vol.Schema(vol.Unique())
    schema(blids)
    return value
DEVICE_SCHEMA = vol.All(
cv.deprecated(CONF_CERT),
vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_BLID): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_CERT, default=DEFAULT_CERT): str,
vol.Optional(CONF_CONTINUOUS, default=DEFAULT_CONTINUOUS): bool,
vol.Optional(CONF_DELAY, default=DEFAULT_DELAY): int,
},
),
)
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: vol.All(cv.ensure_list, [DEVICE_SCHEMA], _has_all_unique_blids)},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the roomba environment."""
hass.data.setdefault(DOMAIN, {})
if DOMAIN not in config:
return True
for index, conf in enumerate(config[DOMAIN]):
_LOGGER.debug("Importing Roomba #%d - %s", index, conf[CONF_HOST])
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=conf,
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set the config entry up."""
# Set up roomba platforms with config entry
if not config_entry.options:
hass.config_entries.async_update_entry(
config_entry,
options={
"continuous": config_entry.data[CONF_CONTINUOUS],
"delay": config_entry.data[CONF_DELAY],
},
)
roomba = Roomba(
address=config_entry.data[CONF_HOST],
blid=config_entry.data[CONF_BLID],
password=config_entry.data[CONF_PASSWORD],
continuous=config_entry.options[CONF_CONTINUOUS],
delay=config_entry.options[CONF_DELAY],
)
try:
if not await async_connect_or_timeout(hass, roomba):
return False
except CannotConnect as err:
raise exceptions.ConfigEntryNotReady from err
hass.data[DOMAIN][config_entry.entry_id] = {
ROOMBA_SESSION: roomba,
BLID: config_entry.data[CONF_BLID],
}
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
if not config_entry.update_listeners:
config_entry.add_update_listener(async_update_options)
return True
async def async_connect_or_timeout(hass, roomba):
"""Connect to vacuum."""
try:
name = None
with async_timeout.timeout(10):
_LOGGER.debug("Initialize connection to vacuum")
await hass.async_add_executor_job(roomba.connect)
while not roomba.roomba_connected or name is None:
                # Wait for the connection and for the reported data to be ready
name = roomba_reported_state(roomba).get("name", None)
if name:
break
await asyncio.sleep(1)
except RoombaConnectionError as err:
_LOGGER.error("Error to connect to vacuum")
raise CannotConnect from err
except asyncio.TimeoutError as err:
        # The API keeps looping if the username or password is incorrect but the Roomba exists
await async_disconnect_or_timeout(hass, roomba)
_LOGGER.error("Timeout expired")
raise CannotConnect from err
return {ROOMBA_SESSION: roomba, CONF_NAME: name}
async def async_disconnect_or_timeout(hass, roomba):
"""Disconnect to vacuum."""
_LOGGER.debug("Disconnect vacuum")
with async_timeout.timeout(3):
await hass.async_add_executor_job(roomba.disconnect)
return True
async def async_update_options(hass, config_entry):
"""Update options."""
await hass.config_entries.async_reload(config_entry.entry_id)
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENTS
]
)
)
if unload_ok:
domain_data = hass.data[DOMAIN][config_entry.entry_id]
await async_disconnect_or_timeout(hass, roomba=domain_data[ROOMBA_SESSION])
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
def roomba_reported_state(roomba):
"""Roomba report."""
return roomba.master_state.get("state", {}).get("reported", {})
@callback
def _async_find_matching_config_entry(hass, prefix):
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.unique_id == prefix:
return entry
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
|
import ast
import pytest
from script.hassfest.dependencies import ImportCollector
@pytest.fixture
def mock_collector():
"""Fixture with import collector that adds all referenced nodes."""
collector = ImportCollector(None)
collector.unfiltered_referenced = set()
collector._add_reference = collector.unfiltered_referenced.add
return collector
def test_child_import(mock_collector):
"""Test detecting a child_import reference."""
mock_collector.visit(
ast.parse(
"""
from homeassistant.components import child_import
"""
)
)
assert mock_collector.unfiltered_referenced == {"child_import"}
def test_subimport(mock_collector):
"""Test detecting a subimport reference."""
mock_collector.visit(
ast.parse(
"""
from homeassistant.components.subimport.smart_home import EVENT_ALEXA_SMART_HOME
"""
)
)
assert mock_collector.unfiltered_referenced == {"subimport"}
def test_child_import_field(mock_collector):
"""Test detecting a child_import_field reference."""
mock_collector.visit(
ast.parse(
"""
from homeassistant.components.child_import_field import bla
"""
)
)
assert mock_collector.unfiltered_referenced == {"child_import_field"}
def test_renamed_absolute(mock_collector):
"""Test detecting a renamed_absolute reference."""
mock_collector.visit(
ast.parse(
"""
import homeassistant.components.renamed_absolute as hue
"""
)
)
assert mock_collector.unfiltered_referenced == {"renamed_absolute"}
def test_hass_components_var(mock_collector):
"""Test detecting a hass_components_var reference."""
mock_collector.visit(
ast.parse(
"""
def bla(hass):
hass.components.hass_components_var.async_do_something()
"""
)
)
assert mock_collector.unfiltered_referenced == {"hass_components_var"}
def test_hass_components_class(mock_collector):
"""Test detecting a hass_components_class reference."""
mock_collector.visit(
ast.parse(
"""
class Hello:
def something(self):
self.hass.components.hass_components_class.async_yo()
"""
)
)
assert mock_collector.unfiltered_referenced == {"hass_components_class"}
def test_all_imports(mock_collector):
"""Test all imports together."""
mock_collector.visit(
ast.parse(
"""
from homeassistant.components import child_import
from homeassistant.components.subimport.smart_home import EVENT_ALEXA_SMART_HOME
from homeassistant.components.child_import_field import bla
import homeassistant.components.renamed_absolute as hue
def bla(hass):
hass.components.hass_components_var.async_do_something()
class Hello:
def something(self):
self.hass.components.hass_components_class.async_yo()
"""
)
)
assert mock_collector.unfiltered_referenced == {
"child_import",
"subimport",
"child_import_field",
"renamed_absolute",
"hass_components_var",
"hass_components_class",
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from absl import flags
import numpy
from perfkitbenchmarker import configs
from perfkitbenchmarker import flag_util
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import cuda_toolkit
from perfkitbenchmarker.linux_packages import nvidia_driver
from six.moves import range
DEFAULT_RANGE_START = 1 << 26 # 64 MB
DEFAULT_RANGE_STEP = 1 << 26 # 64 MB
DEFAULT_RANGE_END = 1 << 30 # 1 GB
flags.DEFINE_integer(
'gpu_pcie_bandwidth_iterations',
30,
'number of iterations to run',
lower_bound=1)
flags.DEFINE_enum('gpu_pcie_bandwidth_mode', 'quick', ['quick', 'range'],
'bandwidth test mode to use. '
'If range is selected, provide desired range '
'in flag gpu_pcie_bandwidth_transfer_sizes. '
'Additionally, if range is selected, the resulting '
'bandwidth will be averaged over all provided transfer '
'sizes.')
flag_util.DEFINE_integerlist(
'gpu_pcie_bandwidth_transfer_sizes',
flag_util.IntegerList(
[DEFAULT_RANGE_START, DEFAULT_RANGE_END,
DEFAULT_RANGE_STEP]), 'range of transfer sizes to use in bytes. '
'Only used if gpu_pcie_bandwidth_mode is set to range',
module_name=__name__)
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'gpu_pcie_bandwidth'
BENCHMARK_CONFIG = """
gpu_pcie_bandwidth:
description: Runs NVIDIA's CUDA bandwidth test.
vm_groups:
default:
vm_spec:
GCP:
machine_type: n1-standard-4
gpu_type: k80
gpu_count: 1
zone: us-east1-d
boot_disk_size: 200
AWS:
machine_type: p2.xlarge
zone: us-east-1
boot_disk_size: 200
Azure:
machine_type: Standard_NC6
zone: eastus
"""
BENCHMARK_METRICS = [
'Host to device bandwidth', 'Device to host bandwidth',
'Device to device bandwidth'
]
EXTRACT_BANDWIDTH_TEST_RESULTS_REGEX = r'\d+\s+(\d+\.?\d*)'
EXTRACT_DEVICE_INFO_REGEX = r'Device\s*(\d):\s*(.*$)'
class InvalidBandwidthTestOutputFormat(Exception):
pass
def GetConfig(user_config):
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
return config
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.data.ResourceNotFound: On missing resource.
"""
cuda_toolkit.CheckPrerequisites()
def Prepare(benchmark_spec):
"""Install CUDA toolkit 8.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vm = benchmark_spec.vms[0]
vm.Install('cuda_toolkit')
def _ParseDeviceInfo(test_output):
"""Parses the GPU device info from the CUDA device bandwidth test output.
Args:
test_output: The resulting output string from the bandwidth
test application.
Returns:
A dictionary mapping the device number to its name, for every
device available on the system.
"""
matches = regex_util.ExtractAllMatches(EXTRACT_DEVICE_INFO_REGEX, test_output,
re.MULTILINE)
devices = {str(i[0]): str(i[1]) for i in matches}
return devices
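# Illustrative example (not part of the original benchmark): the device lines
# below are a fabricated stand-in for bandwidthTest output, showing how
# _ParseDeviceInfo maps each device number to its reported name.
def _ExampleParseDeviceInfo():
  fake_output = 'Device 0: Tesla K80\nDevice 1: Tesla K80'
  assert _ParseDeviceInfo(fake_output) == {'0': 'Tesla K80', '1': 'Tesla K80'}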
def _AverageResultsForSection(lines, results_section_header_index):
"""Return the average bandwidth for a specific section of results
Args:
lines: output of bandwidthTest, split by lines and stripped of whitespace
results_section_header_index: line number of results section header.
The actual results, in MB/s, should begin three lines after the header.
Returns:
average bandwidth, in MB/s, for the section beginning at
results_section_header_index
"""
RESULTS_OFFSET_FROM_HEADER = 3
results = []
for line in lines[results_section_header_index + RESULTS_OFFSET_FROM_HEADER:]:
if not line:
break # done with this section if line is empty
results.append(float(line.split()[1]))
return numpy.mean(results)
def _FindIndexOfLineThatStartsWith(lines, prefix):
  """Return the index of the first line that starts with prefix.
  Args:
    lines: iterable
    prefix: string to look for at the start of a line
  Returns:
    first index of the element in lines that starts with prefix
  Raises:
    InvalidBandwidthTestOutputFormat if prefix is not found
  """
  for idx, line in enumerate(lines):
    if line.startswith(prefix):
      return idx
  raise InvalidBandwidthTestOutputFormat(
      'Unable to find {0} in bandwidthTest output'.format(prefix))
def _ParseOutputFromSingleIteration(test_output):
"""Parses the output of the CUDA device bandwidth test.
Args:
test_output: The resulting output string from the bandwidth
test application.
Returns:
A dictionary containing the following values as floats:
* the device to host bandwidth
* the host to device bandwidth
* the device to device bandwidth
All units are in MB/s, as these are the units guaranteed to be output
by the test.
"""
lines = [line.strip() for line in test_output.splitlines()]
host_to_device_results_start = _FindIndexOfLineThatStartsWith(
lines, 'Host to Device Bandwidth')
device_to_host_results_start = _FindIndexOfLineThatStartsWith(
lines, 'Device to Host Bandwidth')
device_to_device_results_start = _FindIndexOfLineThatStartsWith(
lines, 'Device to Device Bandwidth')
host_to_device_mean = _AverageResultsForSection(lines,
host_to_device_results_start)
device_to_host_mean = _AverageResultsForSection(lines,
device_to_host_results_start)
device_to_device_mean = _AverageResultsForSection(
lines, device_to_device_results_start)
results = {
'Host to device bandwidth': host_to_device_mean,
'Device to host bandwidth': device_to_host_mean,
'Device to device bandwidth': device_to_device_mean,
}
return results
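# Illustrative example (not part of the original benchmark): the text below is
# a fabricated, minimal stand-in for bandwidthTest output. Each section header
# is followed, three lines later, by 'transfer-size  bandwidth' rows that end
# at a blank line, which is the structure the parser relies on.
def _ExampleParseOutputFromSingleIteration():
  fake_output = '\n'.join([
      'Host to Device Bandwidth, 1 Device(s)',
      'PINNED Memory Transfers',
      '  Transfer Size (Bytes)  Bandwidth(MB/s)',
      '  67108864               6000.0',
      '',
      'Device to Host Bandwidth, 1 Device(s)',
      'PINNED Memory Transfers',
      '  Transfer Size (Bytes)  Bandwidth(MB/s)',
      '  67108864               6500.0',
      '',
      'Device to Device Bandwidth, 1 Device(s)',
      'PINNED Memory Transfers',
      '  Transfer Size (Bytes)  Bandwidth(MB/s)',
      '  67108864               150000.0',
      '',
  ])
  results = _ParseOutputFromSingleIteration(fake_output)
  assert results['Host to device bandwidth'] == 6000.0
  assert results['Device to host bandwidth'] == 6500.0
  assert results['Device to device bandwidth'] == 150000.0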
def _CalculateMetricsOverAllIterations(result_dicts, metadata=None):
  """Calculates stats given list of result dictionaries.
  Each item in the list represents the results from a single
  iteration.
  Args:
    result_dicts: a list of result dictionaries. Each result dictionary
      represents a single run of the CUDA device bandwidth test,
      parsed by _ParseOutputFromSingleIteration().
    metadata: metadata dict to be added to each Sample. Defaults to an empty
      dict.
  Returns:
    a list of sample.Samples containing the device to host bandwidth,
    host to device bandwidth, and device to device bandwidth for each
    iteration, along with the following stats for each bandwidth type:
    * mean
    * min
    * max
    * stddev
  """
  if metadata is None:
    metadata = {}
  samples = []
for metric in BENCHMARK_METRICS:
sequence = [x[metric] for x in result_dicts]
# Add a Sample for each iteration, and include the iteration number
# in the metadata.
for idx, measurement in enumerate(sequence):
metadata_copy = metadata.copy()
metadata_copy['iteration'] = idx
samples.append(sample.Sample(metric, measurement, 'MB/s', metadata_copy))
samples.append(
sample.Sample(metric + ', min', min(sequence), 'MB/s', metadata))
samples.append(
sample.Sample(metric + ', max', max(sequence), 'MB/s', metadata))
samples.append(
sample.Sample(metric + ', mean', numpy.mean(sequence), 'MB/s',
metadata))
samples.append(
sample.Sample(metric + ', stddev', numpy.std(sequence), 'MB/s',
metadata))
return samples
def Run(benchmark_spec):
"""Sets the GPU clock speed and runs the CUDA PCIe benchmark.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vm = benchmark_spec.vms[0]
# Note: The clock speed is set in this function rather than Prepare()
# so that the user can perform multiple runs with a specified
# clock speed without having to re-prepare the VM.
nvidia_driver.SetAndConfirmGpuClocks(vm)
num_iterations = FLAGS.gpu_pcie_bandwidth_iterations
mode = FLAGS.gpu_pcie_bandwidth_mode
transfer_size_range = FLAGS.gpu_pcie_bandwidth_transfer_sizes
raw_results = []
metadata = {}
metadata.update(cuda_toolkit.GetMetadata(vm))
metadata['num_iterations'] = num_iterations
metadata['mode'] = mode
if mode == 'range':
metadata['range_start'] = transfer_size_range[0]
metadata['range_stop'] = transfer_size_range[1]
metadata['range_step'] = transfer_size_range[2]
run_command = ('%s/extras/demo_suite/bandwidthTest --device=all' %
metadata['cuda_toolkit_home'])
if mode == 'range':
run_command += (' --mode=range --start={0} --end={1} --increment={2}'
.format(transfer_size_range[0], transfer_size_range[1],
transfer_size_range[2]))
for i in range(num_iterations):
stdout, _ = vm.RemoteCommand(run_command, should_log=True)
raw_results.append(_ParseOutputFromSingleIteration(stdout))
if 'device_info' not in metadata:
metadata['device_info'] = _ParseDeviceInfo(stdout)
return _CalculateMetricsOverAllIterations(raw_results, metadata)
def Cleanup(benchmark_spec):
"""Uninstalls CUDA toolkit 8
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vm = benchmark_spec.vms[0]
vm.Uninstall('cuda_toolkit')
|
from itertools import count
from . import messaging
from .entity import Exchange, Queue
__all__ = ('Publisher', 'Consumer')
# XXX compat attribute
entry_to_queue = Queue.from_dict
def _iterconsume(connection, consumer, no_ack=False, limit=None):
consumer.consume(no_ack=no_ack)
for iteration in count(0): # for infinity
if limit and iteration >= limit:
break
yield connection.drain_events()
class Publisher(messaging.Producer):
"""Carrot compatible producer."""
exchange = ''
exchange_type = 'direct'
routing_key = ''
durable = True
auto_delete = False
_closed = False
def __init__(self, connection, exchange=None, routing_key=None,
exchange_type=None, durable=None, auto_delete=None,
channel=None, **kwargs):
if channel:
connection = channel
self.exchange = exchange or self.exchange
self.exchange_type = exchange_type or self.exchange_type
self.routing_key = routing_key or self.routing_key
if auto_delete is not None:
self.auto_delete = auto_delete
if durable is not None:
self.durable = durable
if not isinstance(self.exchange, Exchange):
self.exchange = Exchange(name=self.exchange,
type=self.exchange_type,
routing_key=self.routing_key,
auto_delete=self.auto_delete,
durable=self.durable)
super().__init__(connection, self.exchange, **kwargs)
def send(self, *args, **kwargs):
return self.publish(*args, **kwargs)
def close(self):
super().close()
self._closed = True
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
@property
def backend(self):
return self.channel
class Consumer(messaging.Consumer):
"""Carrot compatible consumer."""
queue = ''
exchange = ''
routing_key = ''
exchange_type = 'direct'
durable = True
exclusive = False
auto_delete = False
_closed = False
def __init__(self, connection, queue=None, exchange=None,
routing_key=None, exchange_type=None, durable=None,
exclusive=None, auto_delete=None, **kwargs):
self.backend = connection.channel()
if durable is not None:
self.durable = durable
if exclusive is not None:
self.exclusive = exclusive
if auto_delete is not None:
self.auto_delete = auto_delete
self.queue = queue or self.queue
self.exchange = exchange or self.exchange
self.exchange_type = exchange_type or self.exchange_type
self.routing_key = routing_key or self.routing_key
exchange = Exchange(self.exchange,
type=self.exchange_type,
routing_key=self.routing_key,
auto_delete=self.auto_delete,
durable=self.durable)
queue = Queue(self.queue,
exchange=exchange,
routing_key=self.routing_key,
durable=self.durable,
exclusive=self.exclusive,
auto_delete=self.auto_delete)
super().__init__(self.backend, queue, **kwargs)
def revive(self, channel):
self.backend = channel
super().revive(channel)
def close(self):
self.cancel()
self.backend.close()
self._closed = True
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
def __iter__(self):
return self.iterqueue(infinite=True)
def fetch(self, no_ack=None, enable_callbacks=False):
if no_ack is None:
no_ack = self.no_ack
message = self.queues[0].get(no_ack)
if message:
if enable_callbacks:
self.receive(message.payload, message)
return message
def process_next(self):
raise NotImplementedError('Use fetch(enable_callbacks=True)')
def discard_all(self, filterfunc=None):
if filterfunc is not None:
raise NotImplementedError(
'discard_all does not implement filters')
return self.purge()
def iterconsume(self, limit=None, no_ack=None):
return _iterconsume(self.connection, self, no_ack, limit)
def wait(self, limit=None):
it = self.iterconsume(limit)
return list(it)
def iterqueue(self, limit=None, infinite=False):
for items_since_start in count(): # for infinity
item = self.fetch()
if (not infinite and item is None) or \
(limit and items_since_start >= limit):
break
yield item
class ConsumerSet(messaging.Consumer):
def __init__(self, connection, from_dict=None, consumers=None,
channel=None, **kwargs):
if channel:
self._provided_channel = True
self.backend = channel
else:
self._provided_channel = False
self.backend = connection.channel()
queues = []
if consumers:
for consumer in consumers:
queues.extend(consumer.queues)
if from_dict:
for queue_name, queue_options in from_dict.items():
queues.append(Queue.from_dict(queue_name, **queue_options))
super().__init__(self.backend, queues, **kwargs)
def iterconsume(self, limit=None, no_ack=False):
return _iterconsume(self.connection, self, no_ack, limit)
def discard_all(self):
return self.purge()
def add_consumer_from_dict(self, queue, **options):
return self.add_queue(Queue.from_dict(queue, **options))
def add_consumer(self, consumer):
for queue in consumer.queues:
self.add_queue(queue)
def revive(self, channel):
self.backend = channel
super().revive(channel)
def close(self):
self.cancel()
if not self._provided_channel:
self.channel.close()
|
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.const import (
ATTR_FRIENDLY_NAME,
CONF_PASSWORD,
CONF_USERNAME,
STATE_ON,
)
from homeassistant.setup import async_setup_component
from .common import TEST_PASSWORD, TEST_USER_ID
async def test_binary_sensors(hass, config_entry, aioclient_mock_fixture):
"""Test Flo by Moen sensors."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
# we should have 6 entities for the device
state = hass.states.get("binary_sensor.pending_system_alerts")
assert state.state == STATE_ON
assert state.attributes.get("info") == 0
assert state.attributes.get("warning") == 2
assert state.attributes.get("critical") == 0
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Pending System Alerts"
|
from functools import lru_cache
from django.contrib.auth import get_user_model
from zinnia.settings import COMMENT_FLAG_USER_ID
PINGBACK = 'pingback'
TRACKBACK = 'trackback'
FLAGGER_USERNAME = 'Zinnia-Flagger'
@lru_cache(1)
def get_user_flagger():
"""
Return an User instance used by the system
when flagging a comment as trackback or pingback.
"""
user_klass = get_user_model()
try:
user = user_klass.objects.get(pk=COMMENT_FLAG_USER_ID)
except user_klass.DoesNotExist:
try:
user = user_klass.objects.get(
**{user_klass.USERNAME_FIELD: FLAGGER_USERNAME})
except user_klass.DoesNotExist:
user = user_klass.objects.create_user(FLAGGER_USERNAME)
return user
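# Illustrative note (not part of Zinnia): because get_user_flagger is wrapped
# in lru_cache(1), repeated calls reuse the same flagger user and avoid extra
# database lookups, e.g.:
#   flagger = get_user_flagger()
#   assert get_user_flagger() is flagger  # second call is served from the cache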
|
from homeassistant.components.media_player.const import (
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_SELECT_SOURCE,
)
from tests.common import async_get_device_automations
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_lg_tv(hass):
"""Test that a Koogeek LS1 can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "lg_tv.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# Assert that the entity is correctly added to the entity registry
entry = entity_registry.async_get("media_player.lg_webos_tv_af80")
assert entry.unique_id == "homekit-999AAAAAA999-48"
helper = Helper(
hass, "media_player.lg_webos_tv_af80", pairing, accessories[0], config_entry
)
state = await helper.poll_and_get_state()
# Assert that the friendly name is detected correctly
assert state.attributes["friendly_name"] == "LG webOS TV AF80"
    # Assert that all sources were found and that we know which one is active.
assert state.attributes["source_list"] == [
"AirPlay",
"Live TV",
"HDMI 1",
"Sony",
"Apple",
"AV",
"HDMI 4",
]
assert state.attributes["source"] == "HDMI 4"
    # Assert that all optional features the TV supports are detected
assert state.attributes["supported_features"] == (
SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_SELECT_SOURCE
)
# The LG TV doesn't (at least at this patch level) report its media state via
# CURRENT_MEDIA_STATE. Therefore "ok" is the best we can say.
assert state.state == "ok"
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.manufacturer == "LG Electronics"
assert device.name == "LG webOS TV AF80"
assert device.model == "OLED55B9PUA"
assert device.sw_version == "04.71.04"
assert device.via_device_id is None
# A TV doesn't have any triggers
triggers = await async_get_device_automations(hass, "trigger", device.id)
assert triggers == []
|
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugins.compile.rest import _align_choice, _align_options_base
from nikola.plugin_categories import RestExtension
class Plugin(RestExtension):
"""Plugin for the youtube directive."""
name = "rest_youtube"
def set_site(self, site):
"""Set Nikola site."""
self.site = site
directives.register_directive('youtube', Youtube)
return super().set_site(site)
CODE = """\
<div class="youtube-video{align}">
<iframe width="{width}" height="{height}"
src="https://www.youtube-nocookie.com/embed/{yid}?rel=0&wmode=transparent"
frameborder="0" allow="encrypted-media" allowfullscreen
></iframe>
</div>"""
class Youtube(Directive):
"""reST extension for inserting youtube embedded videos.
Usage:
.. youtube:: lyViVmaBQDg
:height: 400
:width: 600
"""
has_content = True
required_arguments = 1
option_spec = {
"width": directives.unchanged,
"height": directives.unchanged,
"align": _align_choice
}
def run(self):
"""Run the youtube directive."""
self.check_content()
options = {
'yid': self.arguments[0],
'width': 560,
'height': 315,
}
options.update({k: v for k, v in self.options.items() if v})
if self.options.get('align') in _align_options_base:
options['align'] = ' align-' + self.options['align']
else:
options['align'] = ''
return [nodes.raw('', CODE.format(**options), format='html')]
def check_content(self):
"""Check if content exists."""
if self.content: # pragma: no cover
raise self.warning("This directive does not accept content. The "
"'key=value' format for options is deprecated, "
"use ':key: value' instead")
|
import unittest
import datatable as dt
from datatable.internal import frame_integrity_check
from datatable import ltype
class TestDatatable(unittest.TestCase):
def test_fread(self):
d0 = dt.fread(
"L,T,U,D\n"
"true,True,TRUE,1\n"
"false,False,FALSE,0\n"
",,,\n"
)
frame_integrity_check(d0)
assert d0.shape == (3, 4)
assert d0.ltypes == (ltype.bool,) * 4
assert d0.to_list() == [[True, False, None]] * 4
|
from collections import Counter
from unittest import TestCase
from scattertext.features.UnigramsFromSpacyDoc import UnigramsFromSpacyDoc
from scattertext.WhitespaceNLP import whitespace_nlp
class TestUnigramsFromSpacyDoc(TestCase):
def test_get_feats(self):
doc = whitespace_nlp("A a bb cc.")
term_freq = UnigramsFromSpacyDoc().get_feats(doc)
self.assertEqual(Counter({'a': 2, 'bb': 1, 'cc': 1}),
term_freq)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import os.path
from absl import flags
from absl.testing import flagsaver
from absl.testing import parameterized
from compare_gan import datasets
from compare_gan import eval_gan_lib
from compare_gan import eval_utils
from compare_gan.gans import consts as c
from compare_gan.gans.modular_gan import ModularGAN
from compare_gan.metrics import fid_score
from compare_gan.metrics import fractal_dimension
from compare_gan.metrics import inception_score
from compare_gan.metrics import ms_ssim_score
import gin
import mock
import tensorflow as tf
FLAGS = flags.FLAGS
def create_fake_inception_graph():
"""Creates a `GraphDef` with that mocks the Inception V1 graph.
It takes the input, multiplies it through a matrix full of 0.00001 values,
and provides the results in the endpoints 'pool_3' and 'logits'. This
matches the tensor names in the real Inception V1 model.
the real inception model.
Returns:
`tf.GraphDef` for the mocked Inception V1 graph.
"""
fake_inception = tf.Graph()
with fake_inception.as_default():
inputs = tf.placeholder(
tf.float32, shape=[None, 299, 299, 3], name="Mul")
w = tf.ones(shape=[299 * 299 * 3, 10]) * 0.00001
outputs = tf.matmul(tf.layers.flatten(inputs), w)
tf.identity(outputs, name="pool_3")
tf.identity(outputs, name="logits")
return fake_inception.as_graph_def()
class EvalGanLibTest(parameterized.TestCase, tf.test.TestCase):
def setUp(self):
super(EvalGanLibTest, self).setUp()
gin.clear_config()
FLAGS.data_fake_dataset = True
self.mock_get_graph = mock.patch.object(
eval_utils, "get_inception_graph_def").start()
self.mock_get_graph.return_value = create_fake_inception_graph()
@parameterized.parameters(c.ARCHITECTURES)
@flagsaver.flagsaver
def test_end2end_checkpoint(self, architecture):
"""Takes real GAN (trained for 1 step) and evaluate it."""
if architecture in {c.RESNET_STL_ARCH, c.RESNET30_ARCH}:
      # RESNET_STL_ARCH and RESNET30_ARCH do not support CIFAR image shape.
return
gin.bind_parameter("dataset.name", "cifar10")
dataset = datasets.get_dataset("cifar10")
options = {
"architecture": architecture,
"z_dim": 120,
"disc_iters": 1,
"lambda": 1,
}
model_dir = os.path.join(tf.test.get_temp_dir(), self.id())
tf.logging.info("model_dir: %s" % model_dir)
run_config = tf.contrib.tpu.RunConfig(model_dir=model_dir)
gan = ModularGAN(dataset=dataset,
parameters=options,
conditional="biggan" in architecture,
model_dir=model_dir)
estimator = gan.as_estimator(run_config, batch_size=2, use_tpu=False)
estimator.train(input_fn=gan.input_fn, steps=1)
export_path = os.path.join(model_dir, "tfhub")
checkpoint_path = os.path.join(model_dir, "model.ckpt-1")
module_spec = gan.as_module_spec()
module_spec.export(export_path, checkpoint_path=checkpoint_path)
eval_tasks = [
fid_score.FIDScoreTask(),
fractal_dimension.FractalDimensionTask(),
inception_score.InceptionScoreTask(),
ms_ssim_score.MultiscaleSSIMTask()
]
result_dict = eval_gan_lib.evaluate_tfhub_module(
export_path, eval_tasks, use_tpu=False, num_averaging_runs=1)
tf.logging.info("result_dict: %s", result_dict)
for score in ["fid_score", "fractal_dimension", "inception_score",
"ms_ssim"]:
for stats in ["mean", "std", "list"]:
required_key = "%s_%s" % (score, stats)
self.assertIn(required_key, result_dict, "Missing: %s." % required_key)
if __name__ == "__main__":
tf.test.main()
|
import pytest
from hyperopt import STATUS_OK
from hyperas.distributions import choice, uniform
import six.moves.cPickle as pickle
from elephas.hyperparam import HyperParamModel
pytestmark = pytest.mark.usefixtures("spark_context")
def data():
from keras.datasets import mnist
from keras.utils import np_utils
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
nb_classes = 10
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
return x_train, y_train, x_test, y_test
def model(x_train, y_train, x_test, y_test):
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import RMSprop
keras_model = Sequential()
keras_model.add(Dense(512, input_shape=(784,)))
keras_model.add(Activation('relu'))
keras_model.add(Dropout({{uniform(0, 1)}}))
keras_model.add(Dense({{choice([256, 512, 1024])}}))
keras_model.add(Activation('relu'))
keras_model.add(Dropout({{uniform(0, 1)}}))
keras_model.add(Dense(10))
keras_model.add(Activation('softmax'))
rms = RMSprop()
keras_model.compile(loss='categorical_crossentropy',
optimizer=rms, metrics=['acc'])
keras_model.fit(x_train, y_train,
batch_size={{choice([64, 128])}},
epochs=1,
verbose=2,
validation_data=(x_test, y_test))
score, acc = keras_model.evaluate(x_test, y_test, verbose=0)
print('Test accuracy:', acc)
return {'loss': -acc, 'status': STATUS_OK, 'model': keras_model.to_yaml(),
'weights': pickle.dumps(keras_model.get_weights())}
def test_hyper_param_model(spark_context):
hyperparam_model = HyperParamModel(spark_context)
hyperparam_model.minimize(model=model, data=data, max_evals=1)
|
import datetime
import unittest.mock
import dateutil
import pytest
from nikola.nikola import LEGAL_VALUES
from nikola.utils import (
LocaleBorg,
LocaleBorgUninitializedException,
TranslatableSetting,
)
TESLA_BIRTHDAY = datetime.date(1856, 7, 10)
TESLA_BIRTHDAY_DT = datetime.datetime(1856, 7, 10, 12, 34, 56)
DT_EN_US = "July 10, 1856 at 12:34:56 PM UTC"
DT_PL = "10 lipca 1856 12:34:56 UTC"
@pytest.mark.parametrize("initial_lang", [None, ""])
def test_initialize_failure(initial_lang):
with pytest.raises(ValueError):
LocaleBorg.initialize({}, initial_lang)
assert not LocaleBorg.initialized
@pytest.mark.parametrize("initial_lang", ["en", "pl"])
def test_initialize(initial_lang):
LocaleBorg.initialize({}, initial_lang)
assert LocaleBorg.initialized
assert LocaleBorg().current_lang == initial_lang
def test_uninitialized_error():
with pytest.raises(LocaleBorgUninitializedException):
LocaleBorg()
@pytest.mark.parametrize(
"locale, expected_current_lang",
[
("pl", "pl"),
pytest.param(
"xx", "xx", id="fake language"
), # used to ensure any locale can be supported
],
)
def test_set_locale(base_config, locale, expected_current_lang):
LocaleBorg().set_locale(locale)
assert LocaleBorg.initialized
assert LocaleBorg().current_lang == expected_current_lang
def test_set_locale_for_template():
LocaleBorg.initialize({}, "en")
assert LocaleBorg().set_locale("xz") == "" # empty string for template ease of use
def test_format_date_webiso_basic(base_config):
with unittest.mock.patch("babel.dates.format_datetime") as m:
formatted_date = LocaleBorg().formatted_date("webiso", TESLA_BIRTHDAY_DT)
assert formatted_date == "1856-07-10T12:34:56"
m.assert_not_called()
@pytest.mark.parametrize("lang", ["en", "pl"])
def test_format_date_basic(base_config, lang):
LocaleBorg.initialize({}, lang)
formatted_date = LocaleBorg().formatted_date(
"yyyy-MM-dd HH:mm:ss", TESLA_BIRTHDAY_DT
)
assert formatted_date == "1856-07-10 12:34:56"
def test_format_date_long(base_config):
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT) == DT_EN_US
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT, "en") == DT_EN_US
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT, "pl") == DT_PL
LocaleBorg().set_locale("pl")
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT) == DT_PL
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT, "en") == DT_EN_US
def test_format_date_timezone(base_config):
tesla_150_birthday_dtz = datetime.datetime(
2006, 7, 10, 12, 34, 56, tzinfo=dateutil.tz.gettz("America/New_York")
)
formatted_date = LocaleBorg().formatted_date("long", tesla_150_birthday_dtz)
assert formatted_date == "July 10, 2006 at 12:34:56 PM -0400"
nodst = datetime.datetime(
2006, 1, 10, 12, 34, 56, tzinfo=dateutil.tz.gettz("America/New_York")
)
formatted_date = LocaleBorg().formatted_date("long", nodst)
assert formatted_date == "January 10, 2006 at 12:34:56 PM -0500"
@pytest.mark.parametrize(
"english_variant, expected_date",
[
pytest.param("en_US", DT_EN_US, id="US"),
pytest.param("en_GB", "10 July 1856 at 12:34:56 UTC", id="GB"),
],
)
def test_format_date_locale_variants(english_variant, expected_date):
LocaleBorg.initialize({"en": english_variant}, "en")
assert LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT, "en") == expected_date
@pytest.mark.parametrize(
"lang, expected_string", [("en", "en July"), ("pl", "lipca pl")]
)
def test_format_date_translatablesetting(base_config, lang, expected_string):
df = TranslatableSetting(
"DATE_FORMAT", {"en": "'en' MMMM", "pl": "MMMM 'pl'"}, {"en": "", "pl": ""}
)
assert LocaleBorg().formatted_date(df, TESLA_BIRTHDAY_DT, lang) == expected_string
@pytest.mark.parametrize(
"lang, expected_string",
[
pytest.param(None, "Foo July Bar", id="default"),
pytest.param("pl", "Foo lipiec Bar", id="pl"),
],
)
def test_format_date_in_string_month(base_config, lang, expected_string):
formatted_date = LocaleBorg().format_date_in_string(
"Foo {month} Bar", TESLA_BIRTHDAY, lang
)
assert formatted_date == expected_string
@pytest.mark.parametrize(
"lang, expected_string",
[
pytest.param(None, "Foo July 1856 Bar", id="default"),
pytest.param("pl", "Foo lipiec 1856 Bar", id="pl"),
],
)
def test_format_date_in_string_month_year(base_config, lang, expected_string):
formatted_date = LocaleBorg().format_date_in_string(
"Foo {month_year} Bar", TESLA_BIRTHDAY, lang
)
assert formatted_date == expected_string
@pytest.mark.parametrize(
"lang, expected_string",
[
pytest.param(None, "Foo July 10, 1856 Bar", id="default"),
pytest.param("pl", "Foo 10 lipca 1856 Bar", id="pl"),
],
)
def test_format_date_in_string_month_day_year(base_config, lang, expected_string):
formatted_date = LocaleBorg().format_date_in_string(
"Foo {month_day_year} Bar", TESLA_BIRTHDAY, lang
)
assert formatted_date == expected_string
@pytest.mark.parametrize(
"lang, expected_string",
[
pytest.param(None, "Foo 10 July 1856 Bar", id="default"),
pytest.param("pl", "Foo 10 lipca 1856 Bar", id="pl"),
],
)
def test_format_date_in_string_month_day_year_gb(lang, expected_string):
LocaleBorg.initialize({"en": "en_GB"}, "en")
formatted_date = LocaleBorg().format_date_in_string(
"Foo {month_day_year} Bar", TESLA_BIRTHDAY, lang
)
assert formatted_date == expected_string
@pytest.mark.parametrize(
"message, expected_string",
[
("Foo {month:'miesiąca' MMMM} Bar", "Foo miesiąca lipca Bar"),
("Foo {month_year:MMMM yyyy} Bar", "Foo lipca 1856 Bar"),
],
)
def test_format_date_in_string_customization(base_config, message, expected_string):
formatted_date = LocaleBorg().format_date_in_string(message, TESLA_BIRTHDAY, "pl")
assert formatted_date == expected_string
@pytest.mark.parametrize(
"lang, expected_format",
[("sr", "10. јул 1856. 12:34:56 UTC"), ("sr_latin", "10. jul 1856. 12:34:56 UTC")],
)
def test_locale_base(lang, expected_format):
LocaleBorg.initialize(LEGAL_VALUES["LOCALES_BASE"], "en")
formatted_date = LocaleBorg().formatted_date("long", TESLA_BIRTHDAY_DT, lang)
assert formatted_date == expected_format
@pytest.fixture(autouse=True)
def localeborg_reset():
"""
Reset the LocaleBorg before and after every test.
"""
LocaleBorg.reset()
assert not LocaleBorg.initialized
try:
yield
finally:
LocaleBorg.reset()
assert not LocaleBorg.initialized
@pytest.fixture
def base_config():
"""A base config of LocaleBorg."""
LocaleBorg.initialize({}, "en")
|
import io
import logging
import os
import unittest
import pytest
from homeassistant.config import YAML_CONFIG_FILE, load_yaml_config_file
from homeassistant.exceptions import HomeAssistantError
import homeassistant.util.yaml as yaml
from homeassistant.util.yaml import loader as yaml_loader
from tests.async_mock import patch
from tests.common import get_test_config_dir, patch_yaml_files
@pytest.fixture(autouse=True)
def mock_credstash():
"""Mock credstash so it doesn't connect to the internet."""
with patch.object(yaml_loader, "credstash") as mock_credstash:
mock_credstash.getSecret.return_value = None
yield mock_credstash
def test_simple_list():
"""Test simple list."""
conf = "config:\n - simple\n - list"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["config"] == ["simple", "list"]
def test_simple_dict():
"""Test simple dict."""
conf = "key: value"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == "value"
def test_unhashable_key():
"""Test an unhashable key."""
files = {YAML_CONFIG_FILE: "message:\n {{ states.state }}"}
with pytest.raises(HomeAssistantError), patch_yaml_files(files):
load_yaml_config_file(YAML_CONFIG_FILE)
def test_no_key():
"""Test item without a key."""
files = {YAML_CONFIG_FILE: "a: a\nnokeyhere"}
with pytest.raises(HomeAssistantError), patch_yaml_files(files):
yaml.load_yaml(YAML_CONFIG_FILE)
def test_environment_variable():
"""Test config file with environment variable."""
os.environ["PASSWORD"] = "secret_password"
conf = "password: !env_var PASSWORD"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["password"] == "secret_password"
del os.environ["PASSWORD"]
def test_environment_variable_default():
"""Test config file with default value for environment variable."""
conf = "password: !env_var PASSWORD secret_password"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["password"] == "secret_password"
def test_invalid_environment_variable():
"""Test config file with no environment variable sat."""
conf = "password: !env_var PASSWORD"
with pytest.raises(HomeAssistantError):
with io.StringIO(conf) as file:
yaml_loader.yaml.safe_load(file)
def test_include_yaml():
"""Test include yaml."""
with patch_yaml_files({"test.yaml": "value"}):
conf = "key: !include test.yaml"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == "value"
with patch_yaml_files({"test.yaml": None}):
conf = "key: !include test.yaml"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == {}
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_list(mock_walk):
"""Test include dir list yaml."""
mock_walk.return_value = [["/test", [], ["two.yaml", "one.yaml"]]]
with patch_yaml_files({"/test/one.yaml": "one", "/test/two.yaml": "two"}):
conf = "key: !include_dir_list /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == sorted(["one", "two"])
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_list_recursive(mock_walk):
"""Test include dir recursive list yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["zero.yaml"]],
["/test/tmp2", [], ["one.yaml", "two.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/zero.yaml": "zero",
"/test/tmp2/one.yaml": "one",
"/test/tmp2/two.yaml": "two",
}
):
conf = "key: !include_dir_list /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["zero", "one", "two"])
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_named(mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
["/test", [], ["first.yaml", "second.yaml", "secrets.yaml"]]
]
with patch_yaml_files({"/test/first.yaml": "one", "/test/second.yaml": "two"}):
conf = "key: !include_dir_named /test"
correct = {"first": "one", "second": "two"}
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == correct
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_named_recursive(mock_walk):
"""Test include dir named yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "one",
"/test/tmp2/second.yaml": "two",
"/test/tmp2/third.yaml": "three",
}
):
conf = "key: !include_dir_named /test"
correct = {"first": "one", "second": "two", "third": "three"}
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert doc["key"] == correct
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_merge_list(mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]
with patch_yaml_files(
{"/test/first.yaml": "- one", "/test/second.yaml": "- two\n- three"}
):
conf = "key: !include_dir_merge_list /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert sorted(doc["key"]) == sorted(["one", "two", "three"])
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_merge_list_recursive(mock_walk):
"""Test include dir merge list yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "- one",
"/test/tmp2/second.yaml": "- two",
"/test/tmp2/third.yaml": "- three\n- four",
}
):
conf = "key: !include_dir_merge_list /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert sorted(doc["key"]) == sorted(["one", "two", "three", "four"])
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_merge_named(mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [["/test", [], ["first.yaml", "second.yaml"]]]
files = {
"/test/first.yaml": "key1: one",
"/test/second.yaml": "key2: two\nkey3: three",
}
with patch_yaml_files(files):
conf = "key: !include_dir_merge_named /test"
with io.StringIO(conf) as file:
doc = yaml_loader.yaml.safe_load(file)
assert doc["key"] == {"key1": "one", "key2": "two", "key3": "three"}
@patch("homeassistant.util.yaml.loader.os.walk")
def test_include_dir_merge_named_recursive(mock_walk):
"""Test include dir merge named yaml."""
mock_walk.return_value = [
["/test", ["tmp2", ".ignore", "ignore"], ["first.yaml"]],
["/test/tmp2", [], ["second.yaml", "third.yaml"]],
["/test/ignore", [], [".ignore.yaml"]],
]
with patch_yaml_files(
{
"/test/first.yaml": "key1: one",
"/test/tmp2/second.yaml": "key2: two",
"/test/tmp2/third.yaml": "key3: three\nkey4: four",
}
):
conf = "key: !include_dir_merge_named /test"
with io.StringIO(conf) as file:
assert (
".ignore" in mock_walk.return_value[0][1]
), "Expecting .ignore in here"
doc = yaml_loader.yaml.safe_load(file)
assert "tmp2" in mock_walk.return_value[0][1]
assert ".ignore" not in mock_walk.return_value[0][1]
assert doc["key"] == {
"key1": "one",
"key2": "two",
"key3": "three",
"key4": "four",
}
@patch("homeassistant.util.yaml.loader.open", create=True)
def test_load_yaml_encoding_error(mock_open):
"""Test raising a UnicodeDecodeError."""
mock_open.side_effect = UnicodeDecodeError("", b"", 1, 0, "")
with pytest.raises(HomeAssistantError):
yaml_loader.load_yaml("test")
def test_dump():
"""The that the dump method returns empty None values."""
assert yaml.dump({"a": None, "b": "b"}) == "a:\nb: b\n"
def test_dump_unicode():
"""The that the dump method returns empty None values."""
assert yaml.dump({"a": None, "b": "привет"}) == "a:\nb: привет\n"
FILES = {}
def load_yaml(fname, string):
"""Write a string to file and return the parsed yaml."""
FILES[fname] = string
with patch_yaml_files(FILES):
return load_yaml_config_file(fname)
class FakeKeyring:
"""Fake a keyring class."""
def __init__(self, secrets_dict):
"""Store keyring dictionary."""
self._secrets = secrets_dict
# pylint: disable=protected-access
def get_password(self, domain, name):
"""Retrieve password."""
assert domain == yaml._SECRET_NAMESPACE
return self._secrets.get(name)
class TestSecrets(unittest.TestCase):
"""Test the secrets parameter in the yaml utility."""
# pylint: disable=protected-access,invalid-name
def setUp(self):
"""Create & load secrets file."""
config_dir = get_test_config_dir()
yaml.clear_secret_cache()
self._yaml_path = os.path.join(config_dir, YAML_CONFIG_FILE)
self._secret_path = os.path.join(config_dir, yaml.SECRET_YAML)
self._sub_folder_path = os.path.join(config_dir, "subFolder")
self._unrelated_path = os.path.join(config_dir, "unrelated")
load_yaml(
self._secret_path,
"http_pw: pwhttp\n"
"comp1_un: un1\n"
"comp1_pw: pw1\n"
"stale_pw: not_used\n"
"logger: debug\n",
)
self._yaml = load_yaml(
self._yaml_path,
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
def tearDown(self):
"""Clean up secrets."""
yaml.clear_secret_cache()
FILES.clear()
def test_secrets_from_yaml(self):
"""Did secrets load ok."""
expected = {"api_password": "pwhttp"}
assert expected == self._yaml["http"]
expected = {"username": "un1", "password": "pw1"}
assert expected == self._yaml["component"]
def test_secrets_from_parent_folder(self):
"""Test loading secrets from parent folder."""
expected = {"api_password": "pwhttp"}
self._yaml = load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
assert expected == self._yaml["http"]
def test_secret_overrides_parent(self):
"""Test loading current directory secret overrides the parent."""
expected = {"api_password": "override"}
load_yaml(
os.path.join(self._sub_folder_path, yaml.SECRET_YAML), "http_pw: override"
)
self._yaml = load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
assert expected == self._yaml["http"]
def test_secrets_from_unrelated_fails(self):
"""Test loading secrets from unrelated folder fails."""
load_yaml(os.path.join(self._unrelated_path, yaml.SECRET_YAML), "test: failure")
with pytest.raises(HomeAssistantError):
load_yaml(
os.path.join(self._sub_folder_path, "sub.yaml"),
"http:\n api_password: !secret test",
)
def test_secrets_keyring(self):
"""Test keyring fallback & get_password."""
        yaml_loader.keyring = None  # Ensure it's not there
yaml_str = "http:\n api_password: !secret http_pw_keyring"
with pytest.raises(HomeAssistantError):
load_yaml(self._yaml_path, yaml_str)
yaml_loader.keyring = FakeKeyring({"http_pw_keyring": "yeah"})
_yaml = load_yaml(self._yaml_path, yaml_str)
assert {"http": {"api_password": "yeah"}} == _yaml
@patch.object(yaml_loader, "credstash")
def test_secrets_credstash(self, mock_credstash):
"""Test credstash fallback & get_password."""
mock_credstash.getSecret.return_value = "yeah"
yaml_str = "http:\n api_password: !secret http_pw_credstash"
_yaml = load_yaml(self._yaml_path, yaml_str)
log = logging.getLogger()
log.error(_yaml["http"])
assert {"api_password": "yeah"} == _yaml["http"]
def test_secrets_logger_removed(self):
"""Ensure logger: debug was removed."""
with pytest.raises(HomeAssistantError):
load_yaml(self._yaml_path, "api_password: !secret logger")
@patch("homeassistant.util.yaml.loader._LOGGER.error")
def test_bad_logger_value(self, mock_error):
"""Ensure logger: debug was removed."""
yaml.clear_secret_cache()
load_yaml(self._secret_path, "logger: info\npw: abc")
load_yaml(self._yaml_path, "api_password: !secret pw")
assert mock_error.call_count == 1, "Expected an error about logger: value"
def test_secrets_are_not_dict(self):
"""Did secrets handle non-dict file."""
FILES[
self._secret_path
] = "- http_pw: pwhttp\n comp1_un: un1\n comp1_pw: pw1\n"
yaml.clear_secret_cache()
with pytest.raises(HomeAssistantError):
load_yaml(
self._yaml_path,
"http:\n"
" api_password: !secret http_pw\n"
"component:\n"
" username: !secret comp1_un\n"
" password: !secret comp1_pw\n"
"",
)
def test_representing_yaml_loaded_data():
"""Test we can represent YAML loaded data."""
files = {YAML_CONFIG_FILE: 'key: [1, "2", 3]'}
with patch_yaml_files(files):
data = load_yaml_config_file(YAML_CONFIG_FILE)
assert yaml.dump(data) == "key:\n- 1\n- '2'\n- 3\n"
def test_duplicate_key(caplog):
"""Test duplicate dict keys."""
files = {YAML_CONFIG_FILE: "key: thing1\nkey: thing2"}
with patch_yaml_files(files):
load_yaml_config_file(YAML_CONFIG_FILE)
assert "contains duplicate key" in caplog.text
|
import datetime
import logging
import math
import threading
import time
# pylint: disable=import-error
from openrazer_daemon.keyboard import KeyboardColour
class RippleEffectThread(threading.Thread):
"""
Ripple thread.
    This thread contains the run loop which performs all the circle calculations and generates the binary payload
"""
def __init__(self, parent, device_number):
super(RippleEffectThread, self).__init__()
self._logger = logging.getLogger('razer.device{0}.ripplethread'.format(device_number))
self._parent = parent
self._colour = (0, 255, 0)
self._refresh_rate = 0.040
self._shutdown = False
self._active = False
self._rows, self._cols = self._parent._parent.MATRIX_DIMS
self._keyboard_grid = KeyboardColour(self._rows, self._cols)
@property
def shutdown(self):
"""
Get the shutdown flag
"""
return self._shutdown
@shutdown.setter
def shutdown(self, value):
"""
Set the shutdown flag
:param value: Shutdown
:type value: bool
"""
self._shutdown = value
@property
def active(self):
"""
Get if the thread is active
:return: Active
:rtype: bool
"""
return self._active
@property
def key_list(self):
"""
Get key list
:return: Key list
:rtype: list
"""
return self._parent.key_list
def enable(self, colour, refresh_rate):
"""
Enable the ripple effect
If the colour tuple contains None then it will set the ripple to random colours
:param colour: Colour tuple like (0, 255, 255)
:type colour: tuple
:param refresh_rate: Refresh rate in seconds
:type refresh_rate: float
"""
if colour[0] is None:
self._colour = None
else:
self._colour = colour
self._refresh_rate = refresh_rate
self._active = True
def disable(self):
"""
Disable the ripple effect
"""
self._active = False
def run(self):
"""
Event loop
"""
# pylint: disable=too-many-nested-blocks,too-many-branches
expire_diff = datetime.timedelta(seconds=2)
# self._parent: RippleManager
# self._parent._parent: The device class (e.g. RazerBlackWidowUltimate2013)
if self._rows == 6 and self._cols == 22:
needslogohandling = True
# a virtual 7th row for logo handling
self._rows += 1
else:
needslogohandling = False
# TODO time execution and then sleep for _refresh_rate - time_taken
while not self._shutdown:
if self._active:
# Clear keyboard
self._keyboard_grid.reset_rows()
now = datetime.datetime.now()
radiuses = []
for expire_time, (key_row, key_col), colour in self.key_list:
event_time = expire_time - expire_diff
now_diff = now - event_time
# Current radius is based off a time metric
if self._colour is not None:
colour = self._colour
radiuses.append((key_row, key_col, now_diff.total_seconds() * 24, colour))
# Iterate through the rows
for row in range(0, self._rows):
# Iterate through the columns
for col in range(0, self._cols):
# The logo location is physically at (6, 11), logically at (0, 20)
# Skip when we come across the logo location, as the ripple would look wrong
if needslogohandling and row == 0 and col == 20:
continue
if needslogohandling and row == 6:
if col != 11:
continue
# To account for logo placement
                            for circle_centre_row, circle_centre_col, rad, colour in radiuses:
                                radius = math.sqrt(math.pow(circle_centre_row - row, 2) + math.pow(circle_centre_col - col, 2))
if rad >= radius >= rad - 2:
# Again, (0, 20) is the logical location of the logo led
self._keyboard_grid.set_key_colour(0, 20, colour)
break
else:
                            for circle_centre_row, circle_centre_col, rad, colour in radiuses:
                                radius = math.sqrt(math.pow(circle_centre_row - row, 2) + math.pow(circle_centre_col - col, 2))
if rad >= radius >= rad - 2:
self._keyboard_grid.set_key_colour(row, col, colour)
break
# Set the colors on the device
payload = self._keyboard_grid.get_total_binary()
self._parent.set_rgb_matrix(payload)
self._parent.refresh_keyboard()
# Sleep until the next ripple refresh
time.sleep(self._refresh_rate)
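# Geometry note for the loop above: each key press in key_list expands as a
# ring whose radius grows at 24 key-units per second (now_diff * 24), and a
# key is lit while its Euclidean distance from the press centre falls inside
# the two-unit band [rad - 2, rad].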
class RippleManager(object):
"""
Class which manages the overall process of performing a ripple effect
"""
def __init__(self, parent, device_number):
self._logger = logging.getLogger('razer.device{0}.ripplemanager'.format(device_number))
self._parent = parent
self._parent.register_observer(self)
self._is_closed = False
self._ripple_thread = RippleEffectThread(self, device_number)
self._ripple_thread.start()
@property
def key_list(self):
"""
Get the list of keys from the key manager
:return: List of tuples (expire_time, (key_row, key_col), random_colour)
:rtype: list of tuple
"""
result = []
if hasattr(self._parent, 'key_manager'):
result = self._parent.key_manager.temp_key_store
return result
def set_rgb_matrix(self, payload):
"""
Set the LED matrix on the keyboard
:param payload: Binary payload
:type payload: bytes
"""
self._parent.setKeyRow(payload)
def refresh_keyboard(self):
"""
Refresh the keyboard
"""
self._parent.setCustom()
def notify(self, msg):
"""
        Receive notifications from the device (we only care about effects)
:param msg: Notification
:type msg: tuple
"""
if not isinstance(msg, tuple):
self._logger.warning("Got msg that was not a tuple")
elif msg[0] == 'effect':
# We have a message directed at us
# MSG format
# 0 1 2 3
# ('effect', Device, 'effectName', 'effectparams'...)
# Device is the device the msg originated from (could be parent device)
if msg[2] == 'setRipple':
# Get (red, green, blue) tuple (args 3:6), and refreshrate arg 6
self._parent.key_manager.temp_key_store_state = True
self._ripple_thread.enable(msg[3:6], msg[6])
else:
# Effect other than ripple so stop
self._ripple_thread.disable()
self._parent.key_manager.temp_key_store_state = False
def close(self):
"""
Close the manager, stop ripple thread
"""
if not self._is_closed:
self._logger.debug("Closing Ripple Manager")
self._is_closed = True
self._ripple_thread.shutdown = True
self._ripple_thread.join(timeout=2)
if self._ripple_thread.is_alive():
self._logger.error("Could not stop RippleEffect thread")
def __del__(self):
self.close()
|
import docker_registry.core.driver as engine
import tempfile
from ..lib import config
__all__ = ['load']
def temp_store_handler():
tmpf = tempfile.TemporaryFile()
def fn(buf):
tmpf.write(buf)
return tmpf, fn
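# temp_store_handler() returns a (tempfile, writer) pair: calling fn(buf)
# appends buf to the backing TemporaryFile, which the caller can seek(0) and
# read back once writing is complete.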
_storage = {}
def load(kind=None):
"""Returns the right storage class according to the configuration."""
global _storage
cfg = config.load()
if not kind:
kind = cfg.storage.lower()
if kind == 'local':
kind = 'file'
if kind in _storage:
return _storage[kind]
_storage[kind] = engine.fetch(kind)(
path=cfg.storage_path,
config=cfg)
return _storage[kind]
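# Minimal usage sketch (hypothetical config values): load() resolves the
# configured driver once and caches it, so repeated calls return the same
# instance.
#
#   store = load()          # e.g. cfg.storage == "local" resolves to "file"
#   assert store is load("file")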
|
import matplotlib.pyplot as plt
from contextlib import contextmanager
from ...fixes import nullcontext
from ._pyvista import _Renderer as _PyVistaRenderer
from ._pyvista import \
_close_all, _set_3d_view, _set_3d_title # noqa: F401 analysis:ignore
class _Renderer(_PyVistaRenderer):
def __init__(self, *args, **kwargs):
from IPython import get_ipython
ipython = get_ipython()
ipython.magic('matplotlib widget')
kwargs["notebook"] = True
super().__init__(*args, **kwargs)
def show(self):
self.figure.display = _NotebookInteractor(self)
return self.scene()
class _NotebookInteractor(object):
def __init__(self, renderer):
from IPython import display
from ipywidgets import HBox, VBox
self.dpi = 90
self.sliders = dict()
self.controllers = dict()
self.renderer = renderer
self.plotter = self.renderer.plotter
with self.disabled_interactivity():
self.fig, self.dh = self.screenshot()
self.configure_controllers()
controllers = VBox(list(self.controllers.values()))
layout = HBox([self.fig.canvas, controllers])
display.display(layout)
@contextmanager
def disabled_interactivity(self):
state = plt.isinteractive()
plt.ioff()
try:
yield
finally:
if state:
plt.ion()
else:
plt.ioff()
def screenshot(self):
width, height = self.renderer.figure.store['window_size']
fig = plt.figure()
fig.figsize = (width / self.dpi, height / self.dpi)
fig.dpi = self.dpi
fig.canvas.toolbar_visible = False
fig.canvas.header_visible = False
fig.canvas.resizable = False
fig.canvas.callbacks.callbacks.clear()
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
dh = ax.imshow(self.plotter.screenshot())
return fig, dh
def update(self):
self.plotter.render()
self.dh.set_data(self.plotter.screenshot())
self.fig.canvas.draw()
def configure_controllers(self):
from ipywidgets import (interactive, Label, VBox, FloatSlider,
IntSlider, Checkbox)
# continuous update
self.continuous_update_button = Checkbox(
value=False,
description='Continuous update',
disabled=False,
indent=False,
)
self.controllers["continuous_update"] = interactive(
self.set_continuous_update,
value=self.continuous_update_button
)
# subplot
number_of_plots = len(self.plotter.renderers)
if number_of_plots > 1:
self.sliders["subplot"] = IntSlider(
value=number_of_plots - 1,
min=0,
max=number_of_plots - 1,
step=1,
continuous_update=False
)
self.controllers["subplot"] = VBox([
Label(value='Select the subplot'),
interactive(
self.set_subplot,
index=self.sliders["subplot"],
)
])
# azimuth
default_azimuth = self.plotter.renderer._azimuth
self.sliders["azimuth"] = FloatSlider(
value=default_azimuth,
min=-180.,
max=180.,
step=10.,
continuous_update=False
)
# elevation
default_elevation = self.plotter.renderer._elevation
self.sliders["elevation"] = FloatSlider(
value=default_elevation,
min=-180.,
max=180.,
step=10.,
continuous_update=False
)
# distance
eps = 1e-5
default_distance = self.plotter.renderer._distance
self.sliders["distance"] = FloatSlider(
value=default_distance,
min=eps,
max=2. * default_distance - eps,
step=default_distance / 10.,
continuous_update=False
)
# camera
self.controllers["camera"] = VBox([
Label(value='Camera settings'),
interactive(
self.set_camera,
azimuth=self.sliders["azimuth"],
elevation=self.sliders["elevation"],
distance=self.sliders["distance"],
)
])
def set_camera(self, azimuth, elevation, distance):
focalpoint = self.plotter.camera.GetFocalPoint()
self.renderer.set_camera(azimuth, elevation,
distance, focalpoint)
self.update()
def set_subplot(self, index):
row, col = self.plotter.index_to_loc(index)
self.renderer.subplot(row, col)
figure = self.renderer.figure
default_azimuth = figure.plotter.renderer._azimuth
default_elevation = figure.plotter.renderer._elevation
default_distance = figure.plotter.renderer._distance
self.sliders["azimuth"].value = default_azimuth
self.sliders["elevation"].value = default_elevation
self.sliders["distance"].value = default_distance
def set_continuous_update(self, value):
for slider in self.sliders.values():
slider.continuous_update = value
_testing_context = nullcontext
|
try:
from future_builtins import filter
except ImportError:
pass
from copy import deepcopy
###{standalone
from collections import OrderedDict
class Meta:
def __init__(self):
self.empty = True
class Tree(object):
"""The main tree class.
Creates a new tree, and stores "data" and "children" in attributes of the same name.
Trees can be hashed and compared.
Parameters:
data: The name of the rule or alias
children: List of matched sub-rules and terminals
meta: Line & Column numbers (if ``propagate_positions`` is enabled).
meta attributes: line, column, start_pos, end_line, end_column, end_pos
"""
def __init__(self, data, children, meta=None):
self.data = data
self.children = children
self._meta = meta
@property
def meta(self):
if self._meta is None:
self._meta = Meta()
return self._meta
def __repr__(self):
return 'Tree(%r, %r)' % (self.data, self.children)
def _pretty_label(self):
return self.data
def _pretty(self, level, indent_str):
if len(self.children) == 1 and not isinstance(self.children[0], Tree):
return [indent_str*level, self._pretty_label(), '\t', '%s' % (self.children[0],), '\n']
l = [indent_str*level, self._pretty_label(), '\n']
for n in self.children:
if isinstance(n, Tree):
l += n._pretty(level+1, indent_str)
else:
l += [indent_str*(level+1), '%s' % (n,), '\n']
return l
def pretty(self, indent_str=' '):
"""Returns an indented string representation of the tree.
Great for debugging.
"""
return ''.join(self._pretty(0, indent_str))
def __eq__(self, other):
try:
return self.data == other.data and self.children == other.children
except AttributeError:
return False
def __ne__(self, other):
return not (self == other)
def __hash__(self):
return hash((self.data, tuple(self.children)))
def iter_subtrees(self):
"""Depth-first iteration.
Iterates over all the subtrees, never returning to the same node twice (Lark's parse-tree is actually a DAG).
"""
queue = [self]
subtrees = OrderedDict()
for subtree in queue:
subtrees[id(subtree)] = subtree
queue += [c for c in reversed(subtree.children)
if isinstance(c, Tree) and id(c) not in subtrees]
del queue
return reversed(list(subtrees.values()))
def find_pred(self, pred):
"""Returns all nodes of the tree that evaluate pred(node) as true."""
return filter(pred, self.iter_subtrees())
def find_data(self, data):
"""Returns all nodes of the tree whose data equals the given data."""
return self.find_pred(lambda t: t.data == data)
###}
def expand_kids_by_index(self, *indices):
"""Expand (inline) children at the given indices"""
for i in sorted(indices, reverse=True): # reverse so that changing tail won't affect indices
kid = self.children[i]
self.children[i:i+1] = kid.children
def scan_values(self, pred):
for c in self.children:
if isinstance(c, Tree):
for t in c.scan_values(pred):
yield t
else:
if pred(c):
yield c
def iter_subtrees_topdown(self):
"""Breadth-first iteration.
Iterates over all the subtrees, return nodes in order like pretty() does.
"""
stack = [self]
while stack:
node = stack.pop()
if not isinstance(node, Tree):
continue
yield node
for n in reversed(node.children):
stack.append(n)
def __deepcopy__(self, memo):
return type(self)(self.data, deepcopy(self.children, memo), meta=self._meta)
def copy(self):
return type(self)(self.data, self.children)
def set(self, data, children):
self.data = data
self.children = children
# XXX Deprecated! Here for backwards compatibility <0.6.0
@property
def line(self):
return self.meta.line
@property
def column(self):
return self.meta.column
@property
def end_line(self):
return self.meta.end_line
@property
def end_column(self):
return self.meta.end_column
class SlottedTree(Tree):
__slots__ = 'data', 'children', 'rule', '_meta'
def pydot__tree_to_png(tree, filename, rankdir="LR", **kwargs):
graph = pydot__tree_to_graph(tree, rankdir, **kwargs)
graph.write_png(filename)
def pydot__tree_to_dot(tree, filename, rankdir="LR", **kwargs):
graph = pydot__tree_to_graph(tree, rankdir, **kwargs)
graph.write(filename)
def pydot__tree_to_graph(tree, rankdir="LR", **kwargs):
"""Creates a colorful image that represents the tree (data+children, without meta)
Possible values for `rankdir` are "TB", "LR", "BT", "RL", corresponding to
directed graphs drawn from top to bottom, from left to right, from bottom to
top, and from right to left, respectively.
`kwargs` can be any graph attribute (e. g. `dpi=200`). For a list of
possible attributes, see https://www.graphviz.org/doc/info/attrs.html.
"""
import pydot
graph = pydot.Dot(graph_type='digraph', rankdir=rankdir, **kwargs)
i = [0]
def new_leaf(leaf):
node = pydot.Node(i[0], label=repr(leaf))
i[0] += 1
graph.add_node(node)
return node
def _to_pydot(subtree):
color = hash(subtree.data) & 0xffffff
color |= 0x808080
subnodes = [_to_pydot(child) if isinstance(child, Tree) else new_leaf(child)
for child in subtree.children]
node = pydot.Node(i[0], style="filled", fillcolor="#%x" % color, label=subtree.data)
i[0] += 1
graph.add_node(node)
for subnode in subnodes:
graph.add_edge(pydot.Edge(node, subnode))
return node
_to_pydot(tree)
return graph
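# Minimal usage sketch (assumes pydot and Graphviz are installed; the tree
# contents are illustrative):
#
#   t = Tree('start', [Tree('rule', ['leaf_a']), 'leaf_b'])
#   print(t.pretty())                  # indented text dump
#   pydot__tree_to_png(t, 'tree.png')  # coloured graph rendering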
|
from mlpatches.base import PatchGroup
from mlpatches import mount_base
from stashutils import mount_ctrl
_BASE_PATCHES = list(filter(None, [getattr(mount_base, p) if p.endswith("PATCH") else None for p in dir(mount_base)]))
class MountPatches(PatchGroup):
"""All mount patches."""
patches = [] + _BASE_PATCHES
def pre_enable(self):
# ensure a manager is set
manager = mount_ctrl.get_manager()
if manager is None:
from stashutils import mount_manager # import here to prevent an error
manager = mount_manager.MountManager()
mount_ctrl.set_manager(manager)
# create patchgroup instances
MOUNT_PATCHES = MountPatches()
|
import operator
import numpy as np
from . import dtypes, duck_array_ops
from .nputils import array_eq, array_ne
try:
import bottleneck as bn
has_bottleneck = True
except ImportError:
# use numpy methods instead
bn = np
has_bottleneck = False
UNARY_OPS = ["neg", "pos", "abs", "invert"]
CMP_BINARY_OPS = ["lt", "le", "ge", "gt"]
NUM_BINARY_OPS = [
"add",
"sub",
"mul",
"truediv",
"floordiv",
"mod",
"pow",
"and",
"xor",
"or",
]
# methods which pass on the numpy return value unchanged
# be careful not to list methods that we would want to wrap later
NUMPY_SAME_METHODS = ["item", "searchsorted"]
# methods which don't modify the data shape, so the result should still be
# wrapped in a Variable/DataArray
NUMPY_UNARY_METHODS = ["argsort", "clip", "conj", "conjugate"]
# methods which remove an axis
REDUCE_METHODS = ["all", "any"]
NAN_REDUCE_METHODS = [
"max",
"min",
"mean",
"prod",
"sum",
"std",
"var",
"median",
]
NAN_CUM_METHODS = ["cumsum", "cumprod"]
# TODO: wrap take, dot, sort
_CUM_DOCSTRING_TEMPLATE = """\
Apply `{name}` along some dimension of {cls}.
Parameters
----------
{extra_args}
skipna : bool, optional
If True, skip missing values (as marked by NaN). By default, only
skips missing values for float dtypes; other dtypes either do not
have a sentinel missing value (int) or skipna=True has not been
implemented (object, datetime64 or timedelta64).
keep_attrs : bool, optional
If True, the attributes (`attrs`) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
**kwargs : dict
Additional keyword arguments passed on to `{name}`.
Returns
-------
cumvalue : {cls}
New {cls} object with `{name}` applied to its data along the
indicated dimension.
"""
_REDUCE_DOCSTRING_TEMPLATE = """\
Reduce this {cls}'s data by applying `{name}` along some dimension(s).
Parameters
----------
{extra_args}{skip_na_docs}{min_count_docs}
keep_attrs : bool, optional
If True, the attributes (`attrs`) will be copied from the original
object to the new one. If False (default), the new object will be
returned without attributes.
**kwargs : dict
Additional keyword arguments passed on to the appropriate array
function for calculating `{name}` on this object's data.
Returns
-------
reduced : {cls}
New {cls} object with `{name}` applied to its data and the
indicated dimension(s) removed.
"""
_SKIPNA_DOCSTRING = """
skipna : bool, optional
If True, skip missing values (as marked by NaN). By default, only
skips missing values for float dtypes; other dtypes either do not
have a sentinel missing value (int) or skipna=True has not been
implemented (object, datetime64 or timedelta64)."""
_MINCOUNT_DOCSTRING = """
min_count : int, default: None
The required number of valid values to perform the operation.
If fewer than min_count non-NA values are present the result will
be NA. New in version 0.10.8: Added with the default being None."""
_COARSEN_REDUCE_DOCSTRING_TEMPLATE = """\
Coarsen this object by applying `{name}` along its dimensions.
Parameters
----------
**kwargs : dict
Additional keyword arguments passed on to `{name}`.
Returns
-------
reduced : DataArray or Dataset
    New object with `{name}` applied along its coarsened dimensions.
"""
def fillna(data, other, join="left", dataset_join="left"):
"""Fill missing values in this object with data from the other object.
Follows normal broadcasting and alignment rules.
Parameters
----------
join : {"outer", "inner", "left", "right"}, optional
Method for joining the indexes of the passed objects along each
dimension
- "outer": use the union of object indexes
- "inner": use the intersection of object indexes
- "left": use indexes from the first object with each dimension
- "right": use indexes from the last object with each dimension
- "exact": raise `ValueError` instead of aligning when indexes to be
aligned are not equal
dataset_join : {"outer", "inner", "left", "right"}, optional
Method for joining variables of Dataset objects with mismatched
data variables.
- "outer": take variables from both Dataset objects
- "inner": take only overlapped variables
- "left": take only variables from the first object
- "right": take only variables from the last object
"""
from .computation import apply_ufunc
return apply_ufunc(
duck_array_ops.fillna,
data,
other,
join=join,
dask="allowed",
dataset_join=dataset_join,
dataset_fill_value=np.nan,
keep_attrs=True,
)
def where_method(self, cond, other=dtypes.NA):
"""Return elements from `self` or `other` depending on `cond`.
Parameters
----------
cond : DataArray or Dataset with boolean dtype
Locations at which to preserve this objects values.
other : scalar, DataArray or Dataset, optional
Value to use for locations in this object where ``cond`` is False.
By default, inserts missing values.
Returns
-------
Same type as caller.
"""
from .computation import apply_ufunc
# alignment for three arguments is complicated, so don't support it yet
join = "inner" if other is dtypes.NA else "exact"
return apply_ufunc(
duck_array_ops.where_method,
self,
cond,
other,
join=join,
dataset_join=join,
dask="allowed",
keep_attrs=True,
)
def _call_possibly_missing_method(arg, name, args, kwargs):
try:
method = getattr(arg, name)
except AttributeError:
duck_array_ops.fail_on_dask_array_input(arg, func_name=name)
if hasattr(arg, "data"):
duck_array_ops.fail_on_dask_array_input(arg.data, func_name=name)
raise
else:
return method(*args, **kwargs)
def _values_method_wrapper(name):
def func(self, *args, **kwargs):
return _call_possibly_missing_method(self.data, name, args, kwargs)
func.__name__ = name
func.__doc__ = getattr(np.ndarray, name).__doc__
return func
def _method_wrapper(name):
def func(self, *args, **kwargs):
return _call_possibly_missing_method(self, name, args, kwargs)
func.__name__ = name
func.__doc__ = getattr(np.ndarray, name).__doc__
return func
def _func_slash_method_wrapper(f, name=None):
# try to wrap a method, but if not found use the function
# this is useful when patching in a function as both a DataArray and
# Dataset method
if name is None:
name = f.__name__
def func(self, *args, **kwargs):
try:
return getattr(self, name)(*args, **kwargs)
except AttributeError:
return f(self, *args, **kwargs)
func.__name__ = name
func.__doc__ = f.__doc__
return func
def inject_reduce_methods(cls):
methods = (
[
(name, getattr(duck_array_ops, "array_%s" % name), False)
for name in REDUCE_METHODS
]
+ [(name, getattr(duck_array_ops, name), True) for name in NAN_REDUCE_METHODS]
+ [("count", duck_array_ops.count, False)]
)
for name, f, include_skipna in methods:
numeric_only = getattr(f, "numeric_only", False)
available_min_count = getattr(f, "available_min_count", False)
skip_na_docs = _SKIPNA_DOCSTRING if include_skipna else ""
min_count_docs = _MINCOUNT_DOCSTRING if available_min_count else ""
func = cls._reduce_method(f, include_skipna, numeric_only)
func.__name__ = name
func.__doc__ = _REDUCE_DOCSTRING_TEMPLATE.format(
name=name,
cls=cls.__name__,
extra_args=cls._reduce_extra_args_docstring.format(name=name),
skip_na_docs=skip_na_docs,
min_count_docs=min_count_docs,
)
setattr(cls, name, func)
def inject_cum_methods(cls):
methods = [(name, getattr(duck_array_ops, name), True) for name in NAN_CUM_METHODS]
for name, f, include_skipna in methods:
numeric_only = getattr(f, "numeric_only", False)
func = cls._reduce_method(f, include_skipna, numeric_only)
func.__name__ = name
func.__doc__ = _CUM_DOCSTRING_TEMPLATE.format(
name=name,
cls=cls.__name__,
extra_args=cls._cum_extra_args_docstring.format(name=name),
)
setattr(cls, name, func)
def op_str(name):
return "__%s__" % name
def get_op(name):
return getattr(operator, op_str(name))
NON_INPLACE_OP = {get_op("i" + name): get_op(name) for name in NUM_BINARY_OPS}
def inplace_to_noninplace_op(f):
return NON_INPLACE_OP[f]
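# For example, op_str("add") is "__add__" and get_op("add") is operator.add,
# so NON_INPLACE_OP maps operator.iadd back to operator.add (and likewise for
# the other names in NUM_BINARY_OPS).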
def inject_binary_ops(cls, inplace=False):
for name in CMP_BINARY_OPS + NUM_BINARY_OPS:
setattr(cls, op_str(name), cls._binary_op(get_op(name)))
for name, f in [("eq", array_eq), ("ne", array_ne)]:
setattr(cls, op_str(name), cls._binary_op(f))
for name in NUM_BINARY_OPS:
# only numeric operations have in-place and reflexive variants
setattr(cls, op_str("r" + name), cls._binary_op(get_op(name), reflexive=True))
if inplace:
setattr(cls, op_str("i" + name), cls._inplace_binary_op(get_op("i" + name)))
def inject_all_ops_and_reduce_methods(cls, priority=50, array_only=True):
# prioritize our operations over those of numpy.ndarray (priority=1)
# and numpy.matrix (priority=10)
cls.__array_priority__ = priority
# patch in standard special operations
for name in UNARY_OPS:
setattr(cls, op_str(name), cls._unary_op(get_op(name)))
inject_binary_ops(cls, inplace=True)
# patch in numpy/pandas methods
for name in NUMPY_UNARY_METHODS:
setattr(cls, name, cls._unary_op(_method_wrapper(name)))
f = _func_slash_method_wrapper(duck_array_ops.around, name="round")
setattr(cls, "round", cls._unary_op(f))
if array_only:
# these methods don't return arrays of the same shape as the input, so
# don't try to patch these in for Dataset objects
for name in NUMPY_SAME_METHODS:
setattr(cls, name, _values_method_wrapper(name))
inject_reduce_methods(cls)
inject_cum_methods(cls)
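# These inject_* helpers are intended to be called once per class at import
# time (e.g. on Variable, DataArray and Dataset), relying on the target class
# providing the _unary_op / _binary_op / _inplace_binary_op / _reduce_method
# hooks used above.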
|
from Handler import Handler
from graphitepickle import GraphitePickleHandler
from copy import deepcopy
class MultiGraphitePickleHandler(Handler):
"""
Implements the abstract Handler class, sending data to multiple
graphite servers by using two instances of GraphitePickleHandler
"""
def __init__(self, config=None):
"""
Create a new instance of the MultiGraphitePickleHandler class
"""
# Initialize Handler
Handler.__init__(self, config)
self.handlers = []
# Initialize Options
hosts = self.config['host']
for host in hosts:
config = deepcopy(self.config)
config['host'] = host
self.handlers.append(GraphitePickleHandler(config))
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(MultiGraphitePickleHandler,
self).get_default_config_help()
config.update({
'host': 'Hostname, Hostname, Hostname',
'port': 'Port',
'proto': 'udp or tcp',
'timeout': '',
'batch': 'How many to store before sending to the graphite server',
'max_backlog_multiplier': 'how many batches to store before trimming', # NOQA
'trim_backlog_multiplier': 'Trim down how many batches',
})
return config
def get_default_config(self):
"""
Return the default config for the handler
"""
config = super(MultiGraphitePickleHandler, self).get_default_config()
config.update({
'host': ['localhost'],
'port': 2003,
'proto': 'tcp',
'timeout': 15,
'batch': 1,
'max_backlog_multiplier': 5,
'trim_backlog_multiplier': 4,
})
return config
def process(self, metric):
"""
Process a metric by passing it to GraphitePickleHandler
instances
"""
for handler in self.handlers:
handler.process(metric)
def flush(self):
"""Flush metrics in queue"""
for handler in self.handlers:
handler.flush()
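# Usage sketch (hostnames are illustrative): a list under 'host' fans out into
# one GraphitePickleHandler per entry, each given a copy of the config with
# 'host' overridden, so process()/flush() broadcast to every server.
#
#   handler = MultiGraphitePickleHandler({'host': ['graphite-a', 'graphite-b']})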
|
import logging
import threading
import traceback
from configobj import ConfigObj
import time
class Handler(object):
"""
Handlers process metrics that are collected by Collectors.
"""
def __init__(self, config=None, log=None):
"""
Create a new instance of the Handler class
"""
# Enabled? Default to yes, but allow handlers to disable themselves
self.enabled = True
# Initialize Log
if log is None:
self.log = logging.getLogger('diamond')
else:
self.log = log
# Initialize Blank Configs
self.config = ConfigObj()
# Load default
self.config.merge(self.get_default_config())
# Load in user
self.config.merge(config)
# error logging throttling
self.server_error_interval = float(
self.config['server_error_interval'])
self._errors = {}
# Initialize Lock
self.lock = threading.Lock()
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
return {
'get_default_config_help': 'get_default_config_help',
'server_error_interval': ('How frequently to send repeated server '
'errors'),
}
def get_default_config(self):
"""
Return the default config for the handler
"""
return {
'get_default_config': 'get_default_config',
'server_error_interval': 120,
}
def _process(self, metric):
"""
Decorator for processing handlers with a lock, catching exceptions
"""
if not self.enabled:
return
try:
try:
self.lock.acquire()
self.process(metric)
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
def process(self, metric):
"""
Process a metric
Should be overridden in subclasses
"""
raise NotImplementedError
def _flush(self):
"""
        Decorator for flushing handlers with a lock, catching exceptions
"""
if not self.enabled:
return
try:
try:
self.lock.acquire()
self.flush()
except Exception:
self.log.error(traceback.format_exc())
finally:
if self.lock.locked():
self.lock.release()
def flush(self):
"""
Flush metrics
Optional: Should be overridden in subclasses
"""
pass
def _throttle_error(self, msg, *args, **kwargs):
"""
Avoids sending errors repeatedly. Waits at least
`self.server_error_interval` seconds before sending the same error
string to the error logging facility. If not enough time has passed,
it calls `log.debug` instead
        Receives the same parameters as `Logger.error` and passes them on to the
selected logging function, but ignores all parameters but the main
message string when checking the last emission time.
:returns: the return value of `Logger.debug` or `Logger.error`
"""
now = time.time()
if msg in self._errors:
if ((now - self._errors[msg]) >=
self.server_error_interval):
fn = self.log.error
self._errors[msg] = now
else:
fn = self.log.debug
else:
self._errors[msg] = now
fn = self.log.error
return fn(msg, *args, **kwargs)
def _reset_errors(self, msg=None):
"""
Resets the logging throttle cache, so the next error is emitted
regardless of the value in `self.server_error_interval`
:param msg: if present, only this key is reset. Otherwise, the whole
cache is cleaned.
"""
if msg is not None and msg in self._errors:
del self._errors[msg]
else:
self._errors = {}
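# Minimal subclass sketch (hypothetical): a concrete handler only has to
# override process(); flush() is optional. The _process()/_flush() wrappers
# above add the lock and exception logging.
#
#   class LogOnlyHandler(Handler):
#       def process(self, metric):
#           self.log.info("metric: %s", metric)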
|
import html
import fnmatch
import re
from PyQt5.QtWidgets import QLabel, QSizePolicy
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt
from qutebrowser.config import config, stylesheet
from qutebrowser.utils import utils, usertypes
from qutebrowser.misc import objects
from qutebrowser.keyinput import keyutils
class KeyHintView(QLabel):
"""The view showing hints for key bindings based on the current key string.
Attributes:
_win_id: Window ID of parent.
Signals:
update_geometry: Emitted when this widget should be resized/positioned.
"""
STYLESHEET = """
QLabel {
font: {{ conf.fonts.keyhint }};
color: {{ conf.colors.keyhint.fg }};
background-color: {{ conf.colors.keyhint.bg }};
padding: 6px;
{% if conf.statusbar.position == 'top' %}
border-bottom-right-radius: {{ conf.keyhint.radius }}px;
{% else %}
border-top-right-radius: {{ conf.keyhint.radius }}px;
{% endif %}
}
"""
update_geometry = pyqtSignal()
def __init__(self, win_id, parent=None):
super().__init__(parent)
self.setTextFormat(Qt.RichText)
self._win_id = win_id
self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Minimum)
self.hide()
self._show_timer = usertypes.Timer(self, 'keyhint_show')
self._show_timer.timeout.connect(self.show)
self._show_timer.setSingleShot(True)
stylesheet.set_register(self)
def __repr__(self):
return utils.get_repr(self, win_id=self._win_id)
def showEvent(self, e):
"""Adjust the keyhint size when it's freshly shown."""
self.update_geometry.emit()
super().showEvent(e)
@pyqtSlot(usertypes.KeyMode, str)
def update_keyhint(self, mode, prefix):
"""Show hints for the given prefix (or hide if prefix is empty).
Args:
prefix: The current partial keystring.
"""
match = re.fullmatch(r'(\d*)(.*)', prefix)
assert match is not None, prefix
countstr, prefix = match.groups()
if not prefix:
self._show_timer.stop()
self.hide()
return
def blacklisted(keychain):
return any(fnmatch.fnmatchcase(keychain, glob)
for glob in config.val.keyhint.blacklist)
def takes_count(cmdstr):
"""Return true iff this command can take a count argument."""
cmdname = cmdstr.split(' ')[0]
cmd = objects.commands.get(cmdname)
return cmd and cmd.takes_count()
bindings_dict = config.key_instance.get_bindings_for(mode.name)
bindings = [(k, v) for (k, v) in sorted(bindings_dict.items())
if keyutils.KeySequence.parse(prefix).matches(k) and
not blacklisted(str(k)) and
(takes_count(v) or not countstr)]
if not bindings:
self._show_timer.stop()
return
# delay so a quickly typed keychain doesn't display hints
self._show_timer.setInterval(config.val.keyhint.delay)
self._show_timer.start()
suffix_color = html.escape(config.val.colors.keyhint.suffix.fg)
text = ''
for seq, cmd in bindings:
text += (
"<tr>"
"<td>{}</td>"
"<td style='color: {}'>{}</td>"
"<td style='padding-left: 2ex'>{}</td>"
"</tr>"
).format(
html.escape(prefix),
suffix_color,
html.escape(str(seq)[len(prefix):]),
html.escape(cmd)
)
text = '<table>{}</table>'.format(text)
self.setText(text)
self.adjustSize()
self.update_geometry.emit()
|
from datetime import datetime, timedelta
from sqlalchemy.exc import OperationalError
from homeassistant.components.recorder import (
CONFIG_SCHEMA,
DOMAIN,
Recorder,
run_information,
run_information_from_instance,
run_information_with_session,
)
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import Events, RecorderRuns, States
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import MATCH_ALL, STATE_LOCKED, STATE_UNLOCKED
from homeassistant.core import Context, callback
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from .common import wait_recording_done
from tests.async_mock import patch
from tests.common import async_fire_time_changed, get_test_home_assistant
def test_saving_state(hass, hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder()
entity_id = "test.recorder"
state = "restoring_from_db"
attributes = {"test_attr": 5, "test_attr_10": "nice"}
hass.states.set(entity_id, state, attributes)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
db_states = list(session.query(States))
assert len(db_states) == 1
assert db_states[0].event_id > 0
state = db_states[0].to_native()
assert state == _state_empty_context(hass, entity_id)
def test_saving_state_with_exception(hass, hass_recorder, caplog):
"""Test saving and restoring a state."""
hass = hass_recorder()
entity_id = "test.recorder"
state = "restoring_from_db"
attributes = {"test_attr": 5, "test_attr_10": "nice"}
def _throw_if_state_in_session(*args, **kwargs):
for obj in hass.data[DATA_INSTANCE].event_session:
if isinstance(obj, States):
raise OperationalError(
"insert the state", "fake params", "forced to fail"
)
with patch("time.sleep"), patch.object(
hass.data[DATA_INSTANCE].event_session,
"flush",
side_effect=_throw_if_state_in_session,
):
hass.states.set(entity_id, "fail", attributes)
wait_recording_done(hass)
assert "Error executing query" in caplog.text
assert "Error saving events" not in caplog.text
caplog.clear()
hass.states.set(entity_id, state, attributes)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
db_states = list(session.query(States))
assert len(db_states) >= 1
assert "Error executing query" not in caplog.text
assert "Error saving events" not in caplog.text
def test_saving_event(hass, hass_recorder):
"""Test saving and restoring an event."""
hass = hass_recorder()
event_type = "EVENT_TEST"
event_data = {"test_attr": 5, "test_attr_10": "nice"}
events = []
@callback
def event_listener(event):
"""Record events from eventbus."""
if event.event_type == event_type:
events.append(event)
hass.bus.listen(MATCH_ALL, event_listener)
hass.bus.fire(event_type, event_data)
wait_recording_done(hass)
assert len(events) == 1
event = events[0]
hass.data[DATA_INSTANCE].block_till_done()
with session_scope(hass=hass) as session:
db_events = list(session.query(Events).filter_by(event_type=event_type))
assert len(db_events) == 1
db_event = db_events[0].to_native()
assert event.event_type == db_event.event_type
assert event.data == db_event.data
assert event.origin == db_event.origin
# Recorder uses SQLite and stores datetimes as integer unix timestamps
assert event.time_fired.replace(microsecond=0) == db_event.time_fired.replace(
microsecond=0
)
def _add_entities(hass, entity_ids):
"""Add entities."""
attributes = {"test_attr": 5, "test_attr_10": "nice"}
for idx, entity_id in enumerate(entity_ids):
hass.states.set(entity_id, f"state{idx}", attributes)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
return [st.to_native() for st in session.query(States)]
def _add_events(hass, events):
with session_scope(hass=hass) as session:
session.query(Events).delete(synchronize_session=False)
for event_type in events:
hass.bus.fire(event_type)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
return [ev.to_native() for ev in session.query(Events)]
def _state_empty_context(hass, entity_id):
# We don't restore context unless we need it by joining the
# events table on the event_id for state_changed events
state = hass.states.get(entity_id)
state.context = Context(id=None)
return state
# pylint: disable=redefined-outer-name,invalid-name
def test_saving_state_include_domains(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder({"include": {"domains": "test2"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
assert len(states) == 1
assert _state_empty_context(hass, "test2.recorder") == states[0]
def test_saving_state_include_domains_globs(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{"include": {"domains": "test2", "entity_globs": "*.included_*"}}
)
states = _add_entities(
hass, ["test.recorder", "test2.recorder", "test3.included_entity"]
)
assert len(states) == 2
assert _state_empty_context(hass, "test2.recorder") == states[0]
assert _state_empty_context(hass, "test3.included_entity") == states[1]
def test_saving_state_incl_entities(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder({"include": {"entities": "test2.recorder"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
assert len(states) == 1
assert _state_empty_context(hass, "test2.recorder") == states[0]
def test_saving_event_exclude_event_type(hass_recorder):
"""Test saving and restoring an event."""
hass = hass_recorder(
{
"exclude": {
"event_types": [
"service_registered",
"homeassistant_start",
"component_loaded",
"core_config_updated",
"homeassistant_started",
"test",
]
}
}
)
events = _add_events(hass, ["test", "test2"])
assert len(events) == 1
assert events[0].event_type == "test2"
def test_saving_state_exclude_domains(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder({"exclude": {"domains": "test"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
assert len(states) == 1
assert _state_empty_context(hass, "test2.recorder") == states[0]
def test_saving_state_exclude_domains_globs(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{"exclude": {"domains": "test", "entity_globs": "*.excluded_*"}}
)
states = _add_entities(
hass, ["test.recorder", "test2.recorder", "test2.excluded_entity"]
)
assert len(states) == 1
assert _state_empty_context(hass, "test2.recorder") == states[0]
def test_saving_state_exclude_entities(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder({"exclude": {"entities": "test.recorder"}})
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
assert len(states) == 1
assert _state_empty_context(hass, "test2.recorder") == states[0]
def test_saving_state_exclude_domain_include_entity(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{"include": {"entities": "test.recorder"}, "exclude": {"domains": "test"}}
)
states = _add_entities(hass, ["test.recorder", "test2.recorder"])
assert len(states) == 2
def test_saving_state_exclude_domain_glob_include_entity(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{
"include": {"entities": ["test.recorder", "test.excluded_entity"]},
"exclude": {"domains": "test", "entity_globs": "*._excluded_*"},
}
)
states = _add_entities(
hass, ["test.recorder", "test2.recorder", "test.excluded_entity"]
)
assert len(states) == 3
def test_saving_state_include_domain_exclude_entity(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{"exclude": {"entities": "test.recorder"}, "include": {"domains": "test"}}
)
states = _add_entities(hass, ["test.recorder", "test2.recorder", "test.ok"])
assert len(states) == 1
assert _state_empty_context(hass, "test.ok") == states[0]
assert _state_empty_context(hass, "test.ok").state == "state2"
def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder(
{
"exclude": {"entities": ["test.recorder", "test2.included_entity"]},
"include": {"domains": "test", "entity_globs": "*._included_*"},
}
)
states = _add_entities(
hass, ["test.recorder", "test2.recorder", "test.ok", "test2.included_entity"]
)
assert len(states) == 1
assert _state_empty_context(hass, "test.ok") == states[0]
assert _state_empty_context(hass, "test.ok").state == "state2"
def test_saving_state_and_removing_entity(hass, hass_recorder):
"""Test saving the state of a removed entity."""
hass = hass_recorder()
entity_id = "lock.mine"
hass.states.set(entity_id, STATE_LOCKED)
hass.states.set(entity_id, STATE_UNLOCKED)
hass.states.async_remove(entity_id)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
states = list(session.query(States))
assert len(states) == 3
assert states[0].entity_id == entity_id
assert states[0].state == STATE_LOCKED
assert states[1].entity_id == entity_id
assert states[1].state == STATE_UNLOCKED
assert states[2].entity_id == entity_id
assert states[2].state is None
def test_recorder_setup_failure():
"""Test some exceptions."""
hass = get_test_home_assistant()
with patch.object(Recorder, "_setup_connection") as setup, patch(
"homeassistant.components.recorder.time.sleep"
):
setup.side_effect = ImportError("driver not found")
rec = Recorder(
hass,
auto_purge=True,
keep_days=7,
commit_interval=1,
uri="sqlite://",
db_max_retries=10,
db_retry_wait=3,
entity_filter=CONFIG_SCHEMA({DOMAIN: {}}),
exclude_t=[],
db_integrity_check=False,
)
rec.start()
rec.join()
hass.stop()
async def test_defaults_set(hass):
"""Test the config defaults are set."""
recorder_config = None
async def mock_setup(hass, config):
"""Mock setup."""
nonlocal recorder_config
recorder_config = config["recorder"]
return True
with patch("homeassistant.components.recorder.async_setup", side_effect=mock_setup):
assert await async_setup_component(hass, "history", {})
assert recorder_config is not None
# pylint: disable=unsubscriptable-object
assert recorder_config["auto_purge"]
assert recorder_config["purge_keep_days"] == 10
def test_auto_purge(hass_recorder):
"""Test saving and restoring a state."""
hass = hass_recorder()
original_tz = dt_util.DEFAULT_TIME_ZONE
tz = dt_util.get_time_zone("Europe/Copenhagen")
dt_util.set_default_time_zone(tz)
now = dt_util.utcnow()
test_time = tz.localize(datetime(now.year + 1, 1, 1, 4, 12, 0))
async_fire_time_changed(hass, test_time)
with patch(
"homeassistant.components.recorder.purge.purge_old_data", return_value=True
) as purge_old_data:
for delta in (-1, 0, 1):
async_fire_time_changed(hass, test_time + timedelta(seconds=delta))
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
assert len(purge_old_data.mock_calls) == 1
dt_util.set_default_time_zone(original_tz)
def test_saving_sets_old_state(hass_recorder):
"""Test saving sets old state."""
hass = hass_recorder()
hass.states.set("test.one", "on", {})
hass.states.set("test.two", "on", {})
wait_recording_done(hass)
hass.states.set("test.one", "off", {})
hass.states.set("test.two", "off", {})
wait_recording_done(hass)
with session_scope(hass=hass) as session:
states = list(session.query(States))
assert len(states) == 4
assert states[0].entity_id == "test.one"
assert states[1].entity_id == "test.two"
assert states[2].entity_id == "test.one"
assert states[3].entity_id == "test.two"
assert states[0].old_state_id is None
assert states[1].old_state_id is None
assert states[2].old_state_id == states[0].state_id
assert states[3].old_state_id == states[1].state_id
def test_saving_state_with_serializable_data(hass_recorder, caplog):
"""Test saving data that cannot be serialized does not crash."""
hass = hass_recorder()
hass.states.set("test.one", "on", {"fail": CannotSerializeMe()})
wait_recording_done(hass)
hass.states.set("test.two", "on", {})
wait_recording_done(hass)
hass.states.set("test.two", "off", {})
wait_recording_done(hass)
with session_scope(hass=hass) as session:
states = list(session.query(States))
assert len(states) == 2
assert states[0].entity_id == "test.two"
assert states[1].entity_id == "test.two"
assert states[0].old_state_id is None
assert states[1].old_state_id == states[0].state_id
assert "State is not JSON serializable" in caplog.text
def test_run_information(hass_recorder):
"""Ensure run_information returns expected data."""
before_start_recording = dt_util.utcnow()
hass = hass_recorder()
run_info = run_information_from_instance(hass)
assert isinstance(run_info, RecorderRuns)
assert run_info.closed_incorrect is False
with session_scope(hass=hass) as session:
run_info = run_information_with_session(session)
assert isinstance(run_info, RecorderRuns)
assert run_info.closed_incorrect is False
run_info = run_information(hass)
assert isinstance(run_info, RecorderRuns)
assert run_info.closed_incorrect is False
hass.states.set("test.two", "on", {})
wait_recording_done(hass)
run_info = run_information(hass)
assert isinstance(run_info, RecorderRuns)
assert run_info.closed_incorrect is False
run_info = run_information(hass, before_start_recording)
assert run_info is None
run_info = run_information(hass, dt_util.utcnow())
assert isinstance(run_info, RecorderRuns)
assert run_info.closed_incorrect is False
class CannotSerializeMe:
"""A class that the JSONEncoder cannot serialize."""
|
import string
from collections import Counter
import numpy as np
from scattertext.CSRMatrixTools import CSRMatrixFactory
from scattertext.TermDocMatrix import TermDocMatrix
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
from scattertext.indexstore.IndexStore import IndexStore
class CategoryTextIterNotSetError(Exception):
pass
class TermDocMatrixFactory(object):
def __init__(self,
category_text_iter=None,
clean_function=lambda x: x,
nlp=None,
feats_from_spacy_doc=None
):
"""
Class for easy construction of a term document matrix.
        This class lets you define an iterator for each document (text_iter),
an iterator for each document's category name (category_iter),
and a document cleaning function that's applied to each document
before it's parsed.
Parameters
----------
        category_text_iter : iter<(str: category, unicode: document)>
An iterator of pairs. The first element is a string category
name, the second the text of a document. You can also set this
using the function set_category_text_iter.
clean_function : function (default lambda x: x)
A function that strips invalid characters out of a string, returning
the new string.
nlp : spacy.load('en') (default None)
The spaCy parser used to parse documents. If it's None,
the class will go through the expensive operation of
creating one to parse the text
feats_from_spacy_doc : FeatsFromSpacyDoc (default None)
Class for extraction of features from spacy
Attributes
----------
_clean_function : function
function that takes a unicode document and returns
a cleaned version of that document
_text_iter : iter<unicode>
an iterator that iterates through the unicode text of each
document
_category_iter : iter<str>
an iterator the same size as text iter that gives a string or
            unicode name of each document category
Examples
--------
>>> import scattertext as ST
>>> documents = [u'What art thou that usurp''st this time of night,',
...u'Together with that fair and warlike form',
...u'In which the majesty of buried Denmark',
...u'Did sometimes march? by heaven I charge thee, speak!',
...u'Halt! Who goes there?',
...u'[Intro]',
...u'It is I sire Tone from Brooklyn.',
...u'Well, speak up man what is it?',
...u'News from the East sire! THE BEST OF BOTH WORLDS HAS RETURNED!']
>>> categories = ['hamlet'] * 4 + ['jay-z/r. kelly'] * 5
>>> clean_function = lambda text: '' if text.startswith('[') else text
>>> term_doc_mat = ST.TermDocMatrixFactory(category_text_iter = zip(categories, documents),clean_function = clean_function).build()
"""
self._category_text_iter = category_text_iter
self._clean_function = clean_function
self._nlp = nlp
self._entity_types_to_censor = set()
if feats_from_spacy_doc is None:
self._feats_from_spacy_doc = FeatsFromSpacyDoc()
else:
self._feats_from_spacy_doc = feats_from_spacy_doc
def set_category_text_iter(self, category_text_iter):
"""Initializes the category_text_iter
        Parameters
        ----------
        category_text_iter : iter<(str: category, unicode: document)>
An iterator of pairs. The first element is a string category
name, the second the text of a document.
Returns
----------
self: TermDocMatrixFactory
"""
self._category_text_iter = category_text_iter
return self
def set_nlp(self, nlp):
"""Adds a spaCy-compatible nlp function
        Parameters
----------
nlp : spacy model
Returns
----------
self: TermDocMatrixFactory
"""
self._nlp = nlp
return self
def build(self):
"""Generate a TermDocMatrix from data in parameters.
Returns
----------
term_doc_matrix : TermDocMatrix
The object that this factory class builds.
"""
if self._category_text_iter is None:
raise CategoryTextIterNotSetError()
nlp = self.get_nlp()
category_document_iter = (
(category, self._clean_function(raw_text))
for category, raw_text
in self._category_text_iter
)
term_doc_matrix = self._build_from_category_spacy_doc_iter(
(
(category, nlp(text))
for (category, text)
in category_document_iter
if text.strip() != ''
)
)
return term_doc_matrix
def get_nlp(self):
nlp = self._nlp
if nlp is None:
import spacy
nlp = spacy.load('en')
return nlp
def censor_entity_types(self, entity_types):
# type: (set) -> TermDocMatrixFactory
'''
        Entity types to exclude from feature construction. Terms matching
        the specified entity types will be labeled by their entity type
        instead of by their lower-case orthographic form or lemma.
Parameters
----------
entity_types : set of entity types outputted by spaCy
'TIME', 'WORK_OF_ART', 'PERSON', 'MONEY', 'ORG', 'ORDINAL', 'DATE',
'CARDINAL', 'LAW', 'QUANTITY', 'GPE', 'PERCENT'
Returns
---------
self
'''
assert type(entity_types) == set
self._entity_types_to_censor = entity_types
        self._feats_from_spacy_doc = FeatsFromSpacyDoc(
            entity_types_to_censor=self._entity_types_to_censor
        )
return self
def _build_from_category_spacy_doc_iter(self, category_doc_iter):
'''
Parameters
----------
category_doc_iter : iterator of (string category name, spacy.tokens.doc.Doc) pairs
Returns
----------
t : TermDocMatrix
'''
term_idx_store = IndexStore()
category_idx_store = IndexStore()
metadata_idx_store = IndexStore()
        X, mX, y = self._get_features_and_labels_from_documents_and_indexes(
            category_doc_iter,
            category_idx_store,
            term_idx_store,
            metadata_idx_store)
return TermDocMatrix(X,
mX,
y,
term_idx_store=term_idx_store,
category_idx_store=category_idx_store,
metadata_idx_store=metadata_idx_store)
def _get_features_and_labels_from_documents_and_indexes(self,
category_doc_iter,
category_idx_store,
term_idx_store,
metadata_idx_store):
y = []
X_factory = CSRMatrixFactory()
mX_factory = CSRMatrixFactory()
for document_index, (category, parsed_text) in enumerate(category_doc_iter):
self._register_doc_and_category(X_factory,
mX_factory,
category,
category_idx_store,
document_index,
parsed_text,
term_idx_store,
metadata_idx_store,
y)
X = X_factory.get_csr_matrix()
mX = mX_factory.get_csr_matrix()
y = np.array(y)
return X, mX, y
def _old_register_doc_and_category(self,
X_factory,
category, category_idx_store,
document_index,
parsed_text,
term_idx_store,
y):
y.append(category_idx_store.getidx(category))
document_features = self._get_features_from_parsed_text(parsed_text, term_idx_store)
        self._register_document_features_with_X_factory(
            X_factory, document_index, document_features)
def _register_doc_and_category(self,
X_factory,
mX_factory,
category,
category_idx_store,
document_index,
parsed_text,
term_idx_store,
metadata_idx_store,
y):
self._register_doc(X_factory, mX_factory, document_index, parsed_text, term_idx_store, metadata_idx_store)
self._register_category(category, category_idx_store, y)
def _register_doc(self, X_factory, mX_factory, document_index, parsed_text, term_idx_store, metadata_idx_store):
for term, count in self._feats_from_spacy_doc.get_feats(parsed_text).items():
term_idx = term_idx_store.getidx(term)
X_factory[document_index, term_idx] = count
for term, val in self._feats_from_spacy_doc.get_doc_metadata(parsed_text).items():
meta_idx = metadata_idx_store.getidx(term)
mX_factory[document_index, meta_idx] = val
def _register_category(self, category, category_idx_store, y):
y.append(category_idx_store.getidx(category))
def _register_document_features_with_X_factory(self, X_factory, doci, term_freq):
for word_idx, freq in term_freq.items():
X_factory[doci, word_idx] = freq
def _get_features_from_parsed_text(self, parsed_text, term_idx_store):
return {term_idx_store.getidxstrict(k): v for k, v
in self._feats_from_spacy_doc.get_feats(parsed_text).items()
if k in term_idx_store}
class FeatsFromDoc(TermDocMatrixFactory):
def __init__(self,
term_idx_store,
clean_function=lambda x: x,
nlp=None,
feats_from_spacy_doc=None):
"""Class for extracting features from a new document.
Parameters
----------
term_idx_store : IndexStore (index -> term)
clean_function : function (default lambda x: x)
A function that takes a unicode document and returns
a cleaned version of that document
nlp : spacy parser (default None)
The spaCy parser used to parse documents. If it's None,
the class will go through the expensive operation of
creating one to parse the text
feats_from_spacy_doc : FeatsFromSpacyDoc (default None)
Class for extraction of features from spacy
"""
TermDocMatrixFactory.__init__(self,
clean_function=clean_function,
nlp=nlp,
feats_from_spacy_doc=feats_from_spacy_doc)
self._term_idx_store = term_idx_store
def feats_from_doc(self, raw_text):
'''
Parameters
----------
raw_text, uncleaned text for parsing out features
Returns
-------
csr_matrix, feature matrix
'''
parsed_text = self._nlp(self._clean_function(raw_text))
X_factory = CSRMatrixFactory()
X_factory.set_last_col_idx(self._term_idx_store.getnumvals() - 1)
term_freq = self._get_features_from_parsed_text(parsed_text, self._term_idx_store)
self._register_document_features_with_X_factory(X_factory, 0, term_freq)
return X_factory.get_csr_matrix()
def _augment_term_freq_with_unigrams_and_bigrams(self, bigrams, term_freq, term_idx_store, unigrams):
for term in unigrams + bigrams:
if term in term_idx_store:
term_freq[term_idx_store.getidx(term)] += 1
def build_from_category_whitespace_delimited_text(category_text_iter):
'''
Parameters
----------
    category_text_iter : iterator of (string category name, one line per sentence, whitespace-delimited text) pairs
Returns
-------
TermDocMatrix
'''
y = []
X_factory = CSRMatrixFactory()
term_idx_store = IndexStore()
category_idx_store = IndexStore()
mX_factory = CSRMatrixFactory()
for doci, (category, text) in enumerate(category_text_iter):
y.append(category_idx_store.getidx(category))
term_freq = Counter()
for sent in text.strip(string.punctuation).lower().split('\n'):
unigrams = []
for tok in sent.strip().split():
unigrams.append(tok)
bigrams = list(map(' '.join, zip(unigrams[:-1], unigrams[1:])))
for term in unigrams + bigrams:
term_freq[term_idx_store.getidx(term)] += 1
for word_idx, freq in term_freq.items():
X_factory[doci, word_idx] = freq
metadata_idx_store = IndexStore()
return TermDocMatrix(X=X_factory.get_csr_matrix(),
mX=mX_factory.get_csr_matrix(),
y=np.array(y),
term_idx_store=term_idx_store,
metadata_idx_store=metadata_idx_store,
category_idx_store=category_idx_store)
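# Minimal usage sketch (the category names and texts below are invented for
# illustration). Each document is lower-cased, split on whitespace one line per
# sentence, and counted as unigrams plus adjacent-word bigrams:
#
#   pairs = [('hamlet', 'to be or not to be'),
#            ('jay-z/r. kelly', 'it is i sire tone from brooklyn')]
#   term_doc_matrix = build_from_category_whitespace_delimited_text(pairs)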
|
import shlex
import subprocess
import sys
import textwrap
import click
import pkg_resources
from twtxt.mentions import format_mentions
from twtxt.parser import parse_iso8601
def style_timeline(tweets, porcelain=False):
if porcelain:
return "\n".join(style_tweet(tweet, porcelain) for tweet in tweets)
else:
return "\n{0}\n".format("\n\n".join(filter(None, (style_tweet(tweet, porcelain) for tweet in tweets))))
def style_tweet(tweet, porcelain=False):
conf = click.get_current_context().obj["conf"]
limit = conf.character_limit
if porcelain:
return "{nick}\t{url}\t{tweet}".format(
nick=tweet.source.nick,
url=tweet.source.url,
tweet=str(tweet))
else:
if sys.stdout.isatty() and not tweet.text.isprintable():
return None
styled_text = format_mentions(tweet.text)
len_styling = len(styled_text) - len(click.unstyle(styled_text))
final_text = textwrap.shorten(styled_text, limit + len_styling) if limit else styled_text
timestamp = tweet.absolute_datetime if conf.use_abs_time else tweet.relative_datetime
return "➤ {nick} ({time}):\n{tweet}".format(
nick=click.style(tweet.source.nick, bold=True),
tweet=final_text,
time=click.style(timestamp, dim=True))
def style_source(source, porcelain=False):
if porcelain:
return "{nick}\t{url}".format(
nick=source.nick,
url=source.url)
else:
return "➤ {nick} @ {url}".format(
nick=click.style(source.nick, bold=True),
url=source.url)
def style_source_with_status(source, status, porcelain=False):
if porcelain:
return "{nick}\t{url}\t{status}\t{content_length}\t{last_modified}".format(
nick=source.nick,
url=source.url,
status=status.status_code,
content_length=status.content_length,
last_modified=status.last_modified)
else:
if status.status_code == 200:
scolor, smessage = "green", str(status.status_code)
elif status:
scolor, smessage = "red", str(status.status_code)
else:
scolor, smessage = "red", "ERROR"
return "➤ {nick} @ {url} [{content_length}, {last_modified}] ({status})".format(
nick=click.style(source.nick, bold=True, fg=scolor),
url=source.url,
status=click.style(smessage, fg=scolor),
content_length=status.natural_content_length,
last_modified=status.natural_last_modified)
def validate_created_at(ctx, param, value):
if value:
try:
return parse_iso8601(value)
except (ValueError, OverflowError) as e:
raise click.BadParameter("{0}.".format(e))
def validate_text(ctx, param, value):
conf = click.get_current_context().obj["conf"]
if isinstance(value, tuple):
value = " ".join(value)
if not value and not sys.stdin.isatty():
value = click.get_text_stream("stdin").read()
if value:
value = value.strip()
if conf.character_warning and len(value) > conf.character_warning:
click.confirm("✂ Warning: Tweet is longer than {0} characters. Are you sure?".format(
conf.character_warning), abort=True)
return value
else:
raise click.BadArgumentUsage("Text can’t be empty.")
def validate_config_key(ctx, param, value):
"""Validate a configuration key according to `section.item`."""
if not value:
return value
try:
section, item = value.split(".", 1)
except ValueError:
raise click.BadArgumentUsage("Given key does not contain a section name.")
else:
return section, item
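# For example (values are illustrative): the ctx and param arguments are unused
# by this callback, so it can be exercised directly.
#
#   validate_config_key(None, None, "twtxt.nick")  # -> ("twtxt", "nick")
#   validate_config_key(None, None, "nick")        # raises click.BadArgumentUsage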
def run_pre_tweet_hook(hook, options):
try:
command = shlex.split(hook.format(**options))
except KeyError:
click.echo("✗ Invalid variables in pre_tweet_hook.")
raise click.Abort
try:
subprocess.check_output(command, shell=True, universal_newlines=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
click.echo("✗ pre_tweet_hook returned {}.".format(e.returncode))
if e.output:
click.echo(e.output)
raise click.Abort
def run_post_tweet_hook(hook, options):
try:
command = shlex.split(hook.format(**options))
except KeyError:
click.echo("✗ Invalid variables in post_tweet_hook.")
return
try:
subprocess.check_output(command, shell=True, universal_newlines=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
click.echo("✗ post_tweet_hook returned {}.".format(e.returncode))
if e.output:
click.echo(e.output)
def sort_and_truncate_tweets(tweets, direction, limit):
if direction == "descending":
return sorted(tweets, reverse=True)[:limit]
elif direction == "ascending":
if limit < len(tweets):
return sorted(tweets)[len(tweets) - limit:]
else:
return sorted(tweets)
else:
return []
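# Behaviour sketch with invented tweets t1 < t2 < t3 (ordered by timestamp):
# "descending" keeps the newest `limit` tweets newest-first, while "ascending"
# keeps the same tweets oldest-first, because the slice takes the tail of the
# ascending sort.
#
#   sort_and_truncate_tweets([t1, t2, t3], "descending", 2)  # -> [t3, t2]
#   sort_and_truncate_tweets([t1, t2, t3], "ascending", 2)   # -> [t2, t3]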
def generate_user_agent():
try:
version = pkg_resources.require("twtxt")[0].version
except pkg_resources.DistributionNotFound:
version = "unknown"
conf = click.get_current_context().obj["conf"]
if conf.disclose_identity and conf.nick and conf.twturl:
user_agent = "twtxt/{version} (+{url}; @{nick})".format(
version=version, url=conf.twturl, nick=conf.nick)
else:
user_agent = "twtxt/{version}".format(version=version)
return {"User-Agent": user_agent}
|
from datetime import datetime, timedelta
from enturclient import EnturPublicTransportData
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
TIME_MINUTES,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
API_CLIENT_NAME = "homeassistant-homeassistant"
ATTRIBUTION = "Data provided by entur.org under NLOD"
CONF_STOP_IDS = "stop_ids"
CONF_EXPAND_PLATFORMS = "expand_platforms"
CONF_WHITELIST_LINES = "line_whitelist"
CONF_OMIT_NON_BOARDING = "omit_non_boarding"
CONF_NUMBER_OF_DEPARTURES = "number_of_departures"
DEFAULT_NAME = "Entur"
DEFAULT_ICON_KEY = "bus"
ICONS = {
"air": "mdi:airplane",
"bus": "mdi:bus",
"metro": "mdi:subway",
"rail": "mdi:train",
"tram": "mdi:tram",
"water": "mdi:ferry",
}
SCAN_INTERVAL = timedelta(seconds=45)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_IDS): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_EXPAND_PLATFORMS, default=True): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
vol.Optional(CONF_WHITELIST_LINES, default=[]): cv.ensure_list,
vol.Optional(CONF_OMIT_NON_BOARDING, default=True): cv.boolean,
vol.Optional(CONF_NUMBER_OF_DEPARTURES, default=2): vol.All(
cv.positive_int, vol.Range(min=2, max=10)
),
}
)
ATTR_STOP_ID = "stop_id"
ATTR_ROUTE = "route"
ATTR_ROUTE_ID = "route_id"
ATTR_EXPECTED_AT = "due_at"
ATTR_DELAY = "delay"
ATTR_REALTIME = "real_time"
ATTR_NEXT_UP_IN = "next_due_in"
ATTR_NEXT_UP_ROUTE = "next_route"
ATTR_NEXT_UP_ROUTE_ID = "next_route_id"
ATTR_NEXT_UP_AT = "next_due_at"
ATTR_NEXT_UP_DELAY = "next_delay"
ATTR_NEXT_UP_REALTIME = "next_real_time"
ATTR_TRANSPORT_MODE = "transport_mode"
def due_in_minutes(timestamp: datetime) -> int:
"""Get the time in minutes from a timestamp."""
if timestamp is None:
return None
diff = timestamp - dt_util.now()
return int(diff.total_seconds() / 60)
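# Worked example (times are illustrative): a departure 90 seconds from now
# yields int(1.5) == 1, while one 30 seconds in the past yields int(-0.5) == 0,
# because int() truncates toward zero.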
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Entur public transport sensor."""
expand = config.get(CONF_EXPAND_PLATFORMS)
line_whitelist = config.get(CONF_WHITELIST_LINES)
name = config.get(CONF_NAME)
show_on_map = config.get(CONF_SHOW_ON_MAP)
stop_ids = config.get(CONF_STOP_IDS)
omit_non_boarding = config.get(CONF_OMIT_NON_BOARDING)
number_of_departures = config.get(CONF_NUMBER_OF_DEPARTURES)
stops = [s for s in stop_ids if "StopPlace" in s]
quays = [s for s in stop_ids if "Quay" in s]
data = EnturPublicTransportData(
API_CLIENT_NAME,
stops=stops,
quays=quays,
line_whitelist=line_whitelist,
omit_non_boarding=omit_non_boarding,
number_of_departures=number_of_departures,
web_session=async_get_clientsession(hass),
)
if expand:
await data.expand_all_quays()
await data.update()
proxy = EnturProxy(data)
entities = []
for place in data.all_stop_places_quays():
try:
given_name = f"{name} {data.get_stop_info(place).name}"
except KeyError:
given_name = f"{name} {place}"
entities.append(
EnturPublicTransportSensor(proxy, given_name, place, show_on_map)
)
async_add_entities(entities, True)
class EnturProxy:
"""Proxy for the Entur client.
    Ensures throttling so the API rate limits are not hit.
"""
def __init__(self, api):
"""Initialize the proxy."""
self._api = api
@Throttle(timedelta(seconds=15))
async def async_update(self) -> None:
"""Update data in client."""
await self._api.update()
def get_stop_info(self, stop_id: str) -> dict:
"""Get info about specific stop place."""
return self._api.get_stop_info(stop_id)
class EnturPublicTransportSensor(Entity):
"""Implementation of a Entur public transport sensor."""
def __init__(self, api: EnturProxy, name: str, stop: str, show_on_map: bool):
"""Initialize the sensor."""
self.api = api
self._stop = stop
self._show_on_map = show_on_map
self._name = name
self._state = None
self._icon = ICONS[DEFAULT_ICON_KEY]
self._attributes = {}
@property
def name(self) -> str:
"""Return the name of the sensor."""
return self._name
@property
def state(self) -> str:
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self) -> dict:
"""Return the state attributes."""
self._attributes[ATTR_ATTRIBUTION] = ATTRIBUTION
self._attributes[ATTR_STOP_ID] = self._stop
return self._attributes
@property
def unit_of_measurement(self) -> str:
"""Return the unit this state is expressed in."""
return TIME_MINUTES
@property
def icon(self) -> str:
"""Icon to use in the frontend."""
return self._icon
async def async_update(self) -> None:
"""Get the latest data and update the states."""
await self.api.async_update()
self._attributes = {}
data = self.api.get_stop_info(self._stop)
if data is None:
self._state = None
return
if self._show_on_map and data.latitude and data.longitude:
self._attributes[CONF_LATITUDE] = data.latitude
self._attributes[CONF_LONGITUDE] = data.longitude
calls = data.estimated_calls
if not calls:
self._state = None
return
self._state = due_in_minutes(calls[0].expected_departure_time)
self._icon = ICONS.get(calls[0].transport_mode, ICONS[DEFAULT_ICON_KEY])
self._attributes[ATTR_ROUTE] = calls[0].front_display
self._attributes[ATTR_ROUTE_ID] = calls[0].line_id
self._attributes[ATTR_EXPECTED_AT] = calls[0].expected_departure_time.strftime(
"%H:%M"
)
self._attributes[ATTR_REALTIME] = calls[0].is_realtime
self._attributes[ATTR_DELAY] = calls[0].delay_in_min
number_of_calls = len(calls)
if number_of_calls < 2:
return
self._attributes[ATTR_NEXT_UP_ROUTE] = calls[1].front_display
self._attributes[ATTR_NEXT_UP_ROUTE_ID] = calls[1].line_id
self._attributes[ATTR_NEXT_UP_AT] = calls[1].expected_departure_time.strftime(
"%H:%M"
)
self._attributes[
ATTR_NEXT_UP_IN
] = f"{due_in_minutes(calls[1].expected_departure_time)} min"
self._attributes[ATTR_NEXT_UP_REALTIME] = calls[1].is_realtime
self._attributes[ATTR_NEXT_UP_DELAY] = calls[1].delay_in_min
if number_of_calls < 3:
return
for i, call in enumerate(calls[2:]):
key_name = f"departure_#{i + 3}"
self._attributes[key_name] = (
f"{'' if bool(call.is_realtime) else 'ca. '}"
f"{call.expected_departure_time.strftime('%H:%M')} {call.front_display}"
)
|
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from homeassistant.util.dt import as_local, parse_datetime, utcnow
from . import (
DATA_OPENUV_CLIENT,
DATA_PROTECTION_WINDOW,
DOMAIN,
TYPE_PROTECTION_WINDOW,
OpenUvEntity,
)
_LOGGER = logging.getLogger(__name__)
ATTR_PROTECTION_WINDOW_ENDING_TIME = "end_time"
ATTR_PROTECTION_WINDOW_ENDING_UV = "end_uv"
ATTR_PROTECTION_WINDOW_STARTING_TIME = "start_time"
ATTR_PROTECTION_WINDOW_STARTING_UV = "start_uv"
BINARY_SENSORS = {TYPE_PROTECTION_WINDOW: ("Protection Window", "mdi:sunglasses")}
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up an OpenUV sensor based on a config entry."""
openuv = hass.data[DOMAIN][DATA_OPENUV_CLIENT][entry.entry_id]
binary_sensors = []
for kind, attrs in BINARY_SENSORS.items():
name, icon = attrs
binary_sensors.append(
OpenUvBinarySensor(openuv, kind, name, icon, entry.entry_id)
)
async_add_entities(binary_sensors, True)
class OpenUvBinarySensor(OpenUvEntity, BinarySensorEntity):
"""Define a binary sensor for OpenUV."""
def __init__(self, openuv, sensor_type, name, icon, entry_id):
"""Initialize the sensor."""
super().__init__(openuv)
self._async_unsub_dispatcher_connect = None
self._entry_id = entry_id
self._icon = icon
self._latitude = openuv.client.latitude
self._longitude = openuv.client.longitude
self._name = name
self._sensor_type = sensor_type
self._state = None
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def is_on(self):
"""Return the status of the sensor."""
return self._state
@property
def should_poll(self):
"""Disable polling."""
return False
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return f"{self._latitude}_{self._longitude}_{self._sensor_type}"
@callback
def update_from_latest_data(self):
"""Update the state."""
data = self.openuv.data[DATA_PROTECTION_WINDOW]
if not data:
self._available = False
return
self._available = True
for key in ("from_time", "to_time", "from_uv", "to_uv"):
if not data.get(key):
_LOGGER.info("Skipping update due to missing data: %s", key)
return
if self._sensor_type == TYPE_PROTECTION_WINDOW:
self._state = (
parse_datetime(data["from_time"])
<= utcnow()
<= parse_datetime(data["to_time"])
)
self._attrs.update(
{
ATTR_PROTECTION_WINDOW_ENDING_TIME: as_local(
parse_datetime(data["to_time"])
),
ATTR_PROTECTION_WINDOW_ENDING_UV: data["to_uv"],
ATTR_PROTECTION_WINDOW_STARTING_UV: data["from_uv"],
ATTR_PROTECTION_WINDOW_STARTING_TIME: as_local(
parse_datetime(data["from_time"])
),
}
)
|
import datetime
import voluptuous as vol
from homeassistant.components import pilight
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import (
CONF_DISARM_AFTER_TRIGGER,
CONF_NAME,
CONF_PAYLOAD,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import track_point_in_time
from homeassistant.util import dt as dt_util
CONF_VARIABLE = "variable"
CONF_RESET_DELAY_SEC = "reset_delay_sec"
DEFAULT_NAME = "Pilight Binary Sensor"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_VARIABLE): cv.string,
vol.Required(CONF_PAYLOAD): vol.Schema(dict),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default="on"): vol.Any(
cv.positive_int, cv.small_float, cv.string
),
vol.Optional(CONF_PAYLOAD_OFF, default="off"): vol.Any(
cv.positive_int, cv.small_float, cv.string
),
vol.Optional(CONF_DISARM_AFTER_TRIGGER, default=False): cv.boolean,
vol.Optional(CONF_RESET_DELAY_SEC, default=30): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Pilight Binary Sensor."""
disarm = config.get(CONF_DISARM_AFTER_TRIGGER)
if disarm:
add_entities(
[
PilightTriggerSensor(
hass=hass,
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
rst_dly_sec=config.get(CONF_RESET_DELAY_SEC),
)
]
)
else:
add_entities(
[
PilightBinarySensor(
hass=hass,
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
)
]
)
class PilightBinarySensor(BinarySensorEntity):
"""Representation of a binary sensor that can be updated using Pilight."""
def __init__(self, hass, name, variable, payload, on_value, off_value):
"""Initialize the sensor."""
self._state = False
self._hass = hass
self._name = name
self._variable = variable
self._payload = payload
self._on_value = on_value
self._off_value = off_value
hass.bus.listen(pilight.EVENT, self._handle_code)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._state
def _handle_code(self, call):
"""Handle received code by the pilight-daemon.
If the code matches the defined payload
of this sensor the sensor state is changed accordingly.
"""
        # Check if received code matches defined payload
# True if payload is contained in received code dict
payload_ok = True
for key in self._payload:
if key not in call.data:
payload_ok = False
continue
if self._payload[key] != call.data[key]:
payload_ok = False
# Read out variable if payload ok
if payload_ok:
if self._variable not in call.data:
return
value = call.data[self._variable]
self._state = value == self._on_value
self.schedule_update_ha_state()
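    # Illustrative matching sketch (protocol and id values are invented): with
    # payload={"protocol": "ev1527", "id": 23} and variable="state", an event
    # whose data contains both matching keys plus {"state": "on"} turns the
    # sensor on; an event missing either key or carrying different values is
    # ignored.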
class PilightTriggerSensor(BinarySensorEntity):
"""Representation of a binary sensor that can be updated using Pilight."""
def __init__(
self, hass, name, variable, payload, on_value, off_value, rst_dly_sec=30
):
"""Initialize the sensor."""
self._state = False
self._hass = hass
self._name = name
self._variable = variable
self._payload = payload
self._on_value = on_value
self._off_value = off_value
self._reset_delay_sec = rst_dly_sec
self._delay_after = None
self._hass = hass
hass.bus.listen(pilight.EVENT, self._handle_code)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._state
def _reset_state(self, call):
self._state = False
self._delay_after = None
self.schedule_update_ha_state()
def _handle_code(self, call):
"""Handle received code by the pilight-daemon.
If the code matches the defined payload
of this sensor the sensor state is changed accordingly.
"""
# Check if received code matches defined payload
# True if payload is contained in received code dict
payload_ok = True
for key in self._payload:
if key not in call.data:
payload_ok = False
continue
if self._payload[key] != call.data[key]:
payload_ok = False
# Read out variable if payload ok
if payload_ok:
if self._variable not in call.data:
return
value = call.data[self._variable]
self._state = value == self._on_value
if self._delay_after is None:
self._delay_after = dt_util.utcnow() + datetime.timedelta(
seconds=self._reset_delay_sec
)
track_point_in_time(self._hass, self._reset_state, self._delay_after)
self.schedule_update_ha_state()
|
import contextlib
import inspect
import io
import os.path
import re
from . import compression
from . import transport
PLACEHOLDER = ' smart_open/doctools.py magic goes here'
def extract_kwargs(docstring):
"""Extract keyword argument documentation from a function's docstring.
Parameters
----------
docstring: str
The docstring to extract keyword arguments from.
Returns
-------
    list of (str, str, list of str)
str
The name of the keyword argument.
str
Its type.
str
Its documentation as a list of lines.
Notes
-----
The implementation is rather fragile. It expects the following:
1. The parameters are under an underlined Parameters section
2. Keyword parameters have the literal ", optional" after the type
3. Names and types are not indented
4. Descriptions are indented with 4 spaces
5. The Parameters section ends with an empty line.
Examples
--------
>>> docstring = '''The foo function.
... Parameters
... ----------
... bar: str, optional
... This parameter is the bar.
... baz: int, optional
... This parameter is the baz.
...
... '''
>>> kwargs = extract_kwargs(docstring)
>>> kwargs[0]
('bar', 'str, optional', ['This parameter is the bar.'])
"""
if not docstring:
return []
lines = inspect.cleandoc(docstring).split('\n')
retval = []
#
# 1. Find the underlined 'Parameters' section
# 2. Once there, continue parsing parameters until we hit an empty line
#
while lines and lines[0] != 'Parameters':
lines.pop(0)
if not lines:
return []
lines.pop(0)
lines.pop(0)
while lines and lines[0]:
name, type_ = lines.pop(0).split(':', 1)
description = []
while lines and lines[0].startswith(' '):
description.append(lines.pop(0).strip())
if 'optional' in type_:
retval.append((name.strip(), type_.strip(), description))
return retval
def to_docstring(kwargs, lpad=''):
"""Reconstruct a docstring from keyword argument info.
Basically reverses :func:`extract_kwargs`.
Parameters
----------
kwargs: list
Output from the extract_kwargs function
lpad: str, optional
Padding string (from the left).
Returns
-------
str
The docstring snippet documenting the keyword arguments.
Examples
--------
>>> kwargs = [
... ('bar', 'str, optional', ['This parameter is the bar.']),
... ('baz', 'int, optional', ['This parameter is the baz.']),
... ]
>>> print(to_docstring(kwargs), end='')
bar: str, optional
This parameter is the bar.
baz: int, optional
This parameter is the baz.
"""
buf = io.StringIO()
for name, type_, description in kwargs:
buf.write('%s%s: %s\n' % (lpad, name, type_))
for line in description:
buf.write('%s %s\n' % (lpad, line))
return buf.getvalue()
def extract_examples_from_readme_rst(indent=' '):
"""Extract examples from this project's README.rst file.
Parameters
----------
indent: str
Prepend each line with this string. Should contain some number of spaces.
Returns
-------
str
The examples.
Notes
-----
Quite fragile, depends on named labels inside the README.rst file.
"""
curr_dir = os.path.dirname(os.path.abspath(__file__))
readme_path = os.path.join(curr_dir, '..', 'README.rst')
try:
with open(readme_path) as fin:
lines = list(fin)
start = lines.index('.. _doctools_before_examples:\n')
end = lines.index(".. _doctools_after_examples:\n")
lines = lines[start+4:end-2]
return ''.join([indent + re.sub('^ ', '', line) for line in lines])
except Exception:
return indent + 'See README.rst'
def tweak_open_docstring(f):
buf = io.StringIO()
seen = set()
root_path = os.path.dirname(os.path.dirname(__file__))
with contextlib.redirect_stdout(buf):
print(' smart_open supports the following transport mechanisms:')
print()
for scheme, submodule in sorted(transport._REGISTRY.items()):
if scheme == transport.NO_SCHEME or submodule in seen:
continue
seen.add(submodule)
relpath = os.path.relpath(submodule.__file__, start=root_path)
heading = '%s (%s)' % (scheme, relpath)
print(' %s' % heading)
print(' %s' % ('~' * len(heading)))
print(' %s' % submodule.__doc__.split('\n')[0])
print()
kwargs = extract_kwargs(submodule.open.__doc__)
if kwargs:
print(to_docstring(kwargs, lpad=u' '))
print(' Examples')
print(' --------')
print()
print(extract_examples_from_readme_rst())
print(' This function also supports transparent compression and decompression ')
print(' using the following codecs:')
print()
for extension in compression.get_supported_extensions():
print(' * %s' % extension)
print()
print(' The function depends on the file extension to determine the appropriate codec.')
#
# The docstring can be None if -OO was passed to the interpreter.
#
if f.__doc__:
f.__doc__ = f.__doc__.replace(PLACEHOLDER, buf.getvalue())
def tweak_parse_uri_docstring(f):
buf = io.StringIO()
seen = set()
schemes = []
examples = []
for scheme, submodule in sorted(transport._REGISTRY.items()):
if scheme == transport.NO_SCHEME or submodule in seen:
continue
schemes.append(scheme)
seen.add(submodule)
try:
examples.extend(submodule.URI_EXAMPLES)
except AttributeError:
pass
with contextlib.redirect_stdout(buf):
print(' Supported URI schemes are:')
print()
for scheme in schemes:
print(' * %s' % scheme)
print()
print(' Valid URI examples::')
print()
for example in examples:
print(' * %s' % example)
if f.__doc__:
f.__doc__ = f.__doc__.replace(PLACEHOLDER, buf.getvalue())
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import managed_memory_store
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import aws
from perfkitbenchmarker.providers.aws import util
MEMCACHED_VERSION = '1.5.10'
FLAGS = flags.FLAGS
class ElastiCacheMemcached(managed_memory_store.BaseManagedMemoryStore):
"""Object representing a AWS Elasticache memcached instance."""
CLOUD = aws.CLOUD
MEMORY_STORE = managed_memory_store.MEMCACHED
def __init__(self, spec):
super(ElastiCacheMemcached, self).__init__(spec)
self.subnet_group_name = 'subnet-%s' % self.name
self.zone = self.spec.vms[0].zone
self.region = util.GetRegionFromZone(self.zone)
self.node_type = FLAGS.cache_node_type
@staticmethod
def CheckPrerequisites(benchmark_config):
if FLAGS.managed_memory_store_version:
raise errors.Config.InvalidValue('Custom Memcached version not '
'supported. Version is {}.'.format(
MEMCACHED_VERSION))
def GetResourceMetadata(self):
"""Returns a dict containing metadata about the cache cluster.
Returns:
dict mapping string property key to value.
"""
result = {
'cloud_memcached_version': MEMCACHED_VERSION,
'cloud_memcached_node_type': self.node_type,
}
return result
def _CreateDependencies(self):
"""Create the subnet dependencies."""
subnet_id = self.spec.vms[0].network.subnet.id
cmd = ['aws', 'elasticache', 'create-cache-subnet-group',
'--region', self.region,
'--cache-subnet-group-name', self.subnet_group_name,
'--cache-subnet-group-description', '"memcached benchmark subnet"',
'--subnet-ids', subnet_id]
vm_util.IssueCommand(cmd)
def _DeleteDependencies(self):
"""Delete the subnet dependencies."""
cmd = ['aws', 'elasticache', 'delete-cache-subnet-group',
'--region', self.region,
'--cache-subnet-group-name', self.subnet_group_name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
def _Create(self):
"""Creates the cache cluster."""
cmd = ['aws', 'elasticache', 'create-cache-cluster',
'--engine', 'memcached',
'--region', self.region,
'--cache-cluster-id', self.name,
'--preferred-availability-zone', self.zone,
'--num-cache-nodes', str(managed_memory_store.MEMCACHED_NODE_COUNT),
'--engine-version', MEMCACHED_VERSION,
'--cache-node-type', self.node_type,
'--cache-subnet-group-name', self.subnet_group_name]
cmd += ['--tags']
cmd += util.MakeFormattedDefaultTags()
vm_util.IssueCommand(cmd)
def _Delete(self):
"""Deletes the cache cluster."""
cmd = ['aws', 'elasticache', 'delete-cache-cluster',
'--region', self.region,
'--cache-cluster-id', self.name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
def _IsDeleting(self):
"""Returns True if cluster is being deleted and false otherwise."""
cluster_info = self._DescribeInstance()
return cluster_info.get('CacheClusterStatus', '') == 'deleting'
def _IsReady(self):
"""Returns True if cluster is ready and false otherwise."""
cluster_info = self._DescribeInstance()
return cluster_info.get('CacheClusterStatus', '') == 'available'
def _Exists(self):
"""Returns true if the cluster exists and is not being deleted."""
cluster_info = self._DescribeInstance()
return cluster_info.get('CacheClusterStatus', '') not in [
'', 'deleting', 'create-failed']
def _DescribeInstance(self):
"""Calls describe on cluster.
Returns:
dict mapping string cluster_info property key to value.
"""
cmd = ['aws', 'elasticache', 'describe-cache-clusters',
'--region', self.region,
'--cache-cluster-id', self.name]
stdout, stderr, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False)
if retcode != 0:
logging.info('Could not find cluster %s, %s', self.name, stderr)
return {}
for cluster_info in json.loads(stdout)['CacheClusters']:
if cluster_info['CacheClusterId'] == self.name:
return cluster_info
return {}
@vm_util.Retry(max_retries=5)
def _PopulateEndpoint(self):
"""Populates address and port information from cluster_info.
Raises:
errors.Resource.RetryableGetError:
Failed to retrieve information on cluster
"""
cluster_info = self._DescribeInstance()
if not cluster_info:
raise errors.Resource.RetryableGetError(
'Failed to retrieve information on {0}.'.format(self.name))
endpoint = cluster_info['ConfigurationEndpoint']
self._ip = endpoint['Address']
self._port = endpoint['Port']
|
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import providers
from perfkitbenchmarker.providers.digitalocean import util
FLAGS = flags.FLAGS
BLOCK_STORAGE = 'block-storage'
LOCAL_DISK_METADATA = {
disk.MEDIA: disk.SSD,
disk.REPLICATION: disk.NONE,
}
BLOCK_STORAGE_METADATA = {
disk.MEDIA: disk.SSD,
disk.REPLICATION: disk.ZONE,
}
# Map legacy disk types to DigitalOcean disk types.
DISK_TYPE_MAP = {
disk.REMOTE_SSD: BLOCK_STORAGE
}
disk.RegisterDiskTypeMap(providers.DIGITALOCEAN, DISK_TYPE_MAP)
class DigitalOceanLocalDisk(disk.BaseDisk):
"""Dummy Object representing a DigitalOcean Disk."""
def __init__(self, disk_spec):
super(DigitalOceanLocalDisk, self).__init__(disk_spec)
self.metadata.update(LOCAL_DISK_METADATA)
def Attach(self, vm):
pass
def Detach(self):
pass
def GetDevicePath(self):
# The local disk is always the boot disk, and it cannot be
# partitioned or reformatted, so we don't support GetDevicePath().
raise errors.Error(
'GetDevicePath not supported for DigitalOcean local disks.')
def _Create(self):
pass
def _Delete(self):
pass
class DigitalOceanBlockStorageDisk(disk.BaseDisk):
"""Interface to DigitalOcean Block Storage."""
def __init__(self, disk_spec, zone):
super(DigitalOceanBlockStorageDisk, self).__init__(disk_spec)
self.zone = zone
if self.disk_type != BLOCK_STORAGE:
raise ValueError('DigitalOcean data disks must have type block-storage.')
self.metadata.update(BLOCK_STORAGE_METADATA)
def _Create(self):
self.volume_name = 'pkb-%s-%s' % (FLAGS.run_uri, self.disk_number)
response, retcode = util.DoctlAndParse(
['compute', 'volume', 'create',
self.volume_name,
'--region', self.zone,
'--size', str(self.disk_size) + 'gb'])
if retcode:
raise errors.Resource.RetryableCreationError(
'Error creating disk: %s' % (response,))
self.volume_id = response[0]['id']
def _Delete(self):
response, retcode = util.DoctlAndParse(
['compute', 'volume', 'delete',
self.volume_id, '--force'])
if retcode:
raise errors.Resource.RetryableDeletionError(
'Error deleting disk: %s' % (response,))
def Attach(self, vm):
response, retcode = util.DoctlAndParse(
['compute', 'volume-action', 'attach',
self.volume_id, vm.droplet_id])
if retcode:
raise errors.VmUtil.CalledProcessException(
'Error attaching disk: %s' % (response,))
action_id = response[0]['id']
util.WaitForAction(action_id)
def Detach(self):
response, retcode = util.DoctlAndParse(
['compute', 'volume-action', 'detach',
self.volume_id])
if retcode:
raise errors.VmUtil.CalledProcessException(
'Error detaching disk: %s' % (response,))
action_id = response[0]['id']
util.WaitForAction(action_id)
def GetDevicePath(self):
return '/dev/disk/by-id/scsi-0DO_Volume_%s' % self.volume_name
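  # Illustrative example (run URI and disk number are invented): with
  # FLAGS.run_uri = "a1b2c3" and disk_number = 0, _Create() names the volume
  # "pkb-a1b2c3-0" and GetDevicePath() then returns
  # "/dev/disk/by-id/scsi-0DO_Volume_pkb-a1b2c3-0".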
|
import time
from zigpy.device import Device as zigpy_dev
from zigpy.endpoint import Endpoint as zigpy_ep
import zigpy.profiles.zha
import zigpy.types
import zigpy.zcl
import zigpy.zcl.clusters.general
import zigpy.zcl.foundation as zcl_f
import zigpy.zdo.types
import homeassistant.components.zha.core.const as zha_const
from homeassistant.util import slugify
from tests.async_mock import AsyncMock, Mock
class FakeEndpoint:
"""Fake endpoint for moking zigpy."""
def __init__(self, manufacturer, model, epid=1):
"""Init fake endpoint."""
self.device = None
self.endpoint_id = epid
self.in_clusters = {}
self.out_clusters = {}
self._cluster_attr = {}
self.member_of = {}
self.status = 1
self.manufacturer = manufacturer
self.model = model
self.profile_id = zigpy.profiles.zha.PROFILE_ID
self.device_type = None
self.request = AsyncMock(return_value=[0])
def add_input_cluster(self, cluster_id, _patch_cluster=True):
"""Add an input cluster."""
cluster = zigpy.zcl.Cluster.from_id(self, cluster_id, is_server=True)
if _patch_cluster:
patch_cluster(cluster)
self.in_clusters[cluster_id] = cluster
if hasattr(cluster, "ep_attribute"):
setattr(self, cluster.ep_attribute, cluster)
def add_output_cluster(self, cluster_id, _patch_cluster=True):
"""Add an output cluster."""
cluster = zigpy.zcl.Cluster.from_id(self, cluster_id, is_server=False)
if _patch_cluster:
patch_cluster(cluster)
self.out_clusters[cluster_id] = cluster
reply = AsyncMock(return_value=[0])
request = AsyncMock(return_value=[0])
@property
def __class__(self):
"""Fake being Zigpy endpoint."""
return zigpy_ep
@property
def unique_id(self):
"""Return the unique id for the endpoint."""
return self.device.ieee, self.endpoint_id
FakeEndpoint.add_to_group = zigpy_ep.add_to_group
FakeEndpoint.remove_from_group = zigpy_ep.remove_from_group
def patch_cluster(cluster):
"""Patch a cluster for testing."""
cluster.bind = AsyncMock(return_value=[0])
cluster.configure_reporting = AsyncMock(return_value=[0])
cluster.deserialize = Mock()
cluster.handle_cluster_request = Mock()
cluster.read_attributes = AsyncMock(return_value=[{}, {}])
cluster.read_attributes_raw = Mock()
cluster.unbind = AsyncMock(return_value=[0])
cluster.write_attributes = AsyncMock(
return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]]
)
if cluster.cluster_id == 4:
cluster.add = AsyncMock(return_value=[0])
class FakeDevice:
"""Fake device for mocking zigpy."""
def __init__(self, app, ieee, manufacturer, model, node_desc=None, nwk=0xB79C):
"""Init fake device."""
self._application = app
self.application = app
self.ieee = zigpy.types.EUI64.convert(ieee)
self.nwk = nwk
self.zdo = Mock()
self.endpoints = {0: self.zdo}
self.lqi = 255
self.rssi = 8
self.last_seen = time.time()
self.status = 2
self.initializing = False
self.skip_configuration = False
self.manufacturer = manufacturer
self.model = model
self.node_desc = zigpy.zdo.types.NodeDescriptor()
self.remove_from_group = AsyncMock()
if node_desc is None:
node_desc = b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00"
self.node_desc = zigpy.zdo.types.NodeDescriptor.deserialize(node_desc)[0]
self.neighbors = []
FakeDevice.add_to_group = zigpy_dev.add_to_group
def get_zha_gateway(hass):
"""Return ZHA gateway from hass.data."""
try:
return hass.data[zha_const.DATA_ZHA][zha_const.DATA_ZHA_GATEWAY]
except KeyError:
return None
def make_attribute(attrid, value, status=0):
"""Make an attribute."""
attr = zcl_f.Attribute()
attr.attrid = attrid
attr.value = zcl_f.TypeValue()
attr.value.value = value
return attr
def send_attribute_report(hass, cluster, attrid, value):
"""Send a single attribute report."""
return send_attributes_report(hass, cluster, {attrid: value})
async def send_attributes_report(hass, cluster: int, attributes: dict):
"""Cause the sensor to receive an attribute report from the network.
This is to simulate the normal device communication that happens when a
device is paired to the zigbee network.
"""
attrs = [make_attribute(attrid, value) for attrid, value in attributes.items()]
hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
hdr.frame_control.disable_default_response = True
cluster.handle_message(hdr, [attrs])
await hass.async_block_till_done()
async def find_entity_id(domain, zha_device, hass):
"""Find the entity id under the testing.
This is used to get the entity id in order to get the state from the state
machine so that we can test state changes.
"""
ieeetail = "".join([f"{o:02x}" for o in zha_device.ieee[:4]])
head = f"{domain}.{slugify(f'{zha_device.name} {ieeetail}')}"
    entity_ids = hass.states.async_entity_ids(domain)
    await hass.async_block_till_done()
    for entity_id in entity_ids:
if entity_id.startswith(head):
return entity_id
return None
def async_find_group_entity_id(hass, domain, group):
"""Find the group entity id under test."""
entity_id = f"{domain}.{group.name.lower().replace(' ','_')}_zha_group_0x{group.group_id:04x}"
entity_ids = hass.states.async_entity_ids(domain)
if entity_id in entity_ids:
return entity_id
return None
async def async_enable_traffic(hass, zha_devices, enabled=True):
"""Allow traffic to flow through the gateway and the zha device."""
for zha_device in zha_devices:
zha_device.update_available(enabled)
await hass.async_block_till_done()
def make_zcl_header(
command_id: int, global_command: bool = True, tsn: int = 1
) -> zcl_f.ZCLHeader:
"""Cluster.handle_message() ZCL Header helper."""
if global_command:
frc = zcl_f.FrameControl(zcl_f.FrameType.GLOBAL_COMMAND)
else:
frc = zcl_f.FrameControl(zcl_f.FrameType.CLUSTER_COMMAND)
return zcl_f.ZCLHeader(frc, tsn=tsn, command_id=command_id)
def reset_clusters(clusters):
"""Reset mocks on cluster."""
for cluster in clusters:
cluster.bind.reset_mock()
cluster.configure_reporting.reset_mock()
cluster.write_attributes.reset_mock()
async def async_test_rejoin(hass, zigpy_device, clusters, report_counts, ep_id=1):
"""Test device rejoins."""
reset_clusters(clusters)
zha_gateway = get_zha_gateway(hass)
await zha_gateway.async_device_initialized(zigpy_device)
await hass.async_block_till_done()
for cluster, reports in zip(clusters, report_counts):
assert cluster.bind.call_count == 1
assert cluster.bind.await_count == 1
assert cluster.configure_reporting.call_count == reports
assert cluster.configure_reporting.await_count == reports
|
import warnings
from typing import Optional, Sequence, Tuple, Any, Union, Type, Callable, List
from typing import Text
import numpy as np
from tensornetwork.backends import abstract_backend
from tensornetwork import backend_contextmanager
from tensornetwork import backends
from tensornetwork.tensor import Tensor
AbstractBackend = abstract_backend.AbstractBackend
def initialize_tensor(fname: Text,
*fargs: Any,
backend: Optional[Union[Text, AbstractBackend]] = None,
**fkwargs: Any) -> Tensor:
"""Return a Tensor wrapping data obtained by an initialization function
implemented in a backend. The Tensor will have the same shape as the
underlying array that function generates, with all Edges dangling.
This function is not intended to be called directly, but doing so should
be safe enough.
Args:
fname: Name of the method of backend to call (a string).
*fargs: Positional arguments to the initialization method.
backend: The backend or its name.
**fkwargs: Keyword arguments to the initialization method.
Returns:
tensor: A Tensor wrapping data generated by
(the_backend).fname(*fargs, **fkwargs), with one dangling edge per
axis of data.
"""
if backend is None:
backend = backend_contextmanager.get_default_backend()
backend_obj = backends.backend_factory.get_backend(backend)
func = getattr(backend_obj, fname)
data = func(*fargs, **fkwargs)
tensor = Tensor(data, backend=backend)
return tensor
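# Hedged usage sketch: initialize_tensor simply looks up `fname` on the chosen
# backend object and wraps the resulting array in a Tensor, so any backend
# initializer can be reached by name. "numpy" is assumed to be a registered
# backend name (it is the tensornetwork default); this helper is illustrative
# only and is not called anywhere in this module.
def _example_initialize_tensor() -> Tensor:
  t = initialize_tensor("zeros", (2, 3), backend="numpy", dtype=np.float64)
  # For the numpy backend, t.shape should be (2, 3): one dangling edge per axis.
  return t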
def eye(N: int,
dtype: Optional[Type[np.number]] = None,
M: Optional[int] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor representing a 2D array with ones on the diagonal and
zeros elsewhere. The Tensor has two dangling Edges.
Args:
N (int): The first dimension of the returned matrix.
dtype, optional: dtype of array (default np.float64).
M (int, optional): The second dimension of the returned matrix.
backend (optional): The backend or its name.
Returns:
I : Tensor of shape (N, M)
        Represents an array of all zeros except for ones on the main
        diagonal.
"""
the_tensor = initialize_tensor("eye", N, backend=backend, dtype=dtype, M=M)
return the_tensor
def zeros(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor of shape `shape` of all zeros.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape`. Represents an array of all zeros.
"""
the_tensor = initialize_tensor("zeros", shape, backend=backend, dtype=dtype)
return the_tensor
def ones(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor of shape `shape` of all ones.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape`
Represents an array of all ones.
"""
the_tensor = initialize_tensor("ones", shape, backend=backend, dtype=dtype)
return the_tensor
def ones_like(tensor: Union[Any],
dtype: Optional[Type[Any]] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor shape full of ones the same shape as input
Args:
tensor : Object to recieve shape from
dtype (optional) : dtype of object
backend(optional): The backend or its name."""
if backend is None:
backend = backend_contextmanager.get_default_backend()
else:
backend = backend_contextmanager.backend_factory.get_backend(backend)
if isinstance(tensor, Tensor):
the_tensor = initialize_tensor("ones", tensor.shape,
backend=tensor.backend, dtype=tensor.dtype)
else:
try:
tensor = backend.convert_to_tensor(tensor)
except TypeError as e:
error = "Input to zeros_like has invalid type causing " \
"error massage: \n" + str(e)
raise TypeError(error) from e
the_tensor = initialize_tensor("ones", tensor.get_shape().as_list(),
backend=backend, dtype=dtype)
return the_tensor
def zeros_like(tensor: Union[Any],
dtype: Optional[Any] = None,
backend: Optional[Union[Text,
AbstractBackend]] = None) -> Tensor:
"""Return a Tensor shape full of zeros the same shape as input
Args:
tensor : Object to recieve shape from
dtype (optional) : dtype of object
backend(optional): The backend or its name."""
if backend is None:
backend = backend_contextmanager.get_default_backend()
else:
backend = backend_contextmanager.backend_factory.get_backend(backend)
if isinstance(tensor, Tensor):
the_tensor = initialize_tensor("zeros", tensor.shape,
backend=tensor.backend, dtype=tensor.dtype)
else:
try:
tensor = backend.convert_to_tensor(tensor)
except TypeError as e:
error = "Input to zeros_like has invalid " \
"type causing error massage: \n" + str(e)
raise TypeError(error) from e
the_tensor = initialize_tensor("zeros", tensor.shape,
backend=backend, dtype=dtype)
return the_tensor
def randn(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
backend: Optional[Union[Text, AbstractBackend]] = None) -> Tensor:
"""Return a Tensor of shape `shape` of Gaussian random floats.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape` filled with Gaussian random data.
"""
the_tensor = initialize_tensor("randn", shape, backend=backend, seed=seed,
dtype=dtype)
return the_tensor
def random_uniform(shape: Sequence[int],
dtype: Optional[Type[np.number]] = None,
seed: Optional[int] = None,
boundaries: Optional[Tuple[float, float]] = (0.0, 1.0),
backend: Optional[Union[Text, AbstractBackend]]
= None) -> Tensor:
"""Return a Tensor of shape `shape` of uniform random floats.
The Tensor has one dangling Edge per dimension.
Args:
shape : Shape of the array.
dtype, optional: dtype of array (default np.float64).
seed, optional: Seed for the RNG.
boundaries : Values lie in [boundaries[0], boundaries[1]).
backend (optional): The backend or its name.
Returns:
the_tensor : Tensor of shape `shape` filled with uniform random data.
"""
the_tensor = initialize_tensor("random_uniform", shape, backend=backend,
seed=seed, boundaries=boundaries, dtype=dtype)
return the_tensor
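# Hedged usage sketch: the random initializers forward `seed` and `boundaries`
# straight through initialize_tensor to the backend, so repeating a call with
# the same seed should reproduce the same data (backend permitting). "numpy"
# is assumed to be a registered backend name; illustrative only.
def _example_random_initializers() -> Tuple[Tensor, Tensor]:
  gaussian = randn((4, 4), seed=10, backend="numpy")
  uniform = random_uniform((4, 4), seed=10, boundaries=(-1.0, 1.0),
                           backend="numpy")
  return gaussian, uniform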
|
from typing import Any, Mapping, Optional, Sequence, Union
import attr
from qutebrowser.utils import usertypes, log
class Error(Exception):
"""Base exception for config-related errors."""
class NoAutoconfigError(Error):
"""Raised when this option can't be set in autoconfig.yml."""
def __init__(self, name: str) -> None:
super().__init__("The {} setting can only be set in config.py!"
.format(name))
class BackendError(Error):
"""Raised when this setting is unavailable with the current backend."""
def __init__(
self, name: str,
backend: usertypes.Backend,
raw_backends: Optional[Mapping[str, bool]]
) -> None:
if raw_backends is None or not raw_backends[backend.name]:
msg = ("The {} setting is not available with the {} backend!"
.format(name, backend.name))
else:
msg = ("The {} setting needs {} with the {} backend!"
.format(name, raw_backends[backend.name], backend.name))
super().__init__(msg)
class NoPatternError(Error):
"""Raised when the given setting does not support URL patterns."""
def __init__(self, name: str) -> None:
super().__init__("The {} setting does not support URL patterns!"
.format(name))
class ValidationError(Error):
"""Raised when a value for a config type was invalid.
Attributes:
value: Config value that triggered the error.
msg: Additional error message.
"""
def __init__(self, value: Any, msg: Union[str, Exception]) -> None:
super().__init__("Invalid value '{}' - {}".format(value, msg))
self.option = None
class KeybindingError(Error):
"""Raised for issues with keybindings."""
class NoOptionError(Error):
"""Raised when an option was not found."""
def __init__(self, option: str, *,
deleted: bool = False,
renamed: str = None) -> None:
if deleted:
assert renamed is None
suffix = ' (this option was removed from qutebrowser)'
elif renamed is not None:
suffix = ' (this option was renamed to {!r})'.format(renamed)
else:
suffix = ''
super().__init__("No option {!r}{}".format(option, suffix))
self.option = option
@attr.s
class ConfigErrorDesc:
"""A description of an error happening while reading the config.
Attributes:
text: The text to show.
exception: The exception which happened.
traceback: The formatted traceback of the exception.
"""
text: str = attr.ib()
exception: Union[str, Exception] = attr.ib()
    traceback: Optional[str] = attr.ib(None)
def __str__(self) -> str:
if self.traceback:
return '{} - {}: {}'.format(self.text,
self.exception.__class__.__name__,
self.exception)
return '{}: {}'.format(self.text, self.exception)
def with_text(self, text: str) -> 'ConfigErrorDesc':
"""Get a new ConfigErrorDesc with the given text appended."""
return self.__class__(text='{} ({})'.format(self.text, text),
exception=self.exception,
traceback=self.traceback)
class ConfigFileErrors(Error):
"""Raised when multiple errors occurred inside the config."""
def __init__(self,
basename: str,
errors: Sequence[ConfigErrorDesc], *,
fatal: bool = False) -> None:
super().__init__("Errors occurred while reading {}:\n{}".format(
basename, '\n'.join(' {}'.format(e) for e in errors)))
self.basename = basename
self.errors = errors
self.fatal = fatal
for err in errors:
if err.traceback:
log.config.info(err.traceback)
def to_html(self) -> str:
"""Get the error texts as a HTML snippet."""
from qutebrowser.utils import jinja # circular import
template = jinja.environment.from_string("""
Errors occurred while reading {{ basename }}:
<ul>
{% for error in errors %}
<li>
<b>{{ error.text }}</b>: {{ error.exception }}
{% if error.traceback != none %}
<pre>
""".rstrip() + "\n{{ error.traceback }}" + """
</pre>
{% endif %}
</li>
{% endfor %}
</ul>
""")
return template.render(basename=self.basename, errors=self.errors)
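# Hedged usage sketch: how a config reader might collect per-option problems
# into a single ConfigFileErrors exception. The option name below is made up
# for illustration; this function is not called anywhere in this module.
def _example_collect_errors() -> None:
    errors = [
        ConfigErrorDesc("While setting 'colors.made.up'",
                        NoOptionError('colors.made.up')),
    ]
    raise ConfigFileErrors('config.py', errors)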
|
from datetime import datetime as dt
import pytest
import pytz
from pandas.util.testing import assert_frame_equal
from arctic.date import mktz, DateRange
from arctic.exceptions import OverlappingDataException
DUMMY_DATA = [
{'a': 1.,
'b': 2.,
'index': dt(2013, 1, 1, tzinfo=mktz('Europe/London'))
},
{'b': 3.,
'c': 4.,
'index': dt(2013, 1, 2, tzinfo=mktz('Europe/London'))
},
{'b': 5.,
'c': 6.,
'index': dt(2013, 1, 3, tzinfo=mktz('Europe/London'))
},
{'b': 7.,
'c': 8.,
'index': dt(2013, 1, 4, tzinfo=mktz('Europe/London'))
},
{'b': 9.,
'c': 10.,
'index': dt(2013, 7, 5, tzinfo=mktz('Europe/London'))
},
]
def test_ts_write_simple(tickstore_lib):
assert tickstore_lib.stats()['chunks']['count'] == 0
tickstore_lib.write('SYM', DUMMY_DATA)
assert tickstore_lib.stats()['chunks']['count'] == 1
assert len(tickstore_lib.read('SYM')) == 5
assert tickstore_lib.list_symbols() == ['SYM']
def test_overlapping_load(tickstore_lib):
data = DUMMY_DATA
tickstore_lib.write('SYM', DUMMY_DATA)
with pytest.raises(OverlappingDataException):
tickstore_lib.write('SYM', data)
data = DUMMY_DATA[2:]
with pytest.raises(OverlappingDataException):
tickstore_lib.write('SYM', data)
data = DUMMY_DATA[2:3]
with pytest.raises(OverlappingDataException):
tickstore_lib.write('SYM', data)
# overlapping at the beginning is ok
data = [DUMMY_DATA[0]]
tickstore_lib.write('SYM', data)
# overlapping at the end is ok
data = [DUMMY_DATA[-1]]
tickstore_lib.write('SYM', data)
def test_ts_write_pandas(tickstore_lib):
data = DUMMY_DATA
tickstore_lib.write('SYM', data)
data = tickstore_lib.read('SYM', columns=None)
assert data.index[0] == dt(2013, 1, 1, tzinfo=mktz('Europe/London'))
assert data.a[0] == 1
tickstore_lib.delete('SYM')
tickstore_lib.write('SYM', data)
read = tickstore_lib.read('SYM', columns=None)
assert_frame_equal(read, data, check_names=False)
def test_ts_write_named_col(tickstore_lib):
data = DUMMY_DATA
tickstore_lib.write('SYM', data)
data = tickstore_lib.read('SYM')
assert data.index[0] == dt(2013, 1, 1, tzinfo=mktz('Europe/London'))
assert data.a[0] == 1
assert(data.index.name is None)
data.index.name = 'IndexName'
tickstore_lib.delete('SYM')
tickstore_lib.write('SYM', data)
read = tickstore_lib.read('SYM')
assert(read.index.name is None)
def test_millisecond_roundtrip(tickstore_lib):
test_time = dt(2004, 1, 14, 8, 30, 4, 807000, tzinfo=pytz.utc)
data = [{'index': test_time, 'price': 9142.12, 'qualifiers': ''}]
tickstore_lib.write('blah', data)
data_range = DateRange(dt(2004, 1, 14, tzinfo=pytz.utc),
dt(2004, 1, 15, tzinfo=pytz.utc))
reread = tickstore_lib.read('blah', data_range)
assert reread.index[0].to_pydatetime() == test_time
|
import asyncio
from asyncio import TimeoutError as AsyncIOTimeoutError
from aiohttp import ClientError
from py_nightscout import Api as NightscoutAPI
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import SLOW_UPDATE_WARNING
from .const import DOMAIN
PLATFORMS = ["sensor"]
_API_TIMEOUT = SLOW_UPDATE_WARNING - 1
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Nightscout component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Nightscout from a config entry."""
server_url = entry.data[CONF_URL]
api_key = entry.data.get(CONF_API_KEY)
session = async_get_clientsession(hass)
api = NightscoutAPI(server_url, session=session, api_secret=api_key)
try:
status = await api.get_server_status()
except (ClientError, AsyncIOTimeoutError, OSError) as error:
raise ConfigEntryNotReady from error
hass.data[DOMAIN][entry.entry_id] = api
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, server_url)},
manufacturer="Nightscout Foundation",
name=status.name,
sw_version=status.version,
entry_type="service",
)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainercv.links.model.faster_rcnn.utils.anchor_target_creator import\
AnchorTargetCreator
from chainercv.links.model.faster_rcnn.utils.proposal_target_creator import\
ProposalTargetCreator
class FasterRCNNTrainChain(chainer.Chain):
"""Calculate losses for Faster R-CNN and report them.
This is used to train Faster R-CNN in the joint training scheme
[#FRCNN]_.
The losses include:
* :obj:`rpn_loc_loss`: The localization loss for \
Region Proposal Network (RPN).
* :obj:`rpn_cls_loss`: The classification loss for RPN.
* :obj:`roi_loc_loss`: The localization loss for the head module.
* :obj:`roi_cls_loss`: The classification loss for the head module.
.. [#FRCNN] Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun. \
Faster R-CNN: Towards Real-Time Object Detection with \
Region Proposal Networks. NIPS 2015.
Args:
faster_rcnn (~chainercv.links.model.faster_rcnn.FasterRCNN):
A Faster R-CNN model that is going to be trained.
rpn_sigma (float): Sigma parameter for the localization loss
of Region Proposal Network (RPN). The default value is 3,
which is the value used in [#FRCNN]_.
        roi_sigma (float): Sigma parameter for the localization loss of
the head. The default value is 1, which is the value used
in [#FRCNN]_.
anchor_target_creator: An instantiation of
:class:`~chainercv.links.model.faster_rcnn.AnchorTargetCreator`.
proposal_target_creator: An instantiation of
:class:`~chainercv.links.model.faster_rcnn.ProposalTargetCreator`.
"""
def __init__(self, faster_rcnn, rpn_sigma=3., roi_sigma=1.,
anchor_target_creator=AnchorTargetCreator(),
proposal_target_creator=ProposalTargetCreator()):
super(FasterRCNNTrainChain, self).__init__()
with self.init_scope():
self.faster_rcnn = faster_rcnn
self.rpn_sigma = rpn_sigma
self.roi_sigma = roi_sigma
self.anchor_target_creator = anchor_target_creator
self.proposal_target_creator = proposal_target_creator
self.loc_normalize_mean = faster_rcnn.loc_normalize_mean
self.loc_normalize_std = faster_rcnn.loc_normalize_std
def forward(self, imgs, bboxes, labels, scales):
"""Forward Faster R-CNN and calculate losses.
        Here are the notations used.
* :math:`N` is the batch size.
* :math:`R` is the number of bounding boxes per image.
Currently, only :math:`N=1` is supported.
Args:
imgs (~chainer.Variable): A variable with a batch of images.
bboxes (~chainer.Variable): A batch of bounding boxes.
Its shape is :math:`(N, R, 4)`.
labels (~chainer.Variable): A batch of labels.
Its shape is :math:`(N, R)`. The background is excluded from
the definition, which means that the range of the value
is :math:`[0, L - 1]`. :math:`L` is the number of foreground
classes.
scales (~chainer.Variable): Amount of scaling applied to
each input image during preprocessing.
Returns:
chainer.Variable:
Scalar loss variable.
This is the sum of losses for Region Proposal Network and
the head module.
"""
if isinstance(bboxes, chainer.Variable):
bboxes = bboxes.array
if isinstance(labels, chainer.Variable):
labels = labels.array
if isinstance(scales, chainer.Variable):
scales = scales.array
n = bboxes.shape[0]
if n != 1:
raise ValueError('Currently only batch size 1 is supported.')
scales = cuda.to_cpu(scales).tolist()
_, _, H, W = imgs.shape
img_size = (H, W)
features = self.faster_rcnn.extractor(imgs)
rpn_locs, rpn_scores, rois, roi_indices, anchor = self.faster_rcnn.rpn(
features, img_size, scales)
# Since batch size is one, convert variables to singular form
bbox = bboxes[0]
label = labels[0]
rpn_score = rpn_scores[0]
rpn_loc = rpn_locs[0]
roi = rois
# Sample RoIs and forward
sample_roi, gt_roi_loc, gt_roi_label = self.proposal_target_creator(
roi, bbox, label,
self.loc_normalize_mean, self.loc_normalize_std)
sample_roi_index = self.xp.zeros((len(sample_roi),), dtype=np.int32)
roi_cls_loc, roi_score = self.faster_rcnn.head(
features, sample_roi, sample_roi_index)
# RPN losses
gt_rpn_loc, gt_rpn_label = self.anchor_target_creator(
bbox, anchor, img_size)
rpn_loc_loss = _fast_rcnn_loc_loss(
rpn_loc, gt_rpn_loc, gt_rpn_label, self.rpn_sigma)
rpn_cls_loss = F.softmax_cross_entropy(rpn_score, gt_rpn_label)
# Losses for outputs of the head.
n_sample = roi_cls_loc.shape[0]
roi_cls_loc = roi_cls_loc.reshape((n_sample, -1, 4))
roi_loc = roi_cls_loc[self.xp.arange(n_sample), gt_roi_label]
roi_loc_loss = _fast_rcnn_loc_loss(
roi_loc, gt_roi_loc, gt_roi_label, self.roi_sigma)
roi_cls_loss = F.softmax_cross_entropy(roi_score, gt_roi_label)
loss = rpn_loc_loss + rpn_cls_loss + roi_loc_loss + roi_cls_loss
chainer.reporter.report({'rpn_loc_loss': rpn_loc_loss,
'rpn_cls_loss': rpn_cls_loss,
'roi_loc_loss': roi_loc_loss,
'roi_cls_loss': roi_cls_loss,
'loss': loss},
self)
return loss
def _smooth_l1_loss(x, t, in_weight, sigma):
sigma2 = sigma ** 2
diff = in_weight * (x - t)
abs_diff = F.absolute(diff)
flag = (abs_diff.array < (1. / sigma2)).astype(np.float32)
y = (flag * (sigma2 / 2.) * F.square(diff) +
(1 - flag) * (abs_diff - 0.5 / sigma2))
return F.sum(y)
def _fast_rcnn_loc_loss(pred_loc, gt_loc, gt_label, sigma):
xp = chainer.backends.cuda.get_array_module(pred_loc)
in_weight = xp.zeros_like(gt_loc)
# Localization loss is calculated only for positive rois.
in_weight[gt_label > 0] = 1
loc_loss = _smooth_l1_loss(pred_loc, gt_loc, in_weight, sigma)
    # Normalize by the total number of negative and positive rois.
loc_loss /= xp.sum(gt_label >= 0)
return loc_loss
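# Hedged worked example (plain NumPy, mirroring _smooth_l1_loss above): with
# sigma = 1, residuals with |x - t| < 1 fall on the quadratic branch
# (sigma2 / 2 * diff**2) while larger residuals fall on the linear branch
# (|diff| - 0.5 / sigma2). Illustrative only; not called anywhere.
def _example_smooth_l1():
    x = np.array([0.2, 2.0], dtype=np.float32)
    t = np.array([0.0, 0.0], dtype=np.float32)
    diff = x - t
    abs_diff = np.abs(diff)
    flag = (abs_diff < 1.0).astype(np.float32)
    y = flag * 0.5 * diff ** 2 + (1 - flag) * (abs_diff - 0.5)
    # y is approximately [0.02, 1.5]; _smooth_l1_loss would return their sum.
    return float(y.sum())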
|
from absl import flags
# Sentinel value for unspecified platform.
GCP_MIN_CPU_PLATFORM_NONE = 'none'
flags.DEFINE_string('gcloud_path', 'gcloud', 'The path for the gcloud utility.')
flags.DEFINE_list('additional_gcloud_flags', [],
'Additional flags to pass to gcloud.')
flags.DEFINE_integer(
'gce_num_local_ssds', 0,
'The number of ssds that should be added to the VM. Note '
'that this is currently only supported in certain zones '
'(see https://cloud.google.com/compute/docs/local-ssd).')
flags.DEFINE_string(
'gcloud_scopes', None, 'If set, space-separated list of '
'scopes to apply to every created machine')
flags.DEFINE_boolean('gce_migrate_on_maintenance', True, 'If true, allow VM '
'migration on GCE host maintenance.')
flags.DEFINE_boolean('gce_preemptible_vms', False, 'If true, use preemptible '
'VMs on GCE.')
flags.DEFINE_string(
'image_family', None, 'The family of the image that the boot disk will be '
'initialized with. The --image flag will take priority over this flag. See:'
' https://cloud.google.com/sdk/gcloud/reference/compute/instances/create')
flags.DEFINE_string(
'image_project', None, 'The project against which all image references will'
' be resolved. See: '
'https://cloud.google.com/sdk/gcloud/reference/compute/disks/create')
flags.DEFINE_string(
'gce_network_name', None, 'The name of an already created '
'network to use instead of creating a new one.')
flags.DEFINE_string(
'gce_subnet_name', None, 'The name of an already created '
'subnet to use instead of creating a new one.')
flags.DEFINE_string(
'gce_subnet_region', None, 'Region to create subnet in '
'instead of automatically creating one in every region.')
flags.DEFINE_string(
'gce_subnet_addr', '10.128.0.0/20', 'Address range to the '
    'subnet, given in CIDR notation. Not used unless '
'--gce_subnet_region is given.')
flags.DEFINE_string(
'gce_remote_access_firewall_rule', None, 'The name of an '
'already created firewall rule which allows remote access '
'instead of creating a new one.')
flags.DEFINE_multi_string(
'gcp_instance_metadata_from_file', [],
'A colon separated key-value pair that will be added to the '
'"--metadata-from-file" flag of the gcloud cli (with the colon replaced by '
'the equal sign). Multiple key-value pairs may be specified by separating '
'each pair by commas. This option can be repeated multiple times. For '
'information about GCP instance metadata, see: --metadata-from-file from '
'`gcloud help compute instances create`.')
flags.DEFINE_multi_string(
'gcp_instance_metadata', [],
'A colon separated key-value pair that will be added to the '
'"--metadata" flag of the gcloud cli (with the colon replaced by the equal '
'sign). Multiple key-value pairs may be specified by separating each pair '
'by commas. This option can be repeated multiple times. For information '
'about GCP instance metadata, see: --metadata from '
'`gcloud help compute instances create`.')
flags.DEFINE_integer('gce_boot_disk_size', None,
'The boot disk size in GB for GCP VMs.')
flags.DEFINE_enum('gce_boot_disk_type', None, ['pd-standard', 'pd-ssd'],
'The boot disk type for GCP VMs.')
flags.DEFINE_enum('gce_ssd_interface', 'SCSI', ['SCSI', 'NVME'],
'The ssd interface for GCE local SSD.')
flags.DEFINE_string('gcp_node_type', None,
'The node type of all sole tenant hosts that get created.')
flags.DEFINE_enum(
'gcp_min_cpu_platform', None, [
GCP_MIN_CPU_PLATFORM_NONE, 'sandybridge', 'ivybridge', 'haswell',
'broadwell', 'skylake', 'cascadelake',
], 'When specified, the VM will have either the specified '
'architecture or a newer one. Architecture availability is zone dependent.')
flags.DEFINE_string(
'gce_accelerator_type_override', None,
'When specified, override the accelerator_type string passed to the gcloud '
'compute instance create command.')
flags.DEFINE_string('gcp_preprovisioned_data_bucket', None,
'GCS bucket where pre-provisioned data has been copied.')
flags.DEFINE_integer('gcp_redis_gb', 5, 'Size of redis cluster in gb')
flags.DEFINE_string('gcp_service_account', None, 'Service account to use for '
'authorization.')
flags.DEFINE_string(
'gcp_service_account_key_file', None,
'Local path to file that contains a private authorization '
'key, used to activate gcloud.')
flags.DEFINE_list('gce_tags', None, 'List of --tags when creating a VM')
flags.DEFINE_boolean('gke_enable_alpha', False,
'Whether to enable alpha kubernetes clusters.')
flags.DEFINE_string('gcp_dataproc_subnet', None,
'Specifies the subnet that the cluster will be part of.')
flags.DEFINE_multi_string('gcp_dataproc_property', [],
'Specifies configuration properties for installed '
'packages, such as Hadoop and Spark. Properties are '
'mapped to configuration files by specifying a prefix'
', such as "core:io.serializations". '
'See https://cloud.google.com/dataproc/docs/concepts/'
'configuring-clusters/cluster-properties '
'for details.')
flags.DEFINE_string('gcp_dataproc_image', None,
'Specifies the custom image URI or the custom image name '
'that will be used to create a cluster.')
flags.DEFINE_boolean('gcp_internal_ip', False,
                     'Use internal IPs for ssh or scp commands. gcloud beta '
'components must be installed to use this flag.')
flags.DEFINE_enum('gce_network_tier', 'premium', ['premium', 'standard'],
'Network tier to use for all GCE VMs. Note that standard '
'networking is only available in certain regions. See '
'https://cloud.google.com/network-tiers/docs/overview')
flags.DEFINE_boolean(
'gce_shielded_secure_boot', False,
'Whether the image uses the shielded VM feature')
flags.DEFINE_boolean('gce_firewall_rules_clean_all', False,
'Determines whether all the gce firewall rules should be '
'cleaned up before deleting the network. If firewall '
'rules are added manually, PKB will not know about all of '
'them. However, they must be deleted in order to '
'successfully delete the PKB-created network.')
flags.DEFINE_enum('bq_client_interface', 'CLI',
['CLI', 'JAVA', 'SIMBA_JDBC_1_2_4_1007'],
'The Runtime Interface used when interacting with BigQuery.')
flags.DEFINE_string('gcp_preemptible_status_bucket', None,
'The GCS bucket to store the preemptible status when '
'running on GCP.')
def _ValidatePreemptFlags(flags_dict):
if flags_dict['gce_preemptible_vms']:
return bool(flags_dict['gcp_preemptible_status_bucket'])
return True
flags.register_multi_flags_validator(
['gce_preemptible_vms', 'gcp_preemptible_status_bucket'],
_ValidatePreemptFlags, 'When gce_preemptible_vms is specified, '
'gcp_preemptible_status_bucket must be specified.')
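# Hedged sketch of the validator's contract above: using preemptible VMs
# requires a status bucket, otherwise the flag combination is rejected.
# Illustrative only; this function is not registered or called anywhere.
def _ExamplePreemptFlagCombinations():
  assert _ValidatePreemptFlags(
      {'gce_preemptible_vms': False, 'gcp_preemptible_status_bucket': None})
  assert not _ValidatePreemptFlags(
      {'gce_preemptible_vms': True, 'gcp_preemptible_status_bucket': None})
  assert _ValidatePreemptFlags(
      {'gce_preemptible_vms': True, 'gcp_preemptible_status_bucket': 'bkt'})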
|
import fnmatch
import os
from absl import flags
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
BEAM_JAVA_SDK = 'java'
BEAM_PYTHON_SDK = 'python'
flags.DEFINE_string('gradle_binary', None,
'Set to use a different gradle binary than gradle wrapper '
'from the repository')
flags.DEFINE_string('beam_location', None,
'Location of already checked out Beam codebase.')
flags.DEFINE_string('beam_it_module', None,
'Gradle module containing integration test. Use full '
'module starting and separated by colon, like :sdk:python')
flags.DEFINE_boolean('beam_prebuilt', False,
'Set this to indicate that the repo in beam_location '
'does not need to be rebuilt before being used')
flags.DEFINE_integer('beam_it_timeout', 600, 'Integration Test Timeout.')
flags.DEFINE_string('git_binary', 'git', 'Path to git binary.')
flags.DEFINE_string('beam_version', None,
'Version of Beam to download. Use tag from Github '
'as value. If not specified, will use HEAD.')
flags.DEFINE_enum('beam_sdk', None, [BEAM_JAVA_SDK, BEAM_PYTHON_SDK],
'Which BEAM SDK is used to build the benchmark pipeline.')
flags.DEFINE_string('beam_python_attr', 'IT',
'Test decorator that is used in Beam Python to filter a '
'specific category.')
flags.DEFINE_string('beam_python_sdk_location', None,
'Python SDK tar ball location. It is a required option to '
'run Python pipeline.')
flags.DEFINE_string('beam_extra_properties', None,
                    'Allows specifying a list of key-value pairs that will be '
                    'forwarded to the target Gradle command as system properties')
flags.DEFINE_string('beam_runner', 'dataflow', 'Defines runner which will be used in tests')
flags.DEFINE_string('beam_runner_option', None,
'Overrides any pipeline options to specify the runner.')
flags.DEFINE_string('beam_filesystem', None,
'Defines filesystem which will be used in tests. '
'If not specified it will use runner\'s local filesystem.')
FLAGS = flags.FLAGS
SUPPORTED_RUNNERS = [dpb_service.DATAFLOW]
BEAM_REPO_LOCATION = 'https://github.com/apache/beam.git'
DEFAULT_PYTHON_TAR_PATTERN = 'apache-beam-*.tar.gz'
def AddRunnerArgument(command, runner_name):
if runner_name is None or runner_name == 'direct':
command.append('-DintegrationTestRunner=direct')
if runner_name == 'dataflow':
command.append('-DintegrationTestRunner=dataflow')
def AddRunnerPipelineOption(beam_pipeline_options, runner_name,
runner_option_override):
"""Add runner to pipeline options."""
runner_pipeline_option = ''
if runner_name == 'dataflow':
runner_pipeline_option = ('"--runner=TestDataflowRunner"')
if runner_name == 'direct':
runner_pipeline_option = ('"--runner=DirectRunner"')
if runner_option_override:
runner_pipeline_option = '--runner=' + runner_option_override
if len(runner_pipeline_option) > 0:
beam_pipeline_options.append(runner_pipeline_option)
def AddFilesystemArgument(command, filesystem_name):
if filesystem_name == 'hdfs':
command.append('-Dfilesystem=hdfs')
def AddExtraProperties(command, extra_properties):
if not extra_properties:
return
if 'integrationTestPipelineOptions=' in extra_properties:
raise ValueError('integrationTestPipelineOptions must not be in '
'beam_extra_properties')
extra_properties = extra_properties.rstrip(']').lstrip('[').split(',')
extra_properties = [p.rstrip('" ').lstrip('" ') for p in extra_properties]
for p in extra_properties:
command.append('-D{}'.format(p))
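# Hedged usage sketch: --beam_extra_properties takes a bracketed,
# comma-separated list of key=value pairs, and each pair becomes a -D system
# property on the Gradle command line. The values below are made up;
# illustrative only.
def _ExampleAddExtraProperties():
  cmd = ['./gradlew']
  AddExtraProperties(cmd, '["filesystem=gcs", "project=my-project"]')
  # cmd is now ['./gradlew', '-Dfilesystem=gcs', '-Dproject=my-project']
  return cmd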
def AddPythonAttributes(command, attributes):
if attributes:
command.append('-Dattr={}'.format(attributes))
def AddTaskArgument(command, task_name, module):
if not task_name or not module:
raise ValueError('task_name and module should not be empty.')
command.append('{}:{}'.format(module, task_name))
def InitializeBeamRepo(benchmark_spec):
"""Ensures environment is prepared for running Beam benchmarks.
In the absence of FLAGS.beam_location, initializes the beam source code base
by checking out the repository from github. Specific branch selection is
supported.
Args:
benchmark_spec: The PKB spec for the benchmark to run.
"""
if benchmark_spec.dpb_service.SERVICE_TYPE not in SUPPORTED_RUNNERS:
raise NotImplementedError('Unsupported Runner')
vm_util.GenTempDir()
if FLAGS.beam_location is None:
git_clone_command = [FLAGS.git_binary, 'clone', BEAM_REPO_LOCATION]
if FLAGS.beam_version:
git_clone_command.append('--branch={}'.format(FLAGS.beam_version))
git_clone_command.append('--single-branch')
vm_util.IssueCommand(git_clone_command, cwd=vm_util.GetTempDir())
elif not os.path.exists(FLAGS.beam_location):
raise errors.Config.InvalidValue('Directory indicated by beam_location '
'does not exist: {}.'.format(
FLAGS.beam_location))
_PrebuildBeam()
def _PrebuildBeam():
"""Rebuild beam if it was not build earlier."""
if not FLAGS.beam_prebuilt:
gradle_prebuild_tasks = ['clean', 'assemble']
gradle_prebuild_flags = ['--stacktrace', '--info']
build_command = [_GetGradleCommand()]
build_command.extend(gradle_prebuild_flags)
for task in gradle_prebuild_tasks:
AddTaskArgument(build_command, task, FLAGS.beam_it_module)
AddRunnerArgument(build_command, FLAGS.beam_runner)
AddFilesystemArgument(build_command, FLAGS.beam_filesystem)
AddExtraProperties(build_command, FLAGS.beam_extra_properties)
vm_util.IssueCommand(build_command, timeout=1500, cwd=_GetBeamDir())
def BuildBeamCommand(benchmark_spec, classname, job_arguments):
"""Constructs a Beam execution command for the benchmark.
Args:
benchmark_spec: The PKB spec for the benchmark to run.
classname: The classname of the class to run.
job_arguments: The additional job arguments provided for the run.
Returns:
cmd: Array containing the built command.
beam_dir: The directory in which to run the command.
"""
if benchmark_spec.service_type not in SUPPORTED_RUNNERS:
raise NotImplementedError('Unsupported Runner')
base_dir = _GetBeamDir()
if FLAGS.beam_sdk == BEAM_JAVA_SDK:
cmd = _BuildGradleCommand(classname, job_arguments)
elif FLAGS.beam_sdk == BEAM_PYTHON_SDK:
cmd = _BuildPythonCommand(benchmark_spec, classname, job_arguments)
else:
raise NotImplementedError('Unsupported Beam SDK: %s.' % FLAGS.beam_sdk)
return cmd, base_dir
def _BuildGradleCommand(classname, job_arguments):
"""Constructs a Gradle command for the benchmark.
Args:
classname: The classname of the class to run.
job_arguments: The additional job arguments provided for the run.
Returns:
cmd: Array containing the built command.
"""
cmd = []
gradle_executable = _GetGradleCommand()
if not vm_util.ExecutableOnPath(gradle_executable):
raise errors.Setup.MissingExecutableError(
'Could not find required executable "%s"' % gradle_executable)
cmd.append(gradle_executable)
AddTaskArgument(cmd, 'integrationTest', FLAGS.beam_it_module)
cmd.append('--tests={}'.format(classname))
beam_args = job_arguments if job_arguments else []
AddRunnerArgument(cmd, FLAGS.beam_runner)
AddRunnerPipelineOption(beam_args, FLAGS.beam_runner,
FLAGS.beam_runner_option)
AddFilesystemArgument(cmd, FLAGS.beam_filesystem)
AddExtraProperties(cmd, FLAGS.beam_extra_properties)
cmd.append('-DintegrationTestPipelineOptions='
'[{}]'.format(','.join(beam_args)))
cmd.append('--stacktrace')
cmd.append('--info')
cmd.append('--scan')
return cmd
def _BuildPythonCommand(benchmark_spec, classname, job_arguments):
"""Constructs Gradle command for Python benchmark.
Python integration tests can be invoked from Gradle task
`integrationTest`. How Python Gradle command constructed
is different from Java. We can use following system properties
in commandline:
-Dtests: fully qualified class/module name of the test to run.
e.g. apache_beam.examples.wordcount_it_test:WordCountIT
-Dattr: a set of tests that are annotated by this attribute tag.
-DpipelineOptions: a set of pipeline options needed to run Beam job
Args:
benchmark_spec: The PKB spec for the benchmark to run.
classname: The fully qualified class/module name of the test to run.
job_arguments: The additional job arguments provided for the run.
Returns:
cmd: Array holds the execution command.
"""
cmd = []
gradle_executable = _GetGradleCommand()
if not vm_util.ExecutableOnPath(gradle_executable):
raise errors.Setup.MissingExecutableError(
'Could not find required executable "%s"' % gradle_executable)
cmd.append(gradle_executable)
AddTaskArgument(cmd, 'integrationTest', FLAGS.beam_it_module)
cmd.append('-Dtests={}'.format(classname))
AddPythonAttributes(cmd, FLAGS.beam_python_attr)
beam_args = job_arguments if job_arguments else []
if benchmark_spec.service_type == dpb_service.DATAFLOW:
beam_args.append('"--runner={}"'.format(FLAGS.beam_runner))
sdk_location = FLAGS.beam_python_sdk_location
if not sdk_location:
tar_list = _FindFiles(_GetBeamPythonDir(), DEFAULT_PYTHON_TAR_PATTERN)
if not tar_list:
raise RuntimeError('No python sdk tar file is available.')
else:
sdk_location = tar_list[0]
beam_args.append('"--sdk_location={}"'.format(sdk_location))
cmd.append('-DpipelineOptions={}'.format(' '.join(beam_args)))
cmd.append('--info')
cmd.append('--scan')
return cmd
def _GetGradleCommand():
return FLAGS.gradle_binary or os.path.join(_GetBeamDir(), 'gradlew')
def _GetBeamDir():
# TODO: This is temporary, find a better way.
return FLAGS.beam_location or os.path.join(vm_util.GetTempDir(), 'beam')
def _GetBeamPythonDir():
return os.path.join(_GetBeamDir(), 'sdks/python')
def _FindFiles(base_path, pattern):
if not os.path.exists(base_path):
raise RuntimeError('No such directory: %s' % base_path)
results = []
for root, _, files in os.walk(base_path):
for f in files:
if fnmatch.fnmatch(f, pattern):
results.append(os.path.join(root, f))
return results
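# Hedged usage sketch: locating a built Python SDK tarball the same way
# _BuildPythonCommand does when --beam_python_sdk_location is not given.
# Illustrative only; calling it requires a checked-out Beam tree.
def _ExampleFindSdkTarball():
  tar_list = _FindFiles(_GetBeamPythonDir(), DEFAULT_PYTHON_TAR_PATTERN)
  return tar_list[0] if tar_list else None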
|
from django.http import HttpResponse
from django.utils.timezone import now
from django.views.generic.edit import FormView
from .formats import base_formats
from .forms import ExportForm
from .resources import modelresource_factory
from .signals import post_export
class ExportViewMixin:
formats = base_formats.DEFAULT_FORMATS
form_class = ExportForm
resource_class = None
def get_export_formats(self):
"""
Returns available export formats.
"""
return [f for f in self.formats if f().can_export()]
def get_resource_class(self):
if not self.resource_class:
return modelresource_factory(self.model)
return self.resource_class
def get_export_resource_class(self):
"""
Returns ResourceClass to use for export.
"""
return self.get_resource_class()
def get_resource_kwargs(self, request, *args, **kwargs):
return {}
def get_export_resource_kwargs(self, request, *args, **kwargs):
return self.get_resource_kwargs(request, *args, **kwargs)
def get_export_data(self, file_format, queryset, *args, **kwargs):
"""
Returns file_format representation for given queryset.
"""
resource_class = self.get_export_resource_class()
data = resource_class(**self.get_export_resource_kwargs(self.request))\
.export(queryset, *args, **kwargs)
export_data = file_format.export_data(data)
return export_data
def get_export_filename(self, file_format):
date_str = now().strftime('%Y-%m-%d')
filename = "%s-%s.%s" % (self.model.__name__,
date_str,
file_format.get_extension())
return filename
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
return context
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['formats'] = self.get_export_formats()
return kwargs
class ExportViewFormMixin(ExportViewMixin, FormView):
def form_valid(self, form):
formats = self.get_export_formats()
file_format = formats[
int(form.cleaned_data['file_format'])
]()
if hasattr(self, 'get_filterset'):
queryset = self.get_filterset(self.get_filterset_class()).qs
else:
queryset = self.get_queryset()
export_data = self.get_export_data(file_format, queryset)
content_type = file_format.get_content_type()
# Django 1.7 uses the content_type kwarg instead of mimetype
try:
response = HttpResponse(export_data, content_type=content_type)
except TypeError:
response = HttpResponse(export_data, mimetype=content_type)
response['Content-Disposition'] = 'attachment; filename="%s"' % (
self.get_export_filename(file_format),
)
post_export.send(sender=None, model=self.model)
return response
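# Hedged usage sketch: subclassing ExportViewFormMixin yields a ready-made
# export form view whose valid POST returns the chosen format as a file
# attachment. `Book`, the import path and the template name are hypothetical;
# this helper is illustrative only and is not called anywhere.
def _example_book_export_view():
    from myapp.models import Book  # hypothetical import

    class BookExportView(ExportViewFormMixin):
        model = Book
        template_name = 'myapp/export_form.html'

    return BookExportView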
|
from typing import cast, TYPE_CHECKING, Iterator, List, Optional, Set
from PyQt5.QtCore import QRect, Qt
from PyQt5.QtWebKit import QWebElement, QWebSettings
from PyQt5.QtWebKitWidgets import QWebFrame
from qutebrowser.config import config
from qutebrowser.utils import log, utils, javascript, usertypes
from qutebrowser.browser import webelem
if TYPE_CHECKING:
from qutebrowser.browser.webkit import webkittab
class IsNullError(webelem.Error):
"""Gets raised by WebKitElement if an element is null."""
class WebKitElement(webelem.AbstractWebElement):
"""A wrapper around a QWebElement."""
_tab: 'webkittab.WebKitTab'
def __init__(self, elem: QWebElement, tab: 'webkittab.WebKitTab') -> None:
super().__init__(tab)
if isinstance(elem, self.__class__):
raise TypeError("Trying to wrap a wrapper!")
if elem.isNull():
raise IsNullError('{} is a null element!'.format(elem))
self._elem = elem
def __str__(self) -> str:
self._check_vanished()
return self._elem.toPlainText()
def __eq__(self, other: object) -> bool:
if not isinstance(other, WebKitElement):
return NotImplemented
return self._elem == other._elem
def __getitem__(self, key: str) -> str:
self._check_vanished()
if key not in self:
raise KeyError(key)
return self._elem.attribute(key)
def __setitem__(self, key: str, val: str) -> None:
self._check_vanished()
self._elem.setAttribute(key, val)
def __delitem__(self, key: str) -> None:
self._check_vanished()
if key not in self:
raise KeyError(key)
self._elem.removeAttribute(key)
def __contains__(self, key: object) -> bool:
assert isinstance(key, str)
self._check_vanished()
return self._elem.hasAttribute(key)
def __iter__(self) -> Iterator[str]:
self._check_vanished()
yield from self._elem.attributeNames()
def __len__(self) -> int:
self._check_vanished()
return len(self._elem.attributeNames())
def _check_vanished(self) -> None:
"""Raise an exception if the element vanished (is null)."""
if self._elem.isNull():
raise IsNullError('Element {} vanished!'.format(self._elem))
def has_frame(self) -> bool:
self._check_vanished()
return self._elem.webFrame() is not None
def geometry(self) -> QRect:
self._check_vanished()
return self._elem.geometry()
def classes(self) -> Set[str]:
self._check_vanished()
return set(self._elem.classes())
def tag_name(self) -> str:
"""Get the tag name for the current element."""
self._check_vanished()
return self._elem.tagName().lower()
def outer_xml(self) -> str:
"""Get the full HTML representation of this element."""
self._check_vanished()
return self._elem.toOuterXml()
def is_content_editable_prop(self) -> bool:
self._check_vanished()
val = self._elem.evaluateJavaScript('this.isContentEditable || false')
assert isinstance(val, bool)
return val
def value(self) -> webelem.JsValueType:
self._check_vanished()
val = self._elem.evaluateJavaScript('this.value')
assert isinstance(val, (int, float, str, type(None))), val
return val
def set_value(self, value: webelem.JsValueType) -> None:
self._check_vanished()
if self._tab.is_deleted():
raise webelem.OrphanedError("Tab containing element vanished")
if self.is_content_editable():
log.webelem.debug("Filling {!r} via set_text.".format(self))
assert isinstance(value, str)
self._elem.setPlainText(value)
else:
log.webelem.debug("Filling {!r} via javascript.".format(self))
value = javascript.to_js(value)
self._elem.evaluateJavaScript("this.value={}".format(value))
def dispatch_event(self, event: str,
bubbles: bool = False,
cancelable: bool = False,
composed: bool = False) -> None:
self._check_vanished()
log.webelem.debug("Firing event on {!r} via javascript.".format(self))
self._elem.evaluateJavaScript(
"this.dispatchEvent(new Event({}, "
"{{'bubbles': {}, 'cancelable': {}, 'composed': {}}}))"
.format(javascript.to_js(event),
javascript.to_js(bubbles),
javascript.to_js(cancelable),
javascript.to_js(composed)))
def caret_position(self) -> int:
"""Get the text caret position for the current element."""
self._check_vanished()
pos = self._elem.evaluateJavaScript('this.selectionStart')
if pos is None:
return 0
return int(pos)
def insert_text(self, text: str) -> None:
self._check_vanished()
if not self.is_editable(strict=True):
raise webelem.Error("Element is not editable!")
log.webelem.debug("Inserting text into element {!r}".format(self))
self._elem.evaluateJavaScript("""
var text = {};
var event = document.createEvent("TextEvent");
event.initTextEvent("textInput", true, true, null, text);
this.dispatchEvent(event);
""".format(javascript.to_js(text)))
def _parent(self) -> Optional['WebKitElement']:
"""Get the parent element of this element."""
self._check_vanished()
elem = cast(Optional[QWebElement], self._elem.parent())
if elem is None or elem.isNull():
return None
return WebKitElement(elem, tab=self._tab)
def _rect_on_view_js(self) -> Optional[QRect]:
"""Javascript implementation for rect_on_view."""
# FIXME:qtwebengine maybe we can reuse this?
rects = self._elem.evaluateJavaScript("this.getClientRects()")
if rects is None: # pragma: no cover
# On e.g. Void Linux with musl libc, the stack size is too small
# for jsc, and running JS will fail. If that happens, fall back to
# the Python implementation.
# https://github.com/qutebrowser/qutebrowser/issues/1641
return None
text = utils.compact_text(self._elem.toOuterXml(), 500)
log.webelem.vdebug( # type: ignore[attr-defined]
"Client rectangles of element '{}': {}".format(text, rects))
for i in range(int(rects.get("length", 0))):
rect = rects[str(i)]
width = rect.get("width", 0)
height = rect.get("height", 0)
if width > 1 and height > 1:
# fix coordinates according to zoom level
zoom = self._elem.webFrame().zoomFactor()
if not config.val.zoom.text_only:
rect["left"] *= zoom
rect["top"] *= zoom
width *= zoom
height *= zoom
rect = QRect(int(rect["left"]), int(rect["top"]),
int(width), int(height))
frame = cast(Optional[QWebFrame], self._elem.webFrame())
while frame is not None:
# Translate to parent frames' position (scroll position
# is taken care of inside getClientRects)
rect.translate(frame.geometry().topLeft())
frame = frame.parentFrame()
return rect
return None
def _rect_on_view_python(self, elem_geometry: Optional[QRect]) -> QRect:
"""Python implementation for rect_on_view."""
if elem_geometry is None:
geometry = self._elem.geometry()
else:
geometry = elem_geometry
rect = QRect(geometry)
frame = cast(Optional[QWebFrame], self._elem.webFrame())
while frame is not None:
rect.translate(frame.geometry().topLeft())
rect.translate(frame.scrollPosition() * -1)
frame = cast(Optional[QWebFrame], frame.parentFrame())
return rect
def rect_on_view(self, *, elem_geometry: QRect = None,
no_js: bool = False) -> QRect:
"""Get the geometry of the element relative to the webview.
Uses the getClientRects() JavaScript method to obtain the collection of
rectangles containing the element and returns the first rectangle which
is large enough (larger than 1px times 1px). If all rectangles returned
by getClientRects() are too small, falls back to elem.rect_on_view().
Skipping of small rectangles is due to <a> elements containing other
elements with "display:block" style, see
https://github.com/qutebrowser/qutebrowser/issues/1298
Args:
elem_geometry: The geometry of the element, or None.
Calling QWebElement::geometry is rather expensive so
we want to avoid doing it twice.
no_js: Fall back to the Python implementation
"""
self._check_vanished()
# First try getting the element rect via JS, as that's usually more
# accurate
if elem_geometry is None and not no_js:
rect = self._rect_on_view_js()
if rect is not None:
return rect
# No suitable rects found via JS, try via the QWebElement API
return self._rect_on_view_python(elem_geometry)
def _is_hidden_css(self) -> bool:
"""Check if the given element is hidden via CSS."""
attr_values = {
attr: self._elem.styleProperty(attr, QWebElement.ComputedStyle)
for attr in ['visibility', 'display', 'opacity']
}
invisible = attr_values['visibility'] == 'hidden'
none_display = attr_values['display'] == 'none'
zero_opacity = attr_values['opacity'] == '0'
is_framework = ('ace_text-input' in self.classes() or
'custom-control-input' in self.classes())
return invisible or none_display or (zero_opacity and not is_framework)
def _is_visible(self, mainframe: QWebFrame) -> bool:
"""Check if the given element is visible in the given frame.
This is not public API because it can't be implemented easily here with
QtWebEngine, and is only used via find_css(..., only_visible=True) via
the tab API.
"""
self._check_vanished()
if self._is_hidden_css():
return False
elem_geometry = self._elem.geometry()
if not elem_geometry.isValid() and elem_geometry.x() == 0:
# Most likely an invisible link
return False
# First check if the element is visible on screen
elem_rect = self.rect_on_view(elem_geometry=elem_geometry)
mainframe_geometry = mainframe.geometry()
if elem_rect.isValid():
visible_on_screen = mainframe_geometry.intersects(elem_rect)
else:
# We got an invalid rectangle (width/height 0/0 probably), but this
# can still be a valid link.
visible_on_screen = mainframe_geometry.contains(
elem_rect.topLeft())
# Then check if it's visible in its frame if it's not in the main
# frame.
elem_frame = self._elem.webFrame()
framegeom = QRect(elem_frame.geometry())
if not framegeom.isValid():
visible_in_frame = False
elif elem_frame.parentFrame() is not None:
framegeom.moveTo(0, 0)
framegeom.translate(elem_frame.scrollPosition())
if elem_geometry.isValid():
visible_in_frame = framegeom.intersects(elem_geometry)
else:
# We got an invalid rectangle (width/height 0/0 probably), but
# this can still be a valid link.
visible_in_frame = framegeom.contains(elem_geometry.topLeft())
else:
visible_in_frame = visible_on_screen
return all([visible_on_screen, visible_in_frame])
def remove_blank_target(self) -> None:
elem: Optional[WebKitElement] = self
for _ in range(5):
if elem is None:
break
if elem.is_link():
if elem.get('target', None) == '_blank':
elem['target'] = '_top'
break
elem = elem._parent() # pylint: disable=protected-access
def delete(self) -> None:
self._elem.evaluateJavaScript('this.remove();')
def _move_text_cursor(self) -> None:
if self.is_text_input() and self.is_editable():
self._tab.caret.move_to_end_of_document()
def _requires_user_interaction(self) -> bool:
return False
def _click_editable(self, click_target: usertypes.ClickTarget) -> None:
ok = self._elem.evaluateJavaScript('this.focus(); true;')
if ok:
self._move_text_cursor()
else:
log.webelem.debug("Failed to focus via JS, falling back to event")
self._click_fake_event(click_target)
def _click_js(self, click_target: usertypes.ClickTarget) -> None:
settings = QWebSettings.globalSettings()
attribute = QWebSettings.JavascriptCanOpenWindows
could_open_windows = settings.testAttribute(attribute)
settings.setAttribute(attribute, True)
ok = self._elem.evaluateJavaScript('this.click(); true;')
settings.setAttribute(attribute, could_open_windows)
if not ok:
log.webelem.debug("Failed to click via JS, falling back to event")
self._click_fake_event(click_target)
def _click_fake_event(self, click_target: usertypes.ClickTarget,
button: Qt.MouseButton = Qt.LeftButton) -> None:
self._tab.data.override_target = click_target
super()._click_fake_event(click_target)
def get_child_frames(startframe: QWebFrame) -> List[QWebFrame]:
"""Get all children recursively of a given QWebFrame.
Loosely based on http://blog.nextgenetics.net/?e=64
Args:
startframe: The QWebFrame to start with.
Return:
A list of children QWebFrame, or an empty list.
"""
results = []
frames = [startframe]
while frames:
new_frames: List[QWebFrame] = []
for frame in frames:
results.append(frame)
new_frames += frame.childFrames()
frames = new_frames
return results
|
import os
import platform
import re
import stat
import mimetypes
import urllib.parse
import unicodedata
from email.generator import _make_boundary as make_boundary
from io import UnsupportedOperation
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.lib import cptools, httputil, file_generator_limited
def _setup_mimetypes():
"""Pre-initialize global mimetype map."""
if not mimetypes.inited:
mimetypes.init()
mimetypes.types_map['.dwg'] = 'image/x-dwg'
mimetypes.types_map['.ico'] = 'image/x-icon'
mimetypes.types_map['.bz2'] = 'application/x-bzip2'
mimetypes.types_map['.gz'] = 'application/x-gzip'
_setup_mimetypes()
def _make_content_disposition(disposition, file_name):
"""Create HTTP header for downloading a file with a UTF-8 filename.
This function implements the recommendations of :rfc:`6266#appendix-D`.
See this and related answers: https://stackoverflow.com/a/8996249/2173868.
"""
    # The filename is normalized with `unicodedata` using the NFKC form:
    # compatibility decomposition followed by canonical composition. Strings
    # that are NFKC-equivalent should be displayed in the same manner, should
    # be treated in the same way by applications such as alphabetizing names
    # or searching, and may be substituted for each other.
    # See: https://en.wikipedia.org/wiki/Unicode_equivalence.
ascii_name = (
unicodedata.normalize('NFKC', file_name).
encode('ascii', errors='ignore').decode()
)
header = '{}; filename="{}"'.format(disposition, ascii_name)
if ascii_name != file_name:
quoted_name = urllib.parse.quote(file_name)
header += '; filename*=UTF-8\'\'{}'.format(quoted_name)
return header
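# Hedged illustration of the header shapes produced above: a pure-ASCII name
# only gets the plain `filename` parameter, while a non-ASCII name also gets
# the RFC 6266 `filename*` parameter carrying the percent-encoded UTF-8 name
# (non-ASCII characters are simply dropped from the plain fallback name).
# Illustrative only; not called anywhere in this module.
def _example_content_disposition():
    plain = _make_content_disposition('attachment', 'report.pdf')
    # -> 'attachment; filename="report.pdf"'
    fancy = _make_content_disposition('attachment', 'résumé.pdf')
    # -> 'attachment; filename="rsum.pdf"; filename*=UTF-8''r%C3%A9sum%C3%A9.pdf'
    return plain, fancy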
def serve_file(path, content_type=None, disposition=None, name=None,
debug=False):
"""Set status, headers, and body in order to serve the given path.
The Content-Type header will be set to the content_type arg, if provided.
If not provided, the Content-Type will be guessed by the file extension
of the 'path' argument.
If disposition is not None, the Content-Disposition header will be set
to "<disposition>; filename=<name>; filename*=utf-8''<name>"
as described in :rfc:`6266#appendix-D`.
If name is None, it will be set to the basename of path.
If disposition is None, no Content-Disposition header will be written.
"""
response = cherrypy.serving.response
# If path is relative, users should fix it by making path absolute.
# That is, CherryPy should not guess where the application root is.
# It certainly should *not* use cwd (since CP may be invoked from a
# variety of paths). If using tools.staticdir, you can make your relative
# paths become absolute by supplying a value for "tools.staticdir.root".
if not os.path.isabs(path):
msg = "'%s' is not an absolute path." % path
if debug:
cherrypy.log(msg, 'TOOLS.STATICFILE')
raise ValueError(msg)
try:
st = os.stat(path)
except (OSError, TypeError, ValueError):
# OSError when file fails to stat
# TypeError on Python 2 when there's a null byte
# ValueError on Python 3 when there's a null byte
if debug:
cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC')
raise cherrypy.NotFound()
# Check if path is a directory.
if stat.S_ISDIR(st.st_mode):
# Let the caller deal with it as they like.
if debug:
cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC')
raise cherrypy.NotFound()
# Set the Last-Modified response header, so that
# modified-since validation code can work.
response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
cptools.validate_since()
if content_type is None:
# Set content-type based on filename extension
ext = ''
i = path.rfind('.')
if i != -1:
ext = path[i:].lower()
content_type = mimetypes.types_map.get(ext, None)
if content_type is not None:
response.headers['Content-Type'] = content_type
if debug:
cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
cd = None
if disposition is not None:
if name is None:
name = os.path.basename(path)
cd = _make_content_disposition(disposition, name)
response.headers['Content-Disposition'] = cd
if debug:
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
# Set Content-Length and use an iterable (file object)
# this way CP won't load the whole file in memory
content_length = st.st_size
fileobj = open(path, 'rb')
return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
debug=False):
"""Set status, headers, and body in order to serve the given file object.
The Content-Type header will be set to the content_type arg, if provided.
If disposition is not None, the Content-Disposition header will be set
to "<disposition>; filename=<name>; filename*=utf-8''<name>"
as described in :rfc:`6266#appendix-D`.
If name is None, 'filename' will not be set.
If disposition is None, no Content-Disposition header will be written.
CAUTION: If the request contains a 'Range' header, one or more seek()s will
be performed on the file object. This may cause undesired behavior if
the file object is not seekable. It could also produce undesired results
if the caller set the read position of the file object prior to calling
serve_fileobj(), expecting that the data would be served starting from that
position.
"""
response = cherrypy.serving.response
try:
st = os.fstat(fileobj.fileno())
except AttributeError:
if debug:
cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC')
content_length = None
except UnsupportedOperation:
content_length = None
else:
# Set the Last-Modified response header, so that
# modified-since validation code can work.
response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
cptools.validate_since()
content_length = st.st_size
if content_type is not None:
response.headers['Content-Type'] = content_type
if debug:
cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
cd = None
if disposition is not None:
if name is None:
cd = disposition
else:
cd = _make_content_disposition(disposition, name)
response.headers['Content-Disposition'] = cd
if debug:
cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
def _serve_fileobj(fileobj, content_type, content_length, debug=False):
"""Internal. Set response.body to the given file object, perhaps ranged."""
response = cherrypy.serving.response
# HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
request = cherrypy.serving.request
if request.protocol >= (1, 1):
response.headers['Accept-Ranges'] = 'bytes'
r = httputil.get_ranges(request.headers.get('Range'), content_length)
if r == []:
response.headers['Content-Range'] = 'bytes */%s' % content_length
message = ('Invalid Range (first-byte-pos greater than '
'Content-Length)')
if debug:
cherrypy.log(message, 'TOOLS.STATIC')
raise cherrypy.HTTPError(416, message)
if r:
if len(r) == 1:
# Return a single-part response.
start, stop = r[0]
if stop > content_length:
stop = content_length
r_len = stop - start
if debug:
cherrypy.log(
'Single part; start: %r, stop: %r' % (start, stop),
'TOOLS.STATIC')
response.status = '206 Partial Content'
response.headers['Content-Range'] = (
'bytes %s-%s/%s' % (start, stop - 1, content_length))
response.headers['Content-Length'] = r_len
fileobj.seek(start)
response.body = file_generator_limited(fileobj, r_len)
else:
# Return a multipart/byteranges response.
response.status = '206 Partial Content'
boundary = make_boundary()
ct = 'multipart/byteranges; boundary=%s' % boundary
response.headers['Content-Type'] = ct
if 'Content-Length' in response.headers:
# Delete Content-Length header so finalize() recalcs it.
del response.headers['Content-Length']
def file_ranges():
# Apache compatibility:
yield b'\r\n'
for start, stop in r:
if debug:
cherrypy.log(
'Multipart; start: %r, stop: %r' % (
start, stop),
'TOOLS.STATIC')
yield ntob('--' + boundary, 'ascii')
yield ntob('\r\nContent-type: %s' % content_type,
'ascii')
yield ntob(
'\r\nContent-range: bytes %s-%s/%s\r\n\r\n' % (
start, stop - 1, content_length),
'ascii')
fileobj.seek(start)
gen = file_generator_limited(fileobj, stop - start)
for chunk in gen:
yield chunk
yield b'\r\n'
# Final boundary
yield ntob('--' + boundary + '--', 'ascii')
# Apache compatibility:
yield b'\r\n'
response.body = file_ranges()
return response.body
else:
if debug:
cherrypy.log('No byteranges requested', 'TOOLS.STATIC')
# Set Content-Length and use an iterable (file object)
# this way CP won't load the whole file in memory
response.headers['Content-Length'] = content_length
response.body = fileobj
return response.body
def serve_download(path, name=None):
"""Serve 'path' as an application/x-download attachment."""
# This is such a common idiom I felt it deserved its own wrapper.
return serve_file(path, 'application/x-download', 'attachment', name)
def _attempt(filename, content_types, debug=False):
if debug:
cherrypy.log('Attempting %r (content_types %r)' %
(filename, content_types), 'TOOLS.STATICDIR')
try:
# you can set the content types for a
# complete directory per extension
content_type = None
if content_types:
r, ext = os.path.splitext(filename)
content_type = content_types.get(ext[1:], None)
serve_file(filename, content_type=content_type, debug=debug)
return True
except cherrypy.NotFound:
# If we didn't find the static file, continue handling the
# request. We might find a dynamic handler instead.
if debug:
cherrypy.log('NotFound', 'TOOLS.STATICFILE')
return False
def staticdir(section, dir, root='', match='', content_types=None, index='',
debug=False):
"""Serve a static resource from the given (root +) dir.
match
If given, request.path_info will be searched for the given
regular expression before attempting to serve static content.
content_types
If given, it should be a Python dictionary of
{file-extension: content-type} pairs, where 'file-extension' is
a string (e.g. "gif") and 'content-type' is the value to write
out in the Content-Type response header (e.g. "image/gif").
index
If provided, it should be the (relative) name of a file to
serve for directory requests. For example, if the dir argument is
'/home/me', the Request-URI is 'myapp', and the index arg is
'index.html', the file '/home/me/myapp/index.html' will be sought.
"""
request = cherrypy.serving.request
if request.method not in ('GET', 'HEAD'):
if debug:
cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
return False
if match and not re.search(match, request.path_info):
if debug:
cherrypy.log('request.path_info %r does not match pattern %r' %
(request.path_info, match), 'TOOLS.STATICDIR')
return False
# Allow the use of '~' to refer to a user's home directory.
dir = os.path.expanduser(dir)
# If dir is relative, make absolute using "root".
if not os.path.isabs(dir):
if not root:
msg = 'Static dir requires an absolute dir (or root).'
if debug:
cherrypy.log(msg, 'TOOLS.STATICDIR')
raise ValueError(msg)
dir = os.path.join(root, dir)
# Determine where we are in the object tree relative to 'section'
# (where the static tool was defined).
if section == 'global':
section = '/'
section = section.rstrip(r'\/')
branch = request.path_info[len(section) + 1:]
branch = urllib.parse.unquote(branch.lstrip(r'\/'))
# Requesting a file in sub-dir of the staticdir results
# in mixing of delimiter styles, e.g. C:\static\js/script.js.
    # Windows accepts this form, except when the path is
    # supplied in extended-path notation, e.g. \\?\C:\static\js/script.js.
# http://bit.ly/1vdioCX
if platform.system() == 'Windows':
branch = branch.replace('/', '\\')
# If branch is "", filename will end in a slash
filename = os.path.join(dir, branch)
if debug:
cherrypy.log('Checking file %r to fulfill %r' %
(filename, request.path_info), 'TOOLS.STATICDIR')
# There's a chance that the branch pulled from the URL might
# have ".." or similar uplevel attacks in it. Check that the final
# filename is a child of dir.
if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
raise cherrypy.HTTPError(403) # Forbidden
handled = _attempt(filename, content_types)
if not handled:
# Check for an index file if a folder was requested.
if index:
handled = _attempt(os.path.join(filename, index), content_types)
if handled:
request.is_index = filename[-1] in (r'\/')
return handled
def staticfile(filename, root=None, match='', content_types=None, debug=False):
"""Serve a static resource from the given (root +) filename.
match
If given, request.path_info will be searched for the given
regular expression before attempting to serve static content.
content_types
If given, it should be a Python dictionary of
{file-extension: content-type} pairs, where 'file-extension' is
a string (e.g. "gif") and 'content-type' is the value to write
out in the Content-Type response header (e.g. "image/gif").
"""
request = cherrypy.serving.request
if request.method not in ('GET', 'HEAD'):
if debug:
cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE')
return False
if match and not re.search(match, request.path_info):
if debug:
cherrypy.log('request.path_info %r does not match pattern %r' %
(request.path_info, match), 'TOOLS.STATICFILE')
return False
# If filename is relative, make absolute using "root".
if not os.path.isabs(filename):
if not root:
msg = "Static tool requires an absolute filename (got '%s')." % (
filename,)
if debug:
cherrypy.log(msg, 'TOOLS.STATICFILE')
raise ValueError(msg)
filename = os.path.join(root, filename)
return _attempt(filename, content_types, debug=debug)
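# Illustrative configuration sketch (assumption, not from the upstream
# source): enabling the staticdir and staticfile tools for an application.
# All paths below are hypothetical placeholders.
_EXAMPLE_STATIC_CONFIG = {
    '/static': {
        'tools.staticdir.on': True,
        'tools.staticdir.root': '/var/www/myapp',
        'tools.staticdir.dir': 'static',
        'tools.staticdir.index': 'index.html',
    },
    '/favicon.ico': {
        'tools.staticfile.on': True,
        'tools.staticfile.filename': '/var/www/myapp/static/favicon.ico',
    },
}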
|
from __future__ import annotations
from typing import Dict, List, Optional, Union, Set, Iterable, Tuple, overload
import asyncio
from argparse import Namespace
from collections import defaultdict
import discord
from .config import Config
from .utils import AsyncIter
class PrefixManager:
def __init__(self, config: Config, cli_flags: Namespace):
self._config: Config = config
        self._global_prefix_override: Optional[List[str]] = (
sorted(cli_flags.prefix, reverse=True) or None
)
self._cached: Dict[Optional[int], List[str]] = {}
async def get_prefixes(self, guild: Optional[discord.Guild] = None) -> List[str]:
ret: List[str]
gid: Optional[int] = guild.id if guild else None
if gid in self._cached:
ret = self._cached[gid].copy()
else:
if gid is not None:
ret = await self._config.guild_from_id(gid).prefix()
if not ret:
ret = await self.get_prefixes(None)
else:
                ret = self._global_prefix_override or (await self._config.prefix())
self._cached[gid] = ret.copy()
return ret
async def set_prefixes(
self, guild: Optional[discord.Guild] = None, prefixes: Optional[List[str]] = None
):
gid: Optional[int] = guild.id if guild else None
prefixes = prefixes or []
        if not isinstance(prefixes, list) or not all(isinstance(pfx, str) for pfx in prefixes):
raise TypeError("Prefixes must be a list of strings")
prefixes = sorted(prefixes, reverse=True)
if gid is None:
if not prefixes:
raise ValueError("You must have at least one prefix.")
self._cached.clear()
await self._config.prefix.set(prefixes)
else:
self._cached.pop(gid, None)
await self._config.guild_from_id(gid).prefix.set(prefixes)
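# Illustrative sketch (assumption, not part of the upstream module): setting
# guild prefixes pops the cached entry, so the following lookup reads the
# freshly stored, reverse-sorted list.
async def _example_prefix_roundtrip(manager: PrefixManager, guild: discord.Guild) -> List[str]:
    await manager.set_prefixes(guild, ["!", "?"])
    return await manager.get_prefixes(guild)  # -> ["?", "!"]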
class I18nManager:
def __init__(self, config: Config):
self._config: Config = config
self._guild_locale: Dict[Union[int, None], Union[str, None]] = {}
self._guild_regional_format: Dict[Union[int, None], Union[str, None]] = {}
async def get_locale(self, guild: Union[discord.Guild, None]) -> str:
"""Get the guild locale from the cache"""
# Ensure global locale is in the cache
if None not in self._guild_locale:
global_locale = await self._config.locale()
self._guild_locale[None] = global_locale
if guild is None: # Not a guild so cannot support guild locale
            # Return the bot's globally set locale since there is no guild scope.
return self._guild_locale[None]
elif guild.id in self._guild_locale: # Cached guild
if self._guild_locale[guild.id] is None:
return self._guild_locale[None]
else:
return self._guild_locale[guild.id]
else: # Uncached guild
out = await self._config.guild(guild).locale() # No locale set
if out is None:
self._guild_locale[guild.id] = None
return self._guild_locale[None]
else:
self._guild_locale[guild.id] = out
return out
@overload
async def set_locale(self, guild: None, locale: str):
...
@overload
async def set_locale(self, guild: discord.Guild, locale: Union[str, None]):
...
async def set_locale(
self, guild: Union[discord.Guild, None], locale: Union[str, None]
) -> None:
"""Set the locale in the config and cache"""
if guild is None:
if locale is None:
# this method should never be called like this
raise ValueError("Global locale can't be None!")
self._guild_locale[None] = locale
await self._config.locale.set(locale)
return
self._guild_locale[guild.id] = locale
await self._config.guild(guild).locale.set(locale)
async def get_regional_format(self, guild: Union[discord.Guild, None]) -> Optional[str]:
"""Get the regional format from the cache"""
# Ensure global locale is in the cache
if None not in self._guild_regional_format:
global_regional_format = await self._config.regional_format()
self._guild_regional_format[None] = global_regional_format
if guild is None: # Not a guild so cannot support guild locale
return self._guild_regional_format[None]
elif guild.id in self._guild_regional_format: # Cached guild
if self._guild_regional_format[guild.id] is None:
return self._guild_regional_format[None]
else:
return self._guild_regional_format[guild.id]
else: # Uncached guild
out = await self._config.guild(guild).regional_format() # No locale set
if out is None:
self._guild_regional_format[guild.id] = None
return self._guild_regional_format[None]
else: # Not cached, got a custom regional format.
self._guild_regional_format[guild.id] = out
return out
async def set_regional_format(
self, guild: Union[discord.Guild, None], regional_format: Union[str, None]
) -> None:
"""Set the regional format in the config and cache"""
if guild is None:
self._guild_regional_format[None] = regional_format
await self._config.regional_format.set(regional_format)
return
self._guild_regional_format[guild.id] = regional_format
await self._config.guild(guild).regional_format.set(regional_format)
class IgnoreManager:
def __init__(self, config: Config):
self._config: Config = config
self._cached_channels: Dict[int, bool] = {}
self._cached_guilds: Dict[int, bool] = {}
async def get_ignored_channel(
self, channel: discord.TextChannel, check_category: bool = True
) -> bool:
ret: bool
cid: int = channel.id
cat_id: Optional[int] = (
channel.category.id if check_category and channel.category else None
)
if cid in self._cached_channels:
chan_ret = self._cached_channels[cid]
else:
chan_ret = await self._config.channel_from_id(cid).ignored()
self._cached_channels[cid] = chan_ret
if cat_id and cat_id in self._cached_channels:
cat_ret = self._cached_channels[cat_id]
else:
if cat_id:
cat_ret = await self._config.channel_from_id(cat_id).ignored()
self._cached_channels[cat_id] = cat_ret
else:
cat_ret = False
ret = chan_ret or cat_ret
return ret
async def set_ignored_channel(
self, channel: Union[discord.TextChannel, discord.CategoryChannel], set_to: bool
):
cid: int = channel.id
self._cached_channels[cid] = set_to
if set_to:
await self._config.channel_from_id(cid).ignored.set(set_to)
else:
await self._config.channel_from_id(cid).ignored.clear()
async def get_ignored_guild(self, guild: discord.Guild) -> bool:
ret: bool
gid: int = guild.id
if gid in self._cached_guilds:
ret = self._cached_guilds[gid]
else:
ret = await self._config.guild_from_id(gid).ignored()
self._cached_guilds[gid] = ret
return ret
async def set_ignored_guild(self, guild: discord.Guild, set_to: bool):
gid: int = guild.id
self._cached_guilds[gid] = set_to
if set_to:
await self._config.guild_from_id(gid).ignored.set(set_to)
else:
await self._config.guild_from_id(gid).ignored.clear()
class WhitelistBlacklistManager:
def __init__(self, config: Config):
self._config: Config = config
self._cached_whitelist: Dict[Optional[int], Set[int]] = {}
self._cached_blacklist: Dict[Optional[int], Set[int]] = {}
# because of discord deletion
# we now have sync and async access that may need to happen at the
# same time.
# blame discord for this.
self._access_lock = asyncio.Lock()
async def discord_deleted_user(self, user_id: int):
async with self._access_lock:
async for guild_id_or_none, ids in AsyncIter(
self._cached_whitelist.items(), steps=100
):
ids.discard(user_id)
async for guild_id_or_none, ids in AsyncIter(
self._cached_blacklist.items(), steps=100
):
ids.discard(user_id)
for grp in (self._config.whitelist, self._config.blacklist):
async with grp() as ul:
try:
ul.remove(user_id)
except ValueError:
pass
            # Don't use this pattern in extensions; it's optimized and controlled for here,
            # but isn't safe for third-party use.
async with self._config._get_base_group("GUILD").all() as abuse:
for guild_str, guild_data in abuse.items():
for l_name in ("whitelist", "blacklist"):
try:
guild_data[l_name].remove(user_id)
except (ValueError, KeyError):
pass # this is raw access not filled with defaults
async def get_whitelist(self, guild: Optional[discord.Guild] = None) -> Set[int]:
async with self._access_lock:
ret: Set[int]
gid: Optional[int] = guild.id if guild else None
if gid in self._cached_whitelist:
ret = self._cached_whitelist[gid].copy()
else:
if gid is not None:
ret = set(await self._config.guild_from_id(gid).whitelist())
else:
ret = set(await self._config.whitelist())
self._cached_whitelist[gid] = ret.copy()
return ret
async def add_to_whitelist(self, guild: Optional[discord.Guild], role_or_user: Iterable[int]):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
role_or_user = role_or_user or []
if not all(isinstance(r_or_u, int) for r_or_u in role_or_user):
raise TypeError("`role_or_user` must be an iterable of `int`s.")
if gid is None:
if gid not in self._cached_whitelist:
self._cached_whitelist[gid] = set(await self._config.whitelist())
self._cached_whitelist[gid].update(role_or_user)
await self._config.whitelist.set(list(self._cached_whitelist[gid]))
else:
if gid not in self._cached_whitelist:
self._cached_whitelist[gid] = set(
await self._config.guild_from_id(gid).whitelist()
)
self._cached_whitelist[gid].update(role_or_user)
await self._config.guild_from_id(gid).whitelist.set(
list(self._cached_whitelist[gid])
)
async def clear_whitelist(self, guild: Optional[discord.Guild] = None):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
self._cached_whitelist[gid] = set()
if gid is None:
await self._config.whitelist.clear()
else:
await self._config.guild_from_id(gid).whitelist.clear()
async def remove_from_whitelist(
self, guild: Optional[discord.Guild], role_or_user: Iterable[int]
):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
role_or_user = role_or_user or []
if not all(isinstance(r_or_u, int) for r_or_u in role_or_user):
raise TypeError("`role_or_user` must be an iterable of `int`s.")
if gid is None:
if gid not in self._cached_whitelist:
self._cached_whitelist[gid] = set(await self._config.whitelist())
self._cached_whitelist[gid].difference_update(role_or_user)
await self._config.whitelist.set(list(self._cached_whitelist[gid]))
else:
if gid not in self._cached_whitelist:
self._cached_whitelist[gid] = set(
await self._config.guild_from_id(gid).whitelist()
)
self._cached_whitelist[gid].difference_update(role_or_user)
await self._config.guild_from_id(gid).whitelist.set(
list(self._cached_whitelist[gid])
)
async def get_blacklist(self, guild: Optional[discord.Guild] = None) -> Set[int]:
async with self._access_lock:
ret: Set[int]
gid: Optional[int] = guild.id if guild else None
if gid in self._cached_blacklist:
ret = self._cached_blacklist[gid].copy()
else:
if gid is not None:
ret = set(await self._config.guild_from_id(gid).blacklist())
else:
ret = set(await self._config.blacklist())
self._cached_blacklist[gid] = ret.copy()
return ret
async def add_to_blacklist(self, guild: Optional[discord.Guild], role_or_user: Iterable[int]):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
role_or_user = role_or_user or []
if not all(isinstance(r_or_u, int) for r_or_u in role_or_user):
raise TypeError("`role_or_user` must be an iterable of `int`s.")
if gid is None:
if gid not in self._cached_blacklist:
self._cached_blacklist[gid] = set(await self._config.blacklist())
self._cached_blacklist[gid].update(role_or_user)
await self._config.blacklist.set(list(self._cached_blacklist[gid]))
else:
if gid not in self._cached_blacklist:
self._cached_blacklist[gid] = set(
await self._config.guild_from_id(gid).blacklist()
)
self._cached_blacklist[gid].update(role_or_user)
await self._config.guild_from_id(gid).blacklist.set(
list(self._cached_blacklist[gid])
)
async def clear_blacklist(self, guild: Optional[discord.Guild] = None):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
self._cached_blacklist[gid] = set()
if gid is None:
await self._config.blacklist.clear()
else:
await self._config.guild_from_id(gid).blacklist.clear()
async def remove_from_blacklist(
self, guild: Optional[discord.Guild], role_or_user: Iterable[int]
):
async with self._access_lock:
gid: Optional[int] = guild.id if guild else None
role_or_user = role_or_user or []
if not all(isinstance(r_or_u, int) for r_or_u in role_or_user):
raise TypeError("`role_or_user` must be an iterable of `int`s.")
if gid is None:
if gid not in self._cached_blacklist:
self._cached_blacklist[gid] = set(await self._config.blacklist())
self._cached_blacklist[gid].difference_update(role_or_user)
await self._config.blacklist.set(list(self._cached_blacklist[gid]))
else:
if gid not in self._cached_blacklist:
self._cached_blacklist[gid] = set(
await self._config.guild_from_id(gid).blacklist()
)
self._cached_blacklist[gid].difference_update(role_or_user)
await self._config.guild_from_id(gid).blacklist.set(
list(self._cached_blacklist[gid])
)
class DisabledCogCache:
def __init__(self, config: Config):
self._config = config
self._disable_map: Dict[str, Dict[int, bool]] = defaultdict(dict)
async def cog_disabled_in_guild(self, cog_name: str, guild_id: int) -> bool:
"""
Check if a cog is disabled in a guild
Parameters
----------
cog_name: str
This should be the cog's qualified name, not necessarily the classname
guild_id: int
Returns
-------
bool
"""
if guild_id in self._disable_map[cog_name]:
return self._disable_map[cog_name][guild_id]
gset = await self._config.custom("COG_DISABLE_SETTINGS", cog_name, guild_id).disabled()
if gset is None:
gset = await self._config.custom("COG_DISABLE_SETTINGS", cog_name, 0).disabled()
if gset is None:
gset = False
self._disable_map[cog_name][guild_id] = gset
return gset
async def default_disable(self, cog_name: str):
"""
Sets the default for a cog as disabled.
Parameters
----------
cog_name: str
This should be the cog's qualified name, not necessarily the classname
"""
await self._config.custom("COG_DISABLE_SETTINGS", cog_name, 0).disabled.set(True)
        self._disable_map.pop(cog_name, None)
async def default_enable(self, cog_name: str):
"""
Sets the default for a cog as enabled.
Parameters
----------
cog_name: str
This should be the cog's qualified name, not necessarily the classname
"""
await self._config.custom("COG_DISABLE_SETTINGS", cog_name, 0).disabled.clear()
        self._disable_map.pop(cog_name, None)
async def disable_cog_in_guild(self, cog_name: str, guild_id: int) -> bool:
"""
Disable a cog in a guild.
Parameters
----------
cog_name: str
This should be the cog's qualified name, not necessarily the classname
guild_id: int
Returns
-------
bool
Whether or not any change was made.
This may be useful for settings commands.
"""
if await self.cog_disabled_in_guild(cog_name, guild_id):
return False
self._disable_map[cog_name][guild_id] = True
await self._config.custom("COG_DISABLE_SETTINGS", cog_name, guild_id).disabled.set(True)
return True
async def enable_cog_in_guild(self, cog_name: str, guild_id: int) -> bool:
"""
Enable a cog in a guild.
Parameters
----------
cog_name: str
This should be the cog's qualified name, not necessarily the classname
guild_id: int
Returns
-------
bool
Whether or not any change was made.
This may be useful for settings commands.
"""
if not await self.cog_disabled_in_guild(cog_name, guild_id):
return False
self._disable_map[cog_name][guild_id] = False
await self._config.custom("COG_DISABLE_SETTINGS", cog_name, guild_id).disabled.set(False)
return True
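# Illustrative sketch (assumption, not part of the upstream module): the
# per-guild value set by enable_cog_in_guild() overrides the global default
# stored under guild id 0. The cog name and guild id are hypothetical.
async def _example_cog_disable_flow(config: Config) -> bool:
    cache = DisabledCogCache(config)
    await cache.default_disable("Audio")             # disabled everywhere...
    await cache.enable_cog_in_guild("Audio", 12345)  # ...except this guild
    return await cache.cog_disabled_in_guild("Audio", 12345)  # -> False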
|
from copy import copy
from .connection import maybe_channel
from .exceptions import NotBoundError
from .utils.functional import ChannelPromise
__all__ = ('Object', 'MaybeChannelBound')
def unpickle_dict(cls, kwargs):
return cls(**kwargs)
def _any(v):
return v
class Object:
"""Common base class.
Supports automatic kwargs->attributes handling, and cloning.
"""
attrs = ()
def __init__(self, *args, **kwargs):
for name, type_ in self.attrs:
value = kwargs.get(name)
if value is not None:
setattr(self, name, (type_ or _any)(value))
else:
try:
getattr(self, name)
except AttributeError:
setattr(self, name, None)
def as_dict(self, recurse=False):
def f(obj, type):
if recurse and isinstance(obj, Object):
return obj.as_dict(recurse=True)
return type(obj) if type and obj is not None else obj
return {
attr: f(getattr(self, attr), type) for attr, type in self.attrs
}
def __reduce__(self):
return unpickle_dict, (self.__class__, self.as_dict())
def __copy__(self):
return self.__class__(**self.as_dict())
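# Illustrative sketch (assumption, not part of the upstream module): a
# minimal subclass demonstrating the attrs-driven kwargs->attribute
# handling, serialization and cloning provided by Object.
class _ExampleEntity(Object):
    attrs = (('name', str), ('priority', int))
# e.g. copy(_ExampleEntity(name='tasks', priority='1')).as_dict()
# -> {'name': 'tasks', 'priority': 1}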
class MaybeChannelBound(Object):
"""Mixin for classes that can be bound to an AMQP channel."""
_channel = None
_is_bound = False
#: Defines whether maybe_declare can skip declaring this entity twice.
can_cache_declaration = False
def __call__(self, channel):
"""`self(channel) -> self.bind(channel)`."""
return self.bind(channel)
def bind(self, channel):
"""Create copy of the instance that is bound to a channel."""
return copy(self).maybe_bind(channel)
def maybe_bind(self, channel):
"""Bind instance to channel if not already bound."""
if not self.is_bound and channel:
self._channel = maybe_channel(channel)
self.when_bound()
self._is_bound = True
return self
def revive(self, channel):
"""Revive channel after the connection has been re-established.
Used by :meth:`~kombu.Connection.ensure`.
"""
if self.is_bound:
self._channel = channel
self.when_bound()
def when_bound(self):
"""Callback called when the class is bound."""
def __repr__(self):
return self._repr_entity(type(self).__name__)
def _repr_entity(self, item=''):
item = item or type(self).__name__
if self.is_bound:
return '<{} bound to chan:{}>'.format(
item or type(self).__name__, self.channel.channel_id)
return f'<unbound {item}>'
@property
def is_bound(self):
"""Flag set if the channel is bound."""
return self._is_bound and self._channel is not None
@property
def channel(self):
"""Current channel if the object is bound."""
channel = self._channel
if channel is None:
raise NotBoundError(
"Can't call method on {} not bound to a channel".format(
type(self).__name__))
if isinstance(channel, ChannelPromise):
channel = self._channel = channel()
return channel
|
import numpy as np
import unittest
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.ssd import MultiboxCoder
def _random_array(shape):
return np.random.uniform(-1, 1, size=shape).astype(np.float32)
@testing.parameterize(*testing.product_dict(
[
{
'grids': (4,),
'aspect_ratios': ((2,),),
'steps': (1,),
'sizes': (1, 2),
},
{
'grids': (4, 2, 1),
'aspect_ratios': ((2,), (3, 4), (5,)),
'steps': (1, 2, 4),
'sizes': (1, 2, 3, 4),
},
],
testing.product({
'n_fg_class': [1, 5],
'nms_thresh': [None, 0.5],
'score_thresh': [0, 0.5, np.inf],
})
))
class TestMultiboxCoder(unittest.TestCase):
def setUp(self):
self.coder = MultiboxCoder(
self.grids, self.aspect_ratios, self.steps, self.sizes, (0.1, 0.2))
self.n_bbox = sum(
grid * grid * (len(ar) + 1) * 2
for grid, ar in zip(self.grids, self.aspect_ratios))
self.bbox = _random_array((5, 4))
self.label = np.random.randint(0, self.n_fg_class, size=5)
self.mb_loc = _random_array((self.n_bbox, 4))
self.mb_conf = _random_array((self.n_bbox, self.n_fg_class + 1))
@attr.gpu
def test_to_cpu(self):
self.coder.to_gpu()
self.coder.to_cpu()
self.assertEqual(self.coder.xp, np)
@attr.gpu
def test_to_gpu(self):
self.coder.to_gpu()
self.assertEqual(self.coder.xp, cuda.cupy)
    def test_default_bbox(self):
self.assertEqual(
self.coder._default_bbox.shape, (self.n_bbox, 4))
def _check_encode(self, bbox, label):
xp = self.coder.xp
mb_loc, mb_label = self.coder.encode(bbox, label)
self.assertIsInstance(mb_loc, xp.ndarray)
self.assertEqual(mb_loc.shape, (self.n_bbox, 4))
self.assertIsInstance(mb_label, xp.ndarray)
self.assertEqual(mb_label.shape, (self.n_bbox,))
def test_encode_cpu(self):
self._check_encode(self.bbox, self.label)
@attr.gpu
def test_encode_gpu(self):
self.coder.to_gpu()
self._check_encode(cuda.to_gpu(self.bbox), cuda.to_gpu(self.label))
def _check_decode(self, mb_loc, mb_conf):
xp = self.coder.xp
bbox, label, score = self.coder.decode(
mb_loc, mb_conf, self.nms_thresh, self.score_thresh)
self.assertIsInstance(bbox, xp.ndarray)
self.assertEqual(bbox.ndim, 2)
self.assertLessEqual(bbox.shape[0], self.n_bbox * self.n_fg_class)
self.assertEqual(bbox.shape[1], 4)
self.assertIsInstance(label, xp.ndarray)
self.assertEqual(label.ndim, 1)
self.assertEqual(label.shape[0], bbox.shape[0])
self.assertIsInstance(score, xp.ndarray)
self.assertEqual(score.ndim, 1)
self.assertEqual(score.shape[0], bbox.shape[0])
def test_decode_cpu(self):
self._check_decode(self.mb_loc, self.mb_conf)
@attr.gpu
def test_decode_gpu(self):
self.coder.to_gpu()
self._check_decode(cuda.to_gpu(self.mb_loc), cuda.to_gpu(self.mb_conf))
testing.run_module(__name__, __file__)
|
import json
import logging
from typing import Dict, Optional
from absl import flags
import dataclasses
from perfkitbenchmarker import resource
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.configs import spec
from perfkitbenchmarker.providers.gcp import util
FLAGS = flags.FLAGS
flags.DEFINE_string('cloud_spanner_config',
None,
'The config for the Cloud Spanner instance. Use default '
'config if unset.')
flags.DEFINE_integer('cloud_spanner_nodes', None,
'The number of nodes for the Cloud Spanner instance.')
flags.DEFINE_string('cloud_spanner_project',
None,
'The project for the Cloud Spanner instance. Use default '
'project if unset.')
# Valid GCP Spanner types:
DEFAULT_SPANNER_TYPE = 'default'
_DEFAULT_REGION = 'us-central1'
_DEFAULT_DESCRIPTION = 'Spanner instance created by PKB.'
_DEFAULT_DDL = """
CREATE TABLE pkb_table (
id STRING(MAX),
field0 STRING(MAX)
) PRIMARY KEY(id)
"""
_DEFAULT_NODES = 1
# Common decoder configuration option.
_NONE_OK = {'default': None, 'none_ok': True}
@dataclasses.dataclass
class SpannerSpec(spec.BaseSpec):
"""Configurable options of a Spanner instance."""
# Needed for registering the spec class.
SPEC_TYPE = 'SpannerSpec'
SPEC_ATTRS = ['SERVICE_TYPE']
SERVICE_TYPE = DEFAULT_SPANNER_TYPE
service_type: str
name: str
description: str
database: str
ddl: str
config: str
nodes: int
project: str
def __init__(self,
component_full_name: str,
flag_values: Dict[str, flags.FlagValues] = None,
**kwargs):
super().__init__(component_full_name, flag_values=flag_values, **kwargs)
@classmethod
def _GetOptionDecoderConstructions(cls):
"""Gets decoder classes and constructor args for each configurable option.
Returns:
dict. Maps option name string to a (ConfigOptionDecoder class, dict) pair.
The pair specifies a decoder class and its __init__() keyword arguments
to construct in order to decode the named option.
"""
result = super()._GetOptionDecoderConstructions()
result.update({
'service_type': (
option_decoders.EnumDecoder,
{
'valid_values': [
DEFAULT_SPANNER_TYPE,
],
'default': DEFAULT_SPANNER_TYPE
}),
'name': (option_decoders.StringDecoder, _NONE_OK),
'database': (option_decoders.StringDecoder, _NONE_OK),
'description': (option_decoders.StringDecoder, _NONE_OK),
'ddl': (option_decoders.StringDecoder, _NONE_OK),
'config': (option_decoders.StringDecoder, _NONE_OK),
'nodes': (option_decoders.IntDecoder, _NONE_OK),
'project': (option_decoders.StringDecoder, _NONE_OK),
})
return result
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
"""Modifies config options based on runtime flag values.
Can be overridden by derived classes to add support for specific flags.
Args:
config_values: dict mapping config option names to provided values. May
be modified by this function.
flag_values: flags.FlagValues. Runtime flags that may override the
provided config values.
"""
super()._ApplyFlags(config_values, flag_values)
if flag_values['cloud_spanner_config'].present:
config_values['config'] = flag_values.cloud_spanner_config
if flag_values['cloud_spanner_nodes'].present:
config_values['nodes'] = flag_values.cloud_spanner_nodes
if flag_values['cloud_spanner_project'].present:
config_values['project'] = flag_values.cloud_spanner_project
def GetSpannerSpecClass(service_type) -> Optional[spec.BaseSpecMetaClass]:
"""Return the SpannerSpec class corresponding to 'service_type'."""
return spec.GetSpecClass(SpannerSpec, SERVICE_TYPE=service_type)
class GcpSpannerInstance(resource.BaseResource):
"""Object representing a GCP Spanner Instance.
The project and Cloud Spanner config must already exist. Instance and database
will be created and torn down before and after the test.
The following parameters are overridden by the corresponding FLAGs.
project: FLAGS.cloud_spanner_project
config: FLAGS.cloud_spanner_config
nodes: FLAGS.cloud_spanner_nodes
Attributes:
name: Name of the instance to create.
description: Description of the instance.
database: Name of the database to create
ddl: The schema of the database.
"""
# Required for registering the class.
RESOURCE_TYPE = 'GcpSpannerInstance'
REQUIRED_ATTRS = ['SERVICE_TYPE']
SERVICE_TYPE = DEFAULT_SPANNER_TYPE
def __init__(self,
name: Optional[str] = None,
description: Optional[str] = None,
database: Optional[str] = None,
ddl: Optional[str] = None,
config: Optional[str] = None,
nodes: Optional[int] = None,
project: Optional[str] = None,
**kwargs):
super(GcpSpannerInstance, self).__init__(**kwargs)
self.name = name or f'pkb-instance-{FLAGS.run_uri}'
self.database = database or f'pkb-database-{FLAGS.run_uri}'
self._description = description or _DEFAULT_DESCRIPTION
self._ddl = ddl or _DEFAULT_DDL
self._config = config or self._GetDefaultConfig()
self._nodes = nodes or _DEFAULT_NODES
self._end_point = None
# Cloud Spanner may not explicitly set the following common flags.
self.project = (
project or FLAGS.project or util.GetDefaultProject())
self.zone = None
def _GetDefaultConfig(self):
"""Gets the config that corresponds the region used for the test."""
try:
region = util.GetRegionFromZone(
FLAGS.zones[0] if FLAGS.zones else FLAGS.zone[0])
except IndexError:
region = _DEFAULT_REGION
return f'regional-{region}'
@classmethod
def FromSpec(cls, spanner_spec: SpannerSpec) -> 'GcpSpannerInstance':
"""Initialize Spanner from the provided spec."""
return cls(
name=spanner_spec.name,
description=spanner_spec.description,
database=spanner_spec.database,
ddl=spanner_spec.ddl,
config=spanner_spec.config,
nodes=spanner_spec.nodes,
project=spanner_spec.project)
def _Create(self):
"""Creates the instance, the database, and update the schema."""
cmd = util.GcloudCommand(self, 'spanner', 'instances', 'create', self.name)
cmd.flags['description'] = self._description
cmd.flags['nodes'] = self._nodes
cmd.flags['config'] = self._config
_, _, retcode = cmd.Issue(raise_on_failure=False)
if retcode != 0:
logging.error('Create GCP Spanner instance failed.')
return
cmd = util.GcloudCommand(self, 'spanner', 'databases', 'create',
self.database)
cmd.flags['instance'] = self.name
_, _, retcode = cmd.Issue(raise_on_failure=False)
if retcode != 0:
logging.error('Create GCP Spanner database failed.')
return
cmd = util.GcloudCommand(self, 'spanner', 'databases', 'ddl', 'update',
self.database)
cmd.flags['instance'] = self.name
cmd.flags['ddl'] = self._ddl
_, _, retcode = cmd.Issue(raise_on_failure=False)
if retcode != 0:
logging.error('Update GCP Spanner database schema failed.')
else:
logging.info('Created GCP Spanner instance and database.')
def _Delete(self):
"""Deletes the instance."""
cmd = util.GcloudCommand(self, 'spanner', 'instances', 'delete',
self.name)
_, _, retcode = cmd.Issue(raise_on_failure=False)
if retcode != 0:
logging.error('Delete GCP Spanner instance failed.')
else:
logging.info('Deleted GCP Spanner instance.')
def _Exists(self, instance_only=False):
"""Returns true if the instance and the database exists."""
cmd = util.GcloudCommand(self, 'spanner', 'instances', 'describe',
self.name)
# Do not log error or warning when checking existence.
_, _, retcode = cmd.Issue(suppress_warning=True, raise_on_failure=False)
if retcode != 0:
      logging.info('Could not find GCP Spanner instance %s.', self.name)
return False
if instance_only:
return True
cmd = util.GcloudCommand(self, 'spanner', 'databases', 'describe',
self.database)
cmd.flags['instance'] = self.name
# Do not log error or warning when checking existence.
_, _, retcode = cmd.Issue(suppress_warning=True, raise_on_failure=False)
if retcode != 0:
      logging.info('Could not find GCP Spanner database %s.', self.database)
return False
return True
def GetEndPoint(self):
"""Returns the end point for Cloud Spanner."""
if self._end_point:
return self._end_point
cmd = util.GcloudCommand(self, 'config', 'get-value',
'api_endpoint_overrides/spanner')
stdout, _, retcode = cmd.Issue(raise_on_failure=False)
if retcode != 0:
      logging.warning('Failed to retrieve the Cloud Spanner endpoint.')
return None
self._end_point = json.loads(stdout)
return self._end_point
def GetSpannerClass(
service_type: str) -> Optional[resource.AutoRegisterResourceMeta]:
"""Return the Spanner class associated with service_type."""
return resource.GetResourceClass(
GcpSpannerInstance, SERVICE_TYPE=service_type)
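# Illustrative sketch (assumption, not part of the upstream module):
# resolving the registered instance class for a spec's service type and
# building the resource from it.
def _ExampleBuildSpannerFromSpec(
    spanner_spec: SpannerSpec) -> Optional[GcpSpannerInstance]:
  cls = GetSpannerClass(spanner_spec.service_type)
  return cls.FromSpec(spanner_spec) if cls else None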
|
import unittest
from check_release_installation import (CheckInstallation,
Pip3Installation,
PipInstallation)
from mock import call
class TestCheckBothInstallations(unittest.TestCase):
def setUp(self):
self.calls = []
outer = self
class FakeSSH:
def run_checked(self, command):
outer.calls.append(call().run_checked(command))
def put(self, command):
outer.calls.append(call().put(command))
self.ssh = FakeSSH()
def test_python3(self):
version = '0.17.1.12'
ci = CheckInstallation(Pip3Installation(), self.ssh, version)
ci.check_installation()
assert [
call().run_checked('sudo rm -f $(which trash-put)'),
call().run_checked('! which trash-put'),
call().run_checked('sudo rm -f $(which trash-list)'),
call().run_checked('! which trash-list'),
call().run_checked('sudo rm -f $(which trash-rm)'),
call().run_checked('! which trash-rm'),
call().run_checked('sudo rm -f $(which trash-empty)'),
call().run_checked('! which trash-empty'),
call().run_checked('sudo rm -f $(which trash-restore)'),
call().run_checked('! which trash-restore'),
call().run_checked('sudo rm -f $(which trash)'),
call().run_checked('! which trash'),
call().put('dist/trash-cli-0.17.1.12.tar.gz'),
call().run_checked('sudo pip3 install trash-cli-0.17.1.12.tar.gz'),
call().run_checked('trash-put --version'),
call().run_checked('trash-list --version'),
call().run_checked('trash-rm --version'),
call().run_checked('trash-empty --version'),
call().run_checked('trash-restore --version'),
call().run_checked('trash --version')] == self.calls
def test_pip2_installation(self):
version = '0.17.1.12'
i = CheckInstallation(PipInstallation(), self.ssh, version)
i.check_installation()
self.maxDiff = None
assert [
call().run_checked('sudo rm -f $(which trash-put)'),
call().run_checked('! which trash-put'),
call().run_checked('sudo rm -f $(which trash-list)'),
call().run_checked('! which trash-list'),
call().run_checked('sudo rm -f $(which trash-rm)'),
call().run_checked('! which trash-rm'),
call().run_checked('sudo rm -f $(which trash-empty)'),
call().run_checked('! which trash-empty'),
call().run_checked('sudo rm -f $(which trash-restore)'),
call().run_checked('! which trash-restore'),
call().run_checked('sudo rm -f $(which trash)'),
call().run_checked('! which trash'),
call().put('dist/trash-cli-0.17.1.12.tar.gz'),
call().run_checked('sudo pip install trash-cli-0.17.1.12.tar.gz'),
call().run_checked('trash-put --version'),
call().run_checked('trash-list --version'),
call().run_checked('trash-rm --version'),
call().run_checked('trash-empty --version'),
call().run_checked('trash-restore --version'),
call().run_checked('trash --version')] == self.calls
|
from collections import OrderedDict
from typing import Optional
from aioesphomeapi import APIClient, APIConnectionError
import voluptuous as vol
from homeassistant.components import zeroconf
from homeassistant.config_entries import CONN_CLASS_LOCAL_PUSH, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .entry_data import DATA_KEY, RuntimeEntryData
DOMAIN = "esphome"
class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a esphome config flow."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize flow."""
self._host: Optional[str] = None
self._port: Optional[int] = None
self._password: Optional[str] = None
async def async_step_user(
self, user_input: Optional[ConfigType] = None, error: Optional[str] = None
): # pylint: disable=arguments-differ
"""Handle a flow initialized by the user."""
if user_input is not None:
return await self._async_authenticate_or_add(user_input)
fields = OrderedDict()
fields[vol.Required(CONF_HOST, default=self._host or vol.UNDEFINED)] = str
fields[vol.Optional(CONF_PORT, default=self._port or 6053)] = int
errors = {}
if error is not None:
errors["base"] = error
return self.async_show_form(
step_id="user", data_schema=vol.Schema(fields), errors=errors
)
@property
def _name(self):
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
return self.context.get(CONF_NAME)
@_name.setter
def _name(self, value):
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context[CONF_NAME] = value
self.context["title_placeholders"] = {"name": self._name}
def _set_user_input(self, user_input):
if user_input is None:
return
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
async def _async_authenticate_or_add(self, user_input):
self._set_user_input(user_input)
error, device_info = await self.fetch_device_info()
if error is not None:
return await self.async_step_user(error=error)
self._name = device_info.name
# Only show authentication step if device uses password
if device_info.uses_password:
return await self.async_step_authenticate()
return self._async_get_entry()
async def async_step_discovery_confirm(self, user_input=None):
"""Handle user-confirmation of discovered node."""
if user_input is not None:
return await self._async_authenticate_or_add(None)
return self.async_show_form(
step_id="discovery_confirm", description_placeholders={"name": self._name}
)
async def async_step_zeroconf(self, discovery_info: DiscoveryInfoType):
"""Handle zeroconf discovery."""
# Hostname is format: livingroom.local.
local_name = discovery_info["hostname"][:-1]
node_name = local_name[: -len(".local")]
address = discovery_info["properties"].get("address", local_name)
# Check if already configured
await self.async_set_unique_id(node_name)
self._abort_if_unique_id_configured(
updates={CONF_HOST: discovery_info[CONF_HOST]}
)
for entry in self._async_current_entries():
already_configured = False
if CONF_HOST in entry.data and entry.data[CONF_HOST] in [
address,
discovery_info[CONF_HOST],
]:
# Is this address or IP address already configured?
already_configured = True
elif entry.entry_id in self.hass.data.get(DATA_KEY, {}):
# Does a config entry with this name already exist?
data: RuntimeEntryData = self.hass.data[DATA_KEY][entry.entry_id]
# Node names are unique in the network
if data.device_info is not None:
already_configured = data.device_info.name == node_name
if already_configured:
# Backwards compat, we update old entries
if not entry.unique_id:
self.hass.config_entries.async_update_entry(
entry,
data={**entry.data, CONF_HOST: discovery_info[CONF_HOST]},
unique_id=node_name,
)
return self.async_abort(reason="already_configured")
self._host = discovery_info[CONF_HOST]
self._port = discovery_info[CONF_PORT]
self._name = node_name
return await self.async_step_discovery_confirm()
@callback
def _async_get_entry(self):
return self.async_create_entry(
title=self._name,
data={
CONF_HOST: self._host,
CONF_PORT: self._port,
# The API uses protobuf, so empty string denotes absence
CONF_PASSWORD: self._password or "",
},
)
async def async_step_authenticate(self, user_input=None, error=None):
"""Handle getting password for authentication."""
if user_input is not None:
self._password = user_input[CONF_PASSWORD]
error = await self.try_login()
if error:
return await self.async_step_authenticate(error=error)
return self._async_get_entry()
errors = {}
if error is not None:
errors["base"] = error
return self.async_show_form(
step_id="authenticate",
data_schema=vol.Schema({vol.Required("password"): str}),
description_placeholders={"name": self._name},
errors=errors,
)
async def fetch_device_info(self):
"""Fetch device info from API and return any errors."""
zeroconf_instance = await zeroconf.async_get_instance(self.hass)
cli = APIClient(
self.hass.loop,
self._host,
self._port,
"",
zeroconf_instance=zeroconf_instance,
)
try:
await cli.connect()
device_info = await cli.device_info()
except APIConnectionError as err:
if "resolving" in str(err):
return "resolve_error", None
return "connection_error", None
finally:
await cli.disconnect(force=True)
return None, device_info
async def try_login(self):
"""Try logging in to device and return any errors."""
zeroconf_instance = await zeroconf.async_get_instance(self.hass)
cli = APIClient(
self.hass.loop,
self._host,
self._port,
self._password,
zeroconf_instance=zeroconf_instance,
)
try:
await cli.connect(login=True)
except APIConnectionError:
await cli.disconnect(force=True)
return "invalid_auth"
return None
|
import os
import unittest
from perfkitbenchmarker.scripts.database_scripts import plot_sysbench_results
TEST_FILE_1 = '../tests/data/sysbench_stderr_output_sample.txt'
TEST_FILE_1_RUN_SECONDS = 480
TEST_FILE_1_REPORT_INTERVAL = 2
TEST_FILE_1_FIRST_TPS_VALUE = 144.13
TEST_RUN_URI = 'abcdefgh'
class PlotterTestCase(unittest.TestCase):
def setUp(self):
self.plotter = plot_sysbench_results.Plotter(
TEST_FILE_1_RUN_SECONDS, TEST_FILE_1_REPORT_INTERVAL, TEST_RUN_URI)
def testadd_file(self):
self.assertRaises(plot_sysbench_results.STDERRFileDoesNotExistError,
self.plotter.add_file, '')
def testparse_file(self):
# TODO(samspano): Implement test that will raise PatternNotFoundError.
path1 = os.path.join(
os.path.dirname(__file__), TEST_FILE_1)
with open(path1) as file1:
results = self.plotter._parse_file(file1)
self.assertEqual(len(results), self.plotter.data_entries_per_file)
self.assertEqual(results[0], TEST_FILE_1_FIRST_TPS_VALUE)
if __name__ == '__main__':
unittest.main()
|
import logging
from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import async_generate_entity_id
from . import DOMAIN
from .const import BINARY_SENSOR_INFO, DATA_GATEWAYS, DATA_OPENTHERM_GW
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the OpenTherm Gateway binary sensors."""
sensors = []
for var, info in BINARY_SENSOR_INFO.items():
device_class = info[0]
friendly_name_format = info[1]
sensors.append(
OpenThermBinarySensor(
hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]],
var,
device_class,
friendly_name_format,
)
)
async_add_entities(sensors)
class OpenThermBinarySensor(BinarySensorEntity):
"""Represent an OpenTherm Gateway binary sensor."""
def __init__(self, gw_dev, var, device_class, friendly_name_format):
"""Initialize the binary sensor."""
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass
)
self._gateway = gw_dev
self._var = var
self._state = None
self._device_class = device_class
self._friendly_name = friendly_name_format.format(gw_dev.name)
self._unsub_updates = None
async def async_added_to_hass(self):
"""Subscribe to updates from the component."""
_LOGGER.debug("Added OpenTherm Gateway binary sensor %s", self._friendly_name)
self._unsub_updates = async_dispatcher_connect(
self.hass, self._gateway.update_signal, self.receive_report
)
async def async_will_remove_from_hass(self):
"""Unsubscribe from updates from the component."""
_LOGGER.debug(
"Removing OpenTherm Gateway binary sensor %s", self._friendly_name
)
self._unsub_updates()
@property
def available(self):
"""Return availability of the sensor."""
return self._state is not None
@property
def entity_registry_enabled_default(self):
"""Disable binary_sensors by default."""
return False
@callback
def receive_report(self, status):
"""Handle status updates from the component."""
state = status.get(self._var)
self._state = None if state is None else bool(state)
self.async_write_ha_state()
@property
def name(self):
"""Return the friendly name."""
return self._friendly_name
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {(DOMAIN, self._gateway.gw_id)},
"name": self._gateway.name,
"manufacturer": "Schelte Bron",
"model": "OpenTherm Gateway",
"sw_version": self._gateway.gw_version,
}
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._gateway.gw_id}-{self._var}"
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this device."""
return self._device_class
@property
def should_poll(self):
"""Return False because entity pushes its state."""
return False
|
import pygogo as gogo
from . import reactor
try:
from twisted.test.proto_helpers import MemoryReactorClock
except ImportError:
MemoryReactorClock = object
FakeReactor = lambda _: lambda: None
logger = gogo.Gogo(__name__, monolog=True).logger
class FakeReactor(MemoryReactorClock):
"""A fake reactor to be used in tests. This reactor doesn't actually do
much that's useful yet. It accepts TCP connection setup attempts, but
they will never succeed.
Examples:
>>> import sys
>>>
>>> try:
... from twisted import internet
... except ImportError:
... pass
... else:
... from twisted.internet.fdesc import readFromFD, setNonBlocking
... FileDescriptor = internet.abstract.FileDescriptor
...
... reactor = FakeReactor()
... f = FileDescriptor(reactor)
... f.fileno = sys.__stdout__.fileno
... fd = f.fileno()
... setNonBlocking(fd)
... readFromFD(fd, print)
"""
_DELAY = 1
def __init__(self):
super(FakeReactor, self).__init__()
reactor.fake = True
msg = 'Attention! Running fake reactor.'
logger.debug(f"{msg} Some deferreds may not work as intended.")
def callLater(self, when, what, *args, **kwargs):
"""Schedule a unit of work to be done later.
"""
delayed = super(FakeReactor, self).callLater(when, what, *args, **kwargs)
self.pump()
return delayed
def pump(self):
"""Perform scheduled work
"""
self.advance(self._DELAY)
|
import logging
import typing
import voluptuous as vol
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_MODE,
ATTR_UNIT_OF_MEASUREMENT,
CONF_ICON,
CONF_ID,
CONF_MODE,
CONF_NAME,
SERVICE_RELOAD,
)
from homeassistant.core import callback
from homeassistant.helpers import collection
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.helpers.service
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType, HomeAssistantType, ServiceCallType
_LOGGER = logging.getLogger(__name__)
DOMAIN = "input_number"
CONF_INITIAL = "initial"
CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"
MODE_SLIDER = "slider"
MODE_BOX = "box"
ATTR_INITIAL = "initial"
ATTR_VALUE = "value"
ATTR_MIN = "min"
ATTR_MAX = "max"
ATTR_STEP = "step"
SERVICE_SET_VALUE = "set_value"
SERVICE_INCREMENT = "increment"
SERVICE_DECREMENT = "decrement"
def _cv_input_number(cfg):
"""Configure validation helper for input number (voluptuous)."""
minimum = cfg.get(CONF_MIN)
maximum = cfg.get(CONF_MAX)
if minimum >= maximum:
raise vol.Invalid(
f"Maximum ({minimum}) is not greater than minimum ({maximum})"
)
state = cfg.get(CONF_INITIAL)
if state is not None and (state < minimum or state > maximum):
raise vol.Invalid(f"Initial value {state} not in range {minimum}-{maximum}")
return cfg
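# Illustrative sketch (assumption, not part of the upstream module): a
# minimal config dict that passes the validator above; the initial value
# must lie within [min, max].
def _example_valid_input_number_config() -> dict:
    return _cv_input_number(
        {CONF_MIN: 0.0, CONF_MAX: 100.0, CONF_INITIAL: 25.0}
    )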
CREATE_FIELDS = {
vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
vol.Required(CONF_MIN): vol.Coerce(float),
vol.Required(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_INITIAL): vol.Coerce(float),
vol.Optional(CONF_STEP, default=1): vol.All(vol.Coerce(float), vol.Range(min=1e-3)),
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_MODE, default=MODE_SLIDER): vol.In([MODE_BOX, MODE_SLIDER]),
}
UPDATE_FIELDS = {
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_INITIAL): vol.Coerce(float),
vol.Optional(CONF_STEP): vol.All(vol.Coerce(float), vol.Range(min=1e-3)),
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_MODE): vol.In([MODE_BOX, MODE_SLIDER]),
}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: cv.schema_with_slug_keys(
vol.All(
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_MIN): vol.Coerce(float),
vol.Required(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_INITIAL): vol.Coerce(float),
vol.Optional(CONF_STEP, default=1): vol.All(
vol.Coerce(float), vol.Range(min=1e-3)
),
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_MODE, default=MODE_SLIDER): vol.In(
[MODE_BOX, MODE_SLIDER]
),
},
_cv_input_number,
)
)
},
extra=vol.ALLOW_EXTRA,
)
RELOAD_SERVICE_SCHEMA = vol.Schema({})
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
"""Set up an input slider."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
id_manager = collection.IDManager()
yaml_collection = collection.YamlCollection(
logging.getLogger(f"{__name__}.yaml_collection"), id_manager
)
collection.attach_entity_component_collection(
component, yaml_collection, InputNumber.from_yaml
)
storage_collection = NumberStorageCollection(
Store(hass, STORAGE_VERSION, STORAGE_KEY),
logging.getLogger(f"{__name__}.storage_collection"),
id_manager,
)
collection.attach_entity_component_collection(
component, storage_collection, InputNumber
)
await yaml_collection.async_load(
[{CONF_ID: id_, **(conf or {})} for id_, conf in config.get(DOMAIN, {}).items()]
)
await storage_collection.async_load()
collection.StorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
).async_setup(hass)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, yaml_collection)
collection.attach_entity_registry_cleaner(hass, DOMAIN, DOMAIN, storage_collection)
async def reload_service_handler(service_call: ServiceCallType) -> None:
"""Reload yaml entities."""
conf = await component.async_prepare_reload(skip_reset=True)
if conf is None:
conf = {DOMAIN: {}}
await yaml_collection.async_load(
            [{CONF_ID: id_, **(conf or {})} for id_, conf in conf.get(DOMAIN, {}).items()]
)
homeassistant.helpers.service.async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD,
reload_service_handler,
schema=RELOAD_SERVICE_SCHEMA,
)
component.async_register_entity_service(
SERVICE_SET_VALUE,
{vol.Required(ATTR_VALUE): vol.Coerce(float)},
"async_set_value",
)
component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")
component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement")
return True
class NumberStorageCollection(collection.StorageCollection):
"""Input storage based collection."""
CREATE_SCHEMA = vol.Schema(vol.All(CREATE_FIELDS, _cv_input_number))
UPDATE_SCHEMA = vol.Schema(UPDATE_FIELDS)
async def _process_create_data(self, data: typing.Dict) -> typing.Dict:
"""Validate the config is valid."""
return self.CREATE_SCHEMA(data)
@callback
def _get_suggested_id(self, info: typing.Dict) -> str:
"""Suggest an ID based on the config."""
return info[CONF_NAME]
async def _update_data(self, data: dict, update_data: typing.Dict) -> typing.Dict:
"""Return a new updated data object."""
update_data = self.UPDATE_SCHEMA(update_data)
return _cv_input_number({**data, **update_data})
class InputNumber(RestoreEntity):
"""Representation of a slider."""
def __init__(self, config: typing.Dict):
"""Initialize an input number."""
self._config = config
self.editable = True
self._current_value = config.get(CONF_INITIAL)
@classmethod
def from_yaml(cls, config: typing.Dict) -> "InputNumber":
"""Return entity instance initialized from yaml storage."""
input_num = cls(config)
input_num.entity_id = f"{DOMAIN}.{config[CONF_ID]}"
input_num.editable = False
return input_num
@property
def should_poll(self):
"""If entity should be polled."""
return False
@property
def _minimum(self) -> float:
"""Return minimum allowed value."""
return self._config[CONF_MIN]
@property
def _maximum(self) -> float:
"""Return maximum allowed value."""
return self._config[CONF_MAX]
@property
def name(self):
"""Return the name of the input slider."""
return self._config.get(CONF_NAME)
@property
def icon(self):
"""Return the icon to be used for this entity."""
return self._config.get(CONF_ICON)
@property
def state(self):
"""Return the state of the component."""
return self._current_value
@property
    def _step(self) -> float:
"""Return entity's increment/decrement step."""
return self._config[CONF_STEP]
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._config.get(ATTR_UNIT_OF_MEASUREMENT)
@property
def unique_id(self) -> typing.Optional[str]:
"""Return unique id of the entity."""
return self._config[CONF_ID]
@property
def state_attributes(self):
"""Return the state attributes."""
return {
ATTR_INITIAL: self._config.get(CONF_INITIAL),
ATTR_EDITABLE: self.editable,
ATTR_MIN: self._minimum,
ATTR_MAX: self._maximum,
ATTR_STEP: self._step,
ATTR_MODE: self._config[CONF_MODE],
}
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if self._current_value is not None:
return
state = await self.async_get_last_state()
value = state and float(state.state)
# Check against None because value can be 0
if value is not None and self._minimum <= value <= self._maximum:
self._current_value = value
else:
self._current_value = self._minimum
async def async_set_value(self, value):
"""Set new value."""
num_value = float(value)
if num_value < self._minimum or num_value > self._maximum:
raise vol.Invalid(
f"Invalid value for {self.entity_id}: {value} (range {self._minimum} - {self._maximum})"
)
self._current_value = num_value
self.async_write_ha_state()
async def async_increment(self):
"""Increment value."""
await self.async_set_value(min(self._current_value + self._step, self._maximum))
async def async_decrement(self):
"""Decrement value."""
await self.async_set_value(max(self._current_value - self._step, self._minimum))
async def async_update_config(self, config: typing.Dict) -> None:
"""Handle when the config is updated."""
self._config = config
# just in case min/max values changed
self._current_value = min(self._current_value, self._maximum)
self._current_value = max(self._current_value, self._minimum)
self.async_write_ha_state()
|
import itertools
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import lmbench
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'lmbench'
BENCHMARK_CONFIG = """
lmbench:
description: Runs Lmbench Microbenchmark.
vm_groups:
default:
vm_spec: *default_dual_core
vm_count: null
"""
_LMBENCH_HARDWARE_DEFAULT = 'NO'
flags.DEFINE_integer(
'lmbench_mem_size', None,
'The range of memory on which several benchmarks operate. If not provided, '
'the memory size should be 8MB as default'
)
flags.DEFINE_enum(
'lmbench_hardware', _LMBENCH_HARDWARE_DEFAULT, ['YES', 'NO'],
    'Whether to run the BENCHMARK_HARDWARE tests: YES or NO. The default is NO.'
)
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def _PrepareLmbench(vm):
"""Builds Lmbench on a single vm."""
logging.info('Installing Lmbench on %s', vm)
vm.Install('lmbench')
def _ConfigureRun(vm):
"""Configure Lmbench tests."""
logging.info('Set Lmbench run parameters')
vm.RemoteCommand('cd {0} && mkdir bin && cd bin && '
'mkdir x86_64-linux-gnu'.format(lmbench.LMBENCH_DIR))
vm.RobustRemoteCommand(
'cd {0} && cd scripts && echo "1" >>input.txt && '
'echo "1" >>input.txt && ./config-run <input.txt'.format(
lmbench.LMBENCH_DIR))
sed_cmd = (
'sed -i -e "s/OUTPUT=\\/dev\\/tty/OUTPUT=\\/dev\\/null/" '
'{0}/bin/x86_64-linux-gnu/CONFIG.*'.format(lmbench.LMBENCH_DIR))
vm.RemoteCommand(sed_cmd)
if FLAGS.lmbench_mem_size:
sed_cmd = (
'sed -i -e "s/MB=/MB={0}/" {1}/bin/x86_64-linux-gnu/CONFIG.*'.format(
FLAGS.lmbench_mem_size, lmbench.LMBENCH_DIR))
vm.RemoteCommand(sed_cmd)
if FLAGS.lmbench_hardware == _LMBENCH_HARDWARE_DEFAULT:
sed_cmd = (
'sed -i -e "s/BENCHMARK_HARDWARE=YES/BENCHMARK_HARDWARE={0}/" '
'{1}/bin/x86_64-linux-gnu/CONFIG.*'.format(
FLAGS.lmbench_hardware, lmbench.LMBENCH_DIR))
vm.RemoteCommand(sed_cmd)
def Prepare(benchmark_spec):
"""Install Lmbench on the target vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
vm = vms[0]
_PrepareLmbench(vm)
_ConfigureRun(vm)
def _ParseContextSwitching(lines, title, metadata, results):
"""Parse the context switching test results.
Context switching - times in microseconds - smaller is better.
Args:
lines: The lines following context switching title size=* ovr=*.
title: The context switching subset title.
metadata: A dictionary of metadata.
results: A list of samples to be published.
Context switching test results:
"size=0k ovr=0.93
2 11.57
4 14.81
8 14.69
16 12.86
24 12.50
32 12.63
64 13.30
96 11.45
"""
size = regex_util.ExtractGroup('"size=([0-9]*)', title)
ovr = regex_util.ExtractGroup('"size=.* ovr=([0-9]*\\.[0-9]*)', title)
metadata_clone = metadata.copy()
metadata_clone['memory_size'] = '%sk' % size
metadata_clone['ovr'] = ovr
for line in lines:
metric_value = line.split()
current_metadata = metadata_clone.copy()
current_metadata['num_of_processes'] = int(metric_value[0])
results.append(sample.Sample('context_switching_time',
float(metric_value[1]),
'microseconds', current_metadata))
def _UpdateMetadata(lmbench_output, metadata):
metadata['MB'] = regex_util.ExtractGroup('MB: ([0-9]*)', lmbench_output)
metadata['BENCHMARK_HARDWARE'] = regex_util.ExtractGroup(
'BENCHMARK_HARDWARE: (YES|NO)', lmbench_output)
metadata['BENCHMARK_OS'] = regex_util.ExtractGroup(
'BENCHMARK_OS: (YES|NO)', lmbench_output)
def _ParseSections(lmbench_output, parse_section_func_dict, metadata, results):
"""Parse some sections from the output.
Different output sections may require different parsing strategies, so
parse_section_func_dict maps each section title to the function that parses
that section. All parsing functions take the same parameters, so to handle
more sections, simply define additional parsing functions as needed.
Args:
lmbench_output: A string containing the test results of lmbench.
parse_section_func_dict: A dictionary where the key is the section name,
the value is the corresponding function name to
parse that section.
metadata: A dictionary of metadata.
results: A list of samples to be published.
"""
lines_iter = iter(lmbench_output.split('\n'))
stop_parsing = ''
while True:
lines_iter = itertools.dropwhile(
lambda line: line not in parse_section_func_dict, lines_iter)
title = next(lines_iter, None)
if title is None:
break
function = parse_section_func_dict[title]
reading_buffer = [
item for item in itertools.takewhile(lambda line: line != stop_parsing,
lines_iter)
]
function(reading_buffer, title, metadata, results)
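# Illustrative sketch, not part of the original benchmark: how _ParseSections routes a
# fabricated context switching fragment to _ParseContextSwitching via the title lookup above.
def _ExampleParseContextSwitchingSection():
  """Return the samples parsed from a tiny fabricated lmbench fragment."""
  fake_output = '"size=0k ovr=0.93\n2 11.57\n4 14.81\n\nother text\n'
  samples = []
  _ParseSections(fake_output, {'"size=0k ovr=0.93': _ParseContextSwitching}, {}, samples)
  # samples now holds two 'context_switching_time' entries (for 2 and 4 processes).
  return samples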
def _AddProcessorMetricSamples(lmbench_output, processor_metric_list, metadata,
results):
"""Parse results for "Processor, Processes - times in microseconds - smaller is better."
Args:
lmbench_output: A string containing the test results of lmbench.
processor_metric_list: A tuple of metrics.
metadata: A dictionary of metadata.
results: A list of samples to be published.
Processor test output:
Simple syscall: 0.2345 microseconds
Simple read: 0.3515 microseconds
Simple write: 0.3082 microseconds
Simple stat: 0.6888 microseconds
Simple fstat: 0.3669 microseconds
Simple open/close: 1.5541 microseconds
Select on 10 fd's: 0.4464 microseconds
Select on 100 fd's: 1.0422 microseconds
Select on 250 fd's: 2.0069 microseconds
Select on 500 fd's: 3.7366 microseconds
Select on 10 tcp fd's: 0.5690 microseconds
Select on 100 tcp fd's: 6.4521 microseconds
Select on 250 tcp fd's: 16.7513 microseconds
Select on 500 tcp fd's: 32.8527 microseconds
Signal handler installation: 0.3226 microseconds
Signal handler overhead: 1.1736 microseconds
Protection fault: 0.7491 microseconds
Pipe latency: 25.5437 microseconds
AF_UNIX sock stream latency: 25.2813 microseconds
Process fork+exit: 121.7399 microseconds
Process fork+execve: 318.6445 microseconds
Process fork+/bin/sh -c: 800.2188 microseconds
Pagefaults on /var/tmp/XXX: 0.1639 microseconds
"""
for metric in processor_metric_list:
regex = '%s: (.*)' % metric
value_unit = regex_util.ExtractGroup(regex, lmbench_output)
[value, unit] = value_unit.split(' ')
results.append(
sample.Sample('%s' % metric.replace('\\', ''), float(value), unit,
metadata))
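# Hypothetical single-metric illustration (the input line is fabricated but mirrors the sample
# output in the docstring above): how one processor metric line becomes a Sample object.
def _ExampleProcessorMetricSample():
  """Return the Sample parsed from a fabricated 'Simple syscall' line."""
  samples = []
  _AddProcessorMetricSamples(
      'Simple syscall: 0.2345 microseconds', ('syscall',), {}, samples)
  return samples  # one Sample: metric='syscall', value=0.2345, unit='microseconds'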
def _ParseOutput(lmbench_output):
"""Parse the output from lmbench.
Args:
lmbench_output: A string containing the test results of lmbench.
Returns:
A list of samples to be published (in the same format as Run() returns).
"""
results = []
metadata = dict()
# Update metadata
_UpdateMetadata(lmbench_output, metadata)
# Parse results for "Processor, Processes - times in microseconds - smaller is better"
# TODO(user): Parse more metrics for the processor section.
processor_metric_list = ('syscall', 'read', 'write', 'stat', 'fstat',
'open/close', 'Signal handler installation',
'Signal handler overhead', 'Protection fault',
'Pipe latency', r'Process fork\+exit',
r'Process fork\+execve', r'Process fork\+/bin/sh -c')
_AddProcessorMetricSamples(lmbench_output, processor_metric_list, metadata,
results)
# Parse some sections from the output.
parse_section_func_dict = {}
context_switching_titles = regex_util.ExtractAllMatches('"size=.* ovr=.*',
lmbench_output)
for title in context_switching_titles:
parse_section_func_dict[title] = _ParseContextSwitching
_ParseSections(lmbench_output, parse_section_func_dict, metadata, results)
return results
def Run(benchmark_spec):
"""Run LMBENCH on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
vm = vms[0]
# Use the current configuration to run the benchmark tests.
vm.RobustRemoteCommand(
'cd {0} && sudo make rerun'.format(lmbench.LMBENCH_DIR))
stdout, _ = vm.RobustRemoteCommand(
'cd {0} && cd results/x86_64-linux-gnu && cat *.*'.format(
lmbench.LMBENCH_DIR), should_log=True)
vm.RobustRemoteCommand(
'cd {0} && cd results/x86_64-linux-gnu && '
'mkdir -p /tmp/lmbench && '
'sudo mv *.* /tmp/lmbench; '.format(
lmbench.LMBENCH_DIR))
return _ParseOutput(stdout)
def Cleanup(unused_benchmark_spec):
pass
|
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from . import CONF_BINARY_SENSORS, DATA_EIGHT, NAME_MAP, EightSleepHeatEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the eight sleep binary sensor."""
if discovery_info is None:
return
name = "Eight"
sensors = discovery_info[CONF_BINARY_SENSORS]
eight = hass.data[DATA_EIGHT]
all_sensors = []
for sensor in sensors:
all_sensors.append(EightHeatSensor(name, eight, sensor))
async_add_entities(all_sensors, True)
class EightHeatSensor(EightSleepHeatEntity, BinarySensorEntity):
"""Representation of a Eight Sleep heat-based sensor."""
def __init__(self, name, eight, sensor):
"""Initialize the sensor."""
super().__init__(eight)
self._sensor = sensor
self._mapped_name = NAME_MAP.get(self._sensor, self._sensor)
self._name = f"{name} {self._mapped_name}"
self._state = None
self._side = self._sensor.split("_")[0]
self._userid = self._eight.fetch_userid(self._side)
self._usrobj = self._eight.users[self._userid]
_LOGGER.debug(
"Presence Sensor: %s, Side: %s, User: %s",
self._sensor,
self._side,
self._userid,
)
@property
def name(self):
"""Return the name of the sensor, if any."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
async def async_update(self):
"""Retrieve latest state."""
self._state = self._usrobj.bed_presence
|
import os
from typing import Any, Dict, Optional
import serial.tools.list_ports
import voluptuous as vol
from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
from homeassistant import config_entries
from .core.const import ( # pylint:disable=unused-import
CONF_BAUDRATE,
CONF_FLOWCONTROL,
CONF_RADIO_TYPE,
DOMAIN,
RadioType,
)
CONF_MANUAL_PATH = "Enter Manually"
SUPPORTED_PORT_SETTINGS = (
CONF_BAUDRATE,
CONF_FLOWCONTROL,
)
class ZhaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 2
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize flow instance."""
self._device_path = None
self._radio_type = None
async def async_step_user(self, user_input=None):
"""Handle a zha config flow start."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
list_of_ports = [
f"{p}, s/n: {p.serial_number or 'n/a'}"
+ (f" - {p.manufacturer}" if p.manufacturer else "")
for p in ports
]
list_of_ports.append(CONF_MANUAL_PATH)
if user_input is not None:
user_selection = user_input[CONF_DEVICE_PATH]
if user_selection == CONF_MANUAL_PATH:
return await self.async_step_pick_radio()
port = ports[list_of_ports.index(user_selection)]
dev_path = await self.hass.async_add_executor_job(
get_serial_by_id, port.device
)
auto_detected_data = await detect_radios(dev_path)
if auto_detected_data is not None:
title = f"{port.description}, s/n: {port.serial_number or 'n/a'}"
title += f" - {port.manufacturer}" if port.manufacturer else ""
return self.async_create_entry(
title=title,
data=auto_detected_data,
)
# did not detect anything
self._device_path = dev_path
return await self.async_step_pick_radio()
schema = vol.Schema({vol.Required(CONF_DEVICE_PATH): vol.In(list_of_ports)})
return self.async_show_form(step_id="user", data_schema=schema)
async def async_step_pick_radio(self, user_input=None):
"""Select radio type."""
if user_input is not None:
self._radio_type = RadioType.get_by_description(user_input[CONF_RADIO_TYPE])
return await self.async_step_port_config()
schema = {vol.Required(CONF_RADIO_TYPE): vol.In(sorted(RadioType.list()))}
return self.async_show_form(
step_id="pick_radio",
data_schema=vol.Schema(schema),
)
async def async_step_port_config(self, user_input=None):
"""Enter port settings specific for this type of radio."""
errors = {}
app_cls = RadioType[self._radio_type].controller
if user_input is not None:
self._device_path = user_input.get(CONF_DEVICE_PATH)
if await app_cls.probe(user_input):
serial_by_id = await self.hass.async_add_executor_job(
get_serial_by_id, user_input[CONF_DEVICE_PATH]
)
user_input[CONF_DEVICE_PATH] = serial_by_id
return self.async_create_entry(
title=user_input[CONF_DEVICE_PATH],
data={CONF_DEVICE: user_input, CONF_RADIO_TYPE: self._radio_type},
)
errors["base"] = "cannot_connect"
schema = {
vol.Required(
CONF_DEVICE_PATH, default=self._device_path or vol.UNDEFINED
): str
}
radio_schema = app_cls.SCHEMA_DEVICE.schema
if isinstance(radio_schema, vol.Schema):
radio_schema = radio_schema.schema
for param, value in radio_schema.items():
if param in SUPPORTED_PORT_SETTINGS:
schema[param] = value
return self.async_show_form(
step_id="port_config",
data_schema=vol.Schema(schema),
errors=errors,
)
async def detect_radios(dev_path: str) -> Optional[Dict[str, Any]]:
"""Probe all radio types on the device port."""
for radio in RadioType:
dev_config = radio.controller.SCHEMA_DEVICE({CONF_DEVICE_PATH: dev_path})
if await radio.controller.probe(dev_config):
return {CONF_RADIO_TYPE: radio.name, CONF_DEVICE: dev_config}
return None
def get_serial_by_id(dev_path: str) -> str:
"""Return a /dev/serial/by-id match for given device if available."""
by_id = "/dev/serial/by-id"
if not os.path.isdir(by_id):
return dev_path
for path in (entry.path for entry in os.scandir(by_id) if entry.is_symlink()):
if os.path.realpath(path) == dev_path:
return path
return dev_path
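# Minimal usage sketch (illustrative only, not part of the integration; the port path is
# hypothetical): get_serial_by_id returns the stable /dev/serial/by-id symlink for a port when
# one exists and otherwise hands back the original path unchanged.
def _example_resolve_port_path() -> str:
    """Resolve a hypothetical serial port path to its by-id symlink, if any."""
    return get_serial_by_id("/dev/ttyUSB0")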
|
from pylatex.utils import _latex_item_to_string
from pylatex.base_classes import LatexObject
TEST_STR = 'hello'
def test_string():
name = 'abc'
assert _latex_item_to_string(name) == name
def test_user_latex_object():
class TestLatexObject(LatexObject):
def dumps(self):
return TEST_STR
assert _latex_item_to_string(TestLatexObject()) == TEST_STR
def test_foreign_object():
class ForeignObject:
def dumps(self):
return 15
def __str__(self):
return TEST_STR
assert _latex_item_to_string(ForeignObject()) == TEST_STR
|
import os
import pytest
from homeassistant import loader, setup
from homeassistant.requirements import (
CONSTRAINT_FILE,
RequirementsNotFound,
async_get_integration_with_requirements,
async_process_requirements,
)
from tests.async_mock import call, patch
from tests.common import MockModule, mock_integration
def env_without_wheel_links():
"""Return env without wheel links."""
env = dict(os.environ)
env.pop("WHEEL_LINKS", None)
return env
async def test_requirement_installed_in_venv(hass):
"""Test requirement installed in virtual environment."""
with patch("os.path.dirname", return_value="ha_package_path"), patch(
"homeassistant.util.package.is_virtual_env", return_value=True
), patch("homeassistant.util.package.is_docker_env", return_value=False), patch(
"homeassistant.util.package.install_package", return_value=True
) as mock_install, patch.dict(
os.environ, env_without_wheel_links(), clear=True
):
hass.config.skip_pip = False
mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"]))
assert await setup.async_setup_component(hass, "comp", {})
assert "comp" in hass.config.components
assert mock_install.call_args == call(
"package==0.0.1",
constraints=os.path.join("ha_package_path", CONSTRAINT_FILE),
no_cache_dir=False,
)
async def test_requirement_installed_in_deps(hass):
"""Test requirement installed in deps directory."""
with patch("os.path.dirname", return_value="ha_package_path"), patch(
"homeassistant.util.package.is_virtual_env", return_value=False
), patch("homeassistant.util.package.is_docker_env", return_value=False), patch(
"homeassistant.util.package.install_package", return_value=True
) as mock_install, patch.dict(
os.environ, env_without_wheel_links(), clear=True
):
hass.config.skip_pip = False
mock_integration(hass, MockModule("comp", requirements=["package==0.0.1"]))
assert await setup.async_setup_component(hass, "comp", {})
assert "comp" in hass.config.components
assert mock_install.call_args == call(
"package==0.0.1",
target=hass.config.path("deps"),
constraints=os.path.join("ha_package_path", CONSTRAINT_FILE),
no_cache_dir=False,
)
async def test_install_existing_package(hass):
"""Test an install attempt on an existing package."""
with patch(
"homeassistant.util.package.install_package", return_value=True
) as mock_inst:
await async_process_requirements(hass, "test_component", ["hello==1.0.0"])
assert len(mock_inst.mock_calls) == 1
with patch("homeassistant.util.package.is_installed", return_value=True), patch(
"homeassistant.util.package.install_package"
) as mock_inst:
await async_process_requirements(hass, "test_component", ["hello==1.0.0"])
assert len(mock_inst.mock_calls) == 0
async def test_install_missing_package(hass):
"""Test an install attempt on an existing package."""
with patch(
"homeassistant.util.package.install_package", return_value=False
) as mock_inst:
with pytest.raises(RequirementsNotFound):
await async_process_requirements(hass, "test_component", ["hello==1.0.0"])
assert len(mock_inst.mock_calls) == 1
async def test_get_integration_with_requirements(hass):
"""Check getting an integration with loaded requirements."""
hass.config.skip_pip = False
mock_integration(
hass, MockModule("test_component_dep", requirements=["test-comp-dep==1.0.0"])
)
mock_integration(
hass,
MockModule(
"test_component_after_dep", requirements=["test-comp-after-dep==1.0.0"]
),
)
mock_integration(
hass,
MockModule(
"test_component",
requirements=["test-comp==1.0.0"],
dependencies=["test_component_dep"],
partial_manifest={"after_dependencies": ["test_component_after_dep"]},
),
)
with patch(
"homeassistant.util.package.is_installed", return_value=False
) as mock_is_installed, patch(
"homeassistant.util.package.install_package", return_value=True
) as mock_inst:
integration = await async_get_integration_with_requirements(
hass, "test_component"
)
assert integration
assert integration.domain == "test_component"
assert len(mock_is_installed.mock_calls) == 3
assert sorted(mock_call[1][0] for mock_call in mock_is_installed.mock_calls) == [
"test-comp-after-dep==1.0.0",
"test-comp-dep==1.0.0",
"test-comp==1.0.0",
]
assert len(mock_inst.mock_calls) == 3
assert sorted(mock_call[1][0] for mock_call in mock_inst.mock_calls) == [
"test-comp-after-dep==1.0.0",
"test-comp-dep==1.0.0",
"test-comp==1.0.0",
]
async def test_install_with_wheels_index(hass):
"""Test an install attempt with wheels index URL."""
hass.config.skip_pip = False
mock_integration(hass, MockModule("comp", requirements=["hello==1.0.0"]))
with patch("homeassistant.util.package.is_installed", return_value=False), patch(
"homeassistant.util.package.is_docker_env", return_value=True
), patch("homeassistant.util.package.install_package") as mock_inst, patch.dict(
os.environ, {"WHEELS_LINKS": "https://wheels.hass.io/test"}
), patch(
"os.path.dirname"
) as mock_dir:
mock_dir.return_value = "ha_package_path"
assert await setup.async_setup_component(hass, "comp", {})
assert "comp" in hass.config.components
assert mock_inst.call_args == call(
"hello==1.0.0",
find_links="https://wheels.hass.io/test",
constraints=os.path.join("ha_package_path", CONSTRAINT_FILE),
no_cache_dir=True,
)
async def test_install_on_docker(hass):
"""Test an install attempt on an docker system env."""
hass.config.skip_pip = False
mock_integration(hass, MockModule("comp", requirements=["hello==1.0.0"]))
with patch("homeassistant.util.package.is_installed", return_value=False), patch(
"homeassistant.util.package.is_docker_env", return_value=True
), patch("homeassistant.util.package.install_package") as mock_inst, patch(
"os.path.dirname"
) as mock_dir, patch.dict(
os.environ, env_without_wheel_links(), clear=True
):
mock_dir.return_value = "ha_package_path"
assert await setup.async_setup_component(hass, "comp", {})
assert "comp" in hass.config.components
assert mock_inst.call_args == call(
"hello==1.0.0",
constraints=os.path.join("ha_package_path", CONSTRAINT_FILE),
no_cache_dir=True,
)
async def test_discovery_requirements_mqtt(hass):
"""Test that we load discovery requirements."""
hass.config.skip_pip = False
mqtt = await loader.async_get_integration(hass, "mqtt")
mock_integration(
hass, MockModule("mqtt_comp", partial_manifest={"mqtt": ["foo/discovery"]})
)
with patch(
"homeassistant.requirements.async_process_requirements",
) as mock_process:
await async_get_integration_with_requirements(hass, "mqtt_comp")
assert len(mock_process.mock_calls) == 2 # mqtt also depends on http
assert mock_process.mock_calls[0][1][2] == mqtt.requirements
async def test_discovery_requirements_ssdp(hass):
"""Test that we load discovery requirements."""
hass.config.skip_pip = False
ssdp = await loader.async_get_integration(hass, "ssdp")
mock_integration(
hass, MockModule("ssdp_comp", partial_manifest={"ssdp": [{"st": "roku:ecp"}]})
)
with patch(
"homeassistant.requirements.async_process_requirements",
) as mock_process:
await async_get_integration_with_requirements(hass, "ssdp_comp")
assert len(mock_process.mock_calls) == 3
assert mock_process.mock_calls[0][1][2] == ssdp.requirements
# Ensure zeroconf is a dep for ssdp
assert mock_process.mock_calls[1][1][1] == "zeroconf"
@pytest.mark.parametrize(
"partial_manifest",
[{"zeroconf": ["_googlecast._tcp.local."]}, {"homekit": {"models": ["LIFX"]}}],
)
async def test_discovery_requirements_zeroconf(hass, partial_manifest):
"""Test that we load discovery requirements."""
hass.config.skip_pip = False
zeroconf = await loader.async_get_integration(hass, "zeroconf")
mock_integration(
hass,
MockModule("comp", partial_manifest=partial_manifest),
)
with patch(
"homeassistant.requirements.async_process_requirements",
) as mock_process:
await async_get_integration_with_requirements(hass, "comp")
assert len(mock_process.mock_calls) == 2 # zeroconf also depends on http
assert mock_process.mock_calls[0][1][2] == zeroconf.requirements
|
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import callback
from .const import DOMAIN, DOOR_STATION, DOOR_STATION_EVENT_ENTITY_IDS
@callback
def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
_, doorbird_event = event.event_type.split("_", 1)
return {
"name": "Doorbird",
"message": f"Event {event.event_type} was fired.",
"entity_id": hass.data[DOMAIN][DOOR_STATION_EVENT_ENTITY_IDS].get(
doorbird_event, event.data.get(ATTR_ENTITY_ID)
),
}
domain_data = hass.data[DOMAIN]
for config_entry_id in domain_data:
door_station = domain_data[config_entry_id][DOOR_STATION]
for event in door_station.doorstation_events:
async_describe_event(
DOMAIN, f"{DOMAIN}_{event}", async_describe_logbook_event
)
|
from google_nest_sdm.auth import AbstractAuth
from google_nest_sdm.device import Device
from homeassistant.components import camera
from homeassistant.components.camera import STATE_IDLE
from .common import async_setup_sdm_platform
PLATFORM = "camera"
CAMERA_DEVICE_TYPE = "sdm.devices.types.CAMERA"
DEVICE_ID = "some-device-id"
class FakeResponse:
"""A fake web response used for returning results of commands."""
def __init__(self, json):
"""Initialize the FakeResponse."""
self._json = json
def raise_for_status(self):
"""Mimics a successful response status."""
pass
async def json(self):
"""Return a dict with the response."""
return self._json
class FakeAuth(AbstractAuth):
"""Fake authentication object that returns fake responses."""
def __init__(self, response: FakeResponse):
"""Initialize the FakeAuth."""
super().__init__(None, "")
self._response = response
async def async_get_access_token(self):
"""Return a fake access token."""
return "some-token"
async def creds(self):
"""Return a fake creds."""
return None
async def request(self, method: str, url: str, **kwargs):
"""Pass through the FakeResponse."""
return self._response
async def async_setup_camera(hass, traits={}, auth=None):
"""Set up the platform and prerequisites."""
devices = {}
if traits:
devices[DEVICE_ID] = Device.MakeDevice(
{
"name": DEVICE_ID,
"type": CAMERA_DEVICE_TYPE,
"traits": traits,
},
auth=auth,
)
return await async_setup_sdm_platform(hass, PLATFORM, devices)
async def test_no_devices(hass):
"""Test configuration that returns no devices."""
await async_setup_camera(hass)
assert len(hass.states.async_all()) == 0
async def test_ineligible_device(hass):
"""Test configuration with devices that do not support cameras."""
await async_setup_camera(
hass,
{
"sdm.devices.traits.Info": {
"customName": "My Camera",
},
},
)
assert len(hass.states.async_all()) == 0
async def test_camera_device(hass):
"""Test a basic camera with a live stream."""
await async_setup_camera(
hass,
{
"sdm.devices.traits.Info": {
"customName": "My Camera",
},
"sdm.devices.traits.CameraLiveStream": {
"maxVideoResolution": {
"width": 640,
"height": 480,
},
"videoCodecs": ["H264"],
"audioCodecs": ["AAC"],
},
},
)
assert len(hass.states.async_all()) == 1
camera = hass.states.get("camera.my_camera")
assert camera is not None
assert camera.state == STATE_IDLE
registry = await hass.helpers.entity_registry.async_get_registry()
entry = registry.async_get("camera.my_camera")
assert entry.unique_id == "some-device-id-camera"
assert entry.original_name == "My Camera"
assert entry.domain == "camera"
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My Camera"
assert device.model == "Camera"
assert device.identifiers == {("nest", DEVICE_ID)}
async def test_camera_stream(hass):
"""Test a basic camera and fetch its live stream."""
response = FakeResponse(
{
"results": {
"streamUrls": {"rtspUrl": "rtsp://some/url?auth=g.0.streamingToken"},
"streamExtensionToken": "g.1.extensionToken",
"streamToken": "g.0.streamingToken",
"expiresAt": "2018-01-04T18:30:00.000Z",
},
}
)
await async_setup_camera(
hass,
{
"sdm.devices.traits.Info": {
"customName": "My Camera",
},
"sdm.devices.traits.CameraLiveStream": {
"maxVideoResolution": {
"width": 640,
"height": 480,
},
"videoCodecs": ["H264"],
"audioCodecs": ["AAC"],
},
},
auth=FakeAuth(response),
)
assert len(hass.states.async_all()) == 1
cam = hass.states.get("camera.my_camera")
assert cam is not None
assert cam.state == STATE_IDLE
stream_source = await camera.async_get_stream_source(hass, "camera.my_camera")
assert stream_source == "rtsp://some/url?auth=g.0.streamingToken"
|
import os
import time
from datetime import timedelta
from email.mime.image import MIMEImage
from celery.schedules import crontab
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.utils.timezone import now
from html2text import HTML2Text
from social_django.models import Code, Partial
from weblate.utils.celery import app
from weblate.utils.errors import report_error
@app.task(trail=False)
def cleanup_social_auth():
"""Cleanup expired partial social authentications."""
for partial in Partial.objects.iterator():
kwargs = partial.data["kwargs"]
if "weblate_expires" not in kwargs or kwargs["weblate_expires"] < time.time():
# Old entry without expiry set, or expired entry
partial.delete()
age = now() - timedelta(seconds=settings.AUTH_TOKEN_VALID)
# Delete old not verified codes
Code.objects.filter(verified=False, timestamp__lt=age).delete()
# Delete old partial data
Partial.objects.filter(timestamp__lt=age).delete()
@app.task(trail=False)
def cleanup_auditlog():
"""Cleanup old auditlog entries."""
from weblate.accounts.models import AuditLog
AuditLog.objects.filter(
timestamp__lt=now() - timedelta(days=settings.AUDITLOG_EXPIRY)
).delete()
@app.task(trail=False)
def notify_change(change_id):
from weblate.accounts.notifications import NOTIFICATIONS_ACTIONS
from weblate.trans.models import Change
change = Change.objects.get(pk=change_id)
perm_cache = {}
if change.action in NOTIFICATIONS_ACTIONS:
outgoing = []
for notification_cls in NOTIFICATIONS_ACTIONS[change.action]:
notification = notification_cls(outgoing, perm_cache)
notification.notify_immediate(change)
if outgoing:
send_mails.delay(outgoing)
def notify_digest(method):
from weblate.accounts.notifications import NOTIFICATIONS
outgoing = []
for notification_cls in NOTIFICATIONS:
notification = notification_cls(outgoing)
getattr(notification, method)()
if outgoing:
send_mails.delay(outgoing)
@app.task(trail=False)
def notify_daily():
notify_digest("notify_daily")
@app.task(trail=False)
def notify_weekly():
notify_digest("notify_weekly")
@app.task(trail=False)
def notify_monthly():
notify_digest("notify_monthly")
@app.task(trail=False)
def notify_auditlog(log_id, email):
from weblate.accounts.models import AuditLog
from weblate.accounts.notifications import send_notification_email
audit = AuditLog.objects.get(pk=log_id)
send_notification_email(
audit.user.profile.language,
[email],
"account_activity",
context={
"message": audit.get_message,
"extra_message": audit.get_extra_message,
"address": audit.address,
"user_agent": audit.user_agent,
},
info=f"{audit.activity} from {audit.address}",
)
@app.task(trail=False)
def send_mails(mails):
"""Send multiple mails in single connection."""
images = []
for name in ("email-logo.png", "email-logo-footer.png"):
filename = os.path.join(settings.STATIC_ROOT, name)
with open(filename, "rb") as handle:
image = MIMEImage(handle.read())
image.add_header("Content-ID", f"<{name}@cid.weblate.org>")
image.add_header("Content-Disposition", "inline", filename=name)
images.append(image)
connection = get_connection()
try:
connection.open()
except Exception:
report_error(cause="Failed to send notifications")
connection.close()
return
html2text = HTML2Text(bodywidth=78)
html2text.unicode_snob = True
html2text.ignore_images = True
html2text.pad_tables = True
try:
for mail in mails:
email = EmailMultiAlternatives(
settings.EMAIL_SUBJECT_PREFIX + mail["subject"],
html2text.handle(mail["body"]),
to=[mail["address"]],
headers=mail["headers"],
connection=connection,
)
email.mixed_subtype = "related"
for image in images:
email.attach(image)
email.attach_alternative(mail["body"], "text/html")
email.send()
finally:
connection.close()
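# Illustrative payload sketch (all values fabricated): each item handed to send_mails above is a
# dict carrying the subject, HTML body, recipient address and extra headers used to build the
# EmailMultiAlternatives message.
_EXAMPLE_MAIL = {
    "subject": "Example notification",
    "body": "<p>Example body</p>",
    "address": "admin@example.com",
    "headers": {},
}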
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(3600, cleanup_social_auth.s(), name="social-auth-cleanup")
sender.add_periodic_task(3600, cleanup_auditlog.s(), name="auditlog-cleanup")
sender.add_periodic_task(
crontab(hour=1, minute=0), notify_daily.s(), name="notify-daily"
)
sender.add_periodic_task(
crontab(hour=2, minute=0, day_of_week="monday"),
notify_weekly.s(),
name="notify-weekly",
)
sender.add_periodic_task(
crontab(hour=3, minute=0, day_of_month=1),
notify_monthly.s(),
name="notify-monthly",
)
|
import numpy as np
import pytest
import xarray as xr
from xarray import DataArray
from xarray.tests import assert_allclose, assert_equal, raises_regex
from . import raise_if_dask_computes, requires_cftime, requires_dask
@pytest.mark.parametrize("as_dataset", (True, False))
def test_weighted_non_DataArray_weights(as_dataset):
data = DataArray([1, 2])
if as_dataset:
data = data.to_dataset(name="data")
with raises_regex(ValueError, "`weights` must be a DataArray"):
data.weighted([1, 2])
@pytest.mark.parametrize("as_dataset", (True, False))
@pytest.mark.parametrize("weights", ([np.nan, 2], [np.nan, np.nan]))
def test_weighted_weights_nan_raises(as_dataset, weights):
data = DataArray([1, 2])
if as_dataset:
data = data.to_dataset(name="data")
with pytest.raises(ValueError, match="`weights` cannot contain missing values."):
data.weighted(DataArray(weights))
@requires_dask
@pytest.mark.parametrize("as_dataset", (True, False))
@pytest.mark.parametrize("weights", ([np.nan, 2], [np.nan, np.nan]))
def test_weighted_weights_nan_raises_dask(as_dataset, weights):
data = DataArray([1, 2]).chunk({"dim_0": -1})
if as_dataset:
data = data.to_dataset(name="data")
weights = DataArray(weights).chunk({"dim_0": -1})
with raise_if_dask_computes():
weighted = data.weighted(weights)
with pytest.raises(ValueError, match="`weights` cannot contain missing values."):
weighted.sum().load()
@requires_cftime
@requires_dask
@pytest.mark.parametrize("time_chunks", (1, 5))
@pytest.mark.parametrize("resample_spec", ("1AS", "5AS", "10AS"))
def test_weighted_lazy_resample(time_chunks, resample_spec):
# https://github.com/pydata/xarray/issues/4625
# simple customized weighted mean function
def mean_func(ds):
return ds.weighted(ds.weights).mean("time")
# example dataset
t = xr.cftime_range(start="2000", periods=20, freq="1AS")
weights = xr.DataArray(np.random.rand(len(t)), dims=["time"], coords={"time": t})
data = xr.DataArray(
np.random.rand(len(t)), dims=["time"], coords={"time": t, "weights": weights}
)
ds = xr.Dataset({"data": data}).chunk({"time": time_chunks})
with raise_if_dask_computes():
ds.resample(time=resample_spec).map(mean_func)
@pytest.mark.parametrize(
("weights", "expected"),
(([1, 2], 3), ([2, 0], 2), ([0, 0], np.nan), ([-1, 1], np.nan)),
)
def test_weighted_sum_of_weights_no_nan(weights, expected):
da = DataArray([1, 2])
weights = DataArray(weights)
result = da.weighted(weights).sum_of_weights()
expected = DataArray(expected)
assert_equal(expected, result)
@pytest.mark.parametrize(
("weights", "expected"),
(([1, 2], 2), ([2, 0], np.nan), ([0, 0], np.nan), ([-1, 1], 1)),
)
def test_weighted_sum_of_weights_nan(weights, expected):
da = DataArray([np.nan, 2])
weights = DataArray(weights)
result = da.weighted(weights).sum_of_weights()
expected = DataArray(expected)
assert_equal(expected, result)
def test_weighted_sum_of_weights_bool():
# https://github.com/pydata/xarray/issues/4074
da = DataArray([1, 2])
weights = DataArray([True, True])
result = da.weighted(weights).sum_of_weights()
expected = DataArray(2)
assert_equal(expected, result)
@pytest.mark.parametrize("da", ([1.0, 2], [1, np.nan], [np.nan, np.nan]))
@pytest.mark.parametrize("factor", [0, 1, 3.14])
@pytest.mark.parametrize("skipna", (True, False))
def test_weighted_sum_equal_weights(da, factor, skipna):
# if all weights equal the same factor 'f', the weighted sum is f times the ordinary sum
da = DataArray(da)
weights = xr.full_like(da, factor)
expected = da.sum(skipna=skipna) * factor
result = da.weighted(weights).sum(skipna=skipna)
assert_equal(expected, result)
@pytest.mark.parametrize(
("weights", "expected"), (([1, 2], 5), ([0, 2], 4), ([0, 0], 0))
)
def test_weighted_sum_no_nan(weights, expected):
da = DataArray([1, 2])
weights = DataArray(weights)
result = da.weighted(weights).sum()
expected = DataArray(expected)
assert_equal(expected, result)
@pytest.mark.parametrize(
("weights", "expected"), (([1, 2], 4), ([0, 2], 4), ([1, 0], 0), ([0, 0], 0))
)
@pytest.mark.parametrize("skipna", (True, False))
def test_weighted_sum_nan(weights, expected, skipna):
da = DataArray([np.nan, 2])
weights = DataArray(weights)
result = da.weighted(weights).sum(skipna=skipna)
if skipna:
expected = DataArray(expected)
else:
expected = DataArray(np.nan)
assert_equal(expected, result)
@pytest.mark.filterwarnings("error")
@pytest.mark.parametrize("da", ([1.0, 2], [1, np.nan], [np.nan, np.nan]))
@pytest.mark.parametrize("skipna", (True, False))
@pytest.mark.parametrize("factor", [1, 2, 3.14])
def test_weighted_mean_equal_weights(da, skipna, factor):
# if all weights are equal (!= 0), should yield the same result as mean
da = DataArray(da)
# all weights equal to the same nonzero factor
weights = xr.full_like(da, factor)
expected = da.mean(skipna=skipna)
result = da.weighted(weights).mean(skipna=skipna)
assert_equal(expected, result)
@pytest.mark.parametrize(
("weights", "expected"), (([4, 6], 1.6), ([1, 0], 1.0), ([0, 0], np.nan))
)
def test_weighted_mean_no_nan(weights, expected):
da = DataArray([1, 2])
weights = DataArray(weights)
expected = DataArray(expected)
result = da.weighted(weights).mean()
assert_equal(expected, result)
@pytest.mark.parametrize(
("weights", "expected"), (([4, 6], 2.0), ([1, 0], np.nan), ([0, 0], np.nan))
)
@pytest.mark.parametrize("skipna", (True, False))
def test_weighted_mean_nan(weights, expected, skipna):
da = DataArray([np.nan, 2])
weights = DataArray(weights)
if skipna:
expected = DataArray(expected)
else:
expected = DataArray(np.nan)
result = da.weighted(weights).mean(skipna=skipna)
assert_equal(expected, result)
def test_weighted_mean_bool():
# https://github.com/pydata/xarray/issues/4074
da = DataArray([1, 1])
weights = DataArray([True, True])
expected = DataArray(1)
result = da.weighted(weights).mean()
assert_equal(expected, result)
def expected_weighted(da, weights, dim, skipna, operation):
"""
Generate expected result using ``*`` and ``sum``. This is checked against
the result of da.weighted which uses ``dot``
"""
weighted_sum = (da * weights).sum(dim=dim, skipna=skipna)
if operation == "sum":
return weighted_sum
masked_weights = weights.where(da.notnull())
sum_of_weights = masked_weights.sum(dim=dim, skipna=True)
valid_weights = sum_of_weights != 0
sum_of_weights = sum_of_weights.where(valid_weights)
if operation == "sum_of_weights":
return sum_of_weights
weighted_mean = weighted_sum / sum_of_weights
if operation == "mean":
return weighted_mean
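# Small worked example (illustrative, not one of the original tests): for da=[1, 2] and
# weights=[4, 6] the helper above yields (1*4 + 2*6) / (4 + 6) = 1.6, the value asserted in
# test_weighted_mean_no_nan above.
def _example_expected_weighted_mean():
    da = DataArray([1, 2])
    weights = DataArray([4, 6])
    return expected_weighted(da, weights, dim=None, skipna=None, operation="mean")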
def check_weighted_operations(data, weights, dim, skipna):
# check sum of weights
result = data.weighted(weights).sum_of_weights(dim)
expected = expected_weighted(data, weights, dim, skipna, "sum_of_weights")
assert_allclose(expected, result)
# check weighted sum
result = data.weighted(weights).sum(dim, skipna=skipna)
expected = expected_weighted(data, weights, dim, skipna, "sum")
assert_allclose(expected, result)
# check weighted mean
result = data.weighted(weights).mean(dim, skipna=skipna)
expected = expected_weighted(data, weights, dim, skipna, "mean")
assert_allclose(expected, result)
@pytest.mark.parametrize("dim", ("a", "b", "c", ("a", "b"), ("a", "b", "c"), None))
@pytest.mark.parametrize("add_nans", (True, False))
@pytest.mark.parametrize("skipna", (None, True, False))
def test_weighted_operations_3D(dim, add_nans, skipna):
dims = ("a", "b", "c")
coords = dict(a=[0, 1, 2, 3], b=[0, 1, 2, 3], c=[0, 1, 2, 3])
weights = DataArray(np.random.randn(4, 4, 4), dims=dims, coords=coords)
data = np.random.randn(4, 4, 4)
# add approximately 25 % NaNs (https://stackoverflow.com/a/32182680/3010700)
if add_nans:
c = int(data.size * 0.25)
data.ravel()[np.random.choice(data.size, c, replace=False)] = np.NaN
data = DataArray(data, dims=dims, coords=coords)
check_weighted_operations(data, weights, dim, skipna)
data = data.to_dataset(name="data")
check_weighted_operations(data, weights, dim, skipna)
def test_weighted_operations_nonequal_coords():
weights = DataArray(np.random.randn(4), dims=("a",), coords=dict(a=[0, 1, 2, 3]))
data = DataArray(np.random.randn(4), dims=("a",), coords=dict(a=[1, 2, 3, 4]))
check_weighted_operations(data, weights, dim="a", skipna=None)
data = data.to_dataset(name="data")
check_weighted_operations(data, weights, dim="a", skipna=None)
@pytest.mark.parametrize("shape_data", ((4,), (4, 4), (4, 4, 4)))
@pytest.mark.parametrize("shape_weights", ((4,), (4, 4), (4, 4, 4)))
@pytest.mark.parametrize("add_nans", (True, False))
@pytest.mark.parametrize("skipna", (None, True, False))
def test_weighted_operations_different_shapes(
shape_data, shape_weights, add_nans, skipna
):
weights = DataArray(np.random.randn(*shape_weights))
data = np.random.randn(*shape_data)
# add approximately 25 % NaNs
if add_nans:
c = int(data.size * 0.25)
data.ravel()[np.random.choice(data.size, c, replace=False)] = np.NaN
data = DataArray(data)
check_weighted_operations(data, weights, "dim_0", skipna)
check_weighted_operations(data, weights, None, skipna)
data = data.to_dataset(name="data")
check_weighted_operations(data, weights, "dim_0", skipna)
check_weighted_operations(data, weights, None, skipna)
@pytest.mark.parametrize("operation", ("sum_of_weights", "sum", "mean"))
@pytest.mark.parametrize("as_dataset", (True, False))
@pytest.mark.parametrize("keep_attrs", (True, False, None))
def test_weighted_operations_keep_attr(operation, as_dataset, keep_attrs):
weights = DataArray(np.random.randn(2, 2), attrs=dict(attr="weights"))
data = DataArray(np.random.randn(2, 2))
if as_dataset:
data = data.to_dataset(name="data")
data.attrs = dict(attr="weights")
result = getattr(data.weighted(weights), operation)(keep_attrs=True)
if operation == "sum_of_weights":
assert weights.attrs == result.attrs
else:
assert data.attrs == result.attrs
result = getattr(data.weighted(weights), operation)(keep_attrs=None)
assert not result.attrs
result = getattr(data.weighted(weights), operation)(keep_attrs=False)
assert not result.attrs
@pytest.mark.parametrize("operation", ("sum", "mean"))
def test_weighted_operations_keep_attr_da_in_ds(operation):
# GH #3595
weights = DataArray(np.random.randn(2, 2))
data = DataArray(np.random.randn(2, 2), attrs=dict(attr="data"))
data = data.to_dataset(name="a")
result = getattr(data.weighted(weights), operation)(keep_attrs=True)
assert data.a.attrs == result.a.attrs
|
import pytest
import pytz
import re
from datetime import timedelta
from django import VERSION as DJANGO_VERSION
from django.conf import settings
from django.urls import reverse
from django.core import mail
from django.utils.timezone import datetime
from shop.serializers.auth import PasswordResetRequestSerializer
from shop.views.auth import PasswordResetConfirmView
@pytest.mark.django_db
def test_login_fail(api_client):
login_url = reverse('shop:login')
data = {
'form_data': {
'username': 'a',
'password': 'b',
}
}
response = api_client.post(login_url, data, format='json')
assert response.status_code == 400
assert response.json() == {'login_form': {'non_field_errors': ['Unable to log in with provided credentials.']}}
assert response.cookies.get('sessionid') is None
@pytest.mark.django_db
def test_login_success(registered_customer, api_client):
login_url = reverse('shop:login')
data = {
'form_data': {
'username': registered_customer.email,
'password': 'secret',
}
}
response = api_client.post(login_url, data, format='json')
assert response.status_code == 200
assert len(response.json().get('key')) == 40
session_cookie = response.cookies.get('sessionid')
assert session_cookie['expires'] == ''
assert session_cookie['max-age'] == ''
@pytest.mark.django_db
def test_login_persistent(registered_customer, api_client):
login_url = reverse('shop:login')
data = {
'form_data': {
'username': registered_customer.email,
'password': 'secret',
'stay_logged_in': True
}
}
response = api_client.post(login_url, data, format='json')
tz_gmt = pytz.timezone('GMT')
shall_expire = datetime.now(tz=tz_gmt).replace(microsecond=0) + timedelta(seconds=settings.SESSION_COOKIE_AGE)
assert response.status_code == 200
session_cookie = response.cookies.get('sessionid')
expires = datetime.strptime(session_cookie['expires'], '%a, %d %b %Y %H:%M:%S GMT')
expires = expires.replace(tzinfo=tz_gmt)
assert abs(expires - shall_expire) < timedelta(seconds=5)
assert session_cookie['max-age'] == settings.SESSION_COOKIE_AGE
@pytest.mark.django_db
def test_logout(registered_customer, api_client):
assert api_client.login(username=registered_customer.email, password='secret') is True
logout_url = reverse('shop:logout')
response = api_client.post(logout_url, {}, format='json')
assert response.status_code == 200
assert response.json() == {'logout_form': {'success_message': 'Successfully logged out.'}}
@pytest.mark.django_db
def test_change_password_fail(registered_customer, api_client):
assert api_client.login(username=registered_customer.email, password='secret') is True
change_url = reverse('shop:password-change')
data = {
'form_data': {
'new_password1': 'secret1',
'new_password2': 'secret2',
}
}
response = api_client.post(change_url, data, format='json')
assert response.status_code == 422
payload = response.json()
if DJANGO_VERSION < (3,):
assert payload == {'password_change_form': {'new_password2': ["The two password fields didn't match."]}}
else:
assert payload == {'password_change_form': {'new_password2': ["The two password fields didn’t match."]}}
@pytest.mark.django_db
def test_change_password_success(registered_customer, api_client):
api_client.login(username=registered_customer.email, password='secret')
change_url = reverse('shop:password-change')
data = {
'form_data': {
'new_password1': 'secret1',
'new_password2': 'secret1',
}
}
response = api_client.post(change_url, data, format='json')
assert response.status_code == 200
assert response.json() == {'password_change_form': {'success_message': 'Password has been changed successfully.'}}
api_client.logout()
assert api_client.login(username=registered_customer.email, password='secret') is False
assert api_client.login(username=registered_customer.email, password='secret1') is True
@pytest.mark.django_db
def test_password_reset(settings, registered_customer, api_client, api_rf):
settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
reset_request_url = reverse('shop:password-reset-request')
data = {
'form_data': {
'email': registered_customer.email,
}
}
response = api_client.post(reset_request_url, data, format='json')
assert response.status_code == 200
assert response.json() == {
'password_reset_request_form': {
'success_message': "Instructions on how to reset the password have been sent to '[email protected]'."
}
}
body_begin = "You're receiving this email because you requested a password reset for your user\naccount '[email protected]' at example.com."
assert len(mail.outbox) == 1
assert mail.outbox[0].body.startswith(body_begin)
matches = re.search(PasswordResetRequestSerializer.invalid_password_reset_confirm_url + r'([^/]+)/([0-9A-Za-z-]+)',
mail.outbox[0].body)
assert matches
request = api_rf.get('/password-reset-confirm')
response = PasswordResetConfirmView.as_view()(request, uidb64=matches.group(1), token=matches.group(2))
assert response.status_code == 200
assert response.data == {'validlink': True, 'user_name': '[email protected]', 'form_name': 'password_reset_form'}
request = api_rf.post('/password-reset-confirm/', {'form_data': '__invalid__'})
response = PasswordResetConfirmView.as_view()(request, uidb64=matches.group(1), token=matches.group(2))
assert response.status_code == 422
assert response.data == {'password_reset_confirm_form': {'non_field_errors': ['Invalid POST data.']}}
data = {
'form_data': {
'new_password1': 'secret1',
'new_password2': 'secret1',
}
}
request = api_rf.post('/password-reset-confirm/', data, format='json')
response = PasswordResetConfirmView.as_view()(request, uidb64=matches.group(1), token=matches.group(2))
assert response.status_code == 200
assert response.data == {'password_reset_confirm_form': {'success_message': 'Password has been reset with the new password.'}}
def test_password_reset_fail(api_rf):
request = api_rf.get('/password-reset-confirm')
response = PasswordResetConfirmView.as_view()(request, uidb64='INV', token='alid')
assert response.status_code == 200
assert response.data == {'validlink': False}
data = {
'form_data': {
'new_password1': 'secret1',
'new_password2': 'secret1',
}
}
request = api_rf.post('/password-reset-confirm', data, format='json')
response = PasswordResetConfirmView.as_view()(request, uidb64='INV', token='alid')
assert response.status_code == 422
assert response.data == {'password_reset_confirm_form': {'uid': ['Invalid value']}}
@pytest.mark.django_db
def test_register_user_with_password(api_client):
"""
Test if a new user can register himself providing his own new password.
"""
from testshop.models import Customer
register_user_url = reverse('shop:register-user')
data = {
'form_data': {
'email': '[email protected]',
'password1': 'secret',
'password2': 'secret',
'preset_password': False,
}
}
response = api_client.post(register_user_url, data, format='json')
assert response.status_code == 200
assert response.json() == {'register_user_form': {'success_message': 'Successfully registered yourself.'}}
customer = Customer.objects.get(user__email='[email protected]')
assert customer is not None
@pytest.mark.django_db
def test_register_user_generate_password(settings, api_client):
"""
Test if a new user can register himself and django-SHOP sends a generated password by email.
"""
settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
from testshop.models import Customer
register_user_url = reverse('shop:register-user')
data = {
'form_data': {
'email': '[email protected]',
'password1': '',
'password2': '',
'preset_password': True,
}
}
response = api_client.post(register_user_url, data, format='json')
assert response.status_code == 200
assert response.json() == {'register_user_form': {'success_message': 'Successfully registered yourself.'}}
customer = Customer.objects.get(user__email='[email protected]')
assert customer is not None
body_begin = "You're receiving this e-mail because you or someone else has requested an auto-generated password"
assert len(mail.outbox) == 1
assert mail.outbox[0].body.startswith(body_begin)
matches = re.search('please use username [email protected] with password ([0-9A-Za-z]+)', mail.outbox[0].body)
assert matches
password = matches.group(1)
assert api_client.login(username=customer.email, password=password) is True
@pytest.mark.django_db
def test_register_user_fail(registered_customer, api_client):
"""
Test that a new user cannot register if a customer with that email address already exists.
"""
register_user_url = reverse('shop:register-user')
data = {
'form_data': {
'email': registered_customer.email,
'password1': '',
'password2': '',
'preset_password': True,
}
}
response = api_client.post(register_user_url, data, format='json')
assert response.status_code == 422
assert response.json() == {
'register_user_form': {
'__all__': ["A customer with the e-mail address '[email protected]' already exists.\nIf you have used this address previously, try to reset the password."]
}
}
|
import unittest
from mock import MagicMock, patch
import os
import subprocess
from uiautomator import Adb
class TestAdb(unittest.TestCase):
def setUp(self):
self.os_name = os.name
def tearDown(self):
os.name = self.os_name
def test_serial(self):
serial = "abcdef1234567890"
adb = Adb(serial)
self.assertEqual(adb.default_serial, serial)
adb.devices = MagicMock()
adb.devices.return_value = [serial, "123456"]
self.assertEqual(adb.device_serial(), serial)
def test_adb_from_env(self):
home_dir = '/android/home'
with patch.dict('os.environ', {'ANDROID_HOME': home_dir}):
with patch('os.path.exists') as exists:
exists.return_value = True
os.name = "posix" # linux
adb_obj = Adb()
adb_path = os.path.join(home_dir, "platform-tools", "adb")
self.assertEqual(adb_obj.adb(), adb_path)
exists.assert_called_once_with(adb_path)
self.assertEqual(adb_obj.adb(), adb_path)
# the second call will return the __adb_cmd directly
exists.assert_called_once_with(adb_path)
os.name = "nt" # linux
adb_obj = Adb()
adb_path = os.path.join(home_dir, "platform-tools", "adb.exe")
self.assertEqual(adb_obj.adb(), adb_path)
exists.return_value = False
with self.assertRaises(EnvironmentError):
Adb().adb()
def test_adb_from_find(self):
with patch.dict('os.environ', {}, clear=True):
with patch("distutils.spawn.find_executable") as find_executable:
find_executable.return_value = "/usr/bin/adb"
with patch("os.path.realpath") as realpath:
realpath.return_value = "/home/user/android/platform-tools/adb"
self.assertEqual(realpath.return_value, Adb().adb())
find_executable.assert_called_once_with("adb") # find_exectable should be called once
realpath.assert_called_once_with(find_executable.return_value)
realpath.return_value = find_executable.return_value
self.assertEqual(find_executable.return_value, Adb().adb())
find_executable.return_value = None
call_count = find_executable.call_count
with self.assertRaises(EnvironmentError):
Adb().adb()
self.assertEqual(call_count + 1, find_executable.call_count)
def test_devices(self):
adb = Adb()
adb.raw_cmd = MagicMock()
adb.raw_cmd.return_value.communicate.return_value = (b"List of devices attached \r\n014E05DE0F02000E\tdevice\r\n489328DKFL7DF\tdevice", b"")
self.assertEqual(adb.devices(), {"014E05DE0F02000E": "device", "489328DKFL7DF": "device"})
adb.raw_cmd.assert_called_once_with("devices")
adb.raw_cmd.return_value.communicate.return_value = (b"List of devices attached \n\r014E05DE0F02000E\tdevice\n\r489328DKFL7DF\tdevice", b"")
self.assertEqual(adb.devices(), {"014E05DE0F02000E": "device", "489328DKFL7DF": "device"})
adb.raw_cmd.return_value.communicate.return_value = (b"List of devices attached \r014E05DE0F02000E\tdevice\r489328DKFL7DF\tdevice", b"")
self.assertEqual(adb.devices(), {"014E05DE0F02000E": "device", "489328DKFL7DF": "device"})
adb.raw_cmd.return_value.communicate.return_value = (b"List of devices attached \n014E05DE0F02000E\tdevice\n489328DKFL7DF\tdevice", b"")
self.assertEqual(adb.devices(), {"014E05DE0F02000E": "device", "489328DKFL7DF": "device"})
adb.raw_cmd.return_value.communicate.return_value = (b"not match", "")
with self.assertRaises(EnvironmentError):
adb.devices()
def test_forward(self):
adb = Adb()
adb.cmd = MagicMock()
adb.forward(90, 91)
adb.cmd.assert_called_once_with("forward", "tcp:90", "tcp:91")
adb.cmd.return_value.wait.assert_called_once_with()
def test_adb_raw_cmd(self):
adb = Adb()
adb.adb = MagicMock()
adb.adb.return_value = "adb"
args = ["a", "b", "c"]
with patch("subprocess.Popen") as Popen:
os.name = "posix"
adb.raw_cmd(*args)
Popen.assert_called_once_with(["%s %s" % (adb.adb(), " ".join(args))], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
with patch("subprocess.Popen") as Popen:
os.name = "nt"
adb.raw_cmd(*args)
Popen.assert_called_once_with([adb.adb()] + list(args), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def test_adb_cmd(self):
adb = Adb()
adb.device_serial = MagicMock()
adb.device_serial.return_value = "ANDROID_SERIAL"
adb.raw_cmd = MagicMock()
args = ["a", "b", "c"]
adb.cmd(*args)
adb.raw_cmd.assert_called_once_with("-s", "%s" % adb.device_serial(), *args)
adb.device_serial.return_value = "ANDROID SERIAL"
adb.raw_cmd = MagicMock()
args = ["a", "b", "c"]
adb.cmd(*args)
adb.raw_cmd.assert_called_once_with("-s", "'%s'" % adb.device_serial(), *args)
def test_adb_cmd_server_host(self):
adb = Adb(adb_server_host="localhost", adb_server_port=5037)
adb.adb = MagicMock()
adb.adb.return_value = "adb"
adb.device_serial = MagicMock()
adb.device_serial.return_value = "ANDROID_SERIAL"
args = ["a", "b", "c"]
with patch("subprocess.Popen") as Popen:
os.name = "nt"
adb.raw_cmd(*args)
Popen.assert_called_once_with(
[adb.adb()] + args,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
adb = Adb(adb_server_host="test.com", adb_server_port=1000)
adb.adb = MagicMock()
adb.adb.return_value = "adb"
adb.device_serial = MagicMock()
adb.device_serial.return_value = "ANDROID_SERIAL"
args = ["a", "b", "c"]
with patch("subprocess.Popen") as Popen:
os.name = "posix"
adb.raw_cmd(*args)
Popen.assert_called_once_with(
[" ".join([adb.adb()] + ["-H", "test.com", "-P", "1000"] + args)],
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
def test_device_serial(self):
with patch.dict('os.environ', {'ANDROID_SERIAL': "ABCDEF123456"}):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {"ABCDEF123456": "device"}
self.assertEqual(adb.device_serial(), "ABCDEF123456")
with patch.dict('os.environ', {'ANDROID_SERIAL': "ABCDEF123456"}):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {"ABCDEF123456": "device", "123456ABCDEF": "device"}
self.assertEqual(adb.device_serial(), "ABCDEF123456")
with patch.dict('os.environ', {'ANDROID_SERIAL': "HIJKLMN098765"}):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {"ABCDEF123456": "device", "123456ABCDEF": "device"}
self.assertEqual(adb.device_serial(), "HIJKLMN098765")
with patch.dict('os.environ', {}, clear=True):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {"ABCDEF123456": "device", "123456ABCDEF": "device"}
with self.assertRaises(EnvironmentError):
adb.device_serial()
with patch.dict('os.environ', {}, clear=True):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {"ABCDEF123456": "device"}
self.assertEqual(adb.device_serial(), "ABCDEF123456")
with self.assertRaises(EnvironmentError):
adb = Adb()
adb.devices = MagicMock()
adb.devices.return_value = {}
adb.device_serial()
def test_forward_list(self):
adb = Adb()
adb.version = MagicMock()
adb.version.return_value = ['1.0.31', '1', '0', '31']
adb.raw_cmd = MagicMock()
adb.raw_cmd.return_value.communicate.return_value = (b"014E05DE0F02000E tcp:9008 tcp:9008\r\n489328DKFL7DF tcp:9008 tcp:9008", b"")
self.assertEqual(adb.forward_list(), [['014E05DE0F02000E', 'tcp:9008', 'tcp:9008'], ['489328DKFL7DF', 'tcp:9008', 'tcp:9008']])
adb.version.return_value = ['1.0.29', '1', '0', '29']
with self.assertRaises(EnvironmentError):
adb.forward_list()
|
import os
import sys
import pytest
@pytest.fixture(params=[
'favicon.ico',
'scaffold/static/made_with_cherrypy_small.png',
'tutorial/tutorial.conf',
'tutorial/custom_error.html',
])
def data_file_path(request):
'generates data file paths expected to be found in the package'
return request.param
@pytest.fixture(autouse=True, scope='session')
def remove_paths_to_checkout():
"""Remove paths to ./cherrypy"""
to_remove = [
path
for path in sys.path
if os.path.isdir(path)
and os.path.samefile(path, os.path.curdir)
]
print('Removing', to_remove)
list(map(sys.path.remove, to_remove))
assert 'cherrypy' not in sys.modules
def test_data_files_installed(data_file_path):
"""
Ensure that data file paths are available in the
installed package as expected.
"""
import cherrypy
root = os.path.dirname(cherrypy.__file__)
fn = os.path.join(root, data_file_path)
assert os.path.exists(fn), fn
# make sure the file isn't in the local checkout
assert not os.path.samefile(fn, os.path.join('cherrypy', data_file_path))
def test_sanity():
"""
Test the test to show that it does fail when a file
is missing.
"""
with pytest.raises(Exception):
test_data_files_installed('does not exist')
|
import os
import pytest
from molecule.command.init import template
@pytest.fixture
def _command_args():
return {
'role_name': 'test-role',
'url': 'https://github.com/ansible/molecule-cookiecutter.git',
'no_input': True,
'subcommand': __name__,
}
@pytest.fixture
def _instance(_command_args):
return template.Template(_command_args)
def test_execute(temp_dir, _instance, patched_logger_info,
patched_logger_success):
_instance.execute()
assert os.path.isdir('./test-role')
msg = 'Initializing new role test-role...'
patched_logger_info.assert_called_once_with(msg)
role_directory = os.path.join(temp_dir.strpath, 'test-role')
msg = 'Initialized role in {} successfully.'.format(role_directory)
patched_logger_success.assert_called_once_with(msg)
def test_execute_role_exists(temp_dir, _instance, patched_logger_critical):
_instance.execute()
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
msg = ('The directory test-role exists. Cannot create new role.')
patched_logger_critical.assert_called_once_with(msg)
|
from datetime import timedelta
import logging
import telnetlib
import voluptuous as vol
from homeassistant.components.switch import (
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
SwitchEntity,
)
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_COMMAND_STATE,
CONF_NAME,
CONF_PORT,
CONF_RESOURCE,
CONF_SWITCHES,
CONF_TIMEOUT,
CONF_VALUE_TEMPLATE,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_PORT = 23
DEFAULT_TIMEOUT = 0.2
SWITCH_SCHEMA = vol.Schema(
{
vol.Required(CONF_COMMAND_OFF): cv.string,
vol.Required(CONF_COMMAND_ON): cv.string,
vol.Required(CONF_RESOURCE): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_COMMAND_STATE): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(float),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)}
)
SCAN_INTERVAL = timedelta(seconds=10)
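# A minimal configuration sketch for this platform (the host, commands, and
# value_template below are illustrative assumptions, not taken from a specific
# device):
#   switch:
#     - platform: telnet
#       switches:
#         projector:
#           resource: 192.168.1.20
#           command_on: "PWR ON"
#           command_off: "PWR OFF"
#           command_state: "PWR?"
#           value_template: '{{ value == "PWR=01" }}'
#           timeout: 0.9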
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Find and return switches controlled by telnet commands."""
devices = config.get(CONF_SWITCHES, {})
switches = []
for object_id, device_config in devices.items():
value_template = device_config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
switches.append(
TelnetSwitch(
hass,
object_id,
device_config.get(CONF_RESOURCE),
device_config.get(CONF_PORT),
device_config.get(CONF_NAME, object_id),
device_config.get(CONF_COMMAND_ON),
device_config.get(CONF_COMMAND_OFF),
device_config.get(CONF_COMMAND_STATE),
value_template,
device_config.get(CONF_TIMEOUT),
)
)
if not switches:
_LOGGER.error("No switches added")
return
add_entities(switches)
class TelnetSwitch(SwitchEntity):
"""Representation of a switch that can be toggled using telnet commands."""
def __init__(
self,
hass,
object_id,
resource,
port,
friendly_name,
command_on,
command_off,
command_state,
value_template,
timeout,
):
"""Initialize the switch."""
self._hass = hass
self.entity_id = ENTITY_ID_FORMAT.format(object_id)
self._resource = resource
self._port = port
self._name = friendly_name
self._state = False
self._command_on = command_on
self._command_off = command_off
self._command_state = command_state
self._value_template = value_template
self._timeout = timeout
def _telnet_command(self, command):
try:
telnet = telnetlib.Telnet(self._resource, self._port)
telnet.write(command.encode("ASCII") + b"\r")
response = telnet.read_until(b"\r", timeout=self._timeout)
_LOGGER.debug("telnet response: %s", response.decode("ASCII").strip())
return response.decode("ASCII").strip()
except OSError as error:
_LOGGER.error(
'Command "%s" failed with exception: %s', command, repr(error)
)
return None
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""Only poll if we have state command."""
return self._command_state is not None
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if no state command is defined, false otherwise."""
return self._command_state is None
def update(self):
"""Update device state."""
response = self._telnet_command(self._command_state)
if response:
rendered = self._value_template.render_with_possible_json_value(response)
self._state = rendered == "True"
else:
_LOGGER.warning("Empty response for command: %s", self._command_state)
def turn_on(self, **kwargs):
"""Turn the device on."""
self._telnet_command(self._command_on)
if self.assumed_state:
self._state = True
def turn_off(self, **kwargs):
"""Turn the device off."""
self._telnet_command(self._command_off)
if self.assumed_state:
self._state = False
|
from pysmartthings import Attribute, Capability
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DOMAIN as COVER_DOMAIN,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_SET_COVER_POSITION,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "open"}
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, COVER_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("cover.garage")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_open(hass, device_factory):
"""Test the cover opens doors, garages, and shades successfully."""
# Arrange
devices = {
device_factory("Door", [Capability.door_control], {Attribute.door: "closed"}),
device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "closed"}
),
device_factory(
"Shade", [Capability.window_shade], {Attribute.window_shade: "closed"}
),
}
await setup_platform(hass, COVER_DOMAIN, devices=devices)
entity_ids = ["cover.door", "cover.garage", "cover.shade"]
# Act
await hass.services.async_call(
COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity_ids}, blocking=True
)
# Assert
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OPENING
async def test_close(hass, device_factory):
"""Test the cover closes doors, garages, and shades successfully."""
# Arrange
devices = {
device_factory("Door", [Capability.door_control], {Attribute.door: "open"}),
device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "open"}
),
device_factory(
"Shade", [Capability.window_shade], {Attribute.window_shade: "open"}
),
}
await setup_platform(hass, COVER_DOMAIN, devices=devices)
entity_ids = ["cover.door", "cover.garage", "cover.shade"]
# Act
await hass.services.async_call(
COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: entity_ids}, blocking=True
)
# Assert
for entity_id in entity_ids:
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_CLOSING
async def test_set_cover_position(hass, device_factory):
"""Test the cover sets to the specific position."""
# Arrange
device = device_factory(
"Shade",
[Capability.window_shade, Capability.battery, Capability.switch_level],
{Attribute.window_shade: "opening", Attribute.battery: 95, Attribute.level: 10},
)
await setup_platform(hass, COVER_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_POSITION: 50, "entity_id": "all"},
blocking=True,
)
state = hass.states.get("cover.shade")
# Result of call does not update state
assert state.state == STATE_OPENING
assert state.attributes[ATTR_BATTERY_LEVEL] == 95
assert state.attributes[ATTR_CURRENT_POSITION] == 10
# Ensure API called
# pylint: disable=protected-access
assert device._api.post_device_command.call_count == 1 # type: ignore
async def test_set_cover_position_unsupported(hass, device_factory):
"""Test set position does nothing when not supported by device."""
# Arrange
device = device_factory(
"Shade", [Capability.window_shade], {Attribute.window_shade: "opening"}
)
await setup_platform(hass, COVER_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_SET_COVER_POSITION,
{"entity_id": "all", ATTR_POSITION: 50},
blocking=True,
)
state = hass.states.get("cover.shade")
assert ATTR_CURRENT_POSITION not in state.attributes
# Ensure API was not called
# pylint: disable=protected-access
assert device._api.post_device_command.call_count == 0 # type: ignore
async def test_update_to_open_from_signal(hass, device_factory):
"""Test the cover updates to open when receiving a signal."""
# Arrange
device = device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "opening"}
)
await setup_platform(hass, COVER_DOMAIN, devices=[device])
device.status.update_attribute_value(Attribute.door, "open")
assert hass.states.get("cover.garage").state == STATE_OPENING
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("cover.garage")
assert state is not None
assert state.state == STATE_OPEN
async def test_update_to_closed_from_signal(hass, device_factory):
"""Test the cover updates to closed when receiving a signal."""
# Arrange
device = device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "closing"}
)
await setup_platform(hass, COVER_DOMAIN, devices=[device])
device.status.update_attribute_value(Attribute.door, "closed")
assert hass.states.get("cover.garage").state == STATE_CLOSING
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("cover.garage")
assert state is not None
assert state.state == STATE_CLOSED
async def test_unload_config_entry(hass, device_factory):
"""Test the lock is removed when the config entry is unloaded."""
# Arrange
device = device_factory(
"Garage", [Capability.garage_door_control], {Attribute.door: "open"}
)
config_entry = await setup_platform(hass, COVER_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, COVER_DOMAIN)
# Assert
assert not hass.states.get("cover.garage")
|
from enum import IntEnum
SAN_NAMING_TEMPLATE = "SAN-{subject}-{issuer}-{not_before}-{not_after}"
DEFAULT_NAMING_TEMPLATE = "{subject}-{issuer}-{not_before}-{not_after}"
NONSTANDARD_NAMING_TEMPLATE = "{issuer}-{not_before}-{not_after}"
SUCCESS_METRIC_STATUS = "success"
FAILURE_METRIC_STATUS = "failure"
CERTIFICATE_KEY_TYPES = [
"RSA2048",
"RSA4096",
"ECCPRIME192V1",
"ECCPRIME256V1",
"ECCSECP192R1",
"ECCSECP224R1",
"ECCSECP256R1",
"ECCSECP384R1",
"ECCSECP521R1",
"ECCSECP256K1",
"ECCSECT163K1",
"ECCSECT233K1",
"ECCSECT283K1",
"ECCSECT409K1",
"ECCSECT571K1",
"ECCSECT163R2",
"ECCSECT233R1",
"ECCSECT283R1",
"ECCSECT409R1",
"ECCSECT571R2",
]
# As per RFC 5280 section 5.3.1 (https://tools.ietf.org/html/rfc5280#section-5.3.1)
class CRLReason(IntEnum):
    unspecified = 0
    keyCompromise = 1
    cACompromise = 2
    affiliationChanged = 3
    superseded = 4
    cessationOfOperation = 5
    certificateHold = 6
    removeFromCRL = 8
    privilegeWithdrawn = 9
    aACompromise = 10
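# Worked example (illustrative only): CRLReason maps the RFC 5280 numeric
# reason codes to symbolic names in both directions, e.g.
#   CRLReason.superseded == 4 and CRLReason(4) is CRLReason.superseded
# Code 7 is deliberately absent because the RFC leaves it unassigned.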
|
import pytest
import socket
from unittest.mock import Mock, patch
from case import ContextMock
from kombu.mixins import ConsumerMixin
def Message(body, content_type='text/plain', content_encoding='utf-8'):
m = Mock(name='Message')
m.body = body
m.content_type = content_type
m.content_encoding = content_encoding
return m
class Cons(ConsumerMixin):
def __init__(self, consumers):
self.calls = Mock(name='ConsumerMixin')
self.calls.get_consumers.return_value = consumers
self.get_consumers = self.calls.get_consumers
self.on_connection_revived = self.calls.on_connection_revived
self.on_consume_ready = self.calls.on_consume_ready
self.on_consume_end = self.calls.on_consume_end
self.on_iteration = self.calls.on_iteration
self.on_decode_error = self.calls.on_decode_error
self.on_connection_error = self.calls.on_connection_error
self.extra_context = ContextMock(name='extra_context')
self.extra_context.return_value = self.extra_context
class test_ConsumerMixin:
def _context(self):
Acons = ContextMock(name='consumerA')
Bcons = ContextMock(name='consumerB')
c = Cons([Acons, Bcons])
_conn = c.connection = ContextMock(name='connection')
est = c.establish_connection = Mock(name='est_connection')
est.return_value = _conn
return c, Acons, Bcons
def test_consume(self):
c, Acons, Bcons = self._context()
c.should_stop = False
it = c.consume(no_ack=True)
next(it)
Acons.__enter__.assert_called_with()
Bcons.__enter__.assert_called_with()
c.extra_context.__enter__.assert_called_with()
c.on_consume_ready.assert_called()
c.on_iteration.assert_called_with()
c.connection.drain_events.assert_called_with(timeout=1)
next(it)
next(it)
next(it)
c.should_stop = True
with pytest.raises(StopIteration):
next(it)
def test_consume_drain_raises_socket_error(self):
c, Acons, Bcons = self._context()
c.should_stop = False
it = c.consume(no_ack=True)
c.connection.drain_events.side_effect = socket.error
with pytest.raises(socket.error):
next(it)
def se2(*args, **kwargs):
c.should_stop = True
raise OSError()
c.connection.drain_events.side_effect = se2
it = c.consume(no_ack=True)
with pytest.raises(StopIteration):
next(it)
def test_consume_drain_raises_socket_timeout(self):
c, Acons, Bcons = self._context()
c.should_stop = False
it = c.consume(no_ack=True, timeout=1)
def se(*args, **kwargs):
c.should_stop = True
raise socket.timeout()
c.connection.drain_events.side_effect = se
with pytest.raises(socket.error):
next(it)
def test_Consumer_context(self):
c, Acons, Bcons = self._context()
with c.Consumer() as (conn, channel, consumer):
assert conn is c.connection
assert channel is conn.default_channel
c.on_connection_revived.assert_called_with()
c.get_consumers.assert_called()
cls = c.get_consumers.call_args[0][0]
subcons = cls()
assert subcons.on_decode_error is c.on_decode_error
assert subcons.channel is conn.default_channel
Acons.__enter__.assert_called_with()
Bcons.__enter__.assert_called_with()
c.on_consume_end.assert_called_with(conn, channel)
class test_ConsumerMixin_interface:
def setup(self):
self.c = ConsumerMixin()
def test_get_consumers(self):
with pytest.raises(NotImplementedError):
self.c.get_consumers(Mock(), Mock())
def test_on_connection_revived(self):
assert self.c.on_connection_revived() is None
def test_on_consume_ready(self):
assert self.c.on_consume_ready(Mock(), Mock(), []) is None
def test_on_consume_end(self):
assert self.c.on_consume_end(Mock(), Mock()) is None
def test_on_iteration(self):
assert self.c.on_iteration() is None
def test_on_decode_error(self):
message = Message('foo')
with patch('kombu.mixins.error') as error:
self.c.on_decode_error(message, KeyError('foo'))
error.assert_called()
message.ack.assert_called_with()
def test_on_connection_error(self):
with patch('kombu.mixins.warn') as warn:
self.c.on_connection_error(KeyError('foo'), 3)
warn.assert_called()
def test_extra_context(self):
with self.c.extra_context(Mock(), Mock()):
pass
def test_restart_limit(self):
assert self.c.restart_limit
def test_connection_errors(self):
conn = Mock(name='connection')
self.c.connection = conn
conn.connection_errors = (KeyError,)
assert self.c.connection_errors == conn.connection_errors
conn.channel_errors = (ValueError,)
assert self.c.channel_errors == conn.channel_errors
def test__consume_from(self):
a = ContextMock(name='A')
b = ContextMock(name='B')
a.__enter__ = Mock(name='A.__enter__')
b.__enter__ = Mock(name='B.__enter__')
with self.c._consume_from(a, b):
pass
a.__enter__.assert_called_with()
b.__enter__.assert_called_with()
def test_establish_connection(self):
conn = ContextMock(name='connection')
conn.clone.return_value = conn
self.c.connection = conn
self.c.connect_max_retries = 3
with self.c.establish_connection() as conn:
assert conn
conn.ensure_connection.assert_called_with(
self.c.on_connection_error, 3,
)
def test_maybe_conn_error(self):
conn = ContextMock(name='connection')
conn.connection_errors = (KeyError,)
conn.channel_errors = ()
self.c.connection = conn
def raises():
raise KeyError('foo')
self.c.maybe_conn_error(raises)
def test_run(self):
conn = ContextMock(name='connection')
self.c.connection = conn
conn.connection_errors = (KeyError,)
conn.channel_errors = ()
consume = self.c.consume = Mock(name='c.consume')
def se(*args, **kwargs):
self.c.should_stop = True
return [1]
self.c.should_stop = False
consume.side_effect = se
self.c.run()
def test_run_restart_rate_limited(self):
conn = ContextMock(name='connection')
self.c.connection = conn
conn.connection_errors = (KeyError,)
conn.channel_errors = ()
consume = self.c.consume = Mock(name='c.consume')
with patch('kombu.mixins.sleep') as sleep:
counter = [0]
def se(*args, **kwargs):
if counter[0] >= 1:
self.c.should_stop = True
counter[0] += 1
return counter
self.c.should_stop = False
consume.side_effect = se
self.c.run()
sleep.assert_called()
def test_run_raises(self):
conn = ContextMock(name='connection')
self.c.connection = conn
conn.connection_errors = (KeyError,)
conn.channel_errors = ()
consume = self.c.consume = Mock(name='c.consume')
with patch('kombu.mixins.warn') as warn:
def se_raises(*args, **kwargs):
self.c.should_stop = True
raise KeyError('foo')
self.c.should_stop = False
consume.side_effect = se_raises
self.c.run()
warn.assert_called()
|
from datetime import timedelta
import logging
from aioeafm import get_station
import async_timeout
from homeassistant.const import ATTR_ATTRIBUTION, LENGTH_METERS
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
UNIT_MAPPING = {
"http://qudt.org/1.1/vocab/unit#Meter": LENGTH_METERS,
}
def get_measures(station_data):
"""Force measure key to always be a list."""
if "measures" not in station_data:
return []
if isinstance(station_data["measures"], dict):
return [station_data["measures"]]
return station_data["measures"]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up UK Flood Monitoring Sensors."""
station_key = config_entry.data["station"]
session = async_get_clientsession(hass=hass)
measurements = set()
async def async_update_data():
        # DataUpdateCoordinator will handle aiohttp ClientErrors and timeouts
async with async_timeout.timeout(30):
data = await get_station(session, station_key)
measures = get_measures(data)
entities = []
# Look to see if payload contains new measures
for measure in measures:
if measure["@id"] in measurements:
continue
if "latestReading" not in measure:
# Don't create a sensor entity for a gauge that isn't available
continue
entities.append(Measurement(hass.data[DOMAIN][station_key], measure["@id"]))
measurements.add(measure["@id"])
async_add_entities(entities)
        # Turn data.measures into a dict rather than a list so that it is easier
        # for entities to find themselves.
data["measures"] = {measure["@id"]: measure for measure in measures}
return data
hass.data[DOMAIN][station_key] = coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="sensor",
update_method=async_update_data,
update_interval=timedelta(seconds=15 * 60),
)
# Fetch initial data so we have data when entities subscribe
await coordinator.async_refresh()
class Measurement(CoordinatorEntity):
"""A gauge at a flood monitoring station."""
attribution = "This uses Environment Agency flood and river level data from the real-time data API"
def __init__(self, coordinator, key):
"""Initialise the gauge with a data instance and station."""
super().__init__(coordinator)
self.key = key
@property
def station_name(self):
"""Return the station name for the measure."""
return self.coordinator.data["label"]
@property
def station_id(self):
"""Return the station id for the measure."""
return self.coordinator.data["measures"][self.key]["stationReference"]
@property
def qualifier(self):
"""Return the qualifier for the station."""
return self.coordinator.data["measures"][self.key]["qualifier"]
@property
def parameter_name(self):
"""Return the parameter name for the station."""
return self.coordinator.data["measures"][self.key]["parameterName"]
@property
def name(self):
"""Return the name of the gauge."""
return f"{self.station_name} {self.parameter_name} {self.qualifier}"
@property
def unique_id(self):
"""Return the unique id of the gauge."""
return self.key
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, "measure-id", self.station_id)},
"name": self.name,
"manufacturer": "https://environment.data.gov.uk/",
"model": self.parameter_name,
"entry_type": "service",
}
@property
def available(self) -> bool:
"""Return True if entity is available."""
if not self.coordinator.last_update_success:
return False
# If sensor goes offline it will no longer contain a reading
if "latestReading" not in self.coordinator.data["measures"][self.key]:
return False
        # Sometimes the latestReading key is present but holds a URL rather than a reading
# This is usually because the sensor has been archived
if not isinstance(
self.coordinator.data["measures"][self.key]["latestReading"], dict
):
return False
return True
@property
def unit_of_measurement(self):
"""Return units for the sensor."""
measure = self.coordinator.data["measures"][self.key]
if "unit" not in measure:
return None
return UNIT_MAPPING.get(measure["unit"], measure["unitName"])
@property
def device_state_attributes(self):
"""Return the sensor specific state attributes."""
return {ATTR_ATTRIBUTION: self.attribution}
@property
def state(self):
"""Return the current sensor value."""
return self.coordinator.data["measures"][self.key]["latestReading"]["value"]
|
import warnings
import numpy as np
import pandas as pd
from numpy.core.multiarray import normalize_axis_index
try:
import bottleneck as bn
_USE_BOTTLENECK = True
except ImportError:
# use numpy methods instead
bn = np
_USE_BOTTLENECK = False
def _select_along_axis(values, idx, axis):
other_ind = np.ix_(*[np.arange(s) for s in idx.shape])
sl = other_ind[:axis] + (idx,) + other_ind[axis:]
return values[sl]
def nanfirst(values, axis):
axis = normalize_axis_index(axis, values.ndim)
idx_first = np.argmax(~pd.isnull(values), axis=axis)
return _select_along_axis(values, idx_first, axis)
def nanlast(values, axis):
axis = normalize_axis_index(axis, values.ndim)
rev = (slice(None),) * axis + (slice(None, None, -1),)
idx_last = -1 - np.argmax(~pd.isnull(values)[rev], axis=axis)
return _select_along_axis(values, idx_last, axis)
def inverse_permutation(indices):
"""Return indices for an inverse permutation.
Parameters
----------
indices : 1D np.ndarray with dtype=int
Integer positions to assign elements to.
Returns
-------
inverse_permutation : 1D np.ndarray with dtype=int
Integer indices to take from the original array to create the
permutation.
"""
# use intp instead of int64 because of windows :(
inverse_permutation = np.empty(len(indices), dtype=np.intp)
inverse_permutation[indices] = np.arange(len(indices), dtype=np.intp)
return inverse_permutation
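# Worked example for the function above: with indices = np.array([2, 0, 1])
# (element 0 goes to position 2, element 1 to position 0, element 2 to
# position 1), inverse_permutation(indices) returns array([1, 2, 0]), i.e. the
# positions to take from the original array to reproduce the permuted one.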
def _ensure_bool_is_ndarray(result, *args):
# numpy will sometimes return a scalar value from binary comparisons if it
# can't handle the comparison instead of broadcasting, e.g.,
# In [10]: 1 == np.array(['a', 'b'])
# Out[10]: False
# This function ensures that the result is the appropriate shape in these
# cases
if isinstance(result, bool):
shape = np.broadcast(*args).shape
constructor = np.ones if result else np.zeros
result = constructor(shape, dtype=bool)
return result
def array_eq(self, other):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", r"elementwise comparison failed")
return _ensure_bool_is_ndarray(self == other, self, other)
def array_ne(self, other):
with warnings.catch_warnings():
warnings.filterwarnings("ignore", r"elementwise comparison failed")
return _ensure_bool_is_ndarray(self != other, self, other)
def _is_contiguous(positions):
"""Given a non-empty list, does it consist of contiguous integers?"""
previous = positions[0]
for current in positions[1:]:
if current != previous + 1:
return False
previous = current
return True
def _advanced_indexer_subspaces(key):
"""Indices of the advanced indexes subspaces for mixed indexing and vindex."""
if not isinstance(key, tuple):
key = (key,)
advanced_index_positions = [
i for i, k in enumerate(key) if not isinstance(k, slice)
]
if not advanced_index_positions or not _is_contiguous(advanced_index_positions):
# Nothing to reorder: dimensions on the indexing result are already
# ordered like vindex. See NumPy's rule for "Combining advanced and
# basic indexing":
# https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#combining-advanced-and-basic-indexing
return (), ()
non_slices = [k for k in key if not isinstance(k, slice)]
ndim = len(np.broadcast(*non_slices).shape)
mixed_positions = advanced_index_positions[0] + np.arange(ndim)
vindex_positions = np.arange(ndim)
return mixed_positions, vindex_positions
class NumpyVIndexAdapter:
"""Object that implements indexing like vindex on a np.ndarray.
This is a pure Python implementation of (some of) the logic in this NumPy
proposal: https://github.com/numpy/numpy/pull/6256
"""
def __init__(self, array):
self._array = array
def __getitem__(self, key):
mixed_positions, vindex_positions = _advanced_indexer_subspaces(key)
return np.moveaxis(self._array[key], mixed_positions, vindex_positions)
def __setitem__(self, key, value):
"""Value must have dimensionality matching the key."""
mixed_positions, vindex_positions = _advanced_indexer_subspaces(key)
self._array[key] = np.moveaxis(value, vindex_positions, mixed_positions)
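# Illustrative sketch (array shape chosen arbitrarily): for
#   arr = np.arange(60).reshape(5, 3, 4)
# plain NumPy indexing arr[:, [0, 2], [1, 3]] has shape (5, 2), keeping the
# dimension broadcast from the advanced indexes in place, whereas
# NumpyVIndexAdapter(arr)[:, [0, 2], [1, 3]] has shape (2, 5), moving that
# broadcast dimension to the front as the vindex proposal describes.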
def rolling_window(a, axis, window, center, fill_value):
""" rolling window with padding. """
pads = [(0, 0) for s in a.shape]
if not hasattr(axis, "__len__"):
axis = [axis]
window = [window]
center = [center]
for ax, win, cent in zip(axis, window, center):
if cent:
start = int(win / 2) # 10 -> 5, 9 -> 4
end = win - 1 - start
pads[ax] = (start, end)
else:
pads[ax] = (win - 1, 0)
a = np.pad(a, pads, mode="constant", constant_values=fill_value)
for ax, win in zip(axis, window):
a = _rolling_window(a, win, ax)
return a
def _rolling_window(a, window, axis=-1):
"""
Make an ndarray with a rolling window along axis.
    Parameters
    ----------
    a : array_like
        Array to add the rolling window to.
    window : int
        Size of the rolling window.
    axis : int
        Axis along which the rolling window will be applied.
    Returns
    -------
    Array that is a view of the original array with an added dimension
    of size window.
Examples
--------
>>> x = np.arange(10).reshape((2, 5))
>>> _rolling_window(x, 3, axis=-1)
array([[[0, 1, 2],
[1, 2, 3],
[2, 3, 4]],
<BLANKLINE>
[[5, 6, 7],
[6, 7, 8],
[7, 8, 9]]])
Calculate rolling mean of last dimension:
>>> np.mean(_rolling_window(x, 3, axis=-1), -1)
array([[1., 2., 3.],
[6., 7., 8.]])
This function is taken from https://github.com/numpy/numpy/pull/31
but slightly modified to accept axis option.
"""
axis = normalize_axis_index(axis, a.ndim)
a = np.swapaxes(a, axis, -1)
if window < 1:
raise ValueError(f"`window` must be at least 1. Given : {window}")
if window > a.shape[-1]:
raise ValueError(f"`window` is too long. Given : {window}")
shape = a.shape[:-1] + (a.shape[-1] - window + 1, window)
strides = a.strides + (a.strides[-1],)
rolling = np.lib.stride_tricks.as_strided(
a, shape=shape, strides=strides, writeable=False
)
return np.swapaxes(rolling, -2, axis)
def _create_bottleneck_method(name, npmodule=np):
def f(values, axis=None, **kwargs):
dtype = kwargs.get("dtype", None)
bn_func = getattr(bn, name, None)
if (
_USE_BOTTLENECK
and isinstance(values, np.ndarray)
and bn_func is not None
and not isinstance(axis, tuple)
and values.dtype.kind in "uifc"
and values.dtype.isnative
and (dtype is None or np.dtype(dtype) == values.dtype)
):
            # bottleneck does not handle dtype or min_count
kwargs.pop("dtype", None)
result = bn_func(values, axis=axis, **kwargs)
else:
result = getattr(npmodule, name)(values, axis=axis, **kwargs)
return result
f.__name__ = name
return f
def _nanpolyfit_1d(arr, x, rcond=None):
out = np.full((x.shape[1] + 1,), np.nan)
mask = np.isnan(arr)
if not np.all(mask):
out[:-1], resid, rank, _ = np.linalg.lstsq(x[~mask, :], arr[~mask], rcond=rcond)
out[-1] = resid if resid.size > 0 else np.nan
warn_on_deficient_rank(rank, x.shape[1])
return out
def warn_on_deficient_rank(rank, order):
if rank != order:
warnings.warn("Polyfit may be poorly conditioned", np.RankWarning, stacklevel=2)
def least_squares(lhs, rhs, rcond=None, skipna=False):
if skipna:
added_dim = rhs.ndim == 1
if added_dim:
rhs = rhs.reshape(rhs.shape[0], 1)
nan_cols = np.any(np.isnan(rhs), axis=0)
out = np.empty((lhs.shape[1] + 1, rhs.shape[1]))
if np.any(nan_cols):
out[:, nan_cols] = np.apply_along_axis(
_nanpolyfit_1d, 0, rhs[:, nan_cols], lhs
)
if np.any(~nan_cols):
out[:-1, ~nan_cols], resids, rank, _ = np.linalg.lstsq(
lhs, rhs[:, ~nan_cols], rcond=rcond
)
out[-1, ~nan_cols] = resids if resids.size > 0 else np.nan
warn_on_deficient_rank(rank, lhs.shape[1])
coeffs = out[:-1, :]
residuals = out[-1, :]
if added_dim:
coeffs = coeffs.reshape(coeffs.shape[0])
residuals = residuals.reshape(residuals.shape[0])
else:
coeffs, residuals, rank, _ = np.linalg.lstsq(lhs, rhs, rcond=rcond)
if residuals.size == 0:
residuals = coeffs[0] * np.nan
warn_on_deficient_rank(rank, lhs.shape[1])
return coeffs, residuals
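# Usage sketch (inputs are made up): least_squares solves lhs @ coeffs ~= rhs,
# and with skipna=True rows where a given rhs column is NaN are dropped for
# that column only.
#   x = np.arange(5.0)
#   lhs = np.stack([x, np.ones_like(x)], axis=1)  # design matrix for a*x + b
#   rhs = np.array([0.1, 1.0, np.nan, 2.9, 4.2])
#   coeffs, residuals = least_squares(lhs, rhs, skipna=True)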
nanmin = _create_bottleneck_method("nanmin")
nanmax = _create_bottleneck_method("nanmax")
nanmean = _create_bottleneck_method("nanmean")
nanmedian = _create_bottleneck_method("nanmedian")
nanvar = _create_bottleneck_method("nanvar")
nanstd = _create_bottleneck_method("nanstd")
nanprod = _create_bottleneck_method("nanprod")
nancumsum = _create_bottleneck_method("nancumsum")
nancumprod = _create_bottleneck_method("nancumprod")
nanargmin = _create_bottleneck_method("nanargmin")
nanargmax = _create_bottleneck_method("nanargmax")
|
import json
from homeassistant.components.climate.const import (
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.components.fan import SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM
from homeassistant.components.melissa import DATA_MELISSA, climate as melissa
from homeassistant.components.melissa.climate import MelissaClimate
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import load_fixture
_SERIAL = "12345678"
def melissa_mock():
"""Use this to mock the melissa api."""
api = Mock()
api.async_fetch_devices = AsyncMock(
return_value=json.loads(load_fixture("melissa_fetch_devices.json"))
)
api.async_status = AsyncMock(
return_value=json.loads(load_fixture("melissa_status.json"))
)
api.async_cur_settings = AsyncMock(
return_value=json.loads(load_fixture("melissa_cur_settings.json"))
)
api.async_send = AsyncMock(return_value=True)
api.STATE_OFF = 0
api.STATE_ON = 1
api.STATE_IDLE = 2
api.MODE_AUTO = 0
api.MODE_FAN = 1
api.MODE_HEAT = 2
api.MODE_COOL = 3
api.MODE_DRY = 4
api.FAN_AUTO = 0
api.FAN_LOW = 1
api.FAN_MEDIUM = 2
api.FAN_HIGH = 3
api.STATE = "state"
api.MODE = "mode"
api.FAN = "fan"
api.TEMP = "temp"
return api
async def test_setup_platform(hass):
"""Test setup_platform."""
with patch(
"homeassistant.components.melissa.climate.MelissaClimate"
) as mocked_thermostat:
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = mocked_thermostat(api, device["serial_number"], device)
thermostats = [thermostat]
hass.data[DATA_MELISSA] = api
config = {}
add_entities = Mock()
discovery_info = {}
await melissa.async_setup_platform(hass, config, add_entities, discovery_info)
add_entities.assert_called_once_with(thermostats)
async def test_get_name(hass):
"""Test name property."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.name == "Melissa 12345678"
async def test_current_fan_mode(hass):
"""Test current_fan_mode property."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
assert SPEED_LOW == thermostat.fan_mode
thermostat._cur_settings = None
assert thermostat.fan_mode is None
async def test_current_temperature(hass):
"""Test current temperature."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.current_temperature == 27.4
async def test_current_temperature_no_data(hass):
"""Test current temperature without data."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
thermostat._data = None
assert thermostat.current_temperature is None
async def test_target_temperature_step(hass):
"""Test current target_temperature_step."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.target_temperature_step == 1
async def test_current_operation(hass):
"""Test current operation."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
assert thermostat.state == HVAC_MODE_HEAT
thermostat._cur_settings = None
assert thermostat.hvac_action is None
async def test_operation_list(hass):
"""Test the operation list."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert [
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_OFF,
] == thermostat.hvac_modes
async def test_fan_modes(hass):
"""Test the fan list."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert ["auto", SPEED_HIGH, SPEED_MEDIUM, SPEED_LOW] == thermostat.fan_modes
async def test_target_temperature(hass):
"""Test target temperature."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
assert thermostat.target_temperature == 16
thermostat._cur_settings = None
assert thermostat.target_temperature is None
async def test_state(hass):
"""Test state."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
assert HVAC_MODE_HEAT == thermostat.state
thermostat._cur_settings = None
assert thermostat.state is None
async def test_temperature_unit(hass):
"""Test temperature unit."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert TEMP_CELSIUS == thermostat.temperature_unit
async def test_min_temp(hass):
"""Test min temp."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.min_temp == 16
async def test_max_temp(hass):
"""Test max temp."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.max_temp == 30
async def test_supported_features(hass):
"""Test supported_features property."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
assert features == thermostat.supported_features
async def test_set_temperature(hass):
"""Test set_temperature."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
await thermostat.async_set_temperature(**{ATTR_TEMPERATURE: 25})
assert thermostat.target_temperature == 25
async def test_fan_mode(hass):
"""Test set_fan_mode."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
await hass.async_block_till_done()
await thermostat.async_set_fan_mode(SPEED_HIGH)
await hass.async_block_till_done()
assert SPEED_HIGH == thermostat.fan_mode
async def test_set_operation_mode(hass):
"""Test set_operation_mode."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
await hass.async_block_till_done()
await thermostat.async_set_hvac_mode(HVAC_MODE_COOL)
await hass.async_block_till_done()
assert HVAC_MODE_COOL == thermostat.hvac_mode
async def test_send(hass):
"""Test send."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
await hass.async_block_till_done()
await thermostat.async_send({"fan": api.FAN_MEDIUM})
await hass.async_block_till_done()
assert SPEED_MEDIUM == thermostat.fan_mode
api.async_send.return_value = AsyncMock(return_value=False)
thermostat._cur_settings = None
await thermostat.async_send({"fan": api.FAN_LOW})
await hass.async_block_till_done()
assert SPEED_LOW != thermostat.fan_mode
assert thermostat._cur_settings is None
async def test_update(hass):
"""Test update."""
with patch(
"homeassistant.components.melissa.climate._LOGGER.warning"
) as mocked_warning:
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
await thermostat.async_update()
assert SPEED_LOW == thermostat.fan_mode
assert HVAC_MODE_HEAT == thermostat.state
api.async_status = AsyncMock(side_effect=KeyError("boom"))
await thermostat.async_update()
mocked_warning.assert_called_once_with(
"Unable to update entity %s", thermostat.entity_id
)
async def test_melissa_op_to_hass(hass):
"""Test for translate melissa operations to hass."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert HVAC_MODE_FAN_ONLY == thermostat.melissa_op_to_hass(1)
assert HVAC_MODE_HEAT == thermostat.melissa_op_to_hass(2)
assert HVAC_MODE_COOL == thermostat.melissa_op_to_hass(3)
assert HVAC_MODE_DRY == thermostat.melissa_op_to_hass(4)
assert thermostat.melissa_op_to_hass(5) is None
async def test_melissa_fan_to_hass(hass):
"""Test for translate melissa fan state to hass."""
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert "auto" == thermostat.melissa_fan_to_hass(0)
assert SPEED_LOW == thermostat.melissa_fan_to_hass(1)
assert SPEED_MEDIUM == thermostat.melissa_fan_to_hass(2)
assert SPEED_HIGH == thermostat.melissa_fan_to_hass(3)
assert thermostat.melissa_fan_to_hass(4) is None
async def test_hass_mode_to_melissa(hass):
"""Test for hass operations to melssa."""
with patch(
"homeassistant.components.melissa.climate._LOGGER.warning"
) as mocked_warning:
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.hass_mode_to_melissa(HVAC_MODE_FAN_ONLY) == 1
assert thermostat.hass_mode_to_melissa(HVAC_MODE_HEAT) == 2
assert thermostat.hass_mode_to_melissa(HVAC_MODE_COOL) == 3
assert thermostat.hass_mode_to_melissa(HVAC_MODE_DRY) == 4
thermostat.hass_mode_to_melissa("test")
mocked_warning.assert_called_once_with(
"Melissa have no setting for %s mode", "test"
)
async def test_hass_fan_to_melissa(hass):
"""Test for translate melissa states to hass."""
with patch(
"homeassistant.components.melissa.climate._LOGGER.warning"
) as mocked_warning:
with patch("homeassistant.components.melissa"):
api = melissa_mock()
device = (await api.async_fetch_devices())[_SERIAL]
thermostat = MelissaClimate(api, _SERIAL, device)
assert thermostat.hass_fan_to_melissa("auto") == 0
assert thermostat.hass_fan_to_melissa(SPEED_LOW) == 1
assert thermostat.hass_fan_to_melissa(SPEED_MEDIUM) == 2
assert thermostat.hass_fan_to_melissa(SPEED_HIGH) == 3
thermostat.hass_fan_to_melissa("test")
mocked_warning.assert_called_once_with(
"Melissa have no setting for %s fan mode", "test"
)
|
__author__ = '[email protected]'
from absl import flags
flags.DEFINE_string('host', None, 'Redshift host.')
flags.DEFINE_string('database', None, 'Redshift Database.')
flags.DEFINE_string('user', None, 'Redshift User.')
flags.DEFINE_string('password', None, 'Redshift Password.')
flags.mark_flags_as_required(['host', 'database', 'user', 'password'])
FLAGS = flags.FLAGS
def generate_provider_specific_cmd_list(script, driver, output, error):
"""Method to compile the Redshift specific script execution command.
Arguments:
script: SQL script which contains the query.
driver: Driver that contains the Redshift specific script executor.
output: Output log file.
error: Error log file.
Returns:
Command list to execute the supplied script.
"""
return [driver, FLAGS.host, FLAGS.database, FLAGS.user, FLAGS.password,
script, output, error]
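# Usage sketch (the file names below are hypothetical): the returned list is
# intended to be passed to a subprocess runner, e.g.
#   cmd = generate_provider_specific_cmd_list(
#       'query0.sql', './redshift_driver.sh', 'query0.out', 'query0.err')
#   subprocess.check_call(cmd)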
|
import json
from test import CollectorTestCase
from test import get_collector_config
from mock import Mock, patch
from scribe import ScribeCollector
class ScribeCollectorTestCase(CollectorTestCase):
def setUp(self):
config = get_collector_config('ScribeCollector', {})
self.collector = ScribeCollector(config, None)
def test_import(self):
self.assertTrue(ScribeCollector)
def test_key_to_metric(self):
fn = self.collector.key_to_metric
self.assertEqual(fn("foo! bar!"), "foo__bar_")
self.assertEqual(fn(" foo:BAR"), "_foo_BAR")
self.assertEqual(fn("the_same"), "the_same")
def test_get_scribe_stats(self):
scribe_ctrl_output = self.getFixture('scribe_ctrl').getvalue()
expected_scribe_stats = json.loads(self.getFixture(
'scribe_ctrl_stats.json')
.getvalue())
with patch.object(ScribeCollector, 'get_scribe_ctrl_output',
Mock(return_value=scribe_ctrl_output)):
scribe_stats = self.collector.get_scribe_stats()
self.assertEqual(dict(scribe_stats), expected_scribe_stats)
|
from django.db import models
from django.template.loader import select_template
from django.utils.translation import gettext_lazy as _
from shop.conf import app_settings
from shop.models.customer import BaseCustomer
class Customer(BaseCustomer):
"""
Default materialized model for Customer, adding a customer's number and salutation.
If this model is materialized, then also register the corresponding serializer class
:class:`shop.serializers.defaults.customer.CustomerSerializer`.
"""
SALUTATION = [('mrs', _("Mrs.")), ('mr', _("Mr.")), ('na', _("(n/a)"))]
number = models.PositiveIntegerField(
_("Customer Number"),
null=True,
default=None,
unique=True,
)
salutation = models.CharField(
_("Salutation"),
max_length=5,
choices=SALUTATION,
)
def get_number(self):
return self.number
def get_or_assign_number(self):
if self.number is None:
aggr = Customer.objects.filter(number__isnull=False).aggregate(models.Max('number'))
self.number = (aggr['number__max'] or 0) + 1
self.save()
return self.get_number()
def as_text(self):
template_names = [
'{}/customer.txt'.format(app_settings.APP_LABEL),
'shop/customer.txt',
]
template = select_template(template_names)
return template.render({'customer': self})
|
import unittest
from ReText import tablemode
class TestTableMode(unittest.TestCase):
def performEdit(self, text, offset, editSize, paddingchar=None, fragment=None):
if editSize < 0:
text = text[:offset + editSize] + text[offset:]
else:
fragment = paddingchar * editSize if not fragment else fragment
text = text[:offset] + fragment + text[offset:]
return text
def checkDetermineEditLists(self, paddingChars, before, edit, after, alignWithAnyEdge):
class Row():
def __init__(self, text, separatorLine, paddingChar):
self.text = text
self.separatorline = separatorLine
self.paddingchar = paddingChar
# Do some sanity checks on the test data to catch simple mistakes
self.assertEqual(len(paddingChars), len(before),
'The number of padding chars should be equal to the number of rows')
self.assertEqual(len(before), len(after),
'The number of rows before and after should be the same')
        # Apart from spacing, the edit only contains a's or d's
self.assertTrue(edit[1].strip(' d') == '' or
edit[1].strip(' a') == '',
"An edit should be a sequence of a's or d's surrounded by spaces")
rows = []
for paddingChar, text in zip(paddingChars, before):
rows.append(Row(text, (paddingChar != ' '), paddingChar))
editedline = edit[0]
editstripped = edit[1].strip()
editsize = len(editstripped)
# The offset passed to _determineEditLists is the one received from the
# contentsChange signal and is always the start of the set of chars
# that are added or removed.
contentsChangeOffset = edit[1].index(editstripped[0])
# However, the editoffset indicates the position before which chars
# must be deleted or after which they must be added (just like the
# offset used in the edits returned by _determineEditLists),
# so for deletions we'll need to add the size of the edit to it
if editstripped[0] == 'd':
editsize = -editsize
editoffset = contentsChangeOffset + len(editstripped)
else:
editoffset = contentsChangeOffset
editLists = tablemode._determineEditLists(rows, edit[0], contentsChangeOffset, editsize, alignWithAnyEdge)
editedRows = []
self.assertEqual(len(editLists), len(rows))
for i, (row, editList) in enumerate(zip(rows, editLists)):
editedText = row.text
for editEntry in editList:
editedText = self.performEdit(editedText, editEntry[0], editEntry[1], paddingchar=row.paddingchar)
editedRows.append(editedText)
editedRows[editedline] = self.performEdit(editedRows[editedline], editoffset, editsize, fragment=editstripped)
if editedRows != after:
if alignWithAnyEdge:
alignmentScenario = "when aligning any edge with another"
else:
alignmentScenario = "when only aligning edges of cells in the same column"
assertMessage = ["Output differs %s." % alignmentScenario,
"",
"Input:"] + \
["%3d '%s'" % (i, line) for i, line in enumerate(before)] + \
["",
"Edit:",
"%3d '%s'" % edit,
"",
"Expected output:"] + \
["%3d '%s'" % (i, line) for i, line in enumerate(after)] + \
["",
"Actual output:"] + \
["%3d '%s'" % (i, line) for i, line in enumerate(editedRows)]
self.fail('\n'.join(assertMessage))
def test_simpleInsert(self):
# Insert at the start of a cell so it doesn't need to grow
separatorChars = ' '
before = ['| |',
'| |']
edit = (0, ' a ')
after = ['|a |',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert at the last position in a cell where it doesn't need to grow
separatorChars = ' '
before = ['| |',
'| |']
edit = (0, ' a')
after = ['| a|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert at the end of a cell so it will have to grow
separatorChars = ' '
before = ['| |',
'| |']
edit = (0, ' a')
after = ['| a|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_insertPushAhead(self):
# Insert with enough room to push without growing the cell
separatorChars = ' '
before = ['| x |',
'| |']
edit = (0, ' a ')
after = ['|a x|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert without enough room to push, so the cell will have to grow
separatorChars = ' '
before = ['| x|',
'| |']
edit = (0, ' a ')
after = ['|a x|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
# Insert without enough room to push, so the cell will have to grow,
# but the edge of the cell below it does not move with it because it is
# of an earlier column
separatorChars = ' '
before = ['| | x|',
' | |']
edit = (0, ' a ')
after = ['| |a x|',
' | |']
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert without enough room to push, so the cell will have to grow,
# but the edge of the cell below it does not move with it because it is
# of a later column
separatorChars = ' '
before = [' | x|',
'| | |']
edit = (0, ' a ')
after = [' |a x|',
'| | |']
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert multiple characters forcing a partial grow
separatorChars = ' '
before = ['| |',
'| |']
edit = (0, ' aaaaaa')
after = ['| aaaaaa|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert multiple characters forcing a partial grow through pushing other chars ahead
separatorChars = ' '
before = ['| bb |',
'| |']
edit = (0, ' aaaaaaa')
after = ['| aaaaaaabb|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_insertInSeparatorCell(self):
# Insert in a cell on a separator line
separatorChars = ' -'
before = ['| |',
'|----|']
edit = (1, ' a ')
after = ['| |',
'|--a-|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert in a cell on a separator line forcing it to grow
separatorChars = ' -'
before = ['| |',
'|----|']
edit = (1, ' a ')
after = ['| |',
'|---a-|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert in a cell on a separator line with an alignment marker
separatorChars = ' -'
before = ['| |',
'|---:|']
edit = (1, ' a ')
after = ['| |',
'|--a:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert in a cell on a separator line with an alignment marker forcing it to grow
separatorChars = ' -'
before = ['| |',
'|---:|']
edit = (1, ' a ')
after = ['| |',
'|---a:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert in a cell on a separator line after the alignment marker forcing it to grow
separatorChars = ' -'
before = ['| |',
'|---:|']
edit = (1, ' a')
after = ['| |',
'|---:a|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_insertAboveSeparatorLine(self):
# Insert on another line, without growing the cell
separatorChars = ' -'
before = ['| |',
'|----|']
edit = (0, ' a')
after = ['| a|',
'|----|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert on another line, forcing the separator cell to grow
separatorChars = ' -'
before = ['| |',
'|----|']
edit = (0, ' a')
after = ['| a|',
'|-----|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert on another line, without growing the cell with alignment marker
separatorChars = ' -'
before = ['| |',
'|---:|']
edit = (0, ' a')
after = ['| a|',
'|---:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert on another line, forcing the separator cell with alignment marker to grow
separatorChars = ' -'
before = ['| |',
'|---:|']
edit = (0, ' a')
after = ['| a|',
'|----:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Insert on another line, forcing the separator cell that ends with a regular char to grow
separatorChars = ' -'
before = ['| |',
'|--- |']
edit = (0, ' a')
after = ['| a|',
'|---- |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_insertCascade(self):
# Test if growing of cells cascades onto other lines through edges that are shifted
separatorChars = ' '
before = ['| |',
' | |',
' | |',
' |']
edit = (0, ' a')
after = ['| a|',
' | |',
' | |',
' |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
# Test if growing of cells cascades onto other lines but does not affect unconnected edges
separatorChars = ' '
before = ['| |',
' | |',
' | | |']
edit = (0, ' a')
after = ['| a|',
' | |',
' | | |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
def test_simpleDelete(self):
# Delete at start of cell
separatorChars = ' '
before = ['|abcd|',
'| |']
edit = (0, ' d ')
after = ['|bcd|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Delete at end of cell
separatorChars = ' '
before = ['|abcd|',
'| |']
edit = (0, ' d')
after = ['|abc|',
'| |']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_deleteShrinking(self):
# Shrinking limited by cell on other row
separatorChars = ' '
before = ['|abc |',
'|efgh|']
edit = (0, ' d ')
after = ['|bc |',
'|efgh|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking limited by cell on other row (cont'd)
separatorChars = ' '
before = ['|abcd|',
'|efgh|']
edit = (0, ' d')
after = ['|abc |',
'|efgh|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking of next cell limited by cell on other row
separatorChars = ' '
before = ['|abc | |',
'|efghi|klm|']
edit = (0, ' d ')
after = ['|bc | |',
'|efghi|klm|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
        # Shrink current cell fully, grow next cell partially
separatorChars = ' '
before = ['| aabb| |',
'|xxxxxx|x |']
edit = (0, ' dddd')
after = ['| | |',
'|xxxxxx|x|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrink current cell fully, do not change next cell
separatorChars = ' '
before = ['| aabb| |',
'|xxxxxxxx |']
edit = (0, ' dddd')
after = ['| | |',
'|xxxxxxxx |']
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
def test_deleteShrinkingSeparatorRow(self):
# Shrinking not limited by size of separator cell
separatorChars = ' -'
before = ['|abcd|',
'|----|']
edit = (0, ' d ')
after = ['|acd|',
'|---|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking limited by size of separator cell
separatorChars = ' -'
before = ['|abc|',
'|---|']
edit = (0, ' d ')
after = ['|ac |',
'|---|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking not limited by size of separator cell with alignment markers
separatorChars = ' -'
before = ['|abcd|',
'|:--:|']
edit = (0, ' d ')
after = ['|acd|',
'|:-:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking limited by size of separator cell with alignment markers
separatorChars = ' -'
before = ['|abc|',
'|:-:|']
edit = (0, ' d ')
after = ['|ac |',
'|:-:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
# Shrinking partially limited by size of separator cell with alignment markers
separatorChars = ' -'
before = ['|abcde|',
'|:---:|']
edit = (0, ' dddd')
after = ['|a |',
'|:-:|']
self.checkDetermineEditLists(separatorChars, before, edit, after, True)
self.checkDetermineEditLists(separatorChars, before, edit, after, False)
if __name__ == '__main__':
unittest.main()
|
from lemur.plugins.base import Plugin
class NotificationPlugin(Plugin):
"""
    This is the base class from which all supported
    notification plugins inherit.
"""
type = "notification"
def send(self, notification_type, message, targets, options, **kwargs):
raise NotImplementedError
def get_recipients(self, options, additional_recipients):
"""
Given a set of options (which should include configured recipient info), returns the parsed list of recipients
from those options plus the additional recipients specified. The returned value has no duplicates.
For any notification types where recipients can't be dynamically modified, this returns only the additional recipients.
"""
return additional_recipients
class ExpirationNotificationPlugin(NotificationPlugin):
"""
This is the base class for all expiration notification plugins.
It contains some default options that are needed for all expiration
notification plugins.
"""
default_options = [
{
"name": "interval",
"type": "int",
"required": True,
"validation": r"^\d+$",
"helpMessage": "Number of days to be alert before expiration.",
},
{
"name": "unit",
"type": "select",
"required": True,
"validation": "",
"available": ["days", "weeks", "months"],
"helpMessage": "Interval unit",
},
]
@property
def options(self):
return self.default_options + self.additional_options
def send(self, notification_type, message, excluded_targets, options, **kwargs):
raise NotImplementedError
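# --- Hedged usage sketch (not part of the upstream module) ---
# The class below only illustrates how a concrete plugin might build on
# ExpirationNotificationPlugin; the plugin name, the "recipients" option and
# the assumption that ``options`` is a list of {"name": ..., "value": ...}
# dicts are made up for illustration, not Lemur's real email/SNS plugins.
class ExampleExpirationPlugin(ExpirationNotificationPlugin):
    title = "Example"
    slug = "example-expiration-notification"
    description = "Illustrative sketch only"
    additional_options = [
        {
            "name": "recipients",
            "type": "str",
            "required": True,
            "validation": r"^.+$",
            "helpMessage": "Comma delimited list of recipients.",
        }
    ]
    def get_recipients(self, options, additional_recipients):
        # Merge the configured recipients with the extra ones, without duplicates.
        configured = [
            address.strip()
            for option in options
            if option.get("name") == "recipients"
            for address in option.get("value", "").split(",")
            if address.strip()
        ]
        return list(set(configured + additional_recipients))
    def send(self, notification_type, message, excluded_targets, options, **kwargs):
        # A real plugin would render and deliver the message here; this sketch
        # only resolves recipients and prints what would be sent.
        recipients = [
            r for r in self.get_recipients(options, []) if r not in excluded_targets
        ]
        print(f"[{notification_type}] -> {recipients}: {message}")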
|
import glob
import os
import sh
from molecule import logger
from molecule import util
from molecule.verifier import base
LOG = logger.get_logger(__name__)
class Testinfra(base.Base):
"""
`Testinfra`_ is the default test runner.
Additional options can be passed to ``testinfra`` through the options
dict. Any option set in this section will override the defaults.
.. note::
Molecule will remove any options matching '^[v]+$', and pass ``-vvv``
to the underlying ``py.test`` command when executing
``molecule --debug``.
.. code-block:: yaml
verifier:
name: testinfra
options:
n: 1
The testing can be disabled by setting ``enabled`` to False.
.. code-block:: yaml
verifier:
name: testinfra
enabled: False
Environment variables can be passed to the verifier.
.. code-block:: yaml
verifier:
name: testinfra
env:
FOO: bar
Change path to the test directory.
.. code-block:: yaml
verifier:
name: testinfra
directory: /foo/bar/
Additional tests from another file or directory relative to the scenario's
    tests directory (supports glob patterns).
.. code-block:: yaml
verifier:
name: testinfra
additional_files_or_dirs:
- ../path/to/test_1.py
- ../path/to/test_2.py
- ../path/to/directory/*
.. _`Testinfra`: https://testinfra.readthedocs.io
"""
def __init__(self, config):
"""
Sets up the requirements to execute ``testinfra`` and returns None.
:param config: An instance of a Molecule config.
:return: None
"""
super(Testinfra, self).__init__(config)
self._testinfra_command = None
if config:
self._tests = self._get_tests()
@property
def name(self):
return 'testinfra'
@property
def default_options(self):
d = self._config.driver.testinfra_options
d['p'] = 'no:cacheprovider'
if self._config.debug:
d['debug'] = True
d['vvv'] = True
if self._config.args.get('sudo'):
d['sudo'] = True
return d
    # NOTE(retr0h): Override the base class's options() to merge the driver
    # defaults with the user's verifier options.
@property
def options(self):
o = self._config.config['verifier']['options']
# NOTE(retr0h): Remove verbose options added by the user while in
# debug.
if self._config.debug:
o = util.filter_verbose_permutation(o)
return util.merge_dicts(self.default_options, o)
@property
def default_env(self):
env = util.merge_dicts(os.environ.copy(), self._config.env)
env = util.merge_dicts(env, self._config.provisioner.env)
return env
@property
def additional_files_or_dirs(self):
files_list = []
c = self._config.config
for f in c['verifier']['additional_files_or_dirs']:
glob_path = os.path.join(self._config.verifier.directory, f)
glob_list = glob.glob(glob_path)
if glob_list:
files_list.extend(glob_list)
return files_list
def bake(self):
"""
Bake a ``testinfra`` command so it's ready to execute and returns None.
:return: None
"""
options = self.options
verbose_flag = util.verbose_flag(options)
args = verbose_flag + self.additional_files_or_dirs
self._testinfra_command = sh.Command('py.test').bake(
options,
self._tests,
*args,
_cwd=self._config.scenario.directory,
_env=self.env,
_out=LOG.out,
_err=LOG.error)
def execute(self):
if not self.enabled:
msg = 'Skipping, verifier is disabled.'
LOG.warn(msg)
return
if not len(self._tests) > 0:
msg = 'Skipping, no tests found.'
LOG.warn(msg)
return
if self._testinfra_command is None:
self.bake()
msg = 'Executing Testinfra tests found in {}/...'.format(
self.directory)
LOG.info(msg)
try:
util.run_command(self._testinfra_command, debug=self._config.debug)
msg = 'Verifier completed successfully.'
LOG.success(msg)
except sh.ErrorReturnCode as e:
util.sysexit(e.exit_code)
def _get_tests(self):
"""
Walk the verifier's directory for tests and returns a list.
:return: list
"""
return [
filename for filename in util.os_walk(self.directory, 'test_*.py')
]
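# --- Hedged illustration (not part of the upstream verifier) ---
# How the option precedence above plays out; the literal values are made up
# for illustration only.
#
#   driver defaults (default_options)  -> {'connection': 'ansible', 'p': 'no:cacheprovider'}
#   molecule.yml verifier options      -> {'n': 1, 'v': True}
#   util.merge_dicts(defaults, user)   -> user keys win on conflict, so py.test
#                                         is baked with all of the above.
#
# With ``molecule --debug``, the user's bare verbose flags ('v', 'vv', ...) are
# first removed by util.filter_verbose_permutation() and verbosity is injected
# through default_options ('vvv') instead.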
|
import json
import homeassistant.components.mqtt_eventstream as eventstream
from homeassistant.const import EVENT_STATE_CHANGED
from homeassistant.core import State, callback
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import ANY, patch
from tests.common import (
async_fire_mqtt_message,
async_fire_time_changed,
mock_state_change_event,
)
async def add_eventstream(hass, sub_topic=None, pub_topic=None, ignore_event=None):
"""Add a mqtt_eventstream component."""
config = {}
if sub_topic:
config["subscribe_topic"] = sub_topic
if pub_topic:
config["publish_topic"] = pub_topic
if ignore_event:
config["ignore_event"] = ignore_event
return await async_setup_component(
hass, eventstream.DOMAIN, {eventstream.DOMAIN: config}
)
async def test_setup_succeeds(hass, mqtt_mock):
"""Test the success of the setup."""
assert await add_eventstream(hass)
async def test_setup_with_pub(hass, mqtt_mock):
"""Test the setup with subscription."""
# Should start off with no listeners for all events
assert hass.bus.async_listeners().get("*") is None
assert await add_eventstream(hass, pub_topic="bar")
await hass.async_block_till_done()
# Verify that the event handler has been added as a listener
assert hass.bus.async_listeners().get("*") == 1
async def test_subscribe(hass, mqtt_mock):
"""Test the subscription."""
sub_topic = "foo"
assert await add_eventstream(hass, sub_topic=sub_topic)
await hass.async_block_till_done()
    # Verify that this entity was subscribed to the topic
mqtt_mock.async_subscribe.assert_called_with(sub_topic, ANY, 0, ANY)
async def test_state_changed_event_sends_message(hass, mqtt_mock):
"""Test the sending of a new message if event changed."""
now = dt_util.as_utc(dt_util.now())
e_id = "fake.entity"
pub_topic = "bar"
with patch(
("homeassistant.core.dt_util.utcnow"),
return_value=now,
):
# Add the eventstream component for publishing events
assert await add_eventstream(hass, pub_topic=pub_topic)
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_eventstream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
# The order of the JSON is indeterminate,
# so first just check that publish was called
mqtt_mock.async_publish.assert_called_with(pub_topic, ANY, 0, False)
assert mqtt_mock.async_publish.called
# Get the actual call to publish and make sure it was the one
# we were looking for
msg = mqtt_mock.async_publish.call_args[0][1]
event = {}
event["event_type"] = EVENT_STATE_CHANGED
new_state = {
"last_updated": now.isoformat(),
"state": "on",
"entity_id": e_id,
"attributes": {},
"last_changed": now.isoformat(),
}
event["event_data"] = {"new_state": new_state, "entity_id": e_id}
# Verify that the message received was that expected
result = json.loads(msg)
result["event_data"]["new_state"].pop("context")
assert result == event
async def test_time_event_does_not_send_message(hass, mqtt_mock):
"""Test the sending of a new message if time event."""
assert await add_eventstream(hass, pub_topic="bar")
await hass.async_block_till_done()
# Reset the mock because it will have already gotten calls for the
# mqtt_eventstream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
async_fire_time_changed(hass, dt_util.utcnow())
assert not mqtt_mock.async_publish.called
async def test_receiving_remote_event_fires_hass_event(hass, mqtt_mock):
"""Test the receiving of the remotely fired event."""
sub_topic = "foo"
assert await add_eventstream(hass, sub_topic=sub_topic)
await hass.async_block_till_done()
calls = []
@callback
def listener(_):
calls.append(1)
hass.bus.async_listen_once("test_event", listener)
await hass.async_block_till_done()
payload = json.dumps(
{"event_type": "test_event", "event_data": {}}, cls=JSONEncoder
)
async_fire_mqtt_message(hass, sub_topic, payload)
await hass.async_block_till_done()
assert 1 == len(calls)
async def test_ignored_event_doesnt_send_over_stream(hass, mqtt_mock):
"""Test the ignoring of sending events if defined."""
assert await add_eventstream(hass, pub_topic="bar", ignore_event=["state_changed"])
await hass.async_block_till_done()
e_id = "entity.test_id"
event = {}
event["event_type"] = EVENT_STATE_CHANGED
new_state = {"state": "on", "entity_id": e_id, "attributes": {}}
event["event_data"] = {"new_state": new_state, "entity_id": e_id}
# Reset the mock because it will have already gotten calls for the
# mqtt_eventstream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "on"))
await hass.async_block_till_done()
assert not mqtt_mock.async_publish.called
async def test_wrong_ignored_event_sends_over_stream(hass, mqtt_mock):
"""Test the ignoring of sending events if defined."""
assert await add_eventstream(hass, pub_topic="bar", ignore_event=["statee_changed"])
await hass.async_block_till_done()
e_id = "entity.test_id"
event = {}
event["event_type"] = EVENT_STATE_CHANGED
new_state = {"state": "on", "entity_id": e_id, "attributes": {}}
event["event_data"] = {"new_state": new_state, "entity_id": e_id}
# Reset the mock because it will have already gotten calls for the
# mqtt_eventstream state change on initialization, etc.
mqtt_mock.async_publish.reset_mock()
# Set a state of an entity
mock_state_change_event(hass, State(e_id, "on"))
await hass.async_block_till_done()
await hass.async_block_till_done()
assert mqtt_mock.async_publish.called
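# --- Hedged note (not part of the upstream tests) ---
# add_eventstream() builds the same structure a user would place under the
# integration's key in configuration.yaml, roughly (illustrative only):
#
#   mqtt_eventstream:
#     publish_topic: bar
#     subscribe_topic: foo
#     ignore_event:
#       - state_changed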
|
import time
from unittest.mock import patch
from django.core.cache import cache
from django.test import SimpleTestCase
from ..checks import is_celery_queue_long
class CeleryQueueTest(SimpleTestCase):
@staticmethod
def set_cache(value):
cache.set("celery_queue_stats", value)
def test_empty(self):
self.set_cache({})
self.assertFalse(is_celery_queue_long())
# The current time should be in the cache
self.assertEqual(len(cache.get("celery_queue_stats")), 1)
def test_current(self):
self.set_cache({int(time.time() / 3600): {}})
self.assertFalse(is_celery_queue_long())
def test_past(self):
self.set_cache({int(time.time() / 3600) - 1: {}})
self.assertFalse(is_celery_queue_long())
def test_cleanup(self):
hour = int(time.time() / 3600)
self.set_cache({i: {} for i in range(hour - 2, hour)})
self.assertFalse(is_celery_queue_long())
def test_trigger(self):
with patch(
"weblate.utils.checks.get_queue_stats", return_value={"celery": 1000}
):
self.set_cache({int(time.time() / 3600) - 1: {}})
self.assertFalse(is_celery_queue_long())
self.set_cache({int(time.time() / 3600) - 1: {"celery": 1000}})
self.assertTrue(is_celery_queue_long())
def test_translate(self):
with patch(
"weblate.utils.checks.get_queue_stats", return_value={"translate": 2000}
):
self.set_cache({int(time.time() / 3600) - 1: {}})
self.assertFalse(is_celery_queue_long())
self.set_cache({int(time.time() / 3600) - 1: {"translate": 100}})
self.assertFalse(is_celery_queue_long())
self.set_cache({int(time.time() / 3600) - 1: {"translate": 2000}})
self.assertTrue(is_celery_queue_long())
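# --- Hedged note (not part of the upstream tests) ---
# The cases above imply that is_celery_queue_long() keeps per-hour snapshots of
# get_queue_stats() under the "celery_queue_stats" cache key and compares the
# previous hour's values against per-queue thresholds; the exact thresholds are
# not asserted here, the 1000/2000 values merely bracket them.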
|
import asyncio
import datetime
import json
from collections import Counter
from pathlib import Path
from typing import Mapping
import aiohttp
import discord
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core.commands import Cog
from redbot.core.data_manager import cog_data_path
from redbot.core.i18n import Translator, cog_i18n
from ..utils import PlaylistScope
from . import abc, cog_utils, commands, events, tasks, utilities
from .cog_utils import CompositeMetaClass
_ = Translator("Audio", Path(__file__))
@cog_i18n(_)
class Audio(
commands.Commands,
events.Events,
tasks.Tasks,
utilities.Utilities,
Cog,
metaclass=CompositeMetaClass,
):
"""Play audio through voice channels."""
_default_lavalink_settings = {
"host": "localhost",
"rest_port": 2333,
"ws_port": 2333,
"password": "youshallnotpass",
}
def __init__(self, bot: Red):
super().__init__()
self.bot = bot
self.config = Config.get_conf(self, 2711759130, force_registration=True)
self.api_interface = None
self.player_manager = None
self.playlist_api = None
self.local_folder_current_path = None
self.db_conn = None
self._error_counter = Counter()
self._error_timer = {}
self._disconnected_players = {}
self._daily_playlist_cache = {}
self._daily_global_playlist_cache = {}
self._persist_queue_cache = {}
self._dj_status_cache = {}
self._dj_role_cache = {}
self.skip_votes = {}
self.play_lock = {}
self.lavalink_connect_task = None
self._restore_task = None
self.player_automated_timer_task = None
self.cog_cleaned_up = False
self.lavalink_connection_aborted = False
self.permission_cache = discord.Permissions(
embed_links=True,
read_messages=True,
send_messages=True,
read_message_history=True,
add_reactions=True,
)
self.session = aiohttp.ClientSession(json_serialize=json.dumps)
self.cog_ready_event = asyncio.Event()
self.cog_init_task = None
self.global_api_user = {
"fetched": False,
"can_read": False,
"can_post": False,
"can_delete": False,
}
self._ll_guild_updates = set()
self._last_ll_update = datetime.datetime.now(datetime.timezone.utc)
default_global = dict(
schema_version=1,
owner_notification=0,
cache_level=0,
cache_age=365,
daily_playlists=False,
global_db_enabled=False,
global_db_get_timeout=5,
status=False,
use_external_lavalink=False,
restrict=True,
localpath=str(cog_data_path(raw_name="Audio")),
url_keyword_blacklist=[],
url_keyword_whitelist=[],
java_exc_path="java",
**self._default_lavalink_settings,
)
default_guild = dict(
auto_play=False,
auto_deafen=True,
autoplaylist={"enabled": False, "id": None, "name": None, "scope": None},
persist_queue=True,
disconnect=False,
dj_enabled=False,
dj_role=None,
daily_playlists=False,
emptydc_enabled=False,
emptydc_timer=0,
emptypause_enabled=False,
emptypause_timer=0,
jukebox=False,
jukebox_price=0,
maxlength=0,
notify=False,
prefer_lyrics=False,
repeat=False,
shuffle=False,
shuffle_bumped=True,
thumbnail=False,
volume=100,
vote_enabled=False,
vote_percent=0,
room_lock=None,
url_keyword_blacklist=[],
url_keyword_whitelist=[],
country_code="US",
)
_playlist: Mapping = dict(id=None, author=None, name=None, playlist_url=None, tracks=[])
self.config.init_custom("EQUALIZER", 1)
self.config.register_custom("EQUALIZER", eq_bands=[], eq_presets={})
self.config.init_custom(PlaylistScope.GLOBAL.value, 1)
self.config.register_custom(PlaylistScope.GLOBAL.value, **_playlist)
self.config.init_custom(PlaylistScope.GUILD.value, 2)
self.config.register_custom(PlaylistScope.GUILD.value, **_playlist)
self.config.init_custom(PlaylistScope.USER.value, 2)
self.config.register_custom(PlaylistScope.USER.value, **_playlist)
self.config.register_guild(**default_guild)
self.config.register_global(**default_global)
self.config.register_user(country_code=None)
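        # --- Hedged note (not in the upstream cog) ---
        # Config.init_custom(scope, n) registers a custom group keyed by n
        # identifiers, so guild and user playlists are presumably addressed by
        # two ids (e.g. guild_id + playlist_id) while global playlists use a
        # single playlist id. Illustrative access only:
        #   await self.config.custom(PlaylistScope.GUILD.value, guild_id, playlist_id).name()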
|
import json
import pytest
from app.database import model
from . import WEBHOOKDATA, success, load_data
def test_server_new(tester, create_server):
resp = tester.post('/api/server/new', data={})
assert not success(resp)
server = create_server()
assert server['ip'] == '127.0.0.1'
def test_server_delete(tester, create_server, sql):
server = create_server()
resp = tester.post('/api/server/delete', data={'server_id': server['id']})
assert success(resp)
text = 'select count(*) from server where !deleted'
assert sql.execute(text).scalar() == 0
def test_server_list(tester, create_server):
create_server()
create_server()
resp = tester.get('/api/server/list')
assert success(resp)
assert len(load_data(resp)) == 2
def test_webhook_new(tester, create_server, create_webhook):
server = create_server()
webhook = create_webhook(server_id=server['id'])
assert webhook['branch'] == 'master'
webhook = create_webhook(server_id=server['id'],
webhook_id=webhook['id'], branch='dev')
assert webhook['branch'] == 'dev'
def test_webhook_delete(tester, create_server, create_webhook, sql):
server = create_server()
webhook = create_webhook(server_id=server['id'])
data = {'webhook_id': webhook['id']}
resp = tester.post('/api/webhook/delete', data=data)
assert success(resp)
text = 'select count(*) from web_hook where !deleted'
assert sql.execute(text).scalar() == 0
def test_webhook_list(tester, create_server, create_webhook):
server = create_server()
create_webhook(server_id=server['id'])
create_webhook(server_id=server['id'])
resp = tester.get('/api/webhook/list')
assert success(resp)
assert len(load_data(resp)) == 2
def test_history(tester, create_server, create_webhook, sql):
server = create_server()
webhook = create_webhook(server_id=server['id'])
query_string = {'webhook_id': webhook['id']}
resp = tester.get('/api/history/list', query_string=query_string)
assert len(load_data(resp)['histories']) == 0
history = model.History(
status='1',
webhook_id=webhook['id'],
data='null'
)
sql.add(history)
sql.commit()
resp = tester.get('/api/history/list', query_string=query_string)
assert len(load_data(resp)['histories']) == 1
@pytest.mark.parametrize("name,data", WEBHOOKDATA.items())
def test_git_webhook(tester, create_server, create_webhook, name, data):
server = create_server()
webhook = create_webhook(server_id=server['id'])
url = '/api/git-webhook/{}'.format(webhook['key'])
resp = tester.post(url, data=json.dumps(data))
assert b'Work put into Queue' in resp.data
|
import asyncio
from datetime import timedelta
import logging
import async_timeout
from poolsense import PoolSense
from poolsense.exceptions import PoolSenseError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DOMAIN
PLATFORMS = ["sensor", "binary_sensor"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the PoolSense component."""
# Make sure coordinator is initialized.
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up PoolSense from a config entry."""
poolsense = PoolSense(
aiohttp_client.async_get_clientsession(hass),
entry.data[CONF_EMAIL],
entry.data[CONF_PASSWORD],
)
auth_valid = await poolsense.test_poolsense_credentials()
if not auth_valid:
_LOGGER.error("Invalid authentication")
return False
coordinator = PoolSenseDataUpdateCoordinator(hass, entry)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data[DOMAIN][entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class PoolSenseEntity(CoordinatorEntity):
"""Implements a common class elements representing the PoolSense component."""
def __init__(self, coordinator, email, info_type):
"""Initialize poolsense sensor."""
super().__init__(coordinator)
self._unique_id = f"{email}-{info_type}"
self.info_type = info_type
@property
def unique_id(self):
"""Return a unique id."""
return self._unique_id
class PoolSenseDataUpdateCoordinator(DataUpdateCoordinator):
"""Define an object to hold PoolSense data."""
def __init__(self, hass, entry):
"""Initialize."""
self.poolsense = PoolSense(
aiohttp_client.async_get_clientsession(hass),
entry.data[CONF_EMAIL],
entry.data[CONF_PASSWORD],
)
self.hass = hass
self.entry = entry
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=timedelta(hours=1))
async def _async_update_data(self):
"""Update data via library."""
data = {}
with async_timeout.timeout(10):
try:
data = await self.poolsense.get_poolsense_data()
except (PoolSenseError) as error:
_LOGGER.error("PoolSense query did not complete.")
raise UpdateFailed(error) from error
return data
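# --- Hedged sketch (not part of the integration) ---
# Example of how a platform entity could build on PoolSenseEntity; the class
# name and the "Chlorine" info type are assumptions for illustration, not the
# integration's real sensor definitions.
class PoolSenseExampleSensor(PoolSenseEntity):
    """Illustrative sensor exposing one value from the coordinator data."""
    @property
    def name(self):
        """Return a readable name derived from the info type."""
        return f"PoolSense {self.info_type}"
    @property
    def state(self):
        """Return the value fetched by the coordinator for this info type."""
        return self.coordinator.data.get(self.info_type)
# Constructing it would look roughly like (illustrative only):
#   PoolSenseExampleSensor(coordinator, entry.data[CONF_EMAIL], "Chlorine")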
|
import os
import re
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert re.search(r'instance-[12]', host.check_output('hostname -s'))
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
def test_hostonly_interface(host):
i = host.interface('eth1').addresses
# NOTE(retr0h): Contains ipv4 and ipv6 addresses.
assert len(i) == 2
|
import hangups
from common import run_example
async def send_map_location(client, args):
request = hangups.hangouts_pb2.SendChatMessageRequest(
request_header=client.get_request_header(),
event_request_header=hangups.hangouts_pb2.EventRequestHeader(
conversation_id=hangups.hangouts_pb2.ConversationId(
id=args.conversation_id
),
client_generated_id=client.get_client_generated_id(),
),
location=hangups.hangouts_pb2.Location(
place=hangups.hangouts_pb2.Place(
name=args.name,
address=hangups.hangouts_pb2.EmbedItem(
postal_address=hangups.hangouts_pb2.EmbedItem.PostalAddress(
street_address=args.address
),
),
geo=hangups.hangouts_pb2.EmbedItem(
geo_coordinates=hangups.hangouts_pb2.EmbedItem.GeoCoordinates(
latitude=float(args.latitude),
longitude=float(args.longitude)
),
),
),
),
)
await client.send_chat_message(request)
if __name__ == '__main__':
run_example(
send_map_location, '--conversation-id', '--latitude', '--longitude',
'--name', '--address'
)
|
from typing import Callable, List
from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import RokuDataUpdateCoordinator, RokuEntity, roku_exception_handler
from .const import DOMAIN
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[List, bool], None],
) -> bool:
"""Load Roku remote based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
unique_id = coordinator.data.info.serial_number
async_add_entities([RokuRemote(unique_id, coordinator)], True)
class RokuRemote(RokuEntity, RemoteEntity):
"""Device that sends commands to an Roku."""
def __init__(self, unique_id: str, coordinator: RokuDataUpdateCoordinator) -> None:
"""Initialize the Roku device."""
super().__init__(
device_id=unique_id,
name=coordinator.data.info.name,
coordinator=coordinator,
)
self._unique_id = unique_id
@property
def unique_id(self) -> str:
"""Return the unique ID for this entity."""
return self._unique_id
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return not self.coordinator.data.state.standby
@roku_exception_handler
async def async_turn_on(self, **kwargs) -> None:
"""Turn the device on."""
await self.coordinator.roku.remote("poweron")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_turn_off(self, **kwargs) -> None:
"""Turn the device off."""
await self.coordinator.roku.remote("poweroff")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_send_command(self, command: List, **kwargs) -> None:
"""Send a command to one device."""
num_repeats = kwargs[ATTR_NUM_REPEATS]
for _ in range(num_repeats):
for single_command in command:
await self.coordinator.roku.remote(single_command)
await self.coordinator.async_request_refresh()
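# --- Hedged note (not part of the platform) ---
# async_send_command() above is what the generic remote.send_command service
# ends up calling; invoking it from Python looks roughly like this (the entity
# id and command names are made up for illustration):
#
#   await hass.services.async_call(
#       "remote",
#       "send_command",
#       {"entity_id": "remote.roku", "command": ["home", "select"], "num_repeats": 2},
#       blocking=True,
#   )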
|
import os
import unittest
import mock
from perfkitbenchmarker.linux_benchmarks import hpcg_benchmark
class HpcgBenchmarkTestCase(unittest.TestCase):
def setUp(self):
p = mock.patch(hpcg_benchmark.__name__ + '.FLAGS')
p.start()
self.addCleanup(p.stop)
path = os.path.join(os.path.dirname(__file__), '../data',
'hpcg_results.txt')
with open(path) as fp:
self.test_output = fp.read()
path = os.path.join(os.path.dirname(__file__), '../data',
'hpcg_results2.txt')
with open(path) as fp:
self.test_output2 = fp.read()
def testExtractThroughput(self):
throughput = hpcg_benchmark._ExtractThroughput(self.test_output)
self.assertEqual(202.6, throughput)
def testExtractThroughput2(self):
throughput = hpcg_benchmark._ExtractThroughput(self.test_output2)
self.assertEqual(62.3, throughput)
def testExtractProblemSize(self):
self.assertEqual([64, 128, 256],
hpcg_benchmark._ExtractProblemSize(self.test_output))
if __name__ == '__main__':
unittest.main()
|