from lemur import database
from lemur.roles.models import Role
from lemur.users.models import User
def update(role_id, name, description, users):
"""
Update a role
:param role_id:
:param name:
:param description:
:param users:
:return:
"""
role = get(role_id)
role.name = name
role.description = description
role.users = users
database.update(role)
return role
def set_third_party(role_id, third_party_status=False):
"""
Set a role to be a third-party role. This should rarely be called
directly by a user.
:param role_id:
:param third_party_status:
:return:
"""
role = get(role_id)
role.third_party = third_party_status
database.update(role)
return role
def create(
name, password=None, description=None, username=None, users=None, third_party=False
):
"""
Create a new role
:param name:
:param password:
:param description:
:param username:
:param users:
:param third_party:
:return:
"""
role = Role(
name=name,
description=description,
username=username,
password=password,
third_party=third_party,
)
if users:
role.users = users
return database.create(role)
def get(role_id):
"""
Retrieve a role by ID
:param role_id:
:return:
"""
return database.get(Role, role_id)
def get_by_name(role_name):
"""
Retrieve a role by its name
:param role_name:
:return:
"""
return database.get(Role, role_name, field="name")
def delete(role_id):
"""
Remove a role
:param role_id:
:return:
"""
return database.delete(get(role_id))
def render(args):
"""
Helper that filters subsets of roles depending on the parameters
passed to the REST API
:param args:
:return:
"""
query = database.session_query(Role)
filt = args.pop("filter")
user_id = args.pop("user_id", None)
authority_id = args.pop("authority_id", None)
if user_id:
query = query.filter(Role.users.any(User.id == user_id))
if authority_id:
query = query.filter(Role.authority_id == authority_id)
if filt:
terms = filt.split(";")
query = database.filter(query, Role, terms)
return database.sort_and_page(query, Role, args)
def get_or_create(role_name, description):
role = get_by_name(role_name)
if not role:
role = create(name=role_name, description=description)
return role
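# Hedged usage sketch (not part of the original module): shows how the CRUD
# helpers above fit together. Assumes an application context with the Lemur
# database session configured; the role name and description are illustrative.
def _example_role_workflow():
    """Create-or-fetch a role, mark it third party, then update it."""
    role = get_or_create("operators", description="Operational staff")
    # Mark the role as managed by an external identity provider.
    set_third_party(role.id, third_party_status=True)
    # update() replaces the name, description and user list in one call.
    return update(role.id, name="ops", description="Operational staff", users=role.users)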
|
import os
import unittest
from Tests.utils.utils import get_test_path
from kalliope.core.ConfigurationManager.DnaLoader import DnaLoader
from kalliope.core.Models.Dna import Dna
class TestDnaLoader(unittest.TestCase):
def setUp(self):
self.dna_test_file = get_test_path("modules/test_valid_dna.yml")
def tearDown(self):
pass
def test_get_yaml_config(self):
expected_result = {'kalliope_supported_version': [0.4],
'author': 'Kalliope project team',
'type': 'neuron',
'name': 'neuron_test',
'tags': ['test']}
dna_file_content = DnaLoader(self.dna_test_file).get_yaml_config()
self.assertEqual(dna_file_content, expected_result)
def test_get_dna(self):
expected_result = Dna()
expected_result.name = "neuron_test"
expected_result.module_type = "neuron"
expected_result.tags = ['test']
expected_result.author = 'Kalliope project team'
expected_result.kalliope_supported_version = [0.4]
dna_to_test = DnaLoader(self.dna_test_file).get_dna()
self.assertEqual(dna_to_test, expected_result)
def test_load_dna(self):
# test with a valid DNA file
dna_to_test = DnaLoader(self.dna_test_file)._load_dna()
self.assertTrue(isinstance(dna_to_test, Dna))
# test with a non valid DNA file
dna_invalid_test_file = get_test_path("modules/test_invalid_dna.yml")
self.assertIsNone(DnaLoader(dna_invalid_test_file)._load_dna())
def test_check_dna(self):
# check with valid DNA file
test_dna = {'kalliope_supported_version': [0.4],
'author': 'Kalliope project team',
'type': 'neuron',
'name': 'neuron_test',
'tags': ['test']}
self.assertTrue(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA, non existing resource type
test_dna = {'kalliope_supported_version': [0.5],
'author': 'Kalliope project team',
'type': 'non-existing',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# valid DNA, existing resource type
list_valid_dna = ["neuron", "stt", "tts", "trigger", "signal"]
for valid_resource in list_valid_dna:
test_dna = {'kalliope_supported_version': [0.5],
'author': 'Kalliope project team',
'type': valid_resource,
'name': '%s_test' % valid_resource,
'tags': ['test']}
self.assertTrue(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA file, no name
test_dna = {'kalliope_supported_version': [0.4],
'author': 'Kalliope project team',
'type': 'neuron',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA file, no type
test_dna = {'kalliope_supported_version': [0.4],
'author': 'Kalliope project team',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA, wrong type
test_dna = {'kalliope_supported_version': [0.4],
'author': 'Kalliope project team',
'type': 'doesnotexist',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA, no kalliope_supported_version
test_dna = {'author': 'Kalliope project team',
'type': 'neuron',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA, kalliope_supported_version empty
test_dna = {'kalliope_supported_version': [],
'author': 'Kalliope project team',
'type': 'neuron',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
# invalid DNA, kalliope_supported_version wrong format
test_dna = {'kalliope_supported_version': ['0.4.1'],
'author': 'Kalliope project team',
'type': 'neuron',
'name': 'neuron_test',
'tags': ['test']}
self.assertFalse(DnaLoader(file_path=self.dna_test_file)._check_dna_file(test_dna))
if __name__ == '__main__':
unittest.main()
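# Illustrative note (not part of the test suite): the YAML content that
# "modules/test_valid_dna.yml" is expected to contain, reconstructed from the
# dictionary asserted in test_get_yaml_config above. Key order is assumed.
#
#   name: "neuron_test"
#   type: "neuron"
#   author: "Kalliope project team"
#   kalliope_supported_version:
#     - 0.4
#   tags:
#     - "test"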
|
from pi1wire import InvalidCRCException, UnsupportResponseException
import pytest
from homeassistant.components.onewire.const import DEFAULT_SYSBUS_MOUNT_DIR, DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import mock_device_registry, mock_registry
MOCK_CONFIG = {
SENSOR_DOMAIN: {
"platform": DOMAIN,
"mount_dir": DEFAULT_SYSBUS_MOUNT_DIR,
"names": {
"10-111111111111": "My DS18B20",
},
}
}
MOCK_DEVICE_SENSORS = {
"00-111111111111": {"sensors": []},
"10-111111111111": {
"device_info": {
"identifiers": {(DOMAIN, "10-111111111111")},
"manufacturer": "Maxim Integrated",
"model": "10",
"name": "10-111111111111",
},
"sensors": [
{
"entity_id": "sensor.my_ds18b20_temperature",
"unique_id": "/sys/bus/w1/devices/10-111111111111/w1_slave",
"injected_value": 25.123,
"result": "25.1",
"unit": TEMP_CELSIUS,
"class": DEVICE_CLASS_TEMPERATURE,
},
],
},
"12-111111111111": {"sensors": []},
"1D-111111111111": {"sensors": []},
"22-111111111111": {
"device_info": {
"identifiers": {(DOMAIN, "22-111111111111")},
"manufacturer": "Maxim Integrated",
"model": "22",
"name": "22-111111111111",
},
"sensors": [
{
"entity_id": "sensor.22_111111111111_temperature",
"unique_id": "/sys/bus/w1/devices/22-111111111111/w1_slave",
"injected_value": FileNotFoundError,
"result": "unknown",
"unit": TEMP_CELSIUS,
"class": DEVICE_CLASS_TEMPERATURE,
},
],
},
"26-111111111111": {"sensors": []},
"28-111111111111": {
"device_info": {
"identifiers": {(DOMAIN, "28-111111111111")},
"manufacturer": "Maxim Integrated",
"model": "28",
"name": "28-111111111111",
},
"sensors": [
{
"entity_id": "sensor.28_111111111111_temperature",
"unique_id": "/sys/bus/w1/devices/28-111111111111/w1_slave",
"injected_value": InvalidCRCException,
"result": "unknown",
"unit": TEMP_CELSIUS,
"class": DEVICE_CLASS_TEMPERATURE,
},
],
},
"3B-111111111111": {
"device_info": {
"identifiers": {(DOMAIN, "3B-111111111111")},
"manufacturer": "Maxim Integrated",
"model": "3B",
"name": "3B-111111111111",
},
"sensors": [
{
"entity_id": "sensor.3b_111111111111_temperature",
"unique_id": "/sys/bus/w1/devices/3B-111111111111/w1_slave",
"injected_value": 29.993,
"result": "30.0",
"unit": TEMP_CELSIUS,
"class": DEVICE_CLASS_TEMPERATURE,
},
],
},
"42-111111111111": {
"device_info": {
"identifiers": {(DOMAIN, "42-111111111111")},
"manufacturer": "Maxim Integrated",
"model": "42",
"name": "42-111111111111",
},
"sensors": [
{
"entity_id": "sensor.42_111111111111_temperature",
"unique_id": "/sys/bus/w1/devices/42-111111111111/w1_slave",
"injected_value": UnsupportResponseException,
"result": "unknown",
"unit": TEMP_CELSIUS,
"class": DEVICE_CLASS_TEMPERATURE,
},
],
},
"EF-111111111111": {
"sensors": [],
},
"EF-111111111112": {
"sensors": [],
},
}
@pytest.mark.parametrize("device_id", MOCK_DEVICE_SENSORS.keys())
async def test_onewiredirect_setup_valid_device(hass, device_id):
"""Test that sysbus config entry works correctly."""
entity_registry = mock_registry(hass)
device_registry = mock_device_registry(hass)
mock_device_sensor = MOCK_DEVICE_SENSORS[device_id]
glob_result = [f"/{DEFAULT_SYSBUS_MOUNT_DIR}/{device_id}"]
read_side_effect = []
expected_sensors = mock_device_sensor["sensors"]
for expected_sensor in expected_sensors:
read_side_effect.append(expected_sensor["injected_value"])
# Pad the side effects so any additional read attempts fail cleanly
read_side_effect.extend([FileNotFoundError("Missing injected value")] * 20)
with patch(
"homeassistant.components.onewire.onewirehub.os.path.isdir", return_value=True
), patch("pi1wire._finder.glob.glob", return_value=glob_result,), patch(
"pi1wire.OneWire.get_temperature",
side_effect=read_side_effect,
):
assert await async_setup_component(hass, SENSOR_DOMAIN, MOCK_CONFIG)
await hass.async_block_till_done()
assert len(entity_registry.entities) == len(expected_sensors)
if len(expected_sensors) > 0:
device_info = mock_device_sensor["device_info"]
assert len(device_registry.devices) == 1
registry_entry = device_registry.async_get_device({(DOMAIN, device_id)}, set())
assert registry_entry is not None
assert registry_entry.identifiers == {(DOMAIN, device_id)}
assert registry_entry.manufacturer == device_info["manufacturer"]
assert registry_entry.name == device_info["name"]
assert registry_entry.model == device_info["model"]
for expected_sensor in expected_sensors:
entity_id = expected_sensor["entity_id"]
registry_entry = entity_registry.entities.get(entity_id)
assert registry_entry is not None
assert registry_entry.unique_id == expected_sensor["unique_id"]
assert registry_entry.unit_of_measurement == expected_sensor["unit"]
assert registry_entry.device_class == expected_sensor["class"]
state = hass.states.get(entity_id)
assert state.state == expected_sensor["result"]
|
import argparse
import logging
from .utils import do_db_auth, setup_logging
from ..arctic import Arctic, ArcticLibraryBinding
from ..hooks import get_mongodb_uri
logger = logging.getLogger(__name__)
def main():
usage = """
Check an Arctic library for inconsistencies.
"""
setup_logging()
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
parser.add_argument("--library", nargs='+', required=True, help="The name of the library. e.g. 'arctic_jblackburn.lib'")
parser.add_argument("-v", action='store_true', help="Verbose mode")
parser.add_argument("-f", action='store_true', help="Force ; Cleanup any problems found. (Default is dry-run.)")
parser.add_argument("-n", action='store_true', help="No FSCK ; just print stats.)")
opts = parser.parse_args()
if opts.v:
logger.setLevel(logging.DEBUG)
if not opts.f:
logger.info("DRY-RUN: No changes will be made.")
logger.info("FSCK'ing: %s on mongo %s" % (opts.library, opts.host))
store = Arctic(get_mongodb_uri(opts.host))
for lib in opts.library:
# Auth to the DB for making changes
if opts.f:
database_name, _ = ArcticLibraryBinding._parse_db_lib(lib)
do_db_auth(opts.host, store._conn, database_name)
orig_stats = store[lib].stats()
logger.info('----------------------------')
if not opts.n:
store[lib]._fsck(not opts.f)
logger.info('----------------------------')
final_stats = store[lib].stats()
logger.info('Stats:')
logger.info('Sharded: %s' % final_stats['chunks'].get('sharded', False))
logger.info('Symbols: %10d' % len(store[lib].list_symbols()))
logger.info('Versions: %10d Change(+/-) %6d (av: %.2fMB)' %
(final_stats['versions']['count'],
final_stats['versions']['count'] - orig_stats['versions']['count'],
final_stats['versions'].get('avgObjSize', 0) / 1024. / 1024.))
logger.info("Versions: %10.2fMB Change(+/-) %.2fMB" %
(final_stats['versions']['size'] / 1024. / 1024.,
(final_stats['versions']['size'] - orig_stats['versions']['size']) / 1024. / 1024.))
logger.info('Chunk Count: %7d Change(+/-) %6d (av: %.2fMB)' %
(final_stats['chunks']['count'],
final_stats['chunks']['count'] - orig_stats['chunks']['count'],
final_stats['chunks'].get('avgObjSize', 0) / 1024. / 1024.))
logger.info("Chunks: %12.2fMB Change(+/-) %6.2fMB" %
(final_stats['chunks']['size'] / 1024. / 1024.,
(final_stats['chunks']['size'] - orig_stats['chunks']['size']) / 1024. / 1024.))
logger.info('----------------------------')
if not opts.f:
logger.info("Done: DRY-RUN: No changes made. (Use -f to fix any problems)")
else:
logger.info("Done.")
if __name__ == '__main__':
main()
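# Hedged usage note (not part of the original script): example invocations
# assembled from the argparse options above. The console-script name is an
# assumption; only the flags are taken from the parser.
#
#   Dry-run check of one library, verbose:
#     arctic_fsck --host localhost --library arctic_jblackburn.lib -v
#   Fix any problems found (requires DB auth):
#     arctic_fsck --host research --library arctic_jblackburn.lib -f
#   Skip the consistency check and only print stats:
#     arctic_fsck --host localhost --library arctic_jblackburn.lib -n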
|
import unittest
from absl.testing import parameterized
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.azure import azure_virtual_machine
from perfkitbenchmarker.providers.azure import util
from tests import pkb_common_test_case
_COMPONENT = 'test_component'
class TestAzureVirtualMachine(pkb_common_test_case.TestOsMixin,
azure_virtual_machine.AzureVirtualMachine):
IMAGE_URN = 'test_image_urn'
class AzureVirtualMachineTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AzureVirtualMachineTest, self).setUp()
self.enter_context(
mock.patch(azure_virtual_machine.__name__ +
'.azure_network.AzureNetwork.GetNetwork'))
self.enter_context(
mock.patch(azure_virtual_machine.__name__ +
'.azure_network.AzureFirewall.GetFirewall'))
self.enter_context(
mock.patch(azure_virtual_machine.__name__ +
'.azure_network.GetResourceGroup'))
self.mock_cmd = self.enter_context(
mock.patch.object(vm_util, 'IssueCommand'))
self.enter_context(mock.patch.object(util, 'GetResourceTags'))
@parameterized.named_parameters(
('QuotaExceeded', '', 'Error Code: QuotaExceeded', 1),
('CoreQuotaExceeded', '',
'Operation could not be completed as it results in exceeding approved '
'standardEv3Family Cores quota', 1),
('CoreQuotaExceededDifferentWording', '',
'The operation could not be completed as it results in exceeding quota '
'limit of standardEv3Family Cores', 1))
def testQuotaExceeded(self, _, stderror, retcode):
spec = azure_virtual_machine.AzureVmSpec(
_COMPONENT, machine_type='test_machine_type', zone='testing')
vm = TestAzureVirtualMachine(spec)
self.mock_cmd.side_effect = [(_, stderror, retcode)]
with self.assertRaises(errors.Benchmarks.QuotaFailure):
vm._Create()
def testInsufficientSpotCapacity(self):
spec = azure_virtual_machine.AzureVmSpec(
_COMPONENT, machine_type='test_machine_type', zone='testing',
low_priority=True)
vm = TestAzureVirtualMachine(spec)
self.mock_cmd.side_effect = [('', 'OverconstrainedAllocationRequest', 1)]
with self.assertRaises(errors.Benchmarks.InsufficientCapacityCloudFailure):
vm._Create()
class AzurePublicIPAddressTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(AzurePublicIPAddressTest, self).setUp()
self.enter_context(
mock.patch(azure_virtual_machine.__name__ +
'.azure_network.GetResourceGroup'))
self.mock_cmd = self.enter_context(
mock.patch.object(vm_util, 'IssueCommand'))
self.ip_address = azure_virtual_machine.AzurePublicIPAddress(
'westus2', None, 'test_ip')
def testQuotaExceeded(self):
quota_error = ('ERROR: Cannot create more than 20 public IP addresses for '
'this subscription in this region.')
self.mock_cmd.side_effect = [('', quota_error, 1)]
with self.assertRaises(errors.Benchmarks.QuotaFailure):
self.ip_address._Create()
if __name__ == '__main__':
unittest.main()
|
import pandas as pd
import pytest
import pytz
from qstrader.system.rebalance.daily import DailyRebalance
@pytest.mark.parametrize(
"start_date,end_date,pre_market,expected_dates,expected_time",
[
(
'2020-03-11', '2020-03-17', False, [
'2020-03-11', '2020-03-12', '2020-03-13',
'2020-03-16', '2020-03-17'
], '21:00:00'
),
(
'2019-12-26', '2020-01-07', True, [
'2019-12-26', '2019-12-27', '2019-12-30',
'2019-12-31', '2020-01-01', '2020-01-02',
'2020-01-03', '2020-01-06', '2020-01-07'
], '14:30:00'
)
]
)
def test_daily_rebalance(
start_date, end_date, pre_market, expected_dates, expected_time
):
"""
Checks that the daily rebalance provides the correct business
datetimes for the provided range.
"""
sd = pd.Timestamp(start_date, tz=pytz.UTC)
ed = pd.Timestamp(end_date, tz=pytz.UTC)
reb = DailyRebalance(start_date=sd, end_date=ed, pre_market=pre_market)
actual_datetimes = reb._generate_rebalances()
expected_datetimes = [
pd.Timestamp('%s %s' % (expected_date, expected_time), tz=pytz.UTC)
for expected_date in expected_dates
]
assert actual_datetimes == expected_datetimes
|
from time import monotonic
from homeassistant.components.switch import SwitchEntity
from . import CONF_SMARTPLUGS, HUB as hub
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure switch platform."""
if not int(hub.config.get(CONF_SMARTPLUGS, 1)):
return False
hub.update_overview()
switches = []
switches.extend(
[
VerisureSmartplug(device_label)
for device_label in hub.get("$.smartPlugs[*].deviceLabel")
]
)
add_entities(switches)
class VerisureSmartplug(SwitchEntity):
"""Representation of a Verisure smartplug."""
def __init__(self, device_id):
"""Initialize the Verisure device."""
self._device_label = device_id
self._change_timestamp = 0
self._state = False
@property
def name(self):
"""Return the name or location of the smartplug."""
return hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].area", self._device_label
)
@property
def is_on(self):
"""Return true if on."""
if monotonic() - self._change_timestamp < 10:
return self._state
self._state = (
hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].currentState",
self._device_label,
)
== "ON"
)
return self._state
@property
def available(self):
"""Return True if entity is available."""
return (
hub.get_first("$.smartPlugs[?(@.deviceLabel == '%s')]", self._device_label)
is not None
)
def turn_on(self, **kwargs):
"""Set smartplug status on."""
hub.session.set_smartplug_state(self._device_label, True)
self._state = True
self._change_timestamp = monotonic()
def turn_off(self, **kwargs):
"""Set smartplug status off."""
hub.session.set_smartplug_state(self._device_label, False)
self._state = False
self._change_timestamp = monotonic()
# pylint: disable=no-self-use
def update(self):
"""Get the latest date of the smartplug."""
hub.update_overview()
|
from django.contrib import admin
from django.utils.html import format_html_join
from django.utils.translation import gettext_lazy as _
from shop.admin.customer import CustomerProxy, CustomerInlineAdminBase, CustomerAdminBase
class CustomerInlineAdmin(CustomerInlineAdminBase):
fieldsets = [
(None, {'fields': ['get_number', 'salutation']}),
(_("Addresses"), {'fields': ['get_shipping_addresses', 'get_billing_addresses']})
]
readonly_fields = ['get_number', 'get_shipping_addresses', 'get_billing_addresses']
def get_number(self, customer):
return customer.get_number() or '–'
get_number.short_description = _("Customer Number")
def get_shipping_addresses(self, customer):
addresses = [(a.as_text(),) for a in customer.shippingaddress_set.all()]
return format_html_join('', '<address>{0}</address>', addresses)
get_shipping_addresses.short_description = _("Shipping")
def get_billing_addresses(self, customer):
addresses = [(a.as_text(),) for a in customer.billingaddress_set.all()]
return format_html_join('', '<address>{0}</address>', addresses)
get_billing_addresses.short_description = _("Billing")
@admin.register(CustomerProxy)
class CustomerAdmin(CustomerAdminBase):
class Media:
css = {'all': ['shop/css/admin/customer.css']}
inlines = [CustomerInlineAdmin]
def get_list_display(self, request):
list_display = list(super().get_list_display(request))
list_display.insert(1, 'salutation')
return list_display
def salutation(self, user):
if hasattr(user, 'customer'):
return user.customer.get_salutation_display()
return ''
salutation.short_description = _("Salutation")
salutation.admin_order_field = 'customer__salutation'
|
import logging
from venstarcolortouch import VenstarColorTouch
import voluptuous as vol
from homeassistant.components.climate import PLATFORM_SCHEMA, ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
FAN_AUTO,
FAN_ON,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_HUMIDITY,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_HOST,
CONF_PASSWORD,
CONF_PIN,
CONF_SSL,
CONF_TIMEOUT,
CONF_USERNAME,
PRECISION_HALVES,
STATE_ON,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_FAN_STATE = "fan_state"
ATTR_HVAC_STATE = "hvac_mode"
CONF_HUMIDIFIER = "humidifier"
DEFAULT_SSL = False
VALID_FAN_STATES = [STATE_ON, HVAC_MODE_AUTO]
VALID_THERMOSTAT_MODES = [HVAC_MODE_HEAT, HVAC_MODE_COOL, HVAC_MODE_OFF, HVAC_MODE_AUTO]
HOLD_MODE_OFF = "off"
HOLD_MODE_TEMPERATURE = "temperature"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HUMIDIFIER, default=True): cv.boolean,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_TIMEOUT, default=5): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PIN): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Venstar thermostat."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
pin = config.get(CONF_PIN)
host = config.get(CONF_HOST)
timeout = config.get(CONF_TIMEOUT)
humidifier = config.get(CONF_HUMIDIFIER)
protocol = "https" if config[CONF_SSL] else "http"
client = VenstarColorTouch(
addr=host,
timeout=timeout,
user=username,
password=password,
pin=pin,
proto=protocol,
)
add_entities([VenstarThermostat(client, humidifier)], True)
class VenstarThermostat(ClimateEntity):
"""Representation of a Venstar thermostat."""
def __init__(self, client, humidifier):
"""Initialize the thermostat."""
self._client = client
self._humidifier = humidifier
self._mode_map = {
HVAC_MODE_HEAT: self._client.MODE_HEAT,
HVAC_MODE_COOL: self._client.MODE_COOL,
HVAC_MODE_AUTO: self._client.MODE_AUTO,
}
def update(self):
"""Update the data from the thermostat."""
info_success = self._client.update_info()
sensor_success = self._client.update_sensors()
if not info_success or not sensor_success:
_LOGGER.error("Failed to update data")
@property
def supported_features(self):
"""Return the list of supported features."""
features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE
if self._client.mode == self._client.MODE_AUTO:
features |= SUPPORT_TARGET_TEMPERATURE_RANGE
if self._humidifier and hasattr(self._client, "hum_active"):
features |= SUPPORT_TARGET_HUMIDITY
return features
@property
def name(self):
"""Return the name of the thermostat."""
return self._client.name
@property
def precision(self):
"""Return the precision of the system.
Venstar temperature values are passed back and forth in the
API in C or F, with half-degree accuracy.
"""
return PRECISION_HALVES
@property
def temperature_unit(self):
"""Return the unit of measurement, as defined by the API."""
if self._client.tempunits == self._client.TEMPUNITS_F:
return TEMP_FAHRENHEIT
return TEMP_CELSIUS
@property
def fan_modes(self):
"""Return the list of available fan modes."""
return VALID_FAN_STATES
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return VALID_THERMOSTAT_MODES
@property
def current_temperature(self):
"""Return the current temperature."""
return self._client.get_indoor_temp()
@property
def current_humidity(self):
"""Return the current humidity."""
return self._client.get_indoor_humidity()
@property
def hvac_mode(self):
"""Return current operation mode ie. heat, cool, auto."""
if self._client.mode == self._client.MODE_HEAT:
return HVAC_MODE_HEAT
if self._client.mode == self._client.MODE_COOL:
return HVAC_MODE_COOL
if self._client.mode == self._client.MODE_AUTO:
return HVAC_MODE_AUTO
return HVAC_MODE_OFF
@property
def hvac_action(self):
"""Return current operation mode ie. heat, cool, auto."""
if self._client.state == self._client.STATE_IDLE:
return CURRENT_HVAC_IDLE
if self._client.state == self._client.STATE_HEATING:
return CURRENT_HVAC_HEAT
if self._client.state == self._client.STATE_COOLING:
return CURRENT_HVAC_COOL
return CURRENT_HVAC_OFF
@property
def fan_mode(self):
"""Return the current fan mode."""
if self._client.fan == self._client.FAN_ON:
return FAN_ON
return FAN_AUTO
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
return {
ATTR_FAN_STATE: self._client.fanstate,
ATTR_HVAC_STATE: self._client.state,
}
@property
def target_temperature(self):
"""Return the target temperature we try to reach."""
if self._client.mode == self._client.MODE_HEAT:
return self._client.heattemp
if self._client.mode == self._client.MODE_COOL:
return self._client.cooltemp
return None
@property
def target_temperature_low(self):
"""Return the lower bound temp if auto mode is on."""
if self._client.mode == self._client.MODE_AUTO:
return self._client.heattemp
return None
@property
def target_temperature_high(self):
"""Return the upper bound temp if auto mode is on."""
if self._client.mode == self._client.MODE_AUTO:
return self._client.cooltemp
return None
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
return self._client.hum_setpoint
@property
def min_humidity(self):
"""Return the minimum humidity. Hardcoded to 0 in API."""
return 0
@property
def max_humidity(self):
"""Return the maximum humidity. Hardcoded to 60 in API."""
return 60
@property
def preset_mode(self):
"""Return current preset."""
if self._client.away:
return PRESET_AWAY
if self._client.schedule == 0:
return HOLD_MODE_TEMPERATURE
return PRESET_NONE
@property
def preset_modes(self):
"""Return valid preset modes."""
return [PRESET_NONE, PRESET_AWAY, HOLD_MODE_TEMPERATURE]
def _set_operation_mode(self, operation_mode):
"""Change the operation mode (internal)."""
if operation_mode == HVAC_MODE_HEAT:
success = self._client.set_mode(self._client.MODE_HEAT)
elif operation_mode == HVAC_MODE_COOL:
success = self._client.set_mode(self._client.MODE_COOL)
elif operation_mode == HVAC_MODE_AUTO:
success = self._client.set_mode(self._client.MODE_AUTO)
else:
success = self._client.set_mode(self._client.MODE_OFF)
if not success:
_LOGGER.error("Failed to change the operation mode")
return success
def set_temperature(self, **kwargs):
"""Set a new target temperature."""
set_temp = True
operation_mode = kwargs.get(ATTR_HVAC_MODE)
temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
temperature = kwargs.get(ATTR_TEMPERATURE)
if operation_mode and self._mode_map.get(operation_mode) != self._client.mode:
set_temp = self._set_operation_mode(operation_mode)
if set_temp:
if (
self._mode_map.get(operation_mode, self._client.mode)
== self._client.MODE_HEAT
):
success = self._client.set_setpoints(temperature, self._client.cooltemp)
elif (
self._mode_map.get(operation_mode, self._client.mode)
== self._client.MODE_COOL
):
success = self._client.set_setpoints(self._client.heattemp, temperature)
elif (
self._mode_map.get(operation_mode, self._client.mode)
== self._client.MODE_AUTO
):
success = self._client.set_setpoints(temp_low, temp_high)
else:
success = False
_LOGGER.error(
"The thermostat is currently not in a mode "
"that supports target temperature: %s",
operation_mode,
)
if not success:
_LOGGER.error("Failed to change the temperature")
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
if fan_mode == STATE_ON:
success = self._client.set_fan(self._client.FAN_ON)
else:
success = self._client.set_fan(self._client.FAN_AUTO)
if not success:
_LOGGER.error("Failed to change the fan mode")
def set_hvac_mode(self, hvac_mode):
"""Set new target operation mode."""
self._set_operation_mode(hvac_mode)
def set_humidity(self, humidity):
"""Set new target humidity."""
success = self._client.set_hum_setpoint(humidity)
if not success:
_LOGGER.error("Failed to change the target humidity level")
def set_preset_mode(self, preset_mode):
"""Set the hold mode."""
if preset_mode == PRESET_AWAY:
success = self._client.set_away(self._client.AWAY_AWAY)
elif preset_mode == HOLD_MODE_TEMPERATURE:
success = self._client.set_away(self._client.AWAY_HOME)
success = success and self._client.set_schedule(0)
elif preset_mode == PRESET_NONE:
success = self._client.set_away(self._client.AWAY_HOME)
success = success and self._client.set_schedule(1)
else:
_LOGGER.error("Unknown hold mode: %s", preset_mode)
success = False
if not success:
_LOGGER.error("Failed to change the schedule/hold state")
|
from typing import Optional, Any, Tuple
import numpy
Tensor = Any
def svd(
np, # TODO: Typing
tensor: Tensor,
pivot_axis: int,
max_singular_values: Optional[int] = None,
max_truncation_error: Optional[float] = None,
relative: Optional[bool] = False) -> Tuple[Tensor, Tensor, Tensor, Tensor]:
"""Computes the singular value decomposition (SVD) of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
tensor = np.reshape(tensor, [numpy.prod(left_dims), numpy.prod(right_dims)])
u, s, vh = np.linalg.svd(tensor, full_matrices=False)
if max_singular_values is None:
max_singular_values = np.size(s)
if max_truncation_error is not None:
# Cumulative norms of singular values in ascending order.
trunc_errs = np.sqrt(np.cumsum(np.square(s[::-1])))
# If relative is true, rescale max_truncation error with the largest
# singular value to yield the absolute maximal truncation error.
if relative:
abs_max_truncation_error = max_truncation_error * s[0]
else:
abs_max_truncation_error = max_truncation_error
# We must keep at least this many singular values to ensure the
# truncation error is <= abs_max_truncation_error.
num_sing_vals_err = np.count_nonzero(
(trunc_errs > abs_max_truncation_error).astype(np.int32))
else:
num_sing_vals_err = max_singular_values
num_sing_vals_keep = min(max_singular_values, num_sing_vals_err)
# tf.svd() always returns the singular values as a vector of float{32,64},
# since tf.math_ops.real is automatically applied to s. This means s may not
# have the same dtype as the original tensor, which can cause issues for
# later contractions. To fix this, we recast s to the original dtype.
s = s.astype(tensor.dtype)
s_rest = s[num_sing_vals_keep:]
s = s[:num_sing_vals_keep]
u = u[:, :num_sing_vals_keep]
vh = vh[:num_sing_vals_keep, :]
dim_s = s.shape[0]
u = np.reshape(u, list(left_dims) + [dim_s])
vh = np.reshape(vh, [dim_s] + list(right_dims))
return u, s, vh, s_rest
def qr(
np, # TODO: Typing
tensor: Tensor,
pivot_axis: int,
non_negative_diagonal: bool
) -> Tuple[Tensor, Tensor]:
"""Computes the QR decomposition of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
tensor = np.reshape(tensor, [numpy.prod(left_dims), numpy.prod(right_dims)])
q, r = np.linalg.qr(tensor)
if non_negative_diagonal:
phases = np.sign(np.diagonal(r))
q = q * phases
r = phases.conj()[:, None] * r
center_dim = q.shape[1]
q = np.reshape(q, list(left_dims) + [center_dim])
r = np.reshape(r, [center_dim] + list(right_dims))
return q, r
def rq(
np, # TODO: Typing
tensor: Tensor,
pivot_axis: int,
non_negative_diagonal: bool
) -> Tuple[Tensor, Tensor]:
"""Computes the RQ (reversed QR) decomposition of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
tensor = np.reshape(tensor, [numpy.prod(left_dims), numpy.prod(right_dims)])
q, r = np.linalg.qr(np.conj(np.transpose(tensor)))
if non_negative_diagonal:
phases = np.sign(np.diagonal(r))
q = q * phases
r = phases.conj()[:, None] * r
r, q = np.conj(np.transpose(r)), np.conj(
np.transpose(q)) #M=r*q at this point
center_dim = r.shape[1]
r = np.reshape(r, list(left_dims) + [center_dim])
q = np.reshape(q, [center_dim] + list(right_dims))
return r, q
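# Minimal usage sketch (not part of the module): exercises svd() with plain
# NumPy passed in as the `np` backend argument. Shapes are illustrative only.
def _example_svd_roundtrip():
    """Round-trip a random tensor through svd() without truncation."""
    tensor = numpy.random.randn(2, 3, 4, 5)
    # Split axes (0, 1) from axes (2, 3) and decompose.
    u, s, vh, s_rest = svd(numpy, tensor, pivot_axis=2)
    # Contract u * diag(s) * vh back together; it should match the input.
    rebuilt = numpy.einsum('abk,k,kcd->abcd', u, s, vh)
    assert numpy.allclose(rebuilt, tensor)
    assert s_rest.size == 0  # nothing was truncated
    return u, s, vh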
|
import time
import unittest
from stash.tests.stashtest import StashTestCase, requires_network
class PingTests(StashTestCase):
"""tests for the 'ping' command."""
def test_help(self):
"""test 'ping --help'"""
output = self.run_command("ping --help", exitcode=0)
self.assertIn("ping", output)
self.assertIn("-h", output)
self.assertIn("--help", output)
self.assertIn("-c", output)
self.assertIn("--count", output)
self.assertIn("-W", output)
self.assertIn("--timeout", output)
@unittest.expectedFailure
@requires_network
def test_ping_normal(self):
"""test 'ping <ip>'."""
target = "8.8.8.8"
output = self.run_command("ping " + target, exitcode=0)
self.assertIn("got ping in " + target, output)
self.assertNotIn("failed", output)
@unittest.expectedFailure
@requires_network
def test_count(self):
"""test 'ping <target> --count <n>'."""
target = "8.8.8.8"
for n in (1, 3, 5):
output = self.run_command("ping " + target + " --count " + str(n), exitcode=0)
self.assertIn("got ping in " + target, output)
self.assertNotIn("failed", output)
c = output.count("got ping in")
self.assertEqual(n, c)
@unittest.expectedFailure
@requires_network
def test_interval(self):
"""test 'ping <target> --interval <n>'."""
target = "8.8.8.8"
c = 3
for t in (1, 5, 10):
st = time.time()
output = self.run_command("ping " + target + " --count " + str(c) + " --interval " + str(t), exitcode=0)
et = time.time()
dt = et - st
self.assertIn("got ping in " + target, output)
self.assertNotIn("failed", output)
n = output.count("got ping in")
self.assertEqual(n, c)
mintime = c * t
maxtime = c * t + 5
self.assertGreaterEqual(dt, mintime)
self.assertLessEqual(dt, maxtime)
@unittest.expectedFailure
@unittest.skip("Test not implemented")
def test_timeout(self):
"""test 'ping <target> --timeout <t>'."""
# no idea how to implement a test for this case
raise NotImplementedError
|
import sys
from ReText import globalSettings, getBundledIcon, getSettingsFilePath
from ReText.icontheme import get_icon_theme
from markups.common import CONFIGURATION_DIR
from os.path import join
from PyQt5.QtCore import pyqtSignal, QFile, QFileInfo, QUrl, Qt
from PyQt5.QtGui import QDesktopServices, QIcon
from PyQt5.QtWidgets import QCheckBox, QDialog, QDialogButtonBox, \
QFileDialog, QGridLayout, QLabel, QLineEdit, QPushButton, QSpinBox, \
QComboBox, QTabWidget, QVBoxLayout, QWidget
MKD_EXTS_FILE = join(CONFIGURATION_DIR, 'markdown-extensions.txt')
class FileDialogButton(QPushButton):
def __init__(self, parent, fileName):
QPushButton.__init__(self, parent)
self.fileName = fileName
self.defaultText = self.tr('(none)')
self.updateButtonText()
self.clicked.connect(self.processClick)
def processClick(self):
pass
def updateButtonText(self):
if self.fileName:
self.setText(QFileInfo(self.fileName).fileName())
else:
self.setText(self.defaultText)
class FileSelectButton(FileDialogButton):
def processClick(self):
startDir = (QFileInfo(self.fileName).absolutePath()
if self.fileName else '')
self.fileName = QFileDialog.getOpenFileName(
self, self.tr('Select file to open'), startDir)[0]
self.updateButtonText()
class DirectorySelectButton(FileDialogButton):
def processClick(self):
startDir = (QFileInfo(self.fileName).absolutePath()
if self.fileName else '')
self.fileName = QFileDialog.getExistingDirectory(
self, self.tr('Select directory to open'), startDir)
self.updateButtonText()
class ClickableLabel(QLabel):
clicked = pyqtSignal()
def mousePressEvent(self, event):
self.clicked.emit()
super().mousePressEvent(event)
def setIconThemeFromSettings():
QIcon.setThemeName(globalSettings.iconTheme)
if QIcon.themeName() in ('hicolor', ''):
if not QFile.exists(getBundledIcon('document-new')):
QIcon.setThemeName(get_icon_theme())
if QIcon.themeName() == 'Yaru' and not QIcon.hasThemeIcon('document-new'):
# Old Yaru does not have non-symbolic action icons, so all
# document-* icons fall back to mimetypes/document.png.
# See https://github.com/ubuntu/yaru/issues/1294
QIcon.setThemeName('Humanity')
class ConfigDialog(QDialog):
def __init__(self, parent):
QDialog.__init__(self, parent)
self.parent = parent
self.initConfigOptions()
self.layout = QVBoxLayout(self)
path = getSettingsFilePath()
pathLabel = QLabel(self.tr('Using configuration file at:') +
' <a href="%(path)s">%(path)s</a>' % {'path': path}, self)
pathLabel.linkActivated.connect(self.openLink)
self.layout.addWidget(pathLabel)
self.tabWidget = QTabWidget(self)
self.layout.addWidget(self.tabWidget)
buttonBox = QDialogButtonBox(self)
buttonBox.setStandardButtons(QDialogButtonBox.Ok |
QDialogButtonBox.Cancel)
buttonBox.accepted.connect(self.saveSettings)
buttonBox.rejected.connect(self.close)
self.initWidgets()
self.configurators['rightMargin'].valueChanged.connect(self.handleRightMarginSet)
self.configurators['rightMarginWrap'].stateChanged.connect(self.handleRightMarginWrapSet)
self.layout.addWidget(buttonBox)
def initConfigOptions(self):
self.tabs = (
(self.tr('Behavior'), (
(self.tr('Automatically save documents'), 'autoSave'),
(self.tr('Automatically open last documents on startup'), 'openLastFilesOnStartup'),
(self.tr('Number of recent documents'), 'recentDocumentsCount'),
(self.tr('Restore window geometry'), 'saveWindowGeometry'),
(self.tr('Default preview state'), 'defaultPreviewState'),
(self.tr('Open external links in ReText window'), 'handleWebLinks'),
(self.tr('Markdown syntax extensions (comma-separated)'), 'markdownExtensions'),
(None, 'markdownExtensions'),
(self.tr('Enable synchronized scrolling for Markdown'), 'syncScroll'),
# (self.tr('Default Markdown file extension'), 'markdownDefaultFileExtension'),
# (self.tr('Default reStructuredText file extension'), 'restDefaultFileExtension'),
)),
(self.tr('Editor'), (
(self.tr('Highlight current line'), 'highlightCurrentLine'),
(self.tr('Show line numbers'), 'lineNumbersEnabled'),
(self.tr('Line numbers are relative to current line'), 'relativeLineNumbers'),
(self.tr('Tab key inserts spaces'), 'tabInsertsSpaces'),
(self.tr('Tabulation width'), 'tabWidth'),
(self.tr('Draw vertical line at column'), 'rightMargin'),
(self.tr('Enable soft wrap'), 'rightMarginWrap'),
(self.tr('Show document stats'), 'documentStatsEnabled'),
(self.tr('Ordered list mode'), 'orderedListMode'),
)),
(self.tr('Interface'), (
(self.tr('Hide toolbar'), 'hideToolBar'),
(self.tr('Icon theme name'), 'iconTheme'),
(self.tr('Stylesheet file'), 'styleSheet', True),
(self.tr('Hide tabs bar when there is only one tab'), 'tabBarAutoHide'),
(self.tr('Show full path in window title'), 'windowTitleFullPath'),
(self.tr('Show directory tree'), 'showDirectoryTree', False),
(self.tr('Working directory'), 'directoryPath', True),
))
)
def initWidgets(self):
self.configurators = {}
for tabTitle, options in self.tabs:
page = self.getPageWidget(options)
self.tabWidget.addTab(page, tabTitle)
def getPageWidget(self, options):
page = QWidget(self)
layout = QGridLayout(page)
for index, option in enumerate(options):
displayname, name = option[:2]
fileselector = option[2] if len(option) > 2 else False
if name is None:
header = QLabel('<h3>%s</h3>' % displayname, self)
layout.addWidget(header, index, 0, 1, 2, Qt.AlignHCenter)
continue
if displayname:
label = ClickableLabel(displayname + ':', self)
if name == 'markdownExtensions':
if displayname:
url = QUrl('https://github.com/retext-project/retext/wiki/Markdown-extensions')
helpButton = QPushButton(self.tr('Help'), self)
helpButton.clicked.connect(lambda: QDesktopServices.openUrl(url))
layout.addWidget(label, index, 0)
layout.addWidget(helpButton, index, 1)
continue
try:
extsFile = open(MKD_EXTS_FILE)
value = extsFile.read().rstrip().replace(extsFile.newlines, ', ')
extsFile.close()
except Exception:
value = ''
self.configurators[name] = QLineEdit(self)
self.configurators[name].setText(value)
layout.addWidget(self.configurators[name], index, 0, 1, 2)
continue
value = getattr(globalSettings, name)
if name == 'defaultPreviewState':
self.configurators[name] = QComboBox(self)
self.configurators[name].addItem(self.tr('Editor'), 'editor')
self.configurators[name].addItem(self.tr('Live preview'), 'live-preview')
self.configurators[name].addItem(self.tr('Normal preview'), 'normal-preview')
comboBoxIndex = self.configurators[name].findData(value)
self.configurators[name].setCurrentIndex(comboBoxIndex)
elif name == 'highlightCurrentLine':
self.configurators[name] = QComboBox(self)
self.configurators[name].addItem(self.tr('Disabled'), 'disabled')
self.configurators[name].addItem(self.tr('Cursor Line'), 'cursor-line')
self.configurators[name].addItem(self.tr('Wrapped Line'), 'wrapped-line')
comboBoxIndex = self.configurators[name].findData(value)
self.configurators[name].setCurrentIndex(comboBoxIndex)
elif name == 'orderedListMode':
self.configurators[name] = QComboBox(self)
self.configurators[name].addItem(self.tr('Increment'), 'increment')
self.configurators[name].addItem(self.tr('Repeat'), 'repeat')
comboBoxIndex = self.configurators[name].findData(value)
self.configurators[name].setCurrentIndex(comboBoxIndex)
elif name == 'directoryPath':
self.configurators[name] = DirectorySelectButton(self, value)
elif isinstance(value, bool):
self.configurators[name] = QCheckBox(self)
self.configurators[name].setChecked(value)
label.clicked.connect(self.configurators[name].nextCheckState)
elif isinstance(value, int):
self.configurators[name] = QSpinBox(self)
if name == 'tabWidth':
self.configurators[name].setRange(1, 10)
elif name == 'recentDocumentsCount':
self.configurators[name].setRange(5, 20)
else:
self.configurators[name].setMaximum(200)
self.configurators[name].setValue(value)
elif isinstance(value, str) and fileselector:
self.configurators[name] = FileSelectButton(self, value)
elif isinstance(value, str):
self.configurators[name] = QLineEdit(self)
self.configurators[name].setText(value)
layout.addWidget(label, index, 0)
layout.addWidget(self.configurators[name], index, 1, Qt.AlignRight)
return page
def handleRightMarginSet(self, value):
if value < 10:
self.configurators['rightMarginWrap'].setChecked(False)
def handleRightMarginWrapSet(self, state):
if state == Qt.Checked and self.configurators['rightMargin'].value() < 10:
self.configurators['rightMargin'].setValue(80)
def saveSettings(self):
for name, configurator in self.configurators.items():
if name == 'markdownExtensions':
continue
if isinstance(configurator, QCheckBox):
value = configurator.isChecked()
elif isinstance(configurator, QSpinBox):
value = configurator.value()
elif isinstance(configurator, QLineEdit):
value = configurator.text()
elif isinstance(configurator, QComboBox):
value = configurator.currentData()
elif isinstance(configurator, FileDialogButton):
value = configurator.fileName
setattr(globalSettings, name, value)
self.applySettings()
self.close()
def applySettings(self):
setIconThemeFromSettings()
try:
extsFile = open(MKD_EXTS_FILE, 'w')
for ext in self.configurators['markdownExtensions'].text().split(','):
if ext.strip():
extsFile.write(ext.strip() + '\n')
extsFile.close()
except Exception as e:
print(e, file=sys.stderr)
for tab in self.parent.iterateTabs():
tab.editBox.updateFont()
tab.editBox.setWrapModeAndWidth()
tab.editBox.viewport().update()
self.parent.updateStyleSheet()
self.parent.tabWidget.setTabBarAutoHide(globalSettings.tabBarAutoHide)
self.parent.toolBar.setVisible(not globalSettings.hideToolBar)
self.parent.editBar.setVisible(not globalSettings.hideToolBar)
self.parent.initDirectoryTree(globalSettings.showDirectoryTree, globalSettings.directoryPath)
def openLink(self, link):
QDesktopServices.openUrl(QUrl.fromLocalFile(link))
|
from kalliope.core.Models.settings.SettingsEntry import SettingsEntry
class Options(SettingsEntry):
"""
This class represents an Options element with its parameters and values.
.. note:: must be defined in the settings.yml
"""
def __init__(self,
recognizer_multiplier=1,
recognizer_energy_ratio=1.5,
recognizer_recording_timeout=15.0,
recognizer_recording_timeout_with_silence=3.0,
deaf=None,
mute=None):
super(Options, self).__init__(name="Options")
self.deaf = deaf
self.mute = mute
self.recognizer_multiplier = recognizer_multiplier
self.recognizer_energy_ratio = recognizer_energy_ratio
self.recognizer_recording_timeout = recognizer_recording_timeout
self.recognizer_recording_timeout_with_silence = recognizer_recording_timeout_with_silence
def __str__(self):
return str(self.serialize())
def serialize(self):
return {
'name': self.name,
'recognizer_multiplier': self.recognizer_multiplier,
'recognizer_energy_ratio': self.recognizer_energy_ratio,
'recognizer_recording_timeout': self.recognizer_recording_timeout,
'recognizer_recording_timeout_with_silence': self.recognizer_recording_timeout_with_silence,
'deaf': self.deaf,
'mute': self.mute
}
def __eq__(self, other):
"""
Compare two Options objects.
:param other: the Options to compare against
:return: True if both Options are equal, False otherwise
"""
return self.__dict__ == other.__dict__
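# Illustrative sketch (not part of the model): how an "options" block in
# settings.yml could map onto this class, using the constructor defaults
# above. The exact YAML key names are assumptions drawn from the parameters.
#
#   options:
#     deaf: False
#     mute: False
#     recognizer_multiplier: 1
#     recognizer_energy_ratio: 1.5
#     recognizer_recording_timeout: 15.0
#     recognizer_recording_timeout_with_silence: 3.0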
|
from numato_gpio import NumatoGpioError
class NumatoModuleMock:
"""Mockup for the numato_gpio module."""
NumatoGpioError = NumatoGpioError
def __init__(self):
"""Initialize the numato_gpio module mockup class."""
self.devices = {}
class NumatoDeviceMock:
"""Mockup for the numato_gpio.NumatoUsbGpio class."""
def __init__(self, device):
"""Initialize numato device mockup."""
self.device = device
self.callbacks = {}
self.ports = set()
self.values = {}
def setup(self, port, direction):
"""Mockup for setup."""
self.ports.add(port)
self.values[port] = None
def write(self, port, value):
"""Mockup for write."""
self.values[port] = value
def read(self, port):
"""Mockup for read."""
return 1
def adc_read(self, port):
"""Mockup for adc_read."""
return 1023
def add_event_detect(self, port, callback, direction):
"""Mockup for add_event_detect."""
self.callbacks[port] = callback
def notify(self, enable):
"""Mockup for notify."""
def mockup_inject_notification(self, port, value):
"""Make the mockup execute a notification callback."""
self.callbacks[port](port, value)
OUT = 0
IN = 1
RISING = 1
FALLING = 2
BOTH = 3
def discover(self, _=None):
"""Mockup for the numato device discovery.
Ignore the device list argument; the mock discovers /dev/ttyACM0.
"""
self.devices[0] = NumatoModuleMock.NumatoDeviceMock("/dev/ttyACM0")
def cleanup(self):
"""Mockup for the numato device cleanup."""
self.devices.clear()
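# Hedged usage sketch (not part of the mock): how a test could drive the mock
# to simulate an edge notification. Port numbers and the callback are
# illustrative only; only names defined above are used.
def _example_mock_notification():
    module = NumatoModuleMock()
    module.discover()
    device = module.devices[0]
    events = []
    device.setup(2, module.IN)
    device.add_event_detect(2, lambda port, value: events.append((port, value)), module.RISING)
    # Fire the registered callback as if the hardware had reported an edge.
    device.mockup_inject_notification(2, 1)
    assert events == [(2, 1)]
    return events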
|
import logging
from homeassistant.helpers.entity import ToggleEntity
from .const import VS_FANS, VS_SWITCHES
_LOGGER = logging.getLogger(__name__)
async def async_process_devices(hass, manager):
"""Assign devices to proper component."""
devices = {}
devices[VS_SWITCHES] = []
devices[VS_FANS] = []
await hass.async_add_executor_job(manager.update)
if manager.fans:
devices[VS_FANS].extend(manager.fans)
_LOGGER.info("%d VeSync fans found", len(manager.fans))
if manager.outlets:
devices[VS_SWITCHES].extend(manager.outlets)
_LOGGER.info("%d VeSync outlets found", len(manager.outlets))
if manager.switches:
for switch in manager.switches:
if not switch.is_dimmable():
devices[VS_SWITCHES].append(switch)
_LOGGER.info("%d VeSync standard switches found", len(manager.switches))
return devices
class VeSyncDevice(ToggleEntity):
"""Base class for VeSync Device Representations."""
def __init__(self, device):
"""Initialize the VeSync device."""
self.device = device
@property
def unique_id(self):
"""Return the ID of this device."""
if isinstance(self.device.sub_device_no, int):
return "{}{}".format(self.device.cid, str(self.device.sub_device_no))
return self.device.cid
@property
def name(self):
"""Return the name of the device."""
return self.device.device_name
@property
def is_on(self):
"""Return True if device is on."""
return self.device.device_status == "on"
@property
def available(self) -> bool:
"""Return True if device is available."""
return self.device.connection_status == "online"
def turn_off(self, **kwargs):
"""Turn the device off."""
self.device.turn_off()
def update(self):
"""Update vesync device."""
self.device.update()
|
from homeassistant.components.image_processing import (
ATTR_AGE,
ATTR_CONFIDENCE,
ATTR_GENDER,
ATTR_NAME,
ImageProcessingFaceEntity,
)
from homeassistant.components.openalpr_local.image_processing import (
ImageProcessingAlprEntity,
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the demo image processing platform."""
add_entities(
[
DemoImageProcessingAlpr("camera.demo_camera", "Demo Alpr"),
DemoImageProcessingFace("camera.demo_camera", "Demo Face"),
]
)
class DemoImageProcessingAlpr(ImageProcessingAlprEntity):
"""Demo ALPR image processing entity."""
def __init__(self, camera_entity, name):
"""Initialize demo ALPR image processing entity."""
super().__init__()
self._name = name
self._camera = camera_entity
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def confidence(self):
"""Return minimum confidence for send events."""
return 80
@property
def name(self):
"""Return the name of the entity."""
return self._name
def process_image(self, image):
"""Process image."""
demo_data = {
"AC3829": 98.3,
"BE392034": 95.5,
"CD02394": 93.4,
"DF923043": 90.8,
}
self.process_plates(demo_data, 1)
class DemoImageProcessingFace(ImageProcessingFaceEntity):
"""Demo face identify image processing entity."""
def __init__(self, camera_entity, name):
"""Initialize demo face image processing entity."""
super().__init__()
self._name = name
self._camera = camera_entity
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def confidence(self):
"""Return minimum confidence for send events."""
return 80
@property
def name(self):
"""Return the name of the entity."""
return self._name
def process_image(self, image):
"""Process image."""
demo_data = [
{
ATTR_CONFIDENCE: 98.34,
ATTR_NAME: "Hans",
ATTR_AGE: 16.0,
ATTR_GENDER: "male",
},
{ATTR_NAME: "Helena", ATTR_AGE: 28.0, ATTR_GENDER: "female"},
{ATTR_CONFIDENCE: 62.53, ATTR_NAME: "Luna"},
]
self.process_faces(demo_data, 4)
|
from hangups import pblite
from hangups.test import test_pblite_pb2
###############################################################################
# pblite.decode
###############################################################################
def test_decode():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
1,
[3, 4],
'foo',
['bar', 'baz'],
1,
[2, 3],
[1],
[[2], [3]],
'AA==',
['AAE=', 'AAEC'],
])
assert message == test_pblite_pb2.TestMessage(
test_int=1,
test_repeated_int=[3, 4],
test_string='foo',
test_repeated_string=['bar', 'baz'],
test_enum=test_pblite_pb2.TestMessage.TEST_1,
test_repeated_enum=[test_pblite_pb2.TestMessage.TEST_2,
test_pblite_pb2.TestMessage.TEST_3],
test_embedded_message=test_pblite_pb2.TestMessage.EmbeddedMessage(
test_embedded_int=1,
),
test_repeated_embedded_message=[
test_pblite_pb2.TestMessage.EmbeddedMessage(
test_embedded_int=2,
),
test_pblite_pb2.TestMessage.EmbeddedMessage(
test_embedded_int=3,
),
],
test_bytes=b'\x00',
test_repeated_bytes=[b'\x00\x01', b'\x00\x01\x02'],
)
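# Illustrative note (not part of the tests): pblite serializes a message as a
# plain list in which position i-1 carries protobuf field number i, with bytes
# fields transported as base64 strings ('AA==' -> b'\x00'). The list passed to
# pblite.decode above maps onto TestMessage fields 1 through 10 in that order.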
def test_decode_unserialized_fields():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
None,
'foo',
])
assert message == test_pblite_pb2.TestMessage(
test_string='foo',
)
def test_decode_unknown_field():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [None] * 99 + [1])
assert message == test_pblite_pb2.TestMessage()
def test_decode_unknown_enum():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
None,
None,
None,
99,
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_unknown_repeated_enum():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
None,
None,
None,
None,
[1, 99],
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_scalar_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
'foo',
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_repeated_scalar_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
[1, 'foo', 2]
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_message_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
None,
None,
None,
None,
None,
1,
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_repeated_message_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None,
None,
None,
None,
None,
None,
None,
[1],
])
assert message == test_pblite_pb2.TestMessage(
test_repeated_embedded_message=[
test_pblite_pb2.TestMessage.EmbeddedMessage(),
],
)
def test_decode_bytes_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None, None, None, None, None, None, None, None, 1,
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_bytes_invalid_value():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None, None, None, None, None, None, None, None, 'A?==',
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_repeated_bytes_wrong_type():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None, None, None, None, None, None, None, None, None, [1],
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_repeated_bytes_invalid_value():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
None, None, None, None, None, None, None, None, None, ['A?=='],
])
assert message == test_pblite_pb2.TestMessage()
def test_decode_ignore_first_item():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
'ignored',
1,
[3, 4],
], ignore_first_item=True)
assert message == test_pblite_pb2.TestMessage(
test_int=1,
test_repeated_int=[3, 4],
)
def test_decode_dict():
message = test_pblite_pb2.TestMessage()
pblite.decode(message, [
1,
{
'7': [2],
},
])
assert message == test_pblite_pb2.TestMessage(
test_int=1,
test_embedded_message=test_pblite_pb2.TestMessage.EmbeddedMessage(
test_embedded_int=2,
),
)
|
import logging
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import (
CONF_ABOVE,
CONF_ATTRIBUTE,
CONF_BELOW,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import CALLBACK_TYPE, HassJob, callback
from homeassistant.helpers import condition, config_validation as cv, template
from homeassistant.helpers.event import (
async_track_same_state,
async_track_state_change_event,
)
# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs
def validate_above_below(value):
"""Validate that above and below can co-exist."""
above = value.get(CONF_ABOVE)
below = value.get(CONF_BELOW)
if above is None or below is None:
return value
if above > below:
raise vol.Invalid(
f"A value can never be above {above} and below {below} at the same time. You probably want two different triggers.",
)
return value
TRIGGER_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(CONF_PLATFORM): "numeric_state",
vol.Required(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_BELOW): vol.Coerce(float),
vol.Optional(CONF_ABOVE): vol.Coerce(float),
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_FOR): cv.positive_time_period_template,
vol.Optional(CONF_ATTRIBUTE): cv.match_all,
}
),
cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
validate_above_below,
)
_LOGGER = logging.getLogger(__name__)
async def async_attach_trigger(
hass, config, action, automation_info, *, platform_type="numeric_state"
) -> CALLBACK_TYPE:
"""Listen for state changes based on configuration."""
entity_id = config.get(CONF_ENTITY_ID)
below = config.get(CONF_BELOW)
above = config.get(CONF_ABOVE)
time_delta = config.get(CONF_FOR)
template.attach(hass, time_delta)
value_template = config.get(CONF_VALUE_TEMPLATE)
unsub_track_same = {}
entities_triggered = set()
period: dict = {}
attribute = config.get(CONF_ATTRIBUTE)
job = HassJob(action)
if value_template is not None:
value_template.hass = hass
@callback
def check_numeric_state(entity, from_s, to_s):
"""Return True if criteria are now met."""
if to_s is None:
return False
variables = {
"trigger": {
"platform": "numeric_state",
"entity_id": entity,
"below": below,
"above": above,
"attribute": attribute,
}
}
return condition.async_numeric_state(
hass, to_s, below, above, value_template, variables, attribute
)
@callback
def state_automation_listener(event):
"""Listen for state changes and calls action."""
entity = event.data.get("entity_id")
from_s = event.data.get("old_state")
to_s = event.data.get("new_state")
@callback
def call_action():
"""Call action with right context."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": platform_type,
"entity_id": entity,
"below": below,
"above": above,
"from_state": from_s,
"to_state": to_s,
"for": time_delta if not time_delta else period[entity],
"description": f"numeric state of {entity}",
}
},
to_s.context,
)
matching = check_numeric_state(entity, from_s, to_s)
if not matching:
entities_triggered.discard(entity)
elif entity not in entities_triggered:
entities_triggered.add(entity)
if time_delta:
variables = {
"trigger": {
"platform": "numeric_state",
"entity_id": entity,
"below": below,
"above": above,
}
}
try:
period[entity] = cv.positive_time_period(
template.render_complex(time_delta, variables)
)
except (exceptions.TemplateError, vol.Invalid) as ex:
_LOGGER.error(
"Error rendering '%s' for template: %s",
automation_info["name"],
ex,
)
entities_triggered.discard(entity)
return
unsub_track_same[entity] = async_track_same_state(
hass,
period[entity],
call_action,
entity_ids=entity,
async_check_same_func=check_numeric_state,
)
else:
call_action()
unsub = async_track_state_change_event(hass, entity_id, state_automation_listener)
@callback
def async_remove():
"""Remove state listeners async."""
unsub()
for async_remove in unsub_track_same.values():
async_remove()
unsub_track_same.clear()
return async_remove
|
import platform
import subprocess
import pytest
import homeassistant.components.switch as switch
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import async_mock_service
@pytest.fixture(autouse=True)
def mock_send_magic_packet():
"""Mock magic packet."""
with patch("wakeonlan.send_magic_packet") as mock_send:
yield mock_send
async def test_valid_hostname(hass):
"""Test with valid hostname."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": "00-01-02-03-04-05",
"host": "validhostname",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_ON == state.state
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_ON == state.state
async def test_valid_hostname_windows(hass):
"""Test with valid hostname on windows."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": "00-01-02-03-04-05",
"host": "validhostname",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0), patch.object(
platform, "system", return_value="Windows"
):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_ON == state.state
async def test_broadcast_config_ip_and_port(hass, mock_send_magic_packet):
"""Test with broadcast address and broadcast port config."""
mac = "00-01-02-03-04-05"
broadcast_address = "255.255.255.255"
port = 999
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": mac,
"broadcast_address": broadcast_address,
"broadcast_port": port,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
mock_send_magic_packet.assert_called_with(
mac, ip_address=broadcast_address, port=port
)
async def test_broadcast_config_ip(hass, mock_send_magic_packet):
"""Test with only broadcast address."""
mac = "00-01-02-03-04-05"
broadcast_address = "255.255.255.255"
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": mac,
"broadcast_address": broadcast_address,
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
mock_send_magic_packet.assert_called_with(mac, ip_address=broadcast_address)
async def test_broadcast_config_port(hass, mock_send_magic_packet):
"""Test with only broadcast port config."""
mac = "00-01-02-03-04-05"
port = 999
assert await async_setup_component(
hass,
switch.DOMAIN,
{"switch": {"platform": "wake_on_lan", "mac": mac, "broadcast_port": port}},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
mock_send_magic_packet.assert_called_with(mac, port=port)
async def test_off_script(hass):
"""Test with turn off script."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": "00-01-02-03-04-05",
"host": "validhostname",
"turn_off": {"service": "shell_command.turn_off_target"},
}
},
)
await hass.async_block_till_done()
calls = async_mock_service(hass, "shell_command", "turn_off_target")
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=0):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_ON == state.state
assert len(calls) == 0
with patch.object(subprocess, "call", return_value=2):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
assert len(calls) == 1
async def test_invalid_hostname_windows(hass):
"""Test with invalid hostname on windows."""
assert await async_setup_component(
hass,
switch.DOMAIN,
{
"switch": {
"platform": "wake_on_lan",
"mac": "00-01-02-03-04-05",
"host": "invalidhostname",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
with patch.object(subprocess, "call", return_value=2):
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.wake_on_lan"},
blocking=True,
)
state = hass.states.get("switch.wake_on_lan")
assert STATE_OFF == state.state
|
from pytest import raises
from paasta_tools.secret_providers import SecretProvider
def test_secret_provider():
SecretProvider(
soa_dir="/nail/blah",
service_name="universe",
cluster_names=["mesosstage"],
some="setting",
)
def test_decrypt_environment():
with raises(NotImplementedError):
SecretProvider(
soa_dir="/nail/blah", service_name="universe", cluster_names=["mesosstage"]
).decrypt_environment(environment={}, a="kwarg")
def test_write_secret():
with raises(NotImplementedError):
SecretProvider(
soa_dir="/nail/blah", service_name="universe", cluster_names=["mesosstage"]
).write_secret(
action="update", secret_name="whatididlastsummer", plaintext=b"noybw"
)
def test_decrypt_secret():
with raises(NotImplementedError):
SecretProvider(
soa_dir="/nail/blah", service_name="universe", cluster_names=["mesosstage"]
).decrypt_secret(secret_name="whatididlastsummer")
|
import abc
import unittest
from absl import flags
from absl.testing import flagsaver
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import context
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.aws import aws_disk
from perfkitbenchmarker.providers.aws import aws_virtual_machine
from perfkitbenchmarker.providers.aws import util as aws_util
from perfkitbenchmarker.providers.azure import azure_disk
from perfkitbenchmarker.providers.azure import azure_virtual_machine
from perfkitbenchmarker.providers.gcp import gce_disk
from perfkitbenchmarker.providers.gcp import gce_virtual_machine
from perfkitbenchmarker.providers.gcp import util
from tests import pkb_common_test_case # pylint:disable=unused-import
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'name'
_BENCHMARK_UID = 'uid'
_COMPONENT = 'test_component'
class ScratchDiskTestMixin(object):
"""Sets up and tears down some of the mocks needed to test scratch disks."""
@abc.abstractmethod
def _PatchCloudSpecific(self):
"""Adds any cloud specific patches to self.patches."""
pass
@abc.abstractmethod
def _CreateVm(self):
"""Creates and returns a VM object of the correct type for the cloud."""
pass
@abc.abstractmethod
def _GetDiskClass(self):
"""Returns the disk class for the given cloud."""
pass
def setUp(self):
self.saved_flag_values = flagsaver.save_flag_values()
self.patches = []
vm_prefix = linux_virtual_machine.__name__ + '.BaseLinuxMixin'
self.patches.append(
mock.patch(vm_prefix + '.FormatDisk'))
self.patches.append(
mock.patch(vm_prefix + '.MountDisk'))
self.patches.append(
mock.patch(
util.__name__ + '.GetDefaultProject', side_effect='test_project'))
# Patch subprocess.Popen to make sure we don't issue any commands to spin up
# resources.
self.patches.append(mock.patch('subprocess.Popen'))
self.patches.append(
mock.patch(vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir'))
self._PatchCloudSpecific()
for p in self.patches:
p.start()
self.addCleanup(p.stop)
# We need the disk class mocks to return new mocks each time they are
# called. Otherwise all "disks" instantiated will be the same object.
self._GetDiskClass().side_effect = (
lambda *args, **kwargs: mock.MagicMock(is_striped=False))
# VM Creation depends on there being a BenchmarkSpec.
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
_BENCHMARK_NAME, flag_values=FLAGS, vm_groups={})
self.spec = benchmark_spec.BenchmarkSpec(mock.MagicMock(), config_spec,
_BENCHMARK_UID)
self.addCleanup(context.SetThreadBenchmarkSpec, None)
self.addCleanup(flagsaver.restore_flag_values, self.saved_flag_values)
def testScratchDisks(self):
"""Test for creating and deleting scratch disks.
This test creates two scratch disks on a vm and deletes them, ensuring
that the proper calls to create, format, mount, and delete are made.
"""
vm = self._CreateVm()
disk_spec = disk.BaseDiskSpec(_COMPONENT, mount_point='/mountpoint0')
vm.CreateScratchDisk(disk_spec)
assert len(vm.scratch_disks) == 1, 'Disk not added to scratch disks.'
scratch_disk = vm.scratch_disks[0]
scratch_disk.Create.assert_called_once_with()
vm.FormatDisk.assert_called_once_with(scratch_disk.GetDevicePath(), None)
vm.MountDisk.assert_called_once_with(
scratch_disk.GetDevicePath(), '/mountpoint0',
None, scratch_disk.mount_options, scratch_disk.fstab_options)
disk_spec = disk.BaseDiskSpec(_COMPONENT, mount_point='/mountpoint1')
vm.CreateScratchDisk(disk_spec)
assert len(vm.scratch_disks) == 2, 'Disk not added to scratch disks.'
# Check that these execute without exceptions. The return value
# is a MagicMock, not a string, so we can't compare to expected results.
vm.GetScratchDir()
vm.GetScratchDir(0)
vm.GetScratchDir(1)
with self.assertRaises(errors.Error):
vm.GetScratchDir(2)
scratch_disk = vm.scratch_disks[1]
scratch_disk.Create.assert_called_once_with()
vm.FormatDisk.assert_called_with(scratch_disk.GetDevicePath(), None)
vm.MountDisk.assert_called_with(
scratch_disk.GetDevicePath(), '/mountpoint1',
None, scratch_disk.mount_options, scratch_disk.fstab_options)
vm.DeleteScratchDisks()
vm.scratch_disks[0].Delete.assert_called_once_with()
vm.scratch_disks[1].Delete.assert_called_once_with()
class AzureScratchDiskTest(ScratchDiskTestMixin, unittest.TestCase):
def _PatchCloudSpecific(self):
self.patches.append(mock.patch(azure_disk.__name__ + '.AzureDisk'))
def _CreateVm(self):
vm_spec = azure_virtual_machine.AzureVmSpec(
'test_vm_spec.Azure', zone='eastus2', machine_type='test_machine_type')
return azure_virtual_machine.Ubuntu1604BasedAzureVirtualMachine(vm_spec)
def _GetDiskClass(self):
return azure_disk.AzureDisk
class GceScratchDiskTest(ScratchDiskTestMixin, unittest.TestCase):
def _PatchCloudSpecific(self):
self.patches.append(mock.patch(gce_disk.__name__ + '.GceDisk'))
def _CreateVm(self):
vm_spec = gce_virtual_machine.GceVmSpec('test_vm_spec.GCP',
machine_type='test_machine_type')
return gce_virtual_machine.Ubuntu1604BasedGceVirtualMachine(vm_spec)
def _GetDiskClass(self):
return gce_disk.GceDisk
class AwsScratchDiskTest(ScratchDiskTestMixin, unittest.TestCase):
def _PatchCloudSpecific(self):
self.patches.append(mock.patch(aws_disk.__name__ + '.AwsDisk'))
self.patches.append(mock.patch(aws_util.__name__ + '.AddDefaultTags'))
# In Python3 the mocking of subprocess.Popen in setup() is problematic for
# platform.system(). It is called by RemoteCommand() in
# _GetNvmeBootIndex() so we'll mock that instead.
self.patches.append(mock.patch(
aws_virtual_machine.__name__ + '.AwsVirtualMachine._GetNvmeBootIndex'))
def _CreateVm(self):
vm_spec = aws_virtual_machine.AwsVmSpec('test_vm_spec.AWS',
zone='us-east-1a',
machine_type='test_machine_type')
return aws_virtual_machine.Ubuntu1604BasedAwsVirtualMachine(vm_spec)
def _GetDiskClass(self):
return aws_disk.AwsDisk
class GceDeviceIdTest(unittest.TestCase):
def testDeviceId(self):
with mock.patch(disk.__name__ + '.FLAGS') as disk_flags:
disk_flags.os_type = 'windows'
disk_spec = disk.BaseDiskSpec(_COMPONENT, disk_number=1, disk_size=2,
disk_type=gce_disk.PD_STANDARD)
disk_obj = gce_disk.GceDisk(disk_spec, 'name', 'zone', 'project')
self.assertEqual(disk_obj.GetDeviceId(), r'\\.\PHYSICALDRIVE1')
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from absl import flags
_BUILT_IN_FLAGS = frozenset({
'help',
'helpshort',
'helpfull',
'helpxml',
'flagfile',
'undefok',
})
class ArgumentParser(argparse.ArgumentParser):
"""Custom ArgumentParser class to support special absl flags."""
def __init__(self, **kwargs):
"""Initializes ArgumentParser.
Args:
**kwargs: same as argparse.ArgumentParser, except:
1. It also accepts `inherited_absl_flags`: the absl flags to inherit.
The default is the global absl.flags.FLAGS instance. Pass None to
ignore absl flags.
2. The `prefix_chars` argument must be the default value '-'.
Raises:
ValueError: Raised when prefix_chars is not '-'.
"""
prefix_chars = kwargs.get('prefix_chars', '-')
if prefix_chars != '-':
raise ValueError(
'argparse_flags.ArgumentParser only supports "-" as the prefix '
'character, found "{}".'.format(prefix_chars))
# Remove inherited_absl_flags before calling super.
self._inherited_absl_flags = kwargs.pop('inherited_absl_flags', flags.FLAGS)
# Now call super to initialize argparse.ArgumentParser before calling
# add_argument in _define_absl_flags.
super(ArgumentParser, self).__init__(**kwargs)
if self.add_help:
# -h and --help are defined in super.
# Also add the --helpshort and --helpfull flags.
self.add_argument(
# Action 'help' defines a similar flag to -h/--help.
'--helpshort', action='help',
default=argparse.SUPPRESS, help=argparse.SUPPRESS)
self.add_argument(
'--helpfull', action=_HelpFullAction,
default=argparse.SUPPRESS, help='show full help message and exit')
if self._inherited_absl_flags:
self.add_argument('--undefok', help=argparse.SUPPRESS)
self._define_absl_flags(self._inherited_absl_flags)
def parse_known_args(self, args=None, namespace=None):
if args is None:
args = sys.argv[1:]
if self._inherited_absl_flags:
# Handle --flagfile.
# Explicitly specify force_gnu=True, since argparse behaves like
# gnu_getopt: flags can be specified after positional arguments.
args = self._inherited_absl_flags.read_flags_from_files(
args, force_gnu=True)
undefok_missing = object()
undefok = getattr(namespace, 'undefok', undefok_missing)
namespace, args = super(ArgumentParser, self).parse_known_args(
args, namespace)
# For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where
# sub-parsers don't preserve existing namespace attributes.
# Restore the undefok attribute if a sub-parser dropped it.
if undefok is not undefok_missing:
namespace.undefok = undefok
if self._inherited_absl_flags:
# Handle --undefok. At this point, `args` only contains unknown flags,
# so it won't strip defined flags that are also specified with --undefok.
# For Python <= 2.7.8: https://bugs.python.org/issue9351, a bug where
# sub-parsers don't preserve existing namespace attributes. The undefok
# attribute might not exist because a subparser dropped it.
if hasattr(namespace, 'undefok'):
args = _strip_undefok_args(namespace.undefok, args)
# absl flags are not exposed in the Namespace object. See Namespace:
# https://docs.python.org/3/library/argparse.html#argparse.Namespace.
del namespace.undefok
self._inherited_absl_flags.mark_as_parsed()
try:
self._inherited_absl_flags.validate_all_flags()
except flags.IllegalFlagValueError as e:
self.error(str(e))
return namespace, args
def _define_absl_flags(self, absl_flags):
"""Defines flags from absl_flags."""
key_flags = set(absl_flags.get_key_flags_for_module(sys.argv[0]))
for name in absl_flags:
if name in _BUILT_IN_FLAGS:
# Do not inherit built-in flags.
continue
flag_instance = absl_flags[name]
      # Each flag with a short_name appears in FLAGS twice, so only define
# when the dictionary key is equal to the regular name.
if name == flag_instance.name:
# Suppress the flag in the help short message if it's not a main
# module's key flag.
suppress = flag_instance not in key_flags
self._define_absl_flag(flag_instance, suppress)
def _define_absl_flag(self, flag_instance, suppress):
"""Defines a flag from the flag_instance."""
flag_name = flag_instance.name
short_name = flag_instance.short_name
argument_names = ['--' + flag_name]
if short_name:
argument_names.insert(0, '-' + short_name)
if suppress:
helptext = argparse.SUPPRESS
else:
# argparse help string uses %-formatting. Escape the literal %'s.
helptext = flag_instance.help.replace('%', '%%')
if flag_instance.boolean:
# Only add the `no` form to the long name.
argument_names.append('--no' + flag_name)
self.add_argument(
*argument_names, action=_BooleanFlagAction, help=helptext,
metavar=flag_instance.name.upper(),
flag_instance=flag_instance)
else:
self.add_argument(
*argument_names, action=_FlagAction, help=helptext,
metavar=flag_instance.name.upper(),
flag_instance=flag_instance)
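# A minimal usage sketch (assumptions: a program that defines its own absl
# flags and runs through absl.app; the names below are illustrative, not part
# of this module):
#
#     from absl import app, flags
#     from absl.flags import argparse_flags
#
#     flags.DEFINE_string('echo', 'hi', 'Text to echo.')
#
#     def parse_flags(argv):
#         parser = argparse_flags.ArgumentParser(description='demo')
#         parser.add_argument('--count', type=int, default=1)
#         return parser.parse_args(argv[1:])
#
#     def main(args):
#         print(flags.FLAGS.echo * args.count)
#
#     app.run(main, flags_parser=parse_flags)
#
# Absl flags (here --echo) are parsed into flags.FLAGS, while plain argparse
# arguments (here --count) end up on the returned namespace.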
class _FlagAction(argparse.Action):
"""Action class for Abseil non-boolean flags."""
def __init__(self, option_strings, dest, help, metavar, flag_instance): # pylint: disable=redefined-builtin
"""Initializes _FlagAction.
Args:
option_strings: See argparse.Action.
dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS.
help: See argparse.Action.
metavar: See argparse.Action.
flag_instance: absl.flags.Flag, the absl flag instance.
"""
del dest
self._flag_instance = flag_instance
super(_FlagAction, self).__init__(
option_strings=option_strings,
dest=argparse.SUPPRESS,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
"""See https://docs.python.org/3/library/argparse.html#action-classes."""
self._flag_instance.parse(values)
self._flag_instance.using_default_value = False
class _BooleanFlagAction(argparse.Action):
"""Action class for Abseil boolean flags."""
def __init__(self, option_strings, dest, help, metavar, flag_instance): # pylint: disable=redefined-builtin
"""Initializes _BooleanFlagAction.
Args:
option_strings: See argparse.Action.
dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS.
help: See argparse.Action.
metavar: See argparse.Action.
flag_instance: absl.flags.Flag, the absl flag instance.
"""
del dest
self._flag_instance = flag_instance
flag_names = [self._flag_instance.name]
if self._flag_instance.short_name:
flag_names.append(self._flag_instance.short_name)
self._flag_names = frozenset(flag_names)
super(_BooleanFlagAction, self).__init__(
option_strings=option_strings,
dest=argparse.SUPPRESS,
nargs=0, # Does not accept values, only `--bool` or `--nobool`.
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
"""See https://docs.python.org/3/library/argparse.html#action-classes."""
if not isinstance(values, list) or values:
raise ValueError('values must be an empty list.')
if option_string.startswith('--'):
option = option_string[2:]
else:
option = option_string[1:]
if option in self._flag_names:
self._flag_instance.parse('true')
else:
if not option.startswith('no') or option[2:] not in self._flag_names:
raise ValueError('invalid option_string: ' + option_string)
self._flag_instance.parse('false')
self._flag_instance.using_default_value = False
class _HelpFullAction(argparse.Action):
"""Action class for --helpfull flag."""
def __init__(self, option_strings, dest, default, help): # pylint: disable=redefined-builtin
"""Initializes _HelpFullAction.
Args:
option_strings: See argparse.Action.
dest: Ignored. The flag is always defined with dest=argparse.SUPPRESS.
default: Ignored.
help: See argparse.Action.
"""
del dest, default
super(_HelpFullAction, self).__init__(
option_strings=option_strings,
dest=argparse.SUPPRESS,
default=argparse.SUPPRESS,
nargs=0,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
"""See https://docs.python.org/3/library/argparse.html#action-classes."""
# This only prints flags when help is not argparse.SUPPRESS.
# It includes user defined argparse flags, as well as main module's
# key absl flags. Other absl flags use argparse.SUPPRESS, so they aren't
# printed here.
parser.print_help()
absl_flags = parser._inherited_absl_flags # pylint: disable=protected-access
if absl_flags:
modules = sorted(absl_flags.flags_by_module_dict())
main_module = sys.argv[0]
if main_module in modules:
# The main module flags are already printed in parser.print_help().
modules.remove(main_module)
print(absl_flags._get_help_for_modules( # pylint: disable=protected-access
modules, prefix='', include_special_flags=True))
parser.exit()
def _strip_undefok_args(undefok, args):
"""Returns a new list of args after removing flags in --undefok."""
if undefok:
undefok_names = set(name.strip() for name in undefok.split(','))
undefok_names |= set('no' + name for name in undefok_names)
# Remove undefok flags.
args = [arg for arg in args if not _is_undefok(arg, undefok_names)]
return args
def _is_undefok(arg, undefok_names):
"""Returns whether we can ignore arg based on a set of undefok flag names."""
if not arg.startswith('-'):
return False
if arg.startswith('--'):
arg_without_dash = arg[2:]
else:
arg_without_dash = arg[1:]
if '=' in arg_without_dash:
name, _ = arg_without_dash.split('=', 1)
else:
name = arg_without_dash
if name in undefok_names:
return True
return False
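# Example behavior (illustrative comment only): with undefok set to "bar,baz",
# both the plain and the "no"-prefixed spellings are stripped from the
# leftover args, while genuinely unknown flags are kept:
#
#     _strip_undefok_args('bar,baz', ['--bar=1', '--nobaz', '--qux'])
#     # -> ['--qux']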
|
import aiohttp
from pyjuicenet import TokenError
from homeassistant import config_entries, setup
from homeassistant.components.juicenet.const import DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN
from tests.async_mock import MagicMock, patch
def _mock_juicenet_return_value(get_devices=None):
juicenet_mock = MagicMock()
type(juicenet_mock).get_devices = MagicMock(return_value=get_devices)
return juicenet_mock
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.juicenet.config_flow.Api.get_devices",
return_value=MagicMock(),
), patch(
"homeassistant.components.juicenet.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.juicenet.async_setup_entry", return_value=True
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"}
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "JuiceNet"
assert result2["data"] == {CONF_ACCESS_TOKEN: "access_token"}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.juicenet.config_flow.Api.get_devices",
side_effect=TokenError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"}
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.juicenet.config_flow.Api.get_devices",
side_effect=aiohttp.ClientError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"}
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_catch_unknown_errors(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.juicenet.config_flow.Api.get_devices",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_ACCESS_TOKEN: "access_token"}
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
async def test_import(hass):
"""Test that import works as expected."""
with patch(
"homeassistant.components.juicenet.config_flow.Api.get_devices",
return_value=MagicMock(),
), patch(
"homeassistant.components.juicenet.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.juicenet.async_setup_entry", return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_ACCESS_TOKEN: "access_token"},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "JuiceNet"
assert result["data"] == {CONF_ACCESS_TOKEN: "access_token"}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
|
import flatbuffers
class Challenge(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsChallenge(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Challenge()
x.Init(buf, n + offset)
return x
# Challenge
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Challenge
def Method(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
return 0
# Challenge
def Extra(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .Map import Map
obj = Map()
obj.Init(self._tab.Bytes, x)
return obj
return None
def ChallengeStart(builder): builder.StartObject(2)
def ChallengeAddMethod(builder, method): builder.PrependUint8Slot(0, method, 0)
def ChallengeAddExtra(builder, extra): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(extra), 0)
def ChallengeEnd(builder): return builder.EndObject()
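# A minimal build/read round trip, as a sketch (not generated code; values are
# illustrative):
#
#     builder = flatbuffers.Builder(0)
#     ChallengeStart(builder)
#     ChallengeAddMethod(builder, 1)   # leave Extra unset
#     challenge = ChallengeEnd(builder)
#     builder.Finish(challenge)
#     buf = builder.Output()
#
#     parsed = Challenge.GetRootAsChallenge(buf, 0)
#     assert parsed.Method() == 1
#     assert parsed.Extra() is None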
|
import os
from functools import lru_cache
from io import BytesIO
from tempfile import NamedTemporaryFile
import cairo
import gi
from django.conf import settings
from django.core.cache import cache
from django.utils.html import escape
from PIL import ImageFont
from weblate.utils.checks import weblate_check
from weblate.utils.data import data_dir
gi.require_version("PangoCairo", "1.0")
gi.require_version("Pango", "1.0")
from gi.repository import Pango, PangoCairo # noqa:E402,I001 isort:skip
FONTCONFIG_CONFIG = """<?xml version="1.0"?>
<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
<fontconfig>
<cachedir>{}</cachedir>
<dir>{}</dir>
<dir>{}</dir>
<dir>{}</dir>
<dir>{}</dir>
<config>
<rescan>
<int>30</int>
</rescan>
</config>
<!--
Synthetic emboldening for fonts that do not have bold face available
-->
<match target="font">
<test name="weight" compare="less_eq">
<const>medium</const>
</test>
<test target="pattern" name="weight" compare="more_eq">
<const>bold</const>
</test>
<edit name="embolden" mode="assign">
<bool>true</bool>
</edit>
<edit name="weight" mode="assign">
<const>bold</const>
</edit>
</match>
</fontconfig>
"""
FONT_WEIGHTS = {
"normal": Pango.Weight.NORMAL,
"light": Pango.Weight.LIGHT,
"bold": Pango.Weight.BOLD,
"": None,
}
def configure_fontconfig():
"""Configures fontconfig to use custom configuration."""
if getattr(configure_fontconfig, "is_configured", False):
return
fonts_dir = data_dir("fonts")
config_name = os.path.join(fonts_dir, "fonts.conf")
if not os.path.exists(fonts_dir):
os.makedirs(fonts_dir)
# Generate the configuration
with open(config_name, "w") as handle:
handle.write(
FONTCONFIG_CONFIG.format(
data_dir("cache", "fonts"),
fonts_dir,
os.path.join(settings.STATIC_ROOT, "font-source", "TTF"),
os.path.join(settings.STATIC_ROOT, "font-dejavu"),
os.path.join(settings.STATIC_ROOT, "font-droid"),
)
)
# Inject into environment
os.environ["FONTCONFIG_FILE"] = config_name
configure_fontconfig.is_configured = True
def get_font_weight(weight):
return FONT_WEIGHTS[weight]
@lru_cache(maxsize=512)
def render_size(font, weight, size, spacing, text, width=1000, lines=1, cache_key=None):
"""Check whether rendered text fits."""
configure_fontconfig()
# Setup Pango/Cairo
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width * 2, lines * size * 4)
context = cairo.Context(surface)
layout = PangoCairo.create_layout(context)
# Load and configure font
fontdesc = Pango.FontDescription.from_string(font)
fontdesc.set_size(size * Pango.SCALE)
if weight:
fontdesc.set_weight(weight)
layout.set_font_description(fontdesc)
    # This seems to be the only way to set letter spacing
# See https://stackoverflow.com/q/55533312/225718
layout.set_markup(
'<span letter_spacing="{}">{}</span>'.format(spacing, escape(text))
)
# Set width and line wrapping
layout.set_width(width * Pango.SCALE)
layout.set_wrap(Pango.WrapMode.WORD)
# Calculate dimensions
line_count = layout.get_line_count()
pixel_size = layout.get_pixel_size()
# Show text
PangoCairo.show_layout(context, layout)
# Render box around desired size
expected_height = lines * pixel_size.height / line_count
context.new_path()
context.set_source_rgb(0.8, 0.8, 0.8)
context.set_line_width(1)
context.move_to(1, 1)
context.line_to(width, 1)
context.line_to(width, expected_height)
context.line_to(1, expected_height)
context.line_to(1, 1)
context.stroke()
# Render box about actual size
context.new_path()
if pixel_size.width > width or line_count > lines:
context.set_source_rgb(246 / 255, 102 / 255, 76 / 255)
else:
context.set_source_rgb(0.4, 0.4, 0.4)
context.set_line_width(1)
context.move_to(1, 1)
context.line_to(pixel_size.width, 1)
context.line_to(pixel_size.width, pixel_size.height)
context.line_to(1, pixel_size.height)
context.line_to(1, 1)
context.stroke()
if cache_key:
with BytesIO() as buff:
surface.write_to_png(buff)
cache.set(cache_key, buff.getvalue())
return pixel_size, line_count
def check_render_size(font, weight, size, spacing, text, width, lines, cache_key=None):
"""Checks whether rendered text fits."""
size, actual_lines = render_size(
font, weight, size, spacing, text, width, lines, cache_key
)
return size.width <= width and actual_lines <= lines
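# Illustrative call (argument values are assumptions, not project defaults):
#
#     fits = check_render_size(
#         "DejaVu Sans", Pango.Weight.NORMAL, 11, 0, "Hello world",
#         width=500, lines=1,
#     )
#
# True means the text fits a 500 px wide, single-line area at size 11.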
def get_font_name(filelike):
"""Returns tuple of font family and style, for example ('Ubuntu', 'Regular')."""
if not hasattr(filelike, "loaded_font"):
        # The tempfile creation is a workaround for Pillow crashing on an invalid
        # font, see https://github.com/python-pillow/Pillow/issues/3853
        # Once this is fixed, it should be possible to operate directly on filelike
temp = NamedTemporaryFile(delete=False)
try:
temp.write(filelike.read())
filelike.seek(0)
temp.close()
filelike.loaded_font = ImageFont.truetype(temp.name)
finally:
os.unlink(temp.name)
return filelike.loaded_font.getname()
def check_fonts(app_configs=None, **kwargs):
"""Checks font rendering."""
try:
render_size("DejaVu Sans", Pango.Weight.NORMAL, 11, 0, "test")
return []
except Exception as error:
return [weblate_check("weblate.C024", f"Failed to use Pango: {error}")]
|
from __future__ import absolute_import
import json
import os
import pytest
import click
from click.testing import CliRunner
import libtmux
from libtmux.common import has_lt_version
from tmuxp import cli, config
from tmuxp.cli import get_config_dir, is_pure_name, load_workspace, scan_config
from .fixtures._util import curjoin, loadfixture
def test_creates_config_dir_not_exists(tmpdir):
"""cli.startup() creates config dir if not exists."""
cli.startup(str(tmpdir))
assert os.path.exists(str(tmpdir))
def test_in_dir_from_config_dir(tmpdir):
"""config.in_dir() finds configs config dir."""
cli.startup(str(tmpdir))
tmpdir.join("myconfig.yaml").write("")
tmpdir.join("myconfig.json").write("")
configs_found = config.in_dir(str(tmpdir))
assert len(configs_found) == 2
def test_ignore_non_configs_from_current_dir(tmpdir):
"""cli.in_dir() ignore non-config from config dir."""
cli.startup(str(tmpdir))
tmpdir.join("myconfig.psd").write("")
tmpdir.join("watmyconfig.json").write("")
configs_found = config.in_dir(str(tmpdir))
assert len(configs_found) == 1
def test_get_configs_cwd(tmpdir):
"""config.in_cwd() find config in shell current working directory."""
confdir = tmpdir.mkdir("tmuxpconf2")
with confdir.as_cwd():
config1 = open('.tmuxp.json', 'w+b')
config1.close()
configs_found = config.in_cwd()
assert len(configs_found) == 1
assert '.tmuxp.json' in configs_found
@pytest.mark.parametrize(
'path,expect',
[
('.', False),
('./', False),
('', False),
('.tmuxp.yaml', False),
('../.tmuxp.yaml', False),
('../', False),
('/hello/world', False),
('~/.tmuxp/hey', False),
('~/work/c/tmux/', False),
('~/work/c/tmux/.tmuxp.yaml', False),
('myproject', True),
],
)
def test_is_pure_name(path, expect):
assert is_pure_name(path) == expect
"""
scans for .tmuxp.{yaml,yml,json} in directory, returns first result
log warning if multiple found:
- current directory: ., ./, noarg
- relative to cwd directory: ../, ./hello/, hello/, ./hello/
- absolute directory: /path/to/dir, /path/to/dir/, ~/
- no path, no ext, config_dir: projectname, tmuxp
load file directly -
- no directory (cwd): .tmuxp.yaml
- relative to cwd: ../.tmuxp.yaml, ./hello/.tmuxp.yaml
- absolute path: /path/to/file.yaml, ~/path/to/file/.tmuxp.yaml
Any case where file is not found should return error.
"""
@pytest.fixture
def homedir(tmpdir):
return tmpdir.join('home').mkdir()
@pytest.fixture
def configdir(homedir):
return homedir.join('.tmuxp').mkdir()
@pytest.fixture
def projectdir(homedir):
return homedir.join('work').join('project')
def test_tmuxp_configdir_env_var(tmpdir, monkeypatch):
monkeypatch.setenv('TMUXP_CONFIGDIR', str(tmpdir))
assert get_config_dir() == tmpdir
def test_tmuxp_configdir_xdg_config_dir(tmpdir, monkeypatch):
monkeypatch.setenv('XDG_CONFIG_HOME', str(tmpdir))
tmux_dir = tmpdir.mkdir("tmuxp")
assert get_config_dir() == str(tmux_dir)
def test_resolve_dot(tmpdir, homedir, configdir, projectdir, monkeypatch):
monkeypatch.setenv('HOME', str(homedir))
projectdir.join('.tmuxp.yaml').ensure()
user_config_name = 'myconfig'
user_config = configdir.join('%s.yaml' % user_config_name).ensure()
project_config = str(projectdir.join('.tmuxp.yaml'))
with projectdir.as_cwd():
expect = project_config
assert scan_config('.') == expect
assert scan_config('./') == expect
assert scan_config('') == expect
assert scan_config('../project') == expect
assert scan_config('../project/') == expect
assert scan_config('.tmuxp.yaml') == expect
assert scan_config('../../.tmuxp/%s.yaml' % user_config_name) == str(
user_config
)
assert scan_config('myconfig') == str(user_config)
assert scan_config('~/.tmuxp/myconfig.yaml') == str(user_config)
with pytest.raises(Exception):
scan_config('.tmuxp.json')
with pytest.raises(Exception):
scan_config('.tmuxp.ini')
with pytest.raises(Exception):
scan_config('../')
with pytest.raises(Exception):
scan_config('mooooooo')
with homedir.as_cwd():
expect = project_config
assert scan_config('work/project') == expect
assert scan_config('work/project/') == expect
assert scan_config('./work/project') == expect
assert scan_config('./work/project/') == expect
assert scan_config('.tmuxp/%s.yaml' % user_config_name) == str(user_config)
assert scan_config('./.tmuxp/%s.yaml' % user_config_name) == str(user_config)
assert scan_config('myconfig') == str(user_config)
assert scan_config('~/.tmuxp/myconfig.yaml') == str(user_config)
with pytest.raises(Exception):
scan_config('')
with pytest.raises(Exception):
scan_config('.')
with pytest.raises(Exception):
scan_config('.tmuxp.yaml')
with pytest.raises(Exception):
scan_config('../')
with pytest.raises(Exception):
scan_config('mooooooo')
with configdir.as_cwd():
expect = project_config
assert scan_config('../work/project') == expect
assert scan_config('../../home/work/project') == expect
assert scan_config('../work/project/') == expect
assert scan_config('%s.yaml' % user_config_name) == str(user_config)
assert scan_config('./%s.yaml' % user_config_name) == str(user_config)
assert scan_config('myconfig') == str(user_config)
assert scan_config('~/.tmuxp/myconfig.yaml') == str(user_config)
with pytest.raises(Exception):
scan_config('')
with pytest.raises(Exception):
scan_config('.')
with pytest.raises(Exception):
scan_config('.tmuxp.yaml')
with pytest.raises(Exception):
scan_config('../')
with pytest.raises(Exception):
scan_config('mooooooo')
with tmpdir.as_cwd():
expect = project_config
assert scan_config('home/work/project') == expect
assert scan_config('./home/work/project/') == expect
assert scan_config('home/.tmuxp/%s.yaml' % user_config_name) == str(user_config)
assert scan_config('./home/.tmuxp/%s.yaml' % user_config_name) == str(
user_config
)
assert scan_config('myconfig') == str(user_config)
assert scan_config('~/.tmuxp/myconfig.yaml') == str(user_config)
with pytest.raises(Exception):
scan_config('')
with pytest.raises(Exception):
scan_config('.')
with pytest.raises(Exception):
scan_config('.tmuxp.yaml')
with pytest.raises(Exception):
scan_config('../')
with pytest.raises(Exception):
scan_config('mooooooo')
def test_scan_config_arg(homedir, configdir, projectdir, monkeypatch):
runner = CliRunner()
@click.command()
@click.argument('config', type=cli.ConfigPath(exists=True), nargs=-1)
def config_cmd(config):
click.echo(config)
monkeypatch.setenv('HOME', str(homedir))
projectdir.join('.tmuxp.yaml').ensure()
user_config_name = 'myconfig'
user_config = configdir.join('%s.yaml' % user_config_name).ensure()
project_config = str(projectdir.join('.tmuxp.yaml'))
def check_cmd(config_arg):
return runner.invoke(config_cmd, [config_arg]).output
with projectdir.as_cwd():
expect = project_config
assert expect in check_cmd('.')
assert expect in check_cmd('./')
assert expect in check_cmd('')
assert expect in check_cmd('../project')
assert expect in check_cmd('../project/')
assert expect in check_cmd('.tmuxp.yaml')
assert str(user_config) in check_cmd('../../.tmuxp/%s.yaml' % user_config_name)
assert user_config.purebasename in check_cmd('myconfig')
assert str(user_config) in check_cmd('~/.tmuxp/myconfig.yaml')
assert 'file not found' in check_cmd('.tmuxp.json')
assert 'file not found' in check_cmd('.tmuxp.ini')
assert 'No tmuxp files found' in check_cmd('../')
assert 'config not found in config dir' in check_cmd('moo')
def test_load_workspace(server, monkeypatch):
    # This is an implementation test. Since this test suite may be run within
    # a tmux session by the developer, delete the TMUX variable temporarily.
monkeypatch.delenv('TMUX', raising=False)
session_file = curjoin("workspacebuilder/two_pane.yaml")
# open it detached
session = load_workspace(
session_file, socket_name=server.socket_name, detached=True
)
assert isinstance(session, libtmux.Session)
assert session.name == 'sampleconfig'
@pytest.mark.skipif(
has_lt_version('2.1'), reason='exact session name matches only tmux >= 2.1'
)
def test_load_workspace_name_match_regression_252(tmpdir, server, monkeypatch):
monkeypatch.delenv('TMUX', raising=False)
session_file = curjoin("workspacebuilder/two_pane.yaml")
# open it detached
session = load_workspace(
session_file, socket_name=server.socket_name, detached=True
)
assert isinstance(session, libtmux.Session)
assert session.name == 'sampleconfig'
projfile = tmpdir.join('simple.yaml')
projfile.write(
"""
session_name: sampleconfi
start_directory: './'
windows:
- panes:
- echo 'hey'"""
)
# open it detached
session = load_workspace(
projfile.strpath, socket_name=server.socket_name, detached=True
)
assert session.name == 'sampleconfi'
def test_load_symlinked_workspace(server, tmpdir, monkeypatch):
    # This is an implementation test. Since this test suite may be run within
    # a tmux session by the developer, delete the TMUX variable temporarily.
monkeypatch.delenv('TMUX', raising=False)
realtemp = tmpdir.mkdir('myrealtemp')
linktemp = tmpdir.join('symlinktemp')
linktemp.mksymlinkto(realtemp)
projfile = linktemp.join('simple.yaml')
projfile.write(
"""
session_name: samplesimple
start_directory: './'
windows:
- panes:
- echo 'hey'"""
)
# open it detached
session = load_workspace(
projfile.strpath, socket_name=server.socket_name, detached=True
)
pane = session.attached_window.attached_pane
assert isinstance(session, libtmux.Session)
assert session.name == 'samplesimple'
assert pane.current_path == realtemp.strpath
def test_regression_00132_session_name_with_dots(tmpdir, server, session):
yaml_config = curjoin("workspacebuilder/regression_00132_dots.yaml")
cli_args = [yaml_config]
inputs = []
runner = CliRunner()
result = runner.invoke(
cli.command_load, cli_args, input=''.join(inputs), standalone_mode=False
)
assert result.exception
assert isinstance(result.exception, libtmux.exc.BadSessionName)
@pytest.mark.parametrize("cli_args", [(['load', '.']), (['load', '.tmuxp.yaml'])])
def test_load_zsh_autotitle_warning(cli_args, tmpdir, monkeypatch):
# create dummy tmuxp yaml so we don't get yelled at
tmpdir.join('.tmuxp.yaml').ensure()
tmpdir.join('.oh-my-zsh').ensure(dir=True)
monkeypatch.setenv('HOME', str(tmpdir))
with tmpdir.as_cwd():
runner = CliRunner()
monkeypatch.delenv('DISABLE_AUTO_TITLE', raising=False)
monkeypatch.setenv('SHELL', 'zsh')
result = runner.invoke(cli.cli, cli_args)
assert 'Please set' in result.output
monkeypatch.setenv('DISABLE_AUTO_TITLE', 'false')
result = runner.invoke(cli.cli, cli_args)
assert 'Please set' in result.output
monkeypatch.setenv('DISABLE_AUTO_TITLE', 'true')
result = runner.invoke(cli.cli, cli_args)
assert 'Please set' not in result.output
monkeypatch.delenv('DISABLE_AUTO_TITLE', raising=False)
monkeypatch.setenv('SHELL', 'sh')
result = runner.invoke(cli.cli, cli_args)
assert 'Please set' not in result.output
@pytest.mark.parametrize("cli_args", [(['convert', '.']), (['convert', '.tmuxp.yaml'])])
def test_convert(cli_args, tmpdir, monkeypatch):
# create dummy tmuxp yaml so we don't get yelled at
tmpdir.join('.tmuxp.yaml').write(
"""
session_name: hello
"""
)
tmpdir.join('.oh-my-zsh').ensure(dir=True)
monkeypatch.setenv('HOME', str(tmpdir))
with tmpdir.as_cwd():
runner = CliRunner()
runner.invoke(cli.cli, cli_args, input='y\ny\n')
assert tmpdir.join('.tmuxp.json').check()
assert tmpdir.join('.tmuxp.json').open().read() == json.dumps(
{'session_name': 'hello'}, indent=2
)
@pytest.mark.parametrize("cli_args", [(['import'])])
def test_import(cli_args, monkeypatch):
runner = CliRunner()
result = runner.invoke(cli.cli, cli_args)
assert 'tmuxinator' in result.output
assert 'teamocil' in result.output
@pytest.mark.parametrize(
"cli_args,inputs",
[
(
['import', 'teamocil', './.teamocil/config.yaml'],
['\n', 'y\n', './la.yaml\n', 'y\n'],
),
(
['import', 'teamocil', './.teamocil/config.yaml'],
['\n', 'y\n', './exists.yaml\n', './la.yaml\n', 'y\n'],
),
(
['import', 'teamocil', 'config'],
['\n', 'y\n', './exists.yaml\n', './la.yaml\n', 'y\n'],
),
],
)
def test_import_teamocil(cli_args, inputs, tmpdir, monkeypatch):
teamocil_config = loadfixture('config_teamocil/test4.yaml')
teamocil_dir = tmpdir.join('.teamocil').mkdir()
teamocil_dir.join('config.yaml').write(teamocil_config)
tmpdir.join('exists.yaml').ensure()
monkeypatch.setenv('HOME', str(tmpdir))
with tmpdir.as_cwd():
runner = CliRunner()
runner.invoke(cli.cli, cli_args, input=''.join(inputs))
assert tmpdir.join('la.yaml').check()
@pytest.mark.parametrize(
"cli_args,inputs",
[
(
['import', 'tmuxinator', './.tmuxinator/config.yaml'],
['\n', 'y\n', './la.yaml\n', 'y\n'],
),
(
['import', 'tmuxinator', './.tmuxinator/config.yaml'],
['\n', 'y\n', './exists.yaml\n', './la.yaml\n', 'y\n'],
),
(
['import', 'tmuxinator', 'config'],
['\n', 'y\n', './exists.yaml\n', './la.yaml\n', 'y\n'],
),
],
)
def test_import_tmuxinator(cli_args, inputs, tmpdir, monkeypatch):
tmuxinator_config = loadfixture('config_tmuxinator/test3.yaml')
tmuxinator_dir = tmpdir.join('.tmuxinator').mkdir()
tmuxinator_dir.join('config.yaml').write(tmuxinator_config)
tmpdir.join('exists.yaml').ensure()
monkeypatch.setenv('HOME', str(tmpdir))
with tmpdir.as_cwd():
runner = CliRunner()
out = runner.invoke(cli.cli, cli_args, input=''.join(inputs))
print(out.output)
assert tmpdir.join('la.yaml').check()
def test_get_abs_path(tmpdir):
expect = str(tmpdir)
with tmpdir.as_cwd():
        assert cli.get_abs_path('../') == os.path.dirname(expect)
        assert cli.get_abs_path('.') == expect
        assert cli.get_abs_path('./') == expect
        assert cli.get_abs_path(expect) == expect
def test_get_tmuxinator_dir(monkeypatch):
assert cli.get_tmuxinator_dir() == os.path.expanduser('~/.tmuxinator/')
monkeypatch.setenv('HOME', '/moo')
assert cli.get_tmuxinator_dir() == '/moo/.tmuxinator/'
assert cli.get_tmuxinator_dir() == os.path.expanduser('~/.tmuxinator/')
def test_get_cwd(tmpdir):
assert cli.get_cwd() == os.getcwd()
with tmpdir.as_cwd():
assert cli.get_cwd() == str(tmpdir)
assert cli.get_cwd() == os.getcwd()
def test_get_teamocil_dir(monkeypatch):
assert cli.get_teamocil_dir() == os.path.expanduser('~/.teamocil/')
monkeypatch.setenv('HOME', '/moo')
assert cli.get_teamocil_dir() == '/moo/.teamocil/'
assert cli.get_teamocil_dir() == os.path.expanduser('~/.teamocil/')
def test_validate_choices():
validate = cli._validate_choices(['choice1', 'choice2'])
assert validate('choice1')
assert validate('choice2')
with pytest.raises(click.BadParameter):
assert validate('choice3')
def test_pass_config_dir_ClickPath(tmpdir):
configdir = tmpdir.join('myconfigdir')
configdir.mkdir()
user_config_name = 'myconfig'
user_config = configdir.join('%s.yaml' % user_config_name).ensure()
expect = str(configdir.join('myconfig.yaml'))
runner = CliRunner()
@click.command()
@click.argument(
'config',
type=cli.ConfigPath(exists=True, config_dir=(str(configdir))),
nargs=-1,
)
def config_cmd(config):
click.echo(config)
def check_cmd(config_arg):
return runner.invoke(config_cmd, [config_arg]).output
with configdir.as_cwd():
assert expect in check_cmd('myconfig')
assert expect in check_cmd('myconfig.yaml')
assert expect in check_cmd('./myconfig.yaml')
assert str(user_config) in check_cmd(str(configdir.join('myconfig.yaml')))
assert 'file not found' in check_cmd('.tmuxp.json')
|
from pyinsteon.address import Address
from pyinsteon.device_types import (
GeneralController_MiniRemote_4,
Hub,
SwitchedLightingControl_SwitchLinc,
)
from tests.async_mock import AsyncMock, MagicMock
class MockSwitchLinc(SwitchedLightingControl_SwitchLinc):
"""Mock SwitchLinc device."""
@property
def operating_flags(self):
"""Return no operating flags to force properties to be checked."""
return {}
class MockDevices:
"""Mock devices class."""
def __init__(self, connected=True):
"""Init the MockDevices class."""
self._devices = {}
self.modem = None
self._connected = connected
self.async_save = AsyncMock()
self.add_x10_device = MagicMock()
self.set_id = MagicMock()
def __getitem__(self, address):
"""Return a a device from the device address."""
return self._devices.get(address)
def __iter__(self):
"""Return an iterator of device addresses."""
yield from self._devices
def __len__(self):
"""Return the number of devices."""
return len(self._devices)
def get(self, address):
"""Return a device from an address or None if not found."""
return self._devices.get(Address(address))
async def async_load(self, *args, **kwargs):
"""Load the mock devices."""
if self._connected:
addr0 = Address("AA.AA.AA")
addr1 = Address("11.11.11")
addr2 = Address("22.22.22")
addr3 = Address("33.33.33")
self._devices[addr0] = Hub(addr0)
self._devices[addr1] = MockSwitchLinc(addr1, 0x02, 0x00)
self._devices[addr2] = GeneralController_MiniRemote_4(addr2, 0x00, 0x00)
self._devices[addr3] = SwitchedLightingControl_SwitchLinc(addr3, 0x02, 0x00)
for device in [self._devices[addr] for addr in [addr1, addr2, addr3]]:
device.async_read_config = AsyncMock()
for device in [self._devices[addr] for addr in [addr2, addr3]]:
device.async_status = AsyncMock()
self._devices[addr1].async_status = AsyncMock(side_effect=AttributeError)
self.modem = self._devices[addr0]
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch, Mock
from diamond.collector import Collector
from openstackswiftrecon import OpenstackSwiftReconCollector
class TestOpenstackSwiftReconCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('OpenstackSwiftReconCollector', {
'allowed_names': allowed_names,
'interval': 1,
'recon_object_cache': self.getFixturePath('object.recon'),
'recon_container_cache': self.getFixturePath('container.recon'),
'recon_account_cache': self.getFixturePath('account.recon')
})
self.collector = OpenstackSwiftReconCollector(config, None)
def test_import(self):
self.assertTrue(OpenstackSwiftReconCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=False))
@patch.object(Collector, 'publish')
def test_recon_no_access(self, publish_mock, open_mock):
        self.collector.collect()
        self.assertFalse(open_mock.called)
        self.assertFalse(publish_mock.called)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_recon_publish(self, publish_mock):
self.collector.collect()
metrics = {'object.object_replication_time': 2409.806068432331,
'object.object_auditor_stats_ALL.passes': 43887,
'object.object_auditor_stats_ALL.errors': 0,
'object.object_auditor_stats_ALL.audit_time':
301695.1047577858,
'object.object_auditor_stats_ALL.start_time':
1357979417.104742,
'object.object_auditor_stats_ALL.quarantined': 0,
'object.object_auditor_stats_ALL.bytes_processed':
24799969235,
'object.async_pending': 0,
'object.object_updater_sweep': 0.767723798751831,
'object.object_auditor_stats_ZBF.passes': 99350,
'object.object_auditor_stats_ZBF.errors': 0,
'object.object_auditor_stats_ZBF.audit_time':
152991.46442770958,
'object.object_auditor_stats_ZBF.start_time':
1357979462.621007,
'object.object_auditor_stats_ZBF.quarantined': 0,
'object.object_auditor_stats_ZBF.bytes_processed': 0}
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import voluptuous as vol
from homeassistant.components import rpi_pfio
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.const import ATTR_NAME, DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
ATTR_INVERT_LOGIC = "invert_logic"
CONF_PORTS = "ports"
DEFAULT_INVERT_LOGIC = False
PORT_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_NAME): cv.string,
vol.Optional(ATTR_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_PORTS, default={}): vol.Schema({cv.positive_int: PORT_SCHEMA})}
)
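# Illustrative YAML this schema validates (example values, assuming the
# platform is configured under "switch"):
#
#     switch:
#       - platform: rpi_pfio
#         ports:
#           0:
#             name: Fan
#             invert_logic: true
#           1:
#             name: Light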
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PiFace Digital Output devices."""
switches = []
ports = config.get(CONF_PORTS)
for port, port_entity in ports.items():
name = port_entity.get(ATTR_NAME)
invert_logic = port_entity[ATTR_INVERT_LOGIC]
switches.append(RPiPFIOSwitch(port, name, invert_logic))
add_entities(switches)
class RPiPFIOSwitch(ToggleEntity):
"""Representation of a PiFace Digital Output."""
def __init__(self, port, name, invert_logic):
"""Initialize the pin."""
self._port = port
self._name = name or DEVICE_DEFAULT_NAME
self._invert_logic = invert_logic
self._state = False
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
rpi_pfio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
rpi_pfio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
import json
import sys
import time
import lemur.common.utils as utils
import lemur.dns_providers.util as dnsutil
import requests
from flask import current_app
from lemur.extensions import metrics, sentry
REQUIRED_VARIABLES = [
"ACME_POWERDNS_APIKEYNAME",
"ACME_POWERDNS_APIKEY",
"ACME_POWERDNS_DOMAIN",
]
class Zone:
"""
This class implements a PowerDNS zone in JSON.
"""
def __init__(self, _data):
self._data = _data
@property
def id(self):
""" Zone id, has a trailing "." at the end, which we manually remove. """
return self._data["id"][:-1]
@property
def name(self):
""" Zone name, has a trailing "." at the end, which we manually remove. """
return self._data["name"][:-1]
@property
def kind(self):
""" Indicates whether the zone is setup as a PRIMARY or SECONDARY """
return self._data["kind"]
class Record:
"""
This class implements a PowerDNS record.
"""
def __init__(self, _data):
self._data = _data
@property
def name(self):
return self._data["name"]
@property
def type(self):
return self._data["type"]
@property
def ttl(self):
return self._data["ttl"]
@property
def content(self):
return self._data["content"]
@property
def disabled(self):
return self._data["disabled"]
def get_zones(account_number):
"""
Retrieve authoritative zones from the PowerDNS API and return a list of zones
:param account_number:
:raise: Exception
:return: list of Zone Objects
"""
_check_conf()
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
path = f"/api/v1/servers/{server_id}/zones"
zones = []
function = sys._getframe().f_code.co_name
log_data = {
"function": function
}
try:
records = _get(path)
log_data["message"] = "Retrieved Zones Successfully"
current_app.logger.debug(log_data)
    except Exception:
sentry.captureException()
log_data["message"] = "Failed to Retrieve Zone Data"
current_app.logger.debug(log_data)
raise
for record in records:
zone = Zone(record)
if zone.kind == 'Master':
zones.append(zone.name)
return zones
def create_txt_record(domain, token, account_number):
"""
Create a TXT record for the given domain and token and return a change_id tuple
:param domain: FQDN
:param token: challenge value
:param account_number:
:return: tuple of domain/token
"""
_check_conf()
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": domain,
"token": token,
}
# Create new record
domain_id = domain + "."
records = [Record({'name': domain_id, 'content': f"\"{token}\"", 'disabled': False})]
# Get current records
cur_records = _get_txt_records(domain)
for record in cur_records:
if record.content != token:
records.append(record)
try:
_patch_txt_records(domain, account_number, records)
log_data["message"] = "TXT record(s) successfully created"
current_app.logger.debug(log_data)
except Exception as e:
sentry.captureException()
log_data["Exception"] = e
log_data["message"] = "Unable to create TXT record(s)"
current_app.logger.debug(log_data)
change_id = (domain, token)
return change_id
def wait_for_dns_change(change_id, account_number=None):
"""
Checks the authoritative DNS Server to see if changes have propagated.
:param change_id: tuple of domain/token
:param account_number:
:return:
"""
_check_conf()
domain, token = change_id
number_of_attempts = current_app.config.get("ACME_POWERDNS_RETRIES", 3)
zone_name = _get_zone_name(domain, account_number)
nameserver = dnsutil.get_authoritative_nameserver(zone_name)
record_found = False
for attempts in range(0, number_of_attempts):
txt_records = dnsutil.get_dns_records(domain, "TXT", nameserver)
for txt_record in txt_records:
if txt_record == token:
record_found = True
break
if record_found:
break
time.sleep(10)
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": domain,
"status": record_found,
"message": "Record status on PowerDNS authoritative server"
}
current_app.logger.debug(log_data)
if record_found:
metrics.send(f"{function}.success", "counter", 1, metric_tags={"fqdn": domain, "txt_record": token})
else:
metrics.send(f"{function}.fail", "counter", 1, metric_tags={"fqdn": domain, "txt_record": token})
def delete_txt_record(change_id, account_number, domain, token):
"""
Delete the TXT record for the given domain and token
:param change_id: tuple of domain/token
:param account_number:
:param domain: FQDN
:param token: challenge to delete
:return:
"""
_check_conf()
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": domain,
"token": token,
}
"""
Get existing TXT records matching the domain from DNS
The token to be deleted should already exist
There may be other records with different tokens as well
"""
cur_records = _get_txt_records(domain)
found = False
new_records = []
for record in cur_records:
if record.content == f"\"{token}\"":
found = True
else:
new_records.append(record)
# Since the matching token is not in DNS, there is nothing to delete
if not found:
log_data["message"] = "Unable to delete TXT record: Token not found in existing TXT records"
current_app.logger.debug(log_data)
return
# The record to delete has been found AND there are other tokens set on the same domain
# Since we only want to delete one token value from the RRSet, we need to use the Patch command to
# overwrite the current RRSet with the existing records.
elif new_records:
try:
_patch_txt_records(domain, account_number, new_records)
log_data["message"] = "TXT record successfully deleted"
current_app.logger.debug(log_data)
except Exception as e:
sentry.captureException()
log_data["Exception"] = e
log_data["message"] = "Unable to delete TXT record: patching exception"
current_app.logger.debug(log_data)
# The record to delete has been found AND there are no other token values set on the same domain
# Use the Delete command to delete the whole RRSet.
else:
zone_name = _get_zone_name(domain, account_number)
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
zone_id = zone_name + "."
domain_id = domain + "."
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
payload = {
"rrsets": [
{
"name": domain_id,
"type": "TXT",
"ttl": 300,
"changetype": "DELETE",
"records": [
{
"content": f"\"{token}\"",
"disabled": False
}
],
"comments": []
}
]
}
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": domain,
"token": token
}
try:
_patch(path, payload)
log_data["message"] = "TXT record successfully deleted"
current_app.logger.debug(log_data)
except Exception as e:
sentry.captureException()
log_data["Exception"] = e
log_data["message"] = "Unable to delete TXT record"
current_app.logger.debug(log_data)
def _check_conf():
"""
Verifies required configuration variables are set
:return:
"""
utils.validate_conf(current_app, REQUIRED_VARIABLES)
def _generate_header():
"""
Generate a PowerDNS API header and return it as a dictionary
:return: Dict of header parameters
"""
api_key_name = current_app.config.get("ACME_POWERDNS_APIKEYNAME")
api_key = current_app.config.get("ACME_POWERDNS_APIKEY")
headers = {api_key_name: api_key}
return headers
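# With the hypothetical config sketched above, _generate_header() returns
# {"X-API-Key": "<secret>"}; the PowerDNS API is authenticated via this header.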
def _get_zone_name(domain, account_number):
"""
Get most specific matching zone for the given domain and return as a String
:param domain: FQDN
:param account_number:
:return: FQDN of domain
"""
zones = get_zones(account_number)
zone_name = ""
for z in zones:
if domain.endswith(z):
if z.count(".") > zone_name.count("."):
zone_name = z
if not zone_name:
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": domain,
"message": "No PowerDNS zone name found.",
}
metrics.send(f"{function}.fail", "counter", 1)
return zone_name
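# _get_zone_name() picks the most specific (most dots) matching suffix: with
# hypothetical zones ["example.com", "sub.example.com"], the FQDN
# "_acme-challenge.host.sub.example.com" maps to "sub.example.com".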
def _get_txt_records(domain):
"""
Retrieve TXT records for a given domain and return list of Record Objects
:param domain: FQDN
:return: list of Record objects
"""
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
path = f"/api/v1/servers/{server_id}/search-data?q={domain}&max=100&object_type=record"
function = sys._getframe().f_code.co_name
log_data = {
"function": function
}
try:
records = _get(path)
log_data["message"] = "Retrieved TXT Records Successfully"
current_app.logger.debug(log_data)
except Exception as e:
sentry.captureException()
log_data["Exception"] = e
log_data["message"] = "Failed to Retrieve TXT Records"
current_app.logger.debug(log_data)
return []
txt_records = []
for record in records:
cur_record = Record(record)
txt_records.append(cur_record)
return txt_records
def _get(path, params=None):
"""
Execute a GET request on the given URL (base_uri + path) and return response as JSON object
:param path: Relative URL path
:param params: additional parameters
:return: json response
"""
base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
resp = requests.get(
f"{base_uri}{path}",
headers=_generate_header(),
params=params,
verify=verify_value
)
resp.raise_for_status()
return resp.json()
def _patch_txt_records(domain, account_number, records):
"""
Send Patch request to PowerDNS Server
:param domain: FQDN
:param account_number:
:param records: List of Record objects
:return:
"""
domain_id = domain + "."
# Create records
txt_records = []
for record in records:
txt_records.append(
{'content': record.content, 'disabled': record.disabled}
)
# Create RRSet
payload = {
"rrsets": [
{
"name": domain_id,
"type": "TXT",
"ttl": 300,
"changetype": "REPLACE",
"records": txt_records,
"comments": []
}
]
}
# Create Txt Records
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
zone_name = _get_zone_name(domain, account_number)
zone_id = zone_name + "."
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
_patch(path, payload)
def _patch(path, payload):
"""
Execute a Patch request on the given URL (base_uri + path) with given payload
:param path:
:param payload:
:return:
"""
base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
resp = requests.patch(
f"{base_uri}{path}",
data=json.dumps(payload),
headers=_generate_header(),
verify=verify_value
)
resp.raise_for_status()
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(128))
@manager.command
def add():
db.session.add(User(name='test'))
db.session.commit()
if __name__ == '__main__':
manager.run()
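# Rough usage sketch from a shell (assuming this file is saved as app.py):
#   python app.py db init      # create the migrations/ directory
#   python app.py db migrate   # autogenerate a migration from the models
#   python app.py db upgrade   # apply migrations to sqlite:///app.db
#   python app.py add          # the custom command above; inserts a test User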
|
import logging
from aiohomekit.model.characteristics import (
CharacteristicsTypes,
CurrentMediaStateValues,
RemoteKeyValues,
TargetMediaStateValues,
)
from aiohomekit.model.services import ServicesTypes
from aiohomekit.utils import clamp_enum_to_char
from homeassistant.components.media_player import DEVICE_CLASS_TV, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
)
from homeassistant.const import (
STATE_IDLE,
STATE_OK,
STATE_PAUSED,
STATE_PLAYING,
STATE_PROBLEM,
)
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
_LOGGER = logging.getLogger(__name__)
HK_TO_HA_STATE = {
CurrentMediaStateValues.PLAYING: STATE_PLAYING,
CurrentMediaStateValues.PAUSED: STATE_PAUSED,
CurrentMediaStateValues.STOPPED: STATE_IDLE,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit television."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
if service["stype"] != "television":
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([HomeKitTelevision(conn, info)], True)
return True
conn.add_listener(async_add_service)
class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity):
"""Representation of a HomeKit Controller Television."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.ACTIVE,
CharacteristicsTypes.CURRENT_MEDIA_STATE,
CharacteristicsTypes.TARGET_MEDIA_STATE,
CharacteristicsTypes.REMOTE_KEY,
CharacteristicsTypes.ACTIVE_IDENTIFIER,
            # Characteristics that are on the linked INPUT_SOURCE services
CharacteristicsTypes.CONFIGURED_NAME,
CharacteristicsTypes.IDENTIFIER,
]
@property
def device_class(self):
"""Define the device class for a HomeKit enabled TV."""
return DEVICE_CLASS_TV
@property
def supported_features(self):
"""Flag media player features that are supported."""
features = 0
if self.service.has(CharacteristicsTypes.ACTIVE_IDENTIFIER):
features |= SUPPORT_SELECT_SOURCE
if self.service.has(CharacteristicsTypes.TARGET_MEDIA_STATE):
if TargetMediaStateValues.PAUSE in self.supported_media_states:
features |= SUPPORT_PAUSE
if TargetMediaStateValues.PLAY in self.supported_media_states:
features |= SUPPORT_PLAY
if TargetMediaStateValues.STOP in self.supported_media_states:
features |= SUPPORT_STOP
if self.service.has(CharacteristicsTypes.REMOTE_KEY):
if RemoteKeyValues.PLAY_PAUSE in self.supported_remote_keys:
features |= SUPPORT_PAUSE | SUPPORT_PLAY
return features
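    # Illustrative: if the accessory exposes TARGET_MEDIA_STATE with only PLAY
    # and PAUSE, supported_features evaluates to SUPPORT_PLAY | SUPPORT_PAUSE;
    # a REMOTE_KEY characteristic offering PLAY_PAUSE ORs in the same two
    # flags, so duplicates in the bitmask are harmless.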
@property
def supported_media_states(self):
"""Mediate state flags that are supported."""
if not self.service.has(CharacteristicsTypes.TARGET_MEDIA_STATE):
return frozenset()
return clamp_enum_to_char(
TargetMediaStateValues,
self.service[CharacteristicsTypes.TARGET_MEDIA_STATE],
)
@property
def supported_remote_keys(self):
"""Remote key buttons that are supported."""
if not self.service.has(CharacteristicsTypes.REMOTE_KEY):
return frozenset()
return clamp_enum_to_char(
RemoteKeyValues, self.service[CharacteristicsTypes.REMOTE_KEY]
)
@property
def source_list(self):
"""List of all input sources for this television."""
sources = []
this_accessory = self._accessory.entity_map.aid(self._aid)
this_tv = this_accessory.services.iid(self._iid)
input_sources = this_accessory.services.filter(
service_type=ServicesTypes.INPUT_SOURCE,
parent_service=this_tv,
)
for input_source in input_sources:
char = input_source[CharacteristicsTypes.CONFIGURED_NAME]
sources.append(char.value)
return sources
@property
def source(self):
"""Name of the current input source."""
active_identifier = self.service.value(CharacteristicsTypes.ACTIVE_IDENTIFIER)
if not active_identifier:
return None
this_accessory = self._accessory.entity_map.aid(self._aid)
this_tv = this_accessory.services.iid(self._iid)
input_source = this_accessory.services.first(
service_type=ServicesTypes.INPUT_SOURCE,
characteristics={CharacteristicsTypes.IDENTIFIER: active_identifier},
parent_service=this_tv,
)
char = input_source[CharacteristicsTypes.CONFIGURED_NAME]
return char.value
@property
def state(self):
"""State of the tv."""
active = self.service.value(CharacteristicsTypes.ACTIVE)
if not active:
return STATE_PROBLEM
homekit_state = self.service.value(CharacteristicsTypes.CURRENT_MEDIA_STATE)
if homekit_state is not None:
return HK_TO_HA_STATE.get(homekit_state, STATE_OK)
return STATE_OK
async def async_media_play(self):
"""Send play command."""
if self.state == STATE_PLAYING:
_LOGGER.debug("Cannot play while already playing")
return
if TargetMediaStateValues.PLAY in self.supported_media_states:
await self.async_put_characteristics(
{CharacteristicsTypes.TARGET_MEDIA_STATE: TargetMediaStateValues.PLAY}
)
elif RemoteKeyValues.PLAY_PAUSE in self.supported_remote_keys:
await self.async_put_characteristics(
{CharacteristicsTypes.REMOTE_KEY: RemoteKeyValues.PLAY_PAUSE}
)
async def async_media_pause(self):
"""Send pause command."""
if self.state == STATE_PAUSED:
_LOGGER.debug("Cannot pause while already paused")
return
if TargetMediaStateValues.PAUSE in self.supported_media_states:
await self.async_put_characteristics(
{CharacteristicsTypes.TARGET_MEDIA_STATE: TargetMediaStateValues.PAUSE}
)
elif RemoteKeyValues.PLAY_PAUSE in self.supported_remote_keys:
await self.async_put_characteristics(
{CharacteristicsTypes.REMOTE_KEY: RemoteKeyValues.PLAY_PAUSE}
)
async def async_media_stop(self):
"""Send stop command."""
if self.state == STATE_IDLE:
_LOGGER.debug("Cannot stop when already idle")
return
if TargetMediaStateValues.STOP in self.supported_media_states:
await self.async_put_characteristics(
{CharacteristicsTypes.TARGET_MEDIA_STATE: TargetMediaStateValues.STOP}
)
async def async_select_source(self, source):
"""Switch to a different media source."""
this_accessory = self._accessory.entity_map.aid(self._aid)
this_tv = this_accessory.services.iid(self._iid)
input_source = this_accessory.services.first(
service_type=ServicesTypes.INPUT_SOURCE,
characteristics={CharacteristicsTypes.CONFIGURED_NAME: source},
parent_service=this_tv,
)
if not input_source:
raise ValueError(f"Could not find source {source}")
identifier = input_source[CharacteristicsTypes.IDENTIFIER]
await self.async_put_characteristics(
{CharacteristicsTypes.ACTIVE_IDENTIFIER: identifier.value}
)
|
import contextlib
import json
import os
import unittest
from absl import flags
import mock
from perfkitbenchmarker import disk
from perfkitbenchmarker import relational_db
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.gcp import gce_virtual_machine
from perfkitbenchmarker.providers.gcp import gcp_relational_db
from perfkitbenchmarker.providers.gcp import util
from perfkitbenchmarker.relational_db import MYSQL
from perfkitbenchmarker.relational_db import POSTGRES
from tests import pkb_common_test_case
from six.moves import builtins
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'name'
_BENCHMARK_UID = 'benchmark_uid'
_COMPONENT = 'test_component'
def CreateMockClientVM(db_class):
m = mock.MagicMock()
m.HasIpAddress = True
m.ip_address = '192.168.0.1'
db_class.client_vm = m
def CreateMockServerVM(db_class):
m = mock.MagicMock()
m.HasIpAddress = True
m.ip_address = '192.168.2.1'
db_class.server_vm = m
def CreateDbFromSpec(spec_dict):
mock_db_spec = mock.Mock(spec=benchmark_config_spec._RelationalDbSpec)
mock_db_spec.configure_mock(**spec_dict)
db_class = gcp_relational_db.GCPRelationalDb(mock_db_spec)
CreateMockClientVM(db_class)
return db_class
@contextlib.contextmanager
def PatchCriticalObjects(stdout='', stderr='', return_code=0):
"""A context manager that patches a few critical objects with mocks."""
retval = (stdout, stderr, return_code)
with mock.patch(
vm_util.__name__ + '.IssueCommand',
return_value=retval) as issue_command, mock.patch(
builtins.__name__ +
'.open'), mock.patch(vm_util.__name__ +
'.NamedTemporaryFile'), mock.patch(
util.__name__ + '.GetDefaultProject',
return_value='fakeproject'):
yield issue_command
def VmGroupSpec():
return {
'clients': {
'vm_spec': {
'GCP': {
'zone': 'us-central1-c',
'machine_type': 'n1-standard-1'
}
},
'disk_spec': {
'GCP': {
'disk_size': 500,
'disk_type': 'pd-ssd'
}
}
},
'servers': {
'vm_spec': {
'GCP': {
'zone': 'us-central1-c',
'machine_type': 'n1-standard-1'
}
},
'disk_spec': {
'GCP': {
'disk_size': 500,
'disk_type': 'pd-ssd'
}
}
}
}
class GcpMysqlRelationalDbTestCase(pkb_common_test_case.PkbCommonTestCase):
def createMySQLSpecDict(self):
db_spec = virtual_machine.BaseVmSpec(
'NAME', **{
'machine_type': 'db-n1-standard-1',
'zone': 'us-west1-b',
})
db_spec.cpus = None
db_spec.memory = None
db_disk_spec = disk.BaseDiskSpec('NAME', **{'disk_size': 50})
return {
'engine': MYSQL,
'engine_version': '5.7',
'run_uri': '123',
'database_name': 'fakedbname',
'database_password': 'fakepassword',
'db_spec': db_spec,
'db_disk_spec': db_disk_spec,
'high_availability': False,
'backup_enabled': True,
'backup_start_time': '07:00',
'vm_groups': VmGroupSpec(),
}
def setUp(self):
super(GcpMysqlRelationalDbTestCase, self).setUp()
FLAGS['run_uri'].parse('123')
FLAGS['gcloud_path'].parse('gcloud')
FLAGS['use_managed_db'].parse(True)
mock_db_spec_attrs = self.createMySQLSpecDict()
self.mock_db_spec = mock.Mock(spec=benchmark_config_spec._RelationalDbSpec)
self.mock_db_spec.configure_mock(**mock_db_spec_attrs)
def testNoHighAvailability(self):
with PatchCriticalObjects() as issue_command:
db = CreateDbFromSpec(self.createMySQLSpecDict())
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertNotIn('--failover-replica-name', command_string)
self.assertNotIn('replica-pkb-db-instance-123', command_string)
def testCreate(self):
with PatchCriticalObjects() as issue_command:
db = gcp_relational_db.GCPRelationalDb(self.mock_db_spec)
CreateMockClientVM(db)
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud beta sql instances create pkb-db-instance-123'),
command_string)
self.assertIn('--project fakeproject', command_string)
self.assertIn('--tier=db-n1-standard-1', command_string)
self.assertIn('--storage-size=50', command_string)
self.assertIn('--backup', command_string)
self.assertIn('--backup-start-time=07:00', command_string)
self.assertIn('--zone=us-west1-b', command_string)
def testCorrectVmGroupsPresent(self):
with PatchCriticalObjects():
db = CreateDbFromSpec(self.createMySQLSpecDict())
CreateMockServerVM(db)
db._Create()
vms = relational_db.VmsToBoot(db.spec.vm_groups)
self.assertNotIn('servers', vms)
def testCreateWithBackupDisabled(self):
with PatchCriticalObjects() as issue_command:
spec = self.mock_db_spec
spec.backup_enabled = False
db = gcp_relational_db.GCPRelationalDb(self.mock_db_spec)
CreateMockClientVM(db)
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud beta sql instances create pkb-db-instance-123'),
command_string)
self.assertIn('--project fakeproject', command_string)
self.assertIn('--tier=db-n1-standard-1', command_string)
self.assertIn('--no-backup', command_string)
self.assertNotIn('--backup-start-time=07:00', command_string)
def testDelete(self):
with PatchCriticalObjects() as issue_command:
db = gcp_relational_db.GCPRelationalDb(self.mock_db_spec)
db._Delete()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertTrue(
command_string.startswith(
'gcloud sql instances delete pkb-db-instance-123'))
def testIsReady(self):
path = os.path.join(
os.path.dirname(__file__), '../../data',
'gcloud-describe-db-instances-available.json')
with open(path) as fp:
test_output = fp.read()
with PatchCriticalObjects(stdout=test_output):
db = CreateDbFromSpec(self.createMySQLSpecDict())
self.assertEqual(True, db._IsReady())
def testExists(self):
path = os.path.join(
os.path.dirname(__file__), '../../data',
'gcloud-describe-db-instances-available.json')
with open(path) as fp:
test_output = fp.read()
with PatchCriticalObjects(stdout=test_output):
db = CreateDbFromSpec(self.createMySQLSpecDict())
self.assertEqual(True, db._Exists())
def testHighAvailability(self):
with PatchCriticalObjects() as issue_command:
spec = self.createMySQLSpecDict()
spec['high_availability'] = True
db = CreateDbFromSpec(spec)
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertIn('--failover-replica-name', command_string)
self.assertIn('replica-pkb-db-instance-123', command_string)
def testParseEndpoint(self):
path = os.path.join(
os.path.dirname(__file__), '../../data',
'gcloud-describe-db-instances-available.json')
with open(path) as fp:
test_output = fp.read()
with PatchCriticalObjects():
db = CreateDbFromSpec(self.createMySQLSpecDict())
self.assertEqual('', db._ParseEndpoint(None))
self.assertIn('10.10.0.35',
db._ParseEndpoint(json.loads(test_output)))
def testCreateUnmanagedDb(self):
FLAGS['use_managed_db'].parse(False)
FLAGS['project'].parse('test')
with PatchCriticalObjects() as issue_command:
db = CreateDbFromSpec(self.createMySQLSpecDict())
CreateMockServerVM(db)
db._Create()
self.assertTrue(db._Exists())
self.assertTrue(hasattr(db, 'firewall'))
self.assertEqual(db.endpoint, db.server_vm.ip_address)
self.assertEqual(db.spec.database_username, 'root')
self.assertEqual(db.spec.database_password, 'perfkitbenchmarker')
self.assertIsNone(issue_command.call_args)
class GcpPostgresRelationalDbTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
        super(GcpPostgresRelationalDbTestCase, self).setUp()
FLAGS.project = ''
FLAGS.run_uri = ''
FLAGS.gcloud_path = ''
def createPostgresSpecDict(self):
machine_type = {
'machine_type': {'cpus': 1, 'memory': '3840MiB'},
'zone': 'us-west1-b',
}
db_spec = gce_virtual_machine.GceVmSpec('NAME', **machine_type)
db_disk_spec = disk.BaseDiskSpec('NAME', **{'disk_size': 50})
return {
'engine': POSTGRES,
'engine_version': '5.7',
'run_uri': '123',
'database_name': 'fakedbname',
'database_password': 'fakepassword',
'db_spec': db_spec,
'db_disk_spec': db_disk_spec,
'high_availability': False,
'backup_enabled': True,
'backup_start_time': '07:00'
}
def testValidateSpec(self):
with PatchCriticalObjects():
db_postgres = CreateDbFromSpec(self.createPostgresSpecDict())
db_postgres._ValidateSpec()
def testValidateMachineType(self):
with PatchCriticalObjects():
db = CreateDbFromSpec(self.createPostgresSpecDict())
self.assertRaises(ValueError, db._ValidateMachineType, 0, 0)
self.assertRaises(ValueError, db._ValidateMachineType, 3840, 0)
self.assertRaises(ValueError, db._ValidateMachineType, 255, 1)
self.assertRaises(ValueError, db._ValidateMachineType, 256000000000, 1)
self.assertRaises(ValueError, db._ValidateMachineType, 2560, 1)
db._ValidateMachineType(db.spec.db_spec.memory, db.spec.db_spec.cpus)
def testCreateNonHighAvailability(self):
with PatchCriticalObjects() as issue_command:
spec = self.createPostgresSpecDict()
spec['engine'] = 'postgres'
spec['engine_version'] = '9.6'
db = CreateDbFromSpec(spec)
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertIn('database-version=POSTGRES_9_6', command_string)
self.assertIn('--cpu=1', command_string)
self.assertIn('--memory=3840MiB', command_string)
self.assertNotIn('--availability-type=REGIONAL', command_string)
def testCreateHighAvailability(self):
with PatchCriticalObjects() as issue_command:
spec = self.createPostgresSpecDict()
spec['high_availability'] = True
spec['engine'] = 'postgres'
spec['engine_version'] = '9.6'
db = CreateDbFromSpec(spec)
db._Create()
self.assertEqual(issue_command.call_count, 1)
command_string = ' '.join(issue_command.call_args[0][0])
self.assertIn('--availability-type=REGIONAL', command_string)
if __name__ == '__main__':
unittest.main()
|
import time
import re
import logging
from error import DiamondException
class Metric(object):
# This saves a significant amount of memory per object. This only matters
# due to the queue system that moves objects between processes and can end
# up storing a large number of objects in the queue waiting for the
# handlers to flush.
__slots__ = [
'path', 'value', 'raw_value', 'timestamp', 'precision',
'host', 'metric_type', 'ttl'
]
def __init__(self, path, value, raw_value=None, timestamp=None, precision=0,
host=None, metric_type='COUNTER', ttl=None):
"""
Create new instance of the Metric class
Takes:
path=string: string the specifies the path of the metric
value=[float|int]: the value to be submitted
timestamp=[float|int]: the timestamp, in seconds since the epoch
(as from time.time()) precision=int: the precision to apply.
Generally the default (2) should work fine.
"""
# Validate the path, value and metric_type submitted
if (None in [path, value] or metric_type not in ('COUNTER', 'GAUGE')):
raise DiamondException(("Invalid parameter when creating new "
"Metric with path: %r value: %r "
"metric_type: %r")
% (path, value, metric_type))
# If no timestamp was passed in, set it to the current time
if timestamp is None:
timestamp = int(time.time())
else:
# If the timestamp isn't an int, then make it one
if not isinstance(timestamp, int):
try:
timestamp = int(timestamp)
except ValueError as e:
raise DiamondException(("Invalid timestamp when "
"creating new Metric %r: %s")
% (path, e))
# The value needs to be a float or an int. If it is, great. If not,
# try to cast it to one of those.
if not isinstance(value, (int, float)):
try:
if precision == 0:
value = round(float(value))
else:
value = float(value)
except ValueError as e:
raise DiamondException(("Invalid value when creating new "
"Metric %r: %s") % (path, e))
self.path = path
self.value = value
self.raw_value = raw_value
self.timestamp = timestamp
self.precision = precision
self.host = host
self.metric_type = metric_type
self.ttl = ttl
def __repr__(self):
"""
Return the Metric as a string
"""
        # ``long`` only exists on Python 2; checking against ``int`` keeps this
        # working on Python 3 as well.
        if not isinstance(self.precision, int):
            log = logging.getLogger('diamond')
            log.warning('Metric %s does not have a valid precision', self.path)
            self.precision = 0
# Set the format string
fstring = "%%s %%0.%if %%i\n" % self.precision
        # Return the formatted string
return fstring % (self.path, self.value, self.timestamp)
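    # Example of the rendered wire format (hypothetical values, precision=0):
    #   "servers.host.cpu.total.idle 12 1416059184\n"
    # i.e. "<path> <value at precision> <timestamp>\n"; Metric.parse() below
    # reads the same layout back.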
def __getstate__(self):
return dict(
(slot, getattr(self, slot))
for slot in self.__slots__
if hasattr(self, slot)
)
def __setstate__(self, state):
for slot, value in state.items():
setattr(self, slot, value)
@classmethod
def parse(cls, string):
"""
Parse a string and create a metric
"""
        match = re.match(r'^(?P<name>[A-Za-z0-9\.\-_]+)\s+'
                         r'(?P<value>[0-9\.]+)\s+'
                         r'(?P<timestamp>[0-9\.]+)(\n?)$',
                         string)
        try:
            groups = match.groupdict()
            # TODO: get precision from value string
            return Metric(groups['name'],
                          groups['value'],
                          timestamp=float(groups['timestamp']))
        except Exception:
            raise DiamondException(
                "Metric could not be parsed from string: %s." % string)
def getPathPrefix(self):
"""
        Returns the path prefix of the metric path
servers.host.cpu.total.idle
return "servers"
"""
# If we don't have a host name, assume it's just the first part of the
# metric path
if self.host is None:
return self.path.split('.')[0]
offset = self.path.index(self.host) - 1
return self.path[0:offset]
def getCollectorPath(self):
"""
Returns collector path
servers.host.cpu.total.idle
return "cpu"
"""
# If we don't have a host name, assume it's just the third part of the
# metric path
if self.host is None:
return self.path.split('.')[2]
offset = self.path.index(self.host)
offset += len(self.host) + 1
endoffset = self.path.index('.', offset)
return self.path[offset:endoffset]
def getMetricPath(self):
"""
Returns the metric path after the collector name
servers.host.cpu.total.idle
return "total.idle"
"""
# If we don't have a host name, assume it's just the fourth+ part of the
# metric path
if self.host is None:
path = self.path.split('.')[3:]
return '.'.join(path)
prefix = '.'.join([self.getPathPrefix(), self.host,
self.getCollectorPath()])
offset = len(prefix) + 1
return self.path[offset:]
|
from numbers import Number
from typing import Optional
from homeassistant.const import (
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
LENGTH,
LENGTH_KILOMETERS,
LENGTH_MILES,
MASS,
MASS_GRAMS,
MASS_KILOGRAMS,
MASS_OUNCES,
MASS_POUNDS,
PRESSURE,
PRESSURE_PA,
PRESSURE_PSI,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TEMPERATURE,
UNIT_NOT_RECOGNIZED_TEMPLATE,
VOLUME,
VOLUME_GALLONS,
VOLUME_LITERS,
)
from homeassistant.util import (
distance as distance_util,
pressure as pressure_util,
temperature as temperature_util,
volume as volume_util,
)
LENGTH_UNITS = distance_util.VALID_UNITS
MASS_UNITS = [MASS_POUNDS, MASS_OUNCES, MASS_KILOGRAMS, MASS_GRAMS]
PRESSURE_UNITS = pressure_util.VALID_UNITS
VOLUME_UNITS = volume_util.VALID_UNITS
TEMPERATURE_UNITS = [TEMP_FAHRENHEIT, TEMP_CELSIUS]
def is_valid_unit(unit: str, unit_type: str) -> bool:
"""Check if the unit is valid for it's type."""
if unit_type == LENGTH:
units = LENGTH_UNITS
elif unit_type == TEMPERATURE:
units = TEMPERATURE_UNITS
elif unit_type == MASS:
units = MASS_UNITS
elif unit_type == VOLUME:
units = VOLUME_UNITS
elif unit_type == PRESSURE:
units = PRESSURE_UNITS
else:
return False
return unit in units
class UnitSystem:
"""A container for units of measure."""
def __init__(
self,
name: str,
temperature: str,
length: str,
volume: str,
mass: str,
pressure: str,
) -> None:
"""Initialize the unit system object."""
errors: str = ", ".join(
UNIT_NOT_RECOGNIZED_TEMPLATE.format(unit, unit_type)
for unit, unit_type in [
(temperature, TEMPERATURE),
(length, LENGTH),
(volume, VOLUME),
(mass, MASS),
(pressure, PRESSURE),
]
if not is_valid_unit(unit, unit_type)
)
if errors:
raise ValueError(errors)
self.name = name
self.temperature_unit = temperature
self.length_unit = length
self.mass_unit = mass
self.pressure_unit = pressure
self.volume_unit = volume
@property
def is_metric(self) -> bool:
"""Determine if this is the metric unit system."""
return self.name == CONF_UNIT_SYSTEM_METRIC
def temperature(self, temperature: float, from_unit: str) -> float:
"""Convert the given temperature to this unit system."""
if not isinstance(temperature, Number):
raise TypeError(f"{temperature!s} is not a numeric value.")
return temperature_util.convert(temperature, from_unit, self.temperature_unit)
def length(self, length: Optional[float], from_unit: str) -> float:
"""Convert the given length to this unit system."""
if not isinstance(length, Number):
raise TypeError(f"{length!s} is not a numeric value.")
# type ignore: https://github.com/python/mypy/issues/7207
return distance_util.convert( # type: ignore
length, from_unit, self.length_unit
)
def pressure(self, pressure: Optional[float], from_unit: str) -> float:
"""Convert the given pressure to this unit system."""
if not isinstance(pressure, Number):
raise TypeError(f"{pressure!s} is not a numeric value.")
# type ignore: https://github.com/python/mypy/issues/7207
return pressure_util.convert( # type: ignore
pressure, from_unit, self.pressure_unit
)
def volume(self, volume: Optional[float], from_unit: str) -> float:
"""Convert the given volume to this unit system."""
if not isinstance(volume, Number):
raise TypeError(f"{volume!s} is not a numeric value.")
# type ignore: https://github.com/python/mypy/issues/7207
return volume_util.convert(volume, from_unit, self.volume_unit) # type: ignore
def as_dict(self) -> dict:
"""Convert the unit system to a dictionary."""
return {
LENGTH: self.length_unit,
MASS: self.mass_unit,
PRESSURE: self.pressure_unit,
TEMPERATURE: self.temperature_unit,
VOLUME: self.volume_unit,
}
METRIC_SYSTEM = UnitSystem(
CONF_UNIT_SYSTEM_METRIC,
TEMP_CELSIUS,
LENGTH_KILOMETERS,
VOLUME_LITERS,
MASS_GRAMS,
PRESSURE_PA,
)
IMPERIAL_SYSTEM = UnitSystem(
CONF_UNIT_SYSTEM_IMPERIAL,
TEMP_FAHRENHEIT,
LENGTH_MILES,
VOLUME_GALLONS,
MASS_POUNDS,
PRESSURE_PSI,
)
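# Minimal usage sketch (numbers are approximate and purely illustrative):
#   METRIC_SYSTEM.temperature(75.0, TEMP_FAHRENHEIT)  # ~23.9 (Celsius)
#   IMPERIAL_SYSTEM.length(5.0, LENGTH_KILOMETERS)    # ~3.11 (miles)
#   METRIC_SYSTEM.as_dict()[LENGTH]                   # LENGTH_KILOMETERS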
|
import asyncio
import logging
from pyowm import OWM
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MODE,
CONF_NAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
COMPONENTS,
CONF_LANGUAGE,
DOMAIN,
ENTRY_FORECAST_COORDINATOR,
ENTRY_NAME,
ENTRY_WEATHER_COORDINATOR,
UPDATE_LISTENER,
)
from .forecast_update_coordinator import ForecastUpdateCoordinator
from .weather_update_coordinator import WeatherUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the OpenWeatherMap component."""
hass.data.setdefault(DOMAIN, {})
weather_configs = _filter_domain_configs(config.get("weather", []), DOMAIN)
sensor_configs = _filter_domain_configs(config.get("sensor", []), DOMAIN)
_import_configs(hass, weather_configs + sensor_configs)
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Set up OpenWeatherMap as config entry."""
name = config_entry.data[CONF_NAME]
api_key = config_entry.data[CONF_API_KEY]
latitude = config_entry.data.get(CONF_LATITUDE, hass.config.latitude)
longitude = config_entry.data.get(CONF_LONGITUDE, hass.config.longitude)
forecast_mode = _get_config_value(config_entry, CONF_MODE)
language = _get_config_value(config_entry, CONF_LANGUAGE)
owm = OWM(API_key=api_key, language=language)
weather_coordinator = WeatherUpdateCoordinator(owm, latitude, longitude, hass)
forecast_coordinator = ForecastUpdateCoordinator(
owm, latitude, longitude, forecast_mode, hass
)
await weather_coordinator.async_refresh()
await forecast_coordinator.async_refresh()
if (
not weather_coordinator.last_update_success
and not forecast_coordinator.last_update_success
):
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = {
ENTRY_NAME: name,
ENTRY_WEATHER_COORDINATOR: weather_coordinator,
ENTRY_FORECAST_COORDINATOR: forecast_coordinator,
}
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
update_listener = config_entry.add_update_listener(async_update_options)
hass.data[DOMAIN][config_entry.entry_id][UPDATE_LISTENER] = update_listener
return True
async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry):
"""Update options."""
await hass.config_entries.async_reload(config_entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in COMPONENTS
]
)
)
if unload_ok:
update_listener = hass.data[DOMAIN][config_entry.entry_id][UPDATE_LISTENER]
update_listener()
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
def _import_configs(hass, configs):
for config in configs:
_LOGGER.debug("Importing OpenWeatherMap %s", config)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
)
def _filter_domain_configs(elements, domain):
return list(filter(lambda elem: elem["platform"] == domain, elements))
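# e.g. _filter_domain_configs(
#     [{"platform": "openweathermap"}, {"platform": "demo"}], "openweathermap")
# keeps only the first element (illustrative data).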
def _get_config_value(config_entry, key):
if config_entry.options:
return config_entry.options[key]
return config_entry.data[key]
|
import os
import os.path as op
import numpy as np
from .._digitization import _format_dig_points
from ...utils import (verbose, logger, _clean_names, fill_doc, _check_option,
_validate_type)
from ..base import BaseRaw
from ..utils import _mult_cal_one, _blk_read_lims
from .res4 import _read_res4, _make_ctf_name
from .hc import _read_hc
from .eeg import _read_eeg, _read_pos
from .trans import _make_ctf_coord_trans_set
from .info import _compose_meas_info, _read_bad_chans, _annotate_bad_segments
from .constants import CTF
from .markers import _read_annotations_ctf_call
@fill_doc
def read_raw_ctf(directory, system_clock='truncate', preload=False,
clean_names=False, verbose=None):
"""Raw object from CTF directory.
Parameters
----------
directory : str
Path to the CTF data (ending in ``'.ds'``).
system_clock : str
How to treat the system clock. Use "truncate" (default) to truncate
the data file when the system clock drops to zero, and use "ignore"
to ignore the system clock (e.g., if head positions are measured
multiple times during a recording).
%(preload)s
clean_names : bool, optional
If True main channel names and compensation channel names will
be cleaned from CTF suffixes. The default is False.
%(verbose)s
Returns
-------
raw : instance of RawCTF
The raw data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
Notes
-----
.. versionadded:: 0.11
"""
return RawCTF(directory, system_clock, preload=preload,
clean_names=clean_names, verbose=verbose)
@fill_doc
class RawCTF(BaseRaw):
"""Raw object from CTF directory.
Parameters
----------
directory : str
Path to the CTF data (ending in ``'.ds'``).
system_clock : str
How to treat the system clock. Use "truncate" (default) to truncate
the data file when the system clock drops to zero, and use "ignore"
to ignore the system clock (e.g., if head positions are measured
multiple times during a recording).
%(preload)s
clean_names : bool, optional
If True main channel names and compensation channel names will
be cleaned from CTF suffixes. The default is False.
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, directory, system_clock='truncate', preload=False,
verbose=None, clean_names=False): # noqa: D102
# adapted from mne_ctf2fiff.c
_validate_type(directory, 'path-like', 'directory')
directory = str(directory)
if not directory.endswith('.ds'):
raise TypeError('directory must be a directory ending with ".ds", '
f'got {directory}')
if not op.isdir(directory):
raise ValueError('directory does not exist: "%s"' % directory)
_check_option('system_clock', system_clock, ['ignore', 'truncate'])
logger.info('ds directory : %s' % directory)
res4 = _read_res4(directory) # Read the magical res4 file
coils = _read_hc(directory) # Read the coil locations
eeg = _read_eeg(directory) # Read the EEG electrode loc info
# Investigate the coil location data to get the coordinate trans
coord_trans = _make_ctf_coord_trans_set(res4, coils)
digs = _read_pos(directory, coord_trans)
# Compose a structure which makes fiff writing a piece of cake
info = _compose_meas_info(res4, coils, coord_trans, eeg)
info['dig'] += digs
info['dig'] = _format_dig_points(info['dig'])
info['bads'] += _read_bad_chans(directory, info)
# Determine how our data is distributed across files
fnames = list()
last_samps = list()
raw_extras = list()
        while True:
suffix = 'meg4' if len(fnames) == 0 else ('%d_meg4' % len(fnames))
meg4_name = _make_ctf_name(directory, suffix, raise_error=False)
if meg4_name is None:
break
# check how much data is in the file
sample_info = _get_sample_info(meg4_name, res4, system_clock)
if sample_info['n_samp'] == 0:
break
if len(fnames) == 0:
buffer_size_sec = sample_info['block_size'] / info['sfreq']
else:
buffer_size_sec = 1.
fnames.append(meg4_name)
last_samps.append(sample_info['n_samp'] - 1)
raw_extras.append(sample_info)
first_samps = [0] * len(last_samps)
super(RawCTF, self).__init__(
info, preload, first_samps=first_samps,
last_samps=last_samps, filenames=fnames,
raw_extras=raw_extras, orig_format='int',
buffer_size_sec=buffer_size_sec, verbose=verbose)
# Add bad segments as Annotations (correct for start time)
start_time = -res4['pre_trig_pts'] / float(info['sfreq'])
annot = _annotate_bad_segments(directory, start_time,
info['meas_date'])
marker_annot = _read_annotations_ctf_call(
directory=directory,
total_offset=(res4['pre_trig_pts'] / res4['sfreq']),
trial_duration=(res4['nsamp'] / res4['sfreq']),
meas_date=info['meas_date']
)
annot = marker_annot if annot is None else annot + marker_annot
self.set_annotations(annot)
if clean_names:
self._clean_names()
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a chunk of raw data."""
si = self._raw_extras[fi]
offset = 0
trial_start_idx, r_lims, d_lims = _blk_read_lims(start, stop,
int(si['block_size']))
with open(self._filenames[fi], 'rb') as fid:
for bi in range(len(r_lims)):
samp_offset = (bi + trial_start_idx) * si['res4_nsamp']
n_read = min(si['n_samp_tot'] - samp_offset, si['block_size'])
# read the chunk of data
pos = CTF.HEADER_SIZE
pos += samp_offset * si['n_chan'] * 4
fid.seek(pos, 0)
this_data = np.fromfile(fid, '>i4',
count=si['n_chan'] * n_read)
this_data.shape = (si['n_chan'], n_read)
this_data = this_data[:, r_lims[bi, 0]:r_lims[bi, 1]]
data_view = data[:, d_lims[bi, 0]:d_lims[bi, 1]]
_mult_cal_one(data_view, this_data, idx, cals, mult)
offset += n_read
def _clean_names(self):
"""Clean up CTF suffixes from channel names."""
mapping = dict(zip(self.ch_names, _clean_names(self.ch_names)))
self.rename_channels(mapping)
for comp in self.info['comps']:
for key in ('row_names', 'col_names'):
comp['data'][key] = _clean_names(comp['data'][key])
def _get_sample_info(fname, res4, system_clock):
"""Determine the number of valid samples."""
logger.info('Finding samples for %s: ' % (fname,))
if CTF.SYSTEM_CLOCK_CH in res4['ch_names']:
clock_ch = res4['ch_names'].index(CTF.SYSTEM_CLOCK_CH)
else:
clock_ch = None
for k, ch in enumerate(res4['chs']):
if ch['ch_name'] == CTF.SYSTEM_CLOCK_CH:
clock_ch = k
break
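    # The meg4 payload after HEADER_SIZE is laid out trial by trial; within a
    # trial each channel's res4['nsamp'] big-endian int32 samples are stored
    # contiguously, which is what the seek arithmetic below relies on.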
with open(fname, 'rb') as fid:
fid.seek(0, os.SEEK_END)
st_size = fid.tell()
fid.seek(0, 0)
if (st_size - CTF.HEADER_SIZE) % (4 * res4['nsamp'] *
res4['nchan']) != 0:
raise RuntimeError('The number of samples is not an even multiple '
'of the trial size')
n_samp_tot = (st_size - CTF.HEADER_SIZE) // (4 * res4['nchan'])
n_trial = n_samp_tot // res4['nsamp']
n_samp = n_samp_tot
if clock_ch is None:
logger.info(' System clock channel is not available, assuming '
'all samples to be valid.')
elif system_clock == 'ignore':
logger.info(' System clock channel is available, but ignored.')
else: # use it
logger.info(' System clock channel is available, checking '
'which samples are valid.')
for t in range(n_trial):
# Skip to the correct trial
samp_offset = t * res4['nsamp']
offset = CTF.HEADER_SIZE + (samp_offset * res4['nchan'] +
(clock_ch * res4['nsamp'])) * 4
fid.seek(offset, 0)
this_data = np.fromfile(fid, '>i4', res4['nsamp'])
if len(this_data) != res4['nsamp']:
raise RuntimeError('Cannot read data for trial %d'
% (t + 1))
end = np.where(this_data == 0)[0]
if len(end) > 0:
n_samp = samp_offset + end[0]
break
if n_samp < res4['nsamp']:
n_trial = 1
logger.info(' %d x %d = %d samples from %d chs'
% (n_trial, n_samp, n_samp, res4['nchan']))
else:
n_trial = n_samp // res4['nsamp']
n_omit = n_samp_tot - n_samp
logger.info(' %d x %d = %d samples from %d chs'
% (n_trial, res4['nsamp'], n_samp, res4['nchan']))
if n_omit != 0:
logger.info(' %d samples omitted at the end' % n_omit)
return dict(n_samp=n_samp, n_samp_tot=n_samp_tot, block_size=res4['nsamp'],
res4_nsamp=res4['nsamp'], n_chan=res4['nchan'])
|
import asyncio
import logging
import homeassistant.util.dt as dt_util
from . import const
_LOGGER = logging.getLogger(__name__)
def check_node_schema(node, schema):
"""Check if node matches the passed node schema."""
if const.DISC_NODE_ID in schema and node.node_id not in schema[const.DISC_NODE_ID]:
_LOGGER.debug(
"node.node_id %s not in node_id %s",
node.node_id,
schema[const.DISC_NODE_ID],
)
return False
if (
const.DISC_GENERIC_DEVICE_CLASS in schema
and node.generic not in schema[const.DISC_GENERIC_DEVICE_CLASS]
):
_LOGGER.debug(
"node.generic %s not in generic_device_class %s",
node.generic,
schema[const.DISC_GENERIC_DEVICE_CLASS],
)
return False
if (
const.DISC_SPECIFIC_DEVICE_CLASS in schema
and node.specific not in schema[const.DISC_SPECIFIC_DEVICE_CLASS]
):
_LOGGER.debug(
"node.specific %s not in specific_device_class %s",
node.specific,
schema[const.DISC_SPECIFIC_DEVICE_CLASS],
)
return False
return True
def check_value_schema(value, schema):
"""Check if the value matches the passed value schema."""
if (
const.DISC_COMMAND_CLASS in schema
and value.command_class not in schema[const.DISC_COMMAND_CLASS]
):
_LOGGER.debug(
"value.command_class %s not in command_class %s",
value.command_class,
schema[const.DISC_COMMAND_CLASS],
)
return False
if const.DISC_TYPE in schema and value.type not in schema[const.DISC_TYPE]:
_LOGGER.debug(
"value.type %s not in type %s", value.type, schema[const.DISC_TYPE]
)
return False
if const.DISC_GENRE in schema and value.genre not in schema[const.DISC_GENRE]:
_LOGGER.debug(
"value.genre %s not in genre %s", value.genre, schema[const.DISC_GENRE]
)
return False
if const.DISC_INDEX in schema and value.index not in schema[const.DISC_INDEX]:
_LOGGER.debug(
"value.index %s not in index %s", value.index, schema[const.DISC_INDEX]
)
return False
if (
const.DISC_INSTANCE in schema
and value.instance not in schema[const.DISC_INSTANCE]
):
_LOGGER.debug(
"value.instance %s not in instance %s",
value.instance,
schema[const.DISC_INSTANCE],
)
return False
if const.DISC_SCHEMAS in schema:
found = False
for schema_item in schema[const.DISC_SCHEMAS]:
found = found or check_value_schema(value, schema_item)
if not found:
return False
return True
def node_name(node):
"""Return the name of the node."""
if is_node_parsed(node):
return node.name or f"{node.manufacturer_name} {node.product_name}"
return f"Unknown Node {node.node_id}"
def node_device_id_and_name(node, instance=1):
"""Return the name and device ID for the value with the given index."""
name = node_name(node)
if instance == 1:
return ((const.DOMAIN, node.node_id), name)
name = f"{name} ({instance})"
return ((const.DOMAIN, node.node_id, instance), name)
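# Illustrative: for a parsed node with node_id 5 named "Living Room Dimmer",
# node_device_id_and_name(node, instance=2) would return
# ((const.DOMAIN, 5, 2), "Living Room Dimmer (2)") -- values are hypothetical.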
async def check_has_unique_id(entity, ready_callback, timeout_callback):
"""Wait for entity to have unique_id."""
start_time = dt_util.utcnow()
while True:
waited = int((dt_util.utcnow() - start_time).total_seconds())
if entity.unique_id:
ready_callback(waited)
return
if waited >= const.NODE_READY_WAIT_SECS:
# Wait up to NODE_READY_WAIT_SECS seconds for unique_id to appear.
timeout_callback(waited)
return
await asyncio.sleep(1)
def is_node_parsed(node):
"""Check whether the node has been parsed or still waiting to be parsed."""
return bool((node.manufacturer_name and node.product_name) or node.name)
|
from typing import Sequence
import mock
from kubernetes.client import V1Deployment
from kubernetes.client import V1StatefulSet
from pytest import raises
from paasta_tools.kubernetes.application.controller_wrappers import Application
from paasta_tools.kubernetes_tools import InvalidKubernetesConfig
from paasta_tools.kubernetes_tools import KubeDeployment
from paasta_tools.setup_kubernetes_job import create_application_object
from paasta_tools.setup_kubernetes_job import main
from paasta_tools.setup_kubernetes_job import parse_args
from paasta_tools.setup_kubernetes_job import setup_kube_deployments
from paasta_tools.utils import NoConfigurationForServiceError
from paasta_tools.utils import NoDeploymentsAvailable
def test_parse_args():
with mock.patch(
"paasta_tools.setup_kubernetes_job.argparse", autospec=True
) as mock_argparse:
assert parse_args() == mock_argparse.ArgumentParser.return_value.parse_args()
def test_main():
with mock.patch(
"paasta_tools.setup_kubernetes_job.parse_args", autospec=True
) as mock_parse_args, mock.patch(
"paasta_tools.setup_kubernetes_job.KubeClient", autospec=True
) as mock_kube_client, mock.patch(
"paasta_tools.setup_kubernetes_job.ensure_namespace", autospec=True
) as mock_ensure_namespace, mock.patch(
"paasta_tools.setup_kubernetes_job.setup_kube_deployments", autospec=True
) as mock_setup_kube_deployments:
mock_setup_kube_deployments.return_value = True
with raises(SystemExit) as e:
main()
assert e.value.code == 0
assert mock_ensure_namespace.called
mock_setup_kube_deployments.assert_called_with(
kube_client=mock_kube_client.return_value,
service_instances=mock_parse_args.return_value.service_instance_list,
cluster=mock_parse_args.return_value.cluster,
soa_dir=mock_parse_args.return_value.soa_dir,
)
mock_setup_kube_deployments.return_value = False
with raises(SystemExit) as e:
main()
assert e.value.code == 1
def test_setup_kube_deployment_invalid_job_name():
with mock.patch(
"paasta_tools.setup_kubernetes_job.create_application_object", autospec=True
) as mock_create_application_object, mock.patch(
"paasta_tools.setup_kubernetes_job.list_all_deployments", autospec=True
) as mock_list_all_deployments, mock.patch(
"paasta_tools.setup_kubernetes_job.log", autospec=True
):
mock_client = mock.Mock()
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt", instance="f_m", git_sha="", config_sha="", replicas=0
)
]
mock_service_instances = ["kuruptf_m"]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert mock_create_application_object.call_count == 0
def test_create_application_object():
with mock.patch(
"paasta_tools.setup_kubernetes_job.load_kubernetes_service_config_no_cache",
autospec=True,
) as mock_load_kubernetes_service_config_no_cache, mock.patch(
"paasta_tools.setup_kubernetes_job.load_system_paasta_config", autospec=True
), mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.Application.load_local_config",
autospec=True,
), mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.DeploymentWrapper",
autospec=True,
) as mock_deployment_wrapper, mock.patch(
"paasta_tools.kubernetes.application.controller_wrappers.StatefulSetWrapper",
autospec=True,
) as mock_stateful_set_wrapper:
mock_kube_client = mock.Mock()
mock_deploy = mock.MagicMock(spec=V1Deployment)
service_config = mock.MagicMock()
mock_load_kubernetes_service_config_no_cache.return_value = service_config
service_config.format_kubernetes_app.return_value = mock_deploy
# Create DeploymentWrapper
create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
mock_deployment_wrapper.assert_called_with(mock_deploy)
mock_deploy = mock.MagicMock(spec=V1StatefulSet)
service_config.format_kubernetes_app.return_value = mock_deploy
# Create StatefulSetWrapper
create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
mock_stateful_set_wrapper.assert_called_with(mock_deploy)
# Create object that is not statefulset/deployment
with raises(Exception):
service_config.format_kubernetes_app.return_value = mock.MagicMock()
create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
mock_deployment_wrapper.reset_mock()
mock_stateful_set_wrapper.reset_mock()
mock_load_kubernetes_service_config_no_cache.side_effect = (
NoDeploymentsAvailable
)
ret = create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert ret == (True, None)
assert not mock_deployment_wrapper.called
assert not mock_stateful_set_wrapper.called
mock_load_kubernetes_service_config_no_cache.side_effect = (
NoConfigurationForServiceError
)
ret = create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert ret == (False, None)
assert not mock_deployment_wrapper.called
assert not mock_stateful_set_wrapper.called
mock_load_kubernetes_service_config_no_cache.side_effect = None
mock_load_kubernetes_service_config_no_cache.return_value = mock.Mock(
format_kubernetes_app=mock.Mock(
side_effect=InvalidKubernetesConfig(Exception("Oh no!"), "kurupt", "fm")
)
)
ret = create_application_object(
kube_client=mock_kube_client,
service="kurupt",
instance="fm",
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert ret == (False, None)
assert not mock_deployment_wrapper.called
assert not mock_stateful_set_wrapper.called
def test_setup_kube_deployment_create_update():
fake_create = mock.MagicMock()
fake_update = mock.MagicMock()
fake_update_related_api_objects = mock.MagicMock()
def simple_create_application_object(
kube_client, service, instance, cluster, soa_dir
):
fake_app = mock.MagicMock(spec=Application)
fake_app.kube_deployment = KubeDeployment(
service=service, instance=instance, git_sha="1", config_sha="1", replicas=1
)
fake_app.create = fake_create
fake_app.update = fake_update
fake_app.update_related_api_objects = fake_update_related_api_objects
fake_app.item = None
fake_app.soa_config = None
fake_app.__str__ = lambda app: "fake_app"
return True, fake_app
with mock.patch(
"paasta_tools.setup_kubernetes_job.create_application_object",
autospec=True,
side_effect=simple_create_application_object,
) as mock_create_application_object, mock.patch(
"paasta_tools.setup_kubernetes_job.list_all_deployments", autospec=True
) as mock_list_all_deployments, mock.patch(
"paasta_tools.setup_kubernetes_job.log", autospec=True
) as mock_log_obj:
mock_client = mock.Mock()
# No instances created
mock_service_instances: Sequence[str] = []
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert mock_create_application_object.call_count == 0
assert fake_update.call_count == 0
assert fake_update_related_api_objects.call_count == 0
assert mock_log_obj.info.call_count == 0
mock_log_obj.info.reset_mock()
# Create a new instance
mock_service_instances = ["kurupt.fm"]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_create.call_count == 1
assert fake_update.call_count == 0
assert fake_update_related_api_objects.call_count == 1
mock_log_obj.info.reset_mock()
# Update when gitsha changed
fake_create.reset_mock()
fake_update.reset_mock()
fake_update_related_api_objects.reset_mock()
mock_service_instances = ["kurupt.fm"]
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt", instance="fm", git_sha="2", config_sha="1", replicas=1
)
]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_update.call_count == 1
assert fake_create.call_count == 0
assert fake_update_related_api_objects.call_count == 1
mock_log_obj.info.reset_mock()
# Update when configsha changed
fake_create.reset_mock()
fake_update.reset_mock()
fake_update_related_api_objects.reset_mock()
mock_service_instances = ["kurupt.fm"]
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt", instance="fm", git_sha="1", config_sha="2", replicas=1
)
]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_update.call_count == 1
assert fake_create.call_count == 0
assert fake_update_related_api_objects.call_count == 1
mock_log_obj.info.reset_mock()
# Update when replica changed
fake_create.reset_mock()
fake_update.reset_mock()
fake_update_related_api_objects.reset_mock()
mock_service_instances = ["kurupt.fm"]
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt", instance="fm", git_sha="1", config_sha="1", replicas=2
)
]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_update.call_count == 1
assert fake_create.call_count == 0
assert fake_update_related_api_objects.call_count == 1
mock_log_obj.info.reset_mock()
# Update one and Create One
fake_create.reset_mock()
fake_update.reset_mock()
fake_update_related_api_objects.reset_mock()
mock_service_instances = ["kurupt.fm", "kurupt.garage"]
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt",
instance="garage",
git_sha="2",
config_sha="2",
replicas=1,
)
]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_update.call_count == 1
assert fake_create.call_count == 1
assert fake_update_related_api_objects.call_count == 2
mock_log_obj.info.reset_mock()
# Always attempt to update related API objects
fake_create.reset_mock()
fake_update.reset_mock()
fake_update_related_api_objects.reset_mock()
mock_service_instances = ["kurupt.garage"]
mock_list_all_deployments.return_value = [
KubeDeployment(
service="kurupt",
instance="garage",
git_sha="1",
config_sha="1",
replicas=1,
)
]
setup_kube_deployments(
kube_client=mock_client,
service_instances=mock_service_instances,
cluster="fake_cluster",
soa_dir="/nail/blah",
)
assert fake_update.call_count == 0
assert fake_create.call_count == 0
assert fake_update_related_api_objects.call_count == 1
assert mock_log_obj.info.call_args_list[0] == mock.call(
"fake_app is up-to-date!"
)
|
from perfkitbenchmarker import linux_packages
GIT_REPO = 'https://github.com/stephentu/silo.git'
GIT_TAG = '62d2d498984bf69d3b46a74e310e1fd12fd1f692'
SILO_DIR = '%s/silo' % linux_packages.INSTALL_DIR
APT_PACKAGES = ('libjemalloc-dev libnuma-dev libdb++-dev '
'libmysqld-dev libaio-dev libssl-dev')
YUM_PACKAGES = ('jemalloc-devel numactl-devel libdb-cxx-devel mysql-devel '
'libaio-devel openssl-devel')
def _Install(vm):
"""Installs the Silo package on the VM."""
nthreads = vm.NumCpusForBenchmark() * 2
vm.Install('build_tools')
  # Cloning over the git:// protocol fails when executing behind a proxy, so
  # rewrite git:// URLs to https:// before fetching anything.
  vm.RemoteCommand('git config --global url."https://".insteadOf git://')
  vm.RemoteCommand('git clone {0} {1}'.format(GIT_REPO, SILO_DIR))
  vm.RemoteCommand('cd {0} && git checkout {1}'.format(SILO_DIR, GIT_TAG))
  # Disable -Wmaybe-uninitialized errors when GCC has the option, to work
  # around a spurious error in masstree.
cxx = '"g++ -std=gnu++0x \
$(echo | gcc -Wmaybe-uninitialized -E - >/dev/null 2>&1 && \
echo -Wno-error=maybe-uninitialized)"'
vm.RemoteCommand(
'cd {0} && CXX={2} MODE=perf DEBUG=0 CHECK_INVARIANTS=0 make -j{1} dbtest'
.format(SILO_DIR, nthreads, cxx))
def YumInstall(vm):
"""Installs the Silo package on the VM."""
vm.InstallEpelRepo()
vm.InstallPackages(YUM_PACKAGES)
_Install(vm)
def AptInstall(vm):
"""Installs the Silo package on the VM."""
vm.InstallPackages(APT_PACKAGES)
_Install(vm)
|
import logging
import boto3
import voluptuous as vol
from homeassistant.components.tts import PLATFORM_SCHEMA, Provider
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_REGION = "region_name"
CONF_ACCESS_KEY_ID = "aws_access_key_id"
CONF_SECRET_ACCESS_KEY = "aws_secret_access_key"
CONF_PROFILE_NAME = "profile_name"
ATTR_CREDENTIALS = "credentials"
DEFAULT_REGION = "us-east-1"
SUPPORTED_REGIONS = [
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2",
"ca-central-1",
"eu-west-1",
"eu-central-1",
"eu-west-2",
"eu-west-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-northeast-2",
"ap-northeast-1",
"ap-south-1",
"sa-east-1",
]
CONF_ENGINE = "engine"
CONF_VOICE = "voice"
CONF_OUTPUT_FORMAT = "output_format"
CONF_SAMPLE_RATE = "sample_rate"
CONF_TEXT_TYPE = "text_type"
SUPPORTED_VOICES = [
"Zhiyu", # Chinese
"Mads",
"Naja", # Danish
"Ruben",
"Lotte", # Dutch
"Russell",
"Nicole", # English Australian
"Brian",
"Amy",
"Emma", # English
"Aditi",
"Raveena", # English, Indian
"Joey",
"Justin",
"Matthew",
"Ivy",
"Joanna",
"Kendra",
"Kimberly",
"Salli", # English
"Geraint", # English Welsh
"Mathieu",
"Celine",
"Lea", # French
"Chantal", # French Canadian
"Hans",
"Marlene",
"Vicki", # German
"Aditi", # Hindi
"Karl",
"Dora", # Icelandic
"Giorgio",
"Carla",
"Bianca", # Italian
"Takumi",
"Mizuki", # Japanese
"Seoyeon", # Korean
"Liv", # Norwegian
"Jacek",
"Jan",
"Ewa",
"Maja", # Polish
"Ricardo",
"Vitoria", # Portuguese, Brazilian
"Cristiano",
"Ines", # Portuguese, European
"Carmen", # Romanian
"Maxim",
"Tatyana", # Russian
"Enrique",
"Conchita",
"Lucia", # Spanish European
"Mia", # Spanish Mexican
"Miguel",
"Penelope", # Spanish US
"Astrid", # Swedish
"Filiz", # Turkish
"Gwyneth", # Welsh
]
SUPPORTED_OUTPUT_FORMATS = ["mp3", "ogg_vorbis", "pcm"]
SUPPORTED_ENGINES = ["neural", "standard"]
SUPPORTED_SAMPLE_RATES = ["8000", "16000", "22050", "24000"]
SUPPORTED_SAMPLE_RATES_MAP = {
"mp3": ["8000", "16000", "22050", "24000"],
"ogg_vorbis": ["8000", "16000", "22050"],
"pcm": ["8000", "16000"],
}
SUPPORTED_TEXT_TYPES = ["text", "ssml"]
CONTENT_TYPE_EXTENSIONS = {"audio/mpeg": "mp3", "audio/ogg": "ogg", "audio/pcm": "pcm"}
DEFAULT_ENGINE = "standard"
DEFAULT_VOICE = "Joanna"
DEFAULT_OUTPUT_FORMAT = "mp3"
DEFAULT_TEXT_TYPE = "text"
DEFAULT_SAMPLE_RATES = {"mp3": "22050", "ogg_vorbis": "22050", "pcm": "16000"}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_REGION, default=DEFAULT_REGION): vol.In(SUPPORTED_REGIONS),
vol.Inclusive(CONF_ACCESS_KEY_ID, ATTR_CREDENTIALS): cv.string,
vol.Inclusive(CONF_SECRET_ACCESS_KEY, ATTR_CREDENTIALS): cv.string,
vol.Exclusive(CONF_PROFILE_NAME, ATTR_CREDENTIALS): cv.string,
vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORTED_VOICES),
vol.Optional(CONF_ENGINE, default=DEFAULT_ENGINE): vol.In(SUPPORTED_ENGINES),
vol.Optional(CONF_OUTPUT_FORMAT, default=DEFAULT_OUTPUT_FORMAT): vol.In(
SUPPORTED_OUTPUT_FORMATS
),
vol.Optional(CONF_SAMPLE_RATE): vol.All(
cv.string, vol.In(SUPPORTED_SAMPLE_RATES)
),
vol.Optional(CONF_TEXT_TYPE, default=DEFAULT_TEXT_TYPE): vol.In(
SUPPORTED_TEXT_TYPES
),
}
)
def get_engine(hass, config, discovery_info=None):
"""Set up Amazon Polly speech component."""
output_format = config[CONF_OUTPUT_FORMAT]
sample_rate = config.get(CONF_SAMPLE_RATE, DEFAULT_SAMPLE_RATES[output_format])
if sample_rate not in SUPPORTED_SAMPLE_RATES_MAP.get(output_format):
_LOGGER.error(
"%s is not a valid sample rate for %s", sample_rate, output_format
)
return None
config[CONF_SAMPLE_RATE] = sample_rate
profile = config.get(CONF_PROFILE_NAME)
if profile is not None:
boto3.setup_default_session(profile_name=profile)
aws_config = {
CONF_REGION: config[CONF_REGION],
CONF_ACCESS_KEY_ID: config.get(CONF_ACCESS_KEY_ID),
CONF_SECRET_ACCESS_KEY: config.get(CONF_SECRET_ACCESS_KEY),
}
del config[CONF_REGION]
del config[CONF_ACCESS_KEY_ID]
del config[CONF_SECRET_ACCESS_KEY]
polly_client = boto3.client("polly", **aws_config)
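    # Build the voice/language tables from Polly itself so the provider only
    # advertises languages that are actually available to this client.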
supported_languages = []
all_voices = {}
all_voices_req = polly_client.describe_voices()
for voice in all_voices_req.get("Voices"):
all_voices[voice.get("Id")] = voice
if voice.get("LanguageCode") not in supported_languages:
supported_languages.append(voice.get("LanguageCode"))
return AmazonPollyProvider(polly_client, config, supported_languages, all_voices)
class AmazonPollyProvider(Provider):
"""Amazon Polly speech api provider."""
def __init__(self, polly_client, config, supported_languages, all_voices):
"""Initialize Amazon Polly provider for TTS."""
self.client = polly_client
self.config = config
self.supported_langs = supported_languages
self.all_voices = all_voices
self.default_voice = self.config[CONF_VOICE]
self.name = "Amazon Polly"
@property
def supported_languages(self):
"""Return a list of supported languages."""
return self.supported_langs
@property
def default_language(self):
"""Return the default language."""
return self.all_voices.get(self.default_voice).get("LanguageCode")
@property
def default_options(self):
"""Return dict include default options."""
return {CONF_VOICE: self.default_voice}
@property
def supported_options(self):
"""Return a list of supported options."""
return [CONF_VOICE]
def get_tts_audio(self, message, language=None, options=None):
"""Request TTS file from Polly."""
voice_id = options.get(CONF_VOICE, self.default_voice)
voice_in_dict = self.all_voices.get(voice_id)
if language != voice_in_dict.get("LanguageCode"):
_LOGGER.error("%s does not support the %s language", voice_id, language)
return None, None
resp = self.client.synthesize_speech(
Engine=self.config[CONF_ENGINE],
OutputFormat=self.config[CONF_OUTPUT_FORMAT],
SampleRate=self.config[CONF_SAMPLE_RATE],
Text=message,
TextType=self.config[CONF_TEXT_TYPE],
VoiceId=voice_id,
)
return (
CONTENT_TYPE_EXTENSIONS[resp.get("ContentType")],
resp.get("AudioStream").read(),
)
|
from datetime import timedelta
import pytest
from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.modbus.const import (
CALL_TYPE_COIL,
CALL_TYPE_DISCRETE,
CONF_ADDRESS,
CONF_INPUT_TYPE,
CONF_INPUTS,
)
from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON
from .conftest import run_base_read_test, setup_base_test
@pytest.mark.parametrize(
"cfg,regs,expected",
[
(
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
},
[0xFF],
STATE_ON,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
},
[0x01],
STATE_ON,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
},
[0x00],
STATE_OFF,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
},
[0x80],
STATE_OFF,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
},
[0xFE],
STATE_OFF,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_DISCRETE,
},
[0xFF],
STATE_ON,
),
(
{
CONF_INPUT_TYPE: CALL_TYPE_DISCRETE,
},
[0x00],
STATE_OFF,
),
],
)
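# The register values above cover both coil and discrete inputs; only the
# least significant bit of the returned value decides ON/OFF, which is why
# 0x80 and 0xFE are expected to read back as STATE_OFF.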
async def test_coil_true(hass, mock_hub, cfg, regs, expected):
"""Run test for given config."""
sensor_name = "modbus_test_binary_sensor"
scan_interval = 5
entity_id, now, device = await setup_base_test(
sensor_name,
hass,
mock_hub,
{CONF_INPUTS: [dict(**{CONF_NAME: sensor_name, CONF_ADDRESS: 1234}, **cfg)]},
SENSOR_DOMAIN,
scan_interval,
)
await run_base_read_test(
entity_id,
hass,
mock_hub,
cfg.get(CONF_INPUT_TYPE),
regs,
expected,
now + timedelta(seconds=scan_interval + 1),
)
|
from django.contrib import admin
from import_export.admin import ExportActionModelAdmin, ImportExportMixin, ImportMixin
from import_export.resources import ModelResource
from .forms import CustomConfirmImportForm, CustomImportForm
from .models import Author, Book, Category, Child, EBook
class ChildAdmin(ImportMixin, admin.ModelAdmin):
pass
class BookResource(ModelResource):
class Meta:
model = Book
def for_delete(self, row, instance):
return self.fields['name'].clean(row) == ''
class BookAdmin(ImportExportMixin, admin.ModelAdmin):
list_display = ('name', 'author', 'added')
list_filter = ['categories', 'author']
resource_class = BookResource
class CategoryAdmin(ExportActionModelAdmin):
pass
class AuthorAdmin(ImportMixin, admin.ModelAdmin):
pass
class CustomBookAdmin(BookAdmin):
"""BookAdmin with custom import forms"""
def get_import_form(self):
return CustomImportForm
def get_confirm_import_form(self):
return CustomConfirmImportForm
def get_form_kwargs(self, form, *args, **kwargs):
# update kwargs with authors (from CustomImportForm.cleaned_data)
if isinstance(form, CustomImportForm):
if form.is_valid():
author = form.cleaned_data['author']
kwargs.update({'author': author.id})
return kwargs
admin.site.register(Book, BookAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Author, AuthorAdmin)
admin.site.register(Child, ChildAdmin)
admin.site.register(EBook, CustomBookAdmin)
|
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_array_less)
import pytest
import mne
from mne.datasets import testing
from mne.label import read_label
from mne import (read_cov, read_forward_solution, read_evokeds,
convert_forward_solution)
from mne.inverse_sparse import mixed_norm, tf_mixed_norm
from mne.inverse_sparse.mxne_inverse import make_stc_from_dipoles, _split_gof
from mne.minimum_norm import apply_inverse, make_inverse_operator
from mne.minimum_norm.tests.test_inverse import \
assert_var_exp_log, assert_stc_res
from mne.utils import assert_stcs_equal, run_tests_if_main, catch_logging
from mne.dipole import Dipole
from mne.source_estimate import VolSourceEstimate
data_path = testing.data_path(download=False)
# NOTE: These use the ave and cov from sample dataset (no _trunc)
fname_data = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
label = 'Aud-rh'
fname_label = op.join(data_path, 'MEG', 'sample', 'labels', '%s.label' % label)
@pytest.fixture(scope='module', params=[testing._pytest_param])
def forward():
"""Get a forward solution."""
# module scope it for speed (but don't overwrite in use!)
return read_forward_solution(fname_fwd)
@testing.requires_testing_data
@pytest.mark.timeout(150) # ~30 sec on Travis Linux
@pytest.mark.slowtest
def test_mxne_inverse_standard(forward):
"""Test (TF-)MxNE inverse computation."""
# Read noise covariance matrix
cov = read_cov(fname_cov)
# Handling average file
loose = 0.0
depth = 0.9
evoked = read_evokeds(fname_data, condition=0, baseline=(None, 0))
evoked.crop(tmin=-0.05, tmax=0.2)
evoked_l21 = evoked.copy()
evoked_l21.crop(tmin=0.081, tmax=0.1)
label = read_label(fname_label)
assert label.hemi == 'rh'
forward = convert_forward_solution(forward, surf_ori=True)
# Reduce source space to make test computation faster
inverse_operator = make_inverse_operator(evoked_l21.info, forward, cov,
loose=loose, depth=depth,
fixed=True, use_cps=True)
stc_dspm = apply_inverse(evoked_l21, inverse_operator, lambda2=1. / 9.,
method='dSPM')
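    # Binarize the dSPM estimate so it can serve as a spatial weighting prior
    # (weights=stc_dspm) for the MxNE solvers below.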
stc_dspm.data[np.abs(stc_dspm.data) < 12] = 0.0
stc_dspm.data[np.abs(stc_dspm.data) >= 12] = 1.
weights_min = 0.5
# MxNE tests
alpha = 70 # spatial regularization parameter
stc_prox = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8,
active_set_size=10, weights=stc_dspm,
weights_min=weights_min, solver='prox')
with pytest.warns(None): # CD
stc_cd = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8,
active_set_size=10, weights=stc_dspm,
weights_min=weights_min, solver='cd')
stc_bcd = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='bcd')
assert_array_almost_equal(stc_prox.times, evoked_l21.times, 5)
assert_array_almost_equal(stc_cd.times, evoked_l21.times, 5)
assert_array_almost_equal(stc_bcd.times, evoked_l21.times, 5)
assert_allclose(stc_prox.data, stc_cd.data, rtol=1e-3, atol=0.0)
assert_allclose(stc_prox.data, stc_bcd.data, rtol=1e-3, atol=0.0)
assert_allclose(stc_cd.data, stc_bcd.data, rtol=1e-3, atol=0.0)
assert stc_prox.vertices[1][0] in label.vertices
assert stc_cd.vertices[1][0] in label.vertices
assert stc_bcd.vertices[1][0] in label.vertices
# vector
with pytest.warns(None): # no convergence
stc = mixed_norm(evoked_l21, forward, cov, alpha, loose=1, maxit=2)
with pytest.warns(None): # no convergence
stc_vec = mixed_norm(evoked_l21, forward, cov, alpha, loose=1, maxit=2,
pick_ori='vector')
assert_stcs_equal(stc_vec.magnitude(), stc)
with pytest.warns(None), pytest.raises(ValueError, match='pick_ori='):
mixed_norm(evoked_l21, forward, cov, alpha, loose=0, maxit=2,
pick_ori='vector')
with pytest.warns(None), catch_logging() as log: # CD
dips = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='cd', return_as_dipoles=True, verbose=True)
stc_dip = make_stc_from_dipoles(dips, forward['src'])
assert isinstance(dips[0], Dipole)
assert stc_dip.subject == "sample"
assert_stcs_equal(stc_cd, stc_dip)
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
# Single time point things should match
with pytest.warns(None), catch_logging() as log:
dips = mixed_norm(evoked_l21.copy().crop(0.081, 0.081),
forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
weights=stc_dspm, weights_min=weights_min,
solver='cd', return_as_dipoles=True, verbose=True)
assert_var_exp_log(log.getvalue(), 37.8, 38.0) # 37.9
gof = sum(dip.gof[0] for dip in dips) # these are now partial exp vars
assert_allclose(gof, 37.9, atol=0.1)
with pytest.warns(None), catch_logging() as log:
stc, res = mixed_norm(evoked_l21, forward, cov, alpha, loose=loose,
depth=depth, maxit=300, tol=1e-8,
weights=stc_dspm, # gh-6382
active_set_size=10, return_residual=True,
solver='cd', verbose=True)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert stc.vertices[1][0] in label.vertices
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
assert stc.data.min() < -1e-9 # signed
assert_stc_res(evoked_l21, stc, forward, res)
# irMxNE tests
with pytest.warns(None), catch_logging() as log: # CD
stc, residual = mixed_norm(
evoked_l21, forward, cov, alpha, n_mxne_iter=5, loose=0.0001,
depth=depth, maxit=300, tol=1e-8, active_set_size=10,
solver='cd', return_residual=True, pick_ori='vector', verbose=True)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert stc.vertices[1][0] in label.vertices
assert stc.vertices == [[63152], [79017]]
assert_var_exp_log(log.getvalue(), 51, 53) # 51.8
assert_stc_res(evoked_l21, stc, forward, residual)
    # Repeat with TF-MxNE, using settings that keep test memory usage low
alpha = 60. # overall regularization parameter
l1_ratio = 0.01 # temporal regularization proportion
stc, _ = tf_mixed_norm(evoked, forward, cov,
loose=loose, depth=depth, maxit=100, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, return_residual=True,
alpha=alpha, l1_ratio=l1_ratio)
assert_array_almost_equal(stc.times, evoked.times, 5)
assert stc.vertices[1][0] in label.vertices
# vector
stc_nrm = tf_mixed_norm(
evoked, forward, cov, loose=1, depth=depth, maxit=2, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, alpha=alpha, l1_ratio=l1_ratio)
stc_vec, residual = tf_mixed_norm(
evoked, forward, cov, loose=1, depth=depth, maxit=2, tol=1e-4,
tstep=4, wsize=16, window=0.1, weights=stc_dspm,
weights_min=weights_min, alpha=alpha, l1_ratio=l1_ratio,
pick_ori='vector', return_residual=True)
assert_stcs_equal(stc_vec.magnitude(), stc_nrm)
pytest.raises(ValueError, tf_mixed_norm, evoked, forward, cov,
alpha=101, l1_ratio=0.03)
pytest.raises(ValueError, tf_mixed_norm, evoked, forward, cov,
alpha=50., l1_ratio=1.01)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_mxne_vol_sphere():
"""Test (TF-)MxNE with a sphere forward and volumic source space."""
evoked = read_evokeds(fname_data, condition=0, baseline=(None, 0))
evoked.crop(tmin=-0.05, tmax=0.2)
cov = read_cov(fname_cov)
evoked_l21 = evoked.copy()
evoked_l21.crop(tmin=0.081, tmax=0.1)
info = evoked.info
sphere = mne.make_sphere_model(r0=(0., 0., 0.), head_radius=0.080)
src = mne.setup_volume_source_space(subject=None, pos=15., mri=None,
sphere=(0.0, 0.0, 0.0, 0.08),
bem=None, mindist=5.0,
exclude=2.0, sphere_units='m')
fwd = mne.make_forward_solution(info, trans=None, src=src,
bem=sphere, eeg=False, meg=True)
alpha = 80.
pytest.raises(ValueError, mixed_norm, evoked, fwd, cov, alpha,
loose=0.0, return_residual=False,
maxit=3, tol=1e-8, active_set_size=10)
pytest.raises(ValueError, mixed_norm, evoked, fwd, cov, alpha,
loose=0.2, return_residual=False,
maxit=3, tol=1e-8, active_set_size=10)
# irMxNE tests
with catch_logging() as log:
stc = mixed_norm(evoked_l21, fwd, cov, alpha,
n_mxne_iter=1, maxit=30, tol=1e-8,
active_set_size=10, verbose=True)
assert isinstance(stc, VolSourceEstimate)
assert_array_almost_equal(stc.times, evoked_l21.times, 5)
assert_var_exp_log(log.getvalue(), 9, 11) # 10.2
    # Compare the orientation obtained using fit_dipole and mixed_norm
    # for a simulated evoked containing a single dipole
stc = mne.VolSourceEstimate(50e-9 * np.random.RandomState(42).randn(1, 4),
vertices=[stc.vertices[0][:1]],
tmin=stc.tmin,
tstep=stc.tstep)
evoked_dip = mne.simulation.simulate_evoked(fwd, stc, info, cov, nave=1e9,
use_cps=True)
dip_mxne = mixed_norm(evoked_dip, fwd, cov, alpha=80,
n_mxne_iter=1, maxit=30, tol=1e-8,
active_set_size=10, return_as_dipoles=True)
amp_max = [np.max(d.amplitude) for d in dip_mxne]
dip_mxne = dip_mxne[np.argmax(amp_max)]
assert dip_mxne.pos[0] in src[0]['rr'][stc.vertices[0]]
dip_fit = mne.fit_dipole(evoked_dip, cov, sphere)[0]
assert np.abs(np.dot(dip_fit.ori[0], dip_mxne.ori[0])) > 0.99
dist = 1000 * np.linalg.norm(dip_fit.pos[0] - dip_mxne.pos[0])
assert dist < 4. # within 4 mm
    # Repeat with TF-MxNE, using settings that keep test memory usage low
alpha = 60. # overall regularization parameter
l1_ratio = 0.01 # temporal regularization proportion
stc, _ = tf_mixed_norm(evoked, fwd, cov, maxit=3, tol=1e-4,
tstep=16, wsize=32, window=0.1, alpha=alpha,
l1_ratio=l1_ratio, return_residual=True)
assert isinstance(stc, VolSourceEstimate)
assert_array_almost_equal(stc.times, evoked.times, 5)
@pytest.mark.parametrize('mod', (
None, 'mult', 'augment', 'sign', 'zero', 'less'))
def test_split_gof_basic(mod):
"""Test splitting the goodness of fit."""
# first a trivial case
gain = np.array([[0., 1., 1.], [1., 1., 0.]]).T
M = np.ones((3, 1))
X = np.ones((2, 1))
M_est = gain @ X
assert_allclose(M_est, np.array([[1., 2., 1.]]).T) # a reasonable estimate
if mod == 'mult':
gain *= [1., -0.5]
X[1] *= -2
elif mod == 'augment':
gain = np.concatenate((gain, np.zeros((3, 1))), axis=1)
X = np.concatenate((X, [[1.]]))
elif mod == 'sign':
gain[1] *= -1
M[1] *= -1
M_est[1] *= -1
elif mod in ('zero', 'less'):
gain = np.array([[1, 1., 1.], [1., 1., 1.]]).T
if mod == 'zero':
X[:, 0] = [1., 0.]
else:
X[:, 0] = [1., 0.5]
M_est = gain @ X
else:
assert mod is None
res = M - M_est
gof = 100 * (1. - (res * res).sum() / (M * M).sum())
gof_split = _split_gof(M, X, gain)
assert_allclose(gof_split.sum(), gof)
want = gof_split[[0, 0]]
if mod == 'augment':
want = np.concatenate((want, [[0]]))
if mod in ('mult', 'less'):
assert_array_less(gof_split[1], gof_split[0])
elif mod == 'zero':
assert_allclose(gof_split[0], gof_split.sum(0))
assert_allclose(gof_split[1], 0., atol=1e-6)
else:
assert_allclose(gof_split, want, atol=1e-12)
@testing.requires_testing_data
@pytest.mark.parametrize('idx, weights', [
# empirically determined approximately orthogonal columns: 0, 15157, 19448
([0], [1]),
([0, 15157], [1, 1]),
([0, 15157], [1, 3]),
([0, 15157], [5, -1]),
([0, 15157, 19448], [1, 1, 1]),
([0, 15157, 19448], [1e-2, 1, 5]),
])
def test_split_gof_meg(forward, idx, weights):
"""Test GOF splitting on MEG data."""
gain = forward['sol']['data'][:, idx]
# close to orthogonal
norms = np.linalg.norm(gain, axis=0)
triu = np.triu_indices(len(idx), 1)
prods = np.abs(np.dot(gain.T, gain) / np.outer(norms, norms))[triu]
assert_array_less(prods, 5e-3) # approximately orthogonal
# first, split across time (one dipole per time point)
M = gain * weights
gof_split = _split_gof(M, np.diag(weights), gain)
assert_allclose(gof_split.sum(0), 100., atol=1e-5) # all sum to 100
assert_allclose(gof_split, 100 * np.eye(len(weights)), atol=1) # loc
# next, summed to a single time point (all dipoles active at one time pt)
weights = np.array(weights)[:, np.newaxis]
x = gain @ weights
assert x.shape == (gain.shape[0], 1)
gof_split = _split_gof(x, weights, gain)
want = (norms * weights.T).T ** 2
want = 100 * want / want.sum()
assert_allclose(gof_split, want, atol=1e-3, rtol=1e-2)
assert_allclose(gof_split.sum(), 100, rtol=1e-5)
run_tests_if_main()
|
import functools
import logging
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_GAS,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_MOVING,
DEVICE_CLASS_OCCUPANCY,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SMOKE,
DEVICE_CLASS_VIBRATION,
DOMAIN,
BinarySensorEntity,
)
from homeassistant.const import STATE_ON
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core import discovery
from .core.const import (
CHANNEL_ACCELEROMETER,
CHANNEL_OCCUPANCY,
CHANNEL_ON_OFF,
CHANNEL_ZONE,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
)
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity
_LOGGER = logging.getLogger(__name__)
# Zigbee Cluster Library Zone Type to Home Assistant device class
CLASS_MAPPING = {
0x000D: DEVICE_CLASS_MOTION,
0x0015: DEVICE_CLASS_OPENING,
0x0028: DEVICE_CLASS_SMOKE,
0x002A: DEVICE_CLASS_MOISTURE,
0x002B: DEVICE_CLASS_GAS,
0x002D: DEVICE_CLASS_VIBRATION,
}
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Zigbee Home Automation binary sensor from config entry."""
entities_to_create = hass.data[DATA_ZHA][DOMAIN]
unsub = async_dispatcher_connect(
hass,
SIGNAL_ADD_ENTITIES,
functools.partial(
discovery.async_add_entities, async_add_entities, entities_to_create
),
)
hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
class BinarySensor(ZhaEntity, BinarySensorEntity):
"""ZHA BinarySensor."""
SENSOR_ATTR = None
DEVICE_CLASS = None
def __init__(self, unique_id, zha_device, channels, **kwargs):
"""Initialize the ZHA binary sensor."""
super().__init__(unique_id, zha_device, channels, **kwargs)
self._channel = channels[0]
self._device_class = self.DEVICE_CLASS
async def get_device_class(self):
"""Get the HA device class from the channel."""
async def async_added_to_hass(self):
"""Run when about to be added to hass."""
await super().async_added_to_hass()
await self.get_device_class()
self.async_accept_signal(
self._channel, SIGNAL_ATTR_UPDATED, self.async_set_state
)
@callback
def async_restore_last_state(self, last_state):
"""Restore previous state."""
super().async_restore_last_state(last_state)
self._state = last_state.state == STATE_ON
@property
def is_on(self) -> bool:
"""Return True if the switch is on based on the state machine."""
if self._state is None:
return False
return self._state
@property
def device_class(self) -> str:
"""Return device class from component DEVICE_CLASSES."""
return self._device_class
@callback
def async_set_state(self, attr_id, attr_name, value):
"""Set the state."""
if self.SENSOR_ATTR is None or self.SENSOR_ATTR != attr_name:
return
self._state = bool(value)
self.async_write_ha_state()
async def async_update(self):
"""Attempt to retrieve on off state from the binary sensor."""
await super().async_update()
attribute = getattr(self._channel, "value_attribute", "on_off")
attr_value = await self._channel.get_attribute_value(attribute)
if attr_value is not None:
self._state = attr_value
@STRICT_MATCH(channel_names=CHANNEL_ACCELEROMETER)
class Accelerometer(BinarySensor):
"""ZHA BinarySensor."""
SENSOR_ATTR = "acceleration"
DEVICE_CLASS = DEVICE_CLASS_MOVING
@STRICT_MATCH(channel_names=CHANNEL_OCCUPANCY)
class Occupancy(BinarySensor):
"""ZHA BinarySensor."""
SENSOR_ATTR = "occupancy"
DEVICE_CLASS = DEVICE_CLASS_OCCUPANCY
@STRICT_MATCH(channel_names=CHANNEL_ON_OFF)
class Opening(BinarySensor):
"""ZHA BinarySensor."""
SENSOR_ATTR = "on_off"
DEVICE_CLASS = DEVICE_CLASS_OPENING
@STRICT_MATCH(
channel_names=CHANNEL_ON_OFF,
manufacturers="IKEA of Sweden",
    models=lambda model: isinstance(model, str) and model.find("motion") != -1,
)
@STRICT_MATCH(
channel_names=CHANNEL_ON_OFF,
manufacturers="Philips",
models={"SML001", "SML002"},
)
class Motion(BinarySensor):
"""ZHA BinarySensor."""
SENSOR_ATTR = "on_off"
DEVICE_CLASS = DEVICE_CLASS_MOTION
@STRICT_MATCH(channel_names=CHANNEL_ZONE)
class IASZone(BinarySensor):
"""ZHA IAS BinarySensor."""
SENSOR_ATTR = "zone_status"
async def get_device_class(self) -> None:
"""Get the HA device class from the channel."""
zone_type = await self._channel.get_attribute_value("zone_type")
self._device_class = CLASS_MAPPING.get(zone_type)
async def async_update(self):
"""Attempt to retrieve on off state from the binary sensor."""
await super().async_update()
value = await self._channel.get_attribute_value("zone_status")
if value is not None:
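            # The two low bits of zone_status carry the alarm flags, so
            # masking with 3 is truthy whenever either alarm bit is set.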
self._state = value & 3
|
import re
from django.utils.translation import gettext_lazy as _
from weblate.addons.base import BaseAddon
from weblate.addons.events import EVENT_PRE_COMMIT
SPLITTER = re.compile(r"\s*=\s*")
UNICODE = re.compile(r"\\[uU][0-9a-fA-F]{4}")
def sort_key(line):
"""Sort key for properties."""
prefix = SPLITTER.split(line, 1)[0]
return prefix.lower()
def unicode_format(match):
"""Callback for re.sub for formatting unicode chars."""
return "\\u{}".format(match.group(0)[2:].upper())
def fix_newlines(lines):
"""Convert newlines to unix."""
for i, line in enumerate(lines):
if line.endswith("\r\n"):
lines[i] = line[:-2] + "\n"
elif line.endswith("\r"):
lines[i] = line[:-1] + "\n"
def format_unicode(lines):
"""Standard formatting for unicode chars."""
for i, line in enumerate(lines):
        # re.findall returns an empty list (never None) when nothing matches.
        if not UNICODE.findall(line):
continue
lines[i] = UNICODE.sub(unicode_format, line)
def value_quality(value):
"""Calculate value quality."""
if not value:
return 0
if "[translate me]" in value:
return 1
if "[auto]" in value:
return 2
return 3
def filter_lines(lines):
"""Filter comments, empty lines and duplicate strings."""
result = []
lastkey = None
lastvalue = None
for line in lines:
# Skip comments and blank lines
if line[0] == "#" or line.strip() == "":
continue
parts = SPLITTER.split(line, 1)
# Missing = or empty key
if len(parts) != 2 or not parts[0]:
continue
key, value = parts
# Strip trailing \n in value
value = value[:-1]
# Empty translation
if value in ("", "[auto]", "[translate me]"):
continue
# Check for duplicate key
if key == lastkey:
# Skip duplicate
if value == lastvalue:
continue
quality = value_quality(value)
lastquality = value_quality(lastvalue)
if quality > lastquality:
# Replace lower quality with new one
result.pop()
elif lastquality > quality or quality < 4:
# Drop lower quality one
continue
result.append(line)
lastkey = key
lastvalue = value
return result
def format_file(filename):
"""Format single properties file."""
with open(filename) as handle:
lines = handle.readlines()
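    # Sort the entries, normalize newlines and unicode escapes, then drop
    # comments and duplicates; only rewrite the file when something changed.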
result = sorted(lines, key=sort_key)
fix_newlines(result)
format_unicode(result)
result = filter_lines(result)
if lines != result:
with open(filename, "w") as handle:
handle.writelines(result)
class PropertiesSortAddon(BaseAddon):
events = (EVENT_PRE_COMMIT,)
name = "weblate.properties.sort"
verbose = _("Formats the Java properties file")
description = _("Sorts the Java properties file.")
compat = {"file_format": {"properties-utf8", "properties", "gwt"}}
icon = "sort-alphabetical.svg"
def pre_commit(self, translation, author):
format_file(translation.get_filename())
|
import asyncio
import logging
import re
import async_timeout
import elkm1_lib as elkm1
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_EXCLUDE,
CONF_HOST,
CONF_INCLUDE,
CONF_PASSWORD,
CONF_TEMPERATURE_UNIT,
CONF_USERNAME,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.dt as dt_util
from .const import (
ATTR_KEY,
ATTR_KEY_NAME,
ATTR_KEYPAD_ID,
BARE_TEMP_CELSIUS,
BARE_TEMP_FAHRENHEIT,
CONF_AREA,
CONF_AUTO_CONFIGURE,
CONF_COUNTER,
CONF_ENABLED,
CONF_KEYPAD,
CONF_OUTPUT,
CONF_PLC,
CONF_PREFIX,
CONF_SETTING,
CONF_TASK,
CONF_THERMOSTAT,
CONF_ZONE,
DOMAIN,
ELK_ELEMENTS,
EVENT_ELKM1_KEYPAD_KEY_PRESSED,
)
SYNC_TIMEOUT = 120
_LOGGER = logging.getLogger(__name__)
SUPPORTED_DOMAINS = [
"alarm_control_panel",
"climate",
"light",
"scene",
"sensor",
"switch",
]
SPEAK_SERVICE_SCHEMA = vol.Schema(
{
vol.Required("number"): vol.All(vol.Coerce(int), vol.Range(min=0, max=999)),
vol.Optional("prefix", default=""): cv.string,
}
)
SET_TIME_SERVICE_SCHEMA = vol.Schema(
{
vol.Optional("prefix", default=""): cv.string,
}
)
def _host_validator(config):
"""Validate that a host is properly configured."""
if config[CONF_HOST].startswith("elks://"):
if CONF_USERNAME not in config or CONF_PASSWORD not in config:
raise vol.Invalid("Specify username and password for elks://")
elif not config[CONF_HOST].startswith("elk://") and not config[
CONF_HOST
].startswith("serial://"):
raise vol.Invalid("Invalid host URL")
return config
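# Ranges are given either as plain numbers ("3" or "3-9") or as housecode-style
# values ("a1"-"p16"); both forms are converted to a (start, end) integer tuple.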
def _elk_range_validator(rng):
def _housecode_to_int(val):
match = re.search(r"^([a-p])(0[1-9]|1[0-6]|[1-9])$", val.lower())
if match:
return (ord(match.group(1)) - ord("a")) * 16 + int(match.group(2))
raise vol.Invalid("Invalid range")
def _elk_value(val):
return int(val) if val.isdigit() else _housecode_to_int(val)
vals = [s.strip() for s in str(rng).split("-")]
start = _elk_value(vals[0])
end = start if len(vals) == 1 else _elk_value(vals[1])
return (start, end)
def _has_all_unique_prefixes(value):
"""Validate that each m1 configured has a unique prefix.
Uniqueness is determined case-independently.
"""
prefixes = [device[CONF_PREFIX] for device in value]
schema = vol.Schema(vol.Unique())
schema(prefixes)
return value
DEVICE_SCHEMA_SUBDOMAIN = vol.Schema(
{
vol.Optional(CONF_ENABLED, default=True): cv.boolean,
vol.Optional(CONF_INCLUDE, default=[]): [_elk_range_validator],
vol.Optional(CONF_EXCLUDE, default=[]): [_elk_range_validator],
}
)
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PREFIX, default=""): vol.All(cv.string, vol.Lower),
vol.Optional(CONF_USERNAME, default=""): cv.string,
vol.Optional(CONF_PASSWORD, default=""): cv.string,
vol.Optional(CONF_AUTO_CONFIGURE, default=False): cv.boolean,
# cv.temperature_unit will mutate 'C' -> '°C' and 'F' -> '°F'
vol.Optional(
CONF_TEMPERATURE_UNIT, default=BARE_TEMP_FAHRENHEIT
): cv.temperature_unit,
vol.Optional(CONF_AREA, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_COUNTER, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_KEYPAD, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_OUTPUT, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_PLC, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_SETTING, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_TASK, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_THERMOSTAT, default={}): DEVICE_SCHEMA_SUBDOMAIN,
vol.Optional(CONF_ZONE, default={}): DEVICE_SCHEMA_SUBDOMAIN,
},
_host_validator,
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.All(cv.ensure_list, [DEVICE_SCHEMA], _has_all_unique_prefixes)},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
"""Set up the Elk M1 platform."""
hass.data.setdefault(DOMAIN, {})
_create_elk_services(hass)
if DOMAIN not in hass_config:
return True
for index, conf in enumerate(hass_config[DOMAIN]):
_LOGGER.debug("Importing elkm1 #%d - %s", index, conf[CONF_HOST])
        # The update of the config entry is done in async_setup
        # to ensure the entry is updated before async_setup_entry
        # is called, avoiding a situation where the user has to restart
        # twice for the changes to take effect.
current_config_entry = _async_find_matching_config_entry(
hass, conf[CONF_PREFIX]
)
if current_config_entry:
# If they alter the yaml config we import the changes
# since there currently is no practical way to do an options flow
# with the large amount of include/exclude/enabled options that elkm1 has.
hass.config_entries.async_update_entry(current_config_entry, data=conf)
continue
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=conf,
)
)
return True
@callback
def _async_find_matching_config_entry(hass, prefix):
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.unique_id == prefix:
return entry
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Elk-M1 Control from a config entry."""
conf = entry.data
_LOGGER.debug("Setting up elkm1 %s", conf["host"])
temperature_unit = TEMP_FAHRENHEIT
if conf[CONF_TEMPERATURE_UNIT] in (BARE_TEMP_CELSIUS, TEMP_CELSIUS):
temperature_unit = TEMP_CELSIUS
config = {"temperature_unit": temperature_unit}
if not conf[CONF_AUTO_CONFIGURE]:
# With elkm1-lib==0.7.16 and later auto configure is available
config["panel"] = {"enabled": True, "included": [True]}
for item, max_ in ELK_ELEMENTS.items():
config[item] = {
"enabled": conf[item][CONF_ENABLED],
"included": [not conf[item]["include"]] * max_,
}
try:
_included(conf[item]["include"], True, config[item]["included"])
_included(conf[item]["exclude"], False, config[item]["included"])
except (ValueError, vol.Invalid) as err:
_LOGGER.error("Config item: %s; %s", item, err)
return False
elk = elkm1.Elk(
{
"url": conf[CONF_HOST],
"userid": conf[CONF_USERNAME],
"password": conf[CONF_PASSWORD],
}
)
elk.connect()
def _element_changed(element, changeset):
keypress = changeset.get("last_keypress")
if keypress is None:
return
hass.bus.async_fire(
EVENT_ELKM1_KEYPAD_KEY_PRESSED,
{
ATTR_KEYPAD_ID: element.index + 1,
ATTR_KEY_NAME: keypress[0],
ATTR_KEY: keypress[1],
},
)
for keypad in elk.keypads: # pylint: disable=no-member
keypad.add_callback(_element_changed)
try:
if not await async_wait_for_elk_to_sync(elk, SYNC_TIMEOUT, conf[CONF_HOST]):
return False
except asyncio.TimeoutError as exc:
raise ConfigEntryNotReady from exc
hass.data[DOMAIN][entry.entry_id] = {
"elk": elk,
"prefix": conf[CONF_PREFIX],
"auto_configure": conf[CONF_AUTO_CONFIGURE],
"config": config,
"keypads": {},
}
for component in SUPPORTED_DOMAINS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
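# Flip the slots covered by the 1-based include/exclude ranges in the
# per-element boolean "included" list built in async_setup_entry.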
def _included(ranges, set_to, values):
for rng in ranges:
if not rng[0] <= rng[1] <= len(values):
raise vol.Invalid(f"Invalid range {rng}")
values[rng[0] - 1 : rng[1]] = [set_to] * (rng[1] - rng[0] + 1)
def _find_elk_by_prefix(hass, prefix):
"""Search all config entries for a given prefix."""
for entry_id in hass.data[DOMAIN]:
if hass.data[DOMAIN][entry_id]["prefix"] == prefix:
return hass.data[DOMAIN][entry_id]["elk"]
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in SUPPORTED_DOMAINS
]
)
)
# disconnect cleanly
hass.data[DOMAIN][entry.entry_id]["elk"].disconnect()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def async_wait_for_elk_to_sync(elk, timeout, conf_host):
"""Wait until the elk has finished sync. Can fail login or timeout."""
def login_status(succeeded):
nonlocal success
success = succeeded
if succeeded:
_LOGGER.debug("ElkM1 login succeeded")
else:
elk.disconnect()
_LOGGER.error("ElkM1 login failed; invalid username or password")
event.set()
def sync_complete():
event.set()
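    # Assume success unless the login callback explicitly reports a failure
    # before the sync completes.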
success = True
event = asyncio.Event()
elk.add_handler("login", login_status)
elk.add_handler("sync_complete", sync_complete)
try:
with async_timeout.timeout(timeout):
await event.wait()
except asyncio.TimeoutError:
_LOGGER.error(
"Timed out after %d seconds while trying to sync with ElkM1 at %s",
timeout,
conf_host,
)
elk.disconnect()
raise
return success
def _create_elk_services(hass):
def _getelk(service):
prefix = service.data["prefix"]
elk = _find_elk_by_prefix(hass, prefix)
if elk is None:
raise HomeAssistantError(f"No ElkM1 with prefix '{prefix}' found")
return elk
def _speak_word_service(service):
_getelk(service).panel.speak_word(service.data["number"])
def _speak_phrase_service(service):
_getelk(service).panel.speak_phrase(service.data["number"])
def _set_time_service(service):
_getelk(service).panel.set_time(dt_util.now())
hass.services.async_register(
DOMAIN, "speak_word", _speak_word_service, SPEAK_SERVICE_SCHEMA
)
hass.services.async_register(
DOMAIN, "speak_phrase", _speak_phrase_service, SPEAK_SERVICE_SCHEMA
)
hass.services.async_register(
DOMAIN, "set_time", _set_time_service, SET_TIME_SERVICE_SCHEMA
)
def create_elk_entities(elk_data, elk_elements, element_type, class_, entities):
"""Create the ElkM1 devices of a particular class."""
auto_configure = elk_data["auto_configure"]
if not auto_configure and not elk_data["config"][element_type]["enabled"]:
return
elk = elk_data["elk"]
_LOGGER.debug("Creating elk entities for %s", elk)
for element in elk_elements:
if auto_configure:
if not element.configured:
continue
        # Only check the included list if auto configure is not enabled.
elif not elk_data["config"][element_type]["included"][element.index]:
continue
entities.append(class_(element, elk, elk_data))
return entities
class ElkEntity(Entity):
"""Base class for all Elk entities."""
def __init__(self, element, elk, elk_data):
"""Initialize the base of all Elk devices."""
self._elk = elk
self._element = element
self._prefix = elk_data["prefix"]
self._temperature_unit = elk_data["config"]["temperature_unit"]
# unique_id starts with elkm1_ iff there is no prefix
# it starts with elkm1m_{prefix} iff there is a prefix
# this is to avoid a conflict between
# prefix=foo, name=bar (which would be elkm1_foo_bar)
# - and -
# prefix="", name="foo bar" (which would be elkm1_foo_bar also)
# we could have used elkm1__foo_bar for the latter, but that
# would have been a breaking change
if self._prefix != "":
uid_start = f"elkm1m_{self._prefix}"
else:
uid_start = "elkm1"
self._unique_id = f"{uid_start}_{self._element.default_name('_')}".lower()
@property
def name(self):
"""Name of the element."""
return f"{self._prefix}{self._element.name}"
@property
def unique_id(self):
"""Return unique id of the element."""
return self._unique_id
@property
def should_poll(self) -> bool:
"""Don't poll this device."""
return False
@property
def device_state_attributes(self):
"""Return the default attributes of the element."""
return {**self._element.as_dict(), **self.initial_attrs()}
@property
def available(self):
"""Is the entity available to be updated."""
return self._elk.is_connected()
def initial_attrs(self):
"""Return the underlying element's attributes as a dict."""
attrs = {}
attrs["index"] = self._element.index + 1
return attrs
def _element_changed(self, element, changeset):
pass
@callback
def _element_callback(self, element, changeset):
"""Handle callback from an Elk element that has changed."""
self._element_changed(element, changeset)
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Register callback for ElkM1 changes and update entity state."""
self._element.add_callback(self._element_callback)
self._element_callback(self._element, {})
@property
def device_info(self):
"""Device info connecting via the ElkM1 system."""
return {
"via_device": (DOMAIN, f"{self._prefix}_system"),
}
class ElkAttachedEntity(ElkEntity):
"""An elk entity that is attached to the elk system."""
@property
def device_info(self):
"""Device info for the underlying ElkM1 system."""
device_name = "ElkM1"
if self._prefix:
device_name += f" {self._prefix}"
return {
"name": device_name,
"identifiers": {(DOMAIN, f"{self._prefix}_system")},
"sw_version": self._elk.panel.elkm1_version,
"manufacturer": "ELK Products, Inc.",
"model": "M1",
}
|
import io
import os
import re
import sys
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return io.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.')
# requirements
setup_requirements = ['pytest-runner'] if {'pytest', 'test', 'ptr'}.intersection(sys.argv) else []
install_requirements = ['guessit>=3.0.0', 'babelfish>=0.5.2', 'enzyme>=0.4.1', 'beautifulsoup4>=4.4.0',
'requests>=2.0', 'click>=4.0', 'dogpile.cache>=0.6.0', 'stevedore>=1.20.0',
'chardet>=2.3.0', 'pysrt>=1.0.1', 'six>=1.9.0', 'appdirs>=1.3', 'rarfile>=2.7',
'pytz>=2012c']
if sys.version_info < (3, 2):
install_requirements.append('futures>=3.0')
test_requirements = ['sympy', 'vcrpy>=1.6.1', 'pytest', 'pytest-pep8', 'pytest-flakes', 'pytest-cov']
if sys.version_info < (3, 3):
test_requirements.append('mock')
dev_requirements = ['tox', 'sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-programoutput', 'wheel']
setup(name='subliminal',
version=find_version('subliminal', '__init__.py'),
license='MIT',
description='Subtitles, faster than your thoughts',
long_description=read('README.rst') + '\n\n' + read('HISTORY.rst'),
keywords='subtitle subtitles video movie episode tv show series',
url='https://github.com/Diaoul/subliminal',
author='Antoine Bertin',
author_email='[email protected]',
packages=find_packages(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Video'
],
entry_points={
'subliminal.providers': [
'addic7ed = subliminal.providers.addic7ed:Addic7edProvider',
'argenteam = subliminal.providers.argenteam:ArgenteamProvider',
'legendastv = subliminal.providers.legendastv:LegendasTVProvider',
'opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider',
'podnapisi = subliminal.providers.podnapisi:PodnapisiProvider',
'shooter = subliminal.providers.shooter:ShooterProvider',
'thesubdb = subliminal.providers.thesubdb:TheSubDBProvider',
'tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider'
],
'subliminal.refiners': [
'hash = subliminal.refiners.hash:refine',
'metadata = subliminal.refiners.metadata:refine',
'omdb = subliminal.refiners.omdb:refine',
'tvdb = subliminal.refiners.tvdb:refine'
],
'babelfish.language_converters': [
'addic7ed = subliminal.converters.addic7ed:Addic7edConverter',
'legendastv = subliminal.converters.legendastv:LegendasTVConverter',
'shooter = subliminal.converters.shooter:ShooterConverter',
'thesubdb = subliminal.converters.thesubdb:TheSubDBConverter',
'tvsubtitles = subliminal.converters.tvsubtitles:TVsubtitlesConverter'
],
'console_scripts': [
'subliminal = subliminal.cli:subliminal'
]
},
setup_requires=setup_requirements,
install_requires=install_requirements,
tests_require=test_requirements,
extras_require={
'test': test_requirements,
'dev': dev_requirements
})
|
from unittest import TestCase
from scattertext.termcompaction.PhraseSelector import PhraseSelector
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_term_doc_mat
class TestPhraseSelector(TestCase):
def test_compact(self):
tdm = build_hamlet_jz_term_doc_mat()
c = PhraseSelector(minimum_pmi=10).compact(tdm)
bigrams = [t for t in tdm.get_terms() if ' ' in t]
new_bigrams = [t for t in c.get_terms() if ' ' in t]
self.assertLess(len(new_bigrams), len(bigrams))
        self.assertEqual(set(new_bigrams) - set(bigrams), set())
|
import diamond.collector
from diamond.collector import str_to_bool
import subprocess
import re
class OssecCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(OssecCollector, self).get_default_config_help()
config_help.update({
'bin': 'Path to agent_control binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(OssecCollector, self).get_default_config()
config.update({
'bin': '/var/ossec/bin/agent_control',
'use_sudo': True,
'sudo_cmd': '/usr/bin/sudo',
'path': 'ossec',
})
return config
def collect(self):
command = [self.config['bin'], '-l']
if str_to_bool(self.config['use_sudo']):
command.insert(0, self.config['sudo_cmd'])
try:
p = subprocess.Popen(command, stdout=subprocess.PIPE)
res = p.communicate()[0]
except Exception as e:
self.log.error('Unable to exec cmd: %s, because %s'
% (' '.join(command), str(e)))
return
if res == '':
self.log.error('Empty result from exec cmd: %s'
% (' '.join(command)))
return
states = {}
for line in res.split("\n"):
# ID: 000, Name: local-ossec-001.localdomain (server), IP:\
# 127.0.0.1, Active/Local
if not line.startswith(' ID: '):
continue
fragments = line.split(',')
state = fragments[-1].lstrip()
if state not in states:
states[state] = 1
else:
states[state] += 1
for state, count in states.items():
name = 'agents.' + re.sub('[^a-z]', '_', state.lower())
self.publish(name, count)
|
import pytest
from molecule.command import destroy
@pytest.fixture
def _patched_ansible_destroy(mocker):
return mocker.patch('molecule.provisioner.ansible.Ansible.destroy')
@pytest.fixture
def _patched_destroy_setup(mocker):
return mocker.patch('molecule.command.destroy.Destroy._setup')
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
def test_execute(mocker, patched_logger_info, patched_config_validate,
_patched_ansible_destroy, config_instance):
d = destroy.Destroy(config_instance)
d.execute()
x = [
mocker.call("Scenario: 'default'"),
mocker.call("Action: 'destroy'"),
]
assert x == patched_logger_info.mock_calls
_patched_ansible_destroy.assert_called_once_with()
assert not config_instance.state.converged
assert not config_instance.state.created
@pytest.mark.parametrize(
'config_instance', ['command_driver_delegated_section_data'],
indirect=True)
def test_execute_skips_when_destroy_strategy_is_never(
_patched_destroy_setup, patched_logger_warn, _patched_ansible_destroy,
config_instance):
config_instance.command_args = {'destroy': 'never'}
d = destroy.Destroy(config_instance)
d.execute()
msg = "Skipping, '--destroy=never' requested."
patched_logger_warn.assert_called_once_with(msg)
assert not _patched_ansible_destroy.called
@pytest.mark.parametrize(
'config_instance', ['command_driver_delegated_section_data'],
indirect=True)
def test_execute_skips_when_delegated_driver(
_patched_destroy_setup, patched_logger_warn, _patched_ansible_destroy,
config_instance):
d = destroy.Destroy(config_instance)
d.execute()
msg = 'Skipping, instances are delegated.'
patched_logger_warn.assert_called_once_with(msg)
assert not _patched_ansible_destroy.called
|
from datetime import timedelta
import logging
import re
import WazeRouteCalculator
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_REGION,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
EVENT_HOMEASSISTANT_START,
TIME_MINUTES,
)
from homeassistant.helpers import location
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_DESTINATION = "destination"
ATTR_DURATION = "duration"
ATTR_DISTANCE = "distance"
ATTR_ORIGIN = "origin"
ATTR_ROUTE = "route"
ATTRIBUTION = "Powered by Waze"
CONF_DESTINATION = "destination"
CONF_ORIGIN = "origin"
CONF_INCL_FILTER = "incl_filter"
CONF_EXCL_FILTER = "excl_filter"
CONF_REALTIME = "realtime"
CONF_UNITS = "units"
CONF_VEHICLE_TYPE = "vehicle_type"
CONF_AVOID_TOLL_ROADS = "avoid_toll_roads"
CONF_AVOID_SUBSCRIPTION_ROADS = "avoid_subscription_roads"
CONF_AVOID_FERRIES = "avoid_ferries"
DEFAULT_NAME = "Waze Travel Time"
DEFAULT_REALTIME = True
DEFAULT_VEHICLE_TYPE = "car"
DEFAULT_AVOID_TOLL_ROADS = False
DEFAULT_AVOID_SUBSCRIPTION_ROADS = False
DEFAULT_AVOID_FERRIES = False
ICON = "mdi:car"
UNITS = [CONF_UNIT_SYSTEM_METRIC, CONF_UNIT_SYSTEM_IMPERIAL]
REGIONS = ["US", "NA", "EU", "IL", "AU"]
VEHICLE_TYPES = ["car", "taxi", "motorcycle"]
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ORIGIN): cv.string,
vol.Required(CONF_DESTINATION): cv.string,
vol.Required(CONF_REGION): vol.In(REGIONS),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_INCL_FILTER): cv.string,
vol.Optional(CONF_EXCL_FILTER): cv.string,
vol.Optional(CONF_REALTIME, default=DEFAULT_REALTIME): cv.boolean,
vol.Optional(CONF_VEHICLE_TYPE, default=DEFAULT_VEHICLE_TYPE): vol.In(
VEHICLE_TYPES
),
vol.Optional(CONF_UNITS): vol.In(UNITS),
vol.Optional(
CONF_AVOID_TOLL_ROADS, default=DEFAULT_AVOID_TOLL_ROADS
): cv.boolean,
vol.Optional(
CONF_AVOID_SUBSCRIPTION_ROADS, default=DEFAULT_AVOID_SUBSCRIPTION_ROADS
): cv.boolean,
vol.Optional(CONF_AVOID_FERRIES, default=DEFAULT_AVOID_FERRIES): cv.boolean,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Waze travel time sensor platform."""
destination = config.get(CONF_DESTINATION)
name = config.get(CONF_NAME)
origin = config.get(CONF_ORIGIN)
region = config.get(CONF_REGION)
incl_filter = config.get(CONF_INCL_FILTER)
excl_filter = config.get(CONF_EXCL_FILTER)
realtime = config.get(CONF_REALTIME)
vehicle_type = config.get(CONF_VEHICLE_TYPE)
avoid_toll_roads = config.get(CONF_AVOID_TOLL_ROADS)
avoid_subscription_roads = config.get(CONF_AVOID_SUBSCRIPTION_ROADS)
avoid_ferries = config.get(CONF_AVOID_FERRIES)
units = config.get(CONF_UNITS, hass.config.units.name)
data = WazeTravelTimeData(
None,
None,
region,
incl_filter,
excl_filter,
realtime,
units,
vehicle_type,
avoid_toll_roads,
avoid_subscription_roads,
avoid_ferries,
)
sensor = WazeTravelTime(name, origin, destination, data)
add_entities([sensor])
# Wait until start event is sent to load this component.
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, lambda _: sensor.update())
def _get_location_from_attributes(state):
"""Get the lat/long string from an states attributes."""
attr = state.attributes
return "{},{}".format(attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE))
class WazeTravelTime(Entity):
"""Representation of a Waze travel time sensor."""
def __init__(self, name, origin, destination, waze_data):
"""Initialize the Waze travel time sensor."""
self._name = name
self._waze_data = waze_data
self._state = None
self._origin_entity_id = None
self._destination_entity_id = None
        # Attempt to detect an entity_id, without matching street addresses
        # that merely contain a period.
pattern = "(?<![a-zA-Z0-9 ])[a-z_]+[.][a-zA-Z0-9_]+"
if re.fullmatch(pattern, origin):
_LOGGER.debug("Found origin source entity %s", origin)
self._origin_entity_id = origin
else:
self._waze_data.origin = origin
if re.fullmatch(pattern, destination):
_LOGGER.debug("Found destination source entity %s", destination)
self._destination_entity_id = destination
else:
self._waze_data.destination = destination
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self._waze_data.duration is not None:
return round(self._waze_data.duration)
return None
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return TIME_MINUTES
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the last update."""
if self._waze_data.duration is None:
return None
res = {ATTR_ATTRIBUTION: ATTRIBUTION}
res[ATTR_DURATION] = self._waze_data.duration
res[ATTR_DISTANCE] = self._waze_data.distance
res[ATTR_ROUTE] = self._waze_data.route
res[ATTR_ORIGIN] = self._waze_data.origin
res[ATTR_DESTINATION] = self._waze_data.destination
return res
def _get_location_from_entity(self, entity_id):
"""Get the location from the entity_id."""
state = self.hass.states.get(entity_id)
if state is None:
_LOGGER.error("Unable to find entity %s", entity_id)
return None
# Check if the entity has location attributes.
if location.has_location(state):
_LOGGER.debug("Getting %s location", entity_id)
return _get_location_from_attributes(state)
# Check if device is inside a zone.
zone_state = self.hass.states.get(f"zone.{state.state}")
if location.has_location(zone_state):
_LOGGER.debug(
"%s is in %s, getting zone location", entity_id, zone_state.entity_id
)
return _get_location_from_attributes(zone_state)
# If zone was not found in state then use the state as the location.
if entity_id.startswith("sensor."):
return state.state
# When everything fails just return nothing.
return None
def _resolve_zone(self, friendly_name):
"""Get a lat/long from a zones friendly_name."""
states = self.hass.states.all()
for state in states:
if state.domain == "zone" and state.name == friendly_name:
return _get_location_from_attributes(state)
return friendly_name
def update(self):
"""Fetch new state data for the sensor."""
_LOGGER.debug("Fetching Route for %s", self._name)
# Get origin latitude and longitude from entity_id.
if self._origin_entity_id is not None:
self._waze_data.origin = self._get_location_from_entity(
self._origin_entity_id
)
# Get destination latitude and longitude from entity_id.
if self._destination_entity_id is not None:
self._waze_data.destination = self._get_location_from_entity(
self._destination_entity_id
)
# Get origin from zone name.
self._waze_data.origin = self._resolve_zone(self._waze_data.origin)
# Get destination from zone name.
self._waze_data.destination = self._resolve_zone(self._waze_data.destination)
self._waze_data.update()
class WazeTravelTimeData:
"""WazeTravelTime Data object."""
def __init__(
self,
origin,
destination,
region,
include,
exclude,
realtime,
units,
vehicle_type,
avoid_toll_roads,
avoid_subscription_roads,
avoid_ferries,
):
"""Set up WazeRouteCalculator."""
self._calc = WazeRouteCalculator
self.origin = origin
self.destination = destination
self.region = region
self.include = include
self.exclude = exclude
self.realtime = realtime
self.units = units
self.duration = None
self.distance = None
self.route = None
self.avoid_toll_roads = avoid_toll_roads
self.avoid_subscription_roads = avoid_subscription_roads
self.avoid_ferries = avoid_ferries
# Currently WazeRouteCalc only supports PRIVATE, TAXI, MOTORCYCLE.
if vehicle_type.upper() == "CAR":
# Empty means PRIVATE for waze which translates to car.
self.vehicle_type = ""
else:
self.vehicle_type = vehicle_type.upper()
def update(self):
"""Update WazeRouteCalculator Sensor."""
if self.origin is not None and self.destination is not None:
try:
params = self._calc.WazeRouteCalculator(
self.origin,
self.destination,
self.region,
self.vehicle_type,
self.avoid_toll_roads,
self.avoid_subscription_roads,
self.avoid_ferries,
)
routes = params.calc_all_routes_info(real_time=self.realtime)
if self.include is not None:
routes = {
k: v
for k, v in routes.items()
if self.include.lower() in k.lower()
}
if self.exclude is not None:
routes = {
k: v
for k, v in routes.items()
if self.exclude.lower() not in k.lower()
}
route = list(routes)[0]
self.duration, distance = routes[route]
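                # calc_all_routes_info reports the distance in kilometers; when
                # the imperial unit system is configured, convert it to miles
                # below (1 mi is roughly 1.609 km).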
if self.units == CONF_UNIT_SYSTEM_IMPERIAL:
# Convert to miles.
self.distance = distance / 1.609
else:
self.distance = distance
self.route = route
except self._calc.WRCError as exp:
_LOGGER.warning("Error on retrieving data: %s", exp)
return
except KeyError:
_LOGGER.error("Error retrieving data from server")
return
|
import asyncio
from unittest.mock import patch
import pytest
# region Register Tests
@pytest.mark.asyncio
async def test_config_register_global(config):
config.register_global(enabled=False)
assert config.defaults["GLOBAL"]["enabled"] is False
assert await config.enabled() is False
def test_config_register_global_badvalues(config):
with pytest.raises(RuntimeError):
config.register_global(**{"invalid var name": True})
@pytest.mark.asyncio
async def test_config_register_guild(config, empty_guild):
config.register_guild(enabled=False, some_list=[], some_dict={})
assert config.defaults[config.GUILD]["enabled"] is False
assert config.defaults[config.GUILD]["some_list"] == []
assert config.defaults[config.GUILD]["some_dict"] == {}
assert await config.guild(empty_guild).enabled() is False
assert await config.guild(empty_guild).some_list() == []
assert await config.guild(empty_guild).some_dict() == {}
@pytest.mark.asyncio
async def test_config_register_channel(config, empty_channel):
config.register_channel(enabled=False)
assert config.defaults[config.CHANNEL]["enabled"] is False
assert await config.channel(empty_channel).enabled() is False
@pytest.mark.asyncio
async def test_config_register_role(config, empty_role):
config.register_role(enabled=False)
assert config.defaults[config.ROLE]["enabled"] is False
assert await config.role(empty_role).enabled() is False
@pytest.mark.asyncio
async def test_config_register_member(config, empty_member):
config.register_member(some_number=-1)
assert config.defaults[config.MEMBER]["some_number"] == -1
assert await config.member(empty_member).some_number() == -1
@pytest.mark.asyncio
async def test_config_register_user(config, empty_user):
config.register_user(some_value=None)
assert config.defaults[config.USER]["some_value"] is None
assert await config.user(empty_user).some_value() is None
@pytest.mark.asyncio
async def test_config_force_register_global(config_fr):
with pytest.raises(AttributeError):
await config_fr.enabled()
config_fr.register_global(enabled=True)
assert await config_fr.enabled() is True
# endregion
# Test nested registration
@pytest.mark.asyncio
async def test_nested_registration(config):
config.register_global(foo__bar__baz=False)
assert await config.foo.bar.baz() is False
@pytest.mark.asyncio
async def test_nested_registration_asdict(config):
defaults = {"bar": {"baz": False}}
config.register_global(foo=defaults)
assert await config.foo.bar.baz() is False
@pytest.mark.asyncio
async def test_nested_registration_and_changing(config):
defaults = {"bar": {"baz": False}}
config.register_global(foo=defaults)
assert await config.foo.bar.baz() is False
with pytest.raises(ValueError):
await config.foo.set(True)
@pytest.mark.asyncio
async def test_doubleset_default(config):
config.register_global(foo=True)
config.register_global(foo=False)
assert await config.foo() is False
@pytest.mark.asyncio
async def test_nested_registration_multidict(config):
defaults = {"foo": {"bar": {"baz": True}}, "blah": True}
config.register_global(**defaults)
assert await config.foo.bar.baz() is True
assert await config.blah() is True
def test_nested_group_value_badreg(config):
config.register_global(foo=True)
with pytest.raises(KeyError):
config.register_global(foo__bar=False)
@pytest.mark.asyncio
async def test_nested_toplevel_reg(config):
defaults = {"bar": True, "baz": False}
config.register_global(foo=defaults)
assert await config.foo.bar() is True
assert await config.foo.baz() is False
@pytest.mark.asyncio
async def test_nested_overlapping(config):
config.register_global(foo__bar=True)
config.register_global(foo__baz=False)
assert await config.foo.bar() is True
assert await config.foo.baz() is False
@pytest.mark.asyncio
async def test_nesting_nofr(config):
config.register_global(foo={})
assert await config.foo.bar() is None
assert await config.foo() == {}
# region Default Value Overrides
@pytest.mark.asyncio
async def test_global_default_override(config):
assert await config.enabled(True) is True
@pytest.mark.asyncio
async def test_global_default_nofr(config):
assert await config.nofr() is None
assert await config.nofr(True) is True
@pytest.mark.asyncio
async def test_guild_default_override(config, empty_guild):
assert await config.guild(empty_guild).enabled(True) is True
@pytest.mark.asyncio
async def test_channel_default_override(config, empty_channel):
assert await config.channel(empty_channel).enabled(True) is True
@pytest.mark.asyncio
async def test_role_default_override(config, empty_role):
assert await config.role(empty_role).enabled(True) is True
@pytest.mark.asyncio
async def test_member_default_override(config, empty_member):
assert await config.member(empty_member).enabled(True) is True
@pytest.mark.asyncio
async def test_user_default_override(config, empty_user):
assert await config.user(empty_user).some_value(True) is True
# endregion
# region Setting Values
@pytest.mark.asyncio
async def test_set_global(config):
await config.enabled.set(True)
assert await config.enabled() is True
@pytest.mark.asyncio
async def test_set_guild(config, empty_guild):
await config.guild(empty_guild).enabled.set(True)
assert await config.guild(empty_guild).enabled() is True
curr_list = await config.guild(empty_guild).some_list([1, 2, 3])
assert curr_list == [1, 2, 3]
curr_list.append(4)
await config.guild(empty_guild).some_list.set(curr_list)
assert await config.guild(empty_guild).some_list() == curr_list
@pytest.mark.asyncio
async def test_set_channel(config, empty_channel):
await config.channel(empty_channel).enabled.set(True)
assert await config.channel(empty_channel).enabled() is True
@pytest.mark.asyncio
async def test_set_channel_no_register(config, empty_channel):
await config.channel(empty_channel).no_register.set(True)
assert await config.channel(empty_channel).no_register() is True
# endregion
# Dynamic attribute testing
@pytest.mark.asyncio
async def test_set_dynamic_attr(config):
await config.set_raw("foobar", value=True)
assert await config.foobar() is True
@pytest.mark.asyncio
async def test_clear_dynamic_attr(config):
await config.foo.set(True)
await config.clear_raw("foo")
with pytest.raises(KeyError):
await config.get_raw("foo")
@pytest.mark.asyncio
async def test_get_dynamic_attr(config):
assert await config.get_raw("foobaz", default=True) is True
# Member Group testing
@pytest.mark.asyncio
async def test_membergroup_allguilds(config, empty_member):
await config.member(empty_member).foo.set(False)
all_servers = await config.all_members()
assert empty_member.guild.id in all_servers
@pytest.mark.asyncio
async def test_membergroup_allmembers(config, empty_member):
await config.member(empty_member).foo.set(False)
all_members = await config.all_members(empty_member.guild)
assert empty_member.id in all_members
# Clearing testing
@pytest.mark.asyncio
async def test_global_clear(config):
config.register_global(foo=True, bar=False)
await config.foo.set(False)
await config.bar.set(True)
assert await config.foo() is False
assert await config.bar() is True
await config.clear()
assert await config.foo() is True
assert await config.bar() is False
@pytest.mark.asyncio
async def test_member_clear(config, member_factory):
config.register_member(foo=True)
m1 = member_factory.get()
await config.member(m1).foo.set(False)
assert await config.member(m1).foo() is False
m2 = member_factory.get()
await config.member(m2).foo.set(False)
assert await config.member(m2).foo() is False
assert m1.guild.id != m2.guild.id
await config.member(m1).clear()
assert await config.member(m1).foo() is True
assert await config.member(m2).foo() is False
@pytest.mark.asyncio
async def test_member_clear_all(config, member_factory):
server_ids = []
for _ in range(5):
member = member_factory.get()
await config.member(member).foo.set(True)
server_ids.append(member.guild.id)
member = member_factory.get()
assert len(await config.all_members()) == len(server_ids)
await config.clear_all_members()
assert len(await config.all_members()) == 0
@pytest.mark.asyncio
async def test_clear_all(config):
await config.foo.set(True)
assert await config.foo() is True
await config.clear_all()
with pytest.raises(KeyError):
await config.get_raw("foo")
@pytest.mark.asyncio
async def test_clear_value(config):
await config.foo.set(True)
await config.foo.clear()
with pytest.raises(KeyError):
await config.get_raw("foo")
# Get All testing
@pytest.mark.asyncio
async def test_user_get_all_from_kind(config, user_factory):
config.register_user(foo=False, bar=True)
for _ in range(5):
user = user_factory.get()
await config.user(user).foo.set(True)
all_data = await config.all_users()
assert len(all_data) == 5
for _, v in all_data.items():
assert v["foo"] is True
assert v["bar"] is True
@pytest.mark.asyncio
async def test_user_getalldata(config, user_factory):
user = user_factory.get()
config.register_user(foo=True, bar=False)
await config.user(user).foo.set(False)
all_data = await config.user(user).all()
assert "foo" in all_data
assert "bar" in all_data
assert config.user(user).defaults["foo"] is True
@pytest.mark.asyncio
async def test_value_ctxmgr(config):
config.register_global(foo_list=[])
async with config.foo_list() as foo_list:
foo_list.append("foo")
foo_list = await config.foo_list()
assert "foo" in foo_list
@pytest.mark.asyncio
async def test_value_ctxmgr_saves(config):
config.register_global(bar_list=[])
try:
async with config.bar_list() as bar_list:
bar_list.append("bar")
raise RuntimeError()
except RuntimeError:
pass
bar_list = await config.bar_list()
assert "bar" in bar_list
@pytest.mark.asyncio
async def test_value_ctxmgr_immutable(config):
config.register_global(foo=True)
with pytest.raises(TypeError):
async with config.foo() as foo:
foo = False
foo = await config.foo()
assert foo is True
@pytest.mark.asyncio
async def test_ctxmgr_no_shared_default(config, member_factory):
config.register_member(foo=[])
m1 = member_factory.get()
m2 = member_factory.get()
async with config.member(m1).foo() as foo:
foo.append(1)
assert 1 not in await config.member(m2).foo()
@pytest.mark.asyncio
async def test_ctxmgr_no_unnecessary_write(config):
config.register_global(foo=[])
foo_value_obj = config.foo
with patch.object(foo_value_obj, "set") as set_method:
async with foo_value_obj() as foo:
pass
set_method.assert_not_called()
@pytest.mark.asyncio
async def test_get_then_mutate(config):
"""Tests that mutating an object after getting it as a value doesn't mutate the data store."""
config.register_global(list1=[])
await config.list1.set([])
list1 = await config.list1()
list1.append("foo")
list1 = await config.list1()
assert "foo" not in list1
@pytest.mark.asyncio
async def test_set_then_mutate(config):
"""Tests that mutating an object after setting it as a value doesn't mutate the data store."""
config.register_global(list1=[])
list1 = []
await config.list1.set(list1)
list1.append("foo")
list1 = await config.list1()
assert "foo" not in list1
@pytest.mark.asyncio
async def test_call_group_fills_defaults(config):
config.register_global(subgroup={"foo": True})
subgroup = await config.subgroup()
assert "foo" in subgroup
@pytest.mark.asyncio
async def test_group_call_ctxmgr_writes(config):
config.register_global(subgroup={"foo": True})
async with config.subgroup() as subgroup:
subgroup["bar"] = False
subgroup = await config.subgroup()
assert subgroup == {"foo": True, "bar": False}
@pytest.mark.asyncio
async def test_all_works_as_ctxmgr(config):
config.register_global(subgroup={"foo": True})
async with config.subgroup.all() as subgroup:
subgroup["bar"] = False
subgroup = await config.subgroup()
assert subgroup == {"foo": True, "bar": False}
@pytest.mark.asyncio
async def test_get_raw_mixes_defaults(config):
config.register_global(subgroup={"foo": True})
await config.subgroup.set_raw("bar", value=False)
subgroup = await config.get_raw("subgroup")
assert subgroup == {"foo": True, "bar": False}
@pytest.mark.asyncio
async def test_cast_str_raw(config):
await config.set_raw(123, 456, value=True)
assert await config.get_raw(123, 456) is True
assert await config.get_raw("123", "456") is True
await config.clear_raw("123", 456)
@pytest.mark.asyncio
async def test_cast_str_nested(config):
config.register_global(foo={})
await config.foo.set({123: True, 456: {789: False}})
assert await config.foo() == {"123": True, "456": {"789": False}}
def test_config_custom_noinit(config):
with pytest.raises(ValueError):
config.custom("TEST", 1, 2, 3)
def test_config_custom_init(config):
config.init_custom("TEST", 3)
config.custom("TEST", 1, 2, 3)
def test_config_custom_doubleinit(config):
config.init_custom("TEST", 3)
with pytest.raises(ValueError):
config.init_custom("TEST", 2)
@pytest.mark.asyncio
async def test_config_locks_cache(config, empty_guild):
lock1 = config.foo.get_lock()
assert lock1 is config.foo.get_lock()
lock2 = config.guild(empty_guild).foo.get_lock()
assert lock2 is config.guild(empty_guild).foo.get_lock()
assert lock1 is not lock2
@pytest.mark.asyncio
async def test_config_value_atomicity(config):
config.register_global(foo=[])
tasks = []
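    # Each task performs a read-modify-write of the same list; holding the value
    # lock serializes those tasks so that no append is lost.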
for _ in range(15):
async def func():
async with config.foo.get_lock():
foo = await config.foo()
foo.append(0)
await asyncio.sleep(0.1)
await config.foo.set(foo)
tasks.append(func())
await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED)
assert len(await config.foo()) == 15
@pytest.mark.asyncio
async def test_config_ctxmgr_atomicity(config):
config.register_global(foo=[])
tasks = []
for _ in range(15):
async def func():
async with config.foo() as foo:
foo.append(0)
await asyncio.sleep(0.1)
tasks.append(func())
await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED)
assert len(await config.foo()) == 15
@pytest.mark.asyncio
async def test_set_with_partial_primary_keys(config):
config.init_custom("CUSTOM", 3)
await config.custom("CUSTOM", "1").set({"11": {"111": {"foo": "bar"}}})
assert await config.custom("CUSTOM", "1", "11", "111").foo() == "bar"
await config.custom("CUSTOM", "2").set(
{
"11": {"111": {"foo": "bad"}},
"22": {"111": {"foo": "baz"}},
"33": {"111": {"foo": "boo"}, "222": {"foo": "boz"}},
}
)
assert await config.custom("CUSTOM", "2", "11", "111").foo() == "bad"
assert await config.custom("CUSTOM", "2", "22", "111").foo() == "baz"
assert await config.custom("CUSTOM", "2", "33", "111").foo() == "boo"
assert await config.custom("CUSTOM", "2", "33", "222").foo() == "boz"
await config.custom("CUSTOM", "2").set({"22": {}, "33": {"111": {}, "222": {"foo": "biz"}}})
with pytest.raises(KeyError):
await config.custom("CUSTOM").get_raw("2", "11")
with pytest.raises(KeyError):
await config.custom("CUSTOM").get_raw("2", "22", "111")
with pytest.raises(KeyError):
await config.custom("CUSTOM").get_raw("2", "33", "111", "foo")
assert await config.custom("CUSTOM", "2", "33", "222").foo() == "biz"
@pytest.mark.asyncio
async def test_raw_with_partial_primary_keys(config):
config.init_custom("CUSTOM", 1)
await config.custom("CUSTOM").set_raw("primary_key", "identifier", value=True)
assert await config.custom("CUSTOM", "primary_key").identifier() is True
await config.custom("CUSTOM").set_raw(value={"primary_key": {"identifier": False}})
assert await config.custom("CUSTOM", "primary_key").identifier() is False
"""
The following PARAMS can be generated with:
from functools import reduce
from pprint import pprint
def generate_test_args(print_args=True):
pkeys = ("1", "2", "3")
identifiers = ("foo",)
full_dict = {"1": {"2": {"3": {"foo": "bar"}}}}
argvalues = [
(
pkeys[:x],
(pkeys[x:] + identifiers)[:y],
reduce(lambda d, k: d[k], (pkeys + identifiers)[:x+y], full_dict),
)
for x in range(len(pkeys) + 1)
for y in range(len(pkeys) + len(identifiers) - x + 1)
]
if print_args:
print("[")
for args in argvalues:
print(f" {args!r},")
print("]")
else:
return argvalues
generate_test_args()
"""
PARAMS = [
((), (), {"1": {"2": {"3": {"foo": "bar"}}}}),
((), (1,), {"2": {"3": {"foo": "bar"}}}),
((), (1, 2), {"3": {"foo": "bar"}}),
((), (1, 2, 3), {"foo": "bar"}),
((), (1, 2, 3, "foo"), "bar"),
((1,), (), {"2": {"3": {"foo": "bar"}}}),
((1,), (2,), {"3": {"foo": "bar"}}),
((1,), (2, 3), {"foo": "bar"}),
((1,), (2, 3, "foo"), "bar"),
((1, 2), (), {"3": {"foo": "bar"}}),
((1, 2), (3,), {"foo": "bar"}),
((1, 2), (3, "foo"), "bar"),
((1, 2, 3), (), {"foo": "bar"}),
((1, 2, 3), ("foo",), "bar"),
]
@pytest.mark.parametrize("pkeys, raw_args, result", PARAMS)
@pytest.mark.asyncio
async def test_config_custom_partial_pkeys_get(config, pkeys, raw_args, result):
# setup
config.init_custom("TEST", 3)
config.register_custom("TEST")
await config.custom("TEST", 1, 2, 3).set({"foo": "bar"})
group = config.custom("TEST", *pkeys)
assert await group.get_raw(*raw_args) == result
@pytest.mark.parametrize("pkeys, raw_args, result", PARAMS)
@pytest.mark.asyncio
async def test_config_custom_partial_pkeys_set(config, pkeys, raw_args, result):
# setup
config.init_custom("TEST", 3)
config.register_custom("TEST")
await config.custom("TEST", 1, 2, 3).set({"foo": "blah"})
group = config.custom("TEST", *pkeys)
await group.set_raw(*raw_args, value=result)
assert await group.get_raw(*raw_args) == result
|
from typing import Optional, Sequence
from pysmartthings import Attribute, Capability
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_MOVING,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_PRESENCE,
DEVICE_CLASS_PROBLEM,
DEVICE_CLASS_SOUND,
BinarySensorEntity,
)
from . import SmartThingsEntity
from .const import DATA_BROKERS, DOMAIN
CAPABILITY_TO_ATTRIB = {
Capability.acceleration_sensor: Attribute.acceleration,
Capability.contact_sensor: Attribute.contact,
Capability.filter_status: Attribute.filter_status,
Capability.motion_sensor: Attribute.motion,
Capability.presence_sensor: Attribute.presence,
Capability.sound_sensor: Attribute.sound,
Capability.tamper_alert: Attribute.tamper,
Capability.valve: Attribute.valve,
Capability.water_sensor: Attribute.water,
}
ATTRIB_TO_CLASS = {
Attribute.acceleration: DEVICE_CLASS_MOVING,
Attribute.contact: DEVICE_CLASS_OPENING,
Attribute.filter_status: DEVICE_CLASS_PROBLEM,
Attribute.motion: DEVICE_CLASS_MOTION,
Attribute.presence: DEVICE_CLASS_PRESENCE,
Attribute.sound: DEVICE_CLASS_SOUND,
Attribute.tamper: DEVICE_CLASS_PROBLEM,
Attribute.valve: DEVICE_CLASS_OPENING,
Attribute.water: DEVICE_CLASS_MOISTURE,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add binary sensors for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
sensors = []
for device in broker.devices.values():
for capability in broker.get_assigned(device.device_id, "binary_sensor"):
attrib = CAPABILITY_TO_ATTRIB[capability]
sensors.append(SmartThingsBinarySensor(device, attrib))
async_add_entities(sensors)
def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]:
"""Return all capabilities supported if minimum required are present."""
return [
capability for capability in CAPABILITY_TO_ATTRIB if capability in capabilities
]
class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity):
"""Define a SmartThings Binary Sensor."""
def __init__(self, device, attribute):
"""Init the class."""
super().__init__(device)
self._attribute = attribute
@property
def name(self) -> str:
"""Return the name of the binary sensor."""
return f"{self._device.label} {self._attribute}"
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return f"{self._device.device_id}.{self._attribute}"
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._device.status.is_on(self._attribute)
@property
def device_class(self):
"""Return the class of this device."""
return ATTRIB_TO_CLASS[self._attribute]
|
import os
from psdash.run import PsDashRunner
from psdash.node import LocalNode
import gevent
import socket
import unittest2
import tempfile
import time
class TestRunner(unittest2.TestCase):
def test_args_log(self):
_, filename = tempfile.mkstemp()
r = PsDashRunner(args=['-l', filename])
self.assertEqual(r.app.config['PSDASH_LOGS'][0], filename)
def test_args_bind(self):
r = PsDashRunner(args=['-b', '10.0.0.1'])
self.assertEqual(r.app.config['PSDASH_BIND_HOST'], '10.0.0.1')
def test_args_port(self):
r = PsDashRunner(args=['-p', '5555'])
self.assertEqual(r.app.config['PSDASH_PORT'], 5555)
def test_args_debug(self):
r = PsDashRunner(args=['-d'])
self.assertTrue(r.app.debug)
def test_default_args_dont_override_config(self):
_, filename = tempfile.mkstemp()
with open(filename, "w") as f:
f.write("PSDASH_LOGS = ['/var/log/boot.log', '/var/log/dmesg']\n")
f.flush()
os.environ['PSDASH_CONFIG'] = filename
r = PsDashRunner()
        self.assertEqual(r.app.config['PSDASH_LOGS'], ['/var/log/boot.log', '/var/log/dmesg'])
del os.environ['PSDASH_CONFIG']
def test_reload_logs(self):
_, filename = tempfile.mkstemp()
r = PsDashRunner(args=['-l', filename])
pre_count = len(r.get_local_node().logs.available)
r.get_local_node().logs.add_patterns(r.app.config['PSDASH_LOGS'])
post_count = len(r.get_local_node().logs.available)
self.assertEqual(pre_count, post_count)
def test_update_net_io_counters(self):
r = PsDashRunner()
socket.getaddrinfo('example.org', 80)
counters = r.get_local_node().net_io_counters.update()
for c in counters.itervalues():
if c['rx_per_sec'] > 0 and c['tx_per_sec'] > 0:
break
else:
self.fail("Didn't find any changed network interface")
def test_local_node_is_added(self):
r = PsDashRunner()
self.assertIsInstance(r.get_local_node(), LocalNode)
def test_register_node_creates_proper_node_dict(self):
r = PsDashRunner()
now = int(time.time())
node = r.register_node('examplehost', 'example.org', 5000)
self.assertEqual(node.host, 'example.org')
self.assertEqual(node.port, 5000)
self.assertEqual(node.last_registered, now)
def test_reregister_node(self):
r = PsDashRunner()
now = int(time.time())
r.register_node('examplehost', 'example.org', 5000)
node = r.register_node('examplehost', 'example.org', 5000)
self.assertEqual(node.host, 'example.org')
self.assertEqual(node.port, 5000)
self.assertEqual(node.last_registered, now)
def test_get_all_nodes(self):
r = PsDashRunner()
r.register_node('examplehost', 'example.org', 5000)
self.assertEqual(len(r.get_nodes()), 2) # local + registered
def test_nodes_from_config(self):
config = {
'PSDASH_NODES': [
{
'name': 'test-node',
'host': 'remotehost.org',
'port': 5000
}
]
}
r = PsDashRunner(config)
self.assertEqual(len(r.get_nodes()), 2)
self.assertIn('remotehost.org:5000', r.get_nodes())
self.assertEqual(r.get_nodes()['remotehost.org:5000'].name, 'test-node')
self.assertEqual(r.get_nodes()['remotehost.org:5000'].host, 'remotehost.org')
self.assertEqual(r.get_nodes()['remotehost.org:5000'].port, 5000)
def test_register_agent(self):
jobs = []
agent_options = {
'PSDASH_AGENT': True,
'PSDASH_PORT': 5001,
'PSDASH_REGISTER_TO': 'http://localhost:5000',
'PSDASH_REGISTER_AS': 'the_agent'
}
r = PsDashRunner()
agent = PsDashRunner(agent_options)
jobs.append(gevent.spawn(r.run))
gevent.sleep(0.3)
jobs.append(gevent.spawn(agent.run))
gevent.sleep(0.3)
self.assertIn('127.0.0.1:5001', r.get_nodes())
        self.assertEqual(r.get_node('127.0.0.1:5001').name, 'the_agent')
        self.assertEqual(r.get_node('127.0.0.1:5001').port, 5001)
r.server.close()
agent.server.close()
gevent.killall(jobs)
def test_register_agent_without_name_defaults_to_hostname(self):
agent_options = {
'PSDASH_AGENT': True,
'PSDASH_PORT': 5001,
'PSDASH_REGISTER_TO': 'http://localhost:5000'
}
r = PsDashRunner()
agent = PsDashRunner(agent_options)
jobs = []
jobs.append(gevent.spawn(r.run))
gevent.sleep(0.3)
jobs.append(gevent.spawn(agent.run))
gevent.sleep(0.3)
self.assertIn('127.0.0.1:5001', r.get_nodes())
        self.assertEqual(r.get_node('127.0.0.1:5001').name, socket.gethostname())
        self.assertEqual(r.get_node('127.0.0.1:5001').port, 5001)
r.server.close()
agent.server.close()
gevent.killall(jobs)
def test_register_agent_to_auth_protected_host(self):
r = PsDashRunner({
'PSDASH_AUTH_USERNAME': 'user',
'PSDASH_AUTH_PASSWORD': 'pass'
})
agent = PsDashRunner({
'PSDASH_AGENT': True,
'PSDASH_PORT': 5001,
'PSDASH_REGISTER_TO': 'http://localhost:5000',
'PSDASH_AUTH_USERNAME': 'user',
'PSDASH_AUTH_PASSWORD': 'pass'
})
jobs = []
jobs.append(gevent.spawn(r.run))
gevent.sleep(0.3)
jobs.append(gevent.spawn(agent.run))
gevent.sleep(0.3)
self.assertIn('127.0.0.1:5001', r.get_nodes())
        self.assertEqual(r.get_node('127.0.0.1:5001').name, socket.gethostname())
        self.assertEqual(r.get_node('127.0.0.1:5001').port, 5001)
r.server.close()
agent.server.close()
gevent.killall(jobs)
|
import pypck
from homeassistant.components.climate import ClimateEntity, const
from homeassistant.const import ATTR_TEMPERATURE, CONF_ADDRESS, CONF_UNIT_OF_MEASUREMENT
from . import LcnDevice
from .const import (
CONF_CONNECTIONS,
CONF_LOCKABLE,
CONF_MAX_TEMP,
CONF_MIN_TEMP,
CONF_SETPOINT,
CONF_SOURCE,
DATA_LCN,
)
from .helpers import get_connection
async def async_setup_platform(
hass, hass_config, async_add_entities, discovery_info=None
):
"""Set up the LCN climate platform."""
if discovery_info is None:
return
devices = []
for config in discovery_info:
address, connection_id = config[CONF_ADDRESS]
addr = pypck.lcn_addr.LcnAddr(*address)
connections = hass.data[DATA_LCN][CONF_CONNECTIONS]
connection = get_connection(connections, connection_id)
address_connection = connection.get_address_conn(addr)
devices.append(LcnClimate(config, address_connection))
async_add_entities(devices)
class LcnClimate(LcnDevice, ClimateEntity):
"""Representation of a LCN climate device."""
def __init__(self, config, address_connection):
"""Initialize of a LCN climate device."""
super().__init__(config, address_connection)
self.variable = pypck.lcn_defs.Var[config[CONF_SOURCE]]
self.setpoint = pypck.lcn_defs.Var[config[CONF_SETPOINT]]
self.unit = pypck.lcn_defs.VarUnit.parse(config[CONF_UNIT_OF_MEASUREMENT])
self.regulator_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint)
self.is_lockable = config[CONF_LOCKABLE]
self._max_temp = config[CONF_MAX_TEMP]
self._min_temp = config[CONF_MIN_TEMP]
self._current_temperature = None
self._target_temperature = None
self._is_on = None
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
await self.address_connection.activate_status_request_handler(self.variable)
await self.address_connection.activate_status_request_handler(self.setpoint)
@property
def supported_features(self):
"""Return the list of supported features."""
return const.SUPPORT_TARGET_TEMPERATURE
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self.unit.value
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._is_on:
return const.HVAC_MODE_HEAT
return const.HVAC_MODE_OFF
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
modes = [const.HVAC_MODE_HEAT]
if self.is_lockable:
modes.append(const.HVAC_MODE_OFF)
return modes
@property
def max_temp(self):
"""Return the maximum temperature."""
return self._max_temp
@property
def min_temp(self):
"""Return the minimum temperature."""
return self._min_temp
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode == const.HVAC_MODE_HEAT:
self._is_on = True
self.address_connection.lock_regulator(self.regulator_id, False)
elif hvac_mode == const.HVAC_MODE_OFF:
self._is_on = False
self.address_connection.lock_regulator(self.regulator_id, True)
self._target_temperature = None
self.async_write_ha_state()
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
self._target_temperature = temperature
self.address_connection.var_abs(
self.setpoint, self._target_temperature, self.unit
)
self.async_write_ha_state()
def input_received(self, input_obj):
"""Set temperature value when LCN input object is received."""
if not isinstance(input_obj, pypck.inputs.ModStatusVar):
return
if input_obj.get_var() == self.variable:
self._current_temperature = input_obj.get_value().to_var_unit(self.unit)
elif input_obj.get_var() == self.setpoint:
self._is_on = not input_obj.get_value().is_locked_regulator()
if self._is_on:
self._target_temperature = input_obj.get_value().to_var_unit(self.unit)
self.async_write_ha_state()
|
import socket
import voluptuous as vol
from homeassistant.components.media_player.const import DOMAIN as MP_DOMAIN
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
import homeassistant.helpers.config_validation as cv
from .const import CONF_ON_ACTION, DEFAULT_NAME, DOMAIN
def ensure_unique_hosts(value):
"""Validate that all configs have a unique host."""
vol.Schema(vol.Unique("duplicate host entries found"))(
[socket.gethostbyname(entry[CONF_HOST]) for entry in value]
)
return value
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
                vol.All(
                    cv.deprecated(CONF_PORT),
                    vol.Schema(
                        {
                            vol.Required(CONF_HOST): cv.string,
                            vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
                            vol.Optional(CONF_PORT): cv.port,
                            vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
                        }
                    ),
                ),
],
ensure_unique_hosts,
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the Samsung TV integration."""
if DOMAIN in config:
hass.data[DOMAIN] = {}
for entry_config in config[DOMAIN]:
ip_address = await hass.async_add_executor_job(
socket.gethostbyname, entry_config[CONF_HOST]
)
hass.data[DOMAIN][ip_address] = {
CONF_ON_ACTION: entry_config.get(CONF_ON_ACTION)
}
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data=entry_config
)
)
return True
async def async_setup_entry(hass, entry):
"""Set up the Samsung TV platform."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, MP_DOMAIN)
)
return True
|
import os
import re
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('foo')
def test_hostname(host):
assert re.search(r'instance-[12]', host.check_output('hostname -s'))
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
filename = '/etc/molecule/{}'.format(host.check_output('hostname -s'))
f = host.file(filename)
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import signal
from behave import then
from behave import when
from paasta_tools.utils import _run
@when("we run a trivial command with timeout {timeout} seconds")
def run_command(context, timeout):
fake_cmd = "sleep 1"
context.rc, context.output = _run(fake_cmd, timeout=float(timeout))
@then("the command is killed with signal {killsignal}")
def check_exit_code(context, killsignal):
assert context.rc == -1 * getattr(signal, killsignal)
|
import urllib2
import base64
import re
from diamond.collector import str_to_bool
try:
import json
except ImportError:
import simplejson as json
import diamond.collector
RE_LOGSTASH_INDEX = re.compile(r'^(.*)-\d{4}(\.\d{2}){2,3}$')
class ElasticSearchCollector(diamond.collector.Collector):
def process_config(self):
super(ElasticSearchCollector, self).process_config()
instance_list = self.config['instances']
if isinstance(instance_list, basestring):
instance_list = [instance_list]
if len(instance_list) == 0:
host = self.config['host']
port = self.config['port']
# use empty alias to identify single-instance config
# omitting the use of the alias in the metrics path
instance_list.append('@%s:%s' % (host, port))
self.instances = {}
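        # Illustrative examples of the instance format parsed below:
        # "esprod@es1.example.com:9200" -> alias "esprod", host "es1.example.com",
        # port 9200; a bare "es1.example.com" -> alias "default", port 9200.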
for instance in instance_list:
if '@' in instance:
(alias, hostport) = instance.split('@', 1)
else:
alias = 'default'
hostport = instance
if ':' in hostport:
host, port = hostport.split(':', 1)
else:
host = hostport
port = 9200
self.instances[alias] = (host, int(port))
def get_default_config_help(self):
config_help = super(ElasticSearchCollector,
self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
'user': "Username for Basic/Shield auth",
'password': "Password for Basic/Shield auth",
'instances': "List of instances. When set this overrides "
"the 'host' and 'port' settings. Instance format: "
"instance [<alias>@]<hostname>[:<port>]",
'scheme': "http (default) or https",
'cluster': "cluster/node/shard health",
'stats':
"Available stats:\n" +
" - jvm (JVM information)\n" +
" - thread_pool (Thread pool information)\n" +
" - indices (Individual index stats)\n",
'logstash_mode':
"If 'indices' stats are gathered, remove " +
"the YYYY.MM.DD suffix from the index name " +
"(e.g. logstash-adm-syslog-2014.01.03) and use that " +
"as a bucket for all 'day' index stats.",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ElasticSearchCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 9200,
'user': '',
'password': '',
'instances': [],
'scheme': 'http',
'path': 'elasticsearch',
'stats': ['jvm', 'thread_pool', 'indices'],
'logstash_mode': False,
'cluster': False,
})
return config
def _get(self, scheme, host, port, path, assert_key=None):
"""
Execute a ES API call. Convert response into JSON and
optionally assert its structure.
"""
url = '%s://%s:%i/%s' % (scheme, host, port, path)
try:
request = urllib2.Request(url)
if self.config['user'] and self.config['password']:
base64string = base64.standard_b64encode(
'%s:%s' % (self.config['user'], self.config['password']))
request.add_header("Authorization", "Basic %s" % base64string)
response = urllib2.urlopen(request)
except Exception as err:
self.log.error("%s: %s" % (url, err))
return False
try:
doc = json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from elasticsearch as a" +
" json object")
return False
if assert_key and assert_key not in doc:
self.log.error("Bad response from elasticsearch, expected key "
"'%s' was missing for %s" % (assert_key, url))
return False
return doc
def _copy_one_level(self, metrics, prefix, data, filter=lambda key: True):
for key, value in data.iteritems():
if filter(key):
metric_path = '%s.%s' % (prefix, key)
self._set_or_sum_metric(metrics, metric_path, value)
def _copy_two_level(self, metrics, prefix, data, filter=lambda key: True):
for key1, d1 in data.iteritems():
self._copy_one_level(metrics, '%s.%s' % (prefix, key1), d1, filter)
def _index_metrics(self, metrics, prefix, index):
if self.config['logstash_mode']:
"""Remove the YYYY.MM.DD bit from logstash indices.
This way we keep using the same metric naming and not polute
our metrics system (e.g. Graphite) with new metrics every day."""
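            # For example, a prefix such as "indices.logstash-adm-syslog-2014.01.03"
            # collapses to "indices.logstash-adm-syslog".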
m = RE_LOGSTASH_INDEX.match(prefix)
if m:
prefix = m.group(1)
                # keep a tally of the number of indexes
self._set_or_sum_metric(metrics,
'%s.indexes_in_group' % prefix, 1)
self._add_metric(metrics, '%s.docs.count' % prefix, index,
['docs', 'count'])
self._add_metric(metrics, '%s.docs.deleted' % prefix, index,
['docs', 'deleted'])
self._add_metric(metrics, '%s.datastore.size' % prefix, index,
['store', 'size_in_bytes'])
# publish all 'total' and 'time_in_millis' stats
self._copy_two_level(
metrics, prefix, index,
lambda key: key.endswith('total') or key.endswith('time_in_millis') or key.endswith('in_bytes')) # noqa
def _add_metric(self, metrics, metric_path, data, data_path):
"""If the path specified by data_path (a list) exists in data,
add to metrics. Use when the data path may not be present"""
current_item = data
for path_element in data_path:
current_item = current_item.get(path_element)
if current_item is None:
return
self._set_or_sum_metric(metrics, metric_path, current_item)
def _set_or_sum_metric(self, metrics, metric_path, value):
"""If we already have a datapoint for this metric, lets add
the value. This is used when the logstash mode is enabled."""
if metric_path in metrics:
metrics[metric_path] += value
else:
metrics[metric_path] = value
def collect_instance_cluster_stats(self, scheme, host, port, metrics):
result = self._get(scheme, host, port, '_cluster/health')
if not result:
return
self._add_metric(metrics, 'cluster_health.nodes.total',
result, ['number_of_nodes'])
self._add_metric(metrics, 'cluster_health.nodes.data',
result, ['number_of_data_nodes'])
self._add_metric(metrics, 'cluster_health.nodes.pending_tasks',
result, ['number_of_pending_tasks'])
self._add_metric(metrics, 'cluster_health.shards.active_primary',
result, ['active_primary_shards'])
self._add_metric(metrics, 'cluster_health.shards.active',
result, ['active_shards'])
self._add_metric(metrics, 'cluster_health.shards.active_percent',
result, ['active_shards_percent_as_number'])
self._add_metric(metrics, 'cluster_health.shards.delayed_unassigned',
result, ['delayed_unassigned_shards'])
self._add_metric(metrics, 'cluster_health.shards.relocating',
result, ['relocating_shards'])
self._add_metric(metrics, 'cluster_health.shards.unassigned',
result, ['unassigned_shards'])
self._add_metric(metrics, 'cluster_health.shards.initializing',
result, ['initializing_shards'])
CLUSTER_STATUS = {
'green': 2,
'yellow': 1,
'red': 0
}
metrics['cluster_health.status'] = CLUSTER_STATUS[result['status']]
def collect_instance_index_stats(self, scheme, host, port, metrics):
result = self._get(scheme, host, port, '_stats', '_all')
if not result:
return
_all = result['_all']
self._index_metrics(metrics, 'indices._all', _all['primaries'])
if 'indices' in _all:
indices = _all['indices']
elif 'indices' in result: # elasticsearch >= 0.90RC2
indices = result['indices']
else:
return
for name, index in indices.iteritems():
self._index_metrics(metrics, 'indices.%s' % name,
index['primaries'])
def collect_instance(self, alias, scheme, host, port):
result = self._get(scheme, host, port, '_nodes/_local/stats', 'nodes')
if not result:
return
metrics = {}
node = result['nodes'].keys()[0]
data = result['nodes'][node]
#
# http connections to ES
metrics['http.current'] = data['http']['current_open']
#
# indices
indices = data['indices']
metrics['indices.docs.count'] = indices['docs']['count']
metrics['indices.docs.deleted'] = indices['docs']['deleted']
metrics['indices.datastore.size'] = indices['store']['size_in_bytes']
transport = data['transport']
metrics['transport.rx.count'] = transport['rx_count']
metrics['transport.rx.size'] = transport['rx_size_in_bytes']
metrics['transport.tx.count'] = transport['tx_count']
metrics['transport.tx.size'] = transport['tx_size_in_bytes']
# elasticsearch < 0.90RC2
if 'cache' in indices:
cache = indices['cache']
self._add_metric(metrics, 'cache.bloom.size', cache,
['bloom_size_in_bytes'])
self._add_metric(metrics, 'cache.field.evictions', cache,
['field_evictions'])
self._add_metric(metrics, 'cache.field.size', cache,
['field_size_in_bytes'])
metrics['cache.filter.count'] = cache['filter_count']
metrics['cache.filter.evictions'] = cache['filter_evictions']
metrics['cache.filter.size'] = cache['filter_size_in_bytes']
self._add_metric(metrics, 'cache.id.size', cache,
['id_cache_size_in_bytes'])
# elasticsearch >= 0.90RC2
if 'filter_cache' in indices:
cache = indices['filter_cache']
metrics['cache.filter.evictions'] = cache['evictions']
metrics['cache.filter.size'] = cache['memory_size_in_bytes']
self._add_metric(metrics, 'cache.filter.count', cache, ['count'])
# elasticsearch >= 0.90RC2
if 'id_cache' in indices:
cache = indices['id_cache']
self._add_metric(metrics, 'cache.id.size', cache,
['memory_size_in_bytes'])
if 'query_cache' in indices:
cache = indices['query_cache']
metrics['cache.query.evictions'] = cache['evictions']
metrics['cache.query.size'] = cache['memory_size_in_bytes']
self._add_metric(metrics, 'cache.query.hit_count', cache,
['hit_count'])
self._add_metric(metrics, 'cache.query.miss_count', cache,
['miss_count'])
# elasticsearch >= 0.90
if 'fielddata' in indices:
fielddata = indices['fielddata']
self._add_metric(metrics, 'fielddata.size', fielddata,
['memory_size_in_bytes'])
self._add_metric(metrics, 'fielddata.evictions', fielddata,
['evictions'])
if 'segments' in indices:
segments = indices['segments']
self._add_metric(metrics, 'segments.count', segments, ['count'])
self._add_metric(metrics, 'segments.mem.size', segments,
['memory_in_bytes'])
self._add_metric(metrics, 'segments.index_writer.mem.size',
segments, ['index_writer_memory_in_bytes'])
self._add_metric(metrics, 'segments.index_writer.mem.max_size',
segments, ['index_writer_max_memory_in_bytes'])
self._add_metric(metrics, 'segments.version_map.mem.size',
segments, ['version_map_memory_in_bytes'])
self._add_metric(metrics, 'segments.fixed_bit_set.mem.size',
segments, ['fixed_bit_set_memory_in_bytes'])
#
# process mem/cpu (may not be present, depending on access
# restrictions)
self._add_metric(metrics, 'process.cpu.percent', data,
['process', 'cpu', 'percent'])
self._add_metric(metrics, 'process.mem.resident', data,
['process', 'mem', 'resident_in_bytes'])
self._add_metric(metrics, 'process.mem.share', data,
['process', 'mem', 'share_in_bytes'])
self._add_metric(metrics, 'process.mem.virtual', data,
['process', 'mem', 'total_virtual_in_bytes'])
#
# filesystem (may not be present, depending on access restrictions)
if 'fs' in data and 'data' in data['fs'] and data['fs']['data']:
fs_data = data['fs']['data'][0]
self._add_metric(metrics, 'disk.reads.count', fs_data,
['disk_reads'])
self._add_metric(metrics, 'disk.reads.size', fs_data,
['disk_read_size_in_bytes'])
self._add_metric(metrics, 'disk.writes.count', fs_data,
['disk_writes'])
self._add_metric(metrics, 'disk.writes.size', fs_data,
['disk_write_size_in_bytes'])
#
# jvm
if 'jvm' in self.config['stats']:
jvm = data['jvm']
mem = jvm['mem']
for k in ('heap_used', 'heap_committed', 'non_heap_used',
'non_heap_committed'):
metrics['jvm.mem.%s' % k] = mem['%s_in_bytes' % k]
if 'heap_used_percent' in mem:
metrics['jvm.mem.heap_used_percent'] = mem['heap_used_percent']
for pool, d in mem['pools'].iteritems():
pool = pool.replace(' ', '_')
metrics['jvm.mem.pools.%s.used' % pool] = d['used_in_bytes']
metrics['jvm.mem.pools.%s.max' % pool] = d['max_in_bytes']
metrics['jvm.threads.count'] = jvm['threads']['count']
gc = jvm['gc']
collection_count = 0
collection_time_in_millis = 0
for collector, d in gc['collectors'].iteritems():
metrics['jvm.gc.collection.%s.count' % collector] = d[
'collection_count']
collection_count += d['collection_count']
metrics['jvm.gc.collection.%s.time' % collector] = d[
'collection_time_in_millis']
collection_time_in_millis += d['collection_time_in_millis']
# calculate the totals, as they're absent in elasticsearch >
# 0.90.10
if 'collection_count' in gc:
metrics['jvm.gc.collection.count'] = gc['collection_count']
else:
metrics['jvm.gc.collection.count'] = collection_count
k = 'collection_time_in_millis'
if k in gc:
metrics['jvm.gc.collection.time'] = gc[k]
else:
metrics['jvm.gc.collection.time'] = collection_time_in_millis
#
# thread_pool
if 'thread_pool' in self.config['stats']:
self._copy_two_level(metrics, 'thread_pool', data['thread_pool'])
#
# network
if 'network' in data:
self._copy_two_level(metrics, 'network', data['network'])
#
# cluster (optional)
if str_to_bool(self.config['cluster']):
self.collect_instance_cluster_stats(scheme, host, port, metrics)
#
# indices (optional)
if 'indices' in self.config['stats']:
self.collect_instance_index_stats(scheme, host, port, metrics)
#
# all done, now publishing all metrics
for key in metrics:
full_key = key
if alias != '':
full_key = '%s.%s' % (alias, full_key)
self.publish(full_key, metrics[key])
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
scheme = self.config['scheme']
for alias in sorted(self.instances):
(host, port) = self.instances[alias]
self.collect_instance(alias, scheme, host, port)
|
import numpy as np
def _find_outliers(X, threshold=3.0, max_iter=2, tail=0):
"""Find outliers based on iterated Z-scoring.
This procedure compares the absolute z-score against the threshold.
After excluding local outliers, the comparison is repeated until no
local outlier is present any more.
Parameters
----------
    X : np.ndarray of float, shape (n_elements,)
The scores for which to find outliers.
threshold : float
The value above which a feature is classified as outlier.
max_iter : int
The maximum number of iterations.
tail : {0, 1, -1}
Whether to search for outliers on both extremes of the z-scores (0),
or on just the positive (1) or negative (-1) side.
Returns
-------
    bad_idx : np.ndarray of int, shape (n_outliers,)
The outlier indices.
"""
from scipy.stats import zscore
my_mask = np.zeros(len(X), dtype=bool)
for _ in range(max_iter):
X = np.ma.masked_array(X, my_mask)
if tail == 0:
this_z = np.abs(zscore(X))
elif tail == 1:
this_z = zscore(X)
elif tail == -1:
this_z = -zscore(X)
else:
raise ValueError("Tail parameter %s not recognised." % tail)
local_bad = this_z > threshold
my_mask = np.max([my_mask, local_bad], 0)
if not np.any(local_bad):
break
bad_idx = np.where(my_mask)[0]
return bad_idx
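
if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module: with default
    # settings a single extreme score should be flagged by the iterated
    # z-scoring implemented above. The data below are made up.
    _rng = np.random.RandomState(0)
    _scores = _rng.randn(100)
    _scores[10] = 25.0  # inject one obvious outlier
    print(_find_outliers(_scores, threshold=3.0))  # expected to include index 10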
|
import hashlib
import os
import sys
import urllib.request
from optparse import OptionParser
NAME = 'download_checkmd5.py'
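# Illustrative invocation (URI, destination and checksum below are made up):
#   download_checkmd5.py http://example.com/pkg.tar.gz build/pkg.tar.gz 0123456789abcdef0123456789abcdef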
def main():
parser = OptionParser(usage='usage: %prog URI dest [md5sum]', prog=NAME)
options, args = parser.parse_args()
md5sum = None
if len(args) == 2:
uri, dest = args
elif len(args) == 3:
uri, dest, md5sum = args
else:
parser.error('wrong number of arguments')
# Create intermediate directories as necessary, #2970
d = os.path.dirname(dest)
if len(d) and not os.path.exists(d):
os.makedirs(d)
fresh = False
if not os.path.exists(dest):
sys.stdout.write('[rosbuild] Downloading %s to %s...' % (uri, dest))
sys.stdout.flush()
urllib.request.urlretrieve(uri, dest)
sys.stdout.write('Done\n')
fresh = True
if md5sum:
m = hashlib.md5(open(dest, "rb").read())
d = m.hexdigest()
print('[rosbuild] Checking md5sum on %s' % (dest))
if d != md5sum:
if not fresh:
print('[rosbuild] WARNING: md5sum mismatch (%s != %s); re-downloading file %s' % (d, md5sum, dest))
os.remove(dest)
# Try one more time
urllib.request.urlretrieve(uri, dest)
                m = hashlib.md5(open(dest, "rb").read())
d = m.hexdigest()
if d != md5sum:
print('[rosbuild] ERROR: md5sum mismatch (%s != %s) on %s; aborting' % (d, md5sum, dest))
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
|
from typing import Any, Dict
from aiohttp import payload, web
def aiohttp_serialize_response(response: web.Response) -> Dict[str, Any]:
"""Serialize an aiohttp response to a dictionary."""
body = response.body
if body is None:
pass
elif isinstance(body, payload.StringPayload):
# pylint: disable=protected-access
body = body._value.decode(body.encoding)
elif isinstance(body, bytes):
body = body.decode(response.charset or "utf-8")
else:
raise ValueError("Unknown payload encoding")
return {"status": response.status, "body": body, "headers": dict(response.headers)}
|
import datetime
import decimal
import json as stdjson
import uuid
try:
from django.utils.functional import Promise as DjangoPromise
except ImportError: # pragma: no cover
class DjangoPromise: # noqa
"""Dummy object."""
try:
import simplejson as json
from simplejson.decoder import JSONDecodeError as _DecodeError
_json_extra_kwargs = {'use_decimal': False}
except ImportError: # pragma: no cover
import json # noqa
_json_extra_kwargs = {} # noqa
class _DecodeError(Exception): # noqa
pass
_encoder_cls = type(json._default_encoder)
_default_encoder = None # ... set to JSONEncoder below.
class JSONEncoder(_encoder_cls):
"""Kombu custom json encoder."""
def default(self, o,
dates=(datetime.datetime, datetime.date),
times=(datetime.time,),
textual=(decimal.Decimal, uuid.UUID, DjangoPromise),
isinstance=isinstance,
datetime=datetime.datetime,
text_t=str):
reducer = getattr(o, '__json__', None)
if reducer is not None:
return reducer()
else:
if isinstance(o, dates):
if not isinstance(o, datetime):
o = datetime(o.year, o.month, o.day, 0, 0, 0, 0)
r = o.isoformat()
if r.endswith("+00:00"):
r = r[:-6] + "Z"
return r
elif isinstance(o, times):
return o.isoformat()
elif isinstance(o, textual):
return text_t(o)
return super().default(o)
_default_encoder = JSONEncoder
def dumps(s, _dumps=json.dumps, cls=None, default_kwargs=None, **kwargs):
"""Serialize object to json string."""
if not default_kwargs:
default_kwargs = _json_extra_kwargs
return _dumps(s, cls=cls or _default_encoder,
**dict(default_kwargs, **kwargs))
def loads(s, _loads=json.loads, decode_bytes=True):
"""Deserialize json from string."""
# None of the json implementations supports decoding from
# a buffer/memoryview, or even reading from a stream
# (load is just loads(fp.read()))
# but this is Python, we love copying strings, preferably many times
# over. Note that pickle does support buffer/memoryview
# </rant>
if isinstance(s, memoryview):
s = s.tobytes().decode('utf-8')
elif isinstance(s, bytearray):
s = s.decode('utf-8')
elif decode_bytes and isinstance(s, bytes):
s = s.decode('utf-8')
try:
return _loads(s)
except _DecodeError:
# catch "Unpaired high surrogate" error
return stdjson.loads(s)
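
if __name__ == '__main__':  # pragma: no cover
    # Minimal illustration, not part of the original module: the custom encoder
    # serializes datetimes, Decimals and UUIDs that a stock encoder would reject,
    # and loads() accepts bytes as well as str.
    print(dumps({'when': datetime.datetime(2020, 1, 1, 12, 0),
                 'amount': decimal.Decimal('1.50'),
                 'id': uuid.UUID(int=0)}))
    print(loads(b'{"a": 1}'))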
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cPickle
import os
import sys
import tarfile
import numpy as np
from six.moves import urllib
import tensorflow as tf
from datasets import dataset_utils
# The URL where the CIFAR data can be downloaded.
_DATA_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
# The number of training files.
_NUM_TRAIN_FILES = 5
# The height and width of each image.
_IMAGE_SIZE = 32
# The names of the classes.
_CLASS_NAMES = [
'airplane',
'automobile',
'bird',
'cat',
'deer',
'dog',
'frog',
'horse',
'ship',
'truck',
]
def _add_to_tfrecord(filename, tfrecord_writer, offset=0):
"""Loads data from the cifar10 pickle files and writes files to a TFRecord.
Args:
filename: The filename of the cifar10 pickle file.
tfrecord_writer: The TFRecord writer to use for writing.
offset: An offset into the absolute number of images previously written.
Returns:
The new offset.
"""
  with tf.gfile.Open(filename, 'rb') as f:
data = cPickle.load(f)
images = data['data']
num_images = images.shape[0]
images = images.reshape((num_images, 3, 32, 32))
labels = data['labels']
with tf.Graph().as_default():
image_placeholder = tf.placeholder(dtype=tf.uint8)
encoded_image = tf.image.encode_png(image_placeholder)
with tf.Session('') as sess:
for j in range(num_images):
sys.stdout.write('\r>> Reading file [%s] image %d/%d' % (
filename, offset + j + 1, offset + num_images))
sys.stdout.flush()
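        # CIFAR-10 stores images as (channels, height, width); transpose to
        # (height, width, channels) so encode_png receives HWC data.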
image = np.squeeze(images[j]).transpose((1, 2, 0))
label = labels[j]
png_string = sess.run(encoded_image,
feed_dict={image_placeholder: image})
example = dataset_utils.image_to_tfexample(
png_string, 'png', _IMAGE_SIZE, _IMAGE_SIZE, label)
tfrecord_writer.write(example.SerializeToString())
return offset + num_images
def _get_output_filename(dataset_dir, split_name):
"""Creates the output filename.
Args:
dataset_dir: The dataset directory where the dataset is stored.
split_name: The name of the train/test split.
Returns:
An absolute file path.
"""
return '%s/cifar10_%s.tfrecord' % (dataset_dir, split_name)
def _download_and_uncompress_dataset(dataset_dir):
"""Downloads cifar10 and uncompresses it locally.
Args:
dataset_dir: The directory where the temporary files are stored.
"""
filename = _DATA_URL.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
if not os.path.exists(filepath):
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (
filename, float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(_DATA_URL, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
tarfile.open(filepath, 'r:gz').extractall(dataset_dir)
def _clean_up_temporary_files(dataset_dir):
"""Removes temporary files used to create the dataset.
Args:
dataset_dir: The directory where the temporary files are stored.
"""
filename = _DATA_URL.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
tf.gfile.Remove(filepath)
tmp_dir = os.path.join(dataset_dir, 'cifar-10-batches-py')
tf.gfile.DeleteRecursively(tmp_dir)
def run(dataset_dir):
"""Runs the download and conversion operation.
Args:
dataset_dir: The dataset directory where the dataset is stored.
"""
if not tf.gfile.Exists(dataset_dir):
tf.gfile.MakeDirs(dataset_dir)
training_filename = _get_output_filename(dataset_dir, 'train')
testing_filename = _get_output_filename(dataset_dir, 'test')
if tf.gfile.Exists(training_filename) and tf.gfile.Exists(testing_filename):
print('Dataset files already exist. Exiting without re-creating them.')
return
dataset_utils.download_and_uncompress_tarball(_DATA_URL, dataset_dir)
# First, process the training data:
with tf.python_io.TFRecordWriter(training_filename) as tfrecord_writer:
offset = 0
for i in range(_NUM_TRAIN_FILES):
filename = os.path.join(dataset_dir,
'cifar-10-batches-py',
'data_batch_%d' % (i + 1)) # 1-indexed.
offset = _add_to_tfrecord(filename, tfrecord_writer, offset)
# Next, process the testing data:
with tf.python_io.TFRecordWriter(testing_filename) as tfrecord_writer:
filename = os.path.join(dataset_dir,
'cifar-10-batches-py',
'test_batch')
_add_to_tfrecord(filename, tfrecord_writer)
# Finally, write the labels file:
labels_to_class_names = dict(zip(range(len(_CLASS_NAMES)), _CLASS_NAMES))
dataset_utils.write_label_file(labels_to_class_names, dataset_dir)
_clean_up_temporary_files(dataset_dir)
print('\nFinished converting the Cifar10 dataset!')
|
from homeassistant.components.cover import (
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
SUPPORT_STOP_TILT,
CoverEntity,
)
from tests.common import MockEntity
ENTITIES = {}
def init(empty=False):
"""Initialize the platform with entities."""
global ENTITIES
ENTITIES = (
[]
if empty
else [
MockCover(
name="Simple cover",
is_on=True,
unique_id="unique_cover",
supports_tilt=False,
),
MockCover(
name="Set position cover",
is_on=True,
unique_id="unique_set_pos_cover",
current_cover_position=50,
supports_tilt=False,
),
MockCover(
name="Set tilt position cover",
is_on=True,
unique_id="unique_set_pos_tilt_cover",
current_cover_tilt_position=50,
supports_tilt=True,
),
MockCover(
name="Tilt cover",
is_on=True,
unique_id="unique_tilt_cover",
supports_tilt=True,
),
]
)
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Return mock entities."""
async_add_entities_callback(ENTITIES)
class MockCover(MockEntity, CoverEntity):
"""Mock Cover class."""
@property
def is_closed(self):
"""Return if the cover is closed or not."""
return False
@property
def current_cover_position(self):
"""Return current position of cover."""
return self._handle("current_cover_position")
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt."""
return self._handle("current_cover_tilt_position")
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
if self._handle("supports_tilt"):
supported_features |= (
SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT
)
if self.current_cover_position is not None:
supported_features |= SUPPORT_SET_POSITION
if self.current_cover_tilt_position is not None:
supported_features |= (
SUPPORT_OPEN_TILT
| SUPPORT_CLOSE_TILT
| SUPPORT_STOP_TILT
| SUPPORT_SET_TILT_POSITION
)
return supported_features
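# Rough illustration (not part of the test platform): for the "Set tilt
# position cover" above, supported_features should resolve to
#   SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
#   | SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT | SUPPORT_STOP_TILT
#   | SUPPORT_SET_TILT_POSITION
# because supports_tilt is True and current_cover_tilt_position is 50, while
# current_cover_position is left unset (assuming MockEntity._handle falls back
# to the CoverEntity default of None), so SUPPORT_SET_POSITION stays unset.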
|
import logging
import numato_gpio as gpio
import voluptuous as vol
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_ID,
CONF_NAME,
CONF_SENSORS,
CONF_SWITCHES,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
PERCENTAGE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
_LOGGER = logging.getLogger(__name__)
DOMAIN = "numato"
CONF_INVERT_LOGIC = "invert_logic"
CONF_DISCOVER = "discover"
CONF_DEVICES = "devices"
CONF_DEVICE_ID = "id"
CONF_PORTS = "ports"
CONF_SRC_RANGE = "source_range"
CONF_DST_RANGE = "destination_range"
CONF_DST_UNIT = "unit"
DEFAULT_INVERT_LOGIC = False
DEFAULT_SRC_RANGE = [0, 1024]
DEFAULT_DST_RANGE = [0.0, 100.0]
DEFAULT_DEV = [f"/dev/ttyACM{i}" for i in range(10)]
PORT_RANGE = range(1, 8)  # ports 1-7 are ADC capable
DATA_PORTS_IN_USE = "ports_in_use"
DATA_API = "api"
def int_range(rng):
"""Validate the input array to describe a range by two integers."""
if not (isinstance(rng[0], int) and isinstance(rng[1], int)):
raise vol.Invalid(f"Only integers are allowed: {rng}")
if len(rng) != 2:
raise vol.Invalid(f"Only two numbers allowed in a range: {rng}")
if rng[0] > rng[1]:
raise vol.Invalid(f"Lower range bound must come first: {rng}")
return rng
def float_range(rng):
"""Validate the input array to describe a range by two floats."""
try:
coe = vol.Coerce(float)
coe(rng[0])
coe(rng[1])
except vol.CoerceInvalid as err:
raise vol.Invalid(f"Only int or float values are allowed: {rng}") from err
if len(rng) != 2:
raise vol.Invalid(f"Only two numbers allowed in a range: {rng}")
if rng[0] > rng[1]:
raise vol.Invalid(f"Lower range bound must come first: {rng}")
return rng
def adc_port_number(num):
"""Validate input number to be in the range of ADC enabled ports."""
try:
num = int(num)
except ValueError as err:
raise vol.Invalid(f"Port numbers must be integers: {num}") from err
if num not in range(1, 8):
raise vol.Invalid(f"Only port numbers from 1 to 7 are ADC capable: {num}")
return num
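# Hedged examples (illustrative only, not part of the integration): the
# validators above can be exercised directly, e.g.
#   int_range([0, 1024])       -> [0, 1024]
#   float_range([0.0, 100.0])  -> [0.0, 100.0]
#   adc_port_number("3")       -> 3
# Each raises vol.Invalid for reversed bounds or non-ADC port numbers.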
ADC_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_SRC_RANGE, default=DEFAULT_SRC_RANGE): int_range,
vol.Optional(CONF_DST_RANGE, default=DEFAULT_DST_RANGE): float_range,
vol.Optional(CONF_DST_UNIT, default=PERCENTAGE): cv.string,
}
)
PORTS_SCHEMA = vol.Schema({cv.positive_int: cv.string})
IO_PORTS_SCHEMA = vol.Schema(
{
vol.Required(CONF_PORTS): PORTS_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
}
)
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ID): cv.positive_int,
CONF_BINARY_SENSORS: IO_PORTS_SCHEMA,
CONF_SWITCHES: IO_PORTS_SCHEMA,
CONF_SENSORS: {CONF_PORTS: {adc_port_number: ADC_SCHEMA}},
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: {
CONF_DEVICES: vol.All(cv.ensure_list, [DEVICE_SCHEMA]),
vol.Optional(CONF_DISCOVER, default=DEFAULT_DEV): vol.All(
cv.ensure_list, [cv.string]
),
},
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Initialize the numato integration.
Discovers available Numato devices and loads the binary_sensor, sensor and
switch platforms.
Returns False on error during device discovery (e.g. duplicate ID),
otherwise returns True.
    No exceptions should occur, since the platforms are initialized on a
    best-effort basis, which means errors are handled locally.
"""
hass.data[DOMAIN] = config[DOMAIN]
try:
gpio.discover(config[DOMAIN][CONF_DISCOVER])
except gpio.NumatoGpioError as err:
_LOGGER.info("Error discovering Numato devices: %s", err)
gpio.cleanup()
return False
_LOGGER.info(
"Initializing Numato 32 port USB GPIO expanders with IDs: %s",
", ".join(str(d) for d in gpio.devices),
)
hass.data[DOMAIN][DATA_API] = NumatoAPI()
def cleanup_gpio(event):
"""Stuff to do before stopping."""
_LOGGER.debug("Clean up Numato GPIO")
gpio.cleanup()
if DATA_API in hass.data[DOMAIN]:
hass.data[DOMAIN][DATA_API].ports_registered.clear()
def prepare_gpio(event):
"""Stuff to do when home assistant starts."""
_LOGGER.debug("Setup cleanup at stop for Numato GPIO")
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gpio)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, prepare_gpio)
load_platform(hass, "binary_sensor", DOMAIN, {}, config)
load_platform(hass, "sensor", DOMAIN, {}, config)
load_platform(hass, "switch", DOMAIN, {}, config)
return True
# pylint: disable=no-self-use
class NumatoAPI:
"""Home-Assistant specific API for numato device access."""
def __init__(self):
"""Initialize API state."""
self.ports_registered = {}
def check_port_free(self, device_id, port, direction):
"""Check whether a port is still free set up.
Fail with exception if it has already been registered.
"""
if (device_id, port) not in self.ports_registered:
self.ports_registered[(device_id, port)] = direction
else:
raise gpio.NumatoGpioError(
"Device {} port {} already in use as {}.".format(
device_id,
port,
"input"
if self.ports_registered[(device_id, port)] == gpio.IN
else "output",
)
)
def check_device_id(self, device_id):
"""Check whether a device has been discovered.
        Fail with exception if the device has not been discovered.
"""
if device_id not in gpio.devices:
raise gpio.NumatoGpioError(f"Device {device_id} not available.")
def check_port(self, device_id, port, direction):
"""Raise an error if the port setup doesn't match the direction."""
self.check_device_id(device_id)
if (device_id, port) not in self.ports_registered:
raise gpio.NumatoGpioError(
f"Port {port} is not set up for numato device {device_id}."
)
msg = {
gpio.OUT: f"Trying to write to device {device_id} port {port} set up as input.",
gpio.IN: f"Trying to read from device {device_id} port {port} set up as output.",
}
if self.ports_registered[(device_id, port)] != direction:
raise gpio.NumatoGpioError(msg[direction])
def setup_output(self, device_id, port):
"""Set up a GPIO as output."""
self.check_device_id(device_id)
self.check_port_free(device_id, port, gpio.OUT)
gpio.devices[device_id].setup(port, gpio.OUT)
def setup_input(self, device_id, port):
"""Set up a GPIO as input."""
self.check_device_id(device_id)
gpio.devices[device_id].setup(port, gpio.IN)
self.check_port_free(device_id, port, gpio.IN)
def write_output(self, device_id, port, value):
"""Write a value to a GPIO."""
self.check_port(device_id, port, gpio.OUT)
gpio.devices[device_id].write(port, value)
def read_input(self, device_id, port):
"""Read a value from a GPIO."""
self.check_port(device_id, port, gpio.IN)
return gpio.devices[device_id].read(port)
def read_adc_input(self, device_id, port):
"""Read an ADC value from a GPIO ADC port."""
self.check_port(device_id, port, gpio.IN)
self.check_device_id(device_id)
return gpio.devices[device_id].adc_read(port)
def edge_detect(self, device_id, port, event_callback):
"""Add detection for RISING and FALLING events."""
self.check_port(device_id, port, gpio.IN)
gpio.devices[device_id].add_event_detect(port, event_callback, gpio.BOTH)
gpio.devices[device_id].notify = True
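# A minimal usage sketch (assumed device id and port, not part of Home
# Assistant itself): once a device has been discovered, every access goes
# through the checks defined above.
#
#   api = NumatoAPI()
#   api.setup_output(0, 4)     # registers (0, 4) as an output port
#   api.write_output(0, 4, 1)  # OK: direction matches the registration
#   api.read_input(0, 4)       # raises NumatoGpioError: port set up as output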
|
import asyncio
from datetime import timedelta
from pydroid_ipcam import PyDroidIPCam
import voluptuous as vol
from homeassistant.components.mjpeg.camera import CONF_MJPEG_URL, CONF_STILL_IMAGE_URL
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PLATFORM,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SENSORS,
CONF_SWITCHES,
CONF_TIMEOUT,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers import discovery
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
ATTR_AUD_CONNS = "Audio Connections"
ATTR_HOST = "host"
ATTR_VID_CONNS = "Video Connections"
CONF_MOTION_SENSOR = "motion_sensor"
DATA_IP_WEBCAM = "android_ip_webcam"
DEFAULT_NAME = "IP Webcam"
DEFAULT_PORT = 8080
DEFAULT_TIMEOUT = 10
DOMAIN = "android_ip_webcam"
SCAN_INTERVAL = timedelta(seconds=10)
SIGNAL_UPDATE_DATA = "android_ip_webcam_update"
KEY_MAP = {
"audio_connections": "Audio Connections",
"adet_limit": "Audio Trigger Limit",
"antibanding": "Anti-banding",
"audio_only": "Audio Only",
"battery_level": "Battery Level",
"battery_temp": "Battery Temperature",
"battery_voltage": "Battery Voltage",
"coloreffect": "Color Effect",
"exposure": "Exposure Level",
"exposure_lock": "Exposure Lock",
"ffc": "Front-facing Camera",
"flashmode": "Flash Mode",
"focus": "Focus",
"focus_homing": "Focus Homing",
"focus_region": "Focus Region",
"focusmode": "Focus Mode",
"gps_active": "GPS Active",
"idle": "Idle",
"ip_address": "IPv4 Address",
"ipv6_address": "IPv6 Address",
"ivideon_streaming": "Ivideon Streaming",
"light": "Light Level",
"mirror_flip": "Mirror Flip",
"motion": "Motion",
"motion_active": "Motion Active",
"motion_detect": "Motion Detection",
"motion_event": "Motion Event",
"motion_limit": "Motion Limit",
"night_vision": "Night Vision",
"night_vision_average": "Night Vision Average",
"night_vision_gain": "Night Vision Gain",
"orientation": "Orientation",
"overlay": "Overlay",
"photo_size": "Photo Size",
"pressure": "Pressure",
"proximity": "Proximity",
"quality": "Quality",
"scenemode": "Scene Mode",
"sound": "Sound",
"sound_event": "Sound Event",
"sound_timeout": "Sound Timeout",
"torch": "Torch",
"video_connections": "Video Connections",
"video_chunk_len": "Video Chunk Length",
"video_recording": "Video Recording",
"video_size": "Video Size",
"whitebalance": "White Balance",
"whitebalance_lock": "White Balance Lock",
"zoom": "Zoom",
}
ICON_MAP = {
"audio_connections": "mdi:speaker",
"battery_level": "mdi:battery",
"battery_temp": "mdi:thermometer",
"battery_voltage": "mdi:battery-charging-100",
"exposure_lock": "mdi:camera",
"ffc": "mdi:camera-front-variant",
"focus": "mdi:image-filter-center-focus",
"gps_active": "mdi:crosshairs-gps",
"light": "mdi:flashlight",
"motion": "mdi:run",
"night_vision": "mdi:weather-night",
"overlay": "mdi:monitor",
"pressure": "mdi:gauge",
"proximity": "mdi:map-marker-radius",
"quality": "mdi:quality-high",
"sound": "mdi:speaker",
"sound_event": "mdi:speaker",
"sound_timeout": "mdi:speaker",
"torch": "mdi:white-balance-sunny",
"video_chunk_len": "mdi:video",
"video_connections": "mdi:eye",
"video_recording": "mdi:record-rec",
"whitebalance_lock": "mdi:white-balance-auto",
}
SWITCHES = [
"exposure_lock",
"ffc",
"focus",
"gps_active",
"motion_detect",
"night_vision",
"overlay",
"torch",
"whitebalance_lock",
"video_recording",
]
SENSORS = [
"audio_connections",
"battery_level",
"battery_temp",
"battery_voltage",
"light",
"motion",
"pressure",
"proximity",
"sound",
"video_connections",
]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(
CONF_TIMEOUT, default=DEFAULT_TIMEOUT
): cv.positive_int,
vol.Optional(
CONF_SCAN_INTERVAL, default=SCAN_INTERVAL
): cv.time_period,
vol.Inclusive(CONF_USERNAME, "authentication"): cv.string,
vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string,
vol.Optional(CONF_SWITCHES): vol.All(
cv.ensure_list, [vol.In(SWITCHES)]
),
vol.Optional(CONF_SENSORS): vol.All(
cv.ensure_list, [vol.In(SENSORS)]
),
vol.Optional(CONF_MOTION_SENSOR): cv.boolean,
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
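# A hedged configuration example (the host, credentials and entity selections
# are assumed example values, not defaults of the integration):
#
#   android_ip_webcam:
#     - host: 192.168.1.50
#       username: admin
#       password: secret
#       switches:
#         - torch
#         - night_vision
#       sensors:
#         - battery_level
#         - light
#       motion_sensor: true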
async def async_setup(hass, config):
"""Set up the IP Webcam component."""
webcams = hass.data[DATA_IP_WEBCAM] = {}
websession = async_get_clientsession(hass)
async def async_setup_ipcamera(cam_config):
"""Set up an IP camera."""
host = cam_config[CONF_HOST]
username = cam_config.get(CONF_USERNAME)
password = cam_config.get(CONF_PASSWORD)
name = cam_config[CONF_NAME]
interval = cam_config[CONF_SCAN_INTERVAL]
switches = cam_config.get(CONF_SWITCHES)
sensors = cam_config.get(CONF_SENSORS)
motion = cam_config.get(CONF_MOTION_SENSOR)
# Init ip webcam
cam = PyDroidIPCam(
hass.loop,
websession,
host,
cam_config[CONF_PORT],
username=username,
password=password,
timeout=cam_config[CONF_TIMEOUT],
)
if switches is None:
switches = [
setting for setting in cam.enabled_settings if setting in SWITCHES
]
if sensors is None:
sensors = [sensor for sensor in cam.enabled_sensors if sensor in SENSORS]
sensors.extend(["audio_connections", "video_connections"])
if motion is None:
motion = "motion_active" in cam.enabled_sensors
async def async_update_data(now):
"""Update data from IP camera in SCAN_INTERVAL."""
await cam.update()
async_dispatcher_send(hass, SIGNAL_UPDATE_DATA, host)
async_track_point_in_utc_time(hass, async_update_data, utcnow() + interval)
await async_update_data(None)
# Load platforms
webcams[host] = cam
mjpeg_camera = {
CONF_PLATFORM: "mjpeg",
CONF_MJPEG_URL: cam.mjpeg_url,
CONF_STILL_IMAGE_URL: cam.image_url,
CONF_NAME: name,
}
if username and password:
mjpeg_camera.update({CONF_USERNAME: username, CONF_PASSWORD: password})
hass.async_create_task(
discovery.async_load_platform(hass, "camera", "mjpeg", mjpeg_camera, config)
)
if sensors:
hass.async_create_task(
discovery.async_load_platform(
hass,
"sensor",
DOMAIN,
{CONF_NAME: name, CONF_HOST: host, CONF_SENSORS: sensors},
config,
)
)
if switches:
hass.async_create_task(
discovery.async_load_platform(
hass,
"switch",
DOMAIN,
{CONF_NAME: name, CONF_HOST: host, CONF_SWITCHES: switches},
config,
)
)
if motion:
hass.async_create_task(
discovery.async_load_platform(
hass,
"binary_sensor",
DOMAIN,
{CONF_HOST: host, CONF_NAME: name},
config,
)
)
tasks = [async_setup_ipcamera(conf) for conf in config[DOMAIN]]
if tasks:
await asyncio.wait(tasks)
return True
class AndroidIPCamEntity(Entity):
"""The Android device running IP Webcam."""
def __init__(self, host, ipcam):
"""Initialize the data object."""
self._host = host
self._ipcam = ipcam
async def async_added_to_hass(self):
"""Register update dispatcher."""
@callback
def async_ipcam_update(host):
"""Update callback."""
if self._host != host:
return
self.async_schedule_update_ha_state(True)
self.async_on_remove(
async_dispatcher_connect(self.hass, SIGNAL_UPDATE_DATA, async_ipcam_update)
)
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def available(self):
"""Return True if entity is available."""
return self._ipcam.available
@property
def device_state_attributes(self):
"""Return the state attributes."""
state_attr = {ATTR_HOST: self._host}
if self._ipcam.status_data is None:
return state_attr
state_attr[ATTR_VID_CONNS] = self._ipcam.status_data.get("video_connections")
state_attr[ATTR_AUD_CONNS] = self._ipcam.status_data.get("audio_connections")
return state_attr
|
from homeassistant import setup
from homeassistant.components import frontend
from tests.async_mock import Mock, patch
async def test_webcomponent_custom_path_not_found(hass):
"""Test if a web component is found in config panels dir."""
filename = "mock.file"
config = {
"panel_custom": {
"name": "todomvc",
"webcomponent_path": filename,
"sidebar_title": "Sidebar Title",
"sidebar_icon": "mdi:iconicon",
"url_path": "nice_url",
"config": 5,
}
}
with patch("os.path.isfile", Mock(return_value=False)):
result = await setup.async_setup_component(hass, "panel_custom", config)
assert not result
panels = hass.data.get(frontend.DATA_PANELS, [])
assert panels
assert "nice_url" not in panels
async def test_js_webcomponent(hass):
"""Test if a web component is found in config panels dir."""
config = {
"panel_custom": {
"name": "todo-mvc",
"js_url": "/local/bla.js",
"sidebar_title": "Sidebar Title",
"sidebar_icon": "mdi:iconicon",
"url_path": "nice_url",
"config": {"hello": "world"},
"embed_iframe": True,
"trust_external_script": True,
}
}
result = await setup.async_setup_component(hass, "panel_custom", config)
assert result
panels = hass.data.get(frontend.DATA_PANELS, [])
assert panels
assert "nice_url" in panels
panel = panels["nice_url"]
assert panel.config == {
"hello": "world",
"_panel_custom": {
"js_url": "/local/bla.js",
"name": "todo-mvc",
"embed_iframe": True,
"trust_external": True,
},
}
assert panel.frontend_url_path == "nice_url"
assert panel.sidebar_icon == "mdi:iconicon"
assert panel.sidebar_title == "Sidebar Title"
async def test_module_webcomponent(hass):
"""Test if a js module is found in config panels dir."""
config = {
"panel_custom": {
"name": "todo-mvc",
"module_url": "/local/bla.js",
"sidebar_title": "Sidebar Title",
"sidebar_icon": "mdi:iconicon",
"url_path": "nice_url",
"config": {"hello": "world"},
"embed_iframe": True,
"trust_external_script": True,
"require_admin": True,
}
}
result = await setup.async_setup_component(hass, "panel_custom", config)
assert result
panels = hass.data.get(frontend.DATA_PANELS, [])
assert panels
assert "nice_url" in panels
panel = panels["nice_url"]
assert panel.require_admin
assert panel.config == {
"hello": "world",
"_panel_custom": {
"module_url": "/local/bla.js",
"name": "todo-mvc",
"embed_iframe": True,
"trust_external": True,
},
}
assert panel.frontend_url_path == "nice_url"
assert panel.sidebar_icon == "mdi:iconicon"
assert panel.sidebar_title == "Sidebar Title"
async def test_latest_and_es5_build(hass):
"""Test specifying an es5 and latest build."""
config = {
"panel_custom": {
"name": "todo-mvc",
"js_url": "/local/es5.js",
"module_url": "/local/latest.js",
"url_path": "nice_url",
}
}
assert await setup.async_setup_component(hass, "panel_custom", config)
panels = hass.data.get(frontend.DATA_PANELS, {})
assert panels
assert "nice_url" in panels
panel = panels["nice_url"]
assert panel.config == {
"_panel_custom": {
"name": "todo-mvc",
"js_url": "/local/es5.js",
"module_url": "/local/latest.js",
"embed_iframe": False,
"trust_external": False,
},
}
assert panel.frontend_url_path == "nice_url"
async def test_url_path_conflict(hass):
"""Test config with overlapping url path."""
assert await setup.async_setup_component(
hass,
"panel_custom",
{
"panel_custom": [
{"name": "todo-mvc", "js_url": "/local/bla.js"},
{"name": "todo-mvc", "js_url": "/local/bla.js"},
]
},
)
|
from flask import Flask
from flask import jsonify
from flasgger import Swagger
from flasgger import swag_from
app = Flask(__name__)
swag = Swagger(app)
@app.route("/example")
@swag_from({
"responses": {
400: {
"description": "Invalid action"
},
401: {
"description": "Login required"
}
}
})
def view():
"""
A test view
---
responses:
200:
description: OK
400:
description: Modified description
"""
return jsonify(hello="world")
def test_swag(client, specs_data):
example_spec = specs_data["/apispec_1.json"]["paths"]["/example"]["get"]
assert "400" in example_spec["responses"]
assert "401" in example_spec["responses"]
assert "200" in example_spec["responses"]
assert example_spec["responses"]["400"]["description"] == "Modified description"
if __name__ == "__main__":
app.run(debug=True)
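# When run directly, the merged spec can be inspected at the default Flasgger
# endpoint that the test above reads (assuming the stock Flask dev server):
#   curl http://127.0.0.1:5000/apispec_1.json
# The 400 response keeps the docstring's "Modified description", while 401
# comes only from the decorator and 200 only from the docstring.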
|
import logging
import os
import boto3
import boto3.session
def check(session):
client = session.client('s3')
try:
response = client.list_buckets()
except Exception as e:
logging.exception(e)
return None
else:
return [b['Name'] for b in response['Buckets']]
def check_implicit():
session = boto3.session.Session()
buckets = check(session)
if buckets:
print('implicit check OK: %r' % buckets)
else:
print('implicit check failed')
def check_explicit():
key_id = os.environ.get('AWS_ACCESS_KEY_ID')
secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
if not (key_id and secret_key):
print('no credentials found in os.environ, skipping explicit check')
return
session = boto3.session.Session(aws_access_key_id=key_id, aws_secret_access_key=secret_key)
buckets = check(session)
if buckets:
print('explicit check OK: %r' % buckets)
else:
print('explicit check failed')
def main():
check_implicit()
check_explicit()
if __name__ == '__main__':
main()
|
import collections
import functools
from typing import (TYPE_CHECKING, Any, Callable, MutableMapping, MutableSequence,
Optional, Sequence, Union)
from PyQt5.QtCore import QObject, QTimer
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QWidget
from qutebrowser.utils import log, usertypes, utils
if TYPE_CHECKING:
from qutebrowser.mainwindow import mainwindow
_WindowTab = Union[str, int, None]
class RegistryUnavailableError(Exception):
"""Exception raised when a certain registry does not exist yet."""
class NoWindow(Exception):
"""Exception raised by last_window if no window is available."""
class CommandOnlyError(Exception):
"""Raised when an object is requested which is used for commands only."""
_IndexType = Union[str, int]
class ObjectRegistry(collections.UserDict):
"""A registry of long-living objects in qutebrowser.
Inspired by the eric IDE code (E5Gui/E5Application.py).
Attributes:
_partial_objs: A dictionary of the connected partial objects.
command_only: Objects which are only registered for commands.
"""
def __init__(self) -> None:
super().__init__()
self._partial_objs: MutableMapping[_IndexType, Callable[[], None]] = {}
self.command_only: MutableSequence[str] = []
def __setitem__(self, name: _IndexType, obj: Any) -> None:
"""Register an object in the object registry.
Sets a slot to remove QObjects when they are destroyed.
"""
if name is None:
raise TypeError("Registering '{}' with name 'None'!".format(obj))
if obj is None:
raise TypeError("Registering object None with name '{}'!".format(
name))
self._disconnect_destroyed(name)
if isinstance(obj, QObject):
func = functools.partial(self.on_destroyed, name)
obj.destroyed.connect(func)
self._partial_objs[name] = func
super().__setitem__(name, obj)
def __delitem__(self, name: str) -> None:
"""Extend __delitem__ to disconnect the destroyed signal."""
self._disconnect_destroyed(name)
super().__delitem__(name)
def _disconnect_destroyed(self, name: _IndexType) -> None:
"""Disconnect the destroyed slot if it was connected."""
try:
partial_objs = self._partial_objs
except AttributeError:
# This sometimes seems to happen on CI during
# test_history.test_adding_item_during_async_read
# and I have no idea why...
return
if name in partial_objs:
func = partial_objs[name]
try:
self[name].destroyed.disconnect(func)
except RuntimeError:
# If C++ has deleted the object, the slot is already
# disconnected.
pass
del partial_objs[name]
def on_destroyed(self, name: str) -> None:
"""Schedule removing of a destroyed QObject.
We don't remove the destroyed object immediately because it might still
be destroying its children, which might still use the object
registry.
"""
log.destroy.debug("schedule removal: {}".format(name))
QTimer.singleShot(0, functools.partial(self._on_destroyed, name))
def _on_destroyed(self, name: str) -> None:
"""Remove a destroyed QObject."""
log.destroy.debug("removed: {}".format(name))
if not hasattr(self, 'data'):
# This sometimes seems to happen on CI during
# test_history.test_adding_item_during_async_read
# and I have no idea why...
return
try:
del self[name]
del self._partial_objs[name]
except KeyError:
pass
def dump_objects(self) -> Sequence[str]:
"""Dump all objects as a list of strings."""
lines = []
for name, obj in self.data.items():
try:
obj_repr = repr(obj)
except RuntimeError:
# Underlying object deleted probably
obj_repr = '<deleted>'
suffix = (" (for commands only)" if name in self.command_only
else "")
lines.append("{}: {}{}".format(name, obj_repr, suffix))
return lines
# The registry for global objects
global_registry = ObjectRegistry()
# The window registry.
window_registry = ObjectRegistry()
def _get_tab_registry(win_id: _WindowTab,
tab_id: _WindowTab) -> ObjectRegistry:
"""Get the registry of a tab."""
if tab_id is None:
raise ValueError("Got tab_id None (win_id {})".format(win_id))
if tab_id == 'current' and win_id is None:
window: Optional[QWidget] = QApplication.activeWindow()
if window is None or not hasattr(window, 'win_id'):
raise RegistryUnavailableError('tab')
win_id = window.win_id
elif win_id is None:
raise TypeError("window is None with scope tab!")
if tab_id == 'current':
tabbed_browser = get('tabbed-browser', scope='window', window=win_id)
tab = tabbed_browser.widget.currentWidget()
if tab is None:
raise RegistryUnavailableError('window')
tab_id = tab.tab_id
tab_registry = get('tab-registry', scope='window', window=win_id)
try:
return tab_registry[tab_id].registry
except AttributeError:
raise RegistryUnavailableError('tab')
def _get_window_registry(window: _WindowTab) -> ObjectRegistry:
"""Get the registry of a window."""
if window is None:
raise TypeError("window is None with scope window!")
try:
if window == 'current':
win: Optional[QWidget] = QApplication.activeWindow()
elif window == 'last-focused':
win = last_focused_window()
else:
win = window_registry[window]
except (KeyError, NoWindow):
win = None
if win is None:
raise RegistryUnavailableError('window')
try:
return win.registry
except AttributeError:
raise RegistryUnavailableError('window')
def _get_registry(scope: str,
window: _WindowTab = None,
tab: _WindowTab = None) -> ObjectRegistry:
"""Get the correct registry for a given scope."""
if window is not None and scope not in ['window', 'tab']:
raise TypeError("window is set with scope {}".format(scope))
if tab is not None and scope != 'tab':
raise TypeError("tab is set with scope {}".format(scope))
if scope == 'global':
return global_registry
elif scope == 'tab':
return _get_tab_registry(window, tab)
elif scope == 'window':
return _get_window_registry(window)
else:
raise ValueError("Invalid scope '{}'!".format(scope))
def get(name: str,
default: Any = usertypes.UNSET,
scope: str = 'global',
window: _WindowTab = None,
tab: _WindowTab = None,
from_command: bool = False) -> Any:
"""Helper function to get an object.
Args:
default: A default to return if the object does not exist.
"""
reg = _get_registry(scope, window, tab)
if name in reg.command_only and not from_command:
raise CommandOnlyError("{} is only registered for commands"
.format(name))
try:
return reg[name]
except KeyError:
if default is not usertypes.UNSET:
return default
else:
raise
def register(name: str,
obj: Any,
update: bool = False,
scope: str = None,
registry: ObjectRegistry = None,
window: _WindowTab = None,
tab: _WindowTab = None,
command_only: bool = False) -> None:
"""Helper function to register an object.
Args:
name: The name the object will be registered as.
obj: The object to register.
update: If True, allows to update an already registered object.
"""
if scope is not None and registry is not None:
raise ValueError("scope ({}) and registry ({}) can't be given at the "
"same time!".format(scope, registry))
if registry is not None:
reg = registry
else:
if scope is None:
scope = 'global'
reg = _get_registry(scope, window, tab)
if not update and name in reg:
raise KeyError("Object '{}' is already registered ({})!".format(
name, repr(reg[name])))
reg[name] = obj
if command_only:
reg.command_only.append(name)
def delete(name: str,
scope: str = 'global',
window: _WindowTab = None,
tab: _WindowTab = None) -> None:
"""Helper function to unregister an object."""
reg = _get_registry(scope, window, tab)
del reg[name]
def dump_objects() -> Sequence[str]:
"""Get all registered objects in all registries as a string."""
blocks = []
lines = []
blocks.append(('global', global_registry.dump_objects()))
for win_id in window_registry:
registry = _get_registry('window', window=win_id)
blocks.append(('window-{}'.format(win_id), registry.dump_objects()))
tab_registry = get('tab-registry', scope='window', window=win_id)
for tab_id, tab in tab_registry.items():
dump = tab.registry.dump_objects()
data = [' ' + line for line in dump]
blocks.append((' tab-{}'.format(tab_id), data))
for name, block_data in blocks:
lines.append("")
lines.append("{} object registry - {} objects:".format(
name, len(block_data)))
for line in block_data:
lines.append(" {}".format(line))
return lines
def last_visible_window() -> 'mainwindow.MainWindow':
"""Get the last visible window, or the last focused window if none."""
try:
window = get('last-visible-main-window')
except KeyError:
return last_focused_window()
if window.tabbed_browser.is_shutting_down:
return last_focused_window()
return window
def last_focused_window() -> 'mainwindow.MainWindow':
"""Get the last focused window, or the last window if none."""
try:
window = get('last-focused-main-window')
except KeyError:
return last_opened_window()
if window.tabbed_browser.is_shutting_down:
return last_opened_window()
return window
def _window_by_index(idx: int) -> 'mainwindow.MainWindow':
"""Get the Nth opened window object."""
if not window_registry:
raise NoWindow()
key = sorted(window_registry)[idx]
return window_registry[key]
def last_opened_window() -> 'mainwindow.MainWindow':
"""Get the last opened window object."""
if not window_registry:
raise NoWindow()
for idx in range(-1, -(len(window_registry)+1), -1):
window = _window_by_index(idx)
if not window.tabbed_browser.is_shutting_down:
return window
raise utils.Unreachable()
def first_opened_window() -> 'mainwindow.MainWindow':
"""Get the first opened window object."""
if not window_registry:
raise NoWindow()
for idx in range(0, len(window_registry)+1):
window = _window_by_index(idx)
if not window.tabbed_browser.is_shutting_down:
return window
raise utils.Unreachable()
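# A hedged usage sketch (the 'session-manager' name is purely illustrative,
# not an object qutebrowser registers here):
#
#   register('session-manager', manager)             # global scope by default
#   manager = get('session-manager')                 # KeyError if unregistered
#   manager = get('session-manager', default=None)   # or fall back to default
#   delete('session-manager')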
|
import logging
import re
from typing import Any, List
import pyvera as pv
from requests.exceptions import RequestException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EXCLUDE, CONF_LIGHTS, CONF_SOURCE
from homeassistant.core import callback
from homeassistant.helpers.entity_registry import EntityRegistry
from .const import ( # pylint: disable=unused-import
CONF_CONTROLLER,
CONF_LEGACY_UNIQUE_ID,
DOMAIN,
)
LIST_REGEX = re.compile("[^0-9]+")
_LOGGER = logging.getLogger(__name__)
def fix_device_id_list(data: List[Any]) -> List[int]:
"""Fix the id list by converting it to a supported int list."""
return str_to_int_list(list_to_str(data))
def str_to_int_list(data: str) -> List[int]:
"""Convert a string to an int list."""
return [int(s) for s in LIST_REGEX.split(data) if len(s) > 0]
def list_to_str(data: List[Any]) -> str:
"""Convert an int list to a string."""
return " ".join([str(i) for i in data])
def new_options(lights: List[int], exclude: List[int]) -> dict:
"""Create a standard options object."""
return {CONF_LIGHTS: lights, CONF_EXCLUDE: exclude}
def options_schema(options: dict = None) -> dict:
"""Return options schema."""
options = options or {}
return {
vol.Optional(
CONF_LIGHTS,
default=list_to_str(options.get(CONF_LIGHTS, [])),
): str,
vol.Optional(
CONF_EXCLUDE,
default=list_to_str(options.get(CONF_EXCLUDE, [])),
): str,
}
def options_data(user_input: dict) -> dict:
"""Return options dict."""
return new_options(
str_to_int_list(user_input.get(CONF_LIGHTS, "")),
str_to_int_list(user_input.get(CONF_EXCLUDE, "")),
)
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Options for the component."""
def __init__(self, config_entry: ConfigEntry):
"""Init object."""
self.config_entry = config_entry
async def async_step_init(self, user_input: dict = None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(
title="",
data=options_data(user_input),
)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(options_schema(self.config_entry.options)),
)
class VeraFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Vera config flow."""
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler:
"""Get the options flow."""
return OptionsFlowHandler(config_entry)
async def async_step_user(self, user_input: dict = None):
"""Handle user initiated flow."""
if user_input is not None:
return await self.async_step_finish(
{
**user_input,
**options_data(user_input),
**{CONF_SOURCE: config_entries.SOURCE_USER},
**{CONF_LEGACY_UNIQUE_ID: False},
}
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{**{vol.Required(CONF_CONTROLLER): str}, **options_schema()}
),
)
async def async_step_import(self, config: dict):
"""Handle a flow initialized by import."""
# If there are entities with the legacy unique_id, then this imported config
# should also use the legacy unique_id for entity creation.
entity_registry: EntityRegistry = (
await self.hass.helpers.entity_registry.async_get_registry()
)
use_legacy_unique_id = (
len(
[
entry
for entry in entity_registry.entities.values()
if entry.platform == DOMAIN and entry.unique_id.isdigit()
]
)
> 0
)
return await self.async_step_finish(
{
**config,
**{CONF_SOURCE: config_entries.SOURCE_IMPORT},
**{CONF_LEGACY_UNIQUE_ID: use_legacy_unique_id},
}
)
async def async_step_finish(self, config: dict):
"""Validate and create config entry."""
base_url = config[CONF_CONTROLLER] = config[CONF_CONTROLLER].rstrip("/")
controller = pv.VeraController(base_url)
# Verify the controller is online and get the serial number.
try:
await self.hass.async_add_executor_job(controller.refresh_data)
except RequestException:
_LOGGER.error("Failed to connect to vera controller %s", base_url)
return self.async_abort(
reason="cannot_connect", description_placeholders={"base_url": base_url}
)
await self.async_set_unique_id(controller.serial_number)
self._abort_if_unique_id_configured(config)
return self.async_create_entry(title=base_url, data=config)
|
import unittest
import xgboost
from distutils.version import StrictVersion
from sklearn import datasets
from xgboost import XGBClassifier
class TestXGBoost(unittest.TestCase):
def test_version(self):
# b/175051617 prevent xgboost version downgrade.
self.assertGreaterEqual(StrictVersion(xgboost.__version__), StrictVersion("1.2.1"))
def test_classifier(self):
boston = datasets.load_boston()
X, y = boston.data, boston.target
xgb1 = XGBClassifier(n_estimators=3)
        xgb1.fit(X[0:70], y[0:70])
|
from textwrap import dedent
from unittest import skipIf, TestCase
from markdown import markdown
from markdown.extensions.codehilite import CodeHiliteExtension
from markdown.extensions.fenced_code import FencedCodeExtension
try:
from pymdownx.superfences import SuperFencesCodeExtension
except ImportError:
SuperFencesCodeExtension = None
from ReText.mdx_posmap import PosMapExtension
class PosMapTest(TestCase):
maxDiff = None
extensionsPosMap = [
CodeHiliteExtension(),
FencedCodeExtension(),
PosMapExtension()
]
extensionsNoPosMap = [
CodeHiliteExtension(),
FencedCodeExtension()
]
def test_normalUse(self):
text = dedent("""\
# line 1
- line 3
- line 4
- line 5
line 7
line 8
code block, line 10
""")
html = markdown(text, extensions=[PosMapExtension()])
self.assertIn('<h1 data-posmap="1">line 1</h1>', html)
self.assertIn('<ul data-posmap="5">', html)
self.assertIn('<p data-posmap="8">', html)
self.assertIn('<pre data-posmap="10"><code>code block, line 10', html)
self.assertNotIn("posmapmarker", html)
def test_highlightC(self):
text = dedent("""\
```c
#include <stdio.h>
int main(int argc, char **argv)
{
printf("Hello, world!\\n");
}
```""")
html = markdown(text, extensions=self.extensionsPosMap)
expected = markdown(text, extensions=self.extensionsNoPosMap)
self.assertIn('<div class="codehilite">', html)
self.assertMultiLineEqual(html, expected)
def test_highlightEmptyC(self):
text = dedent("""\
```c
```""")
html = markdown(text, extensions=self.extensionsPosMap)
expected = markdown(text, extensions=self.extensionsNoPosMap)
self.assertIn('<div class="codehilite">', html)
self.assertMultiLineEqual(html, expected)
def test_highlightPython(self):
text = dedent("""\
```python
if __name__ == "__main__":
print("Hello, world!")
```""")
html = markdown(text, extensions=self.extensionsPosMap)
expected = markdown(text, extensions=self.extensionsNoPosMap)
self.assertIn('<div class="codehilite">', html)
self.assertMultiLineEqual(html, expected)
def test_highlightEmptyPython(self):
text = dedent("""\
```python
```""")
html = markdown(text, extensions=self.extensionsPosMap)
expected = markdown(text, extensions=self.extensionsNoPosMap)
self.assertIn('<div class="codehilite">', html)
self.assertMultiLineEqual(html, expected)
def test_traditionalCodeBlock(self):
text = dedent("""\
:::python
if __name__ == "__main__":
print("Hello, world!")
a paragraph following the code block, line 5
""")
extensions = [CodeHiliteExtension(), PosMapExtension()]
html = markdown(text, extensions=extensions)
self.assertNotIn('posmapmarker', html)
self.assertIn('<div class="codehilite">', html)
self.assertIn('<p data-posmap="5">', html)
@skipIf(SuperFencesCodeExtension is None,
"pymdownx module is not available")
def test_superFences(self):
text = dedent("""\
```bash
tee ~/test << EOF
A
B
C
EOF
```""")
extensions = [SuperFencesCodeExtension(), PosMapExtension()]
html = markdown(text, extensions=extensions)
self.assertNotIn("posmapmarker", html)
expected = markdown(text, extensions=[SuperFencesCodeExtension()])
self.assertMultiLineEqual(html, expected)
|
from datetime import timedelta
import abodepy.helpers.constants as CONST
import abodepy.helpers.timeline as TIMELINE
import requests
from homeassistant.components.camera import Camera
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import Throttle
from . import AbodeDevice
from .const import DOMAIN, LOGGER
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Abode camera devices."""
data = hass.data[DOMAIN]
entities = []
for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA):
entities.append(AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE))
async_add_entities(entities)
class AbodeCamera(AbodeDevice, Camera):
"""Representation of an Abode camera."""
def __init__(self, data, device, event):
"""Initialize the Abode device."""
AbodeDevice.__init__(self, data, device)
Camera.__init__(self)
self._event = event
self._response = None
async def async_added_to_hass(self):
"""Subscribe Abode events."""
await super().async_added_to_hass()
self.hass.async_add_executor_job(
self._data.abode.events.add_timeline_callback,
self._event,
self._capture_callback,
)
signal = f"abode_camera_capture_{self.entity_id}"
self.async_on_remove(async_dispatcher_connect(self.hass, signal, self.capture))
def capture(self):
"""Request a new image capture."""
return self._device.capture()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def refresh_image(self):
"""Find a new image on the timeline."""
if self._device.refresh_image():
self.get_image()
def get_image(self):
"""Attempt to download the most recent capture."""
if self._device.image_url:
try:
self._response = requests.get(self._device.image_url, stream=True)
self._response.raise_for_status()
except requests.HTTPError as err:
LOGGER.warning("Failed to get camera image: %s", err)
self._response = None
else:
self._response = None
def camera_image(self):
"""Get a camera image."""
self.refresh_image()
if self._response:
return self._response.content
return None
def turn_on(self):
"""Turn on camera."""
self._device.privacy_mode(False)
def turn_off(self):
"""Turn off camera."""
self._device.privacy_mode(True)
def _capture_callback(self, capture):
"""Update the image with the device then refresh device."""
self._device.update_image_location(capture)
self.get_image()
self.schedule_update_ha_state()
@property
def is_on(self):
"""Return true if on."""
return self._device.is_on
|
import os.path as op
from copy import deepcopy
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest
from mne import (read_source_spaces, pick_types, read_trans, read_cov,
make_sphere_model, create_info, setup_volume_source_space,
find_events, Epochs, fit_dipole, transform_surface_to,
make_ad_hoc_cov, SourceEstimate, setup_source_space,
read_bem_solution, make_forward_solution,
convert_forward_solution, VolSourceEstimate,
make_bem_solution)
from mne.bem import _surfaces_to_bem
from mne.chpi import (read_head_pos, compute_chpi_amplitudes,
compute_chpi_locs, compute_head_pos, _get_hpi_info)
from mne.tests.test_chpi import _assert_quats
from mne.datasets import testing
from mne.simulation import (simulate_sparse_stc, simulate_raw, add_eog,
add_ecg, add_chpi, add_noise)
from mne.source_space import _compare_source_spaces
from mne.surface import _get_ico_surface
from mne.io import read_raw_fif, RawArray
from mne.io.constants import FIFF
from mne.time_frequency import psd_welch
from mne.utils import catch_logging, check_version
base_path = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname_short = op.join(base_path, 'test_raw.fif')
data_path = testing.data_path(download=False)
raw_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
cov_fname = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-cov.fif')
trans_fname = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-trans.fif')
subjects_dir = op.join(data_path, 'subjects')
bem_path = op.join(subjects_dir, 'sample', 'bem')
src_fname = op.join(bem_path, 'sample-oct-2-src.fif')
bem_fname = op.join(bem_path, 'sample-320-320-320-bem-sol.fif')
bem_1_fname = op.join(bem_path, 'sample-320-bem-sol.fif')
raw_chpi_fname = op.join(data_path, 'SSS', 'test_move_anon_raw.fif')
pos_fname = op.join(data_path, 'SSS', 'test_move_anon_raw_subsampled.pos')
def _assert_iter_sim(raw_sim, raw_new, new_event_id):
events = find_events(raw_sim, initial_event=True)
events_tuple = find_events(raw_new, initial_event=True)
assert_array_equal(events_tuple[:, :2], events[:, :2])
assert_array_equal(events_tuple[:, 2], new_event_id)
data_sim = raw_sim[:-1][0]
data_new = raw_new[:-1][0]
assert_array_equal(data_new, data_sim)
@pytest.mark.slowtest
def test_iterable():
"""Test iterable support for simulate_raw."""
raw = read_raw_fif(raw_fname_short).load_data()
raw.pick_channels(raw.ch_names[:10] + ['STI 014'])
src = setup_volume_source_space(
pos=dict(rr=[[-0.05, 0, 0], [0.1, 0, 0]],
nn=[[0, 1., 0], [0, 1., 0]]))
assert src.kind == 'discrete'
trans = None
sphere = make_sphere_model(head_radius=None, info=raw.info)
tstep = 1. / raw.info['sfreq']
rng = np.random.RandomState(0)
vertices = [np.array([1])]
data = rng.randn(1, 2)
stc = VolSourceEstimate(data, vertices, 0, tstep)
assert isinstance(stc.vertices[0], np.ndarray)
with pytest.raises(ValueError, match='at least three time points'):
simulate_raw(raw.info, stc, trans, src, sphere, None)
data = rng.randn(1, 1000)
n_events = (len(raw.times) - 1) // 1000 + 1
stc = VolSourceEstimate(data, vertices, 0, tstep)
assert isinstance(stc.vertices[0], np.ndarray)
raw_sim = simulate_raw(raw.info, [stc] * 15, trans, src, sphere, None,
first_samp=raw.first_samp)
raw_sim.crop(0, raw.times[-1])
assert_allclose(raw.times, raw_sim.times)
events = find_events(raw_sim, initial_event=True)
assert len(events) == n_events
assert_array_equal(events[:, 2], 1)
# Degenerate STCs
with pytest.raises(RuntimeError,
match=r'Iterable did not provide stc\[0\]'):
simulate_raw(raw.info, [], trans, src, sphere, None)
# tuple with ndarray
event_data = np.zeros(len(stc.times), int)
event_data[0] = 3
raw_new = simulate_raw(raw.info, [(stc, event_data)] * 15,
trans, src, sphere, None, first_samp=raw.first_samp)
assert raw_new.n_times == 15000
raw_new.crop(0, raw.times[-1])
_assert_iter_sim(raw_sim, raw_new, 3)
with pytest.raises(ValueError, match='event data had shape .* but need'):
simulate_raw(raw.info, [(stc, event_data[:-1])], trans, src, sphere,
None)
with pytest.raises(ValueError, match='stim_data in a stc tuple .* int'):
simulate_raw(raw.info, [(stc, event_data * 1.)], trans, src, sphere,
None)
# iterable
def stc_iter():
stim_data = np.zeros(len(stc.times), int)
stim_data[0] = 4
ii = 0
while ii < 15:
ii += 1
yield (stc, stim_data)
raw_new = simulate_raw(raw.info, stc_iter(), trans, src, sphere, None,
first_samp=raw.first_samp)
raw_new.crop(0, raw.times[-1])
_assert_iter_sim(raw_sim, raw_new, 4)
def stc_iter_bad():
ii = 0
while ii < 100:
ii += 1
yield (stc, 4, 3)
with pytest.raises(ValueError, match='stc, if tuple, must be length'):
simulate_raw(raw.info, stc_iter_bad(), trans, src, sphere, None)
_assert_iter_sim(raw_sim, raw_new, 4)
def stc_iter_bad():
ii = 0
while ii < 100:
ii += 1
stc_new = stc.copy()
stc_new.vertices[0] = np.array([ii % 2])
yield stc_new
with pytest.raises(RuntimeError, match=r'Vertex mismatch for stc\[1\]'):
simulate_raw(raw.info, stc_iter_bad(), trans, src, sphere, None)
# Forward omission
vertices = [np.array([0, 1])]
data = rng.randn(2, 1000)
stc = VolSourceEstimate(data, vertices, 0, tstep)
assert isinstance(stc.vertices[0], np.ndarray)
# XXX eventually we should support filtering based on sphere radius, too,
# by refactoring the code in source_space.py that does it!
surf = _get_ico_surface(3)
surf['rr'] *= 60 # mm
model = _surfaces_to_bem([surf], [FIFF.FIFFV_BEM_SURF_ID_BRAIN], [0.3])
bem = make_bem_solution(model)
with pytest.warns(RuntimeWarning,
match='1 of 2 SourceEstimate vertices'):
simulate_raw(raw.info, stc, trans, src, bem, None)
def _make_stc(raw, src):
"""Make a STC."""
seed = 42
sfreq = raw.info['sfreq'] # Hz
tstep = 1. / sfreq
n_samples = len(raw.times) // 10
times = np.arange(0, n_samples) * tstep
stc = simulate_sparse_stc(src, 10, times, random_state=seed)
return stc
@pytest.fixture(scope='function', params=[testing._pytest_param()])
def raw_data():
"""Get some starting data."""
# raw with ECG channel
raw = read_raw_fif(raw_fname).crop(0., 5.0).load_data()
data_picks = pick_types(raw.info, meg=True, eeg=True)
other_picks = pick_types(raw.info, meg=False, stim=True, eog=True)
picks = np.sort(np.concatenate((data_picks[::16], other_picks)))
raw = raw.pick_channels([raw.ch_names[p] for p in picks])
raw.info.normalize_proj()
ecg = RawArray(np.zeros((1, len(raw.times))),
create_info(['ECG 063'], raw.info['sfreq'], 'ecg'))
for key in ('dev_head_t', 'highpass', 'lowpass', 'dig'):
ecg.info[key] = raw.info[key]
raw.add_channels([ecg])
src = read_source_spaces(src_fname)
trans = read_trans(trans_fname)
sphere = make_sphere_model('auto', 'auto', raw.info)
stc = _make_stc(raw, src)
return raw, src, stc, trans, sphere
def _get_head_pos_sim(raw):
head_pos_sim = dict()
# these will be at 1., 2., ... sec
shifts = [[0.001, 0., -0.001], [-0.001, 0.001, 0.]]
for time_key, shift in enumerate(shifts):
# Create 4x4 matrix transform and normalize
temp_trans = deepcopy(raw.info['dev_head_t'])
temp_trans['trans'][:3, 3] += shift
head_pos_sim[time_key + 1.] = temp_trans['trans']
return head_pos_sim
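# Note (descriptive only): the dict built above maps onset times in seconds to
# 4x4 device-to-head transforms, roughly {1.0: <4x4 array>, 2.0: <4x4 array>},
# which is the dict form of head_pos that simulate_raw accepts in these tests.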
def test_simulate_raw_sphere(raw_data, tmpdir):
"""Test simulation of raw data with sphere model."""
seed = 42
raw, src, stc, trans, sphere = raw_data
assert len(pick_types(raw.info, meg=False, ecg=True)) == 1
tempdir = str(tmpdir)
# head pos
head_pos_sim = _get_head_pos_sim(raw)
#
# Test raw simulation with basic parameters
#
raw.info.normalize_proj()
cov = read_cov(cov_fname)
cov['projs'] = raw.info['projs']
raw.info['bads'] = raw.ch_names[:1]
sphere_norad = make_sphere_model('auto', None, raw.info)
raw_meg = raw.copy().pick_types(meg=True)
raw_sim = simulate_raw(raw_meg.info, stc, trans, src, sphere_norad,
head_pos=head_pos_sim)
# Test IO on processed data
test_outname = op.join(tempdir, 'sim_test_raw.fif')
raw_sim.save(test_outname)
raw_sim_loaded = read_raw_fif(test_outname, preload=True)
assert_allclose(raw_sim_loaded[:][0], raw_sim[:][0], rtol=1e-6, atol=1e-20)
del raw_sim
# make sure it works with EEG-only and MEG-only
raw_sim_meg = simulate_raw(
raw.copy().pick_types(meg=True, eeg=False).info,
stc, trans, src, sphere)
raw_sim_eeg = simulate_raw(
raw.copy().pick_types(meg=False, eeg=True).info,
stc, trans, src, sphere)
raw_sim_meeg = simulate_raw(
raw.copy().pick_types(meg=True, eeg=True).info,
stc, trans, src, sphere)
for this_raw in (raw_sim_meg, raw_sim_eeg, raw_sim_meeg):
add_eog(this_raw, random_state=seed)
for this_raw in (raw_sim_meg, raw_sim_meeg):
add_ecg(this_raw, random_state=seed)
with pytest.raises(RuntimeError, match='only add ECG artifacts if MEG'):
add_ecg(raw_sim_eeg)
assert_allclose(np.concatenate((raw_sim_meg[:][0], raw_sim_eeg[:][0])),
raw_sim_meeg[:][0], rtol=1e-7, atol=1e-20)
del raw_sim_meg, raw_sim_eeg, raw_sim_meeg
# check that raw-as-info is supported
n_samp = len(stc.times)
raw_crop = raw.copy().crop(0., (n_samp - 1.) / raw.info['sfreq'])
assert len(raw_crop.times) == len(stc.times)
raw_sim = simulate_raw(raw_crop.info, stc, trans, src, sphere)
with catch_logging() as log:
raw_sim_2 = simulate_raw(raw_crop.info, stc, trans, src, sphere,
verbose=True)
log = log.getvalue()
assert '1 STC iteration provided' in log
assert len(raw_sim_2.times) == n_samp
assert_allclose(raw_sim[:, :n_samp][0],
raw_sim_2[:, :n_samp][0], rtol=1e-5, atol=1e-30)
del raw_sim, raw_sim_2
# check that different interpolations are similar given small movements
raw_sim = simulate_raw(raw.info, stc, trans, src, sphere,
head_pos=head_pos_sim, interp='linear')
raw_sim_hann = simulate_raw(raw.info, stc, trans, src, sphere,
head_pos=head_pos_sim, interp='hann')
assert_allclose(raw_sim[:][0], raw_sim_hann[:][0], rtol=1e-1, atol=1e-14)
del raw_sim_hann
# check that new Generator objects can be used
if check_version('numpy', '1.17'):
random_state = np.random.default_rng(seed)
add_ecg(raw_sim, random_state=random_state)
add_eog(raw_sim, random_state=random_state)
def test_degenerate(raw_data):
"""Test degenerate conditions."""
raw, src, stc, trans, sphere = raw_data
info = raw.info
# Make impossible transform (translate up into helmet) and ensure failure
hp_err = _get_head_pos_sim(raw)
hp_err[1.][2, 3] -= 0.1 # z trans upward 10cm
with pytest.raises(RuntimeError, match='collided with inner skull'):
simulate_raw(info, stc, trans, src, sphere, head_pos=hp_err)
# other degenerate conditions
with pytest.raises(TypeError, match='info must be an instance of'):
simulate_raw('foo', stc, trans, src, sphere)
with pytest.raises(TypeError, match='stc must be an instance of'):
simulate_raw(info, 'foo', trans, src, sphere)
with pytest.raises(ValueError, match='stc must have at least three time'):
simulate_raw(info, stc.copy().crop(0, 0), trans, src, sphere)
with pytest.raises(TypeError, match='must be an instance of Info'):
simulate_raw(0, stc, trans, src, sphere)
stc_bad = stc.copy()
stc_bad.tstep += 0.1
with pytest.raises(ValueError, match='same sample rate'):
simulate_raw(info, stc_bad, trans, src, sphere)
with pytest.raises(ValueError, match='interp must be one of'):
simulate_raw(info, stc, trans, src, sphere, interp='foo')
with pytest.raises(TypeError, match='unknown head_pos type'):
simulate_raw(info, stc, trans, src, sphere, head_pos=1.)
head_pos_sim_err = _get_head_pos_sim(raw)
head_pos_sim_err[-1.] = head_pos_sim_err[1.] # negative time
with pytest.raises(RuntimeError, match='All position times'):
simulate_raw(info, stc, trans, src, sphere,
head_pos=head_pos_sim_err)
raw_bad = raw.copy()
raw_bad.info['dig'] = None
with pytest.raises(RuntimeError, match='Cannot fit headshape'):
add_eog(raw_bad)
@pytest.mark.slowtest
def test_simulate_raw_bem(raw_data):
"""Test simulation of raw data with BEM."""
raw, src, stc, trans, sphere = raw_data
src = setup_source_space('sample', 'oct1', subjects_dir=subjects_dir)
for s in src:
s['nuse'] = 3
s['vertno'] = src[1]['vertno'][:3]
s['inuse'].fill(0)
s['inuse'][s['vertno']] = 1
# use different / more complete STC here
vertices = [s['vertno'] for s in src]
stc = SourceEstimate(np.eye(sum(len(v) for v in vertices)), vertices,
0, 1. / raw.info['sfreq'])
stcs = [stc] * 15
raw_sim_sph = simulate_raw(raw.info, stcs, trans, src, sphere)
raw_sim_bem = simulate_raw(raw.info, stcs, trans, src, bem_fname)
# some components (especially radial) might not match that well,
# so just make sure that most components have high correlation
assert_array_equal(raw_sim_sph.ch_names, raw_sim_bem.ch_names)
picks = pick_types(raw.info, meg=True, eeg=True)
n_ch = len(picks)
corr = np.corrcoef(raw_sim_sph[picks][0], raw_sim_bem[picks][0])
assert_array_equal(corr.shape, (2 * n_ch, 2 * n_ch))
med_corr = np.median(np.diag(corr[:n_ch, -n_ch:]))
assert med_corr > 0.65
# do some round-trip localization
for s in src:
transform_surface_to(s, 'head', trans)
locs = np.concatenate([s['rr'][s['vertno']] for s in src])
tmax = (len(locs) - 1) / raw.info['sfreq']
cov = make_ad_hoc_cov(raw.info)
# The tolerance for the BEM is surprisingly high (28) but I get the same
# result when using MNE-C and Xfit, even when using a proper 5120 BEM :(
for use_raw, bem, tol in ((raw_sim_sph, sphere, 2),
(raw_sim_bem, bem_fname, 31)):
events = find_events(use_raw, 'STI 014')
assert len(locs) == 6
evoked = Epochs(use_raw, events, 1, 0, tmax, baseline=None).average()
assert len(evoked.times) == len(locs)
fits = fit_dipole(evoked, cov, bem, trans, min_dist=1.)[0].pos
diffs = np.sqrt(np.sum((locs - fits) ** 2, axis=-1)) * 1000
med_diff = np.median(diffs)
assert med_diff < tol, '%s: %s' % (bem, med_diff)
def test_simulate_round_trip(raw_data):
"""Test simulate_raw round trip calculations."""
# Check a diagonal round-trip
raw, src, stc, trans, sphere = raw_data
raw.pick_types(meg=True, stim=True)
bem = read_bem_solution(bem_1_fname)
old_bem = bem.copy()
old_src = src.copy()
old_trans = trans.copy()
fwd = make_forward_solution(raw.info, trans, src, bem)
# no omissions
assert (sum(len(s['vertno']) for s in src) ==
sum(len(s['vertno']) for s in fwd['src']) ==
36)
# make sure things were not modified
assert (old_bem['surfs'][0]['coord_frame'] ==
bem['surfs'][0]['coord_frame'])
assert trans == old_trans
_compare_source_spaces(src, old_src)
data = np.eye(fwd['nsource'])
raw.crop(0, len(data) / raw.info['sfreq'], include_tmax=False)
stc = SourceEstimate(data, [s['vertno'] for s in fwd['src']],
0, 1. / raw.info['sfreq'])
for use_fwd in (None, fwd):
if use_fwd is None:
use_trans, use_src, use_bem = trans, src, bem
else:
use_trans = use_src = use_bem = None
this_raw = simulate_raw(raw.info, stc, use_trans, use_src, use_bem,
forward=use_fwd)
this_raw.pick_types(meg=True, eeg=True)
assert (old_bem['surfs'][0]['coord_frame'] ==
bem['surfs'][0]['coord_frame'])
assert trans == old_trans
_compare_source_spaces(src, old_src)
this_fwd = convert_forward_solution(fwd, force_fixed=True)
assert_allclose(this_raw[:][0], this_fwd['sol']['data'],
atol=1e-12, rtol=1e-6)
with pytest.raises(ValueError, match='If forward is not None then'):
simulate_raw(raw.info, stc, trans, src, bem, forward=fwd)
# Not iterable
with pytest.raises(TypeError, match='SourceEstimate, tuple, or iterable'):
simulate_raw(raw.info, 0., trans, src, bem, None)
# STC with a source that `src` does not have
assert 0 not in src[0]['vertno']
vertices = [[0, fwd['src'][0]['vertno'][0]], []]
stc_bad = SourceEstimate(data[:2], vertices, 0, 1. / raw.info['sfreq'])
with pytest.warns(RuntimeWarning,
match='1 of 2 SourceEstimate vertices'):
simulate_raw(raw.info, stc_bad, trans, src, bem)
assert 0 not in fwd['src'][0]['vertno']
with pytest.warns(RuntimeWarning,
match='1 of 2 SourceEstimate vertices'):
simulate_raw(raw.info, stc_bad, None, None, None, forward=fwd)
# dev_head_t mismatch
fwd['info']['dev_head_t']['trans'][0, 0] = 1.
with pytest.raises(ValueError, match='dev_head_t.*does not match'):
simulate_raw(raw.info, stc, None, None, None, forward=fwd)
@pytest.mark.slowtest
@testing.requires_testing_data
def test_simulate_raw_chpi():
"""Test simulation of raw data with cHPI."""
raw = read_raw_fif(raw_chpi_fname, allow_maxshield='yes')
picks = np.arange(len(raw.ch_names))
picks = np.setdiff1d(picks, pick_types(raw.info, meg=True, eeg=True)[::4])
raw.load_data().pick_channels([raw.ch_names[pick] for pick in picks])
raw.info.normalize_proj()
sphere = make_sphere_model('auto', 'auto', raw.info)
# make sparse spherical source space
sphere_vol = tuple(sphere['r0']) + (sphere.radius,)
src = setup_volume_source_space(sphere=sphere_vol, pos=70.,
sphere_units='m')
stcs = [_make_stc(raw, src)] * 15
# simulate data with cHPI on
raw_sim = simulate_raw(raw.info, stcs, None, src, sphere,
head_pos=pos_fname, interp='zero',
first_samp=raw.first_samp)
# need to trim extra samples off this one
raw_chpi = add_chpi(raw_sim.copy(), head_pos=pos_fname, interp='zero')
# test cHPI indication
hpi_freqs, hpi_pick, hpi_ons = _get_hpi_info(raw.info)
assert_allclose(raw_sim[hpi_pick][0], 0.)
assert_allclose(raw_chpi[hpi_pick][0], hpi_ons.sum())
# test that the cHPI signals make some reasonable values
picks_meg = pick_types(raw.info, meg=True, eeg=False)
picks_eeg = pick_types(raw.info, meg=False, eeg=True)
for picks in [picks_meg[:3], picks_eeg[:3]]:
psd_sim, freqs_sim = psd_welch(raw_sim, picks=picks)
psd_chpi, freqs_chpi = psd_welch(raw_chpi, picks=picks)
assert_array_equal(freqs_sim, freqs_chpi)
freq_idx = np.sort([np.argmin(np.abs(freqs_sim - f))
for f in hpi_freqs])
if picks is picks_meg:
assert (psd_chpi[:, freq_idx] >
100 * psd_sim[:, freq_idx]).all()
else:
assert_allclose(psd_sim, psd_chpi, atol=1e-20)
# test localization based on cHPI information
chpi_amplitudes = compute_chpi_amplitudes(raw, t_step_min=10.)
coil_locs = compute_chpi_locs(raw.info, chpi_amplitudes)
quats_sim = compute_head_pos(raw_chpi.info, coil_locs)
quats = read_head_pos(pos_fname)
_assert_quats(quats, quats_sim, dist_tol=5e-3, angle_tol=3.5,
                  vel_atol=0.03)  # velocity tolerance huge because of t_step_min above
@testing.requires_testing_data
def test_simulation_cascade():
"""Test that cascading operations do not overwrite data."""
# Create 10 second raw dataset with zeros in the data matrix
raw_null = read_raw_fif(raw_chpi_fname, allow_maxshield='yes')
raw_null.crop(0, 1).pick_types(meg=True).load_data()
raw_null.apply_function(lambda x: np.zeros_like(x))
assert_array_equal(raw_null.get_data(), 0.)
# Calculate independent signal additions
raw_eog = raw_null.copy()
add_eog(raw_eog, random_state=0)
raw_ecg = raw_null.copy()
add_ecg(raw_ecg, random_state=0)
raw_noise = raw_null.copy()
cov = make_ad_hoc_cov(raw_null.info)
add_noise(raw_noise, cov, random_state=0)
raw_chpi = raw_null.copy()
add_chpi(raw_chpi)
# Calculate Cascading signal additions
raw_cascade = raw_null.copy()
add_eog(raw_cascade, random_state=0)
add_ecg(raw_cascade, random_state=0)
add_chpi(raw_cascade)
add_noise(raw_cascade, cov, random_state=0)
cascade_data = raw_cascade.get_data()
serial_data = 0.
for raw_other in (raw_eog, raw_ecg, raw_noise, raw_chpi):
serial_data += raw_other.get_data()
assert_allclose(cascade_data, serial_data, atol=1e-20)
|
import enum
import logging
from homeassistant.components import http
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
from homeassistant.util.decorator import Registry
from .const import DOMAIN, SYN_RESOLUTION_MATCH
_LOGGER = logging.getLogger(__name__)
HANDLERS = Registry()
INTENTS_API_ENDPOINT = "/api/alexa"
class SpeechType(enum.Enum):
"""The Alexa speech types."""
plaintext = "PlainText"
ssml = "SSML"
SPEECH_MAPPINGS = {"plain": SpeechType.plaintext, "ssml": SpeechType.ssml}
class CardType(enum.Enum):
"""The Alexa card types."""
simple = "Simple"
link_account = "LinkAccount"
@callback
def async_setup(hass):
"""Activate Alexa component."""
hass.http.register_view(AlexaIntentsView)
async def async_setup_intents(hass):
"""
Do intents setup.
Right now this module does not expose any, but the intent component breaks
without it.
"""
pass # pylint: disable=unnecessary-pass
class UnknownRequest(HomeAssistantError):
"""When an unknown Alexa request is passed in."""
class AlexaIntentsView(http.HomeAssistantView):
"""Handle Alexa requests."""
url = INTENTS_API_ENDPOINT
name = "api:alexa"
async def post(self, request):
"""Handle Alexa."""
hass = request.app["hass"]
message = await request.json()
_LOGGER.debug("Received Alexa request: %s", message)
try:
response = await async_handle_message(hass, message)
return b"" if response is None else self.json(response)
except UnknownRequest as err:
_LOGGER.warning(str(err))
return self.json(intent_error_response(hass, message, str(err)))
except intent.UnknownIntent as err:
_LOGGER.warning(str(err))
return self.json(
intent_error_response(
hass,
message,
"This intent is not yet configured within Home Assistant.",
)
)
except intent.InvalidSlotInfo as err:
_LOGGER.error("Received invalid slot data from Alexa: %s", err)
return self.json(
intent_error_response(
hass, message, "Invalid slot information received for this intent."
)
)
except intent.IntentError as err:
_LOGGER.exception(str(err))
return self.json(
intent_error_response(hass, message, "Error handling intent.")
)
def intent_error_response(hass, message, error):
"""Return an Alexa response that will speak the error message."""
alexa_intent_info = message.get("request").get("intent")
alexa_response = AlexaResponse(hass, alexa_intent_info)
alexa_response.add_speech(SpeechType.plaintext, error)
return alexa_response.as_dict()
async def async_handle_message(hass, message):
"""Handle an Alexa intent.
Raises:
- UnknownRequest
- intent.UnknownIntent
- intent.InvalidSlotInfo
- intent.IntentError
"""
req = message.get("request")
req_type = req["type"]
handler = HANDLERS.get(req_type)
if not handler:
raise UnknownRequest(f"Received unknown request {req_type}")
return await handler(hass, message)
@HANDLERS.register("SessionEndedRequest")
async def async_handle_session_end(hass, message):
"""Handle a session end request."""
return None
@HANDLERS.register("IntentRequest")
@HANDLERS.register("LaunchRequest")
async def async_handle_intent(hass, message):
"""Handle an intent request.
Raises:
- intent.UnknownIntent
- intent.InvalidSlotInfo
- intent.IntentError
"""
req = message.get("request")
alexa_intent_info = req.get("intent")
alexa_response = AlexaResponse(hass, alexa_intent_info)
if req["type"] == "LaunchRequest":
intent_name = (
message.get("session", {}).get("application", {}).get("applicationId")
)
else:
intent_name = alexa_intent_info["name"]
intent_response = await intent.async_handle(
hass,
DOMAIN,
intent_name,
{key: {"value": value} for key, value in alexa_response.variables.items()},
)
for intent_speech, alexa_speech in SPEECH_MAPPINGS.items():
if intent_speech in intent_response.speech:
alexa_response.add_speech(
alexa_speech, intent_response.speech[intent_speech]["speech"]
)
break
if "simple" in intent_response.card:
alexa_response.add_card(
CardType.simple,
intent_response.card["simple"]["title"],
intent_response.card["simple"]["content"],
)
return alexa_response.as_dict()
def resolve_slot_synonyms(key, request):
"""Check slot request for synonym resolutions."""
# Default to the spoken slot value if more than one or none are found. For
# reference to the request object structure, see the Alexa docs:
# https://tinyurl.com/ybvm7jhs
resolved_value = request["value"]
if (
"resolutions" in request
and "resolutionsPerAuthority" in request["resolutions"]
and len(request["resolutions"]["resolutionsPerAuthority"]) >= 1
):
# Extract all of the possible values from each authority with a
# successful match
possible_values = []
for entry in request["resolutions"]["resolutionsPerAuthority"]:
if entry["status"]["code"] != SYN_RESOLUTION_MATCH:
continue
possible_values.extend([item["value"]["name"] for item in entry["values"]])
# If there is only one match use the resolved value, otherwise the
# resolution cannot be determined, so use the spoken slot value
if len(possible_values) == 1:
resolved_value = possible_values[0]
else:
_LOGGER.debug(
"Found multiple synonym resolutions for slot value: {%s: %s}",
key,
resolved_value,
)
return resolved_value
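# Illustrative example (payload names hypothetical; structure mirrors the parsing
# above): a slot with exactly one successful entity resolution replaces the spoken
# value, while zero or multiple matches fall back to the spoken value.
#
#     slot = {
#         "value": "living room lights",
#         "resolutions": {
#             "resolutionsPerAuthority": [
#                 {
#                     "status": {"code": SYN_RESOLUTION_MATCH},
#                     "values": [{"value": {"name": "light.living_room"}}],
#                 }
#             ]
#         },
#     }
#     resolve_slot_synonyms("entity", slot)  # -> "light.living_room"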
class AlexaResponse:
"""Help generating the response for Alexa."""
def __init__(self, hass, intent_info):
"""Initialize the response."""
self.hass = hass
self.speech = None
self.card = None
self.reprompt = None
self.session_attributes = {}
self.should_end_session = True
self.variables = {}
# Intent is None if request was a LaunchRequest or SessionEndedRequest
if intent_info is not None:
for key, value in intent_info.get("slots", {}).items():
# Only include slots with values
if "value" not in value:
continue
_key = key.replace(".", "_")
self.variables[_key] = resolve_slot_synonyms(key, value)
def add_card(self, card_type, title, content):
"""Add a card to the response."""
assert self.card is None
card = {"type": card_type.value}
if card_type == CardType.link_account:
self.card = card
return
card["title"] = title
card["content"] = content
self.card = card
def add_speech(self, speech_type, text):
"""Add speech to the response."""
assert self.speech is None
key = "ssml" if speech_type == SpeechType.ssml else "text"
self.speech = {"type": speech_type.value, key: text}
def add_reprompt(self, speech_type, text):
"""Add reprompt if user does not answer."""
assert self.reprompt is None
key = "ssml" if speech_type == SpeechType.ssml else "text"
self.reprompt = {
"type": speech_type.value,
key: text.async_render(self.variables, parse_result=False),
}
def as_dict(self):
"""Return response in an Alexa valid dict."""
response = {"shouldEndSession": self.should_end_session}
if self.card is not None:
response["card"] = self.card
if self.speech is not None:
response["outputSpeech"] = self.speech
if self.reprompt is not None:
response["reprompt"] = {"outputSpeech": self.reprompt}
return {
"version": "1.0",
"sessionAttributes": self.session_attributes,
"response": response,
}
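# Minimal rendering sketch (illustrative): a response carrying only plain-text
# speech, built without any intent slots.
#
#     resp = AlexaResponse(hass=None, intent_info=None)
#     resp.add_speech(SpeechType.plaintext, "Door is locked")
#     resp.as_dict()
#     # {'version': '1.0', 'sessionAttributes': {},
#     #  'response': {'shouldEndSession': True,
#     #               'outputSpeech': {'type': 'PlainText', 'text': 'Door is locked'}}}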
|
import time
import requests
import json
import sys
import dns
import dns.exception
import dns.name
import dns.query
import dns.resolver
from flask import current_app
from lemur.extensions import metrics, sentry
class Record:
"""
This class implements an Ultra DNS record.
Accepts the response from the API call as the argument.
"""
def __init__(self, _data):
# Since we are dealing with only TXT records for Lemur, we expect only 1 RRSet in the response.
# Thus we default to picking up the first entry (_data["rrsets"][0]) from the response.
self._data = _data["rrSets"][0]
@property
def name(self):
return self._data["ownerName"]
@property
def rrtype(self):
return self._data["rrtype"]
@property
def rdata(self):
return self._data["rdata"]
@property
def ttl(self):
return self._data["ttl"]
class Zone:
"""
This class implements an Ultra DNS zone.
"""
def __init__(self, _data, _client="Client"):
self._data = _data
self._client = _client
@property
def name(self):
"""
Zone name, has a trailing "." at the end, which we manually remove.
"""
return self._data["properties"]["name"][:-1]
@property
def authoritative_type(self):
"""
Indicates whether the zone is setup as a PRIMARY or SECONDARY
"""
return self._data["properties"]["type"]
@property
def record_count(self):
return self._data["properties"]["resourceRecordCount"]
@property
def status(self):
"""
Returns the status of the zone - ACTIVE, SUSPENDED, etc
"""
return self._data["properties"]["status"]
def get_ultradns_token():
"""
Function to call the UltraDNS Authorization API.
Returns the Authorization access_token which is valid for 1 hour.
Each request calls this function and we generate a new token every time.
"""
path = "/v2/authorization/token"
data = {
"grant_type": "password",
"username": current_app.config.get("ACME_ULTRADNS_USERNAME", ""),
"password": current_app.config.get("ACME_ULTRADNS_PASSWORD", ""),
}
base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "")
resp = requests.post(f"{base_uri}{path}", data=data, verify=True)
return resp.json()["access_token"]
def _generate_header():
"""
Function to generate the header for a request.
    Contains the Authorization access_token obtained from the get_ultradns_token() function.
"""
access_token = get_ultradns_token()
return {"Authorization": f"Bearer {access_token}", "Content-Type": "application/json"}
def _paginate(path, key):
    """
    Yield pages of results for the given UltraDNS list endpoint.
    The initial request uses limit=1 only to learn the total count; subsequent
    requests then page through the results `limit` items at a time.
    """
    limit = 100
    params = {"offset": 0, "limit": 1}
resp = _get(path, params)
for index in range(0, resp["resultInfo"]["totalCount"], limit):
params["offset"] = index
params["limit"] = limit
resp = _get(path, params)
yield resp[key]
def _get(path, params=None):
"""Function to execute a GET request on the given URL (base_uri + path) with given params"""
base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "")
resp = requests.get(
f"{base_uri}{path}",
headers=_generate_header(),
params=params,
verify=True,
)
resp.raise_for_status()
return resp.json()
def _delete(path):
"""Function to execute a DELETE request on the given URL"""
base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "")
resp = requests.delete(
f"{base_uri}{path}",
headers=_generate_header(),
verify=True,
)
resp.raise_for_status()
def _post(path, params):
"""Executes a POST request on given URL. Body is sent in JSON format"""
base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "")
resp = requests.post(
f"{base_uri}{path}",
headers=_generate_header(),
data=json.dumps(params),
verify=True,
)
resp.raise_for_status()
def _has_dns_propagated(name, token, domain):
"""
    Check whether the DNS changes made by Lemur have propagated to public DNS.
    Invoked by the wait_for_dns_change() function.
"""
txt_records = []
try:
dns_resolver = dns.resolver.Resolver()
dns_resolver.nameservers = [domain]
dns_response = dns_resolver.query(name, "TXT")
for rdata in dns_response:
for txt_record in rdata.strings:
txt_records.append(txt_record.decode("utf-8"))
except dns.exception.DNSException:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.fail", "counter", 1)
return False
for txt_record in txt_records:
if txt_record == token:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.success", "counter", 1)
return True
return False
def wait_for_dns_change(change_id, account_number=None):
"""
    Waits and checks whether the DNS changes have propagated.
    First checks the domain's authoritative server. Once this succeeds,
    we query a public DNS server (Google <8.8.8.8> in our case).
"""
fqdn, token = change_id
number_of_attempts = 20
nameserver = get_authoritative_nameserver(fqdn)
for attempts in range(0, number_of_attempts):
status = _has_dns_propagated(fqdn, token, nameserver)
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": fqdn,
"status": status,
"message": "Record status on ultraDNS authoritative server"
}
current_app.logger.debug(log_data)
if status:
time.sleep(10)
break
time.sleep(10)
if status:
nameserver = get_public_authoritative_nameserver()
for attempts in range(0, number_of_attempts):
status = _has_dns_propagated(fqdn, token, nameserver)
log_data = {
"function": function,
"fqdn": fqdn,
"status": status,
"message": "Record status on Public DNS"
}
current_app.logger.debug(log_data)
if status:
metrics.send(f"{function}.success", "counter", 1)
break
time.sleep(10)
if not status:
metrics.send(f"{function}.fail", "counter", 1, metric_tags={"fqdn": fqdn, "txt_record": token})
sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)})
return
def get_zones(account_number):
"""Get zones from the UltraDNS"""
path = "/v2/zones"
zones = []
for page in _paginate(path, "zones"):
for elem in page:
# UltraDNS zone names end with a "." - Example - lemur.example.com.
# We pick out the names minus the "." at the end while returning the list
zone = Zone(elem)
if zone.authoritative_type == "PRIMARY" and zone.status == "ACTIVE":
zones.append(zone.name)
return zones
def get_zone_name(domain, account_number):
"""Get the matching zone for the given domain"""
zones = get_zones(account_number)
zone_name = ""
for z in zones:
if domain.endswith(z):
# Find the most specific zone possible for the domain
# Ex: If fqdn is a.b.c.com, there is a zone for c.com,
# and a zone for b.c.com, we want to use b.c.com.
if z.count(".") > zone_name.count("."):
zone_name = z
if not zone_name:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.fail", "counter", 1)
raise Exception(f"No UltraDNS zone found for domain: {domain}")
return zone_name
def create_txt_record(domain, token, account_number):
"""
Create a TXT record for the given domain.
The part of the domain that matches with the zone becomes the zone name.
The remainder becomes the owner name (referred to as node name here)
    Example: Let's say we have a zone named "example.com" in UltraDNS and we
get a request to create a cert for lemur.example.com
Domain - _acme-challenge.lemur.example.com
Matching zone - example.com
Owner name - _acme-challenge.lemur
"""
zone_name = get_zone_name(domain, account_number)
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = f"{node_name}.{zone_name}"
path = f"/v2/zones/{zone_name}/rrsets/TXT/{node_name}"
params = {
"ttl": 5,
"rdata": [
f"{token}"
],
}
try:
_post(path, params)
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"fqdn": fqdn,
"token": token,
"message": "TXT record created"
}
current_app.logger.debug(log_data)
except Exception as e:
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"domain": domain,
"token": token,
"Exception": e,
"message": "Unable to add record. Record already exists."
}
current_app.logger.debug(log_data)
change_id = (fqdn, token)
return change_id
def delete_txt_record(change_id, account_number, domain, token):
"""
Delete the TXT record that was created in the create_txt_record() function.
UltraDNS handles records differently compared to Dyn. It creates an RRSet
which is a set of records of the same type and owner. This means
that while deleting the record, we cannot delete any individual record from
the RRSet. Instead, we have to delete the entire RRSet. If multiple certs are
being created for the same domain at the same time, the challenge TXT records
that are created will be added under the same RRSet. If the RRSet had more
than 1 record, then we create a new RRSet on UltraDNS minus the record that
has to be deleted.
"""
if not domain:
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"message": "No domain passed"
}
current_app.logger.debug(log_data)
return
zone_name = get_zone_name(domain, account_number)
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
path = f"/v2/zones/{zone_name}/rrsets/16/{node_name}"
try:
rrsets = _get(path)
record = Record(rrsets)
except Exception as e:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.geterror", "counter", 1)
        # No TXT records remain, or the host is no longer in the zone because all records have been deleted.
return
try:
# Remove the record from the RRSet locally
record.rdata.remove(f"{token}")
except ValueError:
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"token": token,
"message": "Token not found"
}
current_app.logger.debug(log_data)
return
# Delete the RRSet from UltraDNS
_delete(path)
# Check if the RRSet has more records. If yes, add the modified RRSet back to UltraDNS
if len(record.rdata) > 0:
params = {
"ttl": 5,
"rdata": record.rdata,
}
_post(path, params)
def delete_acme_txt_records(domain):
if not domain:
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"message": "No domain passed"
}
current_app.logger.debug(log_data)
return
acme_challenge_string = "_acme-challenge"
if not domain.startswith(acme_challenge_string):
function = sys._getframe().f_code.co_name
log_data = {
"function": function,
"domain": domain,
"acme_challenge_string": acme_challenge_string,
"message": "Domain does not start with the acme challenge string"
}
current_app.logger.debug(log_data)
return
    zone_name = get_zone_name(domain, None)  # account_number is not used by the UltraDNS zone lookup
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
path = f"/v2/zones/{zone_name}/rrsets/16/{node_name}"
_delete(path)
def get_authoritative_nameserver(domain):
"""Get the authoritative nameserver for the given domain"""
n = dns.name.from_text(domain)
depth = 2
default = dns.resolver.get_default_resolver()
nameserver = default.nameservers[0]
last = False
while not last:
s = n.split(depth)
last = s[0].to_unicode() == u"@"
sub = s[1]
query = dns.message.make_query(sub, dns.rdatatype.NS)
response = dns.query.udp(query, nameserver)
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
function = sys._getframe().f_code.co_name
metrics.send(f"{function}.error", "counter", 1)
if rcode == dns.rcode.NXDOMAIN:
raise Exception("%s does not exist." % sub)
else:
raise Exception("Error %s" % dns.rcode.to_text(rcode))
if len(response.authority) > 0:
rrset = response.authority[0]
else:
rrset = response.answer[0]
rr = rrset[0]
if rr.rdtype != dns.rdatatype.SOA:
authority = rr.target
nameserver = default.query(authority).rrset[0].to_text()
depth += 1
return nameserver
def get_public_authoritative_nameserver():
return "8.8.8.8"
|
import numpy as np
from matchzoo.engine.base_metric import BaseMetric, sort_and_couple
class MeanReciprocalRank(BaseMetric):
"""Mean reciprocal rank metric."""
ALIAS = ['mean_reciprocal_rank', 'mrr']
def __init__(self, threshold: float = 0.):
"""
        Constructor for :class:`MeanReciprocalRank`.
:param threshold: The label threshold of relevance degree.
"""
self._threshold = threshold
def __repr__(self) -> str:
""":return: Formated string representation of the metric."""
return f'{self.ALIAS[0]}({self._threshold})'
def __call__(self, y_true: np.array, y_pred: np.array) -> float:
"""
Calculate reciprocal of the rank of the first relevant item.
Example:
>>> import numpy as np
>>> y_pred = np.asarray([0.2, 0.3, 0.7, 1.0])
>>> y_true = np.asarray([1, 0, 0, 0])
>>> MeanReciprocalRank()(y_true, y_pred)
0.25
:param y_true: The ground true label of each document.
:param y_pred: The predicted scores of each document.
:return: Mean reciprocal rank.
"""
coupled_pair = sort_and_couple(y_true, y_pred)
for idx, (label, pred) in enumerate(coupled_pair):
if label > self._threshold:
return 1. / (idx + 1)
return 0.
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
CONF_ZONE_ID,
CONF_ZONE_NAME,
CONF_ZONE_TYPE,
CONF_ZONES,
SIGNAL_ZONE_CHANGED,
ZoneChangedData,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Ness Alarm binary sensor devices."""
if not discovery_info:
return
configured_zones = discovery_info[CONF_ZONES]
devices = []
for zone_config in configured_zones:
zone_type = zone_config[CONF_ZONE_TYPE]
zone_name = zone_config[CONF_ZONE_NAME]
zone_id = zone_config[CONF_ZONE_ID]
device = NessZoneBinarySensor(
zone_id=zone_id, name=zone_name, zone_type=zone_type
)
devices.append(device)
async_add_entities(devices)
class NessZoneBinarySensor(BinarySensorEntity):
"""Representation of an Ness alarm zone as a binary sensor."""
def __init__(self, zone_id, name, zone_type):
"""Initialize the binary_sensor."""
self._zone_id = zone_id
self._name = name
self._type = zone_type
self._state = 0
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_ZONE_CHANGED, self._handle_zone_change
)
)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._type
@callback
def _handle_zone_change(self, data: ZoneChangedData):
"""Handle zone state update."""
if self._zone_id == data.zone_id:
self._state = data.state
self.async_write_ha_state()
|
import textwrap
import warnings as _warnings
import numpy as _np
from .core.dataarray import DataArray as _DataArray
from .core.dataset import Dataset as _Dataset
from .core.duck_array_ops import _dask_or_eager_func
from .core.groupby import GroupBy as _GroupBy
from .core.pycompat import dask_array_type as _dask_array_type
from .core.variable import Variable as _Variable
_xarray_types = (_Variable, _DataArray, _Dataset, _GroupBy)
_dispatch_order = (_np.ndarray, _dask_array_type) + _xarray_types
def _dispatch_priority(obj):
for priority, cls in enumerate(_dispatch_order):
if isinstance(obj, cls):
return priority
return -1
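# Priority example (illustrative): with the order above, np.ndarray maps to 0,
# dask arrays to 1, and the xarray types (Variable, DataArray, Dataset, GroupBy)
# to higher values, so a binary ufunc on (ndarray, DataArray) is dispatched to the
# DataArray operand's reflexive binary op.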
class _UFuncDispatcher:
"""Wrapper for dispatching ufuncs."""
def __init__(self, name):
self._name = name
def __call__(self, *args, **kwargs):
if self._name not in ["angle", "iscomplex"]:
_warnings.warn(
"xarray.ufuncs will be deprecated when xarray no longer "
"supports versions of numpy older than v1.17. Instead, use "
"numpy ufuncs directly.",
PendingDeprecationWarning,
stacklevel=2,
)
new_args = args
f = _dask_or_eager_func(self._name, array_args=slice(len(args)))
if len(args) > 2 or len(args) == 0:
raise TypeError(
"cannot handle {} arguments for {!r}".format(len(args), self._name)
)
elif len(args) == 1:
if isinstance(args[0], _xarray_types):
f = args[0]._unary_op(self)
else: # len(args) = 2
p1, p2 = map(_dispatch_priority, args)
if p1 >= p2:
if isinstance(args[0], _xarray_types):
f = args[0]._binary_op(self)
else:
if isinstance(args[1], _xarray_types):
f = args[1]._binary_op(self, reflexive=True)
new_args = tuple(reversed(args))
res = f(*new_args, **kwargs)
if res is NotImplemented:
raise TypeError(
"%r not implemented for types (%r, %r)"
% (self._name, type(args[0]), type(args[1]))
)
return res
def _skip_signature(doc, name):
if not isinstance(doc, str):
return doc
if doc.startswith(name):
signature_end = doc.find("\n\n")
doc = doc[signature_end + 2 :]
return doc
def _remove_unused_reference_labels(doc):
if not isinstance(doc, str):
return doc
max_references = 5
for num in range(max_references):
label = f".. [{num}]"
reference = f"[{num}]_"
index = f"{num}. "
if label not in doc or reference in doc:
continue
doc = doc.replace(label, index)
return doc
def _dedent(doc):
if not isinstance(doc, str):
return doc
return textwrap.dedent(doc)
def _create_op(name):
func = _UFuncDispatcher(name)
func.__name__ = name
doc = getattr(_np, name).__doc__
doc = _remove_unused_reference_labels(_skip_signature(_dedent(doc), name))
func.__doc__ = (
"xarray specific variant of numpy.%s. Handles "
"xarray.Dataset, xarray.DataArray, xarray.Variable, "
"numpy.ndarray and dask.array.Array objects with "
"automatic dispatching.\n\n"
"Documentation from numpy:\n\n%s" % (name, doc)
)
return func
__all__ = ( # noqa: F822
"angle",
"arccos",
"arccosh",
"arcsin",
"arcsinh",
"arctan",
"arctan2",
"arctanh",
"ceil",
"conj",
"copysign",
"cos",
"cosh",
"deg2rad",
"degrees",
"exp",
"expm1",
"fabs",
"fix",
"floor",
"fmax",
"fmin",
"fmod",
"fmod",
"frexp",
"hypot",
"imag",
"iscomplex",
"isfinite",
"isinf",
"isnan",
"isreal",
"ldexp",
"log",
"log10",
"log1p",
"log2",
"logaddexp",
"logaddexp2",
"logical_and",
"logical_not",
"logical_or",
"logical_xor",
"maximum",
"minimum",
"nextafter",
"rad2deg",
"radians",
"real",
"rint",
"sign",
"signbit",
"sin",
"sinh",
"sqrt",
"square",
"tan",
"tanh",
"trunc",
)
for name in __all__:
globals()[name] = _create_op(name)
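# Usage sketch (illustrative; assumes this module is importable as xarray.ufuncs):
#
#     import xarray as xr
#     import xarray.ufuncs as xu
#     da = xr.DataArray([0.0, 1.0, 4.0])
#     xu.sqrt(da)  # DataArray([0., 1., 2.]), dispatched through DataArray's unary op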
|
from unittest import TestCase
import pandas as pd
import numpy as np
from scattertext.termscoring.RelativeEntropy import RelativeEntropy
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus
class TestRelativeEntropy(TestCase):
@classmethod
def setUpClass(cls):
cls.corpus = build_hamlet_jz_corpus()
def test_get_scores(self):
result = RelativeEntropy(self.corpus).set_categories('hamlet').get_scores()
self.assertEquals(type(result), pd.Series)
np.testing.assert_array_equal(pd.np.array(result.index), self.corpus.get_terms())
def test_get_name(self):
self.assertEquals(RelativeEntropy(self.corpus).set_categories('hamlet').get_name(), 'Frankhauser Relative Entropy')
|
import logging
import socket
import pymusiccast
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_PORT,
STATE_IDLE,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
SUPPORTED_FEATURES = (
SUPPORT_PLAY
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
)
KNOWN_HOSTS_KEY = "data_yamaha_musiccast"
INTERVAL_SECONDS = "interval_seconds"
DEFAULT_PORT = 5005
DEFAULT_INTERVAL = 480
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(INTERVAL_SECONDS, default=DEFAULT_INTERVAL): cv.positive_int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Yamaha MusicCast platform."""
known_hosts = hass.data.get(KNOWN_HOSTS_KEY)
if known_hosts is None:
known_hosts = hass.data[KNOWN_HOSTS_KEY] = []
_LOGGER.debug("known_hosts: %s", known_hosts)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
interval = config.get(INTERVAL_SECONDS)
# Get IP of host to prevent duplicates
try:
ipaddr = socket.gethostbyname(host)
except (OSError) as error:
_LOGGER.error("Could not communicate with %s:%d: %s", host, port, error)
return
if [item for item in known_hosts if item[0] == ipaddr]:
_LOGGER.warning("Host %s:%d already registered", host, port)
return
if [item for item in known_hosts if item[1] == port]:
_LOGGER.warning("Port %s:%d already registered", host, port)
return
reg_host = (ipaddr, port)
known_hosts.append(reg_host)
try:
receiver = pymusiccast.McDevice(ipaddr, udp_port=port, mc_interval=interval)
except pymusiccast.exceptions.YMCInitError as err:
_LOGGER.error(err)
receiver = None
if receiver:
for zone in receiver.zones:
_LOGGER.debug("Receiver: %s / Port: %d / Zone: %s", receiver, port, zone)
add_entities([YamahaDevice(receiver, receiver.zones[zone])], True)
else:
known_hosts.remove(reg_host)
class YamahaDevice(MediaPlayerEntity):
"""Representation of a Yamaha MusicCast device."""
def __init__(self, recv, zone):
"""Initialize the Yamaha MusicCast device."""
self._recv = recv
self._name = recv.name
self._source = None
self._source_list = []
self._zone = zone
self.mute = False
self.media_status = None
self.media_status_received = None
self.power = STATE_UNKNOWN
self.status = STATE_UNKNOWN
self.volume = 0
self.volume_max = 0
self._recv.set_yamaha_device(self)
self._zone.set_yamaha_device(self)
@property
def name(self):
"""Return the name of the device."""
return f"{self._name} ({self._zone.zone_id})"
@property
def state(self):
"""Return the state of the device."""
if self.power == STATE_ON and self.status != STATE_UNKNOWN:
return self.status
return self.power
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.mute
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.volume
@property
def supported_features(self):
"""Flag of features that are supported."""
return SUPPORTED_FEATURES
@property
def source(self):
"""Return the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
@source_list.setter
def source_list(self, value):
"""Set source_list attribute."""
self._source_list = value
@property
def media_content_type(self):
"""Return the media content type."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.media_status.media_duration if self.media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
return self.media_status.media_image_url if self.media_status else None
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self.media_status.media_artist if self.media_status else None
@property
def media_album(self):
"""Album of current playing media, music track only."""
return self.media_status.media_album if self.media_status else None
@property
def media_track(self):
"""Track number of current playing media, music track only."""
return self.media_status.media_track if self.media_status else None
@property
def media_title(self):
"""Title of current playing media."""
return self.media_status.media_title if self.media_status else None
@property
def media_position(self):
"""Position of current playing media in seconds."""
if self.media_status and self.state in [
STATE_PLAYING,
STATE_PAUSED,
STATE_IDLE,
]:
return self.media_status.media_position
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid.
Returns value from homeassistant.util.dt.utcnow().
"""
return self.media_status_received if self.media_status else None
def update(self):
"""Get the latest details from the device."""
_LOGGER.debug("update: %s", self.entity_id)
self._recv.update_status()
self._zone.update_status()
def update_hass(self):
"""Push updates to Home Assistant."""
if self.entity_id:
_LOGGER.debug("update_hass: pushing updates")
self.schedule_update_ha_state()
return True
def turn_on(self):
"""Turn on specified media player or all."""
_LOGGER.debug("Turn device: on")
self._zone.set_power(True)
def turn_off(self):
"""Turn off specified media player or all."""
_LOGGER.debug("Turn device: off")
self._zone.set_power(False)
def media_play(self):
"""Send the media player the command for play/pause."""
_LOGGER.debug("Play")
self._recv.set_playback("play")
def media_pause(self):
"""Send the media player the command for pause."""
_LOGGER.debug("Pause")
self._recv.set_playback("pause")
def media_stop(self):
"""Send the media player the stop command."""
_LOGGER.debug("Stop")
self._recv.set_playback("stop")
def media_previous_track(self):
"""Send the media player the command for prev track."""
_LOGGER.debug("Previous")
self._recv.set_playback("previous")
def media_next_track(self):
"""Send the media player the command for next track."""
_LOGGER.debug("Next")
self._recv.set_playback("next")
def mute_volume(self, mute):
"""Send mute command."""
_LOGGER.debug("Mute volume: %s", mute)
self._zone.set_mute(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
_LOGGER.debug("Volume level: %.2f / %d", volume, volume * self.volume_max)
self._zone.set_volume(volume * self.volume_max)
def select_source(self, source):
"""Send the media player the command to select input source."""
_LOGGER.debug("select_source: %s", source)
self.status = STATE_UNKNOWN
self._zone.set_input(source)
def new_media_status(self, status):
"""Handle updates of the media status."""
_LOGGER.debug("new media_status arrived")
self.media_status = status
self.media_status_received = dt_util.utcnow()
|
import numpy as np
import warnings
from typing import Optional, Any, Tuple
from tensornetwork.block_sparse.blocksparse_utils import (
_find_transposed_diagonal_sparse_blocks)
from tensornetwork.block_sparse.utils import get_real_dtype
from tensornetwork.block_sparse.sizetypes import SIZE_T
from tensornetwork.block_sparse.blocksparsetensor import (BlockSparseTensor,
ChargeArray)
Tensor = Any
def svd(
bt,
tensor: BlockSparseTensor,
pivot_axis: int,
max_singular_values: Optional[int] = None,
max_truncation_error: Optional[float] = None,
relative: Optional[bool] = False) -> Tuple[Tensor, Tensor, Tensor, Tensor]:
"""
Computes the singular value decomposition (SVD) of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
matrix = bt.reshape(tensor, [np.prod(left_dims), np.prod(right_dims)])
flat_charges = matrix._charges
flat_flows = matrix._flows
flat_order = matrix.flat_order
tr_partition = len(matrix._order[0])
blocks, charges, shapes = _find_transposed_diagonal_sparse_blocks(
flat_charges, flat_flows, tr_partition, flat_order)
u_blocks = []
singvals = []
v_blocks = []
for n, b in enumerate(blocks):
out = np.linalg.svd(
np.reshape(matrix.data[b], shapes[:, n]),
full_matrices=False,
compute_uv=True)
u_blocks.append(out[0])
singvals.append(out[1])
v_blocks.append(out[2])
orig_num_singvals = np.int64(np.sum([len(s) for s in singvals]))
orig_block_size = [len(s) for s in singvals]
discarded_singvals = np.zeros(0, dtype=get_real_dtype(tensor.dtype))
if (max_singular_values
is not None) and (max_singular_values >= orig_num_singvals):
max_singular_values = None
if (max_truncation_error is not None) or (max_singular_values is not None):
max_D = np.max([len(s) for s in singvals]) if len(singvals) > 0 else 0
#extend singvals of all blocks into a matrix by padding each block with 0
if len(singvals) > 0:
extended_singvals = np.stack([
np.append(s, np.zeros(max_D - len(s), dtype=s.dtype))
for s in singvals
],
axis=1)
else:
extended_singvals = np.empty((0, 0), dtype=get_real_dtype(tensor.dtype))
extended_flat_singvals = np.ravel(extended_singvals)
#sort singular values
inds = np.argsort(extended_flat_singvals, kind='stable')
discarded_inds = np.zeros(0, dtype=SIZE_T)
if inds.shape[0] > 0:
maxind = inds[-1]
else:
maxind = 0
if max_truncation_error is not None:
if relative and (len(singvals) > 0):
max_truncation_error = max_truncation_error * np.max(
[s[0] for s in singvals])
kept_inds_mask = np.sqrt(
np.cumsum(np.square(
extended_flat_singvals[inds]))) > max_truncation_error
trunc_inds_mask = np.logical_not(kept_inds_mask)
discarded_inds = inds[trunc_inds_mask]
inds = inds[kept_inds_mask]
if max_singular_values is not None:
#if the original number of non-zero singular values
#is smaller than `max_singular_values` we need to reset
#`max_singular_values` (we were filling in 0.0 into singular
      #value blocks to facilitate truncation steps, thus we could end up
#with more singular values than originally there).
if max_singular_values > orig_num_singvals:
max_singular_values = orig_num_singvals
if max_singular_values < len(inds):
discarded_inds = np.append(discarded_inds,
inds[:(-1) * max_singular_values])
inds = inds[(-1) * max_singular_values::]
if len(inds) == 0:
#special case of truncation to 0 dimension;
warnings.warn("svd_decomposition truncated to 0 dimensions. "
"Adjusting to `max_singular_values = 1`")
inds = np.asarray([maxind])
if extended_singvals.shape[1] > 0:
#pylint: disable=no-member
keep = np.divmod(inds, extended_singvals.shape[1])
disc = np.divmod(discarded_inds, extended_singvals.shape[1])
else:
keep = (np.zeros(1, dtype=SIZE_T), np.zeros(1, dtype=SIZE_T))
disc = (np.zeros(0, dtype=SIZE_T), np.zeros(0, dtype=SIZE_T))
newsingvals = [
extended_singvals[keep[0][keep[1] == n], keep[1][keep[1] == n]][::-1]
for n in range(extended_singvals.shape[1])
]
discsingvals = [
extended_singvals[disc[0][disc[1] == n], disc[1][disc[1] == n]][::-1]
for n in range(extended_singvals.shape[1])
]
new_block_size = [len(s) for s in newsingvals]
discsingvals = [
d[:(orig_block_size[n] - new_block_size[n])]
for n, d in enumerate(discsingvals)
]
singvals = newsingvals
discarded_singvals = discsingvals
if len(singvals) > 0:
left_singval_charge_labels = np.concatenate([
np.full(singvals[n].shape[0], fill_value=n, dtype=np.int16)
for n in range(len(singvals))
])
all_singvals = np.concatenate(singvals)
#define the new charges on the two central bonds
left_charge_labels = np.concatenate([
np.full(len(singvals[n]), fill_value=n, dtype=np.int16)
for n in range(len(u_blocks))
])
right_charge_labels = np.concatenate([
np.full(len(singvals[n]), fill_value=n, dtype=np.int16)
for n in range(len(v_blocks))
])
all_ublocks = np.concatenate([
np.ravel(np.transpose(u_blocks[n][:, 0:len(singvals[n])]))
for n in range(len(u_blocks))
])
all_vblocks = np.concatenate([
np.ravel(v_blocks[n][0:len(singvals[n]), :])
for n in range(len(v_blocks))
])
else:
left_singval_charge_labels = np.empty(0, dtype=np.int16)
all_singvals = np.empty(0, dtype=get_real_dtype(tensor.dtype))
left_charge_labels = np.empty(0, dtype=np.int16)
right_charge_labels = np.empty(0, dtype=np.int16)
all_ublocks = np.empty(0, dtype=get_real_dtype(tensor.dtype))
all_vblocks = np.empty(0, dtype=get_real_dtype(tensor.dtype))
if len(discarded_singvals) > 0:
tmp_labels = [
np.full(discarded_singvals[n].shape[0], fill_value=n, dtype=np.int16)
for n in range(len(discarded_singvals))
]
left_discarded_singval_charge_labels = np.concatenate(tmp_labels)
all_discarded_singvals = np.concatenate(discarded_singvals)
else:
left_discarded_singval_charge_labels = np.empty(0, dtype=np.int16)
all_discarded_singvals = np.empty(0, dtype=get_real_dtype(tensor.dtype))
left_singval_charge = charges[left_singval_charge_labels]
S = ChargeArray(all_singvals, [left_singval_charge], [False])
left_discarded_singval_charge = charges[left_discarded_singval_charge_labels]
Sdisc = ChargeArray(all_discarded_singvals, [left_discarded_singval_charge],
[False])
new_left_charge = charges[left_charge_labels]
new_right_charge = charges[right_charge_labels]
#get the indices of the new tensors U,S and V
charges_u = [new_left_charge] + [matrix._charges[o] for o in matrix._order[0]]
order_u = [[0]] + [list(np.arange(1, len(matrix._order[0]) + 1))]
flows_u = [True] + [matrix._flows[o] for o in matrix._order[0]]
charges_v = [new_right_charge
] + [matrix._charges[o] for o in matrix._order[1]]
flows_v = [False] + [matrix._flows[o] for o in matrix._order[1]]
order_v = [[0]] + [list(np.arange(1, len(matrix._order[1]) + 1))]
#We fill in data into the transposed U
U = BlockSparseTensor(
all_ublocks,
charges=charges_u,
flows=flows_u,
order=order_u,
check_consistency=False).transpose((1, 0))
V = BlockSparseTensor(
all_vblocks,
charges=charges_v,
flows=flows_v,
order=order_v,
check_consistency=False)
left_shape = left_dims + (S.shape[0],)
right_shape = (S.shape[0],) + right_dims
return U.reshape(left_shape), S, V.reshape(right_shape), Sdisc
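# Truncation bookkeeping sketch (illustrative, NumPy only): the per-block singular
# values are padded into one (max_D, n_blocks) matrix, flattened, and globally
# argsorted so a single top-k (or threshold) cut can be mapped back to per-block
# kept counts via divmod, as done inside `svd` above.
#
#     import numpy as np
#     singvals = [np.array([3.0, 1.0]), np.array([2.0])]
#     max_D = max(len(s) for s in singvals)
#     ext = np.stack([np.append(s, np.zeros(max_D - len(s))) for s in singvals], axis=1)
#     inds = np.argsort(np.ravel(ext), kind='stable')[-2:]  # keep the 2 largest values
#     rows, cols = np.divmod(inds, ext.shape[1])            # col == block index
#     kept_per_block = [int(np.sum(cols == n)) for n in range(ext.shape[1])]  # [1, 1]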
def qr(bt, tensor: BlockSparseTensor, pivot_axis: int) -> Tuple[Tensor, Tensor]:
"""Computes the QR decomposition of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
tensor = bt.reshape(tensor, [np.prod(left_dims), np.prod(right_dims)])
q, r = bt.qr(tensor)
center_dim = q.shape[1]
q = bt.reshape(q, list(left_dims) + [center_dim])
r = bt.reshape(r, [center_dim] + list(right_dims))
return q, r
def rq(bt, tensor: BlockSparseTensor, pivot_axis: int) -> Tuple[Tensor, Tensor]:
"""Computes the RQ (reversed QR) decomposition of a tensor.
See tensornetwork.backends.tensorflow.decompositions for details.
"""
left_dims = tensor.shape[:pivot_axis]
right_dims = tensor.shape[pivot_axis:]
tensor = bt.reshape(tensor, [np.prod(left_dims), np.prod(right_dims)])
q, r = bt.qr(bt.conj(bt.transpose(tensor, (1, 0))))
r, q = bt.conj(bt.transpose(r, (1, 0))), bt.conj(bt.transpose(
q, (1, 0))) #M=r*q at this point
center_dim = r.shape[1]
r = bt.reshape(r, list(left_dims) + [center_dim])
q = bt.reshape(q, [center_dim] + list(right_dims))
return r, q
|
from datetime import datetime
from flask import Blueprint, g
from flask_restful import reqparse, Api
from lemur.api_keys import service
from lemur.auth.service import AuthenticatedResource, create_token
from lemur.auth.permissions import ApiKeyCreatorPermission
from lemur.common.schema import validate_schema
from lemur.common.utils import paginated_parser
from lemur.api_keys.schemas import (
api_key_input_schema,
api_key_revoke_schema,
api_key_output_schema,
api_keys_output_schema,
api_key_described_output_schema,
user_api_key_input_schema,
)
mod = Blueprint("api_keys", __name__)
api = Api(mod)
class ApiKeyList(AuthenticatedResource):
""" Defines the 'api_keys' endpoint """
def __init__(self):
super(ApiKeyList, self).__init__()
@validate_schema(None, api_keys_output_schema)
def get(self):
"""
.. http:get:: /keys
The current list of api keys, that you can see.
**Example request**:
.. sourcecode:: http
GET /keys HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"items": [
{
"id": 1,
"name": "custom name",
"user_id": 1,
"ttl": -1,
"issued_at": 12,
"revoked": false
}
],
"total": 1
}
:query sortBy: field to sort on
:query sortDir: asc or desc
:query page: int default is 1
:query count: count number. default is 10
:query user_id: a user to filter by.
:query id: an access key to filter by.
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
parser = paginated_parser.copy()
args = parser.parse_args()
args["has_permission"] = ApiKeyCreatorPermission().can()
args["requesting_user_id"] = g.current_user.id
return service.render(args)
@validate_schema(api_key_input_schema, api_key_output_schema)
def post(self, data=None):
"""
.. http:post:: /keys
Creates an API Key.
**Example request**:
.. sourcecode:: http
POST /keys HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"name": "my custom name",
"user_id": 1,
"ttl": -1
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
if not ApiKeyCreatorPermission().can():
if data["user"]["id"] != g.current_user.id:
return (
dict(
message="You are not authorized to create tokens for: {0}".format(
data["user"]["username"]
)
),
403,
)
access_token = service.create(
name=data["name"],
user_id=data["user"]["id"],
ttl=data["ttl"],
revoked=False,
issued_at=int(datetime.utcnow().timestamp()),
)
return dict(
jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)
)
class ApiKeyUserList(AuthenticatedResource):
""" Defines the 'keys' endpoint on the 'users' endpoint. """
def __init__(self):
super(ApiKeyUserList, self).__init__()
@validate_schema(None, api_keys_output_schema)
def get(self, user_id):
"""
.. http:get:: /users/:user_id/keys
The current list of api keys for a user, that you can see.
**Example request**:
.. sourcecode:: http
GET /users/1/keys HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"items": [
{
"id": 1,
"name": "custom name",
"user_id": 1,
"ttl": -1,
"issued_at": 12,
"revoked": false
}
],
"total": 1
}
:query sortBy: field to sort on
:query sortDir: asc or desc
:query page: int default is 1
:query count: count number. default is 10
:query id: an access key to filter by.
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
parser = paginated_parser.copy()
args = parser.parse_args()
args["has_permission"] = ApiKeyCreatorPermission().can()
args["requesting_user_id"] = g.current_user.id
args["user_id"] = user_id
return service.render(args)
@validate_schema(user_api_key_input_schema, api_key_output_schema)
def post(self, user_id, data=None):
"""
.. http:post:: /users/:user_id/keys
Creates an API Key for a user.
**Example request**:
.. sourcecode:: http
POST /users/1/keys HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"name": "my custom name"
"ttl": -1
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
if not ApiKeyCreatorPermission().can():
if user_id != g.current_user.id:
return (
dict(
message="You are not authorized to create tokens for: {0}".format(
user_id
)
),
403,
)
access_token = service.create(
name=data["name"],
user_id=user_id,
ttl=data["ttl"],
revoked=False,
issued_at=int(datetime.utcnow().timestamp()),
)
return dict(
jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)
)
class ApiKeys(AuthenticatedResource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(ApiKeys, self).__init__()
@validate_schema(None, api_key_output_schema)
def get(self, aid):
"""
.. http:get:: /keys/1
Fetch one api key
**Example request**:
.. sourcecode:: http
GET /keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to view this token!"), 403
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
@validate_schema(api_key_revoke_schema, api_key_output_schema)
def put(self, aid, data=None):
"""
.. http:put:: /keys/1
update one api key
**Example request**:
.. sourcecode:: http
PUT /keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"name": "new_name",
"revoked": false,
"ttl": -1
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to update this token!"), 403
service.update(
access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"]
)
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
def delete(self, aid):
"""
.. http:delete:: /keys/1
deletes one api key
**Example request**:
.. sourcecode:: http
DELETE /keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"result": true
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to delete this token!"), 403
service.delete(access_key)
return {"result": True}
class UserApiKeys(AuthenticatedResource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(UserApiKeys, self).__init__()
@validate_schema(None, api_key_output_schema)
def get(self, uid, aid):
"""
.. http:get:: /users/1/keys/1
Fetch one api key
**Example request**:
.. sourcecode:: http
GET /users/1/api_keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
if uid != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to view this token!"), 403
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != uid:
return dict(message="You are not authorized to view this token!"), 403
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
@validate_schema(api_key_revoke_schema, api_key_output_schema)
def put(self, uid, aid, data=None):
"""
.. http:put:: /users/1/keys/1
update one api key
**Example request**:
.. sourcecode:: http
PUT /users/1/keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
{
"name": "new_name",
"revoked": false,
"ttl": -1
}
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"jwt": ""
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
if uid != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to view this token!"), 403
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != uid:
return dict(message="You are not authorized to update this token!"), 403
service.update(
access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"]
)
return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl))
def delete(self, uid, aid):
"""
.. http:delete:: /users/1/keys/1
deletes one api key
**Example request**:
.. sourcecode:: http
DELETE /users/1/keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"result": true
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
if uid != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to view this token!"), 403
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != uid:
return dict(message="You are not authorized to delete this token!"), 403
service.delete(access_key)
return {"result": True}
class ApiKeysDescribed(AuthenticatedResource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(ApiKeysDescribed, self).__init__()
@validate_schema(None, api_key_described_output_schema)
def get(self, aid):
"""
.. http:get:: /keys/1/described
Fetch one api key
**Example request**:
.. sourcecode:: http
GET /keys/1 HTTP/1.1
Host: example.com
Accept: application/json, text/javascript
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: text/javascript
{
"id": 2,
"name": "hoi",
"user_id": 2,
"ttl": -1,
"issued_at": 1222222,
"revoked": false
}
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
:statuscode 403: unauthenticated
"""
access_key = service.get(aid)
if access_key is None:
return dict(message="This token does not exist!"), 404
if access_key.user_id != g.current_user.id:
if not ApiKeyCreatorPermission().can():
return dict(message="You are not authorized to view this token!"), 403
return access_key
api.add_resource(ApiKeyList, "/keys", endpoint="api_keys")
api.add_resource(ApiKeys, "/keys/<int:aid>", endpoint="api_key")
api.add_resource(
ApiKeysDescribed, "/keys/<int:aid>/described", endpoint="api_key_described"
)
api.add_resource(ApiKeyUserList, "/users/<int:user_id>/keys", endpoint="user_api_keys")
api.add_resource(
UserApiKeys, "/users/<int:uid>/keys/<int:aid>", endpoint="user_api_key"
)
|
from datetime import datetime
from glob import glob
from os.path import basename, join, splitext
from xml.etree.ElementTree import parse
import numpy as np
from ...utils import logger
def _read_events(input_fname, info):
"""Read events for the record.
Parameters
----------
input_fname : str
The file path.
info : dict
        Header info dictionary.
"""
n_samples = info['last_samps'][-1]
mff_events, event_codes = _read_mff_events(input_fname, info['sfreq'])
info['n_events'] = len(event_codes)
info['event_codes'] = event_codes
events = np.zeros([info['n_events'], info['n_segments'] * n_samples])
for n, event in enumerate(event_codes):
for i in mff_events[event]:
events[n][i] = n + 1
return events, info
def _read_mff_events(filename, sfreq):
"""Extract the events.
Parameters
----------
filename : str
File path.
sfreq : float
The sampling frequency
"""
orig = {}
for xml_file in glob(join(filename, '*.xml')):
xml_type = splitext(basename(xml_file))[0]
orig[xml_type] = _parse_xml(xml_file)
xml_files = orig.keys()
xml_events = [x for x in xml_files if x[:7] == 'Events_']
for item in orig['info']:
if 'recordTime' in item:
start_time = _ns2py_time(item['recordTime'])
break
markers = []
code = []
for xml in xml_events:
for event in orig[xml][2:]:
event_start = _ns2py_time(event['beginTime'])
start = (event_start - start_time).total_seconds()
if event['code'] not in code:
code.append(event['code'])
marker = {'name': event['code'],
'start': start,
'start_sample': int(np.fix(start * sfreq)),
'end': start + float(event['duration']) / 1e9,
'chan': None,
}
markers.append(marker)
events_tims = dict()
for ev in code:
trig_samp = list(c['start_sample'] for n,
c in enumerate(markers) if c['name'] == ev)
events_tims.update({ev: trig_samp})
return events_tims, code
def _parse_xml(xml_file):
"""Parse XML file."""
xml = parse(xml_file)
root = xml.getroot()
return _xml2list(root)
def _xml2list(root):
"""Parse XML item."""
output = []
for element in root:
if len(element) > 0:
if element[0].tag != element[-1].tag:
output.append(_xml2dict(element))
else:
output.append(_xml2list(element))
elif element.text:
text = element.text.strip()
if text:
tag = _ns(element.tag)
output.append({tag: text})
return output
def _ns(s):
"""Remove namespace, but only if there is a namespace to begin with."""
if '}' in s:
return '}'.join(s.split('}')[1:])
else:
return s
def _xml2dict(root):
"""Use functions instead of Class.
remove namespace based on
http://stackoverflow.com/questions/2148119
"""
output = {}
if root.items():
output.update(dict(root.items()))
for element in root:
if len(element) > 0:
if len(element) == 1 or element[0].tag != element[1].tag:
one_dict = _xml2dict(element)
else:
one_dict = {_ns(element[0].tag): _xml2list(element)}
if element.items():
one_dict.update(dict(element.items()))
output.update({_ns(element.tag): one_dict})
elif element.items():
output.update({_ns(element.tag): dict(element.items())})
else:
output.update({_ns(element.tag): element.text})
return output
def _ns2py_time(nstime):
"""Parse times."""
nsdate = nstime[0:10]
nstime0 = nstime[11:26]
nstime00 = nsdate + " " + nstime0
pytime = datetime.strptime(nstime00, '%Y-%m-%d %H:%M:%S.%f')
return pytime
def _combine_triggers(data, remapping=None):
"""Combine binary triggers."""
new_trigger = np.zeros(data.shape[1])
if data.astype(bool).sum(axis=0).max() > 1: # ensure no overlaps
logger.info(' Found multiple events at the same time '
'sample. Cannot create trigger channel.')
return
if remapping is None:
        remapping = np.arange(len(data)) + 1
for d, event_id in zip(data, remapping):
idx = d.nonzero()
if np.any(idx):
new_trigger[idx] += event_id
return new_trigger
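# A small self-contained sketch of how the per-code event rows built above can be
# collapsed into one trigger channel with _combine_triggers. The toy array is
# illustrative only; real data comes from _read_events.
def _demo_combine_triggers():
    # Two event codes over six samples, with no overlapping events.
    data = np.array([[0, 1, 0, 0, 1, 0],
                     [0, 0, 0, 1, 0, 0]])
    # Row 0 is mapped to trigger id 1 and row 1 to id 2, yielding
    # array([0., 1., 0., 2., 1., 0.]).
    return _combine_triggers(data, remapping=np.arange(len(data)) + 1)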
|
from typing import Callable
from pyisy.constants import ISY_VALUE_UNKNOWN, PROTO_GROUP
from homeassistant.components.switch import DOMAIN as SWITCH, SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import _LOGGER, DOMAIN as ISY994_DOMAIN, ISY994_NODES, ISY994_PROGRAMS
from .entity import ISYNodeEntity, ISYProgramEntity
from .helpers import migrate_old_unique_ids
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[list], None],
) -> bool:
"""Set up the ISY994 switch platform."""
hass_isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
devices = []
for node in hass_isy_data[ISY994_NODES][SWITCH]:
devices.append(ISYSwitchEntity(node))
for name, status, actions in hass_isy_data[ISY994_PROGRAMS][SWITCH]:
devices.append(ISYSwitchProgramEntity(name, status, actions))
await migrate_old_unique_ids(hass, SWITCH, devices)
async_add_entities(devices)
class ISYSwitchEntity(ISYNodeEntity, SwitchEntity):
"""Representation of an ISY994 switch device."""
@property
def is_on(self) -> bool:
"""Get whether the ISY994 device is in the on state."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
return bool(self._node.status)
def turn_off(self, **kwargs) -> None:
"""Send the turn off command to the ISY994 switch."""
if not self._node.turn_off():
_LOGGER.debug("Unable to turn off switch")
def turn_on(self, **kwargs) -> None:
"""Send the turn on command to the ISY994 switch."""
if not self._node.turn_on():
_LOGGER.debug("Unable to turn on switch")
@property
def icon(self) -> str:
"""Get the icon for groups."""
if hasattr(self._node, "protocol") and self._node.protocol == PROTO_GROUP:
return "mdi:google-circles-communities" # Matches isy scene icon
return super().icon
class ISYSwitchProgramEntity(ISYProgramEntity, SwitchEntity):
"""A representation of an ISY994 program switch."""
@property
def is_on(self) -> bool:
"""Get whether the ISY994 switch program is on."""
return bool(self._node.status)
def turn_on(self, **kwargs) -> None:
"""Send the turn on command to the ISY994 switch program."""
if not self._actions.run_then():
_LOGGER.error("Unable to turn on switch")
def turn_off(self, **kwargs) -> None:
"""Send the turn off command to the ISY994 switch program."""
if not self._actions.run_else():
_LOGGER.error("Unable to turn off switch")
@property
def icon(self) -> str:
"""Get the icon for programs."""
return "mdi:script-text-outline" # Matches isy program icon
|
import datetime
import logging
import os
import time
from fitbit import Fitbit
from fitbit.api import FitbitOauth2Client
from oauthlib.oauth2.rfc6749.errors import MismatchingStateError, MissingTokenError
import voluptuous as vol
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_UNIT_SYSTEM,
LENGTH_FEET,
MASS_KILOGRAMS,
MASS_MILLIGRAMS,
PERCENTAGE,
TIME_MILLISECONDS,
TIME_MINUTES,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from homeassistant.helpers.network import get_url
from homeassistant.util.json import load_json, save_json
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
ATTR_ACCESS_TOKEN = "access_token"
ATTR_REFRESH_TOKEN = "refresh_token"
ATTR_LAST_SAVED_AT = "last_saved_at"
CONF_MONITORED_RESOURCES = "monitored_resources"
CONF_CLOCK_FORMAT = "clock_format"
ATTRIBUTION = "Data provided by Fitbit.com"
FITBIT_AUTH_CALLBACK_PATH = "/api/fitbit/callback"
FITBIT_AUTH_START = "/api/fitbit"
FITBIT_CONFIG_FILE = "fitbit.conf"
FITBIT_DEFAULT_RESOURCES = ["activities/steps"]
SCAN_INTERVAL = datetime.timedelta(minutes=30)
DEFAULT_CONFIG = {
CONF_CLIENT_ID: "CLIENT_ID_HERE",
CONF_CLIENT_SECRET: "CLIENT_SECRET_HERE",
}
FITBIT_RESOURCES_LIST = {
"activities/activityCalories": ["Activity Calories", "cal", "fire"],
"activities/calories": ["Calories", "cal", "fire"],
"activities/caloriesBMR": ["Calories BMR", "cal", "fire"],
"activities/distance": ["Distance", "", "map-marker"],
"activities/elevation": ["Elevation", "", "walk"],
"activities/floors": ["Floors", "floors", "walk"],
"activities/heart": ["Resting Heart Rate", "bpm", "heart-pulse"],
"activities/minutesFairlyActive": ["Minutes Fairly Active", TIME_MINUTES, "walk"],
"activities/minutesLightlyActive": ["Minutes Lightly Active", TIME_MINUTES, "walk"],
"activities/minutesSedentary": [
"Minutes Sedentary",
TIME_MINUTES,
"seat-recline-normal",
],
"activities/minutesVeryActive": ["Minutes Very Active", TIME_MINUTES, "run"],
"activities/steps": ["Steps", "steps", "walk"],
"activities/tracker/activityCalories": ["Tracker Activity Calories", "cal", "fire"],
"activities/tracker/calories": ["Tracker Calories", "cal", "fire"],
"activities/tracker/distance": ["Tracker Distance", "", "map-marker"],
"activities/tracker/elevation": ["Tracker Elevation", "", "walk"],
"activities/tracker/floors": ["Tracker Floors", "floors", "walk"],
"activities/tracker/minutesFairlyActive": [
"Tracker Minutes Fairly Active",
TIME_MINUTES,
"walk",
],
"activities/tracker/minutesLightlyActive": [
"Tracker Minutes Lightly Active",
TIME_MINUTES,
"walk",
],
"activities/tracker/minutesSedentary": [
"Tracker Minutes Sedentary",
TIME_MINUTES,
"seat-recline-normal",
],
"activities/tracker/minutesVeryActive": [
"Tracker Minutes Very Active",
TIME_MINUTES,
"run",
],
"activities/tracker/steps": ["Tracker Steps", "steps", "walk"],
"body/bmi": ["BMI", "BMI", "human"],
"body/fat": ["Body Fat", PERCENTAGE, "human"],
"body/weight": ["Weight", "", "human"],
"devices/battery": ["Battery", None, None],
"sleep/awakeningsCount": ["Awakenings Count", "times awaken", "sleep"],
"sleep/efficiency": ["Sleep Efficiency", PERCENTAGE, "sleep"],
"sleep/minutesAfterWakeup": ["Minutes After Wakeup", TIME_MINUTES, "sleep"],
"sleep/minutesAsleep": ["Sleep Minutes Asleep", TIME_MINUTES, "sleep"],
"sleep/minutesAwake": ["Sleep Minutes Awake", TIME_MINUTES, "sleep"],
"sleep/minutesToFallAsleep": [
"Sleep Minutes to Fall Asleep",
TIME_MINUTES,
"sleep",
],
"sleep/startTime": ["Sleep Start Time", None, "clock"],
"sleep/timeInBed": ["Sleep Time in Bed", TIME_MINUTES, "hotel"],
}
FITBIT_MEASUREMENTS = {
"en_US": {
"duration": TIME_MILLISECONDS,
"distance": "mi",
"elevation": LENGTH_FEET,
"height": "in",
"weight": "lbs",
"body": "in",
"liquids": "fl. oz.",
"blood glucose": f"{MASS_MILLIGRAMS}/dL",
"battery": "",
},
"en_GB": {
"duration": TIME_MILLISECONDS,
"distance": "kilometers",
"elevation": "meters",
"height": "centimeters",
"weight": "stone",
"body": "centimeters",
"liquids": "milliliters",
"blood glucose": "mmol/L",
"battery": "",
},
"metric": {
"duration": TIME_MILLISECONDS,
"distance": "kilometers",
"elevation": "meters",
"height": "centimeters",
"weight": MASS_KILOGRAMS,
"body": "centimeters",
"liquids": "milliliters",
"blood glucose": "mmol/L",
"battery": "",
},
}
BATTERY_LEVELS = {"High": 100, "Medium": 50, "Low": 20, "Empty": 0}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES
): vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_LIST)]),
vol.Optional(CONF_CLOCK_FORMAT, default="24H"): vol.In(["12H", "24H"]),
vol.Optional(CONF_UNIT_SYSTEM, default="default"): vol.In(
["en_GB", "en_US", "metric", "default"]
),
}
)
def request_app_setup(hass, config, add_entities, config_path, discovery_info=None):
"""Assist user with configuring the Fitbit dev application."""
configurator = hass.components.configurator
def fitbit_configuration_callback(callback_data):
"""Handle configuration updates."""
config_path = hass.config.path(FITBIT_CONFIG_FILE)
if os.path.isfile(config_path):
config_file = load_json(config_path)
if config_file == DEFAULT_CONFIG:
                error_msg = (
                    "You didn't correctly modify fitbit.conf, please try again"
                )
configurator.notify_errors(_CONFIGURING["fitbit"], error_msg)
else:
setup_platform(hass, config, add_entities, discovery_info)
else:
setup_platform(hass, config, add_entities, discovery_info)
start_url = f"{get_url(hass)}{FITBIT_AUTH_CALLBACK_PATH}"
description = f"""Please create a Fitbit developer app at
https://dev.fitbit.com/apps/new.
For the OAuth 2.0 Application Type choose Personal.
Set the Callback URL to {start_url}.
They will provide you a Client ID and secret.
These need to be saved into the file located at: {config_path}.
Then come back here and hit the below button.
"""
submit = "I have saved my Client ID and Client Secret into fitbit.conf."
_CONFIGURING["fitbit"] = configurator.request_config(
"Fitbit",
fitbit_configuration_callback,
description=description,
submit_caption=submit,
description_image="/static/images/config_fitbit_app.png",
)
def request_oauth_completion(hass):
"""Request user complete Fitbit OAuth2 flow."""
configurator = hass.components.configurator
if "fitbit" in _CONFIGURING:
configurator.notify_errors(
_CONFIGURING["fitbit"], "Failed to register, please try again."
)
return
def fitbit_configuration_callback(callback_data):
"""Handle configuration updates."""
start_url = f"{get_url(hass)}{FITBIT_AUTH_START}"
description = f"Please authorize Fitbit by visiting {start_url}"
_CONFIGURING["fitbit"] = configurator.request_config(
"Fitbit",
fitbit_configuration_callback,
description=description,
submit_caption="I have authorized Fitbit.",
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Fitbit sensor."""
config_path = hass.config.path(FITBIT_CONFIG_FILE)
if os.path.isfile(config_path):
config_file = load_json(config_path)
if config_file == DEFAULT_CONFIG:
request_app_setup(
hass, config, add_entities, config_path, discovery_info=None
)
return False
else:
save_json(config_path, DEFAULT_CONFIG)
request_app_setup(hass, config, add_entities, config_path, discovery_info=None)
return False
if "fitbit" in _CONFIGURING:
hass.components.configurator.request_done(_CONFIGURING.pop("fitbit"))
access_token = config_file.get(ATTR_ACCESS_TOKEN)
refresh_token = config_file.get(ATTR_REFRESH_TOKEN)
expires_at = config_file.get(ATTR_LAST_SAVED_AT)
if None not in (access_token, refresh_token):
authd_client = Fitbit(
config_file.get(CONF_CLIENT_ID),
config_file.get(CONF_CLIENT_SECRET),
access_token=access_token,
refresh_token=refresh_token,
expires_at=expires_at,
refresh_cb=lambda x: None,
)
if int(time.time()) - expires_at > 3600:
authd_client.client.refresh_token()
unit_system = config.get(CONF_UNIT_SYSTEM)
if unit_system == "default":
authd_client.system = authd_client.user_profile_get()["user"]["locale"]
if authd_client.system != "en_GB":
if hass.config.units.is_metric:
authd_client.system = "metric"
else:
authd_client.system = "en_US"
else:
authd_client.system = unit_system
dev = []
registered_devs = authd_client.get_devices()
clock_format = config.get(CONF_CLOCK_FORMAT)
for resource in config.get(CONF_MONITORED_RESOURCES):
# monitor battery for all linked FitBit devices
if resource == "devices/battery":
for dev_extra in registered_devs:
dev.append(
FitbitSensor(
authd_client,
config_path,
resource,
hass.config.units.is_metric,
clock_format,
dev_extra,
)
)
else:
dev.append(
FitbitSensor(
authd_client,
config_path,
resource,
hass.config.units.is_metric,
clock_format,
)
)
add_entities(dev, True)
else:
oauth = FitbitOauth2Client(
config_file.get(CONF_CLIENT_ID), config_file.get(CONF_CLIENT_SECRET)
)
redirect_uri = f"{get_url(hass)}{FITBIT_AUTH_CALLBACK_PATH}"
fitbit_auth_start_url, _ = oauth.authorize_token_url(
redirect_uri=redirect_uri,
scope=[
"activity",
"heartrate",
"nutrition",
"profile",
"settings",
"sleep",
"weight",
],
)
hass.http.register_redirect(FITBIT_AUTH_START, fitbit_auth_start_url)
hass.http.register_view(FitbitAuthCallbackView(config, add_entities, oauth))
request_oauth_completion(hass)
class FitbitAuthCallbackView(HomeAssistantView):
"""Handle OAuth finish callback requests."""
requires_auth = False
url = FITBIT_AUTH_CALLBACK_PATH
name = "api:fitbit:callback"
def __init__(self, config, add_entities, oauth):
"""Initialize the OAuth callback view."""
self.config = config
self.add_entities = add_entities
self.oauth = oauth
@callback
def get(self, request):
"""Finish OAuth callback request."""
hass = request.app["hass"]
data = request.query
response_message = """Fitbit has been successfully authorized!
You can close this window now!"""
result = None
if data.get("code") is not None:
redirect_uri = f"{get_url(hass, require_current_request=True)}{FITBIT_AUTH_CALLBACK_PATH}"
try:
result = self.oauth.fetch_access_token(data.get("code"), redirect_uri)
except MissingTokenError as error:
_LOGGER.error("Missing token: %s", error)
response_message = f"""Something went wrong when
                attempting to authenticate with Fitbit. The error
encountered was {error}. Please try again!"""
except MismatchingStateError as error:
_LOGGER.error("Mismatched state, CSRF error: %s", error)
response_message = f"""Something went wrong when
                attempting to authenticate with Fitbit. The error
encountered was {error}. Please try again!"""
else:
_LOGGER.error("Unknown error when authing")
response_message = """Something went wrong when
            attempting to authenticate with Fitbit.
An unknown error occurred. Please try again!
"""
if result is None:
_LOGGER.error("Unknown error when authing")
response_message = """Something went wrong when
            attempting to authenticate with Fitbit.
An unknown error occurred. Please try again!
"""
html_response = f"""<html><head><title>Fitbit Auth</title></head>
<body><h1>{response_message}</h1></body></html>"""
if result:
config_contents = {
ATTR_ACCESS_TOKEN: result.get("access_token"),
ATTR_REFRESH_TOKEN: result.get("refresh_token"),
CONF_CLIENT_ID: self.oauth.client_id,
CONF_CLIENT_SECRET: self.oauth.client_secret,
ATTR_LAST_SAVED_AT: int(time.time()),
}
save_json(hass.config.path(FITBIT_CONFIG_FILE), config_contents)
hass.async_add_job(setup_platform, hass, self.config, self.add_entities)
return html_response
class FitbitSensor(Entity):
"""Implementation of a Fitbit sensor."""
def __init__(
self, client, config_path, resource_type, is_metric, clock_format, extra=None
):
"""Initialize the Fitbit sensor."""
self.client = client
self.config_path = config_path
self.resource_type = resource_type
self.is_metric = is_metric
self.clock_format = clock_format
self.extra = extra
self._name = FITBIT_RESOURCES_LIST[self.resource_type][0]
if self.extra:
self._name = f"{self.extra.get('deviceVersion')} Battery"
unit_type = FITBIT_RESOURCES_LIST[self.resource_type][1]
if unit_type == "":
split_resource = self.resource_type.split("/")
try:
measurement_system = FITBIT_MEASUREMENTS[self.client.system]
except KeyError:
if self.is_metric:
measurement_system = FITBIT_MEASUREMENTS["metric"]
else:
measurement_system = FITBIT_MEASUREMENTS["en_US"]
unit_type = measurement_system[split_resource[-1]]
self._unit_of_measurement = unit_type
self._state = 0
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self.resource_type == "devices/battery" and self.extra:
battery_level = BATTERY_LEVELS[self.extra.get("battery")]
return icon_for_battery_level(battery_level=battery_level, charging=None)
return f"mdi:{FITBIT_RESOURCES_LIST[self.resource_type][2]}"
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
if self.extra:
attrs["model"] = self.extra.get("deviceVersion")
attrs["type"] = self.extra.get("type").lower()
return attrs
def update(self):
"""Get the latest data from the Fitbit API and update the states."""
if self.resource_type == "devices/battery" and self.extra:
self._state = self.extra.get("battery")
else:
container = self.resource_type.replace("/", "-")
response = self.client.time_series(self.resource_type, period="7d")
raw_state = response[container][-1].get("value")
if self.resource_type == "activities/distance":
self._state = format(float(raw_state), ".2f")
elif self.resource_type == "activities/tracker/distance":
self._state = format(float(raw_state), ".2f")
elif self.resource_type == "body/bmi":
self._state = format(float(raw_state), ".1f")
elif self.resource_type == "body/fat":
self._state = format(float(raw_state), ".1f")
elif self.resource_type == "body/weight":
self._state = format(float(raw_state), ".1f")
elif self.resource_type == "sleep/startTime":
if raw_state == "":
self._state = "-"
elif self.clock_format == "12H":
hours, minutes = raw_state.split(":")
hours, minutes = int(hours), int(minutes)
setting = "AM"
if hours > 12:
setting = "PM"
hours -= 12
elif hours == 0:
hours = 12
self._state = f"{hours}:{minutes:02d} {setting}"
else:
self._state = raw_state
else:
if self.is_metric:
self._state = raw_state
else:
try:
self._state = f"{int(raw_state):,}"
except TypeError:
self._state = raw_state
if self.resource_type == "activities/heart":
self._state = response[container][-1].get("value").get("restingHeartRate")
token = self.client.client.session.token
config_contents = {
ATTR_ACCESS_TOKEN: token.get("access_token"),
ATTR_REFRESH_TOKEN: token.get("refresh_token"),
CONF_CLIENT_ID: self.client.client.client_id,
CONF_CLIENT_SECRET: self.client.client.client_secret,
ATTR_LAST_SAVED_AT: int(time.time()),
}
save_json(self.config_path, config_contents)
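# A minimal standalone sketch of the 12-hour clock conversion that
# FitbitSensor.update applies to "sleep/startTime" above, e.g.
# _demo_format_clock("23:45") -> "11:45 PM" and _demo_format_clock("00:05") -> "12:05 AM".
# The helper name is illustrative and not part of this integration's API.
def _demo_format_clock(raw_state, clock_format="12H"):
    """Format an "HH:MM" value the way the sensor does."""
    if raw_state == "":
        return "-"
    if clock_format != "12H":
        return raw_state
    hours, minutes = raw_state.split(":")
    hours, minutes = int(hours), int(minutes)
    setting = "AM"
    if hours > 12:
        setting = "PM"
        hours -= 12
    elif hours == 0:
        hours = 12
    return f"{hours}:{minutes:02d} {setting}"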
|
import logging
import voluptuous as vol
from homeassistant.components.sensor import (
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
PLATFORM_SCHEMA,
)
from homeassistant.const import CONF_HOST, PERCENTAGE, TEMP_CELSIUS
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
from .helpers import import_device
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
"temperature": ("Temperature", TEMP_CELSIUS, DEVICE_CLASS_TEMPERATURE),
"air_quality": ("Air Quality", None, None),
"humidity": ("Humidity", PERCENTAGE, DEVICE_CLASS_HUMIDITY),
"light": ("Light", None, DEVICE_CLASS_ILLUMINANCE),
"noise": ("Noise", None, None),
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOST): cv.string}, extra=vol.ALLOW_EXTRA
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Import the device and discontinue platform.
This is for backward compatibility.
Do not use this method.
"""
import_device(hass, config[CONF_HOST])
_LOGGER.warning(
"The sensor platform is deprecated, please remove it from your configuration"
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Broadlink sensor."""
device = hass.data[DOMAIN].devices[config_entry.entry_id]
sensor_data = device.update_manager.coordinator.data
sensors = [
BroadlinkSensor(device, monitored_condition)
for monitored_condition in sensor_data
if sensor_data[monitored_condition] or device.api.type == "A1"
]
async_add_entities(sensors)
class BroadlinkSensor(Entity):
"""Representation of a Broadlink sensor."""
def __init__(self, device, monitored_condition):
"""Initialize the sensor."""
self._device = device
self._coordinator = device.update_manager.coordinator
self._monitored_condition = monitored_condition
self._state = self._coordinator.data[monitored_condition]
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return f"{self._device.unique_id}-{self._monitored_condition}"
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._device.name} {SENSOR_TYPES[self._monitored_condition][0]}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return True if the sensor is available."""
return self._device.update_manager.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
return SENSOR_TYPES[self._monitored_condition][1]
@property
def should_poll(self):
"""Return True if the sensor has to be polled for state."""
return False
@property
def device_class(self):
"""Return device class."""
return SENSOR_TYPES[self._monitored_condition][2]
@property
def device_info(self):
"""Return device info."""
return {
"identifiers": {(DOMAIN, self._device.unique_id)},
"manufacturer": self._device.api.manufacturer,
"model": self._device.api.model,
"name": self._device.name,
"sw_version": self._device.fw_version,
}
@callback
def update_data(self):
"""Update data."""
if self._coordinator.last_update_success:
self._state = self._coordinator.data[self._monitored_condition]
self.async_write_ha_state()
async def async_added_to_hass(self):
"""Call when the sensor is added to hass."""
self.async_on_remove(self._coordinator.async_add_listener(self.update_data))
async def async_update(self):
"""Update the sensor."""
await self._coordinator.async_request_refresh()
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import flip
class TestRandomFlip(unittest.TestCase):
def test_random_flip(self):
img = np.random.uniform(size=(3, 24, 24))
out = flip(img, y_flip=True, x_flip=True)
expected = img
expected = expected[:, :, ::-1]
expected = expected[:, ::-1, :]
np.testing.assert_equal(out, expected)
def test_random_flip_vertical(self):
img = np.random.uniform(size=(3, 24, 24))
out = flip(img, y_flip=True, x_flip=False)
expected = img
expected = expected[:, ::-1, :]
np.testing.assert_equal(out, expected)
testing.run_module(__name__, __file__)
|
import datetime
from pyControl4.account import C4Account
from pyControl4.director import C4Director
from pyControl4.error_handling import Unauthorized
from homeassistant import config_entries, setup
from homeassistant.components.control4.const import DEFAULT_SCAN_INTERVAL, DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
def _get_mock_c4_account(
getAccountControllers={
"controllerCommonName": "control4_model_00AA00AA00AA",
"href": "https://apis.control4.com/account/v3/rest/accounts/000000",
"name": "Name",
},
getDirectorBearerToken={
"token": "token",
"token_expiration": datetime.datetime(2020, 7, 15, 13, 50, 15, 26940),
},
):
c4_account_mock = AsyncMock(C4Account)
c4_account_mock.getAccountControllers.return_value = getAccountControllers
c4_account_mock.getDirectorBearerToken.return_value = getDirectorBearerToken
return c4_account_mock
def _get_mock_c4_director(getAllItemInfo={}):
c4_director_mock = AsyncMock(C4Director)
c4_director_mock.getAllItemInfo.return_value = getAllItemInfo
return c4_director_mock
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
c4_account = _get_mock_c4_account()
c4_director = _get_mock_c4_director()
with patch(
"homeassistant.components.control4.config_flow.C4Account",
return_value=c4_account,
), patch(
"homeassistant.components.control4.config_flow.C4Director",
return_value=c4_director,
), patch(
"homeassistant.components.control4.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.control4.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: "1.1.1.1",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "control4_model_00AA00AA00AA"
assert result2["data"] == {
CONF_HOST: "1.1.1.1",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
"controller_unique_id": "control4_model_00AA00AA00AA",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.control4.config_flow.C4Account",
side_effect=Unauthorized("message"),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: "1.1.1.1",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_unexpected_exception(hass):
"""Test we handle an unexpected exception."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.control4.config_flow.C4Account",
side_effect=ValueError("message"),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: "1.1.1.1",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.control4.config_flow.Control4Validator.authenticate",
return_value=True,
), patch(
"homeassistant.components.control4.config_flow.C4Director",
side_effect=Unauthorized("message"),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: "1.1.1.1",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_option_flow(hass):
"""Test config flow options."""
entry = MockConfigEntry(domain=DOMAIN, data={}, options=None)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_SCAN_INTERVAL: 4},
)
assert result["type"] == "create_entry"
assert result["data"] == {
CONF_SCAN_INTERVAL: 4,
}
async def test_option_flow_defaults(hass):
"""Test config flow options."""
entry = MockConfigEntry(domain=DOMAIN, data={}, options=None)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "create_entry"
assert result["data"] == {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
}
|
import pytest
import jax
import tensornetwork
import tensorflow as tf
@pytest.fixture(
name="backend", params=["numpy", "tensorflow", "jax", "pytorch"])
def backend_fixture(request):
return request.param
@pytest.fixture(autouse=True)
def reset_default_backend():
tensornetwork.set_default_backend("numpy")
yield
tensornetwork.set_default_backend("numpy")
@pytest.fixture(autouse=True)
def enable_jax_64():
jax.config.update("jax_enable_x64", True)
yield
jax.config.update("jax_enable_x64", True)
@pytest.fixture(autouse=True)
def tf_enable_v2_behaviour():
tf.compat.v1.enable_v2_behavior()
yield
tf.compat.v1.enable_v2_behavior()
|
import tensornetwork as tn
import pytest
import numpy as np
from tensornetwork.block_sparse import (U1Charge, BlockSparseTensor, Index,
BaseCharge)
from tensornetwork.block_sparse.charge import charge_equal
from tensornetwork.block_sparse.blocksparse_utils import _find_diagonal_sparse_blocks #pylint: disable=line-too-long
import tensornetwork.linalg
import tensornetwork.linalg.node_linalg
def get_random(shape, num_charges, dtype=np.float64):
R = len(shape)
charges = [
BaseCharge(
np.random.randint(-5, 5, (shape[n], num_charges)),
charge_types=[U1Charge] * num_charges) for n in range(R)
]
  flows = list(np.full(R, fill_value=False, dtype=bool))
indices = [Index(charges[n], flows[n]) for n in range(R)]
return BlockSparseTensor.random(indices=indices, dtype=dtype)
def get_square_matrix(shape, num_charges, dtype=np.float64):
charge = BaseCharge(
np.random.randint(-5, 5, (shape, num_charges)),
charge_types=[U1Charge] * num_charges)
flows = [True, False]
indices = [Index(charge, flows[n]) for n in range(2)]
return BlockSparseTensor.random(indices=indices, dtype=dtype)
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_split_node_full_svd_names(num_charges):
np.random.seed(10)
a = tn.Node(
get_random((10, 10), num_charges=num_charges), backend='symmetric')
e1 = a[0]
e2 = a[1]
left, s, right, _, = tn.split_node_full_svd(
a, [e1], [e2],
left_name='left',
middle_name='center',
right_name='right',
left_edge_name='left_edge',
right_edge_name='right_edge')
assert left.name == 'left'
assert s.name == 'center'
assert right.name == 'right'
assert left.edges[-1].name == 'left_edge'
assert s[0].name == 'left_edge'
assert s[1].name == 'right_edge'
assert right.edges[0].name == 'right_edge'
@pytest.mark.parametrize("num_charges", [1, 2])
def test_split_node_rq_names(num_charges):
np.random.seed(10)
a = tn.Node(
get_random((5, 5, 5, 5, 5), num_charges=num_charges), backend='symmetric')
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_rq(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
@pytest.mark.parametrize("num_charges", [1, 2])
def test_split_node_qr_names(num_charges):
np.random.seed(10)
a = tn.Node(
get_random((5, 5, 5, 5, 5), num_charges=num_charges), backend='symmetric')
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_qr(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
@pytest.mark.parametrize("num_charges", [1, 2])
def test_split_node_names(num_charges):
np.random.seed(10)
a = tn.Node(
get_random((5, 5, 5, 5, 5), num_charges=num_charges), backend='symmetric')
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right, _ = tn.split_node(
a,
left_edges,
right_edges,
left_name='left',
right_name='right',
edge_name='edge')
assert left.name == 'left'
assert right.name == 'right'
assert left.edges[-1].name == 'edge'
assert right.edges[0].name == 'edge'
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_split_node_rq_unitarity(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_square_matrix(50, num_charges, dtype=dtype), backend='symmetric')
r, q = tn.split_node_rq(a, [a[0]], [a[1]])
r[1] | q[0]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
blocks, _, shapes = _find_diagonal_sparse_blocks(u1.tensor.flat_charges,
u1.tensor.flat_flows,
len(u1.tensor._order[0]))
for n, block in enumerate(blocks):
np.testing.assert_almost_equal(
np.reshape(u1.tensor.data[block], shapes[:, n]),
np.eye(N=shapes[0, n], M=shapes[1, n]))
blocks, _, shapes = _find_diagonal_sparse_blocks(u2.tensor.flat_charges,
u2.tensor.flat_flows,
len(u2.tensor._order[0]))
for n, block in enumerate(blocks):
np.testing.assert_almost_equal(
np.reshape(u2.tensor.data[block], shapes[:, n]),
np.eye(N=shapes[0, n], M=shapes[1, n]))
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_split_node_rq(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_random((6, 7, 8, 9, 10), num_charges, dtype=dtype),
backend='symmetric')
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_rq(a, left_edges, right_edges)
tn.check_correct([left, right])
result = tn.contract(left[3])
np.testing.assert_allclose(result.tensor.data, a.tensor.data)
assert np.all([
charge_equal(result.tensor._charges[n], a.tensor._charges[n])
for n in range(len(a.tensor._charges))
])
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_split_node_qr_unitarity(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_square_matrix(50, num_charges, dtype=dtype), backend='symmetric')
q, r = tn.split_node_qr(a, [a[0]], [a[1]])
r[0] | q[1]
qbar = tn.linalg.node_linalg.conj(q)
q[1] ^ qbar[1]
u1 = q @ qbar
qbar[0] ^ q[0]
u2 = qbar @ q
blocks, _, shapes = _find_diagonal_sparse_blocks(u1.tensor.flat_charges,
u1.tensor.flat_flows,
len(u1.tensor._order[0]))
for n, block in enumerate(blocks):
np.testing.assert_almost_equal(
np.reshape(u1.tensor.data[block], shapes[:, n]),
np.eye(N=shapes[0, n], M=shapes[1, n]))
blocks, _, shapes = _find_diagonal_sparse_blocks(u2.tensor.flat_charges,
u2.tensor.flat_flows,
len(u2.tensor._order[0]))
for n, block in enumerate(blocks):
np.testing.assert_almost_equal(
np.reshape(u2.tensor.data[block], shapes[:, n]),
np.eye(N=shapes[0, n], M=shapes[1, n]))
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_split_node_qr(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_random((6, 7, 8, 9, 10), num_charges=num_charges, dtype=dtype),
backend='symmetric')
left_edges = []
for i in range(3):
left_edges.append(a[i])
right_edges = []
for i in range(3, 5):
right_edges.append(a[i])
left, right = tn.split_node_qr(a, left_edges, right_edges)
tn.check_correct([left, right])
result = tn.contract(left[3])
np.testing.assert_allclose(result.tensor.data, a.tensor.data)
assert np.all([
charge_equal(result.tensor._charges[n], a.tensor._charges[n])
for n in range(len(a.tensor._charges))
])
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_conj(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_random((6, 7, 8, 9, 10), num_charges=num_charges, dtype=dtype),
backend='symmetric')
abar = tn.linalg.node_linalg.conj(a)
np.testing.assert_allclose(abar.tensor.data, a.backend.conj(a.tensor.data))
assert np.all([
charge_equal(abar.tensor._charges[n], a.tensor._charges[n])
for n in range(len(a.tensor._charges))
])
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_transpose(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_random((6, 7, 8, 9, 10), num_charges=num_charges, dtype=dtype),
backend='symmetric')
order = [a[n] for n in reversed(range(5))]
transpa = tn.linalg.node_linalg.transpose(a, [4, 3, 2, 1, 0])
a.reorder_edges(order)
np.testing.assert_allclose(a.tensor.data, transpa.tensor.data)
def test_switch_backend():
np.random.seed(10)
a = tn.Node(np.random.rand(3, 3, 3), name="A", backend="numpy")
b = tn.Node(np.random.rand(3, 3, 3), name="B", backend="numpy")
c = tn.Node(np.random.rand(3, 3, 3), name="C", backend="numpy")
nodes = [a, b, c]
with pytest.raises(ValueError):
tn.switch_backend(nodes, 'symmetric')
@pytest.mark.parametrize("dtype", [np.float64, np.complex128])
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_contract_trace_edges(dtype, num_charges):
np.random.seed(10)
a = tn.Node(
get_random((3, 3, 3), num_charges=num_charges, dtype=dtype),
backend='symmetric')
with pytest.raises(ValueError):
tn.contract_trace_edges(a)
@pytest.mark.parametrize("num_charges", [1, 2, 3])
def test_switch_backend_raises_error(num_charges):
np.random.seed(10)
a = tn.Node(
get_random((3, 3, 3), num_charges=num_charges, dtype=np.float64),
backend='symmetric')
with pytest.raises(NotImplementedError):
tn.switch_backend({a}, 'numpy')
def test_switch_backend_raises_error_2():
np.random.seed(10)
a = tn.Node(np.random.rand(3, 3, 3))
with pytest.raises(ValueError):
tn.switch_backend({a}, 'symmetric')
|
from __future__ import print_function, division
from plumbum import cli
from plumbum.lib import six
class TestValidator:
def test_named(self):
class Try(object):
@cli.positional(x=abs, y=str)
def main(selfy, x, y):
pass
assert Try.main.positional == [abs, str]
assert Try.main.positional_varargs == None
def test_position(self):
class Try(object):
@cli.positional(abs, str)
def main(selfy, x, y):
pass
assert Try.main.positional == [abs, str]
assert Try.main.positional_varargs == None
def test_mix(self):
class Try(object):
@cli.positional(abs, str, d=bool)
def main(selfy, x, y, z, d):
pass
assert Try.main.positional == [abs, str, None, bool]
assert Try.main.positional_varargs == None
def test_var(self):
class Try(object):
@cli.positional(abs, str, int)
def main(selfy, x, y, *g):
pass
assert Try.main.positional == [abs, str]
assert Try.main.positional_varargs == int
def test_defaults(self):
class Try(object):
@cli.positional(abs, str)
def main(selfy, x, y = 'hello'):
pass
assert Try.main.positional == [abs, str]
class TestProg:
def test_prog(self, capsys):
class MainValidator(cli.Application):
@cli.positional(int, int, int)
def main(self, myint, myint2, *mylist):
print(repr(myint), myint2, mylist)
_, rc = MainValidator.run(["prog", "1", "2", '3', '4', '5'], exit = False)
assert rc == 0
assert "1 2 (3, 4, 5)" == capsys.readouterr()[0].strip()
def test_failure(self, capsys):
class MainValidator(cli.Application):
@cli.positional(int, int, int)
def main(self, myint, myint2, *mylist):
print(myint, myint2, mylist)
_, rc = MainValidator.run(["prog", "1.2", "2", '3', '4', '5'], exit = False)
assert rc == 2
value = capsys.readouterr()[0].strip()
assert 'int' in value
assert 'not' in value
assert '1.2' in value
def test_defaults(self, capsys):
class MainValidator(cli.Application):
@cli.positional(int, int)
def main(self, myint, myint2=2):
print(repr(myint), repr(myint2))
_, rc = MainValidator.run(["prog", "1"], exit = False)
assert rc == 0
assert "1 2" == capsys.readouterr()[0].strip()
_, rc = MainValidator.run(["prog", "1", "3"], exit = False)
assert rc == 0
assert "1 3" == capsys.readouterr()[0].strip()
|
import collections
import copy
import os
import os.path
import re
from coverage import env
from coverage.backward import configparser, iitems, string_class
from coverage.misc import contract, CoverageException, isolate_module
from coverage.misc import substitute_variables
from coverage.tomlconfig import TomlConfigParser, TomlDecodeError
os = isolate_module(os)
class HandyConfigParser(configparser.RawConfigParser):
"""Our specialization of ConfigParser."""
def __init__(self, our_file):
"""Create the HandyConfigParser.
`our_file` is True if this config file is specifically for coverage,
False if we are examining another config file (tox.ini, setup.cfg)
for possible settings.
"""
configparser.RawConfigParser.__init__(self)
self.section_prefixes = ["coverage:"]
if our_file:
self.section_prefixes.append("")
def read(self, filenames, encoding=None):
"""Read a file name as UTF-8 configuration data."""
kwargs = {}
if env.PYVERSION >= (3, 2):
kwargs['encoding'] = encoding or "utf-8"
return configparser.RawConfigParser.read(self, filenames, **kwargs)
def has_option(self, section, option):
for section_prefix in self.section_prefixes:
real_section = section_prefix + section
has = configparser.RawConfigParser.has_option(self, real_section, option)
if has:
return has
return False
def has_section(self, section):
for section_prefix in self.section_prefixes:
real_section = section_prefix + section
has = configparser.RawConfigParser.has_section(self, real_section)
if has:
return real_section
return False
def options(self, section):
for section_prefix in self.section_prefixes:
real_section = section_prefix + section
if configparser.RawConfigParser.has_section(self, real_section):
return configparser.RawConfigParser.options(self, real_section)
        raise configparser.NoSectionError(section)
def get_section(self, section):
"""Get the contents of a section, as a dictionary."""
d = {}
for opt in self.options(section):
d[opt] = self.get(section, opt)
return d
def get(self, section, option, *args, **kwargs):
"""Get a value, replacing environment variables also.
The arguments are the same as `RawConfigParser.get`, but in the found
value, ``$WORD`` or ``${WORD}`` are replaced by the value of the
environment variable ``WORD``.
Returns the finished value.
"""
for section_prefix in self.section_prefixes:
real_section = section_prefix + section
if configparser.RawConfigParser.has_option(self, real_section, option):
break
else:
            raise configparser.NoOptionError(option, section)
v = configparser.RawConfigParser.get(self, real_section, option, *args, **kwargs)
v = substitute_variables(v, os.environ)
return v
def getlist(self, section, option):
"""Read a list of strings.
The value of `section` and `option` is treated as a comma- and newline-
separated list of strings. Each value is stripped of whitespace.
Returns the list of strings.
"""
value_list = self.get(section, option)
values = []
for value_line in value_list.split('\n'):
for value in value_line.split(','):
value = value.strip()
if value:
values.append(value)
return values
def getregexlist(self, section, option):
"""Read a list of full-line regexes.
The value of `section` and `option` is treated as a newline-separated
list of regexes. Each value is stripped of whitespace.
Returns the list of strings.
"""
line_list = self.get(section, option)
value_list = []
for value in line_list.splitlines():
value = value.strip()
try:
re.compile(value)
except re.error as e:
raise CoverageException(
"Invalid [%s].%s value %r: %s" % (section, option, value, e)
)
if value:
value_list.append(value)
return value_list
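# A minimal sketch of the environment-variable substitution that
# HandyConfigParser.get performs: "$WORD" and "${WORD}" in a value are replaced
# from os.environ. The variable name and file content below are illustrative only.
def _demo_env_substitution():
    import tempfile
    os.environ["DEMO_DATA_DIR"] = "/tmp/demo"
    with tempfile.NamedTemporaryFile("w", suffix=".cfg", delete=False) as cfg:
        cfg.write("[coverage:run]\ndata_file = ${DEMO_DATA_DIR}/.coverage\n")
        name = cfg.name
    parser = HandyConfigParser(our_file=False)
    parser.read(name)
    return parser.get("run", "data_file")  # -> "/tmp/demo/.coverage"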
# The default line exclusion regexes.
DEFAULT_EXCLUDE = [
r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(cover|COVER)',
]
# The default partial branch regexes, to be modified by the user.
DEFAULT_PARTIAL = [
r'#\s*(pragma|PRAGMA)[:\s]?\s*(no|NO)\s*(branch|BRANCH)',
]
# The default partial branch regexes, based on Python semantics.
# These are any Python branching constructs that can't actually execute all
# their branches.
DEFAULT_PARTIAL_ALWAYS = [
'while (True|1|False|0):',
'if (True|1|False|0):',
]
class CoverageConfig(object):
"""Coverage.py configuration.
The attributes of this class are the various settings that control the
operation of coverage.py.
"""
# pylint: disable=too-many-instance-attributes
def __init__(self):
"""Initialize the configuration attributes to their defaults."""
# Metadata about the config.
# We tried to read these config files.
self.attempted_config_files = []
# We did read these config files, but maybe didn't find any content for us.
self.config_files_read = []
# The file that gave us our configuration.
self.config_file = None
self._config_contents = None
# Defaults for [run] and [report]
self._include = None
self._omit = None
# Defaults for [run]
self.branch = False
self.command_line = None
self.concurrency = None
self.context = None
self.cover_pylib = False
self.data_file = ".coverage"
self.debug = []
self.disable_warnings = []
self.dynamic_context = None
self.note = None
self.parallel = False
self.plugins = []
self.relative_files = False
self.run_include = None
self.run_omit = None
self.source = None
self.source_pkgs = []
self.timid = False
self._crash = None
# Defaults for [report]
self.exclude_list = DEFAULT_EXCLUDE[:]
self.fail_under = 0.0
self.ignore_errors = False
self.report_include = None
self.report_omit = None
self.partial_always_list = DEFAULT_PARTIAL_ALWAYS[:]
self.partial_list = DEFAULT_PARTIAL[:]
self.precision = 0
self.report_contexts = None
self.show_missing = False
self.skip_covered = False
self.skip_empty = False
self.sort = None
# Defaults for [html]
self.extra_css = None
self.html_dir = "htmlcov"
self.html_title = "Coverage report"
self.show_contexts = False
# Defaults for [xml]
self.xml_output = "coverage.xml"
self.xml_package_depth = 99
# Defaults for [json]
self.json_output = "coverage.json"
self.json_pretty_print = False
self.json_show_contexts = False
# Defaults for [paths]
self.paths = collections.OrderedDict()
# Options for plugins
self.plugin_options = {}
MUST_BE_LIST = [
"debug", "concurrency", "plugins",
"report_omit", "report_include",
"run_omit", "run_include",
]
def from_args(self, **kwargs):
"""Read config values from `kwargs`."""
for k, v in iitems(kwargs):
if v is not None:
if k in self.MUST_BE_LIST and isinstance(v, string_class):
v = [v]
setattr(self, k, v)
@contract(filename=str)
def from_file(self, filename, our_file):
"""Read configuration from a .rc file.
`filename` is a file name to read.
`our_file` is True if this config file is specifically for coverage,
False if we are examining another config file (tox.ini, setup.cfg)
for possible settings.
Returns True or False, whether the file could be read, and it had some
coverage.py settings in it.
"""
_, ext = os.path.splitext(filename)
if ext == '.toml':
cp = TomlConfigParser(our_file)
else:
cp = HandyConfigParser(our_file)
self.attempted_config_files.append(filename)
try:
files_read = cp.read(filename)
except (configparser.Error, TomlDecodeError) as err:
raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
if not files_read:
return False
self.config_files_read.extend(map(os.path.abspath, files_read))
any_set = False
try:
for option_spec in self.CONFIG_FILE_OPTIONS:
was_set = self._set_attr_from_config_option(cp, *option_spec)
if was_set:
any_set = True
except ValueError as err:
raise CoverageException("Couldn't read config file %s: %s" % (filename, err))
# Check that there are no unrecognized options.
all_options = collections.defaultdict(set)
for option_spec in self.CONFIG_FILE_OPTIONS:
section, option = option_spec[1].split(":")
all_options[section].add(option)
for section, options in iitems(all_options):
real_section = cp.has_section(section)
if real_section:
for unknown in set(cp.options(section)) - options:
raise CoverageException(
"Unrecognized option '[%s] %s=' in config file %s" % (
real_section, unknown, filename
)
)
# [paths] is special
if cp.has_section('paths'):
for option in cp.options('paths'):
self.paths[option] = cp.getlist('paths', option)
any_set = True
# plugins can have options
for plugin in self.plugins:
if cp.has_section(plugin):
self.plugin_options[plugin] = cp.get_section(plugin)
any_set = True
# Was this file used as a config file? If it's specifically our file,
# then it was used. If we're piggybacking on someone else's file,
# then it was only used if we found some settings in it.
if our_file:
used = True
else:
used = any_set
if used:
self.config_file = os.path.abspath(filename)
with open(filename, "rb") as f:
self._config_contents = f.read()
return used
def copy(self):
"""Return a copy of the configuration."""
return copy.deepcopy(self)
CONFIG_FILE_OPTIONS = [
# These are *args for _set_attr_from_config_option:
# (attr, where, type_="")
#
# attr is the attribute to set on the CoverageConfig object.
# where is the section:name to read from the configuration file.
# type_ is the optional type to apply, by using .getTYPE to read the
# configuration value from the file.
# [run]
('branch', 'run:branch', 'boolean'),
('command_line', 'run:command_line'),
('concurrency', 'run:concurrency', 'list'),
('context', 'run:context'),
('cover_pylib', 'run:cover_pylib', 'boolean'),
('data_file', 'run:data_file'),
('debug', 'run:debug', 'list'),
('disable_warnings', 'run:disable_warnings', 'list'),
('dynamic_context', 'run:dynamic_context'),
('note', 'run:note'),
('parallel', 'run:parallel', 'boolean'),
('plugins', 'run:plugins', 'list'),
('relative_files', 'run:relative_files', 'boolean'),
('run_include', 'run:include', 'list'),
('run_omit', 'run:omit', 'list'),
('source', 'run:source', 'list'),
('source_pkgs', 'run:source_pkgs', 'list'),
('timid', 'run:timid', 'boolean'),
('_crash', 'run:_crash'),
# [report]
('exclude_list', 'report:exclude_lines', 'regexlist'),
('fail_under', 'report:fail_under', 'float'),
('ignore_errors', 'report:ignore_errors', 'boolean'),
('partial_always_list', 'report:partial_branches_always', 'regexlist'),
('partial_list', 'report:partial_branches', 'regexlist'),
('precision', 'report:precision', 'int'),
('report_contexts', 'report:contexts', 'list'),
('report_include', 'report:include', 'list'),
('report_omit', 'report:omit', 'list'),
('show_missing', 'report:show_missing', 'boolean'),
('skip_covered', 'report:skip_covered', 'boolean'),
('skip_empty', 'report:skip_empty', 'boolean'),
('sort', 'report:sort'),
# [html]
('extra_css', 'html:extra_css'),
('html_dir', 'html:directory'),
('html_title', 'html:title'),
('show_contexts', 'html:show_contexts', 'boolean'),
# [xml]
('xml_output', 'xml:output'),
('xml_package_depth', 'xml:package_depth', 'int'),
# [json]
('json_output', 'json:output'),
('json_pretty_print', 'json:pretty_print', 'boolean'),
('json_show_contexts', 'json:show_contexts', 'boolean'),
]
def _set_attr_from_config_option(self, cp, attr, where, type_=''):
"""Set an attribute on self if it exists in the ConfigParser.
Returns True if the attribute was set.
"""
section, option = where.split(":")
if cp.has_option(section, option):
method = getattr(cp, 'get' + type_)
setattr(self, attr, method(section, option))
return True
return False
def get_plugin_options(self, plugin):
"""Get a dictionary of options for the plugin named `plugin`."""
return self.plugin_options.get(plugin, {})
def set_option(self, option_name, value):
"""Set an option in the configuration.
`option_name` is a colon-separated string indicating the section and
option name. For example, the ``branch`` option in the ``[run]``
section of the config file would be indicated with `"run:branch"`.
`value` is the new value for the option.
"""
# Special-cased options.
if option_name == "paths":
self.paths = value
return
# Check all the hard-coded options.
for option_spec in self.CONFIG_FILE_OPTIONS:
attr, where = option_spec[:2]
if where == option_name:
setattr(self, attr, value)
return
# See if it's a plugin option.
plugin_name, _, key = option_name.partition(":")
if key and plugin_name in self.plugins:
self.plugin_options.setdefault(plugin_name, {})[key] = value
return
# If we get here, we didn't find the option.
raise CoverageException("No such option: %r" % option_name)
def get_option(self, option_name):
"""Get an option from the configuration.
`option_name` is a colon-separated string indicating the section and
option name. For example, the ``branch`` option in the ``[run]``
section of the config file would be indicated with `"run:branch"`.
Returns the value of the option.
"""
# Special-cased options.
if option_name == "paths":
return self.paths
# Check all the hard-coded options.
for option_spec in self.CONFIG_FILE_OPTIONS:
attr, where = option_spec[:2]
if where == option_name:
return getattr(self, attr)
# See if it's a plugin option.
plugin_name, _, key = option_name.partition(":")
if key and plugin_name in self.plugins:
return self.plugin_options.get(plugin_name, {}).get(key)
# If we get here, we didn't find the option.
raise CoverageException("No such option: %r" % option_name)
def config_files_to_try(config_file):
"""What config files should we try to read?
Returns a list of tuples:
(filename, is_our_file, was_file_specified)
"""
# Some API users were specifying ".coveragerc" to mean the same as
# True, so make it so.
if config_file == ".coveragerc":
config_file = True
specified_file = (config_file is not True)
if not specified_file:
# No file was specified. Check COVERAGE_RCFILE.
config_file = os.environ.get('COVERAGE_RCFILE')
if config_file:
specified_file = True
if not specified_file:
# Still no file specified. Default to .coveragerc
config_file = ".coveragerc"
files_to_try = [
(config_file, True, specified_file),
("setup.cfg", False, False),
("tox.ini", False, False),
("pyproject.toml", False, False),
]
return files_to_try
def read_coverage_config(config_file, **kwargs):
"""Read the coverage.py configuration.
Arguments:
config_file: a boolean or string, see the `Coverage` class for the
tricky details.
all others: keyword arguments from the `Coverage` class, used for
setting values in the configuration.
Returns:
config:
config is a CoverageConfig object read from the appropriate
configuration file.
"""
# Build the configuration from a number of sources:
# 1) defaults:
config = CoverageConfig()
# 2) from a file:
if config_file:
files_to_try = config_files_to_try(config_file)
for fname, our_file, specified_file in files_to_try:
config_read = config.from_file(fname, our_file=our_file)
if config_read:
break
if specified_file:
raise CoverageException("Couldn't read '%s' as a config file" % fname)
# $set_env.py: COVERAGE_DEBUG - Options for --debug.
# 3) from environment variables:
env_data_file = os.environ.get('COVERAGE_FILE')
if env_data_file:
config.data_file = env_data_file
debugs = os.environ.get('COVERAGE_DEBUG')
if debugs:
config.debug.extend(d.strip() for d in debugs.split(","))
# 4) from constructor arguments:
config.from_args(**kwargs)
# Once all the config has been collected, there's a little post-processing
# to do.
config.data_file = os.path.expanduser(config.data_file)
config.html_dir = os.path.expanduser(config.html_dir)
config.xml_output = os.path.expanduser(config.xml_output)
config.paths = collections.OrderedDict(
(k, [os.path.expanduser(f) for f in v])
for k, v in config.paths.items()
)
return config
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from mock import Mock
from diamond.collector import Collector
from numa import NumaCollector
##########################################################################
class TestNumaCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NumaCollector', {
'interval': 10,
'bin': 'true'
})
self.collector = NumaCollector(config, None)
def test_import(self):
self.assertTrue(NumaCollector)
@patch.object(Collector, 'publish')
def test(self, publish_mock):
self.collector.collect()
metrics = {
'node_0_free_MB': 342,
'node_0_size_MB': 15976
}
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(
self.getFixture('single_node.txt').getvalue(),
'')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.setDocExample(
collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import numpy as np
import inspect
import six
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.decomposition import LatentDirichletAllocation, NMF
from sklearn.utils.validation import check_is_fitted
from sklearn.exceptions import NotFittedError
from sklearn.pipeline import Pipeline
from .._shared.helpers import memoize
from .._shared.params import default_params
from .load import load
# vector models
vectorizer_models = {
'CountVectorizer' : CountVectorizer,
'TfidfVectorizer' : TfidfVectorizer
}
# text models
texts = {
'LatentDirichletAllocation' : LatentDirichletAllocation,
'NMF' : NMF,
}
@memoize
def text2mat(data, vectorizer='CountVectorizer',
semantic='LatentDirichletAllocation', corpus='wiki'):
"""
Turns a list of text samples into a matrix using a vectorizer and a text model
Parameters
----------
data : list (or list of lists) of text samples
The text data to transform
vectorizer : str, dict, class or class instance
The vectorizer to use. Built-in options are 'CountVectorizer' or
'TfidfVectorizer'. To change default parameters, set to a dictionary
e.g. {'model' : 'CountVectorizer', 'params' : {'max_features' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.feature_extraction.text
for details. You can also specify your own vectorizer model as a class,
or class instance. With either option, the class must have a
fit_transform method (see here: http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to vectorizer_params. If
a class instance, no parameters can be passed.
semantic : str, dict, class or class instance
Text model to use to transform text data. Built-in options are
'LatentDirichletAllocation' or 'NMF' (default: LDA). To change default
parameters, set to a dictionary e.g. {'model' : 'NMF', 'params' :
{'n_components' : 10}}. See
http://scikit-learn.org/stable/modules/classes.html#module-sklearn.decomposition
for details on the two model options. You can also specify your own
text model as a class, or class instance. With either option, the class
must have a fit_transform method (see here:
http://scikit-learn.org/stable/data_transforms.html).
If a class, pass any parameters as a dictionary to text_params. If
a class instance, no parameters can be passed.
corpus : list (or list of lists) of text samples or 'wiki', 'nips', 'sotus'.
Text to use to fit the semantic model (optional). If set to 'wiki', 'nips'
or 'sotus' and the default semantic and vectorizer models are used, a
pretrained model will be loaded which can save a lot of time.
Returns
----------
transformed data : list of numpy arrays
The transformed text data
"""
if semantic is None:
semantic = 'LatentDirichletAllocation'
if vectorizer is None:
vectorizer = 'CountVectorizer'
    model_is_fit = False
if corpus is not None:
if corpus in ('wiki', 'nips', 'sotus',):
if semantic == 'LatentDirichletAllocation' and vectorizer == 'CountVectorizer':
semantic = load(corpus + '_model')
vectorizer = None
model_is_fit = True
else:
corpus = np.array(load(corpus).get_data())
else:
corpus = np.array([corpus])
vtype = _check_mtype(vectorizer)
if vtype == 'str':
vectorizer_params = default_params(vectorizer)
elif vtype == 'dict':
vectorizer_params = default_params(vectorizer['model'], vectorizer['params'])
vectorizer = vectorizer['model']
elif vtype in ('class', 'class_instance'):
if hasattr(vectorizer, 'fit_transform'):
vectorizer_models.update({'user_model' : vectorizer})
vectorizer = 'user_model'
else:
raise RuntimeError('Error: Vectorizer model must have fit_transform '
'method following the scikit-learn API. See here '
'for more details: '
'http://scikit-learn.org/stable/data_transforms.html')
ttype = _check_mtype(semantic)
if ttype == 'str':
text_params = default_params(semantic)
elif ttype == 'dict':
text_params = default_params(semantic['model'], semantic['params'])
semantic = semantic['model']
elif ttype in ('class', 'class_instance'):
if hasattr(semantic, 'fit_transform'):
texts.update({'user_model' : semantic})
semantic = 'user_model'
else:
raise RuntimeError('Text model must have fit_transform '
'method following the scikit-learn API. See here '
'for more details: '
'http://scikit-learn.org/stable/data_transforms.html')
if vectorizer:
if vtype in ('str', 'dict'):
vmodel = vectorizer_models[vectorizer](**vectorizer_params)
elif vtype == 'class':
vmodel = vectorizer_models[vectorizer]()
elif vtype == 'class_instance':
vmodel = vectorizer_models[vectorizer]
else:
vmodel = None
if semantic:
if ttype in ('str', 'dict'):
tmodel = texts[semantic](**text_params)
elif ttype == 'class':
tmodel = texts[semantic]()
elif ttype == 'class_instance':
tmodel = texts[semantic]
else:
tmodel = None
if not isinstance(data, list):
data = [data]
if corpus is None:
_fit_models(vmodel, tmodel, data, model_is_fit)
else:
_fit_models(vmodel, tmodel, corpus, model_is_fit)
return _transform(vmodel, tmodel, data)
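# A minimal usage sketch (not part of the original module); the documents and
# parameter choices below are purely illustrative. The dict form overrides the
# default model parameters, as described in the docstring above.
def _example_text2mat():  # pragma: no cover - illustrative only
    docs = [['the cat sat on the mat', 'dogs chase cats'],
            ['stock markets rallied today', 'bond yields fell']]
    mats = text2mat(docs,
                    vectorizer='CountVectorizer',
                    semantic={'model': 'NMF', 'params': {'n_components': 2}},
                    corpus=None)
    # One (n_samples, n_components) array per inner list of documents.
    return [m.shape for m in mats]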
def _transform(vmodel, tmodel, x):
split = np.cumsum([len(xi) for xi in x])[:-1]
if vmodel is not None:
x = np.vsplit(vmodel.transform(np.vstack(x).ravel()).toarray(), split)
if tmodel is not None:
if isinstance(tmodel, Pipeline):
x = np.vsplit(tmodel.transform(np.vstack(x).ravel()), split)
else:
x = np.vsplit(tmodel.transform(np.vstack(x)), split)
return [xi for xi in x]
def _fit_models(vmodel, tmodel, x, model_is_fit):
    if model_is_fit:
return
if vmodel is not None:
try:
check_is_fitted(vmodel, ['vocabulary_'])
except NotFittedError:
vmodel.fit(np.vstack(x).ravel())
if tmodel is not None:
try:
check_is_fitted(tmodel, ['components_'])
except NotFittedError:
if isinstance(tmodel, Pipeline):
tmodel.fit(np.vstack(x).ravel())
else:
tmodel.fit(vmodel.transform(np.vstack(x).ravel()))
def _check_mtype(x):
if isinstance(x, six.string_types):
return 'str'
elif isinstance(x, dict):
return 'dict'
elif inspect.isclass(x):
return 'class'
elif isinstance(x, type(None)):
return 'None'
else:
try:
if inspect.isclass(type(x)):
return 'class_instance'
        except Exception:
            raise TypeError('Parameter must be of type string, dict, class, or'
                            ' class instance.')
|
import sys
import logging
import types
import enum
from typing import Union
import pytest
from qutebrowser.misc import objects
from qutebrowser.commands import cmdexc, argparser, command
from qutebrowser.api import cmdutils
from qutebrowser.utils import usertypes
@pytest.fixture(autouse=True)
def clear_globals(monkeypatch):
monkeypatch.setattr(objects, 'commands', {})
def _get_cmd(*args, **kwargs):
"""Get a command object created via @cmdutils.register.
Args:
Passed to @cmdutils.register decorator
"""
@cmdutils.register(*args, **kwargs)
def fun():
"""Blah."""
return objects.commands['fun']
class TestCheckOverflow:
def test_good(self):
cmdutils.check_overflow(1, 'int')
def test_bad(self):
int32_max = 2 ** 31 - 1
with pytest.raises(cmdutils.CommandError, match="Numeric argument is "
"too large for internal int representation."):
cmdutils.check_overflow(int32_max + 1, 'int')
class TestCheckExclusive:
@pytest.mark.parametrize('flags', [[], [False, True], [False, False]])
def test_good(self, flags):
cmdutils.check_exclusive(flags, [])
def test_bad(self):
with pytest.raises(cmdutils.CommandError,
match="Only one of -x/-y/-z can be given!"):
cmdutils.check_exclusive([True, True], 'xyz')
class TestRegister:
def test_simple(self):
@cmdutils.register()
def fun():
"""Blah."""
cmd = objects.commands['fun']
assert cmd.handler is fun
assert cmd.name == 'fun'
assert len(objects.commands) == 1
def test_underlines(self):
"""Make sure the function name is normalized correctly (_ -> -)."""
@cmdutils.register()
def eggs_bacon():
"""Blah."""
assert objects.commands['eggs-bacon'].name == 'eggs-bacon'
assert 'eggs_bacon' not in objects.commands
def test_lowercasing(self):
"""Make sure the function name is normalized correctly (uppercase)."""
@cmdutils.register()
def Test(): # noqa: N801,N806 pylint: disable=invalid-name
"""Blah."""
assert objects.commands['test'].name == 'test'
assert 'Test' not in objects.commands
def test_explicit_name(self):
"""Test register with explicit name."""
@cmdutils.register(name='foobar')
def fun():
"""Blah."""
assert objects.commands['foobar'].name == 'foobar'
assert 'fun' not in objects.commands
assert len(objects.commands) == 1
def test_multiple_registrations(self):
"""Make sure registering the same name twice raises ValueError."""
@cmdutils.register(name='foobar')
def fun():
"""Blah."""
with pytest.raises(ValueError):
@cmdutils.register(name='foobar')
def fun2():
"""Blah."""
def test_instance(self):
"""Make sure the instance gets passed to Command."""
@cmdutils.register(instance='foobar')
def fun(self):
"""Blah."""
assert objects.commands['fun']._instance == 'foobar'
def test_star_args(self):
"""Check handling of *args."""
@cmdutils.register()
def fun(*args):
"""Blah."""
assert args == ['one', 'two']
objects.commands['fun'].parser.parse_args(['one', 'two'])
def test_star_args_empty(self):
"""Check handling of *args without any value."""
@cmdutils.register()
def fun(*args):
"""Blah."""
assert not args
with pytest.raises(argparser.ArgumentParserError):
objects.commands['fun'].parser.parse_args([])
def test_star_args_type(self):
"""Check handling of *args with a type.
        This isn't implemented, so make sure we catch it.
"""
with pytest.raises(AssertionError):
@cmdutils.register()
def fun(*args: int):
"""Blah."""
def test_star_args_optional(self):
"""Check handling of *args withstar_args_optional."""
@cmdutils.register(star_args_optional=True)
def fun(*args):
"""Blah."""
assert not args
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args([])
args, kwargs = cmd._get_call_args(win_id=0)
fun(*args, **kwargs)
def test_star_args_optional_annotated(self):
@cmdutils.register(star_args_optional=True)
def fun(*args: str):
"""Blah."""
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args([])
cmd._get_call_args(win_id=0)
@pytest.mark.parametrize('inp, expected', [
(['--arg'], True), (['-a'], True), ([], False)])
def test_flag(self, inp, expected):
@cmdutils.register()
def fun(arg=False):
"""Blah."""
assert arg == expected
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args(inp)
assert cmd.namespace.arg == expected
def test_flag_argument(self):
@cmdutils.register()
@cmdutils.argument('arg', flag='b')
def fun(arg=False):
"""Blah."""
assert arg
cmd = objects.commands['fun']
with pytest.raises(argparser.ArgumentParserError):
cmd.parser.parse_args(['-a'])
cmd.namespace = cmd.parser.parse_args(['-b'])
assert cmd.namespace.arg
args, kwargs = cmd._get_call_args(win_id=0)
fun(*args, **kwargs)
def test_self_without_instance(self):
with pytest.raises(TypeError, match="fun is a class method, but "
"instance was not given!"):
@cmdutils.register()
def fun(self):
"""Blah."""
def test_instance_without_self(self):
with pytest.raises(TypeError, match="fun is not a class method, but "
"instance was given!"):
@cmdutils.register(instance='inst')
def fun():
"""Blah."""
def test_var_kw(self):
with pytest.raises(TypeError, match="fun: functions with varkw "
"arguments are not supported!"):
@cmdutils.register()
def fun(**kwargs):
"""Blah."""
def test_partial_arg(self):
"""Test with only some arguments decorated with @cmdutils.argument."""
@cmdutils.register()
@cmdutils.argument('arg1', flag='b')
def fun(arg1=False, arg2=False):
"""Blah."""
def test_win_id(self):
@cmdutils.register()
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
def fun(win_id):
"""Blah."""
assert objects.commands['fun']._get_call_args(42) == ([42], {})
def test_count(self):
@cmdutils.register()
@cmdutils.argument('count', value=cmdutils.Value.count)
def fun(count=0):
"""Blah."""
assert objects.commands['fun']._get_call_args(42) == ([0], {})
def test_fill_self(self):
with pytest.raises(TypeError, match="fun: Can't fill 'self' with "
"value!"):
@cmdutils.register(instance='foobar')
@cmdutils.argument('self', value=cmdutils.Value.count)
def fun(self):
"""Blah."""
def test_fill_invalid(self):
with pytest.raises(TypeError, match="fun: Invalid value='foo' for "
"argument 'arg'!"):
@cmdutils.register()
@cmdutils.argument('arg', value='foo')
def fun(arg):
"""Blah."""
def test_count_without_default(self):
with pytest.raises(TypeError, match="fun: handler has count parameter "
"without default!"):
@cmdutils.register()
@cmdutils.argument('count', value=cmdutils.Value.count)
def fun(count):
"""Blah."""
@pytest.mark.parametrize('hide', [True, False])
def test_pos_args(self, hide):
@cmdutils.register()
@cmdutils.argument('arg', hide=hide)
def fun(arg):
"""Blah."""
pos_args = objects.commands['fun'].pos_args
if hide:
assert pos_args == []
else:
assert pos_args == [('arg', 'arg')]
class Enum(enum.Enum):
# pylint: disable=invalid-name
x = enum.auto()
y = enum.auto()
@pytest.mark.parametrize('typ, inp, choices, expected', [
(int, '42', None, 42),
(int, 'x', None, cmdexc.ArgumentTypeError),
(str, 'foo', None, 'foo'),
(Union[str, int], 'foo', None, 'foo'),
(Union[str, int], '42', None, 42),
# Choices
(str, 'foo', ['foo'], 'foo'),
(str, 'bar', ['foo'], cmdexc.ArgumentTypeError),
# Choices with Union: only checked when it's a str
(Union[str, int], 'foo', ['foo'], 'foo'),
(Union[str, int], 'bar', ['foo'], cmdexc.ArgumentTypeError),
(Union[str, int], '42', ['foo'], 42),
(Enum, 'x', None, Enum.x),
(Enum, 'z', None, cmdexc.ArgumentTypeError),
])
def test_typed_args(self, typ, inp, choices, expected):
@cmdutils.register()
@cmdutils.argument('arg', choices=choices)
def fun(arg: typ):
"""Blah."""
assert arg == expected
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args([inp])
if expected is cmdexc.ArgumentTypeError:
with pytest.raises(cmdexc.ArgumentTypeError):
cmd._get_call_args(win_id=0)
else:
args, kwargs = cmd._get_call_args(win_id=0)
assert args == [expected]
assert kwargs == {}
fun(*args, **kwargs)
def test_choices_no_annotation(self):
# https://github.com/qutebrowser/qutebrowser/issues/1871
@cmdutils.register()
@cmdutils.argument('arg', choices=['foo', 'bar'])
def fun(arg):
"""Blah."""
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args(['fish'])
with pytest.raises(cmdexc.ArgumentTypeError):
cmd._get_call_args(win_id=0)
def test_choices_no_annotation_kwonly(self):
# https://github.com/qutebrowser/qutebrowser/issues/1871
@cmdutils.register()
@cmdutils.argument('arg', choices=['foo', 'bar'])
def fun(*, arg='foo'):
"""Blah."""
cmd = objects.commands['fun']
cmd.namespace = cmd.parser.parse_args(['--arg=fish'])
with pytest.raises(cmdexc.ArgumentTypeError):
cmd._get_call_args(win_id=0)
def test_pos_arg_info(self):
@cmdutils.register()
@cmdutils.argument('foo', choices=('a', 'b'))
@cmdutils.argument('bar', choices=('x', 'y'))
@cmdutils.argument('opt')
def fun(foo, bar, opt=False):
"""Blah."""
cmd = objects.commands['fun']
assert cmd.get_pos_arg_info(0) == command.ArgInfo(choices=('a', 'b'))
assert cmd.get_pos_arg_info(1) == command.ArgInfo(choices=('x', 'y'))
with pytest.raises(IndexError):
cmd.get_pos_arg_info(2)
def test_keyword_only_without_default(self):
# https://github.com/qutebrowser/qutebrowser/issues/1872
def fun(*, target):
"""Blah."""
with pytest.raises(TypeError, match="fun: handler has keyword only "
"argument 'target' without default!"):
fun = cmdutils.register()(fun)
def test_typed_keyword_only_without_default(self):
# https://github.com/qutebrowser/qutebrowser/issues/1872
def fun(*, target: int):
"""Blah."""
with pytest.raises(TypeError, match="fun: handler has keyword only "
"argument 'target' without default!"):
fun = cmdutils.register()(fun)
class TestArgument:
"""Test the @cmdutils.argument decorator."""
def test_invalid_argument(self):
with pytest.raises(ValueError, match="fun has no argument foo!"):
@cmdutils.argument('foo')
def fun(bar):
"""Blah."""
def test_storage(self):
@cmdutils.argument('foo', flag='x')
@cmdutils.argument('bar', flag='y')
def fun(foo, bar):
"""Blah."""
expected = {
'foo': command.ArgInfo(flag='x'),
'bar': command.ArgInfo(flag='y')
}
assert fun.qute_args == expected
def test_arginfo_boolean(self):
@cmdutils.argument('special1', value=cmdutils.Value.count)
@cmdutils.argument('special2', value=cmdutils.Value.win_id)
@cmdutils.argument('normal')
def fun(special1, special2, normal):
"""Blah."""
assert fun.qute_args['special1'].value
assert fun.qute_args['special2'].value
assert not fun.qute_args['normal'].value
def test_wrong_order(self):
"""When @cmdutils.argument is used above (after) @register, fail."""
with pytest.raises(ValueError, match=r"@cmdutils.argument got called "
r"above \(after\) @cmdutils.register for fun!"):
@cmdutils.argument('bar', flag='y')
@cmdutils.register()
def fun(bar):
"""Blah."""
def test_no_docstring(self, caplog):
with caplog.at_level(logging.WARNING):
@cmdutils.register()
def fun():
# no docstring
pass
assert len(caplog.records) == 1
assert caplog.messages[0].endswith('test_cmdutils.py has no docstring')
def test_no_docstring_with_optimize(self, monkeypatch):
"""With -OO we'd get a warning on start, but no warning afterwards."""
monkeypatch.setattr(sys, 'flags', types.SimpleNamespace(optimize=2))
@cmdutils.register()
def fun():
# no docstring
pass
class TestRun:
@pytest.fixture(autouse=True)
def patch_backend(self, mode_manager, monkeypatch):
monkeypatch.setattr(command.objects, 'backend',
usertypes.Backend.QtWebKit)
@pytest.mark.parametrize('backend, used, ok', [
(usertypes.Backend.QtWebEngine, usertypes.Backend.QtWebEngine, True),
(usertypes.Backend.QtWebEngine, usertypes.Backend.QtWebKit, False),
(usertypes.Backend.QtWebKit, usertypes.Backend.QtWebEngine, False),
(usertypes.Backend.QtWebKit, usertypes.Backend.QtWebKit, True),
(None, usertypes.Backend.QtWebEngine, True),
(None, usertypes.Backend.QtWebKit, True),
])
def test_backend(self, monkeypatch, backend, used, ok):
monkeypatch.setattr(command.objects, 'backend', used)
cmd = _get_cmd(backend=backend)
if ok:
cmd.run(win_id=0)
else:
with pytest.raises(cmdexc.PrerequisitesError,
match=r'.* backend\.'):
cmd.run(win_id=0)
def test_no_args(self):
cmd = _get_cmd()
cmd.run(win_id=0)
def test_instance_unavailable_with_backend(self, monkeypatch):
"""Test what happens when a backend doesn't have an objreg object.
For example, QtWebEngine doesn't have 'hintmanager' registered. We make
sure the backend checking happens before resolving the instance, so we
display an error instead of crashing.
"""
@cmdutils.register(instance='doesnotexist',
backend=usertypes.Backend.QtWebEngine)
def fun(self):
"""Blah."""
monkeypatch.setattr(command.objects, 'backend',
usertypes.Backend.QtWebKit)
cmd = objects.commands['fun']
with pytest.raises(cmdexc.PrerequisitesError, match=r'.* backend\.'):
cmd.run(win_id=0)
|
from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
from homeassistant.components.weather import (
ATTR_WEATHER_ATTRIBUTION,
ATTR_WEATHER_HUMIDITY,
ATTR_WEATHER_TEMPERATURE,
ATTR_WEATHER_WIND_BEARING,
ATTR_WEATHER_WIND_SPEED,
DOMAIN as WEATHER_DOMAIN,
)
from homeassistant.setup import async_setup_component
from .helper import async_manipulate_test_data, get_and_check_entity_basics
async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, WEATHER_DOMAIN, {WEATHER_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN)
async def test_hmip_weather_sensor(hass, default_mock_hap_factory):
"""Test HomematicipWeatherSensor."""
entity_id = "weather.weather_sensor_plus"
entity_name = "Weather Sensor – plus"
device_model = "HmIP-SWO-PL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == ""
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 4.3
assert ha_state.attributes[ATTR_WEATHER_HUMIDITY] == 97
assert ha_state.attributes[ATTR_WEATHER_WIND_SPEED] == 15.0
assert ha_state.attributes[ATTR_WEATHER_ATTRIBUTION] == "Powered by Homematic IP"
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 12.1)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 12.1
async def test_hmip_weather_sensor_pro(hass, default_mock_hap_factory):
"""Test HomematicipWeatherSensorPro."""
entity_id = "weather.wettersensor_pro"
entity_name = "Wettersensor - pro"
device_model = "HmIP-SWO-PR"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "sunny"
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 15.4
assert ha_state.attributes[ATTR_WEATHER_HUMIDITY] == 65
assert ha_state.attributes[ATTR_WEATHER_WIND_SPEED] == 2.6
assert ha_state.attributes[ATTR_WEATHER_WIND_BEARING] == 295.0
assert ha_state.attributes[ATTR_WEATHER_ATTRIBUTION] == "Powered by Homematic IP"
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 12.1)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 12.1
async def test_hmip_home_weather(hass, default_mock_hap_factory):
"""Test HomematicipHomeWeather."""
entity_id = "weather.weather_1010_wien_osterreich"
entity_name = "Weather 1010 Wien, Österreich"
device_model = None
mock_hap = await default_mock_hap_factory.async_get_mock_hap()
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == "partlycloudy"
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 16.6
assert ha_state.attributes[ATTR_WEATHER_HUMIDITY] == 54
assert ha_state.attributes[ATTR_WEATHER_WIND_SPEED] == 8.6
assert ha_state.attributes[ATTR_WEATHER_WIND_BEARING] == 294
assert ha_state.attributes[ATTR_WEATHER_ATTRIBUTION] == "Powered by Homematic IP"
await async_manipulate_test_data(
hass, mock_hap.home.weather, "temperature", 28.3, fire_device=mock_hap.home
)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 28.3
|