from unittest import mock
from libpurecool.dyson_pure_cool import FanSpeed
def load_mock_device(device):
"""Load the mock with default values so it doesn't throw errors."""
device.serial = "XX-XXXXX-XX"
device.name = "Temp Name"
device.connect = mock.Mock(return_value=True)
device.auto_connect = mock.Mock(return_value=True)
device.environmental_state.particulate_matter_25 = "0000"
device.environmental_state.particulate_matter_10 = "0000"
device.environmental_state.nitrogen_dioxide = "0000"
device.environmental_state.volatil_organic_compounds = "0000"
device.environmental_state.volatile_organic_compounds = "0000"
device.environmental_state.temperature = 250
device.state.hepa_filter_state = 0
device.state.carbon_filter_state = 0
device.state.speed = FanSpeed.FAN_SPEED_1.value
device.state.oscillation_angle_low = "000"
device.state.oscillation_angle_high = "000"
device.state.filter_life = "000"
device.state.heat_target = 200
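# A minimal usage sketch, assuming a standard unittest.mock.MagicMock so the nested
# attribute assignments above (device.state.speed, ...) resolve without a real
# libpurecool device:
def _example_load_mock_device():
    device = mock.MagicMock()
    load_mock_device(device)
    # connect() is stubbed to return True regardless of arguments
    assert device.connect() is True
    assert device.state.speed == FanSpeed.FAN_SPEED_1.value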
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from diskspace import DiskSpaceCollector
##########################################################################
class TestDiskSpaceCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('DiskSpaceCollector', {
'interval': 10,
'byte_unit': ['gigabyte'],
'exclude_filters': [
'^/export/home',
]
})
self.collector = DiskSpaceCollector(config, None)
def test_import(self):
self.assertTrue(DiskSpaceCollector)
def run_collection(self, statvfs_mock, os_major, os_minor):
os_stat_mock = patch('os.stat')
os_path_isdir_mock = patch('os.path.isdir', Mock(return_value=False))
open_mock = patch('__builtin__.open',
Mock(return_value=self.getFixture('proc_mounts')))
os_statvfs_mock = patch('os.statvfs', Mock(return_value=statvfs_mock))
os_stat_mock.start()
os_path_isdir_mock.start()
open_mock.start()
os_statvfs_mock.start()
self.collector.collect()
os_stat_mock.stop()
os_path_isdir_mock.stop()
open_mock.stop()
os_statvfs_mock.stop()
@patch('os.access', Mock(return_value=True))
def test_get_file_systems(self):
result = None
os_stat_mock = patch('os.stat')
os_realpath_mock = patch('os.path.realpath')
open_mock = patch('__builtin__.open',
Mock(return_value=self.getFixture('proc_mounts')))
stat_mock = os_stat_mock.start()
stat_mock.return_value.st_dev = 42
realpath_mock = os_realpath_mock.start()
realpath_mock.return_value = '/dev/sda1'
omock = open_mock.start()
result = self.collector.get_file_systems()
os_stat_mock.stop()
os_realpath_mock.stop()
open_mock.stop()
stat_mock.assert_called_once_with('/')
realpath_mock.assert_called_once_with(
'/dev/disk/by-uuid/81969733-a724-4651-9cf5-64970f86daba')
self.assertEqual(result, {
42: {
'device':
'/dev/sda1',
'fs_type': 'ext3',
'mount_point': '/'}
})
omock.assert_called_once_with('/proc/mounts')
return result
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
statvfs_mock = Mock()
statvfs_mock.f_bsize = 1048576
statvfs_mock.f_frsize = 4096
statvfs_mock.f_blocks = 360540255
statvfs_mock.f_bfree = 285953527
statvfs_mock.f_bavail = 267639130
statvfs_mock.f_files = 91578368
statvfs_mock.f_ffree = 91229495
statvfs_mock.f_favail = 91229495
statvfs_mock.f_flag = 4096
statvfs_mock.f_namemax = 255
self.run_collection(statvfs_mock, 9, 0)
metrics = {
'root.gigabyte_used': (284.525, 2),
'root.gigabyte_free': (1090.826, 2),
'root.gigabyte_avail': (1020.962, 2),
'root.inodes_used': 348873,
'root.inodes_free': 91229495,
'root.inodes_avail': 91229495,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_tmpfs(self, publish_mock):
config = get_collector_config('DiskSpaceCollector', {
'interval': 10,
'byte_unit': ['gigabyte'],
'filesystems': 'tmpfs',
'exclude_filters': '^/sys'
})
self.collector = DiskSpaceCollector(config, None)
statvfs_mock = Mock()
statvfs_mock.f_bsize = 4096
statvfs_mock.f_frsize = 4096
statvfs_mock.f_blocks = 360540255
statvfs_mock.f_bfree = 285953527
statvfs_mock.f_bavail = 267639130
statvfs_mock.f_files = 91578368
statvfs_mock.f_ffree = 91229495
statvfs_mock.f_favail = 91229495
statvfs_mock.f_flag = 4096
statvfs_mock.f_namemax = 255
self.run_collection(statvfs_mock, 4, 0)
metrics = {
'tmp.gigabyte_used': (284.525, 2),
'tmp.gigabyte_free': (1090.826, 2),
'tmp.gigabyte_avail': (1020.962, 2),
'tmp.inodes_used': 348873,
'tmp.inodes_free': 91229495,
'tmp.inodes_avail': 91229495,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_in_system_directories(self, publish_mock):
config = get_collector_config('DiskSpaceCollector', {
'interval': 10,
'byte_unit': ['gigabyte'],
'filesystems': 'tmpfs',
'exclude_filters': '^/tmp'
})
self.collector = DiskSpaceCollector(config, None)
statvfs_mock = Mock()
statvfs_mock.f_bsize = 4096
statvfs_mock.f_frsize = 4096
statvfs_mock.f_blocks = 360540255
statvfs_mock.f_bfree = 285953527
statvfs_mock.f_bavail = 267639130
statvfs_mock.f_files = 91578368
statvfs_mock.f_ffree = 91229495
statvfs_mock.f_favail = 91229495
statvfs_mock.f_flag = 4096
statvfs_mock.f_namemax = 255
self.run_collection(statvfs_mock, 4, 0)
metrics = {
'_sys_fs_cgroup.gigabyte_used': (284.525, 2),
'_sys_fs_cgroup.gigabyte_free': (1090.826, 2),
'_sys_fs_cgroup.gigabyte_avail': (1020.962, 2),
'_sys_fs_cgroup.inodes_used': 348873,
'_sys_fs_cgroup.inodes_free': 91229495,
'_sys_fs_cgroup.inodes_avail': 91229495,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
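# An equivalent sketch of run_collection()'s patching using a single with-statement;
# mock.patch objects are context managers, so all four patches are undone even if
# collect() raises, unlike paired start()/stop() calls. Parameter names here are
# illustrative.
def _run_collection_with_contextmanagers(collector, statvfs_mock, proc_mounts_fixture):
    with patch('os.stat'), \
         patch('os.path.isdir', Mock(return_value=False)), \
         patch('__builtin__.open', Mock(return_value=proc_mounts_fixture)), \
         patch('os.statvfs', Mock(return_value=statvfs_mock)):
        collector.collect()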
if __name__ == "__main__":
unittest.main()
|
import datetime
import time
import traceback
from typing import NamedTuple
import humanize
from paasta_tools import marathon_tools
from paasta_tools.deployd.common import BounceTimers
from paasta_tools.deployd.common import DelayDeadlineQueueProtocol
from paasta_tools.deployd.common import exponential_back_off
from paasta_tools.deployd.common import PaastaThread
from paasta_tools.deployd.common import ServiceInstance
from paasta_tools.metrics.metrics_lib import BaseMetrics
from paasta_tools.setup_marathon_job import deploy_marathon_service
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import SystemPaastaConfig
class BounceResults(NamedTuple):
bounce_again_in_seconds: float
return_code: int
class PaastaDeployWorker(PaastaThread):
def __init__(
self,
worker_number: int,
instances_to_bounce: DelayDeadlineQueueProtocol,
config: SystemPaastaConfig,
metrics_provider: BaseMetrics,
) -> None:
super().__init__()
self.daemon = True
self.name = f"Worker{worker_number}"
self.instances_to_bounce = instances_to_bounce
self.metrics = metrics_provider
self.config = config
self.cluster = self.config.get_cluster()
self.busy = False
self.setup()
def setup(self) -> None:
system_paasta_config = load_system_paasta_config()
self.marathon_servers = marathon_tools.get_marathon_servers(
system_paasta_config
)
self.marathon_clients = marathon_tools.get_marathon_clients(
self.marathon_servers
)
self.max_failures = (
system_paasta_config.get_deployd_max_service_instance_failures()
)
def setup_timers(self, service_instance: ServiceInstance) -> BounceTimers:
bounce_length_timer = self.metrics.create_timer(
"bounce_length_timer",
service=service_instance.service,
instance=service_instance.instance,
paasta_cluster=self.cluster,
)
processed_by_worker_timer = self.metrics.create_timer(
"processed_by_worker",
service=service_instance.service,
instance=service_instance.instance,
paasta_cluster=self.cluster,
)
setup_marathon_timer = self.metrics.create_timer(
"setup_marathon_timer",
service=service_instance.service,
instance=service_instance.instance,
paasta_cluster=self.cluster,
)
return BounceTimers(
processed_by_worker=processed_by_worker_timer,
setup_marathon=setup_marathon_timer,
bounce_length=bounce_length_timer,
)
def run(self) -> None:
"""Takes things from the to_bounce_now queue, processes them, then
might put them on the bounce_later queue for future processing"""
self.log.info(f"{self.name} starting up")
while True:
with self.instances_to_bounce.get() as service_instance:
self.busy = True
try:
(
bounce_again_in_seconds,
return_code,
) = self.process_service_instance(service_instance)
except Exception:
self.log.error(
f"{self.name} Worker failed to process service instance and will retry. "
f"Caused by exception: {traceback.format_exc()}"
)
return_code = -2
failures = service_instance.failures
if return_code != 0:
failures = service_instance.failures + 1
bounce_again_in_seconds = exponential_back_off(
failures=failures,
factor=self.config.get_deployd_worker_failure_backoff_factor(),
base=2,
max_time=6000,
)
if bounce_again_in_seconds:
if failures >= self.max_failures:
self.log.info(
f"{self.name} Worker removing "
f"{service_instance.service}.{service_instance.instance} "
f"from queue because it has failed {failures} times "
f"(max is {self.max_failures})"
)
else:
bounce_by = int(time.time()) + bounce_again_in_seconds
service_instance = ServiceInstance(
service=service_instance.service,
instance=service_instance.instance,
bounce_by=bounce_by,
wait_until=bounce_by,
watcher=self.name,
failures=failures,
processed_count=service_instance.processed_count + 1,
bounce_start_time=service_instance.bounce_start_time,
enqueue_time=time.time(),
)
self.instances_to_bounce.put(service_instance)
self.busy = False
time.sleep(0.1)
def process_service_instance(
self, service_instance: ServiceInstance
) -> BounceResults:
bounce_timers = self.setup_timers(service_instance)
if service_instance.enqueue_time is not None:
bounce_timers.processed_by_worker.record(
time.time() - service_instance.enqueue_time
)
human_bounce_by = humanize.naturaldelta(
datetime.timedelta(seconds=(time.time() - service_instance.bounce_by))
)
self.log.info(
f"{self.name} processing {service_instance.service}.{service_instance.instance} (bounce_by {human_bounce_by} ago)"
) # noqa E501
bounce_timers.setup_marathon.start()
return_code, bounce_again_in_seconds = deploy_marathon_service(
service=service_instance.service,
instance=service_instance.instance,
clients=self.marathon_clients,
soa_dir=marathon_tools.DEFAULT_SOA_DIR,
marathon_apps_with_clients=None,
)
bounce_timers.setup_marathon.stop()
self.log.info(
f"{self.name} setup marathon completed with exit code {return_code} for {service_instance.service}.{service_instance.instance}"
) # noqa E501
if bounce_again_in_seconds:
self.log.info(
f"{self.name} {service_instance.service}.{service_instance.instance} not in steady state so bouncing again in {bounce_again_in_seconds}"
) # noqa E501
else:
self.log.info(
f"{self.name} {service_instance.service}.{service_instance.instance} in steady state"
)
if service_instance.processed_count > 0:
bounce_timers.bounce_length.record(
time.time() - service_instance.bounce_start_time
)
return BounceResults(bounce_again_in_seconds, return_code)
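# A sketch of the retry-delay semantics used in run(). The real helper is
# paasta_tools.deployd.common.exponential_back_off; the exact formula here is an
# assumption (factor * base ** failures, capped at max_time):
def _exponential_back_off_sketch(
    failures: int, factor: float, base: float, max_time: float
) -> float:
    # e.g. factor=30, base=2 gives 60s, 120s, 240s, ... up to max_time (6000s above)
    return min(factor * base ** failures, max_time)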
|
import logging
from perfkitbenchmarker import errors
from perfkitbenchmarker import vm_util
def YumInstall(vm):
"""Installs the package on the VM."""
vm.InstallPackages(vm.PYTHON_PACKAGE)
_SetDefaultPythonIfNeeded(vm, '/usr/bin/{}'.format(vm.PYTHON_PACKAGE))
def AptInstall(vm):
"""Installs the package on the VM."""
vm.InstallPackages('python python2.7')
_SetDefaultPythonIfNeeded(vm, '/usr/bin/python2')
def SwupdInstall(vm):
"""Installs the package on the VM."""
vm.InstallPackages('python-basic')
def _SetDefaultPythonIfNeeded(vm, python_path):
"""Sets the default version of python to the specified path if required.
Some Linux distributions do not set a default version of python, so running
"python ..." fails. If the "alternatives" program is found on the VM, it is
used to set the default version to the one specified.
Logs a warning if the default version is not set and the alternatives program
could not be run to set it.
Args:
vm: The virtual machine to set the default version of python on.
python_path: Path to the python executable.
Raises:
PythonPackageRequirementUnfulfilled: If the default version of python is
not set and it is possible to set via "alternatives" but the alternatives
program failed.
"""
@vm_util.Retry(
retryable_exceptions=(errors.VirtualMachine.RemoteCommandError,))
def _RunCommand(command):
return vm.RemoteCommandWithReturnCode(command, ignore_failure=True)
python_version_cmd = 'python --version'
python_exists_cmd = 'ls {}'.format(python_path)
alternatives_exists_cmd = 'which update-alternatives'
alternatives_cmd = 'sudo update-alternatives --set python {}'.format(
python_path)
stdout, stderr, return_code = _RunCommand(python_version_cmd)
if not return_code:
logging.info(
'Default version of python: %s', (stdout or stderr).strip().split()[-1])
return
logging.info('Trying to set the default python version')
_, _, return_code = _RunCommand(alternatives_exists_cmd)
if return_code:
# Some distros might not include update-alternatives
logging.warning('Cannot set default version of python as the '
'update-alternatives program does not exist')
return
_, _, return_code = _RunCommand(python_exists_cmd)
if return_code:
# This is most likely an error but user could specify path to python
logging.warning('No default version of python set and %s does not exist',
python_path)
return
_, error_text, return_code = _RunCommand(alternatives_cmd)
if return_code:
raise errors.Setup.PythonPackageRequirementUnfulfilled(
'Could not set default python version via update-alternatives: {}'
.format(error_text))
_, txt, return_code = _RunCommand(python_version_cmd)
if return_code:
raise errors.Setup.PythonPackageRequirementUnfulfilled(
'Set default python path to {} but could not use default version'
.format(python_path))
logging.info('Set default python version to %s', txt.strip().split()[-1])
|
import struct
import configparser
import glob
import os
import shutil
SPECS = {os.path.splitext(os.path.basename(spec_file))[0]: spec_file for spec_file in glob.glob(os.path.join(os.path.dirname(__file__), '*.cfg'))}
EVENT_FORMAT = '@llHHI'
EV_KEY = 0x01
KEY_ACTION = {
'up': 0x00,
'down': 0x01,
'repeat': 0x02
}
def touch(fname, times=None):
with open(fname, 'a'):
os.utime(fname, times)
class FakeDevice(object):
@staticmethod
def parse_endpoint_line(line):
components = line.split(',')
if len(components) == 2:
chmod, name = components
default = None
elif len(components) == 3:
chmod, name, default = components
else:
raise ValueError('Invalid config line "{0}"'.format(line))
orig_perm = chmod
if chmod == 'r':
chmod = 0o660
elif chmod == 'w':
chmod = 0o660
else:
chmod = 0o660
return chmod, name, default, orig_perm
@staticmethod
def create_endpoint(path, chmod, default=None):
if os.path.exists(path):
os.chmod(path, 0o660)
os.remove(path)
if default is not None:
# Convert to bytes
if default.startswith("0x"):
default = bytes.fromhex(default[2:])
else:
default = default.encode('UTF-8')
with open(path, 'wb') as f:
f.write(default)
else:
touch(path)
os.chmod(path, chmod)
def __init__(self, spec_name, serial=None, tmp_dir=os.environ.get('TMPDIR', '/tmp')):
if spec_name not in SPECS:
raise ValueError("Spec {0} not in SPECS".format(spec_name))
self._tick = 1
self.spec_name = spec_name
self._config = configparser.ConfigParser()
self._config.read(SPECS[spec_name])
self._serial = serial
self._tmp_dir = os.path.join(tmp_dir, self._config.get('device', 'dir_name'))
os.makedirs(self._tmp_dir, exist_ok=True)
self.endpoints = {}
self.events = {}
self.create_endpoints()
self.create_events()
if serial is not None:
self.set('device_serial', serial)
def _get_endpoint_path(self, endpoint):
return os.path.join(self._tmp_dir, endpoint)
def _get_event_path(self, event):
return os.path.join(self._tmp_dir, 'input', event)
def create_events(self):
"""
Goes through event files and creates them as needed
"""
event_files = self._config.get('device', 'event', fallback=None)
if event_files is None:
event_files = []
else:
event_files = event_files.splitlines()
for index, event_file in enumerate(event_files):
path = self._get_event_path(event_file)
os.makedirs(os.path.dirname(path), exist_ok=True)
if not os.path.exists(path):
os.mkfifo(path)
file_object = os.open(path, os.O_RDWR)
self.events[str(index)] = (event_file, file_object)
def create_endpoints(self):
for endpoint_line in self._config.get('device', 'files').splitlines():
chmod, name, default, orig_perm = self.parse_endpoint_line(endpoint_line)
path = self._get_endpoint_path(name)
if name == 'device_serial' and self._serial is not None:
default = self._serial
self.endpoints[name] = (chmod, default, orig_perm)
self.create_endpoint(path, chmod, default)
def get(self, endpoint, binary=False):
"""
Gets a value from a given endpoint
:param endpoint: Endpoint to read from
:type endpoint: str
:param binary: Is binary data being read
:type binary: bool
:return: Result
:rtype: str or bytes
"""
if endpoint not in self.endpoints:
raise ValueError("Endpoint {0} does not exist".format(endpoint))
path = self._get_endpoint_path(endpoint)
if binary:
read_mode = 'rb'
else:
read_mode = 'r'
try:
with open(path, read_mode) as open_endpoint:
result = open_endpoint.read()
except UnicodeDecodeError as e:
return str(e)
return result
def set(self, endpoint, value, binary=False):
if endpoint not in self.endpoints:
raise ValueError("Endpoint {0} does not exist".format(endpoint))
path = self._get_endpoint_path(endpoint)
if binary:
write_mode = 'wb'
else:
write_mode = 'w'
with open(path, write_mode) as open_endpoint:
open_endpoint.write(value)
def emit_kb_event(self, file_id, key_code, value):
if file_id not in self.events:
raise ValueError("file_id {0} does not exist".format(file_id))
if value in KEY_ACTION:
value = KEY_ACTION[value]
else:
value = 0x00
event_binary = struct.pack(EVENT_FORMAT, self._tick, 0, EV_KEY, key_code, value)
pipe_fd = self.events[file_id][1]
os.write(pipe_fd, event_binary)
return len(event_binary)
def close(self):
if os.path.exists(self._tmp_dir):
# Allow deletion
for endpoint in self.endpoints:
path = os.path.join(self._tmp_dir, endpoint)
os.chmod(path, 0o660)
shutil.rmtree(self._tmp_dir)
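# A small reader sketch for the events emitted above: emit_kb_event() writes one
# EVENT_FORMAT struct ('@llHHI' = seconds, microseconds, type, code, value) per
# call, so the other end of the FIFO can unpack events directly. The descriptor
# handling here is illustrative.
def _read_one_event(pipe_fd):
    data = os.read(pipe_fd, struct.calcsize(EVENT_FORMAT))
    tv_sec, tv_usec, ev_type, key_code, value = struct.unpack(EVENT_FORMAT, data)
    return ev_type, key_code, value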
if __name__ == '__main__':
a = FakeDevice('razertartarus')
print()
a.emit_kb_event('0', 62, 'up')
print()
|
import os
import platform
import socket
import struct
import sys
try:
# Python 2.x
from cStringIO import StringIO # noqa: F401
python3 = 0
except ImportError:
# Python 3.x
from io import BytesIO # noqa: F401
python3 = 1
try:
import urllib.parse as urlparse # noqa: F401
except ImportError:
import urlparse # noqa: F401
# TODO: change this to rosgraph equivalents once we have ported this module
ROS_IP = 'ROS_IP'
ROS_HOSTNAME = 'ROS_HOSTNAME'
SIOCGIFCONF = 0x8912
SIOCGIFADDR = 0x8915
if platform.system() == 'FreeBSD':
SIOCGIFADDR = 0xc0206921
if platform.architecture()[0] == '64bit':
SIOCGIFCONF = 0xc0106924
else:
SIOCGIFCONF = 0xc0086924
if 0:
# disabling netifaces as it accounts for 50% of startup latency
try:
import netifaces
_use_netifaces = True
except Exception:
# NOTE: in rare cases, I've seen Python fail to extract the egg
# cache when launching multiple python nodes. Thus, we do
# except-all instead of except ImportError (kwc).
_use_netifaces = False
else:
_use_netifaces = False
def _is_unix_like_platform():
"""
@return: true if the platform conforms to UNIX/POSIX-style APIs
@rtype: bool
"""
# return platform.system() in ['Linux', 'Mac OS X', 'Darwin']
return platform.system() in ['Linux', 'FreeBSD']
def get_address_override():
"""
@return: ROS_IP/ROS_HOSTNAME override or None
@rtype: str
@raise ValueError: if ROS_IP/ROS_HOSTNAME/__ip/__hostname are invalidly specified
"""
# #998: check for command-line remappings first
for arg in sys.argv:
if arg.startswith('__hostname:=') or arg.startswith('__ip:='):
try:
_, val = arg.split(':=')
return val
except Exception: # split didn't unpack properly
raise ValueError("invalid ROS command-line remapping argument '%s'" % arg)
# check ROS_HOSTNAME and ROS_IP environment variables, which are
# aliases for each other
if ROS_HOSTNAME in os.environ:
return os.environ[ROS_HOSTNAME]
elif ROS_IP in os.environ:
return os.environ[ROS_IP]
return None
def is_local_address(hostname):
"""
@param hostname: host name/address
@type hostname: str
@return: True if hostname maps to a local address, False otherwise. False conditions include invalid hostnames.
"""
try:
reverse_ip = socket.gethostbyname(hostname)
except socket.error:
return False
# 127. check is due to #1260
if reverse_ip not in get_local_addresses() and not reverse_ip.startswith('127.'):
return False
return True
def get_local_address():
"""
@return: default local IP address (e.g. eth0). May be overridden by ROS_IP/ROS_HOSTNAME/__ip/__hostname
@rtype: str
"""
override = get_address_override()
if override:
return override
addrs = get_local_addresses()
if len(addrs) == 1:
return addrs[0]
for addr in addrs:
# pick first non 127/8 address
if not addr.startswith('127.'):
return addr
else: # loopback
return '127.0.0.1'
# cache for performance reasons
_local_addrs = None
def get_local_addresses():
"""
@return: known local addresses. Not affected by ROS_IP/ROS_HOSTNAME
@rtype: [str]
"""
# cache address data as it can be slow to calculate
global _local_addrs
if _local_addrs is not None:
return _local_addrs
local_addrs = None
if _use_netifaces:
# #552: netifaces is a more robust package for looking up
# #addresses on multiple platforms (OS X, Unix, Windows)
local_addrs = []
# see http://alastairs-place.net/netifaces/
for i in netifaces.interfaces():
try:
local_addrs.extend([d['addr'] for d in netifaces.ifaddresses(i)[netifaces.AF_INET]])
except KeyError:
pass
elif _is_unix_like_platform():
# unix-only branch
# adapted from code from Rosen Diankov ([email protected])
# and from ActiveState recipe
import fcntl
import array
ifsize = 32
if platform.system() == 'Linux' and platform.architecture()[0] == '64bit':
ifsize = 40 # untested
# 32 interfaces allowed, far more than ROS can sanely deal with
max_bytes = 32 * ifsize
# according to http://docs.python.org/library/fcntl.html, the buffer limit is 1024 bytes
buff = array.array('B', b'\0' * max_bytes)
# serialize the buffer length and address to ioctl
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
info = fcntl.ioctl(sock.fileno(), SIOCGIFCONF,
struct.pack('iL', max_bytes, buff.buffer_info()[0]))
retbytes = struct.unpack('iL', info)[0]
buffstr = buff.tostring()
if platform.system() == 'Linux':
local_addrs = [socket.inet_ntoa(buffstr[i+20:i+24]) for i in range(0, retbytes, ifsize)]
else:
# in FreeBSD, ifsize is variable: 16 + (16 or 28 or 56) bytes
# When ifsize is 32 bytes, it contains the interface name and address,
# else it contains the interface name and other information
# This means the buffer must be traversed in its entirety
local_addrs = []
bufpos = 0
while bufpos < retbytes:
bufpos += 16
ifreqsize = ord(buffstr[bufpos])
if ifreqsize == 16:
local_addrs += [socket.inet_ntoa(buffstr[bufpos+4:bufpos+8])]
bufpos += ifreqsize
else:
# cross-platform branch, can only resolve one address
local_addrs = [socket.gethostbyname(socket.gethostname())]
_local_addrs = local_addrs
return local_addrs
def get_bind_address(address=None):
"""
@param address: (optional) address to compare against
@type address: str
@return: address TCP/IP sockets should use for binding. This is
generally 0.0.0.0, but if \a address or ROS_IP/ROS_HOSTNAME is set
to localhost it will return 127.0.0.1
@rtype: str
"""
if address is None:
address = get_address_override()
if address and (address == 'localhost' or address.startswith('127.')):
# localhost or 127/8
return '127.0.0.1' # loopback
else:
return '0.0.0.0'
# #528: semi-complicated logic for determining XML-RPC URI
def get_host_name():
"""
Determine host-name for use in host-name-based addressing (e.g. XML-RPC URIs):
- if ROS_IP/ROS_HOSTNAME is set, use that address
- if the hostname returns a non-localhost value, use that
- use whatever L{get_local_address()} returns
"""
hostname = get_address_override()
if not hostname:
try:
hostname = socket.gethostname()
except Exception:
pass
if not hostname or hostname == 'localhost' or hostname.startswith('127.'):
hostname = get_local_address()
return hostname
def create_local_xmlrpc_uri(port):
"""
Determine the XMLRPC URI for local servers. This handles the search
logic of checking ROS environment variables, the known hostname,
and local interface IP addresses to determine the best possible
URI.
@param port: port that server is running on
@type port: int
@return: XMLRPC URI
@rtype: str
"""
# TODO: merge logic in roslib.xmlrpc with this routine
# in the future we may not want to be locked to http protocol nor root path
return 'http://%s:%s/' % (get_host_name(), port)
# handshake utils ###########################################
class ROSHandshakeException(Exception):
"""
Exception to represent errors decoding handshake
"""
pass
def decode_ros_handshake_header(header_str):
"""
Decode serialized ROS handshake header into a Python dictionary
header is a list of string key=value pairs, each prefixed by a
4-byte length field. It is preceded by a 4-byte length field for
the entire header.
@param header_str: encoded header string. May contain extra data at the end.
@type header_str: str
@return: key value pairs encoded in \a header_str
@rtype: {str: str}
"""
(size, ) = struct.unpack('<I', header_str[0:4])
size += 4 # add in 4 to include size of size field
header_len = len(header_str)
if size > header_len:
raise ROSHandshakeException('Incomplete header. Expected %s bytes but only have %s' % (size, header_len))
d = {}
start = 4
while start < size:
(field_size, ) = struct.unpack('<I', header_str[start:start+4])
if field_size == 0:
raise ROSHandshakeException('Invalid 0-length handshake header field')
start += field_size + 4
if start > size:
raise ROSHandshakeException('Invalid line length in handshake header: %s' % size)
line = header_str[start-field_size:start]
# python3 compatibility
if python3 == 1:
line = line.decode()
idx = line.find('=')
if idx < 0:
raise ROSHandshakeException('Invalid line in handshake header: [%s]' % line)
key = line[:idx]
value = line[idx+1:]
d[key.strip()] = value
return d
def read_ros_handshake_header(sock, b, buff_size):
"""
Read in tcpros header off the socket \a sock using buffer \a b.
@param sock: socket must be in blocking mode
@type sock: socket
@param b: buffer to use
@type b: StringIO for Python2, BytesIO for Python 3
@param buff_size: incoming buffer size to use
@type buff_size: int
@return: key value pairs encoded in handshake
@rtype: {str: str}
@raise ROSHandshakeException: If header format does not match expected
"""
header_str = None
while not header_str:
d = sock.recv(buff_size)
if not d:
raise ROSHandshakeException('connection from sender terminated before handshake header received. %s bytes were received. Please check sender for additional details.' % b.tell())
b.write(d)
btell = b.tell()
if btell > 4:
# most likely we will get the full header in the first recv, so
# not worth tiny optimizations possible here
bval = b.getvalue()
(size,) = struct.unpack('<I', bval[0:4])
if btell - 4 >= size:
header_str = bval
# memmove the remnants of the buffer back to the start
leftovers = bval[size+4:]
b.truncate(len(leftovers))
b.seek(0)
b.write(leftovers)
# process the header
return decode_ros_handshake_header(bval)
def encode_ros_handshake_header(header):
"""
Encode ROS handshake header as a byte string. Each header
field is a string key value pair. The encoded header is
prefixed by a length field, as is each field key/value pair.
key/value pairs are separated by a '=' equals sign.
FORMAT: (4-byte length + [4-byte field length + field=value ]*)
@param header: header field keys/values
@type header: dict
@return: header encoded as byte string
@rtype: str
"""
fields = ['%s=%s' % (k, v) for k, v in header.items()]
# concatenating bytes and str raises "TypeError: can't concat bytes to str" on Python 3, so handle the two versions separately:
if python3 == 0:
# python 2
s = ''.join(['%s%s' % (struct.pack('<I', len(f)), f) for f in fields])
return struct.pack('<I', len(s)) + s
else:
# python 3
s = b''.join([(struct.pack('<I', len(f)) + f.encode('utf-8')) for f in fields])
return struct.pack('<I', len(s)) + s
def write_ros_handshake_header(sock, header):
"""
Write ROS handshake header header to socket sock
@param sock: socket to write to (must be in blocking mode)
@type sock: socket.socket
@param header: header field keys/values
@type header: {str : str}
@return: Number of bytes sent (for statistics)
@rtype: int
"""
s = encode_ros_handshake_header(header)
sock.sendall(s)
return len(s) # STATS
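# A quick round-trip sketch of the wire format described above: each 'key=value'
# field carries its own 4-byte little-endian length prefix, and the whole header
# is prefixed with its total length. Field names here are illustrative.
def _handshake_roundtrip_example():
    header = {'callerid': '/example_node', 'topic': '/chatter', 'md5sum': '*'}
    wire = encode_ros_handshake_header(header)
    assert decode_ros_handshake_header(wire) == header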
|
import asyncio
from homeassistant.components.remote import (
ATTR_ACTIVITY,
ATTR_DELAY_SECS,
DEFAULT_DELAY_SECS,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN, HARMONY_OPTIONS_UPDATE, PLATFORMS
from .remote import HarmonyRemote
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Logitech Harmony Hub component."""
hass.data.setdefault(DOMAIN, {})
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Logitech Harmony Hub from a config entry."""
# As there currently is no way to import options from yaml
# when setting up a config entry, we fallback to adding
# the options to the config entry and pull them out here if
# they are missing from the options
_async_import_options_from_data_if_missing(hass, entry)
address = entry.data[CONF_HOST]
name = entry.data[CONF_NAME]
activity = entry.options.get(ATTR_ACTIVITY)
delay_secs = entry.options.get(ATTR_DELAY_SECS, DEFAULT_DELAY_SECS)
harmony_conf_file = hass.config.path(f"harmony_{entry.unique_id}.conf")
try:
device = HarmonyRemote(
name, entry.unique_id, address, activity, harmony_conf_file, delay_secs
)
connected_ok = await device.connect()
except (asyncio.TimeoutError, ValueError, AttributeError) as err:
raise ConfigEntryNotReady from err
if not connected_ok:
raise ConfigEntryNotReady
hass.data[DOMAIN][entry.entry_id] = device
entry.add_update_listener(_update_listener)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
@callback
def _async_import_options_from_data_if_missing(hass: HomeAssistant, entry: ConfigEntry):
options = dict(entry.options)
modified = 0
for importable_option in [ATTR_ACTIVITY, ATTR_DELAY_SECS]:
if importable_option not in entry.options and importable_option in entry.data:
options[importable_option] = entry.data[importable_option]
modified = 1
if modified:
hass.config_entries.async_update_entry(entry, options=options)
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
async_dispatcher_send(
hass, f"{HARMONY_OPTIONS_UPDATE}-{entry.unique_id}", entry.options
)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
# Shutdown a harmony remote for removal
device = hass.data[DOMAIN][entry.entry_id]
await device.shutdown()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
|
from homematicip.aio.auth import AsyncAuth
from homematicip.aio.connection import AsyncConnection
from homematicip.aio.home import AsyncHome
from homematicip.base.enums import WeatherCondition, WeatherDayTime
import pytest
from homeassistant import config_entries
from homeassistant.components.homematicip_cloud import (
DOMAIN as HMIPC_DOMAIN,
async_setup as hmip_async_setup,
)
from homeassistant.components.homematicip_cloud.const import (
HMIPC_AUTHTOKEN,
HMIPC_HAPID,
HMIPC_NAME,
HMIPC_PIN,
)
from homeassistant.components.homematicip_cloud.hap import HomematicipHAP
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .helper import AUTH_TOKEN, HAPID, HAPPIN, HomeFactory
from tests.async_mock import AsyncMock, MagicMock, Mock, patch
from tests.common import MockConfigEntry
from tests.components.light.conftest import mock_light_profiles # noqa
@pytest.fixture(name="mock_connection")
def mock_connection_fixture() -> AsyncConnection:
"""Return a mocked connection."""
connection = MagicMock(spec=AsyncConnection)
def _rest_call_side_effect(path, body=None):
return path, body
connection._restCall.side_effect = ( # pylint: disable=protected-access
_rest_call_side_effect
)
connection.api_call = AsyncMock(return_value=True)
connection.init = AsyncMock(side_effect=True)
return connection
@pytest.fixture(name="hmip_config_entry")
def hmip_config_entry_fixture() -> config_entries.ConfigEntry:
"""Create a mock config entriy for homematic ip cloud."""
entry_data = {
HMIPC_HAPID: HAPID,
HMIPC_AUTHTOKEN: AUTH_TOKEN,
HMIPC_NAME: "",
HMIPC_PIN: HAPPIN,
}
config_entry = MockConfigEntry(
version=1,
domain=HMIPC_DOMAIN,
title=HAPID,
unique_id=HAPID,
data=entry_data,
source=SOURCE_IMPORT,
connection_class=config_entries.CONN_CLASS_CLOUD_PUSH,
system_options={"disable_new_entities": False},
)
return config_entry
@pytest.fixture(name="default_mock_hap_factory")
async def default_mock_hap_factory_fixture(
hass: HomeAssistantType, mock_connection, hmip_config_entry
) -> HomematicipHAP:
"""Create a mocked homematic access point."""
return HomeFactory(hass, mock_connection, hmip_config_entry)
@pytest.fixture(name="hmip_config")
def hmip_config_fixture() -> ConfigType:
"""Create a config for homematic ip cloud."""
entry_data = {
HMIPC_HAPID: HAPID,
HMIPC_AUTHTOKEN: AUTH_TOKEN,
HMIPC_NAME: "",
HMIPC_PIN: HAPPIN,
}
return {HMIPC_DOMAIN: [entry_data]}
@pytest.fixture(name="dummy_config")
def dummy_config_fixture() -> ConfigType:
"""Create a dummy config."""
return {"blabla": None}
@pytest.fixture(name="mock_hap_with_service")
async def mock_hap_with_service_fixture(
hass: HomeAssistantType, default_mock_hap_factory, dummy_config
) -> HomematicipHAP:
"""Create a fake homematic access point with hass services."""
mock_hap = await default_mock_hap_factory.async_get_mock_hap()
await hmip_async_setup(hass, dummy_config)
await hass.async_block_till_done()
hass.data[HMIPC_DOMAIN] = {HAPID: mock_hap}
return mock_hap
@pytest.fixture(name="simple_mock_home")
def simple_mock_home_fixture():
"""Return a simple mocked connection."""
mock_home = Mock(
spec=AsyncHome,
name="Demo",
devices=[],
groups=[],
location=Mock(),
weather=Mock(
temperature=0.0,
weatherCondition=WeatherCondition.UNKNOWN,
weatherDayTime=WeatherDayTime.DAY,
minTemperature=0.0,
maxTemperature=0.0,
humidity=0,
windSpeed=0.0,
windDirection=0,
vaporAmount=0.0,
),
id=42,
dutyCycle=88,
connected=True,
currentAPVersion="2.0.36",
)
with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome",
autospec=True,
return_value=mock_home,
):
yield
@pytest.fixture(name="mock_connection_init")
def mock_connection_init_fixture():
"""Return a simple mocked connection."""
with patch(
"homeassistant.components.homematicip_cloud.hap.AsyncHome.init",
return_value=None,
), patch(
"homeassistant.components.homematicip_cloud.hap.AsyncAuth.init",
return_value=None,
):
yield
@pytest.fixture(name="simple_mock_auth")
def simple_mock_auth_fixture() -> AsyncAuth:
"""Return a simple AsyncAuth Mock."""
return Mock(spec=AsyncAuth, pin=HAPPIN, create=True)
|
import numpy as np
import unittest
import chainer
from chainer.testing import attr
from chainercv.links.model.light_head_rcnn.global_context_module \
import GlobalContextModule
def _random_array(xp, shape):
return xp.array(
np.random.uniform(-1, 1, size=shape), dtype=np.float32)
class TestGlobalContextModule(unittest.TestCase):
def setUp(self):
self.in_channels = 4
self.mid_channels = 4
self.out_channels = 4
self.ksize = 7
self.H = 24
self.W = 32
self.global_context_module = GlobalContextModule(
self.in_channels, self.mid_channels,
self.out_channels, self.ksize)
def check_call(self):
xp = self.global_context_module.xp
x = chainer.Variable(
_random_array(xp, (1, self.in_channels, self.H, self.W)))
y = self.global_context_module(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, xp.ndarray)
self.assertEqual(y.shape, (1, self.out_channels, self.H, self.W))
def test_call_cpu(self):
self.check_call()
@attr.gpu
def test_call_gpu(self):
self.global_context_module.to_gpu()
self.check_call()
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import errors
from perfkitbenchmarker.linux_packages import python
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
# Responses for error returning remote commands
_RESPONSE_BAD = '', '', 1
PYTHON_MISSING = _RESPONSE_BAD
PYTHON_PATH_MISSING = _RESPONSE_BAD
ALTERNATIVES_MISSING = _RESPONSE_BAD
ALTERNATIVES_CALL_BAD = _RESPONSE_BAD
PYTHON_VERSION_CALL_BAD = _RESPONSE_BAD
# Responses for non-error remote commands
_RESPONSE_GOOD = '', '', 0
PYTHON_VERSION_2 = '', 'Python 2.7', 0
ALTERNATIVES_FOUND = _RESPONSE_GOOD
PYTHON_PATH_FOUND = _RESPONSE_GOOD
ALTERNATIVES_CALL_GOOD = _RESPONSE_GOOD
class PythonTest(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(PythonTest, self).setUp()
FLAGS['default_timeout'].parse(0) # due to @retry
def RunSetDefault(self,
responses,
expected_last_call=None):
vm = mock.Mock()
vm.PYTHON_PACKAGE = 'python2'
vm.RemoteCommandWithReturnCode.side_effect = responses
python._SetDefaultPythonIfNeeded(vm, '/usr/bin/python2')
if expected_last_call:
vm.RemoteCommandWithReturnCode.assert_called_with(
expected_last_call, ignore_failure=True)
self.assertLen(vm.RemoteCommandWithReturnCode.call_args_list,
len(responses))
@mock.patch.object(python, '_SetDefaultPythonIfNeeded')
def testYumCall(self, mock_set_default):
vm = mock.Mock()
vm.PYTHON_PACKAGE = 'python3'
python.YumInstall(vm)
mock_set_default.assert_called_with(vm, '/usr/bin/python3')
@mock.patch.object(python, '_SetDefaultPythonIfNeeded')
def testAptCall(self, mock_set_default):
vm = mock.Mock()
python.AptInstall(vm)
mock_set_default.assert_called_with(vm, '/usr/bin/python2')
def testDefaultPythonAlreadySet(self):
responses = [PYTHON_VERSION_2]
expected = 'python --version'
self.RunSetDefault(responses, expected)
def testNoAlternativesProgram(self):
responses = [PYTHON_MISSING, ALTERNATIVES_MISSING]
expected = 'which update-alternatives'
self.RunSetDefault(responses, expected)
def testMissingPythonPath(self):
responses = [PYTHON_MISSING, ALTERNATIVES_FOUND, PYTHON_PATH_MISSING]
expected = 'ls /usr/bin/python2'
self.RunSetDefault(responses, expected)
def testBadAlternativesResponse(self):
responses = [
PYTHON_MISSING, ALTERNATIVES_FOUND, PYTHON_PATH_FOUND,
ALTERNATIVES_CALL_BAD
]
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
self.RunSetDefault(responses)
def testNoPythonVersionAfterSet(self):
responses = [
PYTHON_MISSING, ALTERNATIVES_FOUND, PYTHON_PATH_FOUND,
ALTERNATIVES_CALL_GOOD, PYTHON_VERSION_CALL_BAD
]
with self.assertRaises(errors.Setup.PythonPackageRequirementUnfulfilled):
self.RunSetDefault(responses)
def testPythonVersionAfterSet(self):
responses = [
PYTHON_MISSING, ALTERNATIVES_FOUND, PYTHON_PATH_FOUND,
ALTERNATIVES_CALL_GOOD, PYTHON_VERSION_2
]
expected = 'python --version'
self.RunSetDefault(responses, expected)
if __name__ == '__main__':
unittest.main()
|
from collections import OrderedDict
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from .const import CONF_ENDPOINT, DOMAIN
SONGPAL_CONFIG_SCHEMA = vol.Schema(
{vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_ENDPOINT): cv.string}
)
CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.All(cv.ensure_list, [SONGPAL_CONFIG_SCHEMA])},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: OrderedDict) -> bool:
"""Set up songpal environment."""
conf = config.get(DOMAIN)
if conf is None:
return True
for config_entry in conf:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config_entry,
),
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up songpal media player."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "media_player")
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload songpal media player."""
return await hass.config_entries.async_forward_entry_unload(entry, "media_player")
|
import os
import pytest
QtWebEngineWidgets = pytest.importorskip("PyQt5.QtWebEngineWidgets")
QWebEngineProfile = QtWebEngineWidgets.QWebEngineProfile
from qutebrowser.utils import javascript
DEFAULT_BODY_BG = "rgba(0, 0, 0, 0)"
GREEN_BODY_BG = "rgb(0, 255, 0)"
CSS_BODY_GREEN = "body {background-color: rgb(0, 255, 0);}"
CSS_BODY_RED = "body {background-color: rgb(255, 0, 0);}"
class StylesheetTester:
"""Helper class (for the stylesheet_tester fixture) for asserts.
Attributes:
js: The js_tester fixture.
config_stub: The config stub object.
"""
def __init__(self, js_tester, config_stub):
self.js = js_tester
self.config_stub = config_stub
def init_stylesheet(self, css_file="green.css"):
"""Initialize the stylesheet with a provided css file."""
css_path = os.path.join(os.path.dirname(__file__), css_file)
self.config_stub.val.content.user_stylesheets = css_path
def set_css(self, css):
"""Set document style to `css` via stylesheet.js."""
code = javascript.assemble('stylesheet', 'set_css', css)
self.js.run(code, None)
def check_set(self, value, css_style="background-color",
document_element="document.body"):
"""Check whether the css in ELEMENT is set to VALUE."""
self.js.run("console.log({document});"
"window.getComputedStyle({document}, null)"
".getPropertyValue({prop});".format(
document=document_element,
prop=javascript.to_js(css_style)),
value)
def check_eq(self, one, two, true=True):
"""Check if one and two are equal."""
self.js.run("{} === {};".format(one, two), true)
@pytest.fixture
def stylesheet_tester(js_tester_webengine, config_stub):
"""Helper fixture to test stylesheets."""
ss_tester = StylesheetTester(js_tester_webengine, config_stub)
ss_tester.js.tab.show()
return ss_tester
@pytest.mark.parametrize('page', ['stylesheet/simple.html',
'stylesheet/simple_bg_set_red.html'])
def test_set_delayed(stylesheet_tester, page):
"""Test a delayed invocation of set_css."""
stylesheet_tester.js.load(page)
stylesheet_tester.init_stylesheet("none.css")
stylesheet_tester.set_css("body {background-color: rgb(0, 255, 0);}")
stylesheet_tester.check_set("rgb(0, 255, 0)")
@pytest.mark.parametrize('page', ['stylesheet/simple.html',
'stylesheet/simple_bg_set_red.html'])
def test_set_clear_bg(stylesheet_tester, page):
"""Test setting and clearing the stylesheet."""
stylesheet_tester.js.load(page)
stylesheet_tester.init_stylesheet()
stylesheet_tester.check_set(GREEN_BODY_BG)
stylesheet_tester.set_css("")
stylesheet_tester.check_set(DEFAULT_BODY_BG)
def test_set_xml(stylesheet_tester):
"""Test stylesheet is applied without altering xml files."""
stylesheet_tester.js.load_file('stylesheet/simple.xml')
stylesheet_tester.init_stylesheet()
stylesheet_tester.check_set(GREEN_BODY_BG)
stylesheet_tester.check_eq('"html"', "document.documentElement.nodeName")
def test_set_svg(stylesheet_tester):
"""Test stylesheet is applied for svg files."""
stylesheet_tester.js.load_file('../../../misc/cheatsheet.svg')
stylesheet_tester.init_stylesheet()
stylesheet_tester.check_set(GREEN_BODY_BG,
document_element="document.documentElement")
stylesheet_tester.check_eq('"svg"', "document.documentElement.nodeName")
@pytest.mark.skip(reason="Too flaky, see #3771")
def test_set_error(stylesheet_tester, config_stub):
"""Test stylesheet modifies file not found error pages."""
config_stub.changed.disconnect() # This test is flaky otherwise...
stylesheet_tester.init_stylesheet()
stylesheet_tester.js.tab._init_stylesheet()
stylesheet_tester.js.load_file('non-existent.html', force=True)
stylesheet_tester.check_set(GREEN_BODY_BG)
def test_appendchild(stylesheet_tester):
stylesheet_tester.js.load('stylesheet/simple.html')
stylesheet_tester.init_stylesheet()
js_test_file_path = ('../tests/unit/javascript/stylesheet/'
'test_appendchild.js')
stylesheet_tester.js.run_file(js_test_file_path, {})
|
import os.path as op
from ..base import BaseRaw
from ..utils import _read_segments_file, _file_size
from ..meas_info import create_info
from ...utils import logger, verbose, warn, fill_doc
@fill_doc
def read_raw_eximia(fname, preload=False, verbose=None):
"""Reader for an eXimia EEG file.
Parameters
----------
fname : str
Path to the eXimia data file (.nxe).
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawEximia
A Raw object containing eXimia data.
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
return RawEximia(fname, preload, verbose)
@fill_doc
class RawEximia(BaseRaw):
"""Raw object from an Eximia EEG file.
Parameters
----------
fname : str
Path to the eXimia data file (.nxe).
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
data_name = op.basename(fname)
logger.info('Loading %s' % data_name)
# Create vhdr and vmrk files so that we can use mne_brain_vision2fiff
n_chan = 64
sfreq = 1450.
# data are multiplexed int16
ch_names = ['GateIn', 'Trig1', 'Trig2', 'EOG']
ch_types = ['stim', 'stim', 'stim', 'eog']
cals = [0.0015259021896696422, 0.0015259021896696422,
0.0015259021896696422, 0.3814755474174106]
ch_names += ('Fp1 Fpz Fp2 AF1 AFz AF2 '
'F7 F3 F1 Fz F2 F4 F8 '
'FT9 FT7 FC5 FC3 FC1 FCz FC2 FC4 FC6 FT8 FT10 '
'T7 C5 C3 C1 Cz C2 C4 C6 T8 '
'TP9 TP7 CP5 CP3 CP1 CPz CP2 CP4 CP6 TP8 TP10 '
'P9 P7 P3 P1 Pz P2 P4 P8 '
'P10 PO3 POz PO4 O1 Oz O2 Iz'.split())
n_eeg = len(ch_names) - len(cals)
cals += [0.07629510948348212] * n_eeg
ch_types += ['eeg'] * n_eeg
assert len(ch_names) == n_chan
info = create_info(ch_names, sfreq, ch_types)
n_bytes = _file_size(fname)
n_samples, extra = divmod(n_bytes, (n_chan * 2))
if extra != 0:
warn('Incorrect number of samples in file (%s), the file is '
'likely truncated' % (n_samples,))
for ch, cal in zip(info['chs'], cals):
ch['cal'] = cal
super(RawEximia, self).__init__(
info, preload=preload, last_samps=(n_samples - 1,),
filenames=[fname], orig_format='short')
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a chunk of raw data."""
_read_segments_file(
self, data, idx, fi, start, stop, cals, mult, dtype='<i2')
|
from homeassistant.components.emulated_roku.binding import (
ATTR_APP_ID,
ATTR_COMMAND_TYPE,
ATTR_KEY,
ATTR_SOURCE_NAME,
EVENT_ROKU_COMMAND,
ROKU_COMMAND_KEYDOWN,
ROKU_COMMAND_KEYPRESS,
ROKU_COMMAND_KEYUP,
ROKU_COMMAND_LAUNCH,
EmulatedRoku,
)
from tests.async_mock import AsyncMock, Mock, patch
async def test_events_fired_properly(hass):
"""Test that events are fired correctly."""
binding = EmulatedRoku(
hass, "Test Emulated Roku", "1.2.3.4", 8060, None, None, None
)
events = []
roku_event_handler = None
def instantiate(
loop,
handler,
roku_usn,
host_ip,
listen_port,
advertise_ip=None,
advertise_port=None,
bind_multicast=None,
):
nonlocal roku_event_handler
roku_event_handler = handler
return Mock(start=AsyncMock(), close=AsyncMock())
def listener(event):
events.append(event)
with patch(
"homeassistant.components.emulated_roku.binding.EmulatedRokuServer", instantiate
):
hass.bus.async_listen(EVENT_ROKU_COMMAND, listener)
assert await binding.setup() is True
assert roku_event_handler is not None
roku_event_handler.on_keydown("Test Emulated Roku", "A")
roku_event_handler.on_keyup("Test Emulated Roku", "A")
roku_event_handler.on_keypress("Test Emulated Roku", "C")
roku_event_handler.launch("Test Emulated Roku", "1")
await hass.async_block_till_done()
assert len(events) == 4
assert events[0].event_type == EVENT_ROKU_COMMAND
assert events[0].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYDOWN
assert events[0].data[ATTR_SOURCE_NAME] == "Test Emulated Roku"
assert events[0].data[ATTR_KEY] == "A"
assert events[1].event_type == EVENT_ROKU_COMMAND
assert events[1].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYUP
assert events[1].data[ATTR_SOURCE_NAME] == "Test Emulated Roku"
assert events[1].data[ATTR_KEY] == "A"
assert events[2].event_type == EVENT_ROKU_COMMAND
assert events[2].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_KEYPRESS
assert events[2].data[ATTR_SOURCE_NAME] == "Test Emulated Roku"
assert events[2].data[ATTR_KEY] == "C"
assert events[3].event_type == EVENT_ROKU_COMMAND
assert events[3].data[ATTR_COMMAND_TYPE] == ROKU_COMMAND_LAUNCH
assert events[3].data[ATTR_SOURCE_NAME] == "Test Emulated Roku"
assert events[3].data[ATTR_APP_ID] == "1"
|
from pytest import mark
from cerberus import errors
from cerberus.tests import assert_fail, assert_not_has_error, assert_success
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': 5}),
(assert_fail, {'field': -1}),
(assert_fail, {'field': 11}),
],
)
def test_allof(test_function, document):
test_function(
schema={'field': {'allof': [{'type': 'integer'}, {'min': 0}, {'max': 10}]}},
document=document,
)
def test_anyof_fails():
schema = {'field': {'type': 'integer', 'anyof': [{'min': 0}, {'min': 10}]}}
assert_fail(
document={'field': -1},
schema=schema,
error=(('field',), ('field', 'anyof'), errors.ANYOF, ({'min': 0}, {'min': 10})),
child_errors=[
(('field',), ('field', 'anyof', 0, 'min'), errors.MIN_VALUE, 0),
(('field',), ('field', 'anyof', 1, 'min'), errors.MIN_VALUE, 10),
],
)
assert_fail(document={'field': 5.5}, schema=schema)
assert_fail(document={'field': '5.5'}, schema=schema)
assert_fail(
schema={'field': {'anyof': [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}},
document={'field': 50},
)
@mark.parametrize(
("schema", "document"),
[
({'field': {'min': 0, 'max': 10}}, {'field': 5}),
(
{'field': {'anyof': [{'min': 0, 'max': 10}, {'min': 100, 'max': 110}]}},
{'field': 105},
),
(
{'field': {'type': 'integer', 'anyof': [{'min': 0}, {'min': 10}]}},
{'field': 10},
),
(
{'field': {'type': 'integer', 'anyof': [{'min': 0}, {'min': 10}]}},
{'field': 5},
),
],
)
def test_anyof_succeeds(schema, document):
assert_success(schema=schema, document=document)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': -1.5}),
(assert_success, {'field': -1}),
(assert_success, {'field': 11}),
(assert_success, {'field': 11.5}),
(assert_fail, {'field': 5}),
(assert_fail, {'field': 5.5}),
(assert_fail, {'field': '5.5'}),
],
)
def test_anyof_in_allof(test_function, document):
test_function(
schema={
'field': {
'allof': [
{'anyof': [{'type': 'float'}, {'type': 'integer'}]},
{'anyof': [{'min': 10}, {'max': 0}]},
]
}
},
document=document,
)
def test_anyof_in_itemsrules(validator):
# test that a list of schemas can be specified.
valid_parts = (
{
'schema': {
'model number': {'type': ('string',)},
'count': {'type': ('integer',)},
}
},
{'schema': {'serial number': {'type': (str,)}, 'count': {'type': (int,)}}},
)
valid_item = {'type': ('dict', 'string'), 'anyof': valid_parts}
schema = {'parts': {'type': 'list', 'itemsrules': valid_item}}
document = {
'parts': [
{'model number': 'MX-009', 'count': 100},
{'serial number': '898-001'},
'misc',
]
}
# document is valid. each entry in 'parts' matches a type or schema
assert_success(document=document, schema=schema, validator=validator)
document['parts'].append({'product name': "Monitors", 'count': 18})
# document is invalid. 'product name' does not match any valid schemas
assert_fail(document=document, schema=schema, validator=validator)
document['parts'].pop()
# document is valid again
assert_success(document=document, schema=schema, validator=validator)
document['parts'].append({'product name': "Monitors", 'count': 18})
document['parts'].append(10)
# and invalid. numbers are not allowed.
_errors = assert_fail(
document,
schema,
validator=validator,
error=('parts', ('parts', 'itemsrules'), errors.ITEMSRULES, valid_item),
child_errors=[
(('parts', 3), ('parts', 'itemsrules', 'anyof'), errors.ANYOF, valid_parts),
(
('parts', 4),
('parts', 'itemsrules', 'type'),
errors.TYPE,
('dict', 'string'),
),
],
)
assert_not_has_error(
_errors,
('parts', 4),
('parts', 'itemsrules', 'anyof'),
errors.ANYOF,
valid_parts,
)
# tests errors.BasicErrorHandler's tree representation
_errors = validator.errors
assert 'parts' in _errors
assert 3 in _errors['parts'][-1]
assert _errors['parts'][-1][3][0] == "no definitions validate"
scope = _errors['parts'][-1][3][-1]
assert 'anyof definition 0' in scope
assert 'anyof definition 1' in scope
assert scope['anyof definition 0'] == [{"product name": ["unknown field"]}]
assert scope['anyof definition 1'] == [{"product name": ["unknown field"]}]
assert _errors['parts'][-1][4] == ["must be one of these types: ('dict', 'string')"]
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': {'val': 0}}),
(assert_success, {'field': {'val': '0'}}),
(assert_fail, {'field': {'val': 1.1}}),
],
)
def test_anyof_with_semantically_equal_schemas(test_function, document):
test_function(
schema={
'field': {
'anyof': [
{'type': 'dict', 'schema': {'val': {'type': 'integer'}}},
{'type': 'dict', 'schema': {'val': {'type': 'string'}}},
]
}
},
document=document,
)
test_function(
schema={
'field': {
'type': 'dict',
'anyof': [
{'schema': {'val': {'type': 'integer'}}},
{'schema': {'val': {'type': 'string'}}},
],
}
},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': -1}),
(assert_fail, {'field': -5}),
(assert_fail, {'field': 1}),
(assert_fail, {'field': 5}),
(assert_fail, {'field': 11}),
(assert_fail, {'field': 15}),
],
)
def test_noneof(test_function, document):
test_function(
schema={
'field': {
'type': 'integer',
'noneof': [{'min': 0}, {'min': 10}, {'allowed': [-5, 5, 15]}],
}
},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': -5}),
(assert_success, {'field': 1}),
(assert_fail, {'field': -1}),
(assert_fail, {'field': 5}),
(assert_fail, {'field': 11}),
(assert_fail, {'field': 15}),
],
)
def test_oneof(test_function, document):
test_function(
schema={
'field': {
'type': 'integer',
'oneof': [{'min': 0}, {'min': 10}, {'allowed': [-5, 5, 15]}],
}
},
document=document,
)
def test_schema_is_not_spoiled(validator):
validator.schema = {
'field': {'type': 'integer', 'anyof': [{'min': 0}, {'min': 10}]}
}
assert 'type' not in validator.schema['field']['anyof'][0]
assert 'type' not in validator.schema['field']['anyof'][1]
assert 'allow_unknown' not in validator.schema['field']['anyof'][0]
assert 'allow_unknown' not in validator.schema['field']['anyof'][1]
@mark.parametrize("document", [{'field': 'bar'}, {'field': 23}])
def test_anyof_type(document):
assert_success(
schema={'field': {'anyof_type': ['string', 'integer']}}, document=document
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'oneof_schema': {'digits': 19}}),
(assert_success, {'oneof_schema': {'text': '84'}}),
(assert_fail, {'oneof_schema': {'digits': 19, 'text': '84'}}),
],
)
def test_oneof_schema(test_function, document):
test_function(
schema={
'oneof_schema': {
'type': 'dict',
'oneof_schema': [
{'digits': {'type': 'integer', 'min': 0, 'max': 99}},
{'text': {'type': 'string', 'regex': '^[0-9]{2}$'}},
],
}
},
document=document,
)
@mark.parametrize(
"document", [{'nested_oneof_type': {'foo': 'a'}}, {'nested_oneof_type': {'bar': 3}}]
)
def test_oneof_type_in_valuesrules(document):
assert_success(
schema={
'nested_oneof_type': {'valuesrules': {'oneof_type': ['string', 'integer']}}
},
document=document,
)
def test_oneof_type_in_oneof_schema(validator):
assert_fail(
schema={
'abc': {
'type': 'dict',
'oneof_schema': [
{
'foo': {
'type': 'dict',
'schema': {'bar': {'oneof_type': ['integer', 'float']}},
}
},
{'baz': {'type': 'string'}},
],
}
},
document={'abc': {'foo': {'bar': 'bad'}}},
validator=validator,
)
assert validator.errors == {
'abc': [
'none or more than one rule validate',
{
'oneof definition 0': [
{
'foo': [
{
'bar': [
'none or more than one rule validate',
{
'oneof definition 0': [
"must be one of these types: ('integer',)"
],
'oneof definition 1': [
"must be one of these " "types: ('float',)"
],
},
]
}
]
}
],
'oneof definition 1': [{'foo': ['unknown field']}],
},
]
}
def test_allow_unknown_in_oneof():
# https://github.com/pyeve/cerberus/issues/251
schema = {
'test': {
'oneof': (
{
'type': ('dict',),
'allow_unknown': True,
'schema': {'known': {'type': ('string',)}},
},
{'type': ('dict',), 'schema': {'known': {'type': ('string',)}}},
)
}
}
# Check for the regression and that allow_unknown does not cause any
# behaviour different from the expected one for oneof.
assert_fail(
schema=schema,
document={'test': {'known': 's'}},
error=('test', ('test', 'oneof'), errors.ONEOF, schema['test']['oneof']),
)
# check that allow_unknown is actually applied
assert_success(document={'test': {'known': 's', 'unknown': 'asd'}}, schema=schema)
|
import copy
import contextlib
import functools
from typing import (TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Mapping,
MutableMapping, MutableSequence, Optional, Tuple, cast)
from PyQt5.QtCore import pyqtSignal, QObject, QUrl
from qutebrowser.config import configdata, configexc, configutils
from qutebrowser.utils import utils, log, urlmatch
from qutebrowser.misc import objects
from qutebrowser.keyinput import keyutils
if TYPE_CHECKING:
from qutebrowser.config import configcache, configfiles
from qutebrowser.misc import savemanager
# An easy way to access the config from other code via config.val.foo
val = cast('ConfigContainer', None)
instance = cast('Config', None)
key_instance = cast('KeyConfig', None)
cache = cast('configcache.ConfigCache', None)
# Keeping track of all change filters to validate them later.
change_filters = []
# Sentinel
UNSET = object()
class change_filter: # noqa: N801,N806 pylint: disable=invalid-name
"""Decorator to filter calls based on a config section/option matching.
This could also be a function, but as a class (with a "wrong" name) it's
much cleaner to implement.
Attributes:
_option: An option or prefix to be filtered
_function: Whether a function rather than a method is decorated.
"""
def __init__(self, option: str, function: bool = False) -> None:
"""Save decorator arguments.
Gets called on parse-time with the decorator arguments.
Args:
option: The option to be filtered.
function: Whether a function rather than a method is decorated.
"""
self._option = option
self._function = function
change_filters.append(self)
def validate(self) -> None:
"""Make sure the configured option or prefix exists.
We can't do this in __init__ as configdata isn't ready yet.
"""
if (self._option not in configdata.DATA and
not configdata.is_valid_prefix(self._option)):
raise configexc.NoOptionError(self._option)
def check_match(self, option: Optional[str]) -> bool:
"""Check if the given option matches the filter."""
if option is None:
# Called directly, not from a config change event.
return True
elif option == self._option:
return True
elif option.startswith(self._option + '.'):
# prefix match
return True
else:
return False
def __call__(self, func: Callable) -> Callable:
"""Filter calls to the decorated function.
Gets called when a function should be decorated.
Adds a filter which returns early if we're not interested in the
change event and calls the wrapped function if we are.
We assume the function passed doesn't take any parameters.
Args:
func: The function to be decorated.
Return:
The decorated function.
"""
if self._function:
@functools.wraps(func)
def func_wrapper(option: str = None) -> Any:
"""Call the underlying function."""
if self.check_match(option):
return func()
return None
return func_wrapper
else:
@functools.wraps(func)
def meth_wrapper(wrapper_self: Any, option: str = None) -> Any:
"""Call the underlying function."""
if self.check_match(option):
return func(wrapper_self)
return None
return meth_wrapper
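# Hedged usage sketch (illustrative only; the option name below is an
# assumption, not taken from this module). A method decorated with
# change_filter is only invoked when the given option, or a sub-option of the
# given prefix, actually changed:
#
#     class StatusBar:
#
#         @change_filter('statusbar.show')
#         def _on_config_changed(self):
#             ...  # only reacts to statusbar.show changes
#
# For plain functions, pass function=True so the wrapper doesn't expect a
# `self` argument.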
class KeyConfig:
"""Utilities related to keybindings.
Note that the actual values are saved in the config itself, not here.
Attributes:
_config: The Config object to be used.
"""
_ReverseBindings = Dict[str, MutableSequence[str]]
def __init__(self, config: 'Config') -> None:
self._config = config
def _validate(self, key: keyutils.KeySequence, mode: str) -> None:
"""Validate the given key and mode."""
# Catch old usage of this code
assert isinstance(key, keyutils.KeySequence), key
if mode not in configdata.DATA['bindings.default'].default:
raise configexc.KeybindingError("Invalid mode {}!".format(mode))
def get_bindings_for(self, mode: str) -> Dict[keyutils.KeySequence, str]:
"""Get the combined bindings for the given mode."""
bindings = dict(val.bindings.default[mode])
for key, binding in val.bindings.commands[mode].items():
if not binding:
bindings.pop(key, None)
else:
bindings[key] = binding
return bindings
def get_reverse_bindings_for(self, mode: str) -> '_ReverseBindings':
"""Get a dict of commands to a list of bindings for the mode."""
cmd_to_keys: KeyConfig._ReverseBindings = {}
bindings = self.get_bindings_for(mode)
for seq, full_cmd in sorted(bindings.items()):
for cmd in full_cmd.split(';;'):
cmd = cmd.strip()
cmd_to_keys.setdefault(cmd, [])
# Put bindings involving modifiers last
if any(info.modifiers for info in seq):
cmd_to_keys[cmd].append(str(seq))
else:
cmd_to_keys[cmd].insert(0, str(seq))
return cmd_to_keys
def get_command(self,
key: keyutils.KeySequence,
mode: str,
default: bool = False) -> Optional[str]:
"""Get the command for a given key (or None)."""
self._validate(key, mode)
if default:
bindings = dict(val.bindings.default[mode])
else:
bindings = self.get_bindings_for(mode)
return bindings.get(key, None)
def bind(self,
key: keyutils.KeySequence,
command: str, *,
mode: str,
save_yaml: bool = False) -> None:
"""Add a new binding from key to command."""
if not command.strip():
raise configexc.KeybindingError(
"Can't add binding '{}' with empty command in {} "
'mode'.format(key, mode))
self._validate(key, mode)
log.keyboard.vdebug( # type: ignore[attr-defined]
"Adding binding {} -> {} in mode {}.".format(key, command, mode))
bindings = self._config.get_mutable_obj('bindings.commands')
if mode not in bindings:
bindings[mode] = {}
bindings[mode][str(key)] = command
self._config.update_mutables(save_yaml=save_yaml)
def bind_default(self,
key: keyutils.KeySequence, *,
mode: str = 'normal',
save_yaml: bool = False) -> None:
"""Restore a default keybinding."""
self._validate(key, mode)
bindings_commands = self._config.get_mutable_obj('bindings.commands')
try:
del bindings_commands[mode][str(key)]
except KeyError:
raise configexc.KeybindingError(
"Can't find binding '{}' in {} mode".format(key, mode))
self._config.update_mutables(save_yaml=save_yaml)
def unbind(self,
key: keyutils.KeySequence, *,
mode: str = 'normal',
save_yaml: bool = False) -> None:
"""Unbind the given key in the given mode."""
self._validate(key, mode)
bindings_commands = self._config.get_mutable_obj('bindings.commands')
if val.bindings.commands[mode].get(key, None) is not None:
# In custom bindings -> remove it
del bindings_commands[mode][str(key)]
elif key in val.bindings.default[mode]:
# In default bindings -> shadow it with None
if mode not in bindings_commands:
bindings_commands[mode] = {}
bindings_commands[mode][str(key)] = None
else:
raise configexc.KeybindingError(
"Can't find binding '{}' in {} mode".format(key, mode))
self._config.update_mutables(save_yaml=save_yaml)
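# Hedged usage sketch for KeyConfig (the key sequence and command are
# illustrative assumptions, not taken from this module):
#
#     seq = keyutils.KeySequence.parse('<Ctrl-t>')
#     key_instance.bind(seq, 'open -t', mode='normal', save_yaml=True)
#     assert key_instance.get_command(seq, 'normal') == 'open -t'
#     key_instance.unbind(seq, mode='normal')
#
# bind() and unbind() edit the mutable 'bindings.commands' object and then
# call update_mutables() so the change takes effect (and is optionally
# written back to the YAML config).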
class Config(QObject):
"""Main config object.
Class attributes:
MUTABLE_TYPES: Types returned from the config which could potentially
be mutated.
Attributes:
_values: A dict mapping setting names to configutils.Values objects.
_mutables: A dictionary of mutable objects to be checked for changes.
_yaml: A YamlConfig object or None.
Signals:
changed: Emitted with the option name when an option changed.
"""
MUTABLE_TYPES = (dict, list)
changed = pyqtSignal(str)
def __init__(self,
yaml_config: 'configfiles.YamlConfig',
parent: QObject = None) -> None:
super().__init__(parent)
self._mutables: MutableMapping[str, Tuple[Any, Any]] = {}
self._yaml = yaml_config
self._init_values()
self.yaml_loaded = False
self.config_py_loaded = False
self.warn_autoconfig = True
def _init_values(self) -> None:
"""Populate the self._values dict."""
self._values: MutableMapping[str, configutils.Values] = {}
for name, opt in configdata.DATA.items():
self._values[name] = configutils.Values(opt)
def __iter__(self) -> Iterator[configutils.Values]:
"""Iterate over configutils.Values items."""
yield from self._values.values()
def init_save_manager(self,
save_manager: 'savemanager.SaveManager') -> None:
"""Make sure the config gets saved properly.
We do this outside of __init__ because the config gets created before
the save_manager exists.
"""
self._yaml.init_save_manager(save_manager)
def _set_value(self,
opt: 'configdata.Option',
value: Any,
pattern: urlmatch.UrlPattern = None,
hide_userconfig: bool = False) -> None:
"""Set the given option to the given value."""
if not isinstance(objects.backend, objects.NoBackend):
if objects.backend not in opt.backends:
raise configexc.BackendError(opt.name, objects.backend,
opt.raw_backends)
opt.typ.to_py(value) # for validation
self._values[opt.name].add(opt.typ.from_obj(value),
pattern, hide_userconfig=hide_userconfig)
self.changed.emit(opt.name)
log.config.debug("Config option changed: {} = {}".format(
opt.name, value))
def _check_yaml(self, opt: 'configdata.Option', save_yaml: bool) -> None:
"""Make sure the given option may be set in autoconfig.yml."""
if save_yaml and opt.no_autoconfig:
raise configexc.NoAutoconfigError(opt.name)
def read_yaml(self) -> None:
"""Read the YAML settings from self._yaml."""
self._yaml.load()
self.yaml_loaded = True
for values in self._yaml:
for scoped in values:
self._set_value(values.opt, scoped.value,
pattern=scoped.pattern)
def get_opt(self, name: str) -> 'configdata.Option':
"""Get a configdata.Option object for the given setting."""
try:
return configdata.DATA[name]
except KeyError:
deleted = name in configdata.MIGRATIONS.deleted
renamed = configdata.MIGRATIONS.renamed.get(name)
exception = configexc.NoOptionError(
name, deleted=deleted, renamed=renamed)
raise exception from None
def ensure_has_opt(self, name: str) -> None:
"""Raise NoOptionError if the given setting does not exist."""
self.get_opt(name)
def get(self,
name: str,
url: QUrl = None, *,
fallback: bool = True) -> Any:
"""Get the given setting converted for Python code.
Args:
fallback: Use the global value if there's no URL-specific one.
"""
opt = self.get_opt(name)
obj = self.get_obj(name, url=url, fallback=fallback)
return opt.typ.to_py(obj)
def _maybe_copy(self, value: Any) -> Any:
"""Copy the value if it could potentially be mutated."""
if isinstance(value, self.MUTABLE_TYPES):
# For mutable objects, create a copy so we don't accidentally
# mutate the config's internal value.
return copy.deepcopy(value)
else:
# Shouldn't be mutable (and thus hashable)
assert value.__hash__ is not None, value
return value
def get_obj(self,
name: str, *,
url: QUrl = None,
fallback: bool = True) -> Any:
"""Get the given setting as object (for YAML/config.py).
Note that the returned values are not watched for mutation.
If a URL is given, return the value which should be used for that URL.
"""
self.ensure_has_opt(name)
value = self._values[name].get_for_url(url, fallback=fallback)
return self._maybe_copy(value)
def get_obj_for_pattern(
self, name: str, *,
pattern: Optional[urlmatch.UrlPattern]
) -> Any:
"""Get the given setting as object (for YAML/config.py).
This gets the overridden value for a given pattern, or
usertypes.UNSET if no such override exists.
"""
self.ensure_has_opt(name)
value = self._values[name].get_for_pattern(pattern, fallback=False)
return self._maybe_copy(value)
def get_mutable_obj(self, name: str, *,
pattern: urlmatch.UrlPattern = None) -> Any:
"""Get an object which can be mutated, e.g. in a config.py.
If a pattern is given, return the value for that pattern.
Note that it's impossible to get a mutable object for a URL as we
wouldn't know what pattern to apply.
"""
self.ensure_has_opt(name)
# If we allow mutation, there is a chance that prior mutations already
# entered the mutable dictionary and thus further copies are unneeded
# until update_mutables() is called
if name in self._mutables:
_copy, obj = self._mutables[name]
return obj
value = self._values[name].get_for_pattern(pattern)
copy_value = self._maybe_copy(value)
# Watch the returned object for changes if it's mutable.
if isinstance(copy_value, self.MUTABLE_TYPES):
self._mutables[name] = (value, copy_value) # old, new
return copy_value
def get_str(self, name: str, *,
pattern: urlmatch.UrlPattern = None) -> str:
"""Get the given setting as string.
If a pattern is given, get the setting for the given pattern or
usertypes.UNSET.
"""
opt = self.get_opt(name)
values = self._values[name]
value = values.get_for_pattern(pattern)
return opt.typ.to_str(value)
def set_obj(self, name: str,
value: Any, *,
pattern: urlmatch.UrlPattern = None,
save_yaml: bool = False,
hide_userconfig: bool = False) -> None:
"""Set the given setting from a YAML/config.py object.
If save_yaml=True is given, store the new value to YAML.
If hide_userconfig=True is given, hide the value from
dump_userconfig().
"""
opt = self.get_opt(name)
self._check_yaml(opt, save_yaml)
self._set_value(opt, value, pattern=pattern,
hide_userconfig=hide_userconfig)
if save_yaml:
self._yaml.set_obj(name, value, pattern=pattern)
def set_str(self, name: str,
value: str, *,
pattern: urlmatch.UrlPattern = None,
save_yaml: bool = False) -> None:
"""Set the given setting from a string.
If save_yaml=True is given, store the new value to YAML.
"""
opt = self.get_opt(name)
self._check_yaml(opt, save_yaml)
converted = opt.typ.from_str(value)
log.config.debug("Setting {} (type {}) to {!r} (converted from {!r})"
.format(name, opt.typ.__class__.__name__, converted,
value))
self._set_value(opt, converted, pattern=pattern)
if save_yaml:
self._yaml.set_obj(name, converted, pattern=pattern)
def unset(self, name: str, *,
save_yaml: bool = False,
pattern: urlmatch.UrlPattern = None) -> None:
"""Set the given setting back to its default."""
opt = self.get_opt(name)
self._check_yaml(opt, save_yaml)
changed = self._values[name].remove(pattern)
if changed:
self.changed.emit(name)
if save_yaml:
self._yaml.unset(name, pattern=pattern)
def clear(self, *, save_yaml: bool = False) -> None:
"""Clear all settings in the config.
If save_yaml=True is given, also remove all customization from the YAML
file.
"""
for name, values in self._values.items():
if values:
values.clear()
self.changed.emit(name)
if save_yaml:
self._yaml.clear()
def update_mutables(self, *, save_yaml: bool = False) -> None:
"""Update mutable settings if they changed.
Every time someone calls get_mutable_obj(), we save a reference to the
original mutable object and a copy.
Here, we check all those saved copies for mutations, and if something
mutated, we call set_obj again so we save the new value.
"""
for name, (old_value, new_value) in self._mutables.items():
if old_value != new_value:
log.config.debug("{} was mutated, updating".format(name))
self.set_obj(name, new_value, save_yaml=save_yaml)
self._mutables = {}
def dump_userconfig(self) -> str:
"""Get the part of the config which was changed by the user.
Return:
The changed config part as string.
"""
lines: List[str] = []
for values in sorted(self, key=lambda v: v.opt.name):
lines += values.dump()
if not lines:
return '<Default configuration>'
return '\n'.join(lines)
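# Hedged sketch of the mutable-tracking round trip described in
# update_mutables() above (the option name is an assumption):
#
#     lst = instance.get_mutable_obj('content.blocking.whitelist')
#     lst.append('https://example.com/*')       # mutate the returned copy
#     instance.update_mutables(save_yaml=True)  # detects the mutation and
#                                               # calls set_obj() again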
class ConfigContainer:
"""An object implementing config access via __getattr__.
Attributes:
_config: The Config object.
_prefix: The __getattr__ chain leading up to this object.
_configapi: If given, get values suitable for config.py and
add errors to the given ConfigAPI object.
_pattern: The URL pattern to be used.
"""
def __init__(self, config: Config,
configapi: 'configfiles.ConfigAPI' = None,
prefix: str = '',
pattern: urlmatch.UrlPattern = None) -> None:
self._config = config
self._prefix = prefix
self._configapi = configapi
self._pattern = pattern
if configapi is None and pattern is not None:
raise TypeError("Can't use pattern without configapi!")
def __repr__(self) -> str:
return utils.get_repr(self, constructor=True, config=self._config,
configapi=self._configapi, prefix=self._prefix,
pattern=self._pattern)
@contextlib.contextmanager
def _handle_error(self, action: str, name: str) -> Iterator[None]:
try:
yield
except configexc.Error as e:
if self._configapi is None:
raise
text = "While {} '{}'".format(action, name)
self._configapi.errors.append(configexc.ConfigErrorDesc(text, e))
def __getattr__(self, attr: str) -> Any:
"""Get an option or a new ConfigContainer with the added prefix.
If we get an option which exists, we return the value for it.
If we get a part of an option name, we return a new ConfigContainer.
Those two never overlap as configdata.py ensures there are no shadowing
options.
"""
if attr.startswith('_'):
return self.__getattribute__(attr)
name = self._join(attr)
if configdata.is_valid_prefix(name):
return ConfigContainer(config=self._config,
configapi=self._configapi,
prefix=name, pattern=self._pattern)
with self._handle_error('getting', name):
if self._configapi is None:
# access from Python code
return self._config.get(name)
else:
# access from config.py
return self._config.get_mutable_obj(
name, pattern=self._pattern)
def __setattr__(self, attr: str, value: Any) -> None:
"""Set the given option in the config."""
if attr.startswith('_'):
super().__setattr__(attr, value)
return
name = self._join(attr)
with self._handle_error('setting', name):
self._config.set_obj(name, value, pattern=self._pattern)
def _join(self, attr: str) -> str:
"""Get the prefix joined with the given attribute."""
if self._prefix:
return '{}.{}'.format(self._prefix, attr)
else:
return attr
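# Hedged example of the attribute-chain access ConfigContainer provides via
# the module-level `val` object (the option name is an assumption):
#
#     if val.tabs.show == 'always':      # roughly instance.get('tabs.show')
#         ...
#
# When constructed with a ConfigAPI (i.e. from a config.py), errors are
# collected on the API object instead of being raised immediately.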
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from .const import DOMAIN, NEXIA_DEVICE, UPDATE_COORDINATOR
from .entity import NexiaThermostatEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up sensors for a Nexia device."""
nexia_data = hass.data[DOMAIN][config_entry.entry_id]
nexia_home = nexia_data[NEXIA_DEVICE]
coordinator = nexia_data[UPDATE_COORDINATOR]
entities = []
for thermostat_id in nexia_home.get_thermostat_ids():
thermostat = nexia_home.get_thermostat_by_id(thermostat_id)
entities.append(
NexiaBinarySensor(
coordinator, thermostat, "is_blower_active", "Blower Active"
)
)
if thermostat.has_emergency_heat():
entities.append(
NexiaBinarySensor(
coordinator,
thermostat,
"is_emergency_heat_active",
"Emergency Heat Active",
)
)
async_add_entities(entities, True)
class NexiaBinarySensor(NexiaThermostatEntity, BinarySensorEntity):
"""Provices Nexia BinarySensor support."""
def __init__(self, coordinator, thermostat, sensor_call, sensor_name):
"""Initialize the nexia sensor."""
super().__init__(
coordinator,
thermostat,
name=f"{thermostat.get_name()} {sensor_name}",
unique_id=f"{thermostat.thermostat_id}_{sensor_call}",
)
self._call = sensor_call
self._state = None
@property
def is_on(self):
"""Return the status of the sensor."""
return getattr(self._thermostat, self._call)()
|
from typing import Dict, Optional, Set
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
PLATFORM_SCHEMA,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_STOP_COVER_TILT,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
SUPPORT_STOP_TILT,
CoverEntity,
)
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
CONF_NAME,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.core import CoreState, State
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_state_change_event
from . import GroupEntity
# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs
KEY_OPEN_CLOSE = "open_close"
KEY_STOP = "stop"
KEY_POSITION = "position"
DEFAULT_NAME = "Cover Group"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN),
}
)
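# Illustrative YAML accepted by the schema above (the entity ids are
# assumptions, not taken from this file):
#
#   cover:
#     - platform: group
#       name: Window Covers
#       entities:
#         - cover.living_room_blind
#         - cover.bedroom_blind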
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Group Cover platform."""
async_add_entities([CoverGroup(config[CONF_NAME], config[CONF_ENTITIES])])
class CoverGroup(GroupEntity, CoverEntity):
"""Representation of a CoverGroup."""
def __init__(self, name, entities):
"""Initialize a CoverGroup entity."""
self._name = name
self._is_closed = False
self._is_closing = False
self._is_opening = False
self._cover_position: Optional[int] = 100
self._tilt_position = None
self._supported_features = 0
self._assumed_state = True
self._entities = entities
self._covers: Dict[str, Set[str]] = {
KEY_OPEN_CLOSE: set(),
KEY_STOP: set(),
KEY_POSITION: set(),
}
self._tilts: Dict[str, Set[str]] = {
KEY_OPEN_CLOSE: set(),
KEY_STOP: set(),
KEY_POSITION: set(),
}
async def _update_supported_features_event(self, event):
self.async_set_context(event.context)
await self.async_update_supported_features(
event.data.get("entity_id"), event.data.get("new_state")
)
async def async_update_supported_features(
self,
entity_id: str,
new_state: Optional[State],
update_state: bool = True,
) -> None:
"""Update dictionaries with supported features."""
if not new_state:
for values in self._covers.values():
values.discard(entity_id)
for values in self._tilts.values():
values.discard(entity_id)
if update_state:
await self.async_defer_or_update_ha_state()
return
features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if features & (SUPPORT_OPEN | SUPPORT_CLOSE):
self._covers[KEY_OPEN_CLOSE].add(entity_id)
else:
self._covers[KEY_OPEN_CLOSE].discard(entity_id)
if features & (SUPPORT_STOP):
self._covers[KEY_STOP].add(entity_id)
else:
self._covers[KEY_STOP].discard(entity_id)
if features & (SUPPORT_SET_POSITION):
self._covers[KEY_POSITION].add(entity_id)
else:
self._covers[KEY_POSITION].discard(entity_id)
if features & (SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT):
self._tilts[KEY_OPEN_CLOSE].add(entity_id)
else:
self._tilts[KEY_OPEN_CLOSE].discard(entity_id)
if features & (SUPPORT_STOP_TILT):
self._tilts[KEY_STOP].add(entity_id)
else:
self._tilts[KEY_STOP].discard(entity_id)
if features & (SUPPORT_SET_TILT_POSITION):
self._tilts[KEY_POSITION].add(entity_id)
else:
self._tilts[KEY_POSITION].discard(entity_id)
if update_state:
await self.async_defer_or_update_ha_state()
async def async_added_to_hass(self):
"""Register listeners."""
for entity_id in self._entities:
new_state = self.hass.states.get(entity_id)
await self.async_update_supported_features(
entity_id, new_state, update_state=False
)
assert self.hass is not None
self.async_on_remove(
async_track_state_change_event(
self.hass, self._entities, self._update_supported_features_event
)
)
if self.hass.state == CoreState.running:
await self.async_update()
return
await super().async_added_to_hass()
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def assumed_state(self):
"""Enable buttons even if at end position."""
return self._assumed_state
@property
def supported_features(self):
"""Flag supported features for the cover."""
return self._supported_features
@property
def is_closed(self):
"""Return if all covers in group are closed."""
return self._is_closed
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self._is_opening
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self._is_closing
@property
def current_cover_position(self) -> Optional[int]:
"""Return current position for all covers."""
return self._cover_position
@property
def current_cover_tilt_position(self):
"""Return current tilt position for all covers."""
return self._tilt_position
@property
def device_state_attributes(self):
"""Return the state attributes for the cover group."""
return {ATTR_ENTITY_ID: self._entities}
async def async_open_cover(self, **kwargs):
"""Move the covers up."""
data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context
)
async def async_close_cover(self, **kwargs):
"""Move the covers down."""
data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True, context=self._context
)
async def async_stop_cover(self, **kwargs):
"""Fire the stop action."""
data = {ATTR_ENTITY_ID: self._covers[KEY_STOP]}
await self.hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context
)
async def async_set_cover_position(self, **kwargs):
"""Set covers position."""
data = {
ATTR_ENTITY_ID: self._covers[KEY_POSITION],
ATTR_POSITION: kwargs[ATTR_POSITION],
}
await self.hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_POSITION,
data,
blocking=True,
context=self._context,
)
async def async_open_cover_tilt(self, **kwargs):
"""Tilt covers open."""
data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True, context=self._context
)
async def async_close_cover_tilt(self, **kwargs):
"""Tilt covers closed."""
data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
DOMAIN, SERVICE_CLOSE_COVER_TILT, data, blocking=True, context=self._context
)
async def async_stop_cover_tilt(self, **kwargs):
"""Stop cover tilt."""
data = {ATTR_ENTITY_ID: self._tilts[KEY_STOP]}
await self.hass.services.async_call(
DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True, context=self._context
)
async def async_set_cover_tilt_position(self, **kwargs):
"""Set tilt position."""
data = {
ATTR_ENTITY_ID: self._tilts[KEY_POSITION],
ATTR_TILT_POSITION: kwargs[ATTR_TILT_POSITION],
}
await self.hass.services.async_call(
DOMAIN,
SERVICE_SET_COVER_TILT_POSITION,
data,
blocking=True,
context=self._context,
)
async def async_update(self):
"""Update state and attributes."""
self._assumed_state = False
self._is_closed = True
self._is_closing = False
self._is_opening = False
for entity_id in self._entities:
state = self.hass.states.get(entity_id)
if not state:
continue
if state.state == STATE_OPEN:
self._is_closed = False
break
if state.state == STATE_CLOSING:
self._is_closing = True
break
if state.state == STATE_OPENING:
self._is_opening = True
break
self._cover_position = None
if self._covers[KEY_POSITION]:
position = -1
self._cover_position = 0 if self.is_closed else 100
for entity_id in self._covers[KEY_POSITION]:
state = self.hass.states.get(entity_id)
pos = state.attributes.get(ATTR_CURRENT_POSITION)
if position == -1:
position = pos
elif position != pos:
self._assumed_state = True
break
else:
if position != -1:
self._cover_position = position
self._tilt_position = None
if self._tilts[KEY_POSITION]:
position = -1
self._tilt_position = 100
for entity_id in self._tilts[KEY_POSITION]:
state = self.hass.states.get(entity_id)
pos = state.attributes.get(ATTR_CURRENT_TILT_POSITION)
if position == -1:
position = pos
elif position != pos:
self._assumed_state = True
break
else:
if position != -1:
self._tilt_position = position
supported_features = 0
supported_features |= (
SUPPORT_OPEN | SUPPORT_CLOSE if self._covers[KEY_OPEN_CLOSE] else 0
)
supported_features |= SUPPORT_STOP if self._covers[KEY_STOP] else 0
supported_features |= SUPPORT_SET_POSITION if self._covers[KEY_POSITION] else 0
supported_features |= (
SUPPORT_OPEN_TILT | SUPPORT_CLOSE_TILT if self._tilts[KEY_OPEN_CLOSE] else 0
)
supported_features |= SUPPORT_STOP_TILT if self._tilts[KEY_STOP] else 0
supported_features |= (
SUPPORT_SET_TILT_POSITION if self._tilts[KEY_POSITION] else 0
)
self._supported_features = supported_features
if not self._assumed_state:
for entity_id in self._entities:
state = self.hass.states.get(entity_id)
if state and state.attributes.get(ATTR_ASSUMED_STATE):
self._assumed_state = True
break
|
import collections
import html
import logging
import re
import subprocess
import sys
logger = logging.getLogger(__name__)
Notification = collections.namedtuple('Notification', [
'title', 'subtitle', 'message'
])
class Notifier:
"""Sends a desktop notification.
This base class just discards the notification.
"""
def send(self, notification):
"""Send a notification."""
class DefaultNotifier(Notifier):
"""Notifier that picks the best implementation for the current platform."""
def __init__(self):
# TODO: Make this smarter.
if sys.platform == 'darwin':
self._notifier = AppleNotifier()
else:
self._notifier = DbusNotifier()
def send(self, notification):
self._notifier.send(notification)
class BellNotifier(Notifier):
"""Notifier that rings the terminal bell."""
def send(self, notification):
sys.stdout.write('\a')
class DbusNotifier(Notifier):
"""Notifier that creates a freedesktop.org notification.
The gdbus utility is used to avoid dependency on a DBus library.
If a new notification is created while a previous one is still open, the
previous notification is instantly replaced (see replaces_id).
"""
NOTIFY_CMD = [
'gdbus', 'call', '--session', '--dest',
'org.freedesktop.Notifications', '--object-path',
'/org/freedesktop/Notifications', '--method',
'org.freedesktop.Notifications.Notify', 'hangups', '{replaces_id}', '',
'{summary}', '{body}', '[]', '{{}}', ' -1'
]
RESULT_RE = re.compile(r'\(uint32 ([\d]+),\)')
def __init__(self):
self._replaces_id = 0
def send(self, notification):
output = _run_command(self.NOTIFY_CMD, dict(
summary=self._escape(notification.title),
body=self._escape(notification.message),
replaces_id=self._replaces_id,
))
try:
self._replaces_id = self.RESULT_RE.match(output).groups()[0]
except (AttributeError, IndexError) as e:
logger.info(
'Failed to parse notification command result: %s', e
)
@staticmethod
def _escape(text):
# Escape HTML-style markup:
res = html.escape(text, quote=False)
# Escape other characters that cause issues with how gdbus parses
# gvariants:
res = res.replace('\\', '\\\\')
res = res.replace('"', '\\u0022')
res = res.replace('\'', '\\u0027')
return res
class AppleNotifier(Notifier):
"""Notifier that displays an Apple macOS notification.
The osascript utility is used to display a notification using AppleScript.
"""
NOTIFY_CMD = [
'osascript', '-e',
('display notification "{message}" with '
'title "{title}" '
'subtitle "{subtitle}"'),
]
def send(self, notification):
_run_command(self.NOTIFY_CMD, dict(
title=self._escape(notification.title),
subtitle=self._escape(notification.subtitle),
message=self._escape(notification.message),
))
@staticmethod
def _escape(text):
# Escape double quotes:
return text.replace('"', '\\"')
def _run_command(args, format_values):
cmd = [arg.format(**format_values) for arg in args]
logger.info('Creating notification with command: %s', cmd)
try:
# Intentionally avoid using a shell to avoid a shell injection attack.
return subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode()
except (subprocess.CalledProcessError, FileNotFoundError) as e:
# Only log at INFO level to prevent spam when command isn't available.
logger.info('Notification command failed: %s', e)
return ''
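# Hedged manual check (not part of the original module): send a one-off
# notification with whichever notifier suits the current platform. Guarded so
# importing this module stays free of side effects; the text is illustrative.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    _demo = Notification(title='hangups', subtitle='demo', message='hello world')
    DefaultNotifier().send(_demo)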
|
import os
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
import amavis
from diamond.collector import Collector
##########################################################################
MOCK_PATH = os.path.join(os.path.dirname(__file__), 'mock-amavisd-agent')
class TestAmavisCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('AmavisCollector', {
'amavisd_exe': MOCK_PATH,
})
self.collector = amavis.AmavisCollector(config, None)
@patch.object(Collector, 'publish')
def test_publish(self, publish_mock):
self.collector.collect()
# a couple of the metrics contained in mock-amavisd-agent
metrics = {
'OutMsgsSizeProtoSMTP.size': 116,
'OutMsgsSizeProtoSMTP.frequency': 0,
'OutMsgsSizeProtoSMTP.percentage': 96.4,
'OutMsgsProtoSMTPRelay.count': 22778,
'OutMsgsProtoSMTPRelay.frequency': 41,
'OutMsgsProtoSMTPRelay.percentage': 71.5,
'TimeElapsedDecoding.time': 652,
'TimeElapsedDecoding.frequency': 0.024,
'virus.byname.Eicar-Test-Signature.count': 4436,
'virus.byname.Eicar-Test-Signature.frequency': 8,
'virus.byname.Eicar-Test-Signature.percentage': 100.0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
@patch('amavis.subprocess.Popen')
def test_amavisd_agent_command(self, popen_mock, publish_mock):
config = get_collector_config('AmavisCollector', {})
amavis.AmavisCollector(config, None).collect()
popen_mock.assert_called_with(
['/usr/sbin/amavisd-agent', '-c', '1'],
stdout=-1
)
@patch.object(Collector, 'publish')
@patch('amavis.subprocess.Popen')
def test_amavisd_agent_command_with_sudo(self, popen_mock, publish_mock):
config = get_collector_config('AmavisCollector', {
'use_sudo': True,
'sudo_user': 'chosen_sudo_user',
})
amavis.AmavisCollector(config, None).collect()
popen_mock.assert_called_with(
['/usr/bin/sudo', '-u', 'chosen_sudo_user', '--',
'/usr/sbin/amavisd-agent', '-c', '1'],
stdout=-1
)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import os.path as op
from functools import partial
import numpy as np
from numpy.testing import assert_array_equal, assert_equal
import pytest
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.patches import Circle
from mne import (read_evokeds, read_proj, make_fixed_length_events, Epochs,
compute_proj_evoked, find_layout, pick_types, create_info,
read_cov)
from mne.io.proj import make_eeg_average_ref_proj, Projection
from mne.io import read_raw_fif, read_info, RawArray
from mne.io.constants import FIFF
from mne.io.pick import pick_info, channel_indices_by_type
from mne.io.compensator import get_current_comp
from mne.channels import read_layout, make_dig_montage
from mne.datasets import testing
from mne.time_frequency.tfr import AverageTFR
from mne.viz import plot_evoked_topomap, plot_projs_topomap, topomap
from mne.viz.topomap import (_get_pos_outlines, _onselect, plot_topomap,
plot_arrowmap, plot_psds_topomap)
from mne.viz.utils import _find_peaks, _fake_click
from mne.utils import requires_sklearn
data_dir = testing.data_path(download=False)
subjects_dir = op.join(data_dir, 'subjects')
ecg_fname = op.join(data_dir, 'MEG', 'sample', 'sample_audvis_ecg-proj.fif')
triux_fname = op.join(data_dir, 'SSS', 'TRIUX', 'triux_bmlhus_erm_raw.fif')
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
evoked_fname = op.join(base_dir, 'test-ave.fif')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_name = op.join(base_dir, 'test-eve.fif')
ctf_fname = op.join(base_dir, 'test_ctf_comp_raw.fif')
layout = read_layout('Vectorview-all')
cov_fname = op.join(base_dir, 'test-cov.fif')
def test_plot_topomap_interactive():
"""Test interactive topomap projection plotting."""
evoked = read_evokeds(evoked_fname, baseline=(None, 0))[0]
evoked.pick_types(meg='mag')
evoked.info['projs'] = []
assert not evoked.proj
evoked.add_proj(compute_proj_evoked(evoked, n_mag=1))
plt.close('all')
fig = Figure()
canvas = FigureCanvas(fig)
ax = fig.gca()
kwargs = dict(vmin=-240, vmax=240, times=[0.1], colorbar=False, axes=ax,
res=8, time_unit='s')
evoked.copy().plot_topomap(proj=False, **kwargs)
canvas.draw()
image_noproj = np.frombuffer(canvas.tostring_rgb(), dtype='uint8')
assert len(plt.get_fignums()) == 1
ax.clear()
evoked.copy().plot_topomap(proj=True, **kwargs)
canvas.draw()
image_proj = np.frombuffer(canvas.tostring_rgb(), dtype='uint8')
assert not np.array_equal(image_noproj, image_proj)
assert len(plt.get_fignums()) == 1
ax.clear()
evoked.copy().plot_topomap(proj='interactive', **kwargs)
canvas.draw()
image_interactive = np.frombuffer(canvas.tostring_rgb(), dtype='uint8')
assert_array_equal(image_noproj, image_interactive)
assert not np.array_equal(image_proj, image_interactive)
assert len(plt.get_fignums()) == 2
proj_fig = plt.figure(plt.get_fignums()[-1])
_fake_click(proj_fig, proj_fig.axes[0], [0.5, 0.5], xform='data')
canvas.draw()
image_interactive_click = np.frombuffer(
canvas.tostring_rgb(), dtype='uint8')
assert_array_equal(image_proj, image_interactive_click)
assert not np.array_equal(image_noproj, image_interactive_click)
_fake_click(proj_fig, proj_fig.axes[0], [0.5, 0.5], xform='data')
canvas.draw()
image_interactive_click = np.frombuffer(
canvas.tostring_rgb(), dtype='uint8')
assert_array_equal(image_noproj, image_interactive_click)
assert not np.array_equal(image_proj, image_interactive_click)
@testing.requires_testing_data
def test_plot_projs_topomap():
"""Test plot_projs_topomap."""
projs = read_proj(ecg_fname)
info = read_info(raw_fname)
fast_test = {"res": 8, "contours": 0, "sensors": False}
plot_projs_topomap(projs, info=info, colorbar=True, **fast_test)
plt.close('all')
ax = plt.subplot(111)
projs[3].plot_topomap(info)
plot_projs_topomap(projs[:1], info, axes=ax, **fast_test) # test axes
plt.close('all')
triux_info = read_info(triux_fname)
plot_projs_topomap(triux_info['projs'][-1:], triux_info, **fast_test)
plt.close('all')
plot_projs_topomap(triux_info['projs'][:1], triux_info, **fast_test)
plt.close('all')
eeg_avg = make_eeg_average_ref_proj(info)
eeg_avg.plot_topomap(info, **fast_test)
plt.close('all')
# test vlims
for vlim in ('joint', (-1, 1), (None, 0.5), (0.5, None), (None, None)):
plot_projs_topomap(projs[:-1], info, vlim=vlim, colorbar=True)
plt.close('all')
eeg_proj = make_eeg_average_ref_proj(info)
info_meg = pick_info(info, pick_types(info, meg=True, eeg=False))
with pytest.raises(ValueError, match='No channel names in info match p'):
plot_projs_topomap([eeg_proj], info_meg)
def test_plot_topomap_animation(capsys):
"""Test topomap plotting."""
# evoked
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
# Test animation
_, anim = evoked.animate_topomap(ch_type='grad', times=[0, 0.1],
butterfly=False, time_unit='s',
verbose='debug')
anim._func(1) # _animate has to be tested separately on 'Agg' backend.
out, _ = capsys.readouterr()
assert 'Interpolation mode local to 0' in out
plt.close('all')
def test_plot_topomap_animation_nirs(fnirs_evoked, capsys):
"""Test topomap plotting for nirs data."""
fig, anim = fnirs_evoked.animate_topomap(ch_type='hbo', verbose='debug')
anim._func(1) # _animate has to be tested separately on 'Agg' backend.
out, _ = capsys.readouterr()
assert 'Interpolation mode head to 0' in out
assert len(fig.axes) == 2
plt.close('all')
@pytest.mark.slowtest
def test_plot_topomap_basic(monkeypatch):
"""Test basics of topomap plotting."""
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
res = 8
fast_test = dict(res=res, contours=0, sensors=False, time_unit='s')
fast_test_noscale = dict(res=res, contours=0, sensors=False)
ev_bad = evoked.copy().pick_types(meg=False, eeg=True)
ev_bad.pick_channels(ev_bad.ch_names[:2])
plt_topomap = partial(ev_bad.plot_topomap, **fast_test)
plt_topomap(times=ev_bad.times[:2] - 1e-6) # auto, plots EEG
pytest.raises(ValueError, plt_topomap, ch_type='mag')
pytest.raises(ValueError, plt_topomap, times=[-100]) # bad time
pytest.raises(ValueError, plt_topomap, times=[[0]]) # bad time
evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res,
contours=[-100, 0, 100], time_unit='ms')
# extrapolation to the edges of the convex hull or the head circle
evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res,
contours=[-100, 0, 100], time_unit='ms',
extrapolate='local')
evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res,
contours=[-100, 0, 100], time_unit='ms',
extrapolate='head')
evoked.plot_topomap([0.1], ch_type='eeg', scalings=1, res=res,
contours=[-100, 0, 100], time_unit='ms',
extrapolate='head', outlines='skirt')
# extrapolation options when < 4 channels:
temp_data = np.random.random(3)
picks = channel_indices_by_type(evoked.info)['mag'][:3]
info_sel = pick_info(evoked.info, picks)
plot_topomap(temp_data, info_sel, extrapolate='local', res=res)
plot_topomap(temp_data, info_sel, extrapolate='head', res=res)
# make sure extrapolation works for 3 channels with border='mean'
# (if extra points are placed incorrectly some of them have only
# other extra points as neighbours and border='mean' fails)
plot_topomap(temp_data, info_sel, extrapolate='local', border='mean',
res=res)
# border=0 and border='mean':
# ---------------------------
ch_pos = np.array(sum(([[0, 0, r], [r, 0, 0], [-r, 0, 0],
[0, -r, 0], [0, r, 0]]
for r in np.linspace(0.2, 1.0, 5)), []))
rng = np.random.RandomState(23)
data = np.full(len(ch_pos), 5) + rng.randn(len(ch_pos))
info = create_info(len(ch_pos), 250, 'eeg')
ch_pos_dict = {name: pos for name, pos in zip(info['ch_names'], ch_pos)}
dig = make_dig_montage(ch_pos_dict, coord_frame='head')
info.set_montage(dig)
# border=0
ax, _ = plot_topomap(data, info, extrapolate='head', border=0, sphere=1)
img_data = ax.get_array().data
assert np.abs(img_data[31, 31] - data[0]) < 0.12
assert np.abs(img_data[0, 0]) < 1.5
# border='mean'
ax, _ = plot_topomap(data, info, extrapolate='head', border='mean',
sphere=1)
img_data = ax.get_array().data
assert np.abs(img_data[31, 31] - data[0]) < 0.12
assert img_data[0, 0] > 5
# error when not numeric or str:
error_msg = 'border must be an instance of numeric or str'
with pytest.raises(TypeError, match=error_msg):
plot_topomap(data, info, extrapolate='head', border=[1, 2, 3])
# error when str is not 'mean':
error_msg = "The only allowed value is 'mean', but got 'fancy' instead."
with pytest.raises(ValueError, match=error_msg):
plot_topomap(data, info, extrapolate='head', border='fancy')
# test channel placement when only 'grad' are picked:
# ---------------------------------------------------
info_grad = evoked.copy().pick('grad').info
n_grads = len(info_grad['ch_names'])
data = np.random.randn(n_grads)
img, _ = plot_topomap(data, info_grad)
# check that channels are scattered around x == 0
pos = img.axes.collections[-1].get_offsets()
prop_channels_on_the_right = (pos[:, 0] > 0).mean()
assert prop_channels_on_the_right < 0.6
# other:
# ------
plt_topomap = partial(evoked.plot_topomap, **fast_test)
plt.close('all')
axes = [plt.subplot(221), plt.subplot(222)]
plt_topomap(axes=axes, colorbar=False)
plt.close('all')
plt_topomap(times=[-0.1, 0.2])
plt.close('all')
evoked_grad = evoked.copy().crop(0, 0).pick_types(meg='grad')
mask = np.zeros((204, 1), bool)
mask[[0, 3, 5, 6]] = True
names = []
def proc_names(x):
names.append(x)
return x[4:]
evoked_grad.plot_topomap(ch_type='grad', times=[0], mask=mask,
show_names=proc_names, **fast_test)
assert_equal(sorted(names),
['MEG 011x', 'MEG 012x', 'MEG 013x', 'MEG 014x'])
mask = np.zeros_like(evoked.data, dtype=bool)
mask[[1, 5], :] = True
plt_topomap(ch_type='mag', outlines=None)
times = [0.1]
plt_topomap(times, ch_type='grad', mask=mask)
plt_topomap(times, ch_type='planar1')
plt_topomap(times, ch_type='planar2')
plt_topomap(times, ch_type='grad', mask=mask, show_names=True,
mask_params={'marker': 'x'})
plt.close('all')
with pytest.raises(ValueError, match='number of seconds; got -'):
plt_topomap(times, ch_type='eeg', average=-1e3)
with pytest.raises(TypeError, match='number of seconds; got type'):
plt_topomap(times, ch_type='eeg', average='x')
p = plt_topomap(times, ch_type='grad', image_interp='bilinear',
show_names=lambda x: x.replace('MEG', ''))
subplot = [x for x in p.get_children() if 'Subplot' in str(type(x))]
assert len(subplot) >= 1, [type(x) for x in p.get_children()]
subplot = subplot[0]
have_all = all('MEG' not in x.get_text()
for x in subplot.get_children()
if isinstance(x, matplotlib.text.Text))
assert have_all
# Plot array
for ch_type in ('mag', 'grad'):
evoked_ = evoked.copy().pick_types(eeg=False, meg=ch_type)
plot_topomap(evoked_.data[:, 0], evoked_.info, **fast_test_noscale)
# fail with multiple channel types
pytest.raises(ValueError, plot_topomap, evoked.data[0, :], evoked.info)
# Test title
def get_texts(p):
return [x.get_text() for x in p.get_children() if
isinstance(x, matplotlib.text.Text)]
p = plt_topomap(times, ch_type='eeg', average=0.01)
assert_equal(len(get_texts(p)), 0)
p = plt_topomap(times, ch_type='eeg', title='Custom')
texts = get_texts(p)
assert_equal(len(texts), 1)
assert_equal(texts[0], 'Custom')
plt.close('all')
# delaunay triangulation warning
plt_topomap(times, ch_type='mag')
# projs have already been applied
pytest.raises(RuntimeError, plot_evoked_topomap, evoked, 0.1, 'mag',
proj='interactive', time_unit='s')
# change to no-proj mode
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0), proj=False)
fig1 = evoked.plot_topomap('interactive', 'mag', proj='interactive',
**fast_test)
_fake_click(fig1, fig1.axes[1], (0.5, 0.5)) # click slider
data_max = np.max(fig1.axes[0].images[0]._A)
fig2 = plt.gcf()
_fake_click(fig2, fig2.axes[0], (0.075, 0.775)) # toggle projector
# make sure projector gets toggled
assert (np.max(fig1.axes[0].images[0]._A) != data_max)
with monkeypatch.context() as m: # speed it up by not actually plotting
m.setattr(topomap, '_plot_topomap',
lambda *args, **kwargs: (None, None, None))
with pytest.warns(RuntimeWarning, match='More than 25 topomaps plots'):
plot_evoked_topomap(evoked, [0.1] * 26, colorbar=False)
pytest.raises(ValueError, plot_evoked_topomap, evoked, [-3e12, 15e6],
time_unit='s')
for ch in evoked.info['chs']:
if ch['coil_type'] == FIFF.FIFFV_COIL_EEG:
ch['loc'].fill(0)
# Remove extra digitization point, so EEG digitization points
# correspond with the EEG electrodes
del evoked.info['dig'][85]
# Plot skirt
evoked.plot_topomap(times, ch_type='eeg', outlines='skirt', **fast_test)
# Pass custom outlines without patch
eeg_picks = pick_types(evoked.info, meg=False, eeg=True)
pos, outlines = _get_pos_outlines(evoked.info, eeg_picks, 0.1)
evoked.plot_topomap(times, ch_type='eeg', outlines=outlines, **fast_test)
plt.close('all')
# Test interactive cmap
fig = plot_evoked_topomap(evoked, times=[0., 0.1], ch_type='eeg',
cmap=('Reds', True), title='title', **fast_test)
fig.canvas.key_press_event('up')
fig.canvas.key_press_event(' ')
fig.canvas.key_press_event('down')
cbar = fig.get_axes()[0].CB # Fake dragging with mouse.
ax = cbar.cbar.ax
_fake_click(fig, ax, (0.1, 0.1))
_fake_click(fig, ax, (0.1, 0.2), kind='motion')
_fake_click(fig, ax, (0.1, 0.3), kind='release')
_fake_click(fig, ax, (0.1, 0.1), button=3)
_fake_click(fig, ax, (0.1, 0.2), button=3, kind='motion')
_fake_click(fig, ax, (0.1, 0.3), kind='release')
fig.canvas.scroll_event(0.5, 0.5, -0.5) # scroll down
fig.canvas.scroll_event(0.5, 0.5, 0.5) # scroll up
plt.close('all')
# Pass custom outlines with patch callable
def patch():
return Circle((0.5, 0.4687), radius=.46,
clip_on=True, transform=plt.gca().transAxes)
outlines['patch'] = patch
plot_evoked_topomap(evoked, times, ch_type='eeg', outlines=outlines,
**fast_test)
# Remove digitization points. Now topomap should fail
evoked.info['dig'] = None
pytest.raises(RuntimeError, plot_evoked_topomap, evoked,
times, ch_type='eeg', time_unit='s')
plt.close('all')
# Error for missing names
n_channels = len(pos)
data = np.ones(n_channels)
pytest.raises(ValueError, plot_topomap, data, pos, show_names=True)
# Test error messages for invalid pos parameter
pos_1d = np.zeros(n_channels)
pos_3d = np.zeros((n_channels, 2, 2))
pytest.raises(ValueError, plot_topomap, data, pos_1d)
pytest.raises(ValueError, plot_topomap, data, pos_3d)
pytest.raises(ValueError, plot_topomap, data, pos[:3, :])
pos_x = pos[:, :1]
pos_xyz = np.c_[pos, np.zeros(n_channels)[:, np.newaxis]]
pytest.raises(ValueError, plot_topomap, data, pos_x)
pytest.raises(ValueError, plot_topomap, data, pos_xyz)
# An #channels x 4 matrix should work though. In this case (x, y, width,
# height) is assumed.
pos_xywh = np.c_[pos, np.zeros((n_channels, 2))]
plot_topomap(data, pos_xywh)
plt.close('all')
# Test peak finder
axes = [plt.subplot(131), plt.subplot(132)]
evoked.plot_topomap(times='peaks', axes=axes, **fast_test)
plt.close('all')
evoked.data = np.zeros(evoked.data.shape)
evoked.data[50][1] = 1
assert_array_equal(_find_peaks(evoked, 10), evoked.times[1])
evoked.data[80][100] = 1
assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 100]])
evoked.data[2][95] = 2
assert_array_equal(_find_peaks(evoked, 10), evoked.times[[1, 95]])
assert_array_equal(_find_peaks(evoked, 1), evoked.times[95])
# Test excluding bads channels
evoked_grad.info['bads'] += [evoked_grad.info['ch_names'][0]]
orig_bads = evoked_grad.info['bads']
evoked_grad.plot_topomap(ch_type='grad', times=[0], time_unit='ms')
assert_array_equal(evoked_grad.info['bads'], orig_bads)
plt.close('all')
def test_plot_tfr_topomap():
"""Test plotting of TFR data."""
raw = read_raw_fif(raw_fname)
times = np.linspace(-0.1, 0.1, 200)
res = 8
n_freqs = 3
nave = 1
rng = np.random.RandomState(42)
picks = [93, 94, 96, 97, 21, 22, 24, 25, 129, 130, 315, 316, 2, 5, 8, 11]
info = pick_info(raw.info, picks)
data = rng.randn(len(picks), n_freqs, len(times))
tfr = AverageTFR(info, data, times, np.arange(n_freqs), nave)
tfr.plot_topomap(ch_type='mag', tmin=0.05, tmax=0.150, fmin=0, fmax=10,
res=res, contours=0)
eclick = matplotlib.backend_bases.MouseEvent(
'button_press_event', plt.gcf().canvas, 0, 0, 1)
eclick.xdata = eclick.ydata = 0.1
eclick.inaxes = plt.gca()
erelease = matplotlib.backend_bases.MouseEvent(
'button_release_event', plt.gcf().canvas, 0.9, 0.9, 1)
erelease.xdata = 0.3
erelease.ydata = 0.2
pos = np.array([[0.11, 0.11], [0.25, 0.5], [0.0, 0.2], [0.2, 0.39]])
_onselect(eclick, erelease, tfr, pos, 'grad', 1, 3, 1, 3, 'RdBu_r', list())
_onselect(eclick, erelease, tfr, pos, 'mag', 1, 3, 1, 3, 'RdBu_r', list())
eclick.xdata = eclick.ydata = 0.
erelease.xdata = erelease.ydata = 0.9
tfr._onselect(eclick, erelease, None, 'mean', None)
plt.close('all')
# test plot_psds_topomap
info = raw.info.copy()
chan_inds = channel_indices_by_type(info)
info = pick_info(info, chan_inds['grad'][:4])
fig, axes = plt.subplots()
freqs = np.arange(3., 9.5)
bands = [(4, 8, 'Theta')]
psd = np.random.rand(len(info['ch_names']), freqs.shape[0])
plot_psds_topomap(psd, freqs, info, bands=bands, axes=[axes])
def test_ctf_plotting():
"""Test CTF topomap plotting."""
raw = read_raw_fif(ctf_fname, preload=True)
assert raw.compensation_grade == 3
events = make_fixed_length_events(raw, duration=0.01)
assert len(events) > 10
evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average()
assert get_current_comp(evoked.info) == 3
# smoke test that compensation does not matter
evoked.plot_topomap(time_unit='s')
# better test that topomaps can still be used without plotting ref
evoked.pick_types(meg=True, ref_meg=False)
evoked.plot_topomap()
@pytest.mark.slowtest # can be slow on OSX
@testing.requires_testing_data
def test_plot_arrowmap():
"""Test arrowmap plotting."""
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
with pytest.raises(ValueError, match='Multiple channel types'):
plot_arrowmap(evoked.data[:, 0], evoked.info)
evoked_eeg = evoked.copy().pick_types(meg=False, eeg=True)
with pytest.raises(ValueError, match='Multiple channel types'):
plot_arrowmap(evoked_eeg.data[:, 0], evoked.info)
evoked_mag = evoked.copy().pick_types(meg='mag')
evoked_grad = evoked.copy().pick_types(meg='grad')
plot_arrowmap(evoked_mag.data[:, 0], evoked_mag.info)
plot_arrowmap(evoked_grad.data[:, 0], evoked_grad.info,
info_to=evoked_mag.info)
@testing.requires_testing_data
def test_plot_topomap_neuromag122():
"""Test topomap plotting."""
res = 8
fast_test = dict(res=res, contours=0, sensors=False)
evoked = read_evokeds(evoked_fname, 'Left Auditory',
baseline=(None, 0))
evoked.pick_types(meg='grad')
evoked.pick_channels(evoked.ch_names[:122])
ch_names = ['MEG %03d' % k for k in range(1, 123)]
for c in evoked.info['chs']:
c['coil_type'] = FIFF.FIFFV_COIL_NM_122
evoked.rename_channels({c_old: c_new for (c_old, c_new) in
zip(evoked.ch_names, ch_names)})
layout = find_layout(evoked.info)
assert layout.kind.startswith('Neuromag_122')
evoked.plot_topomap(times=[0.1], **fast_test)
proj = Projection(active=False,
desc="test", kind=1,
data=dict(nrow=1, ncol=122,
row_names=None,
col_names=evoked.ch_names, data=np.ones(122)),
explained_var=0.5)
plot_projs_topomap([proj], evoked.info, **fast_test)
def test_plot_topomap_bads():
"""Test plotting topomap with bad channels (gh-7213)."""
data = np.random.RandomState(0).randn(3, 1000)
raw = RawArray(data, create_info(3, 1000., 'eeg'))
ch_pos_dict = {name: pos for name, pos in zip(raw.ch_names, np.eye(3))}
raw.info.set_montage(make_dig_montage(ch_pos_dict, coord_frame='head'))
for count in range(3):
raw.info['bads'] = raw.ch_names[:count]
raw.info._check_consistency()
plot_topomap(data[:, 0], raw.info)
plt.close('all')
def test_plot_topomap_nirs_overlap(fnirs_epochs):
"""Test plotting nirs topomap with overlapping channels (gh-7414)."""
fig = fnirs_epochs['A'].average(picks='hbo').plot_topomap()
assert len(fig.axes) == 5
plt.close('all')
@requires_sklearn
def test_plot_topomap_nirs_ica(fnirs_epochs):
"""Test plotting nirs ica topomap."""
from mne.preprocessing import ICA
fnirs_epochs = fnirs_epochs.load_data().pick(picks='hbo')
fnirs_epochs = fnirs_epochs.pick(picks=range(30))
ica = ICA().fit(fnirs_epochs)
fig = ica.plot_components()
assert len(fig[0].axes) == 20
plt.close('all')
def test_plot_cov_topomap():
"""Test plotting a covariance topomap."""
cov = read_cov(cov_fname)
info = read_info(evoked_fname)
cov.plot_topomap(info)
cov.plot_topomap(info, noise_cov=cov)
plt.close('all')
|
from pylatex import Document, LongTable, MultiColumn
def generate_longtabu():
geometry_options = {
"margin": "2.54cm",
"includeheadfoot": True
}
doc = Document(page_numbers=True, geometry_options=geometry_options)
# Generate data table
with doc.create(LongTable("l l l")) as data_table:
data_table.add_hline()
data_table.add_row(["header 1", "header 2", "header 3"])
data_table.add_hline()
data_table.end_table_header()
data_table.add_hline()
data_table.add_row((MultiColumn(3, align='r',
data='Continued on Next Page'),))
data_table.add_hline()
data_table.end_table_footer()
data_table.add_hline()
data_table.add_row((MultiColumn(3, align='r',
data='Not Continued on Next Page'),))
data_table.add_hline()
data_table.end_table_last_footer()
row = ["Content1", "9", "Longer String"]
for i in range(150):
data_table.add_row(row)
doc.generate_pdf("longtable", clean_tex=False)
generate_longtabu()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl._enum_module import enum
from absl.flags import _argument_parser
from absl.testing import absltest
from absl.testing import parameterized
import six
class ArgumentParserTest(absltest.TestCase):
def test_instance_cache(self):
parser1 = _argument_parser.FloatParser()
parser2 = _argument_parser.FloatParser()
self.assertIs(parser1, parser2)
def test_parse_wrong_type(self):
parser = _argument_parser.ArgumentParser()
with self.assertRaises(TypeError):
parser.parse(0)
if bytes is not str:
# In PY3, it does not accept bytes.
with self.assertRaises(TypeError):
parser.parse(b'')
class BooleanParserTest(absltest.TestCase):
def setUp(self):
self.parser = _argument_parser.BooleanParser()
def test_parse_bytes(self):
if six.PY2:
self.assertTrue(self.parser.parse(b'true'))
else:
with self.assertRaises(TypeError):
self.parser.parse(b'true')
def test_parse_str(self):
self.assertTrue(self.parser.parse('true'))
def test_parse_unicode(self):
self.assertTrue(self.parser.parse(u'true'))
def test_parse_wrong_type(self):
with self.assertRaises(TypeError):
self.parser.parse(1.234)
def test_parse_str_false(self):
self.assertFalse(self.parser.parse('false'))
def test_parse_integer(self):
self.assertTrue(self.parser.parse(1))
def test_parse_invalid_integer(self):
with self.assertRaises(ValueError):
self.parser.parse(-1)
def test_parse_invalid_str(self):
with self.assertRaises(ValueError):
self.parser.parse('nottrue')
class FloatParserTest(absltest.TestCase):
def setUp(self):
self.parser = _argument_parser.FloatParser()
def test_parse_string(self):
self.assertEqual(1.5, self.parser.parse('1.5'))
def test_parse_wrong_type(self):
with self.assertRaises(TypeError):
self.parser.parse(False)
class IntegerParserTest(absltest.TestCase):
def setUp(self):
self.parser = _argument_parser.IntegerParser()
def test_parse_string(self):
self.assertEqual(1, self.parser.parse('1'))
def test_parse_wrong_type(self):
with self.assertRaises(TypeError):
self.parser.parse(1e2)
with self.assertRaises(TypeError):
self.parser.parse(False)
class EnumParserTest(absltest.TestCase):
def test_empty_values(self):
with self.assertRaises(ValueError):
_argument_parser.EnumParser([])
def test_parse(self):
parser = _argument_parser.EnumParser(['apple', 'banana'])
self.assertEqual('apple', parser.parse('apple'))
def test_parse_not_found(self):
parser = _argument_parser.EnumParser(['apple', 'banana'])
with self.assertRaises(ValueError):
parser.parse('orange')
class Fruit(enum.Enum):
APPLE = 1
BANANA = 2
class EmptyEnum(enum.Enum):
pass
class MixedCaseEnum(enum.Enum):
APPLE = 1
BANANA = 2
apple = 3
class EnumClassParserTest(parameterized.TestCase):
def test_requires_enum(self):
with self.assertRaises(TypeError):
_argument_parser.EnumClassParser(['apple', 'banana'])
def test_requires_non_empty_enum_class(self):
with self.assertRaises(ValueError):
_argument_parser.EnumClassParser(EmptyEnum)
def test_case_sensitive_rejects_duplicates(self):
unused_normal_parser = _argument_parser.EnumClassParser(MixedCaseEnum)
with self.assertRaisesRegex(ValueError, 'Duplicate.+apple'):
_argument_parser.EnumClassParser(MixedCaseEnum, case_sensitive=False)
def test_parse_string(self):
parser = _argument_parser.EnumClassParser(Fruit)
self.assertEqual(Fruit.APPLE, parser.parse('APPLE'))
def test_parse_string_case_sensitive(self):
parser = _argument_parser.EnumClassParser(Fruit)
with self.assertRaises(ValueError):
parser.parse('apple')
@parameterized.parameters('APPLE', 'apple', 'Apple')
def test_parse_string_case_insensitive(self, value):
parser = _argument_parser.EnumClassParser(Fruit, case_sensitive=False)
self.assertIs(Fruit.APPLE, parser.parse(value))
def test_parse_literal(self):
parser = _argument_parser.EnumClassParser(Fruit)
self.assertEqual(Fruit.APPLE, parser.parse(Fruit.APPLE))
def test_parse_not_found(self):
parser = _argument_parser.EnumClassParser(Fruit)
with self.assertRaises(ValueError):
parser.parse('ORANGE')
@parameterized.parameters((Fruit.BANANA, False, 'BANANA'),
(Fruit.BANANA, True, 'banana'))
def test_serialize_parse(self, value, lowercase, expected):
serializer = _argument_parser.EnumClassSerializer(lowercase=lowercase)
parser = _argument_parser.EnumClassParser(
Fruit, case_sensitive=not lowercase)
serialized = serializer.serialize(value)
self.assertEqual(serialized, expected)
self.assertEqual(value, parser.parse(expected))
class HelperFunctionsTest(absltest.TestCase):
def test_is_integer_type(self):
self.assertTrue(_argument_parser._is_integer_type(1))
# Note that isinstance(False, int) == True.
self.assertFalse(_argument_parser._is_integer_type(False))
if __name__ == '__main__':
absltest.main()
|
from somfy_mylink_synergy import SomfyMyLinkSynergy
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
CONF_ENTITY_CONFIG = "entity_config"
CONF_SYSTEM_ID = "system_id"
CONF_REVERSE = "reverse"
CONF_DEFAULT_REVERSE = "default_reverse"
DATA_SOMFY_MYLINK = "somfy_mylink_data"
DOMAIN = "somfy_mylink"
SOMFY_MYLINK_COMPONENTS = ["cover"]
def validate_entity_config(values):
"""Validate config entry for CONF_ENTITY."""
entity_config_schema = vol.Schema({vol.Optional(CONF_REVERSE): cv.boolean})
if not isinstance(values, dict):
raise vol.Invalid("expected a dictionary")
entities = {}
for entity_id, config in values.items():
entity = cv.entity_id(entity_id)
config = entity_config_schema(config)
entities[entity] = config
return entities
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_SYSTEM_ID): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=44100): cv.port,
vol.Optional(CONF_DEFAULT_REVERSE, default=False): cv.boolean,
vol.Optional(CONF_ENTITY_CONFIG, default={}): validate_entity_config,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the MyLink platform."""
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
system_id = config[DOMAIN][CONF_SYSTEM_ID]
entity_config = config[DOMAIN][CONF_ENTITY_CONFIG]
entity_config[CONF_DEFAULT_REVERSE] = config[DOMAIN][CONF_DEFAULT_REVERSE]
somfy_mylink = SomfyMyLinkSynergy(system_id, host, port)
hass.data[DATA_SOMFY_MYLINK] = somfy_mylink
for component in SOMFY_MYLINK_COMPONENTS:
hass.async_create_task(
async_load_platform(hass, component, DOMAIN, entity_config, config)
)
return True
|
import logging
import pyombi
import voluptuous as vol
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from .const import (
ATTR_NAME,
ATTR_SEASON,
CONF_URLBASE,
DEFAULT_PORT,
DEFAULT_SEASON,
DEFAULT_SSL,
DEFAULT_URLBASE,
DOMAIN,
SERVICE_MOVIE_REQUEST,
SERVICE_MUSIC_REQUEST,
SERVICE_TV_REQUEST,
)
_LOGGER = logging.getLogger(__name__)
def urlbase(value) -> str:
"""Validate and transform urlbase."""
if value is None:
raise vol.Invalid("string value is None")
value = str(value).strip("/")
if not value:
return value
return f"{value}/"
SUBMIT_MOVIE_REQUEST_SERVICE_SCHEMA = vol.Schema({vol.Required(ATTR_NAME): cv.string})
SUBMIT_MUSIC_REQUEST_SERVICE_SCHEMA = vol.Schema({vol.Required(ATTR_NAME): cv.string})
SUBMIT_TV_REQUEST_SERVICE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_NAME): cv.string,
vol.Optional(ATTR_SEASON, default=DEFAULT_SEASON): vol.In(
["first", "latest", "all"]
),
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Exclusive(CONF_API_KEY, "auth"): cv.string,
vol.Exclusive(CONF_PASSWORD, "auth"): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_URLBASE, default=DEFAULT_URLBASE): urlbase,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
},
cv.has_at_least_one_key("auth"),
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Ombi component platform."""
ombi = pyombi.Ombi(
ssl=config[DOMAIN][CONF_SSL],
host=config[DOMAIN][CONF_HOST],
port=config[DOMAIN][CONF_PORT],
urlbase=config[DOMAIN][CONF_URLBASE],
username=config[DOMAIN][CONF_USERNAME],
password=config[DOMAIN].get(CONF_PASSWORD),
api_key=config[DOMAIN].get(CONF_API_KEY),
)
try:
ombi.authenticate()
ombi.test_connection()
except pyombi.OmbiError as err:
_LOGGER.warning("Unable to setup Ombi: %s", err)
return False
hass.data[DOMAIN] = {"instance": ombi}
def submit_movie_request(call):
"""Submit request for movie."""
name = call.data[ATTR_NAME]
movies = ombi.search_movie(name)
if movies:
movie = movies[0]
ombi.request_movie(movie["theMovieDbId"])
else:
raise Warning("No movie found.")
def submit_tv_request(call):
"""Submit request for TV show."""
name = call.data[ATTR_NAME]
tv_shows = ombi.search_tv(name)
if tv_shows:
season = call.data[ATTR_SEASON]
show = tv_shows[0]["id"]
if season == "first":
ombi.request_tv(show, request_first=True)
elif season == "latest":
ombi.request_tv(show, request_latest=True)
elif season == "all":
ombi.request_tv(show, request_all=True)
else:
raise Warning("No TV show found.")
def submit_music_request(call):
"""Submit request for music album."""
name = call.data[ATTR_NAME]
music = ombi.search_music_album(name)
if music:
ombi.request_music(music[0]["foreignAlbumId"])
else:
raise Warning("No music album found.")
hass.services.register(
DOMAIN,
SERVICE_MOVIE_REQUEST,
submit_movie_request,
schema=SUBMIT_MOVIE_REQUEST_SERVICE_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MUSIC_REQUEST,
submit_music_request,
schema=SUBMIT_MUSIC_REQUEST_SERVICE_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_TV_REQUEST,
submit_tv_request,
schema=SUBMIT_TV_REQUEST_SERVICE_SCHEMA,
)
hass.helpers.discovery.load_platform("sensor", DOMAIN, {}, config)
return True
|
import copy
from datetime import timedelta
import json
from hatasmota.utils import (
get_topic_stat_status,
get_topic_stat_switch,
get_topic_tele_sensor,
get_topic_tele_will,
)
from homeassistant.components import binary_sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import (
ATTR_ASSUMED_STATE,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
import homeassistant.util.dt as dt_util
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message, async_fire_time_changed
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test normal state update
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"OFF"}')
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Test periodic state update
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"ON"}')
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/SENSOR", '{"Switch1":"OFF"}')
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Test polled state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS8", '{"StatusSNS":{"Switch1":"ON"}}'
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(
hass, "tasmota_49A3BC/stat/STATUS8", '{"StatusSNS":{"Switch1":"OFF"}}'
)
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_friendly_names(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["swc"][0] = 1
config["swc"][1] = 1
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.tasmota_binary_sensor_1")
assert state.state == "unavailable"
assert state.attributes.get("friendly_name") == "Tasmota binary_sensor 1"
state = hass.states.get("binary_sensor.beer")
assert state.state == "unavailable"
assert state.attributes.get("friendly_name") == "Beer"
async def test_off_delay(hass, mqtt_mock, setup_tasmota):
"""Test off_delay option."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 13 # PUSHON: 1s off_delay
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event.data["new_state"].state)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
assert events == ["off"]
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert events == ["off", "on"]
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/SWITCH1", '{"STATE":"ON"}')
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert events == ["off", "on", "on"]
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
assert events == ["off", "on", "on", "off"]
async def test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test availability after MQTT disconnection."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
await help_test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, binary_sensor.DOMAIN, config
)
async def test_availability(hass, mqtt_mock, setup_tasmota):
"""Test availability."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
await help_test_availability(hass, mqtt_mock, binary_sensor.DOMAIN, config)
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test availability discovery update."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
await help_test_availability_discovery_update(
hass, mqtt_mock, binary_sensor.DOMAIN, config
)
async def test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test polling after MQTT connection (re)established."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
poll_topic = "tasmota_49A3BC/cmnd/STATUS"
await help_test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, binary_sensor.DOMAIN, config, poll_topic, "8"
)
async def test_discovery_removal_binary_sensor(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered binary_sensor."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config1["swc"][0] = 1
config2["swc"][0] = 0
await help_test_discovery_removal(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, config1, config2
)
async def test_discovery_update_unchanged_binary_sensor(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test update of discovered binary_sensor."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
with patch(
"homeassistant.components.tasmota.binary_sensor.TasmotaBinarySensor.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, config, discovery_update
)
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
unique_id = f"{DEFAULT_CONFIG['mac']}_binary_sensor_switch_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, binary_sensor.DOMAIN, unique_id, config
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
topics = [
get_topic_stat_switch(config, 0),
get_topic_tele_sensor(config),
get_topic_stat_status(config, 8),
get_topic_tele_will(config),
]
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, binary_sensor.DOMAIN, config, topics
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test MQTT discovery update when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["swc"][0] = 1
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, binary_sensor.DOMAIN, config
)
|
import pytest
from mne.io.constants import FIFF
from mne.io.write import start_file, write_int
def test_write_int(tmpdir):
"""Test that write_int raises an error on bad values."""
with start_file(tmpdir.join('temp.fif')) as fid:
write_int(fid, FIFF.FIFF_MNE_EVENT_LIST, [2147483647]) # 2 ** 31 - 1
write_int(fid, FIFF.FIFF_MNE_EVENT_LIST, [])  # an empty list is allowed
with pytest.raises(TypeError, match=r'.*exceeds max.*EVENT_LIST\)'):
write_int(fid, FIFF.FIFF_MNE_EVENT_LIST, [2147483648]) # 2 ** 31
with pytest.raises(TypeError, match='Cannot safely write'):
write_int(fid, FIFF.FIFF_MNE_EVENT_LIST, [0.]) # float
|
import json
import os
import numpy as np
import pytest
from queue import Queue
from yandextank.common.util import get_test_path
from conftest import MAX_TS, random_split
from yandextank.aggregator import TankAggregator
from yandextank.aggregator.aggregator import Aggregator, DataPoller
from yandextank.aggregator.chopper import TimeChopper
from yandextank.plugins.Phantom.reader import string_to_df
from netort.data_processing import Drain
AGGR_CONFIG = TankAggregator.load_config()
class TestPipeline(object):
def test_partially_reversed_data(self, data):
results_queue = Queue()
chunks = list(random_split(data))
chunks[5], chunks[6] = chunks[6], chunks[5]
pipeline = Aggregator(
TimeChopper(
DataPoller(
source=chunks, poll_period=0.1), cache_size=3),
AGGR_CONFIG,
False)
drain = Drain(pipeline, results_queue)
drain.run()
assert results_queue.qsize() == MAX_TS
def test_slow_producer(self, data):
results_queue = Queue()
chunks = list(random_split(data))
chunks[5], chunks[6] = chunks[6], chunks[5]
def producer():
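# Simulate a slow producer by occasionally yielding None before delivering the next chunk.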
for chunk in chunks:
if np.random.random() > 0.5:
yield None
yield chunk
pipeline = Aggregator(
TimeChopper(
DataPoller(
source=producer(), poll_period=0.1), cache_size=3),
AGGR_CONFIG,
False)
drain = Drain(pipeline, results_queue)
drain.run()
assert results_queue.qsize() == MAX_TS
@pytest.mark.parametrize('phout, expected_results', [
('yandextank/aggregator/tests/phout2927', 'yandextank/aggregator/tests/phout2927res.jsonl')
])
def test_invalid_ammo(self, phout, expected_results):
with open(os.path.join(get_test_path(), phout)) as fp:
reader = [string_to_df(line) for line in fp.readlines()]
pipeline = Aggregator(
TimeChopper(
DataPoller(source=reader, poll_period=0),
cache_size=3),
AGGR_CONFIG,
True)
with open(os.path.join(get_test_path(), expected_results)) as fp:
expected_results_parsed = json.load(fp)
for item, expected_result in zip(pipeline, expected_results_parsed):
for key, expected_value in expected_result.items():
assert item[key] == expected_value
|
from __future__ import division
import numpy as np
import os
from six.moves.urllib import request
import unittest
from chainer import testing
from chainercv.evaluations import calc_detection_voc_ap
from chainercv.evaluations import calc_detection_voc_prec_rec
from chainercv.evaluations import eval_detection_voc
@testing.parameterize(*(
testing.product_dict(
[{
'pred_bboxes': [
[[0, 0, 1, 1], [0, 0, 2, 2], [0.3, 0.3, 0.5, 0.5]],
],
'pred_labels': [
[0, 0, 0],
],
'pred_scores': [
[0.8, 0.9, 1],
],
'gt_bboxes': [
[[0, 0, 1, 0.9]],
],
'gt_labels': [
[0],
],
}],
[
{
'iou_thresh': 0.5,
'prec': [
[0, 0, 1 / 3],
],
'rec': [
[0, 0, 1],
],
},
{
'iou_thresh': 0.97,
'prec': [
[0, 0, 0],
],
'rec': [
[0, 0, 0],
],
},
]
) +
[
{
'pred_bboxes': [
[[0, 4, 1, 5], [0, 0, 1, 1]],
[[0, 0, 2, 2], [2, 2, 3, 3], [5, 5, 7, 7]],
],
'pred_labels': [
[0, 0],
[0, 2, 2],
],
'pred_scores': [
[1, 0.9],
[0.7, 0.6, 0.8],
],
'gt_bboxes': [
[[0, 0, 1, 1], [1, 0, 4, 4]],
[[2, 2, 3, 3]],
],
'gt_labels': [
[0, 0],
[2],
],
'iou_thresh': 0.4,
'prec': [
[0, 0.5, 1 / 3],
None,
[0, 0.5],
],
'rec': [
[0, 0.5, 0.5],
None,
[0, 1],
],
},
{
'pred_bboxes': [
[[0, 0, 1, 1], [0, 0, 2, 2], [0.3, 0.3, 0.5, 0.5]],
],
'pred_labels': [
[0, 0, 0],
],
'pred_scores': [
[0.8, 0.9, 1],
],
'gt_bboxes': [
[[0, 0, 1, 0.9], [1., 1., 2., 2.]],
],
'gt_labels': [
[0, 0],
],
'gt_difficults': [
[False, True],
],
'iou_thresh': 0.5,
'prec': [
[0, 0, 1 / 3],
],
'rec': [
[0, 0, 1],
],
},
{
'pred_bboxes': [
[[0, 4, 1, 5], [0, 0, 1, 1]],
[[0, 0, 2, 2], [2, 2, 3, 3], [2, 2, 3, 3]],
],
'pred_labels': [
[0, 0],
[0, 2, 2],
],
'pred_scores': [
[1, 0.9],
[0.7, 0.6, 0.8],
],
'gt_bboxes': [
[[0, 0, 1, 1], [1, 0, 4, 4]],
[[2, 2, 3, 3]],
],
'gt_labels': [
[0, 0],
[2],
],
'gt_difficults': [
[False, False],
[True],
],
'iou_thresh': 0.4,
'prec': [
[0, 0.5, 1 / 3],
None,
[np.nan, np.nan],
],
'rec': [
[0, 0.5, 0.5],
None,
None,
],
},
]
))
class TestCalcDetectionVOCPrecRec(unittest.TestCase):
def setUp(self):
self.pred_bboxes = (np.array(bbox) for bbox in self.pred_bboxes)
self.pred_labels = (np.array(label) for label in self.pred_labels)
self.pred_scores = (np.array(score) for score in self.pred_scores)
self.gt_bboxes = (np.array(bbox) for bbox in self.gt_bboxes)
self.gt_labels = (np.array(label) for label in self.gt_labels)
if hasattr(self, 'gt_difficults'):
self.gt_difficults = (
np.array(difficult) for difficult in self.gt_difficults)
else:
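# With gt_difficults=None, every ground-truth box is treated as non-difficult.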
self.gt_difficults = None
def test_calc_detection_voc_prec_rec(self):
prec, rec = calc_detection_voc_prec_rec(
self.pred_bboxes, self.pred_labels, self.pred_scores,
self.gt_bboxes, self.gt_labels, self.gt_difficults,
iou_thresh=self.iou_thresh)
self.assertEqual(len(prec), len(self.prec))
for prec_l, expected_prec_l in zip(prec, self.prec):
if prec_l is None and expected_prec_l is None:
continue
np.testing.assert_equal(prec_l, expected_prec_l)
self.assertEqual(len(rec), len(self.rec))
for rec_l, expected_rec_l in zip(rec, self.rec):
if rec_l is None and expected_rec_l is None:
continue
np.testing.assert_equal(rec_l, expected_rec_l)
@testing.parameterize(
{'use_07_metric': False,
'ap': [0.25, np.nan, 0.5]},
{'use_07_metric': True,
'ap': [0.5 / 11 * 6, np.nan, 0.5]},
)
class TestCalcDetectionVOCAP(unittest.TestCase):
prec = [[0, 0.5, 1 / 3], None, [np.nan, 0.5]]
rec = [[0, 0.5, 0.5], None, [0, 1]]
def setUp(self):
self.prec = [
np.array(prec_l) if prec_l is not None else None
for prec_l in self.prec]
self.rec = [
np.array(rec_l) if rec_l is not None else None
for rec_l in self.rec]
def test_calc_detection_voc_ap(self):
ap = calc_detection_voc_ap(
self.prec, self.rec, use_07_metric=self.use_07_metric)
np.testing.assert_almost_equal(ap, self.ap)
class TestEvalDetectionVOCAP(unittest.TestCase):
@classmethod
def setUpClass(cls):
base_url = 'https://chainercv-models.preferred.jp/tests'
cls.dataset = np.load(request.urlretrieve(os.path.join(
base_url, 'eval_detection_voc_dataset_2017_06_06.npz'))[0],
allow_pickle=True)
cls.result = np.load(request.urlretrieve(os.path.join(
base_url, 'eval_detection_voc_result_2017_06_06.npz'))[0],
allow_pickle=True)
def test_eval_detection_voc(self):
pred_bboxes = self.result['bboxes']
pred_labels = self.result['labels']
pred_scores = self.result['scores']
gt_bboxes = self.dataset['bboxes']
gt_labels = self.dataset['labels']
gt_difficults = self.dataset['difficults']
result = eval_detection_voc(
pred_bboxes, pred_labels, pred_scores,
gt_bboxes, gt_labels, gt_difficults,
use_07_metric=True)
# these scores were calculated by MATLAB code
expected = [
0.772727,
0.738780,
0.957576,
0.640153,
0.579473,
1.000000,
0.970030,
1.000000,
0.705931,
0.678719,
0.863636,
1.000000,
1.000000,
0.561364,
0.798813,
0.712121,
0.939394,
0.563636,
0.927273,
0.654545,
]
np.testing.assert_almost_equal(result['ap'], expected, decimal=5)
np.testing.assert_almost_equal(
result['map'], np.nanmean(expected), decimal=5)
testing.run_module(__name__, __file__)
|
import os.path as op
from collections import namedtuple
import re
import numpy as np
from datetime import datetime, timezone
from ..base import BaseRaw
from ..meas_info import create_info
from ..tag import _coil_trans_to_loc
from ..utils import _read_segments_file, _mult_cal_one
from ..constants import FIFF
from ..ctf.trans import _quaternion_align
from ...surface import _normal_orth
from ...transforms import (apply_trans, Transform, get_ras_to_neuromag_trans,
combine_transforms, invert_transform,
_angle_between_quats, rot_to_quat)
from ...utils import (check_fname, check_version, logger, verbose, warn,
_check_fname)
from ...annotations import Annotations
FILE_EXTENSIONS = {
"Curry 7": {
"info": ".dap",
"data": ".dat",
"labels": ".rs3",
"events": ".cef",
"hpi": ".hpi",
},
"Curry 8": {
"info": ".cdt.dpa",
"data": ".cdt",
"labels": ".cdt.dpa",
"events": ".cdt.cef",
"hpi": ".cdt.hpi",
}
}
CHANTYPES = {"meg": "_MAG1", "eeg": "", "misc": "_OTHERS"}
FIFFV_CHANTYPES = {"meg": FIFF.FIFFV_MEG_CH, "eeg": FIFF.FIFFV_EEG_CH,
"misc": FIFF.FIFFV_MISC_CH}
FIFFV_COILTYPES = {"meg": FIFF.FIFFV_COIL_CTF_GRAD, "eeg": FIFF.FIFFV_COIL_EEG,
"misc": FIFF.FIFFV_COIL_NONE}
SI_UNITS = dict(V=FIFF.FIFF_UNIT_V, T=FIFF.FIFF_UNIT_T)
SI_UNIT_SCALE = dict(c=1e-2, m=1e-3, u=1e-6, µ=1e-6, n=1e-9, p=1e-12, f=1e-15)
CurryParameters = namedtuple('CurryParameters',
'n_samples, sfreq, is_ascii, unit_dict, '
'n_chans, dt_start, chanidx_in_file')
def _get_curry_version(file_extension):
"""Check out the curry file version."""
return "Curry 8" if "cdt" in file_extension else "Curry 7"
def _get_curry_file_structure(fname, required=()):
"""Store paths to a dict and check for required files."""
_msg = "The following required files cannot be found: {0}.\nPlease make " \
"sure all required files are located in the same directory as {1}."
_check_fname(fname, overwrite='read', must_exist=True)
# we don't use os.path.splitext to also handle extensions like .cdt.dpa
fname_base, ext = fname.split(".", maxsplit=1)
version = _get_curry_version(ext)
my_curry = dict()
for key in ('info', 'data', 'labels', 'events', 'hpi'):
fname = fname_base + FILE_EXTENSIONS[version][key]
if op.isfile(fname):
my_curry[key] = fname
missing = [field for field in required if field not in my_curry]
if missing:
raise FileNotFoundError(_msg.format(np.unique(missing), fname))
return my_curry
def _read_curry_lines(fname, regex_list):
"""Read through the lines of a curry parameter files and save data.
Parameters
----------
fname : str
Path to a curry file.
regex_list : list of str
A list of strings or regular expressions to search within the file.
Each element `regex` in `regex_list` must be formulated so that
`regex + " START_LIST"` initiates the start and `regex + " END_LIST"`
initiates the end of the elements that should be saved.
Returns
-------
data_dict : dict
A dictionary containing the extracted data. For each element `regex`
in `regex_list` a dictionary key `data_dict[regex]` is created, which
contains a list of the according data.
"""
save_lines = {}
data_dict = {}
for regex in regex_list:
save_lines[regex] = False
data_dict[regex] = []
with open(fname) as fid:
for line in fid:
for regex in regex_list:
if re.match(regex + " END_LIST", line):
save_lines[regex] = False
if save_lines[regex] and line != "\n":
result = line.replace("\n", "")
if "\t" in result:
result = result.split("\t")
data_dict[regex].append(result)
if re.match(regex + " START_LIST", line):
save_lines[regex] = True
return data_dict
def _read_curry_parameters(fname):
"""Extract Curry params from a Curry info file."""
_msg_match = "The sampling frequency and the time steps extracted from " \
"the parameter file do not match."
_msg_invalid = "sfreq must be greater than 0. Got sfreq = {0}"
var_names = ['NumSamples', 'SampleFreqHz',
'DataFormat', 'SampleTimeUsec',
'NumChannels',
'StartYear', 'StartMonth', 'StartDay', 'StartHour',
'StartMin', 'StartSec', 'StartMillisec',
'NUM_SAMPLES', 'SAMPLE_FREQ_HZ',
'DATA_FORMAT', 'SAMPLE_TIME_USEC',
'NUM_CHANNELS',
'START_YEAR', 'START_MONTH', 'START_DAY', 'START_HOUR',
'START_MIN', 'START_SEC', 'START_MILLISEC']
param_dict = dict()
unit_dict = dict()
with open(fname) as fid:
for line in iter(fid):
if any(var_name in line for var_name in var_names):
key, val = line.replace(" ", "").replace("\n", "").split("=")
param_dict[key.lower().replace("_", "")] = val
for type in CHANTYPES:
if "DEVICE_PARAMETERS" + CHANTYPES[type] + " START" in line:
data_unit = next(fid)
unit_dict[type] = data_unit.replace(" ", "") \
.replace("\n", "").split("=")[-1]
# look for CHAN_IN_FILE sections, which may or may not exist; issue #8391
types = ["meg", "eeg", "misc"]
chanidx_in_file = _read_curry_lines(fname,
["CHAN_IN_FILE" +
CHANTYPES[key] for key in types])
n_samples = int(param_dict["numsamples"])
sfreq = float(param_dict["samplefreqhz"])
time_step = float(param_dict["sampletimeusec"]) * 1e-6
is_ascii = param_dict["dataformat"] == "ASCII"
n_channels = int(param_dict["numchannels"])
try:
dt_start = datetime(int(param_dict["startyear"]),
int(param_dict["startmonth"]),
int(param_dict["startday"]),
int(param_dict["starthour"]),
int(param_dict["startmin"]),
int(param_dict["startsec"]),
int(param_dict["startmillisec"]) * 1000,
timezone.utc)
# Note that the time zone information is not stored in the Curry info
# file, and it seems the start time info is in the local timezone
# of the acquisition system (which is unknown); therefore, just set
# the timezone to be UTC. If the user knows otherwise, they can
# change it later. (Some Curry files might include StartOffsetUTCMin,
# but its presence is unpredictable, so we won't rely on it.)
except (ValueError, KeyError):
dt_start = None # if missing keywords or illegal values, don't set
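# Reconcile the sampling frequency with the sample time step; the file may report either one as zero.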
if time_step == 0:
true_sfreq = sfreq
elif sfreq == 0:
true_sfreq = 1 / time_step
elif not np.isclose(sfreq, 1 / time_step):
raise ValueError(_msg_match)
else: # they're equal and != 0
true_sfreq = sfreq
if true_sfreq <= 0:
raise ValueError(_msg_invalid.format(true_sfreq))
return CurryParameters(n_samples, true_sfreq, is_ascii, unit_dict,
n_channels, dt_start, chanidx_in_file)
def _read_curry_info(curry_paths):
"""Extract info from curry parameter files."""
curry_params = _read_curry_parameters(curry_paths['info'])
R = np.eye(4)
R[[0, 1], [0, 1]] = -1 # rotate 180 deg
# shift down and back
# (chosen by eyeballing to make the CTF helmet look roughly correct)
R[:3, 3] = [0., -0.015, -0.12]
curry_dev_dev_t = Transform('ctf_meg', 'meg', R)
# read labels from label files
label_fname = curry_paths['labels']
types = ["meg", "eeg", "misc"]
labels = _read_curry_lines(label_fname,
["LABELS" + CHANTYPES[key] for key in types])
sensors = _read_curry_lines(label_fname,
["SENSORS" + CHANTYPES[key] for key in types])
normals = _read_curry_lines(label_fname,
['NORMALS' + CHANTYPES[key] for key in types])
assert len(labels) == len(sensors) == len(normals)
all_chans = list()
for key in ["meg", "eeg", "misc"]:
chanidx_is_explicit = (len(curry_params.chanidx_in_file["CHAN_IN_FILE"
+ CHANTYPES[key]]) > 0) # channel index
# position in the datafile may or may not be explicitly declared,
# based on the CHAN_IN_FILE section in info file
for ind, chan in enumerate(labels["LABELS" + CHANTYPES[key]]):
chanidx = len(all_chans) + 1 # by default, just assume the
# channel index in the datafile is in order of the channel
# names as we found them in the labels file
if chanidx_is_explicit: # but, if explicitly declared, use
# that index number
chanidx = int(curry_params.chanidx_in_file["CHAN_IN_FILE"
+ CHANTYPES[key]][ind])
if chanidx <= 0: # if chanidx was explicitly declared to be ' 0',
# it means the channel is not actually saved in the data file
# (e.g. the "Ref" channel), so don't add it to our list.
# Git issue #8391
continue
ch = {"ch_name": chan,
"unit": curry_params.unit_dict[key],
"kind": FIFFV_CHANTYPES[key],
"coil_type": FIFFV_COILTYPES[key],
"ch_idx": chanidx
}
if key == "eeg":
loc = np.array(sensors["SENSORS" + CHANTYPES[key]][ind], float)
# XXX just the sensor, where is ref (next 3)?
assert loc.shape == (3,)
loc /= 1000. # to meters
loc = np.concatenate([loc, np.zeros(9)])
ch['loc'] = loc
# XXX need to check/ensure this
ch['coord_frame'] = FIFF.FIFFV_COORD_HEAD
elif key == 'meg':
pos = np.array(sensors["SENSORS" + CHANTYPES[key]][ind], float)
pos /= 1000. # to meters
pos = pos[:3] # just the inner coil
pos = apply_trans(curry_dev_dev_t, pos)
nn = np.array(normals["NORMALS" + CHANTYPES[key]][ind], float)
assert np.isclose(np.linalg.norm(nn), 1., atol=1e-4)
nn /= np.linalg.norm(nn)
nn = apply_trans(curry_dev_dev_t, nn, move=False)
trans = np.eye(4)
trans[:3, 3] = pos
trans[:3, :3] = _normal_orth(nn).T
ch['loc'] = _coil_trans_to_loc(trans)
ch['coord_frame'] = FIFF.FIFFV_COORD_DEVICE
all_chans.append(ch)
ch_count = len(all_chans)
assert (ch_count == curry_params.n_chans) # ensure that we have assembled
# the same number of channels as declared in the info (.DAP) file in the
# DATA_PARAMETERS section. Git issue #8391
# sort the channels to assure they are in the order that matches how
# recorded in the datafile. In general they most likely are already in
# the correct order, but if the channel index in the data file was
# explicitly declared we might as well use it.
all_chans = sorted(all_chans, key=lambda ch: ch['ch_idx'])
ch_names = [chan["ch_name"] for chan in all_chans]
info = create_info(ch_names, curry_params.sfreq)
info['meas_date'] = curry_params.dt_start # for Git issue #8398
_make_trans_dig(curry_paths, info, curry_dev_dev_t)
for ind, ch_dict in enumerate(info["chs"]):
all_chans[ind].pop('ch_idx')
ch_dict.update(all_chans[ind])
assert ch_dict['loc'].shape == (12,)
ch_dict['unit'] = SI_UNITS[all_chans[ind]['unit'][1]]
ch_dict['cal'] = SI_UNIT_SCALE[all_chans[ind]['unit'][0]]
return info, curry_params.n_samples, curry_params.is_ascii
_card_dict = {'Left ear': FIFF.FIFFV_POINT_LPA,
'Nasion': FIFF.FIFFV_POINT_NASION,
'Right ear': FIFF.FIFFV_POINT_RPA}
def _make_trans_dig(curry_paths, info, curry_dev_dev_t):
# Coordinate frame transformations and definitions
no_msg = 'Leaving device<->head transform as None'
info['dev_head_t'] = None
label_fname = curry_paths['labels']
key = 'LANDMARKS' + CHANTYPES['meg']
lm = _read_curry_lines(label_fname, [key])[key]
lm = np.array(lm, float)
lm.shape = (-1, 3)
if len(lm) == 0:
# no dig
logger.info(no_msg + ' (no landmarks found)')
return
lm /= 1000.
key = 'LM_REMARKS' + CHANTYPES['meg']
remarks = _read_curry_lines(label_fname, [key])[key]
assert len(remarks) == len(lm)
info['dig'] = list()
cards = dict()
for remark, r in zip(remarks, lm):
kind = ident = None
if remark in _card_dict:
kind = FIFF.FIFFV_POINT_CARDINAL
ident = _card_dict[remark]
cards[ident] = r
elif remark.startswith('HPI'):
kind = FIFF.FIFFV_POINT_HPI
ident = int(remark[3:]) - 1
if kind is not None:
info['dig'].append(dict(
kind=kind, ident=ident, r=r,
coord_frame=FIFF.FIFFV_COORD_UNKNOWN))
info['dig'].sort(key=lambda x: (x['kind'], x['ident']))
has_cards = len(cards) == 3
has_hpi = 'hpi' in curry_paths
if has_cards and has_hpi: # have all three
logger.info('Composing device<->head transformation from dig points')
hpi_u = np.array([d['r'] for d in info['dig']
if d['kind'] == FIFF.FIFFV_POINT_HPI], float)
hpi_c = np.ascontiguousarray(
_first_hpi(curry_paths['hpi'])[:len(hpi_u), 1:4])
unknown_curry_t = _quaternion_align(
'unknown', 'ctf_meg', hpi_u, hpi_c, 1e-2)
angle = np.rad2deg(_angle_between_quats(
np.zeros(3), rot_to_quat(unknown_curry_t['trans'][:3, :3])))
dist = 1000 * np.linalg.norm(unknown_curry_t['trans'][:3, 3])
logger.info(' Fit a %0.1f° rotation, %0.1f mm translation'
% (angle, dist))
unknown_dev_t = combine_transforms(
unknown_curry_t, curry_dev_dev_t, 'unknown', 'meg')
unknown_head_t = Transform(
'unknown', 'head',
get_ras_to_neuromag_trans(
*(cards[key] for key in (FIFF.FIFFV_POINT_NASION,
FIFF.FIFFV_POINT_LPA,
FIFF.FIFFV_POINT_RPA))))
info['dev_head_t'] = combine_transforms(
invert_transform(unknown_dev_t), unknown_head_t, 'meg', 'head')
for d in info['dig']:
d.update(coord_frame=FIFF.FIFFV_COORD_HEAD,
r=apply_trans(unknown_head_t, d['r']))
else:
if has_cards:
no_msg += ' (no .hpi file found)'
elif has_hpi:
no_msg += ' (not all cardinal points found)'
else:
no_msg += ' (neither cardinal points nor .hpi file found)'
logger.info(no_msg)
def _first_hpi(fname):
# Get the first HPI result
with open(fname, 'r') as fid:
for line in fid:
line = line.strip()
if any(x in line for x in ('FileVersion', 'NumCoils')) or not line:
continue
hpi = np.array(line.split(), float)
break
else:
raise RuntimeError('Could not find valid HPI in %s' % (fname,))
# the first entry is the time (t); drop it before reshaping
assert hpi.ndim == 1
hpi = hpi[1:]
hpi.shape = (-1, 5)
hpi /= 1000.
return hpi
def _read_events_curry(fname):
"""Read events from Curry event files.
Parameters
----------
fname : str
Path to a curry event file with extensions .cef, .ceo,
.cdt.cef, or .cdt.ceo
Returns
-------
events : ndarray, shape (n_events, 3)
The array of events.
"""
check_fname(fname, 'curry event', ('.cef', '.cdt.cef'),
endings_err=('.cef', '.cdt.cef'))
events_dict = _read_curry_lines(fname, ["NUMBER_LIST"])
# The first 3 columns seem to contain the event information
curry_events = np.array(events_dict["NUMBER_LIST"], dtype=int)[:, 0:3]
return curry_events
def _read_annotations_curry(fname, sfreq='auto'):
r"""Read events from Curry event files.
Parameters
----------
fname : str
The filename.
sfreq : float | 'auto'
The sampling frequency in the file. If set to 'auto' then the
``sfreq`` is taken from the respective info file of the same name with
according file extension (\*.dap for Curry 7; \*.cdt.dpa for Curry8).
So data.cef looks in data.dap and data.cdt.cef looks in data.cdt.dpa.
Returns
-------
annot : instance of Annotations | None
The annotations.
"""
required = ["events", "info"] if sfreq == 'auto' else ["events"]
curry_paths = _get_curry_file_structure(fname, required)
events = _read_events_curry(curry_paths['events'])
if sfreq == 'auto':
sfreq = _read_curry_parameters(curry_paths['info']).sfreq
onset = events[:, 0] / sfreq
duration = np.zeros(events.shape[0])
description = events[:, 2]
return Annotations(onset, duration, description)
@verbose
def read_raw_curry(fname, preload=False, verbose=None):
"""Read raw data from Curry files.
Parameters
----------
fname : str
Path to a curry file with extensions .dat, .dap, .rs3, .cdt, cdt.dpa,
.cdt.cef or .cef.
%(preload)s
%(verbose)s
Returns
-------
raw : instance of RawCurry
A Raw object containing Curry data.
"""
return RawCurry(fname, preload, verbose)
class RawCurry(BaseRaw):
"""Raw object from Curry file.
Parameters
----------
fname : str
Path to a curry file with extensions .dat, .dap, .rs3, .cdt, cdt.dpa,
.cdt.cef or .cef.
%(preload)s
%(verbose)s
See Also
--------
mne.io.Raw : Documentation of attribute and methods.
"""
@verbose
def __init__(self, fname, preload=False, verbose=None):
curry_paths = _get_curry_file_structure(
fname, required=["info", "data", "labels"])
data_fname = op.abspath(curry_paths['data'])
info, n_samples, is_ascii = _read_curry_info(curry_paths)
last_samps = [n_samples - 1]
raw_extras = dict(is_ascii=is_ascii)
super(RawCurry, self).__init__(
info, preload, filenames=[data_fname], last_samps=last_samps,
orig_format='int', raw_extras=[raw_extras], verbose=verbose)
if 'events' in curry_paths:
logger.info('Event file found. Extracting Annotations from'
' %s...' % curry_paths['events'])
annots = _read_annotations_curry(curry_paths['events'],
sfreq=self.info["sfreq"])
self.set_annotations(annots)
else:
logger.info('Event file not found. No Annotations set.')
def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
"""Read a chunk of raw data."""
if self._raw_extras[fi]['is_ascii']:
if isinstance(idx, slice):
idx = np.arange(idx.start, idx.stop)
kwargs = dict(skiprows=start, usecols=idx)
if check_version("numpy", "1.16.0"):
kwargs['max_rows'] = stop - start
else:
warn("Data reading might take longer for ASCII files. Update "
"numpy to version 1.16.0 or greater for more efficient "
"data reading.")
block = np.loadtxt(self._filenames[0], **kwargs)[:stop - start].T
data_view = data[:, :block.shape[1]]
_mult_cal_one(data_view, block, idx, cals, mult)
else:
_read_segments_file(
self, data, idx, fi, start, stop, cals, mult, dtype="<f4")
|
import numpy as np
from hypertools.datageometry import DataGeometry
from hypertools.plot.plot import plot
data = [np.random.multivariate_normal(np.zeros(4), np.eye(4), size=100) for i in range(2)]
geo = plot(data, show=False)
def test_geo():
assert isinstance(geo, DataGeometry)
def test_geo_data():
assert isinstance(geo.data, list)
def test_geo_get_data():
assert np.array_equal(data[0], geo.get_data()[0])
def test_geo_get_formatted_data():
assert np.array_equal(data[0], geo.get_formatted_data()[0])
def test_geo_data_dims():
assert (geo.data[0].shape[0]==100) and (geo.data[0].shape[1]==4)
def test_geo_kwargs():
assert isinstance(geo.kwargs, dict)
def test_geo_reduce():
assert isinstance(geo.reduce, dict)
def test_geo_xform_data_dims1():
assert (geo.xform_data[0].shape[0]==100) and (geo.xform_data[0].shape[1]==3)
def test_geo_xform_data_dims2():
geo = plot(data, ndims=4, show=False)
assert (geo.xform_data[0].shape[0]==100) and (geo.xform_data[0].shape[1]==4)
def test_geo_transform():
assert isinstance(geo.transform(data), list)
def test_geo_transform_dims():
assert geo.transform(data)[0].shape[1]==3
def test_geo_plot():
assert isinstance(geo.plot(show=False), DataGeometry)
def test_geo_text_data():
data = [['i like cats alot', 'cats r pretty cool', 'cats are better than dogs'],
['dogs rule the haus', 'dogs are my jam', 'dogs are a mans best friend']]
geo = plot(data, show=False)
assert isinstance(geo, DataGeometry)
assert geo.transform(data)[0].shape[1]==3
assert geo.semantic == 'LatentDirichletAllocation'
assert isinstance(geo.plot(show=False), DataGeometry)
def test_geo_text_data_marker():
data = [['i like cats alot', 'cats r pretty cool', 'cats are better than dogs'],
['dogs rule the haus', 'dogs are my jam', 'dogs are a mans best friend']]
geo = plot(data, '.', show=False)
assert isinstance(geo, DataGeometry)
assert geo.transform(data)[0].shape[1]==3
assert geo.semantic == 'LatentDirichletAllocation'
assert isinstance(geo.plot(show=False), DataGeometry)
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.pool import StaticPool
from homeassistant.bootstrap import async_setup_component
from homeassistant.components.recorder import const, migration, models
# pylint: disable=protected-access
from tests.async_mock import call, patch
from tests.components.recorder import models_original
def create_engine_test(*args, **kwargs):
"""Test version of create_engine that initializes with old schema.
This simulates an existing db with the old schema.
"""
engine = create_engine(*args, **kwargs)
models_original.Base.metadata.create_all(engine)
return engine
async def test_schema_update_calls(hass):
"""Test that schema migrations occur in correct order."""
with patch(
"homeassistant.components.recorder.create_engine", new=create_engine_test
), patch(
"homeassistant.components.recorder.migration._apply_update",
wraps=migration._apply_update,
) as update:
await async_setup_component(
hass, "recorder", {"recorder": {"db_url": "sqlite://"}}
)
await hass.async_block_till_done()
update.assert_has_calls(
[
call(hass.data[const.DATA_INSTANCE].engine, version + 1, 0)
for version in range(0, models.SCHEMA_VERSION)
]
)
async def test_schema_migrate(hass):
"""Test the full schema migration logic.
We're just testing that the logic can execute successfully here without
throwing exceptions. Maintaining a set of assertions based on schema
inspection could quickly become quite cumbersome.
"""
with patch("sqlalchemy.create_engine", new=create_engine_test), patch(
"homeassistant.components.recorder.Recorder._setup_run"
) as setup_run:
await async_setup_component(
hass, "recorder", {"recorder": {"db_url": "sqlite://"}}
)
await hass.async_block_till_done()
assert setup_run.called
def test_invalid_update():
"""Test that an invalid new version raises an exception."""
with pytest.raises(ValueError):
migration._apply_update(None, -1, 0)
def test_forgiving_add_column():
"""Test that add column will continue if column exists."""
engine = create_engine("sqlite://", poolclass=StaticPool)
engine.execute("CREATE TABLE hello (id int)")
migration._add_columns(engine, "hello", ["context_id CHARACTER(36)"])
migration._add_columns(engine, "hello", ["context_id CHARACTER(36)"])
def test_forgiving_add_index():
"""Test that add index will continue if index exists."""
engine = create_engine("sqlite://", poolclass=StaticPool)
models.Base.metadata.create_all(engine)
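# The index already exists from create_all(); creating it again must not raise.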
migration._create_index(engine, "states", "ix_states_context_id")
|
import pytest
import six
from dogpile.cache import make_region
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
# A Mock version is already provided in conftest.py so no need to configure it again
from subliminal.cache import region as region_custom
# Configure default dogpile cache
region_dogpile = make_region()
region_dogpile.configure('dogpile.cache.null')
region_dogpile.configure = Mock()
unicode_string = u'The Simpsons-S12E09-HOMЯ'
byte_string = b'The Simpsons-S12E09-HOM\xd0\xaf'
namespace = 'namespace'
expected_key = 'test_cache:fn|namespace|The Simpsons-S12E09-HOMЯ' # Key is expected as native string
def fn():
pass
def test_dogpile_cache_key_generator_unicode_string():
if six.PY2:
with pytest.raises(UnicodeEncodeError):
region_dogpile.function_key_generator(namespace, fn)(unicode_string)
else:
key = region_dogpile.function_key_generator(namespace, fn)(unicode_string)
assert key == expected_key
assert isinstance(key, six.text_type) # In Python 3, the native string type is unicode
def test_dogpile_cache_key_generator_byte_string():
key = region_dogpile.function_key_generator(namespace, fn)(byte_string)
if six.PY2:
assert key == expected_key
assert isinstance(key, six.binary_type) # In Python 2, the native string type is bytes
else:
assert key == 'test_cache:fn|namespace|' + str(b'The Simpsons-S12E09-HOM\xd0\xaf')
assert key != expected_key # Key is not as expected
assert isinstance(key, six.text_type) # In Python 3, the native string type is unicode
def test_custom_cache_key_generator_unicode_string():
key = region_custom.function_key_generator(namespace, fn)(unicode_string)
assert key == expected_key
if six.PY2:
assert isinstance(key, six.binary_type) # In Python 2, the native string type is bytes
else:
assert isinstance(key, six.text_type) # In Python 3, the native string type is unicode
def test_custom_cache_key_generator_byte_string():
key = region_custom.function_key_generator(namespace, fn)(byte_string)
assert key == expected_key
if six.PY2:
assert isinstance(key, six.binary_type) # In Python 2, the native string type is bytes
else:
assert isinstance(key, six.text_type) # In Python 3, the native string type is unicode
|
import asyncio
import logging
from pyps4_2ndscreen.errors import NotReady, PSDataIncomplete
import pyps4_2ndscreen.ps4 as pyps4
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_TITLE,
MEDIA_TYPE_APP,
MEDIA_TYPE_GAME,
SUPPORT_PAUSE,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.components.ps4 import format_unique_id, load_games, save_games
from homeassistant.const import (
ATTR_LOCKED,
CONF_HOST,
CONF_NAME,
CONF_REGION,
CONF_TOKEN,
STATE_IDLE,
STATE_PLAYING,
STATE_STANDBY,
)
from homeassistant.core import callback
from homeassistant.helpers import device_registry, entity_registry
from .const import (
ATTR_MEDIA_IMAGE_URL,
DEFAULT_ALIAS,
DOMAIN as PS4_DOMAIN,
PS4_DATA,
REGIONS as deprecated_regions,
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_PS4 = (
SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_SELECT_SOURCE
)
ICON = "mdi:sony-playstation"
MEDIA_IMAGE_DEFAULT = None
DEFAULT_RETRIES = 2
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up PS4 from a config entry."""
config = config_entry
creds = config.data[CONF_TOKEN]
device_list = []
for device in config.data["devices"]:
host = device[CONF_HOST]
region = device[CONF_REGION]
name = device[CONF_NAME]
ps4 = pyps4.Ps4Async(host, creds, device_name=DEFAULT_ALIAS)
device_list.append(PS4Device(config, name, host, region, ps4, creds))
async_add_entities(device_list, update_before_add=True)
class PS4Device(MediaPlayerEntity):
"""Representation of a PS4."""
def __init__(self, config, name, host, region, ps4, creds):
"""Initialize the ps4 device."""
self._entry_id = config.entry_id
self._ps4 = ps4
self._host = host
self._name = name
self._region = region
self._creds = creds
self._state = None
self._media_content_id = None
self._media_title = None
self._media_image = None
self._media_type = None
self._source = None
self._games = {}
self._source_list = []
self._retry = 0
self._disconnected = False
self._info = None
self._unique_id = None
@callback
def status_callback(self):
"""Handle status callback. Parse status."""
self._parse_status()
self.async_write_ha_state()
@callback
def subscribe_to_protocol(self):
"""Notify protocol to callback with update changes."""
self.hass.data[PS4_DATA].protocol.add_callback(self._ps4, self.status_callback)
@callback
def unsubscribe_to_protocol(self):
"""Notify protocol to remove callback."""
self.hass.data[PS4_DATA].protocol.remove_callback(
self._ps4, self.status_callback
)
def check_region(self):
"""Display logger msg if region is deprecated."""
# Non-breaking, although the returned data may be inaccurate.
if self._region in deprecated_regions:
_LOGGER.info(
"""Region: %s has been deprecated.
Please remove PS4 integration
and Re-configure again to utilize
current regions""",
self._region,
)
async def async_added_to_hass(self):
"""Subscribe PS4 events."""
self.hass.data[PS4_DATA].devices.append(self)
self.check_region()
async def async_update(self):
"""Retrieve the latest data."""
if self._ps4.ddp_protocol is not None:
# Request Status with asyncio transport.
self._ps4.get_status()
# Don't attempt to connect if the entity is already connected, or if
# the PS4 is in standby, disconnected from the LAN, or powered off.
if (
not self._ps4.connected
and not self._ps4.is_standby
and self._ps4.is_available
):
try:
await self._ps4.async_connect()
except NotReady:
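# The console is not ready to accept a TCP connection yet; retry on the next update.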
pass
# Try to ensure correct status is set on startup for device info.
if self._ps4.ddp_protocol is None:
# Use socket.socket.
await self.hass.async_add_executor_job(self._ps4.get_status)
if self._info is None:
# Add entity to registry.
await self.async_get_device_info(self._ps4.status)
self._ps4.ddp_protocol = self.hass.data[PS4_DATA].protocol
self.subscribe_to_protocol()
self._parse_status()
def _parse_status(self):
"""Parse status."""
status = self._ps4.status
if status is not None:
self._games = load_games(self.hass, self._unique_id)
if self._games:
self.get_source_list()
self._retry = 0
self._disconnected = False
if status.get("status") == "Ok":
title_id = status.get("running-app-titleid")
name = status.get("running-app-name")
if title_id and name is not None:
self._state = STATE_PLAYING
if self._media_content_id != title_id:
self._media_content_id = title_id
if self._use_saved():
_LOGGER.debug("Using saved data for media: %s", title_id)
return
self._media_title = name
self._source = self._media_title
self._media_type = None
# Get data from PS Store.
asyncio.ensure_future(self.async_get_title_data(title_id, name))
else:
if self._state != STATE_IDLE:
self.idle()
else:
if self._state != STATE_STANDBY:
self.state_standby()
elif self._retry > DEFAULT_RETRIES:
self.state_unknown()
else:
self._retry += 1
def _use_saved(self) -> bool:
"""Return True, Set media attrs if data is locked."""
if self._media_content_id in self._games:
store = self._games[self._media_content_id]
# If locked get attributes from file.
locked = store.get(ATTR_LOCKED)
if locked:
self._media_title = store.get(ATTR_MEDIA_TITLE)
self._source = self._media_title
self._media_image = store.get(ATTR_MEDIA_IMAGE_URL)
self._media_type = store.get(ATTR_MEDIA_CONTENT_TYPE)
return True
return False
def idle(self):
"""Set states for state idle."""
self.reset_title()
self._state = STATE_IDLE
def state_standby(self):
"""Set states for state standby."""
self.reset_title()
self._state = STATE_STANDBY
def state_unknown(self):
"""Set states for state unknown."""
self.reset_title()
self._state = None
if self._disconnected is False:
_LOGGER.warning("PS4 could not be reached")
self._disconnected = True
self._retry = 0
def reset_title(self):
"""Update if there is no title."""
self._media_title = None
self._media_content_id = None
self._media_type = None
self._source = None
async def async_get_title_data(self, title_id, name):
"""Get PS Store Data."""
app_name = None
art = None
media_type = None
try:
title = await self._ps4.async_get_ps_store_data(
name, title_id, self._region
)
except PSDataIncomplete:
title = None
except asyncio.TimeoutError:
title = None
_LOGGER.error("PS Store Search Timed out")
else:
if title is not None:
app_name = title.name
art = title.cover_art
# Assume media type is game if not app.
if title.game_type != "App":
media_type = MEDIA_TYPE_GAME
else:
media_type = MEDIA_TYPE_APP
else:
_LOGGER.error(
"Could not find data in region: %s for PS ID: %s",
self._region,
title_id,
)
finally:
self._media_title = app_name or name
self._source = self._media_title
self._media_image = art or None
self._media_type = media_type
await self.hass.async_add_executor_job(self.update_list)
self.async_write_ha_state()
def update_list(self):
"""Update Game List, Correct data if different."""
if self._media_content_id in self._games:
store = self._games[self._media_content_id]
if (
store.get(ATTR_MEDIA_TITLE) != self._media_title
or store.get(ATTR_MEDIA_IMAGE_URL) != self._media_image
):
self._games.pop(self._media_content_id)
if self._media_content_id not in self._games:
self.add_games(
self._media_content_id,
self._media_title,
self._media_image,
self._media_type,
)
self._games = load_games(self.hass, self._unique_id)
self.get_source_list()
def get_source_list(self):
"""Parse data entry and update source list."""
games = []
for data in self._games.values():
games.append(data[ATTR_MEDIA_TITLE])
self._source_list = sorted(games)
def add_games(self, title_id, app_name, image, g_type, is_locked=False):
"""Add games to list."""
games = self._games
if title_id is not None and title_id not in games:
game = {
title_id: {
ATTR_MEDIA_TITLE: app_name,
ATTR_MEDIA_IMAGE_URL: image,
ATTR_MEDIA_CONTENT_TYPE: g_type,
ATTR_LOCKED: is_locked,
}
}
games.update(game)
save_games(self.hass, games, self._unique_id)
async def async_get_device_info(self, status):
"""Set device info for registry."""
# If the status cannot be retrieved on startup, fall back to info from the registry.
if status is None:
_LOGGER.info("Assuming status from registry")
e_registry = await entity_registry.async_get_registry(self.hass)
d_registry = await device_registry.async_get_registry(self.hass)
for entity_id, entry in e_registry.entities.items():
if entry.config_entry_id == self._entry_id:
self._unique_id = entry.unique_id
self.entity_id = entity_id
break
for device in d_registry.devices.values():
if self._entry_id in device.config_entries:
self._info = {
"name": device.name,
"model": device.model,
"identifiers": device.identifiers,
"manufacturer": device.manufacturer,
"sw_version": device.sw_version,
}
break
else:
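# Derive a short firmware string from the raw system-version (assumed format, e.g. "07020001" -> "7.02").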
_sw_version = status["system-version"]
_sw_version = _sw_version[1:4]
sw_version = f"{_sw_version[0]}.{_sw_version[1:]}"
self._info = {
"name": status["host-name"],
"model": "PlayStation 4",
"identifiers": {(PS4_DOMAIN, status["host-id"])},
"manufacturer": "Sony Interactive Entertainment Inc.",
"sw_version": sw_version,
}
self._unique_id = format_unique_id(self._creds, status["host-id"])
async def async_will_remove_from_hass(self):
"""Remove Entity from Home Assistant."""
# Close TCP Transport.
if self._ps4.connected:
await self._ps4.close()
self.unsubscribe_to_protocol()
self.hass.data[PS4_DATA].devices.remove(self)
@property
def device_info(self):
"""Return information about the device."""
return self._info
@property
def unique_id(self):
"""Return Unique ID for entity."""
return self._unique_id
@property
def entity_picture(self):
"""Return picture."""
if self._state == STATE_PLAYING and self._media_content_id is not None:
image_hash = self.media_image_hash
if image_hash is not None:
return (
f"/api/media_player_proxy/{self.entity_id}?"
f"token={self.access_token}&cache={image_hash}"
)
return MEDIA_IMAGE_DEFAULT
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Icon."""
return ICON
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self._media_content_id
@property
def media_content_type(self):
"""Content type of current playing media."""
return self._media_type
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._media_content_id is None:
return MEDIA_IMAGE_DEFAULT
return self._media_image
@property
def media_title(self):
"""Title of current playing media."""
return self._media_title
@property
def supported_features(self):
"""Media player features that are supported."""
return SUPPORT_PS4
@property
def source(self):
"""Return the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._source_list
async def async_turn_off(self):
"""Turn off media player."""
await self._ps4.standby()
async def async_turn_on(self):
"""Turn on the media player."""
self._ps4.wakeup()
async def async_toggle(self):
"""Toggle media player."""
await self._ps4.toggle()
async def async_media_pause(self):
"""Send keypress ps to return to menu."""
await self.async_send_remote_control("ps")
async def async_media_stop(self):
"""Send keypress ps to return to menu."""
await self.async_send_remote_control("ps")
async def async_select_source(self, source):
"""Select input source."""
for title_id, data in self._games.items():
game = data[ATTR_MEDIA_TITLE]
if (
source.lower().encode(encoding="utf-8")
== game.lower().encode(encoding="utf-8")
or source == title_id
):
_LOGGER.debug(
"Starting PS4 game %s (%s) using source %s", game, title_id, source
)
await self._ps4.start_title(title_id, self._media_content_id)
return
_LOGGER.warning("Could not start title. '%s' is not in source list", source)
return
async def async_send_command(self, command):
"""Send Button Command."""
await self.async_send_remote_control(command)
async def async_send_remote_control(self, command):
"""Send RC command."""
await self._ps4.remote_control(command)
|
import logging
from datetime import datetime
from .cursors import cursor
logger = logging.getLogger(__name__)
def datetime_from_string(dt: str) -> datetime:
try:
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S.%f+00:00')
except ValueError:
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S+00:00')
return dt
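# Example (values are illustrative, not from a real node): both timestamp
# layouts handled above are accepted,
#   datetime_from_string('2014-08-21 13:43:17.521000+00:00')
#   datetime_from_string('2014-08-21 13:43:17+00:00')
# and the literal '+00:00' suffix is matched but not turned into tzinfo, so
# the result is a naive datetime.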
CONFLICTING_NODE_SQL = """SELECT n.*, f.* FROM nodes n
JOIN parentage p ON n.id = p.child
LEFT OUTER JOIN files f ON n.id = f.id
WHERE p.parent = (?) AND LOWER(name) = (?) AND status = 'AVAILABLE'
ORDER BY n.name"""
CHILDREN_SQL = """SELECT n.*, f.* FROM nodes n
JOIN parentage p ON n.id = p.child
LEFT OUTER JOIN files f ON n.id = f.id
WHERE p.parent = (?)
ORDER BY n.name"""
CHILDRENS_NAMES_SQL = """SELECT n.name FROM nodes n
JOIN parentage p ON n.id = p.child
WHERE p.parent = (?) AND n.status == 'AVAILABLE'
ORDER BY n.name"""
NUM_CHILDREN_SQL = """SELECT COUNT(n.id) FROM nodes n
JOIN parentage p ON n.id = p.child
WHERE p.parent = (?) AND n.status == 'AVAILABLE'"""
NUM_PARENTS_SQL = """SELECT COUNT(n.id) FROM nodes n
JOIN parentage p ON n.id = p.parent
WHERE p.child = (?) AND n.status == 'AVAILABLE'"""
NUM_NODES_SQL = 'SELECT COUNT(*) FROM nodes'
NUM_FILES_SQL = 'SELECT COUNT(*) FROM files'
NUM_FOLDERS_SQL = 'SELECT COUNT(*) FROM nodes WHERE type == "folder"'
CHILD_OF_SQL = """SELECT n.*, f.* FROM nodes n
JOIN parentage p ON n.id = p.child
LEFT OUTER JOIN files f ON n.id = f.id
WHERE n.name = (?) AND p.parent = (?)
ORDER BY n.status"""
NODE_BY_ID_SQL = """SELECT n.*, f.* FROM nodes n LEFT OUTER JOIN files f ON n.id = f.id
WHERE n.id = (?)"""
USAGE_SQL = 'SELECT SUM(size) FROM files'
FIND_BY_NAME_SQL = """SELECT n.*, f.* FROM nodes n
LEFT OUTER JOIN files f ON n.id = f.id
WHERE n.name LIKE ?
ORDER BY n.name"""
FIND_BY_REGEX_SQL = """SELECT n.*, f.* FROM nodes n
LEFT OUTER JOIN files f ON n.id = f.id
WHERE n.name REGEXP ?
ORDER BY n.name"""
FIND_BY_MD5_SQL = """SELECT n.*, f.* FROM nodes n
LEFT OUTER JOIN files f ON n.id = f.id
WHERE f.md5 == (?)
ORDER BY n.name"""
FIND_FIRST_PARENT_SQL = """SELECT n.* FROM nodes n
JOIN parentage p ON n.id = p.parent
WHERE p.child = (?)
ORDER BY n.status, n.id"""
# TODO: exclude files in trashed folders?!
FILE_SIZE_EXISTS_SQL = """SELECT COUNT(*) FROM files f
JOIN nodes n ON n.id = f.id
WHERE f.size == (?) AND n.status == 'AVAILABLE'"""
class Node(object):
def __init__(self, row):
self.id = row['id']
self.type = row['type']
self.name = row['name']
self.description = row['description']
self.cre = row['created']
self.mod = row['modified']
self.updated = row['updated']
self.status = row['status']
try:
self.md5 = row['md5']
except IndexError:
self.md5 = None
try:
self.size = row['size']
except IndexError:
self.size = 0
def __lt__(self, other):
return self.name < other.name
def __hash__(self):
return hash(self.id)
def __repr__(self):
return 'Node(%r, %r)' % (self.id, self.name)
@property
def is_folder(self):
return self.type == 'folder'
@property
def is_file(self):
return self.type == 'file'
@property
def is_available(self):
return self.status == 'AVAILABLE'
@property
def is_trashed(self):
return self.status == 'TRASH'
@property
def created(self):
return datetime_from_string(self.cre)
@property
def modified(self):
return datetime_from_string(self.mod)
@property
def simple_name(self):
if self.is_file:
return self.name
return (self.name if self.name else '') + '/'
class QueryMixin(object):
def get_node(self, id) -> 'Union[Node|None]':
with cursor(self._conn) as c:
c.execute(NODE_BY_ID_SQL, [id])
r = c.fetchone()
if r:
return Node(r)
def get_root_node(self):
return self.get_node(self.root_id)
def get_conflicting_node(self, name: str, parent_id: str):
"""Finds conflicting node in folder specified by *parent_id*, if one exists."""
with cursor(self._conn) as c:
c.execute(CONFLICTING_NODE_SQL, [parent_id, name.lower()])
r = c.fetchone()
if r:
return Node(r)
def resolve(self, path: str, trash=False) -> 'Union[Node|None]':
segments = list(filter(bool, path.split('/')))
if not segments:
if not self.root_id:
return
with cursor(self._conn) as c:
c.execute(NODE_BY_ID_SQL, [self.root_id])
r = c.fetchone()
return Node(r)
parent = self.root_id
for i, segment in enumerate(segments):
with cursor(self._conn) as c:
c.execute(CHILD_OF_SQL, [segment, parent])
r = c.fetchone()
r2 = c.fetchone()
if not r:
return
r = Node(r)
if not r.is_available:
if not trash:
return
if r2:
logger.debug('Non-unique trash name "%s" in %s.' % (segment, parent))
return
if i + 1 == len(segments):
return r
if r.is_folder:
parent = r.id
continue
else:
return
def childrens_names(self, folder_id) -> 'List[str]':
with cursor(self._conn) as c:
c.execute(CHILDRENS_NAMES_SQL, [folder_id])
kids = []
row = c.fetchone()
while row:
kids.append(row['name'])
row = c.fetchone()
return kids
def get_node_count(self) -> int:
with cursor(self._conn) as c:
c.execute(NUM_NODES_SQL)
r = c.fetchone()[0]
return r
def get_folder_count(self) -> int:
with cursor(self._conn) as c:
c.execute(NUM_FOLDERS_SQL)
r = c.fetchone()[0]
return r
def get_file_count(self) -> int:
with cursor(self._conn) as c:
c.execute(NUM_FILES_SQL)
r = c.fetchone()[0]
return r
def calculate_usage(self):
with cursor(self._conn) as c:
c.execute(USAGE_SQL)
r = c.fetchone()
return r[0] if r and r[0] else 0
def num_children(self, folder_id) -> int:
with cursor(self._conn) as c:
c.execute(NUM_CHILDREN_SQL, [folder_id])
num = c.fetchone()[0]
return num
def num_parents(self, node_id) -> int:
with cursor(self._conn) as c:
c.execute(NUM_PARENTS_SQL, [node_id])
num = c.fetchone()[0]
return num
def get_child(self, folder_id, child_name) -> 'Union[Node|None]':
with cursor(self._conn) as c:
c.execute(CHILD_OF_SQL, [child_name, folder_id])
r = c.fetchone()
if r:
r = Node(r)
if r.is_available:
return r
def list_children(self, folder_id, trash=False) -> 'Tuple[List[Node], List[Node]]':
files = []
folders = []
with cursor(self._conn) as c:
c.execute(CHILDREN_SQL, [folder_id])
node = c.fetchone()
while node:
node = Node(node)
if node.is_available or trash:
if node.is_file:
files.append(node)
elif node.is_folder:
folders.append(node)
node = c.fetchone()
return folders, files
def list_trashed_children(self, folder_id) -> 'Tuple[List[Node], List[Node]]':
folders, files = self.list_children(folder_id, True)
folders[:] = [f for f in folders if f.is_trashed]
files[:] = [f for f in files if f.is_trashed]
return folders, files
def first_path(self, node_id: str) -> str:
if node_id == self.root_id:
return '/'
with cursor(self._conn) as c:
c.execute(FIND_FIRST_PARENT_SQL, (node_id,))
r = c.fetchone()
node = Node(r)
if node.id == self.root_id:
return node.simple_name
return self.first_path(node.id) + node.name + '/'
def find_by_name(self, name: str) -> 'List[Node]':
nodes = []
with cursor(self._conn) as c:
c.execute(FIND_BY_NAME_SQL, ['%' + name + '%'])
r = c.fetchone()
while r:
nodes.append(Node(r))
r = c.fetchone()
return nodes
def find_by_md5(self, md5) -> 'List[Node]':
nodes = []
with cursor(self._conn) as c:
c.execute(FIND_BY_MD5_SQL, (md5,))
r = c.fetchone()
while r:
nodes.append(Node(r))
r = c.fetchone()
return nodes
def find_by_regex(self, regex) -> 'List[Node]':
nodes = []
with cursor(self._conn) as c:
c.execute(FIND_BY_REGEX_SQL, (regex,))
r = c.fetchone()
while r:
nodes.append(Node(r))
r = c.fetchone()
return nodes
def file_size_exists(self, size) -> bool:
with cursor(self._conn) as c:
c.execute(FILE_SIZE_EXISTS_SQL, [size])
no = c.fetchone()[0]
return bool(no)
|
from homeassistant import config_entries, setup
from homeassistant.components.nest.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers import config_entry_oauth2_flow
from tests.async_mock import patch
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
PROJECT_ID = "project-id-4321"
SUBSCRIBER_ID = "subscriber-id-9876"
async def test_full_flow(hass, aiohttp_client, aioclient_mock, current_request):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"project_id": PROJECT_ID,
"subscriber_id": SUBSCRIBER_ID,
CONF_CLIENT_ID: CLIENT_ID,
CONF_CLIENT_SECRET: CLIENT_SECRET,
},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
oauth_authorize = OAUTH2_AUTHORIZE.format(project_id=PROJECT_ID)
assert result["url"] == (
f"{oauth_authorize}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}&scope=https://www.googleapis.com/auth/sdm.service"
"+https://www.googleapis.com/auth/pubsub"
"&access_type=offline&prompt=consent"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
OAUTH2_TOKEN,
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch(
"homeassistant.components.nest.async_setup_entry", return_value=True
) as mock_setup:
await hass.config_entries.flow.async_configure(result["flow_id"])
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup.mock_calls) == 1
|
import os
import sh
from molecule import logger
from molecule import util
from molecule.dependency import base
LOG = logger.get_logger(__name__)
class AnsibleGalaxy(base.Base):
"""
`Ansible Galaxy`_ is the default dependency manager.
Additional options can be passed to ``ansible-galaxy install`` through the
options dict. Any option set in this section will override the defaults.
.. note::
Molecule will remove any options matching '^[v]+$', and pass ``-vvv``
to the underlying ``ansible-galaxy`` command when executing
`molecule --debug`.
.. code-block:: yaml
dependency:
name: galaxy
options:
ignore-certs: True
ignore-errors: True
role-file: requirements.yml
The dependency manager can be disabled by setting ``enabled`` to False.
.. code-block:: yaml
dependency:
name: galaxy
enabled: False
Environment variables can be passed to the dependency.
.. code-block:: yaml
dependency:
name: galaxy
env:
FOO: bar
.. _`Ansible Galaxy`: https://docs.ansible.com/ansible/latest/\
reference_appendices/galaxy.html
"""
def __init__(self, config):
super(AnsibleGalaxy, self).__init__(config)
self._sh_command = None
self.command = 'ansible-galaxy'
@property
def default_options(self):
d = {
'force':
True,
'role-file':
os.path.join(self._config.scenario.directory, 'requirements.yml'),
'roles-path':
os.path.join(self._config.scenario.ephemeral_directory, 'roles'),
}
if self._config.debug:
d['vvv'] = True
return d
# NOTE(retr0h): Override the base classes' options() to handle
# ``ansible-galaxy`` one-off.
@property
def options(self):
o = self._config.config['dependency']['options']
# NOTE(retr0h): Remove verbose options added by the user while in
# debug.
if self._config.debug:
o = util.filter_verbose_permutation(o)
return util.merge_dicts(self.default_options, o)
@property
def default_env(self):
return util.merge_dicts(os.environ.copy(), self._config.env)
def bake(self):
"""
Bake an ``ansible-galaxy`` command so it is ready to execute.
:return: None
"""
options = self.options
verbose_flag = util.verbose_flag(options)
self._sh_command = getattr(sh, self.command)
self._sh_command = self._sh_command.bake(
'install',
options,
*verbose_flag,
_env=self.env,
_out=LOG.out,
_err=LOG.error)
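# Rough shape of the baked command under the default options (a sketch; the
# exact flag ordering depends on how ``sh`` expands the options dict):
#
#   ansible-galaxy install --force \
#       --role-file=<scenario_dir>/requirements.yml \
#       --roles-path=<ephemeral_dir>/roles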
def execute(self):
if not self.enabled:
msg = 'Skipping, dependency is disabled.'
LOG.warn(msg)
return
if not self._has_requirements_file():
msg = 'Skipping, missing the requirements file.'
LOG.warn(msg)
return
if self._sh_command is None:
self.bake()
self._setup()
try:
util.run_command(self._sh_command, debug=self._config.debug)
msg = 'Dependency completed successfully.'
LOG.success(msg)
except sh.ErrorReturnCode as e:
util.sysexit(e.exit_code)
def _setup(self):
"""
Prepare the system for using ``ansible-galaxy``.
:return: None
"""
role_directory = os.path.join(self._config.scenario.directory,
self.options['roles-path'])
if not os.path.isdir(role_directory):
os.makedirs(role_directory)
def _role_file(self):
return self.options.get('role-file')
def _has_requirements_file(self):
return os.path.isfile(self._role_file())
|
import logging
from hangups import parsers, message_parser, user, hangouts_pb2
logger = logging.getLogger(__name__)
chat_message_parser = message_parser.ChatMessageParser()
class ConversationEvent:
"""An event which becomes part of the permanent record of a conversation.
This is a wrapper for the ``Event`` message, which may contain one of many
subtypes, represented here as other subclasses.
Args:
event: ``Event`` message.
"""
def __init__(self, event):
self._event = event # Event
@property
def timestamp(self):
"""When the event occurred (:class:`datetime.datetime`)."""
return parsers.from_timestamp(self._event.timestamp)
@property
def user_id(self):
"""Who created the event (:class:`~hangups.user.UserID`)."""
return user.UserID(chat_id=self._event.sender_id.chat_id,
gaia_id=self._event.sender_id.gaia_id)
@property
def conversation_id(self):
"""ID of the conversation containing the event (:class:`str`)."""
return self._event.conversation_id.id
@property
def id_(self):
"""ID of this event (:class:`str`)."""
return self._event.event_id
class ChatMessageSegment:
"""A segment of a chat message in :class:`ChatMessageEvent`.
Args:
text (str): Text of the segment.
segment_type: (optional) One of ``SEGMENT_TYPE_TEXT``,
``SEGMENT_TYPE_LINE_BREAK``, or ``SEGMENT_TYPE_LINK``. Defaults to
``SEGMENT_TYPE_TEXT``, or ``SEGMENT_TYPE_LINK`` if ``link_target``
is specified.
is_bold (bool): (optional) Whether the text is bold. Defaults to
``False``.
is_italic (bool): (optional) Whether the text is italic. Defaults to
``False``.
is_strikethrough (bool): (optional) Whether the text is struck through.
Defaults to ``False``.
is_underline (bool): (optional) Whether the text is underlined.
Defaults to ``False``.
link_target (str): (optional) URL to link to. Defaults to ``None``.
"""
def __init__(self, text, segment_type=None,
is_bold=False, is_italic=False, is_strikethrough=False,
is_underline=False, link_target=None):
"""Create a new chat message segment."""
if segment_type is not None:
self.type_ = segment_type
elif link_target is not None:
self.type_ = hangouts_pb2.SEGMENT_TYPE_LINK
else:
self.type_ = hangouts_pb2.SEGMENT_TYPE_TEXT
self.text = text
self.is_bold = is_bold
self.is_italic = is_italic
self.is_strikethrough = is_strikethrough
self.is_underline = is_underline
self.link_target = link_target
@staticmethod
def from_str(text):
"""Construct :class:`ChatMessageSegment` list parsed from a string.
Args:
text (str): Text to parse. May contain line breaks, URLs and
formatting markup (simplified Markdown and HTML) to be
converted into equivalent segments.
Returns:
List of :class:`ChatMessageSegment` objects.
"""
segment_list = chat_message_parser.parse(text)
return [ChatMessageSegment(segment.text, **segment.params)
for segment in segment_list]
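# Usage sketch (the input string is a made-up example):
#
#   segments = ChatMessageSegment.from_str('hello world\nhttps://example.com')
#
# which yields a plain text segment, a line break segment and a link segment,
# each of which can be serialized into a ``Segment`` message (see below).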
@staticmethod
def deserialize(segment):
"""Construct :class:`ChatMessageSegment` from ``Segment`` message.
Args:
segment: ``Segment`` message to parse.
Returns:
:class:`ChatMessageSegment` object.
"""
link_target = segment.link_data.link_target
return ChatMessageSegment(
segment.text, segment_type=segment.type,
is_bold=segment.formatting.bold,
is_italic=segment.formatting.italic,
is_strikethrough=segment.formatting.strikethrough,
is_underline=segment.formatting.underline,
link_target=None if link_target == '' else link_target
)
def serialize(self):
"""Serialize this segment to a ``Segment`` message.
Returns:
``Segment`` message.
"""
segment = hangouts_pb2.Segment(
type=self.type_,
text=self.text,
formatting=hangouts_pb2.Formatting(
bold=self.is_bold,
italic=self.is_italic,
strikethrough=self.is_strikethrough,
underline=self.is_underline,
),
)
if self.link_target is not None:
segment.link_data.link_target = self.link_target
return segment
class ChatMessageEvent(ConversationEvent):
"""An event that adds a new message to a conversation.
Corresponds to the ``ChatMessage`` message.
"""
@property
def text(self):
"""Text of the message without formatting (:class:`str`)."""
lines = ['']
for segment in self.segments:
if segment.type_ == hangouts_pb2.SEGMENT_TYPE_TEXT:
lines[-1] += segment.text
elif segment.type_ == hangouts_pb2.SEGMENT_TYPE_LINK:
lines[-1] += segment.text
elif segment.type_ == hangouts_pb2.SEGMENT_TYPE_LINE_BREAK:
lines.append('')
else:
logger.warning('Ignoring unknown chat message segment type: {}'
.format(segment.type_))
lines.extend(self.attachments)
return '\n'.join(lines)
@property
def segments(self):
"""List of :class:`ChatMessageSegment` in message (:class:`list`)."""
seg_list = self._event.chat_message.message_content.segment
return [ChatMessageSegment.deserialize(seg) for seg in seg_list]
@property
def attachments(self):
"""List of attachments in the message (:class:`list`)."""
raw_attachments = self._event.chat_message.message_content.attachment
if raw_attachments is None:
raw_attachments = []
attachments = []
for attachment in raw_attachments:
for embed_item_type in attachment.embed_item.type:
known_types = [
hangouts_pb2.ITEM_TYPE_PLUS_PHOTO,
hangouts_pb2.ITEM_TYPE_PLACE_V2,
hangouts_pb2.ITEM_TYPE_PLACE,
hangouts_pb2.ITEM_TYPE_THING,
]
if embed_item_type not in known_types:
logger.warning('Received chat message attachment with '
'unknown embed type: %r', embed_item_type)
if attachment.embed_item.HasField('plus_photo'):
attachments.append(
attachment.embed_item.plus_photo.thumbnail.image_url
)
return attachments
class OTREvent(ConversationEvent):
"""An event that changes a conversation's OTR (history) mode.
Corresponds to the ``OTRModification`` message.
"""
@property
def new_otr_status(self):
"""The conversation's new OTR status.
May be either ``OFF_THE_RECORD_STATUS_OFF_THE_RECORD`` or
``OFF_THE_RECORD_STATUS_ON_THE_RECORD``.
"""
return self._event.otr_modification.new_otr_status
@property
def old_otr_status(self):
"""The conversation's old OTR status.
May be either ``OFF_THE_RECORD_STATUS_OFF_THE_RECORD`` or
``OFF_THE_RECORD_STATUS_ON_THE_RECORD``.
"""
return self._event.otr_modification.old_otr_status
class RenameEvent(ConversationEvent):
"""An event that renames a conversation.
Corresponds to the ``ConversationRename`` message.
"""
@property
def new_name(self):
"""The conversation's new name (:class:`str`).
May be an empty string if the conversation's name was cleared.
"""
return self._event.conversation_rename.new_name
@property
def old_name(self):
"""The conversation's old name (:class:`str`).
May be an empty string if the conversation had no previous name.
"""
return self._event.conversation_rename.old_name
class MembershipChangeEvent(ConversationEvent):
"""An event that adds or removes a conversation participant.
Corresponds to the ``MembershipChange`` message.
"""
@property
def type_(self):
"""The type of membership change.
May be either ``MEMBERSHIP_CHANGE_TYPE_JOIN`` or
``MEMBERSHIP_CHANGE_TYPE_LEAVE``.
"""
return self._event.membership_change.type
@property
def participant_ids(self):
""":class:`~hangups.user.UserID` of users involved (:class:`list`)."""
return [user.UserID(chat_id=id_.chat_id, gaia_id=id_.gaia_id)
for id_ in self._event.membership_change.participant_ids]
class HangoutEvent(ConversationEvent):
"""An event that is related to a Hangout voice or video call.
Corresponds to the ``HangoutEvent`` message.
"""
@property
def event_type(self):
"""The Hangout event type.
May be one of ``HANGOUT_EVENT_TYPE_START``, ``HANGOUT_EVENT_TYPE_END``,
``HANGOUT_EVENT_TYPE_JOIN``, ``HANGOUT_EVENT_TYPE_LEAVE``,
``HANGOUT_EVENT_TYPE_COMING_SOON``, or ``HANGOUT_EVENT_TYPE_ONGOING``.
"""
return self._event.hangout_event.event_type
class GroupLinkSharingModificationEvent(ConversationEvent):
"""An event that modifies a conversation's group link sharing status.
Corresponds to the ``GroupLinkSharingModification`` message.
"""
@property
def new_status(self):
"""The new group link sharing status.
May be either ``GROUP_LINK_SHARING_STATUS_ON`` or
``GROUP_LINK_SHARING_STATUS_OFF``.
"""
return self._event.group_link_sharing_modification.new_status
|
import pytest
from jinja2 import Environment
from jinja2 import Markup
class MyDict(dict):
pass
class TestTestsCase:
def test_defined(self, env):
tmpl = env.from_string("{{ missing is defined }}|{{ true is defined }}")
assert tmpl.render() == "False|True"
def test_even(self, env):
tmpl = env.from_string("""{{ 1 is even }}|{{ 2 is even }}""")
assert tmpl.render() == "False|True"
def test_odd(self, env):
tmpl = env.from_string("""{{ 1 is odd }}|{{ 2 is odd }}""")
assert tmpl.render() == "True|False"
def test_lower(self, env):
tmpl = env.from_string("""{{ "foo" is lower }}|{{ "FOO" is lower }}""")
assert tmpl.render() == "True|False"
# Test type checks
@pytest.mark.parametrize(
"op,expect",
(
("none is none", True),
("false is none", False),
("true is none", False),
("42 is none", False),
("none is true", False),
("false is true", False),
("true is true", True),
("0 is true", False),
("1 is true", False),
("42 is true", False),
("none is false", False),
("false is false", True),
("true is false", False),
("0 is false", False),
("1 is false", False),
("42 is false", False),
("none is boolean", False),
("false is boolean", True),
("true is boolean", True),
("0 is boolean", False),
("1 is boolean", False),
("42 is boolean", False),
("0.0 is boolean", False),
("1.0 is boolean", False),
("3.14159 is boolean", False),
("none is integer", False),
("false is integer", False),
("true is integer", False),
("42 is integer", True),
("3.14159 is integer", False),
("(10 ** 100) is integer", True),
("none is float", False),
("false is float", False),
("true is float", False),
("42 is float", False),
("4.2 is float", True),
("(10 ** 100) is float", False),
("none is number", False),
("false is number", True),
("true is number", True),
("42 is number", True),
("3.14159 is number", True),
("complex is number", True),
("(10 ** 100) is number", True),
("none is string", False),
("false is string", False),
("true is string", False),
("42 is string", False),
('"foo" is string', True),
("none is sequence", False),
("false is sequence", False),
("42 is sequence", False),
('"foo" is sequence', True),
("[] is sequence", True),
("[1, 2, 3] is sequence", True),
("{} is sequence", True),
("none is mapping", False),
("false is mapping", False),
("42 is mapping", False),
('"foo" is mapping', False),
("[] is mapping", False),
("{} is mapping", True),
("mydict is mapping", True),
("none is iterable", False),
("false is iterable", False),
("42 is iterable", False),
('"foo" is iterable', True),
("[] is iterable", True),
("{} is iterable", True),
("range(5) is iterable", True),
("none is callable", False),
("false is callable", False),
("42 is callable", False),
('"foo" is callable', False),
("[] is callable", False),
("{} is callable", False),
("range is callable", True),
),
)
def test_types(self, env, op, expect):
t = env.from_string(f"{{{{ {op} }}}}")
assert t.render(mydict=MyDict(), complex=complex(1, 2)) == str(expect)
def test_upper(self, env):
tmpl = env.from_string('{{ "FOO" is upper }}|{{ "foo" is upper }}')
assert tmpl.render() == "True|False"
def test_equalto(self, env):
tmpl = env.from_string(
"{{ foo is eq 12 }}|"
"{{ foo is eq 0 }}|"
"{{ foo is eq (3 * 4) }}|"
'{{ bar is eq "baz" }}|'
'{{ bar is eq "zab" }}|'
'{{ bar is eq ("ba" + "z") }}|'
"{{ bar is eq bar }}|"
"{{ bar is eq foo }}"
)
assert (
tmpl.render(foo=12, bar="baz")
== "True|False|True|True|False|True|True|False"
)
@pytest.mark.parametrize(
"op,expect",
(
("eq 2", True),
("eq 3", False),
("ne 3", True),
("ne 2", False),
("lt 3", True),
("lt 2", False),
("le 2", True),
("le 1", False),
("gt 1", True),
("gt 2", False),
("ge 2", True),
("ge 3", False),
),
)
def test_compare_aliases(self, env, op, expect):
t = env.from_string(f"{{{{ 2 is {op} }}}}")
assert t.render() == str(expect)
def test_sameas(self, env):
tmpl = env.from_string("{{ foo is sameas false }}|{{ 0 is sameas false }}")
assert tmpl.render(foo=False) == "True|False"
def test_no_paren_for_arg1(self, env):
tmpl = env.from_string("{{ foo is sameas none }}")
assert tmpl.render(foo=None) == "True"
def test_escaped(self, env):
env = Environment(autoescape=True)
tmpl = env.from_string("{{ x is escaped }}|{{ y is escaped }}")
assert tmpl.render(x="foo", y=Markup("foo")) == "False|True"
def test_greaterthan(self, env):
tmpl = env.from_string("{{ 1 is greaterthan 0 }}|{{ 0 is greaterthan 1 }}")
assert tmpl.render() == "True|False"
def test_lessthan(self, env):
tmpl = env.from_string("{{ 0 is lessthan 1 }}|{{ 1 is lessthan 0 }}")
assert tmpl.render() == "True|False"
def test_multiple_tests(self):
items = []
def matching(x, y):
items.append((x, y))
return False
env = Environment()
env.tests["matching"] = matching
tmpl = env.from_string(
"{{ 'us-west-1' is matching '(us-east-1|ap-northeast-1)'"
" or 'stage' is matching '(dev|stage)' }}"
)
assert tmpl.render() == "False"
assert items == [
("us-west-1", "(us-east-1|ap-northeast-1)"),
("stage", "(dev|stage)"),
]
def test_in(self, env):
tmpl = env.from_string(
'{{ "o" is in "foo" }}|'
'{{ "foo" is in "foo" }}|'
'{{ "b" is in "foo" }}|'
"{{ 1 is in ((1, 2)) }}|"
"{{ 3 is in ((1, 2)) }}|"
"{{ 1 is in [1, 2] }}|"
"{{ 3 is in [1, 2] }}|"
'{{ "foo" is in {"foo": 1}}}|'
'{{ "baz" is in {"bar": 1}}}'
)
assert tmpl.render() == "True|True|False|True|False|True|False|True|False"
|
import unittest
from unittest.mock import Mock, patch
from kaggle_gcp import KaggleKernelCredentials, init_automl
from test.support import EnvironmentVarGuard
from google.cloud import storage, automl_v1beta1, automl
def _make_credentials():
import google.auth.credentials
return Mock(spec=google.auth.credentials.Credentials)
class TestAutoMl(unittest.TestCase):
class FakeClient:
def __init__(self, credentials=None, client_info=None, **kwargs):
self.credentials = credentials
class FakeConnection():
def __init__(self, user_agent):
self.user_agent = user_agent
if (client_info is not None):
self._connection = FakeConnection(client_info.user_agent)
@patch("google.cloud.automl.AutoMlClient", new=FakeClient)
def test_user_provided_credentials(self):
credentials = _make_credentials()
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
init_automl()
client = automl.AutoMlClient(credentials=credentials)
self.assertNotIsInstance(client.credentials, KaggleKernelCredentials)
self.assertIsNotNone(client.credentials)
def test_tables_gcs_client(self):
# The GcsClient can't currently be monkeypatched for default
# credentials because it requires a project which can't be set.
# Verify that creating an automl_v1beta1.GcsClient given an actual
# storage.Client sets the client properly.
gcs_client = storage.Client(project="xyz", credentials=_make_credentials())
tables_gcs_client = automl_v1beta1.GcsClient(client=gcs_client)
self.assertIs(tables_gcs_client.client, gcs_client)
@patch("google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient", new=FakeClient)
def test_tables_client_credentials(self):
credentials = _make_credentials()
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
init_automl()
tables_client = automl_v1beta1.TablesClient(credentials=credentials)
self.assertEqual(tables_client.auto_ml_client.credentials, credentials)
@patch("google.cloud.automl.AutoMlClient", new=FakeClient)
def test_default_credentials_automl_client(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
init_automl()
automl_client = automl.AutoMlClient()
self.assertIsNotNone(automl_client.credentials)
self.assertIsInstance(automl_client.credentials, KaggleKernelCredentials)
self.assertTrue(automl_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
@patch("google.cloud.automl_v1beta1.AutoMlClient", new=FakeClient)
def test_default_credentials_automl_v1beta1_client(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
init_automl()
automl_client = automl_v1beta1.AutoMlClient()
self.assertIsNotNone(automl_client.credentials)
self.assertIsInstance(automl_client.credentials, KaggleKernelCredentials)
self.assertTrue(automl_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
@patch("google.cloud.automl_v1beta1.TablesClient", new=FakeClient)
def test_default_credentials_tables_client(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
init_automl()
tables_client = automl_v1beta1.TablesClient()
self.assertIsNotNone(tables_client.credentials)
self.assertIsInstance(tables_client.credentials, KaggleKernelCredentials)
self.assertTrue(tables_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
@patch("google.cloud.automl.PredictionServiceClient", new=FakeClient)
def test_default_credentials_prediction_client(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
prediction_client = automl.PredictionServiceClient()
self.assertIsNotNone(prediction_client.credentials)
self.assertIsInstance(prediction_client.credentials, KaggleKernelCredentials)
self.assertTrue(prediction_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
@patch("google.cloud.automl_v1beta1.PredictionServiceClient", new=FakeClient)
def test_default_credentials_prediction_v1beta1_client(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
prediction_client = automl_v1beta1.PredictionServiceClient()
self.assertIsNotNone(prediction_client.credentials)
self.assertIsInstance(prediction_client.credentials, KaggleKernelCredentials)
self.assertTrue(prediction_client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
def test_monkeypatching_idempotent(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'AUTOML')
with env:
client1 = automl.AutoMlClient.__init__
init_automl()
client2 = automl.AutoMlClient.__init__
self.assertEqual(client1, client2)
|
from gi.repository import GObject, Gtk
from meld.recent import RecentFiles
@Gtk.Template(resource_path='/org/gnome/meld/ui/recent-selector.ui')
class RecentSelector(Gtk.Grid):
__gtype_name__ = 'RecentSelector'
@GObject.Signal(
flags=(
GObject.SignalFlags.RUN_FIRST |
GObject.SignalFlags.ACTION
),
arg_types=(str,),
)
def open_recent(self, uri: str) -> None:
...
recent_chooser = Gtk.Template.Child()
search_entry = Gtk.Template.Child()
open_button = Gtk.Template.Child()
def do_realize(self):
self.filter_text = ''
self.recent_chooser.set_filter(self.make_recent_filter())
return Gtk.Grid.do_realize(self)
def custom_recent_filter_func(
self, filter_info: Gtk.RecentFilterInfo) -> bool:
"""Filter function for Meld-specific files
Normal GTK recent filter rules are all OR-ed together to check
whether an entry should be shown. This filter instead only ever
shows Meld-specific entries, and then filters down from there.
"""
if filter_info.mime_type != RecentFiles.mime_type:
return False
if self.filter_text not in filter_info.display_name.lower():
return False
return True
def make_recent_filter(self) -> Gtk.RecentFilter:
recent_filter = Gtk.RecentFilter()
recent_filter.add_custom(
(
Gtk.RecentFilterFlags.MIME_TYPE |
Gtk.RecentFilterFlags.DISPLAY_NAME
),
self.custom_recent_filter_func,
)
return recent_filter
@Gtk.Template.Callback()
def on_filter_text_changed(self, *args):
self.filter_text = self.search_entry.get_text().lower()
# This feels unnecessary, but there's no other good way to get
# the RecentChooser to re-evaluate the filter.
self.recent_chooser.set_filter(self.make_recent_filter())
@Gtk.Template.Callback()
def on_selection_changed(self, *args):
have_selection = bool(self.recent_chooser.get_current_uri())
self.open_button.set_sensitive(have_selection)
@Gtk.Template.Callback()
def on_activate(self, *args):
uri = self.recent_chooser.get_current_uri()
if uri:
self.open_recent.emit(uri)
|
import typing
from pathlib import Path
import numpy as np
import keras
import matchzoo
from matchzoo.engine.base_model import BaseModel
class EvaluateAllMetrics(keras.callbacks.Callback):
"""
Callback to evaluate all metrics.
MatchZoo metrics cannot be evaluated batch-wise since they require
dataset-level information. As a result, MatchZoo metrics are not
evaluated automatically during a model's `fit`. When this callback is used,
all metrics, including MatchZoo metrics and Keras metrics, are evaluated
once every `once_every` epochs.
:param model: Model to evaluate.
:param x: Input data to evaluate on.
:param y: Labels to evaluate against.
:param once_every: Evaluation only triggers when `epoch % once_every == 0`.
(default: 1, i.e. evaluate at the end of every epoch)
:param batch_size: Number of samples per evaluation. This only affects the
evaluation of Keras metrics, since MatchZoo metrics are always
evaluated using the full data.
:param model_save_path: Directory path to save the model after each
evaluation (default: None, i.e. no saving).
:param verbose: Verbosity.
"""
def __init__(
self,
model: 'BaseModel',
x: typing.Union[np.ndarray, typing.List[np.ndarray]],
y: np.ndarray,
once_every: int = 1,
batch_size: int = 128,
model_save_path: str = None,
verbose=1
):
"""Initializer."""
super().__init__()
self._model = model
self._dev_x = x
self._dev_y = y
self._valid_steps = once_every
self._batch_size = batch_size
self._model_save_path = model_save_path
self._verbose = verbose
def on_epoch_end(self, epoch: int, logs: dict = None):
"""
Called at the end of an epoch.
:param epoch: integer, index of epoch.
:param logs: dictionary of logs.
:return: dictionary of logs.
"""
if (epoch + 1) % self._valid_steps == 0:
val_logs = self._model.evaluate(self._dev_x, self._dev_y,
self._batch_size)
if self._verbose:
print('Validation: ' + ' - '.join(
f'{k}: {v}' for k, v in val_logs.items()))
for k, v in val_logs.items():
logs[k] = v
if self._model_save_path:
curr_path = self._model_save_path + '%d/' % (epoch + 1)
self._model.save(curr_path)
|
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
import numpy as np
import unittest
from chainercv import functions
def _outsize(x):
if isinstance(x, chainer.utils.collections_abc.Iterable):
if len(x) == 2:
return (None, ) + x
else:
return x
return None, x, x
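# For reference, given the parameterized values below:
#   _outsize((2, 4, 4)) -> (2, 4, 4)      # explicit (out_c, out_h, out_w)
#   _outsize((4, 4))    -> (None, 4, 4)   # out_c falls back to the test default
#   _outsize(4)         -> (None, 4, 4)   # scalar expands to a square output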
@testing.parameterize(*testing.product({
'spatial_scale': [np.float(0.6), np.int(1), 0.6, 1.0, 2.0],
'outsize': [(np.int(2), np.int(4), np.int(4)), (2, 4, 4), (4, 4), 4],
}))
class TestPSROIMaxPooling2D(unittest.TestCase):
def setUp(self):
self.N = 3
self.group_size = 2
self.out_c, self.out_h, self.out_w = _outsize(self.outsize)
if self.out_c is None:
self.out_c = 2
self.n_channels = self.group_size * self.group_size * self.out_c
self.x = np.arange(
self.N * self.n_channels * 10 * 12,
dtype=np.float32).reshape((self.N, self.n_channels, 10, 12))
np.random.shuffle(self.x)
self.x = 2 * self.x / self.x.size - 1
self.x = self.x.astype(np.float32)
self.rois = np.array(
[[0, 0, 7, 7],
[1, 0, 5, 12],
[0, 1, 10, 5],
[3, 3, 4, 4]],
dtype=np.float32
)
self.roi_indices = np.array([0, 2, 1, 0], dtype=np.int32)
self.n_roi = self.rois.shape[0]
self.gy = np.random.uniform(
-1, 1, (self.n_roi, self.out_c, self.out_h, self.out_w))
self.gy = self.gy.astype(np.float32)
self.check_backward_options = {'atol': 5e-4, 'rtol': 5e-3}
def check_forward(self, x_data, roi_data, roi_index_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
y = functions.ps_roi_max_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
self.assertEqual(y.data.dtype, np.float32)
y_data = cuda.to_cpu(y.data)
self.assertEqual(
(self.n_roi, self.out_c, self.out_h, self.out_w), y_data.shape)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x, self.rois, self.roi_indices)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices))
def check_backward(self, x_data, roi_data, roi_index_data, y_grad_data):
def f(x, rois, roi_indices):
y = functions.ps_roi_max_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
xp = cuda.get_array_module(y)
y = F.where(
xp.isinf(y.array), xp.zeros(y.shape, dtype=y.dtype), y)
return y
gradient_check.check_backward(
f, (x_data, roi_data, roi_index_data), y_grad_data,
no_grads=[False, True, True], **self.check_backward_options)
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.rois, self.roi_indices, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices), cuda.to_gpu(self.gy))
def apply_backward(self, x_data, roi_data, roi_index_data, y_grad_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
y = functions.ps_roi_max_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
x.cleargrad()
y.grad = y_grad_data
y.backward()
return x, y
@attr.gpu
@condition.retry(3)
def test_consistency_with_gpu(self):
x_cpu, y_cpu = self.apply_backward(
self.x, self.rois, self.roi_indices, self.gy)
x_gpu, y_gpu = self.apply_backward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices), cuda.to_gpu(self.gy))
testing.assert_allclose(y_cpu.data, y_gpu.data)
testing.assert_allclose(x_cpu.grad, x_gpu.grad)
@testing.parameterize(*testing.product({
'outsize': [(2, 4, 4), (4, 4), 4]
}))
class TestPSROIMaxPooling2DFailure(unittest.TestCase):
def setUp(self):
self.N = 3
self.group_size = 2
self.spatial_scale = 0.6
out_c, _, _ = _outsize(self.outsize)
if out_c is None:
self.n_channels = self.group_size * self.group_size * 2 - 1
else:
self.n_channels = self.group_size * self.group_size * (out_c + 1)
self.x = np.arange(
self.N * self.n_channels * 10 * 12,
dtype=np.float32).reshape((self.N, self.n_channels, 10, 12))
np.random.shuffle(self.x)
self.x = 2 * self.x / self.x.size - 1
self.x = self.x.astype(np.float32)
self.rois = np.array(
[[0, 0, 7, 7],
[1, 0, 5, 12],
[0, 1, 10, 5],
[3, 3, 4, 4]],
dtype=np.float32
)
self.roi_indices = np.array([0, 2, 1, 0], dtype=np.int32)
self.n_roi = self.rois.shape[0]
def check_forward(self, x_data, roi_data, roi_index_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
roi_indices = chainer.Variable(roi_index_data)
functions.ps_roi_max_pooling_2d(
x, rois, roi_indices, self.outsize,
self.spatial_scale, self.group_size)
@condition.retry(3)
def test_invalid_outsize_cpu(self):
with self.assertRaises(ValueError):
self.check_forward(self.x, self.rois, self.roi_indices)
@attr.gpu
@condition.retry(3)
def test_invalid_outsize_gpu(self):
with self.assertRaises(ValueError):
self.check_forward(
cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.roi_indices))
testing.run_module(__name__, __file__)
|
from datetime import timedelta
import logging
from pyobihai import PyObihai
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_USERNAME,
DEVICE_CLASS_TIMESTAMP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=5)
OBIHAI = "Obihai"
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "admin"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Obihai sensor platform."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
host = config[CONF_HOST]
sensors = []
pyobihai = PyObihai(host, username, password)
login = pyobihai.check_account()
if not login:
_LOGGER.error("Invalid credentials")
return
serial = pyobihai.get_device_serial()
services = pyobihai.get_state()
line_services = pyobihai.get_line_state()
call_direction = pyobihai.get_call_direction()
for key in services:
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
if line_services is not None:
for key in line_services:
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
for key in call_direction:
sensors.append(ObihaiServiceSensors(pyobihai, serial, key))
add_entities(sensors)
class ObihaiServiceSensors(Entity):
"""Get the status of each Obihai Lines."""
def __init__(self, pyobihai, serial, service_name):
"""Initialize monitor sensor."""
self._service_name = service_name
self._state = None
self._name = f"{OBIHAI} {self._service_name}"
self._pyobihai = pyobihai
self._unique_id = f"{serial}-{self._service_name}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def available(self):
"""Return if sensor is available."""
return self._state is not None
@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id
@property
def device_class(self):
"""Return the device class for uptime sensor."""
if self._service_name == "Last Reboot":
return DEVICE_CLASS_TIMESTAMP
return None
@property
def icon(self):
"""Return an icon."""
if self._service_name == "Call Direction":
if self._state == "No Active Calls":
return "mdi:phone-off"
if self._state == "Inbound Call":
return "mdi:phone-incoming"
return "mdi:phone-outgoing"
if "Caller Info" in self._service_name:
return "mdi:phone-log"
if "Port" in self._service_name:
if self._state == "Ringing":
return "mdi:phone-ring"
if self._state == "Off Hook":
return "mdi:phone-in-talk"
return "mdi:phone-hangup"
return "mdi:phone"
def update(self):
"""Update the sensor."""
services = self._pyobihai.get_state()
if self._service_name in services:
self._state = services.get(self._service_name)
services = self._pyobihai.get_line_state()
if services is not None:
if self._service_name in services:
self._state = services.get(self._service_name)
call_direction = self._pyobihai.get_call_direction()
if self._service_name in call_direction:
self._state = call_direction.get(self._service_name)
|
import json
import re
from urllib.parse import urlparse
from django.conf import settings
from django.db.models import Q
from django.http import (
Http404,
HttpResponseBadRequest,
HttpResponseNotAllowed,
JsonResponse,
)
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from weblate.logger import LOGGER
from weblate.trans.models import Change, Component
from weblate.trans.tasks import perform_update
from weblate.utils.errors import report_error
from weblate.utils.views import get_component, get_project
BITBUCKET_GIT_REPOS = (
"ssh://git@{server}/{full_name}.git",
"ssh://git@{server}/{full_name}",
"git@{server}:{full_name}.git",
"git@{server}:{full_name}",
"https://{server}/{full_name}.git",
"https://{server}/{full_name}",
)
BITBUCKET_HG_REPOS = (
"https://{server}/{full_name}",
"ssh://hg@{server}/{full_name}",
"hg::ssh://hg@{server}/{full_name}",
"hg::https://{server}/{full_name}",
)
GITHUB_REPOS = (
"git://github.com/%(owner)s/%(slug)s.git",
"git://github.com/%(owner)s/%(slug)s",
"https://github.com/%(owner)s/%(slug)s.git",
"https://github.com/%(owner)s/%(slug)s",
"[email protected]:%(owner)s/%(slug)s.git",
"[email protected]:%(owner)s/%(slug)s",
)
PAGURE_REPOS = (
"https://{server}/{project}",
"https://{server}/{project}.git",
"ssh://git@{server}/{project}",
"ssh://git@{server}/{project}.git",
)
AZURE_REPOS = (
"https://dev.azure.com/{organization}/{project}/_git/{repository}",
"https://dev.azure.com/{organization}/{projectId}/_git/{repositoryId}",
"[email protected]:v3/{organization}/{project}/{repository}",
"https://{organization}.visualstudio.com/{project}/_git/{repository}",
"{organization}@vs-ssh.visualstudio.com:v3/{organization}/{project}/{repository}",
)
HOOK_HANDLERS = {}
def hook_response(
response: str = "Update triggered",
message: str = "success",
status: int = 200,
**kwargs,
):
"""Generic okay hook response."""
data = {"status": message, "message": response}
data.update(kwargs)
return JsonResponse(data=data, status=status)
def register_hook(handler):
"""Register hook handler."""
name = handler.__name__.split("_")[0]
HOOK_HANDLERS[name] = handler
return handler
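# The handler name is the part of the function name before the first
# underscore, so a hypothetical handler for a service called "acme" would be
# registered as:
#
#   @register_hook
#   def acme_hook_helper(data, request):
#       ...  # return the same dict shape as the helpers below, or None to ignore
#
# making it reachable through vcs_service_hook(request, service="acme").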
@csrf_exempt
def update_component(request, project, component):
"""API hook for updating git repos."""
if not settings.ENABLE_HOOKS:
return HttpResponseNotAllowed([])
obj = get_component(request, project, component, True)
if not obj.project.enable_hooks:
return HttpResponseNotAllowed([])
perform_update.delay("Component", obj.pk)
return hook_response()
@csrf_exempt
def update_project(request, project):
"""API hook for updating git repos."""
if not settings.ENABLE_HOOKS:
return HttpResponseNotAllowed([])
obj = get_project(request, project, True)
if not obj.enable_hooks:
return HttpResponseNotAllowed([])
perform_update.delay("Project", obj.pk)
return hook_response()
def parse_hook_payload(request):
"""Parse hook payload.
We handle both application/x-www-form-urlencoded and application/json.
"""
if "application/json" in request.META["CONTENT_TYPE"].lower():
return json.loads(request.body.decode())
return json.loads(request.POST["payload"])
@require_POST
@csrf_exempt
def vcs_service_hook(request, service):
"""Shared code between VCS service hooks.
Currently used for bitbucket_hook, github_hook and gitlab_hook, but should be usable
for other VCS services (Google Code, custom coded sites, etc.) too.
"""
# We support only post methods
if not settings.ENABLE_HOOKS:
return HttpResponseNotAllowed(())
# Get service helper
try:
hook_helper = HOOK_HANDLERS[service]
except KeyError:
raise Http404(f"Hook {service} not supported")
# Check if we got payload
try:
data = parse_hook_payload(request)
except (ValueError, KeyError):
return HttpResponseBadRequest("Could not parse JSON payload!")
if not data:
return HttpResponseBadRequest("Invalid data in json payload!")
# Send the request data to the service handler.
try:
service_data = hook_helper(data, request)
except Exception:
LOGGER.error("failed to parse service %s data", service)
report_error()
return HttpResponseBadRequest("Invalid data in json payload!")
# This happens on ping request upon installation
if service_data is None:
return hook_response("Hook working", status=201)
# Log data
service_long_name = service_data["service_long_name"]
repos = service_data["repos"]
repo_url = service_data["repo_url"]
branch = service_data["branch"]
full_name = service_data["full_name"]
# Generate filter
spfilter = (
Q(repo__in=repos)
| Q(repo__iendswith=full_name)
| Q(repo__iendswith=f"{full_name}.git")
)
for repo in repos:
# We need to match also URLs which include username and password
if repo.startswith("http://"):
spfilter |= Q(repo__startswith="http://") & Q(
repo__endswith="@{}".format(repo[7:])
)
elif repo.startswith("https://"):
spfilter |= Q(repo__startswith="https://") & Q(
repo__endswith="@{}".format(repo[8:])
)
# Include URLs with trailing slash
spfilter |= Q(repo=repo + "/")
all_components = repo_components = Component.objects.filter(spfilter)
if branch is not None:
all_components = repo_components.filter(branch=branch)
enabled_components = all_components.filter(project__enable_hooks=True)
LOGGER.info(
"received %s notification on repository %s, URL %s, branch %s, "
"%d matching components, %d to process, %d linked",
service_long_name,
full_name,
repo_url,
branch,
all_components.count(),
enabled_components.count(),
Component.objects.filter(linked_component__in=enabled_components).count(),
)
# Trigger updates
updates = 0
for obj in enabled_components:
updates += 1
LOGGER.info("%s notification will update %s", service_long_name, obj)
Change.objects.create(
component=obj, action=Change.ACTION_HOOK, details=service_data
)
perform_update.delay("Component", obj.pk)
match_status = {
"repository_matches": repo_components.count(),
"branch_matches": all_components.count(),
"enabled_hook_matches": len(enabled_components),
}
if updates == 0:
return hook_response(
"No matching repositories found!",
"failure",
status=202,
match_status=match_status,
)
updated_components = [obj.full_slug for obj in enabled_components]
return hook_response(
"Update triggered: {}".format(", ".join(updated_components)),
match_status=match_status,
updated_components=updated_components,
)
def bitbucket_extract_changes(data):
if "changes" in data:
return data["changes"]
if "push" in data:
return data["push"]["changes"]
if "commits" in data:
return data["commits"]
return []
def bitbucket_extract_branch(data):
changes = bitbucket_extract_changes(data)
if changes:
last = changes[-1]
if "branch" in last:
return last["branch"]
if last.get("new"):
return changes[-1]["new"]["name"]
if last.get("old"):
return changes[-1]["old"]["name"]
if "ref" in last:
return last["ref"]["displayId"]
# Pullrequest merged action
if "pullrequest" in data:
return data["pullrequest"]["destination"]["branch"]["name"]
return None
def bitbucket_extract_full_name(repository):
if "full_name" in repository:
return repository["full_name"]
if "fullName" in repository:
return repository["fullName"]
if "owner" in repository and "slug" in repository:
return "{}/{}".format(repository["owner"], repository["slug"])
if "project" in repository and "slug" in repository:
return "{}/{}".format(repository["project"]["key"], repository["slug"])
raise ValueError("Could not determine repository full name")
def bitbucket_extract_repo_url(data, repository):
if "links" in repository:
if "html" in repository["links"]:
return repository["links"]["html"]["href"]
return repository["links"]["self"][0]["href"]
if "canon_url" in data:
return "{}{}".format(data["canon_url"], repository["absolute_url"])
raise ValueError("Could not determine repository URL")
@register_hook
def bitbucket_hook_helper(data, request):
"""API to handle service hooks from Bitbucket."""
# Bitbucket ping event
if request and request.META.get("HTTP_X_EVENT_KEY") not in (
"repo:push",
"repo:refs_changed",
"pullrequest:fulfilled",
"pr:merged",
):
return None
if "pullRequest" in data:
# The pr:merged event
repository = data["pullRequest"]["fromRef"]["repository"]
else:
repository = data["repository"]
full_name = bitbucket_extract_full_name(repository)
repo_url = bitbucket_extract_repo_url(data, repository)
# Extract repository links
if "links" in repository and "clone" in repository["links"]:
repos = [val["href"] for val in repository["links"]["clone"]]
else:
repo_servers = {"bitbucket.org", urlparse(repo_url).hostname}
repos = []
if "scm" not in data["repository"]:
templates = BITBUCKET_GIT_REPOS + BITBUCKET_HG_REPOS
elif data["repository"]["scm"] == "hg":
templates = BITBUCKET_HG_REPOS
else:
templates = BITBUCKET_GIT_REPOS
# Construct possible repository URLs
for repo in templates:
repos.extend(
repo.format(full_name=full_name, server=server)
for server in repo_servers
)
if not repos:
LOGGER.error("unsupported repository: %s", repr(data["repository"]))
raise ValueError("unsupported repository")
return {
"service_long_name": "Bitbucket",
"repo_url": repo_url,
"repos": repos,
"branch": bitbucket_extract_branch(data),
"full_name": f"{full_name}.git",
}
@register_hook
def github_hook_helper(data, request):
"""API to handle commit hooks from GitHub."""
# Ignore non push events
if request and request.META.get("HTTP_X_GITHUB_EVENT") != "push":
return None
# Parse owner, branch and repository name
o_data = data["repository"]["owner"]
owner = o_data["login"] if "login" in o_data else o_data["name"]
slug = data["repository"]["name"]
branch = re.sub(r"^refs/heads/", "", data["ref"])
params = {"owner": owner, "slug": slug}
if "clone_url" not in data["repository"]:
# Construct possible repository URLs
repos = [repo % params for repo in GITHUB_REPOS]
else:
repos = []
keys = ["clone_url", "git_url", "ssh_url", "svn_url", "html_url", "url"]
for key in keys:
if key in data["repository"]:
repos.append(data["repository"][key])
return {
"service_long_name": "GitHub",
"repo_url": data["repository"]["url"],
"repos": repos,
"branch": branch,
"full_name": f"{owner}/{slug}.git",
}
@register_hook
def gitea_hook_helper(data, request):
return {
"service_long_name": "Gitea",
"repo_url": data["repository"]["html_url"],
"repos": [
data["repository"]["clone_url"],
data["repository"]["ssh_url"],
data["repository"]["html_url"],
],
"branch": re.sub(r"^refs/heads/", "", data["ref"]),
"full_name": "{}.git".format(data["repository"]["full_name"]),
}
@register_hook
def gitee_hook_helper(data, request):
return {
"service_long_name": "Gitee",
"repo_url": data["repository"]["html_url"],
"repos": [
data["repository"]["git_http_url"],
data["repository"]["git_ssh_url"],
data["repository"]["git_url"],
data["repository"]["ssh_url"],
data["repository"]["html_url"],
],
"branch": re.sub(r"^refs/heads/", "", data["ref"]),
"full_name": "{}.git".format(data["repository"]["path_with_namespace"]),
}
@register_hook
def gitlab_hook_helper(data, request):
"""API to handle commit hooks from GitLab."""
# Ignore non known events
if "ref" not in data:
return None
ssh_url = data["repository"]["url"]
http_url = ".".join((data["repository"]["homepage"], "git"))
branch = re.sub(r"^refs/heads/", "", data["ref"])
# Construct possible repository URLs
repos = [
ssh_url,
http_url,
data["repository"]["git_http_url"],
data["repository"]["git_ssh_url"],
data["repository"]["homepage"],
]
return {
"service_long_name": "GitLab",
"repo_url": data["repository"]["homepage"],
"repos": repos,
"branch": branch,
"full_name": ssh_url.split(":", 1)[1],
}
@register_hook
def pagure_hook_helper(data, request):
"""API to handle commit hooks from Pagure."""
# Ignore non known events
if "msg" not in data or data.get("topic") != "git.receive":
return None
server = urlparse(data["msg"]["pagure_instance"]).hostname
project = data["msg"]["project_fullname"]
repos = [repo.format(server=server, project=project) for repo in PAGURE_REPOS]
return {
"service_long_name": "Pagure",
"repo_url": repos[0],
"repos": repos,
"branch": data["msg"]["branch"],
"full_name": project,
}
@register_hook
def azure_hook_helper(data, request):
if data.get("eventType") != "git.push":
return None
http_url = data["resource"]["repository"]["remoteUrl"]
branch = re.sub(r"^refs/heads/", "", data["resource"]["refUpdates"][0]["name"])
project = data["resource"]["repository"]["project"]["name"]
projectid = data["resource"]["repository"]["project"]["id"]
repository = data["resource"]["repository"]["name"]
repositoryid = data["resource"]["repository"]["id"]
match = re.match(
r"^https?:\/\/dev\.azure\.com\/"
r"(?P<organization>[a-zA-Z0-9]+[a-zA-Z0-9-]*[a-zA-Z0-9]*)",
http_url,
)
# Fallback to support old url structure {organization}.visualstudio.com
if match is None:
match = re.match(
r"^https?:\/\/"
r"(?P<organization>[a-zA-Z0-9]+[a-zA-Z0-9-]*[a-zA-Z0-9]*)"
r"\.visualstudio\.com",
http_url,
)
organization = None
if match is not None:
organization = match.group("organization")
if organization is not None:
repos = [
repo.format(
organization=organization,
project=project,
projectId=projectid,
repository=repository,
repositoryId=repositoryid,
)
for repo in AZURE_REPOS
]
else:
repos = [http_url]
return {
"service_long_name": "Azure",
"repo_url": http_url,
"repos": repos,
"branch": branch,
"full_name": repository,
}
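# Illustrative sketch (hypothetical URLs): both
# "https://dev.azure.com/myorg/project/_git/repo" and
# "https://myorg.visualstudio.com/project/_git/repo" would match with
# organization == "myorg"; if neither pattern matches, only the remote URL
# itself is offered as a candidate repository.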
|
import contextlib
import gzip
import io
import six
import unittest
import uuid
import warnings
import boto3
from parameterizedtestcase import ParameterizedTestCase as PTestCase
import smart_open
import smart_open.concurrency
import smart_open.constants
from initialize_s3_bucket import CONTENTS
BUCKET_NAME = 'smartopen-integration-tests'
def setUpModule():
assert boto3.resource('s3').Bucket(BUCKET_NAME).creation_date, 'see initialize_s3_bucket.py'
def ignore_resource_warnings():
#
# https://github.com/boto/boto3/issues/454
# Py2 doesn't have ResourceWarning, so do nothing.
#
if six.PY2:
return
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<ssl.SSLSocket.*>") # noqa
class ReaderTest(unittest.TestCase):
def setUp(self):
ignore_resource_warnings()
def test_iter(self):
"""Are S3 files iterated over correctly?"""
key_name = 'hello.txt'
expected = CONTENTS[key_name].split(b'\n')
fin = smart_open.s3.Reader(BUCKET_NAME, key_name)
actual = [line.rstrip(b'\n') for line in fin]
self.assertEqual(expected, actual)
def test_iter_context_manager(self):
# same thing but using a context manager
key_name = 'hello.txt'
expected = CONTENTS[key_name].split(b'\n')
with smart_open.s3.Reader(BUCKET_NAME, key_name) as fin:
actual = [line.rstrip(b'\n') for line in fin]
self.assertEqual(expected, actual)
def test_read(self):
"""Are S3 files read correctly?"""
key_name = 'hello.txt'
expected = CONTENTS[key_name]
fin = smart_open.s3.Reader(BUCKET_NAME, key_name)
self.assertEqual(expected[:6], fin.read(6))
self.assertEqual(expected[6:14], fin.read(8)) # ř is 2 bytes
self.assertEqual(expected[14:], fin.read()) # read the rest
def test_seek_beginning(self):
"""Does seeking to the beginning of S3 files work correctly?"""
key_name = 'hello.txt'
expected = CONTENTS[key_name]
fin = smart_open.s3.Reader(BUCKET_NAME, key_name)
self.assertEqual(expected[:6], fin.read(6))
self.assertEqual(expected[6:14], fin.read(8)) # ř is 2 bytes
fin.seek(0)
self.assertEqual(expected, fin.read()) # no size given => read whole file
fin.seek(0)
self.assertEqual(expected, fin.read(-1)) # same thing
def test_seek_start(self):
"""Does seeking from the start of S3 files work correctly?"""
fin = smart_open.s3.Reader(BUCKET_NAME, 'hello.txt')
seek = fin.seek(6)
self.assertEqual(seek, 6)
self.assertEqual(fin.tell(), 6)
self.assertEqual(fin.read(6), u'wořld'.encode('utf-8'))
def test_seek_current(self):
"""Does seeking from the middle of S3 files work correctly?"""
fin = smart_open.s3.Reader(BUCKET_NAME, 'hello.txt')
self.assertEqual(fin.read(5), b'hello')
seek = fin.seek(1, whence=smart_open.constants.WHENCE_CURRENT)
self.assertEqual(seek, 6)
self.assertEqual(fin.read(6), u'wořld'.encode('utf-8'))
def test_seek_end(self):
"""Does seeking from the end of S3 files work correctly?"""
key_name = 'hello.txt'
expected = CONTENTS[key_name]
fin = smart_open.s3.Reader(BUCKET_NAME, key_name)
seek = fin.seek(-4, whence=smart_open.constants.WHENCE_END)
self.assertEqual(seek, len(expected) - 4)
self.assertEqual(fin.read(), b'you?')
def test_detect_eof(self):
key_name = 'hello.txt'
expected = CONTENTS[key_name]
fin = smart_open.s3.Reader(BUCKET_NAME, key_name)
fin.read()
eof = fin.tell()
self.assertEqual(eof, len(expected))
fin.seek(0, whence=smart_open.constants.WHENCE_END)
self.assertEqual(eof, fin.tell())
def test_read_gzip(self):
key_name = 'hello.txt.gz'
with gzip.GzipFile(fileobj=io.BytesIO(CONTENTS[key_name])) as fin:
expected = fin.read()
with smart_open.s3.Reader(BUCKET_NAME, key_name) as fin:
with gzip.GzipFile(fileobj=fin) as zipfile:
actual = zipfile.read()
self.assertEqual(expected, actual)
def test_readline(self):
key_name = 'multiline.txt'
expected = CONTENTS[key_name]
with smart_open.s3.Reader(BUCKET_NAME, key_name) as fin:
fin.readline()
self.assertEqual(fin.tell(), expected.index(b'\n')+1)
fin.seek(0)
actual = list(fin)
self.assertEqual(fin.tell(), len(expected))
expected = [b'englishman\n', b'in\n', b'new\n', b'york\n']
self.assertEqual(expected, actual)
def test_readline_tiny_buffer(self):
key_name = 'multiline.txt'
expected = CONTENTS[key_name]
with smart_open.s3.Reader(BUCKET_NAME, key_name, buffer_size=8) as fin:
actual = list(fin)
expected = [b'englishman\n', b'in\n', b'new\n', b'york\n']
self.assertEqual(expected, actual)
def test_read0_does_not_return_data(self):
with smart_open.s3.Reader(BUCKET_NAME, 'hello.txt') as fin:
data = fin.read(0)
self.assertEqual(data, b'')
def test_to_boto3(self):
key_name = 'multiline.txt'
expected = CONTENTS[key_name]
with smart_open.s3.Reader(BUCKET_NAME, key_name) as fin:
returned_obj = fin.to_boto3()
boto3_body = returned_obj.get()['Body'].read()
self.assertEqual(expected, boto3_body)
def read_key(key):
return boto3.resource('s3').Object(BUCKET_NAME, key).get()['Body'].read()
class WriterTest(unittest.TestCase):
def setUp(self):
#
# Write to a unique key each time to avoid cross-talk between
# simultaneous test runs.
#
self.key = 'writer-test/' + uuid.uuid4().hex
def tearDown(self):
boto3.resource('s3').Object(BUCKET_NAME, self.key).delete()
def test_write(self):
"""Does writing into s3 work correctly?"""
test_string = u"žluťoučký koníček".encode('utf8')
with smart_open.s3.MultipartWriter(BUCKET_NAME, self.key) as fout:
fout.write(test_string)
data = read_key(self.key)
self.assertEqual(data, test_string)
def test_multipart(self):
"""Does s3 multipart chunking work correctly?"""
with smart_open.s3.MultipartWriter(BUCKET_NAME, self.key, min_part_size=10) as fout:
fout.write(b"test")
self.assertEqual(fout._buf.tell(), 4)
fout.write(b"test\n")
self.assertEqual(fout._buf.tell(), 9)
self.assertEqual(fout._total_parts, 0)
fout.write(b"test")
self.assertEqual(fout._buf.tell(), 0)
self.assertEqual(fout._total_parts, 1)
data = read_key(self.key)
self.assertEqual(data, b"testtest\ntest")
def test_empty_key(self):
"""Does writing no data cause key with an empty value to be created?"""
smart_open_write = smart_open.s3.MultipartWriter(BUCKET_NAME, self.key)
with smart_open_write as fout: # noqa
pass
# read back the same key and check its content
data = read_key(self.key)
self.assertEqual(data, b'')
def test_buffered_writer_wrapper_works(self):
"""
Ensure that we can wrap a smart_open s3 stream in a BufferedWriter, which
passes a memoryview object to the underlying stream in python >= 2.7
"""
expected = u'не думай о секундах свысока'
with smart_open.s3.MultipartWriter(BUCKET_NAME, self.key) as fout:
with io.BufferedWriter(fout) as sub_out:
sub_out.write(expected.encode('utf-8'))
text = read_key(self.key).decode('utf-8')
self.assertEqual(expected, text)
def test_double_close(self):
text = u'там за туманами, вечными, пьяными'.encode('utf-8')
fout = smart_open.s3.open(BUCKET_NAME, self.key, 'wb')
fout.write(text)
fout.close()
fout.close()
result = read_key(self.key)
self.assertEqual(result, text)
def test_flush_close(self):
text = u'там за туманами, вечными, пьяными'.encode('utf-8')
fout = smart_open.s3.open(BUCKET_NAME, self.key, 'wb')
fout.write(text)
fout.flush()
fout.close()
result = read_key(self.key)
self.assertEqual(result, text)
@contextlib.contextmanager
def force(multiprocessing=False, concurrent_futures=False):
assert not (multiprocessing and concurrent_futures)
old_multiprocessing = smart_open.concurrency._MULTIPROCESSING
old_concurrent_futures = smart_open.concurrency._CONCURRENT_FUTURES
smart_open.concurrency._MULTIPROCESSING = multiprocessing
smart_open.concurrency._CONCURRENT_FUTURES = concurrent_futures
yield
smart_open.concurrency._MULTIPROCESSING = old_multiprocessing
smart_open.concurrency._CONCURRENT_FUTURES = old_concurrent_futures
class IterBucketTest(PTestCase):
def setUp(self):
self.expected = [
(key, value)
for (key, value) in CONTENTS.items()
if key.startswith('iter_bucket/')
]
self.expected.sort()
def test_singleprocess(self):
with force():
actual = list(smart_open.s3.iter_bucket(BUCKET_NAME, prefix='iter_bucket'))
self.assertEqual(len(self.expected), len(actual))
self.assertEqual(self.expected, sorted(actual))
@unittest.skipIf(not smart_open.concurrency._MULTIPROCESSING, 'multiprocessing unavailable')
def test_multiprocess(self):
with force(multiprocessing=True):
actual = list(smart_open.s3.iter_bucket(BUCKET_NAME, prefix='iter_bucket'))
self.assertEqual(len(self.expected), len(actual))
self.assertEqual(self.expected, sorted(actual))
@unittest.skipIf(not smart_open.concurrency._CONCURRENT_FUTURES, 'concurrent.futures unavailable')
def test_concurrent_futures(self):
with force(concurrent_futures=True):
actual = list(smart_open.s3.iter_bucket(BUCKET_NAME, prefix='iter_bucket'))
self.assertEqual(len(self.expected), len(actual))
self.assertEqual(self.expected, sorted(actual))
def test_accept_key(self):
expected = [(key, value) for (key, value) in self.expected if '4' in key]
actual = list(
smart_open.s3.iter_bucket(
BUCKET_NAME,
prefix='iter_bucket',
accept_key=lambda key: '4' in key
)
)
self.assertEqual(len(expected), len(actual))
self.assertEqual(expected, sorted(actual))
@PTestCase.parameterize(('workers',), [(x,) for x in (1, 4, 8, 16, 64)])
def test_workers(self, workers):
actual = list(smart_open.s3.iter_bucket(BUCKET_NAME, prefix='iter_bucket', workers=workers))
self.assertEqual(len(self.expected), len(actual))
self.assertEqual(self.expected, sorted(actual))
class DownloadKeyTest(unittest.TestCase):
def test(self):
key_name = 'hello.txt'
expected = (key_name, CONTENTS[key_name])
actual = smart_open.s3._download_key(key_name, bucket_name=BUCKET_NAME)
self.assertEqual(expected, actual)
|
import logging
from pymonoprice import get_async_monoprice
from serial import SerialException
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_PORT
from .const import (
CONF_SOURCE_1,
CONF_SOURCE_2,
CONF_SOURCE_3,
CONF_SOURCE_4,
CONF_SOURCE_5,
CONF_SOURCE_6,
CONF_SOURCES,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
SOURCES = [
CONF_SOURCE_1,
CONF_SOURCE_2,
CONF_SOURCE_3,
CONF_SOURCE_4,
CONF_SOURCE_5,
CONF_SOURCE_6,
]
OPTIONS_FOR_DATA = {vol.Optional(source): str for source in SOURCES}
DATA_SCHEMA = vol.Schema({vol.Required(CONF_PORT): str, **OPTIONS_FOR_DATA})
@core.callback
def _sources_from_config(data):
sources_config = {
str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES)
}
return {
index: name.strip()
for index, name in sources_config.items()
if (name is not None and name.strip() != "")
}
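# Illustrative sketch (hypothetical input): given
#   {CONF_SOURCE_1: " TV ", CONF_SOURCE_2: "", CONF_SOURCE_4: "DVD"}
# the helper above would return {"1": "TV", "4": "DVD"}: sources are keyed by
# their 1-based index, names are stripped, and blank or missing entries are
# dropped.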
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
try:
await get_async_monoprice(data[CONF_PORT], hass.loop)
except SerialException as err:
_LOGGER.error("Error connecting to Monoprice controller")
raise CannotConnect from err
sources = _sources_from_config(data)
# Return info that you want to store in the config entry.
return {CONF_PORT: data[CONF_PORT], CONF_SOURCES: sources}
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Monoprice 6-Zone Amplifier."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
return self.async_create_entry(title=user_input[CONF_PORT], data=info)
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
@staticmethod
@core.callback
def async_get_options_flow(config_entry):
"""Define the config flow to handle options."""
return MonopriceOptionsFlowHandler(config_entry)
@core.callback
def _key_for_source(index, source, previous_sources):
if str(index) in previous_sources:
key = vol.Optional(
source, description={"suggested_value": previous_sources[str(index)]}
)
else:
key = vol.Optional(source)
return key
class MonopriceOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a Monoprice options flow."""
def __init__(self, config_entry):
"""Initialize."""
self.config_entry = config_entry
@core.callback
def _previous_sources(self):
if CONF_SOURCES in self.config_entry.options:
previous = self.config_entry.options[CONF_SOURCES]
else:
previous = self.config_entry.data[CONF_SOURCES]
return previous
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(
title="", data={CONF_SOURCES: _sources_from_config(user_input)}
)
previous_sources = self._previous_sources()
options = {
_key_for_source(idx + 1, source, previous_sources): str
for idx, source in enumerate(SOURCES)
}
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(options),
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
|
import diamond.collector
import os
import re
class VMStatCollector(diamond.collector.Collector):
PROC = '/proc/vmstat'
MAX_VALUES = {
'pgfault': diamond.collector.MAX_COUNTER,
'pgmajfault': diamond.collector.MAX_COUNTER,
'pgpgin': diamond.collector.MAX_COUNTER,
'pgpgout': diamond.collector.MAX_COUNTER,
'pswpin': diamond.collector.MAX_COUNTER,
'pswpout': diamond.collector.MAX_COUNTER,
}
def get_default_config_help(self):
config_help = super(VMStatCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(VMStatCollector, self).get_default_config()
config.update({
'path': 'vmstat'
})
return config
def collect(self):
if not os.access(self.PROC, os.R_OK):
return None
# open file
file = open(self.PROC)
        # Build regex
        exp = r'^(pgfault|pgmajfault|pgpgin|pgpgout|pswpin|pswpout)\s(\d+)'
        reg = re.compile(exp)
for line in file:
match = reg.match(line)
if match:
name = match.group(1)
value = match.group(2)
max_value = self.MAX_VALUES[name]
derived = self.derivative(name, int(value), max_value)
self.publish(name, derived, raw_value=int(value), precision=2)
# Close file
file.close()
|
import re
__all__ = [
"URL_RE",
"INVITE_URL_RE",
"MASS_MENTION_RE",
"filter_urls",
"filter_invites",
"filter_mass_mentions",
"filter_various_mentions",
"normalize_smartquotes",
"escape_spoilers",
"escape_spoilers_and_mass_mentions",
]
# regexes
URL_RE = re.compile(r"(https?|s?ftp)://(\S+)", re.I)
INVITE_URL_RE = re.compile(r"(discord\.(?:gg|io|me|li)|discord(?:app)?\.com\/invite)\/(\S+)", re.I)
MASS_MENTION_RE = re.compile(r"(@)(?=everyone|here)") # This only matches the @ for sanitizing
OTHER_MENTION_RE = re.compile(r"(<)(@[!&]?|#)(\d+>)")
SMART_QUOTE_REPLACEMENT_DICT = {
"\u2018": "'", # Left single quote
"\u2019": "'", # Right single quote
"\u201C": '"', # Left double quote
"\u201D": '"', # Right double quote
}
SMART_QUOTE_REPLACE_RE = re.compile("|".join(SMART_QUOTE_REPLACEMENT_DICT.keys()))
SPOILER_CONTENT_RE = re.compile(
r"(?s)(?<!\\)(?P<OPEN>\|{2})(?P<SPOILERED>.*?)(?<!\\)(?P<CLOSE>\|{2})"
)
# convenience wrappers
def filter_urls(to_filter: str) -> str:
"""Get a string with URLs sanitized.
This will match any URLs starting with these protocols:
- ``http://``
- ``https://``
- ``ftp://``
- ``sftp://``
Parameters
----------
to_filter : str
The string to filter.
Returns
-------
str
The sanitized string.
"""
return URL_RE.sub("[SANITIZED URL]", to_filter)
def filter_invites(to_filter: str) -> str:
"""Get a string with discord invites sanitized.
Will match any discord.gg, discordapp.com/invite, discord.com/invite, discord.me, or discord.io/discord.li
invite URL.
Parameters
----------
to_filter : str
The string to filter.
Returns
-------
str
The sanitized string.
"""
return INVITE_URL_RE.sub("[SANITIZED INVITE]", to_filter)
def filter_mass_mentions(to_filter: str) -> str:
"""Get a string with mass mentions sanitized.
Will match any *here* and/or *everyone* mentions.
Parameters
----------
to_filter : str
The string to filter.
Returns
-------
str
The sanitized string.
"""
return MASS_MENTION_RE.sub("@\u200b", to_filter)
def filter_various_mentions(to_filter: str) -> str:
"""
Get a string with role, user, and channel mentions sanitized.
This is mainly for use on user display names, not message content,
and should be applied sparingly.
Parameters
----------
to_filter : str
The string to filter.
Returns
-------
str
The sanitized string.
"""
return OTHER_MENTION_RE.sub(r"\1\\\2\3", to_filter)
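# Illustrative sketch: filter_various_mentions("<@123456>") would return
# "<\@123456>" (a backslash inserted before the mention sigil); the same
# applies to the "<@!...>", "<@&...>" and "<#...>" forms.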
def normalize_smartquotes(to_normalize: str) -> str:
"""
Get a string with smart quotes replaced with normal ones
Parameters
----------
to_normalize : str
The string to normalize.
Returns
-------
str
The normalized string.
"""
def replacement_for(obj):
return SMART_QUOTE_REPLACEMENT_DICT.get(obj.group(0), "")
return SMART_QUOTE_REPLACE_RE.sub(replacement_for, to_normalize)
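# Illustrative sketch: normalize_smartquotes("\u201Chello\u201D \u2018world\u2019")
# would return '"hello" \'world\'': each curly quote is mapped back to its plain
# ASCII counterpart via SMART_QUOTE_REPLACEMENT_DICT.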
def escape_spoilers(content: str) -> str:
"""
Get a string with spoiler syntax escaped.
Parameters
----------
content : str
The string to escape.
Returns
-------
str
The escaped string.
"""
return SPOILER_CONTENT_RE.sub(r"\\\g<OPEN>\g<SPOILERED>\\\g<CLOSE>", content)
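# Illustrative sketch: escape_spoilers("||secret||") would return "\||secret\||"
# (a backslash inserted before each pair of pipes), so the text is shown
# literally instead of being rendered as a spoiler.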
def escape_spoilers_and_mass_mentions(content: str) -> str:
"""
Get a string with spoiler syntax and mass mentions escaped
Parameters
----------
content : str
The string to escape.
Returns
-------
str
The escaped string.
"""
return escape_spoilers(filter_mass_mentions(content))
|
from appconf import AppConf
from django.utils.functional import cached_property
from weblate.utils.classloader import ClassLoader
class ExporterLoader(ClassLoader):
def __init__(self):
super().__init__("WEBLATE_EXPORTERS", False)
def list_exporters(self, translation):
return [
{"name": x.name, "verbose": x.verbose}
for x in sorted(self.values(), key=lambda x: x.name)
if x.supports(translation)
]
def list_exporters_filter(self, allowed):
return [
{"name": x.name, "verbose": x.verbose}
for x in sorted(self.values(), key=lambda x: x.name)
if x.name in allowed
]
EXPORTERS = ExporterLoader()
class FileFormatLoader(ClassLoader):
def __init__(self):
super().__init__("WEBLATE_FORMATS", False)
self.errors = {}
@cached_property
def autoload(self):
result = []
for fileformat in self.data.values():
for autoload in fileformat.autoload:
result.append((autoload, fileformat))
return result
def load_data(self):
result = super().load_data()
for fileformat in list(result.values()):
try:
fileformat.get_class()
except (AttributeError, ImportError) as error:
result.pop(fileformat.format_id)
self.errors[fileformat.format_id] = str(error)
return result
FILE_FORMATS = FileFormatLoader()
class FormatsConf(AppConf):
EXPORTERS = (
"weblate.formats.exporters.PoExporter",
"weblate.formats.exporters.PoXliffExporter",
"weblate.formats.exporters.XliffExporter",
"weblate.formats.exporters.TBXExporter",
"weblate.formats.exporters.TMXExporter",
"weblate.formats.exporters.MoExporter",
"weblate.formats.exporters.CSVExporter",
"weblate.formats.exporters.XlsxExporter",
"weblate.formats.exporters.JSONExporter",
"weblate.formats.exporters.AndroidResourceExporter",
"weblate.formats.exporters.StringsExporter",
)
FORMATS = (
"weblate.formats.ttkit.PoFormat",
"weblate.formats.ttkit.PoMonoFormat",
"weblate.formats.ttkit.TSFormat",
"weblate.formats.ttkit.XliffFormat",
"weblate.formats.ttkit.PoXliffFormat",
"weblate.formats.ttkit.StringsFormat",
"weblate.formats.ttkit.StringsUtf8Format",
"weblate.formats.ttkit.PropertiesUtf8Format",
"weblate.formats.ttkit.PropertiesUtf16Format",
"weblate.formats.ttkit.PropertiesFormat",
"weblate.formats.ttkit.JoomlaFormat",
"weblate.formats.ttkit.GWTFormat",
"weblate.formats.ttkit.PhpFormat",
"weblate.formats.ttkit.LaravelPhpFormat",
"weblate.formats.ttkit.RESXFormat",
"weblate.formats.ttkit.AndroidFormat",
"weblate.formats.ttkit.JSONFormat",
"weblate.formats.ttkit.JSONNestedFormat",
"weblate.formats.ttkit.WebExtensionJSONFormat",
"weblate.formats.ttkit.I18NextFormat",
"weblate.formats.ttkit.GoI18JSONFormat",
"weblate.formats.ttkit.ARBFormat",
"weblate.formats.ttkit.CSVFormat",
"weblate.formats.ttkit.CSVSimpleFormat",
"weblate.formats.ttkit.CSVSimpleFormatISO",
"weblate.formats.ttkit.YAMLFormat",
"weblate.formats.ttkit.RubyYAMLFormat",
"weblate.formats.ttkit.SubRipFormat",
"weblate.formats.ttkit.MicroDVDFormat",
"weblate.formats.ttkit.AdvSubStationAlphaFormat",
"weblate.formats.ttkit.SubStationAlphaFormat",
"weblate.formats.ttkit.DTDFormat",
"weblate.formats.ttkit.FlatXMLFormat",
"weblate.formats.ttkit.INIFormat",
"weblate.formats.ttkit.InnoSetupINIFormat",
"weblate.formats.external.XlsxFormat",
"weblate.formats.txt.AppStoreFormat",
"weblate.formats.convert.HTMLFormat",
"weblate.formats.convert.IDMLFormat",
"weblate.formats.convert.OpenDocumentFormat",
"weblate.formats.convert.WindowsRCFormat",
"weblate.formats.ttkit.XWikiPropertiesFormat",
"weblate.formats.ttkit.XWikiPagePropertiesFormat",
"weblate.formats.ttkit.XWikiFullPageFormat",
)
class Meta:
prefix = "WEBLATE"
|
import warnings
from django.test import TestCase
from django.test.utils import override_settings
from zinnia import url_shortener as us_settings
from zinnia.url_shortener import get_url_shortener
from zinnia.url_shortener.backends import default
class URLShortenerTestCase(TestCase):
"""Test cases for zinnia.url_shortener"""
def setUp(self):
self.original_backend = us_settings.URL_SHORTENER_BACKEND
def tearDown(self):
us_settings.URL_SHORTENER_BACKEND = self.original_backend
def test_get_url_shortener(self):
us_settings.URL_SHORTENER_BACKEND = 'mymodule.myclass'
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default.backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'mymodule.myclass backend cannot be imported')
us_settings.URL_SHORTENER_BACKEND = ('zinnia.tests.implementations.'
'custom_url_shortener')
with warnings.catch_warnings(record=True) as w:
self.assertEqual(get_url_shortener(), default.backend)
self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
self.assertEqual(
str(w[-1].message),
'This backend only exists for testing')
us_settings.URL_SHORTENER_BACKEND = 'zinnia.url_shortener'\
'.backends.default'
self.assertEqual(get_url_shortener(), default.backend)
class FakeEntry(object):
"""Fake entry with only 'pk' as attribute"""
def __init__(self, pk):
self.pk = pk
@override_settings(
ROOT_URLCONF='zinnia.tests.implementations.urls.default'
)
class UrlShortenerDefaultBackendTestCase(TestCase):
"""Tests cases for the default url shortener backend"""
def test_backend(self):
original_protocol = default.PROTOCOL
default.PROTOCOL = 'http'
entry = FakeEntry(1)
self.assertEqual(default.backend(entry),
'http://example.com/1/')
default.PROTOCOL = 'https'
entry = FakeEntry(100)
self.assertEqual(default.backend(entry),
'https://example.com/2S/')
default.PROTOCOL = original_protocol
def test_base36(self):
self.assertEqual(default.base36(1), '1')
self.assertEqual(default.base36(100), '2S')
self.assertEqual(default.base36(46656), '1000')
|
from .const import DOMAIN, PREF_PRELOAD_STREAM
# mypy: allow-untyped-defs, no-check-untyped-defs
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
_UNDEF = object()
class CameraEntityPreferences:
"""Handle preferences for camera entity."""
def __init__(self, prefs):
"""Initialize prefs."""
self._prefs = prefs
def as_dict(self):
"""Return dictionary version."""
return self._prefs
@property
def preload_stream(self):
"""Return if stream is loaded on hass start."""
return self._prefs.get(PREF_PRELOAD_STREAM, False)
class CameraPreferences:
"""Handle camera preferences."""
def __init__(self, hass):
"""Initialize camera prefs."""
self._hass = hass
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
self._prefs = None
async def async_initialize(self):
"""Finish initializing the preferences."""
prefs = await self._store.async_load()
if prefs is None:
prefs = {}
self._prefs = prefs
async def async_update(
self, entity_id, *, preload_stream=_UNDEF, stream_options=_UNDEF
):
"""Update camera preferences."""
if not self._prefs.get(entity_id):
self._prefs[entity_id] = {}
for key, value in ((PREF_PRELOAD_STREAM, preload_stream),):
if value is not _UNDEF:
self._prefs[entity_id][key] = value
await self._store.async_save(self._prefs)
def get(self, entity_id):
"""Get preferences for an entity."""
return CameraEntityPreferences(self._prefs.get(entity_id, {}))
|
import logging
import os
import socket
import sys
import psutil
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_RESOURCES,
CONF_TYPE,
DATA_GIBIBYTES,
DATA_MEBIBYTES,
DATA_RATE_MEGABYTES_PER_SECOND,
PERCENTAGE,
STATE_OFF,
STATE_ON,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
import homeassistant.util.dt as dt_util
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
CONF_ARG = "arg"
if sys.maxsize > 2 ** 32:
CPU_ICON = "mdi:cpu-64-bit"
else:
CPU_ICON = "mdi:cpu-32-bit"
# Schema: [name, unit of measurement, icon, device class, flag if mandatory arg]
SENSOR_TYPES = {
"disk_free": ["Disk free", DATA_GIBIBYTES, "mdi:harddisk", None, False],
"disk_use": ["Disk use", DATA_GIBIBYTES, "mdi:harddisk", None, False],
"disk_use_percent": [
"Disk use (percent)",
PERCENTAGE,
"mdi:harddisk",
None,
False,
],
"ipv4_address": ["IPv4 address", "", "mdi:server-network", None, True],
"ipv6_address": ["IPv6 address", "", "mdi:server-network", None, True],
"last_boot": ["Last boot", "", "mdi:clock", "timestamp", False],
"load_15m": ["Load (15m)", " ", CPU_ICON, None, False],
"load_1m": ["Load (1m)", " ", CPU_ICON, None, False],
"load_5m": ["Load (5m)", " ", CPU_ICON, None, False],
"memory_free": ["Memory free", DATA_MEBIBYTES, "mdi:memory", None, False],
"memory_use": ["Memory use", DATA_MEBIBYTES, "mdi:memory", None, False],
"memory_use_percent": [
"Memory use (percent)",
PERCENTAGE,
"mdi:memory",
None,
False,
],
"network_in": ["Network in", DATA_MEBIBYTES, "mdi:server-network", None, True],
"network_out": ["Network out", DATA_MEBIBYTES, "mdi:server-network", None, True],
"packets_in": ["Packets in", " ", "mdi:server-network", None, True],
"packets_out": ["Packets out", " ", "mdi:server-network", None, True],
"throughput_network_in": [
"Network throughput in",
DATA_RATE_MEGABYTES_PER_SECOND,
"mdi:server-network",
None,
True,
],
"throughput_network_out": [
"Network throughput out",
DATA_RATE_MEGABYTES_PER_SECOND,
"mdi:server-network",
True,
],
"process": ["Process", " ", CPU_ICON, None, True],
"processor_use": ["Processor use (percent)", PERCENTAGE, CPU_ICON, None, False],
"processor_temperature": [
"Processor temperature",
TEMP_CELSIUS,
CPU_ICON,
None,
False,
],
"swap_free": ["Swap free", DATA_MEBIBYTES, "mdi:harddisk", None, False],
"swap_use": ["Swap use", DATA_MEBIBYTES, "mdi:harddisk", None, False],
"swap_use_percent": ["Swap use (percent)", PERCENTAGE, "mdi:harddisk", None, False],
}
def check_required_arg(value):
"""Validate that the required "arg" for the sensor types that need it are set."""
for sensor in value:
sensor_type = sensor[CONF_TYPE]
sensor_arg = sensor.get(CONF_ARG)
if sensor_arg is None and SENSOR_TYPES[sensor_type][4]:
raise vol.RequiredFieldInvalid(
f"Mandatory 'arg' is missing for sensor type '{sensor_type}'."
)
return value
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_RESOURCES, default={CONF_TYPE: "disk_use"}): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_TYPE): vol.In(SENSOR_TYPES),
vol.Optional(CONF_ARG): cv.string,
}
)
],
check_required_arg,
)
}
)
IO_COUNTER = {
"network_out": 0,
"network_in": 1,
"packets_out": 2,
"packets_in": 3,
"throughput_network_out": 0,
"throughput_network_in": 1,
}
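# The indices above map into the tuple returned by psutil.net_io_counters():
# 0 = bytes_sent, 1 = bytes_recv, 2 = packets_sent, 3 = packets_recv, so the
# "network_*" sensors read byte counters and the "packets_*" sensors read
# packet counters for the configured interface.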
IF_ADDRS_FAMILY = {"ipv4_address": socket.AF_INET, "ipv6_address": socket.AF_INET6}
# There might be additional keys to be added for different
# platforms / hardware combinations.
# Taken from last version of "glances" integration before they moved to
# a generic temperature sensor logic.
# https://github.com/home-assistant/core/blob/5e15675593ba94a2c11f9f929cdad317e27ce190/homeassistant/components/glances/sensor.py#L199
CPU_SENSOR_PREFIXES = [
"amdgpu 1",
"aml_thermal",
"Core 0",
"Core 1",
"CPU Temperature",
"CPU",
"cpu-thermal 1",
"cpu_thermal 1",
"exynos-therm 1",
"Package id 0",
"Physical id 0",
"radeon 1",
"soc-thermal 1",
"soc_thermal 1",
]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the system monitor sensors."""
dev = []
for resource in config[CONF_RESOURCES]:
        # Initialize the sensor argument if none was provided.
        # For disk monitoring, default to "/" (root) to prevent runtime errors
        # if no argument was specified.
if CONF_ARG not in resource:
if resource[CONF_TYPE].startswith("disk_"):
resource[CONF_ARG] = "/"
else:
resource[CONF_ARG] = ""
# Verify if we can retrieve CPU / processor temperatures.
# If not, do not create the entity and add a warning to the log
if resource[CONF_TYPE] == "processor_temperature":
if SystemMonitorSensor.read_cpu_temperature() is None:
_LOGGER.warning("Cannot read CPU / processor temperature information.")
continue
dev.append(SystemMonitorSensor(resource[CONF_TYPE], resource[CONF_ARG]))
add_entities(dev, True)
class SystemMonitorSensor(Entity):
"""Implementation of a system monitor sensor."""
def __init__(self, sensor_type, argument=""):
"""Initialize the sensor."""
self._name = "{} {}".format(SENSOR_TYPES[sensor_type][0], argument)
self._unique_id = slugify(f"{sensor_type}_{argument}")
self.argument = argument
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._available = True
if sensor_type in ["throughput_network_out", "throughput_network_in"]:
self._last_value = None
self._last_update_time = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name.rstrip()
@property
def unique_id(self):
"""Return the unique ID."""
return self._unique_id
@property
def device_class(self):
"""Return the class of this sensor."""
return SENSOR_TYPES[self.type][3]
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def available(self):
"""Return True if entity is available."""
return self._available
def update(self):
"""Get the latest system information."""
if self.type == "disk_use_percent":
self._state = psutil.disk_usage(self.argument).percent
elif self.type == "disk_use":
self._state = round(psutil.disk_usage(self.argument).used / 1024 ** 3, 1)
elif self.type == "disk_free":
self._state = round(psutil.disk_usage(self.argument).free / 1024 ** 3, 1)
elif self.type == "memory_use_percent":
self._state = psutil.virtual_memory().percent
elif self.type == "memory_use":
virtual_memory = psutil.virtual_memory()
self._state = round(
(virtual_memory.total - virtual_memory.available) / 1024 ** 2, 1
)
elif self.type == "memory_free":
self._state = round(psutil.virtual_memory().available / 1024 ** 2, 1)
elif self.type == "swap_use_percent":
self._state = psutil.swap_memory().percent
elif self.type == "swap_use":
self._state = round(psutil.swap_memory().used / 1024 ** 2, 1)
elif self.type == "swap_free":
self._state = round(psutil.swap_memory().free / 1024 ** 2, 1)
elif self.type == "processor_use":
self._state = round(psutil.cpu_percent(interval=None))
elif self.type == "processor_temperature":
self._state = self.read_cpu_temperature()
elif self.type == "process":
for proc in psutil.process_iter():
try:
if self.argument == proc.name():
self._state = STATE_ON
return
except psutil.NoSuchProcess as err:
_LOGGER.warning(
"Failed to load process with id: %s, old name: %s",
err.pid,
err.name,
)
self._state = STATE_OFF
elif self.type == "network_out" or self.type == "network_in":
counters = psutil.net_io_counters(pernic=True)
if self.argument in counters:
counter = counters[self.argument][IO_COUNTER[self.type]]
self._state = round(counter / 1024 ** 2, 1)
else:
self._state = None
elif self.type == "packets_out" or self.type == "packets_in":
counters = psutil.net_io_counters(pernic=True)
if self.argument in counters:
self._state = counters[self.argument][IO_COUNTER[self.type]]
else:
self._state = None
elif (
self.type == "throughput_network_out"
or self.type == "throughput_network_in"
):
counters = psutil.net_io_counters(pernic=True)
if self.argument in counters:
counter = counters[self.argument][IO_COUNTER[self.type]]
now = dt_util.utcnow()
if self._last_value and self._last_value < counter:
self._state = round(
(counter - self._last_value)
/ 1000 ** 2
/ (now - self._last_update_time).seconds,
3,
)
else:
self._state = None
self._last_update_time = now
self._last_value = counter
else:
self._state = None
elif self.type == "ipv4_address" or self.type == "ipv6_address":
addresses = psutil.net_if_addrs()
if self.argument in addresses:
for addr in addresses[self.argument]:
if addr.family == IF_ADDRS_FAMILY[self.type]:
self._state = addr.address
else:
self._state = None
elif self.type == "last_boot":
self._state = dt_util.as_local(
dt_util.utc_from_timestamp(psutil.boot_time())
).isoformat()
elif self.type == "load_1m":
self._state = round(os.getloadavg()[0], 2)
elif self.type == "load_5m":
self._state = round(os.getloadavg()[1], 2)
elif self.type == "load_15m":
self._state = round(os.getloadavg()[2], 2)
@staticmethod
def read_cpu_temperature():
"""Attempt to read CPU / processor temperature."""
temps = psutil.sensors_temperatures()
for name, entries in temps.items():
i = 1
for entry in entries:
# In case the label is empty (e.g. on Raspberry PI 4),
# construct it ourself here based on the sensor key name.
if not entry.label:
_label = f"{name} {i}"
else:
_label = entry.label
if _label in CPU_SENSOR_PREFIXES:
return round(entry.current, 1)
i += 1
|
import asyncio
import base64
import json
import logging
import aiobotocore
from homeassistant.components.notify import (
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
BaseNotificationService,
)
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from homeassistant.helpers.json import JSONEncoder
from .const import (
CONF_CONTEXT,
CONF_CREDENTIAL_NAME,
CONF_PROFILE_NAME,
CONF_REGION,
CONF_SERVICE,
DATA_SESSIONS,
)
_LOGGER = logging.getLogger(__name__)
async def get_available_regions(hass, service):
"""Get available regions for a service."""
session = aiobotocore.get_session()
# get_available_regions is not a coroutine since it does not perform
    # network I/O. But it still performs heavy file I/O, so run it in an
    # executor thread to avoid blocking the event loop.
return await hass.async_add_executor_job(session.get_available_regions, service)
async def async_get_service(hass, config, discovery_info=None):
"""Get the AWS notification service."""
if discovery_info is None:
_LOGGER.error("Please config aws notify platform in aws component")
return None
session = None
conf = discovery_info
service = conf[CONF_SERVICE]
region_name = conf[CONF_REGION]
available_regions = await get_available_regions(hass, service)
if region_name not in available_regions:
_LOGGER.error(
"Region %s is not available for %s service, must in %s",
region_name,
service,
available_regions,
)
return None
aws_config = conf.copy()
del aws_config[CONF_SERVICE]
del aws_config[CONF_REGION]
if CONF_PLATFORM in aws_config:
del aws_config[CONF_PLATFORM]
if CONF_NAME in aws_config:
del aws_config[CONF_NAME]
if CONF_CONTEXT in aws_config:
del aws_config[CONF_CONTEXT]
if not aws_config:
# no platform config, use the first aws component credential instead
if hass.data[DATA_SESSIONS]:
session = next(iter(hass.data[DATA_SESSIONS].values()))
else:
_LOGGER.error("Missing aws credential for %s", config[CONF_NAME])
return None
if session is None:
credential_name = aws_config.get(CONF_CREDENTIAL_NAME)
if credential_name is not None:
session = hass.data[DATA_SESSIONS].get(credential_name)
if session is None:
_LOGGER.warning("No available aws session for %s", credential_name)
del aws_config[CONF_CREDENTIAL_NAME]
if session is None:
profile = aws_config.get(CONF_PROFILE_NAME)
if profile is not None:
session = aiobotocore.AioSession(profile=profile)
del aws_config[CONF_PROFILE_NAME]
else:
session = aiobotocore.AioSession()
aws_config[CONF_REGION] = region_name
if service == "lambda":
context_str = json.dumps(
{"custom": conf.get(CONF_CONTEXT, {})}, cls=JSONEncoder
)
context_b64 = base64.b64encode(context_str.encode("utf-8"))
context = context_b64.decode("utf-8")
return AWSLambda(session, aws_config, context)
if service == "sns":
return AWSSNS(session, aws_config)
if service == "sqs":
return AWSSQS(session, aws_config)
# should not reach here since service was checked in schema
return None
class AWSNotify(BaseNotificationService):
"""Implement the notification service for the AWS service."""
def __init__(self, session, aws_config):
"""Initialize the service."""
self.session = session
self.aws_config = aws_config
class AWSLambda(AWSNotify):
"""Implement the notification service for the AWS Lambda service."""
service = "lambda"
def __init__(self, session, aws_config, context):
"""Initialize the service."""
super().__init__(session, aws_config)
self.context = context
async def async_send_message(self, message="", **kwargs):
"""Send notification to specified LAMBDA ARN."""
if not kwargs.get(ATTR_TARGET):
_LOGGER.error("At least one target is required")
return
cleaned_kwargs = {k: v for k, v in kwargs.items() if v is not None}
payload = {"message": message}
payload.update(cleaned_kwargs)
json_payload = json.dumps(payload)
async with self.session.create_client(
self.service, **self.aws_config
) as client:
tasks = []
for target in kwargs.get(ATTR_TARGET, []):
tasks.append(
client.invoke(
FunctionName=target,
Payload=json_payload,
ClientContext=self.context,
)
)
if tasks:
await asyncio.gather(*tasks)
class AWSSNS(AWSNotify):
"""Implement the notification service for the AWS SNS service."""
service = "sns"
async def async_send_message(self, message="", **kwargs):
"""Send notification to specified SNS ARN."""
if not kwargs.get(ATTR_TARGET):
_LOGGER.error("At least one target is required")
return
message_attributes = {
k: {"StringValue": json.dumps(v), "DataType": "String"}
for k, v in kwargs.items()
if v is not None
}
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
async with self.session.create_client(
self.service, **self.aws_config
) as client:
tasks = []
for target in kwargs.get(ATTR_TARGET, []):
tasks.append(
client.publish(
TargetArn=target,
Message=message,
Subject=subject,
MessageAttributes=message_attributes,
)
)
if tasks:
await asyncio.gather(*tasks)
class AWSSQS(AWSNotify):
"""Implement the notification service for the AWS SQS service."""
service = "sqs"
async def async_send_message(self, message="", **kwargs):
"""Send notification to specified SQS ARN."""
if not kwargs.get(ATTR_TARGET):
_LOGGER.error("At least one target is required")
return
cleaned_kwargs = {k: v for k, v in kwargs.items() if v is not None}
message_body = {"message": message}
message_body.update(cleaned_kwargs)
json_body = json.dumps(message_body)
message_attributes = {}
for key, val in cleaned_kwargs.items():
message_attributes[key] = {
"StringValue": json.dumps(val),
"DataType": "String",
}
async with self.session.create_client(
self.service, **self.aws_config
) as client:
tasks = []
for target in kwargs.get(ATTR_TARGET, []):
tasks.append(
client.send_message(
QueueUrl=target,
MessageBody=json_body,
MessageAttributes=message_attributes,
)
)
if tasks:
await asyncio.gather(*tasks)
|
import logging
from pyspcwebgw import SpcWebGateway
from pyspcwebgw.area import Area
from pyspcwebgw.zone import Zone
import voluptuous as vol
from homeassistant.helpers import aiohttp_client, discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
_LOGGER = logging.getLogger(__name__)
CONF_WS_URL = "ws_url"
CONF_API_URL = "api_url"
DOMAIN = "spc"
DATA_API = "spc_api"
SIGNAL_UPDATE_ALARM = "spc_update_alarm_{}"
SIGNAL_UPDATE_SENSOR = "spc_update_sensor_{}"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_WS_URL): cv.string,
vol.Required(CONF_API_URL): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set up the SPC component."""
    async def async_update_callback(spc_object):
if isinstance(spc_object, Area):
async_dispatcher_send(hass, SIGNAL_UPDATE_ALARM.format(spc_object.id))
elif isinstance(spc_object, Zone):
async_dispatcher_send(hass, SIGNAL_UPDATE_SENSOR.format(spc_object.id))
session = aiohttp_client.async_get_clientsession(hass)
spc = SpcWebGateway(
loop=hass.loop,
session=session,
api_url=config[DOMAIN].get(CONF_API_URL),
ws_url=config[DOMAIN].get(CONF_WS_URL),
        async_callback=async_update_callback,
)
hass.data[DATA_API] = spc
if not await spc.async_load_parameters():
_LOGGER.error("Failed to load area/zone information from SPC")
return False
# add sensor devices for each zone (typically motion/fire/door sensors)
hass.async_create_task(
discovery.async_load_platform(hass, "binary_sensor", DOMAIN, {}, config)
)
# create a separate alarm panel for each area
hass.async_create_task(
discovery.async_load_platform(hass, "alarm_control_panel", DOMAIN, {}, config)
)
# start listening for incoming events over websocket
spc.start()
return True
|
import logging
from pycmus import exceptions, remote
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "cmus"
DEFAULT_PORT = 3000
SUPPORT_CMUS = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_SET
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_PLAY_MEDIA
| SUPPORT_SEEK
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Inclusive(CONF_HOST, "remote"): cv.string,
vol.Inclusive(CONF_PASSWORD, "remote"): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discover_info=None):
"""Set up the CMUS platform."""
host = config.get(CONF_HOST)
password = config.get(CONF_PASSWORD)
port = config[CONF_PORT]
name = config[CONF_NAME]
try:
cmus_remote = CmusDevice(host, password, port, name)
except exceptions.InvalidPassword:
_LOGGER.error("The provided password was rejected by cmus")
return False
add_entities([cmus_remote], True)
class CmusDevice(MediaPlayerEntity):
"""Representation of a running cmus."""
# pylint: disable=no-member
def __init__(self, server, password, port, name):
"""Initialize the CMUS device."""
if server:
self.cmus = remote.PyCmus(server=server, password=password, port=port)
auto_name = f"cmus-{server}"
else:
self.cmus = remote.PyCmus()
auto_name = "cmus-local"
self._name = name or auto_name
self.status = {}
def update(self):
"""Get the latest data and update the state."""
status = self.cmus.get_status_dict()
if not status:
_LOGGER.warning("Received no status from cmus")
else:
self.status = status
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the media state."""
if self.status.get("status") == "playing":
return STATE_PLAYING
if self.status.get("status") == "paused":
return STATE_PAUSED
return STATE_OFF
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.status.get("file")
@property
    def media_content_type(self):
"""Content type of the current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
return self.status.get("duration")
@property
def media_title(self):
"""Title of current playing media."""
return self.status["tag"].get("title")
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
return self.status["tag"].get("artist")
@property
def media_track(self):
"""Track number of current playing media, music track only."""
return self.status["tag"].get("tracknumber")
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
return self.status["tag"].get("album")
@property
def media_album_artist(self):
"""Album artist of current playing media, music track only."""
return self.status["tag"].get("albumartist")
@property
def volume_level(self):
"""Return the volume level."""
left = self.status["set"].get("vol_left")[0]
right = self.status["set"].get("vol_right")[0]
if left != right:
volume = float(left + right) / 2
else:
volume = left
return int(volume) / 100
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_CMUS
def turn_off(self):
"""Service to send the CMUS the command to stop playing."""
self.cmus.player_stop()
def turn_on(self):
"""Service to send the CMUS the command to start playing."""
self.cmus.player_play()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.cmus.set_volume(int(volume * 100))
def volume_up(self):
"""Set the volume up."""
left = self.status["set"].get("vol_left")
right = self.status["set"].get("vol_right")
if left != right:
current_volume = float(left + right) / 2
else:
current_volume = left
if current_volume <= 100:
self.cmus.set_volume(int(current_volume) + 5)
def volume_down(self):
"""Set the volume down."""
left = self.status["set"].get("vol_left")
right = self.status["set"].get("vol_right")
if left != right:
current_volume = float(left + right) / 2
else:
current_volume = left
if current_volume <= 100:
self.cmus.set_volume(int(current_volume) - 5)
def play_media(self, media_type, media_id, **kwargs):
"""Send the play command."""
if media_type in [MEDIA_TYPE_MUSIC, MEDIA_TYPE_PLAYLIST]:
self.cmus.player_play_file(media_id)
else:
_LOGGER.error(
"Invalid media type %s. Only %s and %s are supported",
media_type,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
)
def media_pause(self):
"""Send the pause command."""
self.cmus.player_pause()
def media_next_track(self):
"""Send next track command."""
self.cmus.player_next()
def media_previous_track(self):
"""Send next track command."""
self.cmus.player_prev()
def media_seek(self, position):
"""Send seek command."""
self.cmus.seek(position)
def media_play(self):
"""Send the play command."""
self.cmus.player_play()
def media_stop(self):
"""Send the stop command."""
self.cmus.stop()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan import utils
from compare_gan.gans import ops
import gin
import tensorflow as tf
@gin.configurable
def no_penalty():
return tf.constant(0.0)
@gin.configurable(whitelist=[])
def dragan_penalty(discriminator, x, y, is_training):
"""Returns the DRAGAN gradient penalty.
Args:
discriminator: Instance of `AbstractDiscriminator`.
x: Samples from the true distribution, shape [bs, h, w, channels].
y: Encoded class embedding for the samples. None for unsupervised models.
    is_training: boolean, are we in train or eval mode.
Returns:
A tensor with the computed penalty.
"""
with tf.name_scope("dragan_penalty"):
_, var = tf.nn.moments(x, axes=list(range(len(x.get_shape()))))
std = tf.sqrt(var)
x_noisy = x + std * (ops.random_uniform(x.shape) - 0.5)
x_noisy = tf.clip_by_value(x_noisy, 0.0, 1.0)
logits = discriminator(x_noisy, y=y, is_training=is_training, reuse=True)[1]
gradients = tf.gradients(logits, [x_noisy])[0]
slopes = tf.sqrt(0.0001 + tf.reduce_sum(
tf.square(gradients), reduction_indices=[1, 2, 3]))
gradient_penalty = tf.reduce_mean(tf.square(slopes - 1.0))
return gradient_penalty
@gin.configurable(whitelist=[])
def wgangp_penalty(discriminator, x, x_fake, y, is_training):
"""Returns the WGAN gradient penalty.
Args:
discriminator: Instance of `AbstractDiscriminator`.
x: samples from the true distribution, shape [bs, h, w, channels].
x_fake: samples from the fake distribution, shape [bs, h, w, channels].
y: Encoded class embedding for the samples. None for unsupervised models.
    is_training: boolean, are we in train or eval mode.
Returns:
A tensor with the computed penalty.
"""
with tf.name_scope("wgangp_penalty"):
alpha = ops.random_uniform(shape=[x.shape[0].value, 1, 1, 1], name="alpha")
interpolates = x + alpha * (x_fake - x)
logits = discriminator(
interpolates, y=y, is_training=is_training, reuse=True)[1]
gradients = tf.gradients(logits, [interpolates])[0]
slopes = tf.sqrt(0.0001 + tf.reduce_sum(
tf.square(gradients), reduction_indices=[1, 2, 3]))
gradient_penalty = tf.reduce_mean(tf.square(slopes - 1.0))
return gradient_penalty
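# Both penalties above implement the usual gradient-penalty form
#   E[(||grad_x_hat D(x_hat)||_2 - 1)^2]
# where x_hat is a noisy real sample (DRAGAN) or a random interpolation between
# real and fake samples (WGAN-GP); the 0.0001 term inside the sqrt only
# stabilizes its gradient near zero.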
@gin.configurable(whitelist=[])
def l2_penalty(discriminator):
"""Returns the L2 penalty for each matrix/vector excluding biases.
Assumes a specific tensor naming followed throughout the compare_gan library.
We penalize all fully connected, conv2d, and deconv2d layers.
Args:
discriminator: Instance of `AbstractDiscriminator`.
Returns:
A tensor with the computed penalty.
"""
with tf.name_scope("l2_penalty"):
d_weights = [v for v in discriminator.trainable_variables
if v.name.endswith("/kernel:0")]
return tf.reduce_mean(
[tf.nn.l2_loss(i) for i in d_weights], name="l2_penalty")
@gin.configurable("penalty", whitelist=["fn"])
def get_penalty_loss(fn=no_penalty, **kwargs):
"""Returns the penalty loss."""
return utils.call_with_accepted_args(fn, **kwargs)
|
from urllib.parse import urlencode
from django import forms
from django.contrib.auth.decorators import permission_required
from django.contrib.sites.models import Site
from django.shortcuts import redirect
from django.template.defaultfilters import slugify
from django.urls import reverse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.utils.encoding import smart_str
from django.utils.html import linebreaks
from django.views.generic.base import View
from zinnia.managers import DRAFT
from zinnia.managers import PUBLISHED
from zinnia.models.entry import Entry
from zinnia.settings import MARKUP_LANGUAGE
class QuickEntryForm(forms.ModelForm):
"""
Form for posting an entry quickly.
"""
class Meta:
model = Entry
exclude = ('comment_count',
'pingback_count',
'trackback_count')
class QuickEntry(View):
"""
View handling the quick post of a short Entry.
"""
@method_decorator(permission_required('zinnia.add_entry'))
def dispatch(self, *args, **kwargs):
"""
Decorate the view dispatcher with permission_required.
"""
return super(QuickEntry, self).dispatch(*args, **kwargs)
def get(self, request, *args, **kwargs):
"""
        GET only does a redirect to the admin page for adding an entry.
"""
return redirect('admin:zinnia_entry_add')
def post(self, request, *args, **kwargs):
"""
        Handle the data for posting a quick entry,
and redirect to the admin in case of error or
to the entry's page in case of success.
"""
now = timezone.now()
data = {
'title': request.POST.get('title'),
'slug': slugify(request.POST.get('title')),
'status': DRAFT if 'save_draft' in request.POST else PUBLISHED,
'sites': [Site.objects.get_current().pk],
'authors': [request.user.pk],
'content_template': 'zinnia/_entry_detail.html',
'detail_template': 'entry_detail.html',
'publication_date': now,
'creation_date': now,
'last_update': now,
'content': request.POST.get('content'),
'tags': request.POST.get('tags')}
form = QuickEntryForm(data)
if form.is_valid():
form.instance.content = self.htmlize(form.cleaned_data['content'])
entry = form.save()
return redirect(entry)
data = {'title': smart_str(request.POST.get('title', '')),
'content': smart_str(self.htmlize(
request.POST.get('content', ''))),
'tags': smart_str(request.POST.get('tags', '')),
'slug': slugify(request.POST.get('title', '')),
'authors': request.user.pk,
'sites': Site.objects.get_current().pk}
return redirect('%s?%s' % (reverse('admin:zinnia_entry_add'),
urlencode(data)))
def htmlize(self, content):
"""
Convert to HTML the content if the MARKUP_LANGUAGE
is set to HTML to optimize the rendering and avoid
ugly effect in WYMEditor.
"""
if MARKUP_LANGUAGE == 'html':
return linebreaks(content)
return content
|
import pytest
from moto import mock_iam, mock_sts
from lemur.tests.vectors import EXTERNAL_VALID_STR, SAN_CERT_KEY
def test_get_name_from_arn():
from lemur.plugins.lemur_aws.iam import get_name_from_arn
arn = "arn:aws:iam::123456789012:server-certificate/tttt2.netflixtest.net-NetflixInc-20150624-20150625"
assert (
get_name_from_arn(arn) == "tttt2.netflixtest.net-NetflixInc-20150624-20150625"
)
@pytest.mark.skipif(
True, reason="this fails because moto is not currently returning what boto does"
)
@mock_sts()
@mock_iam()
def test_get_all_server_certs(app):
from lemur.plugins.lemur_aws.iam import upload_cert, get_all_certificates
upload_cert("123456789012", "testCert", EXTERNAL_VALID_STR, SAN_CERT_KEY)
certs = get_all_certificates("123456789012")
assert len(certs) == 1
|
from gmusicapi import Musicmanager
def authenticate():
"""Make an instance of the api and attempts to authenticate the user.
Return the authenticated api.
"""
# We are uploading and then downloading so we want Musicmanager
api = Musicmanager()
# Attempt to authenticate and log in
logged_in = api.login()
# If login() returns false, you have not performed oauth yet, or did not
# write your credentials to your disk. Using oauth allows authentication
# without providing plaintext credentials to the application
if not logged_in:
print('No oauth credentials found, please authenticate your account')
# Performs oauth and stores generated credentials to Appdirs
# 'user_data_dir' by default. oauth only needs to be performed once per
# machine if the credentials are stored, which is the default behavior.
authenticated = api.perform_oauth(open_browser=True)
else:
print('Successfully logged in.\n')
return api
def demonstrate():
""" Demonstrate some api features. """
api = authenticate()
# Demonstrate upload feature.
# Create a list of one or more file paths of the mp3s you would like
# to upload
filepaths = []
filepaths.append('./song1.mp3')
# Upload an mp3 to your library. upload() returns a tuple of information
# about the success or failure of uploads
print("Beginning upload...\n")
uploaded = api.upload(filepaths)
# Print all successfully uploaded songs
if len(uploaded[0]) > 0:
print("Successfully uploaded:")
i = 1
for key in uploaded[0]:
print("%d. %s" % (i, key))
i += 1
# Print all unsuccessfully uploaded songs and a description of why
# songs weren't uploaded
if len(uploaded[2]) == 0:
print("\nAll songs successfully uploaded.")
else:
print("Not all songs were successfully uploaded:")
i = 1
for key in uploaded[2]:
print("%d. %s not uploaded: %s" % (i, key, uploaded[2][key]))
i += 1
# Demonstrate download feature
# Get information about songs previously uploaded that are available
# to be downloaded
uploaded_songs = api.get_uploaded_songs()
if len(uploaded_songs) == 0:
print("There are no songs currently available for download")
else:
# Print songs that are available for download and store their ids
# so we can download them
song_ids = []
print("\nThe following songs are available for download")
for i in range(len(uploaded_songs)):
song_ids.append(uploaded_songs[i]['id'])
print("%d. %s" % (i+1, uploaded_songs[i]['title']))
# Download uploaded songs from your library
print("\nBeginning download...")
for i in range(len(song_ids)):
filename, audio = api.download_song(song_ids[i])
# Write song to disk
with open(filename, 'wb') as f:
f.write(audio)
print("%d. Written to ./%s" % (i + 1, filename))
print("\nDownload complete.")
# It's good practice to logout when finished
api.logout()
if __name__ == '__main__':
demonstrate()
|
import os.path as op
from mne.filter import next_fast_len
import mne
print(__doc__)
data_path = mne.datasets.opm.data_path()
subject = 'OPM_sample'
subjects_dir = op.join(data_path, 'subjects')
bem_dir = op.join(subjects_dir, subject, 'bem')
bem_fname = op.join(subjects_dir, subject, 'bem',
subject + '-5120-5120-5120-bem-sol.fif')
src_fname = op.join(bem_dir, '%s-oct6-src.fif' % subject)
vv_fname = data_path + '/MEG/SQUID/SQUID_resting_state.fif'
vv_erm_fname = data_path + '/MEG/SQUID/SQUID_empty_room.fif'
vv_trans_fname = data_path + '/MEG/SQUID/SQUID-trans.fif'
opm_fname = data_path + '/MEG/OPM/OPM_resting_state_raw.fif'
opm_erm_fname = data_path + '/MEG/OPM/OPM_empty_room_raw.fif'
opm_trans_fname = None
opm_coil_def_fname = op.join(data_path, 'MEG', 'OPM', 'coil_def.dat')
##############################################################################
# Load data, resample. We will store the raw objects in dicts with entries
# "vv" and "opm" to simplify housekeeping and looping later.
raws = dict()
raw_erms = dict()
new_sfreq = 90. # Nyquist frequency (45 Hz) < line noise freq (50 Hz)
raws['vv'] = mne.io.read_raw_fif(vv_fname, verbose='error') # ignore naming
raws['vv'].load_data().resample(new_sfreq)
raws['vv'].info['bads'] = ['MEG2233', 'MEG1842']
raw_erms['vv'] = mne.io.read_raw_fif(vv_erm_fname, verbose='error')
raw_erms['vv'].load_data().resample(new_sfreq)
raw_erms['vv'].info['bads'] = ['MEG2233', 'MEG1842']
raws['opm'] = mne.io.read_raw_fif(opm_fname)
raws['opm'].load_data().resample(new_sfreq)
raw_erms['opm'] = mne.io.read_raw_fif(opm_erm_fname)
raw_erms['opm'].load_data().resample(new_sfreq)
# Make sure our assumptions later hold
assert raws['opm'].info['sfreq'] == raws['vv'].info['sfreq']
##############################################################################
# Do some minimal artifact rejection just for VectorView data
titles = dict(vv='VectorView', opm='OPM')
ssp_ecg, _ = mne.preprocessing.compute_proj_ecg(
raws['vv'], tmin=-0.1, tmax=0.1, n_grad=1, n_mag=1)
raws['vv'].add_proj(ssp_ecg, remove_existing=True)
# due to how compute_proj_eog works, it keeps the old projectors, so
# the output contains both projector types (and also the original empty-room
# projectors)
ssp_ecg_eog, _ = mne.preprocessing.compute_proj_eog(
raws['vv'], n_grad=1, n_mag=1, ch_name='MEG0112')
raws['vv'].add_proj(ssp_ecg_eog, remove_existing=True)
raw_erms['vv'].add_proj(ssp_ecg_eog)
fig = mne.viz.plot_projs_topomap(raws['vv'].info['projs'][-4:],
info=raws['vv'].info)
fig.suptitle(titles['vv'])
fig.subplots_adjust(0.05, 0.05, 0.95, 0.85)
##############################################################################
# Explore data
kinds = ('vv', 'opm')
n_fft = next_fast_len(int(round(4 * new_sfreq)))
print('Using n_fft=%d (%0.1f sec)' % (n_fft, n_fft / raws['vv'].info['sfreq']))
for kind in kinds:
fig = raws[kind].plot_psd(n_fft=n_fft, proj=True)
fig.suptitle(titles[kind])
fig.subplots_adjust(0.1, 0.1, 0.95, 0.85)
##############################################################################
# Alignment and forward
# ---------------------
# Here we use a reduced size source space (oct5) just for speed
src = mne.setup_source_space(
subject, 'oct5', add_dist=False, subjects_dir=subjects_dir)
# This line removes source-to-source distances that we will not need.
# We only do it here to save a bit of memory; in general this is not required.
del src[0]['dist'], src[1]['dist']
bem = mne.read_bem_solution(bem_fname)
fwd = dict()
# check alignment and generate forward for VectorView
kwargs = dict(azimuth=0, elevation=90, distance=0.6, focalpoint=(0., 0., 0.))
fig = mne.viz.plot_alignment(
raws['vv'].info, trans=vv_trans_fname, subject=subject,
subjects_dir=subjects_dir, dig=True, coord_frame='mri',
surfaces=('head', 'white'))
mne.viz.set_3d_view(figure=fig, **kwargs)
fwd['vv'] = mne.make_forward_solution(
raws['vv'].info, vv_trans_fname, src, bem, eeg=False, verbose=True)
##############################################################################
# And for OPM:
with mne.use_coil_def(opm_coil_def_fname):
fig = mne.viz.plot_alignment(
raws['opm'].info, trans=opm_trans_fname, subject=subject,
subjects_dir=subjects_dir, dig=False, coord_frame='mri',
surfaces=('head', 'white'))
mne.viz.set_3d_view(figure=fig, **kwargs)
fwd['opm'] = mne.make_forward_solution(
raws['opm'].info, opm_trans_fname, src, bem, eeg=False, verbose=True)
del src, bem
##############################################################################
# Compute and apply inverse to PSD estimated using multitaper + Welch.
# Group into frequency bands, then normalize each source point and sensor
# independently. This makes the value of each sensor point and source location
# in each frequency band the percentage of the PSD accounted for by that band.
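# In formula form (an illustrative restatement of the computation below, not an
# MNE API): for a frequency band B and a sensor or source location x,
#     value(x, B) = 100 * sum_{f in B} PSD(x, f) / sum_{all f} PSD(x, f)
# which is what the topo_norm / stc_norm divisions compute.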
freq_bands = dict(
delta=(2, 4), theta=(5, 7), alpha=(8, 12), beta=(15, 29), gamma=(30, 45))
topos = dict(vv=dict(), opm=dict())
stcs = dict(vv=dict(), opm=dict())
snr = 3.
lambda2 = 1. / snr ** 2
for kind in kinds:
noise_cov = mne.compute_raw_covariance(raw_erms[kind])
inverse_operator = mne.minimum_norm.make_inverse_operator(
raws[kind].info, forward=fwd[kind], noise_cov=noise_cov, verbose=True)
stc_psd, sensor_psd = mne.minimum_norm.compute_source_psd(
raws[kind], inverse_operator, lambda2=lambda2,
n_fft=n_fft, dB=False, return_sensor=True, verbose=True)
topo_norm = sensor_psd.data.sum(axis=1, keepdims=True)
stc_norm = stc_psd.sum() # same operation on MNE object, sum across freqs
# Normalize each source point by the total power across freqs
for band, limits in freq_bands.items():
data = sensor_psd.copy().crop(*limits).data.sum(axis=1, keepdims=True)
topos[kind][band] = mne.EvokedArray(
100 * data / topo_norm, sensor_psd.info)
stcs[kind][band] = \
100 * stc_psd.copy().crop(*limits).sum() / stc_norm.data
del inverse_operator
del fwd, raws, raw_erms
###############################################################################
# Now we can make some plots of each frequency band. Note that the OPM head
# coverage is only over right motor cortex, so only localization
# of beta is likely to be worthwhile.
#
# Theta
# -----
def plot_band(kind, band):
"""Plot activity within a frequency band on the subject's brain."""
title = "%s %s\n(%d-%d Hz)" % ((titles[kind], band,) + freq_bands[band])
    fig = topos[kind][band].plot_topomap(
times=0., scalings=1., cbar_fmt='%0.1f', vmin=0, cmap='inferno',
time_format=title)
brain = stcs[kind][band].plot(
subject=subject, subjects_dir=subjects_dir, views='cau', hemi='both',
time_label=title, title=title, colormap='inferno',
time_viewer=False, show_traces=False,
clim=dict(kind='percent', lims=(70, 85, 99)), smoothing_steps=10)
brain.show_view(dict(azimuth=0, elevation=0), roll=0)
return fig, brain
fig_theta, brain_theta = plot_band('vv', 'theta')
###############################################################################
# Alpha
# -----
fig_alpha, brain_alpha = plot_band('vv', 'alpha')
###############################################################################
# Beta
# ----
# Here we also show OPM data, which shows a profile similar to the VectorView
# data beneath the sensors. VectorView first:
fig_beta, brain_beta = plot_band('vv', 'beta')
###############################################################################
# Then OPM:
fig_beta_opm, brain_beta_opm = plot_band('opm', 'beta')
###############################################################################
# Gamma
# -----
fig_gamma, brain_gamma = plot_band('vv', 'gamma')
###############################################################################
# References
# ----------
# .. [1] Tadel F, Baillet S, Mosher JC, Pantazis D, Leahy RM.
# Brainstorm: A User-Friendly Application for MEG/EEG Analysis.
# Computational Intelligence and Neuroscience, vol. 2011, Article ID
# 879716, 13 pages, 2011. doi:10.1155/2011/879716
|
import os
import pytest
from molecule import config
from molecule import platforms
from molecule import scenario
from molecule import state
from molecule import util
from molecule.dependency import ansible_galaxy
from molecule.dependency import gilt
from molecule.dependency import shell
from molecule.driver import azure
from molecule.driver import delegated
from molecule.driver import digitalocean
from molecule.driver import docker
from molecule.driver import ec2
from molecule.driver import gce
from molecule.driver import lxc
from molecule.driver import lxd
from molecule.driver import openstack
from molecule.driver import vagrant
from molecule.lint import yamllint
from molecule.provisioner import ansible
from molecule.verifier import goss
from molecule.verifier import inspec
from molecule.verifier import testinfra
def test_molecule_file_private_member(molecule_file_fixture, config_instance):
assert molecule_file_fixture == config_instance.molecule_file
def test_args_member(config_instance):
assert {} == config_instance.args
def test_command_args_member(config_instance):
x = {'subcommand': 'test'}
assert x == config_instance.command_args
def test_debug_property(config_instance):
assert not config_instance.debug
def test_env_file_property(config_instance):
config_instance.args = {'env_file': '.env'}
result = config_instance.env_file
assert util.abs_path(config_instance.args.get('env_file')) == result
def test_subcommand_property(config_instance):
assert 'test' == config_instance.subcommand
def test_action_property(config_instance):
assert config_instance.action is None
def test_action_setter(config_instance):
config_instance.action = 'foo'
assert 'foo' == config_instance.action
def test_init_calls_validate(patched_config_validate, config_instance):
patched_config_validate.assert_called_once_with()
def test_project_directory_property(config_instance):
assert os.getcwd() == config_instance.project_directory
def test_molecule_directory_property(config_instance):
x = os.path.join(os.getcwd(), 'molecule')
assert x == config_instance.molecule_directory
def test_dependency_property(config_instance):
assert isinstance(config_instance.dependency, ansible_galaxy.AnsibleGalaxy)
@pytest.fixture
def _config_dependency_gilt_section_data():
return {
'dependency': {
'name': 'gilt'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_dependency_gilt_section_data'], indirect=True)
def test_dependency_property_is_gilt(config_instance):
assert isinstance(config_instance.dependency, gilt.Gilt)
@pytest.fixture
def _config_dependency_shell_section_data():
return {
'dependency': {
'name': 'shell',
'command': 'bin/command',
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_dependency_shell_section_data'],
indirect=True)
def test_dependency_property_is_shell(config_instance):
assert isinstance(config_instance.dependency, shell.Shell)
def test_driver_property(config_instance):
assert isinstance(config_instance.driver, docker.Docker)
@pytest.fixture
def _config_driver_azure_section_data():
return {
'driver': {
'name': 'azure'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_azure_section_data'], indirect=True)
def test_driver_property_is_azure(config_instance):
assert isinstance(config_instance.driver, azure.Azure)
@pytest.fixture
def _config_driver_delegated_section_data():
return {
'driver': {
'name': 'delegated',
'options': {
'managed': False,
},
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_delegated_section_data'],
indirect=True)
def test_driver_property_is_delegated(config_instance):
assert isinstance(config_instance.driver, delegated.Delegated)
@pytest.fixture
def _config_driver_digitalocean_section_data():
return {
'driver': {
'name': 'digitalocean'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_digitalocean_section_data'],
indirect=True)
def test_driver_property_is_digitalocean(config_instance):
assert isinstance(config_instance.driver, digitalocean.DigitalOcean)
@pytest.fixture
def _config_driver_ec2_section_data():
return {
'driver': {
'name': 'ec2'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_ec2_section_data'], indirect=True)
def test_driver_property_is_ec2(config_instance):
assert isinstance(config_instance.driver, ec2.EC2)
@pytest.fixture
def _config_driver_gce_section_data():
return {
'driver': {
'name': 'gce'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_gce_section_data'], indirect=True)
def test_driver_property_is_gce(config_instance):
assert isinstance(config_instance.driver, gce.GCE)
@pytest.fixture
def _config_driver_lxc_section_data():
return {
'driver': {
'name': 'lxc'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_lxc_section_data'], indirect=True)
def test_driver_property_is_lxc(config_instance):
assert isinstance(config_instance.driver, lxc.LXC)
@pytest.fixture
def _config_driver_lxd_section_data():
return {
'driver': {
'name': 'lxd'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_lxd_section_data'], indirect=True)
def test_driver_property_is_lxd(config_instance):
assert isinstance(config_instance.driver, lxd.LXD)
@pytest.fixture
def _config_driver_openstack_section_data():
return {
'driver': {
'name': 'openstack'
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_openstack_section_data'],
indirect=True)
def test_driver_property_is_openstack(config_instance):
assert isinstance(config_instance.driver, openstack.Openstack)
@pytest.fixture
def _config_driver_vagrant_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'virtualbox',
},
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_driver_vagrant_section_data'], indirect=True)
def test_driver_property_is_vagrant(config_instance):
assert isinstance(config_instance.driver, vagrant.Vagrant)
def test_drivers_property(config_instance):
x = [
'azure',
'delegated',
'digitalocean',
'docker',
'ec2',
'gce',
'linode',
'lxc',
'lxd',
'openstack',
'vagrant',
]
assert x == config_instance.drivers
def test_env(config_instance):
config_instance.args = {'env_file': '.env'}
x = {
'MOLECULE_DEBUG': 'False',
'MOLECULE_FILE': config_instance.molecule_file,
'MOLECULE_ENV_FILE': util.abs_path(
config_instance.args.get('env_file')),
'MOLECULE_INVENTORY_FILE': config_instance.provisioner.inventory_file,
'MOLECULE_EPHEMERAL_DIRECTORY':
config_instance.scenario.ephemeral_directory,
'MOLECULE_SCENARIO_DIRECTORY': config_instance.scenario.directory,
'MOLECULE_PROJECT_DIRECTORY': config_instance.project_directory,
'MOLECULE_INSTANCE_CONFIG': config_instance.driver.instance_config,
'MOLECULE_DEPENDENCY_NAME': 'galaxy',
'MOLECULE_DRIVER_NAME': 'docker',
'MOLECULE_LINT_NAME': 'yamllint',
'MOLECULE_PROVISIONER_NAME': 'ansible',
'MOLECULE_PROVISIONER_LINT_NAME': 'ansible-lint',
'MOLECULE_SCENARIO_NAME': 'default',
'MOLECULE_VERIFIER_NAME': 'testinfra',
'MOLECULE_VERIFIER_LINT_NAME': 'flake8',
'MOLECULE_VERIFIER_TEST_DIRECTORY': config_instance.verifier.directory,
}
assert x == config_instance.env
def test_lint_property(config_instance):
assert isinstance(config_instance.lint, yamllint.Yamllint)
def test_platforms_property(config_instance):
assert isinstance(config_instance.platforms, platforms.Platforms)
def test_provisioner_property(config_instance):
assert isinstance(config_instance.provisioner, ansible.Ansible)
def test_scenario_property(config_instance):
assert isinstance(config_instance.scenario, scenario.Scenario)
def test_state_property(config_instance):
assert isinstance(config_instance.state, state.State)
def test_verifier_property(config_instance):
assert isinstance(config_instance.verifier, testinfra.Testinfra)
@pytest.fixture
def _config_verifier_inspec_section_data():
return {
'verifier': {
'name': 'inspec',
'lint': {
'name': 'rubocop',
},
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_verifier_inspec_section_data'], indirect=True)
def test_verifier_property_is_inspec(config_instance):
assert isinstance(config_instance.verifier, inspec.Inspec)
@pytest.fixture
def _config_verifier_goss_section_data():
return {
'verifier': {
'name': 'goss',
'lint': {
'name': 'yamllint',
},
},
}
@pytest.mark.parametrize(
'config_instance', ['_config_verifier_goss_section_data'], indirect=True)
def test_verifier_property_is_goss(config_instance):
assert isinstance(config_instance.verifier, goss.Goss)
def test_verifiers_property(config_instance):
x = ['goss', 'inspec', 'testinfra']
assert x == config_instance.verifiers
def test_get_driver_name_from_state_file(config_instance):
config_instance.state.change_state('driver', 'state-driver')
assert 'state-driver' == config_instance._get_driver_name()
def test_get_driver_name_from_cli(config_instance):
config_instance.command_args = {'driver_name': 'cli-driver'}
assert 'cli-driver' == config_instance._get_driver_name()
def test_get_driver_name(config_instance):
assert 'docker' == config_instance._get_driver_name()
def test_get_driver_name_raises_when_different_driver_used(
patched_logger_critical, config_instance):
config_instance.state.change_state('driver', 'foo')
config_instance.command_args = {'driver_name': 'bar'}
with pytest.raises(SystemExit) as e:
config_instance._get_driver_name()
assert 1 == e.value.code
msg = ("Instance(s) were created with the 'foo' driver, "
"but the subcommand is using 'bar' driver.")
patched_logger_critical.assert_called_once_with(msg)
def test_get_config(config_instance):
assert isinstance(config_instance._get_config(), dict)
def test_get_config_with_base_config(config_instance):
config_instance.args = {'base_config': './foo.yml'}
contents = {'foo': 'bar'}
util.write_file(config_instance.args['base_config'],
util.safe_dump(contents))
result = config_instance._get_config()
assert result['foo'] == 'bar'
def test_reget_config(config_instance):
assert isinstance(config_instance._reget_config(), dict)
def test_interpolate(patched_logger_critical, config_instance):
string = 'foo: $HOME'
x = 'foo: {}'.format(os.environ['HOME'])
assert x == config_instance._interpolate(string, os.environ, None)
def test_interpolate_raises_on_failed_interpolation(patched_logger_critical,
config_instance):
string = '$6$8I5Cfmpr$kGZB'
with pytest.raises(SystemExit) as e:
config_instance._interpolate(string, os.environ, None)
assert 1 == e.value.code
msg = ("parsing config file '{}'.\n\n"
'Invalid placeholder in string: line 1, col 1\n'
'$6$8I5Cfmpr$kGZB').format(config_instance.molecule_file)
patched_logger_critical.assert_called_once_with(msg)
def test_get_defaults(config_instance, mocker):
mocker.patch.object(config_instance, 'molecule_file',
'/path/to/test_scenario_name/molecule.yml')
defaults = config_instance._get_defaults()
assert defaults['scenario']['name'] == 'test_scenario_name'
def test_preflight(mocker, config_instance, patched_logger_info):
m = mocker.patch('molecule.model.schema_v2.pre_validate')
m.return_value = None
config_instance._preflight('foo')
m.assert_called_once_with('foo', os.environ, config.MOLECULE_KEEP_STRING)
def test_preflight_exists_when_validation_fails(
mocker, patched_logger_critical, config_instance):
m = mocker.patch('molecule.model.schema_v2.pre_validate')
m.return_value = 'validation errors'
with pytest.raises(SystemExit) as e:
config_instance._preflight('invalid stream')
assert 1 == e.value.code
msg = 'Failed to validate.\n\nvalidation errors'
patched_logger_critical.assert_called_once_with(msg)
def test_validate(mocker, config_instance, patched_logger_info,
patched_logger_success):
m = mocker.patch('molecule.model.schema_v2.validate')
m.return_value = None
config_instance._validate()
msg = 'Validating schema {}.'.format(config_instance.molecule_file)
patched_logger_info.assert_called_once_with(msg)
m.assert_called_once_with(config_instance.config)
msg = 'Validation completed successfully.'
patched_logger_success.assert_called_once_with(msg)
def test_validate_exists_when_validation_fails(mocker, patched_logger_critical,
config_instance):
m = mocker.patch('molecule.model.schema_v2.validate')
m.return_value = 'validation errors'
with pytest.raises(SystemExit) as e:
config_instance._validate()
assert 1 == e.value.code
msg = 'Failed to validate.\n\nvalidation errors'
patched_logger_critical.assert_called_once_with(msg)
def test_molecule_directory():
assert '/foo/bar/molecule' == config.molecule_directory('/foo/bar')
def test_molecule_file():
assert '/foo/bar/molecule.yml' == config.molecule_file('/foo/bar')
def test_molecule_drivers():
x = [
'azure',
'delegated',
'digitalocean',
'docker',
'ec2',
'gce',
'linode',
'lxc',
'lxd',
'openstack',
'vagrant',
]
assert x == config.molecule_drivers()
def test_molecule_verifiers():
x = ['goss', 'inspec', 'testinfra']
assert x == config.molecule_verifiers()
def test_set_env_from_file(config_instance):
config_instance.args = {'env_file': '.env'}
contents = {
'foo': 'bar',
'BAZ': 'zzyzx',
}
env_file = config_instance.args.get('env_file')
util.write_file(env_file, util.safe_dump(contents))
env = config.set_env_from_file({}, env_file)
assert contents == env
def test_set_env_from_file_returns_original_env_when_env_file_not_found(
config_instance):
env = config.set_env_from_file({}, 'file-not-found')
assert {} == env
|
class MasterNotAvailableException(Exception):
pass
class MasterTemporarilyNotAvailableException(Exception):
pass
class NoSlavesAvailableError(Exception):
pass
class MultipleSlavesForIDError(Exception):
pass
class TaskNotFoundException(Exception):
pass
class FileNotFoundForTaskException(Exception):
pass
class MultipleTasksForIDError(Exception):
pass
class FileDoesNotExist(Exception):
pass
class MissingExecutor(Exception):
pass
class SlaveDoesNotExist(Exception):
pass
class SkipResult(Exception):
pass
|
import unittest
from rosmake import parallel_build
class TestDependencyTracker(unittest.TestCase):
def setUp(self):
self.deps = {}
self.deps1 = {}
self.deps['a'] = ['b', 'c', 'd', 'e']
self.deps1['a'] = ['b']
self.deps['b'] = ['c']
self.deps1['b'] = ['c']
self.deps['d'] = ['c', 'e']
self.deps1['d'] = ['c', 'e']
self.dt = parallel_build.DependencyTracker()
self.dt.load_fake_deps(self.deps, self.deps1)
def test_deps_1(self):
        self.assertEqual(self.deps1['a'], self.dt.get_deps_1('a'))
        self.assertEqual(self.deps1['b'], self.dt.get_deps_1('b'))
        self.assertEqual(self.deps1['d'], self.dt.get_deps_1('d'))
    def test_deps(self):
        self.assertEqual(self.deps['a'], self.dt.get_deps('a'))
        self.assertEqual(self.deps['b'], self.dt.get_deps('b'))
        self.assertEqual(self.deps['d'], self.dt.get_deps('d'))
    def test_not_package(self):
        self.assertEqual([], self.dt.get_deps('This is not a valid package name'))
        self.assertEqual([], self.dt.get_deps_1('This is not a valid package name'))
class TestBuildQueue(unittest.TestCase):
def setUp(self):
deps = {}
deps1 = {}
deps1['a'] = ['b']
deps['a'] = ['b', 'c', 'd', 'e', 'f']
deps1['b'] = ['c']
deps['b'] = ['c', 'd', 'e', 'f']
deps1['c'] = ['d']
deps['c'] = ['d', 'e', 'f']
deps1['d'] = ['e']
deps['d'] = ['e', 'f']
deps['e'] = ['f']
deps1['e'] = ['f']
deps['f'] = []
deps1['f'] = []
self.serial_tracker = parallel_build.DependencyTracker()
self.serial_tracker.load_fake_deps(deps, deps1)
deps = {}
deps1 = {}
deps['a'] = ['b', 'c', 'd', 'e', 'f']
deps1['a'] = ['b', 'c', 'd', 'e', 'f']
deps['b'] = []
deps1['b'] = []
deps['c'] = []
deps1['c'] = []
deps['d'] = []
deps1['d'] = []
deps['e'] = []
deps1['e'] = []
deps['f'] = []
deps1['f'] = []
self.parallel_tracker = parallel_build.DependencyTracker()
self.parallel_tracker.load_fake_deps(deps, deps1)
# full queue
def test_full_build(self):
bq = parallel_build.BuildQueue(['a', 'b', 'c', 'd', 'e', 'f'], self.serial_tracker)
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('f', bq.get_valid_package())
self.assertEqual(0, len(bq.built))
bq.return_built('f')
self.assertEqual(1, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('e', bq.get_valid_package())
bq.return_built('e')
self.assertEqual(2, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('d', bq.get_valid_package())
bq.return_built('d')
self.assertEqual(3, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('c', bq.get_valid_package())
bq.return_built('c')
self.assertEqual(4, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('b', bq.get_valid_package())
bq.return_built('b')
self.assertEqual(5, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('a', bq.get_valid_package())
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
bq.return_built('a')
self.assertEqual(6, len(bq.built))
self.assertTrue(bq.is_done())
self.assertTrue(bq.succeeded())
# partial build
def test_partial_build(self):
bq = parallel_build.BuildQueue(['d', 'e', 'f'], self.serial_tracker)
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('f', bq.get_valid_package())
self.assertEqual(0, len(bq.built))
bq.return_built('f')
self.assertEqual(1, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('e', bq.get_valid_package())
bq.return_built('e')
self.assertEqual(2, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('d', bq.get_valid_package())
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
bq.return_built('d')
self.assertEqual(3, len(bq.built))
self.assertTrue(bq.is_done())
self.assertTrue(bq.succeeded())
# abort early
def test_abort_early(self):
bq = parallel_build.BuildQueue(['a', 'b', 'c', 'd', 'e', 'f'], self.serial_tracker)
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual(0, len(bq.built))
self.assertEqual('f', bq.get_valid_package())
bq.return_built('f')
self.assertEqual(1, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('e', bq.get_valid_package())
bq.return_built('e')
self.assertEqual(2, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('d', bq.get_valid_package())
bq.return_built('d')
self.assertEqual(3, len(bq.built))
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
bq.stop()
self.assertTrue(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual(None, bq.get_valid_package())
# many parallel
def test_parallel_build(self):
bq = parallel_build.BuildQueue(['a', 'b', 'c', 'd', 'e', 'f'], self.parallel_tracker)
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
dependents = ['b', 'c', 'd', 'e', 'f']
count = 0
total = 6
while len(dependents) > 0:
result = bq.get_valid_package()
done = len(bq.built)
pkgs = bq._total_pkgs
self.assertTrue(result in dependents)
# print result, done, pkgs
dependents.remove(result)
self.assertEqual(count, done)
self.assertEqual(total, pkgs)
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
bq.return_built(result)
count = count + 1
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
self.assertEqual('a', bq.get_valid_package())
self.assertFalse(bq.is_done())
self.assertFalse(bq.succeeded())
bq.return_built('a')
self.assertTrue(bq.is_done())
self.assertTrue(bq.succeeded())
# stalled(future)
|
import json
import requests
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
from nikola.plugins.compile.rest import _align_choice, _align_options_base
class Plugin(RestExtension):
"""Plugin for vimeo reST directive."""
name = "rest_vimeo"
def set_site(self, site):
"""Set Nikola site."""
self.site = site
directives.register_directive('vimeo', Vimeo)
return super().set_site(site)
CODE = """<div class="vimeo-video{align}">
<iframe src="https://player.vimeo.com/video/{vimeo_id}"
width="{width}" height="{height}"
frameborder="0" webkitAllowFullScreen="webkitAllowFullScreen" mozallowfullscreen="mozallowfullscreen" allowFullScreen="allowFullScreen">
</iframe>
</div>
"""
VIDEO_DEFAULT_HEIGHT = 281
VIDEO_DEFAULT_WIDTH = 500
class Vimeo(Directive):
"""reST extension for inserting vimeo embedded videos.
Usage:
.. vimeo:: 20241459
:height: 400
:width: 600
"""
has_content = True
required_arguments = 1
option_spec = {
"width": directives.positive_int,
"height": directives.positive_int,
"align": _align_choice
}
    # set to False to skip querying the vimeo api for the video size
request_size = True
def run(self):
"""Run the vimeo directive."""
self.check_content()
options = {
'vimeo_id': self.arguments[0],
'width': VIDEO_DEFAULT_WIDTH,
'height': VIDEO_DEFAULT_HEIGHT,
}
if self.request_size:
err = self.check_modules()
if err:
return err
self.set_video_size()
options.update(self.options)
if self.options.get('align') in _align_options_base:
options['align'] = ' align-' + self.options['align']
else:
options['align'] = ''
return [nodes.raw('', CODE.format(**options), format='html')]
def check_modules(self):
"""Check modules."""
return None
def set_video_size(self):
"""Set video size."""
# Only need to make a connection if width and height aren't provided
if 'height' not in self.options or 'width' not in self.options:
self.options['height'] = VIDEO_DEFAULT_HEIGHT
self.options['width'] = VIDEO_DEFAULT_WIDTH
if json: # we can attempt to retrieve video attributes from vimeo
try:
url = ('https://vimeo.com/api/v2/video/{0}'
'.json'.format(self.arguments[0]))
data = requests.get(url).text
video_attributes = json.loads(data)[0]
self.options['height'] = video_attributes['height']
self.options['width'] = video_attributes['width']
except Exception:
# fall back to the defaults
pass
def check_content(self):
"""Check if content exists."""
if self.content:
raise self.warning("This directive does not accept content. The "
"'key=value' format for options is deprecated, "
"use ':key: value' instead")
|
import pytest
from homeassistant.const import (
PRESSURE_HPA,
PRESSURE_INHG,
PRESSURE_MBAR,
PRESSURE_PA,
PRESSURE_PSI,
)
import homeassistant.util.pressure as pressure_util
INVALID_SYMBOL = "bob"
VALID_SYMBOL = PRESSURE_PA
def test_convert_same_unit():
"""Test conversion from any unit to same unit."""
assert pressure_util.convert(2, PRESSURE_PA, PRESSURE_PA) == 2
assert pressure_util.convert(3, PRESSURE_HPA, PRESSURE_HPA) == 3
assert pressure_util.convert(4, PRESSURE_MBAR, PRESSURE_MBAR) == 4
assert pressure_util.convert(5, PRESSURE_INHG, PRESSURE_INHG) == 5
def test_convert_invalid_unit():
"""Test exception is thrown for invalid units."""
with pytest.raises(ValueError):
pressure_util.convert(5, INVALID_SYMBOL, VALID_SYMBOL)
with pytest.raises(ValueError):
pressure_util.convert(5, VALID_SYMBOL, INVALID_SYMBOL)
def test_convert_nonnumeric_value():
"""Test exception is thrown for nonnumeric type."""
with pytest.raises(TypeError):
pressure_util.convert("a", PRESSURE_HPA, PRESSURE_INHG)
def test_convert_from_hpascals():
"""Test conversion from hPA to other units."""
hpascals = 1000
assert pressure_util.convert(hpascals, PRESSURE_HPA, PRESSURE_PSI) == pytest.approx(
14.5037743897
)
assert pressure_util.convert(
hpascals, PRESSURE_HPA, PRESSURE_INHG
) == pytest.approx(29.5299801647)
assert pressure_util.convert(hpascals, PRESSURE_HPA, PRESSURE_PA) == pytest.approx(
100000
)
assert pressure_util.convert(
hpascals, PRESSURE_HPA, PRESSURE_MBAR
) == pytest.approx(1000)
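# Rough derivation of the figures above (illustrative, using standard
# conversion factors): 1000 hPa = 100000 Pa; with 1 psi ≈ 6894.76 Pa and
# 1 inHg ≈ 3386.39 Pa this gives 100000 / 6894.76 ≈ 14.504 psi and
# 100000 / 3386.39 ≈ 29.530 inHg.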
def test_convert_from_inhg():
"""Test conversion from inHg to other units."""
inhg = 30
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_PSI) == pytest.approx(
14.7346266155
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_HPA) == pytest.approx(
1015.9167
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_PA) == pytest.approx(
101591.67
)
assert pressure_util.convert(inhg, PRESSURE_INHG, PRESSURE_MBAR) == pytest.approx(
1015.9167
)
|
import os
from stash.system.shcommon import IN_PYTHONISTA
# check if running on travis
ON_TRAVIS = "TRAVIS" in os.environ
def get_platform():
"""
Return a string describing the UI implementation to use.
:return: platform identifier
:rtype: str
"""
# platform specific UIs
if IN_PYTHONISTA:
return "pythonista"
elif ON_TRAVIS:
return "stub"
# attempt to fall back to tkinter
try:
from six.moves import tkinter
except ImportError:
        # cannot import tkinter; ignore this case.
        # A successful import is handled in the 'else' clause below.
pass
else:
return "tkinter"
# this function has still not returned.
# this means that all UIs tried above failed.
# we raise an error in this case.
raise NotImplementedError("There is no UI implemented for this platform. If you are on a PC, you may be able to fix this by installing tkinter.")
def get_ui_implementation(platform=None):
"""
Return the classes implementing the UI for the platform.
:param platform: identifier describing the platform to get the UI implementation for. Defaults to None, in which case it tries to find the best UI.
:type platform: str
:return: (ShUI, ShSequentialRenderer)
:rtype: tuple of (stash.shui.base.ShBaseUI, stash.shui.base.ShBaseSequentialRenderer)
"""
if platform is None:
platform = get_platform()
if platform == "pythonista":
from .pythonista_ui import ShUI, ShTerminal, ShSequentialRenderer
return (ShUI, ShSequentialRenderer)
elif platform == "stub":
from .stubui import ShUI, ShTerminal, ShSequentialRenderer
return (ShUI, ShSequentialRenderer)
elif platform == "tkinter":
from .tkui import ShUI, ShTerminal, ShSequentialRenderer
return (ShUI, ShSequentialRenderer)
else:
raise NotImplementedError("No UI implemented for platform {}!".format(repr(platform)))
__all__ = ["get_platform", "get_ui_implementation"]
|
from datetime import timedelta
from homeassistant.const import ENERGY_KILO_WATT_HOUR, POWER_WATT, TIME_SECONDS
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
async def test_state(hass):
"""Test integration sensor state."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.power",
"unit": ENERGY_KILO_WATT_HOUR,
"round": 2,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 1, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 1, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
    # Testing a power sensor at 1 kW for 1 hour = 1 kWh
assert round(float(state.state), config["sensor"]["round"]) == 1.0
assert state.attributes.get("unit_of_measurement") == ENERGY_KILO_WATT_HOUR
async def test_trapezoidal(hass):
"""Test integration sensor state."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.power",
"unit": ENERGY_KILO_WATT_HOUR,
"round": 2,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
assert round(float(state.state), config["sensor"]["round"]) == 8.33
assert state.attributes.get("unit_of_measurement") == ENERGY_KILO_WATT_HOUR
async def test_left(hass):
"""Test integration sensor state with left reimann method."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"method": "left",
"source": "sensor.power",
"unit": ENERGY_KILO_WATT_HOUR,
"round": 2,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
assert round(float(state.state), config["sensor"]["round"]) == 7.5
assert state.attributes.get("unit_of_measurement") == ENERGY_KILO_WATT_HOUR
async def test_right(hass):
"""Test integration sensor state with left reimann method."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"method": "right",
"source": "sensor.power",
"unit": ENERGY_KILO_WATT_HOUR,
"round": 2,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 0, {})
await hass.async_block_till_done()
# Testing a power sensor with non-monotonic intervals and values
for time, value in [(20, 10), (30, 30), (40, 5), (50, 0)]:
now = dt_util.utcnow() + timedelta(minutes=time)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, value, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
assert round(float(state.state), config["sensor"]["round"]) == 9.17
assert state.attributes.get("unit_of_measurement") == ENERGY_KILO_WATT_HOUR
async def test_prefix(hass):
"""Test integration sensor state using a power source."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.power",
"round": 2,
"unit_prefix": "k",
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 1000, {"unit_of_measurement": POWER_WATT})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=3600)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(
entity_id, 1000, {"unit_of_measurement": POWER_WATT}, force_update=True
)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
    # Testing a power sensor at 1000 W for 1 hour = 1 kWh
assert round(float(state.state), config["sensor"]["round"]) == 1.0
assert state.attributes.get("unit_of_measurement") == ENERGY_KILO_WATT_HOUR
async def test_suffix(hass):
"""Test integration sensor state using a network counter source."""
config = {
"sensor": {
"platform": "integration",
"name": "integration",
"source": "sensor.bytes_per_second",
"round": 2,
"unit_prefix": "k",
"unit_time": TIME_SECONDS,
}
}
assert await async_setup_component(hass, "sensor", config)
entity_id = config["sensor"]["source"]
hass.states.async_set(entity_id, 1000, {})
await hass.async_block_till_done()
now = dt_util.utcnow() + timedelta(seconds=10)
with patch("homeassistant.util.dt.utcnow", return_value=now):
hass.states.async_set(entity_id, 1000, {}, force_update=True)
await hass.async_block_till_done()
state = hass.states.get("sensor.integration")
assert state is not None
    # Testing a network speed sensor at 1000 bytes/s over 10 s = 10 kbytes
assert round(float(state.state)) == 10
|
import logging
from openzwavemqtt.const import ATTR_LABEL, ATTR_POSITION, ATTR_VALUE
from openzwavemqtt.util.node import get_node_from_manager, set_config_parameter
import voluptuous as vol
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from . import const
_LOGGER = logging.getLogger(__name__)
class ZWaveServices:
"""Class that holds our services ( Zwave Commands) that should be published to hass."""
def __init__(self, hass, manager):
"""Initialize with both hass and ozwmanager objects."""
self._hass = hass
self._manager = manager
@callback
def async_register(self):
"""Register all our services."""
self._hass.services.async_register(
const.DOMAIN,
const.SERVICE_ADD_NODE,
self.async_add_node,
schema=vol.Schema(
{
vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int),
vol.Optional(const.ATTR_SECURE, default=False): vol.Coerce(bool),
}
),
)
self._hass.services.async_register(
const.DOMAIN,
const.SERVICE_REMOVE_NODE,
self.async_remove_node,
schema=vol.Schema(
{vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int)}
),
)
self._hass.services.async_register(
const.DOMAIN,
const.SERVICE_CANCEL_COMMAND,
self.async_cancel_command,
schema=vol.Schema(
{vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int)}
),
)
self._hass.services.async_register(
const.DOMAIN,
const.SERVICE_SET_CONFIG_PARAMETER,
self.async_set_config_parameter,
schema=vol.Schema(
{
vol.Optional(const.ATTR_INSTANCE_ID, default=1): vol.Coerce(int),
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int),
vol.Required(const.ATTR_CONFIG_VALUE): vol.Any(
vol.All(
cv.ensure_list,
[
vol.All(
{
vol.Exclusive(ATTR_LABEL, "bit"): cv.string,
vol.Exclusive(ATTR_POSITION, "bit"): vol.Coerce(
int
),
vol.Required(ATTR_VALUE): bool,
},
cv.has_at_least_one_key(ATTR_LABEL, ATTR_POSITION),
)
],
),
vol.Coerce(int),
bool,
cv.string,
),
}
),
)
@callback
def async_set_config_parameter(self, service):
"""Set a config parameter to a node."""
instance_id = service.data[const.ATTR_INSTANCE_ID]
node_id = service.data[const.ATTR_NODE_ID]
param = service.data[const.ATTR_CONFIG_PARAMETER]
selection = service.data[const.ATTR_CONFIG_VALUE]
        # These function calls may raise an exception, but that's OK because
        # the exception will be shown to the user in the UI
node = get_node_from_manager(self._manager, instance_id, node_id)
payload = set_config_parameter(node, param, selection)
_LOGGER.info(
"Setting configuration parameter %s on Node %s with value %s",
param,
node_id,
payload,
)
@callback
def async_add_node(self, service):
"""Enter inclusion mode on the controller."""
instance_id = service.data[const.ATTR_INSTANCE_ID]
secure = service.data[const.ATTR_SECURE]
instance = self._manager.get_instance(instance_id)
if instance is None:
raise ValueError(f"No OpenZWave Instance with ID {instance_id}")
instance.add_node(secure)
@callback
def async_remove_node(self, service):
"""Enter exclusion mode on the controller."""
instance_id = service.data[const.ATTR_INSTANCE_ID]
instance = self._manager.get_instance(instance_id)
if instance is None:
raise ValueError(f"No OpenZWave Instance with ID {instance_id}")
instance.remove_node()
@callback
def async_cancel_command(self, service):
"""Tell the controller to cancel an add or remove command."""
instance_id = service.data[const.ATTR_INSTANCE_ID]
instance = self._manager.get_instance(instance_id)
if instance is None:
raise ValueError(f"No OpenZWave Instance with ID {instance_id}")
instance.cancel_controller_command()
|
import asyncio
import voluptuous as vol
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.core import CoreState, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.config_entry_oauth2_flow import (
OAuth2Session,
async_get_config_entry_implementation,
)
from homeassistant.helpers.typing import ConfigType
from .const import CONF_AGREEMENT_ID, CONF_MIGRATE, DEFAULT_SCAN_INTERVAL, DOMAIN
from .coordinator import ToonDataUpdateCoordinator
from .oauth2 import register_oauth2_implementations
ENTITY_COMPONENTS = {
BINARY_SENSOR_DOMAIN,
CLIMATE_DOMAIN,
SENSOR_DOMAIN,
SWITCH_DOMAIN,
}
# Validation of the user's configuration
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.deprecated(CONF_SCAN_INTERVAL),
vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.positive_time_period,
}
),
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Toon components."""
if DOMAIN not in config:
return True
register_oauth2_implementations(
hass, config[DOMAIN][CONF_CLIENT_ID], config[DOMAIN][CONF_CLIENT_SECRET]
)
hass.async_create_task(
hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_IMPORT})
)
return True
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Handle migration of a previous version config entry."""
if entry.version == 1:
# There is no usable data in version 1 anymore.
# The integration switched to OAuth and because of this, uses
# different unique identifiers as well.
# Force this by removing the existing entry and trigger a new flow.
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_MIGRATE: entry.entry_id},
)
)
return False
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Toon from a config entry."""
implementation = await async_get_config_entry_implementation(hass, entry)
session = OAuth2Session(hass, entry, implementation)
coordinator = ToonDataUpdateCoordinator(hass, entry=entry, session=session)
await coordinator.toon.activate_agreement(
agreement_id=entry.data[CONF_AGREEMENT_ID]
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = coordinator
# Register device for the Meter Adapter, since it will have no entities.
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={
(DOMAIN, coordinator.data.agreement.agreement_id, "meter_adapter")
},
manufacturer="Eneco",
name="Meter Adapter",
via_device=(DOMAIN, coordinator.data.agreement.agreement_id),
)
# Spin up the platforms
for component in ENTITY_COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
# If Home Assistant is already in a running state, register the webhook
# immediately, else trigger it after Home Assistant has finished starting.
if hass.state == CoreState.running:
await coordinator.register_webhook()
else:
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STARTED, coordinator.register_webhook
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Toon config entry."""
# Remove webhooks registration
await hass.data[DOMAIN][entry.entry_id].unregister_webhook()
# Unload entities for this entry/device.
unload_ok = all(
await asyncio.gather(
*(
hass.config_entries.async_forward_entry_unload(entry, component)
for component in ENTITY_COMPONENTS
)
)
)
# Cleanup
if unload_ok:
del hass.data[DOMAIN][entry.entry_id]
return unload_ok
|
import logging
import sys
from unittest.mock import ANY, Mock, patch
from kombu.log import (
get_logger,
get_loglevel,
safeify_format,
Log,
LogMixin,
setup_logging,
)
class test_get_logger:
def test_when_string(self):
logger = get_logger('foo')
assert logger is logging.getLogger('foo')
h1 = logger.handlers[0]
assert isinstance(h1, logging.NullHandler)
def test_when_logger(self):
logger = get_logger(logging.getLogger('foo'))
h1 = logger.handlers[0]
assert isinstance(h1, logging.NullHandler)
def test_with_custom_handler(self):
logger = logging.getLogger('bar')
handler = logging.NullHandler()
logger.addHandler(handler)
logger = get_logger('bar')
assert logger.handlers[0] is handler
def test_get_loglevel(self):
assert get_loglevel('DEBUG') == logging.DEBUG
assert get_loglevel('ERROR') == logging.ERROR
assert get_loglevel(logging.INFO) == logging.INFO
def test_safe_format():
fmt = 'The %r jumped %x over the %s'
args = ['frog', 'foo', 'elephant']
res = list(safeify_format(fmt, args))
assert [x.strip('u') for x in res] == ["'frog'", 'foo', 'elephant']
class test_LogMixin:
def setup(self):
self.log = Log('Log', Mock())
self.logger = self.log.logger
def test_debug(self):
self.log.debug('debug')
self.logger.log.assert_called_with(logging.DEBUG, 'Log - debug')
def test_info(self):
self.log.info('info')
self.logger.log.assert_called_with(logging.INFO, 'Log - info')
def test_warning(self):
self.log.warn('warning')
self.logger.log.assert_called_with(logging.WARN, 'Log - warning')
def test_error(self):
self.log.error('error', exc_info='exc')
self.logger.log.assert_called_with(
logging.ERROR, 'Log - error', exc_info='exc',
)
def test_critical(self):
self.log.critical('crit', exc_info='exc')
self.logger.log.assert_called_with(
logging.CRITICAL, 'Log - crit', exc_info='exc',
)
def test_error_when_DISABLE_TRACEBACKS(self):
from kombu import log
log.DISABLE_TRACEBACKS = True
try:
self.log.error('error')
self.logger.log.assert_called_with(logging.ERROR, 'Log - error')
finally:
log.DISABLE_TRACEBACKS = False
def test_get_loglevel(self):
assert self.log.get_loglevel('DEBUG') == logging.DEBUG
assert self.log.get_loglevel('ERROR') == logging.ERROR
assert self.log.get_loglevel(logging.INFO) == logging.INFO
def test_is_enabled_for(self):
self.logger.isEnabledFor.return_value = True
assert self.log.is_enabled_for('DEBUG')
self.logger.isEnabledFor.assert_called_with(logging.DEBUG)
def test_LogMixin_get_logger(self):
assert LogMixin().get_logger() is logging.getLogger('LogMixin')
def test_Log_get_logger(self):
assert Log('test_Log').get_logger() is logging.getLogger('test_Log')
def test_log_when_not_enabled(self):
self.logger.isEnabledFor.return_value = False
self.log.debug('debug')
self.logger.log.assert_not_called()
def test_log_with_format(self):
self.log.debug('Host %r removed', 'example.com')
self.logger.log.assert_called_with(
logging.DEBUG, 'Log - Host %s removed', ANY,
)
assert self.logger.log.call_args[0][2].strip('u') == "'example.com'"
class test_setup_logging:
@patch('logging.getLogger')
def test_set_up_default_values(self, getLogger):
logger = logging.getLogger.return_value = Mock()
logger.handlers = []
setup_logging()
logger.setLevel.assert_called_with(logging.ERROR)
logger.addHandler.assert_called()
ah_args, _ = logger.addHandler.call_args
handler = ah_args[0]
assert isinstance(handler, logging.StreamHandler)
assert handler.stream is sys.__stderr__
@patch('logging.getLogger')
@patch('kombu.log.WatchedFileHandler')
    def test_setup_custom_values(self, WatchedFileHandler, getLogger):
logger = logging.getLogger.return_value = Mock()
logger.handlers = []
setup_logging(loglevel=logging.DEBUG, logfile='/var/logfile')
logger.setLevel.assert_called_with(logging.DEBUG)
logger.addHandler.assert_called()
WatchedFileHandler.assert_called()
@patch('logging.getLogger')
def test_logger_already_setup(self, getLogger):
logger = logging.getLogger.return_value = Mock()
logger.handlers = [Mock()]
setup_logging()
logger.setLevel.assert_not_called()
|
import json
import logging
import re
import pytest
import pytest_bdd as bdd
bdd.scenarios('history.feature')
@pytest.fixture(autouse=True)
def turn_on_sql_history(quteproc):
"""Make sure SQL writing is enabled for tests in this module."""
cmd = ":debug-pyeval objects.debug_flags.remove('no-sql-history')"
quteproc.send_cmd(cmd)
quteproc.wait_for_load_finished_url('qute://pyeval')
@bdd.then(bdd.parsers.parse("the query parameter {name} should be set to "
"{value}"))
def check_query(quteproc, name, value):
"""Check if a given query is set correctly.
This assumes we're on the server query page.
"""
content = quteproc.get_content()
data = json.loads(content)
print(data)
assert data[name] == value
@bdd.then(bdd.parsers.parse("the history should contain:\n{expected}"))
def check_history(quteproc, server, tmpdir, expected):
path = tmpdir / 'history'
quteproc.send_cmd(':debug-dump-history "{}"'.format(path))
quteproc.wait_for(category='message', loglevel=logging.INFO,
message='Dumped history to {}'.format(path))
with path.open('r', encoding='utf-8') as f:
# ignore access times, they will differ in each run
actual = '\n'.join(re.sub('^\\d+-?', '', line).strip() for line in f)
expected = expected.replace('(port)', str(server.port))
assert actual == expected
@bdd.then("the history should be empty")
def check_history_empty(quteproc, server, tmpdir):
check_history(quteproc, server, tmpdir, '')
|
from importlib import import_module
from radicale.log import logger
def load_plugin(internal_types, module_name, class_name, configuration):
type_ = configuration.get(module_name, "type")
if callable(type_):
logger.info("%s type is %r", module_name, type_)
return type_(configuration)
if type_ in internal_types:
module = "radicale.%s.%s" % (module_name, type_)
else:
module = type_
try:
class_ = getattr(import_module(module), class_name)
except Exception as e:
raise RuntimeError("Failed to load %s module %r: %s" %
(module_name, module, e)) from e
logger.info("%s type is %r", module_name, module)
return class_(configuration)
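# Example call (illustrative only; the internal type list and plugin names here
# are assumptions, not taken from this module):
#     auth = load_plugin(["none", "htpasswd"], "auth", "Auth", configuration)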
|
from meteofrance.model import Place
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.meteo_france.const import (
CONF_CITY,
DOMAIN,
FORECAST_MODE_DAILY,
FORECAST_MODE_HOURLY,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_MODE
from homeassistant.helpers.typing import HomeAssistantType
from tests.async_mock import patch
from tests.common import MockConfigEntry
CITY_1_POSTAL = "74220"
CITY_1_NAME = "La Clusaz"
CITY_1_LAT = 45.90417
CITY_1_LON = 6.42306
CITY_1_COUNTRY = "FR"
CITY_1_ADMIN = "Rhône-Alpes"
CITY_1_ADMIN2 = "74"
CITY_1 = Place(
{
"name": CITY_1_NAME,
"lat": CITY_1_LAT,
"lon": CITY_1_LON,
"country": CITY_1_COUNTRY,
"admin": CITY_1_ADMIN,
"admin2": CITY_1_ADMIN2,
}
)
CITY_2_NAME = "Auch"
CITY_2_LAT = 43.64528
CITY_2_LON = 0.58861
CITY_2_COUNTRY = "FR"
CITY_2_ADMIN = "Midi-Pyrénées"
CITY_2_ADMIN2 = "32"
CITY_2 = Place(
{
"name": CITY_2_NAME,
"lat": CITY_2_LAT,
"lon": CITY_2_LON,
"country": CITY_2_COUNTRY,
"admin": CITY_2_ADMIN,
"admin2": CITY_2_ADMIN2,
}
)
CITY_3_NAME = "Auchel"
CITY_3_LAT = 50.50833
CITY_3_LON = 2.47361
CITY_3_COUNTRY = "FR"
CITY_3_ADMIN = "Nord-Pas-de-Calais"
CITY_3_ADMIN2 = "62"
CITY_3 = Place(
{
"name": CITY_3_NAME,
"lat": CITY_3_LAT,
"lon": CITY_3_LON,
"country": CITY_3_COUNTRY,
"admin": CITY_3_ADMIN,
"admin2": CITY_3_ADMIN2,
}
)
@pytest.fixture(name="client_single")
def mock_controller_client_single():
"""Mock a successful client."""
with patch(
"homeassistant.components.meteo_france.config_flow.MeteoFranceClient",
update=False,
) as service_mock:
service_mock.return_value.search_places.return_value = [CITY_1]
yield service_mock
@pytest.fixture(autouse=True)
def mock_setup():
"""Prevent setup."""
with patch(
"homeassistant.components.meteo_france.async_setup",
return_value=True,
), patch(
"homeassistant.components.meteo_france.async_setup_entry",
return_value=True,
):
yield
@pytest.fixture(name="client_multiple")
def mock_controller_client_multiple():
"""Mock a successful client."""
with patch(
"homeassistant.components.meteo_france.config_flow.MeteoFranceClient",
update=False,
) as service_mock:
service_mock.return_value.search_places.return_value = [CITY_2, CITY_3]
yield service_mock
@pytest.fixture(name="client_empty")
def mock_controller_client_empty():
"""Mock a successful client."""
with patch(
"homeassistant.components.meteo_france.config_flow.MeteoFranceClient",
update=False,
) as service_mock:
service_mock.return_value.search_places.return_value = []
yield service_mock
async def test_user(hass, client_single):
"""Test user config."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# test with all provided with search returning only 1 place
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_CITY: CITY_1_POSTAL},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == f"{CITY_1_LAT}, {CITY_1_LON}"
assert result["title"] == f"{CITY_1}"
assert result["data"][CONF_LATITUDE] == str(CITY_1_LAT)
assert result["data"][CONF_LONGITUDE] == str(CITY_1_LON)
async def test_user_list(hass, client_multiple):
"""Test user config."""
# test with all provided with search returning more than 1 place
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_CITY: CITY_2_NAME},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "cities"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_CITY: f"{CITY_3};{CITY_3_LAT};{CITY_3_LON}"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == f"{CITY_3_LAT}, {CITY_3_LON}"
assert result["title"] == f"{CITY_3}"
assert result["data"][CONF_LATITUDE] == str(CITY_3_LAT)
assert result["data"][CONF_LONGITUDE] == str(CITY_3_LON)
async def test_import(hass, client_multiple):
"""Test import step."""
# import with all
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_CITY: CITY_2_NAME},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["result"].unique_id == f"{CITY_2_LAT}, {CITY_2_LON}"
assert result["title"] == f"{CITY_2}"
assert result["data"][CONF_LATITUDE] == str(CITY_2_LAT)
assert result["data"][CONF_LONGITUDE] == str(CITY_2_LON)
async def test_search_failed(hass, client_empty):
"""Test error displayed if no result in search."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_CITY: CITY_1_POSTAL},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_CITY: "empty"}
async def test_abort_if_already_setup(hass, client_single):
"""Test we abort if already setup."""
MockConfigEntry(
domain=DOMAIN,
data={CONF_LATITUDE: CITY_1_LAT, CONF_LONGITUDE: CITY_1_LON},
unique_id=f"{CITY_1_LAT}, {CITY_1_LON}",
).add_to_hass(hass)
# Should fail, same CITY same postal code (import)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_CITY: CITY_1_POSTAL},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
# Should fail, same CITY same postal code (flow)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_CITY: CITY_1_POSTAL},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_options_flow(hass: HomeAssistantType):
"""Test config flow options."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_LATITUDE: CITY_1_LAT, CONF_LONGITUDE: CITY_1_LON},
unique_id=f"{CITY_1_LAT}, {CITY_1_LON}",
)
config_entry.add_to_hass(hass)
assert config_entry.options == {}
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
# Default
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options[CONF_MODE] == FORECAST_MODE_DAILY
# Manual
result = await hass.config_entries.options.async_init(config_entry.entry_id)
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_MODE: FORECAST_MODE_HOURLY},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options[CONF_MODE] == FORECAST_MODE_HOURLY
|
import unittest
import os
import sys
from kalliope.core.Models.Neuron import Neuron
from kalliope.neurons.say.say import Say
from kalliope.core.Utils.Utils import Utils
from kalliope.core.ConfigurationManager import SettingLoader
class TestUtils(unittest.TestCase):
"""
Class to test Utils methods
"""
def setUp(self):
pass
def test_get_current_file_parent_path(self):
"""
        Expect to get back the parent path of the given file path
"""
path_to_test = "../kalliope/core/Utils"
expected_result = os.path.normpath("../kalliope/core")
self.assertEqual(Utils.get_current_file_parent_path(path_to_test),
expected_result,
"fail getting the parent parent path from the given path")
def test_get_current_file_parent_parent_path(self):
"""
        Expect to get back the parent's parent path of the given file path
"""
path_to_test = "../kalliope/core/Utils"
expected_result = os.path.normpath("../kalliope")
self.assertEqual(Utils.get_current_file_parent_parent_path(path_to_test),
expected_result,
"fail getting the parent parent path from the given path")
def test_get_real_file_path(self):
"""
        Expect to load the proper file following the order:
        - Provided absolute path
        - Current user path + file_name
        - /etc/kalliope + file_name
        - /path/to/kalliope/ + file_name
        (An illustrative sketch of this lookup order is given after the test class.)
"""
###
# Test the absolute path
dir_path = "/tmp/kalliope/tests/"
file_name = "test_real_file_path"
absolute_path_to_test = os.path.join(dir_path, file_name)
expected_result = absolute_path_to_test
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# touch the file
open(absolute_path_to_test, 'a').close()
self.assertEqual(Utils.get_real_file_path(absolute_path_to_test),
expected_result,
"Fail to match the given absolute path ")
# Clean up
if os.path.exists(absolute_path_to_test):
os.remove(absolute_path_to_test)
###
# test the Current path
file_name = "test_real_file_path"
expected_result = os.getcwd() + os.sep + file_name
# touch the file
open(file_name, 'a').close()
self.assertEqual(Utils.get_real_file_path(file_name),
expected_result,
"Fail to match the Current path ")
# Clean up
if os.path.exists(file_name):
os.remove(file_name)
###
# test /etc/kalliope
# /!\ need permissions
# dir_path = "/etc/kalliope/"
# file_name = "test_real_file_path"
# path_to_test = os.path.join(dir_path,file_name)
# expected_result = "/etc/kalliope" + os.sep + file_name
# if not os.path.exists(dir_path):
# os.makedirs(dir_path)
#
# # touch the file
# open(path_to_test, 'a').close()
#
# self.assertEquals(Utils.get_real_file_path(file_name),
# expected_result,
# "Fail to match the /etc/kalliope path")
# # Clean up
# if os.path.exists(file_name):
# os.remove(file_name)
###
# /an/unknown/path/kalliope/
dir_path = "../kalliope/"
file_name = "test_real_file_path"
path_to_test = os.path.join(dir_path, file_name)
expected_result = os.path.normpath(os.getcwd() + os.sep + os.pardir + os.sep + "kalliope" + os.sep + file_name)
if not os.path.exists(dir_path):
os.makedirs(dir_path)
# touch the file
open(path_to_test, 'a').close()
self.assertEqual(Utils.get_real_file_path(file_name),
expected_result,
"Fail to match the /an/unknown/path/kalliope path")
# Clean up
if os.path.exists(expected_result):
os.remove(expected_result)
def test_get_dynamic_class_instantiation(self):
"""
        Test that an instance has been instantiated properly.
"""
sl = SettingLoader()
sl.settings.resource_dir = '/var/tmp/test/resources'
neuron = Neuron(name='Say', parameters={'message': 'test dynamic class instantiate'})
self.assertTrue(isinstance(Utils.get_dynamic_class_instantiation(package_name="neurons",
module_name=neuron.name.capitalize(),
parameters=neuron.parameters,
resources_dir='/var/tmp/test/resources'),
Say),
"Fail instantiate a class")
def test_is_containing_bracket(self):
# Success
order_to_test = "This test contains {{ bracket }}"
self.assertTrue(Utils.is_containing_bracket(order_to_test),
"Fail returning True when order contains spaced brackets")
order_to_test = "This test contains {{bracket }}"
self.assertTrue(Utils.is_containing_bracket(order_to_test),
"Fail returning True when order contains right spaced bracket")
order_to_test = "This test contains {{ bracket}}"
self.assertTrue(Utils.is_containing_bracket(order_to_test),
"Fail returning True when order contains left spaced bracket")
order_to_test = "This test contains {{bracket}}"
self.assertTrue(Utils.is_containing_bracket(order_to_test),
"Fail returning True when order contains no spaced bracket")
# Failure
order_to_test = "This test does not contain bracket"
self.assertFalse(Utils.is_containing_bracket(order_to_test),
"Fail returning False when order has no brackets")
# Behaviour
order_to_test = ""
self.assertFalse(Utils.is_containing_bracket(order_to_test),
"Fail returning False when no order")
# Behaviour int
order_to_test = 6
self.assertFalse(Utils.is_containing_bracket(order_to_test),
"Fail returning False when an int")
# Behaviour unicode
order_to_test = "j'aime les goûters l'été"
self.assertFalse(Utils.is_containing_bracket(order_to_test),
"Fail returning False when an int")
def test_get_next_value_list(self):
# Success
list_to_test = {1, 2, 3}
self.assertEqual(Utils.get_next_value_list(list_to_test), 2,
"Fail to match the expected next value from the list")
# Failure
list_to_test = {1}
self.assertEqual(Utils.get_next_value_list(list_to_test), None,
"Fail to ensure there is no next value from the list")
# Behaviour
list_to_test = {}
self.assertEqual(Utils.get_next_value_list(list_to_test), None,
"Fail to ensure the empty list return None value")
def test_find_all_matching_brackets(self):
"""
Test the Utils find all matching brackets
"""
sentence = "This is the {{bracket}}"
expected_result = ["{{bracket}}"]
self.assertEqual(Utils.find_all_matching_brackets(sentence=sentence),
expected_result,
"Fail to match one bracket")
sentence = "This is the {{bracket}} {{second}}"
expected_result = ["{{bracket}}", "{{second}}"]
self.assertEqual(Utils.find_all_matching_brackets(sentence=sentence),
expected_result,
"Fail to match two brackets")
def test_remove_spaces_in_brackets(self):
"""
Test the Utils remove_spaces_in_brackets
"""
sentence = "This is the {{ bracket }}"
expected_result = "This is the {{bracket}}"
self.assertEqual(Utils.remove_spaces_in_brackets(sentence=sentence),
expected_result,
"Fail to remove spaces in one bracket")
sentence = "This is the {{ bracket }} {{ second }}"
expected_result = "This is the {{bracket}} {{second}}"
self.assertEqual(Utils.remove_spaces_in_brackets(sentence=sentence),
expected_result,
"Fail to remove spaces in two brackets")
# test with json
sentence = "{\"params\": {\"apikey\": \"ISNOTMYPASSWORD\", " \
"\"query\": \"met le chauffage a {{ valeur }} degres\"}}"
expected_result = "{\"params\": {\"apikey\": \"ISNOTMYPASSWORD\", " \
"\"query\": \"met le chauffage a {{valeur}} degres\"}}"
self.assertEqual(Utils.remove_spaces_in_brackets(sentence=sentence),
expected_result,
"Fail to remove spaces in two brackets")
def test_encode_text_utf8(self):
"""
Test encoding the text in utf8
"""
sentence = "kâllìöpé"
if sys.version_info[0] < 3:
sentence = sentence.decode('utf8')
expected_sentence = "kâllìöpé"
self.assertEqual(Utils.encode_text_utf8(text=sentence),
expected_sentence)
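# Illustrative sketch only, not part of Kalliope: the helper below mirrors the lookup
# order described in the docstring of test_get_real_file_path above (absolute path,
# then the current working directory, then /etc/kalliope, then the kalliope package
# directory). It is an assumption added for clarity; the real logic lives in
# kalliope.core.Utils.Utils.get_real_file_path.
def _sketch_get_real_file_path(file_name):
    candidates = [
        file_name,                                 # 1. provided absolute path
        os.path.join(os.getcwd(), file_name),      # 2. current working directory
        os.path.join("/etc/kalliope", file_name),  # 3. system-wide config directory
        # 4. the kalliope package directory would be tried last in the real code
    ]
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    return None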
if __name__ == '__main__':
unittest.main()
|
from functools import reduce
from pygal.graph.dual import Dual
from pygal.graph.line import Line
from pygal.util import cached_property, compose, ident
class XY(Line, Dual):
"""XY Line graph class"""
_x_adapters = []
@cached_property
def xvals(self):
"""All x values"""
return [
val[0] for serie in self.all_series for val in serie.values
if val[0] is not None
]
@cached_property
def yvals(self):
"""All y values"""
return [
val[1] for serie in self.series for val in serie.values
if val[1] is not None
]
@cached_property
def _min(self):
"""Getter for the minimum series value"""
return (
self.range[0] if (self.range and self.range[0] is not None) else
(min(self.yvals) if self.yvals else None)
)
@cached_property
def _max(self):
"""Getter for the maximum series value"""
return (
self.range[1] if (self.range and self.range[1] is not None) else
(max(self.yvals) if self.yvals else None)
)
def _compute(self):
"""Compute x/y min and max and x/y scale and set labels"""
if self.xvals:
if self.xrange:
x_adapter = reduce(compose, self._x_adapters) if getattr(
self, '_x_adapters', None
) else ident
xmin = x_adapter(self.xrange[0])
xmax = x_adapter(self.xrange[1])
else:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
if self.yvals:
ymin = self._min
ymax = self._max
if self.include_x_axis:
ymin = min(ymin or 0, 0)
ymax = max(ymax or 0, 0)
yrng = (ymax - ymin)
else:
yrng = None
for serie in self.all_series:
serie.points = serie.values
if self.interpolate:
vals = list(
zip(
*sorted(
filter(lambda t: None not in t, serie.points),
key=lambda x: x[0]
)
)
)
serie.interpolated = self._interpolate(vals[0], vals[1])
if self.interpolate:
self.xvals = [
val[0] for serie in self.all_series
for val in serie.interpolated
]
self.yvals = [
val[1] for serie in self.series for val in serie.interpolated
]
if self.xvals:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
# these values can also be 0 (zero), so testing explicitly for None
if xrng is not None:
self._box.xmin, self._box.xmax = xmin, xmax
if yrng is not None:
self._box.ymin, self._box.ymax = ymin, ymax
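# Illustrative note, not part of pygal: the explicit "is not None" checks above matter
# because a chart whose points all share the same x (or y) value yields a range of 0,
# which is falsy but still a valid range. For example, with xvals == [3, 3, 3]:
#     xrng = max(xvals) - min(xvals)   # 0
#     if xrng:              -> would wrongly skip setting the box bounds
#     if xrng is not None:  -> correctly keeps xmin == xmax == 3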
|
import math
from redbot.core import commands
from redbot.core.i18n import Translator
__all__ = ("finite_float",)
_ = Translator("Trivia", __file__)
def finite_float(arg: str) -> float:
try:
ret = float(arg)
except ValueError:
raise commands.BadArgument(_("`{arg}` is not a number.").format(arg=arg))
if not math.isfinite(ret):
raise commands.BadArgument(_("`{arg}` is not a finite number.").format(arg=ret))
return ret
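# Illustrative usage sketch (an assumption added for clarity, not part of the original
# module): in a Red/discord.py command the converter is normally used as an annotation,
# e.g. ``async def bet(self, ctx, amount: finite_float)``. The guarded demo below just
# exercises it directly.
if __name__ == "__main__":
    print(finite_float("3.5"))  # -> 3.5
    for bad in ("inf", "nan", "not a number"):
        try:
            finite_float(bad)
        except commands.BadArgument as exc:
            print(f"rejected {bad!r}: {exc}")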
|
import mock
import pytest
from addict import Dict
from paasta_tools import utils
from paasta_tools.frameworks import native_scheduler
from paasta_tools.frameworks.native_scheduler import TASK_KILLED
from paasta_tools.frameworks.native_scheduler import TASK_RUNNING
from paasta_tools.frameworks.native_service_config import NativeServiceConfig
from paasta_tools.frameworks.task_store import DictTaskStore
@pytest.fixture
def system_paasta_config():
return utils.SystemPaastaConfig(
{"docker_registry": "fake", "volumes": [], "dockercfg_location": "/foo/bar"},
"/fake/system/configs",
)
def make_fake_offer(
cpu=50000, mem=50000, port_begin=31000, port_end=32000, pool="default"
):
offer = Dict(
agent_id=Dict(value="super_big_slave"),
resources=[
Dict(name="cpus", scalar=Dict(value=cpu)),
Dict(name="mem", scalar=Dict(value=mem)),
Dict(
name="ports", ranges=Dict(range=[Dict(begin=port_begin, end=port_end)])
),
],
attributes=[],
)
if pool is not None:
offer.attributes = [Dict(name="pool", text=Dict(value=pool))]
return offer
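# Illustrative sketch (an assumption, not part of the original tests): because
# addict.Dict supports attribute access, a fake offer built above can be navigated
# like a real Mesos offer message. The helper below only demonstrates that shape.
def _describe_fake_offer(offer):
    """Return a short summary of a fake offer (illustration only)."""
    pool = offer.attributes[0].text.value if offer.attributes else None
    return {
        "agent": offer.agent_id.value,
        "cpus": offer.resources[0].scalar.value,
        "mem": offer.resources[1].scalar.value,
        "pool": pool,
    }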
class TestNativeScheduler:
@mock.patch("paasta_tools.frameworks.native_scheduler._log", autospec=True)
def test_start_upgrade_rollback_scaledown(self, mock_log, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = []
for force_bounce in range(2):
service_configs.append(
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"cmd": "sleep 50",
"drain_method": "test",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": str(force_bounce),
},
soa_dir="/nail/etc/services",
)
)
scheduler = native_scheduler.NativeScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
staging_timeout=1,
task_store_type=DictTaskStore,
)
fake_driver = mock.Mock()
scheduler.registered(
driver=fake_driver, frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
):
# First, start up 3 old tasks
old_tasks = scheduler.launch_tasks_for_offers(
fake_driver, [make_fake_offer()]
)
assert len(scheduler.task_store.get_all_tasks()) == 3
# and mark the old tasks as up
for task in old_tasks:
scheduler.statusUpdate(
fake_driver, dict(task_id=task["task_id"], state=TASK_RUNNING)
)
assert len(scheduler.drain_method.downed_task_ids) == 0
# Now, change force_bounce
scheduler.service_config = service_configs[1]
# and start 3 more tasks
new_tasks = scheduler.launch_tasks_for_offers(
fake_driver, [make_fake_offer()]
)
assert len(scheduler.task_store.get_all_tasks()) == 6
# It should not drain anything yet, since the new tasks aren't up.
scheduler.kill_tasks_if_necessary(fake_driver)
assert len(scheduler.task_store.get_all_tasks()) == 6
assert len(scheduler.drain_method.downed_task_ids) == 0
# Now we mark the new tasks as up.
for i, task in enumerate(new_tasks):
scheduler.statusUpdate(
fake_driver, dict(task_id=task["task_id"], state=TASK_RUNNING)
)
# As each of these new tasks come up, we should drain an old one.
assert len(scheduler.drain_method.downed_task_ids) == i + 1
# Now let's roll back and make sure it undrains the old ones and drains new.
scheduler.service_config = service_configs[0]
scheduler.kill_tasks_if_necessary(fake_driver)
assert scheduler.drain_method.downed_task_ids == set()
scheduler.kill_tasks_if_necessary(fake_driver)
assert scheduler.drain_method.downed_task_ids == {
t["task_id"]["value"] for t in new_tasks
}
# Once we drain the new tasks, it should kill them.
assert fake_driver.killTask.call_count == 0
# we issue duplicate kills for tasks until we get notified about TASK_KILLED, so we keep track of
# the unique IDs of tasks being killed.
killed_tasks = set()
def killTask_side_effect(task_id):
killed_tasks.add(task_id["value"])
fake_driver.killTask.side_effect = killTask_side_effect
scheduler.drain_method.mark_arbitrary_task_as_safe_to_kill()
scheduler.kill_tasks_if_necessary(fake_driver)
assert len(killed_tasks) == 1
scheduler.drain_method.mark_arbitrary_task_as_safe_to_kill()
scheduler.kill_tasks_if_necessary(fake_driver)
assert len(killed_tasks) == 2
scheduler.drain_method.mark_arbitrary_task_as_safe_to_kill()
scheduler.kill_tasks_if_necessary(fake_driver)
assert scheduler.drain_method.safe_to_kill_task_ids == {
t["task_id"]["value"] for t in new_tasks
}
assert len(killed_tasks) == 3
for task in new_tasks:
fake_driver.killTask.assert_any_call(task["task_id"])
# Now tell the scheduler those tasks have died.
for task in new_tasks:
scheduler.statusUpdate(
fake_driver, dict(task_id=task["task_id"], state=TASK_KILLED)
)
# Clean up the TestDrainMethod for the rest of this test.
assert not list(scheduler.drain_method.downed_task_ids)
# Now scale down old app
scheduler.service_config.config_dict["instances"] = 2
scheduler.kill_tasks_if_necessary(fake_driver)
assert len(scheduler.drain_method.downed_task_ids) == 1
# mark it as drained and let the scheduler kill it.
scheduler.drain_method.mark_arbitrary_task_as_safe_to_kill()
killed_tasks.clear()
scheduler.kill_tasks_if_necessary(fake_driver)
assert len(killed_tasks) == 1
def test_tasks_for_offer_chooses_port(self, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = []
service_configs.append(
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 1,
"cmd": "sleep 50",
"drain_method": "test",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": "0",
},
soa_dir="/nail/etc/services",
)
)
scheduler = native_scheduler.NativeScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
reconcile_start_time=0,
staging_timeout=1,
task_store_type=DictTaskStore,
)
scheduler.registered(
driver=mock.Mock(), frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
):
tasks, _ = scheduler.tasks_and_state_for_offer(
mock.Mock(), make_fake_offer(port_begin=12345, port_end=12345), {}
)
assert {
"name": "ports",
"ranges": {"range": [{"begin": 12345, "end": 12345}]},
"type": "RANGES",
} in tasks[0]["resources"]
    def test_offer_matches_pool(self, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_config = NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 1,
"cmd": "sleep 50",
"drain_method": "test",
"pool": "default",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": "0",
},
soa_dir="/nail/etc/services",
)
scheduler = native_scheduler.NativeScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_config,
staging_timeout=1,
task_store_type=DictTaskStore,
)
scheduler.registered(
driver=mock.Mock(), frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
assert scheduler.offer_matches_pool(
make_fake_offer(port_begin=12345, port_end=12345, pool="default")
)
assert not scheduler.offer_matches_pool(
make_fake_offer(port_begin=12345, port_end=12345, pool="somethingelse")
)
assert not scheduler.offer_matches_pool(
make_fake_offer(port_begin=12345, port_end=12345, pool=None)
)
class TestNativeServiceConfig:
def test_base_task(self, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_config = NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"cmd": "sleep 50",
"drain_method": "test",
"extra_volumes": [
{"containerPath": "/foo", "hostPath": "/bar", "mode": "RW"}
],
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": "0",
},
soa_dir="/nail/etc/services",
)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
), mock.patch(
"paasta_tools.utils.InstanceConfig.use_docker_disk_quota",
autospec=True,
return_value=True,
):
task = service_config.base_task(system_paasta_config)
assert task == {
"container": {
"type": "DOCKER",
"docker": {
"image": "fake/busybox",
"parameters": [
{"key": "memory-swap", "value": mock.ANY},
{"key": "cpu-period", "value": mock.ANY},
{"key": "cpu-quota", "value": mock.ANY},
{"key": "storage-opt", "value": mock.ANY},
{"key": "label", "value": mock.ANY}, # service
{"key": "label", "value": mock.ANY}, # instance
{"key": "init", "value": "true"},
{"key": "cap-drop", "value": "SETPCAP"},
{"key": "cap-drop", "value": "MKNOD"},
{"key": "cap-drop", "value": "AUDIT_WRITE"},
{"key": "cap-drop", "value": "CHOWN"},
{"key": "cap-drop", "value": "NET_RAW"},
{"key": "cap-drop", "value": "DAC_OVERRIDE"},
{"key": "cap-drop", "value": "FOWNER"},
{"key": "cap-drop", "value": "FSETID"},
{"key": "cap-drop", "value": "KILL"},
{"key": "cap-drop", "value": "SETGID"},
{"key": "cap-drop", "value": "SETUID"},
{"key": "cap-drop", "value": "NET_BIND_SERVICE"},
{"key": "cap-drop", "value": "SYS_CHROOT"},
{"key": "cap-drop", "value": "SETFCAP"},
],
"network": "BRIDGE",
"port_mappings": [
{"container_port": 8888, "host_port": 0, "protocol": "tcp"}
],
},
"volumes": [
{"mode": "RW", "container_path": "/foo", "host_path": "/bar"}
],
},
"command": {
"value": "sleep 50",
"uris": [
{
"value": system_paasta_config.get_dockercfg_location(),
"extract": False,
}
],
},
"resources": [
{"name": "cpus", "scalar": {"value": 0.1}, "type": "SCALAR"},
{"name": "mem", "scalar": {"value": 50}, "type": "SCALAR"},
{"name": "ports", "ranges": mock.ANY, "type": "RANGES"},
],
"name": mock.ANY,
"agent_id": {"value": ""},
"task_id": {"value": ""},
}
assert task["name"].startswith("service_name.instance_name.gitbusybox.config")
def test_resource_offers_ignores_blacklisted_slaves(self, system_paasta_config):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_configs = [
NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 3,
"cmd": "sleep 50",
"drain_method": "test",
},
branch_dict={"docker_image": "busybox", "desired_state": "start"},
soa_dir="/nail/etc/services",
)
]
scheduler = native_scheduler.NativeScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_configs[0],
staging_timeout=1,
task_store_type=DictTaskStore,
)
fake_driver = mock.Mock()
scheduler.registered(
driver=fake_driver, frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
scheduler.blacklist_slave("super big slave")
assert len(scheduler.blacklisted_slaves) == 1
scheduler.resourceOffers(fake_driver, [make_fake_offer()])
assert len(scheduler.task_store.get_all_tasks()) == 0
def test_make_drain_task_works_with_hacheck_drain_method(
self, system_paasta_config
):
service_name = "service_name"
instance_name = "instance_name"
cluster = "cluster"
service_config = NativeServiceConfig(
service=service_name,
instance=instance_name,
cluster=cluster,
config_dict={
"cpus": 0.1,
"mem": 50,
"instances": 1,
"cmd": "sleep 50",
"drain_method": "hacheck",
"pool": "default",
},
branch_dict={
"docker_image": "busybox",
"desired_state": "start",
"force_bounce": "0",
},
soa_dir="/nail/etc/services",
)
scheduler = native_scheduler.NativeScheduler(
service_name=service_name,
instance_name=instance_name,
cluster=cluster,
system_paasta_config=system_paasta_config,
service_config=service_config,
staging_timeout=1,
task_store_type=DictTaskStore,
)
fake_driver = mock.Mock()
scheduler.registered(
driver=fake_driver, frameworkId={"value": "foo"}, masterInfo=mock.Mock()
)
# launch a task
offer = make_fake_offer(port_begin=31337, port_end=31337)
with mock.patch(
"paasta_tools.utils.load_system_paasta_config",
autospec=True,
return_value=system_paasta_config,
):
scheduler.launch_tasks_for_offers(driver=fake_driver, offers=[offer])
expected = [
"http://super_big_slave:6666/spool/service_name.instance_name/31337/status"
]
actual = scheduler.drain_method.spool_urls(
scheduler.make_drain_task(
list(scheduler.task_store.get_all_tasks().keys())[0]
)
)
assert actual == expected
|
import os
import sys
import pytest
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
from vcr import VCR
from subliminal.cache import region
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
match_on=['method', 'scheme', 'host', 'port', 'path', 'query', 'body'],
cassette_library_dir=os.path.realpath(os.path.join('docs', 'cassettes')))
@pytest.fixture(autouse=True, scope='session')
def configure_region():
region.configure('dogpile.cache.null')
region.configure = Mock()
@pytest.fixture(autouse=True)
def chdir(tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
@pytest.yield_fixture(autouse=True)
def use_cassette(request):
with vcr.use_cassette('test_' + request.fspath.purebasename):
yield
@pytest.fixture(autouse=True)
def skip_python_2():
if sys.version_info < (3, 0):
return pytest.skip('Requires python 3')
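# Illustrative note (not part of the original conftest): the cassette record mode set
# above can be overridden per test run through the environment, e.g.
#
#     VCR_RECORD_MODE=all pytest       # re-record every cassette
#     VCR_RECORD_MODE=none pytest      # fail on any HTTP request without a cassette
#
# With the default "once", a cassette is recorded only if it does not exist yet.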
|
import os
import unittest
import mock
from perfkitbenchmarker import test_util
from perfkitbenchmarker.linux_packages import nvidia_driver
AUTOBOOST_ENABLED_DICT = {'autoboost': True, 'autoboost_default': True}
AUTOBOOST_DISABLED_DICT = {'autoboost': False, 'autoboost_default': False}
class NvidiaDriverTestCase(unittest.TestCase, test_util.SamplesTestMixin):
def setUp(self):
super(NvidiaDriverTestCase, self).setUp()
path = os.path.join(os.path.dirname(__file__), '../data',
'nvidia_smi_output.txt')
with open(path) as fp:
self.nvidia_smi_output = fp.read()
def testQueryNumberOfGpus(self):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(return_value=('count\n8', None))
self.assertEqual(8, nvidia_driver.QueryNumberOfGpus(vm))
def testQueryGpuClockSpeed(self):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=('clocks.applications.graphics [MHz], '
'clocks.applications.memory [Mhz]\n'
'324 MHz, 527 MHz', None))
self.assertEqual((324, 527), nvidia_driver.QueryGpuClockSpeed(vm, 3))
vm.RemoteCommand.assert_called_with(
'sudo nvidia-smi '
'--query-gpu=clocks.applications.memory,'
'clocks.applications.graphics --format=csv --id=3', should_log=True)
def testGetDriverVersion(self):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(self.nvidia_smi_output, ''))
self.assertEqual('375.66', nvidia_driver.GetDriverVersion(vm))
def testGetPeerToPeerTopology(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'nvidia_smi_topo_output.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
expected = 'Y Y N N;Y Y N N;N N Y Y;N N Y Y'
actual = nvidia_driver.GetPeerToPeerTopology(vm)
self.assertEqual(expected, actual)
vm.RemoteCommand.assert_called_with('nvidia-smi topo -p2p r',
should_log=True)
def testQueryAutoboostNull(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'nvidia_smi_describe_clocks_p100.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
self.assertEqual({'autoboost': None, 'autoboost_default': None},
nvidia_driver.QueryAutoboostPolicy(vm, 0))
def testQueryAutoboostOn(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'nvidia_smi_describe_clocks_k80.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
self.assertEqual({'autoboost': False, 'autoboost_default': True},
nvidia_driver.QueryAutoboostPolicy(vm, 0))
def testGetGpuTypeP100(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'list_gpus_output_p100.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
self.assertEqual(nvidia_driver.NVIDIA_TESLA_P100,
nvidia_driver.GetGpuType(vm))
def testGetGpuTypeK80(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'list_gpus_output_k80.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
self.assertEqual(nvidia_driver.NVIDIA_TESLA_K80,
nvidia_driver.GetGpuType(vm))
  def testHeterogeneousGpuTypes(self):
path = os.path.join(os.path.dirname(__file__), '../data',
'list_gpus_output_heterogeneous.txt')
with open(path) as fp:
nvidia_smi_output = fp.read()
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock(
return_value=(nvidia_smi_output, ''))
self.assertRaisesRegexp(nvidia_driver.HeterogeneousGpuTypesError,
'PKB only supports one type of gpu per VM',
nvidia_driver.GetGpuType, vm)
@mock.patch(nvidia_driver.__name__ + '.QueryNumberOfGpus', return_value=2)
@mock.patch(nvidia_driver.__name__ + '.QueryAutoboostPolicy',
return_value=AUTOBOOST_ENABLED_DICT)
def testSetAutoboostPolicyWhenValuesAreTheSame(self,
query_autoboost_mock,
num_gpus_mock):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock()
nvidia_driver.SetAutoboostDefaultPolicy(vm, True)
    query_autoboost_mock.assert_called()
vm.RemoteCommand.assert_not_called()
@mock.patch(nvidia_driver.__name__ + '.QueryNumberOfGpus', return_value=2)
@mock.patch(nvidia_driver.__name__ + '.QueryAutoboostPolicy',
return_value=AUTOBOOST_DISABLED_DICT)
def testSetAutoboostPolicyWhenValuesAreDifferent(self,
query_autoboost_mock,
num_gpus_mock):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock()
nvidia_driver.SetAutoboostDefaultPolicy(vm, True)
    query_autoboost_mock.assert_called()
self.assertEqual(2, vm.RemoteCommand.call_count)
@mock.patch(nvidia_driver.__name__ + '.QueryNumberOfGpus', return_value=2)
@mock.patch(nvidia_driver.__name__ + '.QueryGpuClockSpeed',
return_value=(2505, 875))
def testSetClockSpeedWhenValuesAreTheSame(self,
query_clock_speed_mock,
num_gpus_mock):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock()
nvidia_driver.SetGpuClockSpeed(vm, 2505, 875)
    query_clock_speed_mock.assert_called()
vm.RemoteCommand.assert_not_called()
@mock.patch(nvidia_driver.__name__ + '.QueryNumberOfGpus', return_value=2)
@mock.patch(nvidia_driver.__name__ + '.QueryGpuClockSpeed',
return_value=(2505, 875))
def testSetClockSpeedWhenValuesAreDifferent(self,
query_clock_speed_mock,
num_gpus_mock):
vm = mock.MagicMock()
vm.RemoteCommand = mock.MagicMock()
nvidia_driver.SetGpuClockSpeed(vm, 2505, 562)
    query_clock_speed_mock.assert_called()
self.assertEqual(2, vm.RemoteCommand.call_count)
if __name__ == '__main__':
unittest.main()
|
import logging
import voluptuous as vol
from homeassistant.components.vacuum import (
ATTR_FAN_SPEED,
DOMAIN as VACUUM_DOMAIN,
SERVICE_CLEAN_SPOT,
SERVICE_LOCATE,
SERVICE_PAUSE,
SERVICE_RETURN_TO_BASE,
SERVICE_SET_FAN_SPEED,
SERVICE_START,
SERVICE_STOP,
STATE_CLEANING,
STATE_DOCKED,
STATE_ERROR,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
SUPPORT_BATTERY,
SUPPORT_CLEAN_SPOT,
SUPPORT_FAN_SPEED,
SUPPORT_LOCATE,
SUPPORT_PAUSE,
SUPPORT_RETURN_HOME,
SUPPORT_START,
SUPPORT_STATE,
SUPPORT_STOP,
StateVacuumEntity,
)
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_FRIENDLY_NAME,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_UNKNOWN,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.script import Script
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_LOGGER = logging.getLogger(__name__)
CONF_VACUUMS = "vacuums"
CONF_BATTERY_LEVEL_TEMPLATE = "battery_level_template"
CONF_FAN_SPEED_LIST = "fan_speeds"
CONF_FAN_SPEED_TEMPLATE = "fan_speed_template"
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
ENTITY_ID_FORMAT = VACUUM_DOMAIN + ".{}"
_VALID_STATES = [
STATE_CLEANING,
STATE_DOCKED,
STATE_PAUSED,
STATE_IDLE,
STATE_RETURNING,
STATE_ERROR,
]
VACUUM_SCHEMA = vol.All(
cv.deprecated(CONF_ENTITY_ID),
vol.Schema(
{
vol.Optional(CONF_FRIENDLY_NAME): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_BATTERY_LEVEL_TEMPLATE): cv.template,
vol.Optional(CONF_FAN_SPEED_TEMPLATE): cv.template,
vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
vol.Optional(CONF_ATTRIBUTE_TEMPLATES, default={}): vol.Schema(
{cv.string: cv.template}
),
vol.Required(SERVICE_START): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_PAUSE): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_STOP): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_RETURN_TO_BASE): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_CLEAN_SPOT): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_LOCATE): cv.SCRIPT_SCHEMA,
vol.Optional(SERVICE_SET_FAN_SPEED): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_FAN_SPEED_LIST, default=[]): cv.ensure_list,
vol.Optional(CONF_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
),
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{vol.Required(CONF_VACUUMS): vol.Schema({cv.slug: VACUUM_SCHEMA})}
)
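# Illustrative configuration sketch (an assumption, not taken from Home Assistant's
# documentation): a YAML entry that satisfies PLATFORM_SCHEMA above could look like
#
#     vacuum:
#       - platform: template
#         vacuums:
#           living_room_vacuum:
#             value_template: "{{ states('sensor.vacuum_state') }}"
#             start:
#               service: script.vacuum_start
#             return_to_base:
#               service: script.vacuum_return_to_base
#
# Only `start` is required per VACUUM_SCHEMA; every other action and template is
# optional and simply enables the matching supported feature.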
async def _async_create_entities(hass, config):
"""Create the Template Vacuums."""
vacuums = []
for device, device_config in config[CONF_VACUUMS].items():
friendly_name = device_config.get(CONF_FRIENDLY_NAME, device)
state_template = device_config.get(CONF_VALUE_TEMPLATE)
battery_level_template = device_config.get(CONF_BATTERY_LEVEL_TEMPLATE)
fan_speed_template = device_config.get(CONF_FAN_SPEED_TEMPLATE)
availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
attribute_templates = device_config.get(CONF_ATTRIBUTE_TEMPLATES)
start_action = device_config[SERVICE_START]
pause_action = device_config.get(SERVICE_PAUSE)
stop_action = device_config.get(SERVICE_STOP)
return_to_base_action = device_config.get(SERVICE_RETURN_TO_BASE)
clean_spot_action = device_config.get(SERVICE_CLEAN_SPOT)
locate_action = device_config.get(SERVICE_LOCATE)
set_fan_speed_action = device_config.get(SERVICE_SET_FAN_SPEED)
fan_speed_list = device_config[CONF_FAN_SPEED_LIST]
unique_id = device_config.get(CONF_UNIQUE_ID)
vacuums.append(
TemplateVacuum(
hass,
device,
friendly_name,
state_template,
battery_level_template,
fan_speed_template,
availability_template,
start_action,
pause_action,
stop_action,
return_to_base_action,
clean_spot_action,
locate_action,
set_fan_speed_action,
fan_speed_list,
attribute_templates,
unique_id,
)
)
return vacuums
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the template vacuums."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class TemplateVacuum(TemplateEntity, StateVacuumEntity):
"""A template vacuum component."""
def __init__(
self,
hass,
device_id,
friendly_name,
state_template,
battery_level_template,
fan_speed_template,
availability_template,
start_action,
pause_action,
stop_action,
return_to_base_action,
clean_spot_action,
locate_action,
set_fan_speed_action,
fan_speed_list,
attribute_templates,
unique_id,
):
"""Initialize the vacuum."""
super().__init__(
attribute_templates=attribute_templates,
availability_template=availability_template,
)
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = friendly_name
self._template = state_template
self._battery_level_template = battery_level_template
self._fan_speed_template = fan_speed_template
self._supported_features = SUPPORT_START
domain = __name__.split(".")[-2]
self._start_script = Script(hass, start_action, friendly_name, domain)
self._pause_script = None
if pause_action:
self._pause_script = Script(hass, pause_action, friendly_name, domain)
self._supported_features |= SUPPORT_PAUSE
self._stop_script = None
if stop_action:
self._stop_script = Script(hass, stop_action, friendly_name, domain)
self._supported_features |= SUPPORT_STOP
self._return_to_base_script = None
if return_to_base_action:
self._return_to_base_script = Script(
hass, return_to_base_action, friendly_name, domain
)
self._supported_features |= SUPPORT_RETURN_HOME
self._clean_spot_script = None
if clean_spot_action:
self._clean_spot_script = Script(
hass, clean_spot_action, friendly_name, domain
)
self._supported_features |= SUPPORT_CLEAN_SPOT
self._locate_script = None
if locate_action:
self._locate_script = Script(hass, locate_action, friendly_name, domain)
self._supported_features |= SUPPORT_LOCATE
self._set_fan_speed_script = None
if set_fan_speed_action:
self._set_fan_speed_script = Script(
hass, set_fan_speed_action, friendly_name, domain
)
self._supported_features |= SUPPORT_FAN_SPEED
self._state = None
self._battery_level = None
self._fan_speed = None
if self._template:
self._supported_features |= SUPPORT_STATE
if self._battery_level_template:
self._supported_features |= SUPPORT_BATTERY
self._unique_id = unique_id
# List of valid fan speeds
self._fan_speed_list = fan_speed_list
@property
def name(self):
"""Return the display name of this vacuum."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this vacuum."""
return self._unique_id
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
@property
def state(self):
"""Return the status of the vacuum cleaner."""
return self._state
@property
def battery_level(self):
"""Return the battery level of the vacuum cleaner."""
return self._battery_level
@property
def fan_speed(self):
"""Return the fan speed of the vacuum cleaner."""
return self._fan_speed
@property
def fan_speed_list(self) -> list:
"""Get the list of available fan speeds."""
return self._fan_speed_list
async def async_start(self):
"""Start or resume the cleaning task."""
await self._start_script.async_run(context=self._context)
async def async_pause(self):
"""Pause the cleaning task."""
if self._pause_script is None:
return
await self._pause_script.async_run(context=self._context)
async def async_stop(self, **kwargs):
"""Stop the cleaning task."""
if self._stop_script is None:
return
await self._stop_script.async_run(context=self._context)
async def async_return_to_base(self, **kwargs):
"""Set the vacuum cleaner to return to the dock."""
if self._return_to_base_script is None:
return
await self._return_to_base_script.async_run(context=self._context)
async def async_clean_spot(self, **kwargs):
"""Perform a spot clean-up."""
if self._clean_spot_script is None:
return
await self._clean_spot_script.async_run(context=self._context)
async def async_locate(self, **kwargs):
"""Locate the vacuum cleaner."""
if self._locate_script is None:
return
await self._locate_script.async_run(context=self._context)
async def async_set_fan_speed(self, fan_speed, **kwargs):
"""Set fan speed."""
if self._set_fan_speed_script is None:
return
if fan_speed in self._fan_speed_list:
self._fan_speed = fan_speed
await self._set_fan_speed_script.async_run(
{ATTR_FAN_SPEED: fan_speed}, context=self._context
)
else:
_LOGGER.error(
"Received invalid fan speed: %s. Expected: %s",
fan_speed,
self._fan_speed_list,
)
async def async_added_to_hass(self):
"""Register callbacks."""
if self._template is not None:
self.add_template_attribute(
"_state", self._template, None, self._update_state
)
if self._fan_speed_template is not None:
self.add_template_attribute(
"_fan_speed",
self._fan_speed_template,
None,
self._update_fan_speed,
)
if self._battery_level_template is not None:
self.add_template_attribute(
"_battery_level",
self._battery_level_template,
None,
self._update_battery_level,
none_on_template_error=True,
)
await super().async_added_to_hass()
@callback
def _update_state(self, result):
super()._update_state(result)
if isinstance(result, TemplateError):
# This is legacy behavior
self._state = STATE_UNKNOWN
if not self._availability_template:
self._available = True
return
# Validate state
if result in _VALID_STATES:
self._state = result
elif result == STATE_UNKNOWN:
self._state = None
else:
_LOGGER.error(
"Received invalid vacuum state: %s. Expected: %s",
result,
", ".join(_VALID_STATES),
)
self._state = None
@callback
def _update_battery_level(self, battery_level):
try:
battery_level_int = int(battery_level)
if not 0 <= battery_level_int <= 100:
raise ValueError
except ValueError:
_LOGGER.error(
"Received invalid battery level: %s. Expected: 0-100", battery_level
)
self._battery_level = None
return
self._battery_level = battery_level_int
@callback
def _update_fan_speed(self, fan_speed):
if isinstance(fan_speed, TemplateError):
# This is legacy behavior
self._fan_speed = None
self._state = None
return
if fan_speed in self._fan_speed_list:
self._fan_speed = fan_speed
elif fan_speed == STATE_UNKNOWN:
self._fan_speed = None
else:
_LOGGER.error(
"Received invalid fan speed: %s. Expected: %s",
fan_speed,
self._fan_speed_list,
)
self._fan_speed = None
|
from functools import wraps
from rest_framework.throttling import AnonRateThrottle as DRFAnonRateThrottle
from rest_framework.throttling import UserRateThrottle as DRFUserRateThrottle
def patch_throttle_request(func):
"""Stores throttling state in request to be picked up by ThrottlingMiddleware."""
@wraps(func)
def patched(self, request, view):
result = func(self, request, view)
if result and hasattr(self, "history"):
request.META["throttling_state"] = self
return result
return patched
class AnonRateThrottle(DRFAnonRateThrottle):
@patch_throttle_request
def allow_request(self, request, view):
return super().allow_request(request, view)
class UserRateThrottle(DRFUserRateThrottle):
@patch_throttle_request
def allow_request(self, request, view):
return super().allow_request(request, view)
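# Illustrative sketch (an assumption, not part of this module): a middleware that
# consumes the throttling state stashed in request.META above, for example to expose
# rate-limit headers. The name ThrottlingHeadersMiddleware is hypothetical.
class ThrottlingHeadersMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
    def __call__(self, request):
        response = self.get_response(request)
        throttle = request.META.get("throttling_state")
        if throttle is not None:
            # DRF's SimpleRateThrottle exposes num_requests (allowed per window)
            # and history (timestamps of recent requests).
            response["X-RateLimit-Limit"] = str(throttle.num_requests)
            remaining = max(0, throttle.num_requests - len(throttle.history))
            response["X-RateLimit-Remaining"] = str(remaining)
        return response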
|
from __future__ import division
import chainer
import chainer.functions as F
from chainercv.links import Conv2DBNActiv
from chainercv.links import SeparableConv2DBNActiv
import numpy as np
class XceptionBlock(chainer.Chain):
"""A building block for Xceptions.
    In addition to the final output, this block also returns the unactivated
    output of the second separable convolution.
Args:
in_channels (int): The number of channels of the input array.
        depthlist (tuple of ints): Tuple of integers defining the number of
            channels of the intermediate arrays. The length of this tuple
            must be 3.
stride (int or tuple of ints): Stride of filter application.
dilate (int or tuple of ints): Dilation factor of filter applications.
:obj:`dilate=d` and :obj:`dilate=(d, d)` are equivalent.
skip_type (string): the type of skip connection. If :obj:`sum`,
original input is summed to output of network directly.
When :obj:`conv`, convolution layer is applied before summation.
When :obj:`none`, skip connection is not used.
The default value is :obj:`conv`.
        activ_first (boolean): If :obj:`True`, an activation function is
            applied first in this block.
            The default value is :obj:`True`.
        bn_kwargs (dict): Keyword arguments passed to initialize the batch
normalization layers of :class:`chainercv.links.Conv2DBNActiv` and
:class:`chainercv.links.SeparableConv2DBNActiv`.
"""
def __init__(self, in_channels, depthlist, stride=1, dilate=1,
skip_type='conv', activ_first=True, bn_kwargs={},
dw_activ_list=[None, None, None],
pw_activ_list=[F.relu, F.relu, None]):
super(XceptionBlock, self).__init__()
self.skip_type = skip_type
self.activ_first = activ_first
self.separable2_activ = pw_activ_list[1]
with self.init_scope():
self.separable1 = SeparableConv2DBNActiv(
in_channels, depthlist[0], 3, 1,
dilate, dilate, nobias=True, bn_kwargs=bn_kwargs,
dw_activ=dw_activ_list[0], pw_activ=pw_activ_list[0])
self.separable2 = SeparableConv2DBNActiv(
depthlist[0], depthlist[1], 3, 1,
dilate, dilate, nobias=True, bn_kwargs=bn_kwargs,
dw_activ=dw_activ_list[1], pw_activ=F.identity)
self.separable3 = SeparableConv2DBNActiv(
depthlist[1], depthlist[2], 3, stride,
dilate, dilate, nobias=True, bn_kwargs=bn_kwargs,
dw_activ=dw_activ_list[2], pw_activ=pw_activ_list[2])
if skip_type == 'conv':
self.conv = Conv2DBNActiv(
in_channels, depthlist[2], 1, activ=F.identity,
nobias=True, stride=stride, bn_kwargs=bn_kwargs)
def forward(self, x):
if self.activ_first:
h = F.relu(x)
else:
h = x
h = self.separable1(h)
h = self.separable2(h)
separable2 = h
h = self.separable2_activ(h)
h = self.separable3(h)
if self.skip_type == 'conv':
skip = self.conv(x)
h = h + skip
elif self.skip_type == 'sum':
h = h + x
elif self.skip_type == 'none':
pass
if not self.activ_first:
h = F.relu(h)
return h, separable2
class Xception65(chainer.Chain):
"""Xception65 for backbone network of DeepLab v3+.
    Unlike the original Xception65, this follows the implementation used in
    DeepLab v3
    (https://github.com/tensorflow/models/tree/master/research/deeplab).
    It returns a low-level feature (the output of the second convolution in the
    second block of the entry flow) and a high-level feature (the output just
    before the final average pooling of the original architecture).
Args:
        bn_kwargs (dict): Keyword arguments passed to initialize the batch
normalization layers of :class:`chainercv.links.Conv2DBNActiv` and
:class:`chainercv.links.SeparableConv2DBNActiv`.
"""
mean = np.array([127.5, 127.5, 127.5],
dtype=np.float32)[:, np.newaxis, np.newaxis]
def __init__(self, bn_kwargs={}):
super(Xception65, self).__init__()
with self.init_scope():
self.entryflow_conv1 = Conv2DBNActiv(
3, 32, 3, 2, 1, bn_kwargs=bn_kwargs)
self.entryflow_conv2 = Conv2DBNActiv(
32, 64, 3, 1, 1, bn_kwargs=bn_kwargs)
self.entryflow_block1 = XceptionBlock(
64, [128, 128, 128], stride=2,
skip_type='conv', bn_kwargs=bn_kwargs)
self.entryflow_block2 = XceptionBlock(
128, [256, 256, 256], stride=2,
skip_type='conv', bn_kwargs=bn_kwargs)
self.entryflow_block3 = XceptionBlock(
256, [728, 728, 728], stride=1,
skip_type='conv', bn_kwargs=bn_kwargs)
for i in range(1, 17):
block = XceptionBlock(
728, [728, 728, 728], stride=1, dilate=2,
skip_type='sum', bn_kwargs=bn_kwargs)
self.__setattr__('middleflow_block{}'.format(i), block)
self.exitflow_block1 = XceptionBlock(
728, [728, 1024, 1024], stride=1, dilate=2,
skip_type='conv', bn_kwargs=bn_kwargs)
self.exitflow_block2 = XceptionBlock(
1024, [1536, 1536, 2048], stride=1, dilate=4,
skip_type='none', bn_kwargs=bn_kwargs, activ_first=False,
dw_activ_list=[F.relu, F.relu, F.relu],
pw_activ_list=[F.relu, F.relu, F.relu])
def forward(self, x):
h = self.entryflow_conv1(x)
h = self.entryflow_conv2(h)
h, _ = self.entryflow_block1(h)
h, lowlevel = self.entryflow_block2(h)
h, _ = self.entryflow_block3(h)
for i in range(1, 17):
h, _ = self['middleflow_block{}'.format(i)](h)
h, _ = self.exitflow_block1(h)
highlevel, _ = self.exitflow_block2(h)
return lowlevel, highlevel
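# Illustrative usage sketch (an assumption, not part of chainercv): running a dummy
# image through the backbone to inspect the two returned feature maps. The 513x513
# input size is only an example.
if __name__ == '__main__':
    model = Xception65()
    x = np.zeros((1, 3, 513, 513), dtype=np.float32)
    with chainer.using_config('train', False):
        lowlevel, highlevel = model.forward(x)
    # lowlevel comes from the entry flow (higher resolution), highlevel from the
    # exit flow (lower resolution, 2048 channels).
    print(lowlevel.shape, highlevel.shape)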
|
from pydexcom import AccountError, Dexcom, SessionError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME
from homeassistant.core import callback
from .const import ( # pylint:disable=unused-import
CONF_SERVER,
DOMAIN,
MG_DL,
MMOL_L,
SERVER_OUS,
SERVER_US,
)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(CONF_SERVER): vol.In({SERVER_US, SERVER_OUS}),
}
)
class DexcomConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Dexcom."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
await self.hass.async_add_executor_job(
Dexcom,
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
user_input[CONF_SERVER] == SERVER_OUS,
)
except SessionError:
errors["base"] = "cannot_connect"
except AccountError:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
errors["base"] = "unknown"
if "base" not in errors:
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_USERNAME], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return DexcomOptionsFlowHandler(config_entry)
class DexcomOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for Dexcom."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
data_schema = vol.Schema(
{
vol.Optional(
CONF_UNIT_OF_MEASUREMENT,
default=self.config_entry.options.get(
CONF_UNIT_OF_MEASUREMENT, MG_DL
),
): vol.In({MG_DL, MMOL_L}),
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)
|
from threading import Event, Thread, current_thread
from time import time
from warnings import warn
import atexit
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
class TqdmSynchronisationWarning(RuntimeWarning):
"""tqdm multi-thread/-process errors which may cause incorrect nesting
but otherwise no adverse effects"""
pass
class TMonitor(Thread):
"""
Monitoring thread for tqdm bars.
Monitors if tqdm bars are taking too much time to display
and readjusts miniters automatically if necessary.
Parameters
----------
tqdm_cls : class
tqdm class to use (can be core tqdm or a submodule).
    sleep_interval : float
Time to sleep between monitoring checks.
"""
# internal vars for unit testing
_time = None
_event = None
def __init__(self, tqdm_cls, sleep_interval):
Thread.__init__(self)
self.daemon = True # kill thread when main killed (KeyboardInterrupt)
self.was_killed = Event()
self.woken = 0 # last time woken up, to sync with monitor
self.tqdm_cls = tqdm_cls
self.sleep_interval = sleep_interval
if TMonitor._time is not None:
self._time = TMonitor._time
else:
self._time = time
if TMonitor._event is not None:
self._event = TMonitor._event
else:
self._event = Event
atexit.register(self.exit)
self.start()
def exit(self):
self.was_killed.set()
if self is not current_thread():
self.join()
return self.report()
def get_instances(self):
# returns a copy of started `tqdm_cls` instances
return [i for i in self.tqdm_cls._instances.copy()
# Avoid race by checking that the instance started
if hasattr(i, 'start_t')]
def run(self):
cur_t = self._time()
while True:
            # After processing and before sleeping, record that we woke;
            # this needs to be done just before sleeping.
self.woken = cur_t
# Sleep some time...
self.was_killed.wait(self.sleep_interval)
# Quit if killed
if self.was_killed.is_set():
return
# Then monitor!
# Acquire lock (to access _instances)
with self.tqdm_cls.get_lock():
cur_t = self._time()
# Check tqdm instances are waiting too long to print
instances = self.get_instances()
for instance in instances:
# Check event in loop to reduce blocking time on exit
if self.was_killed.is_set():
return
                    # Only if miniters > 1 (else iterations are just slow)
# and last refresh exceeded maxinterval
if instance.miniters > 1 and \
(cur_t - instance.last_print_t) >= \
instance.maxinterval:
# force bypassing miniters on next iteration
# (dynamic_miniters adjusts mininterval automatically)
instance.miniters = 1
# Refresh now! (works only for manual tqdm)
instance.refresh(nolock=True)
if instances != self.get_instances(): # pragma: nocover
warn("Set changed size during iteration" +
" (see https://github.com/tqdm/tqdm/issues/481)",
TqdmSynchronisationWarning, stacklevel=2)
def report(self):
return not self.was_killed.is_set()
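# Illustrative usage sketch (an assumption, not a documented entry point): tqdm creates
# the monitor itself when `tqdm.monitor_interval` is positive, roughly equivalent to
#
#     from tqdm import tqdm
#     tqdm.monitor_interval = 10      # seconds between checks; 0 disables monitoring
#     for _ in tqdm(range(10 ** 7)):
#         ...                         # a stalled bar gets refreshed by the monitor
#
# A monitor can also be driven manually: TMonitor(tqdm, sleep_interval=10) starts the
# daemon thread immediately, and .exit() stops it.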
|
from copy import deepcopy
import pytest
from pytradfri.device import Device
from pytradfri.device.light import Light
from pytradfri.device.light_control import LightControl
from homeassistant.components import tradfri
from . import MOCK_GATEWAY_ID
from tests.async_mock import MagicMock, Mock, PropertyMock, patch
from tests.common import MockConfigEntry
DEFAULT_TEST_FEATURES = {
"can_set_dimmer": False,
"can_set_color": False,
"can_set_temp": False,
}
# [
# {bulb features},
# {turn_on arguments},
# {expected result}
# ]
TURN_ON_TEST_CASES = [
# Turn On
[{}, {}, {"state": "on"}],
# Brightness > 0
[{"can_set_dimmer": True}, {"brightness": 100}, {"state": "on", "brightness": 100}],
# Brightness == 1
[{"can_set_dimmer": True}, {"brightness": 1}, {"brightness": 1}],
# Brightness > 254
[{"can_set_dimmer": True}, {"brightness": 1000}, {"brightness": 254}],
# color_temp
[{"can_set_temp": True}, {"color_temp": 250}, {"color_temp": 250}],
# color_temp < 250
[{"can_set_temp": True}, {"color_temp": 1}, {"color_temp": 250}],
# color_temp > 454
[{"can_set_temp": True}, {"color_temp": 1000}, {"color_temp": 454}],
# hs color
[
{"can_set_color": True},
{"hs_color": [300, 100]},
{"state": "on", "hs_color": [300, 100]},
],
# ct + brightness
[
{"can_set_dimmer": True, "can_set_temp": True},
{"color_temp": 250, "brightness": 200},
{"state": "on", "color_temp": 250, "brightness": 200},
],
# ct + brightness (no temp support)
[
{"can_set_dimmer": True, "can_set_temp": False, "can_set_color": True},
{"color_temp": 250, "brightness": 200},
{"state": "on", "hs_color": [26.807, 34.869], "brightness": 200},
],
# ct + brightness (no temp or color support)
[
{"can_set_dimmer": True, "can_set_temp": False, "can_set_color": False},
{"color_temp": 250, "brightness": 200},
{"state": "on", "brightness": 200},
],
# hs + brightness
[
{"can_set_dimmer": True, "can_set_color": True},
{"hs_color": [300, 100], "brightness": 200},
{"state": "on", "hs_color": [300, 100], "brightness": 200},
],
]
# Result of transition is not tested, but data is passed to turn on service.
TRANSITION_CASES_FOR_TESTS = [None, 0, 1]
@pytest.fixture(autouse=True, scope="module")
def setup(request):
"""Set up patches for pytradfri methods."""
p_1 = patch(
"pytradfri.device.LightControl.raw",
new_callable=PropertyMock,
return_value=[{"mock": "mock"}],
)
p_2 = patch("pytradfri.device.LightControl.lights")
p_1.start()
p_2.start()
def teardown():
"""Remove patches for pytradfri methods."""
p_1.stop()
p_2.stop()
request.addfinalizer(teardown)
async def generate_psk(self, code):
"""Mock psk."""
return "mock"
async def setup_integration(hass):
"""Load the Tradfri platform with a mock gateway."""
entry = MockConfigEntry(
domain=tradfri.DOMAIN,
data={
"host": "mock-host",
"identity": "mock-identity",
"key": "mock-key",
"import_groups": True,
"gateway_id": MOCK_GATEWAY_ID,
},
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
def mock_light(test_features=None, test_state=None, light_number=0):
"""Mock a tradfri light."""
if test_features is None:
test_features = {}
if test_state is None:
test_state = {}
mock_light_data = Mock(**test_state)
dev_info_mock = MagicMock()
dev_info_mock.manufacturer = "manufacturer"
dev_info_mock.model_number = "model"
dev_info_mock.firmware_version = "1.2.3"
_mock_light = Mock(
id=f"mock-light-id-{light_number}",
reachable=True,
observe=Mock(),
device_info=dev_info_mock,
has_light_control=True,
has_socket_control=False,
has_blind_control=False,
has_signal_repeater_control=False,
)
_mock_light.name = f"tradfri_light_{light_number}"
# Set supported features for the light.
features = {**DEFAULT_TEST_FEATURES, **test_features}
light_control = LightControl(_mock_light)
for attr, value in features.items():
setattr(light_control, attr, value)
# Store the initial state.
setattr(light_control, "lights", [mock_light_data])
_mock_light.light_control = light_control
return _mock_light
async def test_light(hass, mock_gateway, api_factory):
"""Test that lights are correctly added."""
features = {"can_set_dimmer": True, "can_set_color": True, "can_set_temp": True}
state = {
"state": True,
"dimmer": 100,
"color_temp": 250,
"hsb_xy_color": (100, 100, 100, 100, 100),
}
mock_gateway.mock_devices.append(
mock_light(test_features=features, test_state=state)
)
await setup_integration(hass)
lamp_1 = hass.states.get("light.tradfri_light_0")
assert lamp_1 is not None
assert lamp_1.state == "on"
assert lamp_1.attributes["brightness"] == 100
assert lamp_1.attributes["hs_color"] == (0.549, 0.153)
async def test_light_observed(hass, mock_gateway, api_factory):
"""Test that lights are correctly observed."""
light = mock_light()
mock_gateway.mock_devices.append(light)
await setup_integration(hass)
assert len(light.observe.mock_calls) > 0
async def test_light_available(hass, mock_gateway, api_factory):
"""Test light available property."""
light = mock_light({"state": True}, light_number=1)
light.reachable = True
light2 = mock_light({"state": True}, light_number=2)
light2.reachable = False
mock_gateway.mock_devices.append(light)
mock_gateway.mock_devices.append(light2)
await setup_integration(hass)
assert hass.states.get("light.tradfri_light_1").state == "on"
assert hass.states.get("light.tradfri_light_2").state == "unavailable"
def create_all_turn_on_cases():
"""Create all turn on test cases."""
# Combine TURN_ON_TEST_CASES and TRANSITION_CASES_FOR_TESTS
all_turn_on_test_cases = [
["test_features", "test_data", "expected_result", "device_id"],
[],
]
index = 1
for test_case in TURN_ON_TEST_CASES:
for trans in TRANSITION_CASES_FOR_TESTS:
case = deepcopy(test_case)
if trans is not None:
case[1]["transition"] = trans
case.append(index)
index += 1
all_turn_on_test_cases[1].append(case)
return all_turn_on_test_cases
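# Each generated case keeps the original [test_features, test_data, expected_result]
# triple, optionally adds a "transition" value to test_data, and appends a running
# device_id. Illustrative shape of one generated entry (n is the running index):
# [{"can_set_dimmer": True}, {"brightness": 1000, "transition": 1}, {"brightness": 254}, n]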
@pytest.mark.parametrize(*create_all_turn_on_cases())
async def test_turn_on(
hass,
mock_gateway,
api_factory,
test_features,
test_data,
expected_result,
device_id,
):
"""Test turning on a light."""
# Note pytradfri style, not hass. Values not really important.
initial_state = {
"state": False,
"dimmer": 0,
"color_temp": 250,
"hsb_xy_color": (100, 100, 100, 100, 100),
}
# Setup the gateway with a mock light.
light = mock_light(
test_features=test_features, test_state=initial_state, light_number=device_id
)
mock_gateway.mock_devices.append(light)
await setup_integration(hass)
# Use the turn_on service call to change the light state.
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": f"light.tradfri_light_{device_id}", **test_data},
blocking=True,
)
await hass.async_block_till_done()
# Check that the light is observed.
mock_func = light.observe
assert len(mock_func.mock_calls) > 0
_, callkwargs = mock_func.call_args
assert "callback" in callkwargs
# Callback function to refresh light state.
callback = callkwargs["callback"]
responses = mock_gateway.mock_responses
# State on command data.
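    # In the raw pytradfri payload, "3311" is the light-control attribute list
    # and "5850" is the on/off state (1 == on).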
data = {"3311": [{"5850": 1}]}
# Add data for all sent commands.
for resp in responses:
data["3311"][0] = {**data["3311"][0], **resp["3311"][0]}
# Use the callback function to update the light state.
dev = Device(data)
light_data = Light(dev, 0)
light.light_control.lights[0] = light_data
callback(light)
await hass.async_block_till_done()
# Check that the state is correct.
states = hass.states.get(f"light.tradfri_light_{device_id}")
for result, value in expected_result.items():
if result == "state":
assert states.state == value
else:
# Allow some rounding error in color conversions.
assert states.attributes[result] == pytest.approx(value, abs=0.01)
async def test_turn_off(hass, mock_gateway, api_factory):
"""Test turning off a light."""
state = {"state": True, "dimmer": 100}
light = mock_light(test_state=state)
mock_gateway.mock_devices.append(light)
await setup_integration(hass)
# Use the turn_off service call to change the light state.
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.tradfri_light_0"}, blocking=True
)
await hass.async_block_till_done()
# Check that the light is observed.
mock_func = light.observe
assert len(mock_func.mock_calls) > 0
_, callkwargs = mock_func.call_args
assert "callback" in callkwargs
# Callback function to refresh light state.
callback = callkwargs["callback"]
responses = mock_gateway.mock_responses
data = {"3311": [{}]}
# Add data for all sent commands.
for resp in responses:
data["3311"][0] = {**data["3311"][0], **resp["3311"][0]}
# Use the callback function to update the light state.
dev = Device(data)
light_data = Light(dev, 0)
light.light_control.lights[0] = light_data
callback(light)
await hass.async_block_till_done()
# Check that the state is correct.
states = hass.states.get("light.tradfri_light_0")
assert states.state == "off"
def mock_group(test_state=None, group_number=0):
"""Mock a Tradfri group."""
if test_state is None:
test_state = {}
default_state = {"state": False, "dimmer": 0}
state = {**default_state, **test_state}
_mock_group = Mock(member_ids=[], observe=Mock(), **state)
_mock_group.name = f"tradfri_group_{group_number}"
return _mock_group
async def test_group(hass, mock_gateway, api_factory):
"""Test that groups are correctly added."""
mock_gateway.mock_groups.append(mock_group())
state = {"state": True, "dimmer": 100}
mock_gateway.mock_groups.append(mock_group(state, 1))
await setup_integration(hass)
group = hass.states.get("light.tradfri_group_0")
assert group is not None
assert group.state == "off"
group = hass.states.get("light.tradfri_group_1")
assert group is not None
assert group.state == "on"
assert group.attributes["brightness"] == 100
async def test_group_turn_on(hass, mock_gateway, api_factory):
"""Test turning on a group."""
group = mock_group()
group2 = mock_group(group_number=1)
group3 = mock_group(group_number=2)
mock_gateway.mock_groups.append(group)
mock_gateway.mock_groups.append(group2)
mock_gateway.mock_groups.append(group3)
await setup_integration(hass)
# Use the turn_off service call to change the light state.
await hass.services.async_call(
"light", "turn_on", {"entity_id": "light.tradfri_group_0"}, blocking=True
)
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.tradfri_group_1", "brightness": 100},
blocking=True,
)
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.tradfri_group_2", "brightness": 100, "transition": 1},
blocking=True,
)
await hass.async_block_till_done()
group.set_state.assert_called_with(1)
group2.set_dimmer.assert_called_with(100)
group3.set_dimmer.assert_called_with(100, transition_time=10)
async def test_group_turn_off(hass, mock_gateway, api_factory):
"""Test turning off a group."""
group = mock_group({"state": True})
mock_gateway.mock_groups.append(group)
await setup_integration(hass)
# Use the turn_off service call to change the light state.
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.tradfri_group_0"}, blocking=True
)
await hass.async_block_till_done()
group.set_state.assert_called_with(0)
|
import copy
import datetime
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import dpb_service
from perfkitbenchmarker import errors
from perfkitbenchmarker import sample
from perfkitbenchmarker.dpb_service import BaseDpbService
BENCHMARK_NAME = 'dpb_distcp_benchmark'
BENCHMARK_CONFIG = """
dpb_distcp_benchmark:
description: Run distcp on dataproc and emr
dpb_service:
service_type: dataproc
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-4
AWS:
machine_type: m4.xlarge
disk_spec:
GCP:
disk_size: 1500
disk_type: pd-standard
AWS:
disk_size: 1500
disk_type: gp2
worker_count: 8
"""
flags.DEFINE_enum('distcp_source_fs', BaseDpbService.GCS_FS,
[BaseDpbService.GCS_FS, BaseDpbService.S3_FS,
BaseDpbService.HDFS_FS],
'File System to use as the source of the distcp operation')
flags.DEFINE_enum('distcp_dest_fs', BaseDpbService.GCS_FS,
[BaseDpbService.GCS_FS, BaseDpbService.S3_FS,
BaseDpbService.HDFS_FS],
'File System to use as destination of the distcp operation')
flags.DEFINE_integer('distcp_file_size_mbs', 10,
'File size to use for each of the distcp source files')
flags.DEFINE_integer('distcp_num_files', 10, 'Number of distcp source files')
FLAGS = flags.FLAGS
SUPPORTED_DPB_BACKENDS = [dpb_service.DATAPROC, dpb_service.EMR]
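# Illustrative invocation (the distcp flags are defined above; the standard pkb.py
# entry point and its --benchmarks/--cloud flags are assumed):
#   ./pkb.py --benchmarks=dpb_distcp_benchmark --cloud=GCP \
#       --distcp_source_fs=gs --distcp_dest_fs=gs \
#       --distcp_num_files=100 --distcp_file_size_mbs=10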
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present.
Raises:
perfkitbenchmarker.errors.Config.InvalidValue: On encountering invalid
configuration.
"""
dpb_service_type = benchmark_config.dpb_service.service_type
if dpb_service_type not in SUPPORTED_DPB_BACKENDS:
raise errors.Config.InvalidValue('Invalid backend for distcp. Not in:{}'.
format(str(SUPPORTED_DPB_BACKENDS)))
def Prepare(benchmark_spec):
del benchmark_spec # Unused.
def Run(benchmark_spec):
"""Runs distributed_copy benchmark and reports the results.
Args:
    benchmark_spec: Spec needed to run the distributed copy (distcp) benchmark
Returns:
A list of samples
"""
run_uri = benchmark_spec.uuid.split('-')[0]
source = '{}'.format(run_uri)
update_source_default_fs = False
if FLAGS.distcp_source_fs != BaseDpbService.HDFS_FS:
benchmark_spec.dpb_service.CreateBucket(source)
source = '{}://{}'.format(FLAGS.distcp_source_fs, source)
update_source_default_fs = True
source_dir = '{}{}'.format(source, '/dfsio')
source_data_dir = '{}{}'.format(source_dir, '/io_data')
# TODO(saksena): Respond to data generation failure
benchmark_spec.dpb_service.generate_data(source_dir, update_source_default_fs,
FLAGS.distcp_num_files,
FLAGS.distcp_file_size_mbs)
  destination_dir = ('{}://{}{}'.format(FLAGS.distcp_dest_fs, run_uri,
'/dfsio_destination')
if (FLAGS.distcp_dest_fs != BaseDpbService.HDFS_FS) else
'/{}{}'.format(run_uri, '/dfsio_destination'))
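  # Illustrative result, assuming a gs:// source and destination and run_uri
  # 'abc123':
  #   source_data_dir -> 'gs://abc123/dfsio/io_data'
  #   destination_dir -> 'gs://abc123/dfsio_destination'
  # With an HDFS destination, destination_dir is '/abc123/dfsio_destination'.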
start = datetime.datetime.now()
benchmark_spec.dpb_service.distributed_copy(source_data_dir, destination_dir)
end_time = datetime.datetime.now()
run_time = (end_time - start).total_seconds()
results = []
metadata = copy.copy(benchmark_spec.dpb_service.GetMetadata())
metadata.update({'source_fs': FLAGS.distcp_source_fs})
metadata.update({'destination_fs': FLAGS.distcp_dest_fs})
metadata.update({'distcp_num_files': FLAGS.distcp_num_files})
metadata.update({'distcp_file_size_mbs': FLAGS.distcp_file_size_mbs})
if FLAGS.zones:
zone = FLAGS.zones[0]
region = zone.rsplit('-', 1)[0]
metadata.update({'regional': True})
metadata.update({'region': region})
elif FLAGS.cloud == 'AWS':
metadata.update({'regional': True})
metadata.update({'region': 'aws_default'})
results.append(sample.Sample('run_time', run_time, 'seconds', metadata))
benchmark_spec.dpb_service.cleanup_data(source, update_source_default_fs)
return results
def Cleanup(benchmark_spec):
"""Cleans up the distcp benchmark"""
del benchmark_spec # Unused.
|
import vilfo
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.vilfo.const import DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_ID, CONF_MAC
from tests.async_mock import Mock, patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
mock_mac = "FF-00-00-00-00-00"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.get_board_information", return_value=None
), patch("vilfo.Client.resolve_mac_address", return_value=mock_mac), patch(
"homeassistant.components.vilfo.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.vilfo.async_setup_entry"
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_HOST: "testadmin.vilfo.com", CONF_ACCESS_TOKEN: "test-token"},
)
await hass.async_block_till_done()
assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result2["title"] == "testadmin.vilfo.com"
assert result2["data"] == {
"host": "testadmin.vilfo.com",
"access_token": "test-token",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.resolve_mac_address", return_value=None
), patch(
"vilfo.Client.get_board_information",
side_effect=vilfo.exceptions.AuthenticationException,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "testadmin.vilfo.com", "access_token": "test-token"},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("vilfo.Client.ping", side_effect=vilfo.exceptions.VilfoException), patch(
"vilfo.Client.resolve_mac_address"
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "testadmin.vilfo.com", "access_token": "test-token"},
)
assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result2["errors"] == {"base": "cannot_connect"}
with patch("vilfo.Client.ping", side_effect=vilfo.exceptions.VilfoException), patch(
"vilfo.Client.resolve_mac_address"
):
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "testadmin.vilfo.com", "access_token": "test-token"},
)
assert result3["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result3["errors"] == {"base": "cannot_connect"}
async def test_form_wrong_host(hass):
"""Test we handle wrong host errors."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data={"host": "this is an invalid hostname", "access_token": "test-token"},
)
assert result["errors"] == {"host": "wrong_host"}
async def test_form_already_configured(hass):
"""Test that we handle already configured exceptions appropriately."""
first_flow_result1 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.get_board_information",
return_value=None,
), patch("vilfo.Client.resolve_mac_address", return_value=None):
first_flow_result2 = await hass.config_entries.flow.async_configure(
first_flow_result1["flow_id"],
{CONF_HOST: "testadmin.vilfo.com", CONF_ACCESS_TOKEN: "test-token"},
)
second_flow_result1 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.get_board_information",
return_value=None,
), patch("vilfo.Client.resolve_mac_address", return_value=None):
second_flow_result2 = await hass.config_entries.flow.async_configure(
second_flow_result1["flow_id"],
{CONF_HOST: "testadmin.vilfo.com", CONF_ACCESS_TOKEN: "test-token"},
)
assert first_flow_result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert second_flow_result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert second_flow_result2["reason"] == "already_configured"
async def test_form_unexpected_exception(hass):
"""Test that we handle unexpected exceptions."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.vilfo.config_flow.VilfoClient",
) as mock_client:
mock_client.return_value.ping = Mock(side_effect=Exception)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "testadmin.vilfo.com", "access_token": "test-token"},
)
assert result2["errors"] == {"base": "unknown"}
async def test_validate_input_returns_data(hass):
"""Test we handle the MAC address being resolved or not."""
mock_data = {"host": "testadmin.vilfo.com", "access_token": "test-token"}
mock_data_with_ip = {"host": "192.168.0.1", "access_token": "test-token"}
mock_mac = "FF-00-00-00-00-00"
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.get_board_information", return_value=None
), patch("vilfo.Client.resolve_mac_address", return_value=None):
result = await hass.components.vilfo.config_flow.validate_input(
hass, data=mock_data
)
assert result["title"] == mock_data["host"]
assert result[CONF_HOST] == mock_data["host"]
assert result[CONF_MAC] is None
assert result[CONF_ID] == mock_data["host"]
with patch("vilfo.Client.ping", return_value=None), patch(
"vilfo.Client.get_board_information", return_value=None
), patch("vilfo.Client.resolve_mac_address", return_value=mock_mac):
result2 = await hass.components.vilfo.config_flow.validate_input(
hass, data=mock_data
)
result3 = await hass.components.vilfo.config_flow.validate_input(
hass, data=mock_data_with_ip
)
assert result2["title"] == mock_data["host"]
assert result2[CONF_HOST] == mock_data["host"]
assert result2[CONF_MAC] == mock_mac
assert result2[CONF_ID] == mock_mac
assert result3["title"] == mock_data_with_ip["host"]
assert result3[CONF_HOST] == mock_data_with_ip["host"]
assert result3[CONF_MAC] == mock_mac
assert result3[CONF_ID] == mock_mac
|
from ordered_set import OrderedSet
from ..utils import dumps_list
from abc import abstractmethod, ABCMeta
from reprlib import recursive_repr
from inspect import getfullargspec
class _CreatePackages(ABCMeta):
def __init__(cls, name, bases, d): # noqa
packages = OrderedSet()
for b in bases:
if hasattr(b, 'packages'):
packages |= b.packages
if 'packages' in d:
packages |= d['packages']
cls.packages = packages
super().__init__(name, bases, d)
class LatexObject(metaclass=_CreatePackages):
"""The class that every other LaTeX class is a subclass of.
This class implements the main methods that every LaTeX object needs. For
conversion to LaTeX formatted strings it implements the dumps, dump and
generate_tex methods. It also provides the methods that can be used to
represent the packages required by the LatexObject.
"""
_latex_name = None
_star_latex_name = False # latex_name + ('*' if True else '')
#: Set this to an iterable to override the list of default repr
#: attributes.
_repr_attributes_override = None
#: Set this to a dict to change some of the default repr attributes to
#: other attributes. The key is the old one, the value the new one.
_repr_attributes_mapping = None
#: Set on a class to make instances default to a certain kind of escaping
_default_escape = True
#: Only set this directly by changing the cls.escape
_escape = None
@property
def escape(self):
"""Determine whether or not to escape content of this class.
This defaults to `True` for most classes.
"""
if self._escape is not None:
return self._escape
if self._default_escape is not None:
return self._default_escape
return True
@escape.setter
def escape(self, value):
"""Escape flag setter - to be used at object level."""
self._escape = value
#: Start a new paragraph before this environment.
begin_paragraph = False
#: Start a new paragraph after this environment.
end_paragraph = False
#: Same as enabling `begin_paragraph` and `end_paragraph`, so
#: effectively placing this element in its own paragraph.
separate_paragraph = False
def __init__(self):
        # TODO: only create a copy of packages when it will be changed
# Create a copy of the packages attribute, so changing it in an
# instance will not change the class default.
self.packages = self.packages.copy()
@recursive_repr()
def __repr__(self):
"""Create a printable representation of the object."""
return self.__class__.__name__ + '(' + \
', '.join(map(repr, self._repr_values)) + ')'
@property
def _repr_values(self):
"""Return values that are to be shown in repr string."""
def getattr_better(obj, field):
try:
return getattr(obj, field)
except AttributeError as e:
try:
return getattr(obj, '_' + field)
except AttributeError:
raise e
return (getattr_better(self, attr) for attr in self._repr_attributes)
@property
def _repr_attributes(self):
"""Return attributes that should be part of the repr string."""
if self._repr_attributes_override is None:
# Default to init arguments
attrs = getfullargspec(self.__init__).args[1:]
mapping = self._repr_attributes_mapping
if mapping:
attrs = [mapping[a] if a in mapping else a for a in attrs]
return attrs
return self._repr_attributes_override
@property
def latex_name(self):
"""Return the name of the class used in LaTeX.
It can be `None` when the class doesn't have a name.
"""
star = ('*' if self._star_latex_name else '')
if self._latex_name is not None:
return self._latex_name + star
return self.__class__.__name__.lower() + star
@latex_name.setter
def latex_name(self, value):
self._latex_name = value
@abstractmethod
def dumps(self):
"""Represent the class as a string in LaTeX syntax.
This method should be implemented by any class that subclasses this
class.
"""
def dump(self, file_w):
"""Write the LaTeX representation of the class to a file.
Args
----
file_w: io.TextIOBase
The file object in which to save the data
"""
file_w.write(self.dumps())
def generate_tex(self, filepath):
"""Generate a .tex file.
Args
----
filepath: str
The name of the file (without .tex)
"""
with open(filepath + '.tex', 'w', encoding='utf-8') as newf:
self.dump(newf)
def dumps_packages(self):
"""Represent the packages needed as a string in LaTeX syntax.
Returns
-------
        str
"""
return dumps_list(self.packages)
def dump_packages(self, file_w):
"""Write the LaTeX representation of the packages to a file.
Args
----
file_w: io.TextIOBase
The file object in which to save the data
"""
file_w.write(self.dumps_packages())
def dumps_as_content(self):
"""Create a string representation of the object as content.
This is currently only used to add new lines before and after the
output of the dumps function. These can be added or removed by changing
the `begin_paragraph`, `end_paragraph` and `separate_paragraph`
attributes of the class.
"""
string = self.dumps()
if self.separate_paragraph or self.begin_paragraph:
string = '\n\n' + string.lstrip('\n')
if self.separate_paragraph or self.end_paragraph:
string = string.rstrip('\n') + '\n\n'
return string
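# Minimal usage sketch (illustrative only, not part of the original module; assumes
# the installed ``pylatex`` package). A concrete subclass only has to implement
# dumps(); dump(), generate_tex() and the package bookkeeping are inherited:
#
#     from pylatex.base_classes import LatexObject
#     from pylatex.package import Package
#
#     class HorizontalRule(LatexObject):
#         packages = [Package('xcolor')]
#
#         def dumps(self):
#             return r'\noindent\rule{\textwidth}{0.4pt}'
#
#     rule = HorizontalRule()
#     rule.dumps()           # the LaTeX snippet itself
#     rule.dumps_packages()  # the \usepackage line(s) for the required packages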
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from netapp_inode import netapp_inode
###############################################################################
class Testnetapp_inode(CollectorTestCase):
def setUp(self):
config = get_collector_config('netapp_inode', {
})
self.collector = netapp_inode(config, None)
def test_import(self):
self.assertTrue(netapp_inode)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
import pytest
from homeassistant.config import YAML_CONFIG_FILE
import homeassistant.scripts.check_config as check_config
from tests.async_mock import patch
from tests.common import get_test_config_dir, patch_yaml_files
_LOGGER = logging.getLogger(__name__)
BASE_CONFIG = (
"homeassistant:\n"
" name: Home\n"
" latitude: -26.107361\n"
" longitude: 28.054500\n"
" elevation: 1600\n"
" unit_system: metric\n"
" time_zone: GMT\n"
"\n\n"
)
BAD_CORE_CONFIG = "homeassistant:\n unit_system: bad\n\n\n"
@pytest.fixture(autouse=True)
async def apply_stop_hass(stop_hass):
"""Make sure all hass are stopped."""
def normalize_yaml_files(check_dict):
"""Remove configuration path from ['yaml_files']."""
root = get_test_config_dir()
return [key.replace(root, "...") for key in sorted(check_dict["yaml_files"].keys())]
@patch("os.path.isfile", return_value=True)
def test_bad_core_config(isfile_patch, loop):
"""Test a bad core config setup."""
files = {YAML_CONFIG_FILE: BAD_CORE_CONFIG}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir())
assert res["except"].keys() == {"homeassistant"}
assert res["except"]["homeassistant"][1] == {"unit_system": "bad"}
@patch("os.path.isfile", return_value=True)
def test_config_platform_valid(isfile_patch, loop):
"""Test a valid platform setup."""
files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: demo"}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir())
assert res["components"].keys() == {"homeassistant", "light"}
assert res["components"]["light"] == [{"platform": "demo"}]
assert res["except"] == {}
assert res["secret_cache"] == {}
assert res["secrets"] == {}
assert len(res["yaml_files"]) == 1
@patch("os.path.isfile", return_value=True)
def test_component_platform_not_found(isfile_patch, loop):
"""Test errors if component or platform not found."""
# Make sure they don't exist
files = {YAML_CONFIG_FILE: BASE_CONFIG + "beer:"}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir())
assert res["components"].keys() == {"homeassistant"}
assert res["except"] == {
check_config.ERROR_STR: [
"Component error: beer - Integration 'beer' not found."
]
}
assert res["secret_cache"] == {}
assert res["secrets"] == {}
assert len(res["yaml_files"]) == 1
files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: beer"}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir())
assert res["components"].keys() == {"homeassistant", "light"}
assert res["components"]["light"] == []
assert res["except"] == {
check_config.ERROR_STR: [
"Platform error light.beer - Integration 'beer' not found."
]
}
assert res["secret_cache"] == {}
assert res["secrets"] == {}
assert len(res["yaml_files"]) == 1
@patch("os.path.isfile", return_value=True)
def test_secrets(isfile_patch, loop):
"""Test secrets config checking method."""
secrets_path = get_test_config_dir("secrets.yaml")
files = {
get_test_config_dir(YAML_CONFIG_FILE): BASE_CONFIG
+ ("http:\n cors_allowed_origins: !secret http_pw"),
secrets_path: ("logger: debug\nhttp_pw: http://google.com"),
}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir(), True)
assert res["except"] == {}
assert res["components"].keys() == {"homeassistant", "http"}
assert res["components"]["http"] == {
"cors_allowed_origins": ["http://google.com"],
"ip_ban_enabled": True,
"login_attempts_threshold": -1,
"server_port": 8123,
"ssl_profile": "modern",
}
assert res["secret_cache"] == {secrets_path: {"http_pw": "http://google.com"}}
assert res["secrets"] == {"http_pw": "http://google.com"}
assert normalize_yaml_files(res) == [
".../configuration.yaml",
".../secrets.yaml",
]
@patch("os.path.isfile", return_value=True)
def test_package_invalid(isfile_patch, loop):
"""Test an invalid package."""
files = {
        YAML_CONFIG_FILE: BASE_CONFIG + (" packages:\n  p1:\n" '   group: ["a"]')
}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir())
assert res["except"].keys() == {"homeassistant.packages.p1.group"}
assert res["except"]["homeassistant.packages.p1.group"][1] == {"group": ["a"]}
assert len(res["except"]) == 1
assert res["components"].keys() == {"homeassistant"}
assert len(res["components"]) == 1
assert res["secret_cache"] == {}
assert res["secrets"] == {}
assert len(res["yaml_files"]) == 1
def test_bootstrap_error(loop):
"""Test a valid platform setup."""
files = {YAML_CONFIG_FILE: BASE_CONFIG + "automation: !include no.yaml"}
with patch_yaml_files(files):
res = check_config.check(get_test_config_dir(YAML_CONFIG_FILE))
err = res["except"].pop(check_config.ERROR_STR)
assert len(err) == 1
assert res["except"] == {}
assert res["components"] == {} # No components, load failed
assert res["secret_cache"] == {}
assert res["secrets"] == {}
assert res["yaml_files"] == {}
|
import asyncio
from collections import defaultdict
from datetime import timedelta
import logging
import async_timeout
from teslajsonpy import Controller as TeslaAPI, TeslaException
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
CONF_ACCESS_TOKEN,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_TOKEN,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.util import slugify
from .config_flow import (
CannotConnect,
InvalidAuth,
configured_instances,
validate_input,
)
from .const import (
CONF_WAKE_ON_START,
DATA_LISTENER,
DEFAULT_SCAN_INTERVAL,
DEFAULT_WAKE_ON_START,
DOMAIN,
ICONS,
MIN_SCAN_INTERVAL,
TESLA_COMPONENTS,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): vol.All(cv.positive_int, vol.Clamp(min=MIN_SCAN_INTERVAL)),
}
)
},
extra=vol.ALLOW_EXTRA,
)
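# Illustrative configuration.yaml entry matching CONFIG_SCHEMA (the top-level key
# is the integration domain, assumed here to be "tesla"):
#
#   tesla:
#     username: user@example.com
#     password: !secret tesla_password
#     scan_interval: 660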
@callback
def _async_save_tokens(hass, config_entry, access_token, refresh_token):
hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
CONF_ACCESS_TOKEN: access_token,
CONF_TOKEN: refresh_token,
},
)
async def async_setup(hass, base_config):
"""Set up of Tesla component."""
def _update_entry(email, data=None, options=None):
data = data or {}
options = options or {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START,
}
for entry in hass.config_entries.async_entries(DOMAIN):
if email != entry.title:
continue
hass.config_entries.async_update_entry(entry, data=data, options=options)
config = base_config.get(DOMAIN)
if not config:
return True
email = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
scan_interval = config[CONF_SCAN_INTERVAL]
if email in configured_instances(hass):
try:
info = await validate_input(hass, config)
except (CannotConnect, InvalidAuth):
return False
_update_entry(
email,
data={
CONF_ACCESS_TOKEN: info[CONF_ACCESS_TOKEN],
CONF_TOKEN: info[CONF_TOKEN],
},
options={CONF_SCAN_INTERVAL: scan_interval},
)
else:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: email, CONF_PASSWORD: password},
)
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][email] = {CONF_SCAN_INTERVAL: scan_interval}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Tesla as config entry."""
hass.data.setdefault(DOMAIN, {})
config = config_entry.data
websession = aiohttp_client.async_get_clientsession(hass)
email = config_entry.title
if email in hass.data[DOMAIN] and CONF_SCAN_INTERVAL in hass.data[DOMAIN][email]:
scan_interval = hass.data[DOMAIN][email][CONF_SCAN_INTERVAL]
hass.config_entries.async_update_entry(
config_entry, options={CONF_SCAN_INTERVAL: scan_interval}
)
hass.data[DOMAIN].pop(email)
try:
controller = TeslaAPI(
websession,
refresh_token=config[CONF_TOKEN],
access_token=config[CONF_ACCESS_TOKEN],
update_interval=config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
)
(refresh_token, access_token) = await controller.connect(
wake_if_asleep=config_entry.options.get(
CONF_WAKE_ON_START, DEFAULT_WAKE_ON_START
)
)
except TeslaException as ex:
_LOGGER.error("Unable to communicate with Tesla API: %s", ex.message)
return False
_async_save_tokens(hass, config_entry, access_token, refresh_token)
coordinator = TeslaDataUpdateCoordinator(
hass, config_entry=config_entry, controller=controller
)
# Fetch initial data so we have data when entities subscribe
entry_data = hass.data[DOMAIN][config_entry.entry_id] = {
"coordinator": coordinator,
"devices": defaultdict(list),
DATA_LISTENER: [config_entry.add_update_listener(update_listener)],
}
_LOGGER.debug("Connected to the Tesla API")
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
all_devices = controller.get_homeassistant_components()
if not all_devices:
return False
for device in all_devices:
entry_data["devices"][device.hass_type].append(device)
for component in TESLA_COMPONENTS:
_LOGGER.debug("Loading %s", component)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in TESLA_COMPONENTS
]
)
)
for listener in hass.data[DOMAIN][config_entry.entry_id][DATA_LISTENER]:
listener()
username = config_entry.title
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
_LOGGER.debug("Unloaded entry for %s", username)
return True
return False
async def update_listener(hass, config_entry):
"""Update when config_entry options update."""
controller = hass.data[DOMAIN][config_entry.entry_id]["coordinator"].controller
old_update_interval = controller.update_interval
controller.update_interval = config_entry.options.get(CONF_SCAN_INTERVAL)
if old_update_interval != controller.update_interval:
_LOGGER.debug(
"Changing scan_interval from %s to %s",
old_update_interval,
controller.update_interval,
)
class TeslaDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Tesla data."""
def __init__(self, hass, *, config_entry, controller):
"""Initialize global Tesla data updater."""
self.controller = controller
self.config_entry = config_entry
update_interval = timedelta(seconds=MIN_SCAN_INTERVAL)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
async def _async_update_data(self):
"""Fetch data from API endpoint."""
if self.controller.is_token_refreshed():
(refresh_token, access_token) = self.controller.get_tokens()
_async_save_tokens(
self.hass, self.config_entry, access_token, refresh_token
)
_LOGGER.debug("Saving new tokens in config_entry")
try:
# Note: asyncio.TimeoutError and aiohttp.ClientError are already
# handled by the data update coordinator.
async with async_timeout.timeout(30):
return await self.controller.update()
except TeslaException as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
class TeslaDevice(CoordinatorEntity):
"""Representation of a Tesla device."""
def __init__(self, tesla_device, coordinator):
"""Initialise the Tesla device."""
super().__init__(coordinator)
self.tesla_device = tesla_device
self._name = self.tesla_device.name
self._unique_id = slugify(self.tesla_device.uniq_name)
self._attributes = self.tesla_device.attrs.copy()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def icon(self):
"""Return the icon of the sensor."""
if self.device_class:
return None
return ICONS.get(self.tesla_device.type)
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = self._attributes
if self.tesla_device.has_battery():
attr[ATTR_BATTERY_LEVEL] = self.tesla_device.battery_level()
attr[ATTR_BATTERY_CHARGING] = self.tesla_device.battery_charging()
return attr
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self.tesla_device.id())},
"name": self.tesla_device.car_name(),
"manufacturer": "Tesla",
"model": self.tesla_device.car_type,
"sw_version": self.tesla_device.car_version,
}
async def async_added_to_hass(self):
"""Register state update callback."""
self.async_on_remove(self.coordinator.async_add_listener(self.refresh))
@callback
def refresh(self) -> None:
"""Refresh the state of the device.
This assumes the coordinator has updated the controller.
"""
self.tesla_device.refresh()
self._attributes = self.tesla_device.attrs.copy()
self.async_write_ha_state()
|
from __future__ import print_function
import re
import sys
import unittest
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from rosunit.xmlrunner import XMLTestRunner
class XMLTestRunnerTest(unittest.TestCase):
def setUp(self):
self._stream = StringIO()
def _try_test_run(self, test_class, expected):
"""Run the test suite against the supplied test class and compare the
XML result against the expected XML string. Fail if the expected
string doesn't match the actual string. All time attribute in the
expected string should have the value "0.000". All error and failure
messages are reduced to "Foobar"."""
runner = XMLTestRunner(self._stream)
runner.run(unittest.makeSuite(test_class))
got = self._stream.getvalue()
# Replace all time="X.YYY" attributes by time="0.000" to enable a
# simple string comparison.
got = re.sub(r'time="\d+\.\d+"', 'time="0.000"', got)
# Likewise, replace all failure and error messages by a simple "Foobar"
# string.
got = re.sub(r'(?s)<failure (.*?)>.*?</failure>', r'<failure \1>Foobar</failure>', got)
got = re.sub(r'(?s)<error (.*?)>.*?</error>', r'<error \1>Foobar</error>', got)
self.assertIn(got, expected)
def test_no_tests(self):
"""Regression test: Check whether a test run without any tests
matches a previous run.
"""
class TestTest(unittest.TestCase):
pass
self._try_test_run(TestTest, ["""<testsuite errors="0" failures="0" name="unittest.suite.TestSuite" tests="0" time="0.000"><system-out><![CDATA[\n\n]]></system-out><system-err><![CDATA[\n\n]]></system-err></testsuite>"""])
def test_success(self):
"""Regression test: Check whether a test run with a successful test
matches a previous run.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
pass
py2_expected = """<testsuite errors="0" failures="0" name="unittest.suite.TestSuite" tests="1" time="0.000"><testcase classname="test.test_xmlrunner.TestTest" name="test_foo" time="0.000" /><system-out><![CDATA[\n\n]]></system-out><system-err><![CDATA[\n\n]]></system-err></testsuite>"""
py3_expected = py2_expected.replace('TestTest', 'XMLTestRunnerTest.test_success.<locals>.TestTest')
self._try_test_run(TestTest, [py2_expected, py3_expected])
def test_failure(self):
"""Regression test: Check whether a test run with a failing test
matches a previous run.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
self.assert_(False)
py2_expected = """<testsuite errors="0" failures="1" name="unittest.suite.TestSuite" tests="1" time="0.000"><testcase classname="test.test_xmlrunner.TestTest" name="test_foo" time="0.000"><failure type="AssertionError">Foobar</failure></testcase><system-out><![CDATA[\n\n]]></system-out><system-err><![CDATA[\n\n]]></system-err></testsuite>"""
py3_expected = py2_expected.replace('TestTest', 'XMLTestRunnerTest.test_failure.<locals>.TestTest')
self._try_test_run(TestTest, [py2_expected, py3_expected])
def test_error(self):
"""Regression test: Check whether a test run with a erroneous test
matches a previous run.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
raise IndexError()
py2_expected = """<testsuite errors="1" failures="0" name="unittest.suite.TestSuite" tests="1" time="0.000"><testcase classname="test.test_xmlrunner.TestTest" name="test_foo" time="0.000"><error type="IndexError">Foobar</error></testcase><system-out><![CDATA[\n\n]]></system-out><system-err><![CDATA[\n\n]]></system-err></testsuite>"""
py3_expected = py2_expected.replace('TestTest', 'XMLTestRunnerTest.test_error.<locals>.TestTest')
self._try_test_run(TestTest, [py2_expected, py3_expected])
def test_stdout_capture(self):
"""Regression test: Check whether a test run with output to stdout
matches a previous run.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
print('Foo > Bar')
py2_expected = """<testsuite errors="0" failures="0" name="unittest.suite.TestSuite" tests="1" time="0.000"><testcase classname="test.test_xmlrunner.TestTest" name="test_foo" time="0.000" /><system-out><![CDATA[\nFoo > Bar\n\n]]></system-out><system-err><![CDATA[\n\n]]></system-err></testsuite>"""
py3_expected = py2_expected.replace('TestTest', 'XMLTestRunnerTest.test_stdout_capture.<locals>.TestTest')
self._try_test_run(TestTest, [py2_expected, py3_expected])
def test_stderr_capture(self):
"""Regression test: Check whether a test run with output to stderr
matches a previous run.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
print('Foo > Bar', file=sys.stderr)
py2_expected = """<testsuite errors="0" failures="0" name="unittest.suite.TestSuite" tests="1" time="0.000"><testcase classname="test.test_xmlrunner.TestTest" name="test_foo" time="0.000" /><system-out><![CDATA[\n\n]]></system-out><system-err><![CDATA[\nFoo > Bar\n\n]]></system-err></testsuite>"""
py3_expected = py2_expected.replace('TestTest', 'XMLTestRunnerTest.test_stderr_capture.<locals>.TestTest')
self._try_test_run(TestTest, [py2_expected, py3_expected])
class NullStream(object):
"""A file-like object that discards everything written to it."""
def write(self, buffer):
pass
def test_unittests_changing_stdout(self):
"""Check whether the XMLTestRunner recovers gracefully from unit tests
that change stdout, but don't change it back properly.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
sys.stdout = XMLTestRunnerTest.NullStream()
runner = XMLTestRunner(self._stream)
runner.run(unittest.makeSuite(TestTest))
def test_unittests_changing_stderr(self):
"""Check whether the XMLTestRunner recovers gracefully from unit tests
that change stderr, but don't change it back properly.
"""
class TestTest(unittest.TestCase):
def test_foo(self):
sys.stderr = XMLTestRunnerTest.NullStream()
runner = XMLTestRunner(self._stream)
runner.run(unittest.makeSuite(TestTest))
class XMLTestProgram(unittest.TestProgram):
def runTests(self):
if self.testRunner is None:
self.testRunner = XMLTestRunner()
unittest.TestProgram.runTests(self)
main = XMLTestProgram
if __name__ == '__main__':
main(module=None)
|
def replace_emoticons(string):
"""Replace emoticon words in string with corresponding emoji."""
return _replace_words(HANGOUTS_EMOTICONS_TO_EMOJI, string)
def _replace_words(replacements, string):
"""Replace words with corresponding values in replacements dict.
Words must be separated by spaces or newlines.
"""
output_lines = []
for line in string.split('\n'):
output_words = []
for word in line.split(' '):
new_word = replacements.get(word, word)
output_words.append(new_word)
output_lines.append(output_words)
return '\n'.join(' '.join(output_words) for output_words in output_lines)
# Emoticon conversions extracted from hangouts.google.com
HANGOUTS_EMOTICONS_TO_EMOJI = {
':)': '\U0000263a',
':-)': '\U0000263a',
'<3': '\U00002764',
'-<@%': '\U0001f41d',
':(|)': '\U0001f435',
':(:)': '\U0001f437',
'(y)': '\U0001f44d',
'(Y)': '\U0001f44d',
'(n)': '\U0001f44e',
'(N)': '\U0001f44e',
'(]:{': '\U0001f473',
'<\\3': '\U0001f494',
'</3': '\U0001f494',
'~@~': '\U0001f4a9',
':D': '\U0001f600',
':-D': '\U0001f600',
'^_^': '\U0001f601',
":''D": '\U0001f602',
'=D': '\U0001f604',
'^_^;;': '\U0001f605',
'O:)': '\U0001f607',
'O=)': '\U0001f607',
'O:-)': '\U0001f607',
'}:-)': '\U0001f608',
'}=)': '\U0001f608',
'}:)': '\U0001f608',
';-)': '\U0001f609',
';)': '\U0001f609',
'=)': '\U0001f60a',
'B-)': '\U0001f60e',
':,': '\U0001f60f',
':-,': '\U0001f60f',
'=|': '\U0001f610',
':-|': '\U0001f610',
':|': '\U0001f610',
'-_-': '\U0001f611',
'o_o;': '\U0001f613',
'u_u': '\U0001f614',
'=\\': '\U0001f615',
':-\\': '\U0001f615',
':-/': '\U0001f615',
':\\': '\U0001f615',
':/': '\U0001f615',
'=/': '\U0001f615',
':-s': '\U0001f616',
':-S': '\U0001f616',
':S': '\U0001f616',
':s': '\U0001f616',
':*': '\U0001f617',
':-*': '\U0001f617',
';-*': '\U0001f618',
';*': '\U0001f618',
'=*': '\U0001f61a',
':-P': '\U0001f61b',
':p': '\U0001f61b',
':-p': '\U0001f61b',
':P': '\U0001f61b',
'=P': '\U0001f61b',
'=p': '\U0001f61b',
';p': '\U0001f61c',
';P': '\U0001f61c',
';-p': '\U0001f61c',
';-P': '\U0001f61c',
':(': '\U0001f61e',
'=(': '\U0001f61e',
':-(': '\U0001f61e',
'>.<': '\U0001f621',
'>=(': '\U0001f621',
'>:(': '\U0001f621',
'>:-(': '\U0001f621',
';_;': '\U0001f622',
"='(": '\U0001f622',
'T_T': '\U0001f622',
":'(": '\U0001f622',
'>_<': '\U0001f623',
'D:': '\U0001f626',
":''(": '\U0001f62d',
':o': '\U0001f62e',
':-o': '\U0001f62e',
':-O': '\U0001f62e',
'=O': '\U0001f62e',
':O': '\U0001f62e',
'o.o': '\U0001f62e',
'=o': '\U0001f62e',
'O.O': '\U0001f632',
'X-O': '\U0001f635',
'x_x': '\U0001f635',
'X(': '\U0001f635',
'X-o': '\U0001f635',
'X-(': '\U0001f635',
':X)': '\U0001f638',
'(=^..^=)': '\U0001f638',
':3': '\U0001f638',
'=^_^=': '\U0001f638',
'(=^.^=)': '\U0001f638',
'!:)': '\U0001f643',
'!:-)': '\U0001f643',
'>:(X': '\U0001f645',
'o/': '\U0001f64b',
'\\o': '\U0001f64b',
':)X': '\U0001f917',
'>:D<': '\U0001f917',
':-)X': '\U0001f917',
'\\m/': '\U0001f918',
'V.v.V': '\U0001f980',
}
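# Illustrative usage (not part of the original module): emoticons are replaced only
# when they appear as standalone words separated by spaces or newlines.
if __name__ == '__main__':
    # ':)' and '<3' are standalone words and get swapped; the smiley glued to
    # 'hi:)' is left untouched.
    print(replace_emoticons(':) see you\nhi:) <3'))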
|
from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import ENTRY_STATE_LOADED, ENTRY_STATE_NOT_LOADED
from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_setup_component
from . import MOCK_CONFIG, FritzDeviceSwitchMock
from tests.async_mock import Mock, call
from tests.common import MockConfigEntry
async def test_setup(hass: HomeAssistantType, fritz: Mock):
"""Test setup of integration."""
assert await async_setup_component(hass, FB_DOMAIN, MOCK_CONFIG)
await hass.async_block_till_done()
entries = hass.config_entries.async_entries()
assert entries
assert entries[0].data[CONF_HOST] == "fake_host"
assert entries[0].data[CONF_PASSWORD] == "fake_pass"
assert entries[0].data[CONF_USERNAME] == "fake_user"
assert fritz.call_count == 1
assert fritz.call_args_list == [
call(host="fake_host", password="fake_pass", user="fake_user")
]
async def test_setup_duplicate_config(hass: HomeAssistantType, fritz: Mock, caplog):
"""Test duplicate config of integration."""
DUPLICATE = {
FB_DOMAIN: {
CONF_DEVICES: [
MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
]
}
}
assert not await async_setup_component(hass, FB_DOMAIN, DUPLICATE)
await hass.async_block_till_done()
assert not hass.states.async_entity_ids()
assert not hass.states.async_all()
assert "duplicate host entries found" in caplog.text
async def test_unload(hass: HomeAssistantType, fritz: Mock):
"""Test unload of integration."""
fritz().get_devices.return_value = [FritzDeviceSwitchMock()]
entity_id = f"{SWITCH_DOMAIN}.fake_name"
entry = MockConfigEntry(
domain=FB_DOMAIN,
data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
unique_id=entity_id,
)
entry.add_to_hass(hass)
config_entries = hass.config_entries.async_entries(FB_DOMAIN)
assert len(config_entries) == 1
assert entry is config_entries[0]
assert await async_setup_component(hass, FB_DOMAIN, {}) is True
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_LOADED
state = hass.states.get(entity_id)
assert state
await hass.config_entries.async_unload(entry.entry_id)
assert fritz().logout.call_count == 1
assert entry.state == ENTRY_STATE_NOT_LOADED
state = hass.states.get(entity_id)
assert state is None
|
import logging
import os
import re
from gmusicapi.utils import utils
from gmusicapi import Mobileclient
log = utils.DynamicClientLogger(__name__)
# A regex for the gm id format, eg:
# c293dd5a-9aa9-33c4-8b09-0c865b56ce46
hex_set = "[0-9a-f]"
gm_id_regex = re.compile(("{h}{{8}}-" +
("{h}{{4}}-" * 3) +
"{h}{{12}}").format(h=hex_set))
# Get the absolute paths of the test files, which are located in the same
# directory as this file.
test_file_dir = os.path.dirname(os.path.abspath(__file__))
small_mp3 = os.path.join(test_file_dir, 'audiotest_small.mp3')
image_filename = os.path.join(test_file_dir, 'imagetest_10x10_check.png')
# that dumb intro track on conspiracy of one
aa_song_id = 'Tqqufr34tuqojlvkolsrwdwx7pe'
class NoticeLogging(logging.Handler):
"""A log handler that, if asked to emit, will set
``self.seen_message`` to True.
"""
def __init__(self):
logging.Handler.__init__(self) # cannot use super in py 2.6; logging is still old-style
self.seen_message = False
def emit(self, record):
self.seen_message = True
def new_test_client(cls, **kwargs):
"""Make an instance of a client, login, and return it.
kwargs are passed through to cls.login().
"""
client = cls(debug_logging=True)
if isinstance(client, Mobileclient):
client.oauth_login(**kwargs)
else:
client.login(**kwargs)
return client
def md_entry_same(entry_name, s1, s2):
"""Returns (s1 and s2 have the same value for entry_name?, message)."""
s1_val = s1[entry_name]
s2_val = s2[entry_name]
return (s1_val == s2_val, "(" + entry_name + ") " + repr(s1_val) + ", " + repr(s2_val))
def is_gm_id(s):
"""Returns True if the given string is in Google Music id form."""
return re.match(gm_id_regex, s) is not None
def is_song(d):
"""Returns True is the given dict is a GM song dict."""
# Not really precise, but should be good enough.
return is_gm_id(d["id"])
def is_song_list(lst):
return all(map(is_song, lst))
def is_id_list(lst):
"""Returns True if the given list is made up of all strings in GM id form."""
return all(map(is_gm_id, lst))
def is_id_pair_list(lst):
"""Returns True if the given list is made up of all (id, id) pairs."""
a, b = list(zip(*lst))
return is_id_list(a + b)
|
from pcal9535a import PCAL9535A
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
CONF_INVERT_LOGIC = "invert_logic"
CONF_I2C_ADDRESS = "i2c_address"
CONF_I2C_BUS = "i2c_bus"
CONF_PINS = "pins"
CONF_STRENGTH = "strength"
STRENGTH_025 = "0.25"
STRENGTH_050 = "0.5"
STRENGTH_075 = "0.75"
STRENGTH_100 = "1.0"
DEFAULT_INVERT_LOGIC = False
DEFAULT_I2C_ADDRESS = 0x20
DEFAULT_I2C_BUS = 1
DEFAULT_STRENGTH = STRENGTH_100
_SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_PINS): _SWITCHES_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_STRENGTH, default=DEFAULT_STRENGTH): vol.In(
[STRENGTH_025, STRENGTH_050, STRENGTH_075, STRENGTH_100]
),
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int),
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): cv.positive_int,
}
)
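# Illustrative configuration.yaml entry matching PLATFORM_SCHEMA (the platform
# name "pcal9535a" is assumed):
#
#   switch:
#     - platform: pcal9535a
#       i2c_bus: 1
#       i2c_address: 0x20
#       invert_logic: false
#       pins:
#         0: Fan
#         9: Heater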
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PCAL9535A devices."""
invert_logic = config[CONF_INVERT_LOGIC]
i2c_address = config[CONF_I2C_ADDRESS]
bus = config[CONF_I2C_BUS]
pcal = PCAL9535A(bus, i2c_address)
switches = []
pins = config[CONF_PINS]
for pin_num, pin_name in pins.items():
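        # Pins are numbered sequentially across the expander's two 8-bit ports,
        # so e.g. pin 10 maps to port 1, pin 2 (10 // 8 == 1, 10 % 8 == 2).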
pin = pcal.get_pin(pin_num // 8, pin_num % 8)
switches.append(PCAL9535ASwitch(pin_name, pin, invert_logic))
add_entities(switches)
class PCAL9535ASwitch(SwitchEntity):
"""Representation of a PCAL9535A output pin."""
def __init__(self, name, pin, invert_logic):
"""Initialize the pin."""
self._name = name or DEVICE_DEFAULT_NAME
self._pin = pin
self._pin.inverted = invert_logic
self._pin.input = False
self._state = self._pin.level
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if optimistic updates are used."""
return True
def turn_on(self, **kwargs):
"""Turn the device on."""
self._pin.level = True
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
self._pin.level = False
self._state = False
self.schedule_update_ha_state()
|
from datetime import timedelta
import logging
from typing import Awaitable, Callable, NamedTuple, Optional
from gogogate2_api import AbstractGateApi, GogoGate2Api, ISmartGateApi
from gogogate2_api.common import AbstractDoor
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_DEVICE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DATA_UPDATE_COORDINATOR, DEVICE_TYPE_ISMARTGATE, DOMAIN
_LOGGER = logging.getLogger(__name__)
class StateData(NamedTuple):
"""State data for a cover entity."""
config_unique_id: str
unique_id: Optional[str]
door: Optional[AbstractDoor]
class DeviceDataUpdateCoordinator(DataUpdateCoordinator):
"""Manages polling for state changes from the device."""
def __init__(
self,
hass: HomeAssistant,
logger: logging.Logger,
api: AbstractGateApi,
*,
name: str,
update_interval: timedelta,
update_method: Optional[Callable[[], Awaitable]] = None,
request_refresh_debouncer: Optional[Debouncer] = None,
):
"""Initialize the data update coordinator."""
DataUpdateCoordinator.__init__(
self,
hass,
logger,
name=name,
update_interval=update_interval,
update_method=update_method,
request_refresh_debouncer=request_refresh_debouncer,
)
self.api = api
def get_data_update_coordinator(
hass: HomeAssistant, config_entry: ConfigEntry
) -> DeviceDataUpdateCoordinator:
"""Get an update coordinator."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(config_entry.entry_id, {})
config_entry_data = hass.data[DOMAIN][config_entry.entry_id]
if DATA_UPDATE_COORDINATOR not in config_entry_data:
api = get_api(config_entry.data)
async def async_update_data():
try:
return await hass.async_add_executor_job(api.info)
except Exception as exception:
raise UpdateFailed(
f"Error communicating with API: {exception}"
) from exception
config_entry_data[DATA_UPDATE_COORDINATOR] = DeviceDataUpdateCoordinator(
hass,
_LOGGER,
api,
# Name of the data. For logging purposes.
name="gogogate2",
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(seconds=5),
)
return config_entry_data[DATA_UPDATE_COORDINATOR]
def cover_unique_id(config_entry: ConfigEntry, door: AbstractDoor) -> str:
"""Generate a cover entity unique id."""
return f"{config_entry.unique_id}_{door.door_id}"
def get_api(config_data: dict) -> AbstractGateApi:
"""Get an api object for config data."""
gate_class = GogoGate2Api
if config_data[CONF_DEVICE] == DEVICE_TYPE_ISMARTGATE:
gate_class = ISmartGateApi
return gate_class(
config_data[CONF_IP_ADDRESS],
config_data[CONF_USERNAME],
config_data[CONF_PASSWORD],
)
|
from __future__ import print_function
import argparse
import os
import sys
import threading
from distutils.version import StrictVersion
from six.moves import input
try:
import paramiko
except ImportError:
paramiko = None
_SYS_STDOUT = sys.__stdout__
_stash = globals()['_stash']
""":type : StaSh"""
try:
import pyte
except ImportError:
_stash('pip install pyte==0.4.10')
import pyte
if (paramiko is None) or (StrictVersion(paramiko.__version__) < StrictVersion('1.15')):
# Install paramiko 1.16.0 to fix a bug with version < 1.15
_stash('pip install paramiko==1.16.0')
print('Please restart Pythonista for changes to take full effect')
sys.exit(0)
class StashSSH(object):
"""
Wrapper class for paramiko client and pyte screen
"""
def __init__(self):
# Initialize the pyte screen based on the current screen size
# noinspection PyUnresolvedReferences
self.screen = pyte.screens.DiffScreen(*_stash.terminal.get_wh())
self.stream = pyte.Stream()
self.stream.attach(self.screen)
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
def connect(self, host, passwd=None, port=22):
print('Connecting...')
if "@" in host:
username, host = host.split('@')
else:
# TODO: find better default username
username = "root"
if passwd is not None:
return self._connect_with_passwd(host, username, passwd, port)
else:
print('Looking for SSH keys...')
key_filename = self.find_ssh_keys()
if len(key_filename) > 0:
try:
self.client.connect(host, username=username, password=passwd, port=port, key_filename=key_filename)
return True
except paramiko.SSHException as e:
print('Failed to login with SSH Keys: {}'.format(repr(e)))
print('Trying password ...')
passwd = input('Enter password:')
return self._connect_with_passwd(host, username, passwd, port)
except Exception as e:
print('Error: {}'.format(e))
return False
else:
print('No SSH key found. Trying password ...')
passwd = input('Enter password:')
return self._connect_with_passwd(host, username, passwd, port)
def _connect_with_passwd(self, host, username, passwd, port):
try:
self.client.connect(host, username=username, password=passwd, port=port)
return True
except Exception as e:
print('Error: {}'.format(e))
return False
def find_ssh_keys(self):
ssh_dir = os.path.join(os.environ['STASH_ROOT'], '.ssh')
if not os.path.exists(ssh_dir):
# create directory
os.mkdir(ssh_dir)
return [os.path.join(ssh_dir, filename) for filename in os.listdir(ssh_dir) if '.' not in filename]
def stdout_thread(self):
while True:
if self.chan.recv_ready():
rcv = self.chan.recv(4096)
# _SYS_STDOUT.write('RRR {%s}\n' % repr(rcv))
rcv = rcv.decode('utf-8', errors='ignore')
x, y = self.screen.cursor.x, self.screen.cursor.y
self.stream.feed(rcv)
if self.screen.dirty or x != self.screen.cursor.x or y != self.screen.cursor.y:
self.update_screen()
self.screen.dirty.clear()
if self.chan.eof_received:
break
def update_screen(self):
_stash.main_screen.load_pyte_screen(self.screen)
_stash.renderer.render(no_wait=True)
def single_exec(self, command):
sin, sout, serr = self.client.exec_command(command)
        # decode so Python 3 prints text rather than a bytes repr
        print(sout.read().decode('utf-8', errors='ignore'))
        print(serr.read().decode('utf-8', errors='ignore'))
self.client.close()
def interactive(self):
self.chan = self.client.get_transport().open_session()
self.chan.get_pty('linux', width=self.screen.columns, height=self.screen.lines)
self.chan.invoke_shell()
self.chan.set_combine_stderr(True)
t1 = threading.Thread(target=self.stdout_thread)
t1.start()
t1.join()
self.chan.close()
self.client.close()
print('\nconnection closed\n')
CTRL_KEY_FLAG = (1 << 18)
class SshUserActionDelegate(object):
"""
Substitute the default user actions delegates
"""
def __init__(self, ssh):
self.ssh = ssh
def send(self, s):
while True:
if self.ssh.chan.eof_received:
break
if self.ssh.chan.send_ready():
# _SYS_STDOUT.write('%s, [%s]' % (rng, replacement))
self.ssh.chan.send(s.encode('utf-8'))
break
class SshTvVkKcDelegate(SshUserActionDelegate):
"""
Delegate for TextView, Virtual keys and Key command
"""
def textview_did_begin_editing(self, tv):
_stash.terminal.is_editing = True
def textview_did_end_editing(self, tv):
_stash.terminal.is_editing = False
def textview_should_change(self, tv, rng, replacement):
if replacement == '': # delete
replacement = '\x08'
self.send(replacement)
return False # always false
def textview_did_change(self, tv):
pass
def textview_did_change_selection(self, tv):
pass
def kc_pressed(self, key, modifierFlags):
if modifierFlags == CTRL_KEY_FLAG:
if key == 'C':
self.send('\x03')
elif key == 'D':
self.send('\x04')
elif key == 'A':
self.send('\x01')
elif key == 'E':
self.send('\x05')
elif key == 'K':
self.send('\x0B')
elif key == 'L':
self.send('\x0C')
elif key == 'U':
self.send('\x15')
elif key == 'Z':
self.send('\x1A')
elif key == '[':
self.send('\x1B') # ESC
elif modifierFlags == 0:
if key == 'UIKeyInputUpArrow':
self.send('\x10')
elif key == 'UIKeyInputDownArrow':
self.send('\x0E')
elif key == 'UIKeyInputLeftArrow':
self.send('\033[D')
elif key == 'UIKeyInputRightArrow':
self.send('\033[C')
def vk_tapped(self, vk):
if vk.name == 'k_tab':
self.send('\t')
elif vk.name == 'k_CC':
self.kc_pressed('C', CTRL_KEY_FLAG)
elif vk.name == 'k_CD':
self.kc_pressed('D', CTRL_KEY_FLAG)
elif vk.name == 'k_CU':
self.kc_pressed('U', CTRL_KEY_FLAG)
elif vk.name == 'k_CZ':
self.kc_pressed('Z', CTRL_KEY_FLAG)
elif vk.name == 'k_hup':
self.kc_pressed('UIKeyInputUpArrow', 0)
elif vk.name == 'k_hdn':
self.kc_pressed('UIKeyInputDownArrow', 0)
elif vk.name == 'k_KB':
if _stash.terminal.is_editing:
_stash.terminal.end_editing()
else:
_stash.terminal.begin_editing()
class SshSVDelegate(SshUserActionDelegate):
"""
Delegate for scroll view
"""
SCROLL_PER_CHAR = 20.0 # Number of pixels to scroll to move 1 character
def scrollview_did_scroll(self, scrollview):
# integrate small scroll motions, but keep scrollview from actually moving
if not scrollview.decelerating:
scrollview.superview.dx -= scrollview.content_offset[0] / SshSVDelegate.SCROLL_PER_CHAR
scrollview.content_offset = (0.0, 0.0)
offset = int(scrollview.superview.dx)
if offset:
scrollview.superview.dx -= offset
if offset > 0:
self.send('\033[C')
else:
self.send('\033[D')
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument('--password', action='store', default=None, help='Password for rsa/dsa key or password login')
ap.add_argument('-p', '--port', action='store', default=22, type=int, help='port for ssh default: 22')
ap.add_argument('host', help='host ex. [email protected]')
ap.add_argument('command', nargs='?', default=False, help='Command to send as a quoted string')
args = ap.parse_args()
ssh = StashSSH()
tv_vk_kc_delegate = SshTvVkKcDelegate(ssh)
sv_delegate = SshSVDelegate(ssh)
if ssh.connect(host=args.host, passwd=args.password, port=args.port):
print('Connected')
if args.command:
ssh.single_exec(args.command)
else:
_stash.stream.feed(u'\u009bc', render_it=False)
with _stash.user_action_proxy.config(tv_responder=tv_vk_kc_delegate,
kc_responder=tv_vk_kc_delegate.kc_pressed,
vk_responder=tv_vk_kc_delegate.vk_tapped,
sv_responder=sv_delegate):
ssh.interactive()
else:
print('Unable to connect')
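# Illustrative invocations (assumption: they simply mirror the argparse
# definition above; host and command are made-up examples):
#   ssh [email protected] 'uname -a'                    # run a single command and exit
#   ssh --password secret -p 2222 [email protected]   # interactive session on port 2222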
|
import heapq
from collections import Counter
import datetime
import os.path as op
import numpy as np
from ..utils import logger, warn, Bunch, _validate_type
from .constants import FIFF, _coord_frame_named
from .tree import dir_tree_find
from .tag import read_tag
from .write import (start_file, end_file, write_dig_points)
from ..transforms import (apply_trans, Transform,
get_ras_to_neuromag_trans, combine_transforms,
invert_transform, _to_const, _str_to_frame,
_coord_frame_name)
from .. import __version__
_dig_kind_dict = {
'cardinal': FIFF.FIFFV_POINT_CARDINAL,
'hpi': FIFF.FIFFV_POINT_HPI,
'eeg': FIFF.FIFFV_POINT_EEG,
'extra': FIFF.FIFFV_POINT_EXTRA,
}
_dig_kind_ints = tuple(sorted(_dig_kind_dict.values()))
_dig_kind_proper = {'cardinal': 'Cardinal',
'hpi': 'HPI',
'eeg': 'EEG',
'extra': 'Extra',
'unknown': 'Unknown'}
_dig_kind_rev = {val: key for key, val in _dig_kind_dict.items()}
_cardinal_kind_rev = {1: 'LPA', 2: 'Nasion', 3: 'RPA', 4: 'Inion'}
def _format_dig_points(dig, enforce_order=False):
"""Format the dig points nicely."""
if enforce_order and dig is not None:
# reorder points based on type:
# Fiducials/HPI, EEG, extra (headshape)
fids_digpoints = []
hpi_digpoints = []
eeg_digpoints = []
extra_digpoints = []
head_digpoints = []
# use a heap to enforce order on FIDS, EEG, Extra
for idx, digpoint in enumerate(dig):
ident = digpoint['ident']
kind = digpoint['kind']
# push onto heap based on 'ident' (for the order) for
# each of the possible DigPoint 'kind's
# keep track of 'idx' in case of any clashes in
# the 'ident' variable, which can occur when
# user passes in DigMontage + DigMontage
if kind == FIFF.FIFFV_POINT_CARDINAL:
heapq.heappush(fids_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_HPI:
heapq.heappush(hpi_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_EEG:
heapq.heappush(eeg_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_EXTRA:
heapq.heappush(extra_digpoints, (ident, idx, digpoint))
elif kind == FIFF.FIFFV_POINT_HEAD:
heapq.heappush(head_digpoints, (ident, idx, digpoint))
# now recreate dig based on sorted order
        fids_digpoints.sort()
        hpi_digpoints.sort()
        eeg_digpoints.sort()
        extra_digpoints.sort()
        head_digpoints.sort()
new_dig = []
for idx, d in enumerate(fids_digpoints + hpi_digpoints +
extra_digpoints + eeg_digpoints +
head_digpoints):
new_dig.append(d[-1])
dig = new_dig
return [DigPoint(d) for d in dig] if dig is not None else dig
def _get_dig_eeg(dig):
return [d for d in dig if d['kind'] == FIFF.FIFFV_POINT_EEG]
def _count_points_by_type(dig):
"""Get the number of points of each type."""
occurrences = Counter([d['kind'] for d in dig])
return dict(
fid=occurrences[FIFF.FIFFV_POINT_CARDINAL],
hpi=occurrences[FIFF.FIFFV_POINT_HPI],
eeg=occurrences[FIFF.FIFFV_POINT_EEG],
extra=occurrences[FIFF.FIFFV_POINT_EXTRA],
)
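# Illustrative note (assumption, not from the original source): for a dig list
# containing the 3 cardinal fiducials, 4 HPI coils, 60 EEG electrodes and 200
# extra headshape points, _count_points_by_type(dig) would return
# {'fid': 3, 'hpi': 4, 'eeg': 60, 'extra': 200}.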
_dig_keys = {'kind', 'ident', 'r', 'coord_frame'}
class DigPoint(dict):
"""Container for a digitization point.
This is a simple subclass of the standard dict type designed to provide
a readable string representation.
Parameters
----------
kind : int
The kind of channel,
e.g. ``FIFFV_POINT_EEG``, ``FIFFV_POINT_CARDINAL``.
r : array, shape (3,)
        3D position in meters, given in ``coord_frame``.
ident : int
Number specifying the identity of the point.
e.g. ``FIFFV_POINT_NASION`` if kind is ``FIFFV_POINT_CARDINAL``,
or 42 if kind is ``FIFFV_POINT_EEG``.
coord_frame : int
The coordinate frame used, e.g. ``FIFFV_COORD_HEAD``.
"""
def __repr__(self): # noqa: D105
if self['kind'] == FIFF.FIFFV_POINT_CARDINAL:
id_ = _cardinal_kind_rev.get(self['ident'], 'Unknown cardinal')
else:
id_ = _dig_kind_proper[
_dig_kind_rev.get(self['kind'], 'unknown')]
id_ = ('%s #%s' % (id_, self['ident']))
id_ = id_.rjust(10)
cf = _coord_frame_name(self['coord_frame'])
pos = ('(%0.1f, %0.1f, %0.1f) mm' % tuple(1000 * self['r'])).ljust(25)
return ('<DigPoint | %s : %s : %s frame>' % (id_, pos, cf))
# speed up info copy by only deep copying the mutable item
def __deepcopy__(self, memodict):
"""Make a deepcopy."""
return DigPoint(
kind=self['kind'], r=self['r'].copy(),
ident=self['ident'], coord_frame=self['coord_frame'])
def __eq__(self, other): # noqa: D105
"""Compare two DigPoints.
Two digpoints are equal if they are the same kind, share the same
coordinate frame and position.
"""
my_keys = ['kind', 'ident', 'coord_frame']
if set(self.keys()) != set(other.keys()):
return False
elif any(self[_] != other[_] for _ in my_keys):
return False
else:
return np.allclose(self['r'], other['r'])
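# Illustrative sketch (not part of the MNE source): constructing a DigPoint by
# hand. The nasion coordinates are made-up values in meters, head frame.
def _example_digpoint():
    """Return a hand-built nasion DigPoint (illustration only)."""
    return DigPoint(kind=FIFF.FIFFV_POINT_CARDINAL,
                    ident=FIFF.FIFFV_POINT_NASION,
                    r=np.array([0., 0.09, 0.]),
                    coord_frame=FIFF.FIFFV_COORD_HEAD)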
def _read_dig_fif(fid, meas_info):
"""Read digitizer data from a FIFF file."""
isotrak = dir_tree_find(meas_info, FIFF.FIFFB_ISOTRAK)
dig = None
if len(isotrak) == 0:
logger.info('Isotrak not found')
elif len(isotrak) > 1:
warn('Multiple Isotrak found')
else:
isotrak = isotrak[0]
coord_frame = FIFF.FIFFV_COORD_HEAD
dig = []
for k in range(isotrak['nent']):
kind = isotrak['directory'][k].kind
pos = isotrak['directory'][k].pos
if kind == FIFF.FIFF_DIG_POINT:
tag = read_tag(fid, pos)
dig.append(tag.data)
elif kind == FIFF.FIFF_MNE_COORD_FRAME:
tag = read_tag(fid, pos)
coord_frame = _coord_frame_named.get(int(tag.data))
for d in dig:
d['coord_frame'] = coord_frame
return _format_dig_points(dig)
def write_dig(fname, pts, coord_frame=None):
"""Write digitization data to a FIF file.
Parameters
----------
fname : str
Destination file name.
pts : iterator of dict
Iterator through digitizer points. Each point is a dictionary with
the keys 'kind', 'ident' and 'r'.
coord_frame : int | str | None
If all the points have the same coordinate frame, specify the type
here. Can be None (default) if the points could have varying
coordinate frames.
"""
if coord_frame is not None:
coord_frame = _to_const(coord_frame)
pts_frames = {pt.get('coord_frame', coord_frame) for pt in pts}
bad_frames = pts_frames - {coord_frame}
if len(bad_frames) > 0:
raise ValueError(
'Points have coord_frame entries that are incompatible with '
'coord_frame=%i: %s.' % (coord_frame, str(tuple(bad_frames))))
with start_file(fname) as fid:
write_dig_points(fid, pts, block=True, coord_frame=coord_frame)
end_file(fid)
_cardinal_ident_mapping = {
FIFF.FIFFV_POINT_NASION: 'nasion',
FIFF.FIFFV_POINT_LPA: 'lpa',
FIFF.FIFFV_POINT_RPA: 'rpa',
}
# XXXX:
# This does something really similar to _read_dig_montage_fif but:
# - does not check coord_frame
# - does not do any operation that implies assumptions with the names
def _get_data_as_dict_from_dig(dig):
"""Obtain coordinate data from a Dig.
Parameters
----------
dig : list of dicts
A container of DigPoints to be added to the info['dig'].
Returns
-------
ch_pos : dict
The container of all relevant channel positions inside dig.
"""
# Split up the dig points by category
hsp, hpi, elp = list(), list(), list()
fids, dig_ch_pos_location = dict(), list()
for d in dig:
if d['kind'] == FIFF.FIFFV_POINT_CARDINAL:
fids[_cardinal_ident_mapping[d['ident']]] = d['r']
elif d['kind'] == FIFF.FIFFV_POINT_HPI:
hpi.append(d['r'])
elp.append(d['r'])
# XXX: point_names.append('HPI%03d' % d['ident'])
elif d['kind'] == FIFF.FIFFV_POINT_EXTRA:
hsp.append(d['r'])
elif d['kind'] == FIFF.FIFFV_POINT_EEG:
# XXX: dig_ch_pos['EEG%03d' % d['ident']] = d['r']
if d['ident'] != 0: # ref channel
dig_ch_pos_location.append(d['r'])
dig_coord_frames = set([d['coord_frame'] for d in dig])
assert len(dig_coord_frames) == 1, \
'Only single coordinate frame in dig is supported' # XXX
return Bunch(
nasion=fids.get('nasion', None),
lpa=fids.get('lpa', None),
rpa=fids.get('rpa', None),
hsp=np.array(hsp) if len(hsp) else None,
hpi=np.array(hpi) if len(hpi) else None,
elp=np.array(elp) if len(elp) else None,
dig_ch_pos_location=dig_ch_pos_location,
coord_frame=dig_coord_frames.pop(),
)
def _get_fid_coords(dig, raise_error=True):
fid_coords = Bunch(nasion=None, lpa=None, rpa=None)
fid_coord_frames = dict()
for d in dig:
if d['kind'] == FIFF.FIFFV_POINT_CARDINAL:
key = _cardinal_ident_mapping[d['ident']]
fid_coords[key] = d['r']
fid_coord_frames[key] = d['coord_frame']
if len(fid_coord_frames) > 0 and raise_error:
if set(fid_coord_frames.keys()) != set(['nasion', 'lpa', 'rpa']):
raise ValueError("Some fiducial points are missing (got %s)." %
fid_coords.keys())
if len(set(fid_coord_frames.values())) > 1:
raise ValueError(
'All fiducial points must be in the same coordinate system '
'(got %s)' % len(fid_coord_frames)
)
coord_frame = fid_coord_frames.popitem()[1] if fid_coord_frames else None
return fid_coords, coord_frame
def _write_dig_points(fname, dig_points):
"""Write points to text file.
Parameters
----------
fname : str
Path to the file to write. The kind of file to write is determined
based on the extension: '.txt' for tab separated text file.
dig_points : numpy.ndarray, shape (n_points, 3)
Points.
"""
_, ext = op.splitext(fname)
dig_points = np.asarray(dig_points)
if (dig_points.ndim != 2) or (dig_points.shape[1] != 3):
err = ("Points must be of shape (n_points, 3), "
"not %s" % (dig_points.shape,))
raise ValueError(err)
if ext == '.txt':
with open(fname, 'wb') as fid:
version = __version__
now = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
fid.write(b'%% Ascii 3D points file created by mne-python version'
b' %s at %s\n' % (version.encode(), now.encode()))
fid.write(b'%% %d 3D points, x y z per line\n' % len(dig_points))
np.savetxt(fid, dig_points, delimiter='\t', newline='\n')
else:
msg = "Unrecognized extension: %r. Need '.txt'." % ext
raise ValueError(msg)
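# Illustrative sketch (assumption, not original code): writing three made-up
# points to a tab-separated .txt file via _write_dig_points. The path is a
# placeholder.
def _example_write_dig_points(fname='/tmp/example_dig_points.txt'):
    """Write a toy (3, 3) point array to a text file (illustration only)."""
    pts = np.array([[0., 0.09, 0.],
                    [-0.08, 0., 0.],
                    [0.08, 0., 0.]])
    _write_dig_points(fname, pts)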
def _coord_frame_const(coord_frame):
if not isinstance(coord_frame, str) or coord_frame not in _str_to_frame:
raise ValueError('coord_frame must be one of %s, got %s'
% (sorted(_str_to_frame.keys()), coord_frame))
return _str_to_frame[coord_frame]
def _make_dig_points(nasion=None, lpa=None, rpa=None, hpi=None,
extra_points=None, dig_ch_pos=None,
coord_frame='head'):
"""Construct digitizer info for the info.
Parameters
----------
nasion : array-like | numpy.ndarray, shape (3,) | None
Point designated as the nasion point.
lpa : array-like | numpy.ndarray, shape (3,) | None
Point designated as the left auricular point.
rpa : array-like | numpy.ndarray, shape (3,) | None
Point designated as the right auricular point.
hpi : array-like | numpy.ndarray, shape (n_points, 3) | None
Points designated as head position indicator points.
extra_points : array-like | numpy.ndarray, shape (n_points, 3)
Points designed as the headshape points.
dig_ch_pos : dict
Dict of EEG channel positions.
coord_frame : str
The coordinate frame of the points. Usually this is "unknown"
for native digitizer space. Defaults to "head".
Returns
-------
dig : list of dicts
A container of DigPoints to be added to the info['dig'].
"""
coord_frame = _coord_frame_const(coord_frame)
dig = []
if lpa is not None:
lpa = np.asarray(lpa)
if lpa.shape != (3,):
raise ValueError('LPA should have the shape (3,) instead of %s'
% (lpa.shape,))
dig.append({'r': lpa, 'ident': FIFF.FIFFV_POINT_LPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if nasion is not None:
nasion = np.asarray(nasion)
if nasion.shape != (3,):
raise ValueError('Nasion should have the shape (3,) instead of %s'
% (nasion.shape,))
dig.append({'r': nasion, 'ident': FIFF.FIFFV_POINT_NASION,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if rpa is not None:
rpa = np.asarray(rpa)
if rpa.shape != (3,):
raise ValueError('RPA should have the shape (3,) instead of %s'
% (rpa.shape,))
dig.append({'r': rpa, 'ident': FIFF.FIFFV_POINT_RPA,
'kind': FIFF.FIFFV_POINT_CARDINAL,
'coord_frame': coord_frame})
if hpi is not None:
hpi = np.asarray(hpi)
if hpi.ndim != 2 or hpi.shape[1] != 3:
raise ValueError('HPI should have the shape (n_points, 3) instead '
'of %s' % (hpi.shape,))
for idx, point in enumerate(hpi):
dig.append({'r': point, 'ident': idx + 1,
'kind': FIFF.FIFFV_POINT_HPI,
'coord_frame': coord_frame})
if extra_points is not None:
extra_points = np.asarray(extra_points)
if extra_points.shape[1] != 3:
raise ValueError('Points should have the shape (n_points, 3) '
'instead of %s' % (extra_points.shape,))
for idx, point in enumerate(extra_points):
dig.append({'r': point, 'ident': idx + 1,
'kind': FIFF.FIFFV_POINT_EXTRA,
'coord_frame': coord_frame})
if dig_ch_pos is not None:
try: # use the last 3 as int if possible (e.g., EEG001->1)
idents = []
for key in dig_ch_pos:
_validate_type(key, str, 'dig_ch_pos')
idents.append(int(key[-3:]))
except ValueError: # and if any conversion fails, simply use arange
idents = np.arange(1, len(dig_ch_pos) + 1)
for key, ident in zip(dig_ch_pos, idents):
dig.append({'r': dig_ch_pos[key], 'ident': int(ident),
'kind': FIFF.FIFFV_POINT_EEG,
'coord_frame': coord_frame})
return _format_dig_points(dig)
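# Illustrative sketch (assumption, not original code): building a minimal dig
# list from hand-written fiducials plus two headshape points, all in the
# 'head' frame. The coordinates are made-up values in meters.
def _example_make_dig_points():
    """Return a toy dig list built with _make_dig_points (illustration only)."""
    return _make_dig_points(
        nasion=[0., 0.09, 0.],
        lpa=[-0.08, 0., 0.],
        rpa=[0.08, 0., 0.],
        extra_points=[[0., 0.05, 0.12], [0.02, 0.04, 0.13]],
        coord_frame='head')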
def _call_make_dig_points(nasion, lpa, rpa, hpi, extra, convert=True):
if convert:
neuromag_trans = get_ras_to_neuromag_trans(nasion, lpa, rpa)
nasion = apply_trans(neuromag_trans, nasion)
lpa = apply_trans(neuromag_trans, lpa)
rpa = apply_trans(neuromag_trans, rpa)
if hpi is not None:
hpi = apply_trans(neuromag_trans, hpi)
extra = apply_trans(neuromag_trans, extra).astype(np.float32)
else:
neuromag_trans = None
ctf_head_t = Transform(fro='ctf_head', to='head', trans=neuromag_trans)
info_dig = _make_dig_points(nasion=nasion,
lpa=lpa,
rpa=rpa,
hpi=hpi,
extra_points=extra)
return info_dig, ctf_head_t
##############################################################################
# From artemis123 (we have modified the function a bit)
def _artemis123_read_pos(nas, lpa, rpa, hpi, extra):
# move into MNE head coords
dig_points, _ = _call_make_dig_points(nas, lpa, rpa, hpi, extra)
return dig_points
##############################################################################
# From bti
def _make_bti_dig_points(nasion, lpa, rpa, hpi, extra,
convert=False, use_hpi=False,
bti_dev_t=False, dev_ctf_t=False):
_hpi = hpi if use_hpi else None
info_dig, ctf_head_t = _call_make_dig_points(nasion, lpa, rpa, _hpi, extra,
convert)
if convert:
t = combine_transforms(invert_transform(bti_dev_t), dev_ctf_t,
'meg', 'ctf_head')
dev_head_t = combine_transforms(t, ctf_head_t, 'meg', 'head')
else:
dev_head_t = Transform('meg', 'head', trans=None)
return info_dig, dev_head_t, ctf_head_t # ctf_head_t should not be needed
|