from flask import Flask, jsonify, request
from flasgger import Swagger, LazyString, LazyJSONEncoder
app = Flask(__name__)
# Set the LazyString JSON encoder
app.json_encoder = LazyJSONEncoder
app.config['SWAGGER'] = {
'uiversion': 2
}
template = dict(
info={
'title': LazyString(lambda: 'Lazy Title'),
'version': LazyString(lambda: '99.9.9'),
'description': LazyString(lambda: 'Hello Lazy World'),
'termsOfService': LazyString(lambda: '/there_is_no_tos')
},
host=LazyString(lambda: request.host),
schemes=[LazyString(lambda: 'https' if request.is_secure else 'http')],
foo=LazyString(lambda: "Bar")
)
Swagger(app, template=template)
@app.route('/colors/<palette>/')
def colors(palette):
"""Example endpoint return a list of colors by palette
This is using docstring for specifications
---
tags:
- colors
parameters:
- name: palette
in: path
type: string
enum: ['all', 'rgb', 'cmyk']
required: true
default: all
description: Which palette to filter?
operationId: get_colors
consumes:
- application/json
produces:
- application/json
security:
colors_auth:
- 'write:colors'
- 'read:colors'
schemes: ['http', 'https']
deprecated: false
externalDocs:
description: Project repository
url: http://github.com/rochacbruno/flasgger
definitions:
Palette:
type: object
properties:
palette_name:
type: array
items:
$ref: '#/definitions/Color'
Color:
type: string
responses:
200:
description: A list of colors (may be filtered by palette)
schema:
$ref: '#/definitions/Palette'
examples:
rgb: ['red', 'green', 'blue']
"""
all_colors = {
        'cmyk': ['cyan', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
for url, spec in specs_data.items():
assert 'Palette' in spec['definitions']
assert 'Color' in spec['definitions']
assert 'colors' in spec['paths']['/colors/{palette}/']['get']['tags']
assert spec['schemes'] == ['http']
assert spec['foo'] == 'Bar'
if __name__ == "__main__":
app.run(debug=True)
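# --- Hedged usage sketch (not part of the original app) ---
# Flasgger serves the generated spec at /apispec_1.json by default (an
# assumption about the default endpoint name); this checks that the lazy
# template values were rendered into it:
def _demo_fetch_spec():
    with app.test_client() as http:
        spec = http.get('/apispec_1.json').get_json()
        assert spec['info']['title'] == 'Lazy Title'
        assert spec['foo'] == 'Bar'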
|
from pyiqvia import Client
from pyiqvia.errors import InvalidZipError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.helpers import aiohttp_client
from .const import CONF_ZIP_CODE, DOMAIN # pylint:disable=unused-import
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle an IQVIA config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize the config flow."""
self.data_schema = vol.Schema({vol.Required(CONF_ZIP_CODE): str})
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
if not user_input:
return self.async_show_form(step_id="user", data_schema=self.data_schema)
await self.async_set_unique_id(user_input[CONF_ZIP_CODE])
self._abort_if_unique_id_configured()
websession = aiohttp_client.async_get_clientsession(self.hass)
try:
Client(user_input[CONF_ZIP_CODE], websession)
except InvalidZipError:
return self.async_show_form(
step_id="user",
data_schema=self.data_schema,
errors={CONF_ZIP_CODE: "invalid_zip_code"},
)
return self.async_create_entry(title=user_input[CONF_ZIP_CODE], data=user_input)
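# --- Hedged sketch (illustration only) ---
# The flow above uses Client() construction as its validation step: pyiqvia
# raises InvalidZipError for a malformed ZIP code. A standalone check
# mirroring that logic might look like this (function name is hypothetical):
def _zip_code_is_valid(zip_code, websession) -> bool:
    try:
        Client(zip_code, websession)
    except InvalidZipError:
        return False
    return True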
|
import amqp
from kombu.utils.amq_manager import get_manager
from kombu.utils.text import version_string_as_tuple
from . import base
from .base import to_rabbitmq_queue_arguments
DEFAULT_PORT = 5672
DEFAULT_SSL_PORT = 5671
class Message(base.Message):
"""AMQP Message."""
def __init__(self, msg, channel=None, **kwargs):
props = msg.properties
super().__init__(
body=msg.body,
channel=channel,
delivery_tag=msg.delivery_tag,
content_type=props.get('content_type'),
content_encoding=props.get('content_encoding'),
delivery_info=msg.delivery_info,
properties=msg.properties,
headers=props.get('application_headers') or {},
**kwargs)
class Channel(amqp.Channel, base.StdChannel):
"""AMQP Channel."""
Message = Message
def prepare_message(self, body, priority=None,
content_type=None, content_encoding=None,
headers=None, properties=None, _Message=amqp.Message):
"""Prepare message so that it can be sent using this transport."""
return _Message(
body,
priority=priority,
content_type=content_type,
content_encoding=content_encoding,
application_headers=headers,
**properties or {}
)
def prepare_queue_arguments(self, arguments, **kwargs):
return to_rabbitmq_queue_arguments(arguments, **kwargs)
def message_to_python(self, raw_message):
"""Convert encoded message body back to a Python value."""
return self.Message(raw_message, channel=self)
class Connection(amqp.Connection):
"""AMQP Connection."""
Channel = Channel
class Transport(base.Transport):
"""AMQP Transport."""
Connection = Connection
default_port = DEFAULT_PORT
default_ssl_port = DEFAULT_SSL_PORT
# it's very annoying that pyamqp sometimes raises AttributeError
# if the connection is lost, but nothing we can do about that here.
connection_errors = amqp.Connection.connection_errors
channel_errors = amqp.Connection.channel_errors
recoverable_connection_errors = \
amqp.Connection.recoverable_connection_errors
recoverable_channel_errors = amqp.Connection.recoverable_channel_errors
driver_name = 'py-amqp'
driver_type = 'amqp'
implements = base.Transport.implements.extend(
asynchronous=True,
heartbeats=True,
)
def __init__(self, client,
default_port=None, default_ssl_port=None, **kwargs):
self.client = client
self.default_port = default_port or self.default_port
self.default_ssl_port = default_ssl_port or self.default_ssl_port
def driver_version(self):
return amqp.__version__
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return connection.drain_events(**kwargs)
def _collect(self, connection):
if connection is not None:
connection.collect()
def establish_connection(self):
"""Establish connection to the AMQP broker."""
conninfo = self.client
for name, default_value in self.default_connection_params.items():
if not getattr(conninfo, name, None):
setattr(conninfo, name, default_value)
if conninfo.hostname == 'localhost':
conninfo.hostname = '127.0.0.1'
opts = dict({
'host': conninfo.host,
'userid': conninfo.userid,
'password': conninfo.password,
'login_method': conninfo.login_method,
'virtual_host': conninfo.virtual_host,
'insist': conninfo.insist,
'ssl': conninfo.ssl,
'connect_timeout': conninfo.connect_timeout,
'heartbeat': conninfo.heartbeat,
}, **conninfo.transport_options or {})
conn = self.Connection(**opts)
conn.client = self.client
conn.connect()
return conn
def verify_connection(self, connection):
return connection.connected
def close_connection(self, connection):
"""Close the AMQP broker connection."""
connection.client = None
connection.close()
def get_heartbeat_interval(self, connection):
return connection.heartbeat
def register_with_event_loop(self, connection, loop):
connection.transport.raise_on_initial_eintr = True
loop.add_reader(connection.sock, self.on_readable, connection, loop)
def heartbeat_check(self, connection, rate=2):
return connection.heartbeat_tick(rate=rate)
def qos_semantics_matches_spec(self, connection):
props = connection.server_properties
if props.get('product') == 'RabbitMQ':
return version_string_as_tuple(props['version']) < (3, 3)
return True
@property
def default_connection_params(self):
return {
'userid': 'guest',
'password': 'guest',
'port': (self.default_ssl_port if self.client.ssl
else self.default_port),
'hostname': 'localhost',
'login_method': 'AMQPLAIN',
}
def get_manager(self, *args, **kwargs):
return get_manager(self.client, *args, **kwargs)
class SSLTransport(Transport):
"""AMQP SSL Transport."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# ugh, not exactly pure, but hey, it's python.
if not self.client.ssl: # not dict or False
self.client.ssl = True
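# --- Hedged usage sketch ---
# This transport is normally selected through kombu's URL scheme rather than
# instantiated directly. A minimal sketch, assuming a RabbitMQ broker on
# localhost with the default guest credentials:
def _demo_connect():
    from kombu import Connection
    with Connection('pyamqp://guest:guest@localhost:5672//') as conn:
        channel = conn.channel()  # goes through Transport.create_channel()
        return channel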
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import cuda_toolkit
from perfkitbenchmarker.linux_packages import mxnet
from perfkitbenchmarker.linux_packages import mxnet_cnn
from perfkitbenchmarker.linux_packages import nvidia_driver
from six.moves import range
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'mxnet'
BENCHMARK_CONFIG = """
mxnet:
description: Runs MXNet Benchmark.
vm_groups:
default:
os_type: ubuntu1604
vm_spec:
GCP:
machine_type: n1-highmem-4
zone: us-east1-d
boot_disk_size: 200
AWS:
machine_type: p2.xlarge
zone: us-east-1
boot_disk_size: 200
Azure:
machine_type: Standard_NC6
zone: eastus
"""
GPU = 'gpu'
CPU = 'cpu'
MODELS = ['alexnet', 'googlenet', 'inception-bn', 'inception-resnet-v2',
'inception-v3', 'inception-v4', 'lenet', 'mlp', 'mobilenet',
'resnet-v1', 'resnet', 'resnext', 'vgg']
flags.DEFINE_list('mx_models', ['inception-v3', 'vgg', 'alexnet', 'resnet'],
                  'The networks to train.')
flags.register_validator('mx_models',
lambda models: models and set(models).issubset(MODELS),
'Invalid models list. mx_models must be a subset of '
+ ', '.join(MODELS))
flags.DEFINE_integer('mx_batch_size', None, 'The batch size for SGD training.')
flags.DEFINE_integer('mx_num_epochs', 80,
'The maximal number of epochs to train.')
flags.DEFINE_enum('mx_device', GPU, [CPU, GPU],
'Device to use for computation: cpu or gpu')
flags.DEFINE_integer('mx_num_layers', None, 'Number of layers in the neural '
'network, required by some networks such as resnet')
flags.DEFINE_enum('mx_precision', 'float32', ['float16', 'float32'],
'Precision')
flags.DEFINE_enum('mx_key_value_store', 'device',
['local', 'device', 'nccl', 'dist_sync', 'dist_device_sync',
'dist_async'], 'Key-Value store types.')
flags.DEFINE_string('mx_image_shape', None,
'The image shape that feeds into the network.')
DEFAULT_BATCH_SIZE = 64
DEFAULT = 'default'
DEFAULT_BATCH_SIZES_BY_MODEL = {
'vgg': {
16: 32
},
'alexnet': {
DEFAULT: 512
},
'resnet': {
152: 32
}
}
DEFAULT_NUM_LAYERS_BY_MODEL = {
'vgg': 16,
'resnet': 50
}
INCEPTION3_IMAGE_SHAPE = '3,299,299'
MNIST_IMAGE_SHAPE = '1,28,28'
IMAGENET_IMAGE_SHAPE = '3,224,224'
DEFAULT_IMAGE_SHAPE_BY_MODEL = {
'inception-v3': INCEPTION3_IMAGE_SHAPE,
'inception-v4': INCEPTION3_IMAGE_SHAPE,
'inception-bn': IMAGENET_IMAGE_SHAPE,
'inception-resnet-v2': IMAGENET_IMAGE_SHAPE,
'alexnet': IMAGENET_IMAGE_SHAPE,
'googlenet': IMAGENET_IMAGE_SHAPE,
'mobilenet': IMAGENET_IMAGE_SHAPE,
'resnet-v1': IMAGENET_IMAGE_SHAPE,
'resnet': IMAGENET_IMAGE_SHAPE,
'resnext': IMAGENET_IMAGE_SHAPE,
'vgg': IMAGENET_IMAGE_SHAPE,
'lenet': MNIST_IMAGE_SHAPE
}
class MXParseOutputException(Exception):
pass
def GetConfig(user_config):
"""Load and return benchmark config.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def _GetDefaultBatchSize(model, num_layers=None):
return DEFAULT_BATCH_SIZES_BY_MODEL.get(model, {}).get(num_layers or DEFAULT,
DEFAULT_BATCH_SIZE)
def _GetBatchSize(model, num_layers=None):
return FLAGS.mx_batch_size or _GetDefaultBatchSize(model, num_layers)
def _GetDefaultImageShape(model):
return DEFAULT_IMAGE_SHAPE_BY_MODEL.get(model, IMAGENET_IMAGE_SHAPE)
def _GetImageShape(model):
return FLAGS.mx_image_shape or _GetDefaultImageShape(model)
def _GetDefaultNumLayersByModel(model):
return DEFAULT_NUM_LAYERS_BY_MODEL.get(model)
def _GetNumLayers(model):
return FLAGS.mx_num_layers or _GetDefaultNumLayersByModel(model)
def _UpdateBenchmarkSpecWithFlags(benchmark_spec):
"""Update the benchmark_spec with supplied command line flags.
Args:
benchmark_spec: benchmark specification to update
"""
benchmark_spec.models = FLAGS.mx_models
benchmark_spec.batch_size = FLAGS.mx_batch_size
benchmark_spec.num_epochs = FLAGS.mx_num_epochs
benchmark_spec.device = FLAGS.mx_device
benchmark_spec.num_layers = FLAGS.mx_num_layers
benchmark_spec.precision = FLAGS.mx_precision
benchmark_spec.key_value_store = FLAGS.mx_key_value_store
def Prepare(benchmark_spec):
"""Install and set up MXNet on the target vm.
Args:
benchmark_spec: The benchmark specification
"""
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
vm = benchmark_spec.vms[0]
vm.Install('mxnet')
vm.Install('mxnet_cnn')
benchmark_spec.mxnet_version = mxnet.GetMXNetVersion(vm)
def _CreateMetadataDict(benchmark_spec):
"""Create metadata dict to be used in run results.
Args:
benchmark_spec: benchmark spec
Returns:
metadata dict
"""
vm = benchmark_spec.vms[0]
metadata = {
'batch_size': benchmark_spec.batch_size,
'num_epochs': benchmark_spec.num_epochs,
'device': benchmark_spec.device,
'num_layers': benchmark_spec.num_layers,
'model': benchmark_spec.model,
'mxnet_version': benchmark_spec.mxnet_version,
'precision': benchmark_spec.precision,
'key_value_store': benchmark_spec.key_value_store,
'image_shape': benchmark_spec.image_shape,
'commit': mxnet_cnn.GetCommit(vm)
}
if benchmark_spec.device == GPU:
metadata.update(cuda_toolkit.GetMetadata(vm))
return metadata
def _ExtractThroughput(output):
"""Extract throughput from MXNet output.
Sample output:
INFO:root:Epoch[0] Batch [460-480] Speed: 50.42 samples/sec accuracy=1.000000
INFO:root:Epoch[0] Batch [480-500] Speed: 50.47 samples/sec accuracy=1.000000
INFO:root:Epoch[0] Train-accuracy=1.000000
INFO:root:Epoch[0] Time cost=634.243
Args:
output: MXNet output
Returns:
throughput (float)
"""
  regex = r'Speed:\s+(\d+\.\d+)'
  match = re.findall(regex, output)
  try:
    return sum(float(step) for step in match) / len(match)
  except ZeroDivisionError:
    raise MXParseOutputException('Unable to parse MXNet output')
def _MakeSamplesFromOutput(benchmark_spec, output):
"""Create a sample continaing the measured MXNet throughput.
Args:
benchmark_spec: benchmark spec
output: MXNet output
Returns:
a Sample containing the MXNet throughput in samples/sec
"""
metadata = _CreateMetadataDict(benchmark_spec)
mx_throughput = _ExtractThroughput(output)
return sample.Sample('Training synthetic data', mx_throughput,
'samples/sec', metadata)
def Run(benchmark_spec):
"""Run MXNet on the cluster for each model specified.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
vm = benchmark_spec.vms[0]
mx_benchmark_dir = 'incubator-mxnet/example/image-classification'
results = []
for model in FLAGS.mx_models:
num_layers = _GetNumLayers(model)
batch_size = _GetBatchSize(model, num_layers)
benchmark_spec.model = model
benchmark_spec.batch_size = batch_size
benchmark_spec.num_layers = num_layers
benchmark_spec.image_shape = _GetImageShape(model)
mx_benchmark_cmd = (
'python train_imagenet.py '
'--benchmark=1 '
'--network={network} '
'--batch-size={batch_size} '
'--image-shape={image_shape} '
'--num-epochs={num_epochs} '
'--dtype={precision} '
'--kv-store={key_value_store}').format(
network=model,
batch_size=batch_size,
image_shape=benchmark_spec.image_shape,
num_epochs=benchmark_spec.num_epochs,
precision=benchmark_spec.precision,
key_value_store=benchmark_spec.key_value_store)
if benchmark_spec.device == GPU:
num_gpus = nvidia_driver.QueryNumberOfGpus(vm)
mx_benchmark_cmd = '{env} {cmd} --gpus {gpus}'.format(
env=mxnet.GetEnvironmentVars(vm),
cmd=mx_benchmark_cmd,
gpus=','.join(str(n) for n in range(num_gpus)))
elif benchmark_spec.device == CPU:
# Specifies the number of threads to use in CPU test.
# https://mxnet.incubator.apache.org/faq/perf.html
mx_benchmark_cmd = 'OMP_NUM_THREADS={omp_num_threads} {cmd}'.format(
omp_num_threads=vm.NumCpusForBenchmark() // 2,
cmd=mx_benchmark_cmd)
if num_layers:
mx_benchmark_cmd = '%s --num-layers %s' % (mx_benchmark_cmd, num_layers)
run_command = 'cd %s && %s' % (mx_benchmark_dir,
mx_benchmark_cmd)
stdout, stderr = vm.RobustRemoteCommand(run_command, should_log=True)
results.append(_MakeSamplesFromOutput(benchmark_spec, stdout or stderr))
return results
def Cleanup(unused_benchmark_spec):
"""Cleanup MXNet on the cluster."""
pass
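# --- Hedged self-check for _ExtractThroughput (illustration only) ---
# Feeding it the sample output quoted in its docstring yields the mean of the
# two reported speeds: (50.42 + 50.47) / 2 = 50.445 samples/sec.
def _DemoExtractThroughput():
  sample_output = (
      'INFO:root:Epoch[0] Batch [460-480] Speed: 50.42 samples/sec accuracy=1.000000\n'
      'INFO:root:Epoch[0] Batch [480-500] Speed: 50.47 samples/sec accuracy=1.000000\n')
  assert abs(_ExtractThroughput(sample_output) - 50.445) < 1e-6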
|
import json
import os
import threading
import unittest
from http.server import BaseHTTPRequestHandler, HTTPServer
from test.support import EnvironmentVarGuard
from urllib.parse import urlparse
from kaggle_web_client import (KaggleWebClient,
_KAGGLE_URL_BASE_ENV_VAR_NAME,
_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME,
_KAGGLE_IAP_TOKEN_ENV_VAR_NAME,
CredentialError, BackendError)
from kaggle_datasets import KaggleDatasets, _KAGGLE_TPU_NAME_ENV_VAR_NAME
_TEST_JWT = 'test-secrets-key'
_TEST_IAP = 'IAP_TOKEN'
_TPU_GCS_BUCKET = 'gs://kds-tpu-ea1971a458ffd4cd51389e7574c022ecc0a82bb1b52ccef08c8a'
_AUTOML_GCS_BUCKET = 'gs://kds-automl-ea1971a458ffd4cd51389e7574c022ecc0a82bb1b52ccef08c8a'
class GcsDatasetsHTTPHandler(BaseHTTPRequestHandler):
def set_request(self):
raise NotImplementedError()
def get_response(self):
raise NotImplementedError()
    def do_HEAD(self):
        self.send_response(200)
    def do_POST(self):
        self.set_request()
        self.send_response(200)
        self.send_header("Content-type", "application/json")
        self.end_headers()
        self.wfile.write(json.dumps(self.get_response()).encode("utf-8"))
class TestDatasets(unittest.TestCase):
SERVER_ADDRESS = urlparse(os.getenv(_KAGGLE_URL_BASE_ENV_VAR_NAME, default="http://127.0.0.1:8001"))
def _test_client(self, client_func, expected_path, expected_body, is_tpu=True, success=True, iap_token=False):
_request = {}
class GetGcsPathHandler(GcsDatasetsHTTPHandler):
def set_request(self):
_request['path'] = self.path
content_len = int(self.headers.get('Content-Length'))
_request['body'] = json.loads(self.rfile.read(content_len))
_request['headers'] = self.headers
def get_response(self):
if success:
gcs_path = _TPU_GCS_BUCKET if is_tpu else _AUTOML_GCS_BUCKET
return {'result': {
'destinationBucket': gcs_path,
'destinationPath': None}, 'wasSuccessful': "true"}
else:
return {'wasSuccessful': "false"}
env = EnvironmentVarGuard()
env.set(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME, _TEST_JWT)
if is_tpu:
env.set(_KAGGLE_TPU_NAME_ENV_VAR_NAME, 'FAKE_TPU')
if iap_token:
env.set(_KAGGLE_IAP_TOKEN_ENV_VAR_NAME, _TEST_IAP)
with env:
with HTTPServer((self.SERVER_ADDRESS.hostname, self.SERVER_ADDRESS.port), GetGcsPathHandler) as httpd:
threading.Thread(target=httpd.serve_forever).start()
try:
client_func()
finally:
httpd.shutdown()
path, headers, body = _request['path'], _request['headers'], _request['body']
self.assertEqual(
path,
expected_path,
msg="Fake server did not receive the right request from the KaggleDatasets client.")
self.assertEqual(
body,
expected_body,
msg="Fake server did not receive the right body from the KaggleDatasets client.")
self.assertIn('Content-Type', headers.keys(),
msg="Fake server did not receive a Content-Type header from the KaggleDatasets client.")
self.assertEqual('application/json', headers.get('Content-Type'),
msg="Fake server did not receive an application/json content type header from the KaggleDatasets client.")
self.assertIn('X-Kaggle-Authorization', headers.keys(),
msg="Fake server did not receive an X-Kaggle-Authorization header from the KaggleDatasets client.")
if iap_token:
self.assertEqual(f'Bearer {_TEST_IAP}', headers.get('Authorization'),
msg="Fake server did not receive an Authorization header from the KaggleDatasets client.")
else:
self.assertNotIn('Authorization', headers.keys(),
msg="Fake server received an Authorization header from the KaggleDatasets client. It shouldn't.")
self.assertEqual(f'Bearer {_TEST_JWT}', headers.get('X-Kaggle-Authorization'),
msg="Fake server did not receive the right X-Kaggle-Authorization header from the KaggleDatasets client.")
def test_no_token_fails(self):
env = EnvironmentVarGuard()
env.unset(_KAGGLE_USER_SECRETS_TOKEN_ENV_VAR_NAME)
with env:
with self.assertRaises(CredentialError):
client = KaggleDatasets()
def test_get_gcs_path_tpu_succeeds(self):
def call_get_gcs_path():
client = KaggleDatasets()
gcs_path = client.get_gcs_path()
self.assertEqual(gcs_path, _TPU_GCS_BUCKET)
self._test_client(call_get_gcs_path,
'/requests/CopyDatasetVersionToKnownGcsBucketRequest',
{'MountSlug': None, 'IntegrationType': 2},
is_tpu=True)
def test_get_gcs_path_automl_succeeds(self):
def call_get_gcs_path():
client = KaggleDatasets()
gcs_path = client.get_gcs_path()
self.assertEqual(gcs_path, _AUTOML_GCS_BUCKET)
self._test_client(call_get_gcs_path,
'/requests/CopyDatasetVersionToKnownGcsBucketRequest',
{'MountSlug': None, 'IntegrationType': 1},
is_tpu=False)
def test_get_gcs_path_handles_unsuccessful(self):
def call_get_gcs_path():
client = KaggleDatasets()
with self.assertRaises(BackendError):
gcs_path = client.get_gcs_path()
self._test_client(call_get_gcs_path,
'/requests/CopyDatasetVersionToKnownGcsBucketRequest',
{'MountSlug': None, 'IntegrationType': 2},
is_tpu=True,
success=False)
def test_iap_token(self):
def call_get_gcs_path():
client = KaggleDatasets()
gcs_path = client.get_gcs_path()
self._test_client(call_get_gcs_path,
'/requests/CopyDatasetVersionToKnownGcsBucketRequest',
{'MountSlug': None, 'IntegrationType': 1},
is_tpu=False, iap_token=True)
|
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from . import CONF_PORTS, DATA_GC100
_SENSORS_SCHEMA = vol.Schema({cv.string: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PORTS): vol.All(cv.ensure_list, [_SENSORS_SCHEMA])}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the GC100 devices."""
binary_sensors = []
ports = config.get(CONF_PORTS)
for port in ports:
for port_addr, port_name in port.items():
binary_sensors.append(
GC100BinarySensor(port_name, port_addr, hass.data[DATA_GC100])
)
add_entities(binary_sensors, True)
class GC100BinarySensor(BinarySensorEntity):
"""Representation of a binary sensor from GC100."""
def __init__(self, name, port_addr, gc100):
"""Initialize the GC100 binary sensor."""
self._name = name or DEVICE_DEFAULT_NAME
self._port_addr = port_addr
self._gc100 = gc100
self._state = None
# Subscribe to be notified about state changes (PUSH)
self._gc100.subscribe(self._port_addr, self.set_state)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state
def update(self):
"""Update the sensor state."""
self._gc100.read_sensor(self._port_addr, self.set_state)
def set_state(self, state):
"""Set the current state."""
self._state = state == 1
self.schedule_update_ha_state()
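# --- Hedged configuration sketch ---
# The PLATFORM_SCHEMA above expects a list of port-address -> name mappings;
# a configuration.yaml entry might look like this (addresses are illustrative):
#
# binary_sensor:
#   - platform: gc100
#     ports:
#       - "1": Door sensor
#       - "2": Window sensor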
|
import logging
import pytest
import yaml
from PyQt5.QtCore import QUrl, QPoint, QByteArray, QObject
QWebView = pytest.importorskip('PyQt5.QtWebKitWidgets').QWebView
from qutebrowser.misc import sessions
from qutebrowser.misc.sessions import TabHistoryItem as Item
from qutebrowser.utils import objreg, qtutils
from qutebrowser.browser.webkit import tabhistory
pytestmark = pytest.mark.qt_log_ignore('QIODevice::read.*: device not open')
webengine_refactoring_xfail = pytest.mark.xfail(
True, reason='Broke during QtWebEngine refactoring, will be fixed after '
'sessions are refactored too.')
@pytest.fixture
def sess_man(tmp_path):
"""Fixture providing a SessionManager."""
return sessions.SessionManager(base_path=str(tmp_path))
class TestInit:
@pytest.fixture(autouse=True)
def cleanup(self, monkeypatch):
monkeypatch.setattr(sessions, 'session_manager', None)
yield
try:
objreg.delete('session-manager')
except KeyError:
pass
@pytest.mark.parametrize('create_dir', [True, False])
def test_with_standarddir(self, tmp_path, monkeypatch, create_dir):
monkeypatch.setattr(sessions.standarddir, 'data',
lambda: str(tmp_path))
session_dir = tmp_path / 'sessions'
if create_dir:
session_dir.mkdir()
sessions.init()
assert session_dir.exists()
assert sessions.session_manager._base_path == str(session_dir)
def test_did_not_load(sess_man):
assert not sess_man.did_load
class TestExists:
@pytest.mark.parametrize('absolute', [True, False])
def test_existent(self, tmp_path, absolute):
session_dir = tmp_path / 'sessions'
abs_session = tmp_path / 'foo.yml'
rel_session = session_dir / 'foo.yml'
session_dir.mkdir()
abs_session.touch()
rel_session.touch()
man = sessions.SessionManager(str(session_dir))
if absolute:
name = str(abs_session)
else:
name = 'foo'
assert man.exists(name)
@pytest.mark.parametrize('absolute', [True, False])
def test_inexistent(self, tmp_path, absolute):
man = sessions.SessionManager(str(tmp_path))
if absolute:
name = str(tmp_path / 'foo')
else:
name = 'foo'
assert not man.exists(name)
@webengine_refactoring_xfail
class TestSaveTab:
@pytest.mark.parametrize('is_active', [True, False])
def test_active(self, sess_man, webview, is_active):
data = sess_man._save_tab(webview, is_active)
if is_active:
assert data['active']
else:
assert 'active' not in data
def test_no_history(self, sess_man, webview):
data = sess_man._save_tab(webview, active=False)
assert not data['history']
class FakeMainWindow(QObject):
"""Helper class for the fake_main_window fixture.
A fake MainWindow which provides a saveGeometry method.
Needs to be a QObject so sip.isdeleted works.
"""
def __init__(self, geometry, win_id, parent=None):
super().__init__(parent)
self._geometry = QByteArray(geometry)
self.win_id = win_id
def saveGeometry(self):
return self._geometry
@pytest.fixture
def fake_window(tabbed_browser_stubs):
"""Fixture which provides a fake main windows with a tabbedbrowser."""
win0 = FakeMainWindow(b'fake-geometry-0', win_id=0)
objreg.register('main-window', win0, scope='window', window=0)
yield
objreg.delete('main-window', scope='window', window=0)
class TestSaveAll:
def test_no_history(self, sess_man):
# FIXME can this ever actually happen?
assert not objreg.window_registry
data = sess_man._save_all()
assert not data['windows']
@webengine_refactoring_xfail
def test_no_active_window(self, sess_man, fake_window, stubs,
monkeypatch):
qapp = stubs.FakeQApplication(active_window=None)
monkeypatch.setattr(sessions, 'QApplication', qapp)
sess_man._save_all()
@pytest.mark.parametrize('arg, config, current, expected', [
('foo', None, None, 'foo'),
(sessions.default, 'foo', None, 'foo'),
(sessions.default, None, 'foo', 'foo'),
(sessions.default, None, None, 'default'),
])
def test_get_session_name(config_stub, sess_man, arg, config, current,
expected):
config_stub.val.session.default_name = config
sess_man.current = current
assert sess_man._get_session_name(arg) == expected
class TestSave:
@pytest.fixture
def fake_history(self, stubs, tabbed_browser_stubs, monkeypatch, webview):
"""Fixture which provides a window with a fake history."""
win = FakeMainWindow(b'fake-geometry-0', win_id=0)
objreg.register('main-window', win, scope='window', window=0)
browser = tabbed_browser_stubs[0]
qapp = stubs.FakeQApplication(active_window=win)
monkeypatch.setattr(sessions, 'QApplication', qapp)
def set_data(items):
history = browser.widgets()[0].page().history()
stream, _data, user_data = tabhistory.serialize(items)
qtutils.deserialize_stream(stream, history)
for i, data in enumerate(user_data):
history.itemAt(i).setUserData(data)
yield set_data
objreg.delete('main-window', scope='window', window=0)
objreg.delete('tabbed-browser', scope='window', window=0)
def test_no_state_config(self, sess_man, tmp_path, state_config):
session_path = tmp_path / 'foo.yml'
sess_man.save(str(session_path))
assert 'session' not in state_config['general']
def test_last_window_session_none(self, caplog, sess_man, tmp_path):
session_path = tmp_path / 'foo.yml'
with caplog.at_level(logging.ERROR):
sess_man.save(str(session_path), last_window=True)
msg = "last_window_session is None while saving!"
assert caplog.messages == [msg]
assert not session_path.exists()
def test_last_window_session(self, sess_man, tmp_path):
sess_man.save_last_window_session()
session_path = tmp_path / 'foo.yml'
sess_man.save(str(session_path), last_window=True)
data = session_path.read_text('utf-8')
assert data == 'windows: []\n'
@pytest.mark.parametrize('exception', [
OSError('foo'), UnicodeEncodeError('ascii', '', 0, 2, 'foo'),
yaml.YAMLError('foo')])
def test_fake_exception(self, mocker, sess_man, tmp_path, exception):
mocker.patch('qutebrowser.misc.sessions.yaml.dump',
side_effect=exception)
with pytest.raises(sessions.SessionError, match=str(exception)):
sess_man.save(str(tmp_path / 'foo.yml'))
assert not list(tmp_path.glob('*'))
def test_load_next_time(self, tmp_path, state_config, sess_man):
session_path = tmp_path / 'foo.yml'
sess_man.save(str(session_path), load_next_time=True)
assert state_config['general']['session'] == str(session_path)
@webengine_refactoring_xfail
def test_utf_8_invalid(self, tmp_path, sess_man, fake_history):
"""Make sure data containing invalid UTF8 raises SessionError."""
session_path = tmp_path / 'foo.yml'
fake_history([Item(QUrl('http://www.qutebrowser.org/'), '\ud800',
active=True)])
try:
sess_man.save(str(session_path))
except sessions.SessionError:
# This seems to happen on some systems only?!
pass
else:
data = session_path.read_text('utf-8')
assert r'title: "\uD800"' in data
def _set_data(self, browser, tab_id, items):
"""Helper function for test_long_output."""
history = browser.widgets()[tab_id].page().history()
stream, _data, user_data = tabhistory.serialize(items)
qtutils.deserialize_stream(stream, history)
for i, data in enumerate(user_data):
history.itemAt(i).setUserData(data)
class FakeWebView:
"""A QWebView fake which provides a "page" with a load_history method.
Attributes:
loaded_history: The history which has been loaded by load_history, or
None.
raise_error: The exception to raise on load_history, or None.
"""
def __init__(self):
self.loaded_history = None
self.raise_error = None
def page(self):
return self
def load_history(self, data):
self.loaded_history = data
if self.raise_error is not None:
raise self.raise_error # pylint: disable=raising-bad-type
@pytest.fixture
def fake_webview():
return FakeWebView()
@webengine_refactoring_xfail
class TestLoadTab:
def test_no_history(self, sess_man, fake_webview):
sess_man._load_tab(fake_webview, {'history': []})
assert fake_webview.loaded_history == []
def test_load_fail(self, sess_man, fake_webview):
fake_webview.raise_error = ValueError
with pytest.raises(sessions.SessionError):
sess_man._load_tab(fake_webview, {'history': []})
@pytest.mark.parametrize('key, val, expected', [
('zoom', 1.23, 1.23),
('scroll-pos', {'x': 23, 'y': 42}, QPoint(23, 42)),
])
@pytest.mark.parametrize('in_main_data', [True, False])
def test_user_data(self, sess_man, fake_webview, key, val, expected,
in_main_data):
item = {'url': 'http://www.example.com/', 'title': 'foo'}
if in_main_data:
# This information got saved in the main data instead of saving it
# per item - make sure the old format can still be read
# https://github.com/qutebrowser/qutebrowser/issues/728
d = {'history': [item], key: val}
else:
item[key] = val
d = {'history': [item]}
sess_man._load_tab(fake_webview, d)
assert len(fake_webview.loaded_history) == 1
assert fake_webview.loaded_history[0].user_data[key] == expected
@pytest.mark.parametrize('original_url', ['http://example.org/', None])
def test_urls(self, sess_man, fake_webview, original_url):
url = 'http://www.example.com/'
item = {'url': url, 'title': 'foo'}
if original_url is None:
expected = QUrl(url)
else:
item['original-url'] = original_url
expected = QUrl(original_url)
d = {'history': [item]}
sess_man._load_tab(fake_webview, d)
assert len(fake_webview.loaded_history) == 1
loaded_item = fake_webview.loaded_history[0]
assert loaded_item.url == QUrl(url)
assert loaded_item.original_url == expected
class TestListSessions:
def test_no_sessions(self, tmp_path):
sess_man = sessions.SessionManager(str(tmp_path))
assert not sess_man.list_sessions()
def test_with_sessions(self, tmp_path):
(tmp_path / 'foo.yml').touch()
(tmp_path / 'bar.yml').touch()
sess_man = sessions.SessionManager(str(tmp_path))
assert sess_man.list_sessions() == ['bar', 'foo']
def test_with_other_files(self, tmp_path):
(tmp_path / 'foo.yml').touch()
(tmp_path / 'bar.html').touch()
sess_man = sessions.SessionManager(str(tmp_path))
assert sess_man.list_sessions() == ['foo']
|
import os
from kaggle_web_client import KaggleWebClient
_KAGGLE_TPU_NAME_ENV_VAR_NAME = 'TPU_NAME'
class KaggleDatasets:
GET_GCS_PATH_ENDPOINT = '/requests/CopyDatasetVersionToKnownGcsBucketRequest'
TIMEOUT_SECS = 600
# Integration types for GCS
AUTO_ML = 1
TPU = 2
def __init__(self):
self.web_client = KaggleWebClient()
self.has_tpu = os.getenv(_KAGGLE_TPU_NAME_ENV_VAR_NAME) is not None
def get_gcs_path(self, dataset_dir: str = None) -> str:
integration_type = self.TPU if self.has_tpu else self.AUTO_ML
data = {
'MountSlug': dataset_dir,
'IntegrationType': integration_type,
}
result = self.web_client.make_post_request(data, self.GET_GCS_PATH_ENDPOINT, self.TIMEOUT_SECS)
return result['destinationBucket']
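# --- Hedged usage sketch ---
# Inside a Kaggle notebook with the secrets token set, this resolves the GCS
# bucket for the attached dataset; the TPU or AutoML integration type is
# chosen automatically from TPU_NAME:
def _demo_get_gcs_path():
    client = KaggleDatasets()
    return client.get_gcs_path()  # e.g. 'gs://kds-tpu-...' (illustrative)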
|
from .unit import Unit
from .digit_removal import DigitRemoval
from .fixed_length import FixedLength
from .frequency_filter import FrequencyFilter
from .lemmatization import Lemmatization
from .lowercase import Lowercase
from .matching_histogram import MatchingHistogram
from .ngram_letter import NgramLetter
from .punc_removal import PuncRemoval
from .stateful_unit import StatefulUnit
from .stemming import Stemming
from .stop_removal import StopRemoval
from .tokenize import Tokenize
from .vocabulary import Vocabulary
from .word_hashing import WordHashing
from .character_index import CharacterIndex
from .word_exact_match import WordExactMatch
from .bert_clean import BertClean
from .bert_clean import StripAccent
from .tokenize import ChineseTokenize
from .tokenize import BasicTokenize
from .tokenize import WordPieceTokenize
from .vocabulary import BertVocabulary
def list_available() -> list:
from matchzoo.utils import list_recursive_concrete_subclasses
return list_recursive_concrete_subclasses(Unit)
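# --- Hedged self-check (illustration only) ---
# list_available() reports every concrete Unit subclass, so the classes
# imported above should all appear in it:
def _demo_list_available():
    available = list_available()
    assert Tokenize in available
    assert WordHashing in available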
|
from django.contrib import admin
from django.urls import NoReverseMatch
from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from zinnia.admin.forms import CategoryAdminForm
class CategoryAdmin(admin.ModelAdmin):
"""
Admin for Category model.
"""
form = CategoryAdminForm
fields = ('title', 'parent', 'description', 'slug')
list_display = ('title', 'slug', 'get_tree_path', 'description')
sortable_by = ('title', 'slug')
prepopulated_fields = {'slug': ('title', )}
search_fields = ('title', 'description')
list_filter = ('parent',)
def __init__(self, model, admin_site):
self.form.admin_site = admin_site
super(CategoryAdmin, self).__init__(model, admin_site)
def get_tree_path(self, category):
"""
Return the category's tree path in HTML.
"""
try:
return format_html(
'<a href="{}" target="blank">/{}/</a>',
category.get_absolute_url(), category.tree_path)
except NoReverseMatch:
return '/%s/' % category.tree_path
get_tree_path.short_description = _('tree path')
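# --- Hedged usage sketch ---
# Zinnia registers this admin class itself; a manual registration would look
# like the following (the Category import path is an assumption):
#
#     from zinnia.models.category import Category
#     admin.site.register(Category, CategoryAdmin)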
|
from behave import then
@then('it should have a return code of "{code:d}"')
def see_expected_return_code(context, code):
print(context.output)
print(context.return_code)
print()
assert context.return_code == code
@then('the output should contain "{output_string}"')
def output_contains(context, output_string):
print(output_string)
assert output_string in context.output
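# --- Hedged usage sketch ---
# A feature file exercising these steps might contain (values illustrative):
#
#     Then it should have a return code of "0"
#     And the output should contain "OK"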
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.light import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_fire_time_changed,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_triggers(hass, device_reg, entity_reg):
"""Test we get the expected triggers from a light."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_triggers = [
{
"platform": "device",
"domain": DOMAIN,
"type": "turned_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"platform": "device",
"domain": DOMAIN,
"type": "turned_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert triggers == expected_triggers
async def test_get_trigger_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a light trigger."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
for trigger in triggers:
capabilities = await async_get_device_automation_capabilities(
hass, "trigger", trigger
)
assert capabilities == expected_capabilities
async def test_if_fires_on_state_change(hass, calls):
"""Test for turn_on and turn_off triggers firing."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
await hass.async_block_till_done()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_on",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
},
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_off",
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "turn_off device - {} - on - off - None".format(
ent1.entity_id
)
hass.states.async_set(ent1.entity_id, STATE_ON)
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "turn_on device - {} - off - on - None".format(
ent1.entity_id
)
async def test_if_fires_on_state_change_with_for(hass, calls):
"""Test for triggers firing with delay."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
await hass.async_block_till_done()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "turned_off",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "turn_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
(
"platform",
"entity_id",
"from_state.state",
"to_state.state",
"for",
)
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
await hass.async_block_till_done()
assert len(calls) == 0
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
assert len(calls) == 1
await hass.async_block_till_done()
assert calls[0].data["some"] == "turn_off device - {} - on - off - 0:00:05".format(
ent1.entity_id
)
|
from datetime import timedelta
import logging
from async_timeout import timeout
from canary.api import Api
from requests import ConnectTimeout, HTTPError
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class CanaryDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Canary data."""
def __init__(self, hass: HomeAssistantType, *, api: Api):
"""Initialize global Canary data updater."""
self.canary = api
update_interval = timedelta(seconds=30)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
def _update_data(self) -> dict:
"""Fetch data from Canary via sync functions."""
locations_by_id = {}
readings_by_device_id = {}
for location in self.canary.get_locations():
location_id = location.location_id
locations_by_id[location_id] = location
for device in location.devices:
if device.is_online:
readings_by_device_id[
device.device_id
] = self.canary.get_latest_readings(device.device_id)
return {
"locations": locations_by_id,
"readings": readings_by_device_id,
}
async def _async_update_data(self) -> dict:
"""Fetch data from Canary."""
try:
async with timeout(15):
return await self.hass.async_add_executor_job(self._update_data)
except (ConnectTimeout, HTTPError) as error:
raise UpdateFailed(f"Invalid response from API: {error}") from error
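# --- Hedged usage sketch ---
# A config entry setup would typically build and prime the coordinator like
# this (`api` is assumed to be an authenticated canary.api.Api instance):
#
#     coordinator = CanaryDataUpdateCoordinator(hass, api=api)
#     await coordinator.async_refresh()
#     locations = coordinator.data["locations"]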
|
import pytest
import matchzoo as mz
@pytest.mark.cron
def test_load_data():
train_data = mz.datasets.wiki_qa.load_data('train', task='ranking')
assert len(train_data) == 20360
train_data, _ = mz.datasets.wiki_qa.load_data('train',
task='classification',
return_classes=True)
assert len(train_data) == 20360
dev_data = mz.datasets.wiki_qa.load_data('dev', task='ranking',
filtered=False)
assert len(dev_data) == 2733
dev_data, tag = mz.datasets.wiki_qa.load_data('dev', task='classification',
filtered=True,
return_classes=True)
assert len(dev_data) == 1126
assert tag == [False, True]
test_data = mz.datasets.wiki_qa.load_data('test', task='ranking',
filtered=False)
assert len(test_data) == 6165
test_data, tag = mz.datasets.wiki_qa.load_data('test',
task='classification',
filtered=True,
return_classes=True)
assert len(test_data) == 2341
assert tag == [False, True]
@pytest.mark.cron
def test_load_snli():
train_data, classes = mz.datasets.snli.load_data('train',
'classification',
return_classes=True)
num_samples = 550146
assert len(train_data) == num_samples
x, y = train_data.unpack()
assert len(x['text_left']) == num_samples
assert len(x['text_right']) == num_samples
assert y.shape == (num_samples, 4)
assert classes == ['entailment', 'contradiction', 'neutral', '-']
dev_data, classes = mz.datasets.snli.load_data('dev', 'classification',
return_classes=True)
assert len(dev_data) == 10000
assert classes == ['entailment', 'contradiction', 'neutral', '-']
test_data, classes = mz.datasets.snli.load_data('test', 'classification',
return_classes=True)
assert len(test_data) == 10000
assert classes == ['entailment', 'contradiction', 'neutral', '-']
train_data = mz.datasets.snli.load_data('train', 'ranking')
x, y = train_data.unpack()
assert len(x['text_left']) == num_samples
assert len(x['text_right']) == num_samples
assert y.shape == (num_samples, 1)
@pytest.mark.cron
def test_load_quora_qp():
train_data = mz.datasets.quora_qp.load_data(task='classification')
assert len(train_data) == 363177
dev_data, tag = mz.datasets.quora_qp.load_data(
'dev',
task='classification',
return_classes=True)
assert tag == [False, True]
assert len(dev_data) == 40371
x, y = dev_data.unpack()
assert len(x['text_left']) == 40371
assert len(x['text_right']) == 40371
assert y.shape == (40371, 2)
test_data = mz.datasets.quora_qp.load_data('test')
assert len(test_data) == 390965
dev_data = mz.datasets.quora_qp.load_data('dev', 'ranking')
x, y = dev_data.unpack()
assert y.shape == (40371, 1)
@pytest.mark.cron
def test_load_cqa_ql_16():
# test load question pairs
train_data = mz.datasets.cqa_ql_16.load_data(task='classification')
assert len(train_data) == 3998
dev_data, tag = mz.datasets.cqa_ql_16.load_data(
'dev',
task='classification',
return_classes=True)
assert tag == ['PerfectMatch', 'Relevant', 'Irrelevant']
assert len(dev_data) == 500
x, y = dev_data.unpack()
assert y.shape == (500, 3)
test_data = mz.datasets.cqa_ql_16.load_data('test')
assert len(test_data) == 700
# test load answer pairs
train_data = mz.datasets.cqa_ql_16.load_data(match_type='answer')
assert len(train_data) == 39980
test_data = mz.datasets.cqa_ql_16.load_data(stage='test', match_type='answer')
assert len(test_data) == 7000
# test load external answer pairs
train_data = mz.datasets.cqa_ql_16.load_data(match_type='external_answer')
assert len(train_data) == 39980
# test load rank data
train_data = mz.datasets.cqa_ql_16.load_data(task='ranking')
x, y = train_data.unpack()
assert y.shape == (3998, 1)
dev_data = mz.datasets.cqa_ql_16.load_data('dev', task='ranking', match_type='answer', target_label='Good')
x, y = dev_data.unpack()
assert y.shape == (5000, 1)
|
import datetime
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_RGB_COLOR,
ATTR_TRANSITION,
ATTR_WHITE_VALUE,
ATTR_XY_COLOR,
DOMAIN as LIGHT_DOMAIN,
VALID_TRANSITION,
is_on,
)
from homeassistant.components.switch import DOMAIN, SwitchEntity
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_LIGHTS,
CONF_MODE,
CONF_NAME,
CONF_PLATFORM,
SERVICE_TURN_ON,
STATE_ON,
SUN_EVENT_SUNRISE,
SUN_EVENT_SUNSET,
)
from homeassistant.helpers import config_validation as cv, event
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.sun import get_astral_event_date
from homeassistant.util import slugify
from homeassistant.util.color import (
color_RGB_to_xy_brightness,
color_temperature_kelvin_to_mired,
color_temperature_to_rgb,
)
from homeassistant.util.dt import as_local, utcnow as dt_utcnow
_LOGGER = logging.getLogger(__name__)
CONF_START_TIME = "start_time"
CONF_STOP_TIME = "stop_time"
CONF_START_CT = "start_colortemp"
CONF_SUNSET_CT = "sunset_colortemp"
CONF_STOP_CT = "stop_colortemp"
CONF_BRIGHTNESS = "brightness"
CONF_DISABLE_BRIGHTNESS_ADJUST = "disable_brightness_adjust"
CONF_INTERVAL = "interval"
MODE_XY = "xy"
MODE_MIRED = "mired"
MODE_RGB = "rgb"
DEFAULT_MODE = MODE_XY
PLATFORM_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "flux",
vol.Required(CONF_LIGHTS): cv.entity_ids,
vol.Optional(CONF_NAME, default="Flux"): cv.string,
vol.Optional(CONF_START_TIME): cv.time,
vol.Optional(CONF_STOP_TIME): cv.time,
vol.Optional(CONF_START_CT, default=4000): vol.All(
vol.Coerce(int), vol.Range(min=1000, max=40000)
),
vol.Optional(CONF_SUNSET_CT, default=3000): vol.All(
vol.Coerce(int), vol.Range(min=1000, max=40000)
),
vol.Optional(CONF_STOP_CT, default=1900): vol.All(
vol.Coerce(int), vol.Range(min=1000, max=40000)
),
vol.Optional(CONF_BRIGHTNESS): vol.All(
vol.Coerce(int), vol.Range(min=0, max=255)
),
vol.Optional(CONF_DISABLE_BRIGHTNESS_ADJUST): cv.boolean,
vol.Optional(CONF_MODE, default=DEFAULT_MODE): vol.Any(
MODE_XY, MODE_MIRED, MODE_RGB
),
vol.Optional(CONF_INTERVAL, default=30): cv.positive_int,
vol.Optional(ATTR_TRANSITION, default=30): VALID_TRANSITION,
}
)
async def async_set_lights_xy(hass, lights, x_val, y_val, brightness, transition):
"""Set color of array of lights."""
for light in lights:
if is_on(hass, light):
service_data = {ATTR_ENTITY_ID: light}
if x_val is not None and y_val is not None:
service_data[ATTR_XY_COLOR] = [x_val, y_val]
if brightness is not None:
service_data[ATTR_BRIGHTNESS] = brightness
service_data[ATTR_WHITE_VALUE] = brightness
if transition is not None:
service_data[ATTR_TRANSITION] = transition
await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data)
async def async_set_lights_temp(hass, lights, mired, brightness, transition):
"""Set color of array of lights."""
for light in lights:
if is_on(hass, light):
service_data = {ATTR_ENTITY_ID: light}
if mired is not None:
service_data[ATTR_COLOR_TEMP] = int(mired)
if brightness is not None:
service_data[ATTR_BRIGHTNESS] = brightness
if transition is not None:
service_data[ATTR_TRANSITION] = transition
await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data)
async def async_set_lights_rgb(hass, lights, rgb, transition):
"""Set color of array of lights."""
for light in lights:
if is_on(hass, light):
service_data = {ATTR_ENTITY_ID: light}
if rgb is not None:
service_data[ATTR_RGB_COLOR] = rgb
if transition is not None:
service_data[ATTR_TRANSITION] = transition
await hass.services.async_call(LIGHT_DOMAIN, SERVICE_TURN_ON, service_data)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Flux switches."""
name = config.get(CONF_NAME)
lights = config.get(CONF_LIGHTS)
start_time = config.get(CONF_START_TIME)
stop_time = config.get(CONF_STOP_TIME)
start_colortemp = config.get(CONF_START_CT)
sunset_colortemp = config.get(CONF_SUNSET_CT)
stop_colortemp = config.get(CONF_STOP_CT)
brightness = config.get(CONF_BRIGHTNESS)
disable_brightness_adjust = config.get(CONF_DISABLE_BRIGHTNESS_ADJUST)
mode = config.get(CONF_MODE)
interval = config.get(CONF_INTERVAL)
transition = config.get(ATTR_TRANSITION)
flux = FluxSwitch(
name,
hass,
lights,
start_time,
stop_time,
start_colortemp,
sunset_colortemp,
stop_colortemp,
brightness,
disable_brightness_adjust,
mode,
interval,
transition,
)
async_add_entities([flux])
async def async_update(call=None):
"""Update lights."""
await flux.async_flux_update()
service_name = slugify(f"{name} update")
hass.services.async_register(DOMAIN, service_name, async_update)
class FluxSwitch(SwitchEntity, RestoreEntity):
"""Representation of a Flux switch."""
def __init__(
self,
name,
hass,
lights,
start_time,
stop_time,
start_colortemp,
sunset_colortemp,
stop_colortemp,
brightness,
disable_brightness_adjust,
mode,
interval,
transition,
):
"""Initialize the Flux switch."""
self._name = name
self.hass = hass
self._lights = lights
self._start_time = start_time
self._stop_time = stop_time
self._start_colortemp = start_colortemp
self._sunset_colortemp = sunset_colortemp
self._stop_colortemp = stop_colortemp
self._brightness = brightness
self._disable_brightness_adjust = disable_brightness_adjust
self._mode = mode
self._interval = interval
self._transition = transition
self.unsub_tracker = None
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if switch is on."""
return self.unsub_tracker is not None
async def async_added_to_hass(self):
"""Call when entity about to be added to hass."""
last_state = await self.async_get_last_state()
if last_state and last_state.state == STATE_ON:
await self.async_turn_on()
async def async_turn_on(self, **kwargs):
"""Turn on flux."""
if self.is_on:
return
self.unsub_tracker = event.async_track_time_interval(
self.hass,
self.async_flux_update,
datetime.timedelta(seconds=self._interval),
)
# Make initial update
await self.async_flux_update()
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn off flux."""
if self.is_on:
self.unsub_tracker()
self.unsub_tracker = None
self.async_write_ha_state()
async def async_flux_update(self, utcnow=None):
"""Update all the lights using flux."""
if utcnow is None:
utcnow = dt_utcnow()
now = as_local(utcnow)
sunset = get_astral_event_date(self.hass, SUN_EVENT_SUNSET, now.date())
start_time = self.find_start_time(now)
stop_time = self.find_stop_time(now)
if stop_time <= start_time:
# stop_time does not happen in the same day as start_time
if start_time < now:
# stop time is tomorrow
stop_time += datetime.timedelta(days=1)
elif now < start_time:
# stop_time was yesterday since the new start_time is not reached
stop_time -= datetime.timedelta(days=1)
if start_time < now < sunset:
# Daytime
time_state = "day"
temp_range = abs(self._start_colortemp - self._sunset_colortemp)
day_length = int(sunset.timestamp() - start_time.timestamp())
seconds_from_start = int(now.timestamp() - start_time.timestamp())
percentage_complete = seconds_from_start / day_length
temp_offset = temp_range * percentage_complete
if self._start_colortemp > self._sunset_colortemp:
temp = self._start_colortemp - temp_offset
else:
temp = self._start_colortemp + temp_offset
else:
# Night time
time_state = "night"
if now < stop_time:
if stop_time < start_time and stop_time.day == sunset.day:
# we need to use yesterday's sunset time
sunset_time = sunset - datetime.timedelta(days=1)
else:
sunset_time = sunset
night_length = int(stop_time.timestamp() - sunset_time.timestamp())
seconds_from_sunset = int(now.timestamp() - sunset_time.timestamp())
percentage_complete = seconds_from_sunset / night_length
else:
percentage_complete = 1
temp_range = abs(self._sunset_colortemp - self._stop_colortemp)
temp_offset = temp_range * percentage_complete
if self._sunset_colortemp > self._stop_colortemp:
temp = self._sunset_colortemp - temp_offset
else:
temp = self._sunset_colortemp + temp_offset
rgb = color_temperature_to_rgb(temp)
x_val, y_val, b_val = color_RGB_to_xy_brightness(*rgb)
brightness = self._brightness if self._brightness else b_val
if self._disable_brightness_adjust:
brightness = None
if self._mode == MODE_XY:
await async_set_lights_xy(
self.hass, self._lights, x_val, y_val, brightness, self._transition
)
_LOGGER.debug(
"Lights updated to x:%s y:%s brightness:%s, %s%% "
"of %s cycle complete at %s",
x_val,
y_val,
brightness,
round(percentage_complete * 100),
time_state,
now,
)
elif self._mode == MODE_RGB:
await async_set_lights_rgb(self.hass, self._lights, rgb, self._transition)
_LOGGER.debug(
"Lights updated to rgb:%s, %s%% of %s cycle complete at %s",
rgb,
round(percentage_complete * 100),
time_state,
now,
)
else:
# Convert to mired and clamp to allowed values
mired = color_temperature_kelvin_to_mired(temp)
await async_set_lights_temp(
self.hass, self._lights, mired, brightness, self._transition
)
_LOGGER.debug(
"Lights updated to mired:%s brightness:%s, %s%% "
"of %s cycle complete at %s",
mired,
brightness,
round(percentage_complete * 100),
time_state,
now,
)
def find_start_time(self, now):
"""Return sunrise or start_time if given."""
if self._start_time:
sunrise = now.replace(
hour=self._start_time.hour, minute=self._start_time.minute, second=0
)
else:
sunrise = get_astral_event_date(self.hass, SUN_EVENT_SUNRISE, now.date())
return sunrise
def find_stop_time(self, now):
"""Return dusk or stop_time if given."""
if self._stop_time:
dusk = now.replace(
hour=self._stop_time.hour, minute=self._stop_time.minute, second=0
)
else:
dusk = get_astral_event_date(self.hass, "dusk", now.date())
return dusk
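# --- Hedged arithmetic sketch for the daytime interpolation above ---
# Halfway between start_time and sunset with start=4000K and sunset=3000K,
# temp_offset = |4000 - 3000| * 0.5 = 500, so temp = 4000 - 500 = 3500K.
def _demo_day_temp(start_ct=4000, sunset_ct=3000, percentage_complete=0.5):
    temp_offset = abs(start_ct - sunset_ct) * percentage_complete
    if start_ct > sunset_ct:
        return start_ct - temp_offset
    return start_ct + temp_offset
assert _demo_day_temp() == 3500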
|
import pytest
import homeassistant.components.remote as remote
from homeassistant.components.remote import ATTR_COMMAND
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
ENTITY_ID = "remote.remote_one"
SERVICE_SEND_COMMAND = "send_command"
@pytest.fixture(autouse=True)
async def setup_component(hass):
"""Initialize components."""
assert await async_setup_component(
hass, remote.DOMAIN, {"remote": {"platform": "demo"}}
)
await hass.async_block_till_done()
async def test_methods(hass):
"""Test if services call the entity methods as expected."""
await hass.services.async_call(
remote.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}
)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_ON
await hass.services.async_call(
remote.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}
)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
remote.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}
)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.state == STATE_ON
data = {
ATTR_ENTITY_ID: ENTITY_ID,
ATTR_COMMAND: ["test"],
}
await hass.services.async_call(remote.DOMAIN, SERVICE_SEND_COMMAND, data)
await hass.async_block_till_done()
state = hass.states.get(ENTITY_ID)
assert state.attributes == {
"friendly_name": "Remote One",
"last_command_sent": "test",
"supported_features": 0,
}
|
import sys
import pathlib
import pytest
from scripts.dev import check_coverage
pytest_plugins = 'pytester'
pytestmark = [pytest.mark.linux, pytest.mark.not_frozen]
class CovtestHelper:
"""Helper object for covtest fixture.
Attributes:
_testdir: The testdir fixture from pytest.
_monkeypatch: The monkeypatch fixture from pytest.
"""
def __init__(self, testdir, monkeypatch):
self._testdir = testdir
self._monkeypatch = monkeypatch
def makefile(self, code):
"""Generate a module.py for the given code."""
self._testdir.makepyfile(module=code)
def run(self):
"""Run pytest with coverage for the given module.py."""
coveragerc = str(self._testdir.tmpdir / 'coveragerc')
self._monkeypatch.delenv('PYTEST_ADDOPTS', raising=False)
res = self._testdir.runpytest('--cov=module',
'--cov-config={}'.format(coveragerc),
'--cov-report=xml',
plugins=['no:faulthandler', 'no:xvfb'])
assert res.ret == 0
return res
def check(self, perfect_files=None):
"""Run check_coverage.py and run its return value."""
coverage_file = self._testdir.tmpdir / 'coverage.xml'
if perfect_files is None:
perfect_files = [(None, 'module.py')]
argv = [sys.argv[0]]
self._monkeypatch.setattr(check_coverage.sys, 'argv', argv)
with self._testdir.tmpdir.as_cwd():
with coverage_file.open(encoding='utf-8') as f:
return check_coverage.check(f, perfect_files=perfect_files)
def check_skipped(self, args, reason):
"""Run check_coverage.py and make sure it's skipped."""
argv = [sys.argv[0]] + list(args)
self._monkeypatch.setattr(check_coverage.sys, 'argv', argv)
with pytest.raises(check_coverage.Skipped) as excinfo:
return check_coverage.check(None, perfect_files=[])
assert excinfo.value.reason == reason
@pytest.fixture
def covtest(testdir, monkeypatch):
"""Fixture which provides a coveragerc and a test to call module.func."""
testdir.makefile(ext='', coveragerc="""
[run]
branch=True
""")
testdir.makepyfile(test_module="""
from module import func
def test_module():
func()
""")
# Check if coverage plugin is available
res = testdir.runpytest('--version', '--version')
assert res.ret == 0
output = res.stderr.str()
assert 'This is pytest version' in output
if 'pytest-cov' not in output:
pytest.skip("cov plugin not available")
return CovtestHelper(testdir, monkeypatch)
def test_tested_no_branches(covtest):
covtest.makefile("""
def func():
pass
""")
covtest.run()
assert covtest.check() == []
def test_tested_with_branches(covtest):
covtest.makefile("""
def func2(arg):
if arg:
pass
else:
pass
def func():
func2(True)
func2(False)
""")
covtest.run()
assert covtest.check() == []
def test_untested(covtest):
covtest.makefile("""
def func():
pass
def untested():
pass
""")
covtest.run()
expected = check_coverage.Message(
check_coverage.MsgType.insufficient_coverage,
'module.py',
'module.py has 75.00% line and 100.00% branch coverage!')
assert covtest.check() == [expected]
def test_untested_floats(covtest):
"""Make sure we don't report 58.330000000000005% coverage."""
covtest.makefile("""
def func():
pass
def untested():
pass
def untested2():
pass
def untested3():
pass
def untested4():
pass
def untested5():
pass
""")
covtest.run()
expected = check_coverage.Message(
check_coverage.MsgType.insufficient_coverage,
'module.py',
'module.py has 58.33% line and 100.00% branch coverage!')
assert covtest.check() == [expected]
def test_untested_branches(covtest):
covtest.makefile("""
def func2(arg):
if arg:
pass
else:
pass
def func():
func2(True)
""")
covtest.run()
expected = check_coverage.Message(
check_coverage.MsgType.insufficient_coverage,
'module.py',
'module.py has 100.00% line and 50.00% branch coverage!')
assert covtest.check() == [expected]
def test_tested_unlisted(covtest):
covtest.makefile("""
def func():
pass
""")
covtest.run()
expected = check_coverage.Message(
check_coverage.MsgType.perfect_file,
'module.py',
'module.py has 100% coverage but is not in perfect_files!')
assert covtest.check(perfect_files=[]) == [expected]
@pytest.mark.parametrize('args, reason', [
(['-k', 'foo'], "because -k is given."),
(['-m', 'foo'], "because -m is given."),
(['--lf'], "because --lf is given."),
(['blah', '-m', 'foo'], "because -m is given."),
(['tests/foo'], "because there is nothing to check."),
])
def test_skipped_args(covtest, args, reason):
covtest.check_skipped(args, reason)
@pytest.mark.fake_os('windows')
def test_skipped_non_linux(covtest):
covtest.check_skipped([], "on non-Linux system.")
def _generate_files():
"""Get filenames from WHITELISTED_/PERFECT_FILES."""
for src_file in check_coverage.WHITELISTED_FILES:
yield pathlib.Path(src_file)
for test_file, src_file in check_coverage.PERFECT_FILES:
if test_file is not None:
yield pathlib.Path(test_file)
yield pathlib.Path(src_file)
@pytest.mark.parametrize('filename', list(_generate_files()))
def test_files_exist(filename):
basedir = pathlib.Path(check_coverage.__file__).parents[2]
assert (basedir / filename).exists()
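# For illustration: the float formatting issue test_untested_floats guards
# against. Coverage ratios are floats, so naive percentage math can produce
# values like 58.330000000000005; formatting to two decimals avoids that.
# (A hedged sketch, not the actual check_coverage implementation.)
#
#     percentage = '{:.2f}'.format(covered_lines / total_lines * 100)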
|
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
from . import CONF_PORTS, DATA_GC100
_SWITCH_SCHEMA = vol.Schema({cv.string: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PORTS): vol.All(cv.ensure_list, [_SWITCH_SCHEMA])}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the GC100 devices."""
switches = []
ports = config.get(CONF_PORTS)
for port in ports:
for port_addr, port_name in port.items():
switches.append(GC100Switch(port_name, port_addr, hass.data[DATA_GC100]))
add_entities(switches, True)
class GC100Switch(ToggleEntity):
"""Represent a switch/relay from GC100."""
def __init__(self, name, port_addr, gc100):
"""Initialize the GC100 switch."""
self._name = name or DEVICE_DEFAULT_NAME
self._port_addr = port_addr
self._gc100 = gc100
self._state = None
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
self._gc100.write_switch(self._port_addr, 1, self.set_state)
def turn_off(self, **kwargs):
"""Turn the device off."""
self._gc100.write_switch(self._port_addr, 0, self.set_state)
def update(self):
"""Update the sensor state."""
self._gc100.read_sensor(self._port_addr, self.set_state)
def set_state(self, state):
"""Set the current state."""
self._state = state == 1
self.schedule_update_ha_state()
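# For illustration, the configuration shape the PLATFORM_SCHEMA above accepts,
# mapping port addresses to display names (sample values are hypothetical):
#
#     switch:
#       - platform: gc100
#         ports:
#           - '4:1': Relay one
#           - '4:2': Relay two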
|
import logging
import voluptuous as vol
from homeassistant.components.alarm_control_panel import (
ENTITY_ID_FORMAT,
FORMAT_NUMBER,
PLATFORM_SCHEMA,
AlarmControlPanelEntity,
)
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
ATTR_CODE,
CONF_NAME,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
STATE_UNAVAILABLE,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.script import Script
from .const import DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
_LOGGER = logging.getLogger(__name__)
_VALID_STATES = [
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
STATE_UNAVAILABLE,
]
CONF_ARM_AWAY_ACTION = "arm_away"
CONF_ARM_HOME_ACTION = "arm_home"
CONF_ARM_NIGHT_ACTION = "arm_night"
CONF_DISARM_ACTION = "disarm"
CONF_ALARM_CONTROL_PANELS = "panels"
CONF_CODE_ARM_REQUIRED = "code_arm_required"
ALARM_CONTROL_PANEL_SCHEMA = vol.Schema(
{
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_DISARM_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_ARM_AWAY_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_ARM_HOME_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_ARM_NIGHT_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ALARM_CONTROL_PANELS): cv.schema_with_slug_keys(
ALARM_CONTROL_PANEL_SCHEMA
),
}
)
async def _async_create_entities(hass, config):
"""Create Template Alarm Control Panels."""
alarm_control_panels = []
for device, device_config in config[CONF_ALARM_CONTROL_PANELS].items():
name = device_config.get(CONF_NAME, device)
state_template = device_config.get(CONF_VALUE_TEMPLATE)
disarm_action = device_config.get(CONF_DISARM_ACTION)
arm_away_action = device_config.get(CONF_ARM_AWAY_ACTION)
arm_home_action = device_config.get(CONF_ARM_HOME_ACTION)
arm_night_action = device_config.get(CONF_ARM_NIGHT_ACTION)
code_arm_required = device_config[CONF_CODE_ARM_REQUIRED]
unique_id = device_config.get(CONF_UNIQUE_ID)
alarm_control_panels.append(
AlarmControlPanelTemplate(
hass,
device,
name,
state_template,
disarm_action,
arm_away_action,
arm_home_action,
arm_night_action,
code_arm_required,
unique_id,
)
)
return alarm_control_panels
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Template Alarm Control Panels."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
async_add_entities(await _async_create_entities(hass, config))
class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity):
"""Representation of a templated Alarm Control Panel."""
def __init__(
self,
hass,
device_id,
name,
state_template,
disarm_action,
arm_away_action,
arm_home_action,
arm_night_action,
code_arm_required,
unique_id,
):
"""Initialize the panel."""
super().__init__()
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device_id, hass=hass
)
self._name = name
self._template = state_template
self._disarm_script = None
self._code_arm_required = code_arm_required
domain = __name__.split(".")[-2]
if disarm_action is not None:
self._disarm_script = Script(hass, disarm_action, name, domain)
self._arm_away_script = None
if arm_away_action is not None:
self._arm_away_script = Script(hass, arm_away_action, name, domain)
self._arm_home_script = None
if arm_home_action is not None:
self._arm_home_script = Script(hass, arm_home_action, name, domain)
self._arm_night_script = None
if arm_night_action is not None:
self._arm_night_script = Script(hass, arm_night_action, name, domain)
self._state = None
self._unique_id = unique_id
@property
def name(self):
"""Return the display name of this alarm control panel."""
return self._name
@property
def unique_id(self):
"""Return the unique id of this alarm control panel."""
return self._unique_id
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
supported_features = 0
if self._arm_night_script is not None:
supported_features = supported_features | SUPPORT_ALARM_ARM_NIGHT
if self._arm_home_script is not None:
supported_features = supported_features | SUPPORT_ALARM_ARM_HOME
if self._arm_away_script is not None:
supported_features = supported_features | SUPPORT_ALARM_ARM_AWAY
return supported_features
@property
def code_format(self):
"""Return one or more digits/characters."""
return FORMAT_NUMBER
@property
def code_arm_required(self):
"""Whether the code is required for arm actions."""
return self._code_arm_required
@callback
def _update_state(self, result):
if isinstance(result, TemplateError):
self._state = None
return
# Validate state
if result in _VALID_STATES:
self._state = result
_LOGGER.debug("Valid state - %s", result)
return
_LOGGER.error(
"Received invalid alarm panel state: %s. Expected: %s",
result,
", ".join(_VALID_STATES),
)
self._state = None
async def async_added_to_hass(self):
"""Register callbacks."""
if self._template:
self.add_template_attribute(
"_state", self._template, None, self._update_state
)
await super().async_added_to_hass()
async def _async_alarm_arm(self, state, script=None, code=None):
"""Arm the panel to specified state with supplied script."""
optimistic_set = False
if self._template is None:
self._state = state
optimistic_set = True
if script is not None:
await script.async_run({ATTR_CODE: code}, context=self._context)
else:
_LOGGER.error("No script action defined for %s", state)
if optimistic_set:
self.async_write_ha_state()
async def async_alarm_arm_away(self, code=None):
"""Arm the panel to Away."""
await self._async_alarm_arm(
STATE_ALARM_ARMED_AWAY, script=self._arm_away_script, code=code
)
async def async_alarm_arm_home(self, code=None):
"""Arm the panel to Home."""
await self._async_alarm_arm(
STATE_ALARM_ARMED_HOME, script=self._arm_home_script, code=code
)
async def async_alarm_arm_night(self, code=None):
"""Arm the panel to Night."""
await self._async_alarm_arm(
STATE_ALARM_ARMED_NIGHT, script=self._arm_night_script, code=code
)
async def async_alarm_disarm(self, code=None):
"""Disarm the panel."""
await self._async_alarm_arm(
STATE_ALARM_DISARMED, script=self._disarm_script, code=code
)
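# A minimal sketch of how the supported_features bitmask above composes. The
# numeric values are illustrative; the real constants come from
# homeassistant.components.alarm_control_panel.const.
#
#     SUPPORT_ALARM_ARM_HOME = 1
#     SUPPORT_ALARM_ARM_AWAY = 2
#     SUPPORT_ALARM_ARM_NIGHT = 4
#     features = SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY   # == 3
#     bool(features & SUPPORT_ALARM_ARM_NIGHT)                     # False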
|
from os import path
import sphinx
__version__ = '0.5.0'
__version_full__ = __version__
def get_html_theme_path():
"""Return list of HTML theme paths."""
cur_dir = path.abspath(path.dirname(path.dirname(__file__)))
return cur_dir
# See http://www.sphinx-doc.org/en/stable/theming.html#distribute-your-theme-as-a-python-package
def setup(app):
if sphinx.version_info >= (1, 6, 0):
# Register the theme that can be referenced without adding a theme path
app.add_html_theme('sphinx_rtd_theme', path.abspath(path.dirname(__file__)))
if sphinx.version_info >= (1, 8, 0):
# Add Sphinx message catalog for newer versions of Sphinx
# See http://www.sphinx-doc.org/en/master/extdev/appapi.html#sphinx.application.Sphinx.add_message_catalog
rtd_locale_path = path.join(path.abspath(path.dirname(__file__)), 'locale')
app.add_message_catalog('sphinx', rtd_locale_path)
return {'parallel_read_safe': True, 'parallel_write_safe': True}
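# For illustration, typical conf.py usage of this package (a hedged sketch;
# with Sphinx >= 1.6 the add_html_theme() registration above makes the
# html_theme_path line unnecessary):
#
#     import sphinx_rtd_theme
#     html_theme = 'sphinx_rtd_theme'
#     html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]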
|
import boto3
from moto import mock_sts, mock_s3
@mock_sts()
@mock_s3()
def test_put_delete_s3_object(app):
from lemur.plugins.lemur_aws.s3 import put, delete, get
bucket = "public-bucket"
region = "us-east-1"
account = "123456789012"
path = "some-path/foo"
data = "dummy data"
s3_client = boto3.client('s3')
s3_client.create_bucket(Bucket=bucket)
put(bucket_name=bucket,
region_name=region,
prefix=path,
data=data,
encrypt=False,
account_number=account,
region=region)
response = get(bucket_name=bucket, prefixed_object_name=path, account_number=account)
    # put data, then read the same data back
assert (response == data)
response = get(bucket_name="wrong-bucket", prefixed_object_name=path, account_number=account)
    # attempting to get data from the wrong bucket returns None
assert (response is None)
delete(bucket_name=bucket, prefixed_object_name=path, account_number=account)
response = get(bucket_name=bucket, prefixed_object_name=path, account_number=account)
    # after deleting, getting the object returns None
assert (response is None)
|
import logging
from pyhap.const import (
CATEGORY_GARAGE_DOOR_OPENER,
CATEGORY_WINDOW,
CATEGORY_WINDOW_COVERING,
)
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
STATE_CLOSED,
STATE_CLOSING,
STATE_ON,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.core import callback
from homeassistant.helpers.event import async_track_state_change_event
from .accessories import TYPES, HomeAccessory, debounce
from .const import (
ATTR_OBSTRUCTION_DETECTED,
CHAR_CURRENT_DOOR_STATE,
CHAR_CURRENT_POSITION,
CHAR_CURRENT_TILT_ANGLE,
CHAR_HOLD_POSITION,
CHAR_OBSTRUCTION_DETECTED,
CHAR_POSITION_STATE,
CHAR_TARGET_DOOR_STATE,
CHAR_TARGET_POSITION,
CHAR_TARGET_TILT_ANGLE,
CONF_LINKED_OBSTRUCTION_SENSOR,
DEVICE_PRECISION_LEEWAY,
HK_DOOR_CLOSED,
HK_DOOR_CLOSING,
HK_DOOR_OPEN,
HK_DOOR_OPENING,
HK_POSITION_GOING_TO_MAX,
HK_POSITION_GOING_TO_MIN,
HK_POSITION_STOPPED,
SERV_GARAGE_DOOR_OPENER,
SERV_WINDOW,
SERV_WINDOW_COVERING,
)
DOOR_CURRENT_HASS_TO_HK = {
STATE_OPEN: HK_DOOR_OPEN,
STATE_CLOSED: HK_DOOR_CLOSED,
STATE_OPENING: HK_DOOR_OPENING,
STATE_CLOSING: HK_DOOR_CLOSING,
}
# HomeKit only has two states for
# Target Door State:
# 0: Open
# 1: Closed
# Opening is mapped to 0 since the target is Open
# Closing is mapped to 1 since the target is Closed
DOOR_TARGET_HASS_TO_HK = {
STATE_OPEN: HK_DOOR_OPEN,
STATE_CLOSED: HK_DOOR_CLOSED,
STATE_OPENING: HK_DOOR_OPEN,
STATE_CLOSING: HK_DOOR_CLOSED,
}
_LOGGER = logging.getLogger(__name__)
@TYPES.register("GarageDoorOpener")
class GarageDoorOpener(HomeAccessory):
"""Generate a Garage Door Opener accessory for a cover entity.
The cover entity must be in the 'garage' device class
and support no more than open, close, and stop.
"""
def __init__(self, *args):
"""Initialize a GarageDoorOpener accessory object."""
super().__init__(*args, category=CATEGORY_GARAGE_DOOR_OPENER)
state = self.hass.states.get(self.entity_id)
serv_garage_door = self.add_preload_service(SERV_GARAGE_DOOR_OPENER)
self.char_current_state = serv_garage_door.configure_char(
CHAR_CURRENT_DOOR_STATE, value=0
)
self.char_target_state = serv_garage_door.configure_char(
CHAR_TARGET_DOOR_STATE, value=0, setter_callback=self.set_state
)
self.char_obstruction_detected = serv_garage_door.configure_char(
CHAR_OBSTRUCTION_DETECTED, value=False
)
self.linked_obstruction_sensor = self.config.get(CONF_LINKED_OBSTRUCTION_SENSOR)
if self.linked_obstruction_sensor:
self._async_update_obstruction_state(
self.hass.states.get(self.linked_obstruction_sensor)
)
self.async_update_state(state)
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self.linked_obstruction_sensor:
async_track_state_change_event(
self.hass,
[self.linked_obstruction_sensor],
self._async_update_obstruction_event,
)
await super().run_handler()
@callback
def _async_update_obstruction_event(self, event):
"""Handle state change event listener callback."""
self._async_update_obstruction_state(event.data.get("new_state"))
@callback
def _async_update_obstruction_state(self, new_state):
"""Handle linked obstruction sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self.char_obstruction_detected.value == detected:
return
self.char_obstruction_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked obstruction %s sensor to %d",
self.entity_id,
self.linked_obstruction_sensor,
detected,
)
def set_state(self, value):
"""Change garage state if call came from HomeKit."""
_LOGGER.debug("%s: Set state to %d", self.entity_id, value)
params = {ATTR_ENTITY_ID: self.entity_id}
if value == HK_DOOR_OPEN:
if self.char_current_state.value != value:
self.char_current_state.set_value(HK_DOOR_OPENING)
self.call_service(DOMAIN, SERVICE_OPEN_COVER, params)
elif value == HK_DOOR_CLOSED:
if self.char_current_state.value != value:
self.char_current_state.set_value(HK_DOOR_CLOSING)
self.call_service(DOMAIN, SERVICE_CLOSE_COVER, params)
@callback
def async_update_state(self, new_state):
"""Update cover state after state changed."""
hass_state = new_state.state
target_door_state = DOOR_TARGET_HASS_TO_HK.get(hass_state)
current_door_state = DOOR_CURRENT_HASS_TO_HK.get(hass_state)
if ATTR_OBSTRUCTION_DETECTED in new_state.attributes:
obstruction_detected = (
new_state.attributes[ATTR_OBSTRUCTION_DETECTED] is True
)
if self.char_obstruction_detected.value != obstruction_detected:
self.char_obstruction_detected.set_value(obstruction_detected)
if (
target_door_state is not None
and self.char_target_state.value != target_door_state
):
self.char_target_state.set_value(target_door_state)
if (
current_door_state is not None
and self.char_current_state.value != current_door_state
):
self.char_current_state.set_value(current_door_state)
class OpeningDeviceBase(HomeAccessory):
"""Generate a base Window accessory for a cover entity.
This class is used for WindowCoveringBasic and
WindowCovering
"""
def __init__(self, *args, category, service):
"""Initialize a OpeningDeviceBase accessory object."""
super().__init__(*args, category=category)
state = self.hass.states.get(self.entity_id)
self.features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
self._supports_stop = self.features & SUPPORT_STOP
self._homekit_target_tilt = None
self.chars = []
if self._supports_stop:
self.chars.append(CHAR_HOLD_POSITION)
self._supports_tilt = self.features & SUPPORT_SET_TILT_POSITION
if self._supports_tilt:
self.chars.extend([CHAR_TARGET_TILT_ANGLE, CHAR_CURRENT_TILT_ANGLE])
self.serv_cover = self.add_preload_service(service, self.chars)
if self._supports_stop:
self.char_hold_position = self.serv_cover.configure_char(
CHAR_HOLD_POSITION, setter_callback=self.set_stop
)
if self._supports_tilt:
self.char_target_tilt = self.serv_cover.configure_char(
CHAR_TARGET_TILT_ANGLE, setter_callback=self.set_tilt
)
self.char_current_tilt = self.serv_cover.configure_char(
CHAR_CURRENT_TILT_ANGLE, value=0
)
def set_stop(self, value):
"""Stop the cover motion from HomeKit."""
if value != 1:
return
self.call_service(DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.entity_id})
@debounce
def set_tilt(self, value):
"""Set tilt to value if call came from HomeKit."""
self._homekit_target_tilt = value
_LOGGER.info("%s: Set tilt to %d", self.entity_id, value)
# HomeKit sends values between -90 and 90.
# We'll have to normalize to [0,100]
value = round((value + 90) / 180.0 * 100.0)
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_TILT_POSITION: value}
self.call_service(DOMAIN, SERVICE_SET_COVER_TILT_POSITION, params, value)
@callback
def async_update_state(self, new_state):
"""Update cover position and tilt after state changed."""
# update tilt
current_tilt = new_state.attributes.get(ATTR_CURRENT_TILT_POSITION)
if isinstance(current_tilt, (float, int)):
# HomeKit sends values between -90 and 90.
# We'll have to normalize to [0,100]
current_tilt = (current_tilt / 100.0 * 180.0) - 90.0
current_tilt = int(current_tilt)
if self.char_current_tilt.value != current_tilt:
self.char_current_tilt.set_value(current_tilt)
# We have to assume that the device has worse precision than HomeKit.
# If it reports back a state that is only _close_ to HK's requested
# state, we'll "fix" what HomeKit requested so that it won't appear
# out of sync.
            if self._homekit_target_tilt is None or (
                abs(current_tilt - self._homekit_target_tilt)
                < DEVICE_PRECISION_LEEWAY
            ):
if self.char_target_tilt.value != current_tilt:
self.char_target_tilt.set_value(current_tilt)
self._homekit_target_tilt = None
class OpeningDevice(OpeningDeviceBase, HomeAccessory):
"""Generate a Window/WindowOpening accessory for a cover entity.
The cover entity must support: set_cover_position.
"""
def __init__(self, *args, category, service):
"""Initialize a WindowCovering accessory object."""
super().__init__(*args, category=category, service=service)
state = self.hass.states.get(self.entity_id)
self._homekit_target = None
self.char_current_position = self.serv_cover.configure_char(
CHAR_CURRENT_POSITION, value=0
)
self.char_target_position = self.serv_cover.configure_char(
CHAR_TARGET_POSITION, value=0, setter_callback=self.move_cover
)
self.char_position_state = self.serv_cover.configure_char(
CHAR_POSITION_STATE, value=HK_POSITION_STOPPED
)
self.async_update_state(state)
@debounce
def move_cover(self, value):
"""Move cover to value if call came from HomeKit."""
_LOGGER.debug("%s: Set position to %d", self.entity_id, value)
self._homekit_target = value
params = {ATTR_ENTITY_ID: self.entity_id, ATTR_POSITION: value}
self.call_service(DOMAIN, SERVICE_SET_COVER_POSITION, params, value)
@callback
def async_update_state(self, new_state):
"""Update cover position and tilt after state changed."""
current_position = new_state.attributes.get(ATTR_CURRENT_POSITION)
if isinstance(current_position, (float, int)):
current_position = int(current_position)
if self.char_current_position.value != current_position:
self.char_current_position.set_value(current_position)
# We have to assume that the device has worse precision than HomeKit.
# If it reports back a state that is only _close_ to HK's requested
# state, we'll "fix" what HomeKit requested so that it won't appear
# out of sync.
if (
self._homekit_target is None
or abs(current_position - self._homekit_target)
< DEVICE_PRECISION_LEEWAY
):
if self.char_target_position.value != current_position:
self.char_target_position.set_value(current_position)
self._homekit_target = None
if new_state.state == STATE_OPENING:
if self.char_position_state.value != HK_POSITION_GOING_TO_MAX:
self.char_position_state.set_value(HK_POSITION_GOING_TO_MAX)
elif new_state.state == STATE_CLOSING:
if self.char_position_state.value != HK_POSITION_GOING_TO_MIN:
self.char_position_state.set_value(HK_POSITION_GOING_TO_MIN)
else:
if self.char_position_state.value != HK_POSITION_STOPPED:
self.char_position_state.set_value(HK_POSITION_STOPPED)
super().async_update_state(new_state)
@TYPES.register("Window")
class Window(OpeningDevice):
"""Generate a Window accessory for a cover entity with DEVICE_CLASS_WINDOW.
The entity must support: set_cover_position.
"""
def __init__(self, *args):
"""Initialize a Window accessory object."""
super().__init__(*args, category=CATEGORY_WINDOW, service=SERV_WINDOW)
@TYPES.register("WindowCovering")
class WindowCovering(OpeningDevice):
"""Generate a WindowCovering accessory for a cover entity.
The entity must support: set_cover_position.
"""
def __init__(self, *args):
"""Initialize a WindowCovering accessory object."""
super().__init__(
*args, category=CATEGORY_WINDOW_COVERING, service=SERV_WINDOW_COVERING
)
@TYPES.register("WindowCoveringBasic")
class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory):
"""Generate a Window accessory for a cover entity.
The cover entity must support: open_cover, close_cover,
stop_cover (optional).
"""
def __init__(self, *args):
"""Initialize a WindowCoveringBasic accessory object."""
super().__init__(
*args, category=CATEGORY_WINDOW_COVERING, service=SERV_WINDOW_COVERING
)
state = self.hass.states.get(self.entity_id)
self.char_current_position = self.serv_cover.configure_char(
CHAR_CURRENT_POSITION, value=0
)
self.char_target_position = self.serv_cover.configure_char(
CHAR_TARGET_POSITION, value=0, setter_callback=self.move_cover
)
self.char_position_state = self.serv_cover.configure_char(
CHAR_POSITION_STATE, value=HK_POSITION_STOPPED
)
self.async_update_state(state)
@debounce
def move_cover(self, value):
"""Move cover to value if call came from HomeKit."""
_LOGGER.debug("%s: Set position to %d", self.entity_id, value)
if self._supports_stop:
if value > 70:
service, position = (SERVICE_OPEN_COVER, 100)
elif value < 30:
service, position = (SERVICE_CLOSE_COVER, 0)
else:
service, position = (SERVICE_STOP_COVER, 50)
else:
if value >= 50:
service, position = (SERVICE_OPEN_COVER, 100)
else:
service, position = (SERVICE_CLOSE_COVER, 0)
params = {ATTR_ENTITY_ID: self.entity_id}
self.call_service(DOMAIN, service, params)
# Snap the current/target position to the expected final position.
self.char_current_position.set_value(position)
self.char_target_position.set_value(position)
@callback
def async_update_state(self, new_state):
"""Update cover position after state changed."""
position_mapping = {STATE_OPEN: 100, STATE_CLOSED: 0}
hk_position = position_mapping.get(new_state.state)
if hk_position is not None:
if self.char_current_position.value != hk_position:
self.char_current_position.set_value(hk_position)
if self.char_target_position.value != hk_position:
self.char_target_position.set_value(hk_position)
if new_state.state == STATE_OPENING:
if self.char_position_state.value != HK_POSITION_GOING_TO_MAX:
self.char_position_state.set_value(HK_POSITION_GOING_TO_MAX)
elif new_state.state == STATE_CLOSING:
if self.char_position_state.value != HK_POSITION_GOING_TO_MIN:
self.char_position_state.set_value(HK_POSITION_GOING_TO_MIN)
else:
if self.char_position_state.value != HK_POSITION_STOPPED:
self.char_position_state.set_value(HK_POSITION_STOPPED)
super().async_update_state(new_state)
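# A standalone sketch of the tilt normalization used above: HomeKit expresses
# tilt as an angle in [-90, 90] while Home Assistant uses a position in
# [0, 100]. Helper names are hypothetical, for illustration only.
def _hk_angle_to_hass_position(angle):
    """Map a HomeKit tilt angle to a Home Assistant tilt position."""
    return round((angle + 90) / 180.0 * 100.0)

def _hass_position_to_hk_angle(position):
    """Map a Home Assistant tilt position back to a HomeKit angle."""
    return int((position / 100.0 * 180.0) - 90.0)

# _hk_angle_to_hass_position(0) == 50 and _hass_position_to_hk_angle(50) == 0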
|
import os
import imp
try:
from StringIO import StringIO
except ImportError: # python 3
from io import StringIO
import sys
import tempfile
import unittest
from unittest import skipUnless
from lxml.builder import ElementMaker
from lxml.etree import Element, ElementTree, ParserError
from lxml.html import html_parser, XHTML_NAMESPACE
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
from urllib import pathname2url
except ImportError:
from urllib.request import pathname2url
def path2url(path):
return urlparse.urljoin(
'file:', pathname2url(path))
try:
import html5lib
except ImportError:
html5lib = None
class BogusModules(object):
# See PEP 302 for details on how this works
def __init__(self, mocks):
self.mocks = mocks
def find_module(self, fullname, path=None):
if fullname in self.mocks:
return self
return None
def load_module(self, fullname):
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__, mod.__loader__, mod.__path__ = "<dummy>", self, []
mod.__dict__.update(self.mocks[fullname])
return mod
# Fake just enough of html5lib so that html5parser.py is importable
# without errors.
sys.meta_path.append(BogusModules({
'html5lib': {
# A do-nothing HTMLParser class
'HTMLParser': type('HTMLParser', (object,), {
'__init__': lambda self, **kw: None,
}),
},
'html5lib.treebuilders': {
},
'html5lib.treebuilders.etree_lxml': {
'TreeBuilder': 'dummy treebuilder',
},
}))
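# For illustration: with the BogusModules hook on sys.meta_path, importing any
# mocked dotted name resolves to the stub module (a hypothetical snippet, not
# executed by the suite):
#
#     from html5lib.treebuilders import etree_lxml
#     etree_lxml.TreeBuilder  # == 'dummy treebuilder'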
class Test_HTMLParser(unittest.TestCase):
def make_one(self, **kwargs):
from lxml.html.html5parser import HTMLParser
return HTMLParser(**kwargs)
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration(self):
parser = self.make_one(strict=True)
tree = parser.parse(XHTML_TEST_DOCUMENT)
root = tree.getroot()
self.assertEqual(root.tag, xhtml_tag('html'))
class Test_XHTMLParser(unittest.TestCase):
def make_one(self, **kwargs):
from lxml.html.html5parser import XHTMLParser
return XHTMLParser(**kwargs)
@skipUnless(hasattr(html5lib, 'XHTMLParser'),
                'html5lib does not have XHTMLParser')
def test_integration(self):
        # XXX: This test is untested. (html5lib no longer has an XHTMLParser)
parser = self.make_one(strict=True)
tree = parser.parse(XHTML_TEST_DOCUMENT)
root = tree.getroot()
self.assertEqual(root.tag, xhtml_tag('html'))
class Test_document_fromstring(unittest.TestCase):
def call_it(self, *args, **kwargs):
from lxml.html.html5parser import document_fromstring
return document_fromstring(*args, **kwargs)
def test_basic(self):
parser = DummyParser(doc=DummyElementTree(root='dummy root'))
elem = self.call_it(b'dummy input', parser=parser)
self.assertEqual(elem, 'dummy root')
self.assertEqual(parser.parse_args, (b'dummy input',))
self.assertEqual(parser.parse_kwargs, {'useChardet': True})
def test_guess_charset_not_used_for_unicode(self):
parser = DummyParser()
elem = self.call_it(b''.decode('ascii'), parser=parser)
self.assertEqual(parser.parse_kwargs, {})
def test_guess_charset_arg_gets_passed_to_parser(self):
parser = DummyParser()
elem = self.call_it(b'', guess_charset='gc_arg', parser=parser)
self.assertEqual(parser.parse_kwargs, {'useChardet': 'gc_arg'})
def test_raises_type_error_on_nonstring_input(self):
not_a_string = None
self.assertRaises(TypeError, self.call_it, not_a_string)
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration(self):
elem = self.call_it(XHTML_TEST_DOCUMENT)
self.assertEqual(elem.tag, xhtml_tag('html'))
class Test_fragments_fromstring(unittest.TestCase):
def call_it(self, *args, **kwargs):
from lxml.html.html5parser import fragments_fromstring
return fragments_fromstring(*args, **kwargs)
def test_basic(self):
parser = DummyParser(fragments='fragments')
fragments = self.call_it(b'dummy input', parser=parser)
self.assertEqual(fragments, 'fragments')
self.assertEqual(parser.parseFragment_kwargs, {'useChardet': False})
def test_guess_charset_arg_gets_passed_to_parser(self):
parser = DummyParser()
elem = self.call_it(b'', guess_charset='gc_arg', parser=parser)
self.assertEqual(parser.parseFragment_kwargs, {'useChardet': 'gc_arg'})
def test_guess_charset_not_used_for_unicode(self):
parser = DummyParser()
elem = self.call_it(b''.decode('ascii'), parser=parser)
self.assertEqual(parser.parseFragment_kwargs, {})
def test_raises_type_error_on_nonstring_input(self):
not_a_string = None
self.assertRaises(TypeError, self.call_it, not_a_string)
def test_no_leading_text_strips_empty_leading_text(self):
parser = DummyParser(fragments=['', 'tail'])
fragments = self.call_it('', parser=parser, no_leading_text=True)
self.assertEqual(fragments, ['tail'])
def test_no_leading_text_raises_error_if_leading_text(self):
parser = DummyParser(fragments=['leading text', 'tail'])
self.assertRaises(ParserError, self.call_it,
'', parser=parser, no_leading_text=True)
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration(self):
fragments = self.call_it('a<b>c</b>')
self.assertEqual(len(fragments), 2)
self.assertEqual(fragments[0], 'a')
self.assertEqual(fragments[1].tag, xhtml_tag('b'))
class Test_fragment_fromstring(unittest.TestCase):
def call_it(self, *args, **kwargs):
from lxml.html.html5parser import fragment_fromstring
return fragment_fromstring(*args, **kwargs)
def test_basic(self):
element = DummyElement()
parser = DummyParser(fragments=[element])
self.assertEqual(self.call_it('html', parser=parser), element)
def test_raises_type_error_on_nonstring_input(self):
not_a_string = None
self.assertRaises(TypeError, self.call_it, not_a_string)
def test_create_parent(self):
parser = DummyParser(fragments=['head', Element('child')])
elem = self.call_it('html', parser=parser, create_parent='parent')
self.assertEqual(elem.tag, 'parent')
self.assertEqual(elem.text, 'head')
self.assertEqual(elem[0].tag, 'child')
def test_create_parent_default_type_no_ns(self):
parser = DummyParser(fragments=[], namespaceHTMLElements=False)
elem = self.call_it('html', parser=parser, create_parent=True)
self.assertEqual(elem.tag, 'div')
def test_raises_error_on_leading_text(self):
parser = DummyParser(fragments=['leading text'])
self.assertRaises(ParserError, self.call_it, 'html', parser=parser)
def test_raises_error_if_no_elements_found(self):
parser = DummyParser(fragments=[])
self.assertRaises(ParserError, self.call_it, 'html', parser=parser)
def test_raises_error_if_multiple_elements_found(self):
parser = DummyParser(fragments=[DummyElement(), DummyElement()])
self.assertRaises(ParserError, self.call_it, 'html', parser=parser)
def test_raises_error_if_tail(self):
parser = DummyParser(fragments=[DummyElement(tail='tail')])
self.assertRaises(ParserError, self.call_it, 'html', parser=parser)
class Test_fromstring(unittest.TestCase):
def call_it(self, *args, **kwargs):
from lxml.html.html5parser import fromstring
return fromstring(*args, **kwargs)
def test_returns_whole_doc_if_input_contains_html_tag(self):
parser = DummyParser(root='the doc')
self.assertEqual(self.call_it('<html></html>', parser=parser),
'the doc')
def test_returns_whole_doc_if_input_contains_doctype(self):
parser = DummyParser(root='the doc')
self.assertEqual(self.call_it('<!DOCTYPE html>', parser=parser),
'the doc')
def test_returns_whole_doc_if_input_is_encoded(self):
parser = DummyParser(root='the doc')
input = '<!DOCTYPE html>'.encode('ascii')
self.assertEqual(self.call_it(input, parser=parser),
'the doc')
def test_returns_whole_doc_if_head_not_empty(self, use_ns=True):
E = HTMLElementMaker(namespaceHTMLElements=use_ns)
root = E.html(E.head(E.title()))
parser = DummyParser(root=root)
self.assertEqual(self.call_it('', parser=parser), root)
def test_returns_whole_doc_if_head_not_empty_no_ns(self):
self.test_returns_whole_doc_if_head_not_empty(use_ns=False)
def test_returns_unwraps_body_if_single_element(self):
E = HTMLElementMaker()
elem = E.p('test')
root = E.html(E.head(), E.body(elem))
parser = DummyParser(root=root)
self.assertEqual(self.call_it('', parser=parser), elem)
def test_returns_body_if_has_text(self):
E = HTMLElementMaker()
elem = E.p('test')
body = E.body('text', elem)
root = E.html(E.head(), body)
parser = DummyParser(root=root)
self.assertEqual(self.call_it('', parser=parser), body)
def test_returns_body_if_single_element_has_tail(self):
E = HTMLElementMaker()
elem = E.p('test')
elem.tail = 'tail'
body = E.body(elem)
root = E.html(E.head(), body)
parser = DummyParser(root=root)
self.assertEqual(self.call_it('', parser=parser), body)
def test_wraps_multiple_fragments_in_div_no_ns(self):
E = HTMLElementMaker(namespaceHTMLElements=False)
parser = DummyParser(root=E.html(E.head(), E.body(E.h1(), E.p())),
namespaceHTMLElements=False)
elem = self.call_it('', parser=parser)
self.assertEqual(elem.tag, 'div')
def test_wraps_multiple_fragments_in_span_no_ns(self):
E = HTMLElementMaker(namespaceHTMLElements=False)
parser = DummyParser(root=E.html(E.head(), E.body('foo', E.a('link'))),
namespaceHTMLElements=False)
elem = self.call_it('', parser=parser)
self.assertEqual(elem.tag, 'span')
def test_raises_type_error_on_nonstring_input(self):
not_a_string = None
self.assertRaises(TypeError, self.call_it, not_a_string)
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration_whole_doc(self):
elem = self.call_it(XHTML_TEST_DOCUMENT)
self.assertEqual(elem.tag, xhtml_tag('html'))
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration_single_fragment(self):
elem = self.call_it('<p></p>')
self.assertEqual(elem.tag, xhtml_tag('p'))
class Test_parse(unittest.TestCase):
def call_it(self, *args, **kwargs):
from lxml.html.html5parser import parse
return parse(*args, **kwargs)
def make_temp_file(self, contents=''):
tmpfile = tempfile.NamedTemporaryFile(delete=False)
try:
tmpfile.write(contents.encode('utf8'))
tmpfile.flush()
tmpfile.seek(0)
return tmpfile
except Exception:
try:
tmpfile.close()
finally:
os.unlink(tmpfile.name)
raise
def test_with_file_object(self):
parser = DummyParser(doc='the doc')
fp = open(__file__)
try:
self.assertEqual(self.call_it(fp, parser=parser), 'the doc')
self.assertEqual(parser.parse_args, (fp,))
finally:
fp.close()
def test_with_file_name(self):
parser = DummyParser(doc='the doc')
tmpfile = self.make_temp_file('data')
try:
data = tmpfile.read()
finally:
tmpfile.close()
try:
self.assertEqual(self.call_it(tmpfile.name, parser=parser), 'the doc')
fp, = parser.parse_args
try:
self.assertEqual(fp.read(), data)
finally:
fp.close()
finally:
os.unlink(tmpfile.name)
def test_with_url(self):
parser = DummyParser(doc='the doc')
tmpfile = self.make_temp_file('content')
try:
data = tmpfile.read()
finally:
tmpfile.close()
try:
url = path2url(tmpfile.name)
self.assertEqual(self.call_it(url, parser=parser), 'the doc')
fp, = parser.parse_args
try:
self.assertEqual(fp.read(), data)
finally:
fp.close()
finally:
os.unlink(tmpfile.name)
@skipUnless(html5lib, 'html5lib is not installed')
def test_integration(self):
doc = self.call_it(StringIO(XHTML_TEST_DOCUMENT))
root = doc.getroot()
self.assertEqual(root.tag, xhtml_tag('html'))
def test_suite():
loader = unittest.TestLoader()
return loader.loadTestsFromModule(sys.modules[__name__])
class HTMLElementMaker(ElementMaker):
def __init__(self, namespaceHTMLElements=True):
initargs = dict(makeelement=html_parser.makeelement)
if namespaceHTMLElements:
initargs.update(namespace=XHTML_NAMESPACE,
nsmap={None: XHTML_NAMESPACE})
ElementMaker.__init__(self, **initargs)
class DummyParser(object):
def __init__(self, doc=None, root=None,
fragments=None, namespaceHTMLElements=True):
self.doc = doc or DummyElementTree(root=root)
self.fragments = fragments
self.tree = DummyTreeBuilder(namespaceHTMLElements)
def parse(self, *args, **kwargs):
self.parse_args = args
self.parse_kwargs = kwargs
return self.doc
def parseFragment(self, *args, **kwargs):
self.parseFragment_args = args
self.parseFragment_kwargs = kwargs
return self.fragments
class DummyTreeBuilder(object):
def __init__(self, namespaceHTMLElements=True):
self.namespaceHTMLElements = namespaceHTMLElements
class DummyElementTree(object):
def __init__(self, root):
self.root = root
def getroot(self):
return self.root
class DummyElement(object):
def __init__(self, tag='tag', tail=None):
self.tag = tag
self.tail = tail
def xhtml_tag(tag):
return '{%s}%s' % (XHTML_NAMESPACE, tag)
XHTML_TEST_DOCUMENT = '''
<!DOCTYPE html>
<html>
<head><title>TITLE</title></head>
<body></body>
</html>
'''
|
from absl import flags
from perfkitbenchmarker import errors
flags.DEFINE_enum('tcmalloc_version', 'off',
['off', 'gperftools', 'experimental'],
'the tcmalloc version to be preloaded')
flags.DEFINE_string(
'tcmalloc_experimental_url', '',
'the GCS URL for downloading the tcmalloc experimental lib')
flags.DEFINE_string(
'tcmalloc_settings',
'',
'tcmalloc settings modifying runtime behavior as environment variables '
'such as "ARG1=foo,ARG2=bar", see more: '
'https://gperftools.github.io/gperftools/tcmalloc.html',
)
FLAGS = flags.FLAGS
TEMP_BASHRC = '/tmp/bash.bashrc'
BASHRC = '/etc/bash.bashrc'
def AptInstall(vm):
"""Installs the tcmalloc shared library on a Debian VM."""
if FLAGS.tcmalloc_version == 'off':
return
# Write tcmalloc settings as environment variables
settings = FLAGS.tcmalloc_settings.split(',')
for setting in settings:
if setting:
vm.RemoteCommand('echo "export {setting}" | sudo tee -a {tmp}'.format(
setting=setting, # e.g. 'TCMALLOC_RELEASE_RATE=0.5'
tmp=TEMP_BASHRC,
))
if FLAGS.tcmalloc_version == 'gperftools':
vm.InstallPackages('libgoogle-perftools-dev')
libtcmalloc_paths = [
'/usr/lib/libtcmalloc.so.4', # before v2.7
'/usr/lib/x86_64-linux-gnu/libtcmalloc.so.4', # since v2.7
]
vm.RemoteCommand(
'test -f {path1} '
'&& echo "export LD_PRELOAD={path1}" | sudo tee -a {tmp} '
'|| echo "export LD_PRELOAD={path2}" | sudo tee -a {tmp} '.format(
path1=libtcmalloc_paths[0],
path2=libtcmalloc_paths[1],
tmp=TEMP_BASHRC,
))
if FLAGS.tcmalloc_version == 'experimental':
vm.Install('google_cloud_sdk')
local_path = '/tmp/libtcmalloc.so'
vm.RemoteCommand(
'gsutil cp {url} {path} '
'&& echo "export LD_PRELOAD={path}" | sudo tee -a {tmp}'.format(
url=FLAGS.tcmalloc_experimental_url,
path=local_path,
tmp=TEMP_BASHRC))
  # The environment variables must be exported before bashrc potentially
  # returns early when the shell is not interactive
vm.RemoteCommand('sudo cat {tmp} {bashrc} | sudo tee {bashrc}'.format(
tmp=TEMP_BASHRC,
bashrc=BASHRC,
))
# Verify that libtcmalloc is preloaded in new process
stdout, unused_stderr = vm.RemoteCommand('echo $LD_PRELOAD')
if 'libtcmalloc.so' not in stdout:
raise errors.Setup.InvalidSetupError(
        'Failed to install tcmalloc. LD_PRELOAD="{}"'.format(stdout))
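# A minimal sketch of how tcmalloc_settings is expanded into export lines by
# AptInstall above (standalone, for illustration; sample values hypothetical):
#
#     settings = 'TCMALLOC_RELEASE_RATE=0.5,TCMALLOC_SAMPLE_PARAMETER=524288'
#     exports = ['export {}'.format(s) for s in settings.split(',') if s]
#     # -> ['export TCMALLOC_RELEASE_RATE=0.5',
#     #     'export TCMALLOC_SAMPLE_PARAMETER=524288']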
|
from tabulate import tabulate
from flask_script import Manager
from lemur.reporting.service import fqdns, expiring_certificates
manager = Manager(usage="Reporting related tasks.")
@manager.option(
"-v",
"--validity",
dest="validity",
choices=["all", "expired", "valid"],
default="all",
help="Filter certificates by validity.",
)
@manager.option(
"-d",
"--deployment",
dest="deployment",
choices=["all", "deployed", "ready"],
default="all",
help="Filter by deployment status.",
)
def fqdn(deployment, validity):
"""
    Generate a report of the number of FQDNs covered by Lemur-issued certificates.
"""
headers = [
"FQDN",
"Root Domain",
"Issuer",
"Owner",
"Validity End",
"Total Length (days), Time Until Expiration (days)",
]
rows = []
for cert in fqdns(validity=validity, deployment=deployment).all():
for domain in cert.domains:
rows.append(
[
domain.name,
".".join(domain.name.split(".")[1:]),
cert.issuer,
cert.owner,
cert.not_after,
cert.validity_range.days,
cert.validity_remaining.days,
]
)
print(tabulate(rows, headers=headers))
@manager.option("-ttl", "--ttl", dest="ttl", default=30, help="Days til expiration.")
@manager.option(
"-d",
"--deployment",
dest="deployment",
choices=["all", "deployed", "ready"],
default="all",
help="Filter by deployment status.",
)
def expiring(ttl, deployment):
"""
Returns certificates expiring in the next n days.
"""
headers = ["Common Name", "Owner", "Issuer", "Validity End", "Endpoint"]
rows = []
for cert in expiring_certificates(ttl=ttl, deployment=deployment).all():
for endpoint in cert.endpoints:
rows.append(
[cert.cn, cert.owner, cert.issuer, cert.not_after, endpoint.dnsname]
)
print(tabulate(rows, headers=headers))
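# For illustration: the "Root Domain" column in the fqdn report above is
# derived by dropping the left-most DNS label (sample value hypothetical):
#
#     '.'.join('www.example.com'.split('.')[1:])  # -> 'example.com'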
|
from datetime import timedelta
import logging
from typing import List
from greeclimate.device import (
FanSpeed,
HorizontalSwing,
Mode,
TemperatureUnits,
VerticalSwing,
)
from greeclimate.exceptions import DeviceTimeoutError
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_AUTO,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
PRESET_SLEEP,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SWING_BOTH,
SWING_HORIZONTAL,
SWING_OFF,
SWING_VERTICAL,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_WHOLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from .const import (
DOMAIN,
FAN_MEDIUM_HIGH,
FAN_MEDIUM_LOW,
MAX_ERRORS,
MAX_TEMP,
MIN_TEMP,
TARGET_TEMPERATURE_STEP,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=60)
PARALLEL_UPDATES = 0
HVAC_MODES = {
Mode.Auto: HVAC_MODE_AUTO,
Mode.Cool: HVAC_MODE_COOL,
Mode.Dry: HVAC_MODE_DRY,
Mode.Fan: HVAC_MODE_FAN_ONLY,
Mode.Heat: HVAC_MODE_HEAT,
}
HVAC_MODES_REVERSE = {v: k for k, v in HVAC_MODES.items()}
PRESET_MODES = [
PRESET_ECO, # Power saving mode
    PRESET_AWAY,  # Steady heat, or 8C mode on Gree units
PRESET_BOOST, # Turbo mode
PRESET_NONE, # Default operating mode
PRESET_SLEEP, # Sleep mode
]
FAN_MODES = {
FanSpeed.Auto: FAN_AUTO,
FanSpeed.Low: FAN_LOW,
FanSpeed.MediumLow: FAN_MEDIUM_LOW,
FanSpeed.Medium: FAN_MEDIUM,
FanSpeed.MediumHigh: FAN_MEDIUM_HIGH,
FanSpeed.High: FAN_HIGH,
}
FAN_MODES_REVERSE = {v: k for k, v in FAN_MODES.items()}
SWING_MODES = [SWING_OFF, SWING_VERTICAL, SWING_HORIZONTAL, SWING_BOTH]
SUPPORTED_FEATURES = (
SUPPORT_TARGET_TEMPERATURE
| SUPPORT_FAN_MODE
| SUPPORT_PRESET_MODE
| SUPPORT_SWING_MODE
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Gree HVAC device from a config entry."""
async_add_entities(
GreeClimateEntity(device) for device in hass.data[DOMAIN].pop("pending")
)
class GreeClimateEntity(ClimateEntity):
"""Representation of a Gree HVAC device."""
def __init__(self, device):
"""Initialize the Gree device."""
self._device = device
self._name = device.device_info.name
self._mac = device.device_info.mac
self._available = False
self._error_count = 0
async def async_update(self):
"""Update the state of the device."""
try:
await self._device.update_state()
if not self._available and self._error_count:
_LOGGER.warning(
"Device is available: %s (%s)",
self._name,
str(self._device.device_info),
)
self._available = True
self._error_count = 0
except DeviceTimeoutError:
self._error_count += 1
            # Under normal conditions GREE units time out every once in a while
if self._available and self._error_count >= MAX_ERRORS:
self._available = False
_LOGGER.warning(
"Device is unavailable: %s (%s)",
self._name,
self._device.device_info,
)
except Exception: # pylint: disable=broad-except
            # Under normal conditions GREE units time out every once in a while
if self._available:
self._available = False
_LOGGER.exception(
"Unknown exception caught during update by gree device: %s (%s)",
self._name,
self._device.device_info,
)
async def _push_state_update(self):
"""Send state updates to the physical device."""
try:
return await self._device.push_state_update()
except DeviceTimeoutError:
self._error_count += 1
            # Under normal conditions GREE units time out every once in a while
if self._available and self._error_count >= MAX_ERRORS:
self._available = False
_LOGGER.warning(
"Device timedout while sending state update: %s (%s)",
self._name,
self._device.device_info,
)
except Exception: # pylint: disable=broad-except
            # Under normal conditions GREE units time out every once in a while
if self._available:
self._available = False
_LOGGER.exception(
"Unknown exception caught while sending state update to: %s (%s)",
self._name,
self._device.device_info,
)
@property
def available(self) -> bool:
"""Return if the device is available."""
return self._available
@property
def name(self) -> str:
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique id for the device."""
return self._mac
@property
def device_info(self):
"""Return device specific attributes."""
return {
"name": self._name,
"identifiers": {(DOMAIN, self._mac)},
"manufacturer": "Gree",
"connections": {(CONNECTION_NETWORK_MAC, self._mac)},
}
@property
def temperature_unit(self) -> str:
"""Return the temperature units for the device."""
units = self._device.temperature_units
return TEMP_CELSIUS if units == TemperatureUnits.C else TEMP_FAHRENHEIT
@property
def precision(self) -> float:
"""Return the precision of temperature for the device."""
return PRECISION_WHOLE
@property
def current_temperature(self) -> float:
"""Return the target temperature, gree devices don't provide internal temp."""
return self.target_temperature
@property
def target_temperature(self) -> float:
"""Return the target temperature for the device."""
return self._device.target_temperature
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
if ATTR_TEMPERATURE not in kwargs:
raise ValueError(f"Missing parameter {ATTR_TEMPERATURE}")
temperature = kwargs[ATTR_TEMPERATURE]
_LOGGER.debug(
"Setting temperature to %d for %s",
temperature,
self._name,
)
self._device.target_temperature = round(temperature)
await self._push_state_update()
@property
def min_temp(self) -> float:
"""Return the minimum temperature supported by the device."""
return MIN_TEMP
@property
def max_temp(self) -> float:
"""Return the maximum temperature supported by the device."""
return MAX_TEMP
@property
def target_temperature_step(self) -> float:
"""Return the target temperature step support by the device."""
return TARGET_TEMPERATURE_STEP
@property
def hvac_mode(self) -> str:
"""Return the current HVAC mode for the device."""
if not self._device.power:
return HVAC_MODE_OFF
return HVAC_MODES.get(self._device.mode)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode not in self.hvac_modes:
raise ValueError(f"Invalid hvac_mode: {hvac_mode}")
_LOGGER.debug(
"Setting HVAC mode to %s for device %s",
hvac_mode,
self._name,
)
if hvac_mode == HVAC_MODE_OFF:
self._device.power = False
await self._push_state_update()
return
if not self._device.power:
self._device.power = True
self._device.mode = HVAC_MODES_REVERSE.get(hvac_mode)
await self._push_state_update()
@property
def hvac_modes(self) -> List[str]:
"""Return the HVAC modes support by the device."""
modes = [*HVAC_MODES_REVERSE]
modes.append(HVAC_MODE_OFF)
return modes
@property
def preset_mode(self) -> str:
"""Return the current preset mode for the device."""
if self._device.steady_heat:
return PRESET_AWAY
if self._device.power_save:
return PRESET_ECO
if self._device.sleep:
return PRESET_SLEEP
if self._device.turbo:
return PRESET_BOOST
return PRESET_NONE
async def async_set_preset_mode(self, preset_mode):
"""Set new preset mode."""
if preset_mode not in PRESET_MODES:
raise ValueError(f"Invalid preset mode: {preset_mode}")
_LOGGER.debug(
"Setting preset mode to %s for device %s",
preset_mode,
self._name,
)
self._device.steady_heat = False
self._device.power_save = False
self._device.turbo = False
self._device.sleep = False
if preset_mode == PRESET_AWAY:
self._device.steady_heat = True
elif preset_mode == PRESET_ECO:
self._device.power_save = True
elif preset_mode == PRESET_BOOST:
self._device.turbo = True
elif preset_mode == PRESET_SLEEP:
self._device.sleep = True
await self._push_state_update()
@property
def preset_modes(self) -> List[str]:
"""Return the preset modes support by the device."""
return PRESET_MODES
@property
def fan_mode(self) -> str:
"""Return the current fan mode for the device."""
speed = self._device.fan_speed
return FAN_MODES.get(speed)
async def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
if fan_mode not in FAN_MODES_REVERSE:
raise ValueError(f"Invalid fan mode: {fan_mode}")
self._device.fan_speed = FAN_MODES_REVERSE.get(fan_mode)
await self._push_state_update()
@property
def fan_modes(self) -> List[str]:
"""Return the fan modes support by the device."""
return [*FAN_MODES_REVERSE]
@property
def swing_mode(self) -> str:
"""Return the current swing mode for the device."""
h_swing = self._device.horizontal_swing == HorizontalSwing.FullSwing
v_swing = self._device.vertical_swing == VerticalSwing.FullSwing
if h_swing and v_swing:
return SWING_BOTH
if h_swing:
return SWING_HORIZONTAL
if v_swing:
return SWING_VERTICAL
return SWING_OFF
async def async_set_swing_mode(self, swing_mode):
"""Set new target swing operation."""
if swing_mode not in SWING_MODES:
raise ValueError(f"Invalid swing mode: {swing_mode}")
_LOGGER.debug(
"Setting swing mode to %s for device %s",
swing_mode,
self._name,
)
self._device.horizontal_swing = HorizontalSwing.Center
self._device.vertical_swing = VerticalSwing.FixedMiddle
if swing_mode in (SWING_BOTH, SWING_HORIZONTAL):
self._device.horizontal_swing = HorizontalSwing.FullSwing
if swing_mode in (SWING_BOTH, SWING_VERTICAL):
self._device.vertical_swing = VerticalSwing.FullSwing
await self._push_state_update()
@property
def swing_modes(self) -> List[str]:
"""Return the swing modes currently supported for this device."""
return SWING_MODES
@property
def supported_features(self) -> int:
"""Return the supported features for this device integration."""
return SUPPORTED_FEATURES
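# A standalone sketch of the swing-mode mapping used above: the combination of
# full horizontal and full vertical swing determines a single Home Assistant
# swing mode. The helper name is hypothetical, for illustration only.
def _combine_swing(h_full, v_full):
    """Collapse two swing axes into one swing mode string."""
    if h_full and v_full:
        return SWING_BOTH
    if h_full:
        return SWING_HORIZONTAL
    if v_full:
        return SWING_VERTICAL
    return SWING_OFF

# _combine_swing(True, False) == SWING_HORIZONTAL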
|
import argparse
import pathlib
import sys
import os
import os.path
import shutil
import venv
import subprocess
from typing import List, Optional, Tuple
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
from scripts import utils, link_pyqt
REPO_ROOT = pathlib.Path(__file__).parent.parent
def parse_args() -> argparse.Namespace:
"""Parse commandline arguments."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--keep',
action='store_true',
help="Reuse an existing virtualenv.")
parser.add_argument('--venv-dir',
default='.venv',
help="Where to place the virtualenv.")
parser.add_argument('--pyqt-version',
choices=pyqt_versions(),
default='auto',
help="PyQt version to install.")
parser.add_argument('--pyqt-type',
choices=['binary', 'source', 'link', 'wheels', 'skip'],
default='binary',
help="How to install PyQt/Qt.")
parser.add_argument('--pyqt-wheels-dir',
default='wheels',
help="Directory to get PyQt wheels from.")
parser.add_argument('--virtualenv',
action='store_true',
help="Use virtualenv instead of venv.")
parser.add_argument('--asciidoc', help="Full path to python and "
"asciidoc.py. If not given, it's searched in PATH.",
nargs=2, required=False,
metavar=('PYTHON', 'ASCIIDOC'))
parser.add_argument('--dev',
action='store_true',
help="Also install dev/test dependencies.")
parser.add_argument('--skip-docs',
action='store_true',
help="Skip doc generation.")
parser.add_argument('--tox-error',
action='store_true',
help=argparse.SUPPRESS)
return parser.parse_args()
def pyqt_versions() -> List[str]:
"""Get a list of all available PyQt versions.
The list is based on the filenames of misc/requirements/ files.
"""
version_set = set()
requirements_dir = REPO_ROOT / 'misc' / 'requirements'
for req in requirements_dir.glob('requirements-pyqt-*.txt'):
version_set.add(req.stem.split('-')[-1])
versions = sorted(version_set,
key=lambda v: [int(c) for c in v.split('.')])
return versions + ['auto']
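# For illustration: the sort key above orders version strings numerically
# rather than lexicographically (sample values hypothetical):
#
#     sorted(['5.15', '5.9', '5.12'], key=lambda v: [int(c) for c in v.split('.')])
#     # -> ['5.9', '5.12', '5.15']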
def run_venv(venv_dir: pathlib.Path, executable, *args: str) -> None:
"""Run the given command inside the virtualenv."""
subdir = 'Scripts' if os.name == 'nt' else 'bin'
try:
subprocess.run([str(venv_dir / subdir / executable)] +
[str(arg) for arg in args], check=True)
except subprocess.CalledProcessError as e:
utils.print_error("Subprocess failed, exiting")
sys.exit(e.returncode)
def pip_install(venv_dir: pathlib.Path, *args: str) -> None:
"""Run a pip install command inside the virtualenv."""
arg_str = ' '.join(str(arg) for arg in args)
utils.print_col('venv$ pip install {}'.format(arg_str), 'blue')
run_venv(venv_dir, 'python', '-m', 'pip', 'install', *args)
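# Usage sketch (hypothetical arguments, for illustration only): calling
#     pip_install(venv_dir, '-U', 'pip')
# echoes "venv$ pip install -U pip" in blue and then runs
# <venv_dir>/bin/python -m pip install -U pip (Scripts/ instead of bin/ on
# Windows), exiting with the subprocess's return code on failure.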
def delete_old_venv(venv_dir: pathlib.Path) -> None:
"""Remove an existing virtualenv directory."""
if not venv_dir.exists():
return
markers = [
venv_dir / '.tox-config1', # tox
venv_dir / 'pyvenv.cfg', # venv
venv_dir / 'Scripts', # Windows
venv_dir / 'bin', # Linux
]
if not any(m.exists() for m in markers):
utils.print_error('{} does not look like a virtualenv, '
'cowardly refusing to remove it.'.format(venv_dir))
sys.exit(1)
utils.print_col('$ rm -r {}'.format(venv_dir), 'blue')
shutil.rmtree(str(venv_dir))
def create_venv(venv_dir: pathlib.Path, use_virtualenv: bool = False) -> None:
"""Create a new virtualenv."""
if use_virtualenv:
utils.print_col('$ python3 -m virtualenv {}'.format(venv_dir), 'blue')
try:
subprocess.run([sys.executable, '-m', 'virtualenv', venv_dir],
check=True)
except subprocess.CalledProcessError as e:
utils.print_error("virtualenv failed, exiting")
sys.exit(e.returncode)
else:
utils.print_col('$ python3 -m venv {}'.format(venv_dir), 'blue')
venv.create(str(venv_dir), with_pip=True)
def upgrade_seed_pkgs(venv_dir: pathlib.Path) -> None:
"""Upgrade initial seed packages inside a virtualenv.
This also makes sure that wheel is installed, which causes pip to use its
wheel cache for rebuilds.
"""
utils.print_title("Upgrading initial packages")
pip_install(venv_dir, '-U', 'pip')
pip_install(venv_dir, '-U', 'setuptools', 'wheel')
def requirements_file(name: str) -> pathlib.Path:
"""Get the filename of a requirements file."""
return (REPO_ROOT / 'misc' / 'requirements' /
'requirements-{}.txt'.format(name))
def pyqt_requirements_file(version: str) -> pathlib.Path:
"""Get the filename of the requirements file for the given PyQt version."""
name = 'pyqt' if version == 'auto' else 'pyqt-{}'.format(version)
return requirements_file(name)
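# Mapping sketch (the version string below is hypothetical):
#     pyqt_requirements_file('5.15')
# resolves to REPO_ROOT / 'misc' / 'requirements' / 'requirements-pyqt-5.15.txt',
# while 'auto' falls back to the generic requirements-pyqt.txt.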
def install_pyqt_binary(venv_dir: pathlib.Path, version: str) -> None:
"""Install PyQt from a binary wheel."""
utils.print_title("Installing PyQt from binary")
utils.print_col("No proprietary codec support will be available in "
"qutebrowser.", 'bold')
pip_install(venv_dir, '-r', pyqt_requirements_file(version),
'--only-binary', 'PyQt5,PyQtWebEngine')
def install_pyqt_source(venv_dir: pathlib.Path, version: str) -> None:
"""Install PyQt from the source tarball."""
utils.print_title("Installing PyQt from sources")
pip_install(venv_dir, '-r', pyqt_requirements_file(version),
'--verbose', '--no-binary', 'PyQt5,PyQtWebEngine')
def install_pyqt_link(venv_dir: pathlib.Path) -> None:
"""Install PyQt by linking a system-wide install."""
utils.print_title("Linking system-wide PyQt")
lib_path = link_pyqt.get_venv_lib_path(str(venv_dir))
link_pyqt.link_pyqt(sys.executable, lib_path)
def install_pyqt_wheels(venv_dir: pathlib.Path,
wheels_dir: pathlib.Path) -> None:
"""Install PyQt from the wheels/ directory."""
utils.print_title("Installing PyQt wheels")
wheels = [str(wheel) for wheel in wheels_dir.glob('*.whl')]
pip_install(venv_dir, *wheels)
def install_requirements(venv_dir: pathlib.Path) -> None:
"""Install qutebrowser's requirement.txt."""
utils.print_title("Installing other qutebrowser dependencies")
requirements = REPO_ROOT / 'requirements.txt'
pip_install(venv_dir, '-r', str(requirements))
def install_dev_requirements(venv_dir: pathlib.Path) -> None:
"""Install development dependencies."""
utils.print_title("Installing dev dependencies")
pip_install(venv_dir,
'-r', str(requirements_file('dev')),
                '-r', str(requirements_file('tests')))
def install_qutebrowser(venv_dir: pathlib.Path) -> None:
"""Install qutebrowser itself as an editable install."""
utils.print_title("Installing qutebrowser")
pip_install(venv_dir, '-e', str(REPO_ROOT))
def regenerate_docs(venv_dir: pathlib.Path,
asciidoc: Optional[Tuple[str, str]]):
"""Regenerate docs using asciidoc."""
utils.print_title("Generating documentation")
if asciidoc is not None:
        a2h_args = ['--asciidoc'] + list(asciidoc)
else:
a2h_args = []
script_path = pathlib.Path(__file__).parent / 'asciidoc2html.py'
utils.print_col('venv$ python3 scripts/asciidoc2html.py {}'
.format(' '.join(a2h_args)), 'blue')
run_venv(venv_dir, 'python', str(script_path), *a2h_args)
def main() -> None:
"""Install qutebrowser in a virtualenv.."""
args = parse_args()
venv_dir = pathlib.Path(args.venv_dir)
wheels_dir = pathlib.Path(args.pyqt_wheels_dir)
utils.change_cwd()
if (args.pyqt_version != 'auto' and
args.pyqt_type not in ['binary', 'source']):
utils.print_error('The --pyqt-version option is only available when '
'installing PyQt from binary or source')
sys.exit(1)
elif args.pyqt_wheels_dir != 'wheels' and args.pyqt_type != 'wheels':
utils.print_error('The --pyqt-wheels-dir option is only available '
'when installing PyQt from wheels')
sys.exit(1)
if not args.keep:
utils.print_title("Creating virtual environment")
delete_old_venv(venv_dir)
create_venv(venv_dir, use_virtualenv=args.virtualenv)
upgrade_seed_pkgs(venv_dir)
if args.pyqt_type == 'binary':
install_pyqt_binary(venv_dir, args.pyqt_version)
elif args.pyqt_type == 'source':
install_pyqt_source(venv_dir, args.pyqt_version)
elif args.pyqt_type == 'link':
install_pyqt_link(venv_dir)
elif args.pyqt_type == 'wheels':
install_pyqt_wheels(venv_dir, wheels_dir)
elif args.pyqt_type == 'skip':
pass
else:
raise AssertionError
install_requirements(venv_dir)
install_qutebrowser(venv_dir)
if args.dev:
install_dev_requirements(venv_dir)
if not args.skip_docs:
regenerate_docs(venv_dir, args.asciidoc)
if __name__ == '__main__':
main()
|
import logging
import os
logging.basicConfig()
logger = logging.getLogger("kalliope")
class FileManager:
"""
Class used to manage Files
"""
def __init__(self):
pass
@staticmethod
def create_directory(cache_path):
"""
Create a directory at the provided `cache_path`
:param cache_path: the path of the directory to create
:type cache_path: str
"""
if not os.path.exists(cache_path):
os.makedirs(cache_path)
@staticmethod
def write_in_file(file_path, content):
"""
Write contents into a file
        :param file_path: the path of the file to write to
:type file_path: str
:param content: the contents to write in the file
:type content: str or bytes
.. raises:: IOError
"""
try:
with open(file_path, "wb") as file_open:
                if isinstance(content, bytes):
file_open.write(content)
else:
file_open.write(content.encode())
return not FileManager.file_is_empty(file_path)
except IOError as e:
logger.error("I/O error(%s): %s", e.errno, e.strerror)
return False
@staticmethod
def file_is_empty(file_path):
"""
Check if the file is empty
:param file_path: the path of the file
:return: True if the file is empty, False otherwise
"""
return os.path.getsize(file_path) == 0
    @staticmethod
    def remove_file(file_path):
        """
        Remove the file located at the provided `file_path`
        :param file_path: the path of the file to remove
        :return: True if the file has been removed successfully, False otherwise
        """
        if os.path.exists(file_path):
            os.remove(file_path)
            return True
        return False
@staticmethod
def is_path_creatable(pathname):
"""
`True` if the current user has sufficient permissions to create the passed
pathname; `False` otherwise.
"""
dirname = os.path.dirname(pathname) or os.getcwd()
return os.access(dirname, os.W_OK)
@staticmethod
def is_path_exists_or_creatable(pathname):
"""
`True` if the passed pathname is a valid pathname for the current OS _and_
either currently exists or is hypothetically creatable; `False` otherwise.
        This function is guaranteed to _never_ raise exceptions: any OSError is
        caught, logged and reported as `False`.
"""
try:
return os.path.exists(pathname) or FileManager.is_path_creatable(pathname)
except OSError as e:
logger.error("OSError(%s): %s", e.errno, e.strerror)
return False
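# Minimal usage sketch (illustrative only; the paths and payload below are
# made up and not part of the kalliope module):
#     cache_dir = "/tmp/kalliope"
#     if FileManager.is_path_exists_or_creatable(cache_dir):
#         FileManager.create_directory(cache_dir)
#         FileManager.write_in_file(cache_dir + "/cache.wav", b"RIFF....")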
|
from datetime import datetime, timedelta
import logging
import time
from pytz import timezone
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_PORT,
CONF_SSL,
DATA_BYTES,
DATA_EXABYTES,
DATA_GIGABYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_PETABYTES,
DATA_TERABYTES,
DATA_YOTTABYTES,
DATA_ZETTABYTES,
HTTP_OK,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_DAYS = "days"
CONF_INCLUDED = "include_paths"
CONF_UNIT = "unit"
CONF_URLBASE = "urlbase"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 7878
DEFAULT_URLBASE = ""
DEFAULT_DAYS = "1"
DEFAULT_UNIT = DATA_GIGABYTES
SCAN_INTERVAL = timedelta(minutes=10)
SENSOR_TYPES = {
"diskspace": ["Disk Space", DATA_GIGABYTES, "mdi:harddisk"],
"upcoming": ["Upcoming", "Movies", "mdi:television"],
"wanted": ["Wanted", "Movies", "mdi:television"],
"movies": ["Movies", "Movies", "mdi:television"],
"commands": ["Commands", "Commands", "mdi:code-braces"],
"status": ["Status", "Status", "mdi:information"],
}
ENDPOINTS = {
"diskspace": "{0}://{1}:{2}/{3}api/diskspace",
"upcoming": "{0}://{1}:{2}/{3}api/calendar?start={4}&end={5}",
"movies": "{0}://{1}:{2}/{3}api/movie",
"commands": "{0}://{1}:{2}/{3}api/command",
"status": "{0}://{1}:{2}/{3}api/system/status",
}
# Support up to yottabytes for the future, why not
BYTE_SIZES = [
DATA_BYTES,
DATA_KILOBYTES,
DATA_MEGABYTES,
DATA_GIGABYTES,
DATA_TERABYTES,
DATA_PETABYTES,
DATA_EXABYTES,
DATA_ZETTABYTES,
DATA_YOTTABYTES,
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_DAYS, default=DEFAULT_DAYS): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_INCLUDED, default=[]): cv.ensure_list,
vol.Optional(CONF_MONITORED_CONDITIONS, default=["movies"]): vol.All(
cv.ensure_list, [vol.In(list(SENSOR_TYPES))]
),
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): vol.In(BYTE_SIZES),
vol.Optional(CONF_URLBASE, default=DEFAULT_URLBASE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Radarr platform."""
conditions = config.get(CONF_MONITORED_CONDITIONS)
add_entities([RadarrSensor(hass, config, sensor) for sensor in conditions], True)
class RadarrSensor(Entity):
"""Implementation of the Radarr sensor."""
def __init__(self, hass, conf, sensor_type):
"""Create Radarr entity."""
self.conf = conf
self.host = conf.get(CONF_HOST)
self.port = conf.get(CONF_PORT)
self.urlbase = conf.get(CONF_URLBASE)
if self.urlbase:
self.urlbase = f"{self.urlbase.strip('/')}/"
self.apikey = conf.get(CONF_API_KEY)
self.included = conf.get(CONF_INCLUDED)
self.days = int(conf.get(CONF_DAYS))
self.ssl = "https" if conf.get(CONF_SSL) else "http"
self._state = None
self.data = []
self._tz = timezone(str(hass.config.time_zone))
self.type = sensor_type
self._name = SENSOR_TYPES[self.type][0]
if self.type == "diskspace":
self._unit = conf.get(CONF_UNIT)
else:
self._unit = SENSOR_TYPES[self.type][1]
self._icon = SENSOR_TYPES[self.type][2]
self._available = False
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format("Radarr", self._name)
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def available(self):
"""Return sensor availability."""
return self._available
@property
def unit_of_measurement(self):
"""Return the unit of the sensor."""
return self._unit
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attributes = {}
if self.type == "upcoming":
for movie in self.data:
attributes[to_key(movie)] = get_release_date(movie)
elif self.type == "commands":
for command in self.data:
attributes[command["name"]] = command["state"]
elif self.type == "diskspace":
for data in self.data:
free_space = to_unit(data["freeSpace"], self._unit)
total_space = to_unit(data["totalSpace"], self._unit)
percentage_used = (
0 if total_space == 0 else free_space / total_space * 100
)
attributes[data["path"]] = "{:.2f}/{:.2f}{} ({:.2f}%)".format(
free_space, total_space, self._unit, percentage_used
)
elif self.type == "movies":
for movie in self.data:
attributes[to_key(movie)] = movie["downloaded"]
elif self.type == "status":
attributes = self.data
return attributes
@property
def icon(self):
"""Return the icon of the sensor."""
return self._icon
def update(self):
"""Update the data for the sensor."""
start = get_date(self._tz)
end = get_date(self._tz, self.days)
try:
res = requests.get(
ENDPOINTS[self.type].format(
self.ssl, self.host, self.port, self.urlbase, start, end
),
headers={"X-Api-Key": self.apikey},
timeout=10,
)
except OSError:
_LOGGER.warning("Host %s is not available", self.host)
self._available = False
self._state = None
return
if res.status_code == HTTP_OK:
if self.type in ["upcoming", "movies", "commands"]:
self.data = res.json()
self._state = len(self.data)
elif self.type == "diskspace":
# If included paths are not provided, use all data
                if not self.included:
self.data = res.json()
else:
# Filter to only show lists that are included
self.data = list(
filter(lambda x: x["path"] in self.included, res.json())
)
self._state = "{:.2f}".format(
to_unit(sum([data["freeSpace"] for data in self.data]), self._unit)
)
elif self.type == "status":
self.data = res.json()
self._state = self.data["version"]
self._available = True
def get_date(zone, offset=0):
    """Get the date in the given timezone, offset by a number of days."""
    day = 60 * 60 * 24
    return datetime.fromtimestamp(time.time() + day * offset, tz=zone).date()
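# Behaviour sketch (illustrative comment): get_date(tz) is today's date in tz
# and get_date(tz, 1) is tomorrow's; update() above uses the pair as the
# start/end of the upcoming-calendar query window.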
def get_release_date(data):
"""Get release date."""
date = data.get("physicalRelease")
if not date:
date = data.get("inCinemas")
return date
def to_key(data):
"""Get key."""
return "{} ({})".format(data["title"], data["year"])
def to_unit(value, unit):
"""Convert bytes to give unit."""
return value / 1024 ** BYTE_SIZES.index(unit)
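# Conversion sketch (illustrative doctest-style comment): BYTE_SIZES.index()
# gives the power of 1024 to divide by, so for example:
#     to_unit(1024 ** 3, DATA_GIGABYTES)  # -> 1.0
#     to_unit(1024 ** 2, DATA_MEGABYTES)  # -> 1.0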
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert 'instance' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
from scattertext.TermDocMatrix import TermDocMatrix
from scattertext.indexstore import IndexStore, IndexStoreFromList, IndexStoreFromDict
from scipy.sparse import csr_matrix
class DimensionMismatchException(Exception):
pass
class TermDocMatrixFromScikit(object):
'''
A factory class for building a TermDocMatrix from a scikit-learn-processed
dataset.
>>> from scattertext import TermDocMatrixFromScikit
>>> from sklearn.datasets import fetch_20newsgroups
>>> from sklearn.feature_extraction.text import CountVectorizer
>>> newsgroups_train = fetch_20newsgroups(subset='train', remove=('headers', 'footers', 'quotes'))
>>> count_vectorizer = CountVectorizer()
>>> X_counts = count_vectorizer.fit_transform(newsgroups_train.data)
>>> term_doc_mat = TermDocMatrixFromScikit(
... X = X_counts,
... y = newsgroups_train.target,
... feature_vocabulary=count_vectorizer.vocabulary_,
... category_names=newsgroups_train.target_names
... ).build()
>>> term_doc_mat.get_categories()[:2]
['alt.atheism', 'comp.graphics']
>>> term_doc_mat.get_term_freq_df().assign(score=term_doc_mat.get_scaled_f_scores('alt.atheism')).sort_values(by='score', ascending=False).index.tolist()[:5]
['atheism', 'atheists', 'islam', 'atheist', 'matthew']
'''
def __init__(self,
X,
y,
feature_vocabulary,
category_names,
unigram_frequency_path=None):
'''
Parameters
----------
		X: sparse integer matrix, giving term-document-matrix counts
y: list, integer categories
feature_vocabulary: dict (feat_name -> idx)
category_names: list of category names (len of y)
unigram_frequency_path: str (see TermDocMatrix)
'''
if X.shape != (len(y), len(feature_vocabulary)):
raise DimensionMismatchException('The shape of X is expected to be ' +
str((len(y), len(feature_vocabulary))) +
                                             ' but was actually: ' + str(X.shape))
self.X = X
self.y = y
self.feature_vocabulary = feature_vocabulary
self.category_names = category_names
self.unigram_frequency_path = unigram_frequency_path
def build(self):
'''
Returns
-------
TermDocMatrix
'''
constructor_kwargs = self._get_build_kwargs()
return TermDocMatrix(
**constructor_kwargs
)
def _get_build_kwargs(self):
constructor_kwargs = {'X': self.X,
'mX': csr_matrix((0, 0)),
'y': self.y,
'term_idx_store': IndexStoreFromDict.build(self.feature_vocabulary),
'metadata_idx_store': IndexStore(),
'category_idx_store': IndexStoreFromList.build(self.category_names),
'unigram_frequency_path': self.unigram_frequency_path}
return constructor_kwargs
|
import unittest
import openrazer_daemon.device
DEVICE1_SERIAL = 'XX000000'
DEVICE1_ID = '0000:0000:0000.0000'
DEVICE2_SERIAL = 'XX000001'
DEVICE2_ID = '0000:0000:0000.0001'
class DummyDBusObject(object):
def __init__(self):
self.notify_msg = None
self.parent = None
def notify(self, msg):
self.notify_msg = msg
def register_parent(self, parent):
self.parent = parent
def notify_parent(self, msg):
self.parent.notify_parent(msg)
class DummyParentObject(object):
def __init__(self):
self.notify_msg = None
self.notify_device = None
def notify(self, device_object, msg):
self.notify_device = device_object
self.notify_msg = msg
# TODO move device_object creation to setUp
class DeviceTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_device_properties(self):
dbus_object = DummyDBusObject()
device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
self.assertEqual(device_object.device_id, DEVICE1_ID)
self.assertEqual(device_object.serial, DEVICE1_SERIAL)
self.assertEqual(device_object.dbus, dbus_object)
def test_device_register_parent(self):
dbus_object = DummyDBusObject()
parent_object = DummyParentObject()
device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_object.register_parent(parent_object)
self.assertEqual(device_object._parent, parent_object)
def test_device_notify_child(self):
msg = ('test', 1)
dbus_object = DummyDBusObject()
device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_object.notify_child(msg)
self.assertEqual(dbus_object.notify_msg, msg)
def test_device_notify_parent(self):
msg = ('test', 1)
dbus_object = DummyDBusObject()
parent_object = DummyParentObject()
device_object = openrazer_daemon.device.Device(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_object.register_parent(parent_object)
device_object.notify_parent(msg)
self.assertEqual(parent_object.notify_msg, msg)
self.assertEqual(parent_object.notify_device, device_object)
class DeviceCollectionTest(unittest.TestCase):
def setUp(self):
self.device_collection = openrazer_daemon.device.DeviceCollection()
def test_add(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
self.assertIn(DEVICE1_ID, self.device_collection._id_map)
self.assertIn(DEVICE1_SERIAL, self.device_collection._serial_map)
device_obj_from_id = self.device_collection._id_map[DEVICE1_ID]
device_obj_from_serial = self.device_collection._serial_map[DEVICE1_SERIAL]
self.assertIs(device_obj_from_id, device_obj_from_serial)
def test_get(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_obj_by_id = self.device_collection[DEVICE1_ID]
device_obj_by_serial = self.device_collection[DEVICE1_SERIAL]
self.assertIs(device_obj_by_id, device_obj_by_serial)
    def test_invalid_get(self):
        with self.assertRaises(IndexError):
            self.device_collection.get('INVALID')
def test_contains(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
self.assertIn(DEVICE1_ID, self.device_collection)
self.assertIn(DEVICE1_SERIAL, self.device_collection)
def test_remove(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
self.assertIn(DEVICE1_ID, self.device_collection)
self.device_collection.remove(DEVICE1_ID)
self.assertNotIn(DEVICE1_ID, self.device_collection)
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
self.assertIn(DEVICE1_ID, self.device_collection)
self.device_collection.remove(DEVICE1_SERIAL)
self.assertNotIn(DEVICE1_SERIAL, self.device_collection)
def test_items(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
device_id, device_obj1 = list(self.device_collection.id_items())[0]
device_serial, device_obj2 = list(self.device_collection.serial_items())[0]
self.assertEqual(device_id, DEVICE1_ID)
self.assertEqual(device_serial, DEVICE1_SERIAL)
self.assertIs(device_obj1, device_obj2)
def test_iter(self):
dbus_object = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object)
devices = [self.device_collection.get(DEVICE1_ID)]
for device in self.device_collection:
devices.remove(device)
self.assertEqual(len(devices), 0)
def test_serials(self):
dbus_object1 = DummyDBusObject()
dbus_object2 = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)
serials = self.device_collection.serials()
self.assertIn(DEVICE1_SERIAL, serials)
self.assertIn(DEVICE2_SERIAL, serials)
def test_devices(self):
dbus_object1 = DummyDBusObject()
dbus_object2 = DummyDBusObject()
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)
device_list = self.device_collection.devices
available_dbus = [dbus_object1, dbus_object2]
for device in device_list:
available_dbus.remove(device.dbus)
# Ensure both dbus objects have been seen
self.assertEqual(len(available_dbus), 0)
def test_cross_device_notify(self):
dbus_object1 = DummyDBusObject()
dbus_object2 = DummyDBusObject()
msg = ('test', 1)
self.device_collection.add(DEVICE1_ID, DEVICE1_SERIAL, dbus_object1)
self.device_collection.add(DEVICE2_ID, DEVICE2_SERIAL, dbus_object2)
self.assertIs(dbus_object1.notify_msg, None)
self.assertIs(dbus_object2.notify_msg, None)
dbus_object1.notify_parent(msg)
# Ensure message gets sent to other devices and not itself
self.assertIs(dbus_object1.notify_msg, None)
self.assertIs(dbus_object2.notify_msg, msg)
|
import pytest
import pyvera as pv
from requests.exceptions import RequestException
from homeassistant.components.vera import (
CONF_CONTROLLER,
CONF_EXCLUDE,
CONF_LIGHTS,
DOMAIN,
)
from homeassistant.config_entries import ENTRY_STATE_NOT_LOADED
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, ConfigSource, new_simple_controller_config
from tests.async_mock import MagicMock
from tests.common import MockConfigEntry, mock_registry
async def test_init(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device1.device_id = 1
vera_device1.vera_device_id = vera_device1.device_id
vera_device1.name = "first_dev"
vera_device1.is_tripped = False
entity1_id = "binary_sensor.first_dev_1"
await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
config={CONF_CONTROLLER: "http://127.0.0.1:111"},
config_source=ConfigSource.CONFIG_FLOW,
serial_number="first_serial",
devices=(vera_device1,),
),
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry1 = entity_registry.async_get(entity1_id)
assert entry1
assert entry1.unique_id == "vera_first_serial_1"
async def test_init_from_file(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device1.device_id = 1
vera_device1.vera_device_id = vera_device1.device_id
vera_device1.name = "first_dev"
vera_device1.is_tripped = False
entity1_id = "binary_sensor.first_dev_1"
await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
config={CONF_CONTROLLER: "http://127.0.0.1:111"},
config_source=ConfigSource.FILE,
serial_number="first_serial",
devices=(vera_device1,),
),
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry1 = entity_registry.async_get(entity1_id)
assert entry1
assert entry1.unique_id == "vera_first_serial_1"
async def test_multiple_controllers_with_legacy_one(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test multiple controllers with one legacy controller."""
vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device1.device_id = 1
vera_device1.vera_device_id = vera_device1.device_id
vera_device1.name = "first_dev"
vera_device1.is_tripped = False
entity1_id = "binary_sensor.first_dev_1"
vera_device2 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device2.device_id = 2
vera_device2.vera_device_id = vera_device2.device_id
vera_device2.name = "second_dev"
vera_device2.is_tripped = False
entity2_id = "binary_sensor.second_dev_2"
# Add existing entity registry entry from previous setup.
entity_registry = mock_registry(hass)
entity_registry.async_get_or_create(
domain="switch", platform=DOMAIN, unique_id="12"
)
await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
config={CONF_CONTROLLER: "http://127.0.0.1:111"},
config_source=ConfigSource.FILE,
serial_number="first_serial",
devices=(vera_device1,),
),
)
await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
config={CONF_CONTROLLER: "http://127.0.0.1:222"},
config_source=ConfigSource.CONFIG_FLOW,
serial_number="second_serial",
devices=(vera_device2,),
),
)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry1 = entity_registry.async_get(entity1_id)
assert entry1
assert entry1.unique_id == "1"
entry2 = entity_registry.async_get(entity2_id)
assert entry2
assert entry2.unique_id == "vera_second_serial_2"
async def test_unload(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device1.device_id = 1
vera_device1.vera_device_id = vera_device1.device_id
vera_device1.name = "first_dev"
vera_device1.is_tripped = False
await vera_component_factory.configure_component(
hass=hass, controller_config=new_simple_controller_config()
)
entries = hass.config_entries.async_entries(DOMAIN)
assert entries
for config_entry in entries:
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert config_entry.state == ENTRY_STATE_NOT_LOADED
async def test_async_setup_entry_error(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
def setup_callback(controller: pv.VeraController) -> None:
controller.get_devices.side_effect = RequestException()
controller.get_scenes.side_effect = RequestException()
await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(setup_callback=setup_callback),
)
entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_CONTROLLER: "http://127.0.0.1"},
options={},
unique_id="12345",
)
entry.add_to_hass(hass)
assert not await hass.config_entries.async_setup(entry.entry_id)
@pytest.mark.parametrize(
["options"],
[
[{CONF_LIGHTS: [4, 10, 12, "AAA"], CONF_EXCLUDE: [1, "BBB"]}],
[{CONF_LIGHTS: ["4", "10", "12", "AAA"], CONF_EXCLUDE: ["1", "BBB"]}],
],
)
async def test_exclude_and_light_ids(
hass: HomeAssistant, vera_component_factory: ComponentFactory, options
) -> None:
"""Test device exclusion, marking switches as lights and fixing the data type."""
vera_device1 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device1.device_id = 1
vera_device1.vera_device_id = 1
vera_device1.name = "dev1"
vera_device1.is_tripped = False
entity_id1 = "binary_sensor.dev1_1"
vera_device2 = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device2.device_id = 2
vera_device2.vera_device_id = 2
vera_device2.name = "dev2"
vera_device2.is_tripped = False
entity_id2 = "binary_sensor.dev2_2"
vera_device3 = MagicMock(spec=pv.VeraSwitch) # type: pv.VeraSwitch
vera_device3.device_id = 3
vera_device3.vera_device_id = 3
vera_device3.name = "dev3"
vera_device3.category = pv.CATEGORY_SWITCH
vera_device3.is_switched_on = MagicMock(return_value=False)
entity_id3 = "switch.dev3_3"
vera_device4 = MagicMock(spec=pv.VeraSwitch) # type: pv.VeraSwitch
vera_device4.device_id = 4
vera_device4.vera_device_id = 4
vera_device4.name = "dev4"
vera_device4.category = pv.CATEGORY_SWITCH
vera_device4.is_switched_on = MagicMock(return_value=False)
entity_id4 = "light.dev4_4"
component_data = await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(
config_source=ConfigSource.CONFIG_ENTRY,
devices=(vera_device1, vera_device2, vera_device3, vera_device4),
config={**{CONF_CONTROLLER: "http://127.0.0.1:123"}, **options},
),
)
# Assert the entries were setup correctly.
config_entry = next(iter(hass.config_entries.async_entries(DOMAIN)))
assert config_entry.options[CONF_LIGHTS] == [4, 10, 12]
assert config_entry.options[CONF_EXCLUDE] == [1]
update_callback = component_data.controller_data[0].update_callback
update_callback(vera_device1)
update_callback(vera_device2)
update_callback(vera_device3)
update_callback(vera_device4)
await hass.async_block_till_done()
assert hass.states.get(entity_id1) is None
assert hass.states.get(entity_id2) is not None
assert hass.states.get(entity_id3) is not None
assert hass.states.get(entity_id4) is not None
|
import os
import pytest
import numpy as np
from mne.viz.backends.tests._utils import (skips_if_not_mayavi,
skips_if_not_pyvista)
from mne.viz.backends._utils import ALLOWED_QUIVER_MODES
@pytest.fixture
def backend_mocker():
"""Help to test set up 3d backend."""
from mne.viz.backends import renderer
del renderer.MNE_3D_BACKEND
yield
renderer.MNE_3D_BACKEND = None
@pytest.mark.parametrize('backend', [
pytest.param('mayavi', marks=skips_if_not_mayavi),
pytest.param('pyvista', marks=skips_if_not_pyvista),
pytest.param('foo', marks=pytest.mark.xfail(raises=ValueError)),
])
def test_backend_environment_setup(backend, backend_mocker, monkeypatch):
"""Test set up 3d backend based on env."""
monkeypatch.setenv("MNE_3D_BACKEND", backend)
assert os.environ['MNE_3D_BACKEND'] == backend # just double-check
# reload the renderer to check if the 3d backend selection by
# environment variable has been updated correctly
from mne.viz.backends import renderer
renderer.set_3d_backend(backend)
assert renderer.MNE_3D_BACKEND == backend
assert renderer.get_3d_backend() == backend
def test_3d_functions(renderer):
"""Test figure management functions."""
fig = renderer.create_3d_figure((300, 300))
# Mayavi actually needs something in the display to set the title
wrap_renderer = renderer.backend._Renderer(fig=fig)
wrap_renderer.sphere(np.array([0., 0., 0.]), 'w', 1.)
renderer.backend._check_3d_figure(fig)
renderer.backend._set_3d_view(figure=fig, azimuth=None, elevation=None,
focalpoint=(0., 0., 0.), distance=None)
renderer.backend._set_3d_title(figure=fig, title='foo')
renderer.backend._take_3d_screenshot(figure=fig)
renderer.backend._close_all()
def test_3d_backend(renderer):
"""Test default plot."""
# set data
win_size = (600, 600)
win_color = 'black'
tet_size = 1.0
tet_x = np.array([0, tet_size, 0, 0])
tet_y = np.array([0, 0, tet_size, 0])
tet_z = np.array([0, 0, 0, tet_size])
tet_indices = np.array([[0, 1, 2],
[0, 1, 3],
[0, 2, 3],
[1, 2, 3]])
tet_color = 'white'
sph_center = np.column_stack((tet_x, tet_y, tet_z))
sph_color = 'red'
sph_scale = tet_size / 3.0
ct_scalars = np.array([0.0, 0.0, 0.0, 1.0])
ct_levels = [0.2, 0.4, 0.6, 0.8]
ct_surface = {
"rr": sph_center,
"tris": tet_indices
}
qv_color = 'blue'
qv_scale = tet_size / 2.0
qv_center = np.array([np.mean((sph_center[va, :],
sph_center[vb, :],
sph_center[vc, :]), axis=0)
for (va, vb, vc) in tet_indices])
center = np.mean(qv_center, axis=0)
qv_dir = qv_center - center
qv_scale_mode = 'scalar'
qv_scalars = np.linspace(1.0, 2.0, 4)
txt_x = 0.0
txt_y = 0.0
txt_text = "renderer"
txt_size = 14
cam_distance = 5 * tet_size
# init scene
rend = renderer.create_3d_figure(
size=win_size,
bgcolor=win_color,
smooth_shading=True,
scene=False,
)
for interaction in ('terrain', 'trackball'):
rend.set_interaction(interaction)
# use mesh
mesh_data = rend.mesh(
x=tet_x,
y=tet_y,
z=tet_z,
triangles=tet_indices,
color=tet_color,
)
rend.remove_mesh(mesh_data)
# use contour
rend.contour(surface=ct_surface, scalars=ct_scalars,
contours=ct_levels, kind='line')
rend.contour(surface=ct_surface, scalars=ct_scalars,
contours=ct_levels, kind='tube')
# use sphere
rend.sphere(center=sph_center, color=sph_color,
scale=sph_scale, radius=1.0)
# use quiver3d
kwargs = dict(
x=qv_center[:, 0],
y=qv_center[:, 1],
z=qv_center[:, 2],
u=qv_dir[:, 0],
v=qv_dir[:, 1],
w=qv_dir[:, 2],
color=qv_color,
scale=qv_scale,
scale_mode=qv_scale_mode,
scalars=qv_scalars,
)
for mode in ALLOWED_QUIVER_MODES:
rend.quiver3d(mode=mode, **kwargs)
with pytest.raises(ValueError, match='Invalid value'):
rend.quiver3d(mode='foo', **kwargs)
# use tube
rend.tube(origin=np.array([[0, 0, 0]]),
destination=np.array([[0, 1, 0]]))
tube = rend.tube(origin=np.array([[1, 0, 0]]),
destination=np.array([[1, 1, 0]]),
scalars=np.array([[1.0, 1.0]]))
# scalar bar
rend.scalarbar(source=tube, title="Scalar Bar",
bgcolor=[1, 1, 1])
# use text
rend.text2d(x_window=txt_x, y_window=txt_y, text=txt_text,
size=txt_size, justification='right')
rend.text3d(x=0, y=0, z=0, text=txt_text, scale=1.0)
rend.set_camera(azimuth=180.0, elevation=90.0,
distance=cam_distance,
focalpoint=center)
rend.reset_camera()
rend.show()
def test_get_3d_backend(renderer):
"""Test get_3d_backend function call for side-effects."""
# Test twice to ensure the first call had no side-effect
orig_backend = renderer.MNE_3D_BACKEND
assert renderer.get_3d_backend() == orig_backend
assert renderer.get_3d_backend() == orig_backend
|
import diamond.collector
import telnetlib
import time
class MogilefsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(MogilefsCollector, self).get_default_config_help()
config_help.update({
'path': 'Metric path',
})
return config_help
def get_default_config(self):
config = super(MogilefsCollector, self).get_default_config()
config.update({
'path': 'mogilefs'
})
return config
def collect(self):
tn = telnetlib.Telnet("127.0.0.1", 7001, 3)
time.sleep(1)
tn.write("!stats" + '\r\n')
out = tn.read_until('.', 3)
myvars = {}
for line in out.splitlines()[:-1]:
name, var = line.partition(" ")[::2]
myvars[name.strip()] = long(var)
for key, value in myvars.iteritems():
# Set Metric Name
metric_name = key
# Set Metric Value
metric_value = value
# Publish Metric
self.publish(metric_name, metric_value)
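# Protocol sketch (hypothetical sample; the exact keys vary by mogilefsd
# setup): the parser above expects the "!stats" reply to be "name value"
# lines terminated by a lone "." line, e.g.:
#     uptime 86400
#     queries 1234
#     .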
|
from typing import Optional
from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from . import DOMAIN as TESLA_DOMAIN, TeslaDevice
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Tesla binary_sensors by config_entry."""
entities = [
TeslaDeviceEntity(
device,
hass.data[TESLA_DOMAIN][config_entry.entry_id]["coordinator"],
)
for device in hass.data[TESLA_DOMAIN][config_entry.entry_id]["devices"][
"devices_tracker"
]
]
async_add_entities(entities, True)
class TeslaDeviceEntity(TeslaDevice, TrackerEntity):
"""A class representing a Tesla device."""
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of the device."""
location = self.tesla_device.get_location()
return self.tesla_device.get_location().get("latitude") if location else None
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of the device."""
location = self.tesla_device.get_location()
return self.tesla_device.get_location().get("longitude") if location else None
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = super().device_state_attributes.copy()
location = self.tesla_device.get_location()
if location:
attr.update(
{
"trackr_id": self.unique_id,
"heading": location["heading"],
"speed": location["speed"],
}
)
return attr
|
from __future__ import unicode_literals
import os
from lib.fun.fun import cool
from core.CONF import build_conf_dic
from core.PATTERN import build_pattern_dic
from lib.data.data import paths, pyoptions
def plug_parser():
if pyoptions.args_plug[0] not in pyoptions.plug_range:
exit("[!] Choose plugin from ({0})".format(cool.fuchsia(",".join(pyoptions.plug_range))))
else:
pyoptions.plugins_operator.get(pyoptions.args_plug[0])(pyoptions.args_plug)
def conf_parser():
if pyoptions.args_conf == 'const':
if os.path.isfile(paths.buildconf_path):
build_conf_dic(source=paths.buildconf_path)
else:
paths.buildconf_path = pyoptions.args_conf
build_conf_dic(source=paths.buildconf_path)
def pattern_parser():
build_pattern_dic(source=pyoptions.args_pattern)
def tool_parser():
if len(pyoptions.args_tool) >= 1:
if pyoptions.args_tool[0] in pyoptions.tool_range:
pyoptions.tools_operator.get(pyoptions.args_tool[0])(pyoptions.args_tool)
else:
exit(pyoptions.CRLF + "[!] Choose tool from ({})".format(cool.fuchsia(" ".join(pyoptions.tool_range))))
else:
exit(pyoptions.CRLF + cool.red("[-] Please specified tool name"))
|
from datetime import datetime as dt
import pytz
from mock import patch
from pandas.util.testing import assert_frame_equal
from arctic.date._mktz import mktz
from tests.util import read_str_as_pandas, multi_index_df_from_arrs
pytest_plugins = ['arctic.fixtures.arctic']
ts1 = read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0""")
ts1_update = read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0
2012-11-09 17:06:11.040 | 3.5""")
LOCAL_TZ = mktz()
def test_new_ts_read_write(bitemporal_library):
bitemporal_library.update('spam', ts1)
assert_frame_equal(ts1, bitemporal_library.read('spam').data)
def test_read_ts_raw(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('UTC')))
assert_frame_equal(bitemporal_library.read('spam', raw=True).data.tz_convert(tz=mktz('UTC'), level=1), read_str_as_pandas(
""" sample_dt | observed_dt | near
2012-09-08 17:06:11.040 | 2015-05-01 | 1.0
2012-10-08 17:06:11.040 | 2015-05-01 | 2.0
2012-10-09 17:06:11.040 | 2015-05-01 | 2.5
2012-11-08 17:06:11.040 | 2015-05-01 | 3.0""", num_index=2).tz_localize(tz=mktz('UTC'), level=1))
def test_write_ts_with_column_name_same_as_observed_dt_ok(bitemporal_library):
ts1 = read_str_as_pandas(""" sample_dt | observed_dt | near
2012-09-08 17:06:11.040 | 2015-1-1 | 1.0
2012-10-08 17:06:11.040 | 2015-1-1 | 2.0
2012-10-09 17:06:11.040 | 2015-1-1 | 2.5
2012-11-08 17:06:11.040 | 2015-1-1 | 3.0""")
bitemporal_library.update('spam', ts1)
assert_frame_equal(ts1, bitemporal_library.read('spam').data)
def test_last_update(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 1, 1))
bitemporal_library.update('spam', ts1, as_of=dt(2015, 1, 2))
assert bitemporal_library.read('spam').last_updated == dt(2015, 1, 2, tzinfo=LOCAL_TZ)
def test_existing_ts_update_and_read(bitemporal_library):
bitemporal_library.update('spam', ts1)
bitemporal_library.update('spam', ts1_update[-1:])
assert_frame_equal(ts1_update, bitemporal_library.read('spam').data)
def test_existing_ts_update_existing_data_and_read(bitemporal_library):
bitemporal_library.update('spam', ts1)
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-09 17:06:11.040 | 4.2"""))
expected_ts = read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 4.2
2012-11-08 17:06:11.040 | 3.0""")
assert_frame_equal(expected_ts, bitemporal_library.read('spam').data)
def test_read_ts_with_historical_update(bitemporal_library):
with patch('arctic.store.bitemporal_store.dt') as mock_dt:
mock_dt.now.return_value = dt(2015, 5, 1)
mock_dt.side_effect = lambda *args, **kwargs: dt(*args, **kwargs)
bitemporal_library.update('spam', ts1)
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-09 17:06:11.040 | 4.2"""),
as_of=dt(2015, 5, 2))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-09 17:06:11.040 | 6.6"""),
as_of=dt(2015, 5, 3))
assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 2, 10, tzinfo=pytz.timezone("Europe/London"))).data, read_str_as_pandas(
"""sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 4.2
2012-11-08 17:06:11.040 | 3.0"""))
assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 6.6
2012-11-08 17:06:11.040 | 3.0"""))
assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 1, 10, tzinfo=pytz.timezone("Europe/London"))).data, ts1)
def test_read_ts_with_historical_update_and_new_row(bitemporal_library):
with patch('arctic.store.bitemporal_store.dt') as mock_dt:
mock_dt.now.return_value = dt(2015, 5, 1)
mock_dt.side_effect = lambda *args, **kwargs: dt(*args, **kwargs)
bitemporal_library.update('spam', ts1)
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-09 17:06:11.040 | 4.2
2012-12-01 17:06:11.040 | 100"""),
as_of=dt(2015, 5, 2))
assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 4.2
2012-11-08 17:06:11.040 | 3.0
2012-12-01 17:06:11.040 | 100"""))
assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 1, 10)).data, ts1)
def test_insert_new_rows_in_middle_remains_sorted(bitemporal_library):
bitemporal_library.update('spam', ts1)
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-09 12:00:00.000 | 30.0
2012-12-01 17:06:11.040 | 100"""))
assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 12:00:00.000 | 30.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0
2012-12-01 17:06:11.040 | 100"""))
def test_insert_versions_inbetween_works_ok(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-12-01 17:06:11.040 | 100"""),
as_of=dt(2015, 5, 10))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-12-01 17:06:11.040 | 25"""),
as_of=dt(2015, 5, 8))
assert_frame_equal(bitemporal_library.read('spam').data, read_str_as_pandas(""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0
2012-12-01 17:06:11.040 | 100"""))
assert_frame_equal(bitemporal_library.read('spam', as_of=dt(2015, 5, 9)).data, read_str_as_pandas(
""" sample_dt | near
2012-09-08 17:06:11.040 | 1.0
2012-10-08 17:06:11.040 | 2.0
2012-10-09 17:06:11.040 | 2.5
2012-11-08 17:06:11.040 | 3.0
2012-12-01 17:06:11.040 | 25"""))
def test_read_ts_raw_all_version_ok(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('UTC')))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-12-01 17:06:11.040 | 25"""),
as_of=dt(2015, 5, 5, tzinfo=mktz('UTC')))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-11-08 17:06:11.040 | 42"""),
as_of=dt(2015, 5, 3, tzinfo=mktz('UTC')))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-10-08 17:06:11.040 | 42
2013-01-01 17:06:11.040 | 100"""),
as_of=dt(2015, 5, 10, tzinfo=mktz('UTC')))
assert_frame_equal(bitemporal_library.read('spam', raw=True).data.tz_localize(tz=None, level=1), read_str_as_pandas(
""" sample_dt | observed_dt | near
2012-09-08 17:06:11.040 | 2015-05-01 | 1.0
2012-10-08 17:06:11.040 | 2015-05-01 | 2.0
2012-10-08 17:06:11.040 | 2015-05-10 | 42
2012-10-09 17:06:11.040 | 2015-05-01 | 2.5
2012-11-08 17:06:11.040 | 2015-05-01 | 3.0
2012-11-08 17:06:11.040 | 2015-05-03 | 42
2012-12-01 17:06:11.040 | 2015-05-05 | 25
2013-01-01 17:06:11.040 | 2015-05-10 | 100""", num_index=2))
def test_bitemporal_store_saves_as_of_with_timezone(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1))
df = bitemporal_library.read('spam', raw=True).data
assert all([x[1] == dt(2015, 5, 1, tzinfo=LOCAL_TZ) for x in df.index])
def test_bitemporal_store_read_as_of_timezone(bitemporal_library):
bitemporal_library.update('spam', ts1, as_of=dt(2015, 5, 1, tzinfo=mktz('Europe/London')))
bitemporal_library.update('spam', read_str_as_pandas(""" sample_dt | near
2012-12-01 17:06:11.040 | 25"""),
as_of=dt(2015, 5, 2, tzinfo=mktz('Europe/London')))
df = bitemporal_library.read('spam', as_of=dt(2015, 5, 2, tzinfo=mktz('Asia/Hong_Kong'))).data
assert_frame_equal(df, ts1)
def test_multi_index_ts_read_write(bitemporal_library):
ts = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2'),
index_arrs=[
['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],
['SPAM Index'] * 4
],
data_dict={'near': [1.0, 2.0, 2.5, 3.0]}
)
bitemporal_library.update('spam', ts)
assert_frame_equal(ts, bitemporal_library.read('spam').data)
def test_multi_index_ts_read_raw(bitemporal_library):
ts = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2'),
index_arrs=[
['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],
['SPAM Index'] * 4
],
data_dict={'near': [1.0, 2.0, 2.5, 3.0]}
)
expected_ts = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2', 'observed_dt'),
index_arrs=[
['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'],
['SPAM Index'] * 4,
['2015-01-01'] * 4,
],
data_dict={'near': [1.0, 2.0, 2.5, 3.0]}
)
bitemporal_library.update('spam', ts, as_of=dt(2015, 1, 1))
assert_frame_equal(expected_ts.tz_localize(tz=LOCAL_TZ, level=2), bitemporal_library.read('spam', raw=True).data)
def test_multi_index_update(bitemporal_library):
sample_timerange = list(sorted(['2012-09-08 17:06:11.040', '2012-10-08 17:06:11.040', '2012-10-09 17:06:11.040', '2012-11-08 17:06:11.040'] * 2))
ts = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2'),
index_arrs=[
sample_timerange,
['SPAM Index', 'EGG Index'] * 4
],
data_dict={'near': [1.0, 1.1, 2.0, 2.1, 2.5, 2.6, 3.0, 3.1]}
)
ts2 = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2'),
index_arrs=[
['2012-09-08 17:06:11.040', '2012-09-08 17:06:11.040', '2012-12-08 17:06:11.040'],
['SPAM Index', 'EGG Index', 'SPAM Index'],
],
data_dict={'near': [1.2, 1.6, 4.0]}
)
expected_ts = multi_index_df_from_arrs(
index_headers=('index 1', 'index 2'),
index_arrs=[
sample_timerange + ['2012-12-08 17:06:11.040'],
['EGG Index', 'SPAM Index'] * 4 + ['SPAM Index']
],
data_dict={'near': [1.6, 1.2, 2.1, 2.0, 2.6, 2.5, 3.1, 3.0, 4.0]}
)
bitemporal_library.update('spam', ts, as_of=dt(2015, 1, 1))
bitemporal_library.update('spam', ts2, as_of=dt(2015, 1, 2))
assert_frame_equal(expected_ts, bitemporal_library.read('spam').data)
assert bitemporal_library.read('spam').last_updated == dt(2015, 1, 2, tzinfo=LOCAL_TZ)
|
from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
SERVICE_LOCK,
SERVICE_UNLOCK,
STATE_LOCKED,
)
from .common import setup_platform
from tests.async_mock import patch
DEVICE_ID = "lock.test_lock"
async def test_entity_registry(hass):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, LOCK_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get(DEVICE_ID)
assert entry.unique_id == "51cab3b545d2o34ed7fz02731bda5324"
async def test_attributes(hass):
"""Test the lock attributes are correct."""
await setup_platform(hass, LOCK_DOMAIN)
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_LOCKED
assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000004"
assert not state.attributes.get("battery_low")
assert not state.attributes.get("no_response")
assert state.attributes.get("device_type") == "Door Lock"
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Test Lock"
async def test_lock(hass):
"""Test the lock can be locked."""
await setup_platform(hass, LOCK_DOMAIN)
with patch("abodepy.AbodeLock.lock") as mock_lock:
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
)
await hass.async_block_till_done()
mock_lock.assert_called_once()
async def test_unlock(hass):
"""Test the lock can be unlocked."""
await setup_platform(hass, LOCK_DOMAIN)
with patch("abodepy.AbodeLock.unlock") as mock_unlock:
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
)
await hass.async_block_till_done()
mock_unlock.assert_called_once()
|
import voluptuous as vol
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ADS_VAR,
CONF_ADS_VAR_POSITION,
DATA_ADS,
STATE_KEY_POSITION,
STATE_KEY_STATE,
AdsEntity,
)
DEFAULT_NAME = "ADS Cover"
CONF_ADS_VAR_SET_POS = "adsvar_set_position"
CONF_ADS_VAR_OPEN = "adsvar_open"
CONF_ADS_VAR_CLOSE = "adsvar_close"
CONF_ADS_VAR_STOP = "adsvar_stop"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,
vol.Optional(CONF_ADS_VAR_OPEN): cv.string,
vol.Optional(CONF_ADS_VAR_STOP): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the cover platform for ADS."""
ads_hub = hass.data[DATA_ADS]
ads_var_is_closed = config.get(CONF_ADS_VAR)
ads_var_position = config.get(CONF_ADS_VAR_POSITION)
ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS)
ads_var_open = config.get(CONF_ADS_VAR_OPEN)
ads_var_close = config.get(CONF_ADS_VAR_CLOSE)
ads_var_stop = config.get(CONF_ADS_VAR_STOP)
name = config[CONF_NAME]
device_class = config.get(CONF_DEVICE_CLASS)
add_entities(
[
AdsCover(
ads_hub,
ads_var_is_closed,
ads_var_position,
ads_var_pos_set,
ads_var_open,
ads_var_close,
ads_var_stop,
name,
device_class,
)
]
)
class AdsCover(AdsEntity, CoverEntity):
"""Representation of ADS cover."""
def __init__(
self,
ads_hub,
ads_var_is_closed,
ads_var_position,
ads_var_pos_set,
ads_var_open,
ads_var_close,
ads_var_stop,
name,
device_class,
):
"""Initialize AdsCover entity."""
super().__init__(ads_hub, name, ads_var_is_closed)
if self._ads_var is None:
if ads_var_position is not None:
self._unique_id = ads_var_position
elif ads_var_pos_set is not None:
self._unique_id = ads_var_pos_set
elif ads_var_open is not None:
self._unique_id = ads_var_open
self._state_dict[STATE_KEY_POSITION] = None
self._ads_var_position = ads_var_position
self._ads_var_pos_set = ads_var_pos_set
self._ads_var_open = ads_var_open
self._ads_var_close = ads_var_close
self._ads_var_stop = ads_var_stop
self._device_class = device_class
async def async_added_to_hass(self):
"""Register device notification."""
if self._ads_var is not None:
await self.async_initialize_device(
self._ads_var, self._ads_hub.PLCTYPE_BOOL
)
if self._ads_var_position is not None:
await self.async_initialize_device(
self._ads_var_position, self._ads_hub.PLCTYPE_BYTE, STATE_KEY_POSITION
)
@property
def device_class(self):
"""Return the class of this cover."""
return self._device_class
@property
def is_closed(self):
"""Return if the cover is closed."""
if self._ads_var is not None:
return self._state_dict[STATE_KEY_STATE]
if self._ads_var_position is not None:
return self._state_dict[STATE_KEY_POSITION] == 0
return None
@property
def current_cover_position(self):
"""Return current position of cover."""
return self._state_dict[STATE_KEY_POSITION]
@property
def supported_features(self):
"""Flag supported features."""
supported_features = SUPPORT_OPEN | SUPPORT_CLOSE
if self._ads_var_stop is not None:
supported_features |= SUPPORT_STOP
if self._ads_var_pos_set is not None:
supported_features |= SUPPORT_SET_POSITION
return supported_features
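    # Bitmask sketch (illustrative comment): with both optional variables
    # configured, this evaluates to
    # SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP | SUPPORT_SET_POSITION.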
def stop_cover(self, **kwargs):
"""Fire the stop action."""
if self._ads_var_stop:
self._ads_hub.write_by_name(
self._ads_var_stop, True, self._ads_hub.PLCTYPE_BOOL
)
def set_cover_position(self, **kwargs):
"""Set cover position."""
position = kwargs[ATTR_POSITION]
if self._ads_var_pos_set is not None:
self._ads_hub.write_by_name(
self._ads_var_pos_set, position, self._ads_hub.PLCTYPE_BYTE
)
def open_cover(self, **kwargs):
"""Move the cover up."""
if self._ads_var_open is not None:
self._ads_hub.write_by_name(
self._ads_var_open, True, self._ads_hub.PLCTYPE_BOOL
)
elif self._ads_var_pos_set is not None:
self.set_cover_position(position=100)
def close_cover(self, **kwargs):
"""Move the cover down."""
if self._ads_var_close is not None:
self._ads_hub.write_by_name(
self._ads_var_close, True, self._ads_hub.PLCTYPE_BOOL
)
elif self._ads_var_pos_set is not None:
self.set_cover_position(position=0)
@property
def available(self):
"""Return False if state has not been updated yet."""
if self._ads_var is not None or self._ads_var_position is not None:
return (
self._state_dict[STATE_KEY_STATE] is not None
or self._state_dict[STATE_KEY_POSITION] is not None
)
return True
|
import unittest
import mock
from docker_registry.lib.index import db
class TestVersion(unittest.TestCase):
def setUp(self):
self.version = db.Version()
def test_repr(self):
self.assertEqual(type(repr(self.version)), str)
class TestRepository(unittest.TestCase):
def setUp(self):
self.repository = db.Repository()
def test_repr(self):
self.assertEqual(type(repr(self.repository)), str)
class TestSQLAlchemyIndex(unittest.TestCase):
def setUp(self):
self.index = db.SQLAlchemyIndex(database="sqlite://")
@mock.patch('sqlalchemy.engine.Engine.has_table', return_value=True)
@mock.patch('sqlalchemy.orm.query.Query.first')
def test_setup_database(self, first, has_table):
        first.side_effect = db.sqlalchemy.exc.OperationalError
self.assertRaises(
NotImplementedError, db.SQLAlchemyIndex, database="sqlite://")
|
revision = "1db4f82bc780"
down_revision = "3adfdd6598df"
from alembic import op
from flask import current_app
from logging import Formatter, FileHandler, getLogger
log = getLogger(__name__)
handler = FileHandler(current_app.config.get("LOG_UPGRADE_FILE", "db_upgrade.log"))
handler.setFormatter(
Formatter(
"%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]"
)
)
handler.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.setLevel(current_app.config.get("LOG_LEVEL", "DEBUG"))
log.addHandler(handler)
def upgrade():
connection = op.get_bind()
result = connection.execute(
"""\
UPDATE certificates
SET rotation_policy_id=(SELECT id FROM rotation_policies WHERE name='default')
WHERE rotation_policy_id IS NULL
RETURNING id
"""
)
log.info("Filled rotation_policy for %d certificates" % result.rowcount)
def downgrade():
pass
|
import pytest
from homeassistant.components.homekit.const import ATTR_VALUE
from homeassistant.components.homekit.type_locks import Lock
from homeassistant.components.lock import DOMAIN
from homeassistant.const import (
ATTR_CODE,
ATTR_ENTITY_ID,
STATE_LOCKED,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from tests.common import async_mock_service
async def test_lock_unlock(hass, hk_driver, events):
"""Test if accessory and HA are updated accordingly."""
code = "1234"
config = {ATTR_CODE: code}
entity_id = "lock.kitchen_door"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Lock(hass, hk_driver, "Lock", entity_id, 2, config)
await acc.run_handler()
assert acc.aid == 2
assert acc.category == 6 # DoorLock
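    # HomeKit lock state values: 0 = unsecured, 1 = secured, 3 = unknown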
assert acc.char_current_state.value == 3
assert acc.char_target_state.value == 1
hass.states.async_set(entity_id, STATE_LOCKED)
await hass.async_block_till_done()
assert acc.char_current_state.value == 1
assert acc.char_target_state.value == 1
hass.states.async_set(entity_id, STATE_UNLOCKED)
await hass.async_block_till_done()
assert acc.char_current_state.value == 0
assert acc.char_target_state.value == 0
hass.states.async_set(entity_id, STATE_UNKNOWN)
await hass.async_block_till_done()
assert acc.char_current_state.value == 3
assert acc.char_target_state.value == 0
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert acc.char_current_state.value == 3
assert acc.char_target_state.value == 0
# Set from HomeKit
call_lock = async_mock_service(hass, DOMAIN, "lock")
call_unlock = async_mock_service(hass, DOMAIN, "unlock")
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)
await hass.async_block_till_done()
assert call_lock
assert call_lock[0].data[ATTR_ENTITY_ID] == entity_id
assert call_lock[0].data[ATTR_CODE] == code
assert acc.char_target_state.value == 1
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 0)
await hass.async_block_till_done()
assert call_unlock
assert call_unlock[0].data[ATTR_ENTITY_ID] == entity_id
assert call_unlock[0].data[ATTR_CODE] == code
assert acc.char_target_state.value == 0
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
@pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}])
async def test_no_code(hass, hk_driver, config, events):
"""Test accessory if lock doesn't require a code."""
entity_id = "lock.kitchen_door"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = Lock(hass, hk_driver, "Lock", entity_id, 2, config)
# Set from HomeKit
call_lock = async_mock_service(hass, DOMAIN, "lock")
await hass.async_add_executor_job(acc.char_target_state.client_update_value, 1)
await hass.async_block_till_done()
assert call_lock
assert call_lock[0].data[ATTR_ENTITY_ID] == entity_id
assert ATTR_CODE not in call_lock[0].data
assert acc.char_target_state.value == 1
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
|
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from .const import DOMAIN, HASS_DATA_REMOVE_LISTENERS_KEY, HASS_DATA_UPDATER_KEY
async def async_setup(hass, config):
"""Set up the forked-daapd component."""
return True
async def async_setup_entry(hass, entry):
"""Set up forked-daapd from a config entry by forwarding to platform."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, MP_DOMAIN)
)
return True
async def async_unload_entry(hass, entry):
"""Remove forked-daapd component."""
status = await hass.config_entries.async_forward_entry_unload(entry, MP_DOMAIN)
if status and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(entry.entry_id):
hass.data[DOMAIN][entry.entry_id][
HASS_DATA_UPDATER_KEY
].websocket_handler.cancel()
for remove_listener in hass.data[DOMAIN][entry.entry_id][
HASS_DATA_REMOVE_LISTENERS_KEY
]:
remove_listener()
del hass.data[DOMAIN][entry.entry_id]
if not hass.data[DOMAIN]:
del hass.data[DOMAIN]
return status
|
import asyncio
from types import ModuleType
from typing import Any, Callable, Dict, Optional
import attr
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.setup import async_prepare_setup_platform
from homeassistant.util import dt as dt_util
from .const import (
CONF_SCAN_INTERVAL,
DOMAIN,
LOGGER,
PLATFORM_TYPE_LEGACY,
SCAN_INTERVAL,
SOURCE_TYPE_ROUTER,
)
@attr.s
class DeviceTrackerPlatform:
"""Class to hold platform information."""
LEGACY_SETUP = (
"async_get_scanner",
"get_scanner",
"async_setup_scanner",
"setup_scanner",
)
name: str = attr.ib()
platform: ModuleType = attr.ib()
config: Dict = attr.ib()
@property
def type(self):
"""Return platform type."""
for methods, platform_type in ((self.LEGACY_SETUP, PLATFORM_TYPE_LEGACY),):
for meth in methods:
if hasattr(self.platform, meth):
return platform_type
return None
async def async_setup_legacy(self, hass, tracker, discovery_info=None):
"""Set up a legacy platform."""
LOGGER.info("Setting up %s.%s", DOMAIN, self.type)
try:
scanner = None
setup = None
if hasattr(self.platform, "async_get_scanner"):
scanner = await self.platform.async_get_scanner(
hass, {DOMAIN: self.config}
)
elif hasattr(self.platform, "get_scanner"):
scanner = await hass.async_add_executor_job(
self.platform.get_scanner, hass, {DOMAIN: self.config}
)
elif hasattr(self.platform, "async_setup_scanner"):
setup = await self.platform.async_setup_scanner(
hass, self.config, tracker.async_see, discovery_info
)
elif hasattr(self.platform, "setup_scanner"):
setup = await hass.async_add_executor_job(
self.platform.setup_scanner,
hass,
self.config,
tracker.see,
discovery_info,
)
else:
raise HomeAssistantError("Invalid legacy device_tracker platform.")
if scanner:
async_setup_scanner_platform(
hass, self.config, scanner, tracker.async_see, self.type
)
return
if not setup:
LOGGER.error("Error setting up platform %s", self.type)
return
except Exception: # pylint: disable=broad-except
LOGGER.exception("Error setting up platform %s", self.type)
async def async_extract_config(hass, config):
"""Extract device tracker config and split between legacy and modern."""
legacy = []
for platform in await asyncio.gather(
*(
async_create_platform_type(hass, config, p_type, p_config)
for p_type, p_config in config_per_platform(config, DOMAIN)
)
):
if platform is None:
continue
if platform.type == PLATFORM_TYPE_LEGACY:
legacy.append(platform)
else:
raise ValueError(
f"Unable to determine type for {platform.name}: {platform.type}"
)
return legacy
async def async_create_platform_type(
hass, config, p_type, p_config
) -> Optional[DeviceTrackerPlatform]:
"""Determine type of platform."""
platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)
if platform is None:
return None
return DeviceTrackerPlatform(p_type, platform, p_config)
@callback
def async_setup_scanner_platform(
hass: HomeAssistantType,
config: ConfigType,
scanner: Any,
async_see_device: Callable,
platform: str,
):
"""Set up the connect scanner-based platform to device tracker.
This method must be run in the event loop.
"""
interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
update_lock = asyncio.Lock()
scanner.hass = hass
    # On the first scan of each MAC, also resolve and record the host name
seen: Any = set()
async def async_device_tracker_scan(now: dt_util.dt.datetime):
"""Handle interval matches."""
if update_lock.locked():
LOGGER.warning(
"Updating device list from %s took longer than the scheduled "
"scan interval %s",
platform,
interval,
)
return
async with update_lock:
found_devices = await scanner.async_scan_devices()
for mac in found_devices:
if mac in seen:
host_name = None
else:
host_name = await scanner.async_get_device_name(mac)
seen.add(mac)
try:
extra_attributes = await scanner.async_get_extra_attributes(mac)
except NotImplementedError:
extra_attributes = {}
kwargs = {
"mac": mac,
"host_name": host_name,
"source_type": SOURCE_TYPE_ROUTER,
"attributes": {
"scanner": scanner.__class__.__name__,
**extra_attributes,
},
}
zone_home = hass.states.get(hass.components.zone.ENTITY_ID_HOME)
if zone_home:
kwargs["gps"] = [
zone_home.attributes[ATTR_LATITUDE],
zone_home.attributes[ATTR_LONGITUDE],
]
kwargs["gps_accuracy"] = 0
hass.async_create_task(async_see_device(**kwargs))
async_track_time_interval(hass, async_device_tracker_scan, interval)
hass.async_create_task(async_device_tracker_scan(None))
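# For illustration only: a minimal scanner exposing the interface that
# async_device_tracker_scan above consumes. The class name and the device
# data below are hypothetical, not part of this module.
class ExampleScanner:
    """Sketch of a scanner usable with async_setup_scanner_platform."""
    async def async_scan_devices(self):
        """Return the MAC addresses of currently visible devices."""
        return ["AA:BB:CC:DD:EE:FF"]
    async def async_get_device_name(self, mac):
        """Resolve a host name for a newly seen device, or return None."""
        return "example-device"
    async def async_get_extra_attributes(self, mac):
        """Optional hook; raising NotImplementedError is handled above."""
        raise NotImplementedError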
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from nfacct import NetfilterAccountingCollector
##########################################################################
class TestNetfilterAccountingCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('NetfilterAccountingCollector', {
'interval': 10,
'bin': 'true',
})
self.collector = NetfilterAccountingCollector(config, None)
def test_import(self):
self.assertTrue(NetfilterAccountingCollector)
@patch.object(Collector, 'publish')
def test_no_counters(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=('', '')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_counters(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(self.getFixture('nfacct').getvalue(), '')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
self.assertPublishedMany(publish_mock, {
'Tcp.pkts': 3,
'Tcp.bytes': 300,
'Udp.pkts': 0,
'Udp.bytes': 0,
'Tcp.Incoming.pkts': 1,
'Tcp.Incoming.bytes': 100,
'Tcp.Outgoing.pkts': 2,
'Tcp.Outgoing.bytes': 200,
})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from os import environ
from sys import executable
from functools import partial
from riko.parsers import _make_content, entity2text
try:
from twisted.internet.defer import maybeDeferred, Deferred
except ImportError:
maybeDeferred = lambda *args: None
else:
from twisted.internet import defer
from twisted.internet.utils import getProcessOutput
from twisted.internet.reactor import callLater
from . import microdom
from .microdom import EntityReference
async_none = defer.succeed(None)
async_return = partial(defer.succeed)
async_partial = lambda f, **kwargs: partial(maybeDeferred, f, **kwargs)
def async_sleep(seconds):
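    """Return a Deferred that fires with None after ``seconds`` seconds."""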
d = Deferred()
callLater(seconds, d.callback, None)
return d
def defer_to_process(command):
return getProcessOutput(executable, ['-c', command], environ)
def xml2etree(f, xml=True):
readable = hasattr(f, 'read')
if xml and readable:
parse = microdom.parseXML
elif readable:
parse = partial(microdom.parse, lenient=True)
elif xml:
parse = microdom.parseXMLString
else:
parse = partial(microdom.parseString, lenient=True)
return parse(f)
def etree2dict(element, tag='content'):
"""Convert a microdom element tree into a dict imitating how Yahoo Pipes
does it.
TODO: checkout twisted.words.xish
"""
i = dict(element.attributes) if hasattr(element, 'attributes') else {}
value = element.nodeValue if hasattr(element, 'nodeValue') else None
if isinstance(element, EntityReference):
value = entity2text(value)
i.update(_make_content(i, value, tag))
for child in element.childNodes:
tag = child.tagName if hasattr(child, 'tagName') else 'content'
value = etree2dict(child, tag)
# try to join the content first since microdom likes to split up
# elements that contain a mix of text and entity reference
try:
i.update(_make_content(i, value, tag, append=False))
except TypeError:
i.update(_make_content(i, value, tag))
if ('content' in i) and not set(i).difference(['content']):
# element is leaf node and doesn't have attributes
i = i['content']
return i
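# Illustrative usage (the XML snippet is hypothetical):
#
#     tree = xml2etree('<item id="1">hello</item>', xml=True)
#     data = etree2dict(tree)
#
# Per etree2dict above, leaf elements without attributes collapse to their
# text content, so nested values reduce to plain strings.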
|
import logging
from typing import List, Optional
from pizone import Controller, Zone
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
FAN_AUTO,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_ECO,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_EXCLUDE,
PRECISION_HALVES,
PRECISION_TENTHS,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
DATA_CONFIG,
DATA_DISCOVERY_SERVICE,
DISPATCH_CONTROLLER_DISCONNECTED,
DISPATCH_CONTROLLER_DISCOVERED,
DISPATCH_CONTROLLER_RECONNECTED,
DISPATCH_CONTROLLER_UPDATE,
DISPATCH_ZONE_UPDATE,
IZONE,
)
_LOGGER = logging.getLogger(__name__)
_IZONE_FAN_TO_HA = {
Controller.Fan.LOW: FAN_LOW,
Controller.Fan.MED: FAN_MEDIUM,
Controller.Fan.HIGH: FAN_HIGH,
Controller.Fan.AUTO: FAN_AUTO,
}
async def async_setup_entry(
hass: HomeAssistantType, config: ConfigType, async_add_entities
):
"""Initialize an IZone Controller."""
disco = hass.data[DATA_DISCOVERY_SERVICE]
@callback
def init_controller(ctrl: Controller):
"""Register the controller device and the containing zones."""
conf = hass.data.get(DATA_CONFIG) # type: ConfigType
# Filter out any entities excluded in the config file
if conf and ctrl.device_uid in conf[CONF_EXCLUDE]:
_LOGGER.info("Controller UID=%s ignored as excluded", ctrl.device_uid)
return
_LOGGER.info("Controller UID=%s discovered", ctrl.device_uid)
device = ControllerDevice(ctrl)
async_add_entities([device])
async_add_entities(device.zones.values())
# create any components not yet created
for controller in disco.pi_disco.controllers.values():
init_controller(controller)
# connect to register any further components
async_dispatcher_connect(hass, DISPATCH_CONTROLLER_DISCOVERED, init_controller)
return True
def _return_on_connection_error(ret=None):
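    """Return a decorator that returns ``ret`` when the entity is
    unavailable or the wrapped call raises ConnectionError.
    """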
def wrap(func):
def wrapped_f(*args, **kwargs):
if not args[0].available:
return ret
try:
return func(*args, **kwargs)
except ConnectionError:
return ret
return wrapped_f
return wrap
class ControllerDevice(ClimateEntity):
"""Representation of iZone Controller."""
def __init__(self, controller: Controller) -> None:
"""Initialise ControllerDevice."""
self._controller = controller
self._supported_features = SUPPORT_FAN_MODE
if (
controller.ras_mode == "master" and controller.zone_ctrl == 13
) or controller.ras_mode == "RAS":
self._supported_features |= SUPPORT_TARGET_TEMPERATURE
self._state_to_pizone = {
HVAC_MODE_COOL: Controller.Mode.COOL,
HVAC_MODE_HEAT: Controller.Mode.HEAT,
HVAC_MODE_HEAT_COOL: Controller.Mode.AUTO,
HVAC_MODE_FAN_ONLY: Controller.Mode.VENT,
HVAC_MODE_DRY: Controller.Mode.DRY,
}
if controller.free_air_enabled:
self._supported_features |= SUPPORT_PRESET_MODE
self._fan_to_pizone = {}
for fan in controller.fan_modes:
self._fan_to_pizone[_IZONE_FAN_TO_HA[fan]] = fan
self._available = True
self._device_info = {
"identifiers": {(IZONE, self.unique_id)},
"name": self.name,
"manufacturer": "IZone",
"model": self._controller.sys_type,
}
# Create the zones
self.zones = {}
for zone in controller.zones:
self.zones[zone] = ZoneDevice(self, zone)
async def async_added_to_hass(self):
"""Call on adding to hass."""
# Register for connect/disconnect/update events
@callback
def controller_disconnected(ctrl: Controller, ex: Exception) -> None:
"""Disconnected from controller."""
if ctrl is not self._controller:
return
self.set_available(False, ex)
self.async_on_remove(
async_dispatcher_connect(
self.hass, DISPATCH_CONTROLLER_DISCONNECTED, controller_disconnected
)
)
@callback
def controller_reconnected(ctrl: Controller) -> None:
"""Reconnected to controller."""
if ctrl is not self._controller:
return
self.set_available(True)
self.async_on_remove(
async_dispatcher_connect(
self.hass, DISPATCH_CONTROLLER_RECONNECTED, controller_reconnected
)
)
@callback
def controller_update(ctrl: Controller) -> None:
"""Handle controller data updates."""
if ctrl is not self._controller:
return
self.async_write_ha_state()
for zone in self.zones.values():
zone.async_schedule_update_ha_state()
self.async_on_remove(
async_dispatcher_connect(
self.hass, DISPATCH_CONTROLLER_UPDATE, controller_update
)
)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@callback
def set_available(self, available: bool, ex: Exception = None) -> None:
"""
Set availability for the controller.
Also sets zone availability as they follow the same availability.
"""
if self.available == available:
return
if available:
_LOGGER.info("Reconnected controller %s ", self._controller.device_uid)
else:
_LOGGER.info(
"Controller %s disconnected due to exception: %s",
self._controller.device_uid,
ex,
)
self._available = available
self.async_write_ha_state()
for zone in self.zones.values():
zone.async_schedule_update_ha_state()
@property
def device_info(self):
"""Return the device info for the iZone system."""
return self._device_info
@property
def unique_id(self):
"""Return the ID of the controller device."""
return self._controller.device_uid
@property
def name(self) -> str:
"""Return the name of the entity."""
return f"iZone Controller {self._controller.device_uid}"
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return self._supported_features
@property
def temperature_unit(self) -> str:
"""Return the unit of measurement which this thermostat uses."""
return TEMP_CELSIUS
@property
def precision(self) -> float:
"""Return the precision of the system."""
return PRECISION_TENTHS
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
return {
"supply_temperature": show_temp(
self.hass,
self.supply_temperature,
self.temperature_unit,
self.precision,
),
"temp_setpoint": show_temp(
self.hass,
self._controller.temp_setpoint,
self.temperature_unit,
PRECISION_HALVES,
),
}
@property
def hvac_mode(self) -> str:
"""Return current operation ie. heat, cool, idle."""
if not self._controller.is_on:
return HVAC_MODE_OFF
mode = self._controller.mode
if mode == Controller.Mode.FREE_AIR:
return HVAC_MODE_FAN_ONLY
for (key, value) in self._state_to_pizone.items():
if value == mode:
return key
assert False, "Should be unreachable"
@property
@_return_on_connection_error([])
def hvac_modes(self) -> List[str]:
"""Return the list of available operation modes."""
if self._controller.free_air:
return [HVAC_MODE_OFF, HVAC_MODE_FAN_ONLY]
return [HVAC_MODE_OFF, *self._state_to_pizone]
@property
@_return_on_connection_error(PRESET_NONE)
def preset_mode(self):
"""Eco mode is external air."""
return PRESET_ECO if self._controller.free_air else PRESET_NONE
@property
@_return_on_connection_error([PRESET_NONE])
def preset_modes(self):
"""Available preset modes, normal or eco."""
if self._controller.free_air_enabled:
return [PRESET_NONE, PRESET_ECO]
return [PRESET_NONE]
@property
@_return_on_connection_error()
def current_temperature(self) -> Optional[float]:
"""Return the current temperature."""
if self._controller.mode == Controller.Mode.FREE_AIR:
return self._controller.temp_supply
return self._controller.temp_return
@property
@_return_on_connection_error()
def target_temperature(self) -> Optional[float]:
"""Return the temperature we try to reach."""
if not self._supported_features & SUPPORT_TARGET_TEMPERATURE:
return None
return self._controller.temp_setpoint
@property
def supply_temperature(self) -> float:
"""Return the current supply, or in duct, temperature."""
return self._controller.temp_supply
@property
def target_temperature_step(self) -> Optional[float]:
"""Return the supported step of target temperature."""
return 0.5
@property
def fan_mode(self) -> Optional[str]:
"""Return the fan setting."""
return _IZONE_FAN_TO_HA[self._controller.fan]
@property
def fan_modes(self) -> Optional[List[str]]:
"""Return the list of available fan modes."""
return list(self._fan_to_pizone)
@property
@_return_on_connection_error(0.0)
def min_temp(self) -> float:
"""Return the minimum temperature."""
return self._controller.temp_min
@property
@_return_on_connection_error(50.0)
def max_temp(self) -> float:
"""Return the maximum temperature."""
return self._controller.temp_max
async def wrap_and_catch(self, coro):
"""Catch any connection errors and set unavailable."""
try:
await coro
except ConnectionError as ex:
self.set_available(False, ex)
else:
self.set_available(True)
async def async_set_temperature(self, **kwargs) -> None:
"""Set new target temperature."""
if not self.supported_features & SUPPORT_TARGET_TEMPERATURE:
self.async_schedule_update_ha_state(True)
return
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is not None:
await self.wrap_and_catch(self._controller.set_temp_setpoint(temp))
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
fan = self._fan_to_pizone[fan_mode]
await self.wrap_and_catch(self._controller.set_fan(fan))
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target operation mode."""
if hvac_mode == HVAC_MODE_OFF:
await self.wrap_and_catch(self._controller.set_on(False))
return
if not self._controller.is_on:
await self.wrap_and_catch(self._controller.set_on(True))
if self._controller.free_air:
return
mode = self._state_to_pizone[hvac_mode]
await self.wrap_and_catch(self._controller.set_mode(mode))
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set the preset mode."""
await self.wrap_and_catch(
self._controller.set_free_air(preset_mode == PRESET_ECO)
)
async def async_turn_on(self) -> None:
"""Turn the entity on."""
await self.wrap_and_catch(self._controller.set_on(True))
class ZoneDevice(ClimateEntity):
"""Representation of iZone Zone."""
def __init__(self, controller: ControllerDevice, zone: Zone) -> None:
"""Initialise ZoneDevice."""
self._controller = controller
self._zone = zone
self._name = zone.name.title()
self._supported_features = 0
if zone.type != Zone.Type.AUTO:
self._state_to_pizone = {
HVAC_MODE_OFF: Zone.Mode.CLOSE,
HVAC_MODE_FAN_ONLY: Zone.Mode.OPEN,
}
else:
self._state_to_pizone = {
HVAC_MODE_OFF: Zone.Mode.CLOSE,
HVAC_MODE_FAN_ONLY: Zone.Mode.OPEN,
HVAC_MODE_HEAT_COOL: Zone.Mode.AUTO,
}
self._supported_features |= SUPPORT_TARGET_TEMPERATURE
self._device_info = {
"identifiers": {(IZONE, controller.unique_id, zone.index)},
"name": self.name,
"manufacturer": "IZone",
"via_device": (IZONE, controller.unique_id),
"model": zone.type.name.title(),
}
async def async_added_to_hass(self):
"""Call on adding to hass."""
@callback
def zone_update(ctrl: Controller, zone: Zone) -> None:
"""Handle zone data updates."""
if zone is not self._zone:
return
self._name = zone.name.title()
self.async_write_ha_state()
self.async_on_remove(
async_dispatcher_connect(self.hass, DISPATCH_ZONE_UPDATE, zone_update)
)
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._controller.available
@property
def assumed_state(self) -> bool:
"""Return True if unable to access real state of the entity."""
return self._controller.assumed_state
@property
def device_info(self):
"""Return the device info for the iZone system."""
return self._device_info
@property
def unique_id(self):
"""Return the ID of the controller device."""
return f"{self._controller.unique_id}_z{self._zone.index + 1}"
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
@property
@_return_on_connection_error(0)
def supported_features(self):
"""Return the list of supported features."""
if self._zone.mode == Zone.Mode.AUTO:
return self._supported_features
return self._supported_features & ~SUPPORT_TARGET_TEMPERATURE
@property
def temperature_unit(self):
"""Return the unit of measurement which this thermostat uses."""
return TEMP_CELSIUS
@property
def precision(self):
"""Return the precision of the system."""
return PRECISION_TENTHS
@property
def hvac_mode(self):
"""Return current operation ie. heat, cool, idle."""
mode = self._zone.mode
for (key, value) in self._state_to_pizone.items():
if value == mode:
return key
return None
@property
def hvac_modes(self):
"""Return the list of available operation modes."""
return list(self._state_to_pizone)
@property
def current_temperature(self):
"""Return the current temperature."""
return self._zone.temp_current
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self._zone.type != Zone.Type.AUTO:
return None
return self._zone.temp_setpoint
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 0.5
@property
def min_temp(self):
"""Return the minimum temperature."""
return self._controller.min_temp
@property
def max_temp(self):
"""Return the maximum temperature."""
return self._controller.max_temp
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
if self._zone.mode != Zone.Mode.AUTO:
return
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is not None:
await self._controller.wrap_and_catch(self._zone.set_temp_setpoint(temp))
async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target operation mode."""
mode = self._state_to_pizone[hvac_mode]
await self._controller.wrap_and_catch(self._zone.set_mode(mode))
self.async_write_ha_state()
@property
def is_on(self):
"""Return true if on."""
return self._zone.mode != Zone.Mode.CLOSE
async def async_turn_on(self):
"""Turn device on (open zone)."""
if self._zone.type == Zone.Type.AUTO:
await self._controller.wrap_and_catch(self._zone.set_mode(Zone.Mode.AUTO))
else:
await self._controller.wrap_and_catch(self._zone.set_mode(Zone.Mode.OPEN))
self.async_write_ha_state()
async def async_turn_off(self):
"""Turn device off (close zone)."""
await self._controller.wrap_and_catch(self._zone.set_mode(Zone.Mode.CLOSE))
self.async_write_ha_state()
|
from uuid import uuid4
from homeassistant.components.alexa import config, smart_home
from homeassistant.core import Context
from tests.common import async_mock_service
TEST_URL = "https://api.amazonalexa.com/v3/events"
TEST_TOKEN_URL = "https://api.amazon.com/auth/o2/token"
TEST_LOCALE = "en-US"
class MockConfig(config.AbstractConfig):
"""Mock Alexa config."""
entity_config = {
"binary_sensor.test_doorbell": {"display_categories": "DOORBELL"},
"binary_sensor.test_contact_forced": {"display_categories": "CONTACT_SENSOR"},
"binary_sensor.test_motion_forced": {"display_categories": "MOTION_SENSOR"},
"binary_sensor.test_motion_camera_event": {"display_categories": "CAMERA"},
"camera.test": {"display_categories": "CAMERA"},
}
@property
def supports_auth(self):
"""Return if config supports auth."""
return True
@property
def endpoint(self):
"""Endpoint for report state."""
return TEST_URL
@property
def locale(self):
"""Return config locale."""
return TEST_LOCALE
def should_expose(self, entity_id):
"""If an entity should be exposed."""
return True
async def async_get_access_token(self):
"""Get an access token."""
return "thisisnotanacesstoken"
async def async_accept_grant(self, code):
"""Accept a grant."""
DEFAULT_CONFIG = MockConfig(None)
def get_new_request(namespace, name, endpoint=None):
"""Generate a new API message."""
raw_msg = {
"directive": {
"header": {
"namespace": namespace,
"name": name,
"messageId": str(uuid4()),
"correlationToken": str(uuid4()),
"payloadVersion": "3",
},
"endpoint": {
"scope": {"type": "BearerToken", "token": str(uuid4())},
"endpointId": endpoint,
},
"payload": {},
}
}
if not endpoint:
raw_msg["directive"].pop("endpoint")
return raw_msg
async def assert_request_calls_service(
namespace,
name,
endpoint,
service,
hass,
response_type="Response",
payload=None,
instance=None,
):
"""Assert an API request calls a hass service."""
context = Context()
request = get_new_request(namespace, name, endpoint)
if payload:
request["directive"]["payload"] = payload
if instance:
request["directive"]["header"]["instance"] = instance
domain, service_name = service.split(".")
calls = async_mock_service(hass, domain, service_name)
msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request, context)
await hass.async_block_till_done()
assert len(calls) == 1
call = calls[0]
assert "event" in msg
assert call.data["entity_id"] == endpoint.replace("#", ".")
assert msg["event"]["header"]["name"] == response_type
assert call.context == context
return call, msg
async def assert_request_fails(
namespace, name, endpoint, service_not_called, hass, payload=None
):
"""Assert an API request returns an ErrorResponse."""
request = get_new_request(namespace, name, endpoint)
if payload:
request["directive"]["payload"] = payload
domain, service_name = service_not_called.split(".")
call = async_mock_service(hass, domain, service_name)
msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request)
await hass.async_block_till_done()
assert not call
assert "event" in msg
assert msg["event"]["header"]["name"] == "ErrorResponse"
return msg
async def assert_power_controller_works(endpoint, on_service, off_service, hass):
"""Assert PowerController API requests work."""
await assert_request_calls_service(
"Alexa.PowerController", "TurnOn", endpoint, on_service, hass
)
await assert_request_calls_service(
"Alexa.PowerController", "TurnOff", endpoint, off_service, hass
)
async def assert_scene_controller_works(
endpoint, activate_service, deactivate_service, hass
):
"""Assert SceneController API requests work."""
_, response = await assert_request_calls_service(
"Alexa.SceneController",
"Activate",
endpoint,
activate_service,
hass,
response_type="ActivationStarted",
)
assert response["event"]["payload"]["cause"]["type"] == "VOICE_INTERACTION"
assert "timestamp" in response["event"]["payload"]
if deactivate_service:
        _, response = await assert_request_calls_service(
"Alexa.SceneController",
"Deactivate",
endpoint,
deactivate_service,
hass,
response_type="DeactivationStarted",
)
cause_type = response["event"]["payload"]["cause"]["type"]
assert cause_type == "VOICE_INTERACTION"
assert "timestamp" in response["event"]["payload"]
async def reported_properties(hass, endpoint):
"""Use ReportState to get properties and return them.
The result is a ReportedProperties instance, which has methods to make
assertions about the properties.
"""
request = get_new_request("Alexa", "ReportState", endpoint)
msg = await smart_home.async_handle_message(hass, DEFAULT_CONFIG, request)
await hass.async_block_till_done()
return ReportedProperties(msg["context"]["properties"])
class ReportedProperties:
"""Class to help assert reported properties."""
def __init__(self, properties):
"""Initialize class."""
self.properties = properties
def assert_not_has_property(self, namespace, name):
"""Assert a property does not exist."""
for prop in self.properties:
if prop["namespace"] == namespace and prop["name"] == name:
assert False, "Property %s:%s exists"
def assert_equal(self, namespace, name, value):
"""Assert a property is equal to a given value."""
for prop in self.properties:
if prop["namespace"] == namespace and prop["name"] == name:
assert prop["value"] == value
return prop
assert False, f"property {namespace}:{name} not in {self.properties!r}"
|
import asyncio
import logging
from types import MappingProxyType
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_DIRECTION,
ATTR_OSCILLATING,
ATTR_SPEED,
DOMAIN,
SERVICE_OSCILLATE,
SERVICE_SET_DIRECTION,
SERVICE_SET_SPEED,
)
_LOGGER = logging.getLogger(__name__)
VALID_STATES = {STATE_ON, STATE_OFF}
ATTRIBUTES = { # attribute: service
ATTR_DIRECTION: SERVICE_SET_DIRECTION,
ATTR_OSCILLATING: SERVICE_OSCILLATE,
ATTR_SPEED: SERVICE_SET_SPEED,
}
async def _async_reproduce_state(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce a single state."""
cur_state = hass.states.get(state.entity_id)
if cur_state is None:
_LOGGER.warning("Unable to find entity %s", state.entity_id)
return
if state.state not in VALID_STATES:
_LOGGER.warning(
"Invalid state specified for %s: %s", state.entity_id, state.state
)
return
# Return if we are already at the right state.
if cur_state.state == state.state and all(
check_attr_equal(cur_state.attributes, state.attributes, attr)
for attr in ATTRIBUTES
):
return
service_data = {ATTR_ENTITY_ID: state.entity_id}
service_calls = {} # service: service_data
if state.state == STATE_ON:
# The fan should be on
if cur_state.state != STATE_ON:
            # Turn the fan on first
service_calls[SERVICE_TURN_ON] = service_data
for attr, service in ATTRIBUTES.items():
# Call services to adjust the attributes
if attr in state.attributes and not check_attr_equal(
state.attributes, cur_state.attributes, attr
):
data = service_data.copy()
data[attr] = state.attributes[attr]
service_calls[service] = data
elif state.state == STATE_OFF:
service_calls[SERVICE_TURN_OFF] = service_data
for service, data in service_calls.items():
await hass.services.async_call(
DOMAIN, service, data, context=context, blocking=True
)
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce Fan states."""
await asyncio.gather(
*(
_async_reproduce_state(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
def check_attr_equal(
attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str
) -> bool:
"""Return true if the given attributes are equal."""
return attr1.get(attr_str) == attr2.get(attr_str)
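# Minimal usage sketch (entity id and speed value are hypothetical):
#
#     await async_reproduce_states(
#         hass, [State("fan.bedroom", STATE_ON, {ATTR_SPEED: "low"})]
#     )
#
# Per _async_reproduce_state above, only attributes that differ from the
# current state result in service calls.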
|
import openzwavemqtt.const as const_ozw
from openzwavemqtt.const import CommandClass, ValueGenre, ValueIndex, ValueType
from . import const
DISCOVERY_SCHEMAS = (
{ # Binary sensors
const.DISC_COMPONENT: "binary_sensor",
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: CommandClass.SENSOR_BINARY,
const.DISC_TYPE: ValueType.BOOL,
const.DISC_GENRE: ValueGenre.USER,
},
"off_delay": {
const.DISC_COMMAND_CLASS: CommandClass.CONFIGURATION,
const.DISC_INDEX: 9,
const.DISC_OPTIONAL: True,
},
},
},
{ # Notification CommandClass translates to binary_sensor
const.DISC_COMPONENT: "binary_sensor",
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: CommandClass.NOTIFICATION,
const.DISC_GENRE: ValueGenre.USER,
const.DISC_TYPE: (ValueType.BOOL, ValueType.LIST),
}
},
},
{ # Z-Wave Thermostat device translates to Climate entity
const.DISC_COMPONENT: "climate",
const.DISC_GENERIC_DEVICE_CLASS: (
const_ozw.GENERIC_TYPE_THERMOSTAT,
const_ozw.GENERIC_TYPE_SENSOR_MULTILEVEL,
),
const.DISC_SPECIFIC_DEVICE_CLASS: (
const_ozw.SPECIFIC_TYPE_THERMOSTAT_GENERAL,
const_ozw.SPECIFIC_TYPE_THERMOSTAT_GENERAL_V2,
const_ozw.SPECIFIC_TYPE_SETBACK_THERMOSTAT,
const_ozw.SPECIFIC_TYPE_THERMOSTAT_HEATING,
const_ozw.SPECIFIC_TYPE_SETPOINT_THERMOSTAT,
const_ozw.SPECIFIC_TYPE_NOT_USED,
),
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_MODE,)
},
"mode": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_MODE,),
const.DISC_OPTIONAL: True,
},
"temperature": {
const.DISC_COMMAND_CLASS: (CommandClass.SENSOR_MULTILEVEL,),
const.DISC_INDEX: (1,),
const.DISC_OPTIONAL: True,
},
"fan_mode": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_FAN_MODE,),
const.DISC_OPTIONAL: True,
},
"operating_state": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_OPERATING_STATE,),
const.DISC_OPTIONAL: True,
},
"fan_action": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_FAN_STATE,),
const.DISC_OPTIONAL: True,
},
"valve_position": {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
const.DISC_INDEX: (0,),
const.DISC_OPTIONAL: True,
},
"setpoint_heating": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (1,),
const.DISC_OPTIONAL: True,
},
"setpoint_cooling": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (2,),
const.DISC_OPTIONAL: True,
},
"setpoint_furnace": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (7,),
const.DISC_OPTIONAL: True,
},
"setpoint_dry_air": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (8,),
const.DISC_OPTIONAL: True,
},
"setpoint_moist_air": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (9,),
const.DISC_OPTIONAL: True,
},
"setpoint_auto_changeover": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (10,),
const.DISC_OPTIONAL: True,
},
"setpoint_eco_heating": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (11,),
const.DISC_OPTIONAL: True,
},
"setpoint_eco_cooling": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (12,),
const.DISC_OPTIONAL: True,
},
"setpoint_away_heating": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (13,),
const.DISC_OPTIONAL: True,
},
"setpoint_away_cooling": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (14,),
const.DISC_OPTIONAL: True,
},
"setpoint_full_power": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (15,),
const.DISC_OPTIONAL: True,
},
},
},
{ # Z-Wave Thermostat device without mode support
const.DISC_COMPONENT: "climate",
const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_THERMOSTAT,),
const.DISC_SPECIFIC_DEVICE_CLASS: (
const_ozw.SPECIFIC_TYPE_SETPOINT_THERMOSTAT,
const_ozw.SPECIFIC_TYPE_NOT_USED,
),
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,)
},
"temperature": {
const.DISC_COMMAND_CLASS: (CommandClass.SENSOR_MULTILEVEL,),
const.DISC_INDEX: (1,),
const.DISC_OPTIONAL: True,
},
"operating_state": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_OPERATING_STATE,),
const.DISC_OPTIONAL: True,
},
"valve_position": {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
const.DISC_INDEX: (0,),
const.DISC_OPTIONAL: True,
},
"setpoint_heating": {
const.DISC_COMMAND_CLASS: (CommandClass.THERMOSTAT_SETPOINT,),
const.DISC_INDEX: (1,),
const.DISC_OPTIONAL: True,
},
},
},
{ # Rollershutter
const.DISC_COMPONENT: "cover",
const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,),
const.DISC_SPECIFIC_DEVICE_CLASS: (
const_ozw.SPECIFIC_TYPE_CLASS_A_MOTOR_CONTROL,
const_ozw.SPECIFIC_TYPE_CLASS_B_MOTOR_CONTROL,
const_ozw.SPECIFIC_TYPE_CLASS_C_MOTOR_CONTROL,
const_ozw.SPECIFIC_TYPE_MOTOR_MULTIPOSITION,
const_ozw.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
const_ozw.SPECIFIC_TYPE_SECURE_DOOR,
),
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
const.DISC_GENRE: ValueGenre.USER,
},
"open": {
const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_BRIGHT,
const.DISC_OPTIONAL: True,
},
"close": {
const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_DIM,
const.DISC_OPTIONAL: True,
},
},
},
{ # Garage Door Barrier
const.DISC_COMPONENT: "cover",
const.DISC_GENERIC_DEVICE_CLASS: (const_ozw.GENERIC_TYPE_ENTRY_CONTROL,),
const.DISC_SPECIFIC_DEVICE_CLASS: (
const_ozw.SPECIFIC_TYPE_SECURE_BARRIER_ADDON,
),
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: CommandClass.BARRIER_OPERATOR,
const.DISC_INDEX: ValueIndex.BARRIER_OPERATOR_LABEL,
},
},
},
{ # Fan
const.DISC_COMPONENT: "fan",
const.DISC_GENERIC_DEVICE_CLASS: const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,
const.DISC_SPECIFIC_DEVICE_CLASS: const_ozw.SPECIFIC_TYPE_FAN_SWITCH,
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: CommandClass.SWITCH_MULTILEVEL,
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
const.DISC_TYPE: ValueType.BYTE,
},
},
},
{ # Light
const.DISC_COMPONENT: "light",
const.DISC_GENERIC_DEVICE_CLASS: (
const_ozw.GENERIC_TYPE_SWITCH_MULTILEVEL,
const_ozw.GENERIC_TYPE_SWITCH_REMOTE,
),
const.DISC_SPECIFIC_DEVICE_CLASS: (
const_ozw.SPECIFIC_TYPE_POWER_SWITCH_MULTILEVEL,
const_ozw.SPECIFIC_TYPE_SCENE_SWITCH_MULTILEVEL,
const_ozw.SPECIFIC_TYPE_COLOR_TUNABLE_BINARY,
const_ozw.SPECIFIC_TYPE_COLOR_TUNABLE_MULTILEVEL,
const_ozw.SPECIFIC_TYPE_NOT_USED,
),
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_LEVEL,
const.DISC_TYPE: ValueType.BYTE,
},
"dimming_duration": {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_MULTILEVEL,),
const.DISC_INDEX: ValueIndex.SWITCH_MULTILEVEL_DURATION,
const.DISC_OPTIONAL: True,
},
"color": {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_COLOR,),
const.DISC_INDEX: ValueIndex.SWITCH_COLOR_COLOR,
const.DISC_OPTIONAL: True,
},
"color_channels": {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_COLOR,),
const.DISC_INDEX: ValueIndex.SWITCH_COLOR_CHANNELS,
const.DISC_OPTIONAL: True,
},
"min_kelvin": {
const.DISC_COMMAND_CLASS: (CommandClass.CONFIGURATION,),
const.DISC_INDEX: 81, # PR for upstream to add SWITCH_COLOR_CT_WARM
const.DISC_OPTIONAL: True,
},
"max_kelvin": {
const.DISC_COMMAND_CLASS: (CommandClass.CONFIGURATION,),
const.DISC_INDEX: 82, # PR for upstream to add SWITCH_COLOR_CT_COLD
const.DISC_OPTIONAL: True,
},
},
},
{ # All other text/numeric sensors
const.DISC_COMPONENT: "sensor",
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (
CommandClass.SENSOR_MULTILEVEL,
CommandClass.METER,
CommandClass.ALARM,
CommandClass.SENSOR_ALARM,
CommandClass.INDICATOR,
CommandClass.BATTERY,
CommandClass.NOTIFICATION,
CommandClass.BASIC,
),
const.DISC_TYPE: (
ValueType.DECIMAL,
ValueType.INT,
ValueType.STRING,
ValueType.BYTE,
ValueType.LIST,
),
}
},
},
{ # Switch platform
const.DISC_COMPONENT: "switch",
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (CommandClass.SWITCH_BINARY,),
const.DISC_TYPE: ValueType.BOOL,
const.DISC_GENRE: ValueGenre.USER,
}
},
},
{ # Lock platform
const.DISC_COMPONENT: "lock",
const.DISC_VALUES: {
const.DISC_PRIMARY: {
const.DISC_COMMAND_CLASS: (CommandClass.DOOR_LOCK,),
const.DISC_TYPE: ValueType.BOOL,
const.DISC_GENRE: ValueGenre.USER,
}
},
},
)
def check_node_schema(node, schema):
"""Check if node matches the passed node schema."""
if const.DISC_NODE_ID in schema and node.node_id not in schema[const.DISC_NODE_ID]:
return False
if const.DISC_GENERIC_DEVICE_CLASS in schema and not eq_or_in(
node.node_generic, schema[const.DISC_GENERIC_DEVICE_CLASS]
):
return False
if const.DISC_SPECIFIC_DEVICE_CLASS in schema and not eq_or_in(
node.node_specific, schema[const.DISC_SPECIFIC_DEVICE_CLASS]
):
return False
return True
def check_value_schema(value, schema):
"""Check if the value matches the passed value schema."""
if const.DISC_COMMAND_CLASS in schema and not eq_or_in(
value.parent.command_class_id, schema[const.DISC_COMMAND_CLASS]
):
return False
if const.DISC_TYPE in schema and not eq_or_in(value.type, schema[const.DISC_TYPE]):
return False
if const.DISC_GENRE in schema and not eq_or_in(
value.genre, schema[const.DISC_GENRE]
):
return False
if const.DISC_INDEX in schema and not eq_or_in(
value.index, schema[const.DISC_INDEX]
):
return False
if const.DISC_INSTANCE in schema and not eq_or_in(
value.instance, schema[const.DISC_INSTANCE]
):
return False
return True
def eq_or_in(val, options):
"""Return True if options contains value or if value is equal to options."""
return val in options if isinstance(options, tuple) else val == options
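# Behaviour of eq_or_in, which lets schema entries be either a single value
# or a tuple of alternatives:
#
#     eq_or_in(ValueType.BOOL, (ValueType.BOOL, ValueType.LIST))  # True
#     eq_or_in(ValueType.BOOL, ValueType.BOOL)                    # True
#     eq_or_in(ValueType.BOOL, ValueType.LIST)                    # False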
|
from requests import ConnectTimeout
from homeassistant.components.camera.const import DOMAIN as CAMERA_DOMAIN
from homeassistant.components.canary.const import CONF_FFMPEG_ARGUMENTS, DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from homeassistant.setup import async_setup_component
from . import YAML_CONFIG, init_integration
from tests.async_mock import patch
async def test_import_from_yaml(hass, canary) -> None:
"""Test import from YAML."""
with patch(
"homeassistant.components.canary.async_setup_entry",
return_value=True,
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: YAML_CONFIG})
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].data[CONF_USERNAME] == "test-username"
assert entries[0].data[CONF_PASSWORD] == "test-password"
assert entries[0].data[CONF_TIMEOUT] == 5
async def test_import_from_yaml_ffmpeg(hass, canary) -> None:
"""Test import from YAML with ffmpeg arguments."""
with patch(
"homeassistant.components.canary.async_setup_entry",
return_value=True,
):
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: YAML_CONFIG,
CAMERA_DOMAIN: [{"platform": DOMAIN, CONF_FFMPEG_ARGUMENTS: "-v"}],
},
)
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].data[CONF_USERNAME] == "test-username"
assert entries[0].data[CONF_PASSWORD] == "test-password"
assert entries[0].data[CONF_TIMEOUT] == 5
assert entries[0].data.get(CONF_FFMPEG_ARGUMENTS) == "-v"
async def test_unload_entry(hass, canary):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert entry
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
async def test_async_setup_raises_entry_not_ready(hass, canary):
"""Test that it throws ConfigEntryNotReady when exception occurs during setup."""
canary.side_effect = ConnectTimeout()
entry = await init_integration(hass)
assert entry
assert entry.state == ENTRY_STATE_SETUP_RETRY
|
import json
import threading
from absl import flags
from perfkitbenchmarker import network
from perfkitbenchmarker import providers
from perfkitbenchmarker import resource
from perfkitbenchmarker.providers.rackspace import util
FLAGS = flags.FLAGS
INGRESS = 'ingress'
EGRESS = 'egress'
SEC_GROUP_DIRECTIONS = frozenset([INGRESS, EGRESS])
IPV4 = 'ipv4'
IPV6 = 'ipv6'
ETHER_TYPES = frozenset([IPV4, IPV6])
TCP = 'tcp'
UDP = 'udp'
ICMP = 'icmp'
SEC_GROUP_PROTOCOLS = frozenset([TCP, UDP, ICMP])
PORT_RANGE_MIN = '1'
PORT_RANGE_MAX = '65535'
PUBLIC_NET_ID = '00000000-0000-0000-0000-000000000000'
SERVICE_NET_ID = '11111111-1111-1111-1111-111111111111'
DEFAULT_SUBNET_CIDR = '192.168.0.0/16'
SSH_PORT = 22
class RackspaceSecurityGroup(resource.BaseResource):
"""An object representing a Rackspace Security Group."""
def __init__(self, name):
super(RackspaceSecurityGroup, self).__init__()
self.name = name
self.id = None
def _Create(self):
cmd = util.RackCLICommand(self, 'networks', 'security-group', 'create')
cmd.flags['name'] = self.name
stdout, stderr, _ = cmd.Issue()
resp = json.loads(stdout)
self.id = resp['ID']
def _Delete(self):
if self.id is None:
return
cmd = util.RackCLICommand(self, 'networks', 'security-group', 'delete')
cmd.flags['id'] = self.id
cmd.Issue()
def _Exists(self):
if self.id is None:
return False
cmd = util.RackCLICommand(self, 'networks', 'security-group', 'get')
cmd.flags['id'] = self.id
stdout, stderr, _ = cmd.Issue()
return not stderr
class RackspaceSecurityGroupRule(resource.BaseResource):
"""An object representing a Security Group Rule."""
def __init__(self, sec_group_rule_name, sec_group_id, direction=INGRESS,
ip_ver=IPV4, protocol=TCP, port_range_min=PORT_RANGE_MIN,
port_range_max=PORT_RANGE_MAX, source_cidr=None):
super(RackspaceSecurityGroupRule, self).__init__()
self.id = None
self.name = sec_group_rule_name
self.sec_group_id = sec_group_id
assert direction in SEC_GROUP_DIRECTIONS
self.direction = direction
assert ip_ver in ETHER_TYPES
self.ip_ver = ip_ver
assert protocol in SEC_GROUP_PROTOCOLS
self.protocol = protocol
assert (int(PORT_RANGE_MIN) <= int(port_range_min) <= int(PORT_RANGE_MAX))
self.port_range_min = port_range_min
assert (int(PORT_RANGE_MIN) <= int(port_range_max) <= int(PORT_RANGE_MAX))
self.port_range_max = port_range_max
assert int(port_range_min) <= int(port_range_max)
self.source_cidr = source_cidr
def __eq__(self, other):
# Name does not matter
return (self.sec_group_id == other.sec_group_id and
self.direction == other.direction and
self.ip_ver == other.ip_ver and
self.protocol == other.protocol and
self.source_cidr == other.source_cidr)
def _Create(self):
cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'create')
cmd.flags['security-group-id'] = self.sec_group_id
cmd.flags['direction'] = self.direction
cmd.flags['ether-type'] = self.ip_ver
cmd.flags['protocol'] = self.protocol
cmd.flags['port-range-min'] = self.port_range_min
cmd.flags['port-range-max'] = self.port_range_max
if self.source_cidr:
cmd.flags['remote-ip-prefix'] = self.source_cidr
stdout, stderr, _ = cmd.Issue()
resp = json.loads(stdout)
self.id = resp['ID']
def _Delete(self):
if self.id is None:
return
cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'delete')
cmd.flags['id'] = self.id
cmd.Issue()
def _Exists(self):
if self.id is None:
return False
cmd = util.RackCLICommand(self, 'networks', 'security-group-rule', 'get')
cmd.flags['id'] = self.id
stdout, stderr, _ = cmd.Issue()
return not stderr
class RackspaceSubnet(resource.BaseResource):
"""An object that represents a Rackspace Subnet,"""
def __init__(self, network_id, cidr, ip_ver, name=None, tenant_id=None):
super(RackspaceSubnet, self).__init__()
self.id = None
self.network_id = network_id
self.cidr = cidr
self.ip_ver = ip_ver
self.name = name
self.tenant_id = tenant_id
def _Create(self):
cmd = util.RackCLICommand(self, 'networks', 'subnet', 'create')
cmd.flags['network-id'] = self.network_id
cmd.flags['cidr'] = self.cidr
cmd.flags['ip-version'] = self.ip_ver
if self.name:
cmd.flags['name'] = self.name
if self.tenant_id:
cmd.flags['tenant-id'] = self.tenant_id
stdout, stderr, _ = cmd.Issue()
resp = json.loads(stdout)
self.id = resp['ID']
def _Delete(self):
if self.id is None:
return
cmd = util.RackCLICommand(self, 'networks', 'subnet', 'delete')
cmd.flags['id'] = self.id
cmd.Issue()
def _Exists(self):
if self.id is None:
return False
cmd = util.RackCLICommand(self, 'networks', 'subnet', 'get')
cmd.flags['id'] = self.id
stdout, stderr, _ = cmd.Issue()
return not stderr
class RackspaceNetworkSpec(network.BaseNetworkSpec):
"""Object containing the information needed to create a Rackspace network."""
def __init__(self, tenant_id=None, region=None, **kwargs):
super(RackspaceNetworkSpec, self).__init__(**kwargs)
self.tenant_id = tenant_id
self.region = region
class RackspaceNetworkResource(resource.BaseResource):
"""Object representing a Rackspace Network Resource."""
def __init__(self, name, tenant_id=None):
super(RackspaceNetworkResource, self).__init__()
self.name = name
self.tenant_id = tenant_id
self.id = None
def _Create(self):
cmd = util.RackCLICommand(self, 'networks', 'network', 'create')
cmd.flags['name'] = self.name
if self.tenant_id:
cmd.flags['tenant-id'] = self.tenant_id
stdout, _, _ = cmd.Issue()
resp = json.loads(stdout)
if resp['ID']:
self.id = resp['ID']
def _Delete(self):
if self.id is None:
return
cmd = util.RackCLICommand(self, 'networks', 'network', 'delete')
cmd.flags['id'] = self.id
cmd.Issue()
def _Exists(self):
if self.id is None:
return False
cmd = util.RackCLICommand(self, 'networks', 'network', 'get')
cmd.flags['id'] = self.id
stdout, stderr, _ = cmd.Issue()
return not stderr
class RackspaceNetwork(network.BaseNetwork):
"""An object representing a Rackspace Network."""
CLOUD = providers.RACKSPACE
def __init__(self, network_spec):
super(RackspaceNetwork, self).__init__(network_spec)
self.tenant_id = network_spec.tenant_id
name = FLAGS.rackspace_network_name or 'pkb-network-%s' % FLAGS.run_uri
self.network_resource = RackspaceNetworkResource(name, self.tenant_id)
self.subnet = RackspaceSubnet(self.network_resource.id, DEFAULT_SUBNET_CIDR,
ip_ver='4', name='subnet-%s' % name,
tenant_id=self.tenant_id)
self.security_group = RackspaceSecurityGroup('default-internal-%s' % name)
self.default_firewall_rules = []
@staticmethod
def _GetNetworkSpecFromVm(vm):
return RackspaceNetworkSpec(tenant_id=vm.tenant_id, region=vm.zone)
@classmethod
def _GetKeyFromNetworkSpec(cls, spec):
return (cls.CLOUD, spec.tenant_id, spec.region)
def Create(self):
if FLAGS.rackspace_network_name is None:
self.network_resource.Create()
self.subnet.Create()
self.security_group.Create()
self.default_firewall_rules = self._GenerateDefaultRules(
self.security_group.id, self.network_resource.name)
for rule in self.default_firewall_rules:
rule.Create()
def Delete(self):
if FLAGS.rackspace_network_name is None:
for rule in self.default_firewall_rules:
rule.Delete()
self.security_group.Delete()
self.subnet.Delete()
self.network_resource.Delete()
def _GenerateDefaultRules(self, sec_group_id, network_name):
firewall_rules = [
RackspaceSecurityGroupRule(
sec_group_rule_name='tcp-default-internal-%s' % network_name,
sec_group_id=sec_group_id,
direction=INGRESS,
ip_ver=IPV4,
protocol=TCP),
RackspaceSecurityGroupRule(
sec_group_rule_name='udp-default-internal-%s' % network_name,
sec_group_id=sec_group_id,
direction=INGRESS,
ip_ver=IPV4, protocol=UDP),
RackspaceSecurityGroupRule(
sec_group_rule_name='icmp-default-internal-%s' % network_name,
sec_group_id=sec_group_id,
direction=INGRESS,
ip_ver=IPV4, protocol=ICMP)]
return firewall_rules
class RackspaceFirewall(network.BaseFirewall):
"""An object representing a Rackspace Security Group applied to PublicNet and
ServiceNet."""
CLOUD = providers.RACKSPACE
def __init__(self):
# TODO(meteorfox) Support a Firewall per region
self._lock = threading.Lock() # Guards security-group creation/deletion
self.firewall_rules = {}
def AllowPort(self, vm, start_port, end_port=None, source_range=None):
# At Rackspace all ports are open by default
# TODO(meteorfox) Implement security groups support
if FLAGS.rackspace_use_security_group:
raise NotImplementedError()
def DisallowAllPorts(self):
if FLAGS.rackspace_use_security_group:
raise NotImplementedError()
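# Illustrative sketch (the ids and names are hypothetical): the default
# internal rules above are built the same way an SSH-only ingress rule
# would be:
#
#     rule = RackspaceSecurityGroupRule(
#         sec_group_rule_name='ssh-ingress-example',
#         sec_group_id='example-group-id',
#         direction=INGRESS,
#         ip_ver=IPV4,
#         protocol=TCP,
#         port_range_min=str(SSH_PORT),
#         port_range_max=str(SSH_PORT))
#     rule.Create()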
|
from json import loads
from homeassistant.components.advantage_air.const import (
ADVANTAGE_AIR_STATE_CLOSE,
ADVANTAGE_AIR_STATE_OPEN,
)
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_DAMPER,
DOMAIN as COVER_DOMAIN,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
SERVICE_SET_COVER_POSITION,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN
from tests.components.advantage_air import (
TEST_SET_RESPONSE,
TEST_SET_URL,
TEST_SYSTEM_DATA,
TEST_SYSTEM_URL,
add_mock_config,
)
async def test_cover_async_setup_entry(hass, aioclient_mock):
"""Test climate setup without sensors."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
aioclient_mock.get(
TEST_SET_URL,
text=TEST_SET_RESPONSE,
)
await add_mock_config(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
assert len(aioclient_mock.mock_calls) == 1
# Test Cover Zone Entity
entity_id = "cover.zone_open_without_sensor"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_OPEN
assert state.attributes.get("device_class") == DEVICE_CLASS_DAMPER
assert state.attributes.get("current_position") == 100
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac2-z01"
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: [entity_id]},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 3
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_CLOSE
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_OPEN_COVER,
{ATTR_ENTITY_ID: [entity_id]},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 5
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_OPEN
assert data["ac2"]["zones"]["z01"]["value"] == 100
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: [entity_id], ATTR_POSITION: 50},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 7
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac2"]["zones"]["z01"]["value"] == 50
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_SET_COVER_POSITION,
{ATTR_ENTITY_ID: [entity_id], ATTR_POSITION: 0},
blocking=True,
)
assert len(aioclient_mock.mock_calls) == 9
assert aioclient_mock.mock_calls[-2][0] == "GET"
assert aioclient_mock.mock_calls[-2][1].path == "/setAircon"
data = loads(aioclient_mock.mock_calls[-2][1].query["json"])
assert data["ac2"]["zones"]["z01"]["state"] == ADVANTAGE_AIR_STATE_CLOSE
assert aioclient_mock.mock_calls[-1][0] == "GET"
assert aioclient_mock.mock_calls[-1][1].path == "/getSystemData"
|
import asyncio
import logging
from typing import Tuple
import discord
from redbot.core import Config, checks, commands
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import box
from .announcer import Announcer
from .converters import SelfRole
log = logging.getLogger("red.admin")
T_ = Translator("Admin", __file__)
_ = lambda s: s  # Temporary identity so the strings below are marked for extraction; the real translator is restored after them.
GENERIC_FORBIDDEN = _(
"I attempted to do something that Discord denied me permissions for."
" Your command failed to successfully complete."
)
HIERARCHY_ISSUE_ADD = _(
"I can not give {role.name} to {member.display_name}"
" because that role is higher than or equal to my highest role"
" in the Discord hierarchy."
)
HIERARCHY_ISSUE_REMOVE = _(
"I can not remove {role.name} from {member.display_name}"
" because that role is higher than or equal to my highest role"
" in the Discord hierarchy."
)
ROLE_HIERARCHY_ISSUE = _(
"I can not edit {role.name}"
" because that role is higher than my or equal to highest role"
" in the Discord hierarchy."
)
USER_HIERARCHY_ISSUE_ADD = _(
"I can not let you give {role.name} to {member.display_name}"
" because that role is higher than or equal to your highest role"
" in the Discord hierarchy."
)
USER_HIERARCHY_ISSUE_REMOVE = _(
"I can not let you remove {role.name} from {member.display_name}"
" because that role is higher than or equal to your highest role"
" in the Discord hierarchy."
)
ROLE_USER_HIERARCHY_ISSUE = _(
"I can not let you edit {role.name}"
" because that role is higher than or equal to your highest role"
" in the Discord hierarchy."
)
NEED_MANAGE_ROLES = _("I need manage roles permission to do that.")
RUNNING_ANNOUNCEMENT = _(
"I am already announcing something. If you would like to make a"
" different announcement please use `{prefix}announce cancel`"
" first."
)
_ = T_
@cog_i18n(_)
class Admin(commands.Cog):
"""A collection of server administration utilities."""
def __init__(self):
self.config = Config.get_conf(self, 8237492837454039, force_registration=True)
self.config.register_global(serverlocked=False, schema_version=0)
self.config.register_guild(
announce_channel=None, # Integer ID
selfroles=[], # List of integer ID's
)
self.__current_announcer = None
self._ready = asyncio.Event()
asyncio.create_task(self.handle_migrations())
        # As this is a data migration, don't store this for cancellation.
async def cog_before_invoke(self, ctx: commands.Context):
await self._ready.wait()
async def red_delete_data_for_user(self, **kwargs):
""" Nothing to delete """
return
async def handle_migrations(self):
lock = self.config.get_guilds_lock()
async with lock:
# This prevents the edge case of someone loading admin,
# unloading it, loading it again during a migration
current_schema = await self.config.schema_version()
if current_schema == 0:
await self.migrate_config_from_0_to_1()
await self.config.schema_version.set(1)
self._ready.set()
async def migrate_config_from_0_to_1(self):
all_guilds = await self.config.all_guilds()
for guild_id, guild_data in all_guilds.items():
if guild_data.get("announce_ignore", False):
async with self.config.guild_from_id(guild_id).all(
acquire_lock=False
) as guild_config:
guild_config.pop("announce_channel", None)
guild_config.pop("announce_ignore", None)
def cog_unload(self):
try:
self.__current_announcer.cancel()
except AttributeError:
pass
def is_announcing(self) -> bool:
"""
Is the bot currently announcing something?
        :return: True if an announcement is currently active, otherwise False.
"""
if self.__current_announcer is None:
return False
return self.__current_announcer.active or False
@staticmethod
def pass_hierarchy_check(ctx: commands.Context, role: discord.Role) -> bool:
"""
Determines if the bot has a higher role than the given one.
        :param ctx: the command context.
        :param role: Role object.
        :return: True if the bot's top role is above ``role``.
"""
return ctx.guild.me.top_role > role
@staticmethod
def pass_user_hierarchy_check(ctx: commands.Context, role: discord.Role) -> bool:
"""
Determines if a user is allowed to add/remove/edit the given role.
        :param ctx: the command context.
        :param role: the role being added, removed, or edited.
        :return: True if the author outranks ``role`` or owns the guild.
"""
return ctx.author.top_role > role or ctx.author == ctx.guild.owner
async def _addrole(
self, ctx: commands.Context, member: discord.Member, role: discord.Role, *, check_user=True
):
if role in member.roles:
await ctx.send(
_("{member.display_name} already has the role {role.name}.").format(
role=role, member=member
)
)
return
if check_user and not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(_(USER_HIERARCHY_ISSUE_ADD).format(role=role, member=member))
return
if not self.pass_hierarchy_check(ctx, role):
await ctx.send(_(HIERARCHY_ISSUE_ADD).format(role=role, member=member))
return
if not ctx.guild.me.guild_permissions.manage_roles:
await ctx.send(_(NEED_MANAGE_ROLES))
return
try:
await member.add_roles(role)
except discord.Forbidden:
await ctx.send(_(GENERIC_FORBIDDEN))
else:
await ctx.send(
_("I successfully added {role.name} to {member.display_name}").format(
role=role, member=member
)
)
async def _removerole(
self, ctx: commands.Context, member: discord.Member, role: discord.Role, *, check_user=True
):
if role not in member.roles:
await ctx.send(
_("{member.display_name} does not have the role {role.name}.").format(
role=role, member=member
)
)
return
if check_user and not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(_(USER_HIERARCHY_ISSUE_REMOVE).format(role=role, member=member))
return
if not self.pass_hierarchy_check(ctx, role):
await ctx.send(_(HIERARCHY_ISSUE_REMOVE).format(role=role, member=member))
return
if not ctx.guild.me.guild_permissions.manage_roles:
await ctx.send(_(NEED_MANAGE_ROLES))
return
try:
await member.remove_roles(role)
except discord.Forbidden:
await ctx.send(_(GENERIC_FORBIDDEN))
else:
await ctx.send(
_("I successfully removed {role.name} from {member.display_name}").format(
role=role, member=member
)
)
@commands.command()
@commands.guild_only()
@checks.admin_or_permissions(manage_roles=True)
async def addrole(
self, ctx: commands.Context, rolename: discord.Role, *, user: discord.Member = None
):
"""
Add a role to a user.
Use double quotes if the role contains spaces.
If user is left blank it defaults to the author of the command.
"""
if user is None:
user = ctx.author
await self._addrole(ctx, user, rolename)
@commands.command()
@commands.guild_only()
@checks.admin_or_permissions(manage_roles=True)
async def removerole(
self, ctx: commands.Context, rolename: discord.Role, *, user: discord.Member = None
):
"""
Remove a role from a user.
Use double quotes if the role contains spaces.
If user is left blank it defaults to the author of the command.
"""
if user is None:
user = ctx.author
await self._removerole(ctx, user, rolename)
@commands.group()
@commands.guild_only()
@checks.admin_or_permissions(manage_roles=True)
async def editrole(self, ctx: commands.Context):
"""Edit role settings."""
pass
@editrole.command(name="colour", aliases=["color"])
async def editrole_colour(
self, ctx: commands.Context, role: discord.Role, value: discord.Colour
):
"""
Edit a role's colour.
Use double quotes if the role contains spaces.
Colour must be in hexadecimal format.
[Online colour picker](http://www.w3schools.com/colors/colors_picker.asp)
Examples:
`[p]editrole colour "The Transistor" #ff0000`
`[p]editrole colour Test #ff9900`
"""
author = ctx.author
reason = "{}({}) changed the colour of role '{}'".format(author.name, author.id, role.name)
if not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(_(ROLE_USER_HIERARCHY_ISSUE).format(role=role))
return
if not self.pass_hierarchy_check(ctx, role):
await ctx.send(_(ROLE_HIERARCHY_ISSUE).format(role=role))
return
if not ctx.guild.me.guild_permissions.manage_roles:
await ctx.send(_(NEED_MANAGE_ROLES))
return
try:
await role.edit(reason=reason, color=value)
except discord.Forbidden:
await ctx.send(_(GENERIC_FORBIDDEN))
else:
log.info(reason)
await ctx.send(_("Done."))
@editrole.command(name="name")
async def edit_role_name(self, ctx: commands.Context, role: discord.Role, name: str):
"""
Edit a role's name.
Use double quotes if the role or the name contain spaces.
Example:
`[p]editrole name \"The Transistor\" Test`
"""
author = ctx.message.author
old_name = role.name
reason = "{}({}) changed the name of role '{}' to '{}'".format(
author.name, author.id, old_name, name
)
if not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(_(ROLE_USER_HIERARCHY_ISSUE).format(role=role))
return
if not self.pass_hierarchy_check(ctx, role):
await ctx.send(_(ROLE_HIERARCHY_ISSUE).format(role=role))
return
if not ctx.guild.me.guild_permissions.manage_roles:
await ctx.send(_(NEED_MANAGE_ROLES))
return
try:
await role.edit(reason=reason, name=name)
except discord.Forbidden:
await ctx.send(_(GENERIC_FORBIDDEN))
else:
log.info(reason)
await ctx.send(_("Done."))
@commands.group(invoke_without_command=True)
@checks.is_owner()
async def announce(self, ctx: commands.Context, *, message: str):
"""Announce a message to all servers the bot is in."""
if not self.is_announcing():
announcer = Announcer(ctx, message, config=self.config)
announcer.start()
self.__current_announcer = announcer
await ctx.send(_("The announcement has begun."))
else:
prefix = ctx.clean_prefix
await ctx.send(_(RUNNING_ANNOUNCEMENT).format(prefix=prefix))
@announce.command(name="cancel")
async def announce_cancel(self, ctx):
"""Cancel a running announce."""
if not self.is_announcing():
await ctx.send(_("There is no currently running announcement."))
return
self.__current_announcer.cancel()
await ctx.send(_("The current announcement has been cancelled."))
@commands.group()
@commands.guild_only()
@checks.guildowner_or_permissions(administrator=True)
async def announceset(self, ctx):
"""Change how announcements are sent in this guild."""
pass
@announceset.command(name="channel")
async def announceset_channel(self, ctx, *, channel: discord.TextChannel = None):
"""
Change the channel where the bot will send announcements.
If channel is left blank it defaults to the current channel.
"""
if channel is None:
channel = ctx.channel
await self.config.guild(ctx.guild).announce_channel.set(channel.id)
await ctx.send(
_("The announcement channel has been set to {channel.mention}").format(channel=channel)
)
@announceset.command(name="clearchannel")
async def announceset_clear_channel(self, ctx):
"""Unsets the channel for announcements."""
await self.config.guild(ctx.guild).announce_channel.clear()
await ctx.tick()
    async def _valid_selfroles(self, guild: discord.Guild) -> Tuple[discord.Role, ...]:
"""
Returns a tuple of valid selfroles
        :param guild: the guild whose configured selfroles are checked.
        :return: tuple of roles that still exist in the guild.
"""
selfrole_ids = set(await self.config.guild(guild).selfroles())
guild_roles = guild.roles
valid_roles = tuple(r for r in guild_roles if r.id in selfrole_ids)
valid_role_ids = set(r.id for r in valid_roles)
if selfrole_ids != valid_role_ids:
await self.config.guild(guild).selfroles.set(list(valid_role_ids))
# noinspection PyTypeChecker
return valid_roles
@commands.guild_only()
@commands.group()
async def selfrole(self, ctx: commands.Context):
"""Apply selfroles."""
pass
@selfrole.command(name="add")
async def selfrole_add(self, ctx: commands.Context, *, selfrole: SelfRole):
"""
Add a selfrole to yourself.
Server admins must have configured the role as user settable.
NOTE: The role is case sensitive!
"""
# noinspection PyTypeChecker
await self._addrole(ctx, ctx.author, selfrole, check_user=False)
@selfrole.command(name="remove")
async def selfrole_remove(self, ctx: commands.Context, *, selfrole: SelfRole):
"""
Remove a selfrole from yourself.
Server admins must have configured the role as user settable.
NOTE: The role is case sensitive!
"""
# noinspection PyTypeChecker
await self._removerole(ctx, ctx.author, selfrole, check_user=False)
@selfrole.command(name="list")
async def selfrole_list(self, ctx: commands.Context):
"""
Lists all available selfroles.
"""
selfroles = await self._valid_selfroles(ctx.guild)
fmt_selfroles = "\n".join(["+ " + r.name for r in selfroles])
if not fmt_selfroles:
await ctx.send("There are currently no selfroles.")
return
msg = _("Available Selfroles:\n{selfroles}").format(selfroles=fmt_selfroles)
await ctx.send(box(msg, "diff"))
@commands.group()
@checks.admin_or_permissions(manage_roles=True)
async def selfroleset(self, ctx: commands.Context):
"""Manage selfroles."""
pass
@selfroleset.command(name="add")
async def selfroleset_add(self, ctx: commands.Context, *, role: discord.Role):
"""
Add a role to the list of available selfroles.
NOTE: The role is case sensitive!
"""
if not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(
_(
"I cannot let you add {role.name} as a selfrole because that role is higher than or equal to your highest role in the Discord hierarchy."
).format(role=role)
)
return
async with self.config.guild(ctx.guild).selfroles() as curr_selfroles:
if role.id not in curr_selfroles:
curr_selfroles.append(role.id)
await ctx.send(_("Added."))
return
await ctx.send(_("That role is already a selfrole."))
@selfroleset.command(name="remove")
async def selfroleset_remove(self, ctx: commands.Context, *, role: SelfRole):
"""
Remove a role from the list of available selfroles.
NOTE: The role is case sensitive!
"""
if not self.pass_user_hierarchy_check(ctx, role):
await ctx.send(
_(
"I cannot let you remove {role.name} from being a selfrole because that role is higher than or equal to your highest role in the Discord hierarchy."
).format(role=role)
)
return
async with self.config.guild(ctx.guild).selfroles() as curr_selfroles:
curr_selfroles.remove(role.id)
await ctx.send(_("Removed."))
@commands.command()
@checks.is_owner()
async def serverlock(self, ctx: commands.Context):
"""Lock a bot to its current servers only."""
serverlocked = await self.config.serverlocked()
await self.config.serverlocked.set(not serverlocked)
if serverlocked:
await ctx.send(_("The bot is no longer serverlocked."))
else:
await ctx.send(_("The bot is now serverlocked."))
# region Event Handlers
@commands.Cog.listener()
async def on_guild_join(self, guild: discord.Guild):
if await self.config.serverlocked():
await guild.leave()
# endregion
|
import pytest
import pytest_httpbin
import vcr
from vcr.patch import force_reset
from assertions import assert_cassette_empty, assert_is_json
urllib3 = pytest.importorskip("urllib3")
@pytest.fixture(scope="module")
def verify_pool_mgr():
return urllib3.PoolManager(
cert_reqs="CERT_REQUIRED", ca_certs=pytest_httpbin.certs.where() # Force certificate check.
)
@pytest.fixture(scope="module")
def pool_mgr():
return urllib3.PoolManager(cert_reqs="CERT_NONE")
def test_status_code(httpbin_both, tmpdir, verify_pool_mgr):
"""Ensure that we can read the status code"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
status_code = verify_pool_mgr.request("GET", url).status
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
assert status_code == verify_pool_mgr.request("GET", url).status
def test_headers(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can read the headers back"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
headers = verify_pool_mgr.request("GET", url).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
assert headers == verify_pool_mgr.request("GET", url).headers
def test_body(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure the responses are all identical enough"""
url = httpbin_both.url + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
content = verify_pool_mgr.request("GET", url).data
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
assert content == verify_pool_mgr.request("GET", url).data
def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
one = verify_pool_mgr.request("GET", url, headers=headers)
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
two = verify_pool_mgr.request("GET", url, headers=headers)
assert one.data == two.data
assert one.status == two.status
def test_auth_failed(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
one = verify_pool_mgr.request("GET", url, headers=headers)
two = verify_pool_mgr.request("GET", url, headers=headers)
assert one.data == two.data
assert one.status == two.status == 401
def test_post(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
req1 = verify_pool_mgr.request("POST", url, data).data
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
req2 = verify_pool_mgr.request("POST", url, data).data
assert req1 == req2
def test_redirects(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can handle redirects"""
url = httpbin_both.url + "/redirect-to?url=bytes/1024"
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
content = verify_pool_mgr.request("GET", url).data
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))) as cass:
assert content == verify_pool_mgr.request("GET", url).data
# Ensure that we've now cached *two* responses. One for the redirect
# and one for the final fetch
assert len(cass) == 2
assert cass.play_count == 2
def test_cross_scheme(tmpdir, httpbin, httpbin_secure, verify_pool_mgr):
"""Ensure that requests between schemes are treated separately"""
# First fetch a url under http, and then again under https and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
verify_pool_mgr.request("GET", httpbin_secure.url)
verify_pool_mgr.request("GET", httpbin.url)
assert cass.play_count == 0
assert len(cass) == 2
def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
"""
    Ensure that urllib3 is able to automatically decompress
    the response body
"""
url = httpbin_both.url + "/gzip"
response = verify_pool_mgr.request("GET", url)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = verify_pool_mgr.request("GET", url)
assert_is_json(response.data)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
assert_is_json(response.data)
def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):
with vcr.use_cassette(str(tmpdir.join("cert_validation_disabled.yaml"))):
pool_mgr.request("GET", httpbin_secure.url)
def test_urllib3_force_reset():
cpool = urllib3.connectionpool
http_original = cpool.HTTPConnection
https_original = cpool.HTTPSConnection
verified_https_original = cpool.VerifiedHTTPSConnection
with vcr.use_cassette(path="test"):
first_cassette_HTTPConnection = cpool.HTTPConnection
first_cassette_HTTPSConnection = cpool.HTTPSConnection
first_cassette_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
with force_reset():
assert cpool.HTTPConnection is http_original
assert cpool.HTTPSConnection is https_original
assert cpool.VerifiedHTTPSConnection is verified_https_original
assert cpool.HTTPConnection is first_cassette_HTTPConnection
assert cpool.HTTPSConnection is first_cassette_HTTPSConnection
assert cpool.VerifiedHTTPSConnection is first_cassette_VerifiedHTTPSConnection
|
import cerberus
import cerberus.errors
base_schema = {
'ansible': {
'type': 'dict',
'schema': {
'config_file': {
'type': 'string',
},
'playbook': {
'type': 'string',
},
'raw_env_vars': {
'type': 'dict',
'keyschema': {
'type': 'string',
'regex': '^[A-Z0-9_-]+$',
},
},
'extra_vars': {
'type': 'string',
},
'verbose': {
'type': 'boolean',
},
'become': {
'type': 'boolean',
},
'tags': {
'type': 'string',
},
}
},
'driver': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
},
}
},
'vagrant': {
'type': 'dict',
'schema': {
'platforms': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
},
'box': {
'type': 'string',
},
'box_version': {
'type': 'string',
},
'box_url': {
'type': 'string',
},
}
}
},
'providers': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
},
'type': {
'type': 'string',
},
'options': {
'type': 'dict',
},
}
}
},
'instances': {
'type': 'list',
'schema': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
},
'ansible_groups': {
'type': 'list',
'schema': {
'type': 'string',
}
},
'interfaces': {
'type': 'list',
'schema': {
'type': 'dict',
}
},
'raw_config_args': {
'type': 'list',
'schema': {
'type': 'string',
}
},
}
}
},
}
},
'verifier': {
'type': 'dict',
'schema': {
'name': {
'type': 'string',
},
'options': {
'type': 'dict',
},
}
},
}
def validate(c):
    """Validate config dict `c` against `base_schema`; return cerberus errors."""
    v = cerberus.Validator()
    v.validate(c, base_schema)
    return v.errors
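# Example (illustrative values): a config that conforms to `base_schema`
# yields an empty error dict; a bad type yields cerberus error details.
if __name__ == '__main__':
    sample = {
        'ansible': {'playbook': 'playbook.yml', 'verbose': True},
        'driver': {'name': 'vagrant'},
    }
    print(validate(sample))  # {} when valid
    print(validate({'ansible': {'verbose': 'yes'}}))  # type error for 'verbose'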
|
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
CONF_ACCESS_KEY = "access_key"
CONF_APP_ID = "app_id"
DATA_TTN = "data_thethingsnetwork"
DOMAIN = "thethingsnetwork"
TTN_ACCESS_KEY = "ttn_access_key"
TTN_APP_ID = "ttn_app_id"
TTN_DATA_STORAGE_URL = (
"https://{app_id}.data.thethingsnetwork.org/{endpoint}/{device_id}"
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_APP_ID): cv.string,
vol.Required(CONF_ACCESS_KEY): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Initialize of The Things Network component."""
conf = config[DOMAIN]
app_id = conf.get(CONF_APP_ID)
access_key = conf.get(CONF_ACCESS_KEY)
hass.data[DATA_TTN] = {TTN_ACCESS_KEY: access_key, TTN_APP_ID: app_id}
return True
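# Example configuration.yaml entry (illustrative app id and key):
#
#     thethingsnetwork:
#       app_id: my_ttn_app
#       access_key: ttn-account-v2.abc123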
|
import nltk
from .unit import Unit
class Stemming(Unit):
"""
Process unit for token stemming.
:param stemmer: stemmer to use, `porter` or `lancaster`.
"""
def __init__(self, stemmer='porter'):
"""Initialization."""
self.stemmer = stemmer
def transform(self, input_: list) -> list:
"""
Reducing inflected words to their word stem, base or root form.
        :param input_: list of strings to be stemmed.
"""
if self.stemmer == 'porter':
porter_stemmer = nltk.stem.PorterStemmer()
return [porter_stemmer.stem(token) for token in input_]
        elif self.stemmer == 'lancaster':
            lancaster_stemmer = nltk.stem.lancaster.LancasterStemmer()
            return [lancaster_stemmer.stem(token) for token in input_]
        else:
            raise ValueError(
                'Not a supported stemmer type: {}'.format(
                    self.stemmer))
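# Example usage (illustrative; output shown is NLTK's PorterStemmer):
#
#     >>> unit = Stemming(stemmer='porter')
#     >>> unit.transform(['studies', 'studying', 'cried'])
#     ['studi', 'studi', 'cri']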
|
import sys
from unittest.mock import Mock
import twisted.internet
from twisted.trial import unittest
from autobahn.twisted import choosereactor
class ChooseReactorTests(unittest.TestCase):
def patch_reactor(self, name, new_reactor):
"""
Patch ``name`` so that Twisted will grab a fake reactor instead of
a real one.
"""
if hasattr(twisted.internet, name):
self.patch(twisted.internet, name, new_reactor)
        else:
            def _cleanup():
                delattr(twisted.internet, name)
            self.addCleanup(_cleanup)
            setattr(twisted.internet, name, new_reactor)
def patch_modules(self):
"""
Patch ``sys.modules`` so that Twisted believes there is no
installed reactor.
"""
old_modules = dict(sys.modules)
new_modules = dict(sys.modules)
del new_modules["twisted.internet.reactor"]
def _cleanup():
sys.modules = old_modules
self.addCleanup(_cleanup)
sys.modules = new_modules
def test_unknown(self):
"""
``install_optimal_reactor`` will use the default reactor if it is
unable to detect the platform it is running on.
"""
reactor_mock = Mock()
self.patch_reactor("selectreactor", reactor_mock)
self.patch(sys, "platform", "unknown")
# Emulate that a reactor has not been installed
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
def test_mac(self):
"""
``install_optimal_reactor`` will install KQueueReactor on
Darwin (OS X).
"""
reactor_mock = Mock()
self.patch_reactor("kqreactor", reactor_mock)
self.patch(sys, "platform", "darwin")
# Emulate that a reactor has not been installed
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
def test_win(self):
"""
``install_optimal_reactor`` will install IOCPReactor on Windows.
"""
if sys.platform != 'win32':
raise unittest.SkipTest('unit test requires Windows')
reactor_mock = Mock()
self.patch_reactor("iocpreactor", reactor_mock)
self.patch(sys, "platform", "win32")
# Emulate that a reactor has not been installed
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
def test_bsd(self):
"""
``install_optimal_reactor`` will install KQueueReactor on BSD.
"""
reactor_mock = Mock()
self.patch_reactor("kqreactor", reactor_mock)
self.patch(sys, "platform", "freebsd11")
# Emulate that a reactor has not been installed
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
def test_linux(self):
"""
``install_optimal_reactor`` will install EPollReactor on Linux.
"""
reactor_mock = Mock()
self.patch_reactor("epollreactor", reactor_mock)
self.patch(sys, "platform", "linux")
# Emulate that a reactor has not been installed
self.patch_modules()
choosereactor.install_optimal_reactor()
reactor_mock.install.assert_called_once_with()
|
import os
import sys
from coverage import env
from coverage.backward import litems, range # pylint: disable=redefined-builtin
from coverage.debug import short_stack
from coverage.disposition import FileDisposition
from coverage.misc import CoverageException, isolate_module
from coverage.pytracer import PyTracer
os = isolate_module(os)
try:
# Use the C extension code when we can, for speed.
from coverage.tracer import CTracer, CFileDisposition
except ImportError:
# Couldn't import the C extension, maybe it isn't built.
if os.getenv('COVERAGE_TEST_TRACER') == 'c':
# During testing, we use the COVERAGE_TEST_TRACER environment variable
# to indicate that we've fiddled with the environment to test this
# fallback code. If we thought we had a C tracer, but couldn't import
# it, then exit quickly and clearly instead of dribbling confusing
# errors. I'm using sys.exit here instead of an exception because an
# exception here causes all sorts of other noise in unittest.
sys.stderr.write("*** COVERAGE_TEST_TRACER is 'c' but can't import CTracer!\n")
sys.exit(1)
CTracer = None
class Collector(object):
"""Collects trace data.
Creates a Tracer object for each thread, since they track stack
information. Each Tracer points to the same shared data, contributing
traced data points.
When the Collector is started, it creates a Tracer for the current thread,
and installs a function to create Tracers for each new thread started.
When the Collector is stopped, all active Tracers are stopped.
Threads started while the Collector is stopped will never have Tracers
associated with them.
"""
# The stack of active Collectors. Collectors are added here when started,
# and popped when stopped. Collectors on the stack are paused when not
# the top, and resumed when they become the top again.
_collectors = []
# The concurrency settings we support here.
SUPPORTED_CONCURRENCIES = set(["greenlet", "eventlet", "gevent", "thread"])
def __init__(
self, should_trace, check_include, should_start_context, file_mapper,
timid, branch, warn, concurrency,
):
"""Create a collector.
`should_trace` is a function, taking a file name and a frame, and
        returning a `coverage.FileDisposition` object.
`check_include` is a function taking a file name and a frame. It returns
a boolean: True if the file should be traced, False if not.
`should_start_context` is a function taking a frame, and returning a
string. If the frame should be the start of a new context, the string
is the new context. If the frame should not be the start of a new
context, return None.
`file_mapper` is a function taking a filename, and returning a Unicode
filename. The result is the name that will be recorded in the data
file.
        If `timid` is true, then a slower, simpler trace function will be
        used. This is important for some environments where manipulation of
        tracing functions makes the faster, more sophisticated trace function
        not operate properly.
If `branch` is true, then branches will be measured. This involves
collecting data on which statements followed each other (arcs). Use
`get_arc_data` to get the arc data.
`warn` is a warning function, taking a single string message argument
and an optional slug argument which will be a string or None, to be
used if a warning needs to be issued.
`concurrency` is a list of strings indicating the concurrency libraries
in use. Valid values are "greenlet", "eventlet", "gevent", or "thread"
(the default). Of these four values, only one can be supplied. Other
values are ignored.
"""
self.should_trace = should_trace
self.check_include = check_include
self.should_start_context = should_start_context
self.file_mapper = file_mapper
self.warn = warn
self.branch = branch
self.threading = None
self.covdata = None
self.static_context = None
self.origin = short_stack()
self.concur_id_func = None
self.mapped_file_cache = {}
# We can handle a few concurrency options here, but only one at a time.
these_concurrencies = self.SUPPORTED_CONCURRENCIES.intersection(concurrency)
if len(these_concurrencies) > 1:
raise CoverageException("Conflicting concurrency settings: %s" % concurrency)
self.concurrency = these_concurrencies.pop() if these_concurrencies else ''
try:
if self.concurrency == "greenlet":
import greenlet
self.concur_id_func = greenlet.getcurrent
elif self.concurrency == "eventlet":
import eventlet.greenthread # pylint: disable=import-error,useless-suppression
self.concur_id_func = eventlet.greenthread.getcurrent
elif self.concurrency == "gevent":
import gevent # pylint: disable=import-error,useless-suppression
self.concur_id_func = gevent.getcurrent
elif self.concurrency == "thread" or not self.concurrency:
# It's important to import threading only if we need it. If
# it's imported early, and the program being measured uses
# gevent, then gevent's monkey-patching won't work properly.
import threading
self.threading = threading
else:
raise CoverageException("Don't understand concurrency=%s" % concurrency)
except ImportError:
raise CoverageException(
"Couldn't trace with concurrency=%s, the module isn't installed." % (
self.concurrency,
)
)
self.reset()
if timid:
# Being timid: use the simple Python trace function.
self._trace_class = PyTracer
else:
# Being fast: use the C Tracer if it is available, else the Python
# trace function.
self._trace_class = CTracer or PyTracer
if self._trace_class is CTracer:
self.file_disposition_class = CFileDisposition
self.supports_plugins = True
else:
self.file_disposition_class = FileDisposition
self.supports_plugins = False
def __repr__(self):
return "<Collector at 0x%x: %s>" % (id(self), self.tracer_name())
def use_data(self, covdata, context):
"""Use `covdata` for recording data."""
self.covdata = covdata
self.static_context = context
self.covdata.set_context(self.static_context)
def tracer_name(self):
"""Return the class name of the tracer we're using."""
return self._trace_class.__name__
def _clear_data(self):
"""Clear out existing data, but stay ready for more collection."""
        # We used to use self.data.clear(), but that would remove filename
# keys and data values that were still in use higher up the stack
# when we are called as part of switch_context.
for d in self.data.values():
d.clear()
for tracer in self.tracers:
tracer.reset_activity()
def reset(self):
"""Clear collected data, and prepare to collect more."""
# A dictionary mapping file names to dicts with line number keys (if not
# branch coverage), or mapping file names to dicts with line number
# pairs as keys (if branch coverage).
self.data = {}
# A dictionary mapping file names to file tracer plugin names that will
# handle them.
self.file_tracers = {}
self.disabled_plugins = set()
# The .should_trace_cache attribute is a cache from file names to
# coverage.FileDisposition objects, or None. When a file is first
# considered for tracing, a FileDisposition is obtained from
# Coverage.should_trace. Its .trace attribute indicates whether the
# file should be traced or not. If it should be, a plugin with dynamic
# file names can decide not to trace it based on the dynamic file name
# being excluded by the inclusion rules, in which case the
# FileDisposition will be replaced by None in the cache.
if env.PYPY:
import __pypy__ # pylint: disable=import-error
# Alex Gaynor said:
# should_trace_cache is a strictly growing key: once a key is in
# it, it never changes. Further, the keys used to access it are
# generally constant, given sufficient context. That is to say, at
# any given point _trace() is called, pypy is able to know the key.
# This is because the key is determined by the physical source code
# line, and that's invariant with the call site.
#
# This property of a dict with immutable keys, combined with
# call-site-constant keys is a match for PyPy's module dict,
# which is optimized for such workloads.
#
# This gives a 20% benefit on the workload described at
# https://bitbucket.org/pypy/pypy/issue/1871/10x-slower-than-cpython-under-coverage
self.should_trace_cache = __pypy__.newdict("module")
else:
self.should_trace_cache = {}
# Our active Tracers.
self.tracers = []
self._clear_data()
def _start_tracer(self):
"""Start a new Tracer object, and store it in self.tracers."""
tracer = self._trace_class()
tracer.data = self.data
tracer.trace_arcs = self.branch
tracer.should_trace = self.should_trace
tracer.should_trace_cache = self.should_trace_cache
tracer.warn = self.warn
if hasattr(tracer, 'concur_id_func'):
tracer.concur_id_func = self.concur_id_func
elif self.concur_id_func:
raise CoverageException(
"Can't support concurrency=%s with %s, only threads are supported" % (
self.concurrency, self.tracer_name(),
)
)
if hasattr(tracer, 'file_tracers'):
tracer.file_tracers = self.file_tracers
if hasattr(tracer, 'threading'):
tracer.threading = self.threading
if hasattr(tracer, 'check_include'):
tracer.check_include = self.check_include
if hasattr(tracer, 'should_start_context'):
tracer.should_start_context = self.should_start_context
tracer.switch_context = self.switch_context
if hasattr(tracer, 'disable_plugin'):
tracer.disable_plugin = self.disable_plugin
fn = tracer.start()
self.tracers.append(tracer)
return fn
# The trace function has to be set individually on each thread before
# execution begins. Ironically, the only support the threading module has
# for running code before the thread main is the tracing function. So we
# install this as a trace function, and the first time it's called, it does
# the real trace installation.
def _installation_trace(self, frame, event, arg):
"""Called on new threads, installs the real tracer."""
# Remove ourselves as the trace function.
sys.settrace(None)
# Install the real tracer.
fn = self._start_tracer()
# Invoke the real trace function with the current event, to be sure
# not to lose an event.
if fn:
fn = fn(frame, event, arg)
# Return the new trace function to continue tracing in this scope.
return fn
def start(self):
"""Start collecting trace information."""
if self._collectors:
self._collectors[-1].pause()
self.tracers = []
# Check to see whether we had a fullcoverage tracer installed. If so,
# get the stack frames it stashed away for us.
traces0 = []
fn0 = sys.gettrace()
if fn0:
tracer0 = getattr(fn0, '__self__', None)
if tracer0:
traces0 = getattr(tracer0, 'traces', [])
try:
# Install the tracer on this thread.
fn = self._start_tracer()
except:
if self._collectors:
self._collectors[-1].resume()
raise
# If _start_tracer succeeded, then we add ourselves to the global
# stack of collectors.
self._collectors.append(self)
# Replay all the events from fullcoverage into the new trace function.
for args in traces0:
(frame, event, arg), lineno = args
try:
fn(frame, event, arg, lineno=lineno)
except TypeError:
raise Exception("fullcoverage must be run with the C trace function.")
# Install our installation tracer in threading, to jump-start other
# threads.
if self.threading:
self.threading.settrace(self._installation_trace)
def stop(self):
"""Stop collecting trace information."""
assert self._collectors
if self._collectors[-1] is not self:
print("self._collectors:")
for c in self._collectors:
print(" {!r}\n{}".format(c, c.origin))
assert self._collectors[-1] is self, (
"Expected current collector to be %r, but it's %r" % (self, self._collectors[-1])
)
self.pause()
# Remove this Collector from the stack, and resume the one underneath
# (if any).
self._collectors.pop()
if self._collectors:
self._collectors[-1].resume()
def pause(self):
"""Pause tracing, but be prepared to `resume`."""
for tracer in self.tracers:
tracer.stop()
stats = tracer.get_stats()
if stats:
print("\nCoverage.py tracer stats:")
for k in sorted(stats.keys()):
print("%20s: %s" % (k, stats[k]))
if self.threading:
self.threading.settrace(None)
def resume(self):
"""Resume tracing after a `pause`."""
for tracer in self.tracers:
tracer.start()
if self.threading:
self.threading.settrace(self._installation_trace)
else:
self._start_tracer()
def _activity(self):
"""Has any activity been traced?
Returns a boolean, True if any trace function was invoked.
"""
return any(tracer.activity() for tracer in self.tracers)
def switch_context(self, new_context):
"""Switch to a new dynamic context."""
self.flush_data()
if self.static_context:
context = self.static_context
if new_context:
context += "|" + new_context
else:
context = new_context
self.covdata.set_context(context)
def disable_plugin(self, disposition):
"""Disable the plugin mentioned in `disposition`."""
file_tracer = disposition.file_tracer
plugin = file_tracer._coverage_plugin
plugin_name = plugin._coverage_plugin_name
self.warn("Disabling plug-in {!r} due to previous exception".format(plugin_name))
plugin._coverage_enabled = False
disposition.trace = False
def cached_mapped_file(self, filename):
"""A locally cached version of file names mapped through file_mapper."""
key = (type(filename), filename)
try:
return self.mapped_file_cache[key]
except KeyError:
return self.mapped_file_cache.setdefault(key, self.file_mapper(filename))
def mapped_file_dict(self, d):
"""Return a dict like d, but with keys modified by file_mapper."""
# The call to litems() ensures that the GIL protects the dictionary
# iterator against concurrent modifications by tracers running
# in other threads. We try three times in case of concurrent
# access, hoping to get a clean copy.
runtime_err = None
for _ in range(3):
try:
items = litems(d)
except RuntimeError as ex:
runtime_err = ex
else:
break
else:
raise runtime_err
return dict((self.cached_mapped_file(k), v) for k, v in items if v)
def plugin_was_disabled(self, plugin):
"""Record that `plugin` was disabled during the run."""
self.disabled_plugins.add(plugin._coverage_plugin_name)
def flush_data(self):
"""Save the collected data to our associated `CoverageData`.
Data may have also been saved along the way. This forces the
last of the data to be saved.
Returns True if there was data to save, False if not.
"""
if not self._activity():
return False
if self.branch:
self.covdata.add_arcs(self.mapped_file_dict(self.data))
else:
self.covdata.add_lines(self.mapped_file_dict(self.data))
file_tracers = {
k: v for k, v in self.file_tracers.items()
if v not in self.disabled_plugins
}
self.covdata.add_file_tracers(self.mapped_file_dict(file_tracers))
self._clear_data()
return True
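# Hedged sketch of the lifecycle coverage.py drives internally; the callback
# arguments (should_trace, check_include, file_mapper, warn, covdata, ...) are
# assumed to be supplied by the caller as described in __init__'s docstring:
#
#     collector = Collector(should_trace, check_include, None, file_mapper,
#                           timid=False, branch=False, warn=warn, concurrency=[])
#     collector.use_data(covdata, context=None)
#     collector.start()
#     ...  # run the measured code
#     collector.stop()
#     collector.flush_data()  # writes lines (or arcs) into covdata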
|
import asyncio
import logging
import async_timeout
from pyflick.authentication import AuthException, SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.helpers import aiohttp_client
from .const import DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_CLIENT_ID): str,
vol.Optional(CONF_CLIENT_SECRET): str,
}
)
class FlickConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Flick config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def _validate_input(self, user_input):
auth = SimpleFlickAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
websession=aiohttp_client.async_get_clientsession(self.hass),
client_id=user_input.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
client_secret=user_input.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
)
try:
with async_timeout.timeout(60):
token = await auth.async_get_access_token()
except asyncio.TimeoutError as err:
raise CannotConnect() from err
except AuthException as err:
raise InvalidAuth() from err
else:
return token is not None
async def async_step_user(self, user_input=None):
"""Handle gathering login info."""
errors = {}
if user_input is not None:
try:
await self._validate_input(user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
f"flick_electric_{user_input[CONF_USERNAME]}"
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"Flick Electric: {user_input[CONF_USERNAME]}",
data=user_input,
)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
import json
from django.conf import settings
import weblate
from weblate.machinery.base import (
MachineTranslation,
MachineTranslationError,
MissingConfiguration,
)
class ModernMTTranslation(MachineTranslation):
"""ModernMT machine translation support."""
name = "ModernMT"
max_score = 90
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_MODERNMT_KEY is None:
raise MissingConfiguration("ModernMT requires API key")
def get_authentication(self):
"""Hook for backends to allow add authentication headers to request."""
return {
"MMT-ApiKey": settings.MT_MODERNMT_KEY,
"MMT-Platform": "Weblate",
"MMT-PlatformVersion": weblate.VERSION,
}
def is_supported(self, source, language):
"""Check whether given language combination is supported."""
return (source, language) in self.supported_languages
def download_languages(self):
"""List of supported languages."""
response = self.request("get", settings.MT_MODERNMT_URL + "languages")
payload = response.json()
for source, targets in payload["data"].items():
yield from ((source, target) for target in targets)
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
response = self.request(
"get",
settings.MT_MODERNMT_URL + "translate",
params={"q": text, "source": source, "target": language},
)
payload = response.json()
if "error" in payload:
raise MachineTranslationError(payload["error"]["message"])
yield {
"text": payload["data"]["translation"],
"quality": self.max_score,
"service": self.name,
"source": text,
}
def get_error_message(self, exc):
if hasattr(exc, "read"):
content = exc.read()
try:
data = json.loads(content)
return data["error"]["message"]
except Exception:
pass
return super().get_error_message(exc)
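# Hedged sketch of how the hooks above are exercised (names outside this
# class are assumptions): with MT_MODERNMT_KEY configured, supported pairs
# come from download_languages() and hits from download_translations():
#
#     mt = ModernMTTranslation()
#     if mt.is_supported('en', 'de'):
#         for hit in mt.download_translations('en', 'de', 'Hello', unit, user, False):
#             print(hit['text'], hit['quality'])  # e.g. "Hallo" 90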
|
from pynzbgetapi import NZBGetAPIException
from homeassistant.components.nzbget.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.setup import async_setup_component
from . import (
ENTRY_CONFIG,
YAML_CONFIG,
_patch_async_setup_entry,
_patch_history,
_patch_status,
_patch_version,
init_integration,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_import_from_yaml(hass) -> None:
"""Test import from YAML."""
with _patch_version(), _patch_status(), _patch_history(), _patch_async_setup_entry():
assert await async_setup_component(hass, DOMAIN, {DOMAIN: YAML_CONFIG})
await hass.async_block_till_done()
entries = hass.config_entries.async_entries(DOMAIN)
assert len(entries) == 1
assert entries[0].data[CONF_NAME] == "GetNZBsTest"
assert entries[0].data[CONF_HOST] == "10.10.10.30"
assert entries[0].data[CONF_PORT] == 6789
async def test_unload_entry(hass, nzbget_api):
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state == ENTRY_STATE_LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state == ENTRY_STATE_NOT_LOADED
assert not hass.data.get(DOMAIN)
async def test_async_setup_raises_entry_not_ready(hass):
"""Test that it throws ConfigEntryNotReady when exception occurs during setup."""
config_entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG)
config_entry.add_to_hass(hass)
with _patch_version(), patch(
"homeassistant.components.nzbget.coordinator.NZBGetAPI.status",
side_effect=NZBGetAPIException(),
):
await hass.config_entries.async_setup(config_entry.entry_id)
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
|
import pytest
def pytest_addoption(parser):
"""Add command-line flags for pytest."""
parser.addoption("--run-flaky", action="store_true", help="runs flaky tests")
parser.addoption(
"--run-network-tests",
action="store_true",
help="runs tests requiring a network connection",
)
def pytest_runtest_setup(item):
# based on https://stackoverflow.com/questions/47559524
if "flaky" in item.keywords and not item.config.getoption("--run-flaky"):
pytest.skip("set --run-flaky option to run flaky tests")
if "network" in item.keywords and not item.config.getoption("--run-network-tests"):
pytest.skip(
"set --run-network-tests to run test requiring an internet connection"
)
@pytest.fixture(autouse=True)
def add_standard_imports(doctest_namespace, tmpdir):
import numpy as np
import pandas as pd
import xarray as xr
doctest_namespace["np"] = np
doctest_namespace["pd"] = pd
doctest_namespace["xr"] = xr
# always seed numpy.random to make the examples deterministic
np.random.seed(0)
# always switch to the temporary directory, so files get written there
tmpdir.chdir()
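# Example (illustrative): tests opt in to the flags above with markers, e.g.
#
#     @pytest.mark.flaky
#     def test_sometimes_fails(): ...
#
#     @pytest.mark.network
#     def test_needs_internet(): ...
#
# Without --run-flaky / --run-network-tests, pytest_runtest_setup skips them.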
|
from ... import event
from . import Widget
class ImageWidget(Widget):
""" Display an image from a url.
The ``node`` of this widget is an
`<img> <https://developer.mozilla.org/docs/Web/HTML/Element/img>`_
wrapped in a `<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_
(the ``outernode``) to handle sizing.
"""
DEFAULT_MIN_SIZE = 16, 16
_sequence = 0
source = event.StringProp('', settable=True, doc="""
        The source of the image. This can be anything that an HTML
img element supports.
""")
stretch = event.BoolProp(False, settable=True, doc="""
Whether the image should stretch to fill all available
space, or maintain its aspect ratio (default).
""")
def _create_dom(self):
global window
outer = window.document.createElement('div')
inner = window.document.createElement('img')
outer.appendChild(inner)
return outer, inner
@event.reaction
def __resize_image(self):
size = self.size
if self.stretch:
self.node.style.maxWidth = None
self.node.style.maxHeight = None
self.node.style.width = size[0] + 'px'
self.node.style.height = size[1] + 'px'
else:
self.node.style.maxWidth = size[0] + 'px'
self.node.style.maxHeight = size[1] + 'px'
self.node.style.width = None
self.node.style.height = None
@event.reaction
def __source_changed(self):
self.node.src = self.source
class VideoWidget(Widget):
""" Display a video from a url.
The ``node`` of this widget is a
`<video> <https://developer.mozilla.org/docs/Web/HTML/Element/video>`_.
"""
DEFAULT_MIN_SIZE = 100, 100
source = event.StringProp('', settable=True, doc="""
The source of the video. This must be a url of a resource
on the web.
""")
def _create_dom(self):
global window
node = window.document.createElement('video')
node.controls = 'controls'
node.textContent = 'Your browser does not support HTML5 video.'
self.src_node = window.document.createElement('source')
self.src_node.type = 'video/mp4'
self.src_node.src = None
node.appendChild(self.src_node)
return node
def _render_dom(self):
return None
@event.reaction
def __source_changed(self):
self.src_node.src = self.source or None
self.node.load()
class YoutubeWidget(Widget):
""" Display a Youtube video.
The ``node`` of this widget is a
`<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_
containing an
`<iframe> <https://developer.mozilla.org/docs/Web/HTML/Element/iframe>`_
that loads the youtube page.
"""
DEFAULT_MIN_SIZE = 100, 100
source = event.StringProp('oHg5SJYRHA0', settable=True, doc="""
The source of the video represented as the Youtube id.
""")
def _create_dom(self):
global window
node = window.document.createElement('div')
self.inode = window.document.createElement('iframe')
node.appendChild(self.inode)
return node
@event.reaction
def _update_canvas_size(self, *events):
size = self.size
if size[0] or size[1]:
self.inode.style.width = size[0] + 'px'
self.inode.style.height = size[1] + 'px'
@event.reaction
def __source_changed(self, *events):
base_url = 'http://www.youtube.com/embed/'
self.inode.src = base_url + self.source + '?autoplay=0'
# todo: SVG? Icon?
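# Minimal usage sketch (hedged; assumes the usual `flexx.flx` entry point
# re-exports these widgets, as it does for the other flexx widgets):
#
#     from flexx import flx
#
#     class Example(flx.Widget):
#         def init(self):
#             flx.ImageWidget(source='https://example.com/pic.png', stretch=True)
#             flx.YoutubeWidget(source='oHg5SJYRHA0')
#
#     flx.launch(Example)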
|
import asyncio
import logging
# pylint: disable=import-error
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
from pyezviz.camera import EzvizCamera
from pyezviz.client import EzvizClient, PyEzvizError
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_CAMERAS = "cameras"
DEFAULT_CAMERA_USERNAME = "admin"
DEFAULT_RTSP_PORT = "554"
DATA_FFMPEG = "ffmpeg"
EZVIZ_DATA = "ezviz"
ENTITIES = "entities"
CAMERA_SCHEMA = vol.Schema(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_CAMERAS, default={}): {cv.string: CAMERA_SCHEMA},
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ezviz IP Cameras."""
conf_cameras = config[CONF_CAMERAS]
account = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
try:
ezviz_client = EzvizClient(account, password)
ezviz_client.login()
cameras = ezviz_client.load_cameras()
except PyEzvizError as exp:
_LOGGER.error(exp)
return
# now, let's build the HASS devices
camera_entities = []
# Add the cameras as devices in HASS
for camera in cameras:
camera_username = DEFAULT_CAMERA_USERNAME
camera_password = ""
camera_rtsp_stream = ""
camera_serial = camera["serial"]
        # There seems to be a bug related to localRtspPort in the Ezviz API...
local_rtsp_port = DEFAULT_RTSP_PORT
if camera["local_rtsp_port"] and camera["local_rtsp_port"] != 0:
local_rtsp_port = camera["local_rtsp_port"]
if camera_serial in conf_cameras:
camera_username = conf_cameras[camera_serial][CONF_USERNAME]
camera_password = conf_cameras[camera_serial][CONF_PASSWORD]
camera_rtsp_stream = f"rtsp://{camera_username}:{camera_password}@{camera['local_ip']}:{local_rtsp_port}"
_LOGGER.debug(
"Camera %s source stream: %s", camera["serial"], camera_rtsp_stream
)
else:
_LOGGER.info(
"Found camera with serial %s without configuration. Add it to configuration.yaml to see the camera stream",
camera_serial,
)
camera["username"] = camera_username
camera["password"] = camera_password
camera["rtsp_stream"] = camera_rtsp_stream
camera["ezviz_camera"] = EzvizCamera(ezviz_client, camera_serial)
camera_entities.append(HassEzvizCamera(**camera))
add_entities(camera_entities)
class HassEzvizCamera(Camera):
"""An implementation of a Foscam IP camera."""
def __init__(self, **data):
"""Initialize an Ezviz camera."""
super().__init__()
self._username = data["username"]
self._password = data["password"]
self._rtsp_stream = data["rtsp_stream"]
self._ezviz_camera = data["ezviz_camera"]
self._serial = data["serial"]
self._name = data["name"]
self._status = data["status"]
self._privacy = data["privacy"]
self._audio = data["audio"]
self._ir_led = data["ir_led"]
self._state_led = data["state_led"]
self._follow_move = data["follow_move"]
self._alarm_notify = data["alarm_notify"]
self._alarm_sound_mod = data["alarm_sound_mod"]
self._encrypted = data["encrypted"]
self._local_ip = data["local_ip"]
self._detection_sensibility = data["detection_sensibility"]
self._device_sub_category = data["device_sub_category"]
self._local_rtsp_port = data["local_rtsp_port"]
self._ffmpeg = None
def update(self):
"""Update the camera states."""
data = self._ezviz_camera.status()
self._name = data["name"]
self._status = data["status"]
self._privacy = data["privacy"]
self._audio = data["audio"]
self._ir_led = data["ir_led"]
self._state_led = data["state_led"]
self._follow_move = data["follow_move"]
self._alarm_notify = data["alarm_notify"]
self._alarm_sound_mod = data["alarm_sound_mod"]
self._encrypted = data["encrypted"]
self._local_ip = data["local_ip"]
self._detection_sensibility = data["detection_sensibility"]
self._device_sub_category = data["device_sub_category"]
self._local_rtsp_port = data["local_rtsp_port"]
async def async_added_to_hass(self):
"""Subscribe to ffmpeg and add camera to list."""
self._ffmpeg = self.hass.data[DATA_FFMPEG]
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return True
@property
def device_state_attributes(self):
"""Return the Ezviz-specific camera state attributes."""
return {
# if privacy == true, the device closed the lid or did a 180° tilt
"privacy": self._privacy,
            # is the camera listening?
            "audio": self._audio,
            # infrared led on?
            "ir_led": self._ir_led,
            # state led on?
            "state_led": self._state_led,
            # if true, the camera will move automatically to follow movements
            "follow_move": self._follow_move,
            # if true, if some movement is detected, the app is notified
            "alarm_notify": self._alarm_notify,
            # if true, if some movement is detected, the camera makes some sound
            "alarm_sound_mod": self._alarm_sound_mod,
            # are the camera's stored videos/images encrypted?
            "encrypted": self._encrypted,
            # camera's local ip on local network
            "local_ip": self._local_ip,
            # from 1 to 9; the higher the sensitivity, the smaller the movements it detects
            "detection_sensibility": self._detection_sensibility,
}
@property
def available(self):
"""Return True if entity is available."""
return self._status
@property
def brand(self):
"""Return the camera brand."""
return "Ezviz"
@property
def supported_features(self):
"""Return supported features."""
if self._rtsp_stream:
return SUPPORT_STREAM
return 0
@property
def model(self):
"""Return the camera model."""
return self._device_sub_category
@property
def is_on(self):
"""Return true if on."""
return self._status
@property
def name(self):
"""Return the name of this camera."""
return self._name
async def async_camera_image(self):
"""Return a frame from the camera stream."""
ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop)
image = await asyncio.shield(
ffmpeg.get_image(self._rtsp_stream, output_format=IMAGE_JPEG)
)
return image
async def stream_source(self):
"""Return the stream source."""
if self._local_rtsp_port:
rtsp_stream_source = (
f"rtsp://{self._username}:{self._password}@"
f"{self._local_ip}:{self._local_rtsp_port}"
)
_LOGGER.debug(
"Camera %s source stream: %s", self._serial, rtsp_stream_source
)
self._rtsp_stream = rtsp_stream_source
return rtsp_stream_source
return None
|
import logging
from pylutron_caseta.smartbridge import Smartbridge
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from .const import CONF_CA_CERTS, CONF_CERTFILE, CONF_KEYFILE
_LOGGER = logging.getLogger(__name__)
DOMAIN = "lutron_caseta"
DATA_BRIDGE_CONFIG = "lutron_caseta_bridges"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_KEYFILE): cv.string,
vol.Required(CONF_CERTFILE): cv.string,
vol.Required(CONF_CA_CERTS): cv.string,
}
],
)
},
extra=vol.ALLOW_EXTRA,
)
LUTRON_CASETA_COMPONENTS = ["light", "switch", "cover", "scene", "fan", "binary_sensor"]
async def async_setup(hass, base_config):
"""Set up the Lutron component."""
bridge_configs = base_config[DOMAIN]
hass.data.setdefault(DOMAIN, {})
for config in bridge_configs:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
# extract the config keys one-by-one just to be explicit
data={
CONF_HOST: config[CONF_HOST],
CONF_KEYFILE: config[CONF_KEYFILE],
CONF_CERTFILE: config[CONF_CERTFILE],
CONF_CA_CERTS: config[CONF_CA_CERTS],
},
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up a bridge from a config entry."""
host = config_entry.data[CONF_HOST]
keyfile = hass.config.path(config_entry.data[CONF_KEYFILE])
certfile = hass.config.path(config_entry.data[CONF_CERTFILE])
ca_certs = hass.config.path(config_entry.data[CONF_CA_CERTS])
bridge = Smartbridge.create_tls(
hostname=host, keyfile=keyfile, certfile=certfile, ca_certs=ca_certs
)
await bridge.connect()
if not bridge.is_connected():
_LOGGER.error("Unable to connect to Lutron Caseta bridge at %s", host)
return False
_LOGGER.debug("Connected to Lutron Caseta bridge at %s", host)
# Store this bridge (keyed by entry_id) so it can be retrieved by the
# components we're setting up.
hass.data[DOMAIN][config_entry.entry_id] = bridge
for component in LUTRON_CASETA_COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
class LutronCasetaDevice(Entity):
"""Common base class for all Lutron Caseta devices."""
def __init__(self, device, bridge):
"""Set up the base class.

        :param device: the device metadata
        :param bridge: the smartbridge object
"""
self._device = device
self._smartbridge = bridge
async def async_added_to_hass(self):
"""Register callbacks."""
self._smartbridge.add_subscriber(self.device_id, self.async_write_ha_state)
@property
def device_id(self):
"""Return the device ID used for calling pylutron_caseta."""
return self._device["device_id"]
@property
def name(self):
"""Return the name of the device."""
return self._device["name"]
@property
def serial(self):
"""Return the serial number of the device."""
return self._device["serial"]
@property
def unique_id(self):
"""Return the unique ID of the device (serial)."""
return str(self.serial)
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self.serial)},
"name": self.name,
"manufacturer": "Lutron",
"model": self._device["model"],
}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {"device_id": self.device_id, "zone_id": self._device["zone"]}
@property
def should_poll(self):
"""No polling needed."""
return False
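

# A hedged sketch of how a platform module might build on the base class;
# ``LutronCasetaSwitch`` and the ``current_state`` key are illustrative
# assumptions, not part of this module.
#
# class LutronCasetaSwitch(LutronCasetaDevice):
#     @property
#     def is_on(self):
#         return self._device["current_state"] > 0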
|
import os
import pytest
from molecule import config
from molecule.driver import vagrant
@pytest.fixture
def _driver_section_data():
return {
'driver': {
'name': 'vagrant',
'provider': {
'name': 'virtualbox',
},
'options': {},
'ssh_connection_options': ['-o foo=bar'],
'safe_files': ['foo'],
}
}
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return vagrant.Vagrant(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
x = {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
}
assert x == _instance.testinfra_options
def test_name_property(_instance):
assert 'vagrant' == _instance.name
def test_options_property(_instance):
x = {'managed': True}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_driver_section_data'], indirect=True)
def test_login_cmd_template_property(_instance):
x = 'ssh {address} -l {user} -p {port} -i {identity_file} -o foo=bar'
assert x == _instance.login_cmd_template
@pytest.mark.parametrize(
'config_instance', ['_driver_section_data'], indirect=True)
def test_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'Vagrantfile'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant.yml'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'.vagrant'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant-*.out'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant-*.err'),
'foo',
]
assert x == _instance.safe_files
def test_default_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'Vagrantfile'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant.yml'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'.vagrant'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant-*.out'),
os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant-*.err'),
]
assert x == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.default_ssh_connection_options
def test_login_options(mocker, _instance):
m = mocker.patch('molecule.driver.vagrant.Vagrant._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '127.0.0.1',
'user': 'vagrant',
'port': 2222,
'identity_file': '/foo/bar',
}
x = {
'instance': 'foo',
'address': '127.0.0.1',
'user': 'vagrant',
'port': 2222,
'identity_file': '/foo/bar'
}
assert x == _instance.login_options('foo')
@pytest.mark.parametrize(
'config_instance', ['_driver_section_data'], indirect=True)
def test_ansible_connection_options(mocker, _instance):
m = mocker.patch('molecule.driver.vagrant.Vagrant._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '127.0.0.1',
'user': 'vagrant',
'port': 2222,
'identity_file': '/foo/bar',
}
x = {
'ansible_host': '127.0.0.1',
'ansible_port': 2222,
'ansible_user': 'vagrant',
'ansible_private_key_file': '/foo/bar',
'connection': 'ssh',
'ansible_ssh_common_args': '-o foo=bar',
}
assert x == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_vagrantfile_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'Vagrantfile')
assert x == _instance.vagrantfile
def test_vagrantfile_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'vagrant.yml')
assert x == _instance.vagrantfile_config
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
@pytest.mark.parametrize(
'config_instance', ['_driver_section_data'], indirect=True)
def test_ssh_connection_options_property(_instance):
x = ['-o foo=bar']
assert x == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'vagrant'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'vagrant'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
x = {
'instance': 'foo',
}
assert x == _instance._get_instance_config('foo')
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
|
import logging
from contextlib import contextmanager
from lark import Lark, logger
from unittest import TestCase, main
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
@contextmanager
def capture_log():
stream = StringIO()
orig_handler = logger.handlers[0]
del logger.handlers[:]
logger.addHandler(logging.StreamHandler(stream))
yield stream
del logger.handlers[:]
logger.addHandler(orig_handler)
class Testlogger(TestCase):
def test_debug(self):
logger.setLevel(logging.DEBUG)
collision_grammar = '''
start: as as
as: a*
a: "a"
'''
with capture_log() as log:
Lark(collision_grammar, parser='lalr', debug=True)
log = log.getvalue()
        # Since the grammar has conflicts involving the symbol A,
        # "A" should appear in the log message as a hint.
self.assertIn("A", log)
def test_non_debug(self):
logger.setLevel(logging.DEBUG)
collision_grammar = '''
start: as as
as: a*
a: "a"
'''
with capture_log() as log:
Lark(collision_grammar, parser='lalr', debug=False)
log = log.getvalue()
        # no log message
self.assertEqual(len(log), 0)
def test_loglevel_higher(self):
logger.setLevel(logging.ERROR)
collision_grammar = '''
start: as as
as: a*
a: "a"
'''
with capture_log() as log:
Lark(collision_grammar, parser='lalr', debug=True)
log = log.getvalue()
        # no log message
self.assertEqual(len(log), 0)
if __name__ == '__main__':
main()
|
import chainer
import chainer.functions as F
import chainer.links as L
class SEBlock(chainer.Chain):
"""A squeeze-and-excitation block.
This block is part of squeeze-and-excitation networks. Channel-wise
multiplication weights are inferred from and applied to input feature map.
Please refer to `the original paper
<https://arxiv.org/pdf/1709.01507.pdf>`_ for more details.
.. seealso::
:class:`chainercv.links.model.senet.SEResNet`
Args:
n_channel (int): The number of channels of the input and output array.
ratio (int): Reduction ratio of :obj:`n_channel` to the number of
hidden layer units.
"""
def __init__(self, n_channel, ratio=16):
super(SEBlock, self).__init__()
reduction_size = n_channel // ratio
with self.init_scope():
self.down = L.Linear(n_channel, reduction_size)
self.up = L.Linear(reduction_size, n_channel)
def forward(self, u):
B, C, H, W = u.shape
z = F.average(u, axis=(2, 3))
x = F.relu(self.down(z))
x = F.sigmoid(self.up(x))
x = F.reshape(x, x.shape[:2] + (1, 1))
# Spatial axes of `x` will be broadcasted.
return u * x
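

if __name__ == '__main__':
    # A minimal usage sketch (illustrative, not part of chainercv's API):
    # run a random NCHW feature map through the block and confirm the
    # channel-wise reweighting preserves the input shape. The input size
    # (2, 64, 8, 8) is an arbitrary assumption.
    import numpy as np

    block = SEBlock(n_channel=64, ratio=16)
    u = np.random.rand(2, 64, 8, 8).astype(np.float32)
    y = block(chainer.Variable(u))
    assert y.shape == u.shape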
|
from scrapy.utils.misc import load_object
from scrapy.utils.serialize import ScrapyJSONEncoder
from twisted.internet.threads import deferToThread
from . import connection, defaults
default_serialize = ScrapyJSONEncoder().encode
class RedisPipeline(object):
"""Pushes serialized item into a redis list/queue
Settings
--------
REDIS_ITEMS_KEY : str
Redis key where to store items.
REDIS_ITEMS_SERIALIZER : str
Object path to serializer function.
"""
def __init__(self, server,
key=defaults.PIPELINE_KEY,
serialize_func=default_serialize):
"""Initialize pipeline.
Parameters
----------
server : StrictRedis
Redis client instance.
key : str
Redis key where to store items.
serialize_func : callable
Items serializer function.
"""
self.server = server
self.key = key
self.serialize = serialize_func
@classmethod
def from_settings(cls, settings):
params = {
'server': connection.from_settings(settings),
}
if settings.get('REDIS_ITEMS_KEY'):
params['key'] = settings['REDIS_ITEMS_KEY']
if settings.get('REDIS_ITEMS_SERIALIZER'):
params['serialize_func'] = load_object(
settings['REDIS_ITEMS_SERIALIZER']
)
return cls(**params)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings)
def process_item(self, item, spider):
return deferToThread(self._process_item, item, spider)
def _process_item(self, item, spider):
key = self.item_key(item, spider)
data = self.serialize(item)
self.server.rpush(key, data)
return item
def item_key(self, item, spider):
"""Returns redis key based on given spider.
Override this function to use a different key depending on the item
and/or spider.
"""
return self.key % {'spider': spider.name}
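

# A hedged example of enabling this pipeline from Scrapy settings; the key
# and serializer values below are illustrative assumptions, not defaults
# shipped by this module:
#
# ITEM_PIPELINES = {
#     'scrapy_redis.pipelines.RedisPipeline': 300,
# }
# REDIS_ITEMS_KEY = '%(spider)s:items'
# REDIS_ITEMS_SERIALIZER = 'json.dumps'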
|
import pytest
from unittest.mock import Mock
from kombu.asynchronous.aws.sqs.message import AsyncMessage
from kombu.asynchronous.aws.sqs.queue import AsyncQueue
from t.mocks import PromiseMock
from ..case import AWSCase
class test_AsyncQueue(AWSCase):
def setup(self):
self.conn = Mock(name='connection')
self.x = AsyncQueue(self.conn, '/url')
self.callback = PromiseMock(name='callback')
def test_message_class(self):
assert issubclass(self.x.message_class, AsyncMessage)
def test_get_attributes(self):
self.x.get_attributes(attributes='QueueSize', callback=self.callback)
self.x.connection.get_queue_attributes.assert_called_with(
self.x, 'QueueSize', self.callback,
)
def test_set_attribute(self):
self.x.set_attribute('key', 'value', callback=self.callback)
self.x.connection.set_queue_attribute.assert_called_with(
self.x, 'key', 'value', self.callback,
)
def test_get_timeout(self):
self.x.get_timeout(callback=self.callback)
self.x.connection.get_queue_attributes.assert_called()
on_ready = self.x.connection.get_queue_attributes.call_args[0][2]
self.x.connection.get_queue_attributes.assert_called_with(
self.x, 'VisibilityTimeout', on_ready,
)
on_ready({'VisibilityTimeout': '303'})
self.callback.assert_called_with(303)
def test_set_timeout(self):
self.x.set_timeout(808, callback=self.callback)
self.x.connection.set_queue_attribute.assert_called()
on_ready = self.x.connection.set_queue_attribute.call_args[0][3]
self.x.connection.set_queue_attribute.assert_called_with(
self.x, 'VisibilityTimeout', 808, on_ready,
)
on_ready(808)
self.callback.assert_called_with(808)
assert self.x.visibility_timeout == 808
on_ready(None)
assert self.x.visibility_timeout == 808
def test_add_permission(self):
self.x.add_permission(
'label', 'accid', 'action', callback=self.callback,
)
self.x.connection.add_permission.assert_called_with(
self.x, 'label', 'accid', 'action', self.callback,
)
def test_remove_permission(self):
self.x.remove_permission('label', callback=self.callback)
self.x.connection.remove_permission.assert_called_with(
self.x, 'label', self.callback,
)
def test_read(self):
self.x.read(visibility_timeout=909, callback=self.callback)
self.x.connection.receive_message.assert_called()
on_ready = self.x.connection.receive_message.call_args[1]['callback']
self.x.connection.receive_message.assert_called_with(
self.x, number_messages=1, visibility_timeout=909,
attributes=None, wait_time_seconds=None, callback=on_ready,
)
messages = [Mock(name='message1')]
on_ready(messages)
self.callback.assert_called_with(messages[0])
def MockMessage(self, id, md5):
m = Mock(name=f'Message-{id}')
m.id = id
m.md5 = md5
return m
def test_write(self):
message = self.MockMessage('id1', 'digest1')
self.x.write(message, delay_seconds=303, callback=self.callback)
self.x.connection.send_message.assert_called()
on_ready = self.x.connection.send_message.call_args[1]['callback']
self.x.connection.send_message.assert_called_with(
self.x, message.get_body_encoded(), 303,
callback=on_ready,
)
new_message = self.MockMessage('id2', 'digest2')
on_ready(new_message)
assert message.id == 'id2'
assert message.md5 == 'digest2'
def test_write_batch(self):
messages = [('id1', 'A', 0), ('id2', 'B', 303)]
self.x.write_batch(messages, callback=self.callback)
self.x.connection.send_message_batch.assert_called_with(
self.x, messages, callback=self.callback,
)
def test_delete_message(self):
message = self.MockMessage('id1', 'digest1')
self.x.delete_message(message, callback=self.callback)
self.x.connection.delete_message.assert_called_with(
self.x, message, self.callback,
)
def test_delete_message_batch(self):
messages = [
self.MockMessage('id1', 'r1'),
self.MockMessage('id2', 'r2'),
]
self.x.delete_message_batch(messages, callback=self.callback)
self.x.connection.delete_message_batch.assert_called_with(
self.x, messages, callback=self.callback,
)
def test_change_message_visibility_batch(self):
messages = [
(self.MockMessage('id1', 'r1'), 303),
(self.MockMessage('id2', 'r2'), 909),
]
self.x.change_message_visibility_batch(
messages, callback=self.callback,
)
self.x.connection.change_message_visibility_batch.assert_called_with(
self.x, messages, callback=self.callback,
)
def test_delete(self):
self.x.delete(callback=self.callback)
self.x.connection.delete_queue.assert_called_with(
self.x, callback=self.callback,
)
def test_count(self):
self.x.count(callback=self.callback)
self.x.connection.get_queue_attributes.assert_called()
on_ready = self.x.connection.get_queue_attributes.call_args[0][2]
self.x.connection.get_queue_attributes.assert_called_with(
self.x, 'ApproximateNumberOfMessages', on_ready,
)
on_ready({'ApproximateNumberOfMessages': '909'})
self.callback.assert_called_with(909)
def test_interface__count_slow(self):
with pytest.raises(NotImplementedError):
self.x.count_slow()
def test_interface__dump(self):
with pytest.raises(NotImplementedError):
self.x.dump()
def test_interface__save_to_file(self):
with pytest.raises(NotImplementedError):
self.x.save_to_file()
def test_interface__save_to_filename(self):
with pytest.raises(NotImplementedError):
self.x.save_to_filename()
def test_interface__save(self):
with pytest.raises(NotImplementedError):
self.x.save()
def test_interface__save_to_s3(self):
with pytest.raises(NotImplementedError):
self.x.save_to_s3()
def test_interface__load_from_s3(self):
with pytest.raises(NotImplementedError):
self.x.load_from_s3()
def test_interface__load_from_file(self):
with pytest.raises(NotImplementedError):
self.x.load_from_file()
def test_interface__load_from_filename(self):
with pytest.raises(NotImplementedError):
self.x.load_from_filename()
def test_interface__load(self):
with pytest.raises(NotImplementedError):
self.x.load()
def test_interface__clear(self):
with pytest.raises(NotImplementedError):
self.x.clear()
|
import logging
from freesms import FreeClient
import voluptuous as vol
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_USERNAME,
HTTP_BAD_REQUEST,
HTTP_FORBIDDEN,
HTTP_INTERNAL_SERVER_ERROR,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_ACCESS_TOKEN): cv.string}
)
def get_service(hass, config, discovery_info=None):
"""Get the Free Mobile SMS notification service."""
return FreeSMSNotificationService(config[CONF_USERNAME], config[CONF_ACCESS_TOKEN])
class FreeSMSNotificationService(BaseNotificationService):
"""Implement a notification service for the Free Mobile SMS service."""
def __init__(self, username, access_token):
"""Initialize the service."""
self.free_client = FreeClient(username, access_token)
def send_message(self, message="", **kwargs):
"""Send a message to the Free Mobile user cell."""
resp = self.free_client.send_sms(message)
if resp.status_code == HTTP_BAD_REQUEST:
_LOGGER.error("At least one parameter is missing")
        elif resp.status_code == 402:
            _LOGGER.error("Too many SMS sent in a short period of time")
        elif resp.status_code == HTTP_FORBIDDEN:
            _LOGGER.error("Wrong username/password")
        elif resp.status_code == HTTP_INTERNAL_SERVER_ERROR:
            _LOGGER.error("Server error, please try again later")
|
from datetime import datetime as dt, timedelta
import pytest
from homeassistant.components import jewish_calendar
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import (
HDATE_DEFAULT_ALTITUDE,
alter_time,
make_jerusalem_test_params,
make_nyc_test_params,
)
from tests.common import async_fire_time_changed
MELACHA_PARAMS = [
make_nyc_test_params(dt(2018, 9, 1, 16, 0), STATE_ON),
make_nyc_test_params(dt(2018, 9, 1, 20, 21), STATE_OFF),
make_nyc_test_params(dt(2018, 9, 7, 13, 1), STATE_OFF),
make_nyc_test_params(dt(2018, 9, 8, 21, 25), STATE_OFF),
make_nyc_test_params(dt(2018, 9, 9, 21, 25), STATE_ON),
make_nyc_test_params(dt(2018, 9, 10, 21, 25), STATE_ON),
make_nyc_test_params(dt(2018, 9, 28, 21, 25), STATE_ON),
make_nyc_test_params(dt(2018, 9, 29, 21, 25), STATE_OFF),
make_nyc_test_params(dt(2018, 9, 30, 21, 25), STATE_ON),
make_nyc_test_params(dt(2018, 10, 1, 21, 25), STATE_ON),
make_jerusalem_test_params(dt(2018, 9, 29, 21, 25), STATE_OFF),
make_jerusalem_test_params(dt(2018, 9, 30, 21, 25), STATE_ON),
make_jerusalem_test_params(dt(2018, 10, 1, 21, 25), STATE_OFF),
]
MELACHA_TEST_IDS = [
"currently_first_shabbat",
"after_first_shabbat",
"friday_upcoming_shabbat",
"upcoming_rosh_hashana",
"currently_rosh_hashana",
"second_day_rosh_hashana",
"currently_shabbat_chol_hamoed",
"upcoming_two_day_yomtov_in_diaspora",
"currently_first_day_of_two_day_yomtov_in_diaspora",
"currently_second_day_of_two_day_yomtov_in_diaspora",
"upcoming_one_day_yom_tov_in_israel",
"currently_one_day_yom_tov_in_israel",
"after_one_day_yom_tov_in_israel",
]
@pytest.mark.parametrize(
[
"now",
"candle_lighting",
"havdalah",
"diaspora",
"tzname",
"latitude",
"longitude",
"result",
],
MELACHA_PARAMS,
ids=MELACHA_TEST_IDS,
)
async def test_issur_melacha_sensor(
hass,
legacy_patchable_time,
now,
candle_lighting,
havdalah,
diaspora,
tzname,
latitude,
longitude,
result,
):
"""Test Issur Melacha sensor output."""
time_zone = dt_util.get_time_zone(tzname)
test_time = time_zone.localize(now)
hass.config.time_zone = time_zone
hass.config.latitude = latitude
hass.config.longitude = longitude
registry = await hass.helpers.entity_registry.async_get_registry()
with alter_time(test_time):
assert await async_setup_component(
hass,
jewish_calendar.DOMAIN,
{
"jewish_calendar": {
"name": "test",
"language": "english",
"diaspora": diaspora,
"candle_lighting_minutes_before_sunset": candle_lighting,
"havdalah_minutes_after_sunset": havdalah,
}
},
)
await hass.async_block_till_done()
future = dt_util.utcnow() + timedelta(seconds=30)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert (
hass.states.get("binary_sensor.test_issur_melacha_in_effect").state
== result
)
entity = registry.async_get("binary_sensor.test_issur_melacha_in_effect")
target_uid = "_".join(
map(
str,
[
latitude,
longitude,
time_zone,
HDATE_DEFAULT_ALTITUDE,
diaspora,
"english",
candle_lighting,
havdalah,
"issur_melacha_in_effect",
],
)
)
assert entity.unique_id == target_uid
|
from homeassistant.helpers.entity import Entity
from .const import DEFAULT_NAME, DOMAIN
class TadoZoneEntity(Entity):
"""Base implementation for tado device."""
def __init__(self, zone_name, device_info, device_id, zone_id):
"""Initialize an August device."""
super().__init__()
self._device_zone_id = f"{device_id}_{zone_id}"
self._device_info = device_info
self.zone_name = zone_name
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self._device_zone_id)},
"name": self.zone_name,
"manufacturer": DEFAULT_NAME,
"sw_version": self._device_info["currentFwVersion"],
"model": self._device_info["deviceType"],
"via_device": (DOMAIN, self._device_info["serialNo"]),
}
@property
def should_poll(self):
"""Do not poll."""
return False
|
from collections import deque
import datetime
import email
import imaplib
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_DATE,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
CONTENT_TYPE_TEXT_PLAIN,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_SERVER = "server"
CONF_SENDERS = "senders"
CONF_FOLDER = "folder"
ATTR_FROM = "from"
ATTR_BODY = "body"
ATTR_SUBJECT = "subject"
DEFAULT_PORT = 993
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_SERVER): cv.string,
vol.Required(CONF_SENDERS): [cv.string],
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_FOLDER, default="INBOX"): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Email sensor platform."""
reader = EmailReader(
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD),
config.get(CONF_SERVER),
config.get(CONF_PORT),
config.get(CONF_FOLDER),
)
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
sensor = EmailContentSensor(
hass,
reader,
config.get(CONF_NAME) or config.get(CONF_USERNAME),
config.get(CONF_SENDERS),
value_template,
)
if sensor.connected:
add_entities([sensor], True)
else:
return False
class EmailReader:
"""A class to read emails from an IMAP server."""
def __init__(self, user, password, server, port, folder):
"""Initialize the Email Reader."""
self._user = user
self._password = password
self._server = server
self._port = port
self._folder = folder
self._last_id = None
self._unread_ids = deque([])
self.connection = None
def connect(self):
"""Login and setup the connection."""
try:
self.connection = imaplib.IMAP4_SSL(self._server, self._port)
self.connection.login(self._user, self._password)
return True
except imaplib.IMAP4.error:
_LOGGER.error("Failed to login to %s", self._server)
return False
def _fetch_message(self, message_uid):
"""Get an email message from a message id."""
_, message_data = self.connection.uid("fetch", message_uid, "(RFC822)")
if message_data is None:
return None
if message_data[0] is None:
return None
raw_email = message_data[0][1]
email_message = email.message_from_bytes(raw_email)
return email_message
def read_next(self):
"""Read the next email from the email server."""
try:
self.connection.select(self._folder, readonly=True)
if not self._unread_ids:
search = f"SINCE {datetime.date.today():%d-%b-%Y}"
if self._last_id is not None:
search = f"UID {self._last_id}:*"
_, data = self.connection.uid("search", None, search)
self._unread_ids = deque(data[0].split())
while self._unread_ids:
message_uid = self._unread_ids.popleft()
if self._last_id is None or int(message_uid) > self._last_id:
self._last_id = int(message_uid)
return self._fetch_message(message_uid)
return self._fetch_message(str(self._last_id))
except imaplib.IMAP4.error:
_LOGGER.info("Connection to %s lost, attempting to reconnect", self._server)
try:
self.connect()
_LOGGER.info(
"Reconnect to %s succeeded, trying last message", self._server
)
if self._last_id is not None:
return self._fetch_message(str(self._last_id))
except imaplib.IMAP4.error:
_LOGGER.error("Failed to reconnect")
return None
class EmailContentSensor(Entity):
"""Representation of an EMail sensor."""
def __init__(self, hass, email_reader, name, allowed_senders, value_template):
"""Initialize the sensor."""
self.hass = hass
self._email_reader = email_reader
self._name = name
self._allowed_senders = [sender.upper() for sender in allowed_senders]
self._value_template = value_template
self._last_id = None
self._message = None
self._state_attributes = None
self.connected = self._email_reader.connect()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the current email state."""
return self._message
@property
def device_state_attributes(self):
"""Return other state attributes for the message."""
return self._state_attributes
def render_template(self, email_message):
"""Render the message template."""
variables = {
ATTR_FROM: EmailContentSensor.get_msg_sender(email_message),
ATTR_SUBJECT: EmailContentSensor.get_msg_subject(email_message),
ATTR_DATE: email_message["Date"],
ATTR_BODY: EmailContentSensor.get_msg_text(email_message),
}
return self._value_template.render(variables, parse_result=False)
def sender_allowed(self, email_message):
"""Check if the sender is in the allowed senders list."""
        sender = EmailContentSensor.get_msg_sender(email_message).upper()
        return sender in self._allowed_senders
@staticmethod
def get_msg_sender(email_message):
"""Get the parsed message sender from the email."""
return str(email.utils.parseaddr(email_message["From"])[1])
@staticmethod
def get_msg_subject(email_message):
"""Decode the message subject."""
decoded_header = email.header.decode_header(email_message["Subject"])
header = email.header.make_header(decoded_header)
return str(header)
@staticmethod
def get_msg_text(email_message):
"""
Get the message text from the email.
Will look for text/plain or use text/html if not found.
"""
message_text = None
message_html = None
message_untyped_text = None
for part in email_message.walk():
if part.get_content_type() == CONTENT_TYPE_TEXT_PLAIN:
if message_text is None:
message_text = part.get_payload()
elif part.get_content_type() == "text/html":
if message_html is None:
message_html = part.get_payload()
elif part.get_content_type().startswith("text"):
if message_untyped_text is None:
message_untyped_text = part.get_payload()
if message_text is not None:
return message_text
if message_html is not None:
return message_html
if message_untyped_text is not None:
return message_untyped_text
return email_message.get_payload()
def update(self):
"""Read emails and publish state change."""
email_message = self._email_reader.read_next()
if email_message is None:
self._message = None
self._state_attributes = {}
return
if self.sender_allowed(email_message):
message = EmailContentSensor.get_msg_subject(email_message)
if self._value_template is not None:
message = self.render_template(email_message)
self._message = message
self._state_attributes = {
ATTR_FROM: EmailContentSensor.get_msg_sender(email_message),
ATTR_SUBJECT: EmailContentSensor.get_msg_subject(email_message),
ATTR_DATE: email_message["Date"],
ATTR_BODY: EmailContentSensor.get_msg_text(email_message),
}
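

if __name__ == "__main__":
    # A minimal standalone sketch of EmailReader (server name and credentials
    # are placeholder assumptions): connect, then poll for the next message.
    reader = EmailReader(
        "user@example.com",
        "app-password",
        "imap.example.com",
        DEFAULT_PORT,
        "INBOX",
    )
    if reader.connect():
        message = reader.read_next()
        if message is not None:
            print(message["Subject"])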
|
import io
import logging
import os
import subprocess
from PIL import Image
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingEntity,
)
from homeassistant.core import split_entity_id
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DIGITS = "digits"
CONF_EXTRA_ARGUMENTS = "extra_arguments"
CONF_HEIGHT = "height"
CONF_ROTATE = "rotate"
CONF_SSOCR_BIN = "ssocr_bin"
CONF_THRESHOLD = "threshold"
CONF_WIDTH = "width"
CONF_X_POS = "x_position"
CONF_Y_POS = "y_position"
DEFAULT_BINARY = "ssocr"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_EXTRA_ARGUMENTS, default=""): cv.string,
vol.Optional(CONF_DIGITS): cv.positive_int,
vol.Optional(CONF_HEIGHT, default=0): cv.positive_int,
vol.Optional(CONF_SSOCR_BIN, default=DEFAULT_BINARY): cv.string,
vol.Optional(CONF_THRESHOLD, default=0): cv.positive_int,
vol.Optional(CONF_ROTATE, default=0): cv.positive_int,
vol.Optional(CONF_WIDTH, default=0): cv.positive_int,
vol.Optional(CONF_X_POS, default=0): cv.string,
vol.Optional(CONF_Y_POS, default=0): cv.positive_int,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Seven segments OCR platform."""
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
ImageProcessingSsocr(
hass, camera[CONF_ENTITY_ID], config, camera.get(CONF_NAME)
)
)
async_add_entities(entities)
class ImageProcessingSsocr(ImageProcessingEntity):
"""Representation of the seven segments OCR image processing entity."""
def __init__(self, hass, camera_entity, config, name):
"""Initialize seven segments processing."""
self.hass = hass
self._camera_entity = camera_entity
if name:
self._name = name
else:
self._name = "SevenSegment OCR {}".format(split_entity_id(camera_entity)[1])
self._state = None
self.filepath = os.path.join(
self.hass.config.config_dir,
"ssocr-{}.png".format(self._name.replace(" ", "_")),
)
crop = [
"crop",
str(config[CONF_X_POS]),
str(config[CONF_Y_POS]),
str(config[CONF_WIDTH]),
str(config[CONF_HEIGHT]),
]
digits = ["-d", str(config.get(CONF_DIGITS, -1))]
rotate = ["rotate", str(config[CONF_ROTATE])]
threshold = ["-t", str(config[CONF_THRESHOLD])]
extra_arguments = config[CONF_EXTRA_ARGUMENTS].split(" ")
self._command = (
[config[CONF_SSOCR_BIN]]
+ crop
+ digits
+ threshold
+ rotate
+ extra_arguments
)
self._command.append(self.filepath)
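        # For illustration (the values are assumptions, not defaults): with
        # x_position=10, y_position=20, width=100, height=50, digits=2,
        # threshold=50 and rotate=0, the assembled command resembles:
        #   ssocr crop 10 20 100 50 -d 2 -t 50 rotate 0 /config/ssocr-Name.png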
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "ocr"
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera_entity
@property
def name(self):
"""Return the name of the image processor."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
return self._state
def process_image(self, image):
"""Process the image."""
stream = io.BytesIO(image)
img = Image.open(stream)
img.save(self.filepath, "png")
ocr = subprocess.Popen(
self._command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
out = ocr.communicate()
if out[0] != b"":
self._state = out[0].strip().decode("utf-8")
else:
self._state = None
_LOGGER.warning(
"Unable to detect value: %s", out[1].strip().decode("utf-8")
)
|
import discord
from redbot.core.utils.chat_formatting import box, humanize_number
from redbot.core import checks, bank, commands
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.bot import Red # Only used for type hints
_ = Translator("Bank", __file__)
def is_owner_if_bank_global():
"""
    Command decorator. If the bank is global, it checks whether the author is
    the bot owner; otherwise it only checks whether the command was used in a
    guild - it DOES NOT check any permissions.

    When used on a command, this should be combined with a permissions check
    like `guildowner_or_permissions()`.
"""
async def pred(ctx: commands.Context):
author = ctx.author
if not await bank.is_global():
if not ctx.guild:
return False
return True
else:
return await ctx.bot.is_owner(author)
return commands.check(pred)
@cog_i18n(_)
class Bank(commands.Cog):
"""Bank"""
def __init__(self, bot: Red):
super().__init__()
self.bot = bot
# SECTION commands
@is_owner_if_bank_global()
@checks.guildowner_or_permissions(administrator=True)
@commands.group()
async def bankset(self, ctx: commands.Context):
"""Base command for bank settings."""
@bankset.command(name="showsettings")
async def bankset_showsettings(self, ctx: commands.Context):
"""Show the current bank settings."""
cur_setting = await bank.is_global()
if cur_setting:
group = bank._config
else:
if not ctx.guild:
return
group = bank._config.guild(ctx.guild)
group_data = await group.all()
bank_name = group_data["bank_name"]
bank_scope = _("Global") if cur_setting else _("Server")
currency_name = group_data["currency"]
default_balance = group_data["default_balance"]
max_balance = group_data["max_balance"]
settings = _(
"Bank settings:\n\nBank name: {bank_name}\nBank scope: {bank_scope}\n"
"Currency: {currency_name}\nDefault balance: {default_balance}\n"
"Maximum allowed balance: {maximum_bal}\n"
).format(
bank_name=bank_name,
bank_scope=bank_scope,
currency_name=currency_name,
default_balance=humanize_number(default_balance),
maximum_bal=humanize_number(max_balance),
)
await ctx.send(box(settings))
@bankset.command(name="toggleglobal")
@checks.is_owner()
async def bankset_toggleglobal(self, ctx: commands.Context, confirm: bool = False):
"""Toggle whether the bank is global or not.
If the bank is global, it will become per-server.
If the bank is per-server, it will become global.
"""
cur_setting = await bank.is_global()
word = _("per-server") if cur_setting else _("global")
if confirm is False:
await ctx.send(
_(
"This will toggle the bank to be {banktype}, deleting all accounts "
"in the process! If you're sure, type `{command}`"
).format(banktype=word, command=f"{ctx.clean_prefix}bankset toggleglobal yes")
)
else:
await bank.set_global(not cur_setting)
await ctx.send(_("The bank is now {banktype}.").format(banktype=word))
@is_owner_if_bank_global()
@checks.guildowner_or_permissions(administrator=True)
@bankset.command(name="bankname")
async def bankset_bankname(self, ctx: commands.Context, *, name: str):
"""Set the bank's name."""
await bank.set_bank_name(name, ctx.guild)
await ctx.send(_("Bank name has been set to: {name}").format(name=name))
@is_owner_if_bank_global()
@checks.guildowner_or_permissions(administrator=True)
@bankset.command(name="creditsname")
async def bankset_creditsname(self, ctx: commands.Context, *, name: str):
"""Set the name for the bank's currency."""
await bank.set_currency_name(name, ctx.guild)
await ctx.send(_("Currency name has been set to: {name}").format(name=name))
@is_owner_if_bank_global()
@checks.guildowner_or_permissions(administrator=True)
@bankset.command(name="maxbal")
async def bankset_maxbal(self, ctx: commands.Context, *, amount: int):
"""Set the maximum balance a user can get."""
try:
await bank.set_max_balance(amount, ctx.guild)
except ValueError:
# noinspection PyProtectedMember
return await ctx.send(
_("Amount must be greater than zero and less than {max}.").format(
max=humanize_number(bank._MAX_BALANCE)
)
)
await ctx.send(
_("Maximum balance has been set to: {amount}").format(amount=humanize_number(amount))
)
# ENDSECTION
async def red_delete_data_for_user(self, **kwargs):
""" Nothing to delete """
return
|
import asyncio
import async_timeout
from pydeconz import DeconzSession, errors
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
CONF_ALLOW_CLIP_SENSOR,
CONF_ALLOW_DECONZ_GROUPS,
CONF_ALLOW_NEW_DEVICES,
CONF_MASTER_GATEWAY,
DEFAULT_ALLOW_CLIP_SENSOR,
DEFAULT_ALLOW_DECONZ_GROUPS,
DEFAULT_ALLOW_NEW_DEVICES,
DOMAIN,
LOGGER,
NEW_GROUP,
NEW_LIGHT,
NEW_SCENE,
NEW_SENSOR,
SUPPORTED_PLATFORMS,
)
from .deconz_event import async_setup_events, async_unload_events
from .errors import AuthenticationRequired, CannotConnect
@callback
def get_gateway_from_config_entry(hass, config_entry):
"""Return gateway with a matching bridge id."""
return hass.data[DOMAIN][config_entry.unique_id]
class DeconzGateway:
"""Manages a single deCONZ gateway."""
def __init__(self, hass, config_entry) -> None:
"""Initialize the system."""
self.hass = hass
self.config_entry = config_entry
self.api = None
self.available = True
self.ignore_state_updates = False
self.deconz_ids = {}
self.entities = {}
self.events = []
self.listeners = []
self._current_option_allow_clip_sensor = self.option_allow_clip_sensor
self._current_option_allow_deconz_groups = self.option_allow_deconz_groups
@property
def bridgeid(self) -> str:
"""Return the unique identifier of the gateway."""
return self.config_entry.unique_id
@property
def host(self) -> str:
"""Return the host of the gateway."""
return self.config_entry.data[CONF_HOST]
@property
def master(self) -> bool:
"""Gateway which is used with deCONZ services without defining id."""
return self.config_entry.options[CONF_MASTER_GATEWAY]
# Options
@property
def option_allow_clip_sensor(self) -> bool:
"""Allow loading clip sensor from gateway."""
return self.config_entry.options.get(
CONF_ALLOW_CLIP_SENSOR, DEFAULT_ALLOW_CLIP_SENSOR
)
@property
def option_allow_deconz_groups(self) -> bool:
"""Allow loading deCONZ groups from gateway."""
return self.config_entry.options.get(
CONF_ALLOW_DECONZ_GROUPS, DEFAULT_ALLOW_DECONZ_GROUPS
)
@property
def option_allow_new_devices(self) -> bool:
"""Allow automatic adding of new devices."""
return self.config_entry.options.get(
CONF_ALLOW_NEW_DEVICES, DEFAULT_ALLOW_NEW_DEVICES
)
# Signals
@property
def signal_reachable(self) -> str:
"""Gateway specific event to signal a change in connection status."""
return f"deconz-reachable-{self.bridgeid}"
@callback
def async_signal_new_device(self, device_type) -> str:
"""Gateway specific event to signal new device."""
new_device = {
NEW_GROUP: f"deconz_new_group_{self.bridgeid}",
NEW_LIGHT: f"deconz_new_light_{self.bridgeid}",
NEW_SCENE: f"deconz_new_scene_{self.bridgeid}",
NEW_SENSOR: f"deconz_new_sensor_{self.bridgeid}",
}
return new_device[device_type]
# Callbacks
@callback
def async_connection_status_callback(self, available) -> None:
"""Handle signals of gateway connection status."""
self.available = available
self.ignore_state_updates = False
async_dispatcher_send(self.hass, self.signal_reachable, True)
@callback
def async_add_device_callback(self, device_type, device) -> None:
"""Handle event of new device creation in deCONZ."""
if not self.option_allow_new_devices:
return
if not isinstance(device, list):
device = [device]
async_dispatcher_send(
self.hass, self.async_signal_new_device(device_type), device
)
async def async_update_device_registry(self) -> None:
"""Update device registry."""
device_registry = await self.hass.helpers.device_registry.async_get_registry()
# Host device
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id,
connections={(CONNECTION_NETWORK_MAC, self.api.config.mac)},
)
# Gateway service
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id,
identifiers={(DOMAIN, self.api.config.bridgeid)},
manufacturer="Dresden Elektronik",
model=self.api.config.modelid,
name=self.api.config.name,
sw_version=self.api.config.swversion,
via_device=(CONNECTION_NETWORK_MAC, self.api.config.mac),
)
async def async_setup(self) -> bool:
"""Set up a deCONZ gateway."""
try:
self.api = await get_gateway(
self.hass,
self.config_entry.data,
self.async_add_device_callback,
self.async_connection_status_callback,
)
except CannotConnect as err:
raise ConfigEntryNotReady from err
except Exception as err: # pylint: disable=broad-except
LOGGER.error("Error connecting with deCONZ gateway: %s", err)
return False
for component in SUPPORTED_PLATFORMS:
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, component
)
)
self.hass.async_create_task(async_setup_events(self))
self.api.start()
self.config_entry.add_update_listener(self.async_config_entry_updated)
return True
@staticmethod
async def async_config_entry_updated(hass, entry) -> None:
"""Handle signals of config entry being updated.

        This is a static method because a bound method can not be used with
        weak references.

        Causes for this are either discovery updating the host address or
        config entry options changing.
"""
gateway = get_gateway_from_config_entry(hass, entry)
if gateway.api.host != gateway.host:
gateway.api.close()
gateway.api.host = gateway.host
gateway.api.start()
return
await gateway.options_updated()
async def options_updated(self):
"""Manage entities affected by config entry options."""
deconz_ids = []
if self._current_option_allow_clip_sensor != self.option_allow_clip_sensor:
self._current_option_allow_clip_sensor = self.option_allow_clip_sensor
sensors = [
sensor
for sensor in self.api.sensors.values()
if sensor.type.startswith("CLIP")
]
if self.option_allow_clip_sensor:
self.async_add_device_callback(NEW_SENSOR, sensors)
else:
deconz_ids += [sensor.deconz_id for sensor in sensors]
if self._current_option_allow_deconz_groups != self.option_allow_deconz_groups:
self._current_option_allow_deconz_groups = self.option_allow_deconz_groups
groups = list(self.api.groups.values())
if self.option_allow_deconz_groups:
self.async_add_device_callback(NEW_GROUP, groups)
else:
deconz_ids += [group.deconz_id for group in groups]
entity_registry = await self.hass.helpers.entity_registry.async_get_registry()
for entity_id, deconz_id in self.deconz_ids.items():
if deconz_id in deconz_ids and entity_registry.async_is_registered(
entity_id
):
                # Removing an entity from the entity registry will also remove
                # it from Home Assistant
entity_registry.async_remove(entity_id)
@callback
def shutdown(self, event) -> None:
"""Wrap the call to deconz.close.
Used as an argument to EventBus.async_listen_once.
"""
self.api.close()
async def async_reset(self):
"""Reset this gateway to default state."""
self.api.async_connection_status_callback = None
self.api.close()
for component in SUPPORTED_PLATFORMS:
await self.hass.config_entries.async_forward_entry_unload(
self.config_entry, component
)
for unsub_dispatcher in self.listeners:
unsub_dispatcher()
self.listeners = []
async_unload_events(self)
self.deconz_ids = {}
return True
async def get_gateway(
hass, config, async_add_device_callback, async_connection_status_callback
) -> DeconzSession:
"""Create a gateway object and verify configuration."""
session = aiohttp_client.async_get_clientsession(hass)
deconz = DeconzSession(
session,
config[CONF_HOST],
config[CONF_PORT],
config[CONF_API_KEY],
async_add_device=async_add_device_callback,
connection_status=async_connection_status_callback,
)
try:
with async_timeout.timeout(10):
await deconz.initialize()
return deconz
except errors.Unauthorized as err:
LOGGER.warning("Invalid key for deCONZ at %s", config[CONF_HOST])
raise AuthenticationRequired from err
except (asyncio.TimeoutError, errors.RequestError) as err:
LOGGER.error("Error connecting to deCONZ gateway at %s", config[CONF_HOST])
raise CannotConnect from err
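

# A hedged sketch of how a platform consumes the gateway's "new device"
# signal; ``async_add_sensor`` is an illustrative callback name.
#
# from homeassistant.helpers.dispatcher import async_dispatcher_connect
#
# gateway.listeners.append(
#     async_dispatcher_connect(
#         hass, gateway.async_signal_new_device(NEW_SENSOR), async_add_sensor
#     )
# )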
|
import logging
from unittest.mock import Mock, patch
from kombu.utils.debug import Logwrapped, setup_logging
class test_setup_logging:
def test_adds_handlers_sets_level(self):
with patch('kombu.utils.debug.get_logger') as get_logger:
logger = get_logger.return_value = Mock()
setup_logging(loggers=['kombu.test'])
get_logger.assert_called_with('kombu.test')
logger.addHandler.assert_called()
logger.setLevel.assert_called_with(logging.DEBUG)
class test_Logwrapped:
def test_wraps(self):
with patch('kombu.utils.debug.get_logger') as get_logger:
logger = get_logger.return_value = Mock()
W = Logwrapped(Mock(), 'kombu.test')
get_logger.assert_called_with('kombu.test')
assert W.instance is not None
assert W.logger is logger
W.instance.__repr__ = lambda s: 'foo'
assert repr(W) == 'foo'
W.instance.some_attr = 303
assert W.some_attr == 303
W.instance.some_method.__name__ = 'some_method'
W.some_method(1, 2, kw=1)
W.instance.some_method.assert_called_with(1, 2, kw=1)
W.some_method()
W.instance.some_method.assert_called_with()
W.some_method(kw=1)
W.instance.some_method.assert_called_with(kw=1)
W.ident = 'ident'
W.some_method(kw=1)
logger.debug.assert_called()
assert 'ident' in logger.debug.call_args[0][0]
assert dir(W) == dir(W.instance)
|
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
'PERFKIT_INTEGRATION not in environment')
class GcpScratchDiskIntegrationTest(unittest.TestCase):
"""Integration tests for GCE disks.
Please see the section on integration testing in the README.
"""
def setUp(self):
pkb.SetUpPKB()
def testPDStandard(self):
test_util.assertDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'GCP',
'vm_spec': {
'GCP': {
'machine_type': 'n1-standard-2',
'zone': 'us-central1-a'
}
},
'disk_spec': {
'GCP': {
'disk_type': 'pd-standard',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testPDSSD(self):
test_util.assertDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'GCP',
'vm_spec': {
'GCP': {
'machine_type': 'n1-standard-2',
'zone': 'us-central1-a'
}
},
'disk_spec': {
'GCP': {
'disk_type': 'pd-ssd',
'disk_size': 2,
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
def testLocalSSD(self):
test_util.assertDiskMounts({
'vm_groups': {
'vm_group_1': {
'cloud': 'GCP',
'vm_spec': {
'GCP': {
'machine_type': 'n1-standard-2',
'zone': 'us-central1-a',
'num_local_ssds': 1
}
},
'disk_spec': {
'GCP': {
'disk_type': 'local',
'mount_point': MOUNT_POINT
}
}
}
}
}, MOUNT_POINT)
if __name__ == '__main__':
unittest.main()
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert 'instance' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN
from .devolo_device import DevoloDeviceEntity
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Get all devices and setup the switch devices via config entry."""
entities = []
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
for device in gateway.binary_switch_devices:
for binary_switch in device.binary_switch_property:
                # Exclude binary switches that also have multi_level_switches,
                # because those are implemented as light entities now.
if not hasattr(device, "multi_level_switch_property"):
entities.append(
DevoloSwitch(
homecontrol=gateway,
device_instance=device,
element_uid=binary_switch,
)
)
async_add_entities(entities)
class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
"""Representation of a switch."""
def __init__(self, homecontrol, device_instance, element_uid):
"""Initialize an devolo Switch."""
super().__init__(
homecontrol=homecontrol,
device_instance=device_instance,
element_uid=element_uid,
)
self._binary_switch_property = self._device_instance.binary_switch_property.get(
self._unique_id
)
self._is_on = self._binary_switch_property.state
if hasattr(self._device_instance, "consumption_property"):
self._consumption = self._device_instance.consumption_property.get(
self._unique_id.replace("BinarySwitch", "Meter")
).current
else:
self._consumption = None
@property
def is_on(self):
"""Return the state."""
return self._is_on
@property
def current_power_w(self):
"""Return the current consumption."""
return self._consumption
def turn_on(self, **kwargs):
"""Switch on the device."""
self._is_on = True
self._binary_switch_property.set(state=True)
def turn_off(self, **kwargs):
"""Switch off the device."""
self._is_on = False
self._binary_switch_property.set(state=False)
def _sync(self, message):
"""Update the binary switch state and consumption."""
if message[0].startswith("devolo.BinarySwitch"):
self._is_on = self._device_instance.binary_switch_property[message[0]].state
elif message[0].startswith("devolo.Meter"):
self._consumption = self._device_instance.consumption_property[
message[0]
].current
else:
self._generic_message(message)
self.schedule_update_ha_state()
|
import os
import pytest
from molecule import config
from molecule.driver import docker
@pytest.fixture
def _instance(config_instance):
return docker.Docker(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'docker' == _instance.name
def test_options_property(_instance):
x = {'managed': True}
assert x == _instance.options
def test_login_cmd_template_property(_instance):
x = ('docker exec '
'-e COLUMNS={columns} '
'-e LINES={lines} '
'-e TERM=bash '
'-e TERM=xterm '
'-ti {instance} bash')
assert x == _instance.login_cmd_template
def test_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'Dockerfile')
]
assert x == _instance.safe_files
def test_default_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'Dockerfile')
]
assert x == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
assert [] == _instance.default_ssh_connection_options
def test_login_options(_instance):
assert {'instance': 'foo'} == _instance.login_options('foo')
def test_ansible_connection_options(_instance):
x = {'ansible_connection': 'docker'}
assert x == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
def test_ssh_connection_options_property(_instance):
assert [] == _instance.ssh_connection_options
def test_status(_instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'docker'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'docker'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
def test_sanity_checks_missing_docker_dependency(mocker, _instance):
target = 'ansible.module_utils.docker_common.HAS_DOCKER_PY'
mocker.patch(target, False)
with pytest.raises(SystemExit):
_instance.sanity_checks()
|
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ICON,
CONF_NAME,
CONF_TYPE,
CONF_UNIT_OF_MEASUREMENT,
)
from homeassistant.helpers.entity import Entity
from . import DOMAIN as DAIKIN_DOMAIN, DaikinApi
from .const import (
ATTR_COOL_ENERGY,
ATTR_HEAT_ENERGY,
ATTR_HUMIDITY,
ATTR_INSIDE_TEMPERATURE,
ATTR_OUTSIDE_TEMPERATURE,
ATTR_TARGET_HUMIDITY,
ATTR_TOTAL_POWER,
SENSOR_TYPE_ENERGY,
SENSOR_TYPE_HUMIDITY,
SENSOR_TYPE_POWER,
SENSOR_TYPE_TEMPERATURE,
SENSOR_TYPES,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up the Daikin sensors.
Can only be called when a user accidentally mentions the platform in their
config. But even in that case it would have been ignored.
"""
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Daikin climate based on config_entry."""
daikin_api = hass.data[DAIKIN_DOMAIN].get(entry.entry_id)
sensors = [ATTR_INSIDE_TEMPERATURE]
if daikin_api.device.support_outside_temperature:
sensors.append(ATTR_OUTSIDE_TEMPERATURE)
if daikin_api.device.support_energy_consumption:
sensors.append(ATTR_TOTAL_POWER)
sensors.append(ATTR_COOL_ENERGY)
sensors.append(ATTR_HEAT_ENERGY)
if daikin_api.device.support_humidity:
sensors.append(ATTR_HUMIDITY)
sensors.append(ATTR_TARGET_HUMIDITY)
async_add_entities([DaikinSensor.factory(daikin_api, sensor) for sensor in sensors])
class DaikinSensor(Entity):
"""Representation of a Sensor."""
@staticmethod
def factory(api: DaikinApi, monitored_state: str):
"""Initialize any DaikinSensor."""
cls = {
SENSOR_TYPE_TEMPERATURE: DaikinClimateSensor,
SENSOR_TYPE_HUMIDITY: DaikinClimateSensor,
SENSOR_TYPE_POWER: DaikinPowerSensor,
SENSOR_TYPE_ENERGY: DaikinPowerSensor,
}[SENSOR_TYPES[monitored_state][CONF_TYPE]]
return cls(api, monitored_state)
def __init__(self, api: DaikinApi, monitored_state: str) -> None:
"""Initialize the sensor."""
self._api = api
self._sensor = SENSOR_TYPES[monitored_state]
self._name = f"{api.name} {self._sensor[CONF_NAME]}"
self._device_attribute = monitored_state
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self._api.device.mac}-{self._device_attribute}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
raise NotImplementedError
@property
def device_class(self):
"""Return the class of this device."""
return self._sensor.get(CONF_DEVICE_CLASS)
@property
def icon(self):
"""Return the icon of this device."""
return self._sensor.get(CONF_ICON)
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._sensor[CONF_UNIT_OF_MEASUREMENT]
async def async_update(self):
"""Retrieve latest state."""
await self._api.async_update()
@property
def device_info(self):
"""Return a device description for device registry."""
return self._api.device_info
class DaikinClimateSensor(DaikinSensor):
"""Representation of a Climate Sensor."""
@property
def state(self):
"""Return the internal state of the sensor."""
if self._device_attribute == ATTR_INSIDE_TEMPERATURE:
return self._api.device.inside_temperature
if self._device_attribute == ATTR_OUTSIDE_TEMPERATURE:
return self._api.device.outside_temperature
if self._device_attribute == ATTR_HUMIDITY:
return self._api.device.humidity
if self._device_attribute == ATTR_TARGET_HUMIDITY:
return self._api.device.target_humidity
return None
class DaikinPowerSensor(DaikinSensor):
"""Representation of a power/energy consumption sensor."""
@property
def state(self):
"""Return the state of the sensor."""
if self._device_attribute == ATTR_TOTAL_POWER:
return round(self._api.device.current_total_power_consumption, 3)
if self._device_attribute == ATTR_COOL_ENERGY:
return round(self._api.device.last_hour_cool_energy_consumption, 3)
if self._device_attribute == ATTR_HEAT_ENERGY:
return round(self._api.device.last_hour_heat_energy_consumption, 3)
return None
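# Illustration (names are from this module; `daikin_api` is a hypothetical
# instance): DaikinSensor.factory dispatches on
# SENSOR_TYPES[monitored_state][CONF_TYPE], so a temperature attribute yields
# a DaikinClimateSensor and a power/energy attribute a DaikinPowerSensor:
#
#     sensor = DaikinSensor.factory(daikin_api, ATTR_INSIDE_TEMPERATURE)
#     assert isinstance(sensor, DaikinClimateSensor)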
|
import numpy as np
import six
from chainercv.utils.testing.assertions.assert_is_image import assert_is_image
def assert_is_semantic_segmentation_dataset(dataset, n_class, n_example=None):
"""Checks if a dataset satisfies semantic segmentation dataset APIs.
This function checks if a given dataset satisfies semantic segmentation
dataset APIs or not.
    If the dataset does not satisfy the APIs, this function raises an
:class:`AssertionError`.
Args:
dataset: A dataset to be checked.
n_class (int): The number of classes including background.
n_example (int): The number of examples to be checked.
If this argument is specified, this function picks
            examples randomly and checks them. Otherwise,
this function checks all examples.
"""
assert len(dataset) > 0, 'The length of dataset must be greater than zero.'
if n_example:
for _ in six.moves.range(n_example):
i = np.random.randint(0, len(dataset))
_check_example(dataset[i], n_class)
else:
for i in six.moves.range(len(dataset)):
_check_example(dataset[i], n_class)
def _check_example(example, n_class):
assert len(example) >= 2, \
        'Each example must have at least two elements: ' \
'img and label.'
img, label = example[:2]
assert_is_image(img, color=True)
assert isinstance(label, np.ndarray), \
'label must be a numpy.ndarray.'
assert label.dtype == np.int32, \
'The type of label must be numpy.int32.'
assert label.shape == img.shape[1:], \
'The shape of label must be (H, W).'
assert label.min() >= -1 and label.max() < n_class, \
'The value of label must be in [-1, n_class - 1].'
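# A minimal usage sketch (DummyDataset is hypothetical, not part of chainercv):
#
#     class DummyDataset(object):
#         def __len__(self):
#             return 10
#         def __getitem__(self, i):
#             img = np.random.uniform(size=(3, 32, 48)).astype(np.float32)
#             label = np.random.randint(-1, 21, size=(32, 48)).astype(np.int32)
#             return img, label
#
#     assert_is_semantic_segmentation_dataset(DummyDataset(), n_class=21)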
|
from netort.process import execute
from ...common.interfaces import AbstractPlugin
class Plugin(AbstractPlugin):
"""
ShellExec plugin
allows executing shell scripts before/after test
"""
SECTION = 'shellexec'
def __init__(self, core, cfg, name):
AbstractPlugin.__init__(self, core, cfg, name)
self.catch_out = False
self.end = None
self.poll = None
self.prepare = None
self.start = None
self.postprocess = None
@staticmethod
def get_key():
return __file__
def get_available_options(self):
return ["prepare", "start", "end", "poll", "post_process", "catch_out"]
def configure(self):
        self.catch_out = bool(self.get_option("catch_out", False))
self.prepare = self.get_option("prepare", '')
self.start = self.get_option("start", '')
self.end = self.get_option("end", '')
self.poll = self.get_option("poll", '')
self.postprocess = self.get_option("post_process", '')
def prepare_test(self):
if self.prepare:
self.execute(self.prepare)
def start_test(self):
if self.start:
self.execute(self.start)
def is_test_finished(self):
if self.poll:
self.log.info("Executing: %s", self.poll)
retcode = execute(
self.poll,
shell=True,
poll_period=0.1,
catch_out=self.catch_out)[0]
if retcode:
                self.log.warning(
"Non-zero exit code, interrupting test: %s", retcode)
return retcode
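        # Returning -1 signals "test still running"; a non-zero poll result
        # above is returned instead and interrupts the test.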
return -1
def end_test(self, retcode):
if self.end:
self.execute(self.end)
return retcode
def post_process(self, retcode):
if self.postprocess:
self.execute(self.postprocess)
return retcode
def execute(self, cmd):
"""
Execute and check exit code
"""
self.log.info("Executing: %s", cmd)
retcode = execute(
cmd, shell=True, poll_period=0.1, catch_out=self.catch_out)[0]
if retcode:
raise RuntimeError("Subprocess returned %s" % retcode)
return retcode
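# A minimal standalone sketch (run outside the tank core) of the
# netort.process.execute contract this plugin relies on: it returns a
# sequence whose first element is the subprocess return code.
#
#     retcode = execute('exit 3', shell=True, poll_period=0.1, catch_out=False)[0]
#     assert retcode == 3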
|
import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
@pytest.mark.skip(reason='Scenario tests not implemented yet')
def test_hostname(host):
assert 'instance' == host.check_output('hostname -s')
@pytest.mark.skip(reason='Scenario tests not implemented yet')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
@pytest.mark.skip(reason='Scenario tests not implemented yet')
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
from pprint import pprint
from riko.bado import coroutine
from riko.collections import SyncPipe, AsyncPipe
p1_conf = {
'attrs': [
{
'value': 'http://www.caltrain.com/Fares/farechart.html',
'key': 'url'}]}
p2_conf = {
'rule': {
'field': 'url', 'match': {'subkey': 'url'}, 'replace': 'farechart'}}
def pipe(test=False):
stream = (SyncPipe('itembuilder', conf=p1_conf, test=test)
.regex(conf=p2_conf)
.list)
for i in stream:
pprint(str(i['url']))
return stream
@coroutine
def async_pipe(reactor, test=False):
stream = yield (AsyncPipe('itembuilder', conf=p1_conf, test=test)
.regex(conf=p2_conf)
.list)
for i in stream:
pprint(str(i['url']))
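# Run the synchronous pipe when executed directly (note: this fetches the
# caltrain URL over the network).
if __name__ == '__main__':
    pipe()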
|
import re # noqa: F401
import sys # noqa: F401
import nulltype # noqa: F401
from paasta_tools.paastaapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from paasta_tools.paastaapi.model.resource_item import ResourceItem
globals()['ResourceItem'] = ResourceItem
class Resource(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'value': ([ResourceItem],),
}
@cached_property
def discriminator():
return None
attribute_map = {}
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""Resource - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] ([ResourceItem]): # noqa: E501
Keyword Args:
value ([ResourceItem]): # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
        # Pop the reserved keyword arguments first, so that _path_to_item is
        # defined before the error path below can reference it.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
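# Usage sketch (assumption: an empty list type-checks against [ResourceItem]):
#
#     r1 = Resource([])        # value passed positionally
#     r2 = Resource(value=[])  # value passed as a keyword
#     Resource([], value=[])   # raises ApiTypeError: value in args AND kwargs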
|
import unittest
class TestImports(unittest.TestCase):
    def test_import_boto3(self):
        import boto3
        print(boto3)
    def test_import_numpy(self):
        import numpy
        print(numpy)
    def test_import_osmium(self):
        import osmium
        print(osmium)
    def test_import_requests(self):
        import requests
        print(requests)
    def test_import_shapely(self):
        import shapely
        print(shapely)
    def test_import_tensorflow(self):
        import tensorflow
        print(tensorflow)
    def test_import_tflearn(self):
        import tflearn
        print(tflearn)
    def test_import_osgeo(self):
        import osgeo
        print(osgeo)
    def test_import_PIL(self):
        import PIL
        print(PIL)
    def test_import_pyproj(self):
        import pyproj
        print(pyproj)
if __name__ == "__main__":
unittest.main()
|
import sys
import unittest
import numpy as np
from pandas import DataFrame
from mock import patch, call
from pgmpy.factors.discrete import State
from pgmpy.models import MarkovChain as MC
class TestMarkovChain(unittest.TestCase):
def setUp(self):
self.variables = ["intel", "diff", "grade"]
self.card = [3, 2, 3]
self.cardinalities = {"intel": 3, "diff": 2, "grade": 3}
self.intel_tm = {
0: {0: 0.1, 1: 0.25, 2: 0.65},
1: {0: 0.5, 1: 0.3, 2: 0.2},
2: {0: 0.3, 1: 0.3, 2: 0.4},
}
self.intel_tm_matrix = np.array(
[[0.1, 0.25, 0.65], [0.5, 0.3, 0.2], [0.3, 0.3, 0.4]]
)
self.diff_tm = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.75, 1: 0.25}}
self.diff_tm_matrix = np.array([[0.3, 0.7], [0.75, 0.25]])
self.grade_tm = {
0: {0: 0.4, 1: 0.2, 2: 0.4},
1: {0: 0.9, 1: 0.05, 2: 0.05},
2: {0: 0.1, 1: 0.4, 2: 0.5},
}
self.grade_tm_matrix = [[0.4, 0.2, 0.4], [0.9, 0.05, 0.05], [0.1, 0.4, 0.5]]
self.start_state = [State("intel", 0), State("diff", 1), State("grade", 2)]
self.model = MC()
self.sample = DataFrame(index=range(200), columns=["a", "b"])
self.sample.a = [1] * 100 + [0] * 100
self.sample.b = [0] * 100 + [1] * 100
def tearDown(self):
del self.variables
del self.card
del self.cardinalities
del self.intel_tm
del self.diff_tm
del self.grade_tm
del self.start_state
del self.model
del self.sample
@patch("pgmpy.models.MarkovChain._check_state", autospec=True)
def test_init(self, check_state):
model = MC(self.variables, self.card, self.start_state)
self.assertListEqual(model.variables, self.variables)
self.assertDictEqual(model.cardinalities, self.cardinalities)
self.assertDictEqual(
model.transition_models, {var: {} for var in self.variables}
)
check_state.assert_called_once_with(model, self.start_state)
self.assertListEqual(model.state, self.start_state)
def test_init_bad_variables_type(self):
# variables is non-iterable
self.assertRaises(ValueError, MC, variables=123)
# variables is a string
self.assertRaises(ValueError, MC, variables="abc")
def test_init_bad_card_type(self):
# card is non-iterable
self.assertRaises(ValueError, MC, card=123)
# card is a string
self.assertRaises(ValueError, MC, card="abc")
def test_init_less_args(self):
model = MC()
self.assertListEqual(model.variables, [])
self.assertDictEqual(model.cardinalities, {})
self.assertDictEqual(model.transition_models, {})
self.assertIsNone(model.state)
@patch("pgmpy.models.MarkovChain._check_state", autospec=True)
def test_set_start_state_list(self, check_state):
model = MC(["b", "a"], [1, 2])
check_state.return_value = True
model.set_start_state([State("a", 0), State("b", 1)])
model_state = [State("b", 1), State("a", 0)]
check_state.assert_called_once_with(model, model_state)
self.assertEqual(model.state, model_state)
def test_set_start_state_none(self):
model = MC()
model.state = "state"
model.set_start_state(None)
self.assertIsNone(model.state)
def test_check_state_bad_type(self):
model = MC()
# state is non-iterable
self.assertRaises(ValueError, model._check_state, 123)
# state is a string
self.assertRaises(ValueError, model._check_state, "abc")
def test_check_state_bad_vars(self):
model = MC()
# state_vars and model_vars differ
self.assertRaises(ValueError, model._check_state, [State(1, 2)])
def test_check_state_bad_var_value(self):
model = MC(["a"], [2])
        # value of variable >= cardinality
self.assertRaises(ValueError, model._check_state, [State("a", 3)])
def test_check_state_success(self):
model = MC(["a"], [2])
self.assertTrue(model._check_state([State("a", 1)]))
def test_add_variable_new(self):
model = MC(["a"], [2])
model.add_variable("p", 3)
self.assertIn("p", model.variables)
self.assertEqual(model.cardinalities["p"], 3)
self.assertDictEqual(model.transition_models["p"], {})
def test_copy(self):
model = MC(["a", "b"], [2, 2], [State("a", 0), State("b", 1)])
model.add_transition_model("a", {0: {0: 0.1, 1: 0.9}, 1: {0: 0.2, 1: 0.8}})
model.add_transition_model("b", {0: {0: 0.3, 1: 0.7}, 1: {0: 0.4, 1: 0.6}})
copy = model.copy()
self.assertIsInstance(copy, MC)
self.assertEqual(sorted(model.variables), sorted(copy.variables))
self.assertEqual(model.cardinalities, copy.cardinalities)
self.assertEqual(model.transition_models, copy.transition_models)
self.assertEqual(model.state, copy.state)
model.add_variable("p", 1)
model.set_start_state([State("a", 0), State("b", 1), State("p", 0)])
model.add_transition_model("p", {0: {0: 1}})
self.assertNotEqual(sorted(model.variables), sorted(copy.variables))
self.assertEqual(sorted(["a", "b"]), sorted(copy.variables))
self.assertNotEqual(model.cardinalities, copy.cardinalities)
self.assertEqual({"a": 2, "b": 2}, copy.cardinalities)
self.assertNotEqual(model.state, copy.state)
self.assertEqual([State("a", 0), State("b", 1)], copy.state)
self.assertNotEqual(model.transition_models, copy.transition_models)
self.assertEqual(len(copy.transition_models), 2)
self.assertEqual(
copy.transition_models["a"], {0: {0: 0.1, 1: 0.9}, 1: {0: 0.2, 1: 0.8}}
)
self.assertEqual(
copy.transition_models["b"], {0: {0: 0.3, 1: 0.7}, 1: {0: 0.4, 1: 0.6}}
)
@patch.object(sys.modules["pgmpy.models.MarkovChain"], "warn")
def test_add_variable_existing(self, warn):
model = MC(["p"], [2])
model.add_variable("p", 3)
self.assertEqual(warn.call_count, 1)
@patch("pgmpy.models.MarkovChain.add_variable", autospec=True)
def test_add_variables_from(self, add_var):
model = MC()
model.add_variables_from(self.variables, self.card)
calls = [call(model, *p) for p in zip(self.variables, self.card)]
add_var.assert_has_calls(calls)
def test_add_transition_model_bad_type(self):
model = MC()
grade_tm_matrix_bad = [[0.1, 0.5, 0.4], [0.2, 0.2, 0.6], "abc"]
# if transition_model is not a dict or np.array
self.assertRaises(ValueError, model.add_transition_model, "var", 123)
self.assertRaises(
ValueError, model.add_transition_model, "var", grade_tm_matrix_bad
)
def test_add_transition_model_bad_states(self):
model = MC(["var"], [2])
# transition for state=1 not defined
transition_model = {0: {0: 0.1, 1: 0.9}}
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
def test_add_transition_model_bad_transition(self):
model = MC(["var"], [2])
# transition for state=1 is not a dict
transition_model = {0: {0: 0.1, 1: 0.9}, 1: "abc"}
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
def test_add_transition_model_bad_probability(self):
model = MC(["var"], [2])
transition_model = {0: {0: -0.1, 1: 1.1}, 1: {0: 0.5, 1: 0.5}}
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
def test_add_transition_model_bad_probability_sum(self):
model = MC(["var"], [2])
# transition probabilities from state=0 do not sum to 1.0
transition_model = {0: {0: 0.1, 1: 0.2}, 1: {0: 0.5, 1: 0.5}}
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
def test_add_transition_model_success(self):
model = MC(["var"], [2])
transition_model = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.5, 1: 0.5}}
model.add_transition_model("var", transition_model)
self.assertDictEqual(model.transition_models["var"], transition_model)
def test_transition_model_bad_matrix_dimension(self):
model = MC(["var"], [2])
transition_model = np.array([0.3, 0.7])
# check for square dimension of the matrix
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
transition_model = np.array([[0.3, 0.6, 0.1], [0.3, 0.3, 0.4]])
self.assertRaises(
ValueError, model.add_transition_model, "var", transition_model
)
def test_transition_model_dict_to_matrix(self):
model = MC(["var"], [2])
transition_model = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.5, 1: 0.5}}
transition_model_matrix = np.array([[0.3, 0.7], [0.5, 0.5]])
model.add_transition_model("var", transition_model_matrix)
self.assertDictEqual(model.transition_models["var"], transition_model)
def test_sample(self):
model = MC(["a", "b"], [2, 2])
model.transition_models["a"] = {0: {0: 0.1, 1: 0.9}, 1: {0: 0.2, 1: 0.8}}
model.transition_models["b"] = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.4, 1: 0.6}}
sample = model.sample(start_state=[State("a", 0), State("b", 1)], size=2)
self.assertEqual(len(sample), 2)
self.assertEqual(list(sample.columns), ["a", "b"])
self.assertTrue(list(sample.loc[0]) in [[0, 0], [0, 1], [1, 0], [1, 1]])
self.assertTrue(list(sample.loc[1]) in [[0, 0], [0, 1], [1, 0], [1, 1]])
@patch("pgmpy.models.MarkovChain.random_state", autospec=True)
def test_sample_less_arg(self, random_state):
model = MC(["a", "b"], [2, 2])
random_state.return_value = [State("a", 0), State("b", 1)]
sample = model.sample(size=1)
random_state.assert_called_once_with(model)
self.assertEqual(model.state, random_state.return_value)
self.assertEqual(len(sample), 1)
self.assertEqual(list(sample.columns), ["a", "b"])
self.assertEqual(list(sample.loc[0]), [0, 1])
@patch("pgmpy.models.MarkovChain.sample", autospec=True)
def test_prob_from_sample(self, sample):
model = MC(["a", "b"], [2, 2])
sample.return_value = self.sample
        probabilities = model.prob_from_sample([State("a", 1), State("b", 0)])
        self.assertEqual(list(probabilities), [1] * 50 + [0] * 50)
def test_is_stationarity_success(self):
model = MC(["intel", "diff"], [2, 3])
model.set_start_state([State("intel", 0), State("diff", 2)])
intel_tm = {0: {0: 0.25, 1: 0.75}, 1: {0: 0.5, 1: 0.5}}
model.add_transition_model("intel", intel_tm)
diff_tm = {
0: {0: 0.1, 1: 0.5, 2: 0.4},
1: {0: 0.2, 1: 0.2, 2: 0.6},
2: {0: 0.7, 1: 0.15, 2: 0.15},
}
model.add_transition_model("diff", diff_tm)
        self.assertTrue(model.is_stationarity())
def test_is_stationarity_failure(self):
model = MC(["intel", "diff"], [2, 3])
model.set_start_state([State("intel", 0), State("diff", 2)])
intel_tm = {0: {0: 0.25, 1: 0.75}, 1: {0: 0.5, 1: 0.5}}
model.add_transition_model("intel", intel_tm)
diff_tm = {
0: {0: 0.1, 1: 0.5, 2: 0.4},
1: {0: 0.2, 1: 0.2, 2: 0.6},
2: {0: 0.7, 1: 0.15, 2: 0.15},
}
model.add_transition_model("diff", diff_tm)
self.assertFalse(model.is_stationarity(0.002, None))
@patch.object(sys.modules["pgmpy.models.MarkovChain"], "sample_discrete")
def test_generate_sample(self, sample_discrete):
model = MC(["a", "b"], [2, 2])
model.transition_models["a"] = {0: {0: 0.1, 1: 0.9}, 1: {0: 0.2, 1: 0.8}}
model.transition_models["b"] = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.4, 1: 0.6}}
sample_discrete.side_effect = [[1], [0]] * 2
gen = model.generate_sample(start_state=[State("a", 0), State("b", 1)], size=2)
samples = [sample for sample in gen]
expected_samples = [[State("a", 1), State("b", 0)]] * 2
self.assertEqual(samples, expected_samples)
@patch.object(sys.modules["pgmpy.models.MarkovChain"], "sample_discrete")
@patch("pgmpy.models.MarkovChain.random_state", autospec=True)
def test_generate_sample_less_arg(self, random_state, sample_discrete):
model = MC(["a", "b"], [2, 2])
model.transition_models["a"] = {0: {0: 0.1, 1: 0.9}, 1: {0: 0.2, 1: 0.8}}
model.transition_models["b"] = {0: {0: 0.3, 1: 0.7}, 1: {0: 0.4, 1: 0.6}}
random_state.return_value = [State("a", 0), State("b", 1)]
sample_discrete.side_effect = [[1], [0]] * 2
gen = model.generate_sample(size=2)
samples = [sample for sample in gen]
expected_samples = [[State("a", 1), State("b", 0)]] * 2
self.assertEqual(samples, expected_samples)
def test_random_state(self):
model = MC(["a", "b"], [2, 3])
state = model.random_state()
vars = [v for v, s in state]
self.assertEqual(vars, ["a", "b"])
self.assertGreaterEqual(state[0].state, 0)
self.assertGreaterEqual(state[1].state, 0)
self.assertLessEqual(state[0].state, 1)
self.assertLessEqual(state[1].state, 2)
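# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()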
|
from typing import Dict
from urllib.parse import urlparse
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .model import Integration
DOCUMENTATION_URL_SCHEMA = "https"
DOCUMENTATION_URL_HOST = "www.home-assistant.io"
DOCUMENTATION_URL_PATH_PREFIX = "/integrations/"
DOCUMENTATION_URL_EXCEPTIONS = {"https://www.home-assistant.io/hassio"}
SUPPORTED_QUALITY_SCALES = ["gold", "internal", "platinum", "silver"]
def documentation_url(value: str) -> str:
"""Validate that a documentation url has the correct path and domain."""
if value in DOCUMENTATION_URL_EXCEPTIONS:
return value
parsed_url = urlparse(value)
if parsed_url.scheme != DOCUMENTATION_URL_SCHEMA:
raise vol.Invalid("Documentation url is not prefixed with https")
if parsed_url.netloc == DOCUMENTATION_URL_HOST and not parsed_url.path.startswith(
DOCUMENTATION_URL_PATH_PREFIX
):
raise vol.Invalid(
"Documentation url does not begin with www.home-assistant.io/integrations"
)
return value
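# For illustration (hypothetical URLs): this validator accepts
# "https://www.home-assistant.io/integrations/hue" and the listed exception
# "https://www.home-assistant.io/hassio"; it raises vol.Invalid for
# "http://www.home-assistant.io/integrations/hue" (wrong scheme) and for
# "https://www.home-assistant.io/hue" (missing the /integrations/ prefix).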
MANIFEST_SCHEMA = vol.Schema(
{
vol.Required("domain"): str,
vol.Required("name"): str,
vol.Optional("config_flow"): bool,
vol.Optional("mqtt"): [str],
vol.Optional("zeroconf"): [
vol.Any(
str,
vol.Schema(
{
vol.Required("type"): str,
vol.Optional("macaddress"): str,
vol.Optional("name"): str,
}
),
)
],
vol.Optional("ssdp"): vol.Schema(
vol.All([vol.All(vol.Schema({}, extra=vol.ALLOW_EXTRA), vol.Length(min=1))])
),
vol.Optional("homekit"): vol.Schema({vol.Optional("models"): [str]}),
vol.Required("documentation"): vol.All(
vol.Url(), documentation_url # pylint: disable=no-value-for-parameter
),
vol.Optional(
"issue_tracker"
): vol.Url(), # pylint: disable=no-value-for-parameter
vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES),
vol.Optional("requirements"): [str],
vol.Optional("dependencies"): [str],
vol.Optional("after_dependencies"): [str],
vol.Required("codeowners"): [str],
vol.Optional("disabled"): str,
}
)
def validate_manifest(integration: Integration):
"""Validate manifest."""
try:
MANIFEST_SCHEMA(integration.manifest)
except vol.Invalid as err:
integration.add_error(
"manifest", f"Invalid manifest: {humanize_error(integration.manifest, err)}"
)
integration.manifest = None
return
if integration.manifest["domain"] != integration.path.name:
integration.add_error("manifest", "Domain does not match dir name")
def validate(integrations: Dict[str, Integration], config):
"""Handle all integrations manifests."""
for integration in integrations.values():
if integration.manifest:
validate_manifest(integration)
|
from django.http import HttpRequest, HttpResponseRedirect
from django.test import TestCase
from django.test.utils import override_settings
from weblate.accounts.middleware import RequireLoginMiddleware
from weblate.auth.models import User, get_anonymous
class MiddlewareTest(TestCase):
def view_method(self):
return "VIEW"
def test_disabled(self):
middleware = RequireLoginMiddleware()
request = HttpRequest()
self.assertIsNone(middleware.process_view(request, self.view_method, (), {}))
@override_settings(LOGIN_REQUIRED_URLS=(r"/project/(.*)$",))
def test_protect_project(self):
middleware = RequireLoginMiddleware()
request = HttpRequest()
request.user = User()
request.META["SERVER_NAME"] = "testserver"
request.META["SERVER_PORT"] = "80"
# No protection for not protected path
self.assertIsNone(middleware.process_view(request, self.view_method, (), {}))
request.path = "/project/foo/"
# No protection for protected path and signed in user
self.assertIsNone(middleware.process_view(request, self.view_method, (), {}))
# Protection for protected path and not signed in user
request.user = get_anonymous()
self.assertIsInstance(
middleware.process_view(request, self.view_method, (), {}),
HttpResponseRedirect,
)
# No protection for login and not signed in user
request.path = "/accounts/login/"
self.assertIsNone(middleware.process_view(request, self.view_method, (), {}))
|
import json
from absl import flags
from perfkitbenchmarker import container_service
from perfkitbenchmarker import context
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
from perfkitbenchmarker.providers.azure import azure_network
from perfkitbenchmarker.providers.azure import util
FLAGS = flags.FLAGS
class AciContainer(container_service.BaseContainer):
"""Class representing an ACI container."""
def __init__(self, container_spec, name, resource_group):
super(AciContainer, self).__init__(container_spec)
self.name = name
self.resource_group = resource_group
benchmark_spec = context.GetThreadBenchmarkSpec()
self.registry = benchmark_spec.container_registry
def _Create(self):
"""Creates the container."""
create_cmd = [
azure.AZURE_PATH, 'container', 'create',
'--name', self.name,
'--image', self.image,
'--restart-policy', 'Never',
'--cpu', str(int(self.cpus)),
'--memory', '%0.1f' % (self.memory / 1024.0),
] + self.resource_group.args
if self.registry and self.registry.CLOUD == azure.CLOUD:
create_cmd.extend([
'--registry-login-server', self.registry.login_server,
'--registry-username', self.registry.service_principal.app_id,
'--registry-password', self.registry.service_principal.password,
])
if self.command:
# Note that this is inconsistent with other containers which use lists
# of command/args. This creates some differences mostly when
# the command contains quotes.
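      # Illustration: ['echo', 'hello world'] is joined into the single
      # string 'echo hello world', so the embedded space stops being an
      # argv boundary.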
create_cmd.extend(['--command-line', ' '.join(self.command)])
vm_util.IssueCommand(create_cmd)
def _Delete(self):
"""Deletes the container."""
delete_cmd = [
azure.AZURE_PATH, 'container', 'delete',
'--name', self.name, '--yes',
] + self.resource_group.args
vm_util.IssueCommand(delete_cmd, raise_on_failure=False)
@property
def ip_address(self):
"""Container instances don't have private ips yet."""
raise NotImplementedError('ACI containers don\'t have private ips.')
@ip_address.setter
def ip_address(self, value):
"""Sets the containers ip_address."""
self.__ip_address = value
def _GetContainerInstance(self):
"""Gets a representation of the container and returns it."""
show_cmd = [
azure.AZURE_PATH, 'container', 'show', '--name', self.name
] + self.resource_group.args
stdout, _, _ = vm_util.IssueCommand(show_cmd)
return json.loads(stdout)
def _IsReady(self):
"""Returns true if the container has stopped pending."""
state = self._GetContainerInstance()['instanceView']['state']
return state != 'Pending'
def WaitForExit(self, timeout=None):
"""Waits until the container has finished running."""
@vm_util.Retry(timeout=timeout)
def _WaitForExit():
container = self._GetContainerInstance()['containers'][0]
state = container['instanceView']['currentState']['state']
if state != 'Terminated':
raise Exception('Container not in terminated state (%s).' % state)
_WaitForExit()
def GetLogs(self):
"""Returns the logs from the container."""
logs_cmd = [
azure.AZURE_PATH, 'container', 'logs', '--name', self.name
] + self.resource_group.args
stdout, _, _ = vm_util.IssueCommand(logs_cmd)
return stdout
class AciCluster(container_service.BaseContainerCluster):
"""Class that can deploy ACI containers."""
CLOUD = azure.CLOUD
CLUSTER_TYPE = 'aci'
def __init__(self, cluster_spec):
super(AciCluster, self).__init__(cluster_spec)
self.location = util.GetLocationFromZone(self.zone)
self.resource_group = azure_network.GetResourceGroup(self.location)
def _Create(self):
"""ACI has no cluster."""
pass
def _Delete(self):
"""ACI has no cluster."""
pass
def _CreateDependencies(self):
"""Creates the resource group."""
self.resource_group.Create()
def _DeleteDependencies(self):
"""Deletes the resource group."""
self.resource_group.Delete()
def DeployContainer(self, base_name, container_spec):
"""Deploys Containers according to the ContainerSpec."""
name = base_name + str(len(self.containers[base_name]))
container = AciContainer(container_spec, name, self.resource_group)
self.containers[base_name].append(container)
container.Create()
def DeployContainerService(self, name, container_spec):
"""Deploys a ContainerSerivice according to the ContainerSpec."""
raise NotImplementedError()
|
from sense_energy import SenseAPITimeoutException, SenseAuthenticationException
from homeassistant import config_entries, setup
from homeassistant.components.sense.const import DOMAIN
from tests.async_mock import patch
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch("sense_energy.ASyncSenseable.authenticate", return_value=True,), patch(
"homeassistant.components.sense.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.sense.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"timeout": "6", "email": "test-email", "password": "test-password"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "test-email"
assert result2["data"] == {
"timeout": 6,
"email": "test-email",
"password": "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"sense_energy.ASyncSenseable.authenticate",
side_effect=SenseAuthenticationException,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"timeout": "6", "email": "test-email", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"sense_energy.ASyncSenseable.authenticate",
side_effect=SenseAPITimeoutException,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"timeout": "6", "email": "test-email", "password": "test-password"},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
|
__author__ = '[email protected]'
from absl import flags
flags.DEFINE_string('bq_project_id', None, 'Project Id which contains the query'
' dataset and table.')
flags.DEFINE_string('bq_dataset_id', None, 'Dataset Id which contains the query'
' table.')
flags.mark_flags_as_required(['bq_project_id', 'bq_dataset_id'])
FLAGS = flags.FLAGS
def generate_provider_specific_cmd_list(script, driver, output, error):
"""Method to compile the BigQuery specific script execution command.
Arguments:
script: SQL script which contains the query.
driver: Driver that contains the BigQuery specific script executor.
output: Output log file.
error: Error log file.
Returns:
Command list to execute the supplied script.
"""
cmd_list = [driver, FLAGS.bq_project_id, FLAGS.bq_dataset_id,
script, output, error]
return cmd_list
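# Illustration (hypothetical values): with --bq_project_id=my-project and
# --bq_dataset_id=my_dataset,
#     generate_provider_specific_cmd_list('q1.sql', './driver', 'out.log', 'err.log')
# returns ['./driver', 'my-project', 'my_dataset', 'q1.sql', 'out.log', 'err.log'].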
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links import FasterRCNNVGG16
from chainercv.links.model.faster_rcnn import FasterRCNNTrainChain
from chainercv.utils import generate_random_bbox
@testing.parameterize(
{'train': False},
{'train': True}
)
class TestFasterRCNNVGG16(unittest.TestCase):
B = 2
n_fg_class = 20
n_class = 21
n_anchor = 9
n_train_post_nms = 12
n_test_post_nms = 8
n_conv5_3_channel = 512
def setUp(self):
proposal_creator_params = {
'n_train_post_nms': self.n_train_post_nms,
'n_test_post_nms': self.n_test_post_nms
}
self.link = FasterRCNNVGG16(
self.n_fg_class, pretrained_model=None,
proposal_creator_params=proposal_creator_params)
def check_call(self):
xp = self.link.xp
feat_size = (12, 16)
x = chainer.Variable(
xp.random.uniform(
low=-1., high=1.,
size=(self.B, 3, feat_size[0] * 16, feat_size[1] * 16)
).astype(np.float32))
with chainer.using_config('train', self.train):
roi_cls_locs, roi_scores, rois, roi_indices = self.link(x)
if self.train:
n_roi = self.B * self.n_train_post_nms
else:
n_roi = self.B * self.n_test_post_nms
self.assertIsInstance(roi_cls_locs, chainer.Variable)
self.assertIsInstance(roi_cls_locs.array, xp.ndarray)
self.assertEqual(roi_cls_locs.shape, (n_roi, self.n_class * 4))
self.assertIsInstance(roi_scores, chainer.Variable)
self.assertIsInstance(roi_scores.array, xp.ndarray)
self.assertEqual(roi_scores.shape, (n_roi, self.n_class))
self.assertIsInstance(rois, xp.ndarray)
self.assertEqual(rois.shape, (n_roi, 4))
self.assertIsInstance(roi_indices, xp.ndarray)
self.assertEqual(roi_indices.shape, (n_roi,))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
class TestFasterRCNNVGG16Loss(unittest.TestCase):
n_fg_class = 20
def setUp(self):
faster_rcnn = FasterRCNNVGG16(
n_fg_class=self.n_fg_class, pretrained_model=False)
self.link = FasterRCNNTrainChain(faster_rcnn)
self.n_bbox = 3
self.bboxes = chainer.Variable(
generate_random_bbox(self.n_bbox, (600, 800), 16, 350)[np.newaxis])
_labels = np.random.randint(
0, self.n_fg_class, size=(1, self.n_bbox)).astype(np.int32)
self.labels = chainer.Variable(_labels)
_imgs = np.random.uniform(
low=-122.5, high=122.5, size=(1, 3, 600, 800)).astype(np.float32)
self.imgs = chainer.Variable(_imgs)
self.scale = chainer.Variable(np.array([1.]))
def check_call(self):
loss = self.link(self.imgs, self.bboxes, self.labels, self.scale)
self.assertEqual(loss.shape, ())
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.bboxes.to_gpu()
self.labels.to_gpu()
self.imgs.to_gpu()
self.scale.to_gpu()
self.check_call()
@testing.parameterize(*testing.product({
'n_fg_class': [None, 10, 20],
'pretrained_model': ['voc0712', 'imagenet'],
}))
class TestFasterRCNNVGG16Pretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_fg_class': self.n_fg_class,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'voc0712':
valid = self.n_fg_class in {None, 20}
elif self.pretrained_model == 'imagenet':
valid = self.n_fg_class is not None
if valid:
FasterRCNNVGG16(**kwargs)
else:
with self.assertRaises(ValueError):
FasterRCNNVGG16(**kwargs)
testing.run_module(__name__, __file__)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import vgg
slim = tf.contrib.slim
class VGGATest(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_a(inputs, num_classes)
      self.assertEqual(logits.op.name, 'vgg_a/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 256, 256
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_a(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_a/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 2, 2, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = vgg.vgg_a(inputs, num_classes)
expected_names = ['vgg_a/conv1/conv1_1',
'vgg_a/pool1',
'vgg_a/conv2/conv2_1',
'vgg_a/pool2',
'vgg_a/conv3/conv3_1',
'vgg_a/conv3/conv3_2',
'vgg_a/pool3',
'vgg_a/conv4/conv4_1',
'vgg_a/conv4/conv4_2',
'vgg_a/pool4',
'vgg_a/conv5/conv5_1',
'vgg_a/conv5/conv5_2',
'vgg_a/pool5',
'vgg_a/fc6',
'vgg_a/fc7',
'vgg_a/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
vgg.vgg_a(inputs, num_classes)
expected_names = ['vgg_a/conv1/conv1_1/weights',
'vgg_a/conv1/conv1_1/biases',
'vgg_a/conv2/conv2_1/weights',
'vgg_a/conv2/conv2_1/biases',
'vgg_a/conv3/conv3_1/weights',
'vgg_a/conv3/conv3_1/biases',
'vgg_a/conv3/conv3_2/weights',
'vgg_a/conv3/conv3_2/biases',
'vgg_a/conv4/conv4_1/weights',
'vgg_a/conv4/conv4_1/biases',
'vgg_a/conv4/conv4_2/weights',
'vgg_a/conv4/conv4_2/biases',
'vgg_a/conv5/conv5_1/weights',
'vgg_a/conv5/conv5_1/biases',
'vgg_a/conv5/conv5_2/weights',
'vgg_a/conv5/conv5_2/biases',
'vgg_a/fc6/weights',
'vgg_a/fc6/biases',
'vgg_a/fc7/weights',
'vgg_a/fc7/biases',
'vgg_a/fc8/weights',
'vgg_a/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_a(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 224, 224
eval_height, eval_width = 256, 256
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = vgg.vgg_a(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = vgg.vgg_a(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 2, 2, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
      self.assertEqual(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 224, 224
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_a(inputs)
sess.run(tf.global_variables_initializer())
output = sess.run(logits)
self.assertTrue(output.any())
class VGG16Test(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_16(inputs, num_classes)
      self.assertEqual(logits.op.name, 'vgg_16/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 256, 256
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_16(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_16/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 2, 2, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = vgg.vgg_16(inputs, num_classes)
expected_names = ['vgg_16/conv1/conv1_1',
'vgg_16/conv1/conv1_2',
'vgg_16/pool1',
'vgg_16/conv2/conv2_1',
'vgg_16/conv2/conv2_2',
'vgg_16/pool2',
'vgg_16/conv3/conv3_1',
'vgg_16/conv3/conv3_2',
'vgg_16/conv3/conv3_3',
'vgg_16/pool3',
'vgg_16/conv4/conv4_1',
'vgg_16/conv4/conv4_2',
'vgg_16/conv4/conv4_3',
'vgg_16/pool4',
'vgg_16/conv5/conv5_1',
'vgg_16/conv5/conv5_2',
'vgg_16/conv5/conv5_3',
'vgg_16/pool5',
'vgg_16/fc6',
'vgg_16/fc7',
'vgg_16/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
vgg.vgg_16(inputs, num_classes)
expected_names = ['vgg_16/conv1/conv1_1/weights',
'vgg_16/conv1/conv1_1/biases',
'vgg_16/conv1/conv1_2/weights',
'vgg_16/conv1/conv1_2/biases',
'vgg_16/conv2/conv2_1/weights',
'vgg_16/conv2/conv2_1/biases',
'vgg_16/conv2/conv2_2/weights',
'vgg_16/conv2/conv2_2/biases',
'vgg_16/conv3/conv3_1/weights',
'vgg_16/conv3/conv3_1/biases',
'vgg_16/conv3/conv3_2/weights',
'vgg_16/conv3/conv3_2/biases',
'vgg_16/conv3/conv3_3/weights',
'vgg_16/conv3/conv3_3/biases',
'vgg_16/conv4/conv4_1/weights',
'vgg_16/conv4/conv4_1/biases',
'vgg_16/conv4/conv4_2/weights',
'vgg_16/conv4/conv4_2/biases',
'vgg_16/conv4/conv4_3/weights',
'vgg_16/conv4/conv4_3/biases',
'vgg_16/conv5/conv5_1/weights',
'vgg_16/conv5/conv5_1/biases',
'vgg_16/conv5/conv5_2/weights',
'vgg_16/conv5/conv5_2/biases',
'vgg_16/conv5/conv5_3/weights',
'vgg_16/conv5/conv5_3/biases',
'vgg_16/fc6/weights',
'vgg_16/fc6/biases',
'vgg_16/fc7/weights',
'vgg_16/fc7/biases',
'vgg_16/fc8/weights',
'vgg_16/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_16(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 224, 224
eval_height, eval_width = 256, 256
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = vgg.vgg_16(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = vgg.vgg_16(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 2, 2, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
      self.assertEqual(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 224, 224
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_16(inputs)
sess.run(tf.global_variables_initializer())
output = sess.run(logits)
self.assertTrue(output.any())
class VGG19Test(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_19(inputs, num_classes)
      self.assertEqual(logits.op.name, 'vgg_19/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 256, 256
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_19(inputs, num_classes, spatial_squeeze=False)
      self.assertEqual(logits.op.name, 'vgg_19/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 2, 2, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = vgg.vgg_19(inputs, num_classes)
expected_names = [
'vgg_19/conv1/conv1_1',
'vgg_19/conv1/conv1_2',
'vgg_19/pool1',
'vgg_19/conv2/conv2_1',
'vgg_19/conv2/conv2_2',
'vgg_19/pool2',
'vgg_19/conv3/conv3_1',
'vgg_19/conv3/conv3_2',
'vgg_19/conv3/conv3_3',
'vgg_19/conv3/conv3_4',
'vgg_19/pool3',
'vgg_19/conv4/conv4_1',
'vgg_19/conv4/conv4_2',
'vgg_19/conv4/conv4_3',
'vgg_19/conv4/conv4_4',
'vgg_19/pool4',
'vgg_19/conv5/conv5_1',
'vgg_19/conv5/conv5_2',
'vgg_19/conv5/conv5_3',
'vgg_19/conv5/conv5_4',
'vgg_19/pool5',
'vgg_19/fc6',
'vgg_19/fc7',
'vgg_19/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
vgg.vgg_19(inputs, num_classes)
expected_names = [
'vgg_19/conv1/conv1_1/weights',
'vgg_19/conv1/conv1_1/biases',
'vgg_19/conv1/conv1_2/weights',
'vgg_19/conv1/conv1_2/biases',
'vgg_19/conv2/conv2_1/weights',
'vgg_19/conv2/conv2_1/biases',
'vgg_19/conv2/conv2_2/weights',
'vgg_19/conv2/conv2_2/biases',
'vgg_19/conv3/conv3_1/weights',
'vgg_19/conv3/conv3_1/biases',
'vgg_19/conv3/conv3_2/weights',
'vgg_19/conv3/conv3_2/biases',
'vgg_19/conv3/conv3_3/weights',
'vgg_19/conv3/conv3_3/biases',
'vgg_19/conv3/conv3_4/weights',
'vgg_19/conv3/conv3_4/biases',
'vgg_19/conv4/conv4_1/weights',
'vgg_19/conv4/conv4_1/biases',
'vgg_19/conv4/conv4_2/weights',
'vgg_19/conv4/conv4_2/biases',
'vgg_19/conv4/conv4_3/weights',
'vgg_19/conv4/conv4_3/biases',
'vgg_19/conv4/conv4_4/weights',
'vgg_19/conv4/conv4_4/biases',
'vgg_19/conv5/conv5_1/weights',
'vgg_19/conv5/conv5_1/biases',
'vgg_19/conv5/conv5_2/weights',
'vgg_19/conv5/conv5_2/biases',
'vgg_19/conv5/conv5_3/weights',
'vgg_19/conv5/conv5_3/biases',
'vgg_19/conv5/conv5_4/weights',
'vgg_19/conv5/conv5_4/biases',
'vgg_19/fc6/weights',
'vgg_19/fc6/biases',
'vgg_19/fc7/weights',
'vgg_19/fc7/biases',
'vgg_19/fc8/weights',
'vgg_19/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_19(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 224, 224
eval_height, eval_width = 256, 256
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = vgg.vgg_19(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = vgg.vgg_19(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 2, 2, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
      self.assertEqual(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 224, 224
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = vgg.vgg_19(inputs)
sess.run(tf.global_variables_initializer())
output = sess.run(logits)
self.assertTrue(output.any())
if __name__ == '__main__':
tf.test.main()
|