from plumlightpad import Plum
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
async def load_plum(username: str, password: str, hass: HomeAssistant) -> Plum:
"""Initialize Plum Lightpad API and load metadata stored in the cloud."""
plum = Plum(username, password)
cloud_web_session = async_get_clientsession(hass, verify_ssl=True)
await plum.loadCloudData(cloud_web_session)
return plum
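# Added illustration (not part of the snippet above): a hypothetical config-entry
# setup that reuses load_plum. The entry data keys and the "plumlightpad"
# hass.data key are assumptions made only for this example.
from homeassistant.config_entries import ConfigEntry


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Hypothetical setup entry that calls load_plum defined above."""
    plum = await load_plum(entry.data["username"], entry.data["password"], hass)
    hass.data.setdefault("plumlightpad", {})[entry.entry_id] = plum
    return True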
|
import logging
from typing import List, Optional, cast
import uuid
import voluptuous as vol
from homeassistant.const import CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import collection, storage
from .const import (
CONF_RESOURCE_TYPE_WS,
CONF_RESOURCES,
DOMAIN,
RESOURCE_CREATE_FIELDS,
RESOURCE_SCHEMA,
RESOURCE_UPDATE_FIELDS,
)
from .dashboard import LovelaceConfig
RESOURCE_STORAGE_KEY = f"{DOMAIN}_resources"
RESOURCES_STORAGE_VERSION = 1
_LOGGER = logging.getLogger(__name__)
class ResourceYAMLCollection:
"""Collection representing static YAML."""
loaded = True
def __init__(self, data):
"""Initialize a resource YAML collection."""
self.data = data
async def async_get_info(self):
"""Return the resources info for YAML mode."""
return {"resources": len(self.async_items() or [])}
@callback
def async_items(self) -> List[dict]:
"""Return list of items in collection."""
return self.data
class ResourceStorageCollection(collection.StorageCollection):
"""Collection to store resources."""
loaded = False
CREATE_SCHEMA = vol.Schema(RESOURCE_CREATE_FIELDS)
UPDATE_SCHEMA = vol.Schema(RESOURCE_UPDATE_FIELDS)
def __init__(self, hass: HomeAssistant, ll_config: LovelaceConfig):
"""Initialize the storage collection."""
super().__init__(
storage.Store(hass, RESOURCES_STORAGE_VERSION, RESOURCE_STORAGE_KEY),
_LOGGER,
)
self.ll_config = ll_config
async def async_get_info(self):
"""Return the resources info for YAML mode."""
if not self.loaded:
await self.async_load()
self.loaded = True
return {"resources": len(self.async_items() or [])}
async def _async_load_data(self) -> Optional[dict]:
"""Load the data."""
data = await self.store.async_load()
if data is not None:
return cast(Optional[dict], data)
# Import it from config.
try:
conf = await self.ll_config.async_load(False)
except HomeAssistantError:
return None
if CONF_RESOURCES not in conf:
return None
# Remove it from config and save both resources + config
data = conf[CONF_RESOURCES]
try:
vol.Schema([RESOURCE_SCHEMA])(data)
except vol.Invalid as err:
_LOGGER.warning("Resource import failed. Data invalid: %s", err)
return None
conf.pop(CONF_RESOURCES)
for item in data:
item[collection.CONF_ID] = uuid.uuid4().hex
data = {"items": data}
await self.store.async_save(data)
await self.ll_config.async_save(conf)
return data
async def _process_create_data(self, data: dict) -> dict:
"""Validate the config is valid."""
data = self.CREATE_SCHEMA(data)
data[CONF_TYPE] = data.pop(CONF_RESOURCE_TYPE_WS)
return data
@callback
def _get_suggested_id(self, info: dict) -> str:
"""Return unique ID."""
return uuid.uuid4().hex
async def _update_data(self, data: dict, update_data: dict) -> dict:
"""Return a new updated data object."""
if not self.loaded:
await self.async_load()
self.loaded = True
update_data = self.UPDATE_SCHEMA(update_data)
if CONF_RESOURCE_TYPE_WS in update_data:
update_data[CONF_TYPE] = update_data.pop(CONF_RESOURCE_TYPE_WS)
return {**data, **update_data}
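# Added illustration (not part of the original module): the storage payload shape
# that _async_load_data produces after importing YAML resources. Each item gets a
# generated hex id and the list is wrapped under "items" before saving. The values
# below are made up, and the "id" key assumes collection.CONF_ID == "id".
EXAMPLE_MIGRATED_RESOURCES = {
    "items": [
        {
            "id": "6f1b2c3d4e5f60718293a4b5c6d7e8f9",
            "type": "module",
            "url": "/local/my-card.js",
        }
    ]
}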
|
import json
import arrow
import boto3
from flask import current_app
def publish(topic_arn, certificates, notification_type, **kwargs):
sns_client = boto3.client("sns", **kwargs)
message_ids = {}
subject = "Lemur: {0} Notification".format(notification_type.capitalize())
for certificate in certificates:
message_ids[certificate["name"]] = publish_single(sns_client, topic_arn, certificate, notification_type, subject)
return message_ids
def publish_single(sns_client, topic_arn, certificate, notification_type, subject):
response = sns_client.publish(
TopicArn=topic_arn,
Message=format_message(certificate, notification_type),
Subject=subject,
)
response_code = response["ResponseMetadata"]["HTTPStatusCode"]
if response_code != 200:
raise Exception(f"Failed to publish {notification_type} notification to SNS topic {topic_arn}. "
f"SNS response: {response_code} {response}")
current_app.logger.info(f"AWS SNS message published to topic [{topic_arn}] with message ID {response['MessageId']}")
current_app.logger.debug(f"AWS SNS message published to topic [{topic_arn}]: [{response}]")
return response["MessageId"]
def create_certificate_url(name):
return "https://{hostname}/#/certificates/{name}".format(
hostname=current_app.config.get("LEMUR_HOSTNAME"), name=name
)
def format_message(certificate, notification_type):
json_message = {
"notification_type": notification_type,
"certificate_name": certificate["name"],
"expires": arrow.get(certificate["validityEnd"]).format("YYYY-MM-DDTHH:mm:ss"), # 2047-12-31T22:00:00
"endpoints_detected": len(certificate["endpoints"]),
"owner": certificate["owner"],
"details": create_certificate_url(certificate["name"])
}
return json.dumps(json_message)
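# Added illustration (not part of the original plugin): a hypothetical certificate
# payload shaped to match the keys that format_message reads; all values are made up.
example_certificate = {
    "name": "example-cert",
    "owner": "team@example.com",
    "validityEnd": "2047-12-31T22:00:00",
    "endpoints": [],
}
# Inside a Flask application context (create_certificate_url needs current_app),
# a single expiration notice could then be published, e.g.:
#     publish("arn:aws:sns:us-east-1:123456789012:lemur-notify",
#             [example_certificate], "expiration", region_name="us-east-1")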
|
import cherrypy
from cherrypy.lib.reprconf import attributes
from cherrypy._cpcompat import text_or_bytes
from cherrypy.process.servers import ServerAdapter
__all__ = ('Server', )
class Server(ServerAdapter):
"""An adapter for an HTTP server.
You can set attributes (like socket_host and socket_port)
on *this* object (which is probably cherrypy.server), and call
quickstart. For example::
cherrypy.server.socket_port = 80
cherrypy.quickstart()
"""
socket_port = 8080
"""The TCP port on which to listen for connections."""
_socket_host = '127.0.0.1'
@property
def socket_host(self): # noqa: D401; irrelevant for properties
"""The hostname or IP address on which to listen for connections.
Host values may be any IPv4 or IPv6 address, or any valid hostname.
The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
your hosts file prefers IPv6). The string '0.0.0.0' is a special
IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
is the similar IN6ADDR_ANY for IPv6. The empty string or None are
not allowed.
"""
return self._socket_host
@socket_host.setter
def socket_host(self, value):
if value == '':
raise ValueError("The empty string ('') is not an allowed value. "
"Use '0.0.0.0' instead to listen on all active "
'interfaces (INADDR_ANY).')
self._socket_host = value
socket_file = None
"""If given, the name of the UNIX socket to use instead of TCP/IP.
When this option is not None, the `socket_host` and `socket_port` options
are ignored."""
socket_queue_size = 5
"""The 'backlog' argument to socket.listen(); specifies the maximum number
of queued connections (default 5)."""
socket_timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
accepted_queue_size = -1
"""The maximum number of requests which will be queued up before
the server refuses to accept them (default -1, meaning no limit)."""
accepted_queue_timeout = 10
"""The timeout in seconds for attempting to add a request to the
queue when the queue is full (default 10)."""
shutdown_timeout = 5
"""The time to wait for HTTP worker threads to clean up."""
protocol_version = 'HTTP/1.1'
"""The version string to write in the Status-Line of all HTTP responses,
for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
this should also limit the supported features used in the response."""
thread_pool = 10
"""The number of worker threads to start up in the pool."""
thread_pool_max = -1
"""The maximum size of the worker-thread pool. Use -1 to indicate no limit.
"""
max_request_header_size = 500 * 1024
"""The maximum number of bytes allowable in the request headers.
If exceeded, the HTTP server should return "413 Request Entity Too Large".
"""
max_request_body_size = 100 * 1024 * 1024
"""The maximum number of bytes allowable in the request body. If exceeded,
the HTTP server should return "413 Request Entity Too Large"."""
instance = None
"""If not None, this should be an HTTP server instance (such as
cheroot.wsgi.Server) which cherrypy.server will control.
Use this when you need
more control over object instantiation than is available in the various
configuration options."""
ssl_context = None
"""When using PyOpenSSL, an instance of SSL.Context."""
ssl_certificate = None
"""The filename of the SSL certificate to use."""
ssl_certificate_chain = None
"""When using PyOpenSSL, the certificate chain to pass to
Context.load_verify_locations."""
ssl_private_key = None
"""The filename of the private key to use with SSL."""
ssl_ciphers = None
"""The ciphers list of SSL."""
ssl_module = 'builtin'
"""The name of a registered SSL adaptation module to use with
the builtin WSGI server. Builtin options are: 'builtin' (to
use the SSL library built into recent versions of Python).
You may also register your own classes in the
cheroot.server.ssl_adapters dict."""
statistics = False
"""Turns statistics-gathering on or off for aware HTTP servers."""
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
wsgi_version = (1, 0)
"""The WSGI version tuple to use with the builtin WSGI server.
The provided options are (1, 0) [which includes support for PEP 3333,
which declares it covers WSGI version 1.0.1 but still mandates the
wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
You may create and register your own experimental versions of the WSGI
protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
"""
peercreds = False
"""If True, peer cred lookup for UNIX domain socket will put to WSGI env.
This information will then be available through WSGI env vars:
* X_REMOTE_PID
* X_REMOTE_UID
* X_REMOTE_GID
"""
peercreds_resolve = False
"""If True, username/group will be looked up in the OS from peercreds.
This information will then be available through WSGI env vars:
* REMOTE_USER
* X_REMOTE_USER
* X_REMOTE_GROUP
"""
def __init__(self):
"""Initialize Server instance."""
self.bus = cherrypy.engine
self.httpserver = None
self.interrupt = None
self.running = False
def httpserver_from_self(self, httpserver=None):
"""Return a (httpserver, bind_addr) pair based on self attributes."""
if httpserver is None:
httpserver = self.instance
if httpserver is None:
from cherrypy import _cpwsgi_server
httpserver = _cpwsgi_server.CPWSGIServer(self)
if isinstance(httpserver, text_or_bytes):
# Is anyone using this? Can I add an arg?
httpserver = attributes(httpserver)(self)
return httpserver, self.bind_addr
def start(self):
"""Start the HTTP server."""
if not self.httpserver:
self.httpserver, self.bind_addr = self.httpserver_from_self()
super(Server, self).start()
start.priority = 75
@property
def bind_addr(self):
"""Return bind address.
A (host, port) tuple for TCP sockets or a str for Unix domain sockets.
"""
if self.socket_file:
return self.socket_file
if self.socket_host is None and self.socket_port is None:
return None
return (self.socket_host, self.socket_port)
@bind_addr.setter
def bind_addr(self, value):
if value is None:
self.socket_file = None
self.socket_host = None
self.socket_port = None
elif isinstance(value, text_or_bytes):
self.socket_file = value
self.socket_host = None
self.socket_port = None
else:
try:
self.socket_host, self.socket_port = value
self.socket_file = None
except ValueError:
raise ValueError('bind_addr must be a (host, port) tuple '
'(for TCP sockets) or a string (for Unix '
'domain sockets), not %r' % value)
def base(self):
"""Return the base for this server.
i.e. scheme://host[:port] or sock file
"""
if self.socket_file:
return self.socket_file
host = self.socket_host
if host in ('0.0.0.0', '::'):
# 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
# Look up the host name, which should be the
# safest thing to spit out in a URL.
import socket
host = socket.gethostname()
port = self.socket_port
if self.ssl_certificate:
scheme = 'https'
if port != 443:
host += ':%s' % port
else:
scheme = 'http'
if port != 80:
host += ':%s' % port
return '%s://%s' % (scheme, host)
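# Added illustration (not part of the original module): a minimal sketch of setting
# the attributes documented above before starting the server; values are illustrative.
if __name__ == '__main__':
    cherrypy.server.socket_host = '0.0.0.0'   # INADDR_ANY: listen on all interfaces
    cherrypy.server.socket_port = 8080
    cherrypy.server.thread_pool = 30
    cherrypy.quickstart()                      # start the engine and the HTTP server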
|
import asyncio
import itertools
import logging
from aiohttp import ClientError, ClientResponseError
from august.authenticator import ValidationResult
from august.exceptions import AugustApiAIOHTTPError
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_PASSWORD,
CONF_TIMEOUT,
CONF_USERNAME,
HTTP_UNAUTHORIZED,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
import homeassistant.helpers.config_validation as cv
from .activity import ActivityStream
from .const import (
AUGUST_COMPONENTS,
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_INSTALL_ID,
CONF_LOGIN_METHOD,
DATA_AUGUST,
DEFAULT_AUGUST_CONFIG_FILE,
DEFAULT_NAME,
DEFAULT_TIMEOUT,
DOMAIN,
LOGIN_METHODS,
MIN_TIME_BETWEEN_DETAIL_UPDATES,
VERIFICATION_CODE_KEY,
)
from .exceptions import CannotConnect, InvalidAuth, RequireValidation
from .gateway import AugustGateway
from .subscriber import AugustSubscriberMixin
_LOGGER = logging.getLogger(__name__)
TWO_FA_REVALIDATE = "verify_configurator"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_LOGIN_METHOD): vol.In(LOGIN_METHODS),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_INSTALL_ID): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_request_validation(hass, config_entry, august_gateway):
"""Request a new verification code from the user."""
#
# In the future this should start a new config flow
# instead of using the legacy configurator
#
_LOGGER.error("Access token is no longer valid")
configurator = hass.components.configurator
entry_id = config_entry.entry_id
async def async_august_configuration_validation_callback(data):
code = data.get(VERIFICATION_CODE_KEY)
result = await august_gateway.authenticator.async_validate_verification_code(
code
)
if result == ValidationResult.INVALID_VERIFICATION_CODE:
configurator.async_notify_errors(
hass.data[DOMAIN][entry_id][TWO_FA_REVALIDATE],
"Invalid verification code, please make sure you are using the latest code and try again.",
)
elif result == ValidationResult.VALIDATED:
return await async_setup_august(hass, config_entry, august_gateway)
return False
if TWO_FA_REVALIDATE not in hass.data[DOMAIN][entry_id]:
await august_gateway.authenticator.async_send_verification_code()
entry_data = config_entry.data
login_method = entry_data.get(CONF_LOGIN_METHOD)
username = entry_data.get(CONF_USERNAME)
hass.data[DOMAIN][entry_id][TWO_FA_REVALIDATE] = configurator.async_request_config(
f"{DEFAULT_NAME} ({username})",
async_august_configuration_validation_callback,
description=(
"August must be re-verified. "
f"Please check your {login_method} ({username}) "
"and enter the verification code below"
),
submit_caption="Verify",
fields=[
{"id": VERIFICATION_CODE_KEY, "name": "Verification code", "type": "string"}
],
)
return
async def async_setup_august(hass, config_entry, august_gateway):
"""Set up the August component."""
entry_id = config_entry.entry_id
hass.data[DOMAIN].setdefault(entry_id, {})
try:
await august_gateway.async_authenticate()
except RequireValidation:
await async_request_validation(hass, config_entry, august_gateway)
raise
# We still use the configurator to get a new 2fa code
# when needed since config_flow doesn't have a way
# to re-request if it expires
if TWO_FA_REVALIDATE in hass.data[DOMAIN][entry_id]:
hass.components.configurator.async_request_done(
hass.data[DOMAIN][entry_id].pop(TWO_FA_REVALIDATE)
)
hass.data[DOMAIN][entry_id][DATA_AUGUST] = AugustData(hass, august_gateway)
await hass.data[DOMAIN][entry_id][DATA_AUGUST].async_setup()
for component in AUGUST_COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the August component from YAML."""
conf = config.get(DOMAIN)
hass.data.setdefault(DOMAIN, {})
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_LOGIN_METHOD: conf.get(CONF_LOGIN_METHOD),
CONF_USERNAME: conf.get(CONF_USERNAME),
CONF_PASSWORD: conf.get(CONF_PASSWORD),
CONF_INSTALL_ID: conf.get(CONF_INSTALL_ID),
CONF_ACCESS_TOKEN_CACHE_FILE: DEFAULT_AUGUST_CONFIG_FILE,
},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up August from a config entry."""
august_gateway = AugustGateway(hass)
try:
await august_gateway.async_setup(entry.data)
return await async_setup_august(hass, entry, august_gateway)
except ClientResponseError as err:
if err.status == HTTP_UNAUTHORIZED:
_async_start_reauth(hass, entry)
return False
raise ConfigEntryNotReady from err
except InvalidAuth:
_async_start_reauth(hass, entry)
return False
except RequireValidation:
return False
except (CannotConnect, asyncio.TimeoutError) as err:
raise ConfigEntryNotReady from err
def _async_start_reauth(hass: HomeAssistant, entry: ConfigEntry):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "reauth"},
data=entry.data,
)
)
_LOGGER.error("Password is no longer valid. Please reauthenticate")
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in AUGUST_COMPONENTS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
class AugustData(AugustSubscriberMixin):
"""August data object."""
def __init__(self, hass, august_gateway):
"""Init August data object."""
super().__init__(hass, MIN_TIME_BETWEEN_DETAIL_UPDATES)
self._hass = hass
self._august_gateway = august_gateway
self.activity_stream = None
self._api = august_gateway.api
self._device_detail_by_id = {}
self._doorbells_by_id = {}
self._locks_by_id = {}
self._house_ids = set()
async def async_setup(self):
"""Async setup of august device data and activities."""
locks = (
await self._api.async_get_operable_locks(self._august_gateway.access_token)
or []
)
doorbells = (
await self._api.async_get_doorbells(self._august_gateway.access_token) or []
)
self._doorbells_by_id = {device.device_id: device for device in doorbells}
self._locks_by_id = {device.device_id: device for device in locks}
self._house_ids = {
device.house_id for device in itertools.chain(locks, doorbells)
}
await self._async_refresh_device_detail_by_ids(
[device.device_id for device in itertools.chain(locks, doorbells)]
)
# We remove all devices that we are missing
# detail as we cannot determine if they are usable.
# This also allows us to avoid checking for
# detail being None all over the place
self._remove_inoperative_locks()
self._remove_inoperative_doorbells()
self.activity_stream = ActivityStream(
self._hass, self._api, self._august_gateway, self._house_ids
)
await self.activity_stream.async_setup()
@property
def doorbells(self):
"""Return a list of py-august Doorbell objects."""
return self._doorbells_by_id.values()
@property
def locks(self):
"""Return a list of py-august Lock objects."""
return self._locks_by_id.values()
def get_device_detail(self, device_id):
"""Return the py-august LockDetail or DoorbellDetail object for a device."""
return self._device_detail_by_id[device_id]
async def _async_refresh(self, time):
await self._async_refresh_device_detail_by_ids(self._subscriptions.keys())
async def _async_refresh_device_detail_by_ids(self, device_ids_list):
for device_id in device_ids_list:
if device_id in self._locks_by_id:
await self._async_update_device_detail(
self._locks_by_id[device_id], self._api.async_get_lock_detail
)
# keypads are always attached to locks
if (
device_id in self._device_detail_by_id
and self._device_detail_by_id[device_id].keypad is not None
):
keypad = self._device_detail_by_id[device_id].keypad
self._device_detail_by_id[keypad.device_id] = keypad
elif device_id in self._doorbells_by_id:
await self._async_update_device_detail(
self._doorbells_by_id[device_id],
self._api.async_get_doorbell_detail,
)
_LOGGER.debug(
"async_signal_device_id_update (from detail updates): %s", device_id
)
self.async_signal_device_id_update(device_id)
async def _async_update_device_detail(self, device, api_call):
_LOGGER.debug(
"Started retrieving detail for %s (%s)",
device.device_name,
device.device_id,
)
try:
self._device_detail_by_id[device.device_id] = await api_call(
self._august_gateway.access_token, device.device_id
)
except ClientError as ex:
_LOGGER.error(
"Request error trying to retrieve %s details for %s. %s",
device.device_id,
device.device_name,
ex,
)
_LOGGER.debug(
"Completed retrieving detail for %s (%s)",
device.device_name,
device.device_id,
)
def _get_device_name(self, device_id):
"""Return doorbell or lock name as August has it stored."""
if self._locks_by_id.get(device_id):
return self._locks_by_id[device_id].device_name
if self._doorbells_by_id.get(device_id):
return self._doorbells_by_id[device_id].device_name
async def async_lock(self, device_id):
"""Lock the device."""
return await self._async_call_api_op_requires_bridge(
device_id,
self._api.async_lock_return_activities,
self._august_gateway.access_token,
device_id,
)
async def async_unlock(self, device_id):
"""Unlock the device."""
return await self._async_call_api_op_requires_bridge(
device_id,
self._api.async_unlock_return_activities,
self._august_gateway.access_token,
device_id,
)
async def _async_call_api_op_requires_bridge(
self, device_id, func, *args, **kwargs
):
"""Call an API that requires the bridge to be online and will change the device state."""
ret = None
try:
ret = await func(*args, **kwargs)
except AugustApiAIOHTTPError as err:
device_name = self._get_device_name(device_id)
if device_name is None:
device_name = f"DeviceID: {device_id}"
raise HomeAssistantError(f"{device_name}: {err}") from err
return ret
def _remove_inoperative_doorbells(self):
doorbells = list(self.doorbells)
for doorbell in doorbells:
device_id = doorbell.device_id
doorbell_is_operative = False
doorbell_detail = self._device_detail_by_id.get(device_id)
if doorbell_detail is None:
_LOGGER.info(
"The doorbell %s could not be setup because the system could not fetch details about the doorbell",
doorbell.device_name,
)
else:
doorbell_is_operative = True
if not doorbell_is_operative:
del self._doorbells_by_id[device_id]
del self._device_detail_by_id[device_id]
def _remove_inoperative_locks(self):
# Remove non-operative locks as there must
# be a bridge (August Connect) for them to
# be usable
locks = list(self.locks)
for lock in locks:
device_id = lock.device_id
lock_is_operative = False
lock_detail = self._device_detail_by_id.get(device_id)
if lock_detail is None:
_LOGGER.info(
"The lock %s could not be setup because the system could not fetch details about the lock",
lock.device_name,
)
elif lock_detail.bridge is None:
_LOGGER.info(
"The lock %s could not be setup because it does not have a bridge (Connect)",
lock.device_name,
)
elif not lock_detail.bridge.operative:
_LOGGER.info(
"The lock %s could not be setup because the bridge (Connect) is not operative",
lock.device_name,
)
else:
lock_is_operative = True
if not lock_is_operative:
del self._locks_by_id[device_id]
del self._device_detail_by_id[device_id]
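# Added illustration (not part of the original component): a hypothetical helper
# showing how platform code might reach the shared AugustData instance and lock a
# device; the entry_id and device_id arguments are assumptions for this example.
async def example_lock_device(hass: HomeAssistant, entry_id: str, device_id: str):
    """Lock a single August device through the shared AugustData object."""
    data = hass.data[DOMAIN][entry_id][DATA_AUGUST]
    return await data.async_lock(device_id)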
|
import pytest
from homeassistant.components import fan
from homeassistant.const import (
ATTR_ENTITY_ID,
ENTITY_MATCH_ALL,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.setup import async_setup_component
FAN_ENTITY_ID = "fan.living_room_fan"
@pytest.fixture(autouse=True)
async def setup_comp(hass):
"""Initialize components."""
assert await async_setup_component(hass, fan.DOMAIN, {"fan": {"platform": "demo"}})
await hass.async_block_till_done()
async def test_turn_on(hass):
"""Test turning on the device."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_ENTITY_ID}, blocking=True
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_ON
await hass.services.async_call(
fan.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: FAN_ENTITY_ID, fan.ATTR_SPEED: fan.SPEED_HIGH},
blocking=True,
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_ON
assert state.attributes[fan.ATTR_SPEED] == fan.SPEED_HIGH
async def test_turn_off(hass):
"""Test turning off the device."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_ENTITY_ID}, blocking=True
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_ON
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_ENTITY_ID}, blocking=True
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
async def test_turn_off_without_entity_id(hass):
"""Test turning off all fans."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_ENTITY_ID}, blocking=True
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_ON
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
async def test_set_direction(hass):
"""Test setting the direction of the device."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
fan.DOMAIN,
fan.SERVICE_SET_DIRECTION,
{ATTR_ENTITY_ID: FAN_ENTITY_ID, fan.ATTR_DIRECTION: fan.DIRECTION_REVERSE},
blocking=True,
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.attributes[fan.ATTR_DIRECTION] == fan.DIRECTION_REVERSE
async def test_set_speed(hass):
"""Test setting the speed of the device."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
await hass.services.async_call(
fan.DOMAIN,
fan.SERVICE_SET_SPEED,
{ATTR_ENTITY_ID: FAN_ENTITY_ID, fan.ATTR_SPEED: fan.SPEED_LOW},
blocking=True,
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.attributes[fan.ATTR_SPEED] == fan.SPEED_LOW
async def test_oscillate(hass):
"""Test oscillating the fan."""
state = hass.states.get(FAN_ENTITY_ID)
assert state.state == STATE_OFF
assert not state.attributes.get(fan.ATTR_OSCILLATING)
await hass.services.async_call(
fan.DOMAIN,
fan.SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: FAN_ENTITY_ID, fan.ATTR_OSCILLATING: True},
blocking=True,
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.attributes[fan.ATTR_OSCILLATING] is True
await hass.services.async_call(
fan.DOMAIN,
fan.SERVICE_OSCILLATE,
{ATTR_ENTITY_ID: FAN_ENTITY_ID, fan.ATTR_OSCILLATING: False},
blocking=True,
)
state = hass.states.get(FAN_ENTITY_ID)
assert state.attributes[fan.ATTR_OSCILLATING] is False
async def test_is_on(hass):
"""Test is on service call."""
assert not fan.is_on(hass, FAN_ENTITY_ID)
await hass.services.async_call(
fan.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_ENTITY_ID}, blocking=True
)
assert fan.is_on(hass, FAN_ENTITY_ID)
|
import logging
import voluptuous as vol
from homeassistant.components import mqtt, switch
from homeassistant.components.switch import SwitchEntity
from homeassistant.const import (
CONF_DEVICE,
CONF_ICON,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_ON,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import (
ATTR_DISCOVERY_HASH,
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from .debug_info import log_messages
from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "MQTT Switch"
DEFAULT_PAYLOAD_ON = "ON"
DEFAULT_PAYLOAD_OFF = "OFF"
DEFAULT_OPTIMISTIC = False
CONF_STATE_ON = "state_on"
CONF_STATE_OFF = "state_off"
PLATFORM_SCHEMA = (
mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_STATE_OFF): cv.string,
vol.Optional(CONF_STATE_ON): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema)
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT switch through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, config, async_add_entities, discovery_info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT switch dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add a MQTT switch."""
discovery_data = discovery_payload.discovery_data
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(switch.DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT switch."""
async_add_entities([MqttSwitch(hass, config, config_entry, discovery_data)])
class MqttSwitch(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
SwitchEntity,
RestoreEntity,
):
"""Representation of a switch that can be toggled using MQTT."""
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the MQTT switch."""
self.hass = hass
self._state = False
self._sub_state = None
self._state_on = None
self._state_off = None
self._optimistic = None
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
self._config = config
state_on = config.get(CONF_STATE_ON)
self._state_on = state_on if state_on else config[CONF_PAYLOAD_ON]
state_off = config.get(CONF_STATE_OFF)
self._state_off = state_off if state_off else config[CONF_PAYLOAD_OFF]
self._optimistic = config[CONF_OPTIMISTIC]
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
template.hass = self.hass
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def state_message_received(msg):
"""Handle new MQTT state messages."""
payload = msg.payload
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
payload = template.async_render_with_possible_json_value(payload)
if payload == self._state_on:
self._state = True
elif payload == self._state_off:
self._state = False
self.async_write_ha_state()
if self._config.get(CONF_STATE_TOPIC) is None:
# Force into optimistic mode.
self._optimistic = True
else:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
CONF_STATE_TOPIC: {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": state_message_received,
"qos": self._config[CONF_QOS],
}
},
)
if self._optimistic:
last_state = await self.async_get_last_state()
if last_state:
self._state = last_state.state == STATE_ON
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def name(self):
"""Return the name of the switch."""
return self._config[CONF_NAME]
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def icon(self):
"""Return the icon."""
return self._config.get(CONF_ICON)
async def async_turn_on(self, **kwargs):
"""Turn the device on.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
self._config[CONF_PAYLOAD_ON],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config[CONF_COMMAND_TOPIC],
self._config[CONF_PAYLOAD_OFF],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that switch has changed state.
self._state = False
self.async_write_ha_state()
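# Added illustration (not part of the original platform): a configuration.yaml
# switch entry expressed as the Python dict that PLATFORM_SCHEMA above would be
# expected to validate; topic names and payloads are made up for this example.
EXAMPLE_MQTT_SWITCH_CONFIG = {
    "platform": "mqtt",
    "name": "Bedroom Switch",
    "state_topic": "home/bedroom/switch1",
    "command_topic": "home/bedroom/switch1/set",
    "payload_on": "ON",
    "payload_off": "OFF",
    "optimistic": False,
    "qos": 0,
    "retain": True,
}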
|
from .exceptions import GrammarError
from .lexer import Token
from .tree import Tree
from .visitors import InlineTransformer # XXX Deprecated
from .visitors import Transformer_InPlace
from .visitors import _vargs_meta, _vargs_meta_inline
###{standalone
from functools import partial, wraps
from itertools import repeat, product
class ExpandSingleChild:
def __init__(self, node_builder):
self.node_builder = node_builder
def __call__(self, children):
if len(children) == 1:
return children[0]
else:
return self.node_builder(children)
class PropagatePositions:
def __init__(self, node_builder):
self.node_builder = node_builder
def __call__(self, children):
res = self.node_builder(children)
# local reference to Tree.meta reduces number of presence checks
if isinstance(res, Tree):
res_meta = res.meta
for c in children:
if isinstance(c, Tree):
child_meta = c.meta
if not child_meta.empty:
res_meta.line = child_meta.line
res_meta.column = child_meta.column
res_meta.start_pos = child_meta.start_pos
res_meta.empty = False
break
elif isinstance(c, Token):
res_meta.line = c.line
res_meta.column = c.column
res_meta.start_pos = c.pos_in_stream
res_meta.empty = False
break
for c in reversed(children):
if isinstance(c, Tree):
child_meta = c.meta
if not child_meta.empty:
res_meta.end_line = child_meta.end_line
res_meta.end_column = child_meta.end_column
res_meta.end_pos = child_meta.end_pos
res_meta.empty = False
break
elif isinstance(c, Token):
res_meta.end_line = c.end_line
res_meta.end_column = c.end_column
res_meta.end_pos = c.end_pos
res_meta.empty = False
break
return res
class ChildFilter:
def __init__(self, to_include, append_none, node_builder):
self.node_builder = node_builder
self.to_include = to_include
self.append_none = append_none
def __call__(self, children):
filtered = []
for i, to_expand, add_none in self.to_include:
if add_none:
filtered += [None] * add_none
if to_expand:
filtered += children[i].children
else:
filtered.append(children[i])
if self.append_none:
filtered += [None] * self.append_none
return self.node_builder(filtered)
class ChildFilterLALR(ChildFilter):
"""Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)"""
def __call__(self, children):
filtered = []
for i, to_expand, add_none in self.to_include:
if add_none:
filtered += [None] * add_none
if to_expand:
if filtered:
filtered += children[i].children
else: # Optimize for left-recursion
filtered = children[i].children
else:
filtered.append(children[i])
if self.append_none:
filtered += [None] * self.append_none
return self.node_builder(filtered)
class ChildFilterLALR_NoPlaceholders(ChildFilter):
"Optimized childfilter for LALR (assumes no duplication in parse tree, so it's safe to change it)"
def __init__(self, to_include, node_builder):
self.node_builder = node_builder
self.to_include = to_include
def __call__(self, children):
filtered = []
for i, to_expand in self.to_include:
if to_expand:
if filtered:
filtered += children[i].children
else: # Optimize for left-recursion
filtered = children[i].children
else:
filtered.append(children[i])
return self.node_builder(filtered)
def _should_expand(sym):
return not sym.is_term and sym.name.startswith('_')
def maybe_create_child_filter(expansion, keep_all_tokens, ambiguous, _empty_indices):
# Prepare empty_indices as: How many Nones to insert at each index?
if _empty_indices:
assert _empty_indices.count(False) == len(expansion)
s = ''.join(str(int(b)) for b in _empty_indices)
empty_indices = [len(ones) for ones in s.split('0')]
assert len(empty_indices) == len(expansion)+1, (empty_indices, len(expansion))
else:
empty_indices = [0] * (len(expansion)+1)
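    # Worked example (added for illustration) of the branch above: with
    # _empty_indices = [False, True, True, False], s = '0110' and
    # s.split('0') = ['', '11', ''], so empty_indices = [0, 2, 0], i.e. insert
    # 0 Nones before symbol 0, 2 Nones before symbol 1 and 0 Nones at the end.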
to_include = []
nones_to_add = 0
for i, sym in enumerate(expansion):
nones_to_add += empty_indices[i]
if keep_all_tokens or not (sym.is_term and sym.filter_out):
to_include.append((i, _should_expand(sym), nones_to_add))
nones_to_add = 0
nones_to_add += empty_indices[len(expansion)]
if _empty_indices or len(to_include) < len(expansion) or any(to_expand for i, to_expand,_ in to_include):
if _empty_indices or ambiguous:
return partial(ChildFilter if ambiguous else ChildFilterLALR, to_include, nones_to_add)
else:
# LALR without placeholders
return partial(ChildFilterLALR_NoPlaceholders, [(i, x) for i,x,_ in to_include])
class AmbiguousExpander:
"""Deal with the case where we're expanding children ('_rule') into a parent but the children
are ambiguous. i.e. (parent->_ambig->_expand_this_rule). In this case, make the parent itself
ambiguous with as many copies as there are ambiguous children, and then copy the ambiguous children
into the right parents in the right places, essentially shifting the ambiguity up the tree."""
def __init__(self, to_expand, tree_class, node_builder):
self.node_builder = node_builder
self.tree_class = tree_class
self.to_expand = to_expand
def __call__(self, children):
def _is_ambig_tree(t):
return hasattr(t, 'data') and t.data == '_ambig'
# -- When we're repeatedly expanding ambiguities we can end up with nested ambiguities.
# All children of an _ambig node should be a derivation of that ambig node, hence
# it is safe to assume that if we see an _ambig node nested within an ambig node
# it is safe to simply expand it into the parent _ambig node as an alternative derivation.
ambiguous = []
for i, child in enumerate(children):
if _is_ambig_tree(child):
if i in self.to_expand:
ambiguous.append(i)
to_expand = [j for j, grandchild in enumerate(child.children) if _is_ambig_tree(grandchild)]
child.expand_kids_by_index(*to_expand)
if not ambiguous:
return self.node_builder(children)
expand = [iter(child.children) if i in ambiguous else repeat(child) for i, child in enumerate(children)]
return self.tree_class('_ambig', [self.node_builder(list(f[0])) for f in product(zip(*expand))])
def maybe_create_ambiguous_expander(tree_class, expansion, keep_all_tokens):
to_expand = [i for i, sym in enumerate(expansion)
if keep_all_tokens or ((not (sym.is_term and sym.filter_out)) and _should_expand(sym))]
if to_expand:
return partial(AmbiguousExpander, to_expand, tree_class)
class AmbiguousIntermediateExpander:
"""
Propagate ambiguous intermediate nodes and their derivations up to the
current rule.
In general, converts
rule
_iambig
_inter
someChildren1
...
_inter
someChildren2
...
someChildren3
...
to
_ambig
rule
someChildren1
...
someChildren3
...
rule
someChildren2
...
someChildren3
...
rule
childrenFromNestedIambigs
...
someChildren3
...
...
propagating up any nested '_iambig' nodes along the way.
"""
def __init__(self, tree_class, node_builder):
self.node_builder = node_builder
self.tree_class = tree_class
def __call__(self, children):
def _is_iambig_tree(child):
return hasattr(child, 'data') and child.data == '_iambig'
def _collapse_iambig(children):
"""
Recursively flatten the derivations of the parent of an '_iambig'
node. Returns a list of '_inter' nodes guaranteed not
to contain any nested '_iambig' nodes, or None if children does
not contain an '_iambig' node.
"""
# Due to the structure of the SPPF,
# an '_iambig' node can only appear as the first child
if children and _is_iambig_tree(children[0]):
iambig_node = children[0]
result = []
for grandchild in iambig_node.children:
collapsed = _collapse_iambig(grandchild.children)
if collapsed:
for child in collapsed:
child.children += children[1:]
result += collapsed
else:
new_tree = self.tree_class('_inter', grandchild.children + children[1:])
result.append(new_tree)
return result
collapsed = _collapse_iambig(children)
if collapsed:
processed_nodes = [self.node_builder(c.children) for c in collapsed]
return self.tree_class('_ambig', processed_nodes)
return self.node_builder(children)
def ptb_inline_args(func):
@wraps(func)
def f(children):
return func(*children)
return f
def inplace_transformer(func):
@wraps(func)
def f(children):
# function name in a Transformer is a rule name.
tree = Tree(func.__name__, children)
return func(tree)
return f
def apply_visit_wrapper(func, name, wrapper):
if wrapper is _vargs_meta or wrapper is _vargs_meta_inline:
raise NotImplementedError("Meta args not supported for internal transformer")
@wraps(func)
def f(children):
return wrapper(func, name, children, None)
return f
class ParseTreeBuilder:
def __init__(self, rules, tree_class, propagate_positions=False, ambiguous=False, maybe_placeholders=False):
self.tree_class = tree_class
self.propagate_positions = propagate_positions
self.ambiguous = ambiguous
self.maybe_placeholders = maybe_placeholders
self.rule_builders = list(self._init_builders(rules))
def _init_builders(self, rules):
for rule in rules:
options = rule.options
keep_all_tokens = options.keep_all_tokens
expand_single_child = options.expand1
wrapper_chain = list(filter(None, [
(expand_single_child and not rule.alias) and ExpandSingleChild,
maybe_create_child_filter(rule.expansion, keep_all_tokens, self.ambiguous, options.empty_indices if self.maybe_placeholders else None),
self.propagate_positions and PropagatePositions,
self.ambiguous and maybe_create_ambiguous_expander(self.tree_class, rule.expansion, keep_all_tokens),
self.ambiguous and partial(AmbiguousIntermediateExpander, self.tree_class)
]))
yield rule, wrapper_chain
def create_callback(self, transformer=None):
callbacks = {}
for rule, wrapper_chain in self.rule_builders:
user_callback_name = rule.alias or rule.options.template_source or rule.origin.name
try:
f = getattr(transformer, user_callback_name)
# XXX InlineTransformer is deprecated!
wrapper = getattr(f, 'visit_wrapper', None)
if wrapper is not None:
f = apply_visit_wrapper(f, user_callback_name, wrapper)
else:
if isinstance(transformer, InlineTransformer):
f = ptb_inline_args(f)
elif isinstance(transformer, Transformer_InPlace):
f = inplace_transformer(f)
except AttributeError:
f = partial(self.tree_class, user_callback_name)
for w in wrapper_chain:
f = w(f)
if rule in callbacks:
raise GrammarError("Rule '%s' already exists" % (rule,))
callbacks[rule] = f
return callbacks
###}
|
import io
import sys
import codecs
import argparse
from .learn_bpe import learn_bpe
from .apply_bpe import BPE, read_vocabulary
from .get_vocab import get_vocab
from .learn_joint_bpe_and_vocab import learn_joint_bpe_and_vocab
from .learn_bpe import create_parser as create_learn_bpe_parser
from .apply_bpe import create_parser as create_apply_bpe_parser
from .get_vocab import create_parser as create_get_vocab_parser
from .learn_joint_bpe_and_vocab import create_parser as create_learn_joint_bpe_and_vocab_parser
# hack for python2/3 compatibility
argparse.open = io.open
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
description="subword-nmt: unsupervised word segmentation for neural machine translation and text generation ")
subparsers = parser.add_subparsers(dest='command',
help="""command to run. Run one of the commands with '-h' for more info.
learn-bpe: learn BPE merge operations on input text.
apply-bpe: apply given BPE operations to input text.
get-vocab: extract vocabulary and word frequencies from input text.
learn-joint-bpe-and-vocab: executes recommended workflow for joint BPE.""")
learn_bpe_parser = create_learn_bpe_parser(subparsers)
apply_bpe_parser = create_apply_bpe_parser(subparsers)
get_vocab_parser = create_get_vocab_parser(subparsers)
learn_joint_bpe_and_vocab_parser = create_learn_joint_bpe_and_vocab_parser(subparsers)
args = parser.parse_args()
if args.command == 'learn-bpe':
# read/write files as UTF-8
if args.input.name != '<stdin>':
args.input = codecs.open(args.input.name, encoding='utf-8')
if args.output.name != '<stdout>':
args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
learn_bpe(args.input, args.output, args.symbols, args.min_frequency, args.verbose,
is_dict=args.dict_input, total_symbols=args.total_symbols)
elif args.command == 'apply-bpe':
# read/write files as UTF-8
args.codes = codecs.open(args.codes.name, encoding='utf-8')
if args.input.name != '<stdin>':
args.input = codecs.open(args.input.name, encoding='utf-8')
if args.output.name != '<stdout>':
args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
if args.vocabulary:
args.vocabulary = codecs.open(args.vocabulary.name, encoding='utf-8')
if args.vocabulary:
vocabulary = read_vocabulary(args.vocabulary, args.vocabulary_threshold)
else:
vocabulary = None
if sys.version_info < (3, 0):
args.separator = args.separator.decode('UTF-8')
if args.glossaries:
args.glossaries = [g.decode('UTF-8') for g in args.glossaries]
bpe = BPE(args.codes, args.merges, args.separator, vocabulary, args.glossaries)
for line in args.input:
args.output.write(bpe.process_line(line, args.dropout))
elif args.command == 'get-vocab':
if args.input.name != '<stdin>':
args.input = codecs.open(args.input.name, encoding='utf-8')
if args.output.name != '<stdout>':
args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
get_vocab(args.input, args.output)
elif args.command == 'learn-joint-bpe-and-vocab':
learn_joint_bpe_and_vocab(args)
if sys.version_info < (3, 0):
args.separator = args.separator.decode('UTF-8')
else:
raise Exception('Invalid command provided')
# python 2/3 compatibility
if sys.version_info < (3, 0):
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
else:
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer)
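# Added illustration (not part of the original script): hedged shell usage for the
# subcommands wired up above; file names are made up and -s / -c are the short
# forms of --symbols and --codes.
#
#     subword-nmt learn-bpe -s 10000 < train.txt > codes.bpe
#     subword-nmt apply-bpe -c codes.bpe < test.txt > test.bpe
#     subword-nmt get-vocab < train.txt > vocab.txt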
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import patch
from diamond.collector import Collector
from users import UsersCollector
import sys
##########################################################################
def run_only_if_pyutmp_is_available(func):
try:
import pyutmp
except ImportError:
pyutmp = None
try:
import utmp
except ImportError:
utmp = None
pred = lambda: pyutmp is not None or utmp is not None
return run_only(func, pred)
class TestUsersCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('UsersCollector', {
'utmp': self.getFixturePath('utmp.centos6'),
})
self.collector = UsersCollector(config, None)
def test_import(self):
self.assertTrue(UsersCollector)
@run_only_if_pyutmp_is_available
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
metrics = {
'kormoc': 2,
'root': 3,
'total': 5,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
# Because of the compiled nature of pyutmp, we can't actually test
# different operating system versions than the currently running
# one
if sys.platform.startswith('linux'):
self.collector.collect()
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from abc import ABC, abstractmethod
import asyncio
import logging
from typing import Dict, List, Optional
from aiohttp import StreamReader, web
from aiohttp.hdrs import istr
from aiohttp.web_exceptions import (
HTTPBadRequest,
HTTPNotFound,
HTTPUnsupportedMediaType,
)
import attr
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import callback
from homeassistant.helpers import config_per_platform, discovery
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.setup import async_prepare_setup_platform
from .const import (
DOMAIN,
AudioBitRates,
AudioChannels,
AudioCodecs,
AudioFormats,
AudioSampleRates,
SpeechResultState,
)
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistantType, config):
"""Set up STT."""
providers = {}
async def async_setup_platform(p_type, p_config=None, discovery_info=None):
"""Set up a TTS platform."""
if p_config is None:
p_config = {}
platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)
if platform is None:
return
try:
provider = await platform.async_get_engine(hass, p_config, discovery_info)
if provider is None:
_LOGGER.error("Error setting up platform %s", p_type)
return
provider.name = p_type
provider.hass = hass
providers[provider.name] = provider
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error setting up platform: %s", p_type)
return
setup_tasks = [
async_setup_platform(p_type, p_config)
for p_type, p_config in config_per_platform(config, DOMAIN)
]
if setup_tasks:
await asyncio.wait(setup_tasks)
# Add discovery support
async def async_platform_discovered(platform, info):
"""Handle for discovered platform."""
await async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
hass.http.register_view(SpeechToTextView(providers))
return True
@attr.s
class SpeechMetadata:
"""Metadata of audio stream."""
language: str = attr.ib()
format: AudioFormats = attr.ib()
codec: AudioCodecs = attr.ib()
bit_rate: AudioBitRates = attr.ib(converter=int)
sample_rate: AudioSampleRates = attr.ib(converter=int)
channel: AudioChannels = attr.ib(converter=int)
@attr.s
class SpeechResult:
"""Result of audio Speech."""
text: Optional[str] = attr.ib()
result: SpeechResultState = attr.ib()
class Provider(ABC):
"""Represent a single STT provider."""
hass: Optional[HomeAssistantType] = None
name: Optional[str] = None
@property
@abstractmethod
def supported_languages(self) -> List[str]:
"""Return a list of supported languages."""
@property
@abstractmethod
def supported_formats(self) -> List[AudioFormats]:
"""Return a list of supported formats."""
@property
@abstractmethod
def supported_codecs(self) -> List[AudioCodecs]:
"""Return a list of supported codecs."""
@property
@abstractmethod
def supported_bit_rates(self) -> List[AudioBitRates]:
"""Return a list of supported bit rates."""
@property
@abstractmethod
def supported_sample_rates(self) -> List[AudioSampleRates]:
"""Return a list of supported sample rates."""
@property
@abstractmethod
def supported_channels(self) -> List[AudioChannels]:
"""Return a list of supported channels."""
@abstractmethod
async def async_process_audio_stream(
self, metadata: SpeechMetadata, stream: StreamReader
) -> SpeechResult:
"""Process an audio stream to STT service.
Only streaming content is allowed!
"""
@callback
def check_metadata(self, metadata: SpeechMetadata) -> bool:
"""Check if given metadata supported by this provider."""
if (
metadata.language not in self.supported_languages
or metadata.format not in self.supported_formats
or metadata.codec not in self.supported_codecs
or metadata.bit_rate not in self.supported_bit_rates
or metadata.sample_rate not in self.supported_sample_rates
or metadata.channel not in self.supported_channels
):
return False
return True
class SpeechToTextView(HomeAssistantView):
"""STT view to generate a text from audio stream."""
requires_auth = True
url = "/api/stt/{provider}"
name = "api:stt:provider"
def __init__(self, providers: Dict[str, Provider]) -> None:
"""Initialize a tts view."""
self.providers = providers
@staticmethod
def _metadata_from_header(request: web.Request) -> Optional[SpeechMetadata]:
"""Extract metadata from header.
X-Speech-Content: format=wav; codec=pcm; sample_rate=16000; bit_rate=16; channel=1; language=de_de
"""
try:
data = request.headers[istr("X-Speech-Content")].split(";")
except KeyError:
_LOGGER.warning("Missing X-Speech-Content")
return None
# Convert Header data
args = {}
for value in data:
value = value.strip()
args[value.partition("=")[0]] = value.partition("=")[2]
try:
return SpeechMetadata(**args)
except TypeError as err:
_LOGGER.warning("Wrong format of X-Speech-Content: %s", err)
return None
async def post(self, request: web.Request, provider: str) -> web.Response:
"""Convert Speech (audio) to text."""
if provider not in self.providers:
raise HTTPNotFound()
stt_provider: Provider = self.providers[provider]
# Get metadata
metadata = self._metadata_from_header(request)
if not metadata:
raise HTTPBadRequest()
# Check format
if not stt_provider.check_metadata(metadata):
raise HTTPUnsupportedMediaType()
# Process audio stream
result = await stt_provider.async_process_audio_stream(
metadata, request.content
)
# Return result
return self.json(attr.asdict(result))
async def get(self, request: web.Request, provider: str) -> web.Response:
"""Return provider specific audio information."""
if provider not in self.providers:
raise HTTPNotFound()
stt_provider: Provider = self.providers[provider]
return self.json(
{
"languages": stt_provider.supported_languages,
"formats": stt_provider.supported_formats,
"codecs": stt_provider.supported_codecs,
"sample_rates": stt_provider.supported_sample_rates,
"bit_rates": stt_provider.supported_bit_rates,
"channels": stt_provider.supported_channels,
}
)
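# Added illustration (not part of the original component): a minimal, hypothetical
# Provider implementation built on the ABC above. The enum member names are assumed
# to come from the const module imported at the top; a real provider would forward
# the audio to an actual STT engine instead of returning a canned result.
class DemoSTTProvider(Provider):
    """Toy provider that drains the stream and returns a fixed transcript."""

    @property
    def supported_languages(self) -> List[str]:
        return ["en-US"]

    @property
    def supported_formats(self) -> List[AudioFormats]:
        return [AudioFormats.WAV]

    @property
    def supported_codecs(self) -> List[AudioCodecs]:
        return [AudioCodecs.PCM]

    @property
    def supported_bit_rates(self) -> List[AudioBitRates]:
        return [AudioBitRates.BITRATE_16]

    @property
    def supported_sample_rates(self) -> List[AudioSampleRates]:
        return [AudioSampleRates.SAMPLERATE_16000]

    @property
    def supported_channels(self) -> List[AudioChannels]:
        return [AudioChannels.CHANNEL_MONO]

    async def async_process_audio_stream(
        self, metadata: SpeechMetadata, stream: StreamReader
    ) -> SpeechResult:
        # Consume the incoming audio and return a canned successful result.
        await stream.read()
        return SpeechResult("hello world", SpeechResultState.SUCCESS)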
|
import pytest
import zigpy.profiles.zha as zha
import zigpy.zcl.clusters.general as general
import zigpy.zcl.clusters.hvac as hvac
from homeassistant.components import fan
from homeassistant.components.fan import (
ATTR_SPEED,
DOMAIN,
SERVICE_SET_SPEED,
SPEED_HIGH,
SPEED_MEDIUM,
SPEED_OFF,
)
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.components.zha.core.discovery import GROUP_PROBE
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
)
from .common import (
async_enable_traffic,
async_find_group_entity_id,
async_test_rejoin,
find_entity_id,
get_zha_gateway,
send_attributes_report,
)
from tests.async_mock import call
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"
IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8"
@pytest.fixture
def zigpy_device(zigpy_device_mock):
"""Device tracker zigpy device."""
endpoints = {
1: {
"in_clusters": [hvac.Fan.cluster_id],
"out_clusters": [],
"device_type": zha.DeviceType.ON_OFF_SWITCH,
}
}
return zigpy_device_mock(endpoints)
@pytest.fixture
async def coordinator(hass, zigpy_device_mock, zha_device_joined):
"""Test zha fan platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [],
"out_clusters": [],
"device_type": zha.DeviceType.COLOR_DIMMABLE_LIGHT,
}
},
ieee="00:15:8d:00:02:32:4f:32",
nwk=0x0000,
node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_fan_1(hass, zigpy_device_mock, zha_device_joined):
"""Test zha fan platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [general.OnOff.cluster_id, hvac.Fan.cluster_id],
"out_clusters": [],
}
},
ieee=IEEE_GROUPABLE_DEVICE,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
@pytest.fixture
async def device_fan_2(hass, zigpy_device_mock, zha_device_joined):
"""Test zha fan platform."""
zigpy_device = zigpy_device_mock(
{
1: {
"in_clusters": [
general.OnOff.cluster_id,
hvac.Fan.cluster_id,
general.LevelControl.cluster_id,
],
"out_clusters": [],
}
},
ieee=IEEE_GROUPABLE_DEVICE2,
)
zha_device = await zha_device_joined(zigpy_device)
zha_device.available = True
return zha_device
async def test_fan(hass, zha_device_joined_restored, zigpy_device):
"""Test zha fan platform."""
zha_device = await zha_device_joined_restored(zigpy_device)
cluster = zigpy_device.endpoints.get(1).fan
entity_id = await find_entity_id(DOMAIN, zha_device, hass)
assert entity_id is not None
assert hass.states.get(entity_id).state == STATE_OFF
await async_enable_traffic(hass, [zha_device], enabled=False)
# test that the fan was created and that it is unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, [zha_device])
# test that the state has changed from unavailable to off
assert hass.states.get(entity_id).state == STATE_OFF
# turn on at fan
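    # attribute 0 of the fan cluster is fan_mode; a non-zero report marks the fan as on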
await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3})
assert hass.states.get(entity_id).state == STATE_ON
# turn off at fan
await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2})
assert hass.states.get(entity_id).state == STATE_OFF
# turn on from HA
cluster.write_attributes.reset_mock()
await async_turn_on(hass, entity_id)
assert len(cluster.write_attributes.mock_calls) == 1
assert cluster.write_attributes.call_args == call({"fan_mode": 2})
# turn off from HA
cluster.write_attributes.reset_mock()
await async_turn_off(hass, entity_id)
assert len(cluster.write_attributes.mock_calls) == 1
assert cluster.write_attributes.call_args == call({"fan_mode": 0})
# change speed from HA
cluster.write_attributes.reset_mock()
await async_set_speed(hass, entity_id, speed=fan.SPEED_HIGH)
assert len(cluster.write_attributes.mock_calls) == 1
assert cluster.write_attributes.call_args == call({"fan_mode": 3})
# test adding new fan to the network and HA
await async_test_rejoin(hass, zigpy_device, [cluster], (1,))
async def async_turn_on(hass, entity_id, speed=None):
"""Turn fan on."""
data = {
key: value
for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True)
async def async_turn_off(hass, entity_id):
"""Turn fan off."""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True)
async def async_set_speed(hass, entity_id, speed=None):
"""Set speed for specified fan."""
data = {
key: value
for key, value in [(ATTR_ENTITY_ID, entity_id), (ATTR_SPEED, speed)]
if value is not None
}
await hass.services.async_call(DOMAIN, SERVICE_SET_SPEED, data, blocking=True)
async def async_test_zha_group_fan_entity(
hass, device_fan_1, device_fan_2, coordinator
):
"""Test the fan entity for a ZHA group."""
zha_gateway = get_zha_gateway(hass)
assert zha_gateway is not None
zha_gateway.coordinator_zha_device = coordinator
coordinator._zha_gateway = zha_gateway
device_fan_1._zha_gateway = zha_gateway
device_fan_2._zha_gateway = zha_gateway
member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee]
# test creating a group with 2 members
zha_group = await zha_gateway.async_create_zigpy_group(
"Test Group", member_ieee_addresses
)
await hass.async_block_till_done()
assert zha_group is not None
assert len(zha_group.members) == 2
for member in zha_group.members:
assert member.ieee in member_ieee_addresses
entity_domains = GROUP_PROBE.determine_entity_domains(zha_group)
assert len(entity_domains) == 2
assert LIGHT_DOMAIN in entity_domains
assert DOMAIN in entity_domains
entity_id = async_find_group_entity_id(hass, DOMAIN, zha_group)
assert hass.states.get(entity_id) is not None
group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id]
dev1_fan_cluster = device_fan_1.endpoints[1].fan
dev2_fan_cluster = device_fan_2.endpoints[1].fan
# test that the lights were created and that they are unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, zha_group.members)
# test that the fan group entity was created and is off
assert hass.states.get(entity_id).state == STATE_OFF
# turn on from HA
group_fan_cluster.write_attributes.reset_mock()
await async_turn_on(hass, entity_id)
assert len(group_fan_cluster.write_attributes.mock_calls) == 1
assert group_fan_cluster.write_attributes.call_args == call({"fan_mode": 2})
assert hass.states.get(entity_id).state == SPEED_MEDIUM
# turn off from HA
group_fan_cluster.write_attributes.reset_mock()
await async_turn_off(hass, entity_id)
assert len(group_fan_cluster.write_attributes.mock_calls) == 1
assert group_fan_cluster.write_attributes.call_args == call({"fan_mode": 0})
assert hass.states.get(entity_id).state == STATE_OFF
# change speed from HA
group_fan_cluster.write_attributes.reset_mock()
await async_set_speed(hass, entity_id, speed=fan.SPEED_HIGH)
assert len(group_fan_cluster.write_attributes.mock_calls) == 1
assert group_fan_cluster.write_attributes.call_args == call({"fan_mode": 3})
assert hass.states.get(entity_id).state == SPEED_HIGH
# test some of the group logic to make sure we key off states correctly
await dev1_fan_cluster.async_set_speed(SPEED_OFF)
await dev2_fan_cluster.async_set_speed(SPEED_OFF)
# test that group fan is off
assert hass.states.get(entity_id).state == STATE_OFF
await dev1_fan_cluster.async_set_speed(SPEED_MEDIUM)
# test that group fan is speed medium
assert hass.states.get(entity_id).state == SPEED_MEDIUM
await dev1_fan_cluster.async_set_speed(SPEED_OFF)
# test that group fan is now off
assert hass.states.get(entity_id).state == STATE_OFF
|
from typing import Any, Mapping
from homeassistant.components.upnp.const import (
BYTES_RECEIVED,
BYTES_SENT,
PACKETS_RECEIVED,
PACKETS_SENT,
TIMESTAMP,
)
from homeassistant.components.upnp.device import Device
import homeassistant.util.dt as dt_util
class MockDevice(Device):
"""Mock device for Device."""
def __init__(self, udn):
"""Initialize mock device."""
igd_device = object()
super().__init__(igd_device)
self._udn = udn
self.added_port_mappings = []
self.removed_port_mappings = []
@classmethod
async def async_create_device(cls, hass, ssdp_location):
"""Return self."""
return cls("UDN")
@property
def udn(self) -> str:
"""Get the UDN."""
return self._udn
@property
def manufacturer(self) -> str:
"""Get manufacturer."""
return "mock-manufacturer"
@property
def name(self) -> str:
"""Get name."""
return "mock-name"
@property
def model_name(self) -> str:
"""Get the model name."""
return "mock-model-name"
@property
def device_type(self) -> str:
"""Get the device type."""
return "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
async def _async_add_port_mapping(
self, external_port: int, local_ip: str, internal_port: int
) -> None:
"""Add a port mapping."""
entry = [external_port, local_ip, internal_port]
self.added_port_mappings.append(entry)
async def _async_delete_port_mapping(self, external_port: int) -> None:
"""Remove a port mapping."""
entry = external_port
self.removed_port_mappings.append(entry)
    async def async_get_traffic_data(self) -> Mapping[str, Any]:
"""Get traffic data."""
return {
TIMESTAMP: dt_util.utcnow(),
BYTES_RECEIVED: 0,
BYTES_SENT: 0,
PACKETS_RECEIVED: 0,
PACKETS_SENT: 0,
}
|
import argparse
import glob
import os
import struct
import sys
def clamp_to_min_max(value, min, max):
if value > max:
value = max
elif value < min:
value = min
return value
def clamp_to_u8(value):
return clamp_to_min_max(value, 0, 255)
def parse_args():
parser = argparse.ArgumentParser(description="Set the low battery threshold")
parser.add_argument('-d', '--device', type=str, help="Device string like \"0003:1532:0045.000C\"")
parser.add_argument('--threshold', required=True, type=int, help="Threshold (1-25)")
args = parser.parse_args()
return args
def run():
args = parse_args()
if args.device is None:
mouse_dirs = glob.glob(os.path.join('/sys/bus/hid/drivers/razermouse/', "*:*:*.*"))
if len(mouse_dirs) > 1:
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
if len(mouse_dirs) < 1:
print("No mouse directories found. Make sure the driver is binded", file=sys.stderr)
sys.exit(1)
mouse_dir = mouse_dirs[0]
else:
mouse_dir = os.path.join('/sys/bus/hid/drivers/razermouse/', args.device)
if not os.path.isdir(mouse_dir):
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
threshold = clamp_to_min_max(args.threshold, 1, 25)
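    # Scale the 1-25 percent threshold onto the 0-255 range used by the driver,
    # e.g. a threshold of 25 becomes round(255 / 100 * 25) == 64.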
threshold_scaled = clamp_to_u8(int(round((255 / 100) * threshold, 0)))
byte_string = bytes(str(threshold_scaled), 'utf-8') # Convert string to bytestring
low_battery_threshold_filepath = os.path.join(mouse_dir, "set_low_battery_threshold")
with open(low_battery_threshold_filepath, 'wb') as low_battery_threshold_file:
low_battery_threshold_file.write(byte_string)
print("Done")
if __name__ == '__main__':
run()
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def forwards(apps, schema_editor):
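    # Fold the two legacy address form plugins into the single CheckoutAddressPlugin,
    # recording which form each element served in the 'address_form' glossary key.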
CascadeElement = apps.get_model('cmsplugin_cascade', 'CascadeElement')
for element in CascadeElement.objects.all():
if element.plugin_type == 'BillingAddressFormPlugin':
element.plugin_type = 'CheckoutAddressPlugin'
element.glossary['address_form'] = 'billing'
element.glossary['allow_use_primary'] = element.glossary.get('allow_use_shipping', '')
element.save()
elif element.plugin_type == 'ShippingAddressFormPlugin':
element.plugin_type = 'CheckoutAddressPlugin'
element.glossary['address_form'] = 'shipping'
element.save()
def backwards(apps, schema_editor):
CascadeElement = apps.get_model('cmsplugin_cascade', 'CascadeElement')
for element in CascadeElement.objects.all():
if element.plugin_type == 'CheckoutAddressPlugin':
if element.glossary['address_form'] == 'billing':
element.plugin_type = 'BillingAddressFormPlugin'
element.glossary['allow_use_shipping'] = element.glossary.get('allow_use_primary', '')
elif element.glossary['address_form'] == 'shipping':
element.plugin_type = 'ShippingAddressFormPlugin'
element.save()
class Migration(migrations.Migration):
dependencies = [
('shop', '0004_ckeditor31'),
]
operations = []
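    # The data migration is only meaningful when 'cmsplugin_cascade' is installed,
    # so the extra dependency and the RunPython operation are added conditionally.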
if 'cmsplugin_cascade' in settings.INSTALLED_APPS:
dependencies.append(('cmsplugin_cascade', '0017_fake_proxy_models'))
operations.append(migrations.RunPython(forwards, reverse_code=backwards))
|
from homeassistant import data_entry_flow, setup
from homeassistant.components.smappee.const import (
CONF_HOSTNAME,
CONF_SERIALNUMBER,
DOMAIN,
ENV_CLOUD,
ENV_LOCAL,
TOKEN_URL,
)
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers import config_entry_oauth2_flow
from tests.async_mock import patch
from tests.common import MockConfigEntry
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"
async def test_show_user_form(hass):
"""Test that the user set up form is served."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_show_user_host_form(hass):
"""Test that the host form is served after choosing the local option."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_LOCAL}
)
assert result["step_id"] == ENV_LOCAL
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
async def test_show_zeroconf_connection_error_form(hass):
"""Test that the zeroconf confirmation form is served."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "Smappee1006000212.local.",
"type": "_ssh._tcp.local.",
"name": "Smappee1006000212._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
assert result["description_placeholders"] == {CONF_SERIALNUMBER: "1006000212"}
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zeroconf_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "cannot_connect"
assert len(hass.config_entries.async_entries(DOMAIN)) == 0
async def test_connection_error(hass):
"""Test we show user form on Smappee connection error."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_LOCAL}
)
assert result["step_id"] == ENV_LOCAL
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["reason"] == "cannot_connect"
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_zeroconf_wrong_mdns(hass):
"""Test we abort if unsupported mDNS name is discovered."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "example.local.",
"type": "_ssh._tcp.local.",
"name": "example._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
assert result["reason"] == "invalid_mdns"
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_full_user_wrong_mdns(hass):
"""Test we abort user flow if unsupported mDNS name got resolved."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
"pysmappee.api.SmappeeLocalApi.load_advanced_config",
return_value=[{"key": "mdnsHostName", "value": "Smappee5010000001"}],
), patch(
"pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
), patch(
"pysmappee.api.SmappeeLocalApi.load_instantaneous",
return_value=[{"key": "phase0ActivePower", "value": 0}],
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_LOCAL}
)
assert result["step_id"] == ENV_LOCAL
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "invalid_mdns"
async def test_user_device_exists_abort(hass):
"""Test we abort user flow if Smappee device already configured."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
"pysmappee.api.SmappeeLocalApi.load_advanced_config",
return_value=[{"key": "mdnsHostName", "value": "Smappee1006000212"}],
), patch(
"pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
), patch(
"pysmappee.api.SmappeeLocalApi.load_instantaneous",
return_value=[{"key": "phase0ActivePower", "value": 0}],
):
config_entry = MockConfigEntry(
domain=DOMAIN,
data={"host": "1.2.3.4"},
unique_id="1006000212",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_LOCAL}
)
assert result["step_id"] == ENV_LOCAL
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_zeroconf_device_exists_abort(hass):
"""Test we abort zeroconf flow if Smappee device already configured."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
"pysmappee.api.SmappeeLocalApi.load_advanced_config",
return_value=[{"key": "mdnsHostName", "value": "Smappee1006000212"}],
), patch(
"pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
), patch(
"pysmappee.api.SmappeeLocalApi.load_instantaneous",
return_value=[{"key": "phase0ActivePower", "value": 0}],
):
config_entry = MockConfigEntry(
domain=DOMAIN,
data={"host": "1.2.3.4"},
unique_id="1006000212",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "Smappee1006000212.local.",
"type": "_ssh._tcp.local.",
"name": "Smappee1006000212._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_cloud_device_exists_abort(hass):
"""Test we abort cloud flow if Smappee Cloud device already configured."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="smappeeCloud",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured_device"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_zeroconf_abort_if_cloud_device_exists(hass):
"""Test we abort zeroconf flow if Smappee Cloud device already configured."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="smappeeCloud",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "Smappee1006000212.local.",
"type": "_ssh._tcp.local.",
"name": "Smappee1006000212._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured_device"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_zeroconf_confirm_abort_if_cloud_device_exists(hass):
"""Test we abort zeroconf confirm flow if Smappee Cloud device already configured."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "Smappee1006000212.local.",
"type": "_ssh._tcp.local.",
"name": "Smappee1006000212._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="smappeeCloud",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured_device"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_abort_cloud_flow_if_local_device_exists(hass):
"""Test we abort the cloud flow if a Smappee local device already configured."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={"host": "1.2.3.4"},
unique_id="1006000212",
source=SOURCE_USER,
)
config_entry.add_to_hass(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_CLOUD}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured_local_device"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
async def test_full_user_flow(hass, aiohttp_client, aioclient_mock, current_request):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {CONF_CLIENT_ID: CLIENT_ID, CONF_CLIENT_SECRET: CLIENT_SECRET},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"environment": ENV_CLOUD}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
TOKEN_URL["PRODUCTION"],
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch(
"homeassistant.components.smappee.async_setup_entry", return_value=True
) as mock_setup:
await hass.config_entries.flow.async_configure(result["flow_id"])
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup.mock_calls) == 1
async def test_full_zeroconf_flow(hass):
"""Test the full zeroconf flow."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
"pysmappee.api.SmappeeLocalApi.load_advanced_config",
return_value=[{"key": "mdnsHostName", "value": "Smappee1006000212"}],
), patch(
"pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
), patch(
"pysmappee.api.SmappeeLocalApi.load_instantaneous",
return_value=[{"key": "phase0ActivePower", "value": 0}],
), patch(
"homeassistant.components.smappee.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data={
"host": "1.2.3.4",
"port": 22,
CONF_HOSTNAME: "Smappee1006000212.local.",
"type": "_ssh._tcp.local.",
"name": "Smappee1006000212._ssh._tcp.local.",
"properties": {"_raw": {}},
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "zeroconf_confirm"
assert result["description_placeholders"] == {CONF_SERIALNUMBER: "1006000212"}
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "smappee1006000212"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
entry = hass.config_entries.async_entries(DOMAIN)[0]
assert entry.unique_id == "1006000212"
async def test_full_user_local_flow(hass):
"""Test the full zeroconf flow."""
with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
"pysmappee.api.SmappeeLocalApi.load_advanced_config",
return_value=[{"key": "mdnsHostName", "value": "Smappee1006000212"}],
), patch(
"pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
), patch(
"pysmappee.api.SmappeeLocalApi.load_instantaneous",
return_value=[{"key": "phase0ActivePower", "value": 0}],
), patch(
"homeassistant.components.smappee.async_setup_entry", return_value=True
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["step_id"] == "environment"
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["description_placeholders"] is None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"environment": ENV_LOCAL},
)
assert result["step_id"] == ENV_LOCAL
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "1.2.3.4"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "smappee1006000212"
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
entry = hass.config_entries.async_entries(DOMAIN)[0]
assert entry.unique_id == "1006000212"
|
import logging
import socket
import voluptuous as vol
from ziggo_mediabox_xl import ZiggoMediaboxXL
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DATA_KNOWN_DEVICES = "ziggo_mediabox_xl_known_devices"
SUPPORT_ZIGGO = (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ziggo Mediabox XL platform."""
hass.data[DATA_KNOWN_DEVICES] = known_devices = set()
# Is this a manual configuration?
if config.get(CONF_HOST) is not None:
host = config.get(CONF_HOST)
name = config.get(CONF_NAME)
manual_config = True
elif discovery_info is not None:
host = discovery_info.get("host")
name = discovery_info.get("name")
manual_config = False
else:
_LOGGER.error("Cannot determine device")
return
# Only add a device once, so discovered devices do not override manual
# config.
hosts = []
connection_successful = False
ip_addr = socket.gethostbyname(host)
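    # Resolve the host up front so the same device is only added once,
    # whether it was configured manually or discovered.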
if ip_addr not in known_devices:
try:
# Mediabox instance with a timeout of 3 seconds.
mediabox = ZiggoMediaboxXL(ip_addr, 3)
# Check if a connection can be established to the device.
if mediabox.test_connection():
connection_successful = True
else:
if manual_config:
_LOGGER.info("Can't connect to %s", host)
else:
_LOGGER.error("Can't connect to %s", host)
# When the device is in eco mode it's not connected to the network
# so it needs to be added anyway if it's configured manually.
if manual_config or connection_successful:
hosts.append(
ZiggoMediaboxXLDevice(mediabox, host, name, connection_successful)
)
known_devices.add(ip_addr)
except OSError as error:
_LOGGER.error("Can't connect to %s: %s", host, error)
else:
_LOGGER.info("Ignoring duplicate Ziggo Mediabox XL %s", host)
add_entities(hosts, True)
class ZiggoMediaboxXLDevice(MediaPlayerEntity):
"""Representation of a Ziggo Mediabox XL Device."""
def __init__(self, mediabox, host, name, available):
"""Initialize the device."""
self._mediabox = mediabox
self._host = host
self._name = name
self._available = available
self._state = None
def update(self):
"""Retrieve the state of the device."""
try:
if self._mediabox.test_connection():
if self._mediabox.turned_on():
if self._state != STATE_PAUSED:
self._state = STATE_PLAYING
else:
self._state = STATE_OFF
self._available = True
else:
self._available = False
except OSError:
_LOGGER.error("Couldn't fetch state from %s", self._host)
self._available = False
def send_keys(self, keys):
"""Send keys to the device and handle exceptions."""
try:
self._mediabox.send_keys(keys)
except OSError:
_LOGGER.error("Couldn't send keys to %s", self._host)
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def available(self):
"""Return True if the device is available."""
return self._available
@property
def source_list(self):
"""List of available sources (channels)."""
return [
self._mediabox.channels()[c]
for c in sorted(self._mediabox.channels().keys())
]
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_ZIGGO
def turn_on(self):
"""Turn the media player on."""
self.send_keys(["POWER"])
def turn_off(self):
"""Turn off media player."""
self.send_keys(["POWER"])
def media_play(self):
"""Send play command."""
self.send_keys(["PLAY"])
self._state = STATE_PLAYING
def media_pause(self):
"""Send pause command."""
self.send_keys(["PAUSE"])
self._state = STATE_PAUSED
def media_play_pause(self):
"""Simulate play pause media player."""
self.send_keys(["PAUSE"])
if self._state == STATE_PAUSED:
self._state = STATE_PLAYING
else:
self._state = STATE_PAUSED
def media_next_track(self):
"""Channel up."""
self.send_keys(["CHAN_UP"])
self._state = STATE_PLAYING
def media_previous_track(self):
"""Channel down."""
self.send_keys(["CHAN_DOWN"])
self._state = STATE_PLAYING
def select_source(self, source):
"""Select the channel."""
if str(source).isdigit():
digits = str(source)
else:
digits = next(
(
key
for key, value in self._mediabox.channels().items()
if value == source
),
None,
)
if digits is None:
return
self.send_keys([f"NUM_{digit}" for digit in str(digits)])
self._state = STATE_PLAYING
|
import distutils.cmd
import distutils.log
import itertools
import os
import platform
import shutil
import sys
from setuptools import setup, find_packages, Extension
from setuptools.command.build_ext import build_ext
c_extensions = {
'gensim.models.word2vec_inner': 'gensim/models/word2vec_inner.c',
'gensim.corpora._mmreader': 'gensim/corpora/_mmreader.c',
'gensim.models.fasttext_inner': 'gensim/models/fasttext_inner.c',
'gensim._matutils': 'gensim/_matutils.c',
'gensim.models.nmf_pgd': 'gensim/models/nmf_pgd.c',
}
cpp_extensions = {
'gensim.models.doc2vec_inner': 'gensim/models/doc2vec_inner.cpp',
'gensim.models.word2vec_corpusfile': 'gensim/models/word2vec_corpusfile.cpp',
'gensim.models.fasttext_corpusfile': 'gensim/models/fasttext_corpusfile.cpp',
'gensim.models.doc2vec_corpusfile': 'gensim/models/doc2vec_corpusfile.cpp',
}
def need_cython():
"""Return True if we need Cython to translate any of the extensions.
If the extensions have already been translated to C/C++, then we don't need
to install Cython and perform the translation.
"""
expected = list(c_extensions.values()) + list(cpp_extensions.values())
return any([not os.path.isfile(f) for f in expected])
def make_c_ext(use_cython=False):
for module, source in c_extensions.items():
if use_cython:
source = source.replace('.c', '.pyx')
extra_args = []
        # extra_args.extend(['-g', '-O0'])  # uncomment if optimization is obscuring crash info
yield Extension(
module,
sources=[source],
language='c',
extra_compile_args=extra_args,
)
def make_cpp_ext(use_cython=False):
extra_args = []
system = platform.system()
if system == 'Linux':
extra_args.append('-std=c++11')
elif system == 'Darwin':
extra_args.extend(['-stdlib=libc++', '-std=c++11'])
    # extra_args.extend(['-g', '-O0'])  # uncomment if optimization is obscuring crash info
for module, source in cpp_extensions.items():
if use_cython:
source = source.replace('.cpp', '.pyx')
yield Extension(
module,
sources=[source],
language='c++',
extra_compile_args=extra_args,
extra_link_args=extra_args,
)
#
# We use use_cython=False here for two reasons:
#
# 1. Cython may not be available at this stage
# 2. The actual translation from Cython to C/C++ happens inside CustomBuildExt
#
ext_modules = list(itertools.chain(make_c_ext(use_cython=False), make_cpp_ext(use_cython=False)))
class CustomBuildExt(build_ext):
"""Custom build_ext action with bootstrapping.
We need this in order to use numpy and Cython in this script without
importing them at module level, because they may not be available yet.
"""
#
# http://stackoverflow.com/questions/19919905/how-to-bootstrap-numpy-installation-in-setup-py
#
def finalize_options(self):
build_ext.finalize_options(self)
# Prevent numpy from thinking it is still in its setup process:
# https://docs.python.org/2/library/__builtin__.html#module-__builtin__
__builtins__.__NUMPY_SETUP__ = False
import numpy
self.include_dirs.append(numpy.get_include())
if need_cython():
import Cython.Build
Cython.Build.cythonize(list(make_c_ext(use_cython=True)))
Cython.Build.cythonize(list(make_cpp_ext(use_cython=True)))
class CleanExt(distutils.cmd.Command):
description = 'Remove C sources, C++ sources and binaries for gensim extensions'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for root, dirs, files in os.walk('gensim'):
files = [
os.path.join(root, f)
for f in files
if os.path.splitext(f)[1] in ('.c', '.cpp', '.so')
]
for f in files:
self.announce('removing %s' % f, level=distutils.log.INFO)
os.unlink(f)
if os.path.isdir('build'):
self.announce('recursively removing build', level=distutils.log.INFO)
shutil.rmtree('build')
cmdclass = {'build_ext': CustomBuildExt, 'clean_ext': CleanExt}
WHEELHOUSE_UPLOADER_COMMANDS = {'fetch_artifacts', 'upload_all'}
if WHEELHOUSE_UPLOADER_COMMANDS.intersection(sys.argv):
import wheelhouse_uploader.cmd
cmdclass.update(vars(wheelhouse_uploader.cmd))
LONG_DESCRIPTION = u"""
==============================================
gensim -- Topic Modelling in Python
==============================================
|Travis|_
|Wheel|_
.. |Travis| image:: https://img.shields.io/travis/RaRe-Technologies/gensim/develop.svg
.. |Wheel| image:: https://img.shields.io/pypi/wheel/gensim.svg
.. _Travis: https://travis-ci.org/RaRe-Technologies/gensim
.. _Downloads: https://pypi.python.org/pypi/gensim
.. _License: http://radimrehurek.com/gensim/about.html
.. _Wheel: https://pypi.python.org/pypi/gensim
Gensim is a Python library for *topic modelling*, *document indexing* and *similarity retrieval* with large corpora.
Target audience is the *natural language processing* (NLP) and *information retrieval* (IR) community.
Features
---------
* All algorithms are **memory-independent** w.r.t. the corpus size (can process input larger than RAM, streamed, out-of-core)
* **Intuitive interfaces**
* easy to plug in your own input corpus/datastream (simple streaming API)
* easy to extend with other Vector Space algorithms (simple transformation API)
* Efficient multicore implementations of popular algorithms, such as online **Latent Semantic Analysis (LSA/LSI/SVD)**,
**Latent Dirichlet Allocation (LDA)**, **Random Projections (RP)**, **Hierarchical Dirichlet Process (HDP)** or **word2vec deep learning**.
* **Distributed computing**: can run *Latent Semantic Analysis* and *Latent Dirichlet Allocation* on a cluster of computers.
* Extensive `documentation and Jupyter Notebook tutorials <https://github.com/RaRe-Technologies/gensim/#documentation>`_.
If this feature list left you scratching your head, you can first read more about the `Vector
Space Model <http://en.wikipedia.org/wiki/Vector_space_model>`_ and `unsupervised
document analysis <http://en.wikipedia.org/wiki/Latent_semantic_indexing>`_ on Wikipedia.
Installation
------------
This software depends on `NumPy and Scipy <http://www.scipy.org/Download>`_, two Python packages for scientific computing.
You must have them installed prior to installing `gensim`.
It is also recommended you install a fast BLAS library before installing NumPy. This is optional, but using an optimized BLAS such as `ATLAS <http://math-atlas.sourceforge.net/>`_ or `OpenBLAS <http://xianyi.github.io/OpenBLAS/>`_ is known to improve performance by as much as an order of magnitude. On OS X, NumPy picks up the BLAS that comes with it automatically, so you don't need to do anything special.
Install the latest version of gensim::
pip install --upgrade gensim
Or, if you have instead downloaded and unzipped the `source tar.gz <http://pypi.python.org/pypi/gensim>`_ package::
python setup.py install
For alternative modes of installation, see the `documentation <http://radimrehurek.com/gensim/install.html>`_.
Gensim is being `continuously tested <https://travis-ci.org/RaRe-Technologies/gensim>`_ under Python 3.6, 3.7 and 3.8.
Support for Python 2.7 was dropped in gensim 4.0.0 – install gensim 3.8.3 if you must use Python 2.7.
How come gensim is so fast and memory efficient? Isn't it pure Python, and isn't Python slow and greedy?
--------------------------------------------------------------------------------------------------------
Many scientific algorithms can be expressed in terms of large matrix operations (see the BLAS note above). Gensim taps into these low-level BLAS libraries, by means of its dependency on NumPy. So while gensim-the-top-level-code is pure Python, it actually executes highly optimized Fortran/C under the hood, including multithreading (if your BLAS is so configured).
Memory-wise, gensim makes heavy use of Python's built-in generators and iterators for streamed data processing. Memory efficiency was one of gensim's `design goals <http://radimrehurek.com/gensim/about.html>`_, and is a central feature of gensim, rather than something bolted on as an afterthought.
Documentation
-------------
* `QuickStart`_
* `Tutorials`_
* `Tutorial Videos`_
* `Official Documentation and Walkthrough`_
Citing gensim
-------------
When `citing gensim in academic papers and theses <https://scholar.google.cz/citations?view_op=view_citation&hl=en&user=9vG_kV0AAAAJ&citation_for_view=9vG_kV0AAAAJ:u-x6o8ySG0sC>`_, please use this BibTeX entry::
@inproceedings{rehurek_lrec,
title = {{Software Framework for Topic Modelling with Large Corpora}},
author = {Radim {\\v R}eh{\\r u}{\\v r}ek and Petr Sojka},
booktitle = {{Proceedings of the LREC 2010 Workshop on New
Challenges for NLP Frameworks}},
pages = {45--50},
year = 2010,
month = May,
day = 22,
publisher = {ELRA},
address = {Valletta, Malta},
language={English}
}
----------------
Gensim is open source software released under the `GNU LGPLv2.1 license <http://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html>`_.
Copyright (c) 2009-now Radim Rehurek
|Analytics|_
.. |Analytics| image:: https://ga-beacon.appspot.com/UA-24066335-5/your-repo/page-name
.. _Analytics: https://github.com/igrigorik/ga-beacon
.. _Official Documentation and Walkthrough: http://radimrehurek.com/gensim/
.. _Tutorials: https://github.com/RaRe-Technologies/gensim/blob/develop/tutorials.md#tutorials
.. _Tutorial Videos: https://github.com/RaRe-Technologies/gensim/blob/develop/tutorials.md#videos
.. _QuickStart: https://radimrehurek.com/gensim/gensim_numfocus/auto_examples/core/run_core_concepts.html
"""
distributed_env = ['Pyro4 >= 4.27']
visdom_req = ['visdom >= 0.1.8, != 0.1.8.7']
# packages included for build-testing everywhere
core_testenv = [
'pytest',
# 'pytest-rerunfailures', # disabled 2020-08-28 for <https://github.com/pytest-dev/pytest-rerunfailures/issues/128>
'mock',
'cython',
'nmslib',
'pyemd',
'testfixtures',
'Morfessor==2.0.2a4',
'python-Levenshtein >= 0.10.2',
'scikit-learn',
]
# Add additional requirements for testing on Linux that are skipped on Windows.
linux_testenv = core_testenv[:] + visdom_req + ['pyemd', ]
# Skip problematic/uninstallable packages (& thus related conditional tests) in Windows builds.
# We still test them in Linux via Travis, see linux_testenv above.
# See https://github.com/RaRe-Technologies/gensim/pull/2814
win_testenv = core_testenv[:]
#
# This list partially duplicates requirements_docs.txt.
# The main difference is that we don't include version pins here unless
# absolutely necessary, whereas requirements_docs.txt includes pins for
# everything, by design.
#
# For more info about the difference between the two:
#
# https://packaging.python.org/discussions/install-requires-vs-requirements/
#
docs_testenv = core_testenv + distributed_env + visdom_req + [
'sphinx <= 2.4.4', # avoid `sphinx >= 3.0` that breaks the build
'sphinx-gallery',
'sphinxcontrib.programoutput',
'sphinxcontrib-napoleon',
'matplotlib', # expected by sphinx-gallery
'plotly',
#
# Pattern is a PITA to install, it requires mysqlclient, which in turn
# requires MySQL dev tools be installed. We don't need it for building
# documentation.
#
# 'Pattern==3.6', # Need 3.6 or later for Py3 support
'memory_profiler',
'annoy',
'Pyro4',
'scikit-learn',
'nltk',
'testfixtures',
'statsmodels',
'pyemd',
'pandas',
]
NUMPY_STR = 'numpy >= 1.11.3'
#
# We pin the Cython version for reproducibility. We expect our extensions
# to build with any sane version of Cython, so we should update this pin
# periodically.
#
CYTHON_STR = 'Cython==0.29.14'
install_requires = [
NUMPY_STR,
'scipy >= 0.18.1',
'smart_open >= 1.8.1',
"dataclasses; python_version < '3.7'", # pre-py3.7 needs `dataclasses` backport for use of `dataclass` in doc2vec.py
]
setup_requires = [NUMPY_STR]
if need_cython():
install_requires.append(CYTHON_STR)
setup_requires.append(CYTHON_STR)
setup(
name='gensim',
version='4.0.0beta',
description='Python framework for fast Vector Space Modelling',
long_description=LONG_DESCRIPTION,
ext_modules=ext_modules,
cmdclass=cmdclass,
packages=find_packages(),
author=u'Radim Rehurek',
author_email='[email protected]',
url='http://radimrehurek.com/gensim',
download_url='http://pypi.python.org/pypi/gensim',
license='LGPL-2.1-only',
keywords='Singular Value Decomposition, SVD, Latent Semantic Indexing, '
'LSA, LSI, Latent Dirichlet Allocation, LDA, '
'Hierarchical Dirichlet Process, HDP, Random Projections, '
'TFIDF, word2vec',
platforms='any',
zip_safe=False,
classifiers=[ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Text Processing :: Linguistic',
],
test_suite="gensim.test",
python_requires='>=3.6',
setup_requires=setup_requires,
install_requires=install_requires,
tests_require=linux_testenv,
extras_require={
'distributed': distributed_env,
'test-win': win_testenv,
'test': linux_testenv,
'docs': docs_testenv,
},
include_package_data=True,
)
|
from mill import Mill
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_ON,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_PASSWORD,
CONF_USERNAME,
TEMP_CELSIUS,
)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
ATTR_AWAY_TEMP,
ATTR_COMFORT_TEMP,
ATTR_ROOM_NAME,
ATTR_SLEEP_TEMP,
DOMAIN,
MANUFACTURER,
MAX_TEMP,
MIN_TEMP,
SERVICE_SET_ROOM_TEMP,
)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE
SET_ROOM_TEMP_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ROOM_NAME): cv.string,
vol.Optional(ATTR_AWAY_TEMP): cv.positive_int,
vol.Optional(ATTR_COMFORT_TEMP): cv.positive_int,
vol.Optional(ATTR_SLEEP_TEMP): cv.positive_int,
}
)
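# Illustrative service payload for SERVICE_SET_ROOM_TEMP (keys shown assume the
# usual ATTR_* constant values; the temperatures are example numbers):
#   room_name: "Living room"
#   away_temp: 12
#   comfort_temp: 21
#   sleep_temp: 16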
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Mill climate."""
mill_data_connection = Mill(
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)
if not await mill_data_connection.connect():
raise ConfigEntryNotReady
await mill_data_connection.find_all_heaters()
dev = []
for heater in mill_data_connection.heaters.values():
dev.append(MillHeater(heater, mill_data_connection))
async_add_entities(dev)
async def set_room_temp(service):
"""Set room temp."""
room_name = service.data.get(ATTR_ROOM_NAME)
sleep_temp = service.data.get(ATTR_SLEEP_TEMP)
comfort_temp = service.data.get(ATTR_COMFORT_TEMP)
away_temp = service.data.get(ATTR_AWAY_TEMP)
await mill_data_connection.set_room_temperatures_by_name(
room_name, sleep_temp, comfort_temp, away_temp
)
hass.services.async_register(
DOMAIN, SERVICE_SET_ROOM_TEMP, set_room_temp, schema=SET_ROOM_TEMP_SCHEMA
)
class MillHeater(ClimateEntity):
"""Representation of a Mill Thermostat device."""
def __init__(self, heater, mill_data_connection):
"""Initialize the thermostat."""
self._heater = heater
self._conn = mill_data_connection
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def available(self):
"""Return True if entity is available."""
return self._heater.available
@property
def unique_id(self):
"""Return a unique ID."""
return self._heater.device_id
@property
def name(self):
"""Return the name of the entity."""
return self._heater.name
@property
def device_state_attributes(self):
"""Return the state attributes."""
res = {
"open_window": self._heater.open_window,
"heating": self._heater.is_heating,
"controlled_by_tibber": self._heater.tibber_control,
"heater_generation": 1 if self._heater.is_gen1 else 2,
}
if self._heater.room:
res["room"] = self._heater.room.name
res["avg_room_temp"] = self._heater.room.avg_temp
else:
res["room"] = "Independent device"
return res
@property
def temperature_unit(self):
"""Return the unit of measurement which this thermostat uses."""
return TEMP_CELSIUS
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._heater.set_temp
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 1
@property
def current_temperature(self):
"""Return the current temperature."""
return self._heater.current_temp
@property
def fan_mode(self):
"""Return the fan setting."""
return FAN_ON if self._heater.fan_status == 1 else HVAC_MODE_OFF
@property
def fan_modes(self):
"""List of available fan modes."""
return [FAN_ON, HVAC_MODE_OFF]
@property
def min_temp(self):
"""Return the minimum temperature."""
return MIN_TEMP
@property
def max_temp(self):
"""Return the maximum temperature."""
return MAX_TEMP
@property
def hvac_action(self):
"""Return current hvac i.e. heat, cool, idle."""
if self._heater.is_gen1 or self._heater.is_heating == 1:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
@property
def hvac_mode(self) -> str:
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self._heater.is_gen1 or self._heater.power_status == 1:
return HVAC_MODE_HEAT
return HVAC_MODE_OFF
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
if self._heater.is_gen1:
return [HVAC_MODE_HEAT]
return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
await self._conn.set_heater_temp(self._heater.device_id, int(temperature))
async def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
fan_status = 1 if fan_mode == FAN_ON else 0
await self._conn.heater_control(self._heater.device_id, fan_status=fan_status)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_HEAT:
await self._conn.heater_control(self._heater.device_id, power_status=1)
elif hvac_mode == HVAC_MODE_OFF and not self._heater.is_gen1:
await self._conn.heater_control(self._heater.device_id, power_status=0)
async def async_update(self):
"""Retrieve latest state."""
self._heater = await self._conn.update_device(self._heater.device_id)
@property
def device_id(self):
"""Return the ID of the physical device this sensor is part of."""
return self._heater.device_id
@property
def device_info(self):
"""Return the device_info of the device."""
device_info = {
"identifiers": {(DOMAIN, self.device_id)},
"name": self.name,
"manufacturer": MANUFACTURER,
"model": f"generation {1 if self._heater.is_gen1 else 2}",
}
return device_info
|
from gi.repository import GObject, Gtk, GtkSource
from meld.conf import _
# TODO: Current pygobject support for templates excludes subclassing of
# templated classes, which is why we have two near-identical UI files
# here, and why we can't subclass Gtk.Grid directly in
# FilteredListSelector.
class FilteredListSelector:
# FilteredListSelector was initially based on gedit's
# GeditHighlightModeSelector
# Copyright (C) 2013 - Ignacio Casal Quinteiro
# Python translation and adaptations
# Copyright (C) 2015, 2017 Kai Willadsen <[email protected]>
__gtype_name__ = 'FilteredListSelector'
NAME_COLUMN, VALUE_COLUMN = 0, 1
def __init__(self):
super().__init__()
self.treeview_selection = self.treeview.get_selection()
# FIXME: Should be able to access as a template child, but can't.
self.listfilter = self.treeview.get_model()
self.liststore = self.listfilter.get_model()
self.populate_model()
self.filter_string = ''
self.entry.connect('changed', self.on_entry_changed)
self.listfilter.set_visible_func(self.name_filter)
self.entry.connect('activate', self.on_activate)
self.treeview.connect('row-activated', self.on_activate)
def populate_model(self):
raise NotImplementedError
def select_value(self, value):
if not value:
return
new_value_getter = getattr(value, self.value_accessor)
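        # Match rows by the configured accessor value (e.g. charset or language id)
        # rather than by object identity, since equal values may be distinct objects.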
for row in self.liststore:
row_value = row[self.VALUE_COLUMN]
if not row_value:
continue
old_value_getter = getattr(row_value, self.value_accessor)
if old_value_getter() != new_value_getter():
continue
self.treeview_selection.select_path(row.path)
self.treeview.scroll_to_cell(row.path, None, True, 0.5, 0)
def name_filter(self, model, it, *args):
if not self.filter_string:
return True
name = model.get_value(it, self.NAME_COLUMN).lower()
return self.filter_string.lower() in name
def on_entry_changed(self, entry):
self.filter_string = entry.get_text()
self.listfilter.refilter()
first = self.listfilter.get_iter_first()
if first:
self.treeview_selection.select_iter(first)
def on_activate(self, *args):
model, it = self.treeview_selection.get_selected()
if not it:
return
value = model.get_value(it, self.VALUE_COLUMN)
self.emit(self.change_signal_name, value)
# The subclassing here is weird; the Selector must directly subclass
# Gtk.Grid; we can't do this on the FilteredListSelector. Likewise, the
# Gtk.Template.Child attributes must be per-class, because of how
# they're registered by the templating engine.
@Gtk.Template(resource_path='/org/gnome/meld/ui/encoding-selector.ui')
class EncodingSelector(FilteredListSelector, Gtk.Grid):
# The subclassing here is weird; the Selector must directly
# subclass Gtk.Grid, or the template building explodes.
__gtype_name__ = 'EncodingSelector'
__gsignals__ = {
'encoding-selected': (
GObject.SignalFlags.RUN_FIRST | GObject.SignalFlags.ACTION,
None, (GtkSource.Encoding,)),
}
# These exist solely to make subclassing easier.
value_accessor = 'get_charset'
change_signal_name = 'encoding-selected'
entry = Gtk.Template.Child('entry')
treeview = Gtk.Template.Child('treeview')
def populate_model(self):
for enc in GtkSource.Encoding.get_all():
self.liststore.append((self.get_value_label(enc), enc))
def get_value_label(self, enc):
return _('{name} ({charset})').format(
name=enc.get_name(), charset=enc.get_charset())
# SourceLangSelector was initially based on gedit's
# GeditHighlightModeSelector
# Copyright (C) 2013 - Ignacio Casal Quinteiro
# Python translation and adaptations
# Copyright (C) 2015, 2017 Kai Willadsen <[email protected]>
@Gtk.Template(resource_path='/org/gnome/meld/ui/language-selector.ui')
class SourceLangSelector(FilteredListSelector, Gtk.Grid):
__gtype_name__ = "SourceLangSelector"
__gsignals__ = {
'language-selected': (
GObject.SignalFlags.RUN_FIRST | GObject.SignalFlags.ACTION,
None, (GtkSource.Language,)),
}
# These exist solely to make subclassing easier.
value_accessor = 'get_id'
change_signal_name = 'language-selected'
entry = Gtk.Template.Child('entry')
treeview = Gtk.Template.Child('treeview')
def populate_model(self):
self.liststore.append((_("Plain Text"), None))
manager = GtkSource.LanguageManager.get_default()
for lang_id in manager.get_language_ids():
lang = manager.get_language(lang_id)
self.liststore.append((lang.get_name(), lang))
def get_value_label(self, lang):
if not lang:
return _("Plain Text")
return lang.get_name()
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import loadmat
from os.path import join
import mne
from mne.decoding import ReceptiveField
from sklearn.model_selection import KFold
from sklearn.preprocessing import scale
###############################################################################
# Load the data from the publication
# ----------------------------------
#
# First we will load the data collected in :footcite:`CrosseEtAl2016`.
# In this experiment subjects
# listened to natural speech. Raw EEG and the speech stimulus are provided.
# We will load these below, downsampling the data in order to speed up
# computation since we know that our features are primarily low-frequency in
# nature. Then we'll visualize both the EEG and speech envelope.
path = mne.datasets.mtrf.data_path()
decim = 2
data = loadmat(join(path, 'speech_data.mat'))
raw = data['EEG'].T
speech = data['envelope'].T
sfreq = float(data['Fs'])
sfreq /= decim
speech = mne.filter.resample(speech, down=decim, npad='auto')
raw = mne.filter.resample(raw, down=decim, npad='auto')
# Read in channel positions and create our MNE objects from the raw data
montage = mne.channels.make_standard_montage('biosemi128')
info = mne.create_info(montage.ch_names, sfreq, 'eeg').set_montage(montage)
raw = mne.io.RawArray(raw, info)
n_channels = len(raw.ch_names)
# Plot a sample of brain and stimulus activity
fig, ax = plt.subplots()
lns = ax.plot(scale(raw[:, :800][0].T), color='k', alpha=.1)
ln1 = ax.plot(scale(speech[0, :800]), color='r', lw=2)
ax.legend([lns[0], ln1[0]], ['EEG', 'Speech Envelope'], frameon=False)
ax.set(title="Sample activity", xlabel="Time (s)")
mne.viz.tight_layout()
###############################################################################
# Create and fit a receptive field model
# --------------------------------------
#
# We will construct an encoding model to find the linear relationship between
# a time-delayed version of the speech envelope and the EEG signal. This allows
# us to make predictions about the response to new stimuli.
# Define the delays that we will use in the receptive field
tmin, tmax = -.2, .4
# Initialize the model
rf = ReceptiveField(tmin, tmax, sfreq, feature_names=['envelope'],
estimator=1., scoring='corrcoef')
# We'll have (tmax - tmin) * sfreq delays
# and an extra 2 delays since we are inclusive on the beginning / end index
n_delays = int((tmax - tmin) * sfreq) + 2
n_splits = 3
cv = KFold(n_splits)
# Prepare model data (make time the first dimension)
speech = speech.T
Y, _ = raw[:] # Outputs for the model
Y = Y.T
# Iterate through splits, fit the model, and predict/test on held-out data
coefs = np.zeros((n_splits, n_channels, n_delays))
scores = np.zeros((n_splits, n_channels))
for ii, (train, test) in enumerate(cv.split(speech)):
print('split %s / %s' % (ii + 1, n_splits))
rf.fit(speech[train], Y[train])
scores[ii] = rf.score(speech[test], Y[test])
# coef_ is shape (n_outputs, n_features, n_delays). we only have 1 feature
coefs[ii] = rf.coef_[:, 0, :]
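# rf.delays_ is expressed in samples; dividing by the sampling rate gives seconds.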
times = rf.delays_ / float(rf.sfreq)
# Average scores and coefficients across CV splits
mean_coefs = coefs.mean(axis=0)
mean_scores = scores.mean(axis=0)
# Plot mean prediction scores across all channels
fig, ax = plt.subplots()
ix_chs = np.arange(n_channels)
ax.plot(ix_chs, mean_scores)
ax.axhline(0, ls='--', color='r')
ax.set(title="Mean prediction score", xlabel="Channel", ylabel="Score ($r$)")
mne.viz.tight_layout()
###############################################################################
# Investigate model coefficients
# ==============================
# Finally, we will look at how the linear coefficients (sometimes
# referred to as beta values) are distributed across time delays as well as
# across the scalp. We will recreate `figure 1`_ and `figure 2`_ from
# :footcite:`CrosseEtAl2016`.
# Print mean coefficients across all time delays / channels (see Fig 1)
time_plot = 0.180 # For highlighting a specific time.
fig, ax = plt.subplots(figsize=(4, 8))
max_coef = mean_coefs.max()
ax.pcolormesh(times, ix_chs, mean_coefs, cmap='RdBu_r',
vmin=-max_coef, vmax=max_coef, shading='gouraud')
ax.axvline(time_plot, ls='--', color='k', lw=2)
ax.set(xlabel='Delay (s)', ylabel='Channel', title="Mean Model\nCoefficients",
xlim=times[[0, -1]], ylim=[len(ix_chs) - 1, 0],
xticks=np.arange(tmin, tmax + .2, .2))
plt.setp(ax.get_xticklabels(), rotation=45)
mne.viz.tight_layout()
# Make a topographic map of coefficients for a given delay (see Fig 2C)
ix_plot = np.argmin(np.abs(time_plot - times))
fig, ax = plt.subplots()
mne.viz.plot_topomap(mean_coefs[:, ix_plot], pos=info, axes=ax, show=False,
vmin=-max_coef, vmax=max_coef)
ax.set(title="Topomap of model coefficients\nfor delay %s" % time_plot)
mne.viz.tight_layout()
###############################################################################
# Create and fit a stimulus reconstruction model
# ----------------------------------------------
#
# We will now demonstrate another use case for the
# :class:`mne.decoding.ReceptiveField` class as we try to predict the stimulus
# activity from the EEG data. This is known in the literature as a decoding, or
# stimulus reconstruction model :footcite:`CrosseEtAl2016`.
# A decoding model aims to find the
# relationship between the speech signal and a time-delayed version of the EEG.
# This can be useful as we exploit all of the available neural data in a
# multivariate context, compared to the encoding case which treats each M/EEG
# channel as an independent feature. Therefore, decoding models might provide a
# better quality of fit (at the expense of not controlling for stimulus
# covariance), especially for low SNR stimuli such as speech.
# We use the same lags as in :footcite:`CrosseEtAl2016`. Negative lags now
# index the relationship
# between the neural response and the speech envelope earlier in time, whereas
# positive lags would index how a unit change in the amplitude of the EEG would
# affect later stimulus activity (obviously this should have an amplitude of
# zero).
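#
# Concretely (an added note): the encoding model above mapped a single feature
# (the envelope) onto all 128 EEG channels, whereas this decoding model maps
# all 128 channels onto the single envelope signal.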
tmin, tmax = -.2, 0.
# Initialize the model. Here the features are the EEG data. We also specify
# ``patterns=True`` to compute inverse-transformed coefficients during model
# fitting (cf. next section and :footcite:`HaufeEtAl2014`).
# We'll use a ridge regression estimator with an alpha value similar to
# Crosse et al.
sr = ReceptiveField(tmin, tmax, sfreq, feature_names=raw.ch_names,
estimator=1e4, scoring='corrcoef', patterns=True)
# We'll have (tmax - tmin) * sfreq delays
# and an extra 2 delays since we are inclusive on the beginning / end index
n_delays = int((tmax - tmin) * sfreq) + 2
n_splits = 3
cv = KFold(n_splits)
# Iterate through splits, fit the model, and predict/test on held-out data
coefs = np.zeros((n_splits, n_channels, n_delays))
patterns = coefs.copy()
scores = np.zeros((n_splits,))
for ii, (train, test) in enumerate(cv.split(speech)):
print('split %s / %s' % (ii + 1, n_splits))
sr.fit(Y[train], speech[train])
scores[ii] = sr.score(Y[test], speech[test])[0]
# coef_ is shape (n_outputs, n_features, n_delays). We have 128 features
coefs[ii] = sr.coef_[0, :, :]
patterns[ii] = sr.patterns_[0, :, :]
times = sr.delays_ / float(sr.sfreq)
# Average scores and coefficients across CV splits
mean_coefs = coefs.mean(axis=0)
mean_patterns = patterns.mean(axis=0)
mean_scores = scores.mean(axis=0)
max_coef = np.abs(mean_coefs).max()
max_patterns = np.abs(mean_patterns).max()
###############################################################################
# Visualize stimulus reconstruction
# =================================
#
# To get a sense of our model performance, we can plot the actual and predicted
# stimulus envelopes on the same axes.
y_pred = sr.predict(Y[test])
time = np.linspace(0, 5., int(5 * sfreq))
fig, ax = plt.subplots(figsize=(8, 4))
ln_env = ax.plot(time, speech[test][sr.valid_samples_][:int(5 * sfreq)],
                 color='grey', lw=2, ls='--')
ln_rec = ax.plot(time, y_pred[sr.valid_samples_][:int(5 * sfreq)],
                 color='r', lw=2)
ax.legend([ln_env[0], ln_rec[0]], ['Envelope', 'Reconstruction'],
          frameon=False)
ax.set(title="Stimulus reconstruction")
ax.set_xlabel('Time (s)')
mne.viz.tight_layout()
###############################################################################
# Investigate model coefficients
# ==============================
#
# Finally, we will look at how the decoding model coefficients are distributed
# across the scalp. We will attempt to recreate `figure 5`_ from
# :footcite:`CrosseEtAl2016`. The
# decoding model weights reflect the channels that contribute most toward
# reconstructing the stimulus signal, but are not directly interpretable in a
# neurophysiological sense. Here we also look at the coefficients obtained
# via an inversion procedure :footcite:`HaufeEtAl2014`, which have a more
# straightforward
# interpretation as their value (and sign) directly relates to the stimulus
# signal's strength (and effect direction).
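#
# Illustration only: a minimal NumPy sketch of the Haufe et al. (2014)
# inversion, assuming the standard formulation ``A = cov(X) @ w / var(X @ w)``
# for a single reconstructed output. ``sr.patterns_`` above is computed by MNE
# itself, so nothing below is needed for the analysis; all ``*_demo`` names are
# made up for the example.
rng_demo = np.random.RandomState(0)
X_demo = rng_demo.randn(1000, 5)     # 1000 samples, 5 hypothetical channels
w_demo = rng_demo.randn(5)           # a hypothetical decoding filter
s_demo = X_demo @ w_demo             # reconstructed (latent) signal
pattern_demo = np.cov(X_demo.T) @ w_demo / s_demo.var()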
time_plot = (-.140, -.125) # To average between two timepoints.
ix_plot = np.arange(np.argmin(np.abs(time_plot[0] - times)),
np.argmin(np.abs(time_plot[1] - times)))
fig, ax = plt.subplots(1, 2)
mne.viz.plot_topomap(np.mean(mean_coefs[:, ix_plot], axis=1),
pos=info, axes=ax[0], show=False,
vmin=-max_coef, vmax=max_coef)
ax[0].set(title="Model coefficients\nbetween delays %s and %s"
% (time_plot[0], time_plot[1]))
mne.viz.plot_topomap(np.mean(mean_patterns[:, ix_plot], axis=1),
pos=info, axes=ax[1],
show=False, vmin=-max_patterns, vmax=max_patterns)
ax[1].set(title="Inverse-transformed coefficients\nbetween delays %s and %s"
% (time_plot[0], time_plot[1]))
mne.viz.tight_layout()
###############################################################################
# References
# ----------
#
# .. footbibliography::
|
from weblate.fonts.models import FONT_STORAGE
from weblate.fonts.tasks import cleanup_font_files
from weblate.fonts.tests.utils import FontTestCase
from weblate.fonts.utils import configure_fontconfig
class FontModelTest(FontTestCase):
def test_save(self):
font = self.add_font()
self.assertEqual(font.family, "Droid Sans Fallback")
self.assertEqual(font.style, "Regular")
def test_cleanup(self):
configure_fontconfig()
cleanup_font_files()
# There should always be fonts.conf present
self.assertEqual(len(FONT_STORAGE.listdir(".")[1]), 1)
font = self.add_font()
self.assertEqual(len(FONT_STORAGE.listdir(".")[1]), 2)
cleanup_font_files()
self.assertEqual(len(FONT_STORAGE.listdir(".")[1]), 2)
font.delete()
self.assertEqual(len(FONT_STORAGE.listdir(".")[1]), 2)
cleanup_font_files()
self.assertEqual(len(FONT_STORAGE.listdir(".")[1]), 1)
|
import os.path
from datetime import timedelta
from appconf import AppConf
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import Prefetch, Q
from django.db.models.signals import m2m_changed, post_save
from django.dispatch import receiver
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext
from weblate.auth.models import User
from weblate.trans.models import Component, Project
from weblate.utils.decorators import disable_for_loaddata
from weblate.utils.fields import JSONField
from weblate.utils.stats import prefetch_stats
class LibreCheck:
def __init__(self, result, message, component=None):
self.result = result
self.message = message
self.component = component
def __bool__(self):
return self.result
def __str__(self):
return self.message
class PlanQuerySet(models.QuerySet):
def public(self, user=None):
"""List of public paid plans which are available."""
base = self.exclude(Q(price=0) & Q(yearly_price=0))
result = base.filter(public=True)
if user:
result |= base.filter(
public=False, billing__in=Billing.objects.for_user(user)
)
return result.distinct().order_by("price")
class Plan(models.Model):
name = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
price = models.IntegerField(default=0)
yearly_price = models.IntegerField(default=0)
limit_strings = models.IntegerField(default=0)
display_limit_strings = models.IntegerField(default=0)
limit_languages = models.IntegerField(default=0)
display_limit_languages = models.IntegerField(default=0)
limit_projects = models.IntegerField(default=0)
display_limit_projects = models.IntegerField(default=0)
change_access_control = models.BooleanField(default=True)
public = models.BooleanField(default=False)
objects = PlanQuerySet.as_manager()
def __str__(self):
return self.name
@property
def vat_price(self):
return round(self.price * settings.VAT_RATE, 2)
@property
def vat_yearly_price(self):
return round(self.yearly_price * settings.VAT_RATE, 2)
@property
def is_free(self):
return self.price == 0 and self.yearly_price == 0
class BillingManager(models.Manager):
def check_limits(self):
for bill in self.iterator():
bill.check_limits()
class BillingQuerySet(models.QuerySet):
def get_out_of_limits(self):
return self.filter(in_limits=False)
def get_unpaid(self):
return self.filter(paid=False, state=Billing.STATE_ACTIVE)
def get_valid(self):
return self.filter(
Q(in_limits=True)
& (
(Q(state=Billing.STATE_ACTIVE) & Q(paid=True))
| Q(state=Billing.STATE_TRIAL)
)
)
def for_user(self, user):
if user.is_superuser:
return self.all().order_by("state")
return (
self.filter(
Q(projects__in=user.projects_with_perm("billing.view")) | Q(owners=user)
)
.distinct()
.order_by("state")
)
def prefetch(self):
return self.prefetch_related(
"owners",
"owners__profile",
"plan",
Prefetch(
"projects",
queryset=Project.objects.order(),
to_attr="ordered_projects",
),
)
class Billing(models.Model):
STATE_ACTIVE = 0
STATE_TRIAL = 1
STATE_TERMINATED = 3
EXPIRING_STATES = (STATE_TRIAL,)
plan = models.ForeignKey(
Plan, on_delete=models.deletion.CASCADE, verbose_name=_("Billing plan")
)
projects = models.ManyToManyField(
Project, blank=True, verbose_name=_("Billed projects")
)
owners = models.ManyToManyField(User, blank=True, verbose_name=_("Billing owners"))
state = models.IntegerField(
choices=(
(STATE_ACTIVE, _("Active")),
(STATE_TRIAL, _("Trial")),
(STATE_TERMINATED, _("Terminated")),
),
default=STATE_ACTIVE,
verbose_name=_("Billing state"),
)
expiry = models.DateTimeField(
blank=True,
null=True,
default=None,
verbose_name=_("Trial expiry date"),
help_text="After expiry removal with 15 days grace period is scheduled.",
)
removal = models.DateTimeField(
blank=True,
null=True,
default=None,
verbose_name=_("Scheduled removal"),
help_text="This is automatically set after trial expiry.",
)
paid = models.BooleanField(default=True, verbose_name=_("Paid"), editable=False)
# Translators: Whether the package is inside actual (hard) limits
in_limits = models.BooleanField(
default=True, verbose_name=_("In limits"), editable=False
)
# Payment detailed information, used for integration
# with payment processor
payment = JSONField(editable=False, default={})
objects = BillingManager.from_queryset(BillingQuerySet)()
def __str__(self):
projects = self.projects_display
owners = self.owners.order()
if projects:
base = projects
elif owners:
base = ", ".join(x.get_author_name(False) for x in owners)
else:
base = "Unassigned"
trial = ", trial" if self.is_trial else ""
return f"{base} ({self.plan}{trial})"
def save(
self,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
skip_limits=False,
):
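        # check_limits() below can modify billing fields; when a partial update
        # is requested, make sure those fields end up in update_fields too.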
if not skip_limits and self.pk:
if self.check_limits(save=False) and update_fields:
update_fields = set(update_fields)
update_fields.update(
("state", "expiry", "removal", "paid", "in_limits")
)
super().save(
force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields,
)
def get_absolute_url(self):
return reverse("billing-detail", kwargs={"pk": self.pk})
@cached_property
def ordered_projects(self):
return self.projects.order()
@cached_property
def all_projects(self):
return prefetch_stats(self.ordered_projects)
@cached_property
def projects_display(self):
return ", ".join(str(x) for x in self.all_projects)
@property
def is_trial(self):
return self.state == Billing.STATE_TRIAL
@property
def is_terminated(self):
return self.state == Billing.STATE_TERMINATED
@property
def is_libre_trial(self):
return self.is_trial and self.plan.price == 0
@cached_property
def can_be_paid(self):
if self.state in (Billing.STATE_ACTIVE, Billing.STATE_TRIAL):
return True
return self.count_projects > 0
@cached_property
def monthly_changes(self):
return sum(project.stats.monthly_changes for project in self.all_projects)
monthly_changes.short_description = _("Changes in last month")
@cached_property
def total_changes(self):
return sum(project.stats.total_changes for project in self.all_projects)
total_changes.short_description = _("Number of changes")
@cached_property
def count_projects(self):
return len(self.all_projects)
def display_projects(self):
return f"{self.count_projects} / {self.plan.display_limit_projects}"
display_projects.short_description = _("Projects")
@cached_property
def count_strings(self):
return sum(p.stats.source_strings for p in self.all_projects)
def display_strings(self):
return f"{self.count_strings} / {self.plan.display_limit_strings}"
display_strings.short_description = _("Source strings")
@cached_property
def count_words(self):
return sum(p.stats.source_words for p in self.all_projects)
@cached_property
def hosted_words(self):
return sum(p.stats.all_words for p in self.all_projects)
def display_words(self):
return f"{self.count_words}"
display_words.short_description = _("Source words")
@cached_property
def count_languages(self):
if not self.all_projects:
return 0
return max(p.stats.languages for p in self.all_projects)
def display_languages(self):
return f"{self.count_languages} / {self.plan.display_limit_languages}"
display_languages.short_description = _("Languages")
def flush_cache(self):
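        # Drop cached count_* properties so limit checks recompute them from
        # fresh project stats.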
keys = list(self.__dict__.keys())
for key in keys:
if key.startswith("count_"):
del self.__dict__[key]
def check_in_limits(self, plan=None):
if plan is None:
plan = self.plan
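        # A limit of 0 is treated as unlimited.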
return (
(plan.limit_projects == 0 or self.count_projects <= plan.limit_projects)
and (plan.limit_strings == 0 or self.count_strings <= plan.limit_strings)
and (
plan.limit_languages == 0
or self.count_languages <= plan.limit_languages
)
)
def check_expiry(self):
return (
self.state in Billing.EXPIRING_STATES
and self.expiry
and self.expiry < timezone.now()
)
def unit_count(self):
return sum(p.stats.all for p in self.all_projects)
unit_count.short_description = _("Number of strings")
def last_invoice(self):
try:
invoice = self.invoice_set.order_by("-start")[0]
return f"{invoice.start} - {invoice.end}"
except IndexError:
return _("N/A")
last_invoice.short_description = _("Last invoice")
def in_display_limits(self, plan=None):
if plan is None:
plan = self.plan
return (
(
plan.display_limit_projects == 0
or self.count_projects <= plan.display_limit_projects
)
and (
plan.display_limit_strings == 0
or self.count_strings <= plan.display_limit_strings
)
and (
plan.display_limit_languages == 0
or self.count_languages <= plan.display_limit_languages
)
)
in_display_limits.boolean = True
# Translators: Whether the package is inside displayed (soft) limits
in_display_limits.short_description = _("In display limits")
def check_payment_status(self, now: bool = False):
"""Check current payment status.
Compared to paid attribute, this does not include grace period.
"""
end = timezone.now()
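        # When not restricted to right now, extend the accepted window by the
        # grace period so recently-ended invoices still count.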
if not now:
end -= timedelta(days=settings.BILLING_GRACE_PERIOD)
return (
(self.plan.is_free and self.state == Billing.STATE_ACTIVE)
or self.invoice_set.filter(end__gte=end).exists()
or self.state == Billing.STATE_TRIAL
)
def check_limits(self, save=True):
self.flush_cache()
in_limits = self.check_in_limits()
paid = self.check_payment_status()
modified = False
if self.check_expiry():
self.expiry = None
self.removal = timezone.now() + timedelta(
days=settings.BILLING_REMOVAL_PERIOD
)
modified = True
if self.state not in Billing.EXPIRING_STATES and self.expiry:
self.expiry = None
modified = True
if self.in_limits != in_limits or self.paid != paid:
self.in_limits = in_limits
self.paid = paid
modified = True
if save and modified:
self.save(skip_limits=True)
return modified
def is_active(self):
return self.state in (Billing.STATE_ACTIVE, Billing.STATE_TRIAL)
def get_notify_users(self):
users = self.owners.distinct()
for project in self.projects.iterator():
users |= User.objects.having_perm("billing.view", project)
return users.exclude(is_superuser=True)
def _get_libre_checklist(self):
yield LibreCheck(
self.count_projects == 1,
ngettext("Contains %d project", "Contains %d projects", self.count_projects)
% self.count_projects,
)
for project in self.all_projects:
yield LibreCheck(
bool(project.web),
mark_safe(
'<a href="{0}">{1}</a>, <a href="{2}">{2}</a>'.format(
escape(project.get_absolute_url()),
escape(project),
escape(project.web),
)
),
)
components = Component.objects.filter(project__in=self.all_projects)
yield LibreCheck(
len(components) > 0,
ngettext("Contains %d component", "Contains %d components", len(components))
% len(components),
)
for component in components:
yield LibreCheck(
component.libre_license,
mark_safe(
"""
<a href="{0}">{1}</a>,
<a href="{2}">{3}</a>,
<a href="{4}">{4}</a>,
{5}""".format(
escape(component.get_absolute_url()),
escape(component.name),
escape(component.license_url or "#"),
escape(component.get_license_display() or _("Missing license")),
escape(component.repo),
escape(component.get_file_format_display()),
)
),
component=component,
)
@cached_property
def libre_checklist(self):
return list(self._get_libre_checklist())
@property
def valid_libre(self):
return all(self.libre_checklist)
class InvoiceQuerySet(models.QuerySet):
def order(self):
return self.order_by("-start")
class Invoice(models.Model):
CURRENCY_EUR = 0
CURRENCY_BTC = 1
CURRENCY_USD = 2
CURRENCY_CZK = 3
billing = models.ForeignKey(Billing, on_delete=models.deletion.CASCADE)
start = models.DateField()
end = models.DateField()
amount = models.FloatField()
currency = models.IntegerField(
choices=(
(CURRENCY_EUR, "EUR"),
(CURRENCY_BTC, "mBTC"),
(CURRENCY_USD, "USD"),
(CURRENCY_CZK, "CZK"),
),
default=CURRENCY_EUR,
)
ref = models.CharField(blank=True, max_length=50)
note = models.TextField(blank=True)
# Payment detailed information, used for integration
# with payment processor
payment = JSONField(editable=False, default={})
objects = InvoiceQuerySet.as_manager()
def __str__(self):
return "{} - {}: {}".format(
self.start, self.end, self.billing if self.billing_id else None
)
@cached_property
def filename(self):
if self.ref:
return f"{self.ref}.pdf"
return None
@cached_property
def full_filename(self):
return os.path.join(settings.INVOICE_PATH, self.filename)
@cached_property
def filename_valid(self):
return os.path.exists(self.full_filename)
def clean(self):
if self.end is None or self.start is None:
return
if self.end <= self.start:
raise ValidationError("Start has be to before end!")
if not self.billing_id:
return
overlapping = Invoice.objects.filter(
(Q(start__lte=self.end) & Q(end__gte=self.end))
| (Q(start__lte=self.start) & Q(end__gte=self.start))
).filter(billing=self.billing)
if self.pk:
overlapping = overlapping.exclude(pk=self.pk)
if overlapping.exists():
raise ValidationError(
"Overlapping invoices exist: {}".format(
", ".join(str(x) for x in overlapping)
)
)
@receiver(post_save, sender=Component)
@receiver(post_save, sender=Project)
@receiver(post_save, sender=Plan)
@disable_for_loaddata
def update_project_bill(sender, instance, **kwargs):
if isinstance(instance, Component):
instance = instance.project
for billing in instance.billing_set.iterator():
billing.check_limits()
@receiver(post_save, sender=Invoice)
@disable_for_loaddata
def update_invoice_bill(sender, instance, **kwargs):
instance.billing.check_limits()
@receiver(m2m_changed, sender=Billing.projects.through)
@disable_for_loaddata
def change_billing_projects(sender, instance, action, **kwargs):
if not action.startswith("post_"):
return
instance.check_limits()
class WeblateConf(AppConf):
GRACE_PERIOD = 15
REMOVAL_PERIOD = 15
class Meta:
prefix = "BILLING"
|
from typing import Optional
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import PresenceData, XboxUpdateCoordinator
from .const import DOMAIN
class XboxBaseSensorEntity(CoordinatorEntity):
"""Base Sensor for the Xbox Integration."""
def __init__(self, coordinator: XboxUpdateCoordinator, xuid: str, attribute: str):
"""Initialize Xbox binary sensor."""
super().__init__(coordinator)
self.xuid = xuid
self.attribute = attribute
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return f"{self.xuid}_{self.attribute}"
@property
def data(self) -> Optional[PresenceData]:
"""Return coordinator data for this console."""
return self.coordinator.data.presence.get(self.xuid)
@property
    def name(self) -> Optional[str]:
"""Return the name of the sensor."""
if not self.data:
return None
if self.attribute == "online":
return self.data.gamertag
attr_name = " ".join([part.title() for part in self.attribute.split("_")])
return f"{self.data.gamertag} {attr_name}"
@property
    def entity_picture(self) -> Optional[str]:
"""Return the gamer pic."""
if not self.data:
return None
return self.data.display_pic.replace("&mode=Padding", "")
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self.attribute == "online"
@property
def device_info(self):
"""Return a device description for device registry."""
return {
"identifiers": {(DOMAIN, "xbox_live")},
"name": "Xbox Live",
"manufacturer": "Microsoft",
"model": "Xbox Live",
"entry_type": "service",
}
|
from aiohttp import web
import pytest
from homeassistant.components.cloud import DOMAIN
from homeassistant.components.cloud.client import CloudClient
from homeassistant.components.cloud.const import PREF_ENABLE_ALEXA, PREF_ENABLE_GOOGLE
from homeassistant.const import CONTENT_TYPE_JSON
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from . import mock_cloud, mock_cloud_prefs
from tests.async_mock import AsyncMock, MagicMock, patch
from tests.components.alexa import test_smart_home as test_alexa
@pytest.fixture
def mock_cloud_inst():
"""Mock cloud class."""
return MagicMock(subscription_expired=False)
async def test_handler_alexa(hass):
"""Test handler Alexa."""
hass.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
hass.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
await mock_cloud(
hass,
{
"alexa": {
"filter": {"exclude_entities": "switch.test2"},
"entity_config": {
"switch.test": {
"name": "Config name",
"description": "Config description",
"display_categories": "LIGHT",
}
},
}
},
)
mock_cloud_prefs(hass)
cloud = hass.data["cloud"]
resp = await cloud.client.async_alexa_message(
test_alexa.get_new_request("Alexa.Discovery", "Discover")
)
endpoints = resp["event"]["payload"]["endpoints"]
assert len(endpoints) == 1
device = endpoints[0]
assert device["description"] == "Config description via Home Assistant"
assert device["friendlyName"] == "Config name"
assert device["displayCategories"] == ["LIGHT"]
assert device["manufacturerName"] == "Home Assistant"
async def test_handler_alexa_disabled(hass, mock_cloud_fixture):
"""Test handler Alexa when user has disabled it."""
mock_cloud_fixture._prefs[PREF_ENABLE_ALEXA] = False
cloud = hass.data["cloud"]
resp = await cloud.client.async_alexa_message(
test_alexa.get_new_request("Alexa.Discovery", "Discover")
)
assert resp["event"]["header"]["namespace"] == "Alexa"
assert resp["event"]["header"]["name"] == "ErrorResponse"
assert resp["event"]["payload"]["type"] == "BRIDGE_UNREACHABLE"
async def test_handler_google_actions(hass):
"""Test handler Google Actions."""
hass.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
hass.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
hass.states.async_set("group.all_locks", "on", {"friendly_name": "Evil locks"})
await mock_cloud(
hass,
{
"google_actions": {
"filter": {"exclude_entities": "switch.test2"},
"entity_config": {
"switch.test": {
"name": "Config name",
"aliases": "Config alias",
"room": "living room",
}
},
}
},
)
mock_cloud_prefs(hass)
cloud = hass.data["cloud"]
reqid = "5711642932632160983"
data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
with patch(
"hass_nabucasa.Cloud._decode_claims",
return_value={"cognito:username": "myUserName"},
):
await cloud.client.get_google_config()
resp = await cloud.client.async_google_message(data)
assert resp["requestId"] == reqid
payload = resp["payload"]
assert payload["agentUserId"] == "myUserName"
devices = payload["devices"]
assert len(devices) == 1
device = devices[0]
assert device["id"] == "switch.test"
assert device["name"]["name"] == "Config name"
assert device["name"]["nicknames"] == ["Config name", "Config alias"]
assert device["type"] == "action.devices.types.SWITCH"
assert device["roomHint"] == "living room"
async def test_handler_google_actions_disabled(hass, mock_cloud_fixture):
"""Test handler Google Actions when user has disabled it."""
mock_cloud_fixture._prefs[PREF_ENABLE_GOOGLE] = False
with patch("hass_nabucasa.Cloud.start"):
assert await async_setup_component(hass, "cloud", {})
reqid = "5711642932632160983"
data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
cloud = hass.data["cloud"]
resp = await cloud.client.async_google_message(data)
assert resp["requestId"] == reqid
assert resp["payload"]["errorCode"] == "deviceTurnedOff"
async def test_webhook_msg(hass, caplog):
"""Test webhook msg."""
with patch("hass_nabucasa.Cloud.start"):
setup = await async_setup_component(hass, "cloud", {"cloud": {}})
assert setup
cloud = hass.data["cloud"]
await cloud.client.prefs.async_initialize()
await cloud.client.prefs.async_update(
cloudhooks={
"mock-webhook-id": {
"webhook_id": "mock-webhook-id",
"cloudhook_id": "mock-cloud-id",
},
"no-longere-existing": {
"webhook_id": "no-longere-existing",
"cloudhook_id": "mock-nonexisting-id",
},
}
)
received = []
async def handler(hass, webhook_id, request):
"""Handle a webhook."""
received.append(request)
return web.json_response({"from": "handler"})
hass.components.webhook.async_register("test", "Test", "mock-webhook-id", handler)
response = await cloud.client.async_webhook_message(
{
"cloudhook_id": "mock-cloud-id",
"body": '{"hello": "world"}',
"headers": {"content-type": CONTENT_TYPE_JSON},
"method": "POST",
"query": None,
}
)
assert response == {
"status": 200,
"body": '{"from": "handler"}',
"headers": {"Content-Type": CONTENT_TYPE_JSON},
}
assert len(received) == 1
assert await received[0].json() == {"hello": "world"}
# Non existing webhook
caplog.clear()
response = await cloud.client.async_webhook_message(
{
"cloudhook_id": "mock-nonexisting-id",
"body": '{"nonexisting": "payload"}',
"headers": {"content-type": CONTENT_TYPE_JSON},
"method": "POST",
"query": None,
}
)
assert response == {
"status": 200,
"body": None,
"headers": {"Content-Type": "application/octet-stream"},
}
assert (
"Received message for unregistered webhook no-longere-existing from cloud"
in caplog.text
)
assert '{"nonexisting": "payload"}' in caplog.text
async def test_google_config_expose_entity(hass, mock_cloud_setup, mock_cloud_login):
"""Test Google config exposing entity method uses latest config."""
cloud_client = hass.data[DOMAIN].client
state = State("light.kitchen", "on")
gconf = await cloud_client.get_google_config()
assert gconf.should_expose(state)
await cloud_client.prefs.async_update_google_entity_config(
entity_id="light.kitchen", should_expose=False
)
assert not gconf.should_expose(state)
async def test_google_config_should_2fa(hass, mock_cloud_setup, mock_cloud_login):
"""Test Google config disabling 2FA method uses latest config."""
cloud_client = hass.data[DOMAIN].client
gconf = await cloud_client.get_google_config()
state = State("light.kitchen", "on")
assert gconf.should_2fa(state)
await cloud_client.prefs.async_update_google_entity_config(
entity_id="light.kitchen", disable_2fa=True
)
assert not gconf.should_2fa(state)
async def test_set_username(hass):
"""Test we set username during login."""
prefs = MagicMock(
alexa_enabled=False,
google_enabled=False,
async_set_username=AsyncMock(return_value=None),
)
client = CloudClient(hass, prefs, None, {}, {})
client.cloud = MagicMock(is_logged_in=True, username="mock-username")
await client.logged_in()
assert len(prefs.async_set_username.mock_calls) == 1
assert prefs.async_set_username.mock_calls[0][1][0] == "mock-username"
|
import os
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker.linux_benchmarks import ping_benchmark
flags.FLAGS.mark_as_parsed()
class TestGenerateJobFileString(unittest.TestCase):
def testRunCountTest(self):
vm_spec = mock.MagicMock(spec=benchmark_spec.BenchmarkSpec)
vm0 = mock.MagicMock()
vm1 = mock.MagicMock()
vm_spec.vms = [vm0, vm1]
path = os.path.join(os.path.dirname(__file__), '..', 'data', 'ping.out')
    with open(path, 'r') as outfile:
      pingstdout = outfile.read()
for vm in vm_spec.vms:
vm.RemoteCommand.side_effect = [(pingstdout, ' '), (pingstdout, ' ')]
ping_benchmark.Prepare(vm_spec)
samples = ping_benchmark.Run(vm_spec)
ping_benchmark.Cleanup(vm_spec)
self.assertEqual(vm_spec.vms[0].RemoteCommand.call_count, 2)
self.assertEqual(vm_spec.vms[1].RemoteCommand.call_count, 2)
self.assertEqual(len(samples), 16)
if __name__ == '__main__':
unittest.main()
|
from pygal.adapters import none_to_zero
from pygal.graph.line import Line
class StackedLine(Line):
"""Stacked Line graph class"""
_adapters = [none_to_zero]
def __init__(self, *args, **kwargs):
"""Custom variable initialization"""
self._previous_line = None
super(StackedLine, self).__init__(*args, **kwargs)
def _value_format(self, value, serie, index):
"""
Display value and cumulation
"""
sum_ = serie.points[index][1]
if serie in self.series and (
self.stack_from_top
and self.series.index(serie) == self._order - 1
or not self.stack_from_top and self.series.index(serie) == 0):
return super(StackedLine, self)._value_format(value)
return '%s (+%s)' % (self._y_format(sum_), self._y_format(value))
def _fill(self, values):
"""Add extra values to fill the line"""
if not self._previous_line:
self._previous_line = values
return super(StackedLine, self)._fill(values)
new_values = values + list(reversed(self._previous_line))
self._previous_line = values
return new_values
def _points(self, x_pos):
"""
Convert given data values into drawable points (x, y)
and interpolated points if interpolate option is specified
"""
for series_group in (self.series, self.secondary_series):
accumulation = [0] * self._len
for serie in series_group[::-1 if self.stack_from_top else 1]:
accumulation = list(map(sum, zip(accumulation, serie.values)))
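                # e.g. with two series [1, 2, 3] and [4, 5, 6] (default
                # bottom-up order), the second serie is drawn at [5, 7, 9]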
serie.points = [(x_pos[i], v)
for i, v in enumerate(accumulation)]
if serie.points and self.interpolate:
serie.interpolated = self._interpolate(x_pos, accumulation)
else:
serie.interpolated = []
def _plot(self):
"""Plot stacked serie lines and stacked secondary lines"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.line(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.line(serie, True)
|
def get_repo_name(hook_data):
return hook_data.get('repository', {}).get('name', '') \
or hook_data.get('push_data', {}).get('repository', {}).get('name', '')
# repo branch
def get_repo_branch(hook_data):
    branch = hook_data.get('ref', '')  # github, gitlab
if not branch:
branch = hook_data.get('push_data', {}).get('ref', '')
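    # e.g. a push payload carries a ref like "refs/heads/master"; only the
    # trailing branch name ("master") is returned below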
if '/' in branch:
return branch[branch.rfind("/") + 1:]
return branch
# push user name
def get_push_name(hook_data):
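    # e.g. a GitHub push payload nests the user under "pusher":
    # {"pusher": {"name": "...", "email": "..."}}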
    uid = hook_data.get('pusher', {}).get('name', None)  # GitHub data format
if uid:
return uid
    uid = hook_data.get('user_name', None)  # GitLab format
if uid:
return uid
    uid = hook_data.get('pusher', {}).get('username', None)  # Gogs format
if uid:
return uid
uid = hook_data\
.get('push_data', {})\
        .get('user', {}).get('name', None)  # GitOSC data format
if uid:
return uid
return ''
# push user email
def get_push_email(hook_data):
    uid = hook_data.get('pusher', {}).get('email', None)  # GitHub data format
if uid:
return uid
    uid = hook_data.get('user_email', None)  # GitLab format
if uid:
return uid
uid = hook_data\
.get('push_data', {})\
        .get('user', {}).get('email', None)  # GitOSC data format
if uid:
return uid
return ''
|
import time
import six
from kalliope.core.NeuronModule import NeuronModule, MissingParameterException
class Sleep(NeuronModule):
def __init__(self, **kwargs):
super(Sleep, self).__init__(**kwargs)
self.seconds = kwargs.get('seconds', None)
# check parameters
if self._is_parameters_ok():
if isinstance(self.seconds, str) or \
isinstance(self.seconds, six.text_type):
self.seconds = float(self.seconds)
time.sleep(self.seconds)
def _is_parameters_ok(self):
"""
Check if received parameters are ok to perform operations in the neuron
:return: true if parameters are ok, raise an exception otherwise
.. raises:: MissingParameterException
"""
if self.seconds is None:
raise MissingParameterException("You must set a number of seconds as parameter")
return True
|
import enum
from gi.repository import GtkSource
from meld.conf import _
class ActionMode(enum.IntEnum):
"""Action mode for chunk change actions"""
Replace = 0
Delete = 1
Insert = 2
class ChunkAction(enum.Enum):
delete = 'delete'
replace = 'replace'
copy_down = 'copy_down'
copy_up = 'copy_up'
class FileComparisonMode(enum.Enum):
AutoMerge = 'AutoMerge'
Compare = 'Compare'
NEWLINES = {
GtkSource.NewlineType.LF: ('\n', _("UNIX (LF)")),
GtkSource.NewlineType.CR_LF: ('\r\n', _("DOS/Windows (CR-LF)")),
GtkSource.NewlineType.CR: ('\r', _("Mac OS (CR)")),
}
FILE_FILTER_ACTION_FORMAT = 'folder-custom-filter-{}'
TEXT_FILTER_ACTION_FORMAT = 'text-custom-filter-{}'
#: Sentinel value for mtimes on files that don't exist.
MISSING_TIMESTAMP = -2147483648
|
from aqualogic.core import States
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import DOMAIN, UPDATE_TOPIC
SWITCH_TYPES = {
"lights": "Lights",
"filter": "Filter",
"filter_low_speed": "Filter Low Speed",
"aux_1": "Aux 1",
"aux_2": "Aux 2",
"aux_3": "Aux 3",
"aux_4": "Aux 4",
"aux_5": "Aux 5",
"aux_6": "Aux 6",
"aux_7": "Aux 7",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SWITCH_TYPES)): vol.All(
cv.ensure_list, [vol.In(SWITCH_TYPES)]
)
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the switch platform."""
switches = []
processor = hass.data[DOMAIN]
for switch_type in config[CONF_MONITORED_CONDITIONS]:
switches.append(AquaLogicSwitch(processor, switch_type))
async_add_entities(switches)
class AquaLogicSwitch(SwitchEntity):
"""Switch implementation for the AquaLogic component."""
def __init__(self, processor, switch_type):
"""Initialize switch."""
self._processor = processor
self._type = switch_type
self._state_name = {
"lights": States.LIGHTS,
"filter": States.FILTER,
"filter_low_speed": States.FILTER_LOW_SPEED,
"aux_1": States.AUX_1,
"aux_2": States.AUX_2,
"aux_3": States.AUX_3,
"aux_4": States.AUX_4,
"aux_5": States.AUX_5,
"aux_6": States.AUX_6,
"aux_7": States.AUX_7,
}[switch_type]
@property
def name(self):
"""Return the name of the switch."""
return f"AquaLogic {SWITCH_TYPES[self._type]}"
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def is_on(self):
"""Return true if device is on."""
panel = self._processor.panel
if panel is None:
return False
state = panel.get_state(self._state_name)
return state
def turn_on(self, **kwargs):
"""Turn the device on."""
panel = self._processor.panel
if panel is None:
return
panel.set_state(self._state_name, True)
def turn_off(self, **kwargs):
"""Turn the device off."""
panel = self._processor.panel
if panel is None:
return
panel.set_state(self._state_name, False)
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
UPDATE_TOPIC, self.async_write_ha_state
)
)
|
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from .const import DEFAULT_RADIUS, DEFAULT_SCAN_INTERVAL, DOMAIN
@callback
def configured_instances(hass):
"""Return a set of configured GeoNet NZ Volcano instances."""
return {
f"{entry.data[CONF_LATITUDE]}, {entry.data[CONF_LONGITUDE]}"
for entry in hass.config_entries.async_entries(DOMAIN)
}
class GeonetnzVolcanoFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a GeoNet NZ Volcano config flow."""
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def _show_form(self, errors=None):
"""Show the form to the user."""
data_schema = vol.Schema(
{vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): cv.positive_int}
)
return self.async_show_form(
step_id="user", data_schema=data_schema, errors=errors or {}
)
async def async_step_import(self, import_config):
"""Import a config entry from configuration.yaml."""
return await self.async_step_user(import_config)
async def async_step_user(self, user_input=None):
"""Handle the start of the config flow."""
if not user_input:
return await self._show_form()
latitude = user_input.get(CONF_LATITUDE, self.hass.config.latitude)
user_input[CONF_LATITUDE] = latitude
longitude = user_input.get(CONF_LONGITUDE, self.hass.config.longitude)
user_input[CONF_LONGITUDE] = longitude
identifier = f"{user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}"
if identifier in configured_instances(self.hass):
return await self._show_form({"base": "already_configured"})
if self.hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL:
user_input[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_IMPERIAL
else:
user_input[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_METRIC
scan_interval = user_input.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
user_input[CONF_SCAN_INTERVAL] = scan_interval.seconds
return self.async_create_entry(title=identifier, data=user_input)
|
from __future__ import print_function
import argparse
import os
import sys
from git.gitutils import (GitError, _get_repo, any_one, count_commits_between, find_revision_sha, get_remote_tracking_branch)
from six import iteritems
from six.moves import input
def branch(args):
repo = _get_repo()
parser = argparse.ArgumentParser(prog='git branch', description="List, create, or delete branches")
#list
list_grp = parser.add_mutually_exclusive_group(required=False)
    list_grp.add_argument('-r', '--remotes', action='store_true', help='list or delete remote tracking branches')
list_grp.add_argument('-a', '--all', action='store_true', help='list both remote and local branches')
# move type commands
move_type = parser.add_mutually_exclusive_group(required=False)
move_type.add_argument(
'-m',
'--move',
nargs='+',
metavar=('[oldbranch]',
'newbranch'),
help='move/rename oldbranch or HEAD'
)
move_type.add_argument(
'-M',
nargs='+',
metavar=('[oldbranch]',
'newbranch'),
help='move/rename even if branch already exists'
)
# delete type commands
delete_flags = parser.add_mutually_exclusive_group(required=False)
delete_flags.add_argument(
'-d',
'--delete',
nargs=1,
metavar=('branchname'),
        help='delete branchname; TODO: branch must be fully merged with upstream'
)
delete_flags.add_argument('-D', nargs=1, metavar=('branchname'), help='Delete a branch irrespective of its merged status.')
# misc flags
parser.add_argument(
'-v',
'--verbose',
action='count',
help='When in list mode, show sha1 and commit subject line for each head, along with relationship to upstream branch (if any). If given twice, print the name of the upstream branch, as well (see also git remote show <remote>).'
)
parser.add_argument(
'-f',
'--force',
action='store_true',
help='Reset <branchname> to <startpoint> if <branchname> exists already. Without -f git branch refuses to change an existing branch.'
)
abbrevgrp = parser.add_mutually_exclusive_group()
abbrevgrp.add_argument(
'--abbrev',
action='store',
nargs='?',
help='set number of characters to display in sha',
type=int,
default=7
)
abbrevgrp.add_argument('--no-abbrev', action='store_const', help='do not abbreviate sha ', const=40, dest='abbrev')
track_flags = parser.add_mutually_exclusive_group(required=False)
track_flags.add_argument(
'--set-upstream',
action='store',
nargs=2,
metavar=('branchname',
'upstream'),
help='set branchname to track upstream'
)
track_flags.add_argument(
'--no-track',
nargs='+',
metavar=('branchname',
'startpoint'),
        help="set existing branch to not track, or create a new branch that doesn't track"
)
# add_branch
parser.add_argument('branchname', nargs='?')
parser.add_argument('startpoint', nargs='?')
parser.add_argument('--edit_description', action='store', nargs='?', metavar='branchname', const=repo.active_branch)
result = parser.parse_args(args)
# combine args
edit_description = result.edit_description
delete_branchname = result.delete or result.D
move_branchname = result.move or result.M
no_track = result.no_track
add_branchname = (result.branchname, result.startpoint or repo.active_branch)
set_upstream = result.set_upstream
force = result.force or result.D or result.M
mutual_exclusive_list = (delete_branchname, move_branchname, edit_description, result.branchname, set_upstream, no_track)
list_flag = not any_one(mutual_exclusive_list)
if not any_one((list_flag, ) + mutual_exclusive_list):
        raise GitError('too many options specified.\n' + parser.format_help())
if list_flag:
branch_list(result)
elif delete_branchname:
delete_branch(delete_branchname[0], force, result.remotes, result.verbose)
elif move_branchname:
move_branch(move_branchname, force, result.verbose)
elif add_branchname[0]:
create_branch(add_branchname[0], add_branchname[1], force, False)
elif edit_description:
edit_branch_description(edit_description)
elif set_upstream:
add_tracking(set_upstream[0], *(['origin'] + set_upstream[1].split('/'))[-2:])
print(set_upstream[0], format_tracking_branch_desc(repo, set_upstream[0]))
elif no_track:
if len(no_track) == 1:
remove_tracking(no_track[0])
else:
create_branch(no_track[0], no_track[1], force, True)
#print result
def format_tracking_branch_desc(repo, branchname):
try:
remote = get_remote_tracking_branch(repo, branchname)
mysha = repo.branches[branchname]
theirsha = repo.remote_branches[remote]
ahead, behind = count_commits_between(repo, mysha, theirsha)
return '+{}/-{} relative to {} ({})'.format(ahead, behind, remote, theirsha)
except KeyError:
return ''
def edit_branch_description(branchname, description=None):
description = description or input('enter description:')
config = _get_repo().repo.get_config()
    if branchname not in _get_repo().branches:
        raise GitError('{} is not an existing branch'.format(branchname))
config.set(('branch', branchname), 'description', description)
config.write_to_path()
def branch_list(result):
# TODO: tracking branches
N = result.abbrev
repo = _get_repo()
if not result.remotes:
for key, value in iteritems(repo.branches):
dispval = value[0:N] #todo, --abbrev=n
commitmsg = (repo[value].message if result.verbose else '').strip()
tracking = get_remote_tracking_branch(repo, key)
trackmsg = ''
diffmsg = trackingsha = ''
if tracking:
trackingsha = repo.remote_branches[tracking]
ahead, behind = count_commits_between(repo, value, trackingsha)
                diffmsg = '+{}/-{} compared to'.format(ahead, behind) if result.verbose else ''
trackmsg = '[{} {} {}]'.format(diffmsg, tracking, trackingsha[0:N])
print(' '.join([('* ' if repo.active_branch == key else '') + key, dispval, commitmsg]))
if result.remotes or result.all:
for key, value in iteritems(repo.remote_branches):
dispval = value[0:N] #todo, --abbrev=n
commitmsg = (repo[value].message if result.verbose else '').strip()
print(' '.join([('* ' if repo.active_branch == key else '') + key, dispval, commitmsg]))
def delete_branch(delete_branchname, force=False, remote=None, verbose=0):
    '''Delete a branch.

    If remote=True, look in refs/remotes, otherwise check refs/heads.
    For a local branch, check whether it has a remote tracking branch and only
    allow deletion if the upstream has been merged.
    '''
print('delete', delete_branchname, force, remote)
repo = _get_repo()
if remote:
qualified_branch = repo._format_ref_remote(delete_branchname)
else:
qualified_branch = repo._format_ref_branch(delete_branchname)
    if delete_branchname == repo.active_branch:
        raise GitError('Cannot delete active branch.')
remote_tracking_branch = get_remote_tracking_branch(repo, delete_branchname)
if remote_tracking_branch and not force:
#see if local is ahead of remote
commits_ahead = count_commits_between(repo,
repo.refs[qualified_branch],
repo.remote_branches[remote_tracking_branch])[0]
if commits_ahead:
raise GitError(
'{0} is ahead of {1} by {2} commits.\nuse git branch -D\n'.format(
delete_branchname,
remote_tracking_branch,
commits_ahead
)
)
print('removing {} (was {})\n'.format(delete_branchname, repo.refs[qualified_branch]))
del repo.repo.refs[qualified_branch]
if not remote:
remove_tracking(delete_branchname)
#todo reflog
def move_branch(movebranch, force, verbose):
'''move oldbranch (or active_branch) to newbranch. update config if needed'''
repo = _get_repo()
oldbranch, newbranch = ([repo.active_branch] + movebranch)[-2:]
if oldbranch not in repo.branches:
raise GitError('{} does not exist in branches'.format(oldbranch))
if newbranch in repo.branches and not force:
raise GitError('{} already exists. use -M to force overwriting'.format(newbranch))
if newbranch != oldbranch:
print('Renaming {} ({}) to {}\n'.format(oldbranch, repo.branches[oldbranch], newbranch))
repo.add_ref(repo._format_ref_branch(newbranch), repo._format_ref_branch(oldbranch))
del repo.repo.refs[repo._format_ref_branch(oldbranch)]
#todo: reflog
if oldbranch == repo.active_branch:
repo.active_branch = newbranch
def remove_tracking(branchname):
'''remove branch entry from config'''
# Get repo's config
config = _get_repo().repo.get_config()
try:
del config[('branch', branchname)]['remote']
del config[('branch', branchname)]['merge']
if not config[('branch', branchname)]:
del config[('branch', branchname)]
except KeyError:
pass
# Write to disk
config.write_to_path()
def add_tracking(branchname, remote, remotebranch):
# Get repo's config
config = _get_repo().repo.get_config()
# Add new entries for remote
config.set(('branch', branchname), 'remote', remote)
config.set(('branch', branchname), 'merge', 'refs/heads/' + remotebranch)
# Write to disk
config.write_to_path()
def create_branch(new_branch, base_rev, force=False, no_track=False):
"""Try creating a new branch which tracks the given remote
if such a branch does not exist then branch off a local branch
"""
repo = _get_repo()
# Already exists
if new_branch in repo.branches:
if not force:
raise GitError("branch %s already exists\n use --force to overwrite anyway" % new_branch)
# fork with new sha
new_ref = repo._format_ref_branch(new_branch)
base_sha = find_revision_sha(repo, base_rev)
repo.repo.refs[new_ref] = base_sha
#handle tracking, only if this was a remote
tracking, remote_branch = (['origin'] + base_rev.split('/'))[-2:] #branch-> origin/branch. remote/branch stays as is
qualified_remote_branch = os.path.sep.join([tracking, remote_branch])
if qualified_remote_branch in repo.remote_branches and not base_rev in repo.branches:
if not no_track:
add_tracking(new_branch, tracking, remote_branch)
else:
remove_tracking(new_branch)
#todo reflog
return new_ref
def test():
import os
os.chdir('../..')
def run(cmd):
print('branch ', cmd)
branch(cmd.split())
print('')
#run('-d test')
run('')
run('-f test origin/master')
run('')
print('delete test: should delete')
run('-d test')
print('set to remote')
run('test origin/master')
run('-v')
try:
run('test dev')
except GitError:
pass
else:
print('did not error!')
run('-f test dev')
run('-v')
run('-m test test2')
if __name__ == '__main__':
branch(sys.argv[1:])
|
import datetime
import json
from homeassistant.components.mqtt import CONF_QOS, CONF_STATE_TOPIC, DEFAULT_QOS
import homeassistant.components.sensor as sensor
from homeassistant.const import CONF_NAME, CONF_PLATFORM
from homeassistant.setup import async_setup_component
from homeassistant.util import dt
from tests.async_mock import patch
from tests.common import async_fire_mqtt_message
DEVICE_ID = "123TESTMAC"
NAME = "test_device"
BEDROOM = "bedroom"
LIVING_ROOM = "living_room"
BEDROOM_TOPIC = f"room_presence/{BEDROOM}"
LIVING_ROOM_TOPIC = f"room_presence/{LIVING_ROOM}"
SENSOR_STATE = f"sensor.{NAME}"
CONF_DEVICE_ID = "device_id"
CONF_TIMEOUT = "timeout"
NEAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 1}
FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 10}
REALLY_FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 20}
async def send_message(hass, topic, message):
"""Test the sending of a message."""
async_fire_mqtt_message(hass, topic, json.dumps(message))
await hass.async_block_till_done()
await hass.async_block_till_done()
async def assert_state(hass, room):
"""Test the assertion of a room state."""
state = hass.states.get(SENSOR_STATE)
assert state.state == room
async def assert_distance(hass, distance):
"""Test the assertion of a distance state."""
state = hass.states.get(SENSOR_STATE)
assert state.attributes.get("distance") == distance
async def test_room_update(hass, mqtt_mock):
"""Test the updating between rooms."""
assert await async_setup_component(
hass,
sensor.DOMAIN,
{
sensor.DOMAIN: {
CONF_PLATFORM: "mqtt_room",
CONF_NAME: NAME,
CONF_DEVICE_ID: DEVICE_ID,
CONF_STATE_TOPIC: "room_presence",
CONF_QOS: DEFAULT_QOS,
CONF_TIMEOUT: 5,
}
},
)
await hass.async_block_till_done()
await send_message(hass, BEDROOM_TOPIC, FAR_MESSAGE)
await assert_state(hass, BEDROOM)
await assert_distance(hass, 10)
await send_message(hass, LIVING_ROOM_TOPIC, NEAR_MESSAGE)
await assert_state(hass, LIVING_ROOM)
await assert_distance(hass, 1)
await send_message(hass, BEDROOM_TOPIC, FAR_MESSAGE)
await assert_state(hass, LIVING_ROOM)
await assert_distance(hass, 1)
time = dt.utcnow() + datetime.timedelta(seconds=7)
with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=time):
await send_message(hass, BEDROOM_TOPIC, FAR_MESSAGE)
await assert_state(hass, BEDROOM)
await assert_distance(hass, 10)
|
import homeassistant.components.persistent_notification as pn
from homeassistant.components.websocket_api.const import TYPE_RESULT
from homeassistant.setup import async_setup_component, setup_component
from tests.common import get_test_home_assistant
class TestPersistentNotification:
"""Test persistent notification component."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
setup_component(self.hass, pn.DOMAIN, {})
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_create(self):
"""Test creating notification without title or notification id."""
notifications = self.hass.data[pn.DOMAIN]["notifications"]
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 0
assert len(notifications) == 0
pn.create(self.hass, "Hello World {{ 1 + 1 }}", title="{{ 1 + 1 }} beers")
self.hass.block_till_done()
entity_ids = self.hass.states.entity_ids(pn.DOMAIN)
assert len(entity_ids) == 1
assert len(notifications) == 1
state = self.hass.states.get(entity_ids[0])
assert state.state == pn.STATE
assert state.attributes.get("message") == "Hello World 2"
assert state.attributes.get("title") == "2 beers"
notification = notifications.get(entity_ids[0])
assert notification["status"] == pn.STATUS_UNREAD
assert notification["message"] == "Hello World 2"
assert notification["title"] == "2 beers"
assert notification["created_at"] is not None
notifications.clear()
def test_create_notification_id(self):
"""Ensure overwrites existing notification with same id."""
notifications = self.hass.data[pn.DOMAIN]["notifications"]
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 0
assert len(notifications) == 0
pn.create(self.hass, "test", notification_id="Beer 2")
self.hass.block_till_done()
assert len(self.hass.states.entity_ids()) == 1
assert len(notifications) == 1
entity_id = "persistent_notification.beer_2"
state = self.hass.states.get(entity_id)
assert state.attributes.get("message") == "test"
notification = notifications.get(entity_id)
assert notification["message"] == "test"
assert notification["title"] is None
pn.create(self.hass, "test 2", notification_id="Beer 2")
self.hass.block_till_done()
# We should have overwritten old one
assert len(self.hass.states.entity_ids()) == 1
state = self.hass.states.get(entity_id)
assert state.attributes.get("message") == "test 2"
notification = notifications.get(entity_id)
assert notification["message"] == "test 2"
notifications.clear()
def test_create_template_error(self):
"""Ensure we output templates if contain error."""
notifications = self.hass.data[pn.DOMAIN]["notifications"]
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 0
assert len(notifications) == 0
pn.create(self.hass, "{{ message + 1 }}", "{{ title + 1 }}")
self.hass.block_till_done()
entity_ids = self.hass.states.entity_ids(pn.DOMAIN)
assert len(entity_ids) == 1
assert len(notifications) == 1
state = self.hass.states.get(entity_ids[0])
assert state.attributes.get("message") == "{{ message + 1 }}"
assert state.attributes.get("title") == "{{ title + 1 }}"
notification = notifications.get(entity_ids[0])
assert notification["message"] == "{{ message + 1 }}"
assert notification["title"] == "{{ title + 1 }}"
notifications.clear()
def test_dismiss_notification(self):
"""Ensure removal of specific notification."""
notifications = self.hass.data[pn.DOMAIN]["notifications"]
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 0
assert len(notifications) == 0
pn.create(self.hass, "test", notification_id="Beer 2")
self.hass.block_till_done()
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 1
assert len(notifications) == 1
pn.dismiss(self.hass, notification_id="Beer 2")
self.hass.block_till_done()
assert len(self.hass.states.entity_ids(pn.DOMAIN)) == 0
assert len(notifications) == 0
notifications.clear()
def test_mark_read(self):
"""Ensure notification is marked as Read."""
notifications = self.hass.data[pn.DOMAIN]["notifications"]
assert len(notifications) == 0
pn.create(self.hass, "test", notification_id="Beer 2")
self.hass.block_till_done()
entity_id = "persistent_notification.beer_2"
assert len(notifications) == 1
notification = notifications.get(entity_id)
assert notification["status"] == pn.STATUS_UNREAD
self.hass.services.call(
pn.DOMAIN, pn.SERVICE_MARK_READ, {"notification_id": "Beer 2"}
)
self.hass.block_till_done()
assert len(notifications) == 1
notification = notifications.get(entity_id)
assert notification["status"] == pn.STATUS_READ
notifications.clear()
async def test_ws_get_notifications(hass, hass_ws_client):
"""Test websocket endpoint for retrieving persistent notifications."""
await async_setup_component(hass, pn.DOMAIN, {})
client = await hass_ws_client(hass)
await client.send_json({"id": 5, "type": "persistent_notification/get"})
msg = await client.receive_json()
assert msg["id"] == 5
assert msg["type"] == TYPE_RESULT
assert msg["success"]
notifications = msg["result"]
assert len(notifications) == 0
# Create
hass.components.persistent_notification.async_create(
"test", notification_id="Beer 2"
)
await client.send_json({"id": 6, "type": "persistent_notification/get"})
msg = await client.receive_json()
assert msg["id"] == 6
assert msg["type"] == TYPE_RESULT
assert msg["success"]
notifications = msg["result"]
assert len(notifications) == 1
notification = notifications[0]
assert notification["notification_id"] == "Beer 2"
assert notification["message"] == "test"
assert notification["title"] is None
assert notification["status"] == pn.STATUS_UNREAD
assert notification["created_at"] is not None
# Mark Read
await hass.services.async_call(
pn.DOMAIN, pn.SERVICE_MARK_READ, {"notification_id": "Beer 2"}
)
await client.send_json({"id": 7, "type": "persistent_notification/get"})
msg = await client.receive_json()
notifications = msg["result"]
assert len(notifications) == 1
assert notifications[0]["status"] == pn.STATUS_READ
# Dismiss
hass.components.persistent_notification.async_dismiss("Beer 2")
await client.send_json({"id": 8, "type": "persistent_notification/get"})
msg = await client.receive_json()
notifications = msg["result"]
assert len(notifications) == 0
|
from django.db import transaction
from weblate.accounts.tasks import cleanup_social_auth
from weblate.screenshots.tasks import cleanup_screenshot_files
from weblate.trans.models import Project
from weblate.trans.tasks import (
cleanup_old_comments,
cleanup_old_suggestions,
cleanup_project,
cleanup_stale_repos,
cleanup_suggestions,
)
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "clenups orphaned checks and suggestions"
def handle(self, *args, **options):
"""Perfom cleanup of Weblate database."""
cleanup_screenshot_files()
with transaction.atomic():
cleanup_social_auth()
for project in Project.objects.values_list("id", flat=True):
cleanup_project(project)
cleanup_suggestions()
cleanup_stale_repos()
cleanup_old_suggestions()
cleanup_old_comments()
|
import json
import os
from absl import flags
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import fio
flags.DEFINE_list(
'ch_params', [],
    'A list of comma-separated "key=value" parameters passed into '
'cloud harmony benchmarks.')
BENCHMARK = 'block-storage'
INSTALL_PATH = os.path.join(linux_packages.INSTALL_DIR,
BENCHMARK)
STEADY_STATE_MEASUREMENT_WINDOW = '-ssmw'
def _Install(vm):
vm.InstallPackages('fio') # CloudHarmony doesn't work well with v2.7 fio
for deps in ['php', 'build_tools']:
vm.Install(deps)
vm.RemoteCommand(
('git clone https://github.com/cloudharmony/{benchmark}.git '
'{dir}').format(benchmark=BENCHMARK, dir=INSTALL_PATH))
def YumInstall(vm):
_Install(vm)
def AptInstall(vm):
_Install(vm)
def _ParseFioJson(fio_json):
"""Parse fio json output.
Args:
    fio_json: string. JSON output from the fio command.
  Returns:
    A list of sample.Sample objects.
"""
samples = []
for job in json.loads(fio_json)['jobs']:
cmd = job['fio_command']
# Get rid of ./fio.
cmd = ' '.join(cmd.split()[1:])
additional_metadata = {'cmd': cmd}
# Remove ssmw suffix from job name.
try:
job['jobname'] = regex_util.Substitute(
STEADY_STATE_MEASUREMENT_WINDOW, '', job['jobname'])
additional_metadata['steady_state'] = True
except regex_util.NoMatchError:
additional_metadata['steady_state'] = False
# Mock fio_json to reuse fio parser.
mock_json = {'jobs': [job]}
new_samples = fio.ParseResults(
        str(fio.FioParametersToJob(cmd)), mock_json)
for s in new_samples:
s.metadata.update(additional_metadata)
samples += new_samples
return samples
def _ParseCHResultsJson(results_json):
"""Parse json output from CloudHarmony block storage benchmark.
Args:
    results_json: string. JSON-formatted results from the test; each test
      provides a key/value pair.
  Returns:
    A list of sample.Sample objects.
"""
metadata = {}
_ExtractMetadata(metadata)
return [sample.Sample(metric, val, '', metadata)
for metric, val in json.loads(results_json).items()]
def ParseOutput(results_json, fio_json_list):
"""Parse json output from CloudHarmony block storage benchmark.
Args:
    results_json: string. JSON output reported by CloudHarmony.
    fio_json_list: list of strings. JSON output strings from the fio command.
  Returns:
    A list of sample.Sample objects.
"""
return _ParseCHResultsJson(results_json) + [
s for fio_json in fio_json_list for s in _ParseFioJson(fio_json)]
def _ExtractMetadata(metadata):
"""Extract run metadata from flags."""
metadata.update(dict([p.split('=') for p in flags.FLAGS.ch_params]))
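# Illustrative sketch, not part of the original module: the ch_params flag
# holds "key=value" strings and _ExtractMetadata folds them into run metadata.
# The parameter names and values below are made up for demonstration only.
if __name__ == '__main__':
  example_params = ['test_size=small', 'target=/dev/sdb']
  example_metadata = dict(p.split('=') for p in example_params)
  print(example_metadata)  # {'test_size': 'small', 'target': '/dev/sdb'}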
|
import unittest
from sklearn import datasets
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LinearRegression
class TestSklearn(unittest.TestCase):
def test_random_forest_classifier(self):
iris = datasets.load_iris()
X, y = iris.data, iris.target
        rf1 = RandomForestClassifier()
        rf1.fit(X, y)
    def test_linear_regression(self):
        boston = datasets.load_boston()
        X, y = boston.data, boston.target
        lr1 = LinearRegression()
        lr1.fit(X, y)
|
import asyncio
import os
from typing import Any, Dict, Iterable, List, Optional, Set, Union, cast
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import Integration, IntegrationNotFound, async_get_integration
import homeassistant.util.package as pkg_util
DATA_PIP_LOCK = "pip_lock"
DATA_PKG_CACHE = "pkg_cache"
DATA_INTEGRATIONS_WITH_REQS = "integrations_with_reqs"
CONSTRAINT_FILE = "package_constraints.txt"
DISCOVERY_INTEGRATIONS: Dict[str, Iterable[str]] = {
"mqtt": ("mqtt",),
"ssdp": ("ssdp",),
"zeroconf": ("zeroconf", "homekit"),
}
_UNDEF = object()
class RequirementsNotFound(HomeAssistantError):
"""Raised when a component is not found."""
def __init__(self, domain: str, requirements: List) -> None:
"""Initialize a component not found error."""
super().__init__(f"Requirements for {domain} not found: {requirements}.")
self.domain = domain
self.requirements = requirements
async def async_get_integration_with_requirements(
hass: HomeAssistant, domain: str, done: Optional[Set[str]] = None
) -> Integration:
"""Get an integration with all requirements installed, including the dependencies.
    This can raise IntegrationNotFound if the manifest or integration
    is invalid, or RequirementsNotFound if requirements failed to
    install.
"""
if done is None:
done = {domain}
else:
done.add(domain)
integration = await async_get_integration(hass, domain)
if hass.config.skip_pip:
return integration
cache = hass.data.get(DATA_INTEGRATIONS_WITH_REQS)
if cache is None:
cache = hass.data[DATA_INTEGRATIONS_WITH_REQS] = {}
int_or_evt: Union[Integration, asyncio.Event, None] = cache.get(domain, _UNDEF)
if isinstance(int_or_evt, asyncio.Event):
await int_or_evt.wait()
int_or_evt = cache.get(domain, _UNDEF)
# When we have waited and it's _UNDEF, it doesn't exist
# We don't cache that it doesn't exist, or else people can't fix it
# and then restart, because their config will never be valid.
if int_or_evt is _UNDEF:
raise IntegrationNotFound(domain)
if int_or_evt is not _UNDEF:
return cast(Integration, int_or_evt)
event = cache[domain] = asyncio.Event()
if integration.requirements:
await async_process_requirements(
hass, integration.domain, integration.requirements
)
deps_to_check = [
dep
for dep in integration.dependencies + integration.after_dependencies
if dep not in done
]
for check_domain, to_check in DISCOVERY_INTEGRATIONS.items():
if (
check_domain not in done
and check_domain not in deps_to_check
and any(check in integration.manifest for check in to_check)
):
deps_to_check.append(check_domain)
if deps_to_check:
await asyncio.gather(
*[
async_get_integration_with_requirements(hass, dep, done)
for dep in deps_to_check
]
)
cache[domain] = integration
event.set()
return integration
async def async_process_requirements(
hass: HomeAssistant, name: str, requirements: List[str]
) -> None:
"""Install the requirements for a component or platform.
This method is a coroutine. It will raise RequirementsNotFound
    if a requirement can't be satisfied.
"""
pip_lock = hass.data.get(DATA_PIP_LOCK)
if pip_lock is None:
pip_lock = hass.data[DATA_PIP_LOCK] = asyncio.Lock()
kwargs = pip_kwargs(hass.config.config_dir)
async with pip_lock:
for req in requirements:
if pkg_util.is_installed(req):
continue
def _install(req: str, kwargs: Dict) -> bool:
"""Install requirement."""
return pkg_util.install_package(req, **kwargs)
ret = await hass.async_add_executor_job(_install, req, kwargs)
if not ret:
raise RequirementsNotFound(name, [req])
def pip_kwargs(config_dir: Optional[str]) -> Dict[str, Any]:
"""Return keyword arguments for PIP install."""
is_docker = pkg_util.is_docker_env()
kwargs = {
"constraints": os.path.join(os.path.dirname(__file__), CONSTRAINT_FILE),
"no_cache_dir": is_docker,
}
if "WHEELS_LINKS" in os.environ:
kwargs["find_links"] = os.environ["WHEELS_LINKS"]
if not (config_dir is None or pkg_util.is_virtual_env()) and not is_docker:
kwargs["target"] = os.path.join(config_dir, "deps")
return kwargs
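# Illustrative sketch, not part of the original module: pip_kwargs() points pip
# at the bundled constraints file and, outside Docker and virtualenvs, installs
# into a "deps" directory under the config dir. The path below is hypothetical.
if __name__ == "__main__":
    print(pip_kwargs("/config"))
    # e.g. {'constraints': '.../package_constraints.txt',
    #       'no_cache_dir': False,
    #       'target': '/config/deps'}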
|
import numpy as np
class FeatureLister(object):
def __init__(self, X, idx_store, num_docs):
self.X = X
self.idx_store = idx_store
self.num_docs = num_docs
def output(self):
# () -> list
toret = [{} for i in range(self.num_docs)]
X = self.X.tocoo()
for row, col, val in zip(X.row, X.col, X.data):
            toret[row][self.idx_store.getval(col)] = val.item()
return toret
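# Illustrative sketch, not part of the original module: FeatureLister turns a
# sparse document-term matrix into one {feature: value} dict per document. The
# tiny index store below is a stand-in for the real idx_store interface.
if __name__ == '__main__':
    from scipy.sparse import csr_matrix
    class _ToyIndexStore(object):
        def __init__(self, terms):
            self._terms = terms
        def getval(self, idx):
            return self._terms[idx]
    X = csr_matrix([[1.0, 0.0], [0.0, 2.0]])
    lister = FeatureLister(X, _ToyIndexStore(['cat', 'dog']), num_docs=2)
    print(lister.output())  # [{'cat': 1.0}, {'dog': 2.0}]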
|
import base64
from homeassistant.components import media_player
from homeassistant.components.websocket_api.const import TYPE_RESULT
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_get_image(hass, hass_ws_client, caplog):
"""Test get image via WS command."""
await async_setup_component(
hass, "media_player", {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
client = await hass_ws_client(hass)
with patch(
"homeassistant.components.media_player.MediaPlayerEntity."
"async_get_media_image",
return_value=(b"image", "image/jpeg"),
):
await client.send_json(
{
"id": 5,
"type": "media_player_thumbnail",
"entity_id": "media_player.bedroom",
}
)
msg = await client.receive_json()
assert msg["id"] == 5
assert msg["type"] == TYPE_RESULT
assert msg["success"]
assert msg["result"]["content_type"] == "image/jpeg"
assert msg["result"]["content"] == base64.b64encode(b"image").decode("utf-8")
assert "media_player_thumbnail is deprecated" in caplog.text
async def test_get_image_http(hass, aiohttp_client):
"""Test get image via http command."""
await async_setup_component(
hass, "media_player", {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get("media_player.bedroom")
assert "entity_picture_local" not in state.attributes
client = await aiohttp_client(hass.http.app)
with patch(
"homeassistant.components.media_player.MediaPlayerEntity."
"async_get_media_image",
return_value=(b"image", "image/jpeg"),
):
resp = await client.get(state.attributes["entity_picture"])
content = await resp.read()
assert content == b"image"
async def test_get_image_http_remote(hass, aiohttp_client):
"""Test get image url via http command."""
with patch(
"homeassistant.components.media_player.MediaPlayerEntity."
"media_image_remotely_accessible",
return_value=True,
):
await async_setup_component(
hass, "media_player", {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get("media_player.bedroom")
assert "entity_picture_local" in state.attributes
client = await aiohttp_client(hass.http.app)
with patch(
"homeassistant.components.media_player.MediaPlayerEntity."
"async_get_media_image",
return_value=(b"image", "image/jpeg"),
):
resp = await client.get(state.attributes["entity_picture_local"])
content = await resp.read()
assert content == b"image"
def test_deprecated_base_class(caplog):
"""Test deprecated base class."""
class CustomMediaPlayer(media_player.MediaPlayerDevice):
pass
CustomMediaPlayer()
assert "MediaPlayerDevice is deprecated, modify CustomMediaPlayer" in caplog.text
async def test_media_browse(hass, hass_ws_client):
"""Test browsing media."""
await async_setup_component(
hass, "media_player", {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
client = await hass_ws_client(hass)
with patch(
"homeassistant.components.demo.media_player.YOUTUBE_PLAYER_SUPPORT",
media_player.SUPPORT_BROWSE_MEDIA,
), patch(
"homeassistant.components.media_player.MediaPlayerEntity." "async_browse_media",
return_value={"bla": "yo"},
) as mock_browse_media:
await client.send_json(
{
"id": 5,
"type": "media_player/browse_media",
"entity_id": "media_player.bedroom",
"media_content_type": "album",
"media_content_id": "abcd",
}
)
msg = await client.receive_json()
assert msg["id"] == 5
assert msg["type"] == TYPE_RESULT
assert msg["success"]
assert msg["result"] == {"bla": "yo"}
assert mock_browse_media.mock_calls[0][1] == ("album", "abcd")
with patch(
"homeassistant.components.demo.media_player.YOUTUBE_PLAYER_SUPPORT",
media_player.SUPPORT_BROWSE_MEDIA,
), patch(
"homeassistant.components.media_player.MediaPlayerEntity." "async_browse_media",
return_value={"bla": "yo"},
):
await client.send_json(
{
"id": 6,
"type": "media_player/browse_media",
"entity_id": "media_player.bedroom",
}
)
msg = await client.receive_json()
assert msg["id"] == 6
assert msg["type"] == TYPE_RESULT
assert msg["success"]
assert msg["result"] == {"bla": "yo"}
|
from pytest import mark
from cerberus import errors, Validator
from cerberus.tests import assert_fail, assert_success
def test_dependencies_basic_error_handler_representation(validator):
schema = {
'field1': {'required': False},
'field2': {'required': True, 'dependencies': {'field1': ['one', 'two']}},
}
validator.validate({'field2': 7}, schema=schema)
expected_message = errors.BasicErrorHandler.messages[
errors.DEPENDENCIES_FIELD_VALUE.code
].format(field='field2', constraint={'field1': ['one', 'two']})
assert validator.errors == {'field2': [expected_message]}
def test_dependencies_errors():
v = Validator(
{
'field1': {'required': False},
'field2': {'required': True, 'dependencies': {'field1': ['one', 'two']}},
}
)
assert_fail(
{'field1': 'three', 'field2': 7},
validator=v,
error=(
'field2',
('field2', 'dependencies'),
errors.DEPENDENCIES_FIELD_VALUE,
{'field1': ['one', 'two']},
({'field1': 'three'},),
),
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'a': 1, 'b': 'foo'}),
(assert_success, {'a': 2, 'c': 'bar'}),
(assert_fail, {'a': 1, 'c': 'foo'}),
(assert_fail, {'a': 2, 'b': 'bar'}),
],
)
def test_dependencies_in_oneof(test_function, document):
# https://github.com/pyeve/cerberus/issues/241
test_function(
schema={
'a': {
'type': 'integer',
'oneof': [
{'allowed': [1], 'dependencies': 'b'},
{'allowed': [2], 'dependencies': 'c'},
],
},
'b': {},
'c': {},
},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': 'foobar', 'foo': 'bar', 'bar': 'foo'}),
(assert_fail, {'field': 'foobar', 'foo': 'bar'}),
],
)
def test_dependencies_of_multiple_fields(test_function, document):
test_function(
schema={'field': {'dependencies': ['foo', 'bar']}, 'foo': {}, 'bar': {}},
document=document,
)
@mark.parametrize(
"document",
[
{'field': 'foobar'},
{'field': 'foobar', 'foo': 'bar'},
{'field': 'foobar', 'bar': 'foo'},
{'foo': 'bar', 'bar': 'foo'},
{'foo': 'bar'},
],
)
def test_dependencies_of_multiple_fields_with_required_field_fails(document):
assert_fail(
schema={
'field': {'required': True, 'dependencies': ['foo', 'bar']},
'foo': {},
'bar': {},
},
document=document,
)
def test_dependencies_of_multiple_fields_with_required_field_succeeds():
assert_success(
schema={
'field': {'required': False, 'dependencies': ['foo', 'bar']},
'foo': {},
'bar': {},
},
document={'foo': 'bar', 'bar': 'foo'},
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'foo': None, 'bar': 1}),
(assert_success, {'foo': None}),
(assert_fail, {'bar': 1}),
],
)
def test_dependencies_of_nullable_field_succeeds(test_function, document):
# https://github.com/pyeve/cerberus/issues/305
test_function(
schema={'foo': {'nullable': True}, 'bar': {'dependencies': 'foo'}},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': 'foobar', 'foo': 'bar'}),
(assert_fail, {'field': 'foobar'}),
],
)
def test_dependencies_of_single_field(test_function, document):
test_function(
schema={'field': {'dependencies': 'foo'}, 'foo': {'type': 'string'}},
document=document,
)
def test_dependencies_relative_to_document_root():
# https://github.com/pyeve/cerberus/issues/288
subschema = {'version': {'dependencies': ('^repo',)}}
schema = {'package': {'allow_unknown': True, 'schema': subschema}, 'repo': {}}
assert_success({'repo': 'somewhere', 'package': {'version': 1}}, schema)
assert_fail(
{'package': {'repo': 'somewhere', 'version': 0}},
schema,
error=('package', ('package', 'schema'), errors.SCHEMA, subschema),
child_errors=[
(
('package', 'version'),
('package', 'schema', 'version', 'dependencies'),
errors.DEPENDENCIES_FIELD,
('^repo',),
('^repo',),
)
],
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'foo': None, 'bar': None}),
(assert_success, {'foo': 1, 'bar': 1}),
(assert_success, {'foo': None, 'bar': 1}),
(assert_fail, {'foo': None}),
(assert_fail, {'foo': 1}),
],
)
def test_dependencies_with_mutually_dependent_nullable_fields(test_function, document):
# https://github.com/pyeve/cerberus/pull/306
test_function(
schema={
'foo': {'dependencies': 'bar', 'nullable': True},
'bar': {'dependencies': 'foo', 'nullable': True},
},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'text': 'foo', 'deleted': False}),
(assert_fail, {'text': 'foo', 'deleted': True}),
(assert_fail, {'text': 'foo'}),
],
)
def test_dependencies_with_required_boolean_value(test_function, document):
# https://github.com/pyeve/cerberus/issues/138
test_function(
schema={
'deleted': {'type': 'boolean'},
'text': {'dependencies': {'deleted': False}},
},
document=document,
)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'text': 'foo', 'deleted': False}),
(assert_fail, {'text': 'foo', 'deleted': True}),
(assert_fail, {'text': 'foo'}),
],
)
def test_dependencies_with_required_boolean_value_defined_in_list(
test_function, document
):
# https://github.com/pyeve/cerberus/issues/138
test_function(
schema={
'deleted': {'type': 'boolean'},
'text': {'dependencies': {'deleted': [False]}},
},
document=document,
)
@mark.parametrize(
"document",
[
{'field': 'foobar'},
{'field': 'foobar', 'foo': 'foo'},
{'field': 'foobar', 'bar': 'bar'},
{'foo': 'foo', 'bar': 'bar'},
{'foo': 'bar'},
],
)
def test_dependencies_with_required_rule_and_required_value_fails(document):
assert_fail(
schema={
'field': {'required': True, 'dependencies': {'foo': 'foo', 'bar': 'bar'}},
'foo': {},
'bar': {},
},
document=document,
)
def test_dependencies_with_required_rule_and_required_value_succeeds():
schema = {
'field': {'required': True, 'dependencies': {'foo': 'foo', 'bar': 'bar'}},
'foo': {},
'bar': {},
}
assert_success(
document={'field': 'foobar', 'foo': 'foo', 'bar': 'bar'}, schema=schema
)
schema['field']['required'] = False
assert_success(document={'foo': 'bar', 'bar': 'foo'}, schema=schema)
@mark.parametrize(
"document",
[
{'field': 'foobar', 'foo': 'foo'},
{'field': 'foobar', 'foo': 'bar'},
{'field': 'foobar', 'bar': 'bar'},
{'field': 'foobar', 'bar': 'foo'},
{'field': 'foobar'},
],
)
def test_dependencies_with_required_value_fails(document):
assert_fail(
schema={
'field': {'dependencies': {'foo': 'foo', 'bar': 'bar'}},
'foo': {},
'bar': {},
},
document=document,
)
def test_dependencies_with_required_value_succeeds():
assert_success(
schema={
'field': {'dependencies': {'foo': 'foo', 'bar': 'bar'}},
'foo': {},
'bar': {},
},
document={'field': 'foobar', 'foo': 'foo', 'bar': 'bar'},
)
def test_nested_dependencies():
schema = {
'field': {'dependencies': ['a_dict.foo', 'a_dict.bar']},
'a_dict': {'type': 'dict', 'schema': {'foo': {}, 'bar': {}}},
}
assert_success(
document={'field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'bar'}},
schema=schema,
)
assert_fail(document={'field': 'foobar', 'a_dict': {}}, schema=schema)
assert_fail(document={'field': 'foobar', 'a_dict': {'foo': 'foo'}}, schema=schema)
@mark.parametrize(
("test_function", "document"),
[
(assert_success, {'field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'bar'}}),
(assert_success, {'field': 'foobar', 'a_dict': {'foo': 'bar', 'bar': 'bar'}}),
(assert_fail, {'field': 'foobar', 'a_dict': {}}),
(assert_fail, {'field': 'foobar', 'a_dict': {'foo': 'foo', 'bar': 'foo'}}),
(assert_fail, {'field': 'foobar', 'a_dict': {'bar': 'foo'}}),
(assert_fail, {'field': 'foobar', 'a_dict': {'bar': 'bar'}}),
],
)
def test_nested_dependencies_with_required_values(test_function, document):
test_function(
schema={
'field': {
'dependencies': {'a_dict.foo': ['foo', 'bar'], 'a_dict.bar': 'bar'}
},
'a_dict': {'type': 'dict', 'schema': {'foo': {}, 'bar': {}}},
},
document=document,
)
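# Illustrative sketch, not part of the original test module: the 'dependencies'
# rule in a nutshell, using the public Validator API imported above.
if __name__ == '__main__':
    v = Validator({'field': {'dependencies': 'foo'}, 'foo': {}})
    print(v.validate({'field': 'x', 'foo': 'y'}))  # True
    print(v.validate({'field': 'x'}))              # False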
|
import logging
import pyitachip2ir
import voluptuous as vol
from homeassistant.components import remote
from homeassistant.components.remote import (
ATTR_NUM_REPEATS,
DEFAULT_NUM_REPEATS,
PLATFORM_SCHEMA,
)
from homeassistant.const import (
CONF_DEVICES,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PORT,
DEVICE_DEFAULT_NAME,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_PORT = 4998
CONNECT_TIMEOUT = 5000
DEFAULT_MODADDR = 1
DEFAULT_CONNADDR = 1
DEFAULT_IR_COUNT = 1
CONF_MODADDR = "modaddr"
CONF_CONNADDR = "connaddr"
CONF_COMMANDS = "commands"
CONF_DATA = "data"
CONF_IR_COUNT = "ir_count"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MAC): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Required(CONF_DEVICES): vol.All(
cv.ensure_list,
[
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MODADDR): cv.positive_int,
vol.Required(CONF_CONNADDR): cv.positive_int,
vol.Optional(CONF_IR_COUNT): cv.positive_int,
vol.Required(CONF_COMMANDS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_DATA): cv.string,
}
],
),
}
],
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ITach connection and devices."""
itachip2ir = pyitachip2ir.ITachIP2IR(
config.get(CONF_MAC), config.get(CONF_HOST), int(config.get(CONF_PORT))
)
if not itachip2ir.ready(CONNECT_TIMEOUT):
_LOGGER.error("Unable to find iTach")
return False
devices = []
for data in config.get(CONF_DEVICES):
name = data.get(CONF_NAME)
modaddr = int(data.get(CONF_MODADDR, DEFAULT_MODADDR))
connaddr = int(data.get(CONF_CONNADDR, DEFAULT_CONNADDR))
ir_count = int(data.get(CONF_IR_COUNT, DEFAULT_IR_COUNT))
cmddatas = ""
for cmd in data.get(CONF_COMMANDS):
cmdname = cmd[CONF_NAME].strip()
if not cmdname:
cmdname = '""'
cmddata = cmd[CONF_DATA].strip()
if not cmddata:
cmddata = '""'
cmddatas += f"{cmdname}\n{cmddata}\n"
itachip2ir.addDevice(name, modaddr, connaddr, cmddatas)
devices.append(ITachIP2IRRemote(itachip2ir, name, ir_count))
add_entities(devices, True)
return True
class ITachIP2IRRemote(remote.RemoteEntity):
"""Device that sends commands to an ITachIP2IR device."""
def __init__(self, itachip2ir, name, ir_count):
"""Initialize device."""
self.itachip2ir = itachip2ir
self._power = False
self._name = name or DEVICE_DEFAULT_NAME
self._ir_count = ir_count or DEFAULT_IR_COUNT
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._power
def turn_on(self, **kwargs):
"""Turn the device on."""
self._power = True
self.itachip2ir.send(self._name, "ON", self._ir_count)
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
self._power = False
self.itachip2ir.send(self._name, "OFF", self._ir_count)
self.schedule_update_ha_state()
def send_command(self, command, **kwargs):
"""Send a command to one device."""
num_repeats = kwargs.get(ATTR_NUM_REPEATS, DEFAULT_NUM_REPEATS)
for single_command in command:
self.itachip2ir.send(
self._name, single_command, self._ir_count * num_repeats
)
def update(self):
"""Update the device."""
self.itachip2ir.update()
|
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from . import DOMAIN
CONF_PINS = "pins"
CONF_TYPE = "analog"
PIN_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PINS): vol.Schema({cv.positive_int: PIN_SCHEMA})}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Arduino platform."""
board = hass.data[DOMAIN]
pins = config[CONF_PINS]
sensors = []
for pinnum, pin in pins.items():
sensors.append(ArduinoSensor(pin.get(CONF_NAME), pinnum, CONF_TYPE, board))
add_entities(sensors)
class ArduinoSensor(Entity):
"""Representation of an Arduino Sensor."""
def __init__(self, name, pin, pin_type, board):
"""Initialize the sensor."""
self._pin = pin
self._name = name
self.pin_type = pin_type
self.direction = "in"
self._value = None
board.set_mode(self._pin, self.direction, self.pin_type)
self._board = board
@property
def state(self):
"""Return the state of the sensor."""
return self._value
@property
def name(self):
"""Get the name of the sensor."""
return self._name
def update(self):
"""Get the latest value from the pin."""
self._value = self._board.get_analog_inputs()[self._pin][1]
|
from typing import Optional, Sequence
from pysmartthings import Capability
from homeassistant.components.fan import (
SPEED_HIGH,
SPEED_LOW,
SPEED_MEDIUM,
SPEED_OFF,
SUPPORT_SET_SPEED,
FanEntity,
)
from . import SmartThingsEntity
from .const import DATA_BROKERS, DOMAIN
VALUE_TO_SPEED = {0: SPEED_OFF, 1: SPEED_LOW, 2: SPEED_MEDIUM, 3: SPEED_HIGH}
SPEED_TO_VALUE = {v: k for k, v in VALUE_TO_SPEED.items()}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add fans for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
async_add_entities(
[
SmartThingsFan(device)
for device in broker.devices.values()
if broker.any_assigned(device.device_id, "fan")
]
)
def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]:
"""Return all capabilities supported if minimum required are present."""
supported = [Capability.switch, Capability.fan_speed]
# Must have switch and fan_speed
if all(capability in capabilities for capability in supported):
return supported
class SmartThingsFan(SmartThingsEntity, FanEntity):
"""Define a SmartThings Fan."""
async def async_set_speed(self, speed: str):
"""Set the speed of the fan."""
value = SPEED_TO_VALUE[speed]
await self._device.set_fan_speed(value, set_status=True)
# State is set optimistically in the command above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_write_ha_state()
async def async_turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn the fan on."""
if speed is not None:
value = SPEED_TO_VALUE[speed]
await self._device.set_fan_speed(value, set_status=True)
else:
await self._device.switch_on(set_status=True)
# State is set optimistically in the commands above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Turn the fan off."""
await self._device.switch_off(set_status=True)
# State is set optimistically in the command above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_write_ha_state()
@property
def is_on(self) -> bool:
"""Return true if fan is on."""
return self._device.status.switch
@property
def speed(self) -> str:
"""Return the current speed."""
return VALUE_TO_SPEED[self._device.status.fan_speed]
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return [SPEED_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
@property
def supported_features(self) -> int:
"""Flag supported features."""
return SUPPORT_SET_SPEED
|
import logging
import threading
from tellcore.constants import (
TELLSTICK_DIM,
TELLSTICK_TURNOFF,
TELLSTICK_TURNON,
TELLSTICK_UP,
)
from tellcore.library import TelldusError
from tellcore.telldus import AsyncioCallbackDispatcher, TelldusCore
from tellcorenet import TellCoreClient
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_DISCOVER_CONFIG = "config"
ATTR_DISCOVER_DEVICES = "devices"
CONF_SIGNAL_REPETITIONS = "signal_repetitions"
DEFAULT_SIGNAL_REPETITIONS = 1
DOMAIN = "tellstick"
DATA_TELLSTICK = "tellstick_device"
SIGNAL_TELLCORE_CALLBACK = "tellstick_callback"
# Use a global tellstick domain lock to avoid getting Tellcore errors when
# calling concurrently.
TELLSTICK_LOCK = threading.RLock()
# A TellstickRegistry that keeps a map from tellcore_id to the corresponding
# tellcore_device and HA device (entity).
TELLCORE_REGISTRY = None
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Inclusive(CONF_HOST, "tellcore-net"): cv.string,
vol.Inclusive(CONF_PORT, "tellcore-net"): vol.All(
cv.ensure_list, [cv.port], vol.Length(min=2, max=2)
),
vol.Optional(
CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS
): vol.Coerce(int),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def _discover(hass, config, component_name, found_tellcore_devices):
"""Set up and send the discovery event."""
if not found_tellcore_devices:
return
_LOGGER.info(
"Discovered %d new %s devices", len(found_tellcore_devices), component_name
)
signal_repetitions = config[DOMAIN].get(CONF_SIGNAL_REPETITIONS)
discovery.load_platform(
hass,
component_name,
DOMAIN,
{
ATTR_DISCOVER_DEVICES: found_tellcore_devices,
ATTR_DISCOVER_CONFIG: signal_repetitions,
},
config,
)
def setup(hass, config):
"""Set up the Tellstick component."""
conf = config.get(DOMAIN, {})
net_host = conf.get(CONF_HOST)
net_ports = conf.get(CONF_PORT)
# Initialize remote tellcore client
if net_host:
net_client = TellCoreClient(
host=net_host, port_client=net_ports[0], port_events=net_ports[1]
)
net_client.start()
def stop_tellcore_net(event):
"""Event handler to stop the client."""
net_client.stop()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_tellcore_net)
try:
tellcore_lib = TelldusCore(
callback_dispatcher=AsyncioCallbackDispatcher(hass.loop)
)
except OSError:
_LOGGER.exception("Could not initialize Tellstick")
return False
# Get all devices, switches and lights alike
tellcore_devices = tellcore_lib.devices()
# Register devices
hass.data[DATA_TELLSTICK] = {device.id: device for device in tellcore_devices}
# Discover the lights
_discover(
hass,
config,
"light",
[device.id for device in tellcore_devices if device.methods(TELLSTICK_DIM)],
)
# Discover the cover
_discover(
hass,
config,
"cover",
[device.id for device in tellcore_devices if device.methods(TELLSTICK_UP)],
)
# Discover the switches
_discover(
hass,
config,
"switch",
[
device.id
for device in tellcore_devices
if (not device.methods(TELLSTICK_UP) and not device.methods(TELLSTICK_DIM))
],
)
@callback
def async_handle_callback(tellcore_id, tellcore_command, tellcore_data, cid):
"""Handle the actual callback from Tellcore."""
hass.helpers.dispatcher.async_dispatcher_send(
SIGNAL_TELLCORE_CALLBACK, tellcore_id, tellcore_command, tellcore_data
)
# Register callback
callback_id = tellcore_lib.register_device_event(async_handle_callback)
def clean_up_callback(event):
"""Unregister the callback bindings."""
if callback_id is not None:
tellcore_lib.unregister_callback(callback_id)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, clean_up_callback)
return True
class TellstickDevice(Entity):
"""Representation of a Tellstick device.
Contains the common logic for all Tellstick devices.
"""
def __init__(self, tellcore_device, signal_repetitions):
"""Init the Tellstick device."""
self._signal_repetitions = signal_repetitions
self._state = None
self._requested_state = None
self._requested_data = None
self._repeats_left = 0
# Look up our corresponding tellcore device
self._tellcore_device = tellcore_device
self._name = tellcore_device.name
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_TELLCORE_CALLBACK, self.update_from_callback
)
)
@property
def should_poll(self):
"""Tell Home Assistant not to poll this device."""
return False
@property
def assumed_state(self):
"""Tellstick devices are always assumed state."""
return True
@property
def name(self):
"""Return the name of the device as reported by tellcore."""
return self._name
@property
def is_on(self):
"""Return true if the device is on."""
return self._state
def _parse_ha_data(self, kwargs):
"""Turn the value from HA into something useful."""
raise NotImplementedError
def _parse_tellcore_data(self, tellcore_data):
"""Turn the value received from tellcore into something useful."""
raise NotImplementedError
def _update_model(self, new_state, data):
"""Update the device entity state to match the arguments."""
raise NotImplementedError
def _send_device_command(self, requested_state, requested_data):
"""Let tellcore update the actual device to the requested state."""
raise NotImplementedError
def _send_repeated_command(self):
"""Send a tellstick command once and decrease the repeat count."""
with TELLSTICK_LOCK:
if self._repeats_left > 0:
self._repeats_left -= 1
try:
self._send_device_command(
self._requested_state, self._requested_data
)
except TelldusError as err:
_LOGGER.error(err)
def _change_device_state(self, new_state, data):
"""Turn on or off the device."""
with TELLSTICK_LOCK:
# Set the requested state and number of repeats before calling
# _send_repeated_command the first time. Subsequent calls will be
# made from the callback. (We don't want to queue a lot of commands
# in case the user toggles the switch the other way before the
# queue is fully processed.)
self._requested_state = new_state
self._requested_data = data
self._repeats_left = self._signal_repetitions
self._send_repeated_command()
# Sooner or later this will propagate to the model from the
# callback, but for a fluid UI experience update it directly.
self._update_model(new_state, data)
self.schedule_update_ha_state()
def turn_on(self, **kwargs):
"""Turn the switch on."""
self._change_device_state(True, self._parse_ha_data(kwargs))
def turn_off(self, **kwargs):
"""Turn the switch off."""
self._change_device_state(False, None)
def _update_model_from_command(self, tellcore_command, tellcore_data):
"""Update the model, from a sent tellcore command and data."""
if tellcore_command not in [TELLSTICK_TURNON, TELLSTICK_TURNOFF, TELLSTICK_DIM]:
_LOGGER.debug("Unhandled tellstick command: %d", tellcore_command)
return
self._update_model(
tellcore_command != TELLSTICK_TURNOFF,
self._parse_tellcore_data(tellcore_data),
)
def update_from_callback(self, tellcore_id, tellcore_command, tellcore_data):
"""Handle updates from the tellcore callback."""
if tellcore_id != self._tellcore_device.id:
return
self._update_model_from_command(tellcore_command, tellcore_data)
self.schedule_update_ha_state()
# This is a benign race on _repeats_left -- it's checked with the lock
# in _send_repeated_command.
if self._repeats_left > 0:
self._send_repeated_command()
def _update_from_tellcore(self):
"""Read the current state of the device from the tellcore library."""
with TELLSTICK_LOCK:
try:
last_command = self._tellcore_device.last_sent_command(
TELLSTICK_TURNON | TELLSTICK_TURNOFF | TELLSTICK_DIM
)
last_data = self._tellcore_device.last_sent_value()
self._update_model_from_command(last_command, last_data)
except TelldusError as err:
_LOGGER.error(err)
def update(self):
"""Poll the current state of the device."""
self._update_from_tellcore()
|
try:
import cPickle as pickle
except ImportError:
import pickle
from datetime import datetime as dt
import pandas as pd
import pytest
from pandas.util.testing import assert_frame_equal
symbol1 = 'symbol1'
symbol2 = 'symbol2'
start_time0 = dt(2000, 1, 1)
start_time1 = dt(2001, 1, 1)
start_time2 = dt(2001, 4, 1)
metadata1 = {'key1': 'value1'}
metadata2 = {'key2': 'value2'}
dataframe1 = pd.DataFrame({symbol1: [metadata1]}, [start_time1])
dataframe2 = pd.DataFrame({symbol2: [metadata1, metadata2]}, [start_time1, start_time2])
dataframe3 = pd.DataFrame({symbol1: [metadata1, metadata2]}, [start_time1, start_time2])
dataframe4 = pd.DataFrame({symbol1: [metadata2]}, [start_time2])
dataframe5 = pd.DataFrame({symbol1: [metadata1, metadata2]}, [start_time0, start_time2])
def integrity_check(ms_lib, symbol):
# Lower level checks to ensure end_time is set correctly
start_time = 'start'
metadata = None
for item in ms_lib.find({'symbol': symbol}, sort=[('start_time', 1)]):
if start_time != 'start' and item['start_time'] != start_time:
raise ValueError('end_time not set correctly')
start_time = item.get('end_time')
if item['metadata'] == metadata:
raise ValueError('consecutive duplicate metadata')
metadata = item['metadata']
    assert start_time is None, 'end_time of the last entry should be unset'
def test_pickle(ms_lib):
buff = pickle.dumps(ms_lib)
mnew = pickle.loads(buff)
assert ms_lib._arctic_lib.get_name() == mnew._arctic_lib.get_name()
assert "arctic_test.TEST" in str(ms_lib)
assert str(ms_lib) == repr(ms_lib)
def test_has_symbol(ms_lib):
assert not ms_lib.has_symbol(symbol1)
ms_lib.append(symbol1, metadata1)
assert ms_lib.has_symbol(symbol1)
def test_list_symbols(ms_lib):
ms_lib.append(symbol1, metadata1)
assert symbol1 in ms_lib.list_symbols()
def test_read_history(ms_lib):
assert_frame_equal(ms_lib.read_history(symbol1), pd.DataFrame({symbol1: []}, []))
ms_lib.append(symbol1, metadata1, start_time1)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe1)
def test_read(ms_lib):
    assert ms_lib.read(symbol1) is None
ms_lib.append(symbol1, metadata1, start_time1)
assert ms_lib.read(symbol1) == metadata1
ms_lib.append(symbol1, metadata2, start_time2)
assert ms_lib.read(symbol1, as_of=start_time1) == metadata1
def test_write_history(ms_lib):
collection = [pd.DataFrame({symbol1: [metadata1, metadata1]}, [start_time1, start_time2]),
pd.DataFrame({symbol2: [metadata1, metadata2]}, [start_time1, start_time2])]
ms_lib.write_history(collection)
integrity_check(ms_lib, symbol1)
integrity_check(ms_lib, symbol2)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe1)
assert_frame_equal(ms_lib.read_history(symbol2), dataframe2)
def test_append(ms_lib):
ret1 = ms_lib.append(symbol1, None)
assert not ms_lib.has_symbol(symbol1)
assert ret1 is None
ret2 = ms_lib.append(symbol1, metadata1, start_time1)
assert ms_lib.read(symbol1) == metadata1
assert ret2['symbol'] == symbol1
assert ret2['start_time'] == start_time1
assert ret2['metadata'] == metadata1
# ensure writing same metadata does not create new entry
ret3 = ms_lib.append(symbol1, metadata1, start_time2)
assert ms_lib.read(symbol1) == metadata1
assert_frame_equal(ms_lib.read_history(symbol1), dataframe1)
assert ret3 == ret2
ret4 = ms_lib.append(symbol1, metadata2, start_time2)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe3)
assert ret4['metadata'] == metadata2
with pytest.raises(ValueError):
ms_lib.append(symbol1, metadata1, start_time1)
integrity_check(ms_lib, symbol1)
def test_prepend(ms_lib):
ret1 = ms_lib.prepend(symbol1, None)
assert not ms_lib.has_symbol(symbol1)
assert ret1 is None
ret2 = ms_lib.prepend(symbol1, metadata2, start_time2)
assert ms_lib.read(symbol1) == metadata2
assert_frame_equal(ms_lib.read_history(symbol1), dataframe4)
assert ret2['symbol'] == symbol1
assert ret2['start_time'] == start_time2
assert ret2['metadata'] == metadata2
ret3 = ms_lib.prepend(symbol1, metadata1, start_time1)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe3)
assert ret3['metadata'] == metadata1
# ensure writing same metadata does not create new entry
ret4 = ms_lib.prepend(symbol1, metadata1, start_time0)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe5)
ret3['start_time'] = start_time0
assert ret4 == ret3
with pytest.raises(ValueError):
ms_lib.append(symbol1, metadata2, start_time2)
integrity_check(ms_lib, symbol1)
def test_pop(ms_lib):
ms_lib.write_history([pd.DataFrame({symbol1: [metadata1, metadata2]}, [start_time1, start_time2])])
ms_lib.pop(symbol1)
assert_frame_equal(ms_lib.read_history(symbol1), dataframe1)
integrity_check(ms_lib, symbol1)
def test_purge(ms_lib):
ms_lib.write_history([pd.DataFrame({symbol1: [metadata1, metadata2]}, [start_time1, start_time2])])
ms_lib.purge(symbol1)
assert not ms_lib.has_symbol(symbol1)
|
import re
from django.utils.translation import gettext_lazy as _
from weblate.trans.autofixes.base import AutoFix
NEWLINES = re.compile(r"\r\n|\r|\n")
START = re.compile(r"^(\s+)", re.UNICODE)
END = re.compile(r"(\s+)$", re.UNICODE)
class SameBookendingWhitespace(AutoFix):
"""Help non-techy translators with their whitespace."""
fix_id = "end-whitespace"
name = _("Trailing and leading whitespace")
def fix_single_target(self, target, source, unit):
# normalize newlines of source
source = NEWLINES.sub("\n", source)
flags = unit.all_flags
stripped = target
# Capture and strip leading space
if "ignore-begin-space" in flags:
head = ""
else:
start = START.search(source)
head = start.group() if start else ""
stripped = stripped.lstrip()
# Capture and strip trailing space
if "ignore-end-space" in flags:
tail = ""
else:
end = END.search(source)
tail = end.group() if end else ""
stripped = stripped.rstrip()
# add the whitespace around the target translation (ignore blanks)
if stripped:
newtarget = head + stripped + tail
return newtarget, newtarget != target
return target, False
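# Illustrative sketch, not part of the original module: how the module-level
# START/END patterns capture the source's bookending whitespace so it can be
# re-applied to the stripped target. The example strings below are made up.
if __name__ == "__main__":
    source = "  Bonjour le monde\n"
    target = "Hello world"
    head_match = START.search(source)
    tail_match = END.search(source)
    head = head_match.group() if head_match else ""
    tail = tail_match.group() if tail_match else ""
    print(repr(head + target.strip() + tail))  # '  Hello world\n'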
|
from datetime import timedelta
import logging
import shodan
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Shodan"
CONF_QUERY = "query"
DEFAULT_NAME = "Shodan Sensor"
ICON = "mdi:tooltip-text"
SCAN_INTERVAL = timedelta(minutes=15)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_QUERY): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Shodan sensor."""
api_key = config.get(CONF_API_KEY)
name = config.get(CONF_NAME)
query = config.get(CONF_QUERY)
data = ShodanData(shodan.Shodan(api_key), query)
try:
data.update()
except shodan.exception.APIError as error:
_LOGGER.warning("Unable to connect to Shodan.io: %s", error)
return False
add_entities([ShodanSensor(data, name)], True)
class ShodanSensor(Entity):
"""Representation of the Shodan sensor."""
def __init__(self, data, name):
"""Initialize the Shodan sensor."""
self.data = data
self._name = name
self._state = None
self._unit_of_measurement = "Hits"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
self._state = self.data.details["total"]
class ShodanData:
"""Get the latest data and update the states."""
def __init__(self, api, query):
"""Initialize the data object."""
self._api = api
self._query = query
self.details = None
def update(self):
"""Get the latest data from shodan.io."""
self.details = self._api.count(self._query)
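# Illustrative sketch, not part of the original module: ShodanData wraps the
# Shodan count() API. The API key and query below are placeholders, so running
# this requires a valid key.
if __name__ == "__main__":
    data = ShodanData(shodan.Shodan("YOUR_API_KEY"), "port:22 country:DE")
    data.update()
    print(data.details["total"])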
|
import logging
import socket
from typing import Dict
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
import service_configuration_lib
from mypy_extensions import TypedDict
from paasta_tools.utils import BranchDictV2
from paasta_tools.utils import compose_job_id
from paasta_tools.utils import decompose_job_id
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import DeployBlacklist
from paasta_tools.utils import DeployWhitelist
from paasta_tools.utils import InstanceConfig
from paasta_tools.utils import InstanceConfigDict
from paasta_tools.utils import InvalidInstanceConfig
from paasta_tools.utils import InvalidJobNameError
from paasta_tools.utils import SystemPaastaConfig
log = logging.getLogger(__name__)
logging.getLogger("marathon").setLevel(logging.WARNING)
ZK_PAUSE_AUTOSCALE_PATH = "/autoscaling/paused"
DEFAULT_CONTAINER_PORT = 8888
class LongRunningServiceConfigDict(InstanceConfigDict, total=False):
drain_method: str
iam_role: str
iam_role_provider: str
fs_group: int
container_port: int
drain_method_params: Dict
healthcheck_cmd: str
healthcheck_grace_period_seconds: float
healthcheck_interval_seconds: float
healthcheck_max_consecutive_failures: int
healthcheck_mode: str
healthcheck_timeout_seconds: float
healthcheck_uri: str
instances: int
max_instances: int
min_instances: int
nerve_ns: str
network_mode: str
registrations: List[str]
replication_threshold: int
bounce_start_deadline: float
# Defined here to avoid import cycles -- this gets used in bounce_lib and subclassed in marathon_tools.
BounceMethodConfigDict = TypedDict("BounceMethodConfigDict", {"instances": int})
class ServiceNamespaceConfig(dict):
def get_healthcheck_mode(self) -> str:
"""Get the healthcheck mode for the service. In most cases, this will match the mode
of the service, but we do provide the opportunity for users to specify both. Default to the mode
if no healthcheck_mode is specified.
"""
healthcheck_mode = self.get("healthcheck_mode", None)
if not healthcheck_mode:
return self.get_mode()
else:
return healthcheck_mode
def get_mode(self) -> str:
"""Get the mode that the service runs in and check that we support it.
If the mode is not specified, we check whether the service uses smartstack
in order to determine the appropriate default value. If proxy_port is specified
in the config, the service uses smartstack, and we can thus safely assume its mode is http.
If the mode is not defined and the service does not use smartstack, we set the mode to None.
"""
mode = self.get("mode", None)
if mode is None:
if not self.is_in_smartstack():
return None
else:
return "http"
elif mode in ["http", "tcp", "https"]:
return mode
else:
raise InvalidSmartstackMode("Unknown mode: %s" % mode)
def get_healthcheck_uri(self) -> str:
return self.get("healthcheck_uri", "/status")
def get_discover(self) -> str:
return self.get("discover", "region")
def is_in_smartstack(self) -> bool:
if self.get("proxy_port") is not None:
return True
else:
return False
class LongRunningServiceConfig(InstanceConfig):
config_dict: LongRunningServiceConfigDict
def __init__(
self,
service: str,
cluster: str,
instance: str,
config_dict: LongRunningServiceConfigDict,
branch_dict: Optional[BranchDictV2],
soa_dir: str = DEFAULT_SOA_DIR,
) -> None:
super().__init__(
cluster=cluster,
instance=instance,
service=service,
config_dict=config_dict,
branch_dict=branch_dict,
soa_dir=soa_dir,
)
def get_bounce_method(self) -> str:
raise NotImplementedError
def get_kubernetes_namespace(self) -> str:
"""
Only needed on kubernetes LongRunningServiceConfig
"""
raise NotImplementedError
def get_sanitised_deployment_name(self) -> str:
"""
Only needed on kubernetes LongRunningServiceConfig
"""
raise NotImplementedError
def get_service_name_smartstack(self) -> str:
"""
This is just the service name here
For cassandra we have to override this to support apollo
"""
return self.get_service()
def get_env(
self, system_paasta_config: Optional[SystemPaastaConfig] = None
) -> Dict[str, str]:
env = super().get_env(system_paasta_config=system_paasta_config)
env["PAASTA_PORT"] = str(self.get_container_port())
return env
def get_container_port(self) -> int:
return self.config_dict.get("container_port", DEFAULT_CONTAINER_PORT)
def get_drain_method(self, service_namespace_config: ServiceNamespaceConfig) -> str:
"""Get the drain method specified in the service's marathon configuration.
:param service_config: The service instance's configuration dictionary
:returns: The drain method specified in the config, or 'noop' if not specified"""
default = "noop"
# Default to hacheck draining if the service is in smartstack
if service_namespace_config.is_in_smartstack():
default = "hacheck"
return self.config_dict.get("drain_method", default)
def get_drain_method_params(
self, service_namespace_config: ServiceNamespaceConfig
) -> Dict:
"""Get the drain method parameters specified in the service's marathon configuration.
:param service_config: The service instance's configuration dictionary
:returns: The drain_method_params dictionary specified in the config, or {} if not specified"""
default: Dict = {}
if service_namespace_config.is_in_smartstack():
default = {"delay": 60}
return self.config_dict.get("drain_method_params", default)
# FIXME(jlynch|2016-08-02, PAASTA-4964): DEPRECATE nerve_ns and remove it
def get_nerve_namespace(self) -> str:
return decompose_job_id(self.get_registrations()[0])[1]
def get_registrations(self) -> List[str]:
registrations = self.config_dict.get("registrations", [])
for registration in registrations:
try:
decompose_job_id(registration)
except InvalidJobNameError:
log.error(
"Provided registration {} for service "
"{} is invalid".format(registration, self.service)
)
# Backwards compatibility with nerve_ns
# FIXME(jlynch|2016-08-02, PAASTA-4964): DEPRECATE nerve_ns and remove it
if not registrations and "nerve_ns" in self.config_dict:
registrations.append(
compose_job_id(self.service, self.config_dict["nerve_ns"])
)
return registrations or [compose_job_id(self.service, self.instance)]
def get_replication_crit_percentage(self) -> int:
return self.config_dict.get("replication_threshold", 50)
def get_iam_role(self) -> str:
return self.config_dict.get("iam_role", "")
def get_iam_role_provider(self) -> str:
return self.config_dict.get("iam_role_provider", "kiam")
def get_fs_group(self) -> Optional[int]:
return self.config_dict.get("fs_group")
def get_healthcheck_uri(
self, service_namespace_config: ServiceNamespaceConfig
) -> str:
return self.config_dict.get(
"healthcheck_uri", service_namespace_config.get_healthcheck_uri()
)
def get_healthcheck_cmd(self) -> str:
cmd = self.config_dict.get("healthcheck_cmd", None)
if cmd is None:
raise InvalidInstanceConfig(
"healthcheck mode 'cmd' requires a healthcheck_cmd to run"
)
else:
return cmd
def get_healthcheck_grace_period_seconds(self) -> float:
"""
Grace periods indicate different things on kubernetes/marathon: on
marathon, it indicates how long marathon will tolerate failing
healthchecks; on kubernetes, how long before kubernetes will start
sending healthcheck and liveness probes.
"""
return self.config_dict.get("healthcheck_grace_period_seconds", 60)
def get_healthcheck_interval_seconds(self) -> float:
return self.config_dict.get("healthcheck_interval_seconds", 10)
def get_healthcheck_timeout_seconds(self) -> float:
return self.config_dict.get("healthcheck_timeout_seconds", 10)
def get_healthcheck_max_consecutive_failures(self) -> int:
return self.config_dict.get("healthcheck_max_consecutive_failures", 30)
def get_healthcheck_mode(
self, service_namespace_config: ServiceNamespaceConfig
) -> str:
mode = self.config_dict.get("healthcheck_mode", None)
if mode is None:
mode = service_namespace_config.get_healthcheck_mode()
elif mode not in ["http", "https", "tcp", "cmd", None]:
raise InvalidHealthcheckMode("Unknown mode: %s" % mode)
return mode
def get_bounce_start_deadline(self) -> float:
return self.config_dict.get("bounce_start_deadline", 0)
def get_autoscaled_instances(self) -> int:
raise NotImplementedError()
def get_instances(self, with_limit: bool = True) -> int:
"""Gets the number of instances for a service, ignoring whether the user has requested
the service to be started or stopped"""
if self.is_autoscaling_enabled():
autoscaled_instances = self.get_autoscaled_instances()
if autoscaled_instances is None:
return self.get_max_instances()
else:
limited_instances = (
self.limit_instance_count(autoscaled_instances)
if with_limit
else autoscaled_instances
)
return limited_instances
else:
instances = self.config_dict.get("instances", 1)
log.debug("Autoscaling not enabled, returning %d instances" % instances)
return instances
def get_min_instances(self) -> int:
return self.config_dict.get("min_instances", 1)
def is_autoscaling_enabled(self) -> bool:
return self.get_max_instances() is not None
def get_max_instances(self) -> Optional[int]:
return self.config_dict.get("max_instances", None)
def get_desired_instances(self) -> int:
"""Get the number of instances specified in zookeeper or the service's marathon configuration.
If the number of instances in zookeeper is less than min_instances, returns min_instances.
If the number of instances in zookeeper is greater than max_instances, returns max_instances.
Defaults to 0 if not specified in the config.
:returns: The number of instances specified in the config, 0 if not
specified or if desired_state is not 'start'.
"""
if self.get_desired_state() == "start":
return self.get_instances()
else:
log.debug("Instance is set to stop. Returning '0' instances")
return 0
def limit_instance_count(self, instances: int) -> int:
"""
Returns param instances if it is between min_instances and max_instances.
Returns max_instances if instances > max_instances
Returns min_instances if instances < min_instances
"""
return max(self.get_min_instances(), min(self.get_max_instances(), instances))
class InvalidHealthcheckMode(Exception):
pass
def get_healthcheck_for_instance(
service: str,
instance: str,
service_manifest: LongRunningServiceConfig,
random_port: int,
soa_dir: str = DEFAULT_SOA_DIR,
) -> Tuple[Optional[str], Optional[str]]:
"""
Returns healthcheck for a given service instance in the form of a tuple (mode, healthcheck_command)
or (None, None) if no healthcheck
"""
namespace = service_manifest.get_nerve_namespace()
smartstack_config = load_service_namespace_config(
service=service, namespace=namespace, soa_dir=soa_dir
)
mode = service_manifest.get_healthcheck_mode(smartstack_config)
hostname = socket.getfqdn()
if mode == "http" or mode == "https":
path = service_manifest.get_healthcheck_uri(smartstack_config)
healthcheck_command = "%s://%s:%d%s" % (mode, hostname, random_port, path)
elif mode == "tcp":
healthcheck_command = "%s://%s:%d" % (mode, hostname, random_port)
elif mode == "cmd":
healthcheck_command = service_manifest.get_healthcheck_cmd()
else:
mode = None
healthcheck_command = None
return (mode, healthcheck_command)
def load_service_namespace_config(
service: str, namespace: str, soa_dir: str = DEFAULT_SOA_DIR
) -> ServiceNamespaceConfig:
"""Attempt to read the configuration for a service's namespace in a more strict fashion.
Retrieves the following keys:
- proxy_port: the proxy port defined for the given namespace
- healthcheck_mode: the mode for the healthcheck (http or tcp)
- healthcheck_port: An alternate port to use for health checking
- healthcheck_uri: URI target for healthchecking
- healthcheck_timeout_s: healthcheck timeout in seconds
- healthcheck_body_expect: an expected string in healthcheck response body
- updown_timeout_s: updown_service timeout in seconds
- timeout_connect_ms: proxy frontend timeout in milliseconds
- timeout_server_ms: proxy server backend timeout in milliseconds
- timeout_client_ms: proxy server client timeout in milliseconds
- retries: the number of retries on a proxy backend
- mode: the mode the service is run in (http or tcp)
- routes: a list of tuples of (source, destination)
- discover: the scope at which to discover services e.g. 'habitat'
- advertise: a list of scopes to advertise services at e.g. ['habitat', 'region']
- extra_advertise: a list of tuples of (source, destination)
e.g. [('region:dc6-prod', 'region:useast1-prod')]
- extra_healthcheck_headers: a dict of HTTP headers that must
be supplied when health checking. E.g. { 'Host': 'example.com' }
:param service: The service name
:param namespace: The namespace to read
:param soa_dir: The SOA config directory to read from
:returns: A dict of the above keys, if they were defined
"""
smartstack_config = service_configuration_lib.read_extra_service_information(
service_name=service, extra_info="smartstack", soa_dir=soa_dir, deepcopy=False,
)
namespace_config_from_file = smartstack_config.get(namespace, {})
service_namespace_config = ServiceNamespaceConfig()
# We can't really use .get, as we don't want the key to be in the returned
# dict at all if it doesn't exist in the config file.
# We also can't just copy the whole dict, as we only care about some keys
# and there's other things that appear in the smartstack section in
# several cases.
key_whitelist = {
"healthcheck_mode",
"healthcheck_uri",
"healthcheck_port",
"healthcheck_timeout_s",
"healthcheck_body_expect",
"updown_timeout_s",
"proxy_port",
"timeout_connect_ms",
"timeout_server_ms",
"timeout_client_ms",
"retries",
"mode",
"discover",
"advertise",
"extra_healthcheck_headers",
}
for key, value in namespace_config_from_file.items():
if key in key_whitelist:
service_namespace_config[key] = value
# Other code in paasta_tools checks 'mode' after the config file
# is loaded, so this ensures that it is set to the appropriate default
# if not otherwise specified, even if the appropriate default is None.
service_namespace_config["mode"] = service_namespace_config.get_mode()
if "routes" in namespace_config_from_file:
service_namespace_config["routes"] = [
(route["source"], dest)
for route in namespace_config_from_file["routes"]
for dest in route["destinations"]
]
if "extra_advertise" in namespace_config_from_file:
service_namespace_config["extra_advertise"] = [
(src, dst)
for src in namespace_config_from_file["extra_advertise"]
for dst in namespace_config_from_file["extra_advertise"][src]
]
return service_namespace_config
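# Example of the routes/extra_advertise expansion above (values are made up):
# a smartstack entry {"routes": [{"source": "region:a", "destinations": ["region:b", "region:c"]}]}
# becomes [("region:a", "region:b"), ("region:a", "region:c")], and
# {"extra_advertise": {"habitat:x": ["habitat:y"]}} becomes [("habitat:x", "habitat:y")].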
class InvalidSmartstackMode(Exception):
pass
def get_proxy_port_for_instance(
service_config: LongRunningServiceConfig,
) -> Optional[int]:
"""Get the proxy_port defined in the first namespace configuration for a
service instance.
This means that the namespace first has to be loaded from the service instance's
configuration, and then the proxy_port has to be loaded from the smartstack configuration
for that namespace.
:param service_config: The instance of the services LongRunningServiceConfig
:returns: The proxy_port for the service instance, or None if not defined"""
registration = service_config.get_registrations()[0]
service, namespace, _, __ = decompose_job_id(registration)
nerve_dict = load_service_namespace_config(
service=service, namespace=namespace, soa_dir=service_config.soa_dir
)
return nerve_dict.get("proxy_port")
def host_passes_blacklist(
host_attributes: Mapping[str, str], blacklist: DeployBlacklist
) -> bool:
"""
:param host_attributes: A single host's attributes dict
:param blacklist: A list of lists like [["location_type", "location"], ["foo", "bar"]]
:returns: boolean, True if the host passes the blacklist
"""
try:
for location_type, location in blacklist:
if host_attributes.get(location_type) == location:
return False
except ValueError as e:
log.error(f"Errors processing the following blacklist: {blacklist}")
log.error("I will assume the host does not pass\nError was: %s" % e)
return False
return True
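# Example (attributes and blacklist values are illustrative): a host with
# attributes {"region": "uswest1-prod"} fails the blacklist
# [["region", "uswest1-prod"]] and returns False; a host in any other region passes.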
def host_passes_whitelist(
host_attributes: Mapping[str, str], whitelist: DeployWhitelist
) -> bool:
"""
:param host_attributes: A single host's attributes dict.
:param whitelist: A 2 item list like ["location_type", ["location1", 'location2']]
:returns: boolean, True if the host passes the whitelist
"""
# No whitelist, so disable whitelisting behavior.
if whitelist is None or len(whitelist) == 0:
return True
try:
(location_type, locations) = whitelist
if host_attributes.get(location_type) in locations:
return True
except ValueError as e:
log.error(f"Errors processing the following whitelist: {whitelist}")
log.error("I will assume the host does not pass\nError was: %s" % e)
return False
return False
|
import colorsys
import random
from openrazer.client import DeviceManager
from openrazer.client import constants as razer_constants
# Create a DeviceManager. This is used to get specific devices
device_manager = DeviceManager()
print("Found {} Razer devices".format(len(device_manager.devices)))
print()
# Disable daemon effect syncing.
# Without this, the daemon will try to set the lighting effect to every device.
device_manager.sync_effects = False
# List of effects I've chosen to make an example for
effects = [
'breath_random',
'breath_single',
'breath_dual',
'breath_triple',
'reactive',
'spectrum',
'static',
'wave',
]
# Helper function to generate interesting colors
def random_color():
rgb = colorsys.hsv_to_rgb(random.uniform(0, 1), random.uniform(0.5, 1), 1)
return tuple(map(lambda x: int(255 * x), rgb))  # scale by 255 so values stay in the 0-255 RGB byte range
# Iterate over each device and set a random effect that it supports.
for device in device_manager.devices:
# Check which effect this device supports.
device_effects = [effect for effect in effects if device.fx.has(effect)]
# print("{} supports {}".format(device.name, device_effects))
if len(device_effects) == 0:
print("Device {} doesn't support any of the effects".format(device.name))
continue
effect = random.choice(device_effects)
print("Setting {} to effect {}".format(device.name, effect))
# Add an example for each effect
if effect == 'breath_random':
device.fx.breath_random()
elif effect == 'breath_single':
color = random_color()
device.fx.breath_single(color[0], color[1], color[2])
elif effect == 'breath_dual':
color = random_color()
color2 = random_color()
device.fx.breath_dual(color[0], color[1], color[2],
color2[0], color2[1], color2[2])
elif effect == 'breath_triple':
color = random_color()
color2 = random_color()
color3 = random_color()
device.fx.breath_triple(color[0], color[1], color[2],
color2[0], color2[1], color2[2],
color3[0], color3[1], color3[2])
elif effect == 'reactive':
color = random_color()
times = [razer_constants.REACTIVE_500MS, razer_constants.REACTIVE_1000MS,
razer_constants.REACTIVE_1500MS, razer_constants.REACTIVE_2000MS]
device.fx.reactive(color[0], color[1], color[2], random.choice(times))
elif effect == 'spectrum':
device.fx.spectrum()
elif effect == 'static':
color = random_color()
device.fx.static(*color)
elif effect == 'wave':
directions = [razer_constants.WAVE_LEFT, razer_constants.WAVE_RIGHT]
device.fx.wave(random.choice(directions))
|
from enum import Enum
import logging
from proxmoxer import ProxmoxAPI
from proxmoxer.backends.https import AuthenticationError
from requests.exceptions import SSLError
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "proxmoxve"
PROXMOX_CLIENTS = "proxmox_clients"
CONF_REALM = "realm"
CONF_NODE = "node"
CONF_NODES = "nodes"
CONF_VMS = "vms"
CONF_CONTAINERS = "containers"
DEFAULT_PORT = 8006
DEFAULT_REALM = "pam"
DEFAULT_VERIFY_SSL = True
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_REALM, default=DEFAULT_REALM): cv.string,
vol.Optional(
CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL
): cv.boolean,
vol.Required(CONF_NODES): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_NODE): cv.string,
vol.Optional(CONF_VMS, default=[]): [
cv.positive_int
],
vol.Optional(CONF_CONTAINERS, default=[]): [
cv.positive_int
],
}
)
],
),
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the component."""
# Create API Clients for later use
hass.data[PROXMOX_CLIENTS] = {}
for entry in config[DOMAIN]:
host = entry[CONF_HOST]
port = entry[CONF_PORT]
user = entry[CONF_USERNAME]
realm = entry[CONF_REALM]
password = entry[CONF_PASSWORD]
verify_ssl = entry[CONF_VERIFY_SSL]
try:
# Construct an API client with the given data for the given host
proxmox_client = ProxmoxClient(
host, port, user, realm, password, verify_ssl
)
proxmox_client.build_client()
except AuthenticationError:
_LOGGER.warning(
"Invalid credentials for proxmox instance %s:%d", host, port
)
continue
except SSLError:
_LOGGER.error(
'Unable to verify proxmox server SSL. Try using "verify_ssl: false"'
)
continue
hass.data[PROXMOX_CLIENTS][f"{host}:{port}"] = proxmox_client
if hass.data[PROXMOX_CLIENTS]:
hass.helpers.discovery.load_platform(
"binary_sensor", DOMAIN, {"entries": config[DOMAIN]}, config
)
return True
return False
class ProxmoxItemType(Enum):
"""Represents the different types of machines in Proxmox."""
qemu = 0
lxc = 1
class ProxmoxClient:
"""A wrapper for the proxmoxer ProxmoxAPI client."""
def __init__(self, host, port, user, realm, password, verify_ssl):
"""Initialize the ProxmoxClient."""
self._host = host
self._port = port
self._user = user
self._realm = realm
self._password = password
self._verify_ssl = verify_ssl
self._proxmox = None
self._connection_start_time = None
def build_client(self):
"""Construct the ProxmoxAPI client. Allows inserting the realm within the `user` value."""
if "@" in self._user:
user_id = self._user
else:
user_id = f"{self._user}@{self._realm}"
self._proxmox = ProxmoxAPI(
self._host,
port=self._port,
user=user_id,
password=self._password,
verify_ssl=self._verify_ssl,
)
def get_api_client(self):
"""Return the ProxmoxAPI client."""
return self._proxmox
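# Minimal usage sketch (host, user and password below are placeholders):
#   client = ProxmoxClient("pve.example.com", 8006, "monitor", "pam", "secret", True)
#   client.build_client()          # authenticates as "monitor@pam"
#   api = client.get_api_client()  # proxmoxer.ProxmoxAPI instance for raw API calls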
|
import os
import numpy as np
import pandas as pd
import pytest
from yandextank.common.util import get_test_path
MAX_TS = 1000
def random_split(df):
i = 0
while True:
step = np.random.randint(500, 1200)
if i + step < len(df):
yield df.loc[i:i + step - 1]
i += step
else:
yield df.loc[i:]
break
@pytest.fixture
def data():
df = pd.read_csv(os.path.join(get_test_path(), 'yandextank/aggregator/tests/data.csv'), delimiter=',', index_col=0)
return df
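# Usage sketch for random_split (assumes a DataFrame with a default RangeIndex;
# "data_frame" is a hypothetical name):
#   chunks = random_split(data_frame)
#   first = next(chunks)  # a contiguous slice of roughly 500-1199 rows starting at index 0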
|
import logging
from typing import Optional
from urllib.parse import urlparse
from songpal import Device, SongpalException
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.core import callback
from .const import CONF_ENDPOINT, DOMAIN # pylint: disable=unused-import
_LOGGER = logging.getLogger(__name__)
class SongpalConfig:
"""Device Configuration."""
def __init__(self, name, host, endpoint):
"""Initialize Configuration."""
self.name = name
self.host = host
self.endpoint = endpoint
class SongpalConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Songpal configuration flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the flow."""
self.conf: Optional[SongpalConfig] = None
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
if user_input is None:
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required(CONF_ENDPOINT): str}),
)
# Validate input
endpoint = user_input[CONF_ENDPOINT]
parsed_url = urlparse(endpoint)
# Try to connect and get device name
try:
device = Device(endpoint)
await device.get_supported_methods()
interface_info = await device.get_interface_information()
name = interface_info.modelName
except SongpalException as ex:
_LOGGER.debug("Connection failed: %s", ex)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_ENDPOINT, default=user_input.get(CONF_ENDPOINT, "")
): str,
}
),
errors={"base": "cannot_connect"},
)
self.conf = SongpalConfig(name, parsed_url.hostname, endpoint)
return await self.async_step_init(user_input)
async def async_step_init(self, user_input=None):
"""Handle a flow start."""
# Check if already configured
if self._async_endpoint_already_configured():
return self.async_abort(reason="already_configured")
if user_input is None:
return self.async_show_form(
step_id="init",
description_placeholders={
CONF_NAME: self.conf.name,
CONF_HOST: self.conf.host,
},
)
await self.async_set_unique_id(self.conf.endpoint)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=self.conf.name,
data={CONF_NAME: self.conf.name, CONF_ENDPOINT: self.conf.endpoint},
)
async def async_step_ssdp(self, discovery_info):
"""Handle a discovered Songpal device."""
await self.async_set_unique_id(discovery_info[ssdp.ATTR_UPNP_UDN])
self._abort_if_unique_id_configured()
_LOGGER.debug("Discovered: %s", discovery_info)
friendly_name = discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME]
parsed_url = urlparse(discovery_info[ssdp.ATTR_SSDP_LOCATION])
scalarweb_info = discovery_info["X_ScalarWebAPI_DeviceInfo"]
endpoint = scalarweb_info["X_ScalarWebAPI_BaseURL"]
service_types = scalarweb_info["X_ScalarWebAPI_ServiceList"][
"X_ScalarWebAPI_ServiceType"
]
# Ignore Bravia TVs
if "videoScreen" in service_types:
return self.async_abort(reason="not_songpal_device")
# pylint: disable=no-member
self.context["title_placeholders"] = {
CONF_NAME: friendly_name,
CONF_HOST: parsed_url.hostname,
}
self.conf = SongpalConfig(friendly_name, parsed_url.hostname, endpoint)
return await self.async_step_init()
async def async_step_import(self, user_input=None):
"""Import a config entry."""
name = user_input.get(CONF_NAME)
endpoint = user_input.get(CONF_ENDPOINT)
parsed_url = urlparse(endpoint)
# Try to connect to test the endpoint
try:
device = Device(endpoint)
await device.get_supported_methods()
# Get name
if name is None:
interface_info = await device.get_interface_information()
name = interface_info.modelName
except SongpalException as ex:
_LOGGER.error("Import from yaml configuration failed: %s", ex)
return self.async_abort(reason="cannot_connect")
self.conf = SongpalConfig(name, parsed_url.hostname, endpoint)
return await self.async_step_init(user_input)
@callback
def _async_endpoint_already_configured(self):
"""See if we already have an endpoint matching user input configured."""
for entry in self._async_current_entries():
if entry.data.get(CONF_ENDPOINT) == self.conf.endpoint:
return True
return False
|
import asyncio
from datetime import timedelta
from itertools import chain
import logging
from types import ModuleType
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
import voluptuous as vol
from homeassistant import config as conf_util
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ENTITY_NAMESPACE, CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
config_per_platform,
config_validation as cv,
discovery,
entity,
service,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.setup import async_prepare_setup_platform
from .entity_platform import EntityPlatform
DEFAULT_SCAN_INTERVAL = timedelta(seconds=15)
DATA_INSTANCES = "entity_components"
@bind_hass
async def async_update_entity(hass: HomeAssistant, entity_id: str) -> None:
"""Trigger an update for an entity."""
domain = entity_id.split(".", 1)[0]
entity_comp = hass.data.get(DATA_INSTANCES, {}).get(domain)
if entity_comp is None:
logging.getLogger(__name__).warning(
"Forced update failed. Component for %s not loaded.", entity_id
)
return
entity_obj = entity_comp.get_entity(entity_id)
if entity_obj is None:
logging.getLogger(__name__).warning(
"Forced update failed. Entity %s not found.", entity_id
)
return
await entity_obj.async_update_ha_state(True)
class EntityComponent:
"""The EntityComponent manages platforms that manages entities.
This class has the following responsibilities:
- Process the configuration and set up a platform based component.
- Manage the platforms and their entities.
- Help extract the entities from a service call.
- Listen for discovery events for platforms related to the domain.
"""
def __init__(
self,
logger: logging.Logger,
domain: str,
hass: HomeAssistant,
scan_interval: timedelta = DEFAULT_SCAN_INTERVAL,
):
"""Initialize an entity component."""
self.logger = logger
self.hass = hass
self.domain = domain
self.scan_interval = scan_interval
self.config: Optional[ConfigType] = None
self._platforms: Dict[
Union[str, Tuple[str, Optional[timedelta], Optional[str]]], EntityPlatform
] = {domain: self._async_init_entity_platform(domain, None)}
self.async_add_entities = self._platforms[domain].async_add_entities
self.add_entities = self._platforms[domain].add_entities
hass.data.setdefault(DATA_INSTANCES, {})[domain] = self
@property
def entities(self) -> Iterable[entity.Entity]:
"""Return an iterable that returns all entities."""
return chain.from_iterable(
platform.entities.values() for platform in self._platforms.values()
)
def get_entity(self, entity_id: str) -> Optional[entity.Entity]:
"""Get an entity."""
for platform in self._platforms.values():
entity_obj = platform.entities.get(entity_id)
if entity_obj is not None:
return entity_obj
return None
def setup(self, config: ConfigType) -> None:
"""Set up a full entity component.
This doesn't block the executor, in order to avoid deadlocks.
"""
self.hass.add_job(self.async_setup(config)) # type: ignore
async def async_setup(self, config: ConfigType) -> None:
"""Set up a full entity component.
Loads the platforms from the config and will listen for supported
discovered platforms.
This method must be run in the event loop.
"""
self.config = config
# Look in config for Domain, Domain 2, Domain 3 etc and load them
for p_type, p_config in config_per_platform(config, self.domain):
self.hass.async_create_task(self.async_setup_platform(p_type, p_config))
# Generic discovery listener for loading platform dynamically
# Refer to: homeassistant.helpers.discovery.async_load_platform()
async def component_platform_discovered(
platform: str, info: Optional[Dict[str, Any]]
) -> None:
"""Handle the loading of a platform."""
await self.async_setup_platform(platform, {}, info)
discovery.async_listen_platform(
self.hass, self.domain, component_platform_discovered
)
async def async_setup_entry(self, config_entry: ConfigEntry) -> bool:
"""Set up a config entry."""
platform_type = config_entry.domain
platform = await async_prepare_setup_platform(
self.hass,
# In a future PR we should make hass_config part of the constructor
# params.
self.config or {},
self.domain,
platform_type,
)
if platform is None:
return False
key = config_entry.entry_id
if key in self._platforms:
raise ValueError("Config entry has already been setup!")
self._platforms[key] = self._async_init_entity_platform(
platform_type,
platform,
scan_interval=getattr(platform, "SCAN_INTERVAL", None),
)
return await self._platforms[key].async_setup_entry(config_entry)
async def async_unload_entry(self, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
key = config_entry.entry_id
platform = self._platforms.pop(key, None)
if platform is None:
raise ValueError("Config entry was never loaded!")
await platform.async_reset()
return True
async def async_extract_from_service(
self, service_call: ServiceCall, expand_group: bool = True
) -> List[entity.Entity]:
"""Extract all known and available entities from a service call.
Will return an empty list if entities specified but unknown.
This method must be run in the event loop.
"""
return await service.async_extract_entities(
self.hass, self.entities, service_call, expand_group
)
@callback
def async_register_entity_service(
self,
name: str,
schema: Union[Dict[str, Any], vol.Schema],
func: str,
required_features: Optional[List[int]] = None,
) -> None:
"""Register an entity service."""
if isinstance(schema, dict):
schema = cv.make_entity_service_schema(schema)
async def handle_service(call: Callable) -> None:
"""Handle the service."""
await self.hass.helpers.service.entity_service_call(
self._platforms.values(), func, call, required_features
)
self.hass.services.async_register(self.domain, name, handle_service, schema)
async def async_setup_platform(
self,
platform_type: str,
platform_config: ConfigType,
discovery_info: Optional[DiscoveryInfoType] = None,
) -> None:
"""Set up a platform for this component."""
if self.config is None:
raise RuntimeError("async_setup needs to be called first")
platform = await async_prepare_setup_platform(
self.hass, self.config, self.domain, platform_type
)
if platform is None:
return
# Use config scan interval, fallback to platform if none set
scan_interval = platform_config.get(
CONF_SCAN_INTERVAL, getattr(platform, "SCAN_INTERVAL", None)
)
entity_namespace = platform_config.get(CONF_ENTITY_NAMESPACE)
key = (platform_type, scan_interval, entity_namespace)
if key not in self._platforms:
self._platforms[key] = self._async_init_entity_platform(
platform_type, platform, scan_interval, entity_namespace
)
await self._platforms[key].async_setup( # type: ignore
platform_config, discovery_info
)
async def _async_reset(self) -> None:
"""Remove entities and reset the entity component to initial values.
This method must be run in the event loop.
"""
tasks = []
for key, platform in self._platforms.items():
if key == self.domain:
tasks.append(platform.async_reset())
else:
tasks.append(platform.async_destroy())
if tasks:
await asyncio.gather(*tasks)
self._platforms = {self.domain: self._platforms[self.domain]}
self.config = None
async def async_remove_entity(self, entity_id: str) -> None:
"""Remove an entity managed by one of the platforms."""
found = None
for platform in self._platforms.values():
if entity_id in platform.entities:
found = platform
break
if found:
await found.async_remove_entity(entity_id)
async def async_prepare_reload(self, *, skip_reset: bool = False) -> Optional[dict]:
"""Prepare reloading this entity component.
This method must be run in the event loop.
"""
try:
conf = await conf_util.async_hass_config_yaml(self.hass)
except HomeAssistantError as err:
self.logger.error(err)
return None
integration = await async_get_integration(self.hass, self.domain)
processed_conf = await conf_util.async_process_component_config(
self.hass, conf, integration
)
if processed_conf is None:
return None
if not skip_reset:
await self._async_reset()
return processed_conf
@callback
def _async_init_entity_platform(
self,
platform_type: str,
platform: Optional[ModuleType],
scan_interval: Optional[timedelta] = None,
entity_namespace: Optional[str] = None,
) -> EntityPlatform:
"""Initialize an entity platform."""
if scan_interval is None:
scan_interval = self.scan_interval
return EntityPlatform(
hass=self.hass,
logger=self.logger,
domain=self.domain,
platform_name=platform_type,
platform=platform,
scan_interval=scan_interval,
entity_namespace=entity_namespace,
)
|
from datetime import timedelta
from homeassistant.components import mikrotik
import homeassistant.components.device_tracker as device_tracker
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import DEVICE_2_WIRELESS, DHCP_DATA, MOCK_DATA, MOCK_OPTIONS, WIRELESS_DATA
from .test_hub import setup_mikrotik_entry
from tests.common import MockConfigEntry, patch
DEFAULT_DETECTION_TIME = timedelta(seconds=300)
def mock_command(self, cmd, params=None):
"""Mock the Mikrotik command method."""
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.IS_WIRELESS]:
return True
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.DHCP]:
return DHCP_DATA
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.WIRELESS]:
return WIRELESS_DATA
return {}
async def test_platform_manually_configured(hass):
"""Test that nothing happens when configuring mikrotik through device tracker platform."""
assert (
await async_setup_component(
hass,
device_tracker.DOMAIN,
{device_tracker.DOMAIN: {"platform": "mikrotik"}},
)
is False
)
assert mikrotik.DOMAIN not in hass.data
async def test_device_trackers(hass, legacy_patchable_time):
"""Test device_trackers created by mikrotik."""
# test devices are added from wireless list only
hub = await setup_mikrotik_entry(hass)
device_1 = hass.states.get("device_tracker.device_1")
assert device_1 is not None
assert device_1.state == "home"
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is None
with patch.object(mikrotik.hub.MikrotikData, "command", new=mock_command):
# test device_2 is added after connecting to wireless network
WIRELESS_DATA.append(DEVICE_2_WIRELESS)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is not None
assert device_2.state == "home"
# test state remains home if last_seen is within consider_home_interval
del WIRELESS_DATA[1] # device 2 is removed from wireless list
hub.api.devices["00:00:00:00:00:02"]._last_seen = dt_util.utcnow() - timedelta(
minutes=4
)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2.state != "not_home"
# test state changes to away if last_seen > consider_home_interval
hub.api.devices["00:00:00:00:00:02"]._last_seen = dt_util.utcnow() - timedelta(
minutes=5
)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2.state == "not_home"
async def test_restoring_devices(hass):
"""Test restoring existing device_tracker entities if not detected on startup."""
config_entry = MockConfigEntry(
domain=mikrotik.DOMAIN, data=MOCK_DATA, options=MOCK_OPTIONS
)
config_entry.add_to_hass(hass)
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
device_tracker.DOMAIN,
mikrotik.DOMAIN,
"00:00:00:00:00:01",
suggested_object_id="device_1",
config_entry=config_entry,
)
registry.async_get_or_create(
device_tracker.DOMAIN,
mikrotik.DOMAIN,
"00:00:00:00:00:02",
suggested_object_id="device_2",
config_entry=config_entry,
)
await setup_mikrotik_entry(hass)
# test device_2, which is not in the wireless list, is restored
device_1 = hass.states.get("device_tracker.device_1")
assert device_1 is not None
assert device_1.state == "home"
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is not None
assert device_2.state == "not_home"
|
import diamond.collector
from diamond.collector import str_to_bool
from subprocess import Popen, PIPE
import os
import getpass
class IPMISensorCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(IPMISensorCollector,
self).get_default_config_help()
config_help.update({
'bin': 'Path to the ipmitool binary',
'use_sudo': 'Use sudo?',
'sudo_cmd': 'Path to sudo',
'thresholds': 'Collect thresholds as well as reading',
'delimiter': 'Parse blanks in sensor names into a delimiter'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(IPMISensorCollector, self).get_default_config()
config.update({
'bin': '/usr/bin/ipmitool',
'use_sudo': False,
'sudo_cmd': '/usr/bin/sudo',
'path': 'ipmi.sensors',
'thresholds': False,
'delimiter': '.'
})
return config
def parse_value(self, value):
"""
Convert value string to float for reporting
"""
value = value.strip()
# Skip missing sensors
if value == 'na':
return None
# Try just getting the float value
try:
return float(value)
except (TypeError, ValueError):
pass
# Next best guess is a hex value
try:
return float.fromhex(value)
except (TypeError, ValueError):
pass
# No luck, bail
return None
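# Examples of parse_value behaviour (inputs are illustrative): "na" -> None,
# " 42.500 " -> 42.5, "0x1a" -> 26.0, and anything unparsable -> None.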
def collect(self):
use_sudo = str_to_bool(self.config['use_sudo'])
if ((not os.access(self.config['bin'], os.X_OK) or
(use_sudo and
not os.access(self.config['sudo_cmd'], os.X_OK)))):
return False
command = [self.config['bin'], 'sensor']
if use_sudo and getpass.getuser() != 'root':
command.insert(0, self.config['sudo_cmd'])
p = Popen(command, stdout=PIPE).communicate()[0][:-1]
for i, v in enumerate(p.split("\n")):
data = v.split("|")
try:
# Complex keys are fun!
metric_name = data[0].strip()
metric_name = metric_name.replace(".", "_")
metric_name = metric_name.replace(" ",
self.config['delimiter'])
metrics = []
# Each sensor line is a set of columns separated by a | with the
# following fields (see the example line after this list):
# 1. Sensor ID
# 2. Sensor Reading
# 3. Units
# 4. Status
# 5. Lower Non-Recoverable
# 6. Lower Critical
# 7. Lower Non-Critical
# 8. Upper Non-Critical
# 9. Upper Critical
# 10. Upper Non-Recoverable
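# An illustrative ipmitool sensor line (values made up):
#   CPU Temp | 45.000 | degrees C | ok | na | na | na | 90.000 | 95.000 | na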
if not self.config['thresholds']:
metrics.append((metric_name, self.parse_value(data[1])))
else:
metrics.append((metric_name + ".Reading",
self.parse_value(data[1])))
metrics.append((metric_name + ".Lower.NonRecoverable",
self.parse_value(data[4])))
metrics.append((metric_name + ".Lower.Critical",
self.parse_value(data[5])))
metrics.append((metric_name + ".Lower.NonCritical",
self.parse_value(data[6])))
metrics.append((metric_name + ".Upper.NonCritical",
self.parse_value(data[7])))
metrics.append((metric_name + ".Upper.Critical",
self.parse_value(data[8])))
metrics.append((metric_name + ".Upper.NonRecoverable",
self.parse_value(data[9])))
[self.publish(name, value)
for (name, value) in metrics
if value is not None]
except ValueError:
continue
except IndexError:
continue
return True
|
from omnilogic import LoginException, OmniLogicException
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.omnilogic.const import DOMAIN
from tests.async_mock import patch
from tests.common import MockConfigEntry
DATA = {"username": "test-username", "password": "test-password"}
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.omnilogic.config_flow.OmniLogic.connect",
return_value=True,
), patch(
"homeassistant.components.omnilogic.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.omnilogic.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
DATA,
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Omnilogic"
assert result2["data"] == DATA
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_already_configured(hass):
"""Test config flow when Omnilogic component is already setup."""
MockConfigEntry(domain="omnilogic", data=DATA).add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "abort"
assert result["reason"] == "single_instance_allowed"
async def test_with_invalid_credentials(hass):
"""Test with invalid credentials."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.omnilogic.OmniLogic.connect",
side_effect=LoginException,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
DATA,
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test if invalid response or no connection returned from Hayward."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.omnilogic.OmniLogic.connect",
side_effect=OmniLogicException,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
DATA,
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"}
async def test_with_unknown_error(hass):
"""Test with unknown error response from Hayward."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.omnilogic.OmniLogic.connect",
side_effect=Exception,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
DATA,
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {"base": "unknown"}
async def test_option_flow(hass):
"""Test option flow."""
entry = MockConfigEntry(domain=DOMAIN, data=DATA)
entry.add_to_hass(hass)
assert not entry.options
with patch(
"homeassistant.components.omnilogic.async_setup_entry", return_value=True
):
result = await hass.config_entries.options.async_init(
entry.entry_id,
data=None,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"polling_interval": 9},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == ""
assert result["data"]["polling_interval"] == 9
|
import os
import tensorflow as tf
slim = tf.contrib.slim
def get_split(split_name, dataset_dir, file_pattern, reader, split_to_sizes,
items_to_descriptions, num_classes):
"""Gets a dataset tuple with instructions for reading KITTI dataset.
Args:
split_name: A train/val split name.
dataset_dir: The base directory of the dataset sources.
file_pattern: The file pattern to use when matching the dataset sources.
It is assumed that the pattern contains a '%s' string so that the split
name can be inserted.
reader: The TensorFlow reader type.
split_to_sizes: A mapping from split name to the number of samples in that split.
items_to_descriptions: A dict describing the items the dataset provides.
num_classes: The number of object classes in the dataset.
Returns:
A `Dataset` namedtuple.
Raises:
ValueError: if `split_name` is not a valid train/val split.
"""
if split_name not in ['train', 'val']:
raise ValueError('split name %s was not recognized.' % split_name)
file_pattern = os.path.join(dataset_dir, file_pattern % split_name)
# Allowing None in the signature so that dataset_factory can use the default.
if reader is None:
reader = tf.TFRecordReader
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='png'),
'image/height': tf.FixedLenFeature([1], tf.int64),
'image/width': tf.FixedLenFeature([1], tf.int64),
'image/channels': tf.FixedLenFeature([1], tf.int64),
'image/shape': tf.FixedLenFeature([3], tf.int64),
'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/label': tf.VarLenFeature(dtype=tf.int64),
}
items_to_handlers = {
'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),
'shape': slim.tfexample_decoder.Tensor('image/shape'),
'object/bbox': slim.tfexample_decoder.BoundingBox(
['ymin', 'xmin', 'ymax', 'xmax'], 'image/object/bbox/'),
'object/label': slim.tfexample_decoder.Tensor('image/object/bbox/label'),
}
decoder = slim.tfexample_decoder.TFExampleDecoder(
keys_to_features, items_to_handlers)
labels_to_names = {0: "Pedestrian",
1: "Cyclist",
2: "Car"}
return slim.dataset.Dataset(
data_sources=file_pattern,
reader=reader,
decoder=decoder,
num_samples=split_to_sizes[split_name],
items_to_descriptions=items_to_descriptions,
num_classes=num_classes,
labels_to_names=labels_to_names)
|
from molecule import logger
from molecule.driver import base
from molecule import util
log = logger.get_logger(__name__)
class DigitalOcean(base.Base):
"""
This class is responsible for managing `DigitalOcean`_ instances.
`DigitalOcean`_ is **not** the default driver used in Molecule.
Molecule leverages Ansible's `digital_ocean_module`_, by mapping variables
from ``molecule.yml`` into ``create.yml`` and ``destroy.yml``.
.. _`digital_ocean_module`: https://docs.ansible.com/ansible/latest/modules/digital_ocean_module.html#digital-ocean-module
.. code-block:: yaml
driver:
name: digitalocean
platforms:
- name: instance
.. code-block:: bash
$ pip install 'molecule[digitalocean]'
Change the options passed to the ssh client.
.. code-block:: yaml
driver:
name: digitalocean
ssh_connection_options:
-o ControlPath=~/.ansible/cp/%r@%h-%p
.. important::
Molecule does not merge lists; when overriding, the developer must
provide all options.
Provide the files Molecule will preserve upon each subcommand execution.
.. code-block:: yaml
driver:
name: digitalocean
safe_files:
- foo
.. _`DigitalOcean`: https://www.digitalocean.com
""" # noqa
def __init__(self, config):
super(DigitalOcean, self).__init__(config)
self._name = 'digitalocean'
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def login_cmd_template(self):
connection_options = ' '.join(self.ssh_connection_options)
return ('ssh {{address}} '
'-l {{user}} '
'-p {{port}} '
'-i {{identity_file}} '
'{}').format(connection_options)
@property
def default_safe_files(self):
return [
self.instance_config,
]
@property
def default_ssh_connection_options(self):
return self._get_ssh_connection_options()
def login_options(self, instance_name):
d = {'instance': instance_name}
return util.merge_dicts(d, self._get_instance_config(instance_name))
def ansible_connection_options(self, instance_name):
try:
d = self._get_instance_config(instance_name)
return {
'ansible_user': d['user'],
'ansible_host': d['address'],
'ansible_port': d['port'],
'ansible_private_key_file': d['identity_file'],
'connection': 'ssh',
'ansible_ssh_common_args':
' '.join(self.ssh_connection_options),
}
except StopIteration:
return {}
except IOError:
# Instance has yet to be provisioned, therefore the
# instance_config is not on disk.
return {}
def _get_instance_config(self, instance_name):
instance_config_dict = util.safe_load_file(
self._config.driver.instance_config)
return next(item for item in instance_config_dict
if item['instance'] == instance_name)
def sanity_checks(self):
# FIXME(decentral1se): Implement sanity checks
pass
|
import argparse
import glob
import os
import sys
def parse_args():
parser = argparse.ArgumentParser(description="Get charging status")
parser.add_argument('-d', '--device', type=str, help="Device string like \"0003:1532:0045.000C\"")
args = parser.parse_args()
return args
def run():
args = parse_args()
if args.device is None:
mouse_dirs = glob.glob(os.path.join('/sys/bus/hid/drivers/razermouse/', "*:*:*.*"))
if len(mouse_dirs) > 1:
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
if len(mouse_dirs) < 1:
print("No mouse directories found. Make sure the driver is binded", file=sys.stderr)
sys.exit(1)
mouse_dir = mouse_dirs[0]
else:
mouse_dir = os.path.join('/sys/bus/hid/drivers/razermouse/', args.device)
if not os.path.isdir(mouse_dir):
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
is_charging_filepath = os.path.join(mouse_dir, "is_charging")
with open(is_charging_filepath, 'r') as is_charging_file:
try:
is_charging = int(is_charging_file.read().strip())
if is_charging:
print("charging")
else:
print("not charging")
except ValueError as ex:
print("Failed to get charging status.\n{0}".format(ex), file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
run()
|
from homeassistant.components.air_quality import (
ATTR_AQI,
ATTR_PM_2_5,
ATTR_PM_10,
AirQualityEntity,
)
from homeassistant.const import CONF_NAME
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_API_ADVICE,
ATTR_API_CAQI,
ATTR_API_CAQI_DESCRIPTION,
ATTR_API_CAQI_LEVEL,
ATTR_API_PM10,
ATTR_API_PM10_LIMIT,
ATTR_API_PM10_PERCENT,
ATTR_API_PM25,
ATTR_API_PM25_LIMIT,
ATTR_API_PM25_PERCENT,
DEFAULT_NAME,
DOMAIN,
MANUFACTURER,
)
ATTRIBUTION = "Data provided by Airly"
LABEL_ADVICE = "advice"
LABEL_AQI_DESCRIPTION = f"{ATTR_AQI}_description"
LABEL_AQI_LEVEL = f"{ATTR_AQI}_level"
LABEL_PM_2_5_LIMIT = f"{ATTR_PM_2_5}_limit"
LABEL_PM_2_5_PERCENT = f"{ATTR_PM_2_5}_percent_of_limit"
LABEL_PM_10_LIMIT = f"{ATTR_PM_10}_limit"
LABEL_PM_10_PERCENT = f"{ATTR_PM_10}_percent_of_limit"
PARALLEL_UPDATES = 1
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Airly air_quality entity based on a config entry."""
name = config_entry.data[CONF_NAME]
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([AirlyAirQuality(coordinator, name)], False)
def round_state(func):
"""Round state."""
def _decorator(self):
res = func(self)
if isinstance(res, float):
return round(res)
return res
return _decorator
class AirlyAirQuality(CoordinatorEntity, AirQualityEntity):
"""Define an Airly air quality."""
def __init__(self, coordinator, name):
"""Initialize."""
super().__init__(coordinator)
self._name = name
self._icon = "mdi:blur"
@property
def name(self):
"""Return the name."""
return self._name
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
@round_state
def air_quality_index(self):
"""Return the air quality index."""
return self.coordinator.data[ATTR_API_CAQI]
@property
@round_state
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self.coordinator.data[ATTR_API_PM25]
@property
@round_state
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self.coordinator.data[ATTR_API_PM10]
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{self.coordinator.latitude}-{self.coordinator.longitude}"
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {
(DOMAIN, self.coordinator.latitude, self.coordinator.longitude)
},
"name": DEFAULT_NAME,
"manufacturer": MANUFACTURER,
"entry_type": "service",
}
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
LABEL_AQI_DESCRIPTION: self.coordinator.data[ATTR_API_CAQI_DESCRIPTION],
LABEL_ADVICE: self.coordinator.data[ATTR_API_ADVICE],
LABEL_AQI_LEVEL: self.coordinator.data[ATTR_API_CAQI_LEVEL],
LABEL_PM_2_5_LIMIT: self.coordinator.data[ATTR_API_PM25_LIMIT],
LABEL_PM_2_5_PERCENT: round(self.coordinator.data[ATTR_API_PM25_PERCENT]),
LABEL_PM_10_LIMIT: self.coordinator.data[ATTR_API_PM10_LIMIT],
LABEL_PM_10_PERCENT: round(self.coordinator.data[ATTR_API_PM10_PERCENT]),
}
|
from dynalite_devices_lib.switch import DynalitePresetSwitchDevice
import pytest
from homeassistant.const import ATTR_FRIENDLY_NAME
from .common import (
ATTR_METHOD,
ATTR_SERVICE,
create_entity_from_device,
create_mock_device,
run_service_tests,
)
@pytest.fixture
def mock_device():
"""Mock a Dynalite device."""
return create_mock_device("switch", DynalitePresetSwitchDevice)
async def test_switch_setup(hass, mock_device):
"""Test a successful setup."""
await create_entity_from_device(hass, mock_device)
entity_state = hass.states.get("switch.name")
assert entity_state.attributes[ATTR_FRIENDLY_NAME] == mock_device.name
await run_service_tests(
hass,
mock_device,
"switch",
[
{ATTR_SERVICE: "turn_on", ATTR_METHOD: "async_turn_on"},
{ATTR_SERVICE: "turn_off", ATTR_METHOD: "async_turn_off"},
],
)
|
import errno
import pytest
from unittest.mock import Mock, call, patch
from vine import promise
from kombu.asynchronous import hub as _hub
from kombu.asynchronous import Hub, READ, WRITE, ERR
from kombu.asynchronous.debug import callback_for, repr_flag, _rcb
from kombu.asynchronous.hub import (
Stop, get_event_loop, set_event_loop,
_raise_stop_error, _dummy_context
)
from kombu.asynchronous.semaphore import DummyLock, LaxBoundedSemaphore
class File:
def __init__(self, fd):
self.fd = fd
def fileno(self):
return self.fd
def __eq__(self, other):
if isinstance(other, File):
return self.fd == other.fd
return NotImplemented
def __hash__(self):
return hash(self.fd)
def test_DummyLock():
with DummyLock():
pass
class test_LaxBoundedSemaphore:
def test_acquire_release(self):
x = LaxBoundedSemaphore(2)
c1 = Mock()
x.acquire(c1, 1)
assert x.value == 1
c1.assert_called_with(1)
c2 = Mock()
x.acquire(c2, 2)
assert x.value == 0
c2.assert_called_with(2)
c3 = Mock()
x.acquire(c3, 3)
assert x.value == 0
c3.assert_not_called()
x.release()
assert x.value == 0
x.release()
assert x.value == 1
x.release()
assert x.value == 2
c3.assert_called_with(3)
def test_repr(self):
assert repr(LaxBoundedSemaphore(2))
def test_bounded(self):
x = LaxBoundedSemaphore(2)
for i in range(100):
x.release()
assert x.value == 2
def test_grow_shrink(self):
x = LaxBoundedSemaphore(1)
assert x.initial_value == 1
cb1 = Mock()
x.acquire(cb1, 1)
cb1.assert_called_with(1)
assert x.value == 0
cb2 = Mock()
x.acquire(cb2, 2)
cb2.assert_not_called()
assert x.value == 0
cb3 = Mock()
x.acquire(cb3, 3)
cb3.assert_not_called()
x.grow(2)
cb2.assert_called_with(2)
cb3.assert_called_with(3)
assert x.value == 2
assert x.initial_value == 3
assert not x._waiting
x.grow(3)
for i in range(x.initial_value):
assert x.acquire(Mock())
assert not x.acquire(Mock())
x.clear()
x.shrink(3)
for i in range(x.initial_value):
assert x.acquire(Mock())
assert not x.acquire(Mock())
assert x.value == 0
for i in range(100):
x.release()
assert x.value == x.initial_value
def test_clear(self):
x = LaxBoundedSemaphore(10)
for i in range(11):
x.acquire(Mock())
assert x._waiting
assert x.value == 0
x.clear()
assert not x._waiting
assert x.value == x.initial_value
class test_Utils:
def setup(self):
self._prev_loop = get_event_loop()
def teardown(self):
set_event_loop(self._prev_loop)
def test_get_set_event_loop(self):
set_event_loop(None)
assert _hub._current_loop is None
assert get_event_loop() is None
hub = Hub()
set_event_loop(hub)
assert _hub._current_loop is hub
assert get_event_loop() is hub
def test_dummy_context(self):
with _dummy_context():
pass
def test_raise_stop_error(self):
with pytest.raises(Stop):
_raise_stop_error()
class test_Hub:
def setup(self):
self.hub = Hub()
def teardown(self):
self.hub.close()
def test_reset(self):
self.hub.close = Mock(name='close')
self.hub._create_poller = Mock(name='_create_poller')
self.hub.reset()
self.hub.close.assert_called_with()
self.hub._create_poller.assert_called_with()
def test__close_poller__no_poller(self):
self.hub.poller = None
self.hub._close_poller()
def test__close_poller(self):
poller = self.hub.poller = Mock(name='poller')
self.hub._close_poller()
poller.close.assert_called_with()
assert self.hub._poller is None
def test_stop(self):
self.hub.call_soon = Mock(name='call_soon')
self.hub.stop()
self.hub.call_soon.assert_called_with(_raise_stop_error)
@patch('kombu.asynchronous.hub.promise')
def test_call_soon(self, promise):
callback = Mock(name='callback')
ret = self.hub.call_soon(callback, 1, 2, 3)
promise.assert_called_with(callback, (1, 2, 3))
assert promise() in self.hub._ready
assert ret is promise()
def test_call_soon__promise_argument(self):
callback = promise(Mock(name='callback'), (1, 2, 3))
ret = self.hub.call_soon(callback)
assert ret is callback
assert ret in self.hub._ready
def test_call_later(self):
callback = Mock(name='callback')
self.hub.timer = Mock(name='hub.timer')
self.hub.call_later(10.0, callback, 1, 2)
self.hub.timer.call_after.assert_called_with(10.0, callback, (1, 2))
def test_call_at(self):
callback = Mock(name='callback')
self.hub.timer = Mock(name='hub.timer')
self.hub.call_at(21231122, callback, 1, 2)
self.hub.timer.call_at.assert_called_with(21231122, callback, (1, 2))
def test_repr(self):
assert repr(self.hub)
def test_repr_flag(self):
assert repr_flag(READ) == 'R'
assert repr_flag(WRITE) == 'W'
assert repr_flag(ERR) == '!'
assert repr_flag(READ | WRITE) == 'RW'
assert repr_flag(READ | ERR) == 'R!'
assert repr_flag(WRITE | ERR) == 'W!'
assert repr_flag(READ | WRITE | ERR) == 'RW!'
def test_repr_callback_rcb(self):
def f():
pass
assert _rcb(f) == f.__name__
assert _rcb('foo') == 'foo'
@patch('kombu.asynchronous.hub.poll')
def test_start_stop(self, poll):
self.hub = Hub()
poll.assert_called_with()
poller = self.hub.poller
self.hub.stop()
mock_callback = Mock()
self.hub._ready = {mock_callback}
self.hub.close()
poller.close.assert_called_with()
mock_callback.assert_called_once_with()
assert self.hub._ready == set()
def test_poller_regeneration_on_access(self):
self.hub = Hub()
assert self.hub.poller
self.hub.stop()
self.hub._ready = set()
self.hub.close()
assert self.hub._poller is None
assert self.hub.poller, 'It should be regenerated automatically!'
def test_fire_timers(self):
self.hub.timer = Mock()
self.hub.timer._queue = []
assert self.hub.fire_timers(
min_delay=42.324, max_delay=32.321) == 32.321
self.hub.timer._queue = [1]
self.hub.scheduler = iter([(3.743, None)])
assert self.hub.fire_timers() == 3.743
e1, e2, e3 = Mock(), Mock(), Mock()
entries = [e1, e2, e3]
def reset():
return [m.reset() for m in [e1, e2, e3]]
def se():
while 1:
while entries:
yield None, entries.pop()
yield 3.982, None
self.hub.scheduler = se()
assert self.hub.fire_timers(max_timers=10) == 3.982
for E in [e3, e2, e1]:
E.assert_called_with()
reset()
entries[:] = [Mock() for _ in range(11)]
keep = list(entries)
assert self.hub.fire_timers(
max_timers=10, min_delay=1.13) == 1.13
for E in reversed(keep[1:]):
E.assert_called_with()
reset()
assert self.hub.fire_timers(max_timers=10) == 3.982
keep[0].assert_called_with()
def test_fire_timers_raises(self):
eback = Mock()
eback.side_effect = KeyError('foo')
self.hub.timer = Mock()
self.hub.scheduler = iter([(0, eback)])
with pytest.raises(KeyError):
self.hub.fire_timers(propagate=(KeyError,))
eback.side_effect = ValueError('foo')
self.hub.scheduler = iter([(0, eback)])
with patch('kombu.asynchronous.hub.logger') as logger:
with pytest.raises(StopIteration):
self.hub.fire_timers()
logger.error.assert_called()
eback.side_effect = MemoryError('foo')
self.hub.scheduler = iter([(0, eback)])
with pytest.raises(MemoryError):
self.hub.fire_timers()
eback.side_effect = OSError()
eback.side_effect.errno = errno.ENOMEM
self.hub.scheduler = iter([(0, eback)])
with pytest.raises(OSError):
self.hub.fire_timers()
eback.side_effect = OSError()
eback.side_effect.errno = errno.ENOENT
self.hub.scheduler = iter([(0, eback)])
with patch('kombu.asynchronous.hub.logger') as logger:
with pytest.raises(StopIteration):
self.hub.fire_timers()
logger.error.assert_called()
def test_add_raises_ValueError(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.poller.register.side_effect = ValueError()
self.hub._discard = Mock(name='hub.discard')
with pytest.raises(ValueError):
self.hub.add(2, Mock(), READ)
self.hub._discard.assert_called_with(2)
def test_remove_reader(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.add(2, Mock(), READ)
self.hub.add(2, Mock(), WRITE)
self.hub.remove_reader(2)
assert 2 not in self.hub.readers
assert 2 in self.hub.writers
def test_remove_reader__not_writeable(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.add(2, Mock(), READ)
self.hub.remove_reader(2)
assert 2 not in self.hub.readers
def test_remove_writer(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.add(2, Mock(), READ)
self.hub.add(2, Mock(), WRITE)
self.hub.remove_writer(2)
assert 2 in self.hub.readers
assert 2 not in self.hub.writers
def test_remove_writer__not_readable(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.add(2, Mock(), WRITE)
self.hub.remove_writer(2)
assert 2 not in self.hub.writers
def test_add__consolidate(self):
self.hub.poller = Mock(name='hub.poller')
self.hub.add(2, Mock(), WRITE, consolidate=True)
assert 2 in self.hub.consolidate
assert self.hub.writers[2] is None
@patch('kombu.asynchronous.hub.logger')
def test_on_callback_error(self, logger):
self.hub.on_callback_error(Mock(name='callback'), KeyError())
logger.error.assert_called()
def test_loop_property(self):
self.hub._loop = None
self.hub.create_loop = Mock(name='hub.create_loop')
assert self.hub.loop is self.hub.create_loop()
assert self.hub._loop is self.hub.create_loop()
def test_run_forever(self):
self.hub.run_once = Mock(name='hub.run_once')
self.hub.run_once.side_effect = Stop()
self.hub.run_forever()
def test_run_once(self):
self.hub._loop = iter([1])
self.hub.run_once()
self.hub.run_once()
assert self.hub._loop is None
def test_repr_active(self):
self.hub.readers = {1: Mock(), 2: Mock()}
self.hub.writers = {3: Mock(), 4: Mock()}
for value in list(
self.hub.readers.values()) + list(self.hub.writers.values()):
value.__name__ = 'mock'
assert self.hub.repr_active()
def test_repr_events(self):
self.hub.readers = {6: Mock(), 7: Mock(), 8: Mock()}
self.hub.writers = {9: Mock()}
for value in list(
self.hub.readers.values()) + list(self.hub.writers.values()):
value.__name__ = 'mock'
assert self.hub.repr_events([
(6, READ),
(7, ERR),
(8, READ | ERR),
(9, WRITE),
(10, 13213),
])
def test_callback_for(self):
reader, writer = Mock(), Mock()
self.hub.readers = {6: reader}
self.hub.writers = {7: writer}
assert callback_for(self.hub, 6, READ) == reader
assert callback_for(self.hub, 7, WRITE) == writer
with pytest.raises(KeyError):
callback_for(self.hub, 6, WRITE)
assert callback_for(self.hub, 6, WRITE, 'foo') == 'foo'
def test_add_remove_readers(self):
P = self.hub.poller = Mock()
read_A = Mock()
read_B = Mock()
self.hub.add_reader(10, read_A, 10)
self.hub.add_reader(File(11), read_B, 11)
P.register.assert_has_calls([
call(10, self.hub.READ | self.hub.ERR),
call(11, self.hub.READ | self.hub.ERR),
], any_order=True)
assert self.hub.readers[10] == (read_A, (10,))
assert self.hub.readers[11] == (read_B, (11,))
self.hub.remove(10)
assert 10 not in self.hub.readers
self.hub.remove(File(11))
assert 11 not in self.hub.readers
P.unregister.assert_has_calls([
call(10), call(11),
])
def test_can_remove_unknown_fds(self):
self.hub.poller = Mock()
self.hub.remove(30)
self.hub.remove(File(301))
def test_remove__unregister_raises(self):
self.hub.poller = Mock()
self.hub.poller.unregister.side_effect = OSError()
self.hub.remove(313)
def test_add_writers(self):
P = self.hub.poller = Mock()
write_A = Mock()
write_B = Mock()
self.hub.add_writer(20, write_A)
self.hub.add_writer(File(21), write_B)
P.register.assert_has_calls([
call(20, self.hub.WRITE),
call(21, self.hub.WRITE),
], any_order=True)
assert self.hub.writers[20] == (write_A, ())
assert self.hub.writers[21] == (write_B, ())
self.hub.remove(20)
assert 20 not in self.hub.writers
self.hub.remove(File(21))
assert 21 not in self.hub.writers
P.unregister.assert_has_calls([
call(20), call(21),
])
def test_enter__exit(self):
P = self.hub.poller = Mock()
on_close = Mock()
self.hub.on_close.add(on_close)
try:
read_A = Mock()
read_B = Mock()
self.hub.add_reader(10, read_A)
self.hub.add_reader(File(11), read_B)
write_A = Mock()
write_B = Mock()
self.hub.add_writer(20, write_A)
self.hub.add_writer(File(21), write_B)
assert self.hub.readers
assert self.hub.writers
finally:
assert self.hub.poller
self.hub.close()
assert not self.hub.readers
assert not self.hub.writers
P.unregister.assert_has_calls([
call(10), call(11), call(20), call(21),
], any_order=True)
on_close.assert_called_with(self.hub)
def test_scheduler_property(self):
hub = Hub(timer=[1, 2, 3])
assert list(hub.scheduler) == [1, 2, 3]
def test_loop__tick_callbacks(self):
ticks = [Mock(name='cb1'), Mock(name='cb2')]
self.hub.on_tick = list(ticks)
next(self.hub.loop)
ticks[0].assert_called_once_with()
ticks[1].assert_called_once_with()
def test_loop__todo(self):
deferred = Mock(name='cb_deferred')
def defer():
self.hub.call_soon(deferred)
callbacks = [Mock(name='cb1', wraps=defer), Mock(name='cb2')]
for cb in callbacks:
self.hub.call_soon(cb)
self.hub._ready.add(None)
next(self.hub.loop)
callbacks[0].assert_called_once_with()
callbacks[1].assert_called_once_with()
deferred.assert_not_called()
|
from threading import local
# stolen from https://github.com/getsentry/sentry/
class PluginMount(type):
def __new__(cls, name, bases, attrs):
new_cls = type.__new__(cls, name, bases, attrs)
if IPlugin in bases:
return new_cls
if new_cls.title is None:
new_cls.title = new_cls.__name__
if not new_cls.slug:
new_cls.slug = new_cls.title.replace(" ", "-").lower()
return new_cls
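# Illustration of what PluginMount.__new__ does (the subclass is hypothetical):
# a plugin class named "MyExportPlugin" with title left as None ends up with
# title "MyExportPlugin" and slug "myexportplugin"; an explicit title
# "My Export" would yield slug "my-export".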
class IPlugin(local):
"""
Plugin interface. Should not be inherited from directly.
A plugin should be treated as if it were a singleton. The owner does not
control when or how the plugin gets instantiated, nor is it guaranteed that
it will happen, or happen more than once.
>>> from lemur.plugins import Plugin
>>>
>>> class MyPlugin(Plugin):
>>> def get_title(self):
>>> return 'My Plugin'
As a general rule all inherited methods should allow ``**kwargs`` to ensure
ease of future compatibility.
"""
# Generic plugin information
title = None
slug = None
description = None
version = None
author = None
author_url = None
resource_links = ()
# Configuration specifics
conf_key = None
conf_title = None
options = {}
# Global enabled state
enabled = True
can_disable = True
def is_enabled(self):
"""
Returns a boolean representing whether this plugin is enabled.
>>> plugin.is_enabled()
"""
if not self.enabled:
return False
if not self.can_disable:
return True
return True
def get_conf_key(self):
"""
Returns a string representing the configuration keyspace prefix for this plugin.
"""
if not self.conf_key:
self.conf_key = self.get_conf_title().lower().replace(" ", "_")
return self.conf_key
def get_conf_title(self):
"""
Returns a string representing the title to be shown on the configuration page.
"""
return self.conf_title or self.get_title()
def get_title(self):
"""
Returns the general title for this plugin.
>>> plugin.get_title()
"""
return self.title
def get_description(self):
"""
Returns the description for this plugin. This is shown on the plugin configuration
page.
>>> plugin.get_description()
"""
return self.description
def get_resource_links(self):
"""
Returns a list of tuples pointing to various resources for this plugin.
>>> def get_resource_links(self):
>>> return [
>>> ('Documentation', 'https://lemur.readthedocs.io'),
>>> ('Bug Tracker', 'https://github.com/Netflix/lemur/issues'),
>>> ('Source', 'https://github.com/Netflix/lemur'),
>>> ]
"""
return self.resource_links
@staticmethod
def get_option(name, options):
for o in options:
if o.get("name") == name:
return o.get("value", o.get("default"))
class Plugin(IPlugin):
"""
A plugin should be treated as if it were a singleton. The owner does not
control when or how the plugin gets instantiated, nor is it guaranteed that
it will happen, or happen more than once.
"""
__version__ = 1
__metaclass__ = PluginMount
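# --- Illustrative usage sketch (not part of the original module) -----------
# A hypothetical subclass showing how PluginMount derives ``title`` and
# ``slug`` from the class name, and how ``get_option`` falls back to an
# option's default value. The metaclass is passed explicitly here because
# Python 3 ignores the ``__metaclass__`` attribute.
if __name__ == "__main__":
    class ExamplePlugin(Plugin, metaclass=PluginMount):
        description = "Example plugin used only for demonstration"

    plugin = ExamplePlugin()
    assert plugin.get_title() == "ExamplePlugin"
    assert plugin.slug == "exampleplugin"
    # The option dict has no "value" key, so the default is returned.
    options = [{"name": "interval", "default": 30}]
    assert ExamplePlugin.get_option("interval", options) == 30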
|
import logging
import weakref
logging.basicConfig()
logger = logging.getLogger("kalliope")
class NotificationManager(object):
"""
    Class used to send messages to all instantiated objects that use it as a parent class
"""
_instances = set()
def __init__(self):
self._instances.add(weakref.ref(self))
logger.debug("[NotificationManager] Add new instance to the manager")
@classmethod
def get_instances(cls):
dead = set()
for ref in cls._instances:
obj = ref()
if obj is not None:
yield obj
else:
dead.add(ref)
cls._instances -= dead
@classmethod
def send_notification(cls, notification=None, payload=None):
logger.debug("[NotificationManager] send notification to all child: notification: %s, payload: %s"
% (notification, payload))
for instance in cls.get_instances():
try:
instance.on_notification_received(notification=notification, payload=payload)
except NotImplementedError:
logger.debug("[NotificationManager] The signal %s does not implement send_notification method"
% instance.__class__.__name__)
pass
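# --- Illustrative usage sketch (not part of the original module) -----------
# A hypothetical child class showing the intended pattern: any object whose
# class inherits from NotificationManager is tracked through a weak
# reference and receives broadcasts via on_notification_received().
if __name__ == "__main__":
    class ExampleSignal(NotificationManager):
        def on_notification_received(self, notification=None, payload=None):
            logger.debug("[ExampleSignal] received %s with payload %s", notification, payload)

    signal = ExampleSignal()
    # Every live instance registered above receives the broadcast.
    NotificationManager.send_notification(notification="mute", payload={"status": True})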
|
import mock
from pytest import raises
from paasta_tools import long_running_service_tools
from paasta_tools.utils import InvalidInstanceConfig
class TestLongRunningServiceConfig:
def test_get_healthcheck_cmd_happy(self):
fake_conf = long_running_service_tools.LongRunningServiceConfig(
service="fake_name",
cluster="fake_cluster",
config_dict={"healthcheck_cmd": "/bin/true"},
instance="fake_instance",
branch_dict=None,
)
actual = fake_conf.get_healthcheck_cmd()
assert actual == "/bin/true"
def test_get_healthcheck_cmd_raises_when_unset(self):
fake_conf = long_running_service_tools.LongRunningServiceConfig(
service="fake_name",
cluster="fake_cluster",
instance="fake_instance",
config_dict={},
branch_dict=None,
)
with raises(InvalidInstanceConfig) as exc:
fake_conf.get_healthcheck_cmd()
assert "healthcheck mode 'cmd' requires a healthcheck_cmd to run" in str(
exc.value
)
def test_get_healthcheck_for_instance_http(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_path = "/fake_path"
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{"mode": "http", "healthcheck_uri": fake_path}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
), mock.patch("socket.getfqdn", autospec=True, return_value=fake_hostname):
expected = (
"http",
"http://%s:%d%s" % (fake_hostname, fake_random_port, fake_path),
)
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service, fake_namespace, fake_service_config, fake_random_port
)
assert expected == actual
def test_get_healthcheck_for_instance_not_matching_mode(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{"mode": "http"}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
), mock.patch("socket.getfqdn", autospec=True, return_value=fake_hostname):
expected = ("http", "http://fake_hostname:666/status")
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service, fake_namespace, fake_service_config, fake_random_port
)
assert expected == actual
def test_get_healthcheck_for_instance_tcp(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{"mode": "tcp"}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
), mock.patch("socket.getfqdn", autospec=True, return_value=fake_hostname):
expected = ("tcp", "tcp://%s:%d" % (fake_hostname, fake_random_port))
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service, fake_namespace, fake_service_config, fake_random_port
)
assert expected == actual
def test_get_healthcheck_for_instance_cmd(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_cmd = "/bin/fake_command"
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={
"instances": 1,
"healthcheck_mode": "cmd",
"healthcheck_cmd": fake_cmd,
},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
), mock.patch("socket.getfqdn", autospec=True, return_value=fake_hostname):
expected = ("cmd", fake_cmd)
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service, fake_namespace, fake_service_config, fake_random_port
)
assert expected == actual
def test_get_healthcheck_for_instance_other(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={"healthcheck_mode": None},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
), mock.patch("socket.getfqdn", autospec=True, return_value=fake_hostname):
expected = (None, None)
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service, fake_namespace, fake_service_config, fake_random_port
)
assert expected == actual
def test_get_healthcheck_for_instance_custom_soadir(self):
fake_service = "fake_service"
fake_namespace = "fake_namespace"
fake_hostname = "fake_hostname"
fake_random_port = 666
fake_soadir = "/fake/soadir"
fake_service_config = long_running_service_tools.LongRunningServiceConfig(
service=fake_service,
cluster="fake_cluster",
instance=fake_namespace,
config_dict={"healthcheck_mode": None},
branch_dict=None,
)
fake_service_namespace_config = long_running_service_tools.ServiceNamespaceConfig(
{}
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
return_value=fake_service_namespace_config,
) as load_service_namespace_config_patch, mock.patch(
"socket.getfqdn", autospec=True, return_value=fake_hostname
):
expected = (None, None)
actual = long_running_service_tools.get_healthcheck_for_instance(
fake_service,
fake_namespace,
fake_service_config,
fake_random_port,
soa_dir=fake_soadir,
)
assert expected == actual
load_service_namespace_config_patch.assert_called_once_with(
fake_service, fake_namespace, fake_soadir
)
def test_get_instances_in_config(self):
fake_conf = long_running_service_tools.LongRunningServiceConfig(
service="fake_name",
cluster="fake_cluster",
instance="fake_instance",
config_dict={"instances": -10},
branch_dict={
"desired_state": "start",
"git_sha": "c0ded00d",
"docker_image": "docker_image",
"force_bounce": None,
},
)
assert fake_conf.get_instances() == -10
def test_get_instances_default(self):
fake_conf = long_running_service_tools.LongRunningServiceConfig(
service="fake_name",
cluster="fake_cluster",
instance="fake_instance",
config_dict={},
branch_dict=None,
)
assert fake_conf.get_instances() == 1
def test_get_instances_respects_false(self):
fake_conf = long_running_service_tools.LongRunningServiceConfig(
service="fake_name",
cluster="fake_cluster",
instance="fake_instance",
config_dict={"instances": False},
branch_dict={
"desired_state": "start",
"git_sha": "c0debabe",
"docker_image": "docker_image",
"force_bounce": None,
},
)
assert fake_conf.get_instances() == 0
class TestServiceNamespaceConfig:
def test_get_mode_default(self):
assert long_running_service_tools.ServiceNamespaceConfig().get_mode() is None
def test_get_mode_default_when_port_specified(self):
config = {"proxy_port": 1234}
assert (
long_running_service_tools.ServiceNamespaceConfig(config).get_mode()
== "http"
)
def test_get_mode_valid(self):
config = {"mode": "tcp"}
assert (
long_running_service_tools.ServiceNamespaceConfig(config).get_mode()
== "tcp"
)
def test_get_mode_invalid(self):
config = {"mode": "paasta"}
with raises(long_running_service_tools.InvalidSmartstackMode):
long_running_service_tools.ServiceNamespaceConfig(config).get_mode()
def test_get_healthcheck_uri_default(self):
assert (
long_running_service_tools.ServiceNamespaceConfig().get_healthcheck_uri()
== "/status"
)
def test_get_discover_default(self):
assert (
long_running_service_tools.ServiceNamespaceConfig().get_discover()
== "region"
)
def test_get_proxy_port_for_instance():
mock_config = mock.Mock(
get_registrations=mock.Mock(return_value=["thing.main.sha.sha"]),
soa_dir="/nail/blah",
)
with mock.patch(
"paasta_tools.long_running_service_tools.load_service_namespace_config",
autospec=True,
) as mock_load_service_namespace_config:
mock_load_service_namespace_config.return_value = {"proxy_port": 1234}
assert (
long_running_service_tools.get_proxy_port_for_instance(mock_config) == 1234
)
mock_load_service_namespace_config.assert_called_once_with(
service="thing", namespace="main", soa_dir="/nail/blah"
)
def test_host_passes_blacklist_passes():
slave_attributes = {"fake_attribute": "fake_value_1"}
blacklist = [("fake_attribute", "No what we have here"), ("foo", "bar")]
actual = long_running_service_tools.host_passes_blacklist(
host_attributes=slave_attributes, blacklist=blacklist
)
assert actual is True
def test_host_passes_blacklist_blocks_blacklisted_locations():
slave_attributes = {"fake_attribute": "fake_value_1"}
blacklist = [("fake_attribute", "fake_value_1")]
actual = long_running_service_tools.host_passes_blacklist(
host_attributes=slave_attributes, blacklist=blacklist
)
assert actual is False
def test_host_passes_whitelist():
fake_slave_attributes = {
"location_type": "fake_location",
"fake_location_type": "fake_location",
}
fake_whitelist_allow = ("fake_location_type", ["fake_location"])
fake_whitelist_deny = ("anoterfake_location_type", ["anotherfake_location"])
slave_passes = long_running_service_tools.host_passes_whitelist(
fake_slave_attributes, fake_whitelist_deny
)
assert not slave_passes
slave_passes = long_running_service_tools.host_passes_whitelist(
fake_slave_attributes, fake_whitelist_allow
)
assert slave_passes
slave_passes = long_running_service_tools.host_passes_whitelist(
fake_slave_attributes, None
)
assert slave_passes
|
import html
import functools
from typing import cast
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt, QUrl, QPoint
from PyQt5.QtGui import QDesktopServices
from PyQt5.QtNetwork import QNetworkReply, QNetworkRequest
from PyQt5.QtWidgets import QFileDialog
from PyQt5.QtPrintSupport import QPrintDialog
from PyQt5.QtWebKitWidgets import QWebPage, QWebFrame
from qutebrowser.config import websettings
from qutebrowser.browser import pdfjs, shared, downloads, greasemonkey
from qutebrowser.browser.webkit import http
from qutebrowser.browser.webkit.network import networkmanager
from qutebrowser.utils import message, usertypes, log, jinja, objreg
from qutebrowser.qt import sip
class BrowserPage(QWebPage):
"""Our own QWebPage with advanced features.
Attributes:
error_occurred: Whether an error occurred while loading.
_extension_handlers: Mapping of QWebPage extensions to their handlers.
_networkmanager: The NetworkManager used.
_win_id: The window ID this BrowserPage is associated with.
_ignore_load_started: Whether to ignore the next loadStarted signal.
_is_shutting_down: Whether the page is currently shutting down.
_tabdata: The TabData object of the tab this page is in.
Signals:
shutting_down: Emitted when the page is currently shutting down.
reloading: Emitted before a web page reloads.
arg: The URL which gets reloaded.
navigation_request: Emitted on acceptNavigationRequest.
"""
shutting_down = pyqtSignal()
reloading = pyqtSignal(QUrl)
navigation_request = pyqtSignal(usertypes.NavigationRequest)
def __init__(self, win_id, tab_id, tabdata, private, parent=None):
super().__init__(parent)
self._win_id = win_id
self._tabdata = tabdata
self._is_shutting_down = False
self._extension_handlers = {
QWebPage.ErrorPageExtension: self._handle_errorpage,
QWebPage.ChooseMultipleFilesExtension: self._handle_multiple_files,
}
self._ignore_load_started = False
self.error_occurred = False
self._networkmanager = networkmanager.NetworkManager(
win_id=win_id, tab_id=tab_id, private=private, parent=self)
self.setNetworkAccessManager(self._networkmanager)
self.setForwardUnsupportedContent(True)
self.reloading.connect(self._networkmanager.clear_rejected_ssl_errors)
self.printRequested.connect( # type: ignore[attr-defined]
self.on_print_requested)
self.downloadRequested.connect( # type: ignore[attr-defined]
self.on_download_requested)
self.unsupportedContent.connect( # type: ignore[attr-defined]
self.on_unsupported_content)
self.loadStarted.connect( # type: ignore[attr-defined]
self.on_load_started)
self.featurePermissionRequested.connect( # type: ignore[attr-defined]
self._on_feature_permission_requested)
self.saveFrameStateRequested.connect( # type: ignore[attr-defined]
self.on_save_frame_state_requested)
self.restoreFrameStateRequested.connect( # type: ignore[attr-defined]
self.on_restore_frame_state_requested)
self.loadFinished.connect( # type: ignore[attr-defined]
functools.partial(self._inject_userjs, self.mainFrame()))
self.frameCreated.connect( # type: ignore[attr-defined]
self._connect_userjs_signals)
@pyqtSlot('QWebFrame*')
def _connect_userjs_signals(self, frame):
"""Connect userjs related signals to `frame`.
Connect the signals used as triggers for injecting user
JavaScripts into the passed QWebFrame.
"""
log.greasemonkey.debug("Connecting to frame {} ({})"
.format(frame, frame.url().toDisplayString()))
frame.loadFinished.connect(
functools.partial(self._inject_userjs, frame))
def javaScriptPrompt(self, frame, js_msg, default):
"""Override javaScriptPrompt to use qutebrowser prompts."""
if self._is_shutting_down:
return (False, "")
try:
return shared.javascript_prompt(frame.url(), js_msg, default,
abort_on=[self.loadStarted,
self.shutting_down])
except shared.CallSuper:
return super().javaScriptPrompt(frame, js_msg, default)
def _handle_errorpage(self, info, errpage):
"""Display an error page if needed.
Loosely based on Helpviewer/HelpBrowserWV.py from eric5
(line 260 @ 5d937eb378dd)
Args:
info: The QWebPage.ErrorPageExtensionOption instance.
errpage: The QWebPage.ErrorPageExtensionReturn instance, where the
error page will get written to.
Return:
False if no error page should be displayed, True otherwise.
"""
ignored_errors = [
(QWebPage.QtNetwork, QNetworkReply.OperationCanceledError),
# "Loading is handled by the media engine"
(QWebPage.WebKit, 203),
# "Frame load interrupted by policy change"
(QWebPage.WebKit, 102),
]
errpage.baseUrl = info.url
urlstr = info.url.toDisplayString()
if (info.domain, info.error) == (QWebPage.QtNetwork,
QNetworkReply.ProtocolUnknownError):
# For some reason, we get a segfault when we use
# QDesktopServices::openUrl with info.url directly - however it
# works when we construct a copy of it.
url = QUrl(info.url)
scheme = url.scheme()
message.confirm_async(
title="Open external application for {}-link?".format(scheme),
text="URL: <b>{}</b>".format(
html.escape(url.toDisplayString())),
yes_action=functools.partial(QDesktopServices.openUrl, url),
url=info.url.toString(QUrl.RemovePassword | QUrl.FullyEncoded))
return True
elif (info.domain, info.error) in ignored_errors:
log.webview.debug("Ignored error on {}: {} (error domain: {}, "
"error code: {})".format(
urlstr, info.errorString, info.domain,
info.error))
return False
else:
error_str = info.errorString
if error_str == networkmanager.HOSTBLOCK_ERROR_STRING:
# We don't set error_occurred in this case.
error_str = "Request blocked by host blocker."
main_frame = info.frame.page().mainFrame()
if info.frame != main_frame:
# Content in an iframe -> Hide the frame so it doesn't use
# any space. We can't hide the frame's documentElement
# directly though.
for elem in main_frame.documentElement().findAll('iframe'):
if QUrl(elem.attribute('src')) == info.url:
elem.setAttribute('style', 'display: none')
return False
else:
self._ignore_load_started = True
self.error_occurred = True
log.webview.error("Error while loading {}: {}".format(
urlstr, error_str))
log.webview.debug("Error domain: {}, error code: {}".format(
info.domain, info.error))
title = "Error loading page: {}".format(urlstr)
error_html = jinja.render(
'error.html',
title=title, url=urlstr, error=error_str)
errpage.content = error_html.encode('utf-8')
errpage.encoding = 'utf-8'
return True
def _handle_multiple_files(self, info, files):
"""Handle uploading of multiple files.
Loosely based on Helpviewer/HelpBrowserWV.py from eric5.
Args:
info: The ChooseMultipleFilesExtensionOption instance.
files: The ChooseMultipleFilesExtensionReturn instance to write
return values to.
Return:
True on success, the superclass return value on failure.
"""
suggested_file = ""
if info.suggestedFileNames:
suggested_file = info.suggestedFileNames[0]
files.fileNames, _ = QFileDialog.getOpenFileNames(
None, None, suggested_file) # type: ignore[arg-type]
return True
def shutdown(self):
"""Prepare the web page for being deleted."""
self._is_shutting_down = True
self.shutting_down.emit()
download_manager = objreg.get('qtnetwork-download-manager')
nam = self.networkAccessManager()
if download_manager.has_downloads_with_nam(nam):
nam.setParent(download_manager)
else:
nam.shutdown()
def display_content(self, reply, mimetype):
"""Display a QNetworkReply with an explicitly set mimetype."""
self.mainFrame().setContent(reply.readAll(), mimetype, reply.url())
reply.deleteLater()
def on_print_requested(self, frame):
"""Handle printing when requested via javascript."""
printdiag = QPrintDialog()
printdiag.setAttribute(Qt.WA_DeleteOnClose)
printdiag.open(lambda: frame.print(printdiag.printer()))
def on_download_requested(self, request):
"""Called when the user wants to download a link.
We need to construct a copy of the QNetworkRequest here as the
download_manager needs it async and we'd get a segfault otherwise as
soon as the user has entered the filename, as Qt seems to delete it
after this slot returns.
"""
req = QNetworkRequest(request)
download_manager = objreg.get('qtnetwork-download-manager')
download_manager.get_request(req, qnam=self.networkAccessManager())
@pyqtSlot('QNetworkReply*')
def on_unsupported_content(self, reply):
"""Handle an unsupportedContent signal.
Most likely this will mean we need to download the reply, but we
        correct for some common errors servers make.
At some point we might want to implement the MIME Sniffing standard
here: http://mimesniff.spec.whatwg.org/
"""
inline, suggested_filename = http.parse_content_disposition(reply)
download_manager = objreg.get('qtnetwork-download-manager')
if not inline:
# Content-Disposition: attachment -> force download
download_manager.fetch(reply,
suggested_filename=suggested_filename)
return
mimetype, _rest = http.parse_content_type(reply)
if mimetype == 'image/jpg':
# Some servers (e.g. the LinkedIn CDN) send a non-standard
# image/jpg (instead of image/jpeg, defined in RFC 1341 section
# 7.5). If this is the case, we force displaying with a corrected
# mimetype.
if reply.isFinished():
self.display_content(reply, 'image/jpeg')
else:
reply.finished.connect(functools.partial(
self.display_content, reply, 'image/jpeg'))
elif pdfjs.should_use_pdfjs(mimetype, reply.url()):
download_manager.fetch(reply,
target=downloads.PDFJSDownloadTarget(),
auto_remove=True)
else:
# Unknown mimetype, so download anyways.
download_manager.fetch(reply,
suggested_filename=suggested_filename)
@pyqtSlot()
def on_load_started(self):
"""Reset error_occurred when loading of a new page started."""
if self._ignore_load_started:
self._ignore_load_started = False
else:
self.error_occurred = False
def _inject_userjs(self, frame):
"""Inject user JavaScripts into the page.
Args:
frame: The QWebFrame to inject the user scripts into.
"""
if sip.isdeleted(frame):
log.greasemonkey.debug("_inject_userjs called for deleted frame!")
return
url = frame.url()
if url.isEmpty():
url = frame.requestedUrl()
log.greasemonkey.debug("_inject_userjs called for {} ({})"
.format(frame, url.toDisplayString()))
scripts = greasemonkey.gm_manager.scripts_for(url)
# QtWebKit has trouble providing us with signals representing
# page load progress at reasonable times, so we just load all
# scripts on the same event.
toload = scripts.start + scripts.end + scripts.idle
if url.isEmpty():
# This happens during normal usage like with view source but may
# also indicate a bug.
log.greasemonkey.debug("Not running scripts for frame with no "
"url: {}".format(frame))
assert not toload, toload
for script in toload:
if frame is self.mainFrame() or script.runs_on_sub_frames:
log.webview.debug('Running GM script: {}'.format(script.name))
frame.evaluateJavaScript(script.code())
@pyqtSlot('QWebFrame*', 'QWebPage::Feature')
def _on_feature_permission_requested(self, frame, feature):
"""Ask the user for approval for geolocation/notifications."""
if not isinstance(frame, QWebFrame): # pragma: no cover
# This makes no sense whatsoever, but someone reported this being
# called with a QBuffer...
log.misc.error("on_feature_permission_requested got called with "
"{!r}!".format(frame))
return
options = {
QWebPage.Notifications: 'content.notifications',
QWebPage.Geolocation: 'content.geolocation',
}
messages = {
QWebPage.Notifications: 'show notifications',
QWebPage.Geolocation: 'access your location',
}
yes_action = functools.partial(
self.setFeaturePermission, frame, feature,
QWebPage.PermissionGrantedByUser)
no_action = functools.partial(
self.setFeaturePermission, frame, feature,
QWebPage.PermissionDeniedByUser)
url = frame.url().adjusted(cast(QUrl.FormattingOptions,
QUrl.RemoveUserInfo |
QUrl.RemovePath |
QUrl.RemoveQuery |
QUrl.RemoveFragment))
question = shared.feature_permission(
url=url,
option=options[feature], msg=messages[feature],
yes_action=yes_action, no_action=no_action,
abort_on=[self.shutting_down, self.loadStarted])
if question is not None:
self.featurePermissionRequestCanceled.connect( # type: ignore[attr-defined]
functools.partial(self._on_feature_permission_cancelled,
question, frame, feature))
def _on_feature_permission_cancelled(self, question, frame, feature,
cancelled_frame, cancelled_feature):
"""Slot invoked when a feature permission request was cancelled.
To be used with functools.partial.
"""
if frame is cancelled_frame and feature == cancelled_feature:
try:
question.abort()
except RuntimeError:
# The question could already be deleted, e.g. because it was
# aborted after a loadStarted signal.
pass
def on_save_frame_state_requested(self, frame, item):
"""Save scroll position and zoom in history.
Args:
frame: The QWebFrame which gets saved.
item: The QWebHistoryItem to be saved.
"""
if frame != self.mainFrame():
return
data = {
'zoom': frame.zoomFactor(),
'scroll-pos': frame.scrollPosition(),
}
item.setUserData(data)
def on_restore_frame_state_requested(self, frame):
"""Restore scroll position and zoom from history.
Args:
frame: The QWebFrame which gets restored.
"""
if frame != self.mainFrame():
return
data = self.history().currentItem().userData()
if data is None:
return
if 'zoom' in data:
frame.page().view().tab.zoom.set_factor(data['zoom'])
if 'scroll-pos' in data and frame.scrollPosition() == QPoint(0, 0):
frame.setScrollPosition(data['scroll-pos'])
def userAgentForUrl(self, url):
"""Override QWebPage::userAgentForUrl to customize the user agent."""
if not url.isValid():
url = None
return websettings.user_agent(url)
def supportsExtension(self, ext):
"""Override QWebPage::supportsExtension to provide error pages.
Args:
ext: The extension to check for.
Return:
True if the extension can be handled, False otherwise.
"""
return ext in self._extension_handlers
def extension(self, ext, opt, out):
"""Override QWebPage::extension to provide error pages.
Args:
ext: The extension.
opt: Extension options instance.
out: Extension output instance.
Return:
Handler return value.
"""
try:
handler = self._extension_handlers[ext]
except KeyError:
log.webview.warning("Extension {} not supported!".format(ext))
return super().extension(ext, opt, out)
return handler(opt, out)
def javaScriptAlert(self, frame, js_msg):
"""Override javaScriptAlert to use qutebrowser prompts."""
if self._is_shutting_down:
return
try:
shared.javascript_alert(frame.url(), js_msg,
abort_on=[self.loadStarted,
self.shutting_down])
except shared.CallSuper:
super().javaScriptAlert(frame, js_msg)
def javaScriptConfirm(self, frame, js_msg):
"""Override javaScriptConfirm to use the statusbar."""
if self._is_shutting_down:
return False
try:
return shared.javascript_confirm(frame.url(), js_msg,
abort_on=[self.loadStarted,
self.shutting_down])
except shared.CallSuper:
return super().javaScriptConfirm(frame, js_msg)
def javaScriptConsoleMessage(self, msg, line, source):
"""Override javaScriptConsoleMessage to use debug log."""
shared.javascript_log_message(usertypes.JsLogLevel.unknown,
source, line, msg)
def acceptNavigationRequest(self,
frame: QWebFrame,
request: QNetworkRequest,
typ: QWebPage.NavigationType) -> bool:
"""Override acceptNavigationRequest to handle clicked links.
Setting linkDelegationPolicy to DelegateAllLinks and using a slot bound
to linkClicked won't work correctly, because when in a frameset, we
have no idea in which frame the link should be opened.
Checks if it should open it in a tab (middle-click or control) or not,
and then conditionally opens the URL here or in another tab/window.
"""
type_map = {
QWebPage.NavigationTypeLinkClicked:
usertypes.NavigationRequest.Type.link_clicked,
QWebPage.NavigationTypeFormSubmitted:
usertypes.NavigationRequest.Type.form_submitted,
QWebPage.NavigationTypeFormResubmitted:
usertypes.NavigationRequest.Type.form_resubmitted,
QWebPage.NavigationTypeBackOrForward:
usertypes.NavigationRequest.Type.back_forward,
QWebPage.NavigationTypeReload:
usertypes.NavigationRequest.Type.reloaded,
QWebPage.NavigationTypeOther:
usertypes.NavigationRequest.Type.other,
}
is_main_frame = frame is self.mainFrame()
navigation = usertypes.NavigationRequest(url=request.url(),
navigation_type=type_map[typ],
is_main_frame=is_main_frame)
if navigation.navigation_type == navigation.Type.reloaded:
self.reloading.emit(navigation.url)
self.navigation_request.emit(navigation)
return navigation.accepted
|
from datetime import datetime, timedelta
import io
import os
from babelfish import Language
import pytest
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
from vcr import VCR
from subliminal.core import (AsyncProviderPool, ProviderPool, check_video, download_best_subtitles, download_subtitles,
list_subtitles, refine, save_subtitles, scan_archive, scan_video, scan_videos,
search_external_subtitles)
from subliminal.extensions import provider_manager
from subliminal.providers.addic7ed import Addic7edSubtitle
from subliminal.providers.thesubdb import TheSubDBSubtitle
from subliminal.providers.tvsubtitles import TVsubtitlesSubtitle
from subliminal.score import episode_scores
from subliminal.subtitle import Subtitle
from subliminal.utils import timestamp
from subliminal.video import Movie
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
match_on=['method', 'scheme', 'host', 'port', 'path', 'query', 'body'],
cassette_library_dir=os.path.realpath(os.path.join('tests', 'cassettes', 'core')))
@pytest.fixture
def mock_providers(monkeypatch):
for provider in provider_manager:
monkeypatch.setattr(provider.plugin, 'initialize', Mock())
monkeypatch.setattr(provider.plugin, 'list_subtitles', Mock(return_value=[provider.name]))
monkeypatch.setattr(provider.plugin, 'download_subtitle', Mock())
monkeypatch.setattr(provider.plugin, 'terminate', Mock())
def test_provider_pool_get_keyerror():
pool = ProviderPool()
with pytest.raises(KeyError):
pool['de7cidda']
def test_provider_pool_del_keyerror():
pool = ProviderPool()
with pytest.raises(KeyError):
del pool['addic7ed']
def test_provider_pool_iter(mock_providers):
pool = ProviderPool()
assert len(list(pool)) == 0
pool['tvsubtitles']
assert len(list(pool)) == 1
def test_provider_pool_list_subtitles_provider(episodes, mock_providers):
pool = ProviderPool()
subtitles = pool.list_subtitles_provider('tvsubtitles', episodes['bbt_s07e05'], {Language('eng')})
assert subtitles == ['tvsubtitles']
assert provider_manager['tvsubtitles'].plugin.initialize.called
assert provider_manager['tvsubtitles'].plugin.list_subtitles.called
def test_provider_pool_list_subtitles(episodes, mock_providers):
pool = ProviderPool()
subtitles = pool.list_subtitles(episodes['bbt_s07e05'], {Language('eng')})
assert sorted(subtitles) == ['addic7ed', 'legendastv', 'opensubtitles', 'podnapisi', 'shooter', 'thesubdb',
'tvsubtitles']
for provider in subtitles:
assert provider_manager[provider].plugin.initialize.called
assert provider_manager[provider].plugin.list_subtitles.called
def test_async_provider_pool_list_subtitles_provider(episodes, mock_providers):
pool = AsyncProviderPool()
subtitles = pool.list_subtitles_provider('tvsubtitles', episodes['bbt_s07e05'], {Language('eng')})
assert subtitles == ('tvsubtitles', ['tvsubtitles'])
assert provider_manager['tvsubtitles'].plugin.initialize.called
assert provider_manager['tvsubtitles'].plugin.list_subtitles.called
def test_async_provider_pool_list_subtitles(episodes, mock_providers):
pool = AsyncProviderPool()
subtitles = pool.list_subtitles(episodes['bbt_s07e05'], {Language('eng')})
assert sorted(subtitles) == ['addic7ed', 'legendastv', 'opensubtitles', 'podnapisi', 'shooter', 'thesubdb',
'tvsubtitles']
for provider in subtitles:
assert provider_manager[provider].plugin.initialize.called
assert provider_manager[provider].plugin.list_subtitles.called
def test_check_video_languages(movies):
video = movies['man_of_steel']
languages = {Language('fra'), Language('eng')}
assert check_video(video, languages=languages)
video.subtitle_languages = languages
assert not check_video(video, languages=languages)
def test_check_video_age(movies, monkeypatch):
video = movies['man_of_steel']
monkeypatch.setattr('subliminal.video.Video.age', timedelta(weeks=2))
assert check_video(video, age=timedelta(weeks=3))
assert not check_video(video, age=timedelta(weeks=1))
def test_check_video_undefined(movies):
video = movies['man_of_steel']
assert check_video(video, undefined=False)
assert check_video(video, undefined=True)
video.subtitle_languages = {Language('und')}
assert check_video(video, undefined=False)
assert not check_video(video, undefined=True)
def test_search_external_subtitles(episodes, tmpdir):
video_name = os.path.split(episodes['bbt_s07e05'].name)[1]
video_root = os.path.splitext(video_name)[0]
video_path = str(tmpdir.ensure(video_name))
expected_subtitles = {
video_name + '.srt': Language('und'),
video_root + '.srt': Language('und'),
video_root + '.en.srt': Language('eng'),
video_name + '.fra.srt': Language('fra'),
video_root + '.pt-BR.srt': Language('por', 'BR'),
video_name + '.sr_cyrl.sub': Language('srp', script='Cyrl'),
video_name + '.re.srt': Language('und'),
video_name + '.something.srt': Language('und')
}
tmpdir.ensure(os.path.split(episodes['got_s03e10'].name)[1] + '.srt')
for path in expected_subtitles:
tmpdir.ensure(path)
subtitles = search_external_subtitles(video_path)
assert subtitles == expected_subtitles
def test_search_external_subtitles_archive(movies, tmpdir):
video_name = os.path.split(movies['interstellar'].name)[1]
video_root = os.path.splitext(video_name)[0]
video_path = str(tmpdir.ensure(video_name))
expected_subtitles = {
video_name + '.srt': Language('und'),
video_root + '.srt': Language('und'),
video_root + '.en.srt': Language('eng'),
video_name + '.fra.srt': Language('fra'),
video_root + '.pt-BR.srt': Language('por', 'BR'),
video_name + '.sr_cyrl.sub': Language('srp', script='Cyrl'),
video_name + '.something.srt': Language('und')
}
tmpdir.ensure(os.path.split(movies['interstellar'].name)[1] + '.srt')
for path in expected_subtitles:
tmpdir.ensure(path)
subtitles = search_external_subtitles(video_path)
assert subtitles == expected_subtitles
def test_search_external_subtitles_no_directory(movies, tmpdir, monkeypatch):
video_name = os.path.split(movies['man_of_steel'].name)[1]
video_root = os.path.splitext(video_name)[0]
tmpdir.ensure(video_name)
monkeypatch.chdir(str(tmpdir))
expected_subtitles = {
video_name + '.srt': Language('und'),
video_root + '.en.srt': Language('eng')
}
for path in expected_subtitles:
tmpdir.ensure(path)
subtitles = search_external_subtitles(video_name)
assert subtitles == expected_subtitles
def test_search_external_subtitles_in_directory(episodes, tmpdir):
video_name = episodes['marvels_agents_of_shield_s02e06'].name
video_root = os.path.splitext(video_name)[0]
tmpdir.ensure('tvshows', video_name)
subtitles_directory = str(tmpdir.ensure('subtitles', dir=True))
expected_subtitles = {
video_name + '.srt': Language('und'),
video_root + '.en.srt': Language('eng')
}
tmpdir.ensure('tvshows', video_name + '.fr.srt')
for path in expected_subtitles:
tmpdir.ensure('subtitles', path)
subtitles = search_external_subtitles(video_name, directory=subtitles_directory)
assert subtitles == expected_subtitles
def test_scan_video_movie(movies, tmpdir, monkeypatch):
video = movies['man_of_steel']
monkeypatch.chdir(str(tmpdir))
tmpdir.ensure(video.name)
scanned_video = scan_video(video.name)
assert scanned_video.name == video.name
assert scanned_video.source == video.source
assert scanned_video.release_group == video.release_group
assert scanned_video.resolution == video.resolution
assert scanned_video.video_codec == video.video_codec
assert scanned_video.audio_codec is None
assert scanned_video.imdb_id is None
assert scanned_video.hashes == {}
assert scanned_video.size == 0
assert scanned_video.subtitle_languages == set()
assert scanned_video.title == video.title
assert scanned_video.year == video.year
def test_scan_video_episode(episodes, tmpdir, monkeypatch):
video = episodes['bbt_s07e05']
monkeypatch.chdir(str(tmpdir))
tmpdir.ensure(video.name)
scanned_video = scan_video(video.name)
    assert scanned_video.name == video.name
assert scanned_video.source == video.source
assert scanned_video.release_group == video.release_group
assert scanned_video.resolution == video.resolution
assert scanned_video.video_codec == video.video_codec
assert scanned_video.audio_codec is None
assert scanned_video.imdb_id is None
assert scanned_video.hashes == {}
assert scanned_video.size == 0
assert scanned_video.subtitle_languages == set()
assert scanned_video.series == video.series
assert scanned_video.season == video.season
assert scanned_video.episode == video.episode
assert scanned_video.title is None
assert scanned_video.year is None
assert scanned_video.tvdb_id is None
def test_refine_video_metadata(mkv):
scanned_video = scan_video(mkv['test5'])
refine(scanned_video, episode_refiners=('metadata',), movie_refiners=('metadata',))
assert type(scanned_video) is Movie
assert scanned_video.name == mkv['test5']
assert scanned_video.source is None
assert scanned_video.release_group is None
assert scanned_video.resolution is None
assert scanned_video.video_codec == 'H.264'
assert scanned_video.audio_codec == 'AAC'
assert scanned_video.imdb_id is None
assert scanned_video.hashes == {
'opensubtitles': '49e2530ea3bd0d18',
'shooter': '36f3e2c50566ca01f939bf15d8031432;b6132ab62b8f7d4aaabe9d6344b90d90;'
'bea6074cef7f1de85794f3941530ba8b;18db05758d5d0d96f246249e4e4b5d79',
'thesubdb': '64a8b87f12daa4f31895616e6c3fd39e'}
assert scanned_video.size == 31762747
assert scanned_video.subtitle_languages == {Language('spa'), Language('deu'), Language('jpn'), Language('und'),
Language('ita'), Language('fra'), Language('hun')}
assert scanned_video.title == 'test5'
assert scanned_video.year is None
def test_scan_video_path_does_not_exist(movies):
with pytest.raises(ValueError) as excinfo:
scan_video(movies['man_of_steel'].name)
assert str(excinfo.value) == 'Path does not exist'
def test_scan_video_invalid_extension(movies, tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
movie_name = os.path.splitext(movies['man_of_steel'].name)[0] + '.mp3'
tmpdir.ensure(movie_name)
with pytest.raises(ValueError) as excinfo:
scan_video(movie_name)
assert str(excinfo.value) == '\'.mp3\' is not a valid video extension'
def test_scan_video_broken(mkv, tmpdir, monkeypatch):
broken_path = 'test1.mkv'
with io.open(mkv['test1'], 'rb') as original:
with tmpdir.join(broken_path).open('wb') as broken:
broken.write(original.read(512))
monkeypatch.chdir(str(tmpdir))
scanned_video = scan_video(broken_path)
assert type(scanned_video) is Movie
assert scanned_video.name == str(broken_path)
assert scanned_video.source is None
assert scanned_video.release_group is None
assert scanned_video.resolution is None
assert scanned_video.video_codec is None
assert scanned_video.audio_codec is None
assert scanned_video.imdb_id is None
assert scanned_video.hashes == {}
assert scanned_video.size == 512
assert scanned_video.subtitle_languages == set()
assert scanned_video.title == 'test1'
assert scanned_video.year is None
def test_scan_archive_invalid_extension(movies, tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
movie_name = os.path.splitext(movies['interstellar'].name)[0] + '.mp3'
tmpdir.ensure(movie_name)
with pytest.raises(ValueError) as excinfo:
scan_archive(movie_name)
assert str(excinfo.value) == '\'.mp3\' is not a valid archive'
def test_scan_videos_path_does_not_exist(movies):
with pytest.raises(ValueError) as excinfo:
scan_videos(movies['man_of_steel'].name)
assert str(excinfo.value) == 'Path does not exist'
def test_scan_videos_path_is_not_a_directory(movies, tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
tmpdir.ensure(movies['man_of_steel'].name)
with pytest.raises(ValueError) as excinfo:
scan_videos(movies['man_of_steel'].name)
assert str(excinfo.value) == 'Path is not a directory'
def test_scan_videos(movies, tmpdir, monkeypatch):
man_of_steel = tmpdir.ensure('movies', movies['man_of_steel'].name)
tmpdir.ensure('movies', '.private', 'sextape.mkv')
tmpdir.ensure('movies', '.hidden_video.mkv')
tmpdir.ensure('movies', 'Sample', 'video.mkv')
tmpdir.ensure('movies', 'sample.mkv')
tmpdir.ensure('movies', movies['enders_game'].name)
tmpdir.ensure('movies', movies['interstellar'].name)
tmpdir.ensure('movies', os.path.splitext(movies['enders_game'].name)[0] + '.nfo')
tmpdir.ensure('movies', 'watched', dir=True)
watched_path = tmpdir.join('movies', 'watched', os.path.split(movies['man_of_steel'].name)[1])
if hasattr(watched_path, 'mksymlinkto'):
watched_path.mksymlinkto(man_of_steel)
# mock scan_video and scan_archive with the correct types
mock_video = Mock(subtitle_languages=set())
mock_scan_video = Mock(return_value=mock_video)
monkeypatch.setattr('subliminal.core.scan_video', mock_scan_video)
mock_scan_archive = Mock(return_value=mock_video)
monkeypatch.setattr('subliminal.core.scan_archive', mock_scan_archive)
monkeypatch.chdir(str(tmpdir))
videos = scan_videos('movies')
# general asserts
assert len(videos) == 3
assert mock_scan_video.call_count == 2
assert mock_scan_archive.call_count == 1
# scan_video calls
kwargs = dict()
scan_video_calls = [((os.path.join('movies', movies['man_of_steel'].name),), kwargs),
((os.path.join('movies', movies['enders_game'].name),), kwargs)]
mock_scan_video.assert_has_calls(scan_video_calls, any_order=True)
# scan_archive calls
kwargs = dict()
scan_archive_calls = [((os.path.join('movies', movies['interstellar'].name),), kwargs)]
mock_scan_archive.assert_has_calls(scan_archive_calls, any_order=True)
def test_scan_videos_age(movies, tmpdir, monkeypatch):
tmpdir.ensure('movies', movies['man_of_steel'].name)
tmpdir.ensure('movies', movies['enders_game'].name).setmtime(timestamp(datetime.utcnow() - timedelta(days=10)))
# mock scan_video and scan_archive with the correct types
mock_video = Mock(subtitle_languages=set())
mock_scan_video = Mock(return_value=mock_video)
monkeypatch.setattr('subliminal.core.scan_video', mock_scan_video)
mock_scan_archive = Mock(return_value=mock_video)
monkeypatch.setattr('subliminal.core.scan_archive', mock_scan_archive)
monkeypatch.chdir(str(tmpdir))
videos = scan_videos('movies', age=timedelta(days=7))
# general asserts
assert len(videos) == 1
assert mock_scan_video.call_count == 1
assert mock_scan_archive.call_count == 0
# scan_video calls
kwargs = dict()
scan_video_calls = [((os.path.join('movies', movies['man_of_steel'].name),), kwargs)]
mock_scan_video.assert_has_calls(scan_video_calls, any_order=True)
def test_list_subtitles_movie(movies, mock_providers):
video = movies['man_of_steel']
languages = {Language('eng')}
subtitles = list_subtitles({video}, languages)
# test providers
assert not provider_manager['addic7ed'].plugin.list_subtitles.called
assert provider_manager['legendastv'].plugin.list_subtitles.called
assert provider_manager['opensubtitles'].plugin.list_subtitles.called
assert provider_manager['podnapisi'].plugin.list_subtitles.called
assert provider_manager['shooter'].plugin.list_subtitles.called
assert provider_manager['thesubdb'].plugin.list_subtitles.called
assert not provider_manager['tvsubtitles'].plugin.list_subtitles.called
# test result
assert len(subtitles) == 1
assert sorted(subtitles[movies['man_of_steel']]) == ['legendastv', 'opensubtitles', 'podnapisi', 'shooter',
'thesubdb']
def test_list_subtitles_episode(episodes, mock_providers):
video = episodes['bbt_s07e05']
languages = {Language('eng'), Language('heb')}
subtitles = list_subtitles({video}, languages)
# test providers
assert provider_manager['addic7ed'].plugin.list_subtitles.called
assert provider_manager['legendastv'].plugin.list_subtitles.called
assert provider_manager['opensubtitles'].plugin.list_subtitles.called
assert provider_manager['podnapisi'].plugin.list_subtitles.called
assert provider_manager['shooter'].plugin.list_subtitles.called
assert provider_manager['thesubdb'].plugin.list_subtitles.called
assert provider_manager['tvsubtitles'].plugin.list_subtitles.called
# test result
assert len(subtitles) == 1
assert sorted(subtitles[episodes['bbt_s07e05']]) == ['addic7ed', 'legendastv', 'opensubtitles', 'podnapisi',
'shooter', 'thesubdb', 'tvsubtitles']
def test_list_subtitles_providers(episodes, mock_providers):
video = episodes['bbt_s07e05']
languages = {Language('eng')}
subtitles = list_subtitles({video}, languages, providers=['addic7ed'])
# test providers
assert provider_manager['addic7ed'].plugin.list_subtitles.called
assert not provider_manager['opensubtitles'].plugin.list_subtitles.called
assert not provider_manager['podnapisi'].plugin.list_subtitles.called
assert not provider_manager['thesubdb'].plugin.list_subtitles.called
assert not provider_manager['tvsubtitles'].plugin.list_subtitles.called
# test result
assert len(subtitles) == 1
assert sorted(subtitles[episodes['bbt_s07e05']]) == ['addic7ed']
def test_list_subtitles_episode_no_hash(episodes, mock_providers):
video = episodes['dallas_s01e03']
languages = {Language('eng'), Language('heb')}
subtitles = list_subtitles({video}, languages)
# test providers
assert provider_manager['addic7ed'].plugin.list_subtitles.called
assert provider_manager['legendastv'].plugin.list_subtitles.called
assert provider_manager['opensubtitles'].plugin.list_subtitles.called
assert provider_manager['podnapisi'].plugin.list_subtitles.called
assert not provider_manager['thesubdb'].plugin.list_subtitles.called
assert provider_manager['tvsubtitles'].plugin.list_subtitles.called
# test result
assert len(subtitles) == 1
assert sorted(subtitles[episodes['dallas_s01e03']]) == ['addic7ed', 'legendastv', 'opensubtitles', 'podnapisi',
'shooter', 'tvsubtitles']
def test_list_subtitles_no_language(episodes, mock_providers):
video = episodes['dallas_s01e03']
languages = {Language('eng')}
video.subtitle_languages = languages
subtitles = list_subtitles({video}, languages)
# test providers
assert not provider_manager['addic7ed'].plugin.list_subtitles.called
assert not provider_manager['opensubtitles'].plugin.list_subtitles.called
assert not provider_manager['podnapisi'].plugin.list_subtitles.called
assert not provider_manager['thesubdb'].plugin.list_subtitles.called
assert not provider_manager['tvsubtitles'].plugin.list_subtitles.called
# test result
assert len(subtitles) == 0
def test_download_subtitles(mock_providers):
subtitles = [
Addic7edSubtitle(Language('eng'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity', 2007,
'DIMENSION', None),
TheSubDBSubtitle(Language('eng'), 'ad32876133355929d814457537e12dc2'),
TVsubtitlesSubtitle(Language('por'), None, 261077, 'Game of Thrones', 3, 10, None, '1080p.BluRay', 'DEMAND')
]
download_subtitles(subtitles)
# test providers
assert provider_manager['addic7ed'].plugin.download_subtitle.called
assert not provider_manager['legendastv'].plugin.download_subtitle.called
assert not provider_manager['opensubtitles'].plugin.download_subtitle.called
assert not provider_manager['podnapisi'].plugin.download_subtitle.called
assert provider_manager['thesubdb'].plugin.download_subtitle.called
assert provider_manager['tvsubtitles'].plugin.download_subtitle.called
@pytest.mark.integration
@vcr.use_cassette
def test_download_best_subtitles(episodes):
video = episodes['bbt_s07e05']
languages = {Language('nld'), Language('por', 'BR')}
providers = ['addic7ed', 'thesubdb']
expected_subtitles = {('addic7ed', 'updated/17/80254/11'), ('thesubdb', '9dbbfb7ba81c9a6237237dae8589fccc-pt-BR')}
subtitles = download_best_subtitles({video}, languages, providers=providers)
assert len(subtitles) == 1
assert len(subtitles[video]) == 2
assert {(s.provider_name, s.id) for s in subtitles[video]} == expected_subtitles
@pytest.mark.integration
@vcr.use_cassette
def test_download_best_subtitles_min_score(episodes):
video = episodes['bbt_s07e05']
languages = {Language('fra')}
providers = ['addic7ed']
subtitles = download_best_subtitles({video}, languages, min_score=episode_scores['hash'], providers=providers)
assert len(subtitles) == 1
assert len(subtitles[video]) == 0
def test_download_best_subtitles_no_language(episodes):
video = episodes['bbt_s07e05']
languages = {Language('fra')}
video.subtitle_languages = languages
providers = ['addic7ed']
subtitles = download_best_subtitles({video}, languages, min_score=episode_scores['hash'], providers=providers)
assert len(subtitles) == 0
def test_download_best_subtitles_undefined(episodes):
video = episodes['bbt_s07e05']
languages = {Language('und')}
video.subtitle_languages = languages
providers = ['addic7ed']
subtitles = download_best_subtitles({video}, languages, min_score=episode_scores['hash'], only_one=True,
providers=providers)
assert len(subtitles) == 0
@pytest.mark.integration
@vcr.use_cassette('test_download_best_subtitles')
def test_download_best_subtitles_only_one(episodes):
video = episodes['bbt_s07e05']
languages = {Language('nld'), Language('por', 'BR')}
providers = ['addic7ed', 'thesubdb']
expected_subtitles = {('thesubdb', '9dbbfb7ba81c9a6237237dae8589fccc-pt-BR')}
subtitles = download_best_subtitles({video}, languages, only_one=True, providers=providers)
assert len(subtitles) == 1
assert len(subtitles[video]) == 1
assert {(s.provider_name, s.id) for s in subtitles[video]} == expected_subtitles
def test_save_subtitles(movies, tmpdir, monkeypatch):
monkeypatch.chdir(str(tmpdir))
tmpdir.ensure(movies['man_of_steel'].name)
subtitle_no_content = Subtitle(Language('eng'))
subtitle = Subtitle(Language('fra'))
subtitle.content = b'Some content'
subtitle_other = Subtitle(Language('fra'))
subtitle_other.content = b'Some other content'
subtitle_pt_br = Subtitle(Language('por', 'BR'))
subtitle_pt_br.content = b'Some brazilian content'
subtitles = [subtitle_no_content, subtitle, subtitle_other, subtitle_pt_br]
save_subtitles(movies['man_of_steel'], subtitles)
# subtitle without content is skipped
path = os.path.join(str(tmpdir), os.path.splitext(movies['man_of_steel'].name)[0] + '.en.srt')
assert not os.path.exists(path)
# first subtitle with language is saved
path = os.path.join(str(tmpdir), os.path.splitext(movies['man_of_steel'].name)[0] + '.fr.srt')
assert os.path.exists(path)
assert io.open(path, 'rb').read() == b'Some content'
# ietf language in path
path = os.path.join(str(tmpdir), os.path.splitext(movies['man_of_steel'].name)[0] + '.pt-BR.srt')
assert os.path.exists(path)
assert io.open(path, 'rb').read() == b'Some brazilian content'
def test_save_subtitles_single_directory_encoding(movies, tmpdir):
subtitle = Subtitle(Language('jpn'))
subtitle.content = u'ハローワールド'.encode('shift-jis')
subtitle_pt_br = Subtitle(Language('por', 'BR'))
subtitle_pt_br.content = b'Some brazilian content'
subtitles = [subtitle, subtitle_pt_br]
save_subtitles(movies['man_of_steel'], subtitles, single=True, directory=str(tmpdir), encoding='utf-8')
# first subtitle only and correctly encoded
path = os.path.join(str(tmpdir), os.path.splitext(os.path.split(movies['man_of_steel'].name)[1])[0] + '.srt')
assert os.path.exists(path)
assert io.open(path, encoding='utf-8').read() == u'ハローワールド'
@pytest.mark.integration
@vcr.use_cassette
def test_download_bad_subtitle(movies):
pool = ProviderPool()
subtitles = pool.list_subtitles_provider('legendastv', movies['man_of_steel'], {Language('eng')})
pool.download_subtitle(subtitles[0])
assert subtitles[0].content is None
assert subtitles[0].is_valid() is False
def test_scan_archive_with_one_video(rar, mkv):
rar_file = rar['video']
actual = scan_archive(rar_file)
assert actual.name == os.path.join(os.path.split(rar_file)[0], mkv['test1'])
def test_scan_archive_with_multiple_videos(rar, mkv):
rar_file = rar['videos']
actual = scan_archive(rar_file)
assert actual.name == os.path.join(os.path.split(rar_file)[0], mkv['test5'])
def test_scan_archive_with_no_video(rar):
with pytest.raises(ValueError) as excinfo:
scan_archive(rar['simple'])
assert excinfo.value.args == ('No video in archive', )
def test_scan_bad_archive(mkv):
with pytest.raises(ValueError) as excinfo:
scan_archive(mkv['test1'])
assert excinfo.value.args == ("'.mkv' is not a valid archive", )
def test_scan_password_protected_archive(rar):
with pytest.raises(ValueError) as excinfo:
scan_archive(rar['pwd-protected'])
assert excinfo.value.args == ('Rar requires a password', )
|
import numpy as np
import tensorflow as tf
import pytest
from tensornetwork.backends.tensorflow import tensorflow_backend
tf_randn_dtypes = [tf.float32, tf.float16, tf.float64]
tf_dtypes = tf_randn_dtypes + [tf.complex128, tf.complex64]
def test_tensordot():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones((2, 3, 4)))
b = backend.convert_to_tensor(np.ones((2, 3, 4)))
actual = backend.tensordot(a, b, ((1, 2), (1, 2)))
expected = np.array([[24.0, 24.0], [24.0, 24.0]])
np.testing.assert_allclose(expected, actual)
def test_tensordot_int():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones((3, 3, 3)))
b = backend.convert_to_tensor(np.ones((3, 3, 3)))
actual = backend.tensordot(a, b, 1)
expected = tf.tensordot(a, b, 1)
np.testing.assert_allclose(expected, actual)
def test_reshape():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(np.ones((2, 3, 4)))
actual = backend.shape_tuple(backend.reshape(a, np.array((6, 4, 1))))
assert actual == (6, 4, 1)
def test_transpose():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(
np.array([[[1., 2.], [3., 4.]], [[5., 6.], [7., 8.]]]))
actual = backend.transpose(a, [2, 0, 1])
expected = np.array([[[1.0, 3.0], [5.0, 7.0]], [[2.0, 4.0], [6.0, 8.0]]])
np.testing.assert_allclose(expected, actual)
def test_transpose_noperm():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(
np.array([[[1., 2.], [3., 4.]], [[5., 6.], [7., 8.]]]))
actual = backend.transpose(a) # [2, 1, 0]
actual = backend.transpose(actual, perm=[0, 2, 1])
expected = np.array([[[1.0, 3.0], [5.0, 7.0]], [[2.0, 4.0], [6.0, 8.0]]])
np.testing.assert_allclose(expected, actual)
def test_shape_concat():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones((1, 3, 1)))
b = backend.convert_to_tensor(np.ones((1, 2, 1)))
  actual = backend.shape_concat((a, b), axis=1)
  expected = np.array([[[2.0], [2.0], [2.0], [1.0], [1.0]]])
  np.testing.assert_allclose(expected, actual)
def test_slice():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(
np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.]]))
actual = backend.slice(a, (1, 1), (2, 2))
expected = np.array([[5., 6.], [8., 9.]])
np.testing.assert_allclose(expected, actual)
def test_shape_tensor():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(np.ones([2, 3, 4]))
assert isinstance(backend.shape_tensor(a), type(a))
actual = backend.shape_tensor(a)
expected = np.array([2, 3, 4])
np.testing.assert_allclose(expected, actual)
def test_shape_tuple():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(np.ones([2, 3, 4]))
actual = backend.shape_tuple(a)
assert actual == (2, 3, 4)
def test_shape_prod():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones([1, 2, 3, 4]))
actual = np.array(backend.shape_prod(a))
assert actual == 2**24
def test_sqrt():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(np.array([4., 9.]))
actual = backend.sqrt(a)
expected = np.array([2, 3])
np.testing.assert_allclose(expected, actual)
def test_convert_to_tensor():
backend = tensorflow_backend.TensorFlowBackend()
array = np.ones((2, 3, 4))
actual = backend.convert_to_tensor(array)
expected = tf.ones((2, 3, 4))
assert isinstance(actual, type(expected))
np.testing.assert_allclose(expected, actual)
def test_outer_product():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones((2, 1)))
b = backend.convert_to_tensor(np.ones((1, 2, 2)))
actual = backend.outer_product(a, b)
expected = np.array([[[[[2.0, 2.0], [2.0, 2.0]]]], [[[[2.0, 2.0], [2.0,
2.0]]]]])
np.testing.assert_allclose(expected, actual)
def test_einsum():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(2 * np.ones((2, 1)))
b = backend.convert_to_tensor(np.ones((1, 2, 2)))
actual = backend.einsum('ij,jil->l', a, b)
expected = np.array([4.0, 4.0])
np.testing.assert_allclose(expected, actual)
def test_norm():
backend = tensorflow_backend.TensorFlowBackend()
a = backend.convert_to_tensor(np.ones((2, 2)))
assert backend.norm(a).numpy() == 2
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_eye(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.eye(N=4, M=5, dtype=dtype)
np.testing.assert_allclose(tf.eye(num_rows=4, num_columns=5, dtype=dtype), a)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_ones(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.ones((4, 4), dtype=dtype)
np.testing.assert_allclose(tf.ones((4, 4), dtype=dtype), a)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_zeros(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.zeros((4, 4), dtype=dtype)
np.testing.assert_allclose(tf.zeros((4, 4), dtype=dtype), a)
@pytest.mark.parametrize("dtype", tf_randn_dtypes)
def test_randn(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.randn((4, 4), dtype=dtype)
assert a.shape == (4, 4)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_random_uniform(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.random_uniform((4, 4), dtype=dtype, seed=10)
assert a.shape == (4, 4)
@pytest.mark.parametrize("dtype", [tf.complex64, tf.complex128])
def test_randn_non_zero_imag(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.randn((4, 4), dtype=dtype)
assert tf.math.greater(tf.linalg.norm(tf.math.imag(a)), 0.0)
@pytest.mark.parametrize("dtype", [tf.complex64, tf.complex128])
def test_random_uniform_non_zero_imag(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.random_uniform((4, 4), dtype=dtype, seed=10)
assert tf.math.greater(tf.linalg.norm(tf.math.imag(a)), 0.0)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_eye_dtype(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.eye(N=4, M=4, dtype=dtype)
assert a.dtype == dtype
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_ones_dtype(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.ones((4, 4), dtype=dtype)
assert a.dtype == dtype
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_zeros_dtype(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.zeros((4, 4), dtype=dtype)
assert a.dtype == dtype
@pytest.mark.parametrize("dtype", tf_randn_dtypes)
def test_randn_dtype(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.randn((4, 4), dtype=dtype)
assert a.dtype == dtype
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_random_uniform_dtype(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.random_uniform((4, 4), dtype=dtype, seed=10)
assert a.dtype == dtype
@pytest.mark.parametrize("dtype", tf_randn_dtypes)
def test_randn_seed(dtype):
backend = tensorflow_backend.TensorFlowBackend()
a = backend.randn((4, 4), seed=10, dtype=dtype)
b = backend.randn((4, 4), seed=10, dtype=dtype)
np.testing.assert_allclose(a, b)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_random_uniform_seed(dtype):
test = tf.test.TestCase()
backend = tensorflow_backend.TensorFlowBackend()
a = backend.random_uniform((4, 4), seed=10, dtype=dtype)
b = backend.random_uniform((4, 4), seed=10, dtype=dtype)
test.assertAllCloseAccordingToType(a, b)
@pytest.mark.parametrize("dtype", tf_randn_dtypes)
def test_random_uniform_boundaries(dtype):
test = tf.test.TestCase()
lb = 1.2
ub = 4.8
backend = tensorflow_backend.TensorFlowBackend()
a = backend.random_uniform((4, 4), seed=10, dtype=dtype)
b = backend.random_uniform((4, 4), (lb, ub), seed=10, dtype=dtype)
test.assertAllInRange(a, 0, 1)
test.assertAllInRange(b, lb, ub)
def test_conj():
backend = tensorflow_backend.TensorFlowBackend()
real = np.random.rand(2, 2, 2)
imag = np.random.rand(2, 2, 2)
a = backend.convert_to_tensor(real + 1j * imag)
actual = backend.conj(a)
expected = real - 1j * imag
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("a, b, expected", [
pytest.param(1, 1, 2),
pytest.param(2. * np.ones(()), 1. * np.ones((1, 2, 3)), 3. * np.ones(
(1, 2, 3))),
])
def test_addition(a, b, expected):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.convert_to_tensor(a)
tensor2 = backend.convert_to_tensor(b)
result = backend.addition(tensor1, tensor2)
np.testing.assert_allclose(result, expected)
assert tensor1.dtype == tensor2.dtype == result.dtype
@pytest.mark.parametrize("a, b, expected", [
pytest.param(1, 1, 0),
pytest.param(np.ones((1, 2, 3)), np.ones((1, 2, 3)), np.zeros((1, 2, 3))),
])
def test_subtraction(a, b, expected):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.convert_to_tensor(a)
tensor2 = backend.convert_to_tensor(b)
result = backend.subtraction(tensor1, tensor2)
np.testing.assert_allclose(result, expected)
assert tensor1.dtype == tensor2.dtype == result.dtype
@pytest.mark.parametrize("a, b, expected", [
pytest.param(1, 1, 1),
pytest.param(np.ones((1, 2, 3)), np.ones((1, 2, 3)), np.ones((1, 2, 3))),
])
def test_multiply(a, b, expected):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.convert_to_tensor(a)
tensor2 = backend.convert_to_tensor(b)
result = backend.multiply(tensor1, tensor2)
np.testing.assert_allclose(result, expected)
assert tensor1.dtype == tensor2.dtype == result.dtype
@pytest.mark.parametrize("a, b, expected", [
pytest.param(2., 2., 1.),
pytest.param(
np.ones(()), 2. * np.ones((1, 2, 3)), 0.5 * np.ones((1, 2, 3))),
])
def test_divide(a, b, expected):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.convert_to_tensor(a)
tensor2 = backend.convert_to_tensor(b)
result = backend.divide(tensor1, tensor2)
np.testing.assert_allclose(result, expected)
assert tensor1.dtype == tensor2.dtype == result.dtype
@pytest.mark.parametrize("dtype", [tf.float64, tf.complex128])
def test_eigh(dtype):
backend = tensorflow_backend.TensorFlowBackend()
H = backend.randn((4, 4), dtype)
H = H + tf.math.conj(tf.transpose(H))
eta, U = backend.eigh(H)
eta_ac, U_ac = tf.linalg.eigh(H)
np.testing.assert_allclose(eta, eta_ac)
np.testing.assert_allclose(U, U_ac)
@pytest.mark.parametrize("dtype", tf_randn_dtypes)
def test_index_update(dtype):
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn((4, 2, 3), dtype=dtype, seed=10)
out = backend.index_update(tensor, tensor > 0.1, 0.0)
tensor_np = tensor.numpy()
tensor_np[tensor_np > 0.1] = 0.0
np.testing.assert_allclose(out, tensor_np)
@pytest.mark.parametrize("dtype", [tf.float64, tf.complex128])
def test_matrix_inv(dtype):
backend = tensorflow_backend.TensorFlowBackend()
matrix = backend.randn((4, 4), dtype=dtype, seed=10)
inverse = backend.inv(matrix)
m1 = tf.matmul(matrix, inverse)
m2 = tf.matmul(inverse, matrix)
np.testing.assert_almost_equal(m1, np.eye(4))
np.testing.assert_almost_equal(m2, np.eye(4))
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_matrix_inv_raises(dtype):
backend = tensorflow_backend.TensorFlowBackend()
matrix = backend.randn((4, 4, 4), dtype=dtype, seed=10)
with pytest.raises(ValueError):
backend.inv(matrix)
def test_eigs_not_implemented():
backend = tensorflow_backend.TensorFlowBackend()
with pytest.raises(NotImplementedError):
backend.eigs(np.ones((2, 2)))
def test_gmres_not_implemented():
backend = tensorflow_backend.TensorFlowBackend()
with pytest.raises(NotImplementedError):
backend.gmres(lambda x: x, np.ones((2)))
def test_eigsh_lanczos_not_implemented():
backend = tensorflow_backend.TensorFlowBackend()
with pytest.raises(NotImplementedError):
backend.eigsh_lanczos(lambda x: x, [])
@pytest.mark.parametrize("dtype", [tf.float64, tf.complex128])
def test_broadcast_right_multiplication(dtype):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.randn((2, 4, 3), dtype=dtype, seed=10)
tensor2 = backend.randn((3,), dtype=dtype, seed=10)
out = backend.broadcast_right_multiplication(tensor1, tensor2)
np.testing.assert_allclose(out, tensor1 * tensor2)
def test_broadcast_right_multiplication_raises():
dtype = tf.float64
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.randn((2, 4, 3), dtype=dtype, seed=10)
tensor2 = backend.randn((3, 3), dtype=dtype, seed=10)
with pytest.raises(ValueError):
backend.broadcast_right_multiplication(tensor1, tensor2)
@pytest.mark.parametrize("dtype", [tf.float64, tf.complex128])
def test_broadcast_left_multiplication(dtype):
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.randn((3,), dtype=dtype, seed=10)
tensor2 = backend.randn((3, 4, 2), dtype=dtype, seed=10)
out = backend.broadcast_left_multiplication(tensor1, tensor2)
np.testing.assert_allclose(out, np.reshape(tensor1, (3, 1, 1)) * tensor2)
def test_broadcast_left_multiplication_raises():
dtype = tf.float64
backend = tensorflow_backend.TensorFlowBackend()
tensor1 = backend.randn((3, 3), dtype=dtype, seed=10)
tensor2 = backend.randn((2, 4, 3), dtype=dtype, seed=10)
with pytest.raises(ValueError):
backend.broadcast_left_multiplication(tensor1, tensor2)
def test_sparse_shape():
dtype = tf.float64
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn((2, 3, 4), dtype=dtype, seed=10)
np.testing.assert_allclose(backend.sparse_shape(tensor), tensor.shape)
@pytest.mark.parametrize("dtype,method", [(tf.float64, "sin"),
(tf.complex128, "sin"),
(tf.float64, "cos"),
(tf.complex128, "cos"),
(tf.float64, "exp"),
(tf.complex128, "exp"),
(tf.float64, "log"),
(tf.complex128, "log")])
def test_elementwise_ops(dtype, method):
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn((4, 2, 1), dtype=dtype, seed=10)
if method == "log":
tensor = tf.math.abs(tensor)
tensor1 = getattr(backend, method)(tensor)
tensor2 = getattr(tf.math, method)(tensor)
print(tensor1, tensor2)
np.testing.assert_almost_equal(tensor1.numpy(), tensor2.numpy())
@pytest.mark.parametrize("dtype,method", [(tf.float64, "expm"),
(tf.complex128, "expm")])
def test_matrix_ops(dtype, method):
backend = tensorflow_backend.TensorFlowBackend()
matrix = backend.randn((4, 4), dtype=dtype, seed=10)
matrix1 = getattr(backend, method)(matrix)
matrix2 = getattr(tf.linalg, method)(matrix)
np.testing.assert_almost_equal(matrix1.numpy(), matrix2.numpy())
@pytest.mark.parametrize("dtype,method", [(tf.float64, "expm"),
(tf.complex128, "expm")])
def test_matrix_ops_raises(dtype, method):
backend = tensorflow_backend.TensorFlowBackend()
matrix = backend.randn((4, 4, 4), dtype=dtype, seed=10)
with pytest.raises(ValueError, match=r".*Only matrices.*"):
getattr(backend, method)(matrix)
matrix = backend.randn((4, 3), dtype=dtype, seed=10)
with pytest.raises(ValueError, match=r".*N\*N matrix.*"):
getattr(backend, method)(matrix)
def test_jit():
backend = tensorflow_backend.TensorFlowBackend()
def fun(x, A, y):
return tf.tensordot(x, tf.tensordot(A, y, ([1], [0])), ([0], [0]))
fun_jit = backend.jit(fun)
x = tf.convert_to_tensor(np.random.rand(4))
y = tf.convert_to_tensor(np.random.rand(4))
A = tf.convert_to_tensor(np.random.rand(4, 4))
res1 = fun(x, A, y)
res2 = fun_jit(x, A, y)
np.testing.assert_allclose(res1, res2)
def test_jit_args():
backend = tensorflow_backend.TensorFlowBackend()
def fun(x, A, y):
return tf.tensordot(x, tf.tensordot(A, y, ([1], [0])), ([0], [0]))
fun_jit = backend.jit(fun)
x = tf.convert_to_tensor(np.random.rand(4))
y = tf.convert_to_tensor(np.random.rand(4))
A = tf.convert_to_tensor(np.random.rand(4, 4))
res1 = fun(x, A, y)
res2 = fun_jit(x, A, y)
res3 = fun_jit(x, y=y, A=A)
np.testing.assert_allclose(res1, res2)
np.testing.assert_allclose(res1, res3)
def test_sum():
np.random.seed(10)
backend = tensorflow_backend.TensorFlowBackend()
tensor = np.random.rand(2, 3, 4)
a = backend.convert_to_tensor(tensor)
actual = backend.sum(a, axis=(1, 2))
expected = np.sum(tensor, axis=(1, 2))
np.testing.assert_allclose(expected, actual)
actual = backend.sum(a, axis=(1, 2), keepdims=True)
expected = np.sum(a, axis=(1, 2), keepdims=True)
np.testing.assert_allclose(expected, actual)
def test_matmul():
np.random.seed(10)
backend = tensorflow_backend.TensorFlowBackend()
t1 = np.random.rand(10, 2, 3)
t2 = np.random.rand(10, 3, 4)
a = backend.convert_to_tensor(t1)
b = backend.convert_to_tensor(t2)
actual = backend.matmul(a, b)
expected = np.matmul(t1, t2)
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("dtype", tf_dtypes)
@pytest.mark.parametrize("offset", range(-2, 2))
@pytest.mark.parametrize("axis1", [-2, 0])
@pytest.mark.parametrize("axis2", [-1, 0])
def test_diagonal(dtype, offset, axis1, axis2):
shape = (5, 5, 5, 5)
backend = tensorflow_backend.TensorFlowBackend()
array = backend.randn(shape, dtype=dtype, seed=10)
if axis1 != -2 or axis2 != -1:
with pytest.raises(NotImplementedError):
actual = backend.diagonal(array, offset=offset, axis1=axis1, axis2=axis2)
else:
actual = backend.diagonal(array, offset=offset, axis1=axis1, axis2=axis2)
expected = np.diagonal(array, offset=offset, axis1=axis1, axis2=axis2)
np.testing.assert_allclose(actual, expected)
@pytest.mark.parametrize("dtype", tf_dtypes)
@pytest.mark.parametrize("k", range(-2, 2))
def test_diagflat(dtype, k):
backend = tensorflow_backend.TensorFlowBackend()
array = backend.randn((16,), dtype=dtype, seed=10)
actual = backend.diagflat(array, k=k)
# pylint: disable=unexpected-keyword-arg
expected = tf.linalg.diag(array, k=k)
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_abs(dtype):
shape = (4, 3, 2)
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn(shape, dtype=dtype, seed=10)
actual = backend.abs(tensor)
expected = tf.math.abs(tensor)
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_sign(dtype):
shape = (4, 3, 2)
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn(shape, dtype=dtype, seed=10)
actual = backend.sign(tensor)
expected = tf.math.sign(tensor)
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("dtype", tf_dtypes)
@pytest.mark.parametrize("offset", [0, 1])
@pytest.mark.parametrize("axis1", [-2, 0])
@pytest.mark.parametrize("axis2", [-1, 0])
def test_trace(dtype, offset, axis1, axis2):
shape = (5, 5, 5, 5)
backend = tensorflow_backend.TensorFlowBackend()
tf_array = backend.randn(shape, dtype=dtype, seed=10)
array = tf_array.numpy()
if offset != 0:
with pytest.raises(NotImplementedError):
actual = backend.trace(tf_array, offset=offset, axis1=axis1, axis2=axis2)
elif axis1 == axis2:
with pytest.raises(ValueError):
actual = backend.trace(tf_array, offset=offset, axis1=axis1, axis2=axis2)
else:
actual = backend.trace(tf_array, offset=offset, axis1=axis1, axis2=axis2)
expected = np.trace(array, axis1=axis1, axis2=axis2)
tol = array.size * np.finfo(array.dtype).eps
np.testing.assert_allclose(actual, expected, rtol=tol, atol=tol)
@pytest.mark.parametrize("pivot_axis", [-1, 1, 2])
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_pivot(dtype, pivot_axis):
shape = (4, 3, 2, 8)
pivot_shape = (np.prod(shape[:pivot_axis]), np.prod(shape[pivot_axis:]))
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.randn(shape, dtype=dtype, seed=10)
expected = tf.reshape(tensor, pivot_shape)
actual = backend.pivot(tensor, pivot_axis=pivot_axis)
np.testing.assert_allclose(expected, actual)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_item(dtype):
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.ones(1, dtype=dtype) * 5.0
assert backend.item(tensor) == 5.0
backend = tensorflow_backend.TensorFlowBackend()
tensor = backend.ones((2, 1), dtype=dtype)
with pytest.raises(ValueError, match="expected"):
backend.item(tensor)
@pytest.mark.parametrize("dtype", tf_dtypes)
def test_power(dtype):
shape = (4, 3, 2)
backend = tensorflow_backend.TensorFlowBackend()
base_tensor = backend.randn(shape, dtype=dtype, seed=10)
power_tensor = backend.randn(shape, dtype=dtype, seed=10)
actual = backend.power(base_tensor, power_tensor)
expected = tf.math.pow(base_tensor, power_tensor)
np.testing.assert_allclose(expected, actual)
power = np.random.rand(1)[0]
actual = backend.power(base_tensor, power)
expected = tf.math.pow(base_tensor, power)
np.testing.assert_allclose(expected, actual)
|
from copy import deepcopy
from datetime import timedelta
import logging
import statistics
from requests.exceptions import ConnectTimeout, HTTPError
from solaredge_local import SolarEdge
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_NAME,
ELECTRICAL_CURRENT_AMPERE,
ENERGY_WATT_HOUR,
FREQUENCY_HERTZ,
POWER_WATT,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
VOLT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
DOMAIN = "solaredge_local"
UPDATE_DELAY = timedelta(seconds=10)
INVERTER_MODES = (
"SHUTTING_DOWN",
"ERROR",
"STANDBY",
"PAIRING",
"POWER_PRODUCTION",
"AC_CHARGING",
"NOT_PAIRED",
"NIGHT_MODE",
"GRID_MONITORING",
"IDLE",
)
# Supported sensor types:
# Key: ['json_key', 'name', unit, icon, attribute name]
SENSOR_TYPES = {
"current_AC_voltage": ["gridvoltage", "Grid Voltage", VOLT, "mdi:current-ac", None],
"current_DC_voltage": ["dcvoltage", "DC Voltage", VOLT, "mdi:current-dc", None],
"current_frequency": [
"gridfrequency",
"Grid Frequency",
FREQUENCY_HERTZ,
"mdi:current-ac",
None,
],
"current_power": [
"currentPower",
"Current Power",
POWER_WATT,
"mdi:solar-power",
None,
],
"energy_this_month": [
"energyThisMonth",
"Energy This Month",
ENERGY_WATT_HOUR,
"mdi:solar-power",
None,
],
"energy_this_year": [
"energyThisYear",
"Energy This Year",
ENERGY_WATT_HOUR,
"mdi:solar-power",
None,
],
"energy_today": [
"energyToday",
"Energy Today",
ENERGY_WATT_HOUR,
"mdi:solar-power",
None,
],
"inverter_temperature": [
"invertertemperature",
"Inverter Temperature",
TEMP_CELSIUS,
"mdi:thermometer",
"operating_mode",
],
"lifetime_energy": [
"energyTotal",
"Lifetime Energy",
ENERGY_WATT_HOUR,
"mdi:solar-power",
None,
],
"optimizer_connected": [
"optimizers",
"Optimizers Online",
"optimizers",
"mdi:solar-panel",
"optimizers_connected",
],
"optimizer_current": [
"optimizercurrent",
"Average Optimizer Current",
ELECTRICAL_CURRENT_AMPERE,
"mdi:solar-panel",
None,
],
"optimizer_power": [
"optimizerpower",
"Average Optimizer Power",
POWER_WATT,
"mdi:solar-panel",
None,
],
"optimizer_temperature": [
"optimizertemperature",
"Average Optimizer Temperature",
TEMP_CELSIUS,
"mdi:solar-panel",
None,
],
"optimizer_voltage": [
"optimizervoltage",
"Average Optimizer Voltage",
VOLT,
"mdi:solar-panel",
None,
],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default="SolarEdge"): cv.string,
}
)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Create the SolarEdge Monitoring API sensor."""
ip_address = config[CONF_IP_ADDRESS]
platform_name = config[CONF_NAME]
# Create new SolarEdge object to retrieve data.
api = SolarEdge(f"http://{ip_address}/")
# Check if api can be reached and site is active.
try:
status = api.get_status()
_LOGGER.debug("Credentials correct and site is active")
except AttributeError:
_LOGGER.error("Missing details data in solaredge status")
_LOGGER.debug("Status is: %s", status)
return
except (ConnectTimeout, HTTPError):
_LOGGER.error("Could not retrieve details from SolarEdge API")
return
# Changing inverter temperature unit.
sensors = deepcopy(SENSOR_TYPES)
if status.inverters.primary.temperature.units.farenheit:
sensors["inverter_temperature"] = [
"invertertemperature",
"Inverter Temperature",
TEMP_FAHRENHEIT,
"mdi:thermometer",
"operating_mode",
]
try:
if status.metersList[0]:
sensors["import_current_power"] = [
"currentPowerimport",
"current import Power",
POWER_WATT,
"mdi:arrow-collapse-down",
None,
]
sensors["import_meter_reading"] = [
"totalEnergyimport",
"total import Energy",
ENERGY_WATT_HOUR,
"mdi:counter",
None,
]
except IndexError:
_LOGGER.debug("Import meter sensors are not created")
try:
if status.metersList[1]:
sensors["export_current_power"] = [
"currentPowerexport",
"current export Power",
POWER_WATT,
"mdi:arrow-expand-up",
None,
]
sensors["export_meter_reading"] = [
"totalEnergyexport",
"total export Energy",
ENERGY_WATT_HOUR,
"mdi:counter",
None,
]
except IndexError:
_LOGGER.debug("Export meter sensors are not created")
# Create solaredge data service which will retrieve and update the data.
data = SolarEdgeData(hass, api)
# Create a new sensor for each sensor type.
entities = []
for sensor_info in sensors.values():
sensor = SolarEdgeSensor(
platform_name,
data,
sensor_info[0],
sensor_info[1],
sensor_info[2],
sensor_info[3],
sensor_info[4],
)
entities.append(sensor)
add_entities(entities, True)
class SolarEdgeSensor(Entity):
"""Representation of an SolarEdge Monitoring API sensor."""
def __init__(self, platform_name, data, json_key, name, unit, icon, attr):
"""Initialize the sensor."""
self._platform_name = platform_name
self._data = data
self._state = None
self._json_key = json_key
self._name = name
self._unit_of_measurement = unit
self._icon = icon
self._attr = attr
@property
def name(self):
"""Return the name."""
return f"{self._platform_name} ({self._name})"
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._attr:
try:
return {self._attr: self._data.info[self._json_key]}
except KeyError:
return None
return None
@property
def icon(self):
"""Return the sensor icon."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
def update(self):
"""Get the latest data from the sensor and update the state."""
self._data.update()
self._state = self._data.data[self._json_key]
class SolarEdgeData:
"""Get and update the latest data."""
def __init__(self, hass, api):
"""Initialize the data object."""
self.hass = hass
self.api = api
self.data = {}
self.info = {}
@Throttle(UPDATE_DELAY)
def update(self):
"""Update the data from the SolarEdge Monitoring API."""
try:
status = self.api.get_status()
_LOGGER.debug("Status from SolarEdge: %s", status)
except ConnectTimeout:
_LOGGER.error("Connection timeout, skipping update")
return
except HTTPError:
_LOGGER.error("Could not retrieve status, skipping update")
return
try:
maintenance = self.api.get_maintenance()
_LOGGER.debug("Maintenance from SolarEdge: %s", maintenance)
except ConnectTimeout:
_LOGGER.error("Connection timeout, skipping update")
return
except HTTPError:
_LOGGER.error("Could not retrieve maintenance, skipping update")
return
temperature = []
voltage = []
current = []
power = 0
for optimizer in maintenance.diagnostics.inverters.primary.optimizer:
if not optimizer.online:
continue
temperature.append(optimizer.temperature.value)
voltage.append(optimizer.inputV)
current.append(optimizer.inputC)
if not voltage:
temperature.append(0)
voltage.append(0)
current.append(0)
else:
power = statistics.mean(voltage) * statistics.mean(current)
if status.sn:
self.data["energyTotal"] = round(status.energy.total, 2)
self.data["energyThisYear"] = round(status.energy.thisYear, 2)
self.data["energyThisMonth"] = round(status.energy.thisMonth, 2)
self.data["energyToday"] = round(status.energy.today, 2)
self.data["currentPower"] = round(status.powerWatt, 2)
self.data["invertertemperature"] = round(
status.inverters.primary.temperature.value, 2
)
self.data["dcvoltage"] = round(status.inverters.primary.voltage, 2)
self.data["gridfrequency"] = round(status.frequencyHz, 2)
self.data["gridvoltage"] = round(status.voltage, 2)
self.data["optimizers"] = status.optimizersStatus.online
self.info["optimizers"] = status.optimizersStatus.total
self.info["invertertemperature"] = INVERTER_MODES[status.status]
try:
if status.metersList[1]:
self.data["currentPowerimport"] = status.metersList[1].currentPower
self.data["totalEnergyimport"] = status.metersList[1].totalEnergy
except IndexError:
pass
try:
if status.metersList[0]:
self.data["currentPowerexport"] = status.metersList[0].currentPower
self.data["totalEnergyexport"] = status.metersList[0].totalEnergy
except IndexError:
pass
if maintenance.system.name:
self.data["optimizertemperature"] = round(statistics.mean(temperature), 2)
self.data["optimizervoltage"] = round(statistics.mean(voltage), 2)
self.data["optimizercurrent"] = round(statistics.mean(current), 2)
self.data["optimizerpower"] = round(power, 2)
|
from os import path as p
from subprocess import call, check_call, CalledProcessError
from manager import Manager
manager = Manager()
BASEDIR = p.dirname(__file__)
DEF_WHERE = ["riko", "tests", "examples", "setup.py", "manage.py"]
def _upload():
"""Upload distribution files"""
dist = p.join(BASEDIR, 'dist', '*')
arg = '--repository-url'
url = 'https://upload.pypi.org/legacy/'
check_call(['twine', 'upload', arg, url, dist])
def _sdist():
"""Create a source distribution package"""
check_call(p.join(BASEDIR, 'helpers', 'srcdist'))
def _wheel():
"""Create a wheel package"""
check_call(p.join(BASEDIR, 'helpers', 'wheel'))
def _clean():
"""Remove Python file and build artifacts"""
check_call(p.join(BASEDIR, 'helpers', 'clean'))
@manager.command
def check():
"""Check staged changes for lint errors"""
exit(call(p.join(BASEDIR, 'helpers', 'check-stage')))
@manager.arg('where', 'w', help='Modules to check', default='meza')
@manager.arg('strict', 's', help='Check with pylint')
@manager.arg('compatibility', 'c', help='Check with pylint porting checker')
@manager.command
def lint(where=None, strict=False, compatibility=False):
"""Check style with linters"""
_where = where or ' '.join(DEF_WHERE)
command = f"pylint --rcfile=tests/standard.rc -rn -fparseable {_where}"
try:
check_call(['flake8'] + _where.split(' '))
if strict:
check_call(command, shell=True)
if compatibility:
check_call(f"{command} --py3k", shell=True)
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def pipme():
"""Install requirements.txt"""
    exit(call(['pip', 'install', '-r', 'requirements.txt']))
@manager.arg('where', 'w', help='requirements file', default=None)
@manager.command
def require(where=None):
"""Create requirements.txt"""
prefix = '%s-' % where if where else ''
cmd = 'pip freeze -l | grep -xFf %srequirements.txt' % prefix
exit(check_call(cmd, shell=True))
@manager.arg('source', 's', help='the tests to run', default=None)
@manager.arg('where', 'w', help='test path', default=None)
@manager.arg(
'stop', 'x', help='Stop after first error', type=bool, default=False)
@manager.arg(
'failed', 'f', help='Run failed tests', type=bool, default=False)
@manager.arg(
'cover', 'c', help='Add coverage report', type=bool, default=False)
@manager.arg('tox', 't', help='Run tox tests', type=bool, default=False)
@manager.arg('detox', 'd', help='Run detox tests', type=bool, default=False)
@manager.arg(
'verbose', 'v', help='Use detailed errors', type=bool, default=False)
@manager.arg(
'parallel', 'p', help='Run tests in parallel in multiple processes',
type=bool, default=False)
@manager.arg(
'debug', 'D', help='Use nose.loader debugger', type=bool, default=False)
@manager.command
def test(source=None, where=None, stop=False, **kwargs):
"""Run nose, tox, and script tests"""
opts = '-xv' if stop else '-v'
opts += ' --with-coverage' if kwargs.get('cover') else ''
opts += ' --failed' if kwargs.get('failed') else ' --with-id'
opts += ' --processes=-1' if kwargs.get('parallel') else ''
opts += ' --detailed-errors' if kwargs.get('verbose') else ''
opts += ' --debug=nose.loader' if kwargs.get('debug') else ''
opts += ' -w {}'.format(where) if where else ''
opts += ' {}'.format(source) if source else ''
try:
if kwargs.get('tox'):
check_call('tox')
elif kwargs.get('detox'):
check_call('detox')
else:
check_call(('nosetests {}'.format(opts)).split(' '))
if not source:
check_call(['python', p.join(BASEDIR, 'tests', 'test.py')])
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def release():
"""Package and upload a release"""
try:
_clean()
_sdist()
_wheel()
_upload()
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def build():
"""Create a source distribution and wheel package"""
try:
_clean()
_sdist()
_wheel()
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def upload():
"""Upload distribution files"""
try:
_upload()
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def sdist():
"""Create a source distribution package"""
try:
_sdist()
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def wheel():
"""Create a wheel package"""
try:
_wheel()
except CalledProcessError as e:
exit(e.returncode)
@manager.command
def clean():
"""Remove Python file and build artifacts"""
try:
_clean()
except CalledProcessError as e:
exit(e.returncode)
if __name__ == '__main__':
manager.main()
|
import logging
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import PERCENTAGE
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from homeassistant.util import slugify
from . import (
DOMAIN as MYCHEVY_DOMAIN,
ERROR_TOPIC,
MYCHEVY_ERROR,
MYCHEVY_SUCCESS,
UPDATE_TOPIC,
EVSensorConfig,
)
_LOGGER = logging.getLogger(__name__)
BATTERY_SENSOR = "batteryLevel"
SENSORS = [
EVSensorConfig("Mileage", "totalMiles", "miles", "mdi:speedometer"),
EVSensorConfig("Electric Range", "electricRange", "miles", "mdi:speedometer"),
EVSensorConfig("Charged By", "estimatedFullChargeBy"),
EVSensorConfig("Charge Mode", "chargeMode"),
EVSensorConfig(
"Battery Level", BATTERY_SENSOR, PERCENTAGE, "mdi:battery", ["charging"]
),
]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the MyChevy sensors."""
if discovery_info is None:
return
hub = hass.data[MYCHEVY_DOMAIN]
sensors = [MyChevyStatus()]
for sconfig in SENSORS:
for car in hub.cars:
sensors.append(EVSensor(hub, sconfig, car.vid))
add_entities(sensors)
class MyChevyStatus(Entity):
"""A string representing the charge mode."""
_name = "MyChevy Status"
_icon = "mdi:car-connected"
def __init__(self):
"""Initialize sensor with car connection."""
self._state = None
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
UPDATE_TOPIC, self.success
)
)
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
ERROR_TOPIC, self.error
)
)
@callback
def success(self):
"""Update state, trigger updates."""
if self._state != MYCHEVY_SUCCESS:
_LOGGER.debug("Successfully connected to mychevy website")
self._state = MYCHEVY_SUCCESS
self.async_write_ha_state()
@callback
def error(self):
"""Update state, trigger updates."""
_LOGGER.error(
"Connection to mychevy website failed. "
"This probably means the mychevy to OnStar link is down"
)
self._state = MYCHEVY_ERROR
self.async_write_ha_state()
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def name(self):
"""Return the name."""
return self._name
@property
def state(self):
"""Return the state."""
return self._state
@property
def should_poll(self):
"""Return the polling state."""
return False
class EVSensor(Entity):
"""Base EVSensor class.
The only real difference between sensors is which units and what
attribute from the car object they are returning. All logic can be
built with just setting subclass attributes.
"""
def __init__(self, connection, config, car_vid):
"""Initialize sensor with car connection."""
self._conn = connection
self._name = config.name
self._attr = config.attr
self._extra_attrs = config.extra_attrs
self._unit_of_measurement = config.unit_of_measurement
self._icon = config.icon
self._state = None
self._state_attributes = {}
self._car_vid = car_vid
self.entity_id = f"{SENSOR_DOMAIN}.{MYCHEVY_DOMAIN}_{slugify(self._car.name)}_{slugify(self._name)}"
async def async_added_to_hass(self):
"""Register callbacks."""
self.hass.helpers.dispatcher.async_dispatcher_connect(
UPDATE_TOPIC, self.async_update_callback
)
@property
def _car(self):
"""Return the car."""
return self._conn.get_car(self._car_vid)
@property
def icon(self):
"""Return the icon."""
if self._attr == BATTERY_SENSOR:
charging = self._state_attributes.get("charging", False)
return icon_for_battery_level(self.state, charging)
return self._icon
@property
def name(self):
"""Return the name."""
return self._name
@callback
def async_update_callback(self):
"""Update state."""
if self._car is not None:
self._state = getattr(self._car, self._attr, None)
for attr in self._extra_attrs:
self._state_attributes[attr] = getattr(self._car, attr)
self.async_write_ha_state()
@property
def state(self):
"""Return the state."""
return self._state
@property
def device_state_attributes(self):
"""Return all the state attributes."""
return self._state_attributes
@property
def unit_of_measurement(self):
"""Return the unit of measurement the state is expressed in."""
return self._unit_of_measurement
@property
def should_poll(self):
"""Return the polling state."""
return False
|
import django.views.defaults
import rest_framework.exceptions
from django.conf import settings
from django.middleware.csrf import REASON_NO_CSRF_COOKIE, REASON_NO_REFERER
from django.utils.translation import gettext as _
from sentry_sdk import last_event_id
from weblate.trans.util import render
from weblate.utils.errors import report_error
def bad_request(request, exception=None):
"""Error handler for bad request."""
if "text/html" not in request.META.get("HTTP_ACCEPT", ""):
return rest_framework.exceptions.bad_request(request, exception)
if exception:
report_error(cause="Bad request")
return render(request, "400.html", {"title": _("Bad Request")}, status=400)
def not_found(request, exception=None):
"""Error handler showing list of available projects."""
return render(request, "404.html", {"title": _("Page Not Found")}, status=404)
def denied(request, exception=None):
return render(request, "403.html", {"title": _("Permission Denied")}, status=403)
def csrf_failure(request, reason=""):
response = render(
request,
"403_csrf.html",
{
"title": _("Permission Denied"),
"no_referer": reason == REASON_NO_REFERER,
"no_cookie": reason == REASON_NO_CSRF_COOKIE,
},
status=403,
)
# Avoid setting CSRF cookie on CSRF failure page, otherwise we end up creating
# new session even when user might already have one (because browser did not
# send the cookies with the CSRF request and Django doesn't see the session
# cookie).
response.csrf_cookie_set = True
return response
def server_error(request):
"""Error handler for server errors."""
if "text/html" not in request.META.get("HTTP_ACCEPT", ""):
return rest_framework.exceptions.server_error(request)
try:
return render(
request,
"500.html",
{
"title": _("Internal Server Error"),
"sentry_dsn": settings.SENTRY_DSN,
"sentry_event_id": last_event_id(),
},
status=500,
)
except Exception:
return django.views.defaults.server_error(request)
|
from datetime import timedelta
import pytest
from homeassistant.components.modbus.const import (
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CONF_COUNT,
CONF_DATA_TYPE,
CONF_OFFSET,
CONF_PRECISION,
CONF_REGISTER,
CONF_REGISTER_TYPE,
CONF_REGISTERS,
CONF_REVERSE_ORDER,
CONF_SCALE,
DATA_TYPE_FLOAT,
DATA_TYPE_INT,
DATA_TYPE_STRING,
DATA_TYPE_UINT,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import CONF_NAME
from .conftest import run_base_read_test, setup_base_test
@pytest.mark.parametrize(
"cfg,regs,expected",
[
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0],
"0",
),
(
{},
[0x8000],
"-32768",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 1,
CONF_OFFSET: 13,
CONF_PRECISION: 0,
},
[7],
"20",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 3,
CONF_OFFSET: 13,
CONF_PRECISION: 0,
},
[7],
"34",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 3,
CONF_OFFSET: 13,
CONF_PRECISION: 4,
},
[7],
"34.0000",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 1.5,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[1],
"2",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: "1.5",
CONF_OFFSET: "5",
CONF_PRECISION: "1",
},
[9],
"18.5",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 2.4,
CONF_OFFSET: 0,
CONF_PRECISION: 2,
},
[1],
"2.40",
),
(
{
CONF_COUNT: 1,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 1,
CONF_OFFSET: -10.3,
CONF_PRECISION: 1,
},
[2],
"-8.3",
),
(
{
CONF_COUNT: 2,
CONF_DATA_TYPE: DATA_TYPE_INT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x89AB, 0xCDEF],
"-1985229329",
),
(
{
CONF_COUNT: 2,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x89AB, 0xCDEF],
str(0x89ABCDEF),
),
(
{
CONF_COUNT: 2,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_REVERSE_ORDER: True,
},
[0x89AB, 0xCDEF],
str(0xCDEF89AB),
),
(
{
CONF_COUNT: 4,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x89AB, 0xCDEF, 0x0123, 0x4567],
"9920249030613615975",
),
(
{
CONF_COUNT: 4,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 2,
CONF_OFFSET: 3,
CONF_PRECISION: 0,
},
[0x0123, 0x4567, 0x89AB, 0xCDEF],
"163971058432973793",
),
(
{
CONF_COUNT: 4,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 2.0,
CONF_OFFSET: 3.0,
CONF_PRECISION: 0,
},
[0x0123, 0x4567, 0x89AB, 0xCDEF],
"163971058432973792",
),
(
{
CONF_COUNT: 2,
CONF_REGISTER_TYPE: CALL_TYPE_REGISTER_INPUT,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x89AB, 0xCDEF],
str(0x89ABCDEF),
),
(
{
CONF_COUNT: 2,
CONF_REGISTER_TYPE: CALL_TYPE_REGISTER_HOLDING,
CONF_DATA_TYPE: DATA_TYPE_UINT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x89AB, 0xCDEF],
str(0x89ABCDEF),
),
(
{
CONF_COUNT: 2,
CONF_REGISTER_TYPE: CALL_TYPE_REGISTER_HOLDING,
CONF_DATA_TYPE: DATA_TYPE_FLOAT,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 5,
},
[16286, 1617],
"1.23457",
),
(
{
CONF_COUNT: 8,
CONF_REGISTER_TYPE: CALL_TYPE_REGISTER_HOLDING,
CONF_DATA_TYPE: DATA_TYPE_STRING,
CONF_SCALE: 1,
CONF_OFFSET: 0,
CONF_PRECISION: 0,
},
[0x3037, 0x2D30, 0x352D, 0x3230, 0x3230, 0x2031, 0x343A, 0x3335],
"07-05-2020 14:35",
),
],
)
async def test_all_sensor(hass, mock_hub, cfg, regs, expected):
"""Run test for sensor."""
sensor_name = "modbus_test_sensor"
scan_interval = 5
entity_id, now, device = await setup_base_test(
sensor_name,
hass,
mock_hub,
{
CONF_REGISTERS: [
dict(**{CONF_NAME: sensor_name, CONF_REGISTER: 1234}, **cfg)
]
},
SENSOR_DOMAIN,
scan_interval,
)
await run_base_read_test(
entity_id,
hass,
mock_hub,
cfg.get(CONF_REGISTER_TYPE),
regs,
expected,
now + timedelta(seconds=scan_interval + 1),
)
|
from __future__ import print_function
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim, VmomiSupport
import argparse
import atexit
import getpass
import json
import ssl
def GetArgs():
"""
Supports the command-line arguments listed below.
"""
parser = argparse.ArgumentParser(
description='Process args for extracting JSON from a Managed Object')
parser.add_argument('-s', '--host', required=True, action='store',
help='Remote host to connect to')
parser.add_argument('-o', '--port', type=int, default=443, action='store',
help='Port to connect on')
parser.add_argument('-u', '--user', required=True, action='store',
help='User name to use when connecting to host')
parser.add_argument('-p', '--password', required=False, action='store',
help='Password to use when connecting to host')
parser.add_argument('-t', '--type', required=True, action='store',
help='The vim type lookup, ex: "VirtualMachine"')
parser.add_argument('-i', '--id', required=True, action='store',
help='The MOID to lookup, ex: "vm-42"')
args = parser.parse_args()
return args
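# Example invocation (all values below are placeholders; substitute your own
# host, credentials, managed object type, and MOID):
#
#   python <this_script>.py -s vcenter.example.com -u administrator@vsphere.local \
#       -t VirtualMachine -i vm-42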
def main():
"""
Simple command-line program for dumping the contents of any managed object.
"""
args = GetArgs()
if args.password:
password = args.password
else:
password = getpass.getpass(prompt='Enter password for host %s and '
'user %s: ' % (args.host,args.user))
context = None
if hasattr(ssl, '_create_unverified_context'):
context = ssl._create_unverified_context()
si = SmartConnect(host=args.host,
user=args.user,
pwd=password,
port=int(args.port),
sslContext=context)
if not si:
print("Could not connect to the specified host using specified "
"username and password")
return -1
atexit.register(Disconnect, si)
obj = VmomiSupport.templateOf(args.type)(args.id, si._stub)
print(json.dumps(obj, cls=VmomiSupport.VmomiJSONEncoder,
sort_keys=True, indent=4))
# Start program
if __name__ == "__main__":
main()
|
from django.urls import reverse
from weblate.fonts.models import Font, FontGroup
from weblate.fonts.tests.utils import FONT, FontTestCase
from weblate.lang.models import Language
class FontViewTest(FontTestCase):
@property
def fonts_url(self):
return reverse("fonts", kwargs=self.kw_project)
def test_noperm(self):
font = self.add_font()
response = self.client.get(self.fonts_url)
self.assertContains(response, font.family)
self.assertNotContains(response, "Add font")
def test_manage(self):
self.user.is_superuser = True
self.user.save()
# Validate the form is there
response = self.client.get(self.fonts_url)
self.assertContains(response, "Add font")
# Upload font
with open(FONT, "rb") as handle:
response = self.client.post(self.fonts_url, {"font": handle}, follow=True)
self.assertContains(response, "Droid Sans Fallback")
font = Font.objects.get()
self.assertContains(
self.client.get(font.get_absolute_url()), "Droid Sans Fallback"
)
# Create font group
response = self.client.post(
self.fonts_url, {"name": "font-group", "font": font.pk}, follow=True
)
self.assertContains(response, "font-group")
group = FontGroup.objects.get()
self.assertContains(self.client.get(group.get_absolute_url()), "font-group")
# Add override
language = Language.objects.all()[0]
response = self.client.post(
group.get_absolute_url(),
{"language": language.pk, "font": font.pk},
follow=True,
)
self.assertContains(response, language.name)
override = group.fontoverride_set.get()
# Remove override
self.client.post(
group.get_absolute_url(), {"override": override.pk}, follow=True
)
self.assertEqual(group.fontoverride_set.count(), 0)
# Remove group
self.client.post(group.get_absolute_url())
self.assertEqual(FontGroup.objects.count(), 0)
# Remove font
self.client.post(font.get_absolute_url())
self.assertEqual(Font.objects.count(), 0)
|
import os
import warnings
import builtins
import cherrypy
class Checker(object):
"""A checker for CherryPy sites and their mounted applications.
When this object is called at engine startup, it executes each
of its own methods whose names start with ``check_``. If you wish
to disable selected checks, simply add a line in your global
config which sets the appropriate method to False::
[global]
checker.check_skipped_app_config = False
You may also dynamically add or replace ``check_*`` methods in this way.
"""
on = True
"""If True (the default), run all checks; if False, turn off all checks."""
def __init__(self):
"""Initialize Checker instance."""
self._populate_known_types()
def __call__(self):
"""Run all check_* methods."""
if self.on:
oldformatwarning = warnings.formatwarning
warnings.formatwarning = self.formatwarning
try:
for name in dir(self):
if name.startswith('check_'):
method = getattr(self, name)
if method and hasattr(method, '__call__'):
method()
finally:
warnings.formatwarning = oldformatwarning
def formatwarning(self, message, category, filename, lineno, line=None):
"""Format a warning."""
return 'CherryPy Checker:\n%s\n\n' % message
# This value should be set inside _cpconfig.
global_config_contained_paths = False
def check_app_config_entries_dont_start_with_script_name(self):
"""Check for App config with sections that repeat script_name."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
continue
if sn == '':
continue
sn_atoms = sn.strip('/').split('/')
for key in app.config.keys():
key_atoms = key.strip('/').split('/')
if key_atoms[:len(sn_atoms)] == sn_atoms:
warnings.warn(
'The application mounted at %r has config '
'entries that start with its script name: %r' % (sn,
key))
def check_site_config_entries_in_app_config(self):
"""Check for mounted Applications that have site-scoped config."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
msg = []
for section, entries in app.config.items():
if section.startswith('/'):
for key, value in entries.items():
for n in ('engine.', 'server.', 'tree.', 'checker.'):
if key.startswith(n):
msg.append('[%s] %s = %s' %
(section, key, value))
if msg:
msg.insert(0,
'The application mounted at %r contains the '
'following config entries, which are only allowed '
'in site-wide config. Move them to a [global] '
'section and pass them to cherrypy.config.update() '
'instead of tree.mount().' % sn)
warnings.warn(os.linesep.join(msg))
def check_skipped_app_config(self):
"""Check for mounted Applications that have no config."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
msg = 'The Application mounted at %r has an empty config.' % sn
if self.global_config_contained_paths:
msg += (' It looks like the config you passed to '
'cherrypy.config.update() contains application-'
'specific sections. You must explicitly pass '
'application config via '
'cherrypy.tree.mount(..., config=app_config)')
warnings.warn(msg)
return
def check_app_config_brackets(self):
"""Check for App config with extraneous brackets in section names."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
if not app.config:
continue
for key in app.config.keys():
if key.startswith('[') or key.endswith(']'):
warnings.warn(
'The application mounted at %r has config '
'section names with extraneous brackets: %r. '
'Config *files* need brackets; config *dicts* '
'(e.g. passed to tree.mount) do not.' % (sn, key))
def check_static_paths(self):
"""Check Application config for incorrect static paths."""
# Use the dummy Request object in the main thread.
request = cherrypy.request
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
request.app = app
for section in app.config:
# get_resource will populate request.config
request.get_resource(section + '/dummy.html')
conf = request.config.get
if conf('tools.staticdir.on', False):
msg = ''
root = conf('tools.staticdir.root')
dir = conf('tools.staticdir.dir')
if dir is None:
msg = 'tools.staticdir.dir is not set.'
else:
fulldir = ''
if os.path.isabs(dir):
fulldir = dir
if root:
msg = ('dir is an absolute path, even '
'though a root is provided.')
testdir = os.path.join(root, dir[1:])
if os.path.exists(testdir):
msg += (
'\nIf you meant to serve the '
'filesystem folder at %r, remove the '
'leading slash from dir.' % (testdir,))
else:
if not root:
msg = (
'dir is a relative path and '
'no root provided.')
else:
fulldir = os.path.join(root, dir)
if not os.path.isabs(fulldir):
msg = ('%r is not an absolute path.' % (
fulldir,))
if fulldir and not os.path.exists(fulldir):
if msg:
msg += '\n'
msg += ('%r (root + dir) is not an existing '
'filesystem path.' % fulldir)
if msg:
warnings.warn('%s\nsection: [%s]\nroot: %r\ndir: %r'
% (msg, section, root, dir))
# -------------------------- Compatibility -------------------------- #
obsolete = {
'server.default_content_type': 'tools.response_headers.headers',
'log_access_file': 'log.access_file',
'log_config_options': None,
'log_file': 'log.error_file',
'log_file_not_found': None,
'log_request_headers': 'tools.log_headers.on',
'log_to_screen': 'log.screen',
'show_tracebacks': 'request.show_tracebacks',
'throw_errors': 'request.throw_errors',
'profiler.on': ('cherrypy.tree.mount(profiler.make_app('
'cherrypy.Application(Root())))'),
}
deprecated = {}
def _compat(self, config):
"""Process config and warn on each obsolete or deprecated entry."""
for section, conf in config.items():
if isinstance(conf, dict):
for k in conf:
if k in self.obsolete:
warnings.warn('%r is obsolete. Use %r instead.\n'
'section: [%s]' %
(k, self.obsolete[k], section))
elif k in self.deprecated:
warnings.warn('%r is deprecated. Use %r instead.\n'
'section: [%s]' %
(k, self.deprecated[k], section))
else:
if section in self.obsolete:
warnings.warn('%r is obsolete. Use %r instead.'
% (section, self.obsolete[section]))
elif section in self.deprecated:
warnings.warn('%r is deprecated. Use %r instead.'
% (section, self.deprecated[section]))
def check_compatibility(self):
"""Process config and warn on each obsolete or deprecated entry."""
self._compat(cherrypy.config)
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._compat(app.config)
# ------------------------ Known Namespaces ------------------------ #
extra_config_namespaces = []
def _known_ns(self, app):
ns = ['wsgi']
ns.extend(app.toolboxes)
ns.extend(app.namespaces)
ns.extend(app.request_class.namespaces)
ns.extend(cherrypy.config.namespaces)
ns += self.extra_config_namespaces
for section, conf in app.config.items():
is_path_section = section.startswith('/')
if is_path_section and isinstance(conf, dict):
for k in conf:
atoms = k.split('.')
if len(atoms) > 1:
if atoms[0] not in ns:
# Spit out a special warning if a known
# namespace is preceded by "cherrypy."
if atoms[0] == 'cherrypy' and atoms[1] in ns:
msg = (
'The config entry %r is invalid; '
'try %r instead.\nsection: [%s]'
% (k, '.'.join(atoms[1:]), section))
else:
msg = (
'The config entry %r is invalid, '
'because the %r config namespace '
'is unknown.\n'
'section: [%s]' % (k, atoms[0], section))
warnings.warn(msg)
elif atoms[0] == 'tools':
if atoms[1] not in dir(cherrypy.tools):
msg = (
'The config entry %r may be invalid, '
'because the %r tool was not found.\n'
'section: [%s]' % (k, atoms[1], section))
warnings.warn(msg)
def check_config_namespaces(self):
"""Process config and warn on each unknown config namespace."""
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._known_ns(app)
# -------------------------- Config Types -------------------------- #
known_config_types = {}
def _populate_known_types(self):
b = [x for x in vars(builtins).values()
if type(x) is type(str)]
def traverse(obj, namespace):
for name in dir(obj):
# Hack for 3.2's warning about body_params
if name == 'body_params':
continue
vtype = type(getattr(obj, name, None))
if vtype in b:
self.known_config_types[namespace + '.' + name] = vtype
traverse(cherrypy.request, 'request')
traverse(cherrypy.response, 'response')
traverse(cherrypy.server, 'server')
traverse(cherrypy.engine, 'engine')
traverse(cherrypy.log, 'log')
def _known_types(self, config):
msg = ('The config entry %r in section %r is of type %r, '
'which does not match the expected type %r.')
for section, conf in config.items():
if not isinstance(conf, dict):
conf = {section: conf}
for k, v in conf.items():
if v is not None:
expected_type = self.known_config_types.get(k, None)
vtype = type(v)
if expected_type and vtype != expected_type:
warnings.warn(msg % (k, section, vtype.__name__,
expected_type.__name__))
def check_config_types(self):
"""Assert that config values are of the same type as default values."""
self._known_types(cherrypy.config)
for sn, app in cherrypy.tree.apps.items():
if not isinstance(app, cherrypy.Application):
continue
self._known_types(app.config)
# -------------------- Specific config warnings -------------------- #
def check_localhost(self):
"""Warn if any socket_host is 'localhost'. See #711."""
for k, v in cherrypy.config.items():
if k == 'server.socket_host' and v == 'localhost':
warnings.warn("The use of 'localhost' as a socket host can "
'cause problems on newer systems, since '
"'localhost' can map to either an IPv4 or an "
"IPv6 address. You should use '127.0.0.1' "
"or '[::1]' instead.")
|
import datetime as dt
import re
from typing import Any, Dict, List, Optional, Union, cast
import ciso8601
import pytz
import pytz.exceptions as pytzexceptions
import pytz.tzinfo as pytzinfo
from homeassistant.const import MATCH_ALL
DATE_STR_FORMAT = "%Y-%m-%d"
NATIVE_UTC = dt.timezone.utc
UTC = pytz.utc
DEFAULT_TIME_ZONE: dt.tzinfo = pytz.utc
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
DATETIME_RE = re.compile(
r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)
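# Illustrative strings accepted by DATETIME_RE (values are made up):
#   "2020-07-05 14:35:00"            -> naive datetime, no tzinfo group
#   "2020-07-05T14:35:00.123+02:00"  -> tzinfo group captures "+02:00"
#   "2020-07-05T14:35Z"              -> tzinfo group captures "Z" (UTC)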
def set_default_time_zone(time_zone: dt.tzinfo) -> None:
"""Set a default time zone to be used when none is specified.
Async friendly.
"""
global DEFAULT_TIME_ZONE # pylint: disable=global-statement
# NOTE: Remove in the future in favour of typing
assert isinstance(time_zone, dt.tzinfo)
DEFAULT_TIME_ZONE = time_zone
def get_time_zone(time_zone_str: str) -> Optional[dt.tzinfo]:
"""Get time zone from string. Return None if unable to determine.
Async friendly.
"""
try:
return pytz.timezone(time_zone_str)
except pytzexceptions.UnknownTimeZoneError:
return None
def utcnow() -> dt.datetime:
"""Get now in UTC time."""
return dt.datetime.now(NATIVE_UTC)
def now(time_zone: Optional[dt.tzinfo] = None) -> dt.datetime:
"""Get now in specified time zone."""
return dt.datetime.now(time_zone or DEFAULT_TIME_ZONE)
def as_utc(dattim: dt.datetime) -> dt.datetime:
"""Return a datetime as UTC time.
Assumes datetime without tzinfo to be in the DEFAULT_TIME_ZONE.
"""
if dattim.tzinfo == UTC:
return dattim
if dattim.tzinfo is None:
dattim = DEFAULT_TIME_ZONE.localize(dattim) # type: ignore
return dattim.astimezone(UTC)
def as_timestamp(dt_value: dt.datetime) -> float:
"""Convert a date/time into a unix time (seconds since 1970)."""
if hasattr(dt_value, "timestamp"):
parsed_dt: Optional[dt.datetime] = dt_value
else:
parsed_dt = parse_datetime(str(dt_value))
if parsed_dt is None:
raise ValueError("not a valid date/time.")
return parsed_dt.timestamp()
def as_local(dattim: dt.datetime) -> dt.datetime:
"""Convert a UTC datetime object to local time zone."""
if dattim.tzinfo == DEFAULT_TIME_ZONE:
return dattim
if dattim.tzinfo is None:
dattim = UTC.localize(dattim)
return dattim.astimezone(DEFAULT_TIME_ZONE)
def utc_from_timestamp(timestamp: float) -> dt.datetime:
"""Return a UTC time from a timestamp."""
return UTC.localize(dt.datetime.utcfromtimestamp(timestamp))
def start_of_local_day(
dt_or_d: Union[dt.date, dt.datetime, None] = None
) -> dt.datetime:
"""Return local datetime object of start of day from date or datetime."""
if dt_or_d is None:
date: dt.date = now().date()
    elif isinstance(dt_or_d, dt.datetime):
        date = dt_or_d.date()
    else:
        date = dt_or_d
return DEFAULT_TIME_ZONE.localize( # type: ignore
dt.datetime.combine(date, dt.time())
)
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str: str) -> Optional[dt.datetime]:
"""Parse a string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raises ValueError if the input is well formatted but not a valid datetime.
Returns None if the input isn't well formatted.
"""
try:
return ciso8601.parse_datetime(dt_str)
except (ValueError, IndexError):
pass
match = DATETIME_RE.match(dt_str)
if not match:
return None
kws: Dict[str, Any] = match.groupdict()
if kws["microsecond"]:
kws["microsecond"] = kws["microsecond"].ljust(6, "0")
tzinfo_str = kws.pop("tzinfo")
tzinfo: Optional[dt.tzinfo] = None
if tzinfo_str == "Z":
tzinfo = UTC
elif tzinfo_str is not None:
offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
offset_hours = int(tzinfo_str[1:3])
offset = dt.timedelta(hours=offset_hours, minutes=offset_mins)
if tzinfo_str[0] == "-":
offset = -offset
tzinfo = dt.timezone(offset)
kws = {k: int(v) for k, v in kws.items() if v is not None}
kws["tzinfo"] = tzinfo
return dt.datetime(**kws)
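# Usage sketch (illustrative values): an explicit offset yields an aware
# datetime with a fixed-offset timezone, a naive string yields a naive
# datetime, and malformed input yields None.
#
#     parse_datetime("2020-07-05 14:35:00+02:00")  # aware, UTC+02:00
#     parse_datetime("2020-07-05 14:35:00")        # tzinfo is None
#     parse_datetime("not a date")                 # None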
def parse_date(dt_str: str) -> Optional[dt.date]:
"""Convert a date string to a date object."""
try:
return dt.datetime.strptime(dt_str, DATE_STR_FORMAT).date()
except ValueError: # If dt_str did not match our format
return None
def parse_time(time_str: str) -> Optional[dt.time]:
"""Parse a time string (00:20:00) into Time object.
Return None if invalid.
"""
parts = str(time_str).split(":")
if len(parts) < 2:
return None
try:
hour = int(parts[0])
minute = int(parts[1])
second = int(parts[2]) if len(parts) > 2 else 0
return dt.time(hour, minute, second)
except ValueError:
# ValueError if value cannot be converted to an int or not in range
return None
def get_age(date: dt.datetime) -> str:
"""
Take a datetime and return its "age" as a string.
The age can be in second, minute, hour, day, month or year. Only the
biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will
be returned.
Make sure date is not in the future, or else it won't work.
"""
def formatn(number: int, unit: str) -> str:
"""Add "unit" if it's plural."""
if number == 1:
return f"1 {unit}"
return f"{number:d} {unit}s"
delta = (now() - date).total_seconds()
rounded_delta = round(delta)
units = ["second", "minute", "hour", "day", "month"]
factors = [60, 60, 24, 30, 12]
selected_unit = "year"
for i, next_factor in enumerate(factors):
if rounded_delta < next_factor:
selected_unit = units[i]
break
delta /= next_factor
rounded_delta = round(delta)
return formatn(rounded_delta, selected_unit)
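# Illustrative behaviour of get_age (sketch): only the largest unit is kept,
# so five hours ago yields "5 hours" and ninety seconds ago rounds to "2 minutes".
#
#     get_age(now() - dt.timedelta(hours=5))     # -> "5 hours"
#     get_age(now() - dt.timedelta(seconds=90))  # -> "2 minutes"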
def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> List[int]:
"""Parse the time expression part and return a list of times to match."""
if parameter is None or parameter == MATCH_ALL:
res = list(range(min_value, max_value + 1))
elif isinstance(parameter, str):
if parameter.startswith("/"):
parameter = int(parameter[1:])
res = [x for x in range(min_value, max_value + 1) if x % parameter == 0]
else:
res = [int(parameter)]
elif not hasattr(parameter, "__iter__"):
res = [int(parameter)]
else:
res = list(sorted(int(x) for x in parameter))
for val in res:
if val < min_value or val > max_value:
raise ValueError(
f"Time expression '{parameter}': parameter {val} out of range "
f"({min_value} to {max_value})"
)
return res
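# Illustrative behaviour of parse_time_expression (sketch):
#
#     parse_time_expression("/15", 0, 59)      # -> [0, 15, 30, 45]
#     parse_time_expression([3, 1, 2], 0, 23)  # -> [1, 2, 3]
#     parse_time_expression(None, 0, 5)        # -> [0, 1, 2, 3, 4, 5]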
def find_next_time_expression_time(
now: dt.datetime, # pylint: disable=redefined-outer-name
seconds: List[int],
minutes: List[int],
hours: List[int],
) -> dt.datetime:
"""Find the next datetime from now for which the time expression matches.
The algorithm looks at each time unit separately and tries to find the
next one that matches for each. If any of them would roll over, all
time units below that are reset to the first matching value.
Timezones are also handled (the tzinfo of the now object is used),
including daylight saving time.
"""
if not seconds or not minutes or not hours:
raise ValueError("Cannot find a next time: Time expression never matches!")
def _lower_bound(arr: List[int], cmp: int) -> Optional[int]:
"""Return the first value in arr greater or equal to cmp.
Return None if no such value exists.
"""
left = 0
right = len(arr)
while left < right:
mid = (left + right) // 2
if arr[mid] < cmp:
left = mid + 1
else:
right = mid
if left == len(arr):
return None
return arr[left]
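    # For example (sketch): _lower_bound([5, 20, 40], 21) returns 40, the first
    # value >= 21, while _lower_bound([5, 20, 40], 41) returns None.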
result = now.replace(microsecond=0)
# Match next second
next_second = _lower_bound(seconds, result.second)
if next_second is None:
# No second to match in this minute. Roll-over to next minute.
next_second = seconds[0]
result += dt.timedelta(minutes=1)
result = result.replace(second=next_second)
# Match next minute
next_minute = _lower_bound(minutes, result.minute)
if next_minute != result.minute:
        # We're in the next minute. Seconds need to be reset.
result = result.replace(second=seconds[0])
if next_minute is None:
# No minute to match in this hour. Roll-over to next hour.
next_minute = minutes[0]
result += dt.timedelta(hours=1)
result = result.replace(minute=next_minute)
# Match next hour
next_hour = _lower_bound(hours, result.hour)
if next_hour != result.hour:
        # We're in the next hour. Seconds and minutes need to be reset.
result = result.replace(second=seconds[0], minute=minutes[0])
if next_hour is None:
        # No hour to match in this day. Roll-over to next day.
next_hour = hours[0]
result += dt.timedelta(days=1)
result = result.replace(hour=next_hour)
if result.tzinfo is None:
return result
# Now we need to handle timezones. We will make this datetime object
# "naive" first and then re-convert it to the target timezone.
# This is so that we can call pytz's localize and handle DST changes.
tzinfo: pytzinfo.DstTzInfo = UTC if result.tzinfo == NATIVE_UTC else result.tzinfo
result = result.replace(tzinfo=None)
try:
result = tzinfo.localize(result, is_dst=None)
except pytzexceptions.AmbiguousTimeError:
# This happens when we're leaving daylight saving time and local
# clocks are rolled back. In this case, we want to trigger
# on both the DST and non-DST time. So when "now" is in the DST
# use the DST-on time, and if not, use the DST-off time.
use_dst = bool(now.dst())
result = tzinfo.localize(result, is_dst=use_dst)
except pytzexceptions.NonExistentTimeError:
# This happens when we're entering daylight saving time and local
# clocks are rolled forward, thus there are local times that do
# not exist. In this case, we want to trigger on the next time
# that *does* exist.
# In the worst case, this will run through all the seconds in the
# time shift, but that's max 3600 operations for once per year
result = result.replace(tzinfo=tzinfo) + dt.timedelta(seconds=1)
return find_next_time_expression_time(result, seconds, minutes, hours)
result_dst = cast(dt.timedelta, result.dst())
now_dst = cast(dt.timedelta, now.dst()) or dt.timedelta(0)
if result_dst >= now_dst:
return result
# Another edge-case when leaving DST:
# When now is in DST and ambiguous *and* the next trigger time we *should*
# trigger is ambiguous and outside DST, the excepts above won't catch it.
# For example: if triggering on 2:30 and now is 28.10.2018 2:30 (in DST)
# we should trigger next on 28.10.2018 2:30 (out of DST), but our
# algorithm above would produce 29.10.2018 2:30 (out of DST)
# Step 1: Check if now is ambiguous
try:
tzinfo.localize(now.replace(tzinfo=None), is_dst=None)
return result
except pytzexceptions.AmbiguousTimeError:
pass
# Step 2: Check if result of (now - DST) is ambiguous.
check = now - now_dst
check_result = find_next_time_expression_time(check, seconds, minutes, hours)
try:
tzinfo.localize(check_result.replace(tzinfo=None), is_dst=None)
return result
except pytzexceptions.AmbiguousTimeError:
pass
# OK, edge case does apply. We must override the DST to DST-off
check_result = tzinfo.localize(check_result.replace(tzinfo=None), is_dst=False)
return check_result
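# Illustrative usage (sketch): with a naive "now" the matching is purely
# arithmetic and no DST handling is involved.
#
#     find_next_time_expression_time(
#         dt.datetime(2020, 1, 1, 10, 20, 30),
#         seconds=[0],
#         minutes=[0, 30],
#         hours=parse_time_expression(None, 0, 23),
#     )
#     # -> datetime.datetime(2020, 1, 1, 10, 30)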
|
from datetime import datetime
import pytest
from marshmallow.exceptions import ValidationError
from lemur.common.utils import parse_private_key
from lemur.common.validators import verify_private_key_match
from lemur.tests.vectors import INTERMEDIATE_CERT, SAN_CERT, SAN_CERT_KEY
def test_private_key(session):
parse_private_key(SAN_CERT_KEY)
with pytest.raises(ValueError):
parse_private_key("invalid_private_key")
def test_validate_private_key(session):
key = parse_private_key(SAN_CERT_KEY)
verify_private_key_match(key, SAN_CERT)
with pytest.raises(ValidationError):
# Wrong key for certificate
verify_private_key_match(key, INTERMEDIATE_CERT)
def test_sub_alt_type(session):
from lemur.common.validators import sub_alt_type
with pytest.raises(ValidationError):
sub_alt_type("CNAME")
def test_dates(session):
from lemur.common.validators import dates
dates(dict(validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 5)))
with pytest.raises(ValidationError):
dates(dict(validity_start=datetime(2016, 1, 1)))
with pytest.raises(ValidationError):
dates(dict(validity_end=datetime(2016, 1, 1)))
with pytest.raises(ValidationError):
dates(
dict(validity_start=datetime(2016, 1, 5), validity_end=datetime(2016, 1, 1))
)
with pytest.raises(ValidationError):
dates(
dict(
validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 10)
)
)
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links import FeaturePredictor
class DummyFeatureExtractor(chainer.Chain):
def __init__(self, in_channels, shape_0, shape_1):
super(DummyFeatureExtractor, self).__init__()
self.shape_0 = shape_0
self.shape_1 = shape_1
self.mean = np.zeros(in_channels).reshape((in_channels, 1, 1))
def forward(self, x):
shape = (x.shape[0],) + self.shape_0
y0 = self.xp.random.rand(*shape).astype(np.float32)
if self.shape_1 is None:
return chainer.Variable(y0)
shape = (x.shape[0],) + self.shape_1
y1 = self.xp.random.rand(*shape).astype(np.float32)
return chainer.Variable(y0), chainer.Variable(y1)
@testing.parameterize(*(
testing.product_dict(
[
{'shape_0': (5, 10, 10), 'shape_1': None, 'crop': 'center'},
{'shape_0': (8,), 'shape_1': None, 'crop': '10'},
{'shape_0': (5, 10, 10), 'shape_1': (12,), 'crop': 'center'},
{'shape_0': (8,), 'shape_1': (10,), 'crop': '10'}
],
[
{'in_channels': 1},
{'in_channels': 3}
]
)
))
class TestFeaturePredictorPredict(unittest.TestCase):
def setUp(self):
self.link = FeaturePredictor(
DummyFeatureExtractor(
self.in_channels, self.shape_0, self.shape_1),
crop_size=5, crop=self.crop)
self.x = np.random.uniform(
size=(3, self.in_channels, 32, 32)).astype(np.float32)
self.one_output = self.shape_1 is None
def check(self, x):
out = self.link.predict(x)
if self.one_output:
self.assertEqual(out.shape, (self.x.shape[0],) + self.shape_0)
self.assertIsInstance(out, np.ndarray)
else:
out_0, out_1 = out
self.assertEqual(out_0.shape, (self.x.shape[0],) + self.shape_0)
self.assertEqual(out_1.shape, (self.x.shape[0],) + self.shape_1)
self.assertIsInstance(out_0, np.ndarray)
self.assertIsInstance(out_1, np.ndarray)
def test_cpu(self):
self.check(self.x)
@attr.gpu
def test_gpu(self):
self.link.to_gpu()
self.check(self.x)
@testing.parameterize(*testing.product({
'crop': ['center', '10'],
'crop_size': [192, (192, 256), (256, 192)],
'scale_size': [None, 256, (256, 256)],
'in_channels': [1, 3],
'mean': [None, np.float32(1)],
}))
class TestFeaturePredictor(unittest.TestCase):
def setUp(self):
self.link = FeaturePredictor(
DummyFeatureExtractor(self.in_channels, (1,), None),
crop_size=self.crop_size, scale_size=self.scale_size,
crop=self.crop, mean=self.mean)
if isinstance(self.crop_size, int):
hw = (self.crop_size, self.crop_size)
else:
hw = self.crop_size
if self.crop == 'center':
self.expected_shape = (1, self.in_channels) + hw
elif self.crop == '10':
self.expected_shape = (10, self.in_channels) + hw
def test_prepare(self):
out = self.link._prepare(
np.random.uniform(size=(self.in_channels, 286, 286)))
self.assertEqual(out.shape, self.expected_shape)
def test_prepare_original_unaffected(self):
original = np.random.uniform(size=(self.in_channels, 286, 286))
input_ = original.copy()
self.link._prepare(input_)
np.testing.assert_equal(original, input_)
def test_mean(self):
if self.mean is None:
np.testing.assert_equal(self.link.mean, self.link.extractor.mean)
else:
np.testing.assert_equal(self.link.mean, self.mean)
testing.run_module(__name__, __file__)
|
from august.authenticator_common import AuthenticationState
from homeassistant.components.august.const import DOMAIN
from homeassistant.components.august.gateway import AugustGateway
from tests.async_mock import MagicMock, patch
from tests.components.august.mocks import _mock_august_authentication, _mock_get_config
async def test_refresh_access_token(hass):
"""Test token refreshes."""
await _patched_refresh_access_token(hass, "new_token", 5678)
@patch("homeassistant.components.august.gateway.ApiAsync.async_get_operable_locks")
@patch("homeassistant.components.august.gateway.AuthenticatorAsync.async_authenticate")
@patch("homeassistant.components.august.gateway.AuthenticatorAsync.should_refresh")
@patch(
"homeassistant.components.august.gateway.AuthenticatorAsync.async_refresh_access_token"
)
async def _patched_refresh_access_token(
hass,
new_token,
new_token_expire_time,
refresh_access_token_mock,
should_refresh_mock,
authenticate_mock,
async_get_operable_locks_mock,
):
authenticate_mock.side_effect = MagicMock(
return_value=_mock_august_authentication(
"original_token", 1234, AuthenticationState.AUTHENTICATED
)
)
august_gateway = AugustGateway(hass)
mocked_config = _mock_get_config()
await august_gateway.async_setup(mocked_config[DOMAIN])
await august_gateway.async_authenticate()
should_refresh_mock.return_value = False
await august_gateway.async_refresh_access_token_if_needed()
refresh_access_token_mock.assert_not_called()
should_refresh_mock.return_value = True
refresh_access_token_mock.return_value = _mock_august_authentication(
new_token, new_token_expire_time, AuthenticationState.AUTHENTICATED
)
await august_gateway.async_refresh_access_token_if_needed()
refresh_access_token_mock.assert_called()
assert august_gateway.access_token == new_token
assert august_gateway.authentication.access_token_expires == new_token_expire_time
|
from unittest import TestCase
from django.core.exceptions import ValidationError
from weblate.accounts.password_validation import CharsPasswordValidator
class ValidationTest(TestCase):
def validate(self, password):
validator = CharsPasswordValidator()
return validator.validate(password)
def test_chars_good(self):
self.assertIsNone(self.validate("123"))
def test_chars_whitespace(self):
with self.assertRaises(ValidationError):
self.validate(" \r\n\t")
def test_chars_same(self):
with self.assertRaises(ValidationError):
self.validate("x" * 10)
|
import re
import time
import unicodedata
from django.conf import settings
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.translation import gettext as _
from social_core.exceptions import AuthAlreadyAssociated, AuthMissingParameter
from social_core.pipeline.partial import partial
from social_core.utils import PARTIAL_TOKEN_SESSION_NAME
from weblate.accounts.models import AuditLog, VerifiedEmail
from weblate.accounts.notifications import send_notification_email
from weblate.accounts.templatetags.authnames import get_auth_name
from weblate.accounts.utils import (
adjust_session_expiry,
cycle_session_keys,
invalidate_reset_codes,
)
from weblate.auth.models import User
from weblate.trans.defines import FULLNAME_LENGTH
from weblate.utils import messages
from weblate.utils.requests import request
from weblate.utils.validators import USERNAME_MATCHER, clean_fullname
STRIP_MATCHER = re.compile(r"[^\w\s.@+-]")
CLEANUP_MATCHER = re.compile(r"[-\s]+")
class UsernameAlreadyAssociated(AuthAlreadyAssociated):
pass
class EmailAlreadyAssociated(AuthAlreadyAssociated):
pass
def get_github_email(access_token):
"""Get real e-mail from GitHub."""
response = request(
"get",
"https://api.github.com/user/emails",
headers={"Authorization": f"token {access_token}"},
timeout=10.0,
)
data = response.json()
email = None
for entry in data:
# Skip not verified ones
if not entry["verified"]:
continue
email = entry["email"]
if entry["primary"]:
break
return email
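# The endpoint queried above returns a JSON list of address records; only the
# keys this function reads are sketched here (values are illustrative, other
# fields are omitted):
#
#     [
#         {"email": "user@example.com", "verified": True, "primary": True},
#         {"email": "other@example.com", "verified": False, "primary": False},
#     ]
#
# The loop keeps the last verified address and stops early at the primary one,
# so a verified primary address wins when present.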
@partial
def reauthenticate(strategy, backend, user, social, uid, weblate_action, **kwargs):
"""Force authentication when adding new association."""
session = strategy.request.session
if session.pop("reauthenticate_done", False):
return None
if weblate_action != "activation":
return None
if user and not social and user.has_usable_password():
session["reauthenticate"] = {
"backend": backend.name,
"backend_verbose": str(get_auth_name(backend.name)),
"uid": uid,
"user_pk": user.pk,
}
return redirect("confirm")
return None
@partial
def require_email(backend, details, weblate_action, user=None, is_new=False, **kwargs):
"""Force entering e-mail for backends which don't provide it."""
if backend.name == "github":
email = get_github_email(kwargs["response"]["access_token"])
if email is not None:
details["email"] = email
if details.get("email", "").endswith("@users.noreply.github.com"):
del details["email"]
# Remove any pending e-mail validation codes
if details.get("email") and backend.name == "email":
invalidate_reset_codes(emails=(details["email"],))
# Remove all account reset codes
if user and weblate_action == "reset":
invalidate_reset_codes(user=user)
if user and user.email:
# Force validation of new e-mail address
if backend.name == "email":
return {"is_new": True}
return None
if is_new and not details.get("email"):
raise AuthMissingParameter(backend, "email")
return None
def send_validation(strategy, backend, code, partial_token):
"""Send verification e-mail."""
# We need to have existing session
session = strategy.request.session
if not session.session_key:
session.create()
session["registration-email-sent"] = True
url = "{}?verification_code={}&partial_token={}".format(
reverse("social:complete", args=(backend.name,)), code.code, partial_token
)
context = {"url": url, "validity": settings.AUTH_TOKEN_VALID // 3600}
template = "activation"
if session.get("password_reset"):
template = "reset"
elif session.get("account_remove"):
template = "remove"
elif session.get("user_invite"):
template = "invite"
context.update(session["invitation_context"])
send_notification_email(None, [code.email], template, info=url, context=context)
@partial
def password_reset(
strategy, backend, user, social, details, weblate_action, current_partial, **kwargs
):
"""Set unusable password on reset."""
if strategy.request is not None and user is not None and weblate_action == "reset":
AuditLog.objects.create(
user,
strategy.request,
"reset",
method=backend.name,
name=social.uid,
password=user.password,
)
user.set_unusable_password()
user.save(update_fields=["password"])
# Remove partial pipeline, we do not need it
strategy.really_clean_partial_pipeline(current_partial.token)
session = strategy.request.session
# Store user ID
session["perform_reset"] = user.pk
# Set short session expiry
session.set_expiry(90)
# Redirect to form to change password
return redirect("password_reset")
return None
@partial
def remove_account(
strategy, backend, user, social, details, weblate_action, current_partial, **kwargs
):
"""Set unusable password on reset."""
if strategy.request is not None and user is not None and weblate_action == "remove":
# Remove partial pipeline, we do not need it
strategy.really_clean_partial_pipeline(current_partial.token)
# Set short session expiry
session = strategy.request.session
session.set_expiry(90)
session["remove_confirm"] = True
# Redirect to form to change password
return redirect("remove")
return None
def verify_open(strategy, backend, user, weblate_action, **kwargs):
"""Check whether it is possible to create new user."""
# Check whether registration is open
if (
not user
and weblate_action not in ("reset", "remove", "invite")
and (not settings.REGISTRATION_OPEN or settings.REGISTRATION_ALLOW_BACKENDS)
and backend.name not in settings.REGISTRATION_ALLOW_BACKENDS
):
raise AuthMissingParameter(backend, "disabled")
    # Ensure it is still the same user (when the session was kept). This avoids
    # completing authentication as a different user than the one who initiated
    # it; with a new session, it completes as a new user.
current_user = strategy.request.user.pk
init_user = strategy.request.session.get("social_auth_user")
if strategy.request.session.session_key and current_user != init_user:
raise AuthMissingParameter(backend, "user")
def cleanup_next(strategy, **kwargs):
    # This is mostly a fix for the lack of "next" URL validation in Python Social Auth,
# see https://github.com/python-social-auth/social-core/issues/62
url = strategy.session_get("next")
if url and not url_has_allowed_host_and_scheme(url, allowed_hosts=None):
strategy.session_set("next", None)
if url_has_allowed_host_and_scheme(kwargs.get("next", ""), allowed_hosts=None):
return None
return {"next": None}
def store_params(strategy, user, **kwargs):
"""Store Weblate specific parameters in the pipeline."""
# Registering user
if user and user.is_authenticated:
registering_user = user.pk
else:
registering_user = None
# Pipeline action
session = strategy.request.session
if session.get("password_reset"):
action = "reset"
elif session.get("account_remove"):
action = "remove"
elif session.get("user_invite"):
action = "invite"
else:
action = "activation"
return {
"weblate_action": action,
"registering_user": registering_user,
"weblate_expires": int(time.time() + settings.AUTH_TOKEN_VALID),
}
def verify_username(strategy, backend, details, username, user=None, **kwargs):
"""Verified whether username is still free.
It can happen that user has registered several times or other user has taken the
username meanwhile.
"""
if user or not username:
return
if User.objects.filter(username=username).exists():
raise UsernameAlreadyAssociated(backend, "Username exists")
return
def revoke_mail_code(strategy, details, **kwargs):
"""Remove old mail validation code for Python Social Auth.
PSA keeps them around, but we really don't need them again.
"""
data = strategy.request_data()
if "email" in details and details["email"] and "verification_code" in data:
try:
code = strategy.storage.code.objects.get(
code=data["verification_code"], email=details["email"], verified=True
)
code.delete()
except strategy.storage.code.DoesNotExist:
return
def ensure_valid(
strategy,
backend,
user,
registering_user,
weblate_action,
weblate_expires,
new_association,
details,
**kwargs,
):
"""Ensure the activation link is still."""
# Didn't the link expire?
if weblate_expires < time.time():
raise AuthMissingParameter(backend, "expires")
# We allow password reset for unauthenticated users
if weblate_action == "reset":
if strategy.request.user.is_authenticated:
messages.warning(
strategy.request,
_("You can not complete password reset while signed in."),
)
messages.warning(
strategy.request, _("The registration link has been invalidated.")
)
raise AuthMissingParameter(backend, "user")
return
# Add e-mail/register should stay on same user
if user and user.is_authenticated:
current_user = user.pk
else:
current_user = None
if current_user != registering_user:
if registering_user is None:
messages.warning(
strategy.request,
_("You can not complete registration while signed in."),
)
else:
messages.warning(
strategy.request,
_("You can confirm your registration only while signed in."),
)
messages.warning(
strategy.request, _("The registration link has been invalidated.")
)
raise AuthMissingParameter(backend, "user")
# Verify if this mail is not used on other accounts
if new_association:
same = VerifiedEmail.objects.filter(email__iexact=details["email"])
if user:
same = same.exclude(social__user=user)
if same.exists():
AuditLog.objects.create(same[0].social.user, strategy.request, "connect")
raise EmailAlreadyAssociated(backend, "E-mail exists")
def store_email(strategy, backend, user, social, details, **kwargs):
"""Store verified e-mail."""
# The email can be empty for some services
if details.get("email"):
verified, created = VerifiedEmail.objects.get_or_create(
social=social, defaults={"email": details["email"]}
)
if not created and verified.email != details["email"]:
verified.email = details["email"]
verified.save()
def notify_connect(
strategy, backend, user, social, new_association=False, is_new=False, **kwargs
):
"""Notify about adding new link."""
if user and not is_new:
if new_association:
action = "auth-connect"
else:
action = "login"
adjust_session_expiry(strategy.request)
AuditLog.objects.create(
user,
strategy.request,
action,
method=backend.name,
name=social.uid,
)
# Remove partial pipeline
session = strategy.request.session
if PARTIAL_TOKEN_SESSION_NAME in session:
strategy.really_clean_partial_pipeline(session[PARTIAL_TOKEN_SESSION_NAME])
def user_full_name(strategy, details, username, user=None, **kwargs):
"""Update user full name using data from provider."""
if user and not user.full_name:
full_name = details.get("fullname", "").strip()
if not full_name and ("first_name" in details or "last_name" in details):
first_name = details.get("first_name", "")
last_name = details.get("last_name", "")
if first_name and first_name not in last_name:
full_name = f"{first_name} {last_name}"
elif first_name:
full_name = first_name
else:
full_name = last_name
if not full_name and username:
full_name = username
if not full_name and user.username:
full_name = user.username
full_name = clean_fullname(full_name)
# The User model limit is 150 chars
if len(full_name) > FULLNAME_LENGTH:
full_name = full_name[:FULLNAME_LENGTH]
if full_name:
user.full_name = full_name
strategy.storage.user.changed(user)
def slugify_username(value):
"""Clean up username.
This is based on Django slugify with exception of lowercasing
- Converts to ascii
- Removes not wanted chars
- Merges whitespaces and - into single -
"""
value = (
unicodedata.normalize("NFKD", value).encode("ascii", "ignore").decode("ascii")
)
# Return username if it matches our standards
if USERNAME_MATCHER.match(value):
return value
value = STRIP_MATCHER.sub("", value).strip().lstrip(".")
return CLEANUP_MATCHER.sub("-", value)
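# Illustrative behaviour (sketch; assumes the inputs below do not already
# satisfy USERNAME_MATCHER and are therefore cleaned up):
#
#     slugify_username("Foo Bar")       # -> "Foo-Bar"
#     slugify_username("Ünïcode näme")  # -> "Unicode-name"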
def cycle_session(strategy, user, *args, **kwargs):
# Change key for current session and invalidate others
cycle_session_keys(strategy.request, user)
def adjust_primary_mail(strategy, entries, user, *args, **kwargs):
"""Fix primary mail on disconnect."""
# Remove pending verification codes
invalidate_reset_codes(user=user, entries=entries)
# Check remaining verified mails
verified = VerifiedEmail.objects.filter(social__user=user).exclude(
social__in=entries
)
if verified.filter(email=user.email).exists():
return
user.email = verified[0].email
user.save()
messages.warning(
strategy.request,
_(
"Your e-mail no longer belongs to verified account, "
"it has been changed to {0}."
).format(user.email),
)
def notify_disconnect(strategy, backend, entries, user, **kwargs):
"""Store verified e-mail."""
for social in entries:
AuditLog.objects.create(
user,
strategy.request,
"auth-disconnect",
method=backend.name,
name=social.uid,
)
|
import logging
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_OFF,
FAN_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_HOME,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CONST_FAN_AUTO,
CONST_FAN_OFF,
CONST_MODE_AUTO,
CONST_MODE_COOL,
CONST_MODE_HEAT,
CONST_MODE_OFF,
CONST_MODE_SMART_SCHEDULE,
CONST_OVERLAY_MANUAL,
CONST_OVERLAY_TADO_MODE,
CONST_OVERLAY_TIMER,
DATA,
DOMAIN,
HA_TO_TADO_FAN_MODE_MAP,
HA_TO_TADO_HVAC_MODE_MAP,
ORDERED_KNOWN_TADO_MODES,
SIGNAL_TADO_UPDATE_RECEIVED,
SUPPORT_PRESET,
TADO_HVAC_ACTION_TO_HA_HVAC_ACTION,
TADO_MODES_WITH_NO_TEMP_SETTING,
TADO_SWING_OFF,
TADO_SWING_ON,
TADO_TO_HA_FAN_MODE_MAP,
TADO_TO_HA_HVAC_MODE_MAP,
TYPE_AIR_CONDITIONING,
TYPE_HEATING,
)
from .entity import TadoZoneEntity
_LOGGER = logging.getLogger(__name__)
SERVICE_CLIMATE_TIMER = "set_climate_timer"
ATTR_TIME_PERIOD = "time_period"
CLIMATE_TIMER_SCHEMA = {
vol.Required(ATTR_TIME_PERIOD, default="01:00:00"): vol.All(
cv.time_period, cv.positive_timedelta, lambda td: td.total_seconds()
),
vol.Required(ATTR_TEMPERATURE): vol.Coerce(float),
}
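# Illustrative payload accepted by the schema above (sketch; keys use the
# ATTR_* constants defined above): cv.time_period parses the string into a
# timedelta and the lambda converts it to seconds.
#
#     vol.Schema(CLIMATE_TIMER_SCHEMA)({"time_period": "01:30:00", "temperature": 21.5})
#     # -> {"time_period": 5400.0, "temperature": 21.5}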
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities
):
"""Set up the Tado climate platform."""
tado = hass.data[DOMAIN][entry.entry_id][DATA]
entities = await hass.async_add_executor_job(_generate_entities, tado)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_CLIMATE_TIMER,
CLIMATE_TIMER_SCHEMA,
"set_timer",
)
if entities:
async_add_entities(entities, True)
def _generate_entities(tado):
"""Create all climate entities."""
entities = []
for zone in tado.zones:
if zone["type"] in [TYPE_HEATING, TYPE_AIR_CONDITIONING]:
entity = create_climate_entity(
tado, zone["name"], zone["id"], zone["devices"][0]
)
if entity:
entities.append(entity)
return entities
def create_climate_entity(tado, name: str, zone_id: int, zone: dict):
"""Create a Tado climate entity."""
capabilities = tado.get_capabilities(zone_id)
_LOGGER.debug("Capabilities for zone %s: %s", zone_id, capabilities)
zone_type = capabilities["type"]
support_flags = SUPPORT_PRESET_MODE | SUPPORT_TARGET_TEMPERATURE
supported_hvac_modes = [
TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_OFF],
TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_SMART_SCHEDULE],
]
supported_fan_modes = None
heat_temperatures = None
cool_temperatures = None
if zone_type == TYPE_AIR_CONDITIONING:
# Heat is preferred as it generally has a lower minimum temperature
for mode in ORDERED_KNOWN_TADO_MODES:
if mode not in capabilities:
continue
supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode])
if capabilities[mode].get("swings"):
support_flags |= SUPPORT_SWING_MODE
if not capabilities[mode].get("fanSpeeds"):
continue
support_flags |= SUPPORT_FAN_MODE
if supported_fan_modes:
continue
supported_fan_modes = [
TADO_TO_HA_FAN_MODE_MAP[speed]
for speed in capabilities[mode]["fanSpeeds"]
]
cool_temperatures = capabilities[CONST_MODE_COOL]["temperatures"]
else:
supported_hvac_modes.append(HVAC_MODE_HEAT)
if CONST_MODE_HEAT in capabilities:
heat_temperatures = capabilities[CONST_MODE_HEAT]["temperatures"]
if heat_temperatures is None and "temperatures" in capabilities:
heat_temperatures = capabilities["temperatures"]
if cool_temperatures is None and heat_temperatures is None:
_LOGGER.debug("Not adding zone %s since it has no temperatures", name)
return None
heat_min_temp = None
heat_max_temp = None
heat_step = None
cool_min_temp = None
cool_max_temp = None
cool_step = None
if heat_temperatures is not None:
heat_min_temp = float(heat_temperatures["celsius"]["min"])
heat_max_temp = float(heat_temperatures["celsius"]["max"])
heat_step = heat_temperatures["celsius"].get("step", PRECISION_TENTHS)
if cool_temperatures is not None:
cool_min_temp = float(cool_temperatures["celsius"]["min"])
cool_max_temp = float(cool_temperatures["celsius"]["max"])
cool_step = cool_temperatures["celsius"].get("step", PRECISION_TENTHS)
entity = TadoClimate(
tado,
name,
zone_id,
zone_type,
heat_min_temp,
heat_max_temp,
heat_step,
cool_min_temp,
cool_max_temp,
cool_step,
supported_hvac_modes,
supported_fan_modes,
support_flags,
zone,
)
return entity
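# Sketch of the capabilities payload consumed above, as implied by the key
# accesses in create_climate_entity (field names come from the code; the
# values shown are made up):
#
#     {
#         "type": TYPE_AIR_CONDITIONING,
#         CONST_MODE_COOL: {
#             "temperatures": {"celsius": {"min": 16, "max": 30, "step": 1.0}},
#             "fanSpeeds": [...],  # optional; enables SUPPORT_FAN_MODE
#             "swings": [...],     # optional; enables SUPPORT_SWING_MODE
#         },
#         CONST_MODE_HEAT: {"temperatures": {...}},
#     }
#
# Heating-only zones may instead expose a top-level "temperatures" entry.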
class TadoClimate(TadoZoneEntity, ClimateEntity):
"""Representation of a Tado climate entity."""
def __init__(
self,
tado,
zone_name,
zone_id,
zone_type,
heat_min_temp,
heat_max_temp,
heat_step,
cool_min_temp,
cool_max_temp,
cool_step,
supported_hvac_modes,
supported_fan_modes,
support_flags,
device_info,
):
"""Initialize of Tado climate entity."""
self._tado = tado
super().__init__(zone_name, device_info, tado.device_id, zone_id)
self.zone_id = zone_id
self.zone_type = zone_type
self._unique_id = f"{zone_type} {zone_id} {tado.device_id}"
self._ac_device = zone_type == TYPE_AIR_CONDITIONING
self._supported_hvac_modes = supported_hvac_modes
self._supported_fan_modes = supported_fan_modes
self._support_flags = support_flags
self._available = False
self._cur_temp = None
self._cur_humidity = None
self._heat_min_temp = heat_min_temp
self._heat_max_temp = heat_max_temp
self._heat_step = heat_step
self._cool_min_temp = cool_min_temp
self._cool_max_temp = cool_max_temp
self._cool_step = cool_step
self._target_temp = None
self._current_tado_fan_speed = CONST_FAN_OFF
self._current_tado_hvac_mode = CONST_MODE_OFF
self._current_tado_hvac_action = CURRENT_HVAC_OFF
self._current_tado_swing_mode = TADO_SWING_OFF
self._tado_zone_data = None
self._async_update_zone_data()
async def async_added_to_hass(self):
"""Register for sensor updates."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_TADO_UPDATE_RECEIVED.format(
self._tado.device_id, "zone", self.zone_id
),
self._async_update_callback,
)
)
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def name(self):
"""Return the name of the entity."""
return self.zone_name
@property
def unique_id(self):
"""Return the unique id."""
return self._unique_id
@property
def current_humidity(self):
"""Return the current humidity."""
return self._tado_zone_data.current_humidity
@property
def current_temperature(self):
"""Return the sensor temperature."""
return self._tado_zone_data.current_temp
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
return TADO_TO_HA_HVAC_MODE_MAP.get(self._current_tado_hvac_mode, HVAC_MODE_OFF)
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return self._supported_hvac_modes
@property
def hvac_action(self):
"""Return the current running hvac operation if supported.
Need to be one of CURRENT_HVAC_*.
"""
return TADO_HVAC_ACTION_TO_HA_HVAC_ACTION.get(
self._tado_zone_data.current_hvac_action, CURRENT_HVAC_OFF
)
@property
def fan_mode(self):
"""Return the fan setting."""
if self._ac_device:
return TADO_TO_HA_FAN_MODE_MAP.get(self._current_tado_fan_speed, FAN_AUTO)
return None
@property
def fan_modes(self):
"""List of available fan modes."""
return self._supported_fan_modes
def set_fan_mode(self, fan_mode: str):
"""Turn fan on/off."""
self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode])
@property
def preset_mode(self):
"""Return the current preset mode (home, away)."""
if self._tado_zone_data.is_away:
return PRESET_AWAY
return PRESET_HOME
@property
def preset_modes(self):
"""Return a list of available preset modes."""
return SUPPORT_PRESET
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
self._tado.set_presence(preset_mode)
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
if self._tado_zone_data.current_hvac_mode == CONST_MODE_COOL:
return self._cool_step or self._heat_step
return self._heat_step or self._cool_step
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
        # The target temperature will be None if the device is performing an
        # action that does not affect the temperature, or if the device is
        # switching states.
return self._tado_zone_data.target_temp or self._tado_zone_data.current_temp
def set_timer(self, time_period, temperature=None):
"""Set the timer on the entity, and temperature if supported."""
self._control_hvac(
hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period
)
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
if self._current_tado_hvac_mode not in (
CONST_MODE_OFF,
CONST_MODE_AUTO,
CONST_MODE_SMART_SCHEDULE,
):
self._control_hvac(target_temp=temperature)
return
new_hvac_mode = CONST_MODE_COOL if self._ac_device else CONST_MODE_HEAT
self._control_hvac(target_temp=temperature, hvac_mode=new_hvac_mode)
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
self._control_hvac(hvac_mode=HA_TO_TADO_HVAC_MODE_MAP[hvac_mode])
@property
def available(self):
"""Return if the device is available."""
return self._tado_zone_data.available
@property
def min_temp(self):
"""Return the minimum temperature."""
if (
self._current_tado_hvac_mode == CONST_MODE_COOL
and self._cool_min_temp is not None
):
return self._cool_min_temp
if self._heat_min_temp is not None:
return self._heat_min_temp
return self._cool_min_temp
@property
def max_temp(self):
"""Return the maximum temperature."""
if (
self._current_tado_hvac_mode == CONST_MODE_HEAT
and self._heat_max_temp is not None
):
return self._heat_max_temp
        # Mirror min_temp: fall back to the cooling range, then heating.
        if self._cool_max_temp is not None:
            return self._cool_max_temp
        return self._heat_max_temp
@property
def swing_mode(self):
"""Active swing mode for the device."""
return self._current_tado_swing_mode
@property
def swing_modes(self):
"""Swing modes for the device."""
if self._support_flags & SUPPORT_SWING_MODE:
return [TADO_SWING_ON, TADO_SWING_OFF]
return None
def set_swing_mode(self, swing_mode):
"""Set swing modes for the device."""
self._control_hvac(swing_mode=swing_mode)
@callback
def _async_update_zone_data(self):
"""Load tado data into zone."""
self._tado_zone_data = self._tado.data["zone"][self.zone_id]
self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed
self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode
self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action
self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode
@callback
def _async_update_callback(self):
"""Load tado data and update state."""
self._async_update_zone_data()
self.async_write_ha_state()
def _normalize_target_temp_for_hvac_mode(self):
# Set a target temperature if we don't have any
# This can happen when we switch from Off to On
if self._target_temp is None:
self._target_temp = self._tado_zone_data.current_temp
elif self._current_tado_hvac_mode == CONST_MODE_COOL:
if self._target_temp > self._cool_max_temp:
self._target_temp = self._cool_max_temp
elif self._target_temp < self._cool_min_temp:
self._target_temp = self._cool_min_temp
elif self._current_tado_hvac_mode == CONST_MODE_HEAT:
if self._target_temp > self._heat_max_temp:
self._target_temp = self._heat_max_temp
elif self._target_temp < self._heat_min_temp:
self._target_temp = self._heat_min_temp
def _control_hvac(
self,
hvac_mode=None,
target_temp=None,
fan_mode=None,
swing_mode=None,
duration=None,
):
"""Send new target temperature to Tado."""
if hvac_mode:
self._current_tado_hvac_mode = hvac_mode
if target_temp:
self._target_temp = target_temp
if fan_mode:
self._current_tado_fan_speed = fan_mode
if swing_mode:
self._current_tado_swing_mode = swing_mode
self._normalize_target_temp_for_hvac_mode()
        # Tado does not permit setting the fan speed to off;
        # you must turn off the device instead.
if (
self._current_tado_fan_speed == CONST_FAN_OFF
and self._current_tado_hvac_mode != CONST_MODE_OFF
):
self._current_tado_fan_speed = CONST_FAN_AUTO
if self._current_tado_hvac_mode == CONST_MODE_OFF:
_LOGGER.debug(
"Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id
)
self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, self.zone_type)
return
if self._current_tado_hvac_mode == CONST_MODE_SMART_SCHEDULE:
_LOGGER.debug(
"Switching to SMART_SCHEDULE for zone %s (%d)",
self.zone_name,
self.zone_id,
)
self._tado.reset_zone_overlay(self.zone_id)
return
_LOGGER.debug(
"Switching to %s for zone %s (%d) with temperature %s °C and duration %s",
self._current_tado_hvac_mode,
self.zone_name,
self.zone_id,
self._target_temp,
duration,
)
overlay_mode = CONST_OVERLAY_MANUAL
if duration:
overlay_mode = CONST_OVERLAY_TIMER
elif self._tado.fallback:
# Fallback to Smart Schedule at next Schedule switch if we have fallback enabled
overlay_mode = CONST_OVERLAY_TADO_MODE
temperature_to_send = self._target_temp
if self._current_tado_hvac_mode in TADO_MODES_WITH_NO_TEMP_SETTING:
# A temperature cannot be passed with these modes
temperature_to_send = None
fan_speed = None
if self._support_flags & SUPPORT_FAN_MODE:
fan_speed = self._current_tado_fan_speed
swing = None
if self._support_flags & SUPPORT_SWING_MODE:
swing = self._current_tado_swing_mode
self._tado.set_zone_overlay(
zone_id=self.zone_id,
overlay_mode=overlay_mode, # What to do when the period ends
temperature=temperature_to_send,
duration=duration,
device_type=self.zone_type,
mode=self._current_tado_hvac_mode,
fan_speed=fan_speed, # api defaults to not sending fanSpeed if None specified
swing=swing, # api defaults to not sending swing if None specified
)
|
from mficlient.client import FailedToLogin
import pytest
import requests
import homeassistant.components.mfi.sensor as mfi
import homeassistant.components.sensor as sensor_component
from homeassistant.const import TEMP_CELSIUS
from homeassistant.setup import async_setup_component
import tests.async_mock as mock
PLATFORM = mfi
COMPONENT = sensor_component
THING = "sensor"
GOOD_CONFIG = {
"sensor": {
"platform": "mfi",
"host": "foo",
"port": 6123,
"username": "user",
"password": "pass",
"ssl": True,
"verify_ssl": True,
}
}
async def test_setup_missing_config(hass):
"""Test setup with missing configuration."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
config = {"sensor": {"platform": "mfi"}}
assert await async_setup_component(hass, "sensor", config)
assert not mock_client.called
async def test_setup_failed_login(hass):
"""Test setup with login failure."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
mock_client.side_effect = FailedToLogin
assert not PLATFORM.setup_platform(hass, dict(GOOD_CONFIG), None)
async def test_setup_failed_connect(hass):
"""Test setup with connection failure."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
mock_client.side_effect = requests.exceptions.ConnectionError
assert not PLATFORM.setup_platform(hass, dict(GOOD_CONFIG), None)
async def test_setup_minimum(hass):
"""Test setup with minimum configuration."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
config = dict(GOOD_CONFIG)
del config[THING]["port"]
assert await async_setup_component(hass, COMPONENT.DOMAIN, config)
await hass.async_block_till_done()
assert mock_client.call_count == 1
assert mock_client.call_args == mock.call(
"foo", "user", "pass", port=6443, use_tls=True, verify=True
)
async def test_setup_with_port(hass):
"""Test setup with port."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
config = dict(GOOD_CONFIG)
config[THING]["port"] = 6123
assert await async_setup_component(hass, COMPONENT.DOMAIN, config)
await hass.async_block_till_done()
assert mock_client.call_count == 1
assert mock_client.call_args == mock.call(
"foo", "user", "pass", port=6123, use_tls=True, verify=True
)
async def test_setup_with_tls_disabled(hass):
"""Test setup without TLS."""
with mock.patch("homeassistant.components.mfi.sensor.MFiClient") as mock_client:
config = dict(GOOD_CONFIG)
del config[THING]["port"]
config[THING]["ssl"] = False
config[THING]["verify_ssl"] = False
assert await async_setup_component(hass, COMPONENT.DOMAIN, config)
await hass.async_block_till_done()
assert mock_client.call_count == 1
assert mock_client.call_args == mock.call(
"foo", "user", "pass", port=6080, use_tls=False, verify=False
)
async def test_setup_adds_proper_devices(hass):
"""Test if setup adds devices."""
with mock.patch(
"homeassistant.components.mfi.sensor.MFiClient"
) as mock_client, mock.patch(
"homeassistant.components.mfi.sensor.MfiSensor"
) as mock_sensor:
ports = {
i: mock.MagicMock(model=model) for i, model in enumerate(mfi.SENSOR_MODELS)
}
ports["bad"] = mock.MagicMock(model="notasensor")
mock_client.return_value.get_devices.return_value = [
mock.MagicMock(ports=ports)
]
assert await async_setup_component(hass, COMPONENT.DOMAIN, GOOD_CONFIG)
await hass.async_block_till_done()
for ident, port in ports.items():
if ident != "bad":
mock_sensor.assert_any_call(port, hass)
assert mock.call(ports["bad"], hass) not in mock_sensor.mock_calls
@pytest.fixture(name="port")
def port_fixture():
"""Port fixture."""
return mock.MagicMock()
@pytest.fixture(name="sensor")
def sensor_fixture(hass, port):
"""Sensor fixture."""
return mfi.MfiSensor(port, hass)
async def test_name(port, sensor):
"""Test the name."""
assert port.label == sensor.name
async def test_uom_temp(port, sensor):
"""Test the UOM temperature."""
port.tag = "temperature"
assert TEMP_CELSIUS == sensor.unit_of_measurement
async def test_uom_power(port, sensor):
"""Test the UOEM power."""
port.tag = "active_pwr"
assert sensor.unit_of_measurement == "Watts"
async def test_uom_digital(port, sensor):
"""Test the UOM digital input."""
port.model = "Input Digital"
assert sensor.unit_of_measurement == "State"
async def test_uom_unknown(port, sensor):
"""Test the UOM."""
port.tag = "balloons"
assert sensor.unit_of_measurement == "balloons"
async def test_uom_uninitialized(port, sensor):
"""Test that the UOM defaults if not initialized."""
type(port).tag = mock.PropertyMock(side_effect=ValueError)
assert sensor.unit_of_measurement == "State"
async def test_state_digital(port, sensor):
"""Test the digital input."""
port.model = "Input Digital"
port.value = 0
assert mfi.STATE_OFF == sensor.state
port.value = 1
assert mfi.STATE_ON == sensor.state
port.value = 2
assert mfi.STATE_ON == sensor.state
async def test_state_digits(port, sensor):
"""Test the state of digits."""
port.tag = "didyoucheckthedict?"
port.value = 1.25
with mock.patch.dict(mfi.DIGITS, {"didyoucheckthedict?": 1}):
assert sensor.state == 1.2
with mock.patch.dict(mfi.DIGITS, {}):
assert sensor.state == 1.0
async def test_state_uninitialized(port, sensor):
"""Test the state of uninitialized sensorfs."""
type(port).tag = mock.PropertyMock(side_effect=ValueError)
assert mfi.STATE_OFF == sensor.state
async def test_update(port, sensor):
"""Test the update."""
sensor.update()
assert port.refresh.call_count == 1
assert port.refresh.call_args == mock.call()
|
import logging
import voluptuous as vol
from homeassistant.components import cover, mqtt
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
DEVICE_CLASSES_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_CLOSE_TILT,
SUPPORT_OPEN,
SUPPORT_OPEN_TILT,
SUPPORT_SET_POSITION,
SUPPORT_SET_TILT_POSITION,
SUPPORT_STOP,
SUPPORT_STOP_TILT,
CoverEntity,
)
from homeassistant.const import (
CONF_DEVICE,
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
STATE_UNKNOWN,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from . import (
ATTR_DISCOVERY_HASH,
CONF_COMMAND_TOPIC,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
DOMAIN,
PLATFORMS,
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
subscription,
)
from .debug_info import log_messages
from .discovery import MQTT_DISCOVERY_NEW, clear_discovery_hash
_LOGGER = logging.getLogger(__name__)
CONF_GET_POSITION_TOPIC = "position_topic"
CONF_SET_POSITION_TEMPLATE = "set_position_template"
CONF_SET_POSITION_TOPIC = "set_position_topic"
CONF_TILT_COMMAND_TOPIC = "tilt_command_topic"
CONF_TILT_STATUS_TOPIC = "tilt_status_topic"
CONF_TILT_STATUS_TEMPLATE = "tilt_status_template"
CONF_PAYLOAD_CLOSE = "payload_close"
CONF_PAYLOAD_OPEN = "payload_open"
CONF_PAYLOAD_STOP = "payload_stop"
CONF_POSITION_CLOSED = "position_closed"
CONF_POSITION_OPEN = "position_open"
CONF_STATE_CLOSED = "state_closed"
CONF_STATE_CLOSING = "state_closing"
CONF_STATE_OPEN = "state_open"
CONF_STATE_OPENING = "state_opening"
CONF_TILT_CLOSED_POSITION = "tilt_closed_value"
CONF_TILT_INVERT_STATE = "tilt_invert_state"
CONF_TILT_MAX = "tilt_max"
CONF_TILT_MIN = "tilt_min"
CONF_TILT_OPEN_POSITION = "tilt_opened_value"
CONF_TILT_STATE_OPTIMISTIC = "tilt_optimistic"
TILT_PAYLOAD = "tilt"
COVER_PAYLOAD = "cover"
DEFAULT_NAME = "MQTT Cover"
DEFAULT_OPTIMISTIC = False
DEFAULT_PAYLOAD_CLOSE = "CLOSE"
DEFAULT_PAYLOAD_OPEN = "OPEN"
DEFAULT_PAYLOAD_STOP = "STOP"
DEFAULT_POSITION_CLOSED = 0
DEFAULT_POSITION_OPEN = 100
DEFAULT_RETAIN = False
DEFAULT_TILT_CLOSED_POSITION = 0
DEFAULT_TILT_INVERT_STATE = False
DEFAULT_TILT_MAX = 100
DEFAULT_TILT_MIN = 0
DEFAULT_TILT_OPEN_POSITION = 100
DEFAULT_TILT_OPTIMISTIC = False
OPEN_CLOSE_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE
TILT_FEATURES = (
SUPPORT_OPEN_TILT
| SUPPORT_CLOSE_TILT
| SUPPORT_STOP_TILT
| SUPPORT_SET_TILT_POSITION
)
def validate_options(value):
"""Validate options.
If set position topic is set then get position topic is set as well.
"""
if CONF_SET_POSITION_TOPIC in value and CONF_GET_POSITION_TOPIC not in value:
raise vol.Invalid(
"set_position_topic must be set together with position_topic."
)
return value
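# Illustrative behaviour (sketch; topic values are made up):
#
#     validate_options({"set_position_topic": "cover/set", "position_topic": "cover/pos"})
#     # -> returns the dict unchanged
#     validate_options({"set_position_topic": "cover/set"})
#     # -> raises vol.Invalid, because position_topic is missing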
PLATFORM_SCHEMA = vol.All(
mqtt.MQTT_BASE_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_DEVICE): mqtt.MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_GET_POSITION_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_CLOSE, default=DEFAULT_PAYLOAD_CLOSE): cv.string,
vol.Optional(CONF_PAYLOAD_OPEN, default=DEFAULT_PAYLOAD_OPEN): cv.string,
vol.Optional(CONF_PAYLOAD_STOP, default=DEFAULT_PAYLOAD_STOP): vol.Any(
cv.string, None
),
vol.Optional(CONF_POSITION_CLOSED, default=DEFAULT_POSITION_CLOSED): int,
vol.Optional(CONF_POSITION_OPEN, default=DEFAULT_POSITION_OPEN): int,
vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
vol.Optional(CONF_SET_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_SET_POSITION_TOPIC): mqtt.valid_publish_topic,
vol.Optional(CONF_STATE_CLOSED, default=STATE_CLOSED): cv.string,
vol.Optional(CONF_STATE_CLOSING, default=STATE_CLOSING): cv.string,
vol.Optional(CONF_STATE_OPEN, default=STATE_OPEN): cv.string,
vol.Optional(CONF_STATE_OPENING, default=STATE_OPENING): cv.string,
vol.Optional(CONF_STATE_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(
CONF_TILT_CLOSED_POSITION, default=DEFAULT_TILT_CLOSED_POSITION
): int,
vol.Optional(CONF_TILT_COMMAND_TOPIC): mqtt.valid_publish_topic,
vol.Optional(
CONF_TILT_INVERT_STATE, default=DEFAULT_TILT_INVERT_STATE
): cv.boolean,
vol.Optional(CONF_TILT_MAX, default=DEFAULT_TILT_MAX): int,
vol.Optional(CONF_TILT_MIN, default=DEFAULT_TILT_MIN): int,
vol.Optional(
CONF_TILT_OPEN_POSITION, default=DEFAULT_TILT_OPEN_POSITION
): int,
vol.Optional(
CONF_TILT_STATE_OPTIMISTIC, default=DEFAULT_TILT_OPTIMISTIC
): cv.boolean,
vol.Optional(CONF_TILT_STATUS_TOPIC): mqtt.valid_subscribe_topic,
vol.Optional(CONF_TILT_STATUS_TEMPLATE): cv.template,
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
}
)
.extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
.extend(mqtt.MQTT_JSON_ATTRS_SCHEMA.schema),
validate_options,
)
async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
"""Set up MQTT cover through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, config, async_add_entities)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT cover dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add an MQTT cover."""
discovery_data = discovery_payload.discovery_data
try:
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
hass, config, async_add_entities, config_entry, discovery_data
)
except Exception:
clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(cover.DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
hass, config, async_add_entities, config_entry=None, discovery_data=None
):
"""Set up the MQTT Cover."""
async_add_entities([MqttCover(hass, config, config_entry, discovery_data)])
class MqttCover(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
CoverEntity,
):
"""Representation of a cover that can be controlled using MQTT."""
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the cover."""
self.hass = hass
self._unique_id = config.get(CONF_UNIQUE_ID)
self._position = None
self._state = None
self._sub_state = None
self._optimistic = None
self._tilt_value = None
self._tilt_optimistic = None
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
async def async_added_to_hass(self):
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
def _setup_from_config(self, config):
self._config = config
self._optimistic = config[CONF_OPTIMISTIC] or (
config.get(CONF_STATE_TOPIC) is None
and config.get(CONF_GET_POSITION_TOPIC) is None
)
self._tilt_optimistic = config[CONF_TILT_STATE_OPTIMISTIC]
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
template.hass = self.hass
set_position_template = self._config.get(CONF_SET_POSITION_TEMPLATE)
if set_position_template is not None:
set_position_template.hass = self.hass
tilt_status_template = self._config.get(CONF_TILT_STATUS_TEMPLATE)
if tilt_status_template is not None:
tilt_status_template.hass = self.hass
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
topics = {}
@callback
@log_messages(self.hass, self.entity_id)
def tilt_message_received(msg):
"""Handle tilt updates."""
payload = msg.payload
tilt_status_template = self._config.get(CONF_TILT_STATUS_TEMPLATE)
if tilt_status_template is not None:
payload = tilt_status_template.async_render_with_possible_json_value(
payload
)
if payload.isnumeric() and (
self._config[CONF_TILT_MIN]
<= int(payload)
<= self._config[CONF_TILT_MAX]
):
level = self.find_percentage_in_range(float(payload))
self._tilt_value = level
self.async_write_ha_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_message_received(msg):
"""Handle new MQTT state messages."""
payload = msg.payload
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
payload = template.async_render_with_possible_json_value(payload)
if payload == self._config[CONF_STATE_OPEN]:
self._state = STATE_OPEN
elif payload == self._config[CONF_STATE_OPENING]:
self._state = STATE_OPENING
elif payload == self._config[CONF_STATE_CLOSED]:
self._state = STATE_CLOSED
elif payload == self._config[CONF_STATE_CLOSING]:
self._state = STATE_CLOSING
else:
_LOGGER.warning(
"Payload is not supported (e.g. open, closed, opening, closing): %s",
payload,
)
return
self.async_write_ha_state()
@callback
@log_messages(self.hass, self.entity_id)
def position_message_received(msg):
"""Handle new MQTT state messages."""
payload = msg.payload
template = self._config.get(CONF_VALUE_TEMPLATE)
if template is not None:
payload = template.async_render_with_possible_json_value(payload)
if payload.isnumeric():
percentage_payload = self.find_percentage_in_range(
float(payload), COVER_PAYLOAD
)
self._position = percentage_payload
self._state = (
STATE_CLOSED
if percentage_payload == DEFAULT_POSITION_CLOSED
else STATE_OPEN
)
else:
_LOGGER.warning("Payload is not integer within range: %s", payload)
return
self.async_write_ha_state()
if self._config.get(CONF_GET_POSITION_TOPIC):
topics["get_position_topic"] = {
"topic": self._config.get(CONF_GET_POSITION_TOPIC),
"msg_callback": position_message_received,
"qos": self._config[CONF_QOS],
}
elif self._config.get(CONF_STATE_TOPIC):
topics["state_topic"] = {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": state_message_received,
"qos": self._config[CONF_QOS],
}
else:
# Force into optimistic mode.
self._optimistic = True
if self._config.get(CONF_TILT_STATUS_TOPIC) is None:
self._tilt_optimistic = True
else:
self._tilt_value = STATE_UNKNOWN
topics["tilt_status_topic"] = {
"topic": self._config.get(CONF_TILT_STATUS_TOPIC),
"msg_callback": tilt_message_received,
"qos": self._config[CONF_QOS],
}
self._sub_state = await subscription.async_subscribe_topics(
self.hass, self._sub_state, topics
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def name(self):
"""Return the name of the cover."""
return self._config[CONF_NAME]
@property
def is_closed(self):
"""Return true if the cover is closed or None if the status is unknown."""
if self._state is None:
return None
return self._state == STATE_CLOSED
@property
def is_opening(self):
"""Return true if the cover is actively opening."""
return self._state == STATE_OPENING
@property
def is_closing(self):
"""Return true if the cover is actively closing."""
return self._state == STATE_CLOSING
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
return self._position
@property
def current_cover_tilt_position(self):
"""Return current position of cover tilt."""
return self._tilt_value
@property
def device_class(self):
"""Return the class of this sensor."""
return self._config.get(CONF_DEVICE_CLASS)
@property
def supported_features(self):
"""Flag supported features."""
supported_features = 0
if self._config.get(CONF_COMMAND_TOPIC) is not None:
supported_features = OPEN_CLOSE_FEATURES
if self._config.get(CONF_PAYLOAD_STOP) is not None:
supported_features |= SUPPORT_STOP
if self._config.get(CONF_SET_POSITION_TOPIC) is not None:
supported_features |= SUPPORT_SET_POSITION
if self._config.get(CONF_TILT_COMMAND_TOPIC) is not None:
supported_features |= TILT_FEATURES
return supported_features
async def async_open_cover(self, **kwargs):
"""Move the cover up.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config.get(CONF_COMMAND_TOPIC),
self._config[CONF_PAYLOAD_OPEN],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = STATE_OPEN
if self._config.get(CONF_GET_POSITION_TOPIC):
self._position = self.find_percentage_in_range(
self._config[CONF_POSITION_OPEN], COVER_PAYLOAD
)
self.async_write_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config.get(CONF_COMMAND_TOPIC),
self._config[CONF_PAYLOAD_CLOSE],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that cover has changed state.
self._state = STATE_CLOSED
if self._config.get(CONF_GET_POSITION_TOPIC):
self._position = self.find_percentage_in_range(
self._config[CONF_POSITION_CLOSED], COVER_PAYLOAD
)
self.async_write_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._config.get(CONF_COMMAND_TOPIC),
self._config[CONF_PAYLOAD_STOP],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
async def async_open_cover_tilt(self, **kwargs):
"""Tilt the cover open."""
mqtt.async_publish(
self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
self._config[CONF_TILT_OPEN_POSITION],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._tilt_optimistic:
self._tilt_value = self.find_percentage_in_range(
float(self._config[CONF_TILT_OPEN_POSITION])
)
self.async_write_ha_state()
async def async_close_cover_tilt(self, **kwargs):
"""Tilt the cover closed."""
mqtt.async_publish(
self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
self._config[CONF_TILT_CLOSED_POSITION],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._tilt_optimistic:
self._tilt_value = self.find_percentage_in_range(
float(self._config[CONF_TILT_CLOSED_POSITION])
)
self.async_write_ha_state()
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover tilt to a specific position."""
position = kwargs[ATTR_TILT_POSITION]
# The position needs to be between min and max
level = self.find_in_range_from_percent(position)
mqtt.async_publish(
self.hass,
self._config.get(CONF_TILT_COMMAND_TOPIC),
level,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
set_position_template = self._config.get(CONF_SET_POSITION_TEMPLATE)
position = kwargs[ATTR_POSITION]
percentage_position = position
if set_position_template is not None:
position = set_position_template.async_render(parse_result=False, **kwargs)
else:
position = self.find_in_range_from_percent(position, COVER_PAYLOAD)
mqtt.async_publish(
self.hass,
self._config.get(CONF_SET_POSITION_TOPIC),
position,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
self._state = (
STATE_CLOSED
if percentage_position == self._config[CONF_POSITION_CLOSED]
else STATE_OPEN
)
self._position = percentage_position
self.async_write_ha_state()
async def async_toggle_tilt(self, **kwargs):
"""Toggle the entity."""
if self.is_tilt_closed():
await self.async_open_cover_tilt(**kwargs)
else:
await self.async_close_cover_tilt(**kwargs)
def is_tilt_closed(self):
"""Return if the cover is tilted closed."""
return self._tilt_value == self.find_percentage_in_range(
float(self._config[CONF_TILT_CLOSED_POSITION])
)
def find_percentage_in_range(self, position, range_type=TILT_PAYLOAD):
"""Find the 0-100% value within the specified range."""
# the range of motion as defined by the min max values
if range_type == COVER_PAYLOAD:
max_range = self._config[CONF_POSITION_OPEN]
min_range = self._config[CONF_POSITION_CLOSED]
else:
max_range = self._config[CONF_TILT_MAX]
min_range = self._config[CONF_TILT_MIN]
current_range = max_range - min_range
# offset to be zero based
offset_position = position - min_range
position_percentage = round(float(offset_position) / current_range * 100.0)
max_percent = 100
min_percent = 0
position_percentage = min(max(position_percentage, min_percent), max_percent)
if range_type == TILT_PAYLOAD and self._config[CONF_TILT_INVERT_STATE]:
return 100 - position_percentage
return position_percentage
def find_in_range_from_percent(self, percentage, range_type=TILT_PAYLOAD):
"""
Find the adjusted value for 0-100% within the specified range.
if the range is 80-180 and the percentage is 90
this method would determine the value to send on the topic
by offsetting the max and min, getting the percentage value and
returning the offset
"""
if range_type == COVER_PAYLOAD:
max_range = self._config[CONF_POSITION_OPEN]
min_range = self._config[CONF_POSITION_CLOSED]
else:
max_range = self._config[CONF_TILT_MAX]
min_range = self._config[CONF_TILT_MIN]
offset = min_range
current_range = max_range - min_range
position = round(current_range * (percentage / 100.0))
position += offset
if range_type == TILT_PAYLOAD and self._config[CONF_TILT_INVERT_STATE]:
position = max_range - position + offset
return position
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
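# Illustrative sketch of the range mapping performed by find_percentage_in_range
# and find_in_range_from_percent above, rewritten as standalone helpers. This is
# an assumed simplification: plain arguments instead of the entity's config, and
# inversion reduced to a boolean flag.
def _percent_in_range(value, min_range, max_range, invert=False):
    """Map a raw device value in [min_range, max_range] to 0-100%."""
    percent = round(float(value - min_range) / (max_range - min_range) * 100.0)
    percent = min(max(percent, 0), 100)
    return 100 - percent if invert else percent


def _value_from_percent(percent, min_range, max_range, invert=False):
    """Map a 0-100% position back to a raw value in [min_range, max_range]."""
    value = min_range + round((max_range - min_range) * (percent / 100.0))
    return max_range - value + min_range if invert else value


# For example, with a tilt range of 80-180, 90% maps to the raw value 170,
# and 170 maps back to 90%:
#   _value_from_percent(90, 80, 180)   # -> 170
#   _percent_in_range(170, 80, 180)    # -> 90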
|
import numpy as np
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links import SegNetBasic
from chainercv.utils import assert_is_semantic_segmentation_link
@testing.parameterize(
{'train': False},
{'train': True}
)
class TestSegNetBasic(unittest.TestCase):
def setUp(self):
self.n_class = 10
self.link = SegNetBasic(n_class=self.n_class)
def check_call(self):
xp = self.link.xp
x = chainer.Variable(xp.random.uniform(
low=-1, high=1, size=(2, 3, 128, 160)).astype(np.float32))
y = self.link(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.array, xp.ndarray)
self.assertEqual(y.shape, (2, self.n_class, 128, 160))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
@attr.slow
def test_predict_cpu(self):
assert_is_semantic_segmentation_link(self.link, self.n_class)
@attr.gpu
@attr.slow
def test_predict_gpu(self):
self.link.to_gpu()
assert_is_semantic_segmentation_link(self.link, self.n_class)
@testing.parameterize(*testing.product({
'n_class': [None, 5, 11],
'pretrained_model': ['camvid'],
}))
class TestSegNetPretrained(unittest.TestCase):
@attr.slow
def test_pretrained(self):
kwargs = {
'n_class': self.n_class,
'pretrained_model': self.pretrained_model,
}
if self.pretrained_model == 'camvid':
valid = self.n_class in {None, 11}
if valid:
SegNetBasic(**kwargs)
else:
with self.assertRaises(ValueError):
SegNetBasic(**kwargs)
testing.run_module(__name__, __file__)
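# Minimal usage sketch (illustrative only, mirroring check_call above; the
# array shapes and n_class value are assumptions, not fixtures from this test):
#
#   model = SegNetBasic(n_class=10)
#   x = np.random.uniform(-1, 1, (1, 3, 128, 160)).astype(np.float32)
#   y = model(chainer.Variable(x))   # chainer.Variable of shape (1, 10, 128, 160)
#   labels = model.predict([x[0]])   # list with one (128, 160) label map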
|
from datetime import timedelta
from homeassistant.components.uptime.sensor import UptimeSensor
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_uptime_min_config(hass):
"""Test minimum uptime configuration."""
config = {"sensor": {"platform": "uptime"}}
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.uptime")
assert state.attributes.get("unit_of_measurement") == "days"
async def test_uptime_sensor_name_change(hass):
"""Test uptime sensor with different name."""
config = {"sensor": {"platform": "uptime", "name": "foobar"}}
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.foobar")
assert state.attributes.get("unit_of_measurement") == "days"
async def test_uptime_sensor_config_hours(hass):
"""Test uptime sensor with hours defined in config."""
config = {"sensor": {"platform": "uptime", "unit_of_measurement": "hours"}}
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.uptime")
assert state.attributes.get("unit_of_measurement") == "hours"
async def test_uptime_sensor_config_minutes(hass):
"""Test uptime sensor with minutes defined in config."""
config = {"sensor": {"platform": "uptime", "unit_of_measurement": "minutes"}}
assert await async_setup_component(hass, "sensor", config)
await hass.async_block_till_done()
state = hass.states.get("sensor.uptime")
assert state.attributes.get("unit_of_measurement") == "minutes"
async def test_uptime_sensor_days_output(hass):
"""Test uptime sensor output data."""
sensor = UptimeSensor("test", "days")
assert sensor.unit_of_measurement == "days"
new_time = sensor.initial + timedelta(days=1)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 1.00
new_time = sensor.initial + timedelta(days=111.499)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 111.50
async def test_uptime_sensor_hours_output(hass):
"""Test uptime sensor output data."""
sensor = UptimeSensor("test", "hours")
assert sensor.unit_of_measurement == "hours"
new_time = sensor.initial + timedelta(hours=16)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 16.00
new_time = sensor.initial + timedelta(hours=72.499)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 72.50
async def test_uptime_sensor_minutes_output(hass):
"""Test uptime sensor output data."""
sensor = UptimeSensor("test", "minutes")
assert sensor.unit_of_measurement == "minutes"
new_time = sensor.initial + timedelta(minutes=16)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 16.00
new_time = sensor.initial + timedelta(minutes=12.499)
with patch("homeassistant.util.dt.now", return_value=new_time):
await sensor.async_update()
assert sensor.state == 12.50
|
from homeassistant.core import State
from tests.common import async_mock_service
async def test_reproducing_states(hass, caplog):
"""Test reproducing Input datetime states."""
hass.states.async_set(
"input_datetime.entity_datetime",
"2010-10-10 01:20:00",
{"has_date": True, "has_time": True},
)
hass.states.async_set(
"input_datetime.entity_time", "01:20:00", {"has_date": False, "has_time": True}
)
hass.states.async_set(
"input_datetime.entity_date",
"2010-10-10",
{"has_date": True, "has_time": False},
)
datetime_calls = async_mock_service(hass, "input_datetime", "set_datetime")
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "2010-10-10 01:20:00"),
State("input_datetime.entity_time", "01:20:00"),
State("input_datetime.entity_date", "2010-10-10"),
],
)
assert len(datetime_calls) == 0
# Test invalid state is handled
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "not_supported"),
State("input_datetime.entity_datetime", "not-valid-date"),
State("input_datetime.entity_datetime", "not:valid:time"),
State("input_datetime.entity_datetime", "1234-56-78 90:12:34"),
],
)
assert "not_supported" in caplog.text
assert "not-valid-date" in caplog.text
assert "not:valid:time" in caplog.text
assert "1234-56-78 90:12:34" in caplog.text
assert len(datetime_calls) == 0
# Make sure correct services are called
await hass.helpers.state.async_reproduce_state(
[
State("input_datetime.entity_datetime", "2011-10-10 02:20:00"),
State("input_datetime.entity_time", "02:20:00"),
State("input_datetime.entity_date", "2011-10-10"),
# Should not raise
State("input_datetime.non_existing", "2010-10-10 01:20:00"),
],
)
valid_calls = [
{
"entity_id": "input_datetime.entity_datetime",
"datetime": "2011-10-10 02:20:00",
},
{"entity_id": "input_datetime.entity_time", "time": "02:20:00"},
{"entity_id": "input_datetime.entity_date", "date": "2011-10-10"},
]
assert len(datetime_calls) == 3
for call in datetime_calls:
assert call.domain == "input_datetime"
assert call.data in valid_calls
valid_calls.remove(call.data)
|
from kombu import Connection, Producer, Consumer, Queue, uuid
class FibonacciRpcClient:
def __init__(self, connection):
self.connection = connection
self.callback_queue = Queue(uuid(), exclusive=True, auto_delete=True)
def on_response(self, message):
if message.properties['correlation_id'] == self.correlation_id:
self.response = message.payload['result']
def call(self, n):
self.response = None
self.correlation_id = uuid()
with Producer(self.connection) as producer:
producer.publish(
{'n': n},
exchange='',
routing_key='rpc_queue',
declare=[self.callback_queue],
reply_to=self.callback_queue.name,
correlation_id=self.correlation_id,
)
with Consumer(self.connection,
on_message=self.on_response,
queues=[self.callback_queue], no_ack=True):
while self.response is None:
self.connection.drain_events()
return self.response
def main(broker_url):
connection = Connection(broker_url)
fibonacci_rpc = FibonacciRpcClient(connection)
print(' [x] Requesting fib(30)')
response = fibonacci_rpc.call(30)
print(f' [.] Got {response!r}')
if __name__ == '__main__':
main('pyamqp://')
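# A minimal companion server sketch for the client above (an assumption, not
# part of the original example). It consumes requests from 'rpc_queue',
# computes fib(n) and publishes the result on the queue named in the request's
# reply_to property, echoing correlation_id so on_response() can match it.
# It reuses the kombu imports already at the top of this module.
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)


def serve(broker_url):
    connection = Connection(broker_url)
    rpc_queue = Queue('rpc_queue')

    def on_request(body, message):
        # Reply on the default exchange, routed to the caller's callback queue.
        with Producer(connection) as producer:
            producer.publish(
                {'result': fib(body['n'])},
                exchange='',
                routing_key=message.properties['reply_to'],
                correlation_id=message.properties['correlation_id'],
            )
        message.ack()

    with Consumer(connection, queues=[rpc_queue], callbacks=[on_request]):
        print(' [x] Awaiting RPC requests')
        while True:
            connection.drain_events()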
|
import os
from flexx import flx
from tornado.web import StaticFileHandler
# The directory to load videos from
dirname = os.path.expanduser('~/Videos')
# Collect videos that look like they can be read in html5
videos = {}
for fname in os.listdir(dirname):
if fname.endswith('.mp4'):
videos[fname] = '/videos/' + fname
# Add some online videos too, for fun
videos['bbb.mp4 (online)'] = 'http://www.w3schools.com/tags/mov_bbb.mp4'
videos['ice-age.mp4 (online)'] = ('https://dl.dropboxusercontent.com/u/1463853/'
'ice%20age%204%20trailer.mp4')
# Make use of Tornado's static file handler
tornado_app = flx.create_server().app
tornado_app.add_handlers(r".*", [
(r"/videos/(.*)", StaticFileHandler, {"path": dirname}),
])
class VideoViewer(flx.Widget):
""" A simple videoviewer that displays a list of videos found on the
server's computer, plus a few online videos. Note that not all videos
may be playable in HTML5.
"""
def init(self):
with flx.HSplit():
with flx.TreeWidget(max_selected=1, flex=1) as self.videolist:
for name in sorted(videos):
flx.TreeItem(text=name)
self.player = flx.VideoWidget(flex=5)
@flx.reaction('videolist.children*.selected')
def on_select(self, *events):
for ev in events:
if ev.source.selected:
fname = ev.source.text
self.player.set_source(videos[fname])
if __name__ == '__main__':
m = flx.launch(VideoViewer)
flx.run()
|
import numpy as np
from ..annotations import _annotations_starts_stops
from ..utils import logger, verbose, sum_squared, warn
from ..filter import filter_data
from ..epochs import Epochs, BaseEpochs
from ..io.base import BaseRaw
from ..evoked import Evoked
from ..io import RawArray
from ..io.meas_info import create_info
from ..io.pick import _picks_to_idx, pick_types, pick_channels
@verbose
def qrs_detector(sfreq, ecg, thresh_value=0.6, levels=2.5, n_thresh=3,
l_freq=5, h_freq=35, tstart=0, filter_length='10s',
verbose=None):
"""Detect QRS component in ECG channels.
QRS is the main wave on the heart beat.
Parameters
----------
sfreq : float
Sampling rate.
ecg : array
ECG signal.
thresh_value : float | str
QRS detection threshold. Can also be "auto" for automatic
selection of the threshold.
levels : float
Number of standard deviations from the mean to include for detection.
n_thresh : int
Maximum number of crossings.
l_freq : float
Low pass frequency.
h_freq : float
High pass frequency.
%(ecg_tstart)s
%(ecg_filter_length)s
%(verbose)s
Returns
-------
events : array
Indices of ECG peaks
"""
win_size = int(round((60.0 * sfreq) / 120.0))
filtecg = filter_data(ecg, sfreq, l_freq, h_freq, None, filter_length,
0.5, 0.5, phase='zero-double', fir_window='hann',
fir_design='firwin2')
ecg_abs = np.abs(filtecg)
init = int(sfreq)
n_samples_start = int(sfreq * tstart)
ecg_abs = ecg_abs[n_samples_start:]
n_points = len(ecg_abs)
maxpt = np.empty(3)
maxpt[0] = np.max(ecg_abs[:init])
maxpt[1] = np.max(ecg_abs[init:init * 2])
maxpt[2] = np.max(ecg_abs[init * 2:init * 3])
init_max = np.mean(maxpt)
if thresh_value == 'auto':
thresh_runs = np.arange(0.3, 1.1, 0.05)
elif isinstance(thresh_value, str):
raise ValueError('threshold value must be "auto" or a float')
else:
thresh_runs = [thresh_value]
# Try a few thresholds (or just one)
clean_events = list()
for thresh_value in thresh_runs:
thresh1 = init_max * thresh_value
numcross = list()
time = list()
rms = list()
ii = 0
while ii < (n_points - win_size):
window = ecg_abs[ii:ii + win_size]
if window[0] > thresh1:
max_time = np.argmax(window)
time.append(ii + max_time)
nx = np.sum(np.diff(((window > thresh1).astype(np.int64) ==
1).astype(int)))
numcross.append(nx)
rms.append(np.sqrt(sum_squared(window) / window.size))
ii += win_size
else:
ii += 1
if len(rms) == 0:
rms.append(0.0)
time.append(0.0)
time = np.array(time)
rms_mean = np.mean(rms)
rms_std = np.std(rms)
rms_thresh = rms_mean + (rms_std * levels)
b = np.where(rms < rms_thresh)[0]
a = np.array(numcross)[b]
ce = time[b[a < n_thresh]]
ce += n_samples_start
clean_events.append(ce)
# pick the best threshold; first get effective heart rates
rates = np.array([60. * len(cev) / (len(ecg) / float(sfreq))
for cev in clean_events])
# now find heart rates that seem reasonable (infant through adult athlete)
idx = np.where(np.logical_and(rates <= 160., rates >= 40.))[0]
if len(idx) > 0:
ideal_rate = np.median(rates[idx]) # get close to the median
else:
ideal_rate = 80. # get close to a reasonable default
idx = np.argmin(np.abs(rates - ideal_rate))
clean_events = clean_events[idx]
return clean_events
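# Minimal usage sketch (illustrative only; the sampling rate and synthetic
# signal below are assumptions, not part of this module):
#
#   sfreq = 250.
#   t = np.arange(0, 10, 1. / sfreq)
#   ecg_sim = np.sin(2 * np.pi * 1.2 * t) ** 20   # sharp peaks at ~72 bpm
#   peaks = qrs_detector(sfreq, ecg_sim, thresh_value='auto')
#   # `peaks` holds sample indices of the detected R-wave maxima.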
@verbose
def find_ecg_events(raw, event_id=999, ch_name=None, tstart=0.0,
l_freq=5, h_freq=35, qrs_threshold='auto',
filter_length='10s', return_ecg=False,
reject_by_annotation=True, verbose=None):
"""Find ECG events by localizing the R wave peaks.
Parameters
----------
raw : instance of Raw
The raw data.
%(ecg_event_id)s
%(ecg_ch_name)s
%(ecg_tstart)s
%(ecg_filter_freqs)s
qrs_threshold : float | str
Between 0 and 1. qrs detection threshold. Can also be "auto" to
automatically choose the threshold that generates a reasonable
number of heartbeats (40-160 beats / min).
%(ecg_filter_length)s
return_ecg : bool
Return the ECG data. This is especially useful if no ECG channel
is present in the input data, so one will be synthesized. Defaults to
``False``.
%(reject_by_annotation_all)s
.. versionadded:: 0.18
%(verbose)s
Returns
-------
ecg_events : array
The events corresponding to the peaks of the R waves.
ch_ecg : string
Name of channel used.
average_pulse : float
The estimated average pulse. If no ECG events could be found, this will
be zero.
ecg : array | None
The ECG data of the synthesized ECG channel, if any. This will only
be returned if ``return_ecg=True`` was passed.
See Also
--------
create_ecg_epochs
compute_proj_ecg
"""
skip_by_annotation = ('edge', 'bad') if reject_by_annotation else ()
del reject_by_annotation
idx_ecg = _get_ecg_channel_index(ch_name, raw)
if idx_ecg is not None:
logger.info('Using channel %s to identify heart beats.'
% raw.ch_names[idx_ecg])
ecg = raw.get_data(picks=idx_ecg)
else:
ecg = _make_ecg(raw, None, None)[0]
assert ecg.ndim == 2 and ecg.shape[0] == 1
ecg = ecg[0]
# Deal with filtering the same way we do in raw, i.e. filter each good
# segment
onsets, ends = _annotations_starts_stops(
raw, skip_by_annotation, 'reject_by_annotation', invert=True)
ecgs = list()
max_idx = (ends - onsets).argmax()
for si, (start, stop) in enumerate(zip(onsets, ends)):
# Only output filter params once (for info level), and only warn
# once about the length criterion (longest segment is too short)
use_verbose = verbose if si == max_idx else 'error'
ecgs.append(filter_data(
ecg[start:stop], raw.info['sfreq'], l_freq, h_freq, [0],
filter_length, 0.5, 0.5, 1, 'fir', None, copy=False,
phase='zero-double', fir_window='hann', fir_design='firwin2',
verbose=use_verbose))
ecg = np.concatenate(ecgs)
# detecting QRS and generating events. Since not user-controlled, don't
# output filter params here (hardcode verbose=False)
ecg_events = qrs_detector(raw.info['sfreq'], ecg, tstart=tstart,
thresh_value=qrs_threshold, l_freq=None,
h_freq=None, verbose=False)
# map ECG events back to original times
remap = np.empty(len(ecg), int)
offset = 0
for start, stop in zip(onsets, ends):
this_len = stop - start
assert this_len >= 0
remap[offset:offset + this_len] = np.arange(start, stop)
offset += this_len
assert offset == len(ecg)
ecg_events = remap[ecg_events]
n_events = len(ecg_events)
duration_sec = len(ecg) / raw.info['sfreq'] - tstart
duration_min = duration_sec / 60.
average_pulse = n_events / duration_min
logger.info("Number of ECG events detected : %d (average pulse %d / "
"min.)" % (n_events, average_pulse))
ecg_events = np.array([ecg_events + raw.first_samp,
np.zeros(n_events, int),
event_id * np.ones(n_events, int)]).T
out = (ecg_events, idx_ecg, average_pulse)
ecg = ecg[np.newaxis] # backward compat output 2D
if return_ecg:
out += (ecg,)
return out
def _get_ecg_channel_index(ch_name, inst):
"""Get ECG channel index, if no channel found returns None."""
if ch_name is None:
ecg_idx = pick_types(inst.info, meg=False, eeg=False, stim=False,
eog=False, ecg=True, emg=False, ref_meg=False,
exclude='bads')
else:
if ch_name not in inst.ch_names:
raise ValueError('%s not in channel list (%s)' %
(ch_name, inst.ch_names))
ecg_idx = pick_channels(inst.ch_names, include=[ch_name])
if len(ecg_idx) == 0:
return None
# raise RuntimeError('No ECG channel found. Please specify ch_name '
# 'parameter e.g. MEG 1531')
if len(ecg_idx) > 1:
warn('More than one ECG channel found. Using only %s.'
% inst.ch_names[ecg_idx[0]])
return ecg_idx[0]
@verbose
def create_ecg_epochs(raw, ch_name=None, event_id=999, picks=None, tmin=-0.5,
tmax=0.5, l_freq=8, h_freq=16, reject=None, flat=None,
baseline=None, preload=True, keep_ecg=False,
reject_by_annotation=True, decim=1, verbose=None):
"""Conveniently generate epochs around ECG artifact events.
%(create_ecg_epochs)s
.. note:: Filtering is only applied to the ECG channel while finding
events. The resulting ``ecg_epochs`` will have no filtering
applied (i.e., have the same filter properties as the input
``raw`` instance).
Parameters
----------
raw : instance of Raw
The raw data.
%(ecg_ch_name)s
%(ecg_event_id)s
%(picks_all)s
tmin : float
Start time before event.
tmax : float
End time after event.
%(ecg_filter_freqs)s
%(reject_epochs)s
%(flat)s
%(baseline_epochs)s
preload : bool
Preload epochs or not (default True). Must be True if
keep_ecg is True.
keep_ecg : bool
When ECG is synthetically created (after picking), should it be added
to the epochs? Must be False when synthetic channel is not used.
Defaults to False.
%(reject_by_annotation_epochs)s
.. versionadded:: 0.14.0
%(decim)s
.. versionadded:: 0.21.0
%(verbose)s
Returns
-------
ecg_epochs : instance of Epochs
Data epoched around ECG R wave peaks.
See Also
--------
find_ecg_events
compute_proj_ecg
"""
has_ecg = 'ecg' in raw or ch_name is not None
if keep_ecg and (has_ecg or not preload):
raise ValueError('keep_ecg can be True only if the ECG channel is '
'created synthetically and preload=True.')
events, _, _, ecg = find_ecg_events(
raw, ch_name=ch_name, event_id=event_id, l_freq=l_freq, h_freq=h_freq,
return_ecg=True, reject_by_annotation=reject_by_annotation)
picks = _picks_to_idx(raw.info, picks, 'all', exclude=())
# create epochs around ECG events and baseline (important)
ecg_epochs = Epochs(raw, events=events, event_id=event_id,
tmin=tmin, tmax=tmax, proj=False, flat=flat,
picks=picks, reject=reject, baseline=baseline,
reject_by_annotation=reject_by_annotation,
preload=preload, decim=decim)
if keep_ecg:
# We know we have created a synthetic channel and epochs are preloaded
ecg_raw = RawArray(
ecg, create_info(ch_names=['ECG-SYN'],
sfreq=raw.info['sfreq'], ch_types=['ecg']),
first_samp=raw.first_samp)
ignore = ['ch_names', 'chs', 'nchan', 'bads']
for k, v in raw.info.items():
if k not in ignore:
ecg_raw.info[k] = v
syn_epochs = Epochs(ecg_raw, events=ecg_epochs.events,
event_id=event_id, tmin=tmin, tmax=tmax,
proj=False, picks=[0], baseline=baseline,
decim=decim, preload=True)
ecg_epochs = ecg_epochs.add_channels([syn_epochs])
return ecg_epochs
@verbose
def _make_ecg(inst, start, stop, reject_by_annotation=False, verbose=None):
"""Create ECG signal from cross channel average."""
if not any(c in inst for c in ['mag', 'grad']):
raise ValueError('Unable to generate artificial ECG channel')
for ch in ['mag', 'grad']:
if ch in inst:
break
logger.info('Reconstructing ECG signal from {}'
.format({'mag': 'Magnetometers',
'grad': 'Gradiometers'}[ch]))
picks = pick_types(inst.info, meg=ch, eeg=False, ref_meg=False)
if isinstance(inst, BaseRaw):
reject_by_annotation = 'omit' if reject_by_annotation else None
ecg, times = inst.get_data(picks, start, stop, reject_by_annotation,
True)
elif isinstance(inst, BaseEpochs):
ecg = np.hstack(inst.copy().crop(start, stop).get_data())
times = inst.times
elif isinstance(inst, Evoked):
ecg = inst.data
times = inst.times
return ecg.mean(0, keepdims=True), times
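# Minimal end-to-end sketch (illustrative; `raw` is assumed to be an existing
# mne.io.Raw instance, e.g. loaded with mne.io.read_raw_fif):
#
#   events, ch_ecg, average_pulse = find_ecg_events(raw)
#   ecg_epochs = create_ecg_epochs(raw, tmin=-0.5, tmax=0.5)
#   ecg_evoked = ecg_epochs.average()   # average ECG artifact across epochs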
|
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from mptt.managers import TreeManager
from mptt.models import MPTTModel
from mptt.models import TreeForeignKey
from zinnia.managers import EntryRelatedPublishedManager
from zinnia.managers import entries_published
class Category(MPTTModel):
"""
Simple model for categorizing entries.
"""
title = models.CharField(
_('title'), max_length=255)
slug = models.SlugField(
_('slug'), unique=True, max_length=255,
help_text=_("Used to build the category's URL."))
description = models.TextField(
_('description'), blank=True)
parent = TreeForeignKey(
'self',
related_name='children',
null=True, blank=True,
on_delete=models.SET_NULL,
verbose_name=_('parent category'))
objects = TreeManager()
published = EntryRelatedPublishedManager()
def entries_published(self):
"""
Returns category's published entries.
"""
return entries_published(self.entries)
@property
def tree_path(self):
"""
Returns the category's tree path
by concatenating the slugs of its ancestors.
"""
if self.parent_id:
return '/'.join(
[ancestor.slug for ancestor in self.get_ancestors()] +
[self.slug])
return self.slug
def get_absolute_url(self):
"""
Builds and returns the category's URL
based on its tree path.
"""
return reverse('zinnia:category_detail', args=(self.tree_path,))
def __str__(self):
return self.title
class Meta:
"""
Category's meta information.
"""
ordering = ['title']
verbose_name = _('category')
verbose_name_plural = _('categories')
class MPTTMeta:
"""
Category MPTT's meta information.
"""
order_insertion_by = ['title']
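# Illustrative usage sketch (assumes a configured Django project with zinnia's
# URLs installed; the titles and slugs below are examples, not fixtures):
#
#   python = Category.objects.create(title='Python', slug='python')
#   django = Category.objects.create(title='Django', slug='django', parent=python)
#   django.tree_path            # -> 'python/django'
#   django.get_absolute_url()   # -> URL for 'zinnia:category_detail',
#                               #    built from args=('python/django',)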
|
import logging
import re
from typing import Union, Dict
from datetime import timedelta
from discord.ext.commands.converter import Converter
from redbot.core import commands
log = logging.getLogger("red.cogs.mutes")
# the following regex is slightly modified from Red
# it's changed to be slightly more strict on matching with finditer
# this is to prevent "empty" matches when parsing the full reason
# It is also designed to allow a time interval at the beginning or the end of the mute,
# to account for those times when you think of adding time *after* already typing out the reason
# https://github.com/Cog-Creators/Red-DiscordBot/blob/V3/develop/redbot/core/commands/converter.py#L55
TIME_RE_STRING = r"|".join(
[
r"((?P<weeks>\d+?)\s?(weeks?|w))",
r"((?P<days>\d+?)\s?(days?|d))",
r"((?P<hours>\d+?)\s?(hours?|hrs|hr?))",
r"((?P<minutes>\d+?)\s?(minutes?|mins?|m(?!o)))", # prevent matching "months"
r"((?P<seconds>\d+?)\s?(seconds?|secs?|s))",
]
)
TIME_RE = re.compile(TIME_RE_STRING, re.I)
TIME_SPLIT = re.compile(r"t(?:ime)?=")
class MuteTime(Converter):
"""
This will parse the defined multi-response pattern and provide usable formats
to be used in multiple responses.
"""
async def convert(
self, ctx: commands.Context, argument: str
) -> Dict[str, Union[timedelta, str, None]]:
time_split = TIME_SPLIT.split(argument)
result: Dict[str, Union[timedelta, str, None]] = {}
if time_split:
maybe_time = time_split[-1]
else:
maybe_time = argument
time_data = {}
for time in TIME_RE.finditer(maybe_time):
argument = argument.replace(time[0], "")
for k, v in time.groupdict().items():
if v:
time_data[k] = int(v)
if time_data:
result["duration"] = timedelta(**time_data)
result["reason"] = argument
return result
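# Illustrative sketch of the conversion behaviour (the inputs and outputs below
# are assumed examples, not test fixtures). TIME_RE pulls the interval tokens
# out of the argument and whatever text is left over becomes the reason:
#
#   "spamming 2 hours 30m"  -> {"duration": timedelta(hours=2, minutes=30),
#                               "reason": "spamming  "}   # leftover whitespace kept
#   "being rude"            -> {"reason": "being rude"}   # no duration parsed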
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from ipmisensor import IPMISensorCollector
##########################################################################
class TestIPMISensorCollector(CollectorTestCase):
def setUp(self, thresholds=False):
config = get_collector_config('IPMISensorCollector', {
'interval': 10,
'bin': 'true',
'use_sudo': False,
'thresholds': thresholds,
})
self.collector = IPMISensorCollector(config, None)
def test_import(self):
self.assertTrue(IPMISensorCollector)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(self.getFixture('ipmitool.out').getvalue(), '')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
metrics = {
'CPU1.Temp': 0.0,
'CPU2.Temp': 0.0,
'System.Temp': 32.000000,
'CPU1.Vcore': 1.080000,
'CPU2.Vcore': 1.000000,
'CPU1.VTT': 1.120000,
'CPU2.VTT': 1.176000,
'CPU1.DIMM': 1.512000,
'CPU2.DIMM': 1.512000,
'+1_5V': 1.512000,
'+1_8V': 1.824000,
'+5V': 4.992000,
'+12V': 12.031000,
'+1_1V': 1.112000,
'+3_3V': 3.288000,
'+3_3VSB': 3.240000,
'VBAT': 3.240000,
'Fan1': 4185.000000,
'Fan2': 4185.000000,
'Fan3': 4185.000000,
'Fan7': 3915.000000,
'Fan8': 3915.000000,
'Intrusion': 0.000000,
'PS.Status': 0.000000,
'P1-DIMM1A.Temp': 41.000000,
'P1-DIMM1B.Temp': 39.000000,
'P1-DIMM2A.Temp': 38.000000,
'P1-DIMM2B.Temp': 40.000000,
'P1-DIMM3A.Temp': 37.000000,
'P1-DIMM3B.Temp': 38.000000,
'P2-DIMM1A.Temp': 39.000000,
'P2-DIMM1B.Temp': 38.000000,
'P2-DIMM2A.Temp': 39.000000,
'P2-DIMM2B.Temp': 39.000000,
'P2-DIMM3A.Temp': 39.000000,
'P2-DIMM3B.Temp': 40.000000,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_thresholds(self, publish_mock):
self.setUp(thresholds=True)
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(self.getFixture('ipmitool.out').getvalue(), '')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
metrics = {
'System.Temp.Reading': 32.0,
'System.Temp.Lower.NonRecoverable': 0.0,
'System.Temp.Lower.Critical': 0.0,
'System.Temp.Lower.NonCritical': 0.0,
'System.Temp.Upper.NonCritical': 81.0,
'System.Temp.Upper.Critical': 82.0,
'System.Temp.Upper.NonRecoverable': 83.0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_hpilo(self, publish_mock):
patch_communicate = patch(
'subprocess.Popen.communicate',
Mock(return_value=(self.getFixture('ipmihp.out').getvalue(), '')))
patch_communicate.start()
self.collector.collect()
patch_communicate.stop()
metrics = {
'01-Inlet.Ambient': 18.0,
'02-CPU': 40.0,
'03-P1.DIMM.1-2': 28.0,
'05-Chipset': 55.00,
'06-Chipset.Zone': 40.00,
'07-VR.P1.Zone': 45.00,
'09-iLO.Zone': 40.00,
'Fan.1': 15.68,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
import sys
import threading
import traceback
from .screen import Screen
from ...common.interfaces import AbstractPlugin, AggregateResultListener
class Plugin(AbstractPlugin, AggregateResultListener):
''' Console plugin '''
SECTION = 'console'
def __init__(self, core, cfg, name):
AbstractPlugin.__init__(self, core, cfg, name)
self.log = logging.getLogger(__name__)
self.screen = None
self.render_exception = None
self.console_markup = None
self.info_panel_width = self.get_option("info_panel_width")
self.short_only = self.get_option("short_only")
# these three provide non-blocking console output
self.__console_view = None
self.__writer_thread = None
self.__writer_event = None
self.cases_sort_by = self.get_option("cases_sort_by")
self.cases_max_spark = self.get_option("cases_max_spark")
self.max_case_len = self.get_option("max_case_len")
self.times_max_spark = self.get_option("times_max_spark")
self.sizes_max_spark = self.get_option("sizes_max_spark")
@staticmethod
def get_key():
return __file__
def get_available_options(self):
return [
"info_panel_width", "short_only", "disable_all_colors",
"disable_colors"
]
def configure(self):
if not self.get_option("disable_all_colors"):
self.console_markup = RealConsoleMarkup()
else:
self.console_markup = NoConsoleMarkup()
for color in self.get_option("disable_colors").split(' '):
self.console_markup.__dict__[color] = ''
self.screen = Screen(
self.info_panel_width, self.console_markup,
cases_sort_by=self.cases_sort_by,
cases_max_spark=self.cases_max_spark,
max_case_len=self.max_case_len,
times_max_spark=self.times_max_spark,
sizes_max_spark=self.sizes_max_spark
)
try:
aggregator = self.core.job.aggregator
aggregator.add_result_listener(self)
except KeyError:
self.log.debug("No aggregator for console")
self.screen.block_rows = []
self.screen.info_panel_percent = 100
def __console_writer(self):
while True:
self.__writer_event.wait()
self.__writer_event.clear()
if self.__console_view:
if not self.short_only:
self.log.debug("Writing console view to STDOUT")
sys.stdout.write(self.console_markup.clear)
sys.stdout.write(self.__console_view.decode('utf8'))
sys.stdout.write(self.console_markup.TOTAL_RESET)
def is_test_finished(self):
if not self.__writer_thread:
self.__writer_event = threading.Event()
self.__writer_thread = threading.Thread(
target=self.__console_writer)
self.__writer_thread.daemon = True
self.__writer_thread.start()
try:
self.__console_view = self.screen.render_screen().encode('utf-8')
except Exception as ex:
self.log.warn("Exception inside render: %s", traceback.format_exc())
self.render_exception = ex
self.__console_view = ""
self.__writer_event.set()
return -1
def on_aggregated_data(self, data, stats):
# TODO: use stats data somehow
if self.short_only:
overall = data.get('overall')
quantiles = dict(
zip(
overall['interval_real']['q']['q'],
overall['interval_real']['q']['value']))
info = (
"ts:{ts}\tRPS:{rps}\tavg:{avg_rt:.2f}\t"
"min:{min:.2f}\tmax:{q100:.2f}\tq95:{q95:.2f}\t").format(
ts=data.get('ts'),
rps=overall['interval_real']['len'],
avg_rt=float(overall['interval_real']['total'])
/ overall['interval_real']['len'] / 1000.0,
min=overall['interval_real']['min'] / 1000.0,
q100=quantiles[100] / 1000,
q95=quantiles[95] / 1000)
self.log.info(info)
else:
self.screen.add_second_data(data)
def add_info_widget(self, widget):
''' add right panel widget '''
if not self.screen:
self.log.debug("No screen instance to add widget")
else:
self.screen.add_info_widget(widget)
# ======================================================
class RealConsoleMarkup(object):
'''
Took colors from here: https://www.siafoo.net/snippet/88
'''
WHITE_ON_BLACK = '\033[37;40m'
TOTAL_RESET = '\033[0m'
clear = "\x1b[2J\x1b[H"
new_line = "\n"
YELLOW = '\033[1;33m'
RED = '\033[1;31m'
RED_DARK = '\033[31;3m'
RESET = WHITE_ON_BLACK + '\033[1;m'
CYAN = "\033[1;36m"
GREEN = "\033[1;32m"
WHITE = "\033[1;37m"
MAGENTA = '\033[1;35m'
BG_MAGENTA = '\033[1;45m'
BG_GREEN = '\033[1;42m'
BG_BROWN = '\033[1;43m'
BG_CYAN = '\033[1;46m'
def get_markup_vars(self):
return [
self.YELLOW, self.RED, self.RESET, self.CYAN, self.BG_MAGENTA,
self.WHITE, self.BG_GREEN, self.GREEN, self.BG_BROWN,
self.RED_DARK, self.MAGENTA, self.BG_CYAN
]
def clean_markup(self, orig_str):
''' clean markup from string '''
for val in self.get_markup_vars():
orig_str = orig_str.replace(val, '')
return orig_str
# ======================================================
# FIXME: 3 better way to have it?
class NoConsoleMarkup(RealConsoleMarkup):
''' all colors are disabled '''
WHITE_ON_BLACK = ''
TOTAL_RESET = ''
clear = ""
new_line = "\n"
YELLOW = ''
RED = ''
RED_DARK = ''
RESET = ''
CYAN = ""
GREEN = ""
WHITE = ""
MAGENTA = ''
BG_MAGENTA = ''
BG_GREEN = ''
BG_BROWN = ''
BG_CYAN = ''
# ======================================================
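# Minimal usage sketch (the message text is an assumption, purely illustrative):
#
#   markup = RealConsoleMarkup()
#   colored = markup.RED + "errors: 5" + markup.RESET
#   markup.clean_markup(colored)   # -> "errors: 5"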
|
from datetime import timedelta
import logging
import voluptuous as vol
from vultr import Vultr as VultrAPI
from homeassistant.const import CONF_API_KEY
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
ATTR_AUTO_BACKUPS = "auto_backups"
ATTR_ALLOWED_BANDWIDTH = "allowed_bandwidth_gb"
ATTR_COST_PER_MONTH = "cost_per_month"
ATTR_CURRENT_BANDWIDTH_USED = "current_bandwidth_gb"
ATTR_CREATED_AT = "created_at"
ATTR_DISK = "disk"
ATTR_SUBSCRIPTION_ID = "subid"
ATTR_SUBSCRIPTION_NAME = "label"
ATTR_IPV4_ADDRESS = "ipv4_address"
ATTR_IPV6_ADDRESS = "ipv6_address"
ATTR_MEMORY = "memory"
ATTR_OS = "os"
ATTR_PENDING_CHARGES = "pending_charges"
ATTR_REGION = "region"
ATTR_VCPUS = "vcpus"
CONF_SUBSCRIPTION = "subscription"
DATA_VULTR = "data_vultr"
DOMAIN = "vultr"
NOTIFICATION_ID = "vultr_notification"
NOTIFICATION_TITLE = "Vultr Setup"
VULTR_PLATFORMS = ["binary_sensor", "sensor", "switch"]
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the Vultr component."""
api_key = config[DOMAIN].get(CONF_API_KEY)
vultr = Vultr(api_key)
try:
vultr.update()
except RuntimeError as ex:
_LOGGER.error("Failed to make update API request because: %s", ex)
hass.components.persistent_notification.create(
"Error: {}" "".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
hass.data[DATA_VULTR] = vultr
return True
class Vultr:
"""Handle all communication with the Vultr API."""
def __init__(self, api_key):
"""Initialize the Vultr connection."""
self._api_key = api_key
self.data = None
self.api = VultrAPI(self._api_key)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Use the data from Vultr API."""
self.data = self.api.server_list()
def _force_update(self):
"""Use the data from Vultr API."""
self.data = self.api.server_list()
def halt(self, subscription):
"""Halt a subscription (hard power off)."""
self.api.server_halt(subscription)
self._force_update()
def start(self, subscription):
"""Start a subscription."""
self.api.server_start(subscription)
self._force_update()
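# Minimal usage sketch (the API key and subscription id are assumptions,
# purely illustrative):
#
#   vultr = Vultr("MY_VULTR_API_KEY")
#   vultr.update()        # fetches server_list(), throttled to once a minute
#   vultr.halt("123456")  # hard power off, then refreshes data immediately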
|
from datetime import timedelta
import logging
from blockchain import exchangerates, statistics
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_CURRENCY,
CONF_DISPLAY_OPTIONS,
TIME_MINUTES,
TIME_SECONDS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by blockchain.com"
DEFAULT_CURRENCY = "USD"
ICON = "mdi:currency-btc"
SCAN_INTERVAL = timedelta(minutes=5)
OPTION_TYPES = {
"exchangerate": ["Exchange rate (1 BTC)", None],
"trade_volume_btc": ["Trade volume", "BTC"],
"miners_revenue_usd": ["Miners revenue", "USD"],
"btc_mined": ["Mined", "BTC"],
"trade_volume_usd": ["Trade volume", "USD"],
"difficulty": ["Difficulty", None],
"minutes_between_blocks": ["Time between Blocks", TIME_MINUTES],
"number_of_transactions": ["No. of Transactions", None],
"hash_rate": ["Hash rate", f"PH/{TIME_SECONDS}"],
"timestamp": ["Timestamp", None],
"mined_blocks": ["Mined Blocks", None],
"blocks_size": ["Block size", None],
"total_fees_btc": ["Total fees", "BTC"],
"total_btc_sent": ["Total sent", "BTC"],
"estimated_btc_sent": ["Estimated sent", "BTC"],
"total_btc": ["Total", "BTC"],
"total_blocks": ["Total Blocks", None],
"next_retarget": ["Next retarget", None],
"estimated_transaction_volume_usd": ["Est. Transaction volume", "USD"],
"miners_revenue_btc": ["Miners revenue", "BTC"],
"market_price_usd": ["Market price", "USD"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(OPTION_TYPES)]
),
vol.Optional(CONF_CURRENCY, default=DEFAULT_CURRENCY): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Bitcoin sensors."""
currency = config[CONF_CURRENCY]
if currency not in exchangerates.get_ticker():
_LOGGER.warning("Currency %s is not available. Using USD", currency)
currency = DEFAULT_CURRENCY
data = BitcoinData()
dev = []
for variable in config[CONF_DISPLAY_OPTIONS]:
dev.append(BitcoinSensor(data, variable, currency))
add_entities(dev, True)
class BitcoinSensor(Entity):
"""Representation of a Bitcoin sensor."""
def __init__(self, data, option_type, currency):
"""Initialize the sensor."""
self.data = data
self._name = OPTION_TYPES[option_type][0]
self._unit_of_measurement = OPTION_TYPES[option_type][1]
self._currency = currency
self.type = option_type
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
stats = self.data.stats
ticker = self.data.ticker
if self.type == "exchangerate":
self._state = ticker[self._currency].p15min
self._unit_of_measurement = self._currency
elif self.type == "trade_volume_btc":
self._state = f"{stats.trade_volume_btc:.1f}"
elif self.type == "miners_revenue_usd":
self._state = f"{stats.miners_revenue_usd:.0f}"
elif self.type == "btc_mined":
self._state = str(stats.btc_mined * 0.00000001)
elif self.type == "trade_volume_usd":
self._state = f"{stats.trade_volume_usd:.1f}"
elif self.type == "difficulty":
self._state = f"{stats.difficulty:.0f}"
elif self.type == "minutes_between_blocks":
self._state = f"{stats.minutes_between_blocks:.2f}"
elif self.type == "number_of_transactions":
self._state = str(stats.number_of_transactions)
elif self.type == "hash_rate":
self._state = f"{stats.hash_rate * 0.000001:.1f}"
elif self.type == "timestamp":
self._state = stats.timestamp
elif self.type == "mined_blocks":
self._state = str(stats.mined_blocks)
elif self.type == "blocks_size":
self._state = f"{stats.blocks_size:.1f}"
elif self.type == "total_fees_btc":
self._state = f"{stats.total_fees_btc * 0.00000001:.2f}"
elif self.type == "total_btc_sent":
self._state = f"{stats.total_btc_sent * 0.00000001:.2f}"
elif self.type == "estimated_btc_sent":
self._state = f"{stats.estimated_btc_sent * 0.00000001:.2f}"
elif self.type == "total_btc":
self._state = f"{stats.total_btc * 0.00000001:.2f}"
elif self.type == "total_blocks":
self._state = f"{stats.total_blocks:.0f}"
elif self.type == "next_retarget":
self._state = f"{stats.next_retarget:.2f}"
elif self.type == "estimated_transaction_volume_usd":
self._state = f"{stats.estimated_transaction_volume_usd:.2f}"
elif self.type == "miners_revenue_btc":
self._state = f"{stats.miners_revenue_btc * 0.00000001:.1f}"
elif self.type == "market_price_usd":
self._state = f"{stats.market_price_usd:.2f}"
class BitcoinData:
"""Get the latest data and update the states."""
def __init__(self):
"""Initialize the data object."""
self.stats = None
self.ticker = None
def update(self):
"""Get the latest data from blockchain.com."""
self.stats = statistics.get()
self.ticker = exchangerates.get_ticker()
|
import syslog
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
CONF_FACILITY = "facility"
CONF_OPTION = "option"
CONF_PRIORITY = "priority"
SYSLOG_FACILITY = {
"kernel": "LOG_KERN",
"user": "LOG_USER",
"mail": "LOG_MAIL",
"daemon": "LOG_DAEMON",
"auth": "LOG_KERN",
"LPR": "LOG_LPR",
"news": "LOG_NEWS",
"uucp": "LOG_UUCP",
"cron": "LOG_CRON",
"syslog": "LOG_SYSLOG",
"local0": "LOG_LOCAL0",
"local1": "LOG_LOCAL1",
"local2": "LOG_LOCAL2",
"local3": "LOG_LOCAL3",
"local4": "LOG_LOCAL4",
"local5": "LOG_LOCAL5",
"local6": "LOG_LOCAL6",
"local7": "LOG_LOCAL7",
}
SYSLOG_OPTION = {
"pid": "LOG_PID",
"cons": "LOG_CONS",
"ndelay": "LOG_NDELAY",
"nowait": "LOG_NOWAIT",
"perror": "LOG_PERROR",
}
SYSLOG_PRIORITY = {
5: "LOG_EMERG",
4: "LOG_ALERT",
3: "LOG_CRIT",
2: "LOG_ERR",
1: "LOG_WARNING",
0: "LOG_NOTICE",
-1: "LOG_INFO",
-2: "LOG_DEBUG",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_FACILITY, default="syslog"): vol.In(SYSLOG_FACILITY.keys()),
vol.Optional(CONF_OPTION, default="pid"): vol.In(SYSLOG_OPTION.keys()),
vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()),
}
)
def get_service(hass, config, discovery_info=None):
"""Get the syslog notification service."""
facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)])
option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)])
priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)])
return SyslogNotificationService(facility, option, priority)
class SyslogNotificationService(BaseNotificationService):
"""Implement the syslog notification service."""
def __init__(self, facility, option, priority):
"""Initialize the service."""
self._facility = facility
self._option = option
self._priority = priority
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
syslog.openlog(title, self._option, self._facility)
syslog.syslog(self._priority, message)
syslog.closelog()
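# Illustrative sketch (standalone, assumed values) of how the config strings
# above map onto the stdlib syslog constants, mirroring get_service():
#
#   import syslog
#   facility = getattr(syslog, SYSLOG_FACILITY["local0"])   # syslog.LOG_LOCAL0
#   option = getattr(syslog, SYSLOG_OPTION["pid"])          # syslog.LOG_PID
#   priority = getattr(syslog, SYSLOG_PRIORITY[-1])         # syslog.LOG_INFO
#   syslog.openlog("Home Assistant", option, facility)
#   syslog.syslog(priority, "hello from the sketch")
#   syslog.closelog()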
|