import os
from flask import Flask, request, jsonify, Response
from flask.views import MethodView
from flasgger import Swagger
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Flasgger Parsed Method/Function View Example',
'doc_dir': './examples/docs/'
}
swag = Swagger(
app,
template_file=os.path.join(
os.getcwd(), 'examples', 'docs', 'template.yml'),
parse=True)
class ItemsView(MethodView):
"""
Flasgger will try to load "./examples/docs/items/{method}.yml" as the
swagger document.
"""
def get(self):
"""
If "parse" is set to True on the Flasgger app, the parsed and
validated data is stored in "flask.request.parsed_data".
Within "parsed_data", each location's variables are stored under a
separate key; there is a mapping between RequestParser locations and
the swagger doc's "in" parameter, e.g. 'query' -> 'args'. See
"Swagger.SCHEMA_LOCATIONS" for the full list of locations.
"""
return jsonify(
[{'name': 'test', 'id': 1,
'type': request.parsed_data['args']['type']},
{'name': 'test2', 'id': 2,
'type': request.parsed_data['args']['type']}])
def post(self):
return jsonify(
{'name': request.parsed_data['json']['name'], 'id': 3,
'type': request.parsed_data['json']['type']})
class ItemMethodView(MethodView):
def get(self, id):
return jsonify({'name': 'test', 'id': id, 'type': 'NORMAL'})
def put(self, id):
return jsonify(
{'name': request.parsed_data['json']['name'],
'id': 3, 'type': 'NORMAL'})
class EmptyView(MethodView):
"""In this view, we do not provide api doc"""
def get(self):
return Response(status=200)
@app.route('/api/users/<group>/', methods=['POST'])
def users(group):
"""Create one user with nested json body.
---
tags:
- FunctionView
parameters:
- name: group
in: path
type: string
required: true
- name: User
in: body
schema:
type: object
required:
- data
properties:
data:
type: object
required:
- name
- age
properties:
age:
type: integer
name:
type: string
tags:
type: array
minItems: 1
items:
type: integer
definitions:
User:
type: object
properties:
id:
type: integer
name:
type: string
group:
type: string
age:
type: integer
responses:
200:
description: ok
schema:
$ref: '#/definitions/User'
examples:
{'id': 1, 'name': 'test', 'group': 1, 'age': 20}
400:
description: Missing data
"""
return jsonify(
{'id': 1,
'name': request.parsed_data['json']['data']['name'],
'age': request.parsed_data['json']['data']['age'],
'group': request.parsed_data['path']['group']})
@app.route('/api/user/')
def user():
"""Flasgger will try to load './examples/docs/user.yml' as swagger doc
"""
return jsonify({'id': request.parsed_data['args']['id'], 'name': 'test'})
app.add_url_rule(
'/api/items/', view_func=ItemsView.as_view(name='items'),
methods=['GET', 'POST'])
app.add_url_rule(
'/api/items/<int:id>/', view_func=ItemMethodView.as_view(name='item'),
methods=['GET', 'PUT'])
app.add_url_rule(
'/api/empty/', view_func=EmptyView.as_view(name='empty'),
methods=['GET'])
def test_swag(client, specs_data):
"""
This test runs automatically in Travis CI.
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
res = client.get('/api/items/')
assert res.status_code == 400
res = client.get('/api/items/?type=myLengthIsTooLong')
assert res.status_code == 400
res = client.get('/api/items/?type=NORMAL')
assert res.status_code == 200
res = client.get('/api/items/1/')
assert res.status_code == 200
res = client.put('/api/items/1/')
assert res.status_code == 400
res = client.put('/api/items/1/', json={'name': 'test'})
assert res.status_code == 200
res = client.post('/api/users/1/', json={'name': 'miss data'})
assert res.status_code == 400
res = client.post(
'/api/users/1/',
json={'data': {'name': 'test', 'age': 20}, 'tags': ['error_tag']})
assert res.status_code == 400
res = client.post(
'/api/users/1/',
json={'data': {'name': 'test', 'age': 20}, 'tags': [1, 2]})
assert res.status_code == 200
if __name__ == '__main__':
app.run(debug=True)
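# A hedged sketch (not taken from this repository) of what a per-method doc
# such as "./examples/docs/items/get.yml" might look like so that
# Swagger(parse=True) can validate the "type" query parameter used above;
# the real example files may define a different schema.
#
# tags:
#   - ItemsView
# parameters:
#   - name: type
#     in: query
#     type: string
#     enum: [NORMAL, SPECIAL]
#     required: true
# responses:
#   200:
#     description: A list of items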
|
from itertools import count
from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID, ATTR_WAKEUP
from homeassistant.core import callback
from homeassistant.helpers.device_registry import async_get_registry as get_dev_reg
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import async_get_registry
from .const import (
ATTR_BASIC_LEVEL,
ATTR_NODE_ID,
ATTR_SCENE_DATA,
ATTR_SCENE_ID,
COMMAND_CLASS_CENTRAL_SCENE,
COMMAND_CLASS_VERSION,
COMMAND_CLASS_WAKE_UP,
DOMAIN,
EVENT_NODE_EVENT,
EVENT_SCENE_ACTIVATED,
)
from .util import is_node_parsed, node_device_id_and_name, node_name
ATTR_QUERY_STAGE = "query_stage"
ATTR_AWAKE = "is_awake"
ATTR_READY = "is_ready"
ATTR_FAILED = "is_failed"
ATTR_PRODUCT_NAME = "product_name"
ATTR_MANUFACTURER_NAME = "manufacturer_name"
ATTR_NODE_NAME = "node_name"
ATTR_APPLICATION_VERSION = "application_version"
STAGE_COMPLETE = "Complete"
_REQUIRED_ATTRIBUTES = [
ATTR_QUERY_STAGE,
ATTR_AWAKE,
ATTR_READY,
ATTR_FAILED,
"is_info_received",
"max_baud_rate",
"is_zwave_plus",
]
_OPTIONAL_ATTRIBUTES = ["capabilities", "neighbors", "location"]
_COMM_ATTRIBUTES = [
"sentCnt",
"sentFailed",
"retries",
"receivedCnt",
"receivedDups",
"receivedUnsolicited",
"sentTS",
"receivedTS",
"lastRequestRTT",
"averageRequestRTT",
"lastResponseRTT",
"averageResponseRTT",
]
ATTRIBUTES = _REQUIRED_ATTRIBUTES + _OPTIONAL_ATTRIBUTES
class ZWaveBaseEntity(Entity):
"""Base class for Z-Wave Node and Value entities."""
def __init__(self):
"""Initialize the base Z-Wave class."""
self._update_scheduled = False
def maybe_schedule_update(self):
"""Maybe schedule state update.
If value changed after device was created but before setup_platform
was called - skip updating state.
"""
if self.hass and not self._update_scheduled:
self.hass.add_job(self._schedule_update)
@callback
def _schedule_update(self):
"""Schedule delayed update."""
if self._update_scheduled:
return
@callback
def do_update():
"""Really update."""
self.async_write_ha_state()
self._update_scheduled = False
self._update_scheduled = True
self.hass.loop.call_later(0.1, do_update)
def try_remove_and_add(self):
"""Remove this entity and add it back."""
async def _async_remove_and_add():
await self.async_remove()
self.entity_id = None
await self.platform.async_add_entities([self])
if self.hass and self.platform:
self.hass.add_job(_async_remove_and_add)
async def node_removed(self):
"""Call when a node is removed from the Z-Wave network."""
await self.async_remove()
registry = await async_get_registry(self.hass)
if self.entity_id not in registry.entities:
return
registry.async_remove(self.entity_id)
class ZWaveNodeEntity(ZWaveBaseEntity):
"""Representation of a Z-Wave node."""
def __init__(self, node, network):
"""Initialize node."""
# pylint: disable=import-error
super().__init__()
from openzwave.network import ZWaveNetwork
from pydispatch import dispatcher
self._network = network
self.node = node
self.node_id = self.node.node_id
self._name = node_name(self.node)
self._product_name = node.product_name
self._manufacturer_name = node.manufacturer_name
self._unique_id = self._compute_unique_id()
self._application_version = None
self._attributes = {}
self.wakeup_interval = None
self.location = None
self.battery_level = None
dispatcher.connect(
self.network_node_value_added, ZWaveNetwork.SIGNAL_VALUE_ADDED
)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_VALUE_CHANGED)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_NODE)
dispatcher.connect(self.network_node_changed, ZWaveNetwork.SIGNAL_NOTIFICATION)
dispatcher.connect(self.network_node_event, ZWaveNetwork.SIGNAL_NODE_EVENT)
dispatcher.connect(
self.network_scene_activated, ZWaveNetwork.SIGNAL_SCENE_EVENT
)
@property
def unique_id(self):
"""Return unique ID of Z-wave node."""
return self._unique_id
@property
def device_info(self):
"""Return device information."""
identifier, name = node_device_id_and_name(self.node)
info = {
"identifiers": {identifier},
"manufacturer": self.node.manufacturer_name,
"model": self.node.product_name,
"name": name,
}
if self.node_id > 1:
info["via_device"] = (DOMAIN, 1)
return info
def maybe_update_application_version(self, value):
"""Update application version if value is a Command Class Version, Application Value."""
if (
value
and value.command_class == COMMAND_CLASS_VERSION
and value.label == "Application Version"
):
self._application_version = value.data
def network_node_value_added(self, node=None, value=None, args=None):
"""Handle a added value to a none on the network."""
if node and node.node_id != self.node_id:
return
if args is not None and "nodeId" in args and args["nodeId"] != self.node_id:
return
self.maybe_update_application_version(value)
def network_node_changed(self, node=None, value=None, args=None):
"""Handle a changed node on the network."""
if node and node.node_id != self.node_id:
return
if args is not None and "nodeId" in args and args["nodeId"] != self.node_id:
return
# Process central scene activation
if value is not None and value.command_class == COMMAND_CLASS_CENTRAL_SCENE:
self.central_scene_activated(value.index, value.data)
self.maybe_update_application_version(value)
self.node_changed()
def get_node_statistics(self):
"""Retrieve statistics from the node."""
return self._network.manager.getNodeStatistics(
self._network.home_id, self.node_id
)
def node_changed(self):
"""Update node properties."""
attributes = {}
stats = self.get_node_statistics()
for attr in ATTRIBUTES:
value = getattr(self.node, attr)
if attr in _REQUIRED_ATTRIBUTES or value:
attributes[attr] = value
for attr in _COMM_ATTRIBUTES:
attributes[attr] = stats[attr]
if self.node.can_wake_up():
for value in self.node.get_values(COMMAND_CLASS_WAKE_UP).values():
if value.index != 0:
continue
self.wakeup_interval = value.data
break
else:
self.wakeup_interval = None
self.battery_level = self.node.get_battery_level()
self._product_name = self.node.product_name
self._manufacturer_name = self.node.manufacturer_name
self._name = node_name(self.node)
self._attributes = attributes
if not self._unique_id:
self._unique_id = self._compute_unique_id()
if self._unique_id:
# Node info parsed. Remove and re-add
self.try_remove_and_add()
self.maybe_schedule_update()
async def node_renamed(self, update_ids=False):
"""Rename the node and update any IDs."""
identifier, self._name = node_device_id_and_name(self.node)
# Set the name in the device registry. If the name has been customised
# there, the customisation is kept and will stick.
dev_reg = await get_dev_reg(self.hass)
device = dev_reg.async_get_device(identifiers={identifier}, connections=set())
dev_reg.async_update_device(device.id, name=self._name)
# update sub-devices too
for i in count(2):
identifier, new_name = node_device_id_and_name(self.node, i)
device = dev_reg.async_get_device(
identifiers={identifier}, connections=set()
)
if not device:
break
dev_reg.async_update_device(device.id, name=new_name)
# Update entity ID.
if update_ids:
ent_reg = await async_get_registry(self.hass)
new_entity_id = ent_reg.async_generate_entity_id(
DOMAIN, self._name, self.platform.entities.keys() - {self.entity_id}
)
if new_entity_id != self.entity_id:
# Don't change the name attribute; it will be None unless
# customised, and if it has been customised, keep the
# customisation.
ent_reg.async_update_entity(self.entity_id, new_entity_id=new_entity_id)
return
# Otherwise (neither of the branches above returned), write the updated state directly.
self.async_write_ha_state()
def network_node_event(self, node, value):
"""Handle a node activated event on the network."""
if node.node_id == self.node.node_id:
self.node_event(value)
def node_event(self, value):
"""Handle a node activated event for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_NODE_EVENT,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node.node_id,
ATTR_BASIC_LEVEL: value,
},
)
def network_scene_activated(self, node, scene_id):
"""Handle a scene activated event on the network."""
if node.node_id == self.node.node_id:
self.scene_activated(scene_id)
def scene_activated(self, scene_id):
"""Handle an activated scene for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_SCENE_ACTIVATED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node.node_id,
ATTR_SCENE_ID: scene_id,
},
)
def central_scene_activated(self, scene_id, scene_data):
"""Handle an activated central scene for this node."""
if self.hass is None:
return
self.hass.bus.fire(
EVENT_SCENE_ACTIVATED,
{
ATTR_ENTITY_ID: self.entity_id,
ATTR_NODE_ID: self.node_id,
ATTR_SCENE_ID: scene_id,
ATTR_SCENE_DATA: scene_data,
},
)
@property
def state(self):
"""Return the state."""
if ATTR_READY not in self._attributes:
return None
if self._attributes[ATTR_FAILED]:
return "dead"
if self._attributes[ATTR_QUERY_STAGE] != STAGE_COMPLETE:
return "initializing"
if not self._attributes[ATTR_AWAKE]:
return "sleeping"
if self._attributes[ATTR_READY]:
return "ready"
return None
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attrs = {
ATTR_NODE_ID: self.node_id,
ATTR_NODE_NAME: self._name,
ATTR_MANUFACTURER_NAME: self._manufacturer_name,
ATTR_PRODUCT_NAME: self._product_name,
}
attrs.update(self._attributes)
if self.battery_level is not None:
attrs[ATTR_BATTERY_LEVEL] = self.battery_level
if self.wakeup_interval is not None:
attrs[ATTR_WAKEUP] = self.wakeup_interval
if self._application_version is not None:
attrs[ATTR_APPLICATION_VERSION] = self._application_version
return attrs
def _compute_unique_id(self):
if is_node_parsed(self.node) or self.node.is_ready:
return f"node-{self.node_id}"
return None
|
import logging
from typing import Any, Dict, Optional
from bsblan import BSBLan, BSBLanError, Info
import voluptuous as vol
from homeassistant.config_entries import CONN_CLASS_LOCAL_POLL, ConfigFlow
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import ( # pylint:disable=unused-import
CONF_DEVICE_IDENT,
CONF_PASSKEY,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
class BSBLanFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a BSBLan config flow."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_LOCAL_POLL
async def async_step_user(
self, user_input: Optional[ConfigType] = None
) -> Dict[str, Any]:
"""Handle a flow initiated by the user."""
if user_input is None:
return self._show_setup_form()
try:
info = await self._get_bsblan_info(
host=user_input[CONF_HOST],
port=user_input[CONF_PORT],
passkey=user_input.get(CONF_PASSKEY),
)
except BSBLanError:
return self._show_setup_form({"base": "cannot_connect"})
# Check if already configured
await self.async_set_unique_id(info.device_identification)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=info.device_identification,
data={
CONF_HOST: user_input[CONF_HOST],
CONF_PORT: user_input[CONF_PORT],
CONF_PASSKEY: user_input.get(CONF_PASSKEY),
CONF_DEVICE_IDENT: info.device_identification,
},
)
def _show_setup_form(self, errors: Optional[Dict] = None) -> Dict[str, Any]:
"""Show the setup form to the user."""
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_PORT, default=80): int,
vol.Optional(CONF_PASSKEY): str,
}
),
errors=errors or {},
)
async def _get_bsblan_info(
self, host: str, passkey: Optional[str], port: int
) -> Info:
"""Get device information from an BSBLan device."""
session = async_get_clientsession(self.hass)
_LOGGER.debug("request bsblan.info:")
bsblan = BSBLan(host, passkey=passkey, port=port, session=session)
return await bsblan.info()
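# A hedged sketch (host, port and passkey below are placeholders) of what
# _get_bsblan_info does with the python-bsblan client outside of the config
# flow, using only the calls already shown above:
#
# import aiohttp
# from bsblan import BSBLan
#
# async def fetch_info():
#     async with aiohttp.ClientSession() as session:
#         bsblan = BSBLan("10.0.0.2", passkey=None, port=80, session=session)
#         info = await bsblan.info()
#         return info.device_identification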
|
from datetime import timedelta
from pyripple import get_balance
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_ADDRESS, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTRIBUTION = "Data provided by ripple.com"
DEFAULT_NAME = "Ripple Balance"
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ripple.com sensors."""
address = config.get(CONF_ADDRESS)
name = config.get(CONF_NAME)
add_entities([RippleSensor(name, address)], True)
class RippleSensor(Entity):
"""Representation of an Ripple.com sensor."""
def __init__(self, name, address):
"""Initialize the sensor."""
self._name = name
self.address = address
self._state = None
self._unit_of_measurement = "XRP"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest state of the sensor."""
balance = get_balance(self.address)
if balance is not None:
self._state = balance
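# A minimal configuration.yaml sketch for this platform (the address is a
# placeholder, not a real XRP account):
#
# sensor:
#   - platform: ripple
#     address: rEXAMPLEADDRESSxxxxxxxxxxxxxxxxxxx
#     name: Ripple Balance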
|
import pytest
from lemur.auth.ldap import * # noqa
from mock import patch, MagicMock
class LdapPrincipalTester(LdapPrincipal):
def __init__(self, args):
super().__init__(args)
self.ldap_server = "ldap://localhost"
def bind_test(self):
groups = [
(
"user",
{
"memberOf": [
"CN=Lemur Access,OU=Groups,DC=example,DC=com".encode("utf-8"),
"CN=Pen Pushers,OU=Groups,DC=example,DC=com".encode("utf-8"),
]
},
)
]
self.ldap_client = MagicMock()
self.ldap_client.search_s.return_value = groups
self._bind()
def authorize_test_groups_to_roles_admin(self):
self.ldap_groups = "".join(
[
"CN=Pen Pushers,OU=Groups,DC=example,DC=com",
"CN=Lemur Admins,OU=Groups,DC=example,DC=com",
"CN=Lemur Read Only,OU=Groups,DC=example,DC=com",
]
)
self.ldap_required_group = None
self.ldap_groups_to_roles = {
"Lemur Admins": "admin",
"Lemur Read Only": "read-only",
}
return self._authorize()
def authorize_test_required_group(self, group):
self.ldap_groups = "".join(
[
"CN=Lemur Access,OU=Groups,DC=example,DC=com",
"CN=Pen Pushers,OU=Groups,DC=example,DC=com",
]
)
self.ldap_required_group = group
return self._authorize()
@pytest.fixture()
def principal(session):
args = {"username": "user", "password": "p4ssw0rd"}
yield LdapPrincipalTester(args)
class TestLdapPrincipal:
@patch("ldap.initialize")
def test_bind(self, app, principal):
self.test_ldap_user = principal
self.test_ldap_user.bind_test()
group = "Pen Pushers"
assert group in self.test_ldap_user.ldap_groups
assert self.test_ldap_user.ldap_principal == "[email protected]"
def test_authorize_groups_to_roles_admin(self, app, principal):
self.test_ldap_user = principal
roles = self.test_ldap_user.authorize_test_groups_to_roles_admin()
assert any(x.name == "admin" for x in roles)
def test_authorize_required_group_missing(self, app, principal):
self.test_ldap_user = principal
roles = self.test_ldap_user.authorize_test_required_group("Not Allowed")
assert not roles
def test_authorize_required_group_access(self, session, principal):
self.test_ldap_user = principal
roles = self.test_ldap_user.authorize_test_required_group("Lemur Access")
assert len(roles) >= 1
assert any(x.name == "[email protected]" for x in roles)
|
import logging
from functools import partial
from pymongo.errors import OperationFailure
from .versioned_item import VersionedItem, ChangedItem
from .._util import are_equals
from ..decorators import _get_host
from ..exceptions import NoDataFoundException, ConcurrentModificationException
logger = logging.getLogger(__name__)
class DataChange(object):
"""
Object representing incoming data change
"""
def __init__(self, date_range, new_data):
self.date_range = date_range
self.new_data = new_data
class ArcticTransaction(object):
"""Use this context manager if you want to modify data in a version store while ensuring that no other writes
interfere with your own.
To use, base your modifications on the `base_ts` context manager field, then pass your newly created timeseries
to the `write` method of the context manager to output the changes. The changes will only be written when the
block exits.
NB changes may be audited.
Example:
-------
with ArcticTransaction(Arctic('hostname')['some_library'], 'symbol', 'user', 'log message') as mt:
ts_version_info = mt.base_ts
# do some processing, come up with a new ts for 'symbol' called new_symbol_ts, presumably based on ts_version_info.data
mt.write('symbol', new_symbol_ts, metadata=new_symbol_metadata)
The block will raise a ConcurrentModificationException if an inconsistency has been detected. You will have to
retry the whole block should that happen, as the assumption is that you need to base your changes on a different
starting timeseries.
"""
def __init__(self, version_store, symbol, user, log, modify_timeseries=None, audit=True,
*args, **kwargs):
"""
Parameters
----------
version_store: `VersionStore` Arctic Library
Needs to support write, read, list_versions and _delete_version; this is the underlying store that we'll
be securing for writes
symbol: `str`
symbol name for the item that's being modified
user: `str`
user making the change
log: `str`
Log message for the change
modify_timeseries:
if given, the context manager will check the assumption that this is the latest data available for symbol
in version_store. Should this not be the case, a ConcurrentModificationException will be raised. Use this
if you're interacting with code that has already read in the data and you cannot, for some reason, refactor
the read-write operation to be contained within this context manager
audit: `bool`
should we 'audit' the transaction. An audited write transaction is equivalent to a snapshot
before and after the data change - i.e. we won't prune versions of the data involved in an
audited transaction. This can be used to ensure that the history of certain data changes is
preserved indefinitely.
all other args:
Will be passed into the initial read
"""
self._version_store = version_store
self._symbol = symbol
self._user = user
self._log = log
self._audit = audit
logger.info("MT: {}@{}: [{}] {}: {}".format(_get_host(version_store).get('l'),
_get_host(version_store).get('mhost'),
user, log, symbol))
try:
self.base_ts = self._version_store.read(self._symbol, *args, **kwargs)
except NoDataFoundException:
versions = [x['version'] for x in self._version_store.list_versions(self._symbol, latest_only=True)]
versions.append(0)
self.base_ts = VersionedItem(symbol=self._symbol, library=None,
version=versions[0], metadata=None, data=None, host=None)
except OperationFailure:
# TODO: Current errors in mongo "Incorrect Number of Segments Returned"
# This workaround should be removed once underlying problem is resolved.
self.base_ts = self._version_store.read_metadata(symbol=self._symbol)
if modify_timeseries is not None and not are_equals(modify_timeseries, self.base_ts.data):
raise ConcurrentModificationException()
self._do_write = False
def change(self, symbol, data_changes, **kwargs):
"""
Change, and audit 'data' under the specified 'symbol' name to this library.
Parameters
----------
symbol: `str`
symbol name for the item
data_changes: `list DataChange`
list of DataChange objects
"""
pass
def write(self, symbol, data, prune_previous_version=True, metadata=None, **kwargs):
"""
Records a write request to be actioned on context exit. Takes exactly the same parameters as the regular
library write call.
"""
if data is not None:
# We only write data if existing data is None or the Timeseries data has changed or metadata has changed
if self.base_ts.data is None or not are_equals(data, self.base_ts.data) or metadata != self.base_ts.metadata:
self._do_write = True
self._write = partial(self._version_store.write, symbol, data, prune_previous_version=prune_previous_version,
metadata=metadata, **kwargs)
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
if self._do_write:
written_ver = self._write()
versions = [x['version'] for x in self._version_store.list_versions(self._symbol)]
versions.append(0)
versions.reverse()
base_offset = versions.index(self.base_ts.version)
new_offset = versions.index(written_ver.version)
if len(versions[base_offset: new_offset + 1]) != 2:
self._version_store._delete_version(self._symbol, written_ver.version)
raise ConcurrentModificationException("Inconsistent Versions: {}: {}->{}".format(
self._symbol, self.base_ts.version, written_ver.version))
changed = ChangedItem(self._symbol, self.base_ts, written_ver, None)
if self._audit:
self._version_store._write_audit(self._user, self._log, changed)
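# A hedged usage sketch (library handle and symbol are illustrative): passing
# modify_timeseries asserts that the data the edits were based on is still the
# latest version in the store; if not, ConcurrentModificationException is
# raised in __init__ before any write happens.
#
# existing = library.read('SYMBOL').data
# new_ts = transform(existing)  # hypothetical user-defined transformation
# with ArcticTransaction(library, 'SYMBOL', 'user', 'log message',
#                        modify_timeseries=existing) as txn:
#     txn.write('SYMBOL', new_ts)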
|
from datetime import timedelta
import logging
from typing import Optional
from georss_qld_bushfire_alert_client import QldBushfireAlertFeedManager
import voluptuous as vol
from homeassistant.components.geo_location import PLATFORM_SCHEMA, GeolocationEvent
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_START,
LENGTH_KILOMETERS,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
ATTR_CATEGORY = "category"
ATTR_EXTERNAL_ID = "external_id"
ATTR_PUBLICATION_DATE = "publication_date"
ATTR_STATUS = "status"
ATTR_UPDATED_DATE = "updated_date"
CONF_CATEGORIES = "categories"
DEFAULT_RADIUS_IN_KM = 20.0
SCAN_INTERVAL = timedelta(minutes=5)
SIGNAL_DELETE_ENTITY = "qld_bushfire_delete_{}"
SIGNAL_UPDATE_ENTITY = "qld_bushfire_update_{}"
SOURCE = "qld_bushfire"
VALID_CATEGORIES = [
"Emergency Warning",
"Watch and Act",
"Advice",
"Notification",
"Information",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
vol.Optional(CONF_CATEGORIES, default=[]): vol.All(
cv.ensure_list, [vol.In(VALID_CATEGORIES)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Queensland Bushfire Alert Feed platform."""
scan_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
coordinates = (
config.get(CONF_LATITUDE, hass.config.latitude),
config.get(CONF_LONGITUDE, hass.config.longitude),
)
radius_in_km = config[CONF_RADIUS]
categories = config[CONF_CATEGORIES]
# Initialize the entity manager.
feed = QldBushfireFeedEntityManager(
hass, add_entities, scan_interval, coordinates, radius_in_km, categories
)
def start_feed_manager(event):
"""Start feed manager."""
feed.startup()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
class QldBushfireFeedEntityManager:
"""Feed Entity Manager for Qld Bushfire Alert GeoRSS feed."""
def __init__(
self, hass, add_entities, scan_interval, coordinates, radius_in_km, categories
):
"""Initialize the Feed Entity Manager."""
self._hass = hass
self._feed_manager = QldBushfireAlertFeedManager(
self._generate_entity,
self._update_entity,
self._remove_entity,
coordinates,
filter_radius=radius_in_km,
filter_categories=categories,
)
self._add_entities = add_entities
self._scan_interval = scan_interval
def startup(self):
"""Start up this manager."""
self._feed_manager.update()
self._init_regular_updates()
def _init_regular_updates(self):
"""Schedule regular updates at the specified interval."""
track_time_interval(
self._hass, lambda now: self._feed_manager.update(), self._scan_interval
)
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
def _generate_entity(self, external_id):
"""Generate new entity."""
new_entity = QldBushfireLocationEvent(self, external_id)
# Add new entities to HA.
self._add_entities([new_entity], True)
def _update_entity(self, external_id):
"""Update entity."""
dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY.format(external_id))
def _remove_entity(self, external_id):
"""Remove entity."""
dispatcher_send(self._hass, SIGNAL_DELETE_ENTITY.format(external_id))
class QldBushfireLocationEvent(GeolocationEvent):
"""This represents an external event with Qld Bushfire feed data."""
def __init__(self, feed_manager, external_id):
"""Initialize entity with data from feed entry."""
self._feed_manager = feed_manager
self._external_id = external_id
self._name = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._category = None
self._publication_date = None
self._updated_date = None
self._status = None
self._remove_signal_delete = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_delete = async_dispatcher_connect(
self.hass,
SIGNAL_DELETE_ENTITY.format(self._external_id),
self._delete_callback,
)
self._remove_signal_update = async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_ENTITY.format(self._external_id),
self._update_callback,
)
@callback
def _delete_callback(self):
"""Remove this entity."""
self._remove_signal_delete()
self._remove_signal_update()
self.hass.async_create_task(self.async_remove())
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for Qld Bushfire Alert feed location events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
if feed_entry:
self._update_from_feed(feed_entry)
def _update_from_feed(self, feed_entry):
"""Update the internal state from the provided feed entry."""
self._name = feed_entry.title
self._distance = feed_entry.distance_to_home
self._latitude = feed_entry.coordinates[0]
self._longitude = feed_entry.coordinates[1]
self._attribution = feed_entry.attribution
self._category = feed_entry.category
self._publication_date = feed_entry.published
self._updated_date = feed_entry.updated
self._status = feed_entry.status
@property
def icon(self):
"""Return the icon to use in the frontend."""
return "mdi:fire"
@property
def source(self) -> str:
"""Return source value of this external event."""
return SOURCE
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return self._name
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return LENGTH_KILOMETERS
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_CATEGORY, self._category),
(ATTR_ATTRIBUTION, self._attribution),
(ATTR_PUBLICATION_DATE, self._publication_date),
(ATTR_UPDATED_DATE, self._updated_date),
(ATTR_STATUS, self._status),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
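# A minimal configuration.yaml sketch for this platform, matching the
# PLATFORM_SCHEMA above (the radius and categories shown are just examples):
#
# geo_location:
#   - platform: qld_bushfire
#     radius: 50
#     categories:
#       - "Emergency Warning"
#       - "Watch and Act"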
|
from collections import OrderedDict
import fnmatch
import logging
import os
import sys
from typing import Dict, Iterator, List, TypeVar, Union, overload
import yaml
from homeassistant.exceptions import HomeAssistantError
from .const import _SECRET_NAMESPACE, SECRET_YAML
from .objects import NodeListClass, NodeStrClass
try:
import keyring
except ImportError:
keyring = None
try:
import credstash
except ImportError:
credstash = None
# mypy: allow-untyped-calls, no-warn-return-any
JSON_TYPE = Union[List, Dict, str] # pylint: disable=invalid-name
DICT_T = TypeVar("DICT_T", bound=Dict) # pylint: disable=invalid-name
_LOGGER = logging.getLogger(__name__)
__SECRET_CACHE: Dict[str, JSON_TYPE] = {}
def clear_secret_cache() -> None:
"""Clear the secret cache.
Async friendly.
"""
__SECRET_CACHE.clear()
class SafeLineLoader(yaml.SafeLoader):
"""Loader class that keeps track of line numbers."""
def compose_node(self, parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node:
"""Annotate a node with the first line it was seen."""
last_line: int = self.line
node: yaml.nodes.Node = super().compose_node(parent, index)
node.__line__ = last_line + 1 # type: ignore
return node
def load_yaml(fname: str) -> JSON_TYPE:
"""Load a YAML file."""
try:
with open(fname, encoding="utf-8") as conf_file:
# If configuration file is empty YAML returns None
# We convert that to an empty dict
return yaml.load(conf_file, Loader=SafeLineLoader) or OrderedDict()
except yaml.YAMLError as exc:
_LOGGER.error(str(exc))
raise HomeAssistantError(exc) from exc
except UnicodeDecodeError as exc:
_LOGGER.error("Unable to read file %s: %s", fname, exc)
raise HomeAssistantError(exc) from exc
@overload
def _add_reference(
obj: Union[list, NodeListClass], loader: yaml.SafeLoader, node: yaml.nodes.Node
) -> NodeListClass:
...
@overload
def _add_reference(
obj: Union[str, NodeStrClass], loader: yaml.SafeLoader, node: yaml.nodes.Node
) -> NodeStrClass:
...
@overload
def _add_reference(
obj: DICT_T, loader: yaml.SafeLoader, node: yaml.nodes.Node
) -> DICT_T:
...
def _add_reference(obj, loader: SafeLineLoader, node: yaml.nodes.Node): # type: ignore
"""Add file reference information to an object."""
if isinstance(obj, list):
obj = NodeListClass(obj)
if isinstance(obj, str):
obj = NodeStrClass(obj)
setattr(obj, "__config_file__", loader.name)
setattr(obj, "__line__", node.start_mark.line)
return obj
def _include_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE:
"""Load another YAML file and embeds it using the !include tag.
Example:
device_tracker: !include device_tracker.yaml
"""
fname = os.path.join(os.path.dirname(loader.name), node.value)
try:
return _add_reference(load_yaml(fname), loader, node)
except FileNotFoundError as exc:
raise HomeAssistantError(
f"{node.start_mark}: Unable to read file {fname}."
) from exc
def _is_file_valid(name: str) -> bool:
"""Decide if a file is valid."""
return not name.startswith(".")
def _find_files(directory: str, pattern: str) -> Iterator[str]:
"""Recursively load files in a directory."""
for root, dirs, files in os.walk(directory, topdown=True):
dirs[:] = [d for d in dirs if _is_file_valid(d)]
for basename in sorted(files):
if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern):
filename = os.path.join(root, basename)
yield filename
def _include_dir_named_yaml(
loader: SafeLineLoader, node: yaml.nodes.Node
) -> OrderedDict:
"""Load multiple files from directory as a dictionary."""
mapping: OrderedDict = OrderedDict()
loc = os.path.join(os.path.dirname(loader.name), node.value)
for fname in _find_files(loc, "*.yaml"):
filename = os.path.splitext(os.path.basename(fname))[0]
if os.path.basename(fname) == SECRET_YAML:
continue
mapping[filename] = load_yaml(fname)
return _add_reference(mapping, loader, node)
def _include_dir_merge_named_yaml(
loader: SafeLineLoader, node: yaml.nodes.Node
) -> OrderedDict:
"""Load multiple files from directory as a merged dictionary."""
mapping: OrderedDict = OrderedDict()
loc = os.path.join(os.path.dirname(loader.name), node.value)
for fname in _find_files(loc, "*.yaml"):
if os.path.basename(fname) == SECRET_YAML:
continue
loaded_yaml = load_yaml(fname)
if isinstance(loaded_yaml, dict):
mapping.update(loaded_yaml)
return _add_reference(mapping, loader, node)
def _include_dir_list_yaml(
loader: SafeLineLoader, node: yaml.nodes.Node
) -> List[JSON_TYPE]:
"""Load multiple files from directory as a list."""
loc = os.path.join(os.path.dirname(loader.name), node.value)
return [
load_yaml(f)
for f in _find_files(loc, "*.yaml")
if os.path.basename(f) != SECRET_YAML
]
def _include_dir_merge_list_yaml(
loader: SafeLineLoader, node: yaml.nodes.Node
) -> JSON_TYPE:
"""Load multiple files from directory as a merged list."""
loc: str = os.path.join(os.path.dirname(loader.name), node.value)
merged_list: List[JSON_TYPE] = []
for fname in _find_files(loc, "*.yaml"):
if os.path.basename(fname) == SECRET_YAML:
continue
loaded_yaml = load_yaml(fname)
if isinstance(loaded_yaml, list):
merged_list.extend(loaded_yaml)
return _add_reference(merged_list, loader, node)
def _ordered_dict(loader: SafeLineLoader, node: yaml.nodes.MappingNode) -> OrderedDict:
"""Load YAML mappings into an ordered dictionary to preserve key order."""
loader.flatten_mapping(node)
nodes = loader.construct_pairs(node)
seen: Dict = {}
for (key, _), (child_node, _) in zip(nodes, node.value):
line = child_node.start_mark.line
try:
hash(key)
except TypeError as exc:
fname = getattr(loader.stream, "name", "")
raise yaml.MarkedYAMLError(
context=f'invalid key: "{key}"',
context_mark=yaml.Mark(fname, 0, line, -1, None, None),
) from exc
if key in seen:
fname = getattr(loader.stream, "name", "")
_LOGGER.warning(
'YAML file %s contains duplicate key "%s". Check lines %d and %d',
fname,
key,
seen[key],
line,
)
seen[key] = line
return _add_reference(OrderedDict(nodes), loader, node)
def _construct_seq(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE:
"""Add line number and file name to Load YAML sequence."""
(obj,) = loader.construct_yaml_seq(node)
return _add_reference(obj, loader, node)
def _env_var_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> str:
"""Load environment variables and embed it into the configuration YAML."""
args = node.value.split()
# Check for a default value
if len(args) > 1:
return os.getenv(args[0], " ".join(args[1:]))
if args[0] in os.environ:
return os.environ[args[0]]
_LOGGER.error("Environment variable %s not defined", node.value)
raise HomeAssistantError(node.value)
def _load_secret_yaml(secret_path: str) -> JSON_TYPE:
"""Load the secrets yaml from path."""
secret_path = os.path.join(secret_path, SECRET_YAML)
if secret_path in __SECRET_CACHE:
return __SECRET_CACHE[secret_path]
_LOGGER.debug("Loading %s", secret_path)
try:
secrets = load_yaml(secret_path)
if not isinstance(secrets, dict):
raise HomeAssistantError("Secrets is not a dictionary")
if "logger" in secrets:
logger = str(secrets["logger"]).lower()
if logger == "debug":
_LOGGER.setLevel(logging.DEBUG)
else:
_LOGGER.error(
"secrets.yaml: 'logger: debug' expected, but 'logger: %s' found",
logger,
)
del secrets["logger"]
except FileNotFoundError:
secrets = {}
__SECRET_CACHE[secret_path] = secrets
return secrets
def secret_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE:
"""Load secrets and embed it into the configuration YAML."""
secret_path = os.path.dirname(loader.name)
while True:
secrets = _load_secret_yaml(secret_path)
if node.value in secrets:
_LOGGER.debug(
"Secret %s retrieved from secrets.yaml in folder %s",
node.value,
secret_path,
)
return secrets[node.value]
if secret_path == os.path.dirname(sys.path[0]):
break # sys.path[0] set to config/deps folder by bootstrap
secret_path = os.path.dirname(secret_path)
if not os.path.exists(secret_path) or len(secret_path) < 5:
break # Somehow we got past the .homeassistant config folder
if keyring:
# do some keyring stuff
pwd = keyring.get_password(_SECRET_NAMESPACE, node.value)
if pwd:
_LOGGER.debug("Secret %s retrieved from keyring", node.value)
return pwd
global credstash # pylint: disable=invalid-name, global-statement
if credstash:
# pylint: disable=no-member
try:
pwd = credstash.getSecret(node.value, table=_SECRET_NAMESPACE)
if pwd:
_LOGGER.debug("Secret %s retrieved from credstash", node.value)
return pwd
except credstash.ItemNotFound:
pass
except Exception: # pylint: disable=broad-except
# Catch if package installed and no config
credstash = None
raise HomeAssistantError(f"Secret {node.value} not defined")
yaml.SafeLoader.add_constructor("!include", _include_yaml)
yaml.SafeLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _ordered_dict
)
yaml.SafeLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG, _construct_seq
)
yaml.SafeLoader.add_constructor("!env_var", _env_var_yaml)
yaml.SafeLoader.add_constructor("!secret", secret_yaml)
yaml.SafeLoader.add_constructor("!include_dir_list", _include_dir_list_yaml)
yaml.SafeLoader.add_constructor("!include_dir_merge_list", _include_dir_merge_list_yaml)
yaml.SafeLoader.add_constructor("!include_dir_named", _include_dir_named_yaml)
yaml.SafeLoader.add_constructor(
"!include_dir_merge_named", _include_dir_merge_named_yaml
)
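# A hedged sketch of how the custom tags registered above would appear in a
# configuration file (file and directory names are illustrative):
#
# http:
#   server_port: !env_var SERVER_PORT 8123   # env var with a default value
#   api_password: !secret http_password      # looked up in secrets.yaml
# automation: !include_dir_merge_list automations/
# sensor: !include sensors.yaml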
|
from homeassistant.components.switch import SwitchEntity
from . import ElkAttachedEntity, create_elk_entities
from .const import DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Create the Elk-M1 switch platform."""
elk_data = hass.data[DOMAIN][config_entry.entry_id]
entities = []
elk = elk_data["elk"]
create_elk_entities(elk_data, elk.outputs, "output", ElkOutput, entities)
async_add_entities(entities, True)
class ElkOutput(ElkAttachedEntity, SwitchEntity):
"""Elk output as switch."""
@property
def is_on(self) -> bool:
"""Get the current output status."""
return self._element.output_on
async def async_turn_on(self, **kwargs):
"""Turn on the output."""
self._element.turn_on(0)
async def async_turn_off(self, **kwargs):
"""Turn off the output."""
self._element.turn_off()
|
import os
import sys
from paasta_tools.cli.cmds.check import makefile_responds_to
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.utils import _log
from paasta_tools.utils import _log_audit
from paasta_tools.utils import _run
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import get_username
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"cook-image",
description="Calls 'make cook-image' as part of the PaaSTA contract",
help=(
"'paasta cook-image' calls 'make cook-image' as part of the PaaSTA contract.\n\n"
"The PaaSTA contract specifies that a service MUST respond to 'cook-image' and produce "
"a docker image as a result. This command is often run as part of the normal build pipeline "
"('paasta itest'), or via a 'paasta local-run --build'."
),
epilog="This command assumes that the Makefile is in the current working directory.",
)
list_parser.add_argument(
"-s",
"--service",
help=(
"Build docker image for this service. Leading "
'"services-", as included in a Jenkins job name, '
"will be stripped."
),
required=True,
)
list_parser.add_argument(
"-y",
"--yelpsoa-config-root",
dest="yelpsoa_config_root",
help="A directory from which yelpsoa-configs should be read from",
default=DEFAULT_SOA_DIR,
)
list_parser.set_defaults(command=paasta_cook_image)
def paasta_cook_image(args, service=None, soa_dir=None):
"""Build a docker image"""
if not service:
service = args.service
if service.startswith("services-"):
service = service.split("services-", 1)[1]
if not soa_dir:
soa_dir = args.yelpsoa_config_root
validate_service_name(service, soa_dir)
run_env = os.environ.copy()
default_tag = "paasta-cook-image-{}-{}".format(service, get_username())
tag = run_env.get("DOCKER_TAG", default_tag)
run_env["DOCKER_TAG"] = tag
if not makefile_responds_to("cook-image"):
print(
"ERROR: local-run now requires a cook-image target to be present in the Makefile. See"
"http://paasta.readthedocs.io/en/latest/about/contract.html",
file=sys.stderr,
)
return 1
try:
cmd = "make cook-image"
returncode, output = _run(
cmd,
env=run_env,
log=True,
component="build",
service=service,
loglevel="debug",
)
if returncode != 0:
_log(
service=service,
line="ERROR: make cook-image failed for %s." % service,
component="build",
level="event",
)
else:
action_details = {"tag": tag}
_log_audit(
action="cook-image", action_details=action_details, service=service
)
return returncode
except KeyboardInterrupt:
print("\nProcess interrupted by the user. Cancelling.", file=sys.stderr)
return 2
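# A hedged example of a typical invocation (the service name is a
# placeholder); this simply runs "make cook-image" with DOCKER_TAG exported:
#
#   paasta cook-image --service example_service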
|
from datetime import date
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from zinnia.managers import PUBLISHED
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.models.entry import Entry
from zinnia.signals import disconnect_entry_signals
from zinnia.tests.utils import datetime
from zinnia.tests.utils import skip_if_custom_user
from zinnia.views.mixins.archives import PreviousNextPublishedMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
from zinnia.views.mixins.prefetch_related import PrefetchRelatedMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
from zinnia.views.mixins.templates import EntryQuerysetArchiveTemplateResponseMixin # noqa
from zinnia.views.mixins.templates import EntryQuerysetTemplateResponseMixin
class MixinTestCase(TestCase):
"""Test cases for zinnia.views.mixins"""
maxDiff = None
def setUp(self):
disconnect_entry_signals()
def test_callable_queryset_mixin(self):
instance = CallableQuerysetMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
def qs():
return []
instance.queryset = qs
self.assertEqual(instance.get_queryset(),
[])
def test_entry_queryset_template_response_mixin(self):
instance = EntryQuerysetTemplateResponseMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_model_type)
self.assertRaises(ImproperlyConfigured,
instance.get_model_name)
instance.model_type = 'model'
instance.model_name = 'name'
self.assertEqual(instance.get_model_type(),
'model')
self.assertEqual(instance.get_model_name(),
'name')
self.assertEqual(instance.get_template_names(),
['zinnia/model/name/entry_list.html',
'zinnia/model/name_entry_list.html',
'zinnia/model/entry_list.html',
'zinnia/entry_list.html'])
instance.template_name = 'zinnia/entry_search.html'
self.assertEqual(instance.get_template_names(),
['zinnia/entry_search.html',
'zinnia/model/name/entry_list.html',
'zinnia/model/name_entry_list.html',
'zinnia/model/entry_list.html',
'zinnia/entry_list.html'])
def test_entry_queryset_archive_template_response_mixin(self):
def get_year():
return 2012
def get_week():
return 16
def get_month():
return '04'
def get_day():
return 21
instance = EntryQuerysetArchiveTemplateResponseMixin()
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_year = get_year
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_week = get_week
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_month = get_month
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_day = get_day
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/04/21/entry_archive.html',
'zinnia/archives/month/04/day/21/entry_archive.html',
'zinnia/archives/2012/day/21/entry_archive.html',
'zinnia/archives/day/21/entry_archive.html',
'zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.template_name = 'zinnia/entry_search.html'
self.assertEqual(
instance.get_template_names(),
['zinnia/entry_search.html',
'zinnia/archives/2012/04/21/entry_archive.html',
'zinnia/archives/month/04/day/21/entry_archive.html',
'zinnia/archives/2012/day/21/entry_archive.html',
'zinnia/archives/day/21/entry_archive.html',
'zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
def test_entry_archive_template_response_mixin(self):
class FakeEntry(object):
detail_template = 'entry_detail.html'
slug = 'my-fake-entry'
def get_year():
return 2012
def get_week():
return 16
def get_month():
return '04'
def get_day():
return 21
instance = EntryArchiveTemplateResponseMixin()
instance.get_year = get_year
instance.get_month = get_month
instance.get_week = get_week
instance.get_day = get_day
instance.object = FakeEntry()
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/04/21/my-fake-entry_entry_detail.html',
'zinnia/archives/month/04/day/21/my-fake-entry_entry_detail.html',
'zinnia/archives/2012/day/21/my-fake-entry_entry_detail.html',
'zinnia/archives/day/21/my-fake-entry_entry_detail.html',
'zinnia/archives/2012/04/21/my-fake-entry.html',
'zinnia/archives/month/04/day/21/my-fake-entry.html',
'zinnia/archives/2012/day/21/my-fake-entry.html',
'zinnia/archives/day/21/my-fake-entry.html',
'zinnia/archives/2012/04/21/entry_detail.html',
'zinnia/archives/month/04/day/21/entry_detail.html',
'zinnia/archives/2012/day/21/entry_detail.html',
'zinnia/archives/day/21/entry_detail.html',
'zinnia/archives/2012/month/04/my-fake-entry_entry_detail.html',
'zinnia/archives/month/04/my-fake-entry_entry_detail.html',
'zinnia/archives/2012/month/04/my-fake-entry.html',
'zinnia/archives/month/04/my-fake-entry.html',
'zinnia/archives/2012/month/04/entry_detail.html',
'zinnia/archives/month/04/entry_detail.html',
'zinnia/archives/2012/week/16/my-fake-entry_entry_detail.html',
'zinnia/archives/week/16/my-fake-entry_entry_detail.html',
'zinnia/archives/2012/week/16/my-fake-entry.html',
'zinnia/archives/week/16/my-fake-entry.html',
'zinnia/archives/2012/week/16/entry_detail.html',
'zinnia/archives/week/16/entry_detail.html',
'zinnia/archives/2012/my-fake-entry_entry_detail.html',
'zinnia/archives/2012/my-fake-entry.html',
'zinnia/archives/2012/entry_detail.html',
'zinnia/archives/my-fake-entry_entry_detail.html',
'zinnia/my-fake-entry_entry_detail.html',
'my-fake-entry_entry_detail.html',
'zinnia/archives/my-fake-entry.html',
'zinnia/my-fake-entry.html',
'my-fake-entry.html',
'zinnia/archives/entry_detail.html',
'zinnia/entry_detail.html',
'entry_detail.html'])
instance.object.detail_template = 'custom.html'
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/04/21/my-fake-entry_custom.html',
'zinnia/archives/month/04/day/21/my-fake-entry_custom.html',
'zinnia/archives/2012/day/21/my-fake-entry_custom.html',
'zinnia/archives/day/21/my-fake-entry_custom.html',
'zinnia/archives/2012/04/21/my-fake-entry.html',
'zinnia/archives/month/04/day/21/my-fake-entry.html',
'zinnia/archives/2012/day/21/my-fake-entry.html',
'zinnia/archives/day/21/my-fake-entry.html',
'zinnia/archives/2012/04/21/custom.html',
'zinnia/archives/month/04/day/21/custom.html',
'zinnia/archives/2012/day/21/custom.html',
'zinnia/archives/day/21/custom.html',
'zinnia/archives/2012/month/04/my-fake-entry_custom.html',
'zinnia/archives/month/04/my-fake-entry_custom.html',
'zinnia/archives/2012/month/04/my-fake-entry.html',
'zinnia/archives/month/04/my-fake-entry.html',
'zinnia/archives/2012/month/04/custom.html',
'zinnia/archives/month/04/custom.html',
'zinnia/archives/2012/week/16/my-fake-entry_custom.html',
'zinnia/archives/week/16/my-fake-entry_custom.html',
'zinnia/archives/2012/week/16/my-fake-entry.html',
'zinnia/archives/week/16/my-fake-entry.html',
'zinnia/archives/2012/week/16/custom.html',
'zinnia/archives/week/16/custom.html',
'zinnia/archives/2012/my-fake-entry_custom.html',
'zinnia/archives/2012/my-fake-entry.html',
'zinnia/archives/2012/custom.html',
'zinnia/archives/my-fake-entry_custom.html',
'zinnia/my-fake-entry_custom.html',
'my-fake-entry_custom.html',
'zinnia/archives/my-fake-entry.html',
'zinnia/my-fake-entry.html',
'my-fake-entry.html',
'zinnia/archives/custom.html',
'zinnia/custom.html',
'custom.html'])
def test_previous_next_published_mixin(self):
site = Site.objects.get_current()
params = {'title': 'Entry 1', 'content': 'Entry 1',
'slug': 'entry-1', 'status': PUBLISHED,
'publication_date': datetime(2012, 1, 1, 12)}
entry_1 = Entry.objects.create(**params)
entry_1.sites.add(site)
params = {'title': 'Entry 2', 'content': 'Entry 2',
'slug': 'entry-2', 'status': PUBLISHED,
'publication_date': datetime(2012, 3, 15, 12)}
entry_2 = Entry.objects.create(**params)
entry_2.sites.add(site)
params = {'title': 'Entry 3', 'content': 'Entry 3',
'slug': 'entry-3', 'status': PUBLISHED,
'publication_date': datetime(2013, 6, 2, 12)}
entry_3 = Entry.objects.create(**params)
entry_3.sites.add(site)
class EntryPreviousNextPublished(PreviousNextPublishedMixin):
def get_queryset(self):
return Entry.published.all()
test_date = datetime(2009, 12, 1)
epnp = EntryPreviousNextPublished()
self.assertEqual(epnp.get_previous_year(test_date), None)
self.assertEqual(epnp.get_previous_week(test_date), None)
self.assertEqual(epnp.get_previous_month(test_date), None)
self.assertEqual(epnp.get_previous_day(test_date), None)
self.assertEqual(epnp.get_next_year(test_date), date(2012, 1, 1))
self.assertEqual(epnp.get_next_week(test_date), date(2011, 12, 26))
self.assertEqual(epnp.get_next_month(test_date), date(2012, 1, 1))
self.assertEqual(epnp.get_next_day(test_date), date(2012, 1, 1))
test_date = datetime(2012, 1, 1)
epnp = EntryPreviousNextPublished()
self.assertEqual(epnp.get_previous_year(test_date), None)
self.assertEqual(epnp.get_previous_week(test_date), None)
self.assertEqual(epnp.get_previous_month(test_date), None)
self.assertEqual(epnp.get_previous_day(test_date), None)
self.assertEqual(epnp.get_next_year(test_date), date(2013, 1, 1))
self.assertEqual(epnp.get_next_week(test_date), date(2012, 3, 12))
self.assertEqual(epnp.get_next_month(test_date), date(2012, 3, 1))
self.assertEqual(epnp.get_next_day(test_date), date(2012, 3, 15))
test_date = datetime(2012, 3, 15)
epnp = EntryPreviousNextPublished()
self.assertEqual(epnp.get_previous_year(test_date), None)
self.assertEqual(epnp.get_previous_week(test_date), date(2011, 12, 26))
self.assertEqual(epnp.get_previous_month(test_date), date(2012, 1, 1))
self.assertEqual(epnp.get_previous_day(test_date), date(2012, 1, 1))
self.assertEqual(epnp.get_next_year(test_date), date(2013, 1, 1))
self.assertEqual(epnp.get_next_week(test_date), date(2013, 5, 27))
self.assertEqual(epnp.get_next_month(test_date), date(2013, 6, 1))
self.assertEqual(epnp.get_next_day(test_date), date(2013, 6, 2))
test_date = datetime(2013, 6, 2)
epnp = EntryPreviousNextPublished()
self.assertEqual(epnp.get_previous_year(test_date), date(2012, 1, 1))
self.assertEqual(epnp.get_previous_week(test_date), date(2012, 3, 12))
self.assertEqual(epnp.get_previous_month(test_date), date(2012, 3, 1))
self.assertEqual(epnp.get_previous_day(test_date), date(2012, 3, 15))
self.assertEqual(epnp.get_next_year(test_date), None)
self.assertEqual(epnp.get_next_week(test_date), None)
self.assertEqual(epnp.get_next_month(test_date), None)
self.assertEqual(epnp.get_next_day(test_date), None)
test_date = datetime(2014, 5, 1)
epnp = EntryPreviousNextPublished()
self.assertEqual(epnp.get_previous_year(test_date), date(2013, 1, 1))
self.assertEqual(epnp.get_previous_week(test_date), date(2013, 5, 27))
self.assertEqual(epnp.get_previous_month(test_date), date(2013, 6, 1))
self.assertEqual(epnp.get_previous_day(test_date), date(2013, 6, 2))
self.assertEqual(epnp.get_next_year(test_date), None)
self.assertEqual(epnp.get_next_week(test_date), None)
self.assertEqual(epnp.get_next_month(test_date), None)
self.assertEqual(epnp.get_next_day(test_date), None)
def test_prefetch_related_mixin(self):
instance = PrefetchRelatedMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
instance.relation_names = 'string'
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
@skip_if_custom_user
def test_prefetch_categories_authors_mixin(self):
author = Author.objects.create_user(username='author',
email='[email protected]')
category = Category.objects.create(title='Category',
slug='category')
for i in range(3):
params = {'title': 'My entry',
'content': 'My content',
'slug': 'my-entry-%s' % i}
entry = Entry.objects.create(**params)
entry.authors.add(author)
entry.categories.add(category)
class View(object):
def get_queryset(self):
return Entry.objects.all()
class ViewCategoriesAuthorsPrefetched(
PrefetchCategoriesAuthorsMixin, View):
pass
with self.assertNumQueries(7):
for entry in View().get_queryset():
entry.authors.count()
entry.categories.count()
with self.assertNumQueries(3):
for entry in ViewCategoriesAuthorsPrefetched().get_queryset():
entry.authors.count()
entry.categories.count()
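        # Query accounting: without prefetching, iterating the three entries
        # costs 1 query for the entries plus 2 per entry (authors and
        # categories), i.e. 7; with prefetch_related it is 1 query for the
        # entries plus one per prefetched relation, i.e. 3.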
|
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import configs
from perfkitbenchmarker import context
from perfkitbenchmarker import providers
from perfkitbenchmarker import spark_service
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.aws import aws_emr
from perfkitbenchmarker.providers.gcp import gcp_dataproc
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
NAME = 'name'
UID = 'name0'
SERVICE_CONFIG = """
name:
spark_service:
service_type: managed
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-4
boot_disk_size: 500
zone: us-west1-a
AWS:
machine_type: m4.xlarge
zone: us-west-1
vm_count: 4
"""
PKB_MANAGED_CONFIG = """
name:
spark_service:
service_type: pkb_managed
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-4
boot_disk_size: 500
AWS:
machine_type: m4.xlarge
vm_count: 2
"""
class _BenchmarkSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(_BenchmarkSpecTestCase, self).setUp()
self.addCleanup(context.SetThreadBenchmarkSpec, None)
p = mock.patch(vm_util.__name__ + '.GetTempDir', return_value='/tmp/dir')
p.start()
self.addCleanup(p.stop)
def _CreateBenchmarkSpecFromYaml(self, yaml_string, benchmark_name=NAME):
config = configs.LoadConfig(yaml_string, {}, benchmark_name)
return self._CreateBenchmarkSpecFromConfigDict(config, benchmark_name)
def _CreateBenchmarkSpecFromConfigDict(self, config_dict, benchmark_name):
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
benchmark_name, flag_values=FLAGS, **config_dict)
return benchmark_spec.BenchmarkSpec(mock.MagicMock(), config_spec, UID)
class ConstructSparkServiceTestCase(_BenchmarkSpecTestCase):
def testDataprocConfig(self):
spec = self._CreateBenchmarkSpecFromYaml(SERVICE_CONFIG)
spec.ConstructSparkService()
spec.ConstructVirtualMachines()
self.assertTrue(hasattr(spec, 'spark_service'))
    self.assertIsNotNone(spec.spark_service)
self.assertEqual(len(spec.vms), 0)
machine_type = spec.config.spark_service.worker_group.vm_spec.machine_type
self.assertEqual(spec.config.spark_service.worker_group.vm_count, 4,
str(spec.config.spark_service.__dict__))
self.assertEqual(spec.config.spark_service.service_type,
spark_service.PROVIDER_MANAGED)
self.assertEqual(machine_type,
'n1-standard-4', str(spec.config.spark_service.__dict__))
    self.assertIsInstance(spec.spark_service, gcp_dataproc.GcpDataproc)
def testEMRConfig(self):
FLAGS.cloud = providers.AWS
FLAGS.zones = 'us-west-2'
spec = self._CreateBenchmarkSpecFromYaml(SERVICE_CONFIG)
spec.ConstructSparkService()
spec.ConstructVirtualMachines()
self.assertTrue(hasattr(spec, 'spark_service'))
    self.assertIsNotNone(spec.spark_service)
self.assertEqual(len(spec.vms), 0)
self.assertEqual(spec.config.spark_service.worker_group.vm_count, 4,
str(spec.config.spark_service.__dict__))
machine_type = spec.config.spark_service.worker_group.vm_spec.machine_type
self.assertEqual(spec.config.spark_service.service_type,
spark_service.PROVIDER_MANAGED)
self.assertEqual(machine_type, 'm4.xlarge',
str(spec.config.spark_service.__dict__))
    self.assertIsInstance(spec.spark_service, aws_emr.AwsEMR)
def testPkbManaged(self):
spec = self._CreateBenchmarkSpecFromYaml(PKB_MANAGED_CONFIG)
self.assertEqual(spec.config.spark_service.worker_group.vm_count, 2,
str(spec.config.spark_service.__dict__))
self.assertEqual(spec.config.spark_service.service_type,
spark_service.PKB_MANAGED)
spec.ConstructSparkService()
spec.ConstructVirtualMachines()
self.assertEqual(len(spec.vms), 3)
self.assertEqual(len(spec.vm_groups['master_group']), 1)
self.assertEqual(len(spec.vm_groups['worker_group']), 2)
self.assertEqual(len(spec.spark_service.vms['worker_group']), 2)
self.assertEqual(len(spec.spark_service.vms['master_group']), 1)
    self.assertIsInstance(spec.spark_service, spark_service.PkbSparkService)
if __name__ == '__main__':
unittest.main()
|
from collections import namedtuple
from datetime import timedelta
import logging
from fints.client import FinTS3PinTanClient
from fints.dialog import FinTSDialogError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_PIN, CONF_URL, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(hours=4)
ICON = "mdi:currency-eur"
BankCredentials = namedtuple("BankCredentials", "blz login pin url")
CONF_BIN = "bank_identification_number"
CONF_ACCOUNTS = "accounts"
CONF_HOLDINGS = "holdings"
CONF_ACCOUNT = "account"
ATTR_ACCOUNT = CONF_ACCOUNT
ATTR_BANK = "bank"
ATTR_ACCOUNT_TYPE = "account_type"
SCHEMA_ACCOUNTS = vol.Schema(
{
vol.Required(CONF_ACCOUNT): cv.string,
vol.Optional(CONF_NAME, default=None): vol.Any(None, cv.string),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_BIN): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PIN): cv.string,
vol.Required(CONF_URL): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ACCOUNTS, default=[]): cv.ensure_list(SCHEMA_ACCOUNTS),
vol.Optional(CONF_HOLDINGS, default=[]): cv.ensure_list(SCHEMA_ACCOUNTS),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the sensors.
    Log in to the bank and get a list of existing accounts. Create a
sensor for each account.
"""
credentials = BankCredentials(
config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL]
)
fints_name = config.get(CONF_NAME, config[CONF_BIN])
account_config = {
acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS]
}
holdings_config = {
acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_HOLDINGS]
}
client = FinTsClient(credentials, fints_name)
balance_accounts, holdings_accounts = client.detect_accounts()
accounts = []
for account in balance_accounts:
if config[CONF_ACCOUNTS] and account.iban not in account_config:
_LOGGER.info("skipping account %s for bank %s", account.iban, fints_name)
continue
account_name = account_config.get(account.iban)
if not account_name:
account_name = f"{fints_name} - {account.iban}"
accounts.append(FinTsAccount(client, account, account_name))
_LOGGER.debug("Creating account %s for bank %s", account.iban, fints_name)
for account in holdings_accounts:
if config[CONF_HOLDINGS] and account.accountnumber not in holdings_config:
_LOGGER.info(
"skipping holdings %s for bank %s", account.accountnumber, fints_name
)
continue
account_name = holdings_config.get(account.accountnumber)
if not account_name:
account_name = f"{fints_name} - {account.accountnumber}"
accounts.append(FinTsHoldingsAccount(client, account, account_name))
_LOGGER.debug(
"Creating holdings %s for bank %s", account.accountnumber, fints_name
)
add_entities(accounts, True)
class FinTsClient:
"""Wrapper around the FinTS3PinTanClient.
Use this class as Context Manager to get the FinTS3Client object.
"""
def __init__(self, credentials: BankCredentials, name: str):
"""Initialize a FinTsClient."""
self._credentials = credentials
self.name = name
@property
def client(self):
"""Get the client object.
        As the fints library is stateless, there is no benefit in caching
the client objects. If that ever changes, consider caching the client
object and also think about potential concurrency problems.
"""
return FinTS3PinTanClient(
self._credentials.blz,
self._credentials.login,
self._credentials.pin,
self._credentials.url,
)
def detect_accounts(self):
"""Identify the accounts of the bank."""
balance_accounts = []
holdings_accounts = []
for account in self.client.get_sepa_accounts():
try:
self.client.get_balance(account)
balance_accounts.append(account)
            except (IndexError, FinTSDialogError):
                # account is not a balance account.
                pass
try:
self.client.get_holdings(account)
holdings_accounts.append(account)
except FinTSDialogError:
# account is not a holdings account.
pass
return balance_accounts, holdings_accounts
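# A minimal usage sketch for FinTsClient outside Home Assistant; the bank
# details below are placeholders, not real credentials or endpoints:
#
#     credentials = BankCredentials("12345678", "user", "1234",
#                                   "https://example.bank/fints")
#     client = FinTsClient(credentials, "example bank")
#     balance_accounts, holdings_accounts = client.detect_accounts()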
class FinTsAccount(Entity):
"""Sensor for a FinTS balance account.
A balance account contains an amount of money (=balance). The amount may
also be negative.
"""
def __init__(self, client: FinTsClient, account, name: str) -> None:
"""Initialize a FinTs balance account."""
self._client = client
self._account = account
self._name = name
self._balance: float = None
self._currency: str = None
def update(self) -> None:
"""Get the current balance and currency for the account."""
bank = self._client.client
balance = bank.get_balance(self._account)
self._balance = balance.amount.amount
self._currency = balance.amount.currency
_LOGGER.debug("updated balance of account %s", self.name)
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return self._name
@property
def state(self) -> float:
"""Return the balance of the account as state."""
return self._balance
@property
def unit_of_measurement(self) -> str:
"""Use the currency as unit of measurement."""
return self._currency
@property
def device_state_attributes(self) -> dict:
"""Additional attributes of the sensor."""
attributes = {ATTR_ACCOUNT: self._account.iban, ATTR_ACCOUNT_TYPE: "balance"}
if self._client.name:
attributes[ATTR_BANK] = self._client.name
return attributes
@property
def icon(self) -> str:
"""Set the icon for the sensor."""
return ICON
class FinTsHoldingsAccount(Entity):
"""Sensor for a FinTS holdings account.
A holdings account does not contain money but rather some financial
instruments, e.g. stocks.
"""
def __init__(self, client: FinTsClient, account, name: str) -> None:
"""Initialize a FinTs holdings account."""
self._client = client
self._name = name
self._account = account
self._holdings = []
self._total: float = None
def update(self) -> None:
"""Get the current holdings for the account."""
bank = self._client.client
self._holdings = bank.get_holdings(self._account)
self._total = sum(h.total_value for h in self._holdings)
@property
def state(self) -> float:
"""Return total market value as state."""
return self._total
@property
def icon(self) -> str:
"""Set the icon for the sensor."""
return ICON
@property
def device_state_attributes(self) -> dict:
"""Additional attributes of the sensor.
Lists each holding of the account with the current value.
"""
attributes = {
ATTR_ACCOUNT: self._account.accountnumber,
ATTR_ACCOUNT_TYPE: "holdings",
}
if self._client.name:
attributes[ATTR_BANK] = self._client.name
for holding in self._holdings:
total_name = f"{holding.name} total"
attributes[total_name] = holding.total_value
pieces_name = f"{holding.name} pieces"
attributes[pieces_name] = holding.pieces
price_name = f"{holding.name} price"
attributes[price_name] = holding.market_value
return attributes
@property
def name(self) -> str:
"""Friendly name of the sensor."""
return self._name
@property
def unit_of_measurement(self) -> str:
"""Get the unit of measurement.
Hardcoded to EUR, as the library does not provide the currency for the
        holdings. As FinTS is only used in Germany, most accounts will be
        in EUR anyway.
"""
return "EUR"
|
from wolf_smartset.models import (
HoursParameter,
ListItemParameter,
Parameter,
PercentageParameter,
Pressure,
SimpleParameter,
Temperature,
)
from homeassistant.const import (
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PRESSURE_BAR,
TEMP_CELSIUS,
TIME_HOURS,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import COORDINATOR, DEVICE_ID, DOMAIN, PARAMETERS, STATES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up all entries for Wolf Platform."""
coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
parameters = hass.data[DOMAIN][config_entry.entry_id][PARAMETERS]
device_id = hass.data[DOMAIN][config_entry.entry_id][DEVICE_ID]
entities = []
for parameter in parameters:
if isinstance(parameter, Temperature):
entities.append(WolfLinkTemperature(coordinator, parameter, device_id))
if isinstance(parameter, Pressure):
entities.append(WolfLinkPressure(coordinator, parameter, device_id))
if isinstance(parameter, PercentageParameter):
entities.append(WolfLinkPercentage(coordinator, parameter, device_id))
if isinstance(parameter, ListItemParameter):
entities.append(WolfLinkState(coordinator, parameter, device_id))
if isinstance(parameter, HoursParameter):
entities.append(WolfLinkHours(coordinator, parameter, device_id))
if isinstance(parameter, SimpleParameter):
entities.append(WolfLinkSensor(coordinator, parameter, device_id))
async_add_entities(entities, True)
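# An equivalent, table-driven way to express the dispatch above (just a
# sketch, assuming each parameter matches exactly one of these types; not
# part of the integration):
#
#     SENSOR_TYPES = [
#         (Temperature, WolfLinkTemperature),
#         (Pressure, WolfLinkPressure),
#         (PercentageParameter, WolfLinkPercentage),
#         (ListItemParameter, WolfLinkState),
#         (HoursParameter, WolfLinkHours),
#         (SimpleParameter, WolfLinkSensor),
#     ]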
class WolfLinkSensor(CoordinatorEntity):
"""Base class for all Wolf entities."""
def __init__(self, coordinator, wolf_object: Parameter, device_id):
"""Initialize."""
super().__init__(coordinator)
self.wolf_object = wolf_object
self.device_id = device_id
self._state = None
@property
def name(self):
"""Return the name."""
return f"{self.wolf_object.name}"
@property
def state(self):
"""Return the state. Wolf Client is returning only changed values so we need to store old value here."""
if self.wolf_object.value_id in self.coordinator.data:
self._state = self.coordinator.data[self.wolf_object.value_id]
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
"parameter_id": self.wolf_object.parameter_id,
"value_id": self.wolf_object.value_id,
"parent": self.wolf_object.parent,
}
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{self.device_id}:{self.wolf_object.parameter_id}"
class WolfLinkHours(WolfLinkSensor):
"""Class for hour based entities."""
@property
def icon(self):
"""Icon to display in the front Aend."""
return "mdi:clock"
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TIME_HOURS
class WolfLinkTemperature(WolfLinkSensor):
"""Class for temperature based entities."""
@property
def device_class(self):
"""Return the device_class."""
return DEVICE_CLASS_TEMPERATURE
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return TEMP_CELSIUS
class WolfLinkPressure(WolfLinkSensor):
"""Class for pressure based entities."""
@property
def device_class(self):
"""Return the device_class."""
return DEVICE_CLASS_PRESSURE
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return PRESSURE_BAR
class WolfLinkPercentage(WolfLinkSensor):
"""Class for percentage based entities."""
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.wolf_object.unit
class WolfLinkState(WolfLinkSensor):
"""Class for entities which has defined list of state."""
@property
def device_class(self):
"""Return the device class."""
return "wolflink__state"
@property
def state(self):
"""Return the state converting with supported values."""
state = super().state
resolved_state = [
item for item in self.wolf_object.items if item.value == int(state)
]
if resolved_state:
resolved_name = resolved_state[0].name
return STATES.get(resolved_name, resolved_name)
return state
|
import boto3
from smart_open import open
BUCKET, KEY = 'smart-open-versioned', 'demo.txt'
"""Our have a public-readable bucket with a versioned object."""
URL = 's3://%s/%s' % (BUCKET, KEY)
def assert_equal(a, b):
assert a == b, '%r != %r' % (a, b)
def main():
versions = [
v.id for v in boto3.resource('s3').Bucket(BUCKET).object_versions.filter(Prefix=KEY)
]
expected_versions = [
'KiQpZPsKI5Dm2oJZy_RzskTOtl2snjBg',
'N0GJcE3TQCKtkaS.gF.MUBZS85Gs3hzn',
]
assert_equal(versions, expected_versions)
contents = [
open(URL, transport_params={'version_id': v}).read()
for v in versions
]
expected_contents = ['second version\n', 'first version\n']
assert_equal(contents, expected_contents)
with open(URL) as fin:
most_recent_contents = fin.read()
assert_equal(most_recent_contents, expected_contents[0])
print('OK')
if __name__ == '__main__':
main()
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from elasticsearch import ElasticSearchCollector
##########################################################################
class TestElasticSearchCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ElasticSearchCollector', {})
self.collector = ElasticSearchCollector(config, None)
def test_import(self):
self.assertTrue(ElasticSearchCollector)
def test_new__instances_default(self):
config = get_collector_config('ElasticSearchCollector', {})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {'': ('127.0.0.1', 9200)})
def test_new__instances_single(self):
config = get_collector_config('ElasticSearchCollector', {
'instances': 'bla'})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {'default': ('bla', 9200)})
def test_new__instances_multi(self):
config = get_collector_config('ElasticSearchCollector', {
'instances': [
'something',
'foo@1234',
'bar@bla:1234',
]})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(self.collector.instances, {
'default': ('something', 9200),
'foo': ('1234', 9200),
'bar': ('bla', 1234),
})
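    # As exercised above, instance strings follow an "alias@host:port" shape:
    # a bare value is treated as a host with alias 'default' and port 9200,
    # "alias@host" keeps the default port, and "alias@host:port" sets all
    # three fields explicitly.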
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_and_basic_auth(self, publish_mock):
self.collector.config["user"] = "user"
self.collector.config["password"] = "password"
self.test_should_work_with_real_data()
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('cluster_stats'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['cluster'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 3)
metrics = {
'http.current': 1,
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 4,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 2674,
'indices.test.docs.count': 4,
'indices.test.docs.deleted': 0,
'indices.test.datastore.size': 2674,
'process.cpu.percent': 58,
'process.mem.resident': 5192126464,
'process.mem.share': 11075584,
'process.mem.virtual': 7109668864,
'disk.reads.count': 55996,
'disk.reads.size': 1235387392,
'disk.writes.count': 5808198,
'disk.writes.size': 23287275520,
'thread_pool.generic.threads': 1,
'network.tcp.active_opens': 2299,
'jvm.mem.pools.CMS_Old_Gen.used': 530915016,
'cluster_health.nodes.total': 3,
'cluster_health.nodes.data': 3,
'cluster_health.shards.active_primary': 5,
'cluster_health.shards.active': 10,
'cluster_health.shards.relocating': 0,
'cluster_health.shards.unassigned': 0,
'cluster_health.shards.initializing': 0,
'cluster_health.status': 2,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_v2(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('cluster_stats_v2'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['cluster'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 3)
metrics = {
'http.current': 1,
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 4,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 2674,
'indices.test.docs.count': 4,
'indices.test.docs.deleted': 0,
'indices.test.datastore.size': 2674,
'process.cpu.percent': 58,
'process.mem.resident': 5192126464,
'process.mem.share': 11075584,
'process.mem.virtual': 7109668864,
'disk.reads.count': 55996,
'disk.reads.size': 1235387392,
'disk.writes.count': 5808198,
'disk.writes.size': 23287275520,
'thread_pool.generic.threads': 1,
'network.tcp.active_opens': 2299,
'jvm.mem.pools.CMS_Old_Gen.used': 530915016,
'cluster_health.nodes.pending_tasks': 266,
'cluster_health.nodes.data': 4,
'cluster_health.nodes.total': 8,
'cluster_health.shards.active_primary': 10,
'cluster_health.shards.active': 30,
'cluster_health.shards.active_percent': 100,
'cluster_health.shards.delayed_unassigned': 0,
'cluster_health.shards.relocating': 0,
'cluster_health.shards.unassigned': 0,
'cluster_health.shards.initializing': 0,
'cluster_health.status': 2,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_logstash_mode(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('logstash_indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['logstash_mode'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 2)
# Omit all non-indices metrics, since those were already
# checked in previous test.
metrics = {
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 35856619,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 21903813340,
'indices._all.get.exists_time_in_millis': 0,
'indices._all.get.exists_total': 0,
'indices._all.get.missing_time_in_millis': 0,
'indices._all.get.missing_total': 0,
'indices._all.get.time_in_millis': 0,
'indices._all.get.total': 0,
'indices._all.indexing.delete_time_in_millis': 0,
'indices._all.indexing.delete_total': 0,
'indices._all.indexing.index_time_in_millis': 29251475,
'indices._all.indexing.index_total': 35189321,
'indices._all.search.fetch_time_in_millis': 6962,
'indices._all.search.fetch_total': 4084,
'indices._all.search.query_time_in_millis': 41211,
'indices._all.search.query_total': 4266,
'indices._all.store.throttle_time_in_millis': 0,
'indices.logstash-adm-syslog.indexes_in_group': 3,
'indices.logstash-adm-syslog.datastore.size': 21903813340,
'indices.logstash-adm-syslog.docs.count': 35856619,
'indices.logstash-adm-syslog.docs.deleted': 0,
'indices.logstash-adm-syslog.get.exists_time_in_millis': 0,
'indices.logstash-adm-syslog.get.exists_total': 0,
'indices.logstash-adm-syslog.get.missing_time_in_millis': 0,
'indices.logstash-adm-syslog.get.missing_total': 0,
'indices.logstash-adm-syslog.get.time_in_millis': 0,
'indices.logstash-adm-syslog.get.total': 0,
'indices.logstash-adm-syslog.indexing.delete_time_in_millis': 0,
'indices.logstash-adm-syslog.indexing.delete_total': 0,
'indices.logstash-adm-syslog.indexing.index_time_in_millis': 29251475, # NOQA
'indices.logstash-adm-syslog.indexing.index_total': 35189321,
'indices.logstash-adm-syslog.search.fetch_time_in_millis': 6962,
'indices.logstash-adm-syslog.search.fetch_total': 4084,
'indices.logstash-adm-syslog.search.query_time_in_millis': 41211,
'indices.logstash-adm-syslog.search.query_total': 4266,
'indices.logstash-adm-syslog.store.throttle_time_in_millis': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data_logstash_hourlymode(self, publish_mock):
returns = [
self.getFixture('stats'),
self.getFixture('logstash_hourly_indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
self.collector.config['logstash_mode'] = True
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 2)
# Omit all non-indices metrics, since those were already
# checked in previous test.
metrics = {
'indices.docs.count': 11968062,
'indices.docs.deleted': 2692068,
'indices.datastore.size': 22724243633,
'indices._all.docs.count': 35856619,
'indices._all.docs.deleted': 0,
'indices._all.datastore.size': 21903813340,
'indices._all.get.exists_time_in_millis': 0,
'indices._all.get.exists_total': 0,
'indices._all.get.missing_time_in_millis': 0,
'indices._all.get.missing_total': 0,
'indices._all.get.time_in_millis': 0,
'indices._all.get.total': 0,
'indices._all.indexing.delete_time_in_millis': 0,
'indices._all.indexing.delete_total': 0,
'indices._all.indexing.index_time_in_millis': 29251475,
'indices._all.indexing.index_total': 35189321,
'indices._all.search.fetch_time_in_millis': 6962,
'indices._all.search.fetch_total': 4084,
'indices._all.search.query_time_in_millis': 41211,
'indices._all.search.query_total': 4266,
'indices._all.store.throttle_time_in_millis': 0,
'indices.logstash-adm-syslog.indexes_in_group': 3,
'indices.logstash-adm-syslog.datastore.size': 21903813340,
'indices.logstash-adm-syslog.docs.count': 35856619,
'indices.logstash-adm-syslog.docs.deleted': 0,
'indices.logstash-adm-syslog.get.exists_time_in_millis': 0,
'indices.logstash-adm-syslog.get.exists_total': 0,
'indices.logstash-adm-syslog.get.missing_time_in_millis': 0,
'indices.logstash-adm-syslog.get.missing_total': 0,
'indices.logstash-adm-syslog.get.time_in_millis': 0,
'indices.logstash-adm-syslog.get.total': 0,
'indices.logstash-adm-syslog.indexing.delete_time_in_millis': 0,
'indices.logstash-adm-syslog.indexing.delete_total': 0,
'indices.logstash-adm-syslog.indexing.index_time_in_millis': 29251475, # NOQA
'indices.logstash-adm-syslog.indexing.index_total': 35189321,
'indices.logstash-adm-syslog.search.fetch_time_in_millis': 6962,
'indices.logstash-adm-syslog.search.fetch_total': 4084,
'indices.logstash-adm-syslog.search.query_time_in_millis': 41211,
'indices.logstash-adm-syslog.search.query_total': 4266,
'indices.logstash-adm-syslog.store.throttle_time_in_millis': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_0_90_data(self, publish_mock):
returns = [
self.getFixture('stats0.90'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 2)
# test some 0.90 specific stats
metrics = {
'cache.filter.size': 1700,
'cache.filter.evictions': 9,
'cache.id.size': 98,
'fielddata.size': 1448,
'fielddata.evictions': 12,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
urlopen_mock = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('stats_blank')))
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
self.assertPublishedMany(publish_mock, {})
@patch.object(Collector, 'publish')
def test_multi_instances_with_real_data(self, publish_mock):
config = get_collector_config('ElasticSearchCollector', {
'instances': [
'[email protected]:9200',
'[email protected]:9200',
]})
self.collector = ElasticSearchCollector(config, None)
self.assertEqual(len(self.collector.instances), 2)
returns = [
self.getFixture('stats'),
self.getFixture('indices_stats'),
self.getFixture('stats2'),
self.getFixture('indices_stats2'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 4)
metrics = {
'esprodata01.http.current': 1,
'esprodata02.http.current': 2,
'esprodata01.indices.docs.count': 11968062,
'esprodata02.indices.docs.count': 11968000,
'esprodata01.thread_pool.generic.threads': 1,
'esprodata02.thread_pool.generic.threads': 2,
'esprodata01.jvm.mem.pools.Par_Survivor_Space.max': 8716288,
'esprodata02.jvm.mem.pools.Par_Survivor_Space.max': 8710000,
'esprodata01.indices._all.docs.count': 4,
'esprodata02.indices._all.docs.count': 8,
}
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_real_1_7_data(self, publish_mock):
returns = [
self.getFixture('stats1.7'),
self.getFixture('indices_stats'),
]
urlopen_mock = patch('urllib2.urlopen', Mock(
side_effect=lambda *args: returns.pop(0)))
urlopen_mock.start()
self.collector.collect()
urlopen_mock.stop()
# check how many fixtures were consumed
self.assertEqual(urlopen_mock.new.call_count, 2)
# test some 1.7 specific stats
metrics = {
'segments.count': 7,
'segments.mem.size': 75726,
'segments.index_writer.mem.size': 0,
'segments.index_writer.mem.max_size': 469762048,
'segments.version_map.mem.size': 0,
'segments.fixed_bit_set.mem.size': 0
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import asyncio
from aiohttp import client_exceptions
import aiohue
from aiohue.discovery import URL_NUPNP
import pytest
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.components.hue import config_flow, const
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import MockConfigEntry
@pytest.fixture(name="hue_setup", autouse=True)
def hue_setup_fixture():
"""Mock hue entry setup."""
with patch("homeassistant.components.hue.async_setup_entry", return_value=True):
yield
def get_mock_bridge(
bridge_id="aabbccddeeff", host="1.2.3.4", mock_create_user=None, username=None
):
"""Return a mock bridge."""
mock_bridge = Mock()
mock_bridge.host = host
mock_bridge.username = username
mock_bridge.config.name = "Mock Bridge"
mock_bridge.id = bridge_id
if not mock_create_user:
async def create_user(username):
mock_bridge.username = username
mock_create_user = create_user
mock_bridge.create_user = mock_create_user
mock_bridge.initialize = AsyncMock()
return mock_bridge
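# Unless overridden, the mock bridge above reports id "aabbccddeeff" on host
# 1.2.3.4, and its create_user() simply records whatever username the flow
# requests, which is what the tests below assert ends up in the created
# config entry.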
async def test_flow_works(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge()
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
flow = next(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert flow["context"]["unique_id"] == "aabbccddeeff"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "1.2.3.4",
"username": "home-assistant#test-home",
}
assert len(mock_bridge.initialize.mock_calls) == 1
async def test_manual_flow_works(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
mock_bridge = get_mock_bridge()
MockConfigEntry(
domain="hue", source=config_entries.SOURCE_IGNORE, unique_id="bla"
).add_to_hass(hass)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": "manual"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge",
return_value=bridge,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "2.2.2.2"}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
with patch("homeassistant.components.hue.config_flow.authenticate_bridge"), patch(
"homeassistant.components.hue.async_unload_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "2.2.2.2",
"username": "username-abc",
}
entries = hass.config_entries.async_entries("hue")
assert len(entries) == 2
entry = entries[-1]
assert entry.unique_id == "id-1234"
async def test_manual_flow_bridge_exist(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
MockConfigEntry(
domain="hue", unique_id="id-1234", data={"host": "2.2.2.2"}
).add_to_hass(hass)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge",
return_value=bridge,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": "2.2.2.2"}
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_manual_flow_no_discovered_bridges(hass, aioclient_mock):
"""Test config flow discovers no bridges."""
aioclient_mock.get(URL_NUPNP, json=[])
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
async def test_flow_all_discovered_bridges_exist(hass, aioclient_mock):
"""Test config flow discovers only already configured bridges."""
aioclient_mock.get(URL_NUPNP, json=[{"internalipaddress": "1.2.3.4", "id": "bla"}])
MockConfigEntry(
domain="hue", unique_id="bla", data={"host": "1.2.3.4"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
async def test_flow_bridges_discovered(hass, aioclient_mock):
"""Test config flow discovers two bridges."""
# Add ignored config entry. Should still show up as option.
MockConfigEntry(
domain="hue", source=config_entries.SOURCE_IGNORE, unique_id="bla"
).add_to_hass(hass)
aioclient_mock.get(
URL_NUPNP,
json=[
{"internalipaddress": "1.2.3.4", "id": "bla"},
{"internalipaddress": "5.6.7.8", "id": "beer"},
],
)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
with pytest.raises(vol.Invalid):
assert result["data_schema"]({"id": "not-discovered"})
result["data_schema"]({"id": "bla"})
result["data_schema"]({"id": "beer"})
result["data_schema"]({"id": "manual"})
async def test_flow_two_bridges_discovered_one_new(hass, aioclient_mock):
"""Test config flow discovers two bridges."""
aioclient_mock.get(
URL_NUPNP,
json=[
{"internalipaddress": "1.2.3.4", "id": "bla"},
{"internalipaddress": "5.6.7.8", "id": "beer"},
],
)
MockConfigEntry(
domain="hue", unique_id="bla", data={"host": "1.2.3.4"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "init"
assert result["data_schema"]({"id": "beer"})
assert result["data_schema"]({"id": "manual"})
with pytest.raises(vol.error.MultipleInvalid):
assert not result["data_schema"]({"id": "bla"})
async def test_flow_timeout_discovery(hass):
"""Test config flow ."""
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
side_effect=asyncio.TimeoutError,
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
assert result["type"] == "abort"
assert result["reason"] == "discover_timeout"
async def test_flow_link_timeout(hass):
"""Test config flow."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=asyncio.TimeoutError),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_flow_link_unknown_error(hass):
"""Test if a unknown error happened during the linking processes."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=OSError),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "linking"}
async def test_flow_link_button_not_pressed(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=aiohue.LinkButtonNotPressed),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {"base": "register_failed"}
async def test_flow_link_unknown_host(hass):
"""Test config flow ."""
mock_bridge = get_mock_bridge(
mock_create_user=AsyncMock(side_effect=client_exceptions.ClientOSError),
)
with patch(
"homeassistant.components.hue.config_flow.discover_nupnp",
return_value=[mock_bridge],
):
result = await hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": "user"}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"id": mock_bridge.id}
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_bridge_ssdp(hass):
"""Test a bridge being discovered."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
async def test_bridge_ssdp_discover_other_bridge(hass):
"""Test that discovery ignores other bridges."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={ssdp.ATTR_UPNP_MANUFACTURER_URL: "http://www.notphilips.com"},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_emulated_hue(hass):
"""Test if discovery info is from an emulated hue instance."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "Home Assistant Bridge",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_missing_location(hass):
"""Test if discovery info is missing a location attribute."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_missing_serial(hass):
"""Test if discovery info is a serial attribute."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_espalexa(hass):
"""Test if discovery info is from an Espalexa based device."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_FRIENDLY_NAME: "Espalexa (0.0.0.0)",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "not_hue_bridge"
async def test_bridge_ssdp_already_configured(hass):
"""Test if a discovered bridge has already been configured."""
MockConfigEntry(
domain="hue", unique_id="1234", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://0.0.0.0/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "1234",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_import_with_no_config(hass):
"""Test importing a host without an existing config file."""
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "import"},
data={"host": "0.0.0.0"},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
async def test_creating_entry_removes_entries_for_same_host_or_bridge(hass):
"""Test that we clean up entries for same host and bridge.
An IP can only hold a single bridge and a single bridge can only be
accessible via a single IP. So when we create a new entry, we'll remove
    all existing entries that have either the same IP or the same bridge_id.
"""
orig_entry = MockConfigEntry(
domain="hue",
data={"host": "0.0.0.0", "username": "aaaa"},
unique_id="id-1234",
)
orig_entry.add_to_hass(hass)
MockConfigEntry(
domain="hue",
data={"host": "1.2.3.4", "username": "bbbb"},
unique_id="id-5678",
).add_to_hass(hass)
assert len(hass.config_entries.async_entries("hue")) == 2
bridge = get_mock_bridge(
bridge_id="id-1234", host="2.2.2.2", username="username-abc"
)
with patch(
"aiohue.Bridge",
return_value=bridge,
):
result = await hass.config_entries.flow.async_init(
"hue", data={"host": "2.2.2.2"}, context={"source": "import"}
)
assert result["type"] == "form"
assert result["step_id"] == "link"
with patch("homeassistant.components.hue.config_flow.authenticate_bridge"), patch(
"homeassistant.components.hue.async_unload_entry", return_value=True
):
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == "create_entry"
assert result["title"] == "Mock Bridge"
assert result["data"] == {
"host": "2.2.2.2",
"username": "username-abc",
}
entries = hass.config_entries.async_entries("hue")
assert len(entries) == 2
new_entry = entries[-1]
assert orig_entry.entry_id != new_entry.entry_id
assert new_entry.unique_id == "id-1234"
async def test_bridge_homekit(hass, aioclient_mock):
"""Test a bridge being discovered via HomeKit."""
aioclient_mock.get(URL_NUPNP, json=[{"internalipaddress": "1.2.3.4", "id": "bla"}])
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "homekit"},
data={
"host": "0.0.0.0",
"serial": "1234",
"manufacturerURL": config_flow.HUE_MANUFACTURERURL,
"properties": {"id": "aa:bb:cc:dd:ee:ff"},
},
)
assert result["type"] == "form"
assert result["step_id"] == "init"
async def test_bridge_import_already_configured(hass):
"""Test if a import flow aborts if host is already configured."""
MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "import"},
data={"host": "0.0.0.0", "properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_bridge_homekit_already_configured(hass):
"""Test if a HomeKit discovered bridge has already been configured."""
MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "homekit"},
data={"host": "0.0.0.0", "properties": {"id": "aa:bb:cc:dd:ee:ff"}},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_ssdp_discovery_update_configuration(hass):
"""Test if a discovered bridge is configured and updated with new host."""
entry = MockConfigEntry(
domain="hue", unique_id="aabbccddeeff", data={"host": "0.0.0.0"}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
const.DOMAIN,
context={"source": "ssdp"},
data={
ssdp.ATTR_SSDP_LOCATION: "http://1.1.1.1/",
ssdp.ATTR_UPNP_MANUFACTURER_URL: config_flow.HUE_MANUFACTURERURL,
ssdp.ATTR_UPNP_SERIAL: "aabbccddeeff",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data["host"] == "1.1.1.1"
async def test_options_flow(hass):
"""Test options config flow."""
entry = MockConfigEntry(
domain="hue",
unique_id="aabbccddeeff",
data={"host": "0.0.0.0"},
)
entry.add_to_hass(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == "form"
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
const.CONF_ALLOW_HUE_GROUPS: True,
const.CONF_ALLOW_UNREACHABLE: True,
},
)
assert result["type"] == "create_entry"
assert result["data"] == {
const.CONF_ALLOW_HUE_GROUPS: True,
const.CONF_ALLOW_UNREACHABLE: True,
}
|
from functools import reduce
from glob import glob
import os
import os.path as op
from shutil import copyfile, copytree
import pytest
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_allclose,
assert_array_equal)
import mne
from mne.datasets import testing
from mne.transforms import (Transform, apply_trans, rotation, translation,
scaling)
from mne.coreg import (fit_matched_points, create_default_subject, scale_mri,
_is_mri_subject, scale_labels, scale_source_space,
coregister_fiducials, get_mni_fiducials)
from mne.io import read_fiducials
from mne.io.constants import FIFF
from mne.utils import (run_tests_if_main, requires_nibabel, modified_env,
check_version)
from mne.source_space import write_source_spaces
data_path = testing.data_path(download=False)
@pytest.fixture
def few_surfaces():
"""Set the _MNE_FEW_SURFACES env var."""
with modified_env(_MNE_FEW_SURFACES='true'):
yield
def test_coregister_fiducials():
"""Test coreg.coregister_fiducials()."""
# prepare head and MRI fiducials
trans = Transform('head', 'mri',
rotation(.4, .1, 0).dot(translation(.1, -.1, .1)))
coords_orig = np.array([[-0.08061612, -0.02908875, -0.04131077],
[0.00146763, 0.08506715, -0.03483611],
[0.08436285, -0.02850276, -0.04127743]])
coords_trans = apply_trans(trans, coords_orig)
def make_dig(coords, cf):
return ({'coord_frame': cf, 'ident': 1, 'kind': 1, 'r': coords[0]},
{'coord_frame': cf, 'ident': 2, 'kind': 1, 'r': coords[1]},
{'coord_frame': cf, 'ident': 3, 'kind': 1, 'r': coords[2]})
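    # kind=1 marks cardinal (fiducial) points; idents 1, 2 and 3 correspond
    # to LPA, nasion and RPA in the FIFF convention.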
mri_fiducials = make_dig(coords_trans, FIFF.FIFFV_COORD_MRI)
info = {'dig': make_dig(coords_orig, FIFF.FIFFV_COORD_HEAD)}
# test coregister_fiducials()
trans_est = coregister_fiducials(info, mri_fiducials)
assert trans_est.from_str == trans.from_str
assert trans_est.to_str == trans.to_str
assert_array_almost_equal(trans_est['trans'], trans['trans'])
@pytest.mark.slowtest # can take forever on OSX Travis
@testing.requires_testing_data
@pytest.mark.parametrize('scale', (.9, [1, .2, .8]))
def test_scale_mri(tmpdir, few_surfaces, scale):
"""Test creating fsaverage and scaling it."""
# create fsaverage using the testing "fsaverage" instead of the FreeSurfer
# one
tempdir = str(tmpdir)
fake_home = testing.data_path()
create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
verbose=True)
assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"
fid_path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif')
os.remove(fid_path)
create_default_subject(update=True, subjects_dir=tempdir,
fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage failed"
    # copy an MRI file from the sample data (it doesn't matter that it's the
    # wrong subject, so choose a small one)
path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
'T1.mgz')
path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
copyfile(path_from, path_to)
# remove redundant label files
label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
label_paths = glob(label_temp)
for label_path in label_paths[1:]:
os.remove(label_path)
# create source space
print('Creating surface source space')
path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
add_dist=False)
mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
print('Creating volume source space')
vsrc = mne.setup_volume_source_space(
'fsaverage', pos=50, mri=mri, subjects_dir=tempdir,
add_interpolator=False)
write_source_spaces(path % 'vol-50', vsrc)
# scale fsaverage
write_source_spaces(path % 'ico-0', src, overwrite=True)
with pytest.warns(None): # sometimes missing nibabel
scale_mri('fsaverage', 'flachkopf', scale, True,
subjects_dir=tempdir, verbose='debug')
assert _is_mri_subject('flachkopf', tempdir), "Scaling failed"
spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')
assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
'lh.sphere.reg'))
vsrc_s = mne.read_source_spaces(spath % 'vol-50')
for vox in ([0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 2, 3]):
idx = np.ravel_multi_index(vox, vsrc[0]['shape'], order='F')
err_msg = f'idx={idx} @ {vox}, scale={scale}'
assert_allclose(apply_trans(vsrc[0]['src_mri_t'], vox),
vsrc[0]['rr'][idx], err_msg=err_msg)
assert_allclose(apply_trans(vsrc_s[0]['src_mri_t'], vox),
vsrc_s[0]['rr'][idx], err_msg=err_msg)
scale_labels('flachkopf', subjects_dir=tempdir)
# add distances to source space after hacking the properties to make
# it run *much* faster
src_dist = src.copy()
for s in src_dist:
s.update(rr=s['rr'][s['vertno']], nn=s['nn'][s['vertno']],
tris=s['use_tris'])
s.update(np=len(s['rr']), ntri=len(s['tris']),
vertno=np.arange(len(s['rr'])),
inuse=np.ones(len(s['rr']), int))
mne.add_source_space_distances(src_dist)
write_source_spaces(path % 'ico-0', src_dist, overwrite=True)
# scale with distances
os.remove(spath % 'ico-0')
scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
ssrc = mne.read_source_spaces(spath % 'ico-0')
assert ssrc[0]['dist'] is not None
assert ssrc[0]['nearest'] is not None
# check patch info computation (only if SciPy is new enough to be fast)
if check_version('scipy', '1.3'):
for s in src_dist:
for key in ('dist', 'dist_limit'):
s[key] = None
write_source_spaces(path % 'ico-0', src_dist, overwrite=True)
# scale with distances
os.remove(spath % 'ico-0')
scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
ssrc = mne.read_source_spaces(spath % 'ico-0')
assert ssrc[0]['dist'] is None
assert ssrc[0]['nearest'] is not None
@pytest.mark.slowtest # can take forever on OSX Travis
@testing.requires_testing_data
@requires_nibabel()
def test_scale_mri_xfm(tmpdir, few_surfaces):
"""Test scale_mri transforms and MRI scaling."""
# scale fsaverage
tempdir = str(tmpdir)
fake_home = testing.data_path()
# add fsaverage
create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
verbose=True)
# add sample (with few files)
sample_dir = op.join(tempdir, 'sample')
os.mkdir(sample_dir)
os.mkdir(op.join(sample_dir, 'bem'))
for dirname in ('mri', 'surf'):
copytree(op.join(fake_home, 'subjects', 'sample', dirname),
op.join(sample_dir, dirname))
subject_to = 'flachkopf'
spacing = 'oct2'
for subject_from in ('fsaverage', 'sample'):
if subject_from == 'fsaverage':
scale = 1. # single dim
else:
scale = [0.9, 2, .8] # separate
src_from_fname = op.join(tempdir, subject_from, 'bem',
'%s-%s-src.fif' % (subject_from, spacing))
src_from = mne.setup_source_space(
subject_from, spacing, subjects_dir=tempdir, add_dist=False)
write_source_spaces(src_from_fname, src_from)
vertices_from = np.concatenate([s['vertno'] for s in src_from])
assert len(vertices_from) == 36
hemis = ([0] * len(src_from[0]['vertno']) +
[1] * len(src_from[0]['vertno']))
mni_from = mne.vertex_to_mni(vertices_from, hemis, subject_from,
subjects_dir=tempdir)
if subject_from == 'fsaverage': # identity transform
source_rr = np.concatenate([s['rr'][s['vertno']]
for s in src_from]) * 1e3
assert_allclose(mni_from, source_rr)
if subject_from == 'fsaverage':
overwrite = skip_fiducials = False
else:
with pytest.raises(IOError, match='No fiducials file'):
scale_mri(subject_from, subject_to, scale,
subjects_dir=tempdir)
skip_fiducials = True
with pytest.raises(IOError, match='already exists'):
scale_mri(subject_from, subject_to, scale,
subjects_dir=tempdir, skip_fiducials=skip_fiducials)
overwrite = True
if subject_from == 'sample': # support for not needing all surf files
os.remove(op.join(sample_dir, 'surf', 'lh.curv'))
scale_mri(subject_from, subject_to, scale, subjects_dir=tempdir,
verbose='debug', overwrite=overwrite,
skip_fiducials=skip_fiducials)
if subject_from == 'fsaverage':
assert _is_mri_subject(subject_to, tempdir), "Scaling failed"
src_to_fname = op.join(tempdir, subject_to, 'bem',
'%s-%s-src.fif' % (subject_to, spacing))
assert op.exists(src_to_fname), "Source space was not scaled"
# Check MRI scaling
fname_mri = op.join(tempdir, subject_to, 'mri', 'T1.mgz')
assert op.exists(fname_mri), "MRI was not scaled"
# Check MNI transform
src = mne.read_source_spaces(src_to_fname)
vertices = np.concatenate([s['vertno'] for s in src])
assert_array_equal(vertices, vertices_from)
mni = mne.vertex_to_mni(vertices, hemis, subject_to,
subjects_dir=tempdir)
assert_allclose(mni, mni_from, atol=1e-3) # 0.001 mm
def test_fit_matched_points():
"""Test fit_matched_points: fitting two matching sets of points."""
tgt_pts = np.random.RandomState(42).uniform(size=(6, 3))
# rotation only
trans = rotation(2, 6, 3)
src_pts = apply_trans(trans, tgt_pts)
trans_est = fit_matched_points(src_pts, tgt_pts, translate=False,
out='trans')
est_pts = apply_trans(trans_est, src_pts)
assert_array_almost_equal(tgt_pts, est_pts, 2, "fit_matched_points with "
"rotation")
# rotation & translation
trans = np.dot(translation(2, -6, 3), rotation(2, 6, 3))
src_pts = apply_trans(trans, tgt_pts)
trans_est = fit_matched_points(src_pts, tgt_pts, out='trans')
est_pts = apply_trans(trans_est, src_pts)
assert_array_almost_equal(tgt_pts, est_pts, 2, "fit_matched_points with "
"rotation and translation.")
# rotation & translation & scaling
trans = reduce(np.dot, (translation(2, -6, 3), rotation(1.5, .3, 1.4),
scaling(.5, .5, .5)))
src_pts = apply_trans(trans, tgt_pts)
trans_est = fit_matched_points(src_pts, tgt_pts, scale=1, out='trans')
est_pts = apply_trans(trans_est, src_pts)
assert_array_almost_equal(tgt_pts, est_pts, 2, "fit_matched_points with "
"rotation, translation and scaling.")
# test exceeding tolerance
tgt_pts[0, :] += 20
pytest.raises(RuntimeError, fit_matched_points, tgt_pts, src_pts, tol=10)
@testing.requires_testing_data
@requires_nibabel()
def test_get_mni_fiducials():
"""Test get_mni_fiducials."""
subjects_dir = op.join(data_path, 'subjects')
fid_fname = op.join(subjects_dir, 'sample', 'bem',
'sample-fiducials.fif')
fids, coord_frame = read_fiducials(fid_fname)
assert coord_frame == FIFF.FIFFV_COORD_MRI
assert [f['ident'] for f in fids] == list(range(1, 4))
fids = np.array([f['r'] for f in fids])
fids_est = get_mni_fiducials('sample', subjects_dir)
fids_est = np.array([f['r'] for f in fids_est])
dists = np.linalg.norm(fids - fids_est, axis=-1) * 1000. # -> mm
assert (dists < 8).all(), dists
run_tests_if_main()
|
import posixpath
import re
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import hpc_util
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import cuda_toolkit
from perfkitbenchmarker.linux_packages import nvidia_driver
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'robertammlm'
BENCHMARK_CONFIG = """
robertammlm:
  description: Runs FairSeq Roberta Masked Multilingual LM benchmark
vm_groups:
default:
os_type: ubuntu1604
disk_spec: *default_500_gb
vm_count: 2
vm_spec:
GCP:
machine_type: n1-highmem-96
zone: us-west1-b
image_family: tf-latest-gpu-gvnic
image_project: deeplearning-platform-release
boot_disk_size: 105
boot_disk_type: pd-ssd
gpu_type: v100
gpu_count: 1
AWS:
machine_type: p3dn.24xlarge
boot_disk_size: 105
zone: us-east-1a
image: ami-0a4a0d42e3b855a2c
Azure:
machine_type: Standard_ND40s_v2
zone: eastus
boot_disk_size: 105
"""
NVPROF = 'nvprof'
TFPROF = 'tfprof'
NONE = 'none'
DATA_PATH = '/tmp/data'
HOSTFILE = 'HOSTFILE'
# Facebook AI Research Sequence-to-Sequence Toolkit written in Python
FAIRSEQ_GIT = 'https://github.com/taylanbil/fairseq.git '
FAIRSEQ_BRANCH = 'synth-data-roberta'
# The raw WikiText103 dataset
WIKI_TEXT = 'https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-103-raw-v1.zip'
ENCODER_JSON = 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/encoder.json'
VOCAB_BPE = 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/vocab.bpe'
FAIRSEQ_DICT = 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/dict.txt'
WORD_COUNT = 249997
METADATA_COLUMNS = ('epoch', 'step', 'steps per epoch', 'loss', 'nll_loss',
'ppl', 'wps', 'ups', 'wpb', 'bsz', 'num_updates', 'lr',
'gnorm', 'clip', 'oom', 'loss_scale', 'wall', 'train_wall')
flags.DEFINE_integer('robertammlm_max_sentences', 2, 'max sentences')
flags.DEFINE_integer('robertammlm_log_interval', 10, 'log interval')
flags.DEFINE_integer('robertammlm_nproc_per_node', 8, 'nproc per node')
flags.DEFINE_integer('robertammlm_update_freq', None, 'update frequency')
flags.DEFINE_integer('robertammlm_num_copies', None,
'num of training data copies.')
flags.DEFINE_integer('robertammlm_global_batch_size', 8192, 'global batch size')
flags.DEFINE_integer('robertammlm_max_epoch', 1, 'max number of epochs')
flags.DEFINE_enum('robertammlm_profiler', None, [NVPROF, TFPROF],
                  'profiler used to analyze GPU training')
def GetConfig(user_config):
"""Load and return benchmark config.
Args:
user_config: user supplied configuration (flags and config file)
Returns:
loaded benchmark configuration
"""
config = configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
return config
def _UpdateBenchmarkSpecWithFlags(benchmark_spec):
"""Update the benchmark_spec with supplied command line flags.
Args:
benchmark_spec: benchmark specification to update
"""
benchmark_spec.max_sentences = FLAGS.robertammlm_max_sentences
benchmark_spec.nproc_per_node = FLAGS.robertammlm_nproc_per_node
benchmark_spec.log_interval = FLAGS.robertammlm_log_interval
benchmark_spec.profiler = FLAGS.robertammlm_profiler
benchmark_spec.max_epoch = FLAGS.robertammlm_max_epoch
vms = benchmark_spec.vms
vm = vms[0]
num_vms = len(vms)
benchmark_spec.num_vms = num_vms
benchmark_spec.global_batch_size = FLAGS.robertammlm_global_batch_size
num_accelerators = nvidia_driver.QueryNumberOfGpus(vm) * num_vms
benchmark_spec.num_accelerators = num_accelerators
if FLAGS.robertammlm_update_freq:
benchmark_spec.update_freq = FLAGS.robertammlm_update_freq
else:
benchmark_spec.update_freq = (benchmark_spec.global_batch_size // (
benchmark_spec.max_sentences * num_accelerators))
if FLAGS.robertammlm_num_copies:
benchmark_spec.num_copies = FLAGS.robertammlm_num_copies
else:
benchmark_spec.num_copies = max(1, num_accelerators // 32)
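def _ExampleUpdateFreq(global_batch_size=8192, max_sentences=2,
                       num_accelerators=16):
  """Illustrative sketch (added example, not part of the original benchmark).

  Mirrors the fallback above: gradients are accumulated until the per-step
  work adds up to the requested global batch size, e.g. 8192 // (2 * 16)
  == 256. The accelerator count here is a made-up value for illustration.
  """
  return global_batch_size // (max_sentences * num_accelerators)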
def _DownloadData(benchmark_spec, rank):
"""Downloads train valid and test on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
rank: integer, the node rank in distributed training.
"""
vm = benchmark_spec.vms[rank]
vm.InstallPackages('python3-pip')
vm.Install('wget')
vm.RemoteCommand('[ -d $HOME/fairseq ] || git clone {git} -b {branch}'
.format(git=FAIRSEQ_GIT, branch=FAIRSEQ_BRANCH))
vm.RemoteCommand(f'{FLAGS.torch_env} python3 -m pip install pyarrow')
vm.RemoteCommand(f'cd fairseq && {FLAGS.torch_env} python3 -m pip install '
'--editable .')
vm.RemoteCommand('mkdir -p {}'.format(DATA_PATH))
text_zip = posixpath.join(DATA_PATH, posixpath.basename(WIKI_TEXT))
vm.RemoteCommand('wget -O {des} {src}'.format(des=text_zip, src=WIKI_TEXT))
vm.RemoteCommand('unzip {text_zip} -d {data_path}'
.format(data_path=DATA_PATH, text_zip=text_zip))
bpe_dir = posixpath.join(DATA_PATH, 'gpt2_bpe')
vm.RemoteCommand('mkdir -p {}'.format(bpe_dir))
vm.RemoteCommand('wget -O {des}/encoder.json {src}'
.format(des=bpe_dir, src=ENCODER_JSON))
vm.RemoteCommand('wget -O {des}/vocab.bpe {src}'
.format(des=bpe_dir, src=VOCAB_BPE))
for phase in ('train', 'valid', 'test'):
vm.RemoteCommand(
f'cd {DATA_PATH} && {FLAGS.torch_env} python3 -m '
'examples.roberta.multiprocessing_bpe_encoder '
'--encoder-json gpt2_bpe/encoder.json '
'--vocab-bpe gpt2_bpe/vocab.bpe '
f'--inputs wikitext-103-raw/wiki.{phase}.raw '
f'--outputs wikitext-103-raw/wiki.{phase}.bpe '
'--keep-empty '
'--workers 60 ')
vm.RemoteCommand('wget -O {des}/dict.txt {src}'
.format(des=bpe_dir, src=FAIRSEQ_DICT))
vm.RemoteCommand(
f'cd {DATA_PATH} && {FLAGS.torch_env} fairseq-preprocess '
'--only-source --srcdict gpt2_bpe/dict.txt '
'--trainpref wikitext-103-raw/wiki.train.bpe '
'--validpref wikitext-103-raw/wiki.valid.bpe '
'--testpref wikitext-103-raw/wiki.test.bpe '
'--destdir data-bin/wikitext-103 '
'--workers 60')
data_bin = posixpath.join(DATA_PATH, 'data-bin')
vm.RemoteCommand('mkdir -p {}/mlm-w103'.format(data_bin))
vm.RemoteCommand('for x in `seq 1 {word_count}`;'
'do echo "$x 1" >> {data_bin}/mlm-w103/dict.txt;'
'done'.format(word_count=WORD_COUNT, data_bin=data_bin))
for copy in range(benchmark_spec.num_copies):
vm.RemoteCommand('cp -r {data_bin}/wikitext-103 {data_bin}/mlm-w103/{copy}'
.format(data_bin=data_bin, copy=copy))
vm.RemoteCommand('cp {data_bin}/mlm-w103/dict.txt {data_bin}/mlm-w103/'
'{copy}'.format(data_bin=data_bin, copy=copy))
def _PrepareVm(benchmark_spec, rank):
vm = benchmark_spec.vms[rank]
vm.InstallPackages('python3-pip')
if nvidia_driver.CheckNvidiaGpuExists(vm):
vm.Install('cuda_toolkit')
vm.AuthenticateVm()
vm.Install('openmpi')
vm.Install('nccl')
_DownloadData(benchmark_spec, rank)
vm.Install('pytorch')
def Prepare(benchmark_spec):
"""Install and set up RoBERTa mmlm on the target vm..
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
vms = benchmark_spec.vms
benchmark_spec.always_call_cleanup = True
list_params = [((benchmark_spec, rank), {})
for rank in range(benchmark_spec.num_vms)]
vm_util.RunThreaded(_PrepareVm, list_params)
master = vms[0]
if nvidia_driver.CheckNvidiaGpuExists(master):
gpus_per_vm = nvidia_driver.QueryNumberOfGpus(master)
hpc_util.CreateMachineFile(vms, lambda _: gpus_per_vm, HOSTFILE)
def _CreateMetadataDict(benchmark_spec):
"""Create metadata dict to be used in run results.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
metadata dict
"""
return {
'model': 'roberta-mmlm',
'log-interval': benchmark_spec.log_interval,
'max-sentences': benchmark_spec.max_sentences,
'nproc-per-node': benchmark_spec.nproc_per_node,
'update-freq': benchmark_spec.update_freq,
'global-batch-size': benchmark_spec.global_batch_size,
'profiler': benchmark_spec.profiler,
'max-epoch': benchmark_spec.max_epoch,
'num_accelerators': benchmark_spec.num_accelerators,
'nccl_version': FLAGS.nccl_version,
'nccl_net_plugin': FLAGS.nccl_net_plugin,
'nccl_extra_params': FLAGS.nccl_extra_params,
}
def MakeSamplesFromOutput(metadata, output):
"""Create samples containing metrics.
Args:
    metadata: dict containing all the metadata to report with each sample.
output: string, command output
Returns:
Samples containing training metrics.
"""
results = regex_util.ExtractAllMatches(
r'^\| epoch (\d+):\s+'
r'(\d+) / (\d+) '
r'loss=(\S+), '
r'nll_loss=(\S+), '
r'ppl=(\S+), '
r'wps=(\S+), '
r'ups=(\S+), '
r'wpb=(\S+), '
r'bsz=(\S+), '
r'num_updates=(\S+), '
r'lr=(\S+), '
r'gnorm=(\S+), '
r'clip=(\S+), '
r'oom=(\S+), '
r'loss_scale=(\S+), '
r'wall=(\S+), '
r'train_wall=(\S+)$',
output, re.MULTILINE)
samples = []
for row in results:
metadata_copy = metadata.copy()
metadata_copy.update(zip(METADATA_COLUMNS, row))
wps = float(metadata_copy['wps'])
samples.append(sample.Sample('wps', wps, 'wps', metadata_copy))
samples.append(sample.Sample('wps per accelerator',
wps / metadata['num_accelerators'],
'wps', metadata_copy))
return samples
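def _ExampleMakeSamples():
  """Illustrative sketch (added example, not part of the original benchmark).

  Runs one synthetic FairSeq progress line (shaped to match the regex above;
  every number is made up) through MakeSamplesFromOutput to show the two
  samples emitted per matched line.
  """
  metadata = {'num_accelerators': 16}  # hypothetical accelerator count
  output = ('| epoch 001:     10 / 1484 loss=17.8, nll_loss=17.7, '
            'ppl=210995, wps=32298, ups=0.16, wpb=196293, bsz=64, '
            'num_updates=16, lr=4.3e-07, gnorm=4.9, clip=0, oom=0, '
            'loss_scale=8, wall=52, train_wall=48')
  samples = MakeSamplesFromOutput(metadata, output)
  assert [s.metric for s in samples] == ['wps', 'wps per accelerator']
  assert samples[1].value == 32298.0 / 16
  return samples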
def _Run(benchmark_spec, rank):
"""Run the benchmark on the target vm.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
rank: integer, the node rank in distributed training.
Returns:
A list of sample.Sample objects.
"""
vm = benchmark_spec.vms[rank]
master = benchmark_spec.vms[0]
nccl_env = []
if FLAGS.nccl_cuda_visible_devices:
nccl_env.append('CUDA_VISIBLE_DEVICES={}'
.format(FLAGS.nccl_cuda_visible_devices))
nccl_env.extend(FLAGS.nccl_extra_params)
prof_cmd = ''
if benchmark_spec.profiler:
prof_cmd = (r'{}/bin/nvprof --profile-child-processes '
r'-o /tmp/pkb/%h.%p.nvprof'.format(cuda_toolkit.CUDA_HOME))
distributed_cmd = (
'torch.distributed.launch '
'--nproc_per_node={nproc_per_node} '
'--nnodes={num_vms} '
'--node_rank={rank} '
'--master_addr={addr} '
'--master_port=2222'
.format(num_vms=benchmark_spec.num_vms,
rank=rank,
addr=master.internal_ip,
nproc_per_node=benchmark_spec.nproc_per_node))
cmd_flags = {
'adam-betas': "'(0.9, 0.98)'",
'adam-eps': 1e-06,
'arch': 'roberta_large',
'attention-dropout': 0.1,
'clip-norm': 1.0,
'criterion': 'masked_lm',
'disable-validation': '',
'distributed-no-spawn': '',
'dropout': 0.1,
'fast-stat-sync': '',
'log-format': 'simple',
'lr': 0.0004,
'lr-scheduler': 'polynomial_decay',
'max-tokens': 6000,
'max-update': 1500000,
'memory-efficient-fp16': '',
'multilang-sampling-alpha': 0.7,
'num-workers': 4,
'no-epoch-checkpoints': '',
'no-save': '',
'optimizer': 'adam',
'sample-break-mode': 'complete',
'save-interval-updates': 3000,
'task': 'multilingual_masked_lm',
'tokens-per-sample': 512,
'total-num-update': 1500000,
'train-subset': 'train',
'valid-subset': 'valid',
'warmup-updates': 15000,
'weight-decay': 0.01,
}
cmd_flags.update({
'log-interval': benchmark_spec.log_interval,
'max-sentences': benchmark_spec.max_sentences,
'update-freq': benchmark_spec.update_freq,
'max-epoch': benchmark_spec.max_epoch,
})
roberta_benchmark_flags = ' '.join(
f'--{key}={value}' if value else f'--{key}'
for key, value in sorted(cmd_flags.items()))
roberta_benchmark_cmd = (
f'{FLAGS.torch_env} DGXSYSTEM=DGX1 NEXP=1 PULL=0 LOGDIR=/tmp/robertammlm '
f'{" ".join(nccl_env)} {prof_cmd} python3 -m {distributed_cmd} '
f'$HOME/fairseq/train.py {DATA_PATH}/data-bin/mlm-w103 '
f'{roberta_benchmark_flags}')
metadata = _CreateMetadataDict(benchmark_spec)
stdout, _ = vm.RobustRemoteCommand(roberta_benchmark_cmd, should_log=True)
return MakeSamplesFromOutput(metadata, stdout) if master == vm else []
def Run(benchmark_spec):
_UpdateBenchmarkSpecWithFlags(benchmark_spec)
samples = []
list_params = [((benchmark_spec, rank), {})
for rank in range(benchmark_spec.num_vms)]
for results in vm_util.RunThreaded(_Run, list_params):
samples.extend(results)
return samples
def Cleanup(_):
"""Cleanup on the cluster."""
pass
|
import errno
import os
import re
from collections import defaultdict
from . import _vc
class Vc(_vc.Vc):
CMD = "bzr"
CMDARGS = ["--no-aliases", "--no-plugins"]
NAME = "Bazaar"
VC_DIR = ".bzr"
PATCH_INDEX_RE = "^=== modified file '(.*)' (.*)$"
CONFLICT_RE = "conflict in (.*)$"
RENAMED_RE = "^(.*) => (.*)$"
commit_statuses = (
_vc.STATE_MODIFIED, _vc.STATE_RENAMED, _vc.STATE_NEW, _vc.STATE_REMOVED
)
conflict_map = {
_vc.CONFLICT_BASE: '.BASE',
_vc.CONFLICT_OTHER: '.OTHER',
_vc.CONFLICT_THIS: '.THIS',
_vc.CONFLICT_MERGED: '',
}
# We use None here to indicate flags that we don't deal with or care about
state_1_map = {
" ": None, # First status column empty
"+": None, # File versioned
"-": None, # File unversioned
"R": _vc.STATE_RENAMED, # File renamed
"?": _vc.STATE_NONE, # File unknown
"X": None, # File nonexistent (and unknown to bzr)
"C": _vc.STATE_CONFLICT, # File has conflicts
"P": None, # Entry for a pending merge (not a file)
}
state_2_map = {
" ": _vc.STATE_NORMAL, # Second status column empty
"N": _vc.STATE_NEW, # File created
"D": _vc.STATE_REMOVED, # File deleted
"K": None, # File kind changed
"M": _vc.STATE_MODIFIED, # File modified
}
state_3_map = {
" ": None,
"*": _vc.STATE_MODIFIED,
"/": _vc.STATE_MODIFIED,
"@": _vc.STATE_MODIFIED,
}
valid_status_re = r'[%s][%s][%s]\s*' % (''.join(state_1_map.keys()),
''.join(state_2_map.keys()),
''.join(state_3_map.keys()),)
def add(self, runner, files):
fullcmd = [self.CMD] + self.CMDARGS
command = [fullcmd, 'add']
runner(command, files, refresh=True, working_dir=self.root)
def commit(self, runner, files, message):
fullcmd = [self.CMD] + self.CMDARGS
command = [fullcmd, 'commit', '-m', message]
runner(command, [], refresh=True, working_dir=self.root)
def revert(self, runner, files):
runner(
[self.CMD] + self.CMDARGS + ["revert"] + files, [], refresh=True,
working_dir=self.root)
def push(self, runner):
runner(
[self.CMD] + self.CMDARGS + ["push"], [], refresh=True,
working_dir=self.root)
def update(self, runner):
# TODO: Handle checkouts/bound branches by calling
# update instead of pull. For now we've replicated existing
# functionality, as update will not work for unbound branches.
runner(
[self.CMD] + self.CMDARGS + ["pull"], [], refresh=True,
working_dir=self.root)
def resolve(self, runner, files):
runner(
[self.CMD] + self.CMDARGS + ["resolve"] + files, [], refresh=True,
working_dir=self.root)
def remove(self, runner, files):
runner(
[self.CMD] + self.CMDARGS + ["rm"] + files, [], refresh=True,
working_dir=self.root)
@classmethod
def valid_repo(cls, path):
return not _vc.call([cls.CMD, "root"], cwd=path)
def get_files_to_commit(self, paths):
files = []
for p in paths:
if os.path.isdir(p):
for path, status in self._tree_cache.items():
if status in self.commit_statuses and path.startswith(p):
files.append(os.path.relpath(path, self.root))
else:
files.append(os.path.relpath(p, self.root))
return sorted(list(set(files)))
def _update_tree_state_cache(self, path):
# FIXME: This actually clears out state information, because the
# current API doesn't have any state outside of _tree_cache.
branch_root = _vc.popen(
[self.CMD] + self.CMDARGS + ["root", path],
cwd=self.location).read().rstrip('\n')
entries = []
while 1:
try:
proc = _vc.popen([self.CMD] + self.CMDARGS +
["status", "-S", "--no-pending", branch_root])
entries = proc.read().split("\n")[:-1]
break
except OSError as e:
if e.errno != errno.EAGAIN:
raise
tree_cache = defaultdict(set)
tree_meta_cache = defaultdict(list)
self._rename_cache = rename_cache = {}
self._reverse_rename_cache = {}
# Files can appear twice in the list if they conflict and were renamed
# at once.
for entry in entries:
meta = []
old_name = None
state_string, name = entry[:3], entry[4:].strip()
if not re.match(self.valid_status_re, state_string):
continue
state1 = self.state_1_map.get(state_string[0])
state2 = self.state_2_map.get(state_string[1])
state3 = self.state_3_map.get(state_string[2])
states = {state1, state2, state3} - {None}
if _vc.STATE_CONFLICT in states:
real_path_match = re.search(self.CONFLICT_RE, name)
if real_path_match is not None:
name = real_path_match.group(1)
if _vc.STATE_RENAMED in states:
real_path_match = re.search(self.RENAMED_RE, name)
if real_path_match is not None:
old_name = real_path_match.group(1)
name = real_path_match.group(2)
meta.append("%s ➡ %s" % (old_name, name))
path = os.path.join(branch_root, name)
if old_name:
old_path = os.path.join(branch_root, old_name)
rename_cache[old_path] = path
if state3 and state3 is _vc.STATE_MODIFIED:
# line = _vc.popen(self.diff_command() + [path]).readline()
line = _vc.popen(['bzr', 'diff', path]).readline()
executable_match = re.search(self.PATCH_INDEX_RE, line)
if executable_match:
meta.append(executable_match.group(2))
path = path[:-1] if path.endswith('/') else path
tree_cache[path].update(states)
tree_meta_cache[path].extend(meta)
# Bazaar entries will only be REMOVED in the second state column
self._add_missing_cache_entry(path, state2)
# Handle any renames now
for old, new in rename_cache.items():
if old in tree_cache:
tree_cache[new].update(tree_cache[old])
tree_meta_cache[new].extend(tree_meta_cache[old])
del tree_cache[old]
del tree_meta_cache[old]
self._reverse_rename_cache[new] = old
self._tree_cache.update(
dict((x, max(y)) for x, y in tree_cache.items()))
self._tree_meta_cache = dict(tree_meta_cache)
def get_path_for_repo_file(self, path, commit=None):
if not path.startswith(self.root + os.path.sep):
raise _vc.InvalidVCPath(self, path, "Path not in repository")
path = path[len(self.root) + 1:]
args = [self.CMD, "cat", path]
if commit:
args.append("-r%s" % commit)
return _vc.call_temp_output(args, cwd=self.root)
def get_path_for_conflict(self, path, conflict):
if path in self._reverse_rename_cache and not \
conflict == _vc.CONFLICT_MERGED:
path = self._reverse_rename_cache[path]
if not path.startswith(self.root + os.path.sep):
raise _vc.InvalidVCPath(self, path, "Path not in repository")
# bzr paths are all temporary files
return "%s%s" % (path, self.conflict_map[conflict]), False
|
from __future__ import division, print_function
import sys
try: # python 2.4 and 2.5 compat
bytes = bytes
except NameError:
bytes = str
PYTHON3 = sys.version_info > (3, 0)
# for iterating over byte strings:
# ord2 calls ord in python2 only
# chr2 converts an ordinal value to a length-1 byte string
# B returns a byte string in all supported python versions
# bytes3 creates a byte string from a list of ordinal values
if PYTHON3:
ord2 = lambda x: x
chr2 = lambda x: bytes([x])
B = lambda x: x.encode('iso8859-1')
bytes3 = bytes
text_type = str
xrange = range
text_types = (str,)
def reraise(tp, value, tb=None):
"""
Reraise an exception.
Taken from "six" library (https://pythonhosted.org/six/).
"""
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
else:
ord2 = ord
chr2 = chr
B = lambda x: x
bytes3 = lambda x: bytes().join([chr(c) for c in x])
text_type = unicode
xrange = xrange
text_types = (str, unicode)
"""
Reraise an exception.
Taken from "six" library (https://pythonhosted.org/six/).
"""
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
try:
raise tp, value, tb
finally:
tb = None
""")
def with_metaclass(meta, *bases):
"""
Create a base class with a metaclass.
Taken from "six" library (https://pythonhosted.org/six/).
"""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
return meta.__prepare__(name, bases)
return type.__new__(metaclass, 'temporary_class', (), {})
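def _compat_examples():
    """Illustrative sketch (added example, not part of the original module).

    Demonstrates the byte-string helpers and with_metaclass defined above;
    each assertion holds on both Python 2 and Python 3.
    """
    # iterate a byte string as ordinals, then rebuild it from ordinals
    assert [ord2(b) for b in B('ab')] == [97, 98]
    assert bytes3([104, 105]) == B('hi')

    # with_metaclass applies the metaclass exactly once, at class creation
    class Meta(type):
        pass

    class Base(with_metaclass(Meta, object)):
        pass

    assert type(Base) is Meta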
|
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
POSTS = (("posts/*.txt", "posts", "post.tmpl"),
("posts/*.txt", "posts", "post.tmpl"))
""",
)
with cd(target_dir):
__main__.main(["build"])
|
from django.db import migrations
from django.db.models import Count
def update_counts(apps, schema_editor):
Comment = apps.get_model("trans", "Comment")
Profile = apps.get_model("accounts", "Profile")
db_alias = schema_editor.connection.alias
comments = Comment.objects.using(db_alias).values_list("user").annotate(Count("id"))
for userid, count in comments:
Profile.objects.using(db_alias).filter(user_id=userid).update(commented=count)
class Migration(migrations.Migration):
dependencies = [
("accounts", "0004_profile_commented"),
]
operations = [
migrations.RunPython(update_counts, migrations.RunPython.noop, elidable=True),
]
|
from __future__ import print_function
from xdrlib import Packer, Unpacker
import socket
slope_str2int = {'zero': 0,
'positive': 1,
'negative': 2,
'both': 3,
'unspecified': 4}
# could be autogenerated from previous but whatever
slope_int2str = {0: 'zero',
1: 'positive',
2: 'negative',
3: 'both',
4: 'unspecified'}
class Gmetric:
"""
Class to send gmetric/gmond 2.X packets
Thread safe
"""
type = ('', 'string', 'uint16', 'int16', 'uint32', 'int32', 'float',
'double', 'timestamp')
protocol = ('udp', 'multicast')
def __init__(self, host, port, protocol):
if protocol not in self.protocol:
raise ValueError("Protocol must be one of: " + str(self.protocol))
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if protocol == 'multicast':
self.socket.setsockopt(socket.IPPROTO_IP,
socket.IP_MULTICAST_TTL, 20)
self.hostport = (host, int(port))
# self.socket.connect(self.hostport)
def send(self, NAME, VAL, TYPE='', UNITS='', SLOPE='both',
TMAX=60, DMAX=0, GROUP=""):
if SLOPE not in slope_str2int:
raise ValueError("Slope must be one of: " + str(self.slope.keys()))
if TYPE not in self.type:
raise ValueError("Type must be one of: " + str(self.type))
if len(NAME) == 0:
raise ValueError("Name must be non-empty")
(meta_msg, data_msg) = gmetric_write(NAME,
VAL,
TYPE,
UNITS,
SLOPE,
TMAX,
DMAX,
GROUP)
# print(msg)
self.socket.sendto(meta_msg, self.hostport)
self.socket.sendto(data_msg, self.hostport)
def gmetric_write(NAME, VAL, TYPE, UNITS, SLOPE, TMAX, DMAX, GROUP):
"""
Arguments are in all upper-case to match XML
"""
packer = Packer()
HOSTNAME = "test"
SPOOF = 0
# Meta data about a metric
packer.pack_int(128)
packer.pack_string(HOSTNAME)
packer.pack_string(NAME)
packer.pack_int(SPOOF)
packer.pack_string(TYPE)
packer.pack_string(NAME)
packer.pack_string(UNITS)
packer.pack_int(slope_str2int[SLOPE]) # map slope string to int
packer.pack_uint(int(TMAX))
packer.pack_uint(int(DMAX))
# Magic number. Indicates number of entries to follow. Put in 1 for GROUP
if GROUP == "":
packer.pack_int(0)
else:
packer.pack_int(1)
packer.pack_string("GROUP")
packer.pack_string(GROUP)
# Actual data sent in a separate packet
data = Packer()
data.pack_int(128 + 5)
data.pack_string(HOSTNAME)
data.pack_string(NAME)
data.pack_int(SPOOF)
data.pack_string("%s")
data.pack_string(str(VAL))
return packer.get_buffer(), data.get_buffer()
def gmetric_read(msg):
unpacker = Unpacker(msg)
values = dict()
unpacker.unpack_int()
values['TYPE'] = unpacker.unpack_string()
values['NAME'] = unpacker.unpack_string()
values['VAL'] = unpacker.unpack_string()
values['UNITS'] = unpacker.unpack_string()
values['SLOPE'] = slope_int2str[unpacker.unpack_int()]
values['TMAX'] = unpacker.unpack_uint()
values['DMAX'] = unpacker.unpack_uint()
unpacker.done()
return values
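def _example_unpack_data_packet():
    """Illustrative sketch (added example, not part of the original module).

    Unpacks the *data* packet produced by gmetric_write to show the XDR field
    order; the metric name and value are made up. Follows the module's
    Python 2 era xdrlib usage, where pack_string accepts plain str.
    """
    meta_msg, data_msg = gmetric_write('cpu_idle', '42', 'uint32', '%',
                                       'both', 60, 0, '')
    unpacker = Unpacker(data_msg)
    assert unpacker.unpack_int() == 128 + 5       # data packet identifier
    assert unpacker.unpack_string() == 'test'     # HOSTNAME hard-coded above
    assert unpacker.unpack_string() == 'cpu_idle'
    assert unpacker.unpack_int() == 0             # SPOOF
    assert unpacker.unpack_string() == '%s'       # printf-style format
    assert unpacker.unpack_string() == '42'       # the value, as a string
    unpacker.done()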
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option("", "--protocol", dest="protocol", default="udp",
help="The gmetric internet protocol, either udp or" +
"multicast, default udp")
parser.add_option("", "--host", dest="host", default="127.0.0.1",
help="GMond aggregator hostname to send data to")
parser.add_option("", "--port", dest="port", default="8649",
help="GMond aggregator port to send data to")
parser.add_option("", "--name", dest="name", default="",
help="The name of the metric")
parser.add_option("", "--value", dest="value", default="",
help="The value of the metric")
parser.add_option("", "--units", dest="units", default="",
help="The units for the value, e.g. 'kb/sec'")
parser.add_option("", "--slope", dest="slope", default="both",
help="Sign of the derivative of the value over time," +
"one of zero, positive, negative, both (default)")
parser.add_option("", "--type", dest="type", default="",
help="The value data type, one of string, int8, uint8," +
"int16, uint16, int32, uint32, float, double")
parser.add_option("", "--tmax", dest="tmax", default="60",
help="Maximum time in seconds between gmetric calls," +
"default 60")
parser.add_option("", "--dmax", dest="dmax", default="0",
help="Lifetime in seconds of this metric, default=0," +
"meaning unlimited")
parser.add_option("", "--group", dest="group", default="",
help="Group metric belongs to. If not specified Ganglia" +
"will show it as no_group")
(options, args) = parser.parse_args()
g = Gmetric(options.host, options.port, options.protocol)
g.send(options.name, options.value, options.type, options.units,
options.slope, options.tmax, options.dmax, options.group)
|
import logging
from homeassistant.const import (
ATTR_STATE,
CONF_DEVICES,
CONF_NAME,
CONF_SWITCHES,
CONF_ZONE,
)
from homeassistant.helpers.entity import ToggleEntity
from .const import (
CONF_ACTIVATION,
CONF_MOMENTARY,
CONF_PAUSE,
CONF_REPEAT,
DOMAIN as KONNECTED_DOMAIN,
STATE_HIGH,
STATE_LOW,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches attached to a Konnected device from a config entry."""
data = hass.data[KONNECTED_DOMAIN]
device_id = config_entry.data["id"]
switches = [
KonnectedSwitch(device_id, zone_data.get(CONF_ZONE), zone_data)
for zone_data in data[CONF_DEVICES][device_id][CONF_SWITCHES]
]
async_add_entities(switches)
class KonnectedSwitch(ToggleEntity):
"""Representation of a Konnected switch."""
def __init__(self, device_id, zone_num, data):
"""Initialize the Konnected switch."""
self._data = data
self._device_id = device_id
self._zone_num = zone_num
self._activation = self._data.get(CONF_ACTIVATION, STATE_HIGH)
self._momentary = self._data.get(CONF_MOMENTARY)
self._pause = self._data.get(CONF_PAUSE)
self._repeat = self._data.get(CONF_REPEAT)
self._state = self._boolean_state(self._data.get(ATTR_STATE))
self._name = self._data.get(CONF_NAME)
self._unique_id = (
f"{device_id}-{self._zone_num}-{self._momentary}-"
f"{self._pause}-{self._repeat}"
)
@property
def unique_id(self) -> str:
"""Return the unique id."""
return self._unique_id
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return the status of the sensor."""
return self._state
@property
def panel(self):
"""Return the Konnected HTTP client."""
device_data = self.hass.data[KONNECTED_DOMAIN][CONF_DEVICES][self._device_id]
return device_data.get("panel")
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(KONNECTED_DOMAIN, self._device_id)},
}
@property
def available(self):
"""Return whether the panel is available."""
return self.panel.available
async def async_turn_on(self, **kwargs):
"""Send a command to turn on the switch."""
resp = await self.panel.update_switch(
self._zone_num,
int(self._activation == STATE_HIGH),
self._momentary,
self._repeat,
self._pause,
)
if resp.get(ATTR_STATE) is not None:
self._set_state(True)
if self._momentary and resp.get(ATTR_STATE) != -1:
# Immediately set the state back off for momentary switches
self._set_state(False)
async def async_turn_off(self, **kwargs):
"""Send a command to turn off the switch."""
resp = await self.panel.update_switch(
self._zone_num, int(self._activation == STATE_LOW)
)
if resp.get(ATTR_STATE) is not None:
self._set_state(self._boolean_state(resp.get(ATTR_STATE)))
def _boolean_state(self, int_state):
if int_state is None:
return False
if int_state == 0:
return self._activation == STATE_LOW
if int_state == 1:
return self._activation == STATE_HIGH
def _set_state(self, state):
self._state = state
self.async_write_ha_state()
_LOGGER.debug(
"Setting status of %s actuator zone %s to %s",
self._device_id,
self.name,
state,
)
async def async_added_to_hass(self):
"""Store entity_id."""
self._data["entity_id"] = self.entity_id
|
from homeassistant.components.air_quality import AirQualityEntity
from . import BleBoxEntity, create_blebox_entities
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox air quality entity."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxAirQualityEntity, "air_qualities"
)
class BleBoxAirQualityEntity(BleBoxEntity, AirQualityEntity):
"""Representation of a BleBox air quality feature."""
@property
def icon(self):
"""Return the icon."""
return "mdi:blur"
@property
def particulate_matter_0_1(self):
"""Return the particulate matter 0.1 level."""
return self._feature.pm1
@property
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self._feature.pm2_5
@property
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self._feature.pm10
|
__docformat__ = "restructuredtext en"
import email
from encodings import search_function
import sys
from email.utils import parseaddr, parsedate
from email.header import decode_header
from datetime import datetime
from six import text_type, binary_type
try:
from mx.DateTime import DateTime
except ImportError:
DateTime = datetime
import logilab.common as lgc
def decode_QP(string):
parts = []
for decoded, charset in decode_header(string):
        if not charset:
charset = 'iso-8859-15'
# python 3 sometimes returns str and sometimes bytes.
# the 'official' fix is to use the new 'policy' APIs
# https://bugs.python.org/issue24797
# let's just handle this bug ourselves for now
if isinstance(decoded, binary_type):
decoded = decoded.decode(charset, 'replace')
assert isinstance(decoded, text_type)
parts.append(decoded)
if sys.version_info < (3, 3):
# decoding was non-RFC compliant wrt to whitespace handling
# see http://bugs.python.org/issue1079
return u' '.join(parts)
return u''.join(parts)
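def _example_decode_qp():
    """Illustrative sketch (added example, not part of the original module).

    decode_QP collapses RFC 2047 encoded words into one unicode string, while
    undecorated headers pass through (decoded with the iso-8859-15 fallback).
    """
    assert decode_QP('=?utf-8?q?caf=C3=A9?=') == u'caf\xe9'
    assert decode_QP('plain subject') == u'plain subject'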
def message_from_file(fd):
try:
return UMessage(email.message_from_file(fd))
except email.errors.MessageParseError:
return ''
def message_from_string(string):
try:
return UMessage(email.message_from_string(string))
except email.errors.MessageParseError:
return ''
class UMessage:
"""Encapsulates an email.Message instance and returns only unicode objects.
"""
def __init__(self, message):
self.message = message
# email.Message interface #################################################
def get(self, header, default=None):
value = self.message.get(header, default)
if value:
return decode_QP(value)
return value
def __getitem__(self, header):
return self.get(header)
def get_all(self, header, default=()):
return [decode_QP(val) for val in self.message.get_all(header, default)
if val is not None]
def is_multipart(self):
return self.message.is_multipart()
def get_boundary(self):
return self.message.get_boundary()
def walk(self):
for part in self.message.walk():
yield UMessage(part)
def get_payload(self, index=None, decode=False):
message = self.message
if index is None:
payload = message.get_payload(index, decode)
if isinstance(payload, list):
return [UMessage(msg) for msg in payload]
if message.get_content_maintype() != 'text':
return payload
if isinstance(payload, text_type):
return payload
charset = message.get_content_charset() or 'iso-8859-1'
if search_function(charset) is None:
charset = 'iso-8859-1'
return text_type(payload or b'', charset, "replace")
else:
payload = UMessage(message.get_payload(index, decode))
return payload
def get_content_maintype(self):
return text_type(self.message.get_content_maintype())
def get_content_type(self):
return text_type(self.message.get_content_type())
def get_filename(self, failobj=None):
value = self.message.get_filename(failobj)
if value is failobj:
return value
try:
return text_type(value)
except UnicodeDecodeError:
return u'error decoding filename'
# other convenience methods ###############################################
def headers(self):
"""return an unicode string containing all the message's headers"""
values = []
for header in self.message.keys():
values.append(u'%s: %s' % (header, self.get(header)))
return '\n'.join(values)
def multi_addrs(self, header):
"""return a list of 2-uple (name, address) for the given address (which
is expected to be an header containing address such as from, to, cc...)
"""
persons = []
for person in self.get_all(header, ()):
name, mail = parseaddr(person)
persons.append((name, mail))
return persons
def date(self, alternative_source=False, return_str=False):
"""return a datetime object for the email's date or None if no date is
set or if it can't be parsed
"""
value = self.get('date')
if value is None and alternative_source:
unix_from = self.message.get_unixfrom()
if unix_from is not None:
try:
value = unix_from.split(" ", 2)[2]
except IndexError:
pass
if value is not None:
datetuple = parsedate(value)
if datetuple:
if lgc.USE_MX_DATETIME:
return DateTime(*datetuple[:6])
return datetime(*datetuple[:6])
elif not return_str:
return None
return value
|
from dataclasses import dataclass
from typing import List, Optional, Tuple, Union
from urllib.parse import urlparse
from aiohttp.test_utils import TestClient
import arrow
import pytz
from withings_api.common import (
MeasureGetMeasResponse,
NotifyAppli,
NotifyListResponse,
SleepGetSummaryResponse,
UserGetDeviceResponse,
)
from homeassistant import data_entry_flow
import homeassistant.components.api as api
from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN
import homeassistant.components.webhook as webhook
from homeassistant.components.withings import async_unload_entry
from homeassistant.components.withings.common import (
ConfigEntryWithingsApi,
DataManager,
get_all_data_managers,
)
import homeassistant.components.withings.const as const
from homeassistant.config import async_process_ha_core_config
from homeassistant.config_entries import SOURCE_USER, ConfigEntry
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_EXTERNAL_URL,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_METRIC,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.config_entry_oauth2_flow import AUTH_CALLBACK_PATH
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock
from tests.test_util.aiohttp import AiohttpClientMocker
@dataclass
class ProfileConfig:
"""Data representing a user profile."""
profile: str
user_id: int
api_response_user_get_device: Union[UserGetDeviceResponse, Exception]
api_response_measure_get_meas: Union[MeasureGetMeasResponse, Exception]
api_response_sleep_get_summary: Union[SleepGetSummaryResponse, Exception]
api_response_notify_list: Union[NotifyListResponse, Exception]
api_response_notify_revoke: Optional[Exception]
def new_profile_config(
profile: str,
user_id: int,
api_response_user_get_device: Optional[
Union[UserGetDeviceResponse, Exception]
] = None,
api_response_measure_get_meas: Optional[
Union[MeasureGetMeasResponse, Exception]
] = None,
api_response_sleep_get_summary: Optional[
Union[SleepGetSummaryResponse, Exception]
] = None,
api_response_notify_list: Optional[Union[NotifyListResponse, Exception]] = None,
api_response_notify_revoke: Optional[Exception] = None,
) -> ProfileConfig:
"""Create a new profile config immutable object."""
return ProfileConfig(
profile=profile,
user_id=user_id,
api_response_user_get_device=api_response_user_get_device
or UserGetDeviceResponse(devices=[]),
api_response_measure_get_meas=api_response_measure_get_meas
or MeasureGetMeasResponse(
measuregrps=[],
more=False,
offset=0,
timezone=pytz.UTC,
updatetime=arrow.get(12345),
),
api_response_sleep_get_summary=api_response_sleep_get_summary
or SleepGetSummaryResponse(more=False, offset=0, series=[]),
api_response_notify_list=api_response_notify_list
or NotifyListResponse(profiles=[]),
api_response_notify_revoke=api_response_notify_revoke,
)
@dataclass
class WebhookResponse:
"""Response data from a webhook."""
message: str
message_code: int
class ComponentFactory:
"""Manages the setup and unloading of the withing component and profiles."""
def __init__(
self,
hass: HomeAssistant,
api_class_mock: MagicMock,
aiohttp_client,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Initialize the object."""
self._hass = hass
self._api_class_mock = api_class_mock
self._aiohttp_client = aiohttp_client
self._aioclient_mock = aioclient_mock
self._client_id = None
self._client_secret = None
self._profile_configs: Tuple[ProfileConfig, ...] = ()
async def configure_component(
self,
client_id: str = "my_client_id",
client_secret: str = "my_client_secret",
profile_configs: Tuple[ProfileConfig, ...] = (),
) -> None:
"""Configure the wihings component."""
self._client_id = client_id
self._client_secret = client_secret
self._profile_configs = profile_configs
hass_config = {
"homeassistant": {
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC,
CONF_EXTERNAL_URL: "http://127.0.0.1:8080/",
},
api.DOMAIN: {},
const.DOMAIN: {
CONF_CLIENT_ID: self._client_id,
CONF_CLIENT_SECRET: self._client_secret,
const.CONF_USE_WEBHOOK: True,
},
}
await async_process_ha_core_config(self._hass, hass_config.get("homeassistant"))
assert await async_setup_component(self._hass, HA_DOMAIN, {})
assert await async_setup_component(self._hass, webhook.DOMAIN, hass_config)
assert await async_setup_component(self._hass, const.DOMAIN, hass_config)
await self._hass.async_block_till_done()
@staticmethod
def _setup_api_method(api_method, value) -> None:
if isinstance(value, Exception):
api_method.side_effect = value
else:
api_method.return_value = value
async def setup_profile(self, user_id: int) -> ConfigEntryWithingsApi:
"""Set up a user profile through config flows."""
profile_config = next(
iter(
[
profile_config
for profile_config in self._profile_configs
if profile_config.user_id == user_id
]
)
)
api_mock: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi)
ComponentFactory._setup_api_method(
api_mock.user_get_device, profile_config.api_response_user_get_device
)
ComponentFactory._setup_api_method(
api_mock.sleep_get_summary, profile_config.api_response_sleep_get_summary
)
ComponentFactory._setup_api_method(
api_mock.measure_get_meas, profile_config.api_response_measure_get_meas
)
ComponentFactory._setup_api_method(
api_mock.notify_list, profile_config.api_response_notify_list
)
ComponentFactory._setup_api_method(
api_mock.notify_revoke, profile_config.api_response_notify_revoke
)
self._api_class_mock.reset_mocks()
self._api_class_mock.return_value = api_mock
# Get the withings config flow.
result = await self._hass.config_entries.flow.async_init(
const.DOMAIN, context={"source": SOURCE_USER}
)
assert result
# pylint: disable=protected-access
state = config_entry_oauth2_flow._encode_jwt(
self._hass, {"flow_id": result["flow_id"]}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
"https://account.withings.com/oauth2_user/authorize2?"
f"response_type=code&client_id={self._client_id}&"
"redirect_uri=http://127.0.0.1:8080/auth/external/callback&"
f"state={state}"
"&scope=user.info,user.metrics,user.activity,user.sleepevents"
)
# Simulate user being redirected from withings site.
client: TestClient = await self._aiohttp_client(self._hass.http.app)
resp = await client.get(f"{AUTH_CALLBACK_PATH}?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
self._aioclient_mock.clear_requests()
self._aioclient_mock.post(
"https://account.withings.com/oauth2/token",
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
"userid": profile_config.user_id,
},
)
# Present user with a list of profiles to choose from.
result = await self._hass.config_entries.flow.async_configure(result["flow_id"])
assert result.get("type") == "form"
assert result.get("step_id") == "profile"
assert "profile" in result.get("data_schema").schema
# Provide the user profile.
result = await self._hass.config_entries.flow.async_configure(
result["flow_id"], {const.PROFILE: profile_config.profile}
)
# Finish the config flow by calling it again.
assert result.get("type") == "create_entry"
assert result.get("result")
config_data = result.get("result").data
assert config_data.get(const.PROFILE) == profile_config.profile
assert config_data.get("auth_implementation") == const.DOMAIN
assert config_data.get("token")
# Wait for remaining tasks to complete.
await self._hass.async_block_till_done()
# Mock the webhook.
data_manager = get_data_manager_by_user_id(self._hass, user_id)
self._aioclient_mock.clear_requests()
self._aioclient_mock.request(
"HEAD",
data_manager.webhook_config.url,
)
return self._api_class_mock.return_value
async def call_webhook(self, user_id: int, appli: NotifyAppli) -> WebhookResponse:
"""Call the webhook to notify of data changes."""
client: TestClient = await self._aiohttp_client(self._hass.http.app)
data_manager = get_data_manager_by_user_id(self._hass, user_id)
resp = await client.post(
urlparse(data_manager.webhook_config.url).path,
data={"userid": user_id, "appli": appli.value},
)
# Wait for remaining tasks to complete.
await self._hass.async_block_till_done()
data = await resp.json()
resp.close()
return WebhookResponse(message=data["message"], message_code=data["code"])
async def unload(self, profile: ProfileConfig) -> None:
"""Unload the component for a specific user."""
config_entries = get_config_entries_for_user_id(self._hass, profile.user_id)
for config_entry in config_entries:
await async_unload_entry(self._hass, config_entry)
await self._hass.async_block_till_done()
assert not get_data_manager_by_user_id(self._hass, profile.user_id)
def get_config_entries_for_user_id(
hass: HomeAssistant, user_id: int
) -> Tuple[ConfigEntry, ...]:
"""Get a list of config entries that apply to a specific withings user."""
return tuple(
[
config_entry
for config_entry in hass.config_entries.async_entries(const.DOMAIN)
if config_entry.data.get("token", {}).get("userid") == user_id
]
)
def async_get_flow_for_user_id(hass: HomeAssistant, user_id: int) -> List[dict]:
"""Get a flow for a user id."""
return [
flow
for flow in hass.config_entries.flow.async_progress()
if flow["handler"] == const.DOMAIN and flow["context"].get("userid") == user_id
]
def get_data_manager_by_user_id(
hass: HomeAssistant, user_id: int
) -> Optional[DataManager]:
"""Get a data manager by the user id."""
return next(
iter(
[
data_manager
for data_manager in get_all_data_managers(hass)
if data_manager.user_id == user_id
]
),
None,
)
|
import arrow
from datetime import timedelta
from sqlalchemy import cast, not_
from sqlalchemy_utils import ArrowType
from lemur import database
from lemur.certificates.models import Certificate
def filter_by_validity(query, validity=None):
if validity == "expired":
query = query.filter(Certificate.expired == True) # noqa
elif validity == "valid":
query = query.filter(Certificate.expired == False) # noqa
return query
def filter_by_owner(query, owner=None):
if owner:
return query.filter(Certificate.owner == owner)
return query
def filter_by_issuer(query, issuer=None):
if issuer:
return query.filter(Certificate.issuer == issuer)
return query
def filter_by_deployment(query, deployment=None):
if deployment == "deployed":
query = query.filter(Certificate.endpoints.any())
elif deployment == "ready":
query = query.filter(not_(Certificate.endpoints.any()))
return query
def filter_by_validity_end(query, validity_end=None):
if validity_end:
return query.filter(cast(Certificate.not_after, ArrowType) <= validity_end)
return query
def fqdns(**kwargs):
"""
Returns an FQDN report.
:return:
"""
query = database.session_query(Certificate)
query = filter_by_deployment(query, deployment=kwargs.get("deployed"))
query = filter_by_validity(query, validity=kwargs.get("validity"))
return query
def expiring_certificates(**kwargs):
"""
Returns an Expiring report.
:return:
"""
ttl = kwargs.get("ttl", 30)
now = arrow.utcnow()
validity_end = now + timedelta(days=ttl)
query = database.session_query(Certificate)
query = filter_by_deployment(query, deployment=kwargs.get("deployed"))
query = filter_by_validity(query, validity="valid")
query = filter_by_validity_end(query, validity_end=validity_end)
return query
|
class Dna(object):
def __init__(self, name=None, module_type=None, author=None, kalliope_supported_version=None, tags=None):
self.name = name
        self.module_type = module_type  # 'type' shadows a Python built-in, so use module_type
self.author = author
self.kalliope_supported_version = kalliope_supported_version
self.tags = tags
def serialize(self):
"""
        Serialize this object.
:return: A dict of name and parameters
:rtype: Dict
"""
return {
'name': self.name,
'type': self.module_type,
'author': self.author,
'kalliope_supported_version': self.kalliope_supported_version,
'tags': self.tags
}
def __str__(self):
return str(self.serialize())
def __eq__(self, other):
"""
        Compare two Dna objects by their attribute values.
:param other:
:return:
"""
return self.__dict__ == other.__dict__
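def _example_dna():
    """Illustrative sketch (added example, not part of the original class).

    The field values below are made up; they only show serialize() renaming
    module_type to 'type' and __eq__ comparing by attribute values.
    """
    dna = Dna(name="say", module_type="neuron", author="kalliope",
              kalliope_supported_version="0.5.0", tags=["tts"])
    assert dna.serialize()["type"] == "neuron"
    assert dna == Dna(name="say", module_type="neuron", author="kalliope",
                      kalliope_supported_version="0.5.0", tags=["tts"])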
|
import logging
import os
from queue import Queue
import threading
from typing import List
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
from .minio_helper import MinioEventThread, create_minio_client
_LOGGER = logging.getLogger(__name__)
DOMAIN = "minio"
CONF_HOST = "host"
CONF_PORT = "port"
CONF_ACCESS_KEY = "access_key"
CONF_SECRET_KEY = "secret_key"
CONF_SECURE = "secure"
CONF_LISTEN = "listen"
CONF_LISTEN_BUCKET = "bucket"
CONF_LISTEN_PREFIX = "prefix"
CONF_LISTEN_SUFFIX = "suffix"
CONF_LISTEN_EVENTS = "events"
ATTR_BUCKET = "bucket"
ATTR_KEY = "key"
ATTR_FILE_PATH = "file_path"
DEFAULT_LISTEN_PREFIX = ""
DEFAULT_LISTEN_SUFFIX = ".*"
DEFAULT_LISTEN_EVENTS = "s3:ObjectCreated:*"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_ACCESS_KEY): cv.string,
vol.Required(CONF_SECRET_KEY): cv.string,
vol.Required(CONF_SECURE): cv.boolean,
vol.Optional(CONF_LISTEN, default=[]): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_LISTEN_BUCKET): cv.string,
vol.Optional(
CONF_LISTEN_PREFIX, default=DEFAULT_LISTEN_PREFIX
): cv.string,
vol.Optional(
CONF_LISTEN_SUFFIX, default=DEFAULT_LISTEN_SUFFIX
): cv.string,
vol.Optional(
CONF_LISTEN_EVENTS, default=DEFAULT_LISTEN_EVENTS
): cv.string,
}
)
],
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
BUCKET_KEY_SCHEMA = vol.Schema(
{vol.Required(ATTR_BUCKET): cv.template, vol.Required(ATTR_KEY): cv.template}
)
BUCKET_KEY_FILE_SCHEMA = BUCKET_KEY_SCHEMA.extend(
{vol.Required(ATTR_FILE_PATH): cv.template}
)
def setup(hass, config):
"""Set up MinioClient and event listeners."""
conf = config[DOMAIN]
host = conf[CONF_HOST]
port = conf[CONF_PORT]
access_key = conf[CONF_ACCESS_KEY]
secret_key = conf[CONF_SECRET_KEY]
secure = conf[CONF_SECURE]
queue_listener = QueueListener(hass)
queue = queue_listener.queue
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, queue_listener.start_handler)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, queue_listener.stop_handler)
def _setup_listener(listener_conf):
bucket = listener_conf[CONF_LISTEN_BUCKET]
prefix = listener_conf[CONF_LISTEN_PREFIX]
suffix = listener_conf[CONF_LISTEN_SUFFIX]
events = listener_conf[CONF_LISTEN_EVENTS]
minio_listener = MinioListener(
queue,
get_minio_endpoint(host, port),
access_key,
secret_key,
secure,
bucket,
prefix,
suffix,
events,
)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, minio_listener.start_handler)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, minio_listener.stop_handler)
for listen_conf in conf[CONF_LISTEN]:
_setup_listener(listen_conf)
minio_client = create_minio_client(
get_minio_endpoint(host, port), access_key, secret_key, secure
)
def _render_service_value(service, key):
value = service.data[key]
value.hass = hass
return value.async_render(parse_result=False)
def put_file(service):
"""Upload file service."""
bucket = _render_service_value(service, ATTR_BUCKET)
key = _render_service_value(service, ATTR_KEY)
file_path = _render_service_value(service, ATTR_FILE_PATH)
if not hass.config.is_allowed_path(file_path):
_LOGGER.error("Invalid file_path %s", file_path)
return
minio_client.fput_object(bucket, key, file_path)
def get_file(service):
"""Download file service."""
bucket = _render_service_value(service, ATTR_BUCKET)
key = _render_service_value(service, ATTR_KEY)
file_path = _render_service_value(service, ATTR_FILE_PATH)
if not hass.config.is_allowed_path(file_path):
_LOGGER.error("Invalid file_path %s", file_path)
return
minio_client.fget_object(bucket, key, file_path)
def remove_file(service):
"""Delete file service."""
bucket = _render_service_value(service, ATTR_BUCKET)
key = _render_service_value(service, ATTR_KEY)
minio_client.remove_object(bucket, key)
hass.services.register(DOMAIN, "put", put_file, schema=BUCKET_KEY_FILE_SCHEMA)
hass.services.register(DOMAIN, "get", get_file, schema=BUCKET_KEY_FILE_SCHEMA)
hass.services.register(DOMAIN, "remove", remove_file, schema=BUCKET_KEY_SCHEMA)
return True
def get_minio_endpoint(host: str, port: int) -> str:
"""Create minio endpoint from host and port."""
return f"{host}:{port}"
class QueueListener(threading.Thread):
"""Forward events from queue into Home Assistant event bus."""
def __init__(self, hass):
"""Create queue."""
super().__init__()
self._hass = hass
self._queue = Queue()
def run(self):
"""Listen to queue events, and forward them to Home Assistant event bus."""
_LOGGER.info("Running QueueListener")
while True:
event = self._queue.get()
if event is None:
break
_, file_name = os.path.split(event[ATTR_KEY])
_LOGGER.debug(
"Sending event %s, %s, %s",
event["event_name"],
event[ATTR_BUCKET],
event[ATTR_KEY],
)
self._hass.bus.fire(DOMAIN, {"file_name": file_name, **event})
@property
def queue(self):
"""Return wrapped queue."""
return self._queue
def stop(self):
"""Stop run by putting None into queue and join the thread."""
_LOGGER.info("Stopping QueueListener")
self._queue.put(None)
self.join()
_LOGGER.info("Stopped QueueListener")
def start_handler(self, _):
"""Start handler helper method."""
self.start()
def stop_handler(self, _):
"""Stop handler helper method."""
self.stop()
class MinioListener:
"""MinioEventThread wrapper with helper methods."""
def __init__(
self,
queue: Queue,
endpoint: str,
access_key: str,
secret_key: str,
secure: bool,
bucket_name: str,
prefix: str,
suffix: str,
events: List[str],
):
"""Create Listener."""
self._queue = queue
self._endpoint = endpoint
self._access_key = access_key
self._secret_key = secret_key
self._secure = secure
self._bucket_name = bucket_name
self._prefix = prefix
self._suffix = suffix
self._events = events
self._minio_event_thread = None
def start_handler(self, _):
"""Create and start the event thread."""
self._minio_event_thread = MinioEventThread(
self._queue,
self._endpoint,
self._access_key,
self._secret_key,
self._secure,
self._bucket_name,
self._prefix,
self._suffix,
self._events,
)
self._minio_event_thread.start()
def stop_handler(self, _):
"""Issue stop and wait for thread to join."""
if self._minio_event_thread is not None:
self._minio_event_thread.stop()
|
from test import CollectorTestCase
from test import get_collector_config
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from uptime import UptimeCollector
###############################################################################
class TestUptimeCollector(CollectorTestCase):
def setUp(self, config=None):
if config is None:
config = get_collector_config('UptimeCollector', {
'interval': '10',
})
else:
config = get_collector_config('UptimeCollector', config)
self.collector = UptimeCollector(config, None)
def test_import(self):
self.assertTrue(UptimeCollector)
@patch('__builtin__.open')
@patch('os.path.exists', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_uptime(self, publish_mock, open_mock):
open_mock.return_value = StringIO('1288459.83 10036802.26')
self.collector.collect()
open_mock.assert_called_once_with('/proc/uptime')
@patch.object(Collector, 'publish')
def test_sanity_check(self, publish_mock):
self.collector.PROC = self.getFixturePath('sanity_check')
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'minutes': 60
})
@patch.object(Collector, 'publish')
def test_malformed_input(self, publish_mock):
self.collector.PROC = self.getFixturePath('malformed_input')
self.collector.collect()
self.collector.PROC = self.getFixturePath('sanity_check')
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'minutes': 60.0
})
@patch.object(Collector, 'publish')
def test_seconds(self, publish_mock):
self.setUp(config={
'interval': '10',
'metric_name': 'seconds',
})
self.collector.PROC = self.getFixturePath('sanity_check')
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'seconds': 3600
})
|
import inspect
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import sample
BENCHMARK_NAME = 'dpb_spark_pi_benchmark'
BENCHMARK_CONFIG = """
dpb_spark_pi_benchmark:
flags:
cloud: GCP
dpb_service_zone: us-east1-b
description: >
Create a dpb cluster and Run Spark Pi application.
dpb_service:
service_type: dataproc
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-1
disk_spec:
GCP:
disk_type: pd-standard
disk_size: 500
worker_count: 2
"""
flags.DEFINE_integer('dpb_spark_pi_partitions', 100, 'Number of task'
' partitions.')
FLAGS = flags.FLAGS
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(unused_benchmark_spec):
pass
def Run(benchmark_spec):
"""Executes the given jar on the specified Spark cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
Raises:
JobSubmissionError if the job fails.
"""
metadata = {}
metadata.update(benchmark_spec.dpb_service.GetMetadata())
num_partitions = str(FLAGS.dpb_spark_pi_partitions)
metadata.update({'spark_pi_partitions': num_partitions})
results = []
dpb_service_instance = benchmark_spec.dpb_service
result = dpb_service_instance.SubmitSparkJob(
spark_application_jar=inspect.getmodule(
benchmark_spec.dpb_service).SPARK_SAMPLE_LOCATION,
spark_application_classname='org.apache.spark.examples.SparkPi',
spark_application_args=[num_partitions])
logging.info(result)
results.append(
sample.Sample('wall_time', result.wall_time, 'seconds', metadata))
results.append(
sample.Sample('run_time', result.run_time, 'seconds', metadata))
return results
def Cleanup(unused_benchmark_spec):
pass
|
import abc
import json
import logging
import os
from typing import Dict
from typing import Mapping
from typing import Optional
from typing import Tuple
import pysensu_yelp
import service_configuration_lib
from paasta_tools.long_running_service_tools import LongRunningServiceConfig
from paasta_tools.utils import _log
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import is_under_replicated
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PaastaNotConfiguredError
from paasta_tools.utils import time_cache
class ReplicationChecker(abc.ABC):
@abc.abstractmethod
def get_replication_for_instance(
self, instance_config: LongRunningServiceConfig
) -> Dict[str, Dict[str, Dict[str, int]]]:
...
try:
import yelp_meteorite
except ImportError:
yelp_meteorite = None
log = logging.getLogger(__name__)
def monitoring_defaults(key):
defaults = {
"runbook": 'Please set a `runbook` field in your monitoring.yaml. Like "y/rb-mesos". Docs: '
"https://paasta.readthedocs.io/en/latest/yelpsoa_configs.html#monitoring-yaml",
"tip": "Please set a `tip` field in your monitoring.yaml. Docs: "
"https://paasta.readthedocs.io/en/latest/yelpsoa_configs.html#monitoring-yaml",
"ticket": False,
"project": None,
"realert_every": -1,
"tags": [],
}
return defaults.get(key, None)
def get_team(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("team", overrides, service, soa_dir)
def get_runbook(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("runbook", overrides, service, soa_dir)
def get_tip(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("tip", overrides, service, soa_dir)
def get_notification_email(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value(
"notification_email", overrides, service, soa_dir
)
def get_page(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("page", overrides, service, soa_dir)
def get_alert_after(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("alert_after", overrides, service, soa_dir)
def get_realert_every(
overrides, service, soa_dir=DEFAULT_SOA_DIR, monitoring_defaults=monitoring_defaults
):
return __get_monitoring_config_value(
"realert_every",
overrides=overrides,
service=service,
soa_dir=soa_dir,
monitoring_defaults=monitoring_defaults,
)
def get_check_every(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("check_every", overrides, service, soa_dir)
def get_irc_channels(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("irc_channels", overrides, service, soa_dir)
def get_slack_channels(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("slack_channels", overrides, service, soa_dir)
def get_dependencies(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("dependencies", overrides, service, soa_dir)
def get_ticket(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("ticket", overrides, service, soa_dir)
def get_project(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("project", overrides, service, soa_dir)
def get_priority(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("priority", overrides, service, soa_dir)
def get_tags(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("tags", overrides, service, soa_dir)
def get_component(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("component", overrides, service, soa_dir)
def get_description(overrides, service, soa_dir=DEFAULT_SOA_DIR):
return __get_monitoring_config_value("description", overrides, service, soa_dir)
# Our typical usage pattern is that we call all the different get_* functions back to back. Applying a small amount of
# cache here helps cut down on the number of times we re-parse service.yaml.
_cached_read_service_configuration = time_cache(ttl=5)(
service_configuration_lib.read_service_configuration
)
def __get_monitoring_config_value(
key,
overrides,
service,
soa_dir=DEFAULT_SOA_DIR,
monitoring_defaults=monitoring_defaults,
):
general_config = _cached_read_service_configuration(service, soa_dir=soa_dir)
monitor_config = read_monitoring_config(service, soa_dir=soa_dir)
service_default = general_config.get(key, monitoring_defaults(key))
service_default = general_config.get("monitoring", {key: service_default}).get(
key, service_default
)
service_default = monitor_config.get(key, service_default)
return overrides.get(key, service_default)
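# Resolution order used by __get_monitoring_config_value above, highest
# precedence first (derived from the lookups, noted here for quick reference):
#   1. the explicit `overrides` dict passed by the caller
#   2. the service's monitoring.yaml
#   3. the "monitoring" sub-dict of the service's service.yaml
#   4. the top level of service.yaml
#   5. the hard-coded monitoring_defaults()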
def get_sensu_team_data(team):
"""Takes a team and returns the dictionary of Sensu configuration
settings for that team. The data is in this format:
https://github.com/Yelp/sensu_handlers#teams
Returns an empty dictionary if there is nothing to return.
Not all teams specify all the different types of configuration settings.
    For example, a team may not specify a `notification_email`. It is up
to the caller of this function to handle that case.
"""
global_team_data = _load_sensu_team_data()["team_data"]
return global_team_data.get(team, {})
def _load_sensu_team_data():
try:
with open("/etc/sensu/team_data.json") as f:
team_data = json.load(f)
except IOError:
log.warning(
"No Sensu Team data (/etc/sensu/team_data.json) available. Using empty defaults"
)
team_data = {}
return team_data
def send_event(
service,
check_name,
overrides,
status,
output,
soa_dir,
ttl=None,
cluster=None,
system_paasta_config=None,
):
"""Send an event to sensu via pysensu_yelp with the given information.
:param service: The service name the event is about
:param check_name: The name of the check as it appears in Sensu
:param overrides: A dictionary containing overrides for monitoring options
(e.g. notification_email, ticket, page)
:param status: The status to emit for this event
:param output: The output to emit for this event
:param soa_dir: The service directory to read monitoring information from
:param ttl: TTL (optional)
:param cluster: The cluster name (optional)
:param system_paasta_config: A SystemPaastaConfig object representing the system
"""
# This function assumes the input is a string like "mumble.main"
team = get_team(overrides, service, soa_dir)
if not team:
return
if system_paasta_config is None:
system_paasta_config = load_system_paasta_config()
if cluster is None:
try:
cluster = system_paasta_config.get_cluster()
except PaastaNotConfiguredError:
cluster = "localhost"
alert_after = overrides.get("alert_after", "5m")
result_dict = {
"name": check_name,
"runbook": overrides.get("runbook", "http://y/paasta-troubleshooting"),
"status": status,
"output": output,
"team": team,
"page": get_page(overrides, service, soa_dir),
"tip": get_tip(overrides, service, soa_dir),
"notification_email": get_notification_email(overrides, service, soa_dir),
"check_every": overrides.get("check_every", "1m"),
"realert_every": overrides.get(
"realert_every", monitoring_defaults("realert_every")
),
"alert_after": f"{alert_after}s"
if isinstance(alert_after, int)
else alert_after,
"irc_channels": get_irc_channels(overrides, service, soa_dir),
"slack_channels": get_slack_channels(overrides, service, soa_dir),
"ticket": get_ticket(overrides, service, soa_dir),
"project": get_project(overrides, service, soa_dir),
"priority": get_priority(overrides, service, soa_dir),
"source": "paasta-%s" % cluster,
"tags": get_tags(overrides, service, soa_dir),
"ttl": ttl,
"sensu_host": system_paasta_config.get_sensu_host(),
"sensu_port": system_paasta_config.get_sensu_port(),
"component": get_component(overrides, service, soa_dir),
"description": get_description(overrides, service, soa_dir),
}
if result_dict.get("sensu_host"):
pysensu_yelp.send_event(**result_dict)
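# A minimal usage sketch of send_event (values are hypothetical; the event is
# only emitted when a sensu_host is configured in the system PaaSTA config):
#
#   send_event(
#       service="example_service",
#       check_name="check_example_service.replication",
#       overrides={"page": False},
#       status=pysensu_yelp.Status.OK,
#       output="everything is replicated",
#       soa_dir=DEFAULT_SOA_DIR,
#   )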
@time_cache(ttl=5)
def read_monitoring_config(service, soa_dir=DEFAULT_SOA_DIR):
"""Read a service's monitoring.yaml file.
:param service: The service name
    :param soa_dir: The SOA configuration directory to read from
:returns: A dictionary of whatever was in soa_dir/name/monitoring.yaml"""
rootdir = os.path.abspath(soa_dir)
monitoring_file = os.path.join(rootdir, service, "monitoring.yaml")
monitor_conf = service_configuration_lib.read_monitoring(monitoring_file)
return monitor_conf
def list_teams():
"""Loads team data from the system. Returns a set of team names (or empty
set).
"""
team_data = _load_sensu_team_data()
teams = set(team_data.get("team_data", {}).keys())
return teams
def send_replication_event(instance_config, status, output):
"""Send an event to sensu via pysensu_yelp with the given information.
:param instance_config: an instance of LongRunningServiceConfig
:param status: The status to emit for this event
:param output: The output to emit for this event"""
# This function assumes the input is a string like "mumble.main"
monitoring_overrides = instance_config.get_monitoring()
if "alert_after" not in monitoring_overrides:
monitoring_overrides["alert_after"] = "2m"
monitoring_overrides["check_every"] = "1m"
monitoring_overrides["runbook"] = get_runbook(
monitoring_overrides, instance_config.service, soa_dir=instance_config.soa_dir
)
check_name = "check_paasta_services_replication.%s" % instance_config.job_id
send_event(
service=instance_config.service,
check_name=check_name,
overrides=monitoring_overrides,
status=status,
output=output,
soa_dir=instance_config.soa_dir,
cluster=instance_config.cluster,
)
_log(
service=instance_config.service,
line="Replication: %s" % output,
component="monitoring",
level="debug",
cluster=instance_config.cluster,
instance=instance_config.instance,
)
def emit_replication_metrics(
replication_infos: Mapping[str, Mapping[str, Mapping[str, int]]],
instance_config: LongRunningServiceConfig,
expected_count: int,
) -> None:
for provider, replication_info in replication_infos.items():
meteorite_dims = {
"paasta_service": instance_config.service,
"paasta_cluster": instance_config.cluster,
"paasta_instance": instance_config.instance,
"paasta_pool": instance_config.get_pool(),
"service_discovery_provider": provider,
}
num_available_backends = 0
for available_backends in replication_info.values():
num_available_backends += available_backends.get(instance_config.job_id, 0)
available_backends_gauge = yelp_meteorite.create_gauge(
"paasta.service.available_backends", meteorite_dims
)
available_backends_gauge.set(num_available_backends)
critical_percentage = instance_config.get_replication_crit_percentage()
num_critical_backends = critical_percentage * expected_count / 100.0
critical_backends_gauge = yelp_meteorite.create_gauge(
"paasta.service.critical_backends", meteorite_dims
)
critical_backends_gauge.set(num_critical_backends)
expected_backends_gauge = yelp_meteorite.create_gauge(
"paasta.service.expected_backends", meteorite_dims
)
expected_backends_gauge.set(expected_count)
def check_replication_for_instance(
instance_config: LongRunningServiceConfig,
expected_count: int,
replication_checker: ReplicationChecker,
) -> bool:
"""Check a set of namespaces to see if their number of available backends is too low,
emitting events to Sensu based on the fraction available and the thresholds defined in
the corresponding yelpsoa config.
    :param instance_config: an instance of LongRunningServiceConfig
    :param expected_count: the total number of expected backends for this instance
    :param replication_checker: an instance of ReplicationChecker
    :returns: True if the instance is sufficiently replicated, False otherwise
"""
crit_threshold = instance_config.get_replication_crit_percentage()
log.info(
"Checking instance %s in service discovery providers", instance_config.job_id
)
replication_infos = replication_checker.get_replication_for_instance(
instance_config
)
log.debug(f"Got replication info for {instance_config.job_id}: {replication_infos}")
if yelp_meteorite is not None:
emit_replication_metrics(
replication_infos, instance_config, expected_count,
)
combined_output = ""
service_is_under_replicated = False
failed_service_discovery_providers = set()
for service_discovery_provider, replication_info in replication_infos.items():
if len(replication_info) == 0:
output = (
"Service %s has no %s replication info. Make sure the discover key in the corresponding config (e.g. smartstack.yaml for Smartstack) is valid!\n"
) % (instance_config.job_id, service_discovery_provider)
log.error(output)
service_is_under_replicated = True
failed_service_discovery_providers.add(service_discovery_provider)
else:
expected_count_per_location = int(expected_count / len(replication_info))
output = ""
output_critical = ""
output_ok = ""
under_replication_per_location = []
for location, available_backends in sorted(replication_info.items()):
num_available_in_location = available_backends.get(
instance_config.job_id, 0
)
under_replicated, ratio = is_under_replicated(
num_available_in_location,
expected_count_per_location,
crit_threshold,
)
if under_replicated:
output_critical += (
"- Service %s has %d out of %d expected instances in %s according to %s (CRITICAL: %d%%)\n"
% (
instance_config.job_id,
num_available_in_location,
expected_count_per_location,
location,
service_discovery_provider,
ratio,
)
)
failed_service_discovery_providers.add(service_discovery_provider)
else:
output_ok += (
"- Service %s has %d out of %d expected instances in %s according to %s (OK: %d%%)\n"
% (
instance_config.job_id,
num_available_in_location,
expected_count_per_location,
location,
service_discovery_provider,
ratio,
)
)
under_replication_per_location.append(under_replicated)
output += output_critical
if output_critical and output_ok:
output += "\n\n"
output += "The following locations are OK:\n"
output += output_ok
service_is_under_replicated_anywhere = any(under_replication_per_location)
service_is_under_replicated |= service_is_under_replicated_anywhere
if service_is_under_replicated_anywhere:
log.error(output)
else:
log.info(output)
combined_output += output
if service_is_under_replicated:
failed_service_discovery_providers_list = ",".join(
failed_service_discovery_providers
)
combined_output += (
"\n\n"
"What this alert means:\n"
"\n"
" This replication alert means that a %(service_discovery_provider)s powered loadbalancer\n"
" doesn't have enough healthy backends. Not having enough healthy backends\n"
" means that clients of that service will get 503s (http) or connection refused\n"
" (tcp) when trying to connect to it.\n"
"\n"
"Reasons this might be happening:\n"
"\n"
" The service may simply not have enough copies or it could simply be\n"
" unhealthy in that location. There also may not be enough resources\n"
" in the cluster to support the requested instance count.\n"
"\n"
"Things you can do:\n"
"\n"
" * You can view the logs for the job with:\n"
" paasta logs -s %(service)s -i %(instance)s -c %(cluster)s\n"
"\n"
" * Fix the cause of the unhealthy service. Try running:\n"
"\n"
" paasta status -s %(service)s -i %(instance)s -c %(cluster)s -vv\n"
"\n"
" * Widen %(service_discovery_provider)s discovery settings\n"
" * Increase the instance count\n"
"\n"
) % {
"service": instance_config.service,
"instance": instance_config.instance,
"cluster": instance_config.cluster,
"service_discovery_provider": failed_service_discovery_providers_list,
}
status = pysensu_yelp.Status.CRITICAL
else:
status = pysensu_yelp.Status.OK
send_replication_event(
instance_config=instance_config, status=status, output=combined_output
)
return not service_is_under_replicated
def check_under_replication(
instance_config: LongRunningServiceConfig,
expected_count: int,
num_available: int,
sub_component: Optional[str] = None,
) -> Tuple[bool, str]:
"""Check if a component/sub_component is under-replicated and returns both the result of the check in the form of a
boolean and a human-readable text to be used in logging or monitoring events.
"""
crit_threshold = instance_config.get_replication_crit_percentage()
if sub_component is not None:
output = (
"Service %s has %d out of %d expected instances of %s available! (threshold: %d%%)"
) % (
instance_config.job_id,
num_available,
expected_count,
sub_component,
crit_threshold,
)
else:
output = (
"Service %s has %d out of %d expected instances available! (threshold: %d%%)"
) % (instance_config.job_id, num_available, expected_count, crit_threshold)
under_replicated, _ = is_under_replicated(
num_available, expected_count, crit_threshold
)
if under_replicated:
output += (
"\n\n"
"What this alert means:\n"
"\n"
" This replication alert means that the service PaaSTA can't keep the\n"
" requested number of copies up and healthy in the cluster.\n"
"\n"
"Reasons this might be happening:\n"
"\n"
" The service may simply be unhealthy. There also may not be enough resources\n"
" in the cluster to support the requested instance count.\n"
"\n"
"Things you can do:\n"
"\n"
" * Increase the instance count\n"
" * Fix the cause of the unhealthy service. Try running:\n"
"\n"
" paasta status -s %(service)s -i %(instance)s -c %(cluster)s -vv\n"
) % {
"service": instance_config.service,
"instance": instance_config.instance,
"cluster": instance_config.cluster,
}
return under_replicated, output
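# Worked example (hypothetical numbers, assuming is_under_replicated compares
# the available/expected percentage against the threshold, as the formatting
# above suggests): with expected_count=10, num_available=6 and a crit
# threshold of 70, the ratio is 60% < 70%, so the check reports
# under-replication and the longer explanatory text is appended to the output.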
def send_replication_event_if_under_replication(
instance_config: LongRunningServiceConfig,
expected_count: int,
num_available: int,
sub_component: Optional[str] = None,
):
under_replicated, output = check_under_replication(
instance_config, expected_count, num_available, sub_component
)
if under_replicated:
log.error(output)
status = pysensu_yelp.Status.CRITICAL
else:
log.info(output)
status = pysensu_yelp.Status.OK
send_replication_event(
instance_config=instance_config, status=status, output=output
)
|
import importlib
import locale
import os
import platform
import struct
import subprocess
import sys
def get_sys_info():
"""Returns system information as a dict"""
blob = []
# get full commit hash
commit = None
if os.path.isdir(".git") and os.path.isdir("xarray"):
try:
pipe = subprocess.Popen(
'git log --format="%H" -n 1'.split(" "),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
so, _ = pipe.communicate()
except Exception:
pass
else:
if pipe.returncode == 0:
commit = so
try:
commit = so.decode("utf-8")
except ValueError:
pass
commit = commit.strip().strip('"')
blob.append(("commit", commit))
try:
(sysname, _nodename, release, _version, machine, processor) = platform.uname()
blob.extend(
[
("python", sys.version),
("python-bits", struct.calcsize("P") * 8),
("OS", "%s" % (sysname)),
("OS-release", "%s" % (release)),
# ("Version", "%s" % (version)),
("machine", "%s" % (machine)),
("processor", "%s" % (processor)),
("byteorder", "%s" % sys.byteorder),
("LC_ALL", "%s" % os.environ.get("LC_ALL", "None")),
("LANG", "%s" % os.environ.get("LANG", "None")),
("LOCALE", "%s.%s" % locale.getlocale()),
]
)
except Exception:
pass
return blob
def netcdf_and_hdf5_versions():
libhdf5_version = None
libnetcdf_version = None
try:
import netCDF4
libhdf5_version = netCDF4.__hdf5libversion__
libnetcdf_version = netCDF4.__netcdf4libversion__
except ImportError:
try:
import h5py
libhdf5_version = h5py.version.hdf5_version
except ImportError:
pass
return [("libhdf5", libhdf5_version), ("libnetcdf", libnetcdf_version)]
def show_versions(file=sys.stdout):
"""print the versions of xarray and its dependencies
Parameters
----------
file : file-like, optional
print to the given file-like object. Defaults to sys.stdout.
"""
sys_info = get_sys_info()
try:
sys_info.extend(netcdf_and_hdf5_versions())
except Exception as e:
print(f"Error collecting netcdf / hdf5 version: {e}")
deps = [
# (MODULE_NAME, f(mod) -> mod version)
("xarray", lambda mod: mod.__version__),
("pandas", lambda mod: mod.__version__),
("numpy", lambda mod: mod.__version__),
("scipy", lambda mod: mod.__version__),
# xarray optionals
("netCDF4", lambda mod: mod.__version__),
("pydap", lambda mod: mod.__version__),
("h5netcdf", lambda mod: mod.__version__),
("h5py", lambda mod: mod.__version__),
("Nio", lambda mod: mod.__version__),
("zarr", lambda mod: mod.__version__),
("cftime", lambda mod: mod.__version__),
("nc_time_axis", lambda mod: mod.__version__),
("PseudoNetCDF", lambda mod: mod.__version__),
("rasterio", lambda mod: mod.__version__),
("cfgrib", lambda mod: mod.__version__),
("iris", lambda mod: mod.__version__),
("bottleneck", lambda mod: mod.__version__),
("dask", lambda mod: mod.__version__),
("distributed", lambda mod: mod.__version__),
("matplotlib", lambda mod: mod.__version__),
("cartopy", lambda mod: mod.__version__),
("seaborn", lambda mod: mod.__version__),
("numbagg", lambda mod: mod.__version__),
("pint", lambda mod: mod.__version__),
# xarray setup/test
("setuptools", lambda mod: mod.__version__),
("pip", lambda mod: mod.__version__),
("conda", lambda mod: mod.__version__),
("pytest", lambda mod: mod.__version__),
# Misc.
("IPython", lambda mod: mod.__version__),
("sphinx", lambda mod: mod.__version__),
]
deps_blob = []
for (modname, ver_f) in deps:
try:
if modname in sys.modules:
mod = sys.modules[modname]
else:
mod = importlib.import_module(modname)
except Exception:
deps_blob.append((modname, None))
else:
try:
ver = ver_f(mod)
deps_blob.append((modname, ver))
except Exception:
deps_blob.append((modname, "installed"))
print("\nINSTALLED VERSIONS", file=file)
print("------------------", file=file)
for k, stat in sys_info:
print(f"{k}: {stat}", file=file)
print("", file=file)
for k, stat in deps_blob:
print(f"{k}: {stat}", file=file)
if __name__ == "__main__":
show_versions()
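# Typical ways to run this report (module path and invocation assumed):
#   python xarray/util/print_versions.py
# or, from an interactive session:
#   import xarray as xr; xr.show_versions()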
|
from decimal import Decimal
from django.utils.translation import gettext_lazy as _
from shop import messages
from shop.exceptions import ProductNotAvailable
from shop.money import AbstractMoney, Money
from shop.modifiers.base import BaseCartModifier
class DefaultCartModifier(BaseCartModifier):
"""
    This modifier is required for almost every shopping cart. It handles the most basic
    calculation, i.e. multiplying each item's unit price by the chosen quantity.
    Since this modifier sets the cart item's line total, it must be listed as the first
    entry in `SHOP_CART_MODIFIERS`.
"""
identifier = 'default'
def pre_process_cart_item(self, cart, cart_item, request, raise_exception=False):
"""
Limit the ordered quantity in the cart to the availability in the inventory.
"""
kwargs = {'product_code': cart_item.product_code}
kwargs.update(cart_item.extra)
availability = cart_item.product.get_availability(request, **kwargs)
if cart_item.quantity > availability.quantity:
if raise_exception:
raise ProductNotAvailable(cart_item.product)
cart_item.quantity = availability.quantity
cart_item.save(update_fields=['quantity'])
message = _("The ordered quantity for item '{product_name}' has been adjusted to "\
"{quantity} which is the maximum, currently available in stock.").\
format(product_name=cart_item.product.product_name, quantity=availability.quantity)
messages.info(request, message, title=_("Verify Quantity"), delay=5)
return super().pre_process_cart_item(cart, cart_item, request, raise_exception)
def process_cart_item(self, cart_item, request):
cart_item.unit_price = cart_item.product.get_price(request)
cart_item.line_total = cart_item.unit_price * cart_item.quantity
return super().process_cart_item(cart_item, request)
def process_cart(self, cart, request):
if not isinstance(cart.subtotal, AbstractMoney):
# if we don't know the currency, use the default
cart.subtotal = Money(cart.subtotal)
cart.total = cart.subtotal
return super().process_cart(cart, request)
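# A minimal sketch of how these modifiers are typically wired up in the project
# settings (dotted paths assumed; adjust them to wherever the classes live):
#
#   SHOP_CART_MODIFIERS = [
#       'shop.modifiers.defaults.DefaultCartModifier',   # must be first: sets line totals
#       'shop.modifiers.defaults.WeightedCartModifier',  # optional: accumulates cart.weight
#   ]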
class WeightedCartModifier(BaseCartModifier):
"""
    This modifier is required for every shopping cart whose total weight is of interest.
    It sums up the weight of all articles, i.e. multiplying each item's weight by the chosen
    quantity.
    If this modifier is used, the classes implementing the product shall override the
    method ``get_weight()``, which must return the weight in kg as a Decimal.
"""
identifier = 'weights'
    initial_weight = Decimal('0.01')  # in kg
def pre_process_cart(self, cart, request, raise_exception=False):
cart.weight = self.initial_weight
return super().pre_process_cart(cart, request, raise_exception)
def pre_process_cart_item(self, cart, cart_item, request, raise_exception=False):
cart.weight += Decimal(cart_item.product.get_weight() * cart_item.quantity)
        return super().pre_process_cart_item(cart, cart_item, request, raise_exception)
|
import hashlib
import logging
import os
import six
from kalliope.core.ConfigurationManager import SettingLoader
from kalliope.core.PlayerLauncher import PlayerLauncher
from kalliope.core.Utils.FileManager import FileManager
logging.basicConfig()
logger = logging.getLogger("kalliope")
class MissingTTSParameter(Exception):
"""
Some TTS Parameters are missing in the settings.yml file.
    .. seealso:: Settings
"""
pass
class TtsGenerateAudioFunctionNotFound(Exception):
"""
You must provide a callBack to the TTS
"""
pass
class FailToLoadSoundFile(Exception):
"""
    Fail while trying to load the sound file.
"""
pass
class TTSModule(object):
"""
Mother class of TTS module. Handle:
- Cache: call cache object to create file, delete file, check if file exist
- Player: call the default player to play the generated file
"""
def __init__(self, **kwargs):
# set parameter from what we receive from the settings
self.cache = kwargs.get('cache', False)
self.language = kwargs.get('language', "default")
self.voice = kwargs.get('voice', "default")
        # the name of the TTS is the name of the TTS module that has instantiated TTSModule
self.tts_caller_name = self.__class__.__name__
        # we don't know yet the words that will be converted to audio, so we don't have the audio path either
self.words = None
self.file_path = None
self.base_cache_path = None
# load settings
sl = SettingLoader()
self.settings = sl.settings
self.player = PlayerLauncher.get_player(settings=self.settings)
# create the path in the tmp folder
base_path = os.path.join(self.settings.cache_path, self.tts_caller_name, self.language, self.voice)
FileManager.create_directory(base_path)
logger.debug("Class TTSModule called from module %s, cache: %s, language: %s, voice: %s" % (self.tts_caller_name,
self.cache,
self.language,
self.voice))
def play_audio(self):
"""
Play the audio file
"""
# Mplayer.play(self.file_path)
self.player.play(self.file_path)
def generate_and_play(self, words, generate_audio_function_from_child=None):
"""
Generate an audio file from <words> if not already in cache and call the Player to play it
:param words: Sentence text from which we want to generate an audio file
:type words: String
:param generate_audio_function_from_child: The child function to generate a file if necessary
        :type generate_audio_function_from_child: Callback function
.. raises:: TtsGenerateAudioFunctionNotFound
"""
if generate_audio_function_from_child is None:
raise TtsGenerateAudioFunctionNotFound
self.words = words
# we can generate the file path from info we have
self.file_path = self._get_path_to_store_audio()
if not self.cache:
# no cache, we need to generate the file
generate_audio_function_from_child()
else:
            # we check if the file already exists. If not, we generate it with the TTS engine
if not self._is_file_already_in_cache(self.base_cache_path, self.file_path):
generate_audio_function_from_child()
# then play the generated audio file
self.play_audio()
        # if the user doesn't want to keep the cache, we remove the file
if not self.cache:
FileManager.remove_file(self.file_path)
def _get_path_to_store_audio(self):
"""
        Get a sentence (a text) and return the full path of the audio file
        Path syntax:
        </path/in/settings>/<tts.name>/tts.parameter["language"]/tts.parameter["voice"]/<md5_of_sentence>.tts
E.g:
/tmp/kalliope/acapela/fr/abcd12345.tts
:return: path String
"""
md5 = self.generate_md5_from_words(self.words)+".tts"
self.base_cache_path = os.path.join(self.settings.cache_path, self.tts_caller_name, self.language, self.voice)
returned_path = os.path.join(self.base_cache_path, md5)
logger.debug("get_path_to_store_audio return: %s" % returned_path)
return returned_path
@staticmethod
def generate_md5_from_words(words):
"""
Generate a md5 hash from received text
:param words: Text to convert into md5 hash
:return: String md5 hash from the received words
"""
if isinstance(words, six.text_type):
words = words.encode('utf-8')
return hashlib.md5(words).hexdigest()
@staticmethod
def _is_file_already_in_cache(base_cache_path, file_path):
"""
Return true if the file to generate has already been generated before
"""
# generate sub folder
FileManager.create_directory(base_cache_path)
# check if the audio file exist
exist_in_cache = os.path.exists(file_path)
if exist_in_cache:
logger.debug("TTSModule, File already in cache: %s" % file_path)
else:
logger.debug("TTSModule, File not yet in cache: %s" % file_path)
return exist_in_cache
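# A minimal sketch of a concrete TTS engine plugging into generate_and_play()
# (hypothetical class and method names, shown only to illustrate the expected
# callback contract):
#
#   class ExampleTTS(TTSModule):
#       def __init__(self, **kwargs):
#           super(ExampleTTS, self).__init__(**kwargs)
#
#       def say(self, words):
#           self.generate_and_play(words, self._generate_audio_file)
#
#       def _generate_audio_file(self):
#           # synthesize self.words with the engine of your choice and write
#           # the resulting audio bytes to self.file_path
#           pass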
|
from __future__ import unicode_literals
import string
from lib.fun.osjudger import py_ver_egt_3
from lib.data.data import paths, pyoptions
from lib.fun.fun import walk_pure_file, rreplace
def get_leet_cfg():
leet_mode = dict()
leets = walk_pure_file(paths.leetmode_path, pure=False)
for _ in leets:
chunk = _.split(pyoptions.key_value_split)
leet_mode[chunk[0].strip()] = chunk[1].strip()
return leet_mode.items()
# leet mode magic function
def leet_mode_magic(strings, code, *args):
intab = outtab = ""
if code == 0:
for leet in get_leet_cfg():
intab += leet[0]
outtab += leet[1]
if not py_ver_egt_3():
maptab = string.maketrans(intab, outtab)
ret = str(strings).translate(maptab)
else:
maptab = str.maketrans(intab, outtab)
ret = strings.translate(maptab)
return ret
elif 11 <= code <= 29 or 1 <= code <= 2:
if 21 <= code <= 29 or code == 2:
searchstrs = strings[::-1]
else:
searchstrs = strings
search = 0
ret = strings
searchover = len(searchstrs)
for s in searchstrs:
search += 1
for leet in get_leet_cfg():
if leet[0] == s:
if code == 1:
ret = strings.replace(leet[0], leet[1])
return ret
elif code == 2:
ret = rreplace(strings, leet[0], leet[1])
return ret
elif 11 <= code <= 19:
ret = strings.replace(leet[0], leet[1], code % 10)
return ret
elif 21 <= code <= 29:
ret = rreplace(strings, leet[0], leet[1], code % 20)
return ret
else:
return strings
if search >= searchover:
return ret
else:
return strings
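# Rough reading of the `code` argument, derived from the branches above rather
# than from an external spec:
#   0       -> translate every leet-able character via str.translate
#   1 / 2   -> substitute the first leet-able character encountered when
#              scanning from the left / right (str.replace / rreplace on it)
#   11..19  -> like 1, but limit the substitution to (code % 10) occurrences
#   21..29  -> like 2, but limit the substitution to (code % 20) occurrences,
#              replaced from the right via rreplace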
|
import pytest
import sys
from case import mock
from kombu import compression
class test_compression:
def test_encoders__gzip(self):
assert 'application/x-gzip' in compression.encoders()
def test_encoders__bz2(self):
pytest.importorskip('bz2')
assert 'application/x-bz2' in compression.encoders()
def test_encoders__brotli(self):
pytest.importorskip('brotli')
assert 'application/x-brotli' in compression.encoders()
def test_encoders__lzma(self):
pytest.importorskip('lzma')
assert 'application/x-lzma' in compression.encoders()
def test_encoders__backports_lzma(self):
pytest.importorskip('backports.lzma')
assert 'application/x-lzma' in compression.encoders()
def test_encoders__zstd(self):
pytest.importorskip('zstandard')
assert 'application/zstd' in compression.encoders()
def test_compress__decompress__zlib(self):
text = b'The Quick Brown Fox Jumps Over The Lazy Dog'
c, ctype = compression.compress(text, 'zlib')
assert text != c
d = compression.decompress(c, ctype)
assert d == text
def test_compress__decompress__bzip2(self):
text = b'The Brown Quick Fox Over The Lazy Dog Jumps'
c, ctype = compression.compress(text, 'bzip2')
assert text != c
d = compression.decompress(c, ctype)
assert d == text
    def test_compress__decompress__brotli(self):
        pytest.importorskip('brotli')
        text = b'The Brown Quick Fox Over The Lazy Dog Jumps'
        c, ctype = compression.compress(text, 'brotli')
        assert text != c
        d = compression.decompress(c, ctype)
        assert d == text
def test_compress__decompress__lzma(self):
pytest.importorskip('lzma')
text = b'The Brown Quick Fox Over The Lazy Dog Jumps'
c, ctype = compression.compress(text, 'lzma')
assert text != c
d = compression.decompress(c, ctype)
assert d == text
def test_compress__decompress__backports_lzma(self):
pytest.importorskip('backports.lzma')
text = b'The Brown Quick Fox Over The Lazy Dog Jumps'
c, ctype = compression.compress(text, 'lzma')
assert text != c
d = compression.decompress(c, ctype)
assert d == text
def test_compress__decompress__zstd(self):
pytest.importorskip('zstandard')
text = b'The Brown Quick Fox Over The Lazy Dog Jumps'
c, ctype = compression.compress(text, 'zstd')
assert text != c
d = compression.decompress(c, ctype)
assert d == text
@mock.mask_modules('bz2')
def test_no_bz2(self):
c = sys.modules.pop('kombu.compression')
try:
import kombu.compression
assert not hasattr(kombu.compression, 'bz2')
finally:
if c is not None:
sys.modules['kombu.compression'] = c
|
from lxml import etree
from lxml.etree import tostring
from dateutil.parser import parse as parse_date
from datetime import datetime
import uuid
import cgi
import copy
__all__ = [
'ATOM', 'atom_ns', 'Element', 'tostring']
ATOM_NAMESPACE = atom_ns = 'http://www.w3.org/2005/Atom'
app_ns = 'http://www.w3.org/2007/app'
xhtml_ns = 'http://www.w3.org/1999/xhtml'
nsmap = {'': atom_ns, 'app': app_ns}
_rel_alternate_xpath = etree.XPath(
"./atom:link[not(@rel) or @rel = 'alternate']",
namespaces=dict(atom=atom_ns))
_rel_other_xpath = etree.XPath(
"./atom:link[@rel = $rel]",
namespaces=dict(atom=atom_ns))
class AtomLookup(etree.CustomElementClassLookup):
_elements = {}
_app_elements = {}
def lookup(self, node_type, document, namespace, name):
if node_type == 'element':
if namespace == atom_ns:
return self._elements.get(name, AtomElement)
elif namespace == app_ns:
return self._app_elements.get(name, APPElement)
## FIXME: is this default good?
return AtomElement
# Otherwise normal lookup
return None
atom_parser = etree.XMLParser()
atom_parser.setElementClassLookup(AtomLookup())
def parse(input):
return etree.parse(input, atom_parser)
def ATOM(atom):
"""
Parse an Atom document
"""
return etree.XML(atom, atom_parser)
def Element(tag, *args, **kw):
"""
Create an Atom element. Adds the Atom namespace if no namespace
is given.
"""
if '{' not in tag:
# No namespace means the atom namespace
tag = '{%s}%s' % (atom_ns, tag)
return atom_parser.makeelement(tag, *args, **kw)
def _strftime(d):
"""
Format a date the way Atom likes it (RFC3339?)
"""
return d.strftime('%Y-%m-%dT%H:%M:%SZ%z')
## try:
## from lxml import builder
## except ImportError:
## pass
## else:
## E = builder.ElementMaker(parser=atom_parser,
## typemap={datetime: lambda e, v: _strftime(v)})
from lxml import builder
E = builder.ElementMaker(#parser=atom_parser,
typemap={datetime: lambda e, v: _strftime(v)})
__all__.append('E')
class NoDefault:
pass
class _LiveList(list):
"""
This list calls on_add or on_remove whenever the list is modified.
"""
on_add = on_remove = None
name = None
def __init__(self, *args, **kw):
on_add = on_remove = name = None
if 'on_add' in kw:
on_add = kw.pop('on_add')
if 'on_remove' in kw:
on_remove = kw.pop('on_remove')
if 'name' in kw:
name = kw.pop('name')
list.__init__(self, *args, **kw)
self.on_add = on_add
self.on_remove = on_remove
self.name = name
def _make_list(self, obj):
if not isinstance(obj, (list, tuple)):
obj = list(obj)
return obj
def _do_add(self, items):
if self.on_add is not None:
for item in items:
self.on_add(self, item)
def _do_remove(self, items):
if self.on_remove is not None:
for item in items:
self.on_remove(self, item)
def __setslice__(self, i, j, other):
other = self._make_list(other)
old = self[i:j]
list.__setslice__(self, i, j, other)
self._do_remove(old)
self._do_add(other)
def __delslice__(self, i, j):
old = self[i:j]
list.__delslice__(self, i, j)
self._do_remove(old)
    def __iadd__(self, other):
        other = self._make_list(other)
        list.__iadd__(self, other)
        self._do_add(other)
        return self
    def __imul__(self, n):
        # repeat the list n times in place, firing the on_add/on_remove hooks
        if n <= 0:
            self[:] = []
            return self
        orig = list(self)
        for _ in range(n - 1):
            self.extend(orig)
        return self
def append(self, item):
list.append(self, item)
self._do_add([item])
def insert(self, i, item):
list.insert(self, i, item)
self._do_add([item])
def pop(self, i=-1):
item = self[i]
result = list.pop(self, i)
self._do_remove([item])
return result
def remove(self, item):
list.remove(self, item)
self._do_remove([item])
def extend(self, other):
for item in other:
self.append(item)
def __repr__(self):
name = self.name
if name is None:
name = '_LiveList'
return '%s(%s)' % (name, list.__repr__(self))
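# Minimal illustration of the hook behaviour (hypothetical callbacks):
#
#   def added(lst, item): print 'added', item
#   def removed(lst, item): print 'removed', item
#   ll = _LiveList([1, 2], on_add=added, on_remove=removed, name='demo')
#   ll.append(3)   # -> "added 3"
#   ll.pop()       # -> "removed 3"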
class _findall_property(object):
"""
Returns a LiveList of all the objects with the given tag. You can
append or remove items to the list to add or remove them from the
containing tag.
"""
def __init__(self, tag, ns=atom_ns):
self.tag = tag
self.ns = ns
        self.__doc__ = 'Return live list of all the <atom:%s> elements' % self.tag
def __get__(self, obj, type=None):
if obj is None:
return self
def add(lst, item):
# FIXME: shouldn't just be an append
obj.append(item)
def remove(lst, item):
obj.remove(item)
return _LiveList(obj._atom_iter(self.tag, ns=self.ns),
on_add=add, on_remove=remove,
name='live_%s_list' % self.tag)
def __set__(self, obj, value):
cur = self.__get__(obj)
cur[:] = value
class _text_element_property(object):
"""
Creates an attribute that returns the text content of the given
subelement. E.g., ``title = _text_element_property('title')``
will make ``obj.title`` return the contents of the ``<title>``.
Similarly setting the attribute sets the text content of the
attribute.
"""
def __init__(self, tag, strip=True):
self.tag = tag
self.strip = strip
self.__doc__ = 'Access the <atom:%s> element as text' % self.tag
def __get__(self, obj, type=None):
if obj is None:
return self
v = obj._atom_findtext(self.tag)
if self.strip:
if v is not None:
v = v.strip()
else:
return ''
return v
def __set__(self, obj, value):
el = obj._get_or_create(self.tag)
el.text = value
def __delete__(self, obj):
el = obj._atom_get(self.tag)
        if el is not None:
# FIXME: should it be an error if it doesn't exist?
obj.remove(el)
class _element_property(object):
"""
Returns a single subelement based on tag. Setting the attribute
removes the element and adds a new one. Deleting it removes the
element.
"""
def __init__(self, tag):
self.tag = tag
self.__doc__ = 'Get the <atom:%s> element' % self.tag
def __get__(self, obj, type=None):
if obj is None:
return self
return obj._atom_get(self.tag)
def __set__(self, obj, value):
el = obj._atom_get(self.tag)
if el is not None:
parent = el.getparent()
index = parent.index(el)
parent[index] = value
else:
obj.append(value)
    def __delete__(self, obj):
el = obj._atom_get(self.tag)
if el is not None:
obj.remove(el)
class _attr_element_property(object):
"""
Get/set the value of the attribute on this element.
"""
def __init__(self, attr, default=NoDefault):
self.attr = attr
self.default = default
self.__doc__ = 'Access the %s attribute' % self.attr
def __get__(self, obj, type=None):
if obj is None:
return self
try:
return obj.attrib[self.attr]
except KeyError:
if self.default is not NoDefault:
return self.default
raise AttributeError(self.attr)
def __set__(self, obj, value):
if value is None:
self.__delete__(obj)
else:
obj.attrib[self.attr] = value
def __delete__(self, obj):
if self.attr in obj.attrib:
del obj.attrib[self.attr]
class _date_element_property(object):
"""
Get/set the parsed date value of the text content of a tag.
"""
def __init__(self, tag, ns=atom_ns):
self.tag = tag
self.ns = ns
self.__doc__ = 'Access the date in %s' % self.tag
def __get__(self, obj, type=None):
if obj is None:
return self
el = obj._atom_get(self.tag, ns=self.ns)
if el is None:
return None
return el.date
def __set__(self, obj, value):
el = obj._get_or_create(self.tag, ns=self.ns)
el.date = value
    def __delete__(self, obj):
el = obj._atom_get(self.tag)
if el is not None:
obj.remove(el)
class _date_text_property(object):
def __get__(self, obj, type=None):
if obj is None:
return self
return parse_date(obj.text)
def __set__(self, obj, value):
if not value:
obj.text = None
return
if isinstance(value, datetime):
value = _strftime(value)
obj.text = value
    def __delete__(self, obj):
obj.text = None
class AtomElement(etree.ElementBase):
def _get_or_create(self, tag, ns=atom_ns):
el = self.find('{%s}%s' % (ns, tag))
if el is None:
el = self.makeelement('{%s}%s' % (ns, tag))
self.append(el)
return el
def _atom_get(self, tag, ns=atom_ns):
for item in self._atom_iter(tag, ns=ns):
return item
return None
def _atom_iter(self, tag, ns=atom_ns):
return self.getiterator('{%s}%s' % (ns, tag))
def _atom_findtext(self, tag, ns=atom_ns):
return self.findtext('{%s}%s' % (ns, tag))
def _get_parent(self, tag, ns=atom_ns):
parent = self
while 1:
if parent.tag == '{%s}%s' % (ns, tag):
return parent
parent = parent.getparent()
if parent is None:
return None
@property
def feed(self):
return self._get_parent('feed')
def rel_links(self, rel='alternate'):
"""
Return all the links with the given ``rel`` attribute. The
default relation is ``'alternate'``, and as specified for Atom
links with no ``rel`` attribute are assumed to mean alternate.
"""
if rel is None:
return self._atom_iter('link')
return [
el for el in self._atom_iter('link')
if el.get('rel') == rel
or rel == 'alternate' and not el.get('rel')]
def __repr__(self):
tag = self.tag
if '}' in tag:
tag = tag.split('}', 1)[1]
return '<%s.%s atom:%s at %s>' % (
self.__class__.__module__,
self.__class__.__name__,
tag,
hex(abs(id(self)))[2:])
class Feed(AtomElement):
"""
For ``<feed>`` elements.
"""
@property
def feed(self):
return self
entries = _findall_property('entry')
title = _text_element_property('title')
author = _element_property('author')
class Entry(AtomElement):
"""
For ``<entry>`` elements.
"""
@property
def entry(self):
return self
id = _text_element_property('id')
title = _text_element_property('title')
published = _date_element_property('published')
updated = _date_element_property('updated')
edited = _date_element_property('edited', ns=app_ns)
def update_edited(self):
"""
Set app:edited to current time
"""
self.edited = datetime.utcnow()
def update_updated(self):
"""
Set atom:updated to the current time
"""
self.updated = datetime.utcnow()
def make_id(self):
"""
Create an artificial id for this entry
"""
assert not self.id, (
"You cannot make an id if one already exists")
self.id = 'uuid:%s' % uuid.uuid4()
def author__get(self):
el = self._atom_get('author')
if el is None:
if self.feed is not None:
return self.feed.author
return el
def author__set(self, value):
el = self._atom_get('author')
if el is not None:
self.remove(el)
self.append(value)
def author__del(self):
el = self._atom_get('author')
if el is not None:
self.remove(el)
author = property(author__get, author__set, author__del)
categories = _findall_property('category')
class _EntryElement(AtomElement):
@property
def entry(self):
return self._get_parent('entry')
class Category(_EntryElement):
"""
For ``<category>`` elements.
"""
term = _attr_element_property('term')
scheme = _attr_element_property('scheme', None)
label = _attr_element_property('label', None)
def as_string(self):
"""
Returns the string representation of the category, using the
GData convention of ``{scheme}term``
"""
if self.scheme is not None:
return '{%s}%s' % (self.scheme, self.term)
else:
return self.term
class PersonElement(_EntryElement):
"""
Represents authors and contributors
"""
email = _text_element_property('email')
uri = _text_element_property('uri')
name = _text_element_property('name')
class DateElement(_EntryElement):
"""
For elements that contain a date in their text content.
"""
date = _date_text_property()
class TextElement(_EntryElement):
type = _attr_element_property('type', None)
src = _attr_element_property('src', None)
def _html__get(self):
"""
Gives the parsed HTML of element's content. May return an
HtmlElement (from lxml.html) or an XHTML tree. If the element
is ``type="text"`` then it is returned as quoted HTML.
You can also set this attribute to either an lxml.html
element, an XHTML element, or an HTML string.
Raises AttributeError if this is not HTML content.
"""
## FIXME: should this handle text/html types?
if self.type == 'html':
content = self.text
elif self.type == 'text':
content = cgi.escape(self.text)
elif self.type == 'xhtml':
div = copy.deepcopy(self[0])
# Now remove the namespaces:
for el in div.getiterator():
if el.tag.startswith('{'):
el.tag = el.tag.split('}', 1)[1]
if div.tag.startswith('{'):
                div.tag = div.tag.split('}', 1)[1]
from lxml.html import tostring
content = tostring(div)
else:
raise AttributeError(
"Not an HTML or text content (type=%r)" % self.type)
from lxml.html import fromstring
return fromstring(content)
def _html__set(self, value):
if value is None:
del self.html
return
if isinstance(value, basestring):
# Some HTML text
self.type = 'html'
self.text = value
return
if value.tag.startswith('{%s}' % xhtml_ns):
if value.tag != '{%s}div' % xhtml_ns:
# Need to wrap it in a <div>
el = self.makeelement('{%s}div' % xhtml_ns)
el.append(value)
value = el
self[:] = []
self.type = 'xhtml'
self.append(value)
return
from lxml import html
if isinstance(value, html.HtmlElement):
value = tostring(value)
self[:] = []
self.type = 'html'
self.text = value
return
raise TypeError(
"Unknown HTML type: %s" % type(value))
def _html__del(self):
self.text = None
html = property(_html__get, _html__set, _html__del, doc=_html__get.__doc__)
def _binary__get(self):
"""
Gets/sets the binary content, which is base64 encoded in the
text.
"""
text = self.text
if text is None:
raise AttributeError(
"No text (maybe in src?)")
text = text.decode('base64')
return text
def _binary__set(self, value):
if isinstance(value, unicode):
## FIXME: is this kosher?
value = value.encode('utf8')
if not isinstance(value, str):
raise TypeError(
"Must set .binary to a str or unicode object (not %s)"
% type(value))
value = value.encode('base64')
self.text = value
def _binary__del(self):
self.text = None
binary = property(_binary__get, _binary__set, _binary__del, doc=_binary__get.__doc__)
class LinkElement(_EntryElement):
"""
For ``<link>`` elements.
"""
href = _attr_element_property('href', None)
rel = _attr_element_property('rel', None)
type = _attr_element_property('type', None)
title = _attr_element_property('title', None)
def __repr__(self):
return '<%s.%s at %s rel=%r href=%r>' % (
self.__class__.__module__,
self.__class__.__name__,
hex(abs(id(self)))[2:],
self.rel, self.href)
AtomLookup._elements.update(dict(
feed=Feed,
entry=Entry,
category=Category,
author=PersonElement,
contributor=PersonElement,
published=DateElement,
updated=DateElement,
content=TextElement,
summary=TextElement,
title=TextElement,
rights=TextElement,
subtitle=TextElement,
link=LinkElement,
))
class APPElement(etree.ElementBase):
def __repr__(self):
tag = self.tag
if '}' in tag:
tag = tag.split('}', 1)[1]
return '<%s.%s app:%s at %s>' % (
self.__class__.__module__,
self.__class__.__name__,
tag,
hex(abs(id(self)))[2:])
class Service(APPElement):
workspaces = _findall_property('workspace', ns=app_ns)
class Workspace(APPElement):
collections = _findall_property('collection', ns=app_ns)
class Collection(APPElement):
pass
class Edited(APPElement):
date = _date_text_property()
AtomLookup._app_elements.update(dict(
service=Service,
workspace=Workspace,
collection=Collection,
edited=Edited,
))
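# A small usage sketch of the classes wired up above (feed text is hypothetical):
#
#   feed = ATOM('''<feed xmlns="http://www.w3.org/2005/Atom">
#     <title>demo</title>
#     <entry><title>hello</title><id>uuid:1234</id></entry>
#   </feed>''')
#   print feed.title              # -> 'demo'
#   entry = feed.entries[0]
#   print entry.id, entry.title   # -> uuid:1234 hello
#   entry.rel_links()             # links whose rel is "alternate" (or unset)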
|
from abodepy.exceptions import AbodeAuthenticationException
from homeassistant import data_entry_flow
from homeassistant.components.abode import config_flow
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, HTTP_INTERNAL_SERVER_ERROR
from tests.async_mock import patch
from tests.common import MockConfigEntry
CONF_POLLING = "polling"
async def test_show_form(hass):
"""Test that the form is served with no input."""
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_one_config_allowed(hass):
"""Test that only one Abode configuration is allowed."""
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
MockConfigEntry(
domain="abode",
data={CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"},
).add_to_hass(hass)
step_user_result = await flow.async_step_user()
assert step_user_result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert step_user_result["reason"] == "single_instance_allowed"
conf = {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_POLLING: False,
}
import_config_result = await flow.async_step_import(conf)
assert import_config_result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert import_config_result["reason"] == "single_instance_allowed"
async def test_invalid_credentials(hass):
"""Test that invalid credentials throws an error."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
with patch(
"homeassistant.components.abode.config_flow.Abode",
side_effect=AbodeAuthenticationException((400, "auth error")),
):
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {"base": "invalid_auth"}
async def test_connection_error(hass):
"""Test other than invalid credentials throws an error."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
with patch(
"homeassistant.components.abode.config_flow.Abode",
side_effect=AbodeAuthenticationException(
(HTTP_INTERNAL_SERVER_ERROR, "connection error")
),
):
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {"base": "cannot_connect"}
async def test_step_import(hass):
"""Test that the import step works."""
conf = {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_POLLING: False,
}
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
with patch("homeassistant.components.abode.config_flow.Abode"):
result = await flow.async_step_import(import_config=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
result = await flow.async_step_user(user_input=result["data"])
assert result["title"] == "[email protected]"
assert result["data"] == {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_POLLING: False,
}
async def test_step_user(hass):
"""Test that the user step works."""
conf = {CONF_USERNAME: "[email protected]", CONF_PASSWORD: "password"}
flow = config_flow.AbodeFlowHandler()
flow.hass = hass
with patch("homeassistant.components.abode.config_flow.Abode"):
result = await flow.async_step_user(user_input=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "[email protected]"
assert result["data"] == {
CONF_USERNAME: "[email protected]",
CONF_PASSWORD: "password",
CONF_POLLING: False,
}
|
import numpy as np
from matchzoo.engine.base_metric import sort_and_couple
from matchzoo import metrics
def test_sort_and_couple():
l = [0, 1, 2]
s = [0.1, 0.4, 0.2]
c = sort_and_couple(l, s)
assert (c == np.array([(1, 0.4), (2, 0.2), (0, 0.1)])).all()
def test_mean_reciprocal_rank():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
assert metrics.MeanReciprocalRank()(label, score) == 1
def test_precision_at_k():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
assert metrics.Precision(k=1)(label, score) == 1.
assert metrics.Precision(k=2)(label, score) == 1.
assert round(metrics.Precision(k=3)(label, score), 2) == 0.67
def test_average_precision():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
assert round(metrics.AveragePrecision()(label, score), 2) == 0.89
def test_mean_average_precision():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
assert metrics.MeanAveragePrecision()(label, score) == 1.
def test_dcg_at_k():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
dcg = metrics.DiscountedCumulativeGain
assert round(dcg(k=1)(label, score), 2) == 1.44
assert round(dcg(k=2)(label, score), 2) == 4.17
assert round(dcg(k=3)(label, score), 2) == 4.17
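# Worked numbers for the DCG assertions above, assuming a natural-log discount
# of sum((2**label - 1) / ln(i + 2)) over the score-sorted top-k, which matches
# the asserted values:
#   sorted pairs: (label=1, 0.4), (label=2, 0.2), (label=0, 0.1)
#   k=1: (2**1 - 1) / ln(2)             ~= 1.44
#   k=2: 1.44 + (2**2 - 1) / ln(3)      ~= 4.17
#   k=3: adds (2**0 - 1) / ln(4) = 0    -> still 4.17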
def test_ndcg_at_k():
label = [0, 1, 2]
score = [0.1, 0.4, 0.2]
ndcg = metrics.NormalizedDiscountedCumulativeGain
assert round(ndcg(k=1)(label, score), 2) == 0.33
assert round(ndcg(k=2)(label, score), 2) == 0.80
assert round(ndcg(k=3)(label, score), 2) == 0.80
|
import os
import subprocess
import sys
import time
from importlib import import_module
from shutil import copyfile
from celery.schedules import crontab
from django.conf import settings
from django.core.cache import cache
from django.core.management.commands import diffsettings
import weblate
from weblate.formats.models import FILE_FORMATS
from weblate.trans.util import get_clean_env
from weblate.utils.celery import app
from weblate.utils.data import data_dir
from weblate.utils.errors import report_error
from weblate.vcs.models import VCS_REGISTRY
@app.task(trail=False)
def ping():
return {
"version": weblate.GIT_VERSION,
"vcs": sorted(VCS_REGISTRY.keys()),
"formats": sorted(FILE_FORMATS.keys()),
"encoding": [sys.getfilesystemencoding(), sys.getdefaultencoding()],
}
@app.task(trail=False)
def heartbeat():
cache.set("celery_loaded", time.time())
cache.set("celery_heartbeat", time.time())
def ensure_backup_dir():
backup_dir = data_dir("backups")
if not os.path.exists(backup_dir):
os.makedirs(backup_dir)
@app.task(trail=False)
def settings_backup():
ensure_backup_dir()
# Expand settings in case it contains non-trivial code
command = diffsettings.Command()
kwargs = {"default": None, "all": False, "output": "hash"}
with open(data_dir("backups", "settings-expanded.py"), "w") as handle:
handle.write(command.handle(**kwargs))
# Backup original settings
if settings.SETTINGS_MODULE:
settings_mod = import_module(settings.SETTINGS_MODULE)
copyfile(settings_mod.__file__, data_dir("backups", "settings.py"))
@app.task(trail=False)
def database_backup():
if settings.DATABASE_BACKUP == "none":
return
ensure_backup_dir()
database = settings.DATABASES["default"]
if database["ENGINE"] != "django.db.backends.postgresql":
return
cmd = ["pg_dump", "--dbname", database["NAME"]]
if database["HOST"]:
cmd += ["--host", database["HOST"]]
if database["PORT"]:
cmd += ["--port", database["PORT"]]
if database["USER"]:
cmd += ["--username", database["USER"]]
if settings.DATABASE_BACKUP == "compressed":
cmd += ["--file", data_dir("backups", "database.sql.gz")]
cmd += ["--compress", "6"]
else:
cmd += ["--file", data_dir("backups", "database.sql")]
try:
subprocess.run(
cmd,
env=get_clean_env({"PGPASSWORD": database["PASSWORD"]}),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.DEVNULL,
check=True,
universal_newlines=True,
)
except subprocess.CalledProcessError as error:
report_error(extra_data={"stdout": error.stdout, "stderr": error.stderr})
raise
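# For reference, with DATABASE_BACKUP == "compressed" and a hypothetical
# PostgreSQL database "weblate" on localhost:5432, the command assembled above
# is roughly:
#   pg_dump --dbname weblate --host localhost --port 5432 --username weblate \
#           --file <data_dir>/backups/database.sql.gz --compress 6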
@app.on_after_finalize.connect
def setup_periodic_tasks(sender, **kwargs):
cache.set("celery_loaded", time.time())
sender.add_periodic_task(
crontab(hour=1, minute=0), settings_backup.s(), name="settings-backup"
)
sender.add_periodic_task(
crontab(hour=1, minute=30), database_backup.s(), name="database-backup"
)
sender.add_periodic_task(60, heartbeat.s(), name="heartbeat")
|
import re
import string
from lib.fun.fun import cool
from itertools import groupby
from collections import Counter
from lib.data.data import pyoptions
def lenght_filter(item, minlen=pyoptions.minlen, maxlen=pyoptions.maxlen, lenght_is_filter=False):
if item and lenght_is_filter:
if minlen <= len(item) <= maxlen:
return item
else:
return item
def headtail_filter(item, head='', tail=''):
if item:
return head + item + tail
else:
return item
def encode_filter(item, encode='none'):
return pyoptions.operator.get(encode)(item)
def occur_filter(item, letter_occur=pyoptions.letter_occur, digital_occur=pyoptions.digital_occur,
special_occur=pyoptions.special_occur, occur_is_filter=False):
l_count = d_count = s_count = 0
def occur():
l_op_inner = d_op_inner = s_op_inner = '<='
l_wantcount = d_wantcount = s_wantcount = 99
pattern = '((<|>|=)=?)(\d*)$'
letter_match = re.match(pattern, letter_occur)
digital_match = re.match(pattern, digital_occur)
special_match = re.match(pattern, special_occur)
if letter_match and letter_match.group():
l_op_inner = letter_match.group(1)
l_wantcount = int(letter_match.group(len(letter_match.groups())))
if digital_match and digital_match.group():
d_op_inner = digital_match.group(1)
d_wantcount = int(digital_match.group(len(digital_match.groups())))
if special_match and special_match.group():
s_op_inner = special_match.group(1)
s_wantcount = int(special_match.group(len(special_match.groups())))
pyoptions.letter_occur = l_op_inner + str(l_wantcount)
pyoptions.digital_occur = d_op_inner + str(d_wantcount)
        pyoptions.special_occur = s_op_inner + str(s_wantcount)
return l_op_inner, l_wantcount, d_op_inner, d_wantcount, s_op_inner, s_wantcount
l_op, l_wantcount, d_op, d_wantcount, s_op, s_wantcount = occur()
if item and occur_is_filter:
for word in item:
if word in string.ascii_letters:
l_count += 1
elif word in string.digits:
d_count += 1
elif word in string.printable[62:-5]:
s_count += 1
letter_map = {'<': l_count < l_wantcount, '<=': l_count <= l_wantcount,
'>': l_count > l_wantcount, '>=': l_count >= l_wantcount,
'=': l_count == l_wantcount, '==': l_count == l_wantcount, }
digital_map = {'<': d_count < d_wantcount, '<=': d_count <= d_wantcount,
'>': d_count > d_wantcount, '>=': d_count >= d_wantcount,
'=': d_count == d_wantcount, '==': d_count == d_wantcount, }
special_map = {'<': s_count < s_wantcount, '<=': s_count <= s_wantcount,
'>': s_count > s_wantcount, '>=': s_count >= s_wantcount,
'=': s_count == s_wantcount, '==': s_count == s_wantcount, }
if letter_map[l_op] and digital_map[d_op] and special_map[s_op]:
return item
else:
return item
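# The *_occur specs are short strings such as "<=3", ">0" or "=2": an optional
# comparison operator followed by a count, parsed by the pattern above. With
# occur_is_filter=True an item is kept only when its letter, digit and
# special-character counts all satisfy their specs (otherwise None is returned
# and the word is dropped); with occur_is_filter=False the item passes through.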
def types_filter(item, letter_types=pyoptions.letter_types, digital_types=pyoptions.digital_types,
special_types=pyoptions.special_types, types_is_filter=False):
if item and types_is_filter:
l_types = d_types = s_types = 0
l_op = d_op = s_op = '>='
l_wanttypes = d_wanttypes = s_wanttypes = 0
        pattern = r'((<|>|=)=?)(\d*)$'
letter_match = re.match(pattern, letter_types)
digital_match = re.match(pattern, digital_types)
special_match = re.match(pattern, special_types)
if letter_match and letter_match.group():
l_op = letter_match.group(1)
l_wanttypes = int(letter_match.group(len(letter_match.groups())))
if digital_match and digital_match.group():
d_op = digital_match.group(1)
d_wanttypes = int(digital_match.group(len(digital_match.groups())))
if special_match and special_match.group():
s_op = special_match.group(1)
s_wanttypes = int(special_match.group(len(special_match.groups())))
wordicts = dict(Counter(item))
for key in wordicts.keys():
if key in string.ascii_letters:
l_types += 1
elif key in string.digits:
d_types += 1
elif key in string.printable[62:-5]:
s_types += 1
letter_map = {'<': l_types < l_wanttypes, '<=': l_types <= l_wanttypes,
'>': l_types > l_wanttypes, '>=': l_types >= l_wanttypes,
'=': l_types == l_wanttypes, '==': l_types == l_wanttypes, }
digital_map = {'<': d_types < d_wanttypes, '<=': d_types <= d_wanttypes,
'>': d_types > d_wanttypes, '>=': d_types >= d_wanttypes,
'=': d_types == d_wanttypes, '==': d_types == d_wanttypes, }
special_map = {'<': s_types < s_wanttypes, '<=': s_types <= s_wanttypes,
'>': s_types > s_wanttypes, '>=': s_types >= s_wanttypes,
'=': s_types == s_wanttypes, '==': s_types == s_wanttypes, }
if letter_map[l_op] and digital_map[d_op] and special_map[s_op]:
return item
else:
return item
def repeat_filter(item, letter_repeat=pyoptions.letter_repeat, digital_repeat=pyoptions.digital_repeat,
special_repeat=pyoptions.special_repeat, repeat_is_filter=False):
if item and repeat_is_filter:
l_repeat = d_repeat = s_repeat = 0
l_op = d_op = s_op = '>='
l_wantrepeat = d_wantrepeat = s_wantrepeat = 0
        pattern = r'((<|>|=)=?)(\d*)$'
letter_match = re.match(pattern, letter_repeat)
digital_match = re.match(pattern, digital_repeat)
special_match = re.match(pattern, special_repeat)
if letter_match and letter_match.group():
l_op = letter_match.group(1)
l_wantrepeat = int(letter_match.group(len(letter_match.groups())))
if digital_match and digital_match.group():
d_op = digital_match.group(1)
d_wantrepeat = int(digital_match.group(len(digital_match.groups())))
if special_match and special_match.group():
s_op = special_match.group(1)
s_wantrepeat = int(special_match.group(len(special_match.groups())))
groups = groupby(item)
repeat_dict = [{label: sum(1 for _ in group)} for label, group in groups]
for r in repeat_dict:
            key, value = next(iter(r.items()))
if key in string.ascii_letters:
l_repeat = max(l_repeat, value)
elif key in string.digits:
d_repeat = max(d_repeat, value)
elif key in string.printable[62:-5]:
s_repeat = max(s_repeat, value)
letter_map = {'<': l_repeat < l_wantrepeat, '<=': l_repeat <= l_wantrepeat,
'>': l_repeat > l_wantrepeat, '>=': l_repeat >= l_wantrepeat,
'=': l_repeat == l_wantrepeat, '==': l_repeat == l_wantrepeat, }
digital_map = {'<': d_repeat < d_wantrepeat, '<=': d_repeat <= d_wantrepeat,
'>': d_repeat > d_wantrepeat, '>=': d_repeat >= d_wantrepeat,
'=': d_repeat == d_wantrepeat, '==': d_repeat == d_wantrepeat, }
special_map = {'<': s_repeat < s_wantrepeat, '<=': s_repeat <= s_wantrepeat,
'>': s_repeat > s_wantrepeat, '>=': s_repeat >= s_wantrepeat,
'=': s_repeat == s_wantrepeat, '==': s_repeat == s_wantrepeat, }
if letter_map[l_op] and digital_map[d_op] and special_map[s_op]:
return item
else:
return item
def regex_filter(item, regex='.*?', regex_is_filter=False):
if item and regex_is_filter:
try:
if re.match(regex, item):
return item
except:
pass
else:
return item
def cutout_filter(lists, start='pos-1', end='pos--1', cutout_is_filter=False):
start_pos = 0
end_pos = len(lists)
def pos_change(position, init_pos, is_start=True):
final_pos = init_pos
try:
            match = re.match(r'pos-(\d*)', position)
if match.group(1):
final_pos = int(match.group(1)) - 1 if is_start else int(match.group(1))
else:
                match = re.match(r'pos--(\d*)', position)
if match.group(1):
pos = -int(match.group(1))
if is_start:
final_pos = pos - 1 if pos >= 1 else len(lists) + pos
else:
final_pos = pos if pos >= 1 else len(lists) + pos + 1
except:
pos = 0
for _ in lists:
if position == str(_):
final_pos = pos if is_start else pos + 1
pos += 1
return final_pos
if lists and cutout_is_filter:
start_pos = pos_change(start, start_pos, is_start=True)
end_pos = pos_change(end, end_pos, is_start=False)
        if start_pos == 0 and start != 'pos-1':
            print(cool.fuchsia('[!] invalid start position, using default' + pyoptions.CRLF))
        elif end_pos == len(lists) and end != 'pos--1':
            print(cool.fuchsia('[!] invalid end position, using default' + pyoptions.CRLF))
        elif start_pos >= end_pos:
            start_pos = 0
            end_pos = len(lists)
            exit(cool.red('[!] start position is behind the end position' + pyoptions.CRLF))
        elif start_pos > len(lists) - 1:
            print(cool.fuchsia('[!] start position beyond the scope' + pyoptions.CRLF))
        elif end_pos > len(lists):
            print(cool.fuchsia('[!] end position beyond the scope' + pyoptions.CRLF))
for _ in lists[start_pos:end_pos]:
return _
else:
for _ in lists:
return _
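# Illustrative examples of how pos_change() above interprets positions
# (derived from the code; assume a list of 10 items):
#   start='pos-3'   -> start index 2              (1-based input, slice start)
#   end='pos-3'     -> end index 3                (slice end is exclusive)
#   start='pos--2'  -> len(lists) - 2             (counted from the tail)
#   start='foo'     -> index of the first element equal to the literal 'foo'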
def filterforfun(item):
item = headtail_filter(item, head=pyoptions.head, tail=pyoptions.tail)
item = lenght_filter(item, minlen=pyoptions.minlen, maxlen=pyoptions.maxlen, lenght_is_filter=pyoptions.args_pick)
item = occur_filter(item, letter_occur=pyoptions.letter_occur, digital_occur=pyoptions.digital_occur,
special_occur=pyoptions.special_occur, occur_is_filter=pyoptions.occur_is_filter)
item = types_filter(item, letter_types=pyoptions.letter_types, digital_types=pyoptions.digital_types,
special_types=pyoptions.special_types, types_is_filter=pyoptions.types_is_filter)
item = repeat_filter(item, letter_repeat=pyoptions.letter_repeat, digital_repeat=pyoptions.digital_repeat,
special_repeat=pyoptions.special_repeat, repeat_is_filter=pyoptions.repeat_is_filter)
item = regex_filter(item, regex=pyoptions.filter_regex, regex_is_filter=pyoptions.regex_is_filter)
item = encode_filter(item, encode=pyoptions.encode)
return item
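if __name__ == '__main__':
    # Illustrative sketch only, not part of the original module: exercise the
    # type/occurrence filters with explicit "<op><count>" specs instead of the
    # pyoptions defaults. Running it still requires the surrounding project,
    # since this module imports lib.fun.fun and lib.data.data at the top.
    # '>=0' on the special-character spec places no real constraint, so
    # 'abc123' passes both checks and is printed twice.
    print(types_filter('abc123', letter_types='>=2', digital_types='>=1',
                       special_types='>=0', types_is_filter=True))
    print(occur_filter('abc123', letter_occur='<=3', digital_occur='<=3',
                       special_occur='<=0', occur_is_filter=True))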
|
import argparse
import os
import sys
import logging
import threading
import time
from queue import Queue
import Pyro4
from gensim import utils
from gensim.models.lda_worker import LDA_WORKER_PREFIX
logger = logging.getLogger("gensim.models.lda_dispatcher")
# How many jobs (=chunks of N documents) to keep "pre-fetched" in a queue?
# A small number is usually enough, unless iteration over the corpus is very very
# slow (slower than the actual computation of LDA), in which case you can override
# this value from the command line, e.g. run "python ./lda_dispatcher.py 100"
MAX_JOBS_QUEUE = 10
# timeout for the Queue object put/get blocking methods.
# it should theoretically be infinity, but then keyboard interrupts don't work.
# so this is really just a hack, see http://bugs.python.org/issue1360
HUGE_TIMEOUT = 365 * 24 * 60 * 60 # one year
LDA_DISPATCHER_PREFIX = 'gensim.lda_dispatcher'
class Dispatcher:
"""Dispatcher object that communicates and coordinates individual workers.
Warnings
--------
There should never be more than one dispatcher running at any one time.
"""
def __init__(self, maxsize=MAX_JOBS_QUEUE, ns_conf=None):
"""Partly initializes the dispatcher.
A full initialization (including initialization of the workers) requires a call to
:meth:`~gensim.models.lda_dispatcher.Dispatcher.initialize`
Parameters
----------
maxsize : int, optional
Maximum number of jobs to be kept pre-fetched in the queue.
ns_conf : dict of (str, object)
Sets up the name server configuration for the pyro daemon server of dispatcher.
This also helps to keep track of your objects in your network by using logical object names
            instead of the exact object name (or id) and its location.
"""
self.maxsize = maxsize
self.callback = None
self.ns_conf = ns_conf if ns_conf is not None else {}
@Pyro4.expose
def initialize(self, **model_params):
"""Fully initialize the dispatcher and all its workers.
Parameters
----------
**model_params
Keyword parameters used to initialize individual workers, see :class:`~gensim.models.ldamodel.LdaModel`.
Raises
------
RuntimeError
            When no workers are found (the :mod:`gensim.models.lda_worker` script must be run beforehand).
"""
self.jobs = Queue(maxsize=self.maxsize)
self.lock_update = threading.Lock()
self._jobsdone = 0
self._jobsreceived = 0
self.workers = {}
with utils.getNS(**self.ns_conf) as ns:
self.callback = Pyro4.Proxy(ns.list(prefix=LDA_DISPATCHER_PREFIX)[LDA_DISPATCHER_PREFIX])
for name, uri in ns.list(prefix=LDA_WORKER_PREFIX).items():
try:
worker = Pyro4.Proxy(uri)
workerid = len(self.workers)
# make time consuming methods work asynchronously
logger.info("registering worker #%i at %s", workerid, uri)
worker.initialize(workerid, dispatcher=self.callback, **model_params)
self.workers[workerid] = worker
except Pyro4.errors.PyroError:
logger.warning("unresponsive worker at %s,deleting it from the name server", uri)
ns.remove(name)
if not self.workers:
raise RuntimeError('no workers found; run some lda_worker scripts on your machines first!')
@Pyro4.expose
def getworkers(self):
"""Return pyro URIs of all registered workers.
Returns
-------
list of URIs
The pyro URIs for each worker.
"""
return [worker._pyroUri for worker in self.workers.values()]
@Pyro4.expose
def getjob(self, worker_id):
"""Atomically pop a job from the queue.
Parameters
----------
worker_id : int
The worker that requested the job.
Returns
-------
iterable of list of (int, float)
The corpus in BoW format.
"""
logger.info("worker #%i requesting a new job", worker_id)
job = self.jobs.get(block=True, timeout=1)
logger.info("worker #%i got a new job (%i left)", worker_id, self.jobs.qsize())
return job
@Pyro4.expose
def putjob(self, job):
"""Atomically add a job to the queue.
Parameters
----------
job : iterable of list of (int, float)
The corpus in BoW format.
"""
self._jobsreceived += 1
self.jobs.put(job, block=True, timeout=HUGE_TIMEOUT)
logger.info("added a new job (len(queue)=%i items)", self.jobs.qsize())
@Pyro4.expose
def getstate(self):
"""Merge states from across all workers and return the result.
Returns
-------
:class:`~gensim.models.ldamodel.LdaState`
Merged resultant state
"""
logger.info("end of input, assigning all remaining jobs")
logger.debug("jobs done: %s, jobs received: %s", self._jobsdone, self._jobsreceived)
i = 0
count = 10
while self._jobsdone < self._jobsreceived:
time.sleep(0.5) # check every half a second
i += 1
if i > count:
i = 0
for workerid, worker in self.workers.items():
logger.info("checking aliveness for worker %s", workerid)
worker.ping()
logger.info("merging states from %i workers", len(self.workers))
workers = list(self.workers.values())
result = workers[0].getstate()
for worker in workers[1:]:
result.merge(worker.getstate())
logger.info("sending out merged state")
return result
@Pyro4.expose
def reset(self, state):
"""Reinitialize all workers for a new EM iteration.
Parameters
----------
state : :class:`~gensim.models.ldamodel.LdaState`
State of :class:`~gensim.models.lda.LdaModel`.
"""
for workerid, worker in self.workers.items():
logger.info("resetting worker %s", workerid)
worker.reset(state)
worker.requestjob()
self._jobsdone = 0
self._jobsreceived = 0
@Pyro4.expose
@Pyro4.oneway
@utils.synchronous('lock_update')
def jobdone(self, workerid):
"""A worker has finished its job. Log this event and then asynchronously transfer control back to the worker.
Callback used by workers to notify when their job is done.
        The job done event is logged and then control is asynchronously transferred back to the worker
(who can then request another job). In this way, control flow basically oscillates between
:meth:`gensim.models.lda_dispatcher.Dispatcher.jobdone` and :meth:`gensim.models.lda_worker.Worker.requestjob`.
Parameters
----------
workerid : int
The ID of the worker that finished the job (used for logging).
"""
self._jobsdone += 1
logger.info("worker #%s finished job #%i", workerid, self._jobsdone)
self.workers[workerid].requestjob() # tell the worker to ask for another job, asynchronously (one-way)
def jobsdone(self):
"""Wrap :attr:`~gensim.models.lda_dispatcher.Dispatcher._jobsdone` needed for remote access through proxies.
Returns
-------
int
Number of jobs already completed.
"""
return self._jobsdone
@Pyro4.oneway
def exit(self):
"""Terminate all registered workers and then the dispatcher."""
for workerid, worker in self.workers.items():
logger.info("terminating worker %s", workerid)
worker.exit()
logger.info("terminating dispatcher")
os._exit(0) # exit the whole process (not just this thread ala sys.exit())
def main():
parser = argparse.ArgumentParser(description=__doc__[:-135], formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument(
"--maxsize",
help="How many jobs (=chunks of N documents) to keep 'pre-fetched' in a queue (default: %(default)s)",
type=int, default=MAX_JOBS_QUEUE
)
parser.add_argument("--host", help="Nameserver hostname (default: %(default)s)", default=None)
parser.add_argument("--port", help="Nameserver port (default: %(default)s)", default=None, type=int)
parser.add_argument("--no-broadcast", help="Disable broadcast (default: %(default)s)",
action='store_const', default=True, const=False)
parser.add_argument("--hmac", help="Nameserver hmac key (default: %(default)s)", default=None)
parser.add_argument(
'-v', '--verbose',
help='Verbose flag',
action='store_const', dest="loglevel", const=logging.INFO, default=logging.WARNING
)
args = parser.parse_args()
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=args.loglevel)
logger.info("running %s", " ".join(sys.argv))
ns_conf = {
"broadcast": args.no_broadcast,
"host": args.host,
"port": args.port,
"hmac_key": args.hmac
}
utils.pyro_daemon(LDA_DISPATCHER_PREFIX, Dispatcher(maxsize=args.maxsize, ns_conf=ns_conf), ns_conf=ns_conf)
logger.info("finished running %s", " ".join(sys.argv))
if __name__ == '__main__':
main()
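# Rough sketch of how this dispatcher is typically wired up for distributed LDA
# (commands follow gensim's distributed-computing documentation; verify the
# exact invocations against your gensim/Pyro4 versions):
#
#   python -m Pyro4.naming -n 0.0.0.0 &           # start a Pyro4 name server
#   python -m gensim.models.lda_worker &          # one per worker machine/CPU
#   python -m gensim.models.lda_dispatcher &      # exactly one dispatcher
#
# On the client side, pass distributed=True so training jobs are routed through
# Dispatcher.putjob()/getjob():
#
#   lda = gensim.models.LdaModel(corpus, id2word=id2word, num_topics=100,
#                                distributed=True)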
|
from datetime import timedelta
import logging
from numbers import Number
from pyflume import FlumeData
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
DEFAULT_NAME,
DOMAIN,
FLUME_AUTH,
FLUME_DEVICES,
FLUME_HTTP_SESSION,
FLUME_QUERIES_SENSOR,
FLUME_TYPE_SENSOR,
KEY_DEVICE_ID,
KEY_DEVICE_LOCATION,
KEY_DEVICE_LOCATION_NAME,
KEY_DEVICE_LOCATION_TIMEZONE,
KEY_DEVICE_TYPE,
)
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=15)
SCAN_INTERVAL = timedelta(minutes=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Import the platform into a config entry."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Flume sensor."""
flume_domain_data = hass.data[DOMAIN][config_entry.entry_id]
flume_auth = flume_domain_data[FLUME_AUTH]
http_session = flume_domain_data[FLUME_HTTP_SESSION]
flume_devices = flume_domain_data[FLUME_DEVICES]
config = config_entry.data
name = config.get(CONF_NAME, DEFAULT_NAME)
flume_entity_list = []
for device in flume_devices.device_list:
if device[KEY_DEVICE_TYPE] != FLUME_TYPE_SENSOR:
continue
device_id = device[KEY_DEVICE_ID]
device_name = device[KEY_DEVICE_LOCATION][KEY_DEVICE_LOCATION_NAME]
device_timezone = device[KEY_DEVICE_LOCATION][KEY_DEVICE_LOCATION_TIMEZONE]
device_friendly_name = f"{name} {device_name}"
flume_device = FlumeData(
flume_auth,
device_id,
device_timezone,
SCAN_INTERVAL,
update_on_init=False,
http_session=http_session,
)
coordinator = _create_flume_device_coordinator(hass, flume_device)
for flume_query_sensor in FLUME_QUERIES_SENSOR.items():
flume_entity_list.append(
FlumeSensor(
coordinator,
flume_device,
flume_query_sensor,
f"{device_friendly_name} {flume_query_sensor[1]['friendly_name']}",
device_id,
)
)
if flume_entity_list:
async_add_entities(flume_entity_list)
class FlumeSensor(CoordinatorEntity):
"""Representation of the Flume sensor."""
def __init__(self, coordinator, flume_device, flume_query_sensor, name, device_id):
"""Initialize the Flume sensor."""
super().__init__(coordinator)
self._flume_device = flume_device
self._flume_query_sensor = flume_query_sensor
self._name = name
self._device_id = device_id
self._state = None
@property
def device_info(self):
"""Device info for the flume sensor."""
return {
"name": self._name,
"identifiers": {(DOMAIN, self._device_id)},
"manufacturer": "Flume, Inc.",
"model": "Flume Smart Water Monitor",
}
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
sensor_key = self._flume_query_sensor[0]
if sensor_key not in self._flume_device.values:
return None
return _format_state_value(self._flume_device.values[sensor_key])
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
# This is in gallons per SCAN_INTERVAL
return self._flume_query_sensor[1]["unit_of_measurement"]
@property
def unique_id(self):
"""Flume query and Device unique ID."""
return f"{self._flume_query_sensor[0]}_{self._device_id}"
async def async_added_to_hass(self):
"""Request an update when added."""
await super().async_added_to_hass()
# We do not ask for an update with async_add_entities()
# because it will update disabled entities
await self.coordinator.async_request_refresh()
def _format_state_value(value):
return round(value, 1) if isinstance(value, Number) else None
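# Examples (derived from the helper above): numeric readings are rounded to one
# decimal place, anything non-numeric is reported as unknown.
#   _format_state_value(1.2345)  -> 1.2
#   _format_state_value("n/a")   -> None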
def _create_flume_device_coordinator(hass, flume_device):
"""Create a data coordinator for the flume device."""
async def _async_update_data():
"""Get the latest data from the Flume."""
_LOGGER.debug("Updating Flume data")
try:
await hass.async_add_executor_job(flume_device.update_force)
except Exception as ex: # pylint: disable=broad-except
raise UpdateFailed(f"Error communicating with flume API: {ex}") from ex
_LOGGER.debug(
"Flume update details: %s",
{
"values": flume_device.values,
"query_payload": flume_device.query_payload,
},
)
return DataUpdateCoordinator(
hass,
_LOGGER,
# Name of the data. For logging purposes.
name=flume_device.device_id,
update_method=_async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=SCAN_INTERVAL,
)
|
from django.db.models import Count
from django.shortcuts import get_object_or_404
from django.views.generic.list import BaseListView
from django.views.generic.list import ListView
from zinnia.models.category import Category
from zinnia.settings import PAGINATION
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
from zinnia.views.mixins.templates import EntryQuerysetTemplateResponseMixin
def get_category_or_404(path):
"""
Retrieve a Category instance by a path.
"""
path_bits = [p for p in path.split('/') if p]
return get_object_or_404(Category, slug=path_bits[-1])
class CategoryList(ListView):
"""
View returning a list of published categories.
"""
def get_queryset(self):
"""
Return a queryset of published categories,
with a count of their entries published.
"""
return Category.published.all().order_by('title').annotate(
count_entries_published=Count('entries'))
class BaseCategoryDetail(object):
"""
Mixin providing the behavior of the category detail view,
by returning in the context the current category and a
queryset containing the entries published under it.
"""
def get_queryset(self):
"""
        Retrieve the category by its path and
        build a queryset of its published entries.
"""
self.category = get_category_or_404(self.kwargs['path'])
return self.category.entries_published()
def get_context_data(self, **kwargs):
"""
Add the current category in context.
"""
context = super(BaseCategoryDetail, self).get_context_data(**kwargs)
context['category'] = self.category
return context
class CategoryDetail(EntryQuerysetTemplateResponseMixin,
PrefetchCategoriesAuthorsMixin,
BaseCategoryDetail,
BaseListView):
"""
    Detailed view for a Category combining these mixins:
- EntryQuerysetTemplateResponseMixin to provide custom templates
for the category display page.
- PrefetchCategoriesAuthorsMixin to prefetch related Categories
      and Authors belonging to the entry list.
- BaseCategoryDetail to provide the behavior of the view.
- BaseListView to implement the ListView.
"""
model_type = 'category'
paginate_by = PAGINATION
def get_model_name(self):
"""
The model name is the category's slug.
"""
return self.category.slug
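# Template-side sketch (an assumption, not from this module): because
# CategoryList annotates each category with ``count_entries_published``, a
# template iterating over ``object_list`` can render, for example:
#   {{ category.title }} ({{ category.count_entries_published }})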
|
import urwid
import urwid.raw_display
class SwitchingPadding(urwid.Padding):
def padding_values(self, size, focus):
maxcol = size[0]
width, ignore = self.original_widget.pack(size, focus=focus)
if maxcol > width:
self.align = "left"
else:
self.align = "right"
return urwid.Padding.padding_values(self, size, focus)
class BigTextDisplay:
palette = [
('body', 'black', 'light gray', 'standout'),
('header', 'white', 'dark red', 'bold'),
('button normal','light gray', 'dark blue', 'standout'),
('button select','white', 'dark green'),
('button disabled','dark gray','dark blue'),
('edit', 'light gray', 'dark blue'),
('bigtext', 'white', 'black'),
('chars', 'light gray', 'black'),
('exit', 'white', 'dark cyan'),
]
def create_radio_button(self, g, name, font, fn):
w = urwid.RadioButton(g, name, False, on_state_change=fn)
w.font = font
w = urwid.AttrWrap(w, 'button normal', 'button select')
return w
def create_disabled_radio_button(self, name):
w = urwid.Text(" " + name + " (UTF-8 mode required)")
w = urwid.AttrWrap(w, 'button disabled')
return w
def create_edit(self, label, text, fn):
w = urwid.Edit(label, text)
urwid.connect_signal(w, 'change', fn)
fn(w, text)
w = urwid.AttrWrap(w, 'edit')
return w
def set_font_event(self, w, state):
if state:
self.bigtext.set_font(w.font)
self.chars_avail.set_text(w.font.characters())
def edit_change_event(self, widget, text):
self.bigtext.set_text(text)
def setup_view(self):
fonts = urwid.get_all_fonts()
# setup mode radio buttons
self.font_buttons = []
group = []
utf8 = urwid.get_encoding_mode() == "utf8"
for name, fontcls in fonts:
font = fontcls()
if font.utf8_required and not utf8:
rb = self.create_disabled_radio_button(name)
else:
rb = self.create_radio_button(group, name, font,
self.set_font_event)
if fontcls == urwid.Thin6x6Font:
chosen_font_rb = rb
exit_font = font
self.font_buttons.append( rb )
# Create BigText
self.bigtext = urwid.BigText("", None)
bt = SwitchingPadding(self.bigtext, 'left', None)
bt = urwid.AttrWrap(bt, 'bigtext')
bt = urwid.Filler(bt, 'bottom', None, 7)
bt = urwid.BoxAdapter(bt, 7)
# Create chars_avail
cah = urwid.Text("Characters Available:")
self.chars_avail = urwid.Text("", wrap='any')
ca = urwid.AttrWrap(self.chars_avail, 'chars')
chosen_font_rb.set_state(True) # causes set_font_event call
# Create Edit widget
edit = self.create_edit("", "Urwid "+urwid.__version__,
self.edit_change_event)
# ListBox
chars = urwid.Pile([cah, ca])
fonts = urwid.Pile([urwid.Text("Fonts:")] + self.font_buttons,
focus_item=1)
col = urwid.Columns([('fixed',16,chars), fonts], 3,
focus_column=1)
bt = urwid.Pile([bt, edit], focus_item=1)
l = [bt, urwid.Divider(), col]
w = urwid.ListBox(urwid.SimpleListWalker(l))
# Frame
w = urwid.AttrWrap(w, 'body')
hdr = urwid.Text("Urwid BigText example program - F8 exits.")
hdr = urwid.AttrWrap(hdr, 'header')
w = urwid.Frame(header=hdr, body=w)
# Exit message
exit = urwid.BigText(('exit'," Quit? "), exit_font)
exit = urwid.Overlay(exit, w, 'center', None, 'middle', None)
return w, exit
def main(self):
self.view, self.exit_view = self.setup_view()
self.loop = urwid.MainLoop(self.view, self.palette,
unhandled_input=self.unhandled_input)
self.loop.run()
def unhandled_input(self, key):
if key == 'f8':
self.loop.widget = self.exit_view
return True
if self.loop.widget != self.exit_view:
return
if key in ('y', 'Y'):
raise urwid.ExitMainLoop()
if key in ('n', 'N'):
self.loop.widget = self.view
return True
def main():
BigTextDisplay().main()
if __name__ == '__main__':
main()
|
from unittest.mock import patch
import pytest
from homeassistant.components.websocket_api.auth import (
TYPE_AUTH,
TYPE_AUTH_INVALID,
TYPE_AUTH_OK,
TYPE_AUTH_REQUIRED,
)
from homeassistant.components.websocket_api.const import (
SIGNAL_WEBSOCKET_CONNECTED,
SIGNAL_WEBSOCKET_DISCONNECTED,
URL,
)
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
from tests.common import mock_coro
@pytest.fixture
def track_connected(hass):
"""Track connected and disconnected events."""
connected_evt = []
@callback
def track_connected():
connected_evt.append(1)
hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_WEBSOCKET_CONNECTED, track_connected
)
disconnected_evt = []
@callback
def track_disconnected():
disconnected_evt.append(1)
hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_WEBSOCKET_DISCONNECTED, track_disconnected
)
return {"connected": connected_evt, "disconnected": disconnected_evt}
async def test_auth_events(
hass, no_auth_websocket_client, legacy_auth, hass_access_token, track_connected
):
"""Test authenticating."""
await test_auth_active_with_token(hass, no_auth_websocket_client, hass_access_token)
assert len(track_connected["connected"]) == 1
assert not track_connected["disconnected"]
await no_auth_websocket_client.close()
await hass.async_block_till_done()
assert len(track_connected["disconnected"]) == 1
async def test_auth_via_msg_incorrect_pass(no_auth_websocket_client):
"""Test authenticating."""
with patch(
"homeassistant.components.websocket_api.auth.process_wrong_login",
return_value=mock_coro(),
) as mock_process_wrong_login:
await no_auth_websocket_client.send_json(
{"type": TYPE_AUTH, "api_password": "wrong"}
)
msg = await no_auth_websocket_client.receive_json()
assert mock_process_wrong_login.called
assert msg["type"] == TYPE_AUTH_INVALID
assert msg["message"] == "Invalid access token or password"
async def test_auth_events_incorrect_pass(no_auth_websocket_client, track_connected):
"""Test authenticating."""
await test_auth_via_msg_incorrect_pass(no_auth_websocket_client)
assert not track_connected["connected"]
assert not track_connected["disconnected"]
await no_auth_websocket_client.close()
assert not track_connected["connected"]
assert not track_connected["disconnected"]
async def test_pre_auth_only_auth_allowed(no_auth_websocket_client):
"""Verify that before authentication, only auth messages are allowed."""
await no_auth_websocket_client.send_json(
{
"type": "call_service",
"domain": "domain_test",
"service": "test_service",
"service_data": {"hello": "world"},
}
)
msg = await no_auth_websocket_client.receive_json()
assert msg["type"] == TYPE_AUTH_INVALID
assert msg["message"].startswith("Auth message incorrectly formatted")
async def test_auth_active_with_token(
hass, no_auth_websocket_client, hass_access_token
):
"""Test authenticating with a token."""
await no_auth_websocket_client.send_json(
{"type": TYPE_AUTH, "access_token": hass_access_token}
)
auth_msg = await no_auth_websocket_client.receive_json()
assert auth_msg["type"] == TYPE_AUTH_OK
async def test_auth_active_user_inactive(hass, aiohttp_client, hass_access_token):
"""Test authenticating with a token."""
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
refresh_token.user.is_active = False
assert await async_setup_component(hass, "websocket_api", {})
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
async with client.ws_connect(URL) as ws:
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_REQUIRED
await ws.send_json({"type": TYPE_AUTH, "access_token": hass_access_token})
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_INVALID
async def test_auth_active_with_password_not_allow(hass, aiohttp_client):
"""Test authenticating with a token."""
assert await async_setup_component(hass, "websocket_api", {})
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
async with client.ws_connect(URL) as ws:
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_REQUIRED
await ws.send_json({"type": TYPE_AUTH, "api_password": "some-password"})
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_INVALID
async def test_auth_legacy_support_with_password(hass, aiohttp_client, legacy_auth):
"""Test authenticating with a token."""
assert await async_setup_component(hass, "websocket_api", {})
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
async with client.ws_connect(URL) as ws:
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_REQUIRED
await ws.send_json({"type": TYPE_AUTH, "api_password": "some-password"})
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_INVALID
async def test_auth_with_invalid_token(hass, aiohttp_client):
"""Test authenticating with a token."""
assert await async_setup_component(hass, "websocket_api", {})
await hass.async_block_till_done()
client = await aiohttp_client(hass.http.app)
async with client.ws_connect(URL) as ws:
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_REQUIRED
await ws.send_json({"type": TYPE_AUTH, "access_token": "incorrect"})
auth_msg = await ws.receive_json()
assert auth_msg["type"] == TYPE_AUTH_INVALID
|
import argparse
from paasta_tools import tron_tools
def parse_args():
parser = argparse.ArgumentParser(
description="Lists Tron namespaces for a cluster, excluding MASTER"
)
parser.add_argument(
"-c",
"--cluster",
dest="cluster",
default=None,
help="Use a different Tron cluster",
)
parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
default=tron_tools.DEFAULT_SOA_DIR,
help="Use a different soa config directory",
)
args = parser.parse_args()
return args
def main():
args = parse_args()
namespaces = tron_tools.get_tron_namespaces(
cluster=args.cluster, soa_dir=args.soa_dir
)
print("\n".join(namespaces))
if __name__ == "__main__":
main()
|
import itertools
import os
import requests
from flask import current_app
from lemur.common.defaults import common_name
from lemur.common.utils import parse_certificate, base64encode
from lemur.plugins.bases import DestinationPlugin
DEFAULT_API_VERSION = "v1"
def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):
# _resolve_uri(k8s_base_uri, namespace, kind, name, api_ver=DEFAULT_API_VERSION)
url = _resolve_uri(k8s_base_uri, namespace, kind)
current_app.logger.debug("K8S POST request URL: %s", url)
create_resp = k8s_api.post(url, json=data)
current_app.logger.debug("K8S POST response: %s", create_resp)
if 200 <= create_resp.status_code <= 299:
return None
elif create_resp.json().get("reason", "") != "AlreadyExists":
return create_resp.content
url = _resolve_uri(k8s_base_uri, namespace, kind, name)
current_app.logger.debug("K8S PUT request URL: %s", url)
update_resp = k8s_api.put(url, json=data)
current_app.logger.debug("K8S PUT response: %s", update_resp)
if not 200 <= update_resp.status_code <= 299:
return update_resp.content
return
def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION):
api_group = "api"
if "/" in api_ver:
api_group = "apis"
return "{base}/{api_group}/{api_ver}/namespaces".format(
base=k8s_base_uri, api_group=api_group, api_ver=api_ver
) + ("/" + namespace if namespace else "")
def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_VERSION):
if not namespace:
namespace = "default"
return "/".join(
itertools.chain.from_iterable(
[
(_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),),
((kind + "s").lower(),),
(name,) if name else (),
]
)
)
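# Example of the URI resolution above (values are illustrative):
#   _resolve_uri("https://kubernetes.default", "prod", "Secret", "tls-cert")
#   -> "https://kubernetes.default/api/v1/namespaces/prod/secrets/tls-cert"
# With an api_ver containing "/" (e.g. "apps/v1"), the "apis" group is used instead.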
def build_secret(secret_format, secret_name, body, private_key, cert_chain):
secret = {
"apiVersion": "v1",
"kind": "Secret",
"type": "Opaque",
"metadata": {"name": secret_name},
}
if secret_format == "Full":
secret["data"] = {
"combined.pem": base64encode("%s\n%s" % (body, private_key)),
"ca.crt": base64encode(cert_chain),
"service.key": base64encode(private_key),
"service.crt": base64encode(body),
}
if secret_format == "TLS":
secret["type"] = "kubernetes.io/tls"
secret["data"] = {
"tls.crt": base64encode("%s\n%s" % (body, cert_chain)),
"tls.key": base64encode(private_key),
}
if secret_format == "Certificate":
secret["data"] = {"tls.crt": base64encode(cert_chain)}
return secret
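# For secret_format == "TLS", the payload mirrors a standard kubernetes.io/tls
# Secret, e.g. (values are base64-encoded by base64encode above):
#   {"apiVersion": "v1", "kind": "Secret", "type": "kubernetes.io/tls",
#    "metadata": {"name": "<secret_name>"},
#    "data": {"tls.crt": "<cert + chain>", "tls.key": "<private key>"}}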
class KubernetesDestinationPlugin(DestinationPlugin):
title = "Kubernetes"
slug = "kubernetes-destination"
description = "Allow the uploading of certificates to Kubernetes as secret"
author = "Mikhail Khodorovskiy"
author_url = "https://github.com/mik373/lemur"
options = [
{
"name": "secretNameFormat",
"type": "str",
"required": False,
# Validation is difficult. This regex is used by kubectl to validate secret names:
# [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*
            # Allowing the insertion of "{common_name}" (or any other such placeholder)
# at any point in the string proved very challenging and had a tendency to
# cause my browser to hang. The specified expression will allow any valid string
# but will also accept many invalid strings.
"validation": "(?:[a-z0-9.-]|\\{common_name\\})+",
"helpMessage": 'Must be a valid secret name, possibly including "{common_name}"',
"default": "{common_name}",
},
{
"name": "kubernetesURL",
"type": "str",
"required": False,
"validation": "https?://[a-zA-Z0-9.-]+(?::[0-9]+)?",
"helpMessage": "Must be a valid Kubernetes server URL!",
"default": "https://kubernetes.default",
},
{
"name": "kubernetesAuthToken",
"type": "str",
"required": False,
"validation": "[0-9a-zA-Z-_.]+",
"helpMessage": "Must be a valid Kubernetes server Token!",
},
{
"name": "kubernetesAuthTokenFile",
"type": "str",
"required": False,
"validation": "(/[^/]+)+",
"helpMessage": "Must be a valid file path!",
"default": "/var/run/secrets/kubernetes.io/serviceaccount/token",
},
{
"name": "kubernetesServerCertificate",
"type": "textarea",
"required": False,
"validation": "-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----",
"helpMessage": "Must be a valid Kubernetes server Certificate!",
},
{
"name": "kubernetesServerCertificateFile",
"type": "str",
"required": False,
"validation": "(/[^/]+)+",
"helpMessage": "Must be a valid file path!",
"default": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt",
},
{
"name": "kubernetesNamespace",
"type": "str",
"required": False,
"validation": "[a-z0-9]([-a-z0-9]*[a-z0-9])?",
"helpMessage": "Must be a valid Kubernetes Namespace!",
},
{
"name": "kubernetesNamespaceFile",
"type": "str",
"required": False,
"validation": "(/[^/]+)+",
"helpMessage": "Must be a valid file path!",
"default": "/var/run/secrets/kubernetes.io/serviceaccount/namespace",
},
{
"name": "secretFormat",
"type": "select",
"required": True,
"available": ["Full", "TLS", "Certificate"],
"helpMessage": "The type of Secret to create.",
"default": "Full",
},
]
def __init__(self, *args, **kwargs):
super(KubernetesDestinationPlugin, self).__init__(*args, **kwargs)
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
try:
k8_base_uri = self.get_option("kubernetesURL", options)
secret_format = self.get_option("secretFormat", options)
k8s_api = K8sSession(self.k8s_bearer(options), self.k8s_cert(options))
cn = common_name(parse_certificate(body))
secret_name_format = self.get_option("secretNameFormat", options)
secret_name = secret_name_format.format(common_name=cn)
secret = build_secret(
secret_format, secret_name, body, private_key, cert_chain
)
err = ensure_resource(
k8s_api,
k8s_base_uri=k8_base_uri,
namespace=self.k8s_namespace(options),
kind="secret",
name=secret_name,
data=secret,
)
except Exception as e:
current_app.logger.exception(
"Exception in upload: {}".format(e), exc_info=True
)
raise
if err is not None:
current_app.logger.error("Error deploying resource: %s", err)
raise Exception("Error uploading secret: " + err)
def k8s_bearer(self, options):
bearer = self.get_option("kubernetesAuthToken", options)
if not bearer:
bearer_file = self.get_option("kubernetesAuthTokenFile", options)
with open(bearer_file, "r") as file:
bearer = file.readline()
if bearer:
current_app.logger.debug("Using token read from %s", bearer_file)
else:
                raise Exception(
                    "Unable to locate token in options or from %s" % bearer_file
                )
else:
current_app.logger.debug("Using token from options")
return bearer
def k8s_cert(self, options):
cert_file = self.get_option("kubernetesServerCertificateFile", options)
cert = self.get_option("kubernetesServerCertificate", options)
if cert:
cert_file = os.path.join(
os.path.abspath(os.path.dirname(__file__)), "k8.cert"
)
with open(cert_file, "w") as text_file:
text_file.write(cert)
current_app.logger.debug("Using certificate from options")
else:
current_app.logger.debug("Using certificate from %s", cert_file)
return cert_file
def k8s_namespace(self, options):
namespace = self.get_option("kubernetesNamespace", options)
if not namespace:
namespace_file = self.get_option("kubernetesNamespaceFile", options)
with open(namespace_file, "r") as file:
namespace = file.readline()
if namespace:
current_app.logger.debug(
"Using namespace %s from %s", namespace, namespace_file
)
else:
                raise Exception(
                    "Unable to locate namespace in options or from %s" % namespace_file
                )
else:
current_app.logger.debug("Using namespace %s from options", namespace)
return namespace
class K8sSession(requests.Session):
def __init__(self, bearer, cert_file):
super(K8sSession, self).__init__()
self.headers.update({"Authorization": "Bearer %s" % bearer})
self.verify = cert_file
def request(
self,
method,
url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=30,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
json=None,
):
"""
        This method overrides the default timeout to 30 seconds.
"""
return super(K8sSession, self).request(
method,
url,
params,
data,
headers,
cookies,
files,
auth,
timeout,
allow_redirects,
proxies,
hooks,
stream,
verify,
cert,
json,
)
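# Minimal usage sketch (illustrative; the token and CA paths are placeholders):
#   session = K8sSession(bearer="<service-account-token>", cert_file="/path/to/ca.crt")
#   resp = session.get("https://kubernetes.default/api/v1/namespaces/default/secrets")
#   resp.raise_for_status()
# Every request issued through the session carries the bearer token, is verified
# against the supplied CA bundle, and uses the 30-second default timeout above.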
|
import datetime
import os
from perfkitbenchmarker import configs
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import kernel_compile
BENCHMARK_NAME = 'kernel_compile'
BENCHMARK_CONFIG = """
kernel_compile:
description: Compile the Linux kernel
vm_groups:
default:
vm_spec: *default_single_core
disk_spec: *default_500_gb
"""
class _Paths(object):
def __init__(self, vm):
self.working_dir = os.path.join(vm.GetScratchDir(), BENCHMARK_NAME)
self.source_dir = os.path.join(self.working_dir, kernel_compile.UNTAR_DIR)
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def _GetVm(benchmark_spec):
vms = benchmark_spec.vms
if len(vms) != 1:
raise ValueError(
'kernel_compile benchmark requires exactly one machine, found {0}'
.format(len(vms)))
return vms[0]
def Prepare(benchmark_spec):
"""Install Linux kernel source code and build dependencies.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vm = _GetVm(benchmark_spec)
vm.Install('kernel_compile')
def Run(benchmark_spec):
vm = _GetVm(benchmark_spec)
paths = _Paths(vm)
def time_command(command):
start = datetime.datetime.now()
vm.RemoteCommand(command)
return (datetime.datetime.now() - start).total_seconds()
def make(target=''):
return time_command(
'make -C {} -j$(egrep -c "^processor" /proc/cpuinfo) {}'
.format(paths.source_dir, target))
untar_time = time_command('rm -rf {dir} && '
'mkdir {dir} && '
'tar -C {dir} -xzf {tarball}'.format(
dir=paths.working_dir,
tarball=kernel_compile.KERNEL_TARBALL))
vm.PushDataFile('kernel_compile.config',
'{}/.config'.format(paths.source_dir))
cold_build_time = make()
clean_time = make('clean')
warm_build_time = make()
return [
sample.Sample('Untar time', untar_time, 'seconds'),
sample.Sample('Cold build time', cold_build_time, 'seconds'),
sample.Sample('Clean time', clean_time, 'seconds'),
sample.Sample('Warm build time', warm_build_time, 'seconds'),
]
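# Samples reported above: tarball extraction time, a cold build from the freshly
# extracted tree, "make clean" time, and a warm rebuild after the clean.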
def Cleanup(benchmark_spec):
vm = _GetVm(benchmark_spec)
paths = _Paths(vm)
vm.RemoteCommand('rm -rf {}'.format(paths.working_dir))
|
import logging
from homeassistant import config_entries
from homeassistant.helpers import config_entry_oauth2_flow
from .const import DOMAIN
class OAuth2FlowHandler(
config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
"""Config flow to handle NEW_NAME OAuth2 authentication."""
DOMAIN = DOMAIN
# TODO Pick one from config_entries.CONN_CLASS_*
CONNECTION_CLASS = config_entries.CONN_CLASS_UNKNOWN
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
|
from .model import Info
DATA = {
"config_flow": {
"title": "Config Flow",
"docs": "https://developers.home-assistant.io/docs/en/config_entries_config_flow_handler.html",
},
"config_flow_discovery": {
"title": "Discoverable Config Flow",
"docs": "https://developers.home-assistant.io/docs/en/config_entries_config_flow_handler.html#discoverable-integrations-that-require-no-authentication",
},
"config_flow_oauth2": {
"title": "OAuth2 Config Flow",
"docs": "https://developers.home-assistant.io/docs/en/next/config_entries_config_flow_handler.html#configuration-via-oauth2",
},
"device_action": {
"title": "Device Action",
"docs": "https://developers.home-assistant.io/docs/en/device_automation_action.html",
},
"device_condition": {
"title": "Device Condition",
"docs": "https://developers.home-assistant.io/docs/en/device_automation_condition.html",
},
"device_trigger": {
"title": "Device Trigger",
"docs": "https://developers.home-assistant.io/docs/en/device_automation_trigger.html",
},
"integration": {
"title": "Integration",
"docs": "https://developers.home-assistant.io/docs/en/creating_integration_file_structure.html",
},
"reproduce_state": {
"title": "Reproduce State",
"docs": "https://developers.home-assistant.io/docs/en/reproduce_state_index.html",
"extra": "You will now need to update the code to make sure that every attribute that can occur in the state will cause the right service to be called.",
},
}
def print_relevant_docs(template: str, info: Info) -> None:
"""Print relevant docs."""
data = DATA[template]
print()
print("**************************")
print()
print()
print(f"{data['title']} code has been generated")
print()
if info.files_added:
print("Added the following files:")
for file in info.files_added:
print(f"- {file}")
print()
if info.tests_added:
print("Added the following tests:")
for file in info.tests_added:
print(f"- {file}")
print()
if info.examples_added:
print(
"Because some files already existed, we added the following example files. Please copy the relevant code to the existing files."
)
for file in info.examples_added:
print(f"- {file}")
print()
print(
"The next step is to look at the files and deal with all areas marked as TODO."
)
if "extra" in data:
print(data["extra"])
|
import unittest
import numpy as np
from chainer import testing
from chainer.testing import attr
from chainercv.datasets import coco_bbox_label_names
from chainercv.datasets import COCOBboxDataset
from chainercv.utils import assert_is_bbox_dataset
def _create_parameters():
split_years = testing.product({
'split': ['train', 'val'],
'year': ['2014', '2017']})
split_years += [{'split': 'minival', 'year': '2014'},
{'split': 'valminusminival', 'year': '2014'}]
use_and_return_args = testing.product({
'use_crowded': [False, True],
'return_crowded': [False, True],
'return_area': [False, True]})
params = testing.product_dict(
split_years,
use_and_return_args)
return params
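# Each generated entry combines one split/year pair with one flag combination,
# for example:
#   {'split': 'train', 'year': '2014',
#    'use_crowded': False, 'return_crowded': False, 'return_area': False}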
@testing.parameterize(*_create_parameters())
class TestCOCOBboxDataset(unittest.TestCase):
def setUp(self):
self.dataset = COCOBboxDataset(
split=self.split, year=self.year,
use_crowded=self.use_crowded, return_area=self.return_area,
return_crowded=self.return_crowded)
@attr.slow
def test_coco_bbox_dataset(self):
assert_is_bbox_dataset(
self.dataset, len(coco_bbox_label_names), n_example=30)
if self.return_area:
for _ in range(10):
i = np.random.randint(0, len(self.dataset))
_, bbox, _, area = self.dataset[i][:4]
self.assertIsInstance(area, np.ndarray)
self.assertEqual(area.dtype, np.float32)
self.assertEqual(area.shape, (bbox.shape[0],))
if self.return_crowded:
for _ in range(10):
i = np.random.randint(0, len(self.dataset))
example = self.dataset[i]
crowded = example[-1]
bbox = example[1]
self.assertIsInstance(crowded, np.ndarray)
                self.assertEqual(crowded.dtype, bool)
self.assertEqual(crowded.shape, (bbox.shape[0],))
if not self.use_crowded:
np.testing.assert_equal(crowded, 0)
testing.run_module(__name__, __file__)
|
from homeassistant import data_entry_flow
from homeassistant.components.iqvia import CONF_ZIP_CODE, DOMAIN
from homeassistant.config_entries import SOURCE_USER
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {CONF_ZIP_CODE: "12345"}
MockConfigEntry(domain=DOMAIN, unique_id="12345", data=conf).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_invalid_zip_code(hass):
"""Test that an invalid ZIP code key throws an error."""
conf = {CONF_ZIP_CODE: "abcde"}
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_ZIP_CODE: "invalid_zip_code"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_user(hass):
"""Test that the user step works (without MFA)."""
conf = {CONF_ZIP_CODE: "12345"}
with patch("homeassistant.components.iqvia.async_setup_entry", return_value=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "12345"
assert result["data"] == {CONF_ZIP_CODE: "12345"}
|
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from . import CONF_SERVERS, DATA_UPCLOUD, UpCloudServerEntity
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_SERVERS): vol.All(cv.ensure_list, [cv.string])}
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the UpCloud server binary sensor."""
coordinator = hass.data[DATA_UPCLOUD].coordinators[config_entry.data[CONF_USERNAME]]
entities = [UpCloudBinarySensor(coordinator, uuid) for uuid in coordinator.data]
async_add_entities(entities, True)
class UpCloudBinarySensor(UpCloudServerEntity, BinarySensorEntity):
"""Representation of an UpCloud server sensor."""
|
import pytest
import six
from pandas import DataFrame, Series
from arctic.chunkstore.passthrough_chunker import PassthroughChunker
def test_pass_thru():
p = PassthroughChunker()
with pytest.raises(StopIteration):
six.next(p.to_chunks([]))
assert(p.to_range(None, None) == b'NA')
assert(p.chunk_to_str(None) == b'NA')
assert(p.to_mongo(None) == {})
assert(p.filter(None, None) is None)
assert(p.exclude(DataFrame(data=[1, 2, 3]), None).equals(DataFrame()))
assert(p.exclude(Series([1, 2, 3]), None).equals(Series()))
|
import unittest
from absl import flags
from absl.testing import parameterized
import mock
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import os_types
from perfkitbenchmarker import pkb
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
def CreateTestLinuxVm():
vm_spec = pkb_common_test_case.CreateTestVmSpec()
return pkb_common_test_case.TestLinuxVirtualMachine(vm_spec=vm_spec)
# /proc/cmdline on a GCP CentOS7 vm
_CENTOS7_KERNEL_COMMAND_LINE = (
'BOOT_IMAGE=/boot/vmlinuz-3.10.0-1127.13.1.el7.x86_64 '
'root=UUID=1-2-3-4-5 ro crashkernel=auto console=ttyS0,38400n8')
class TestSetFiles(pkb_common_test_case.PkbCommonTestCase):
def runTest(self, set_files, calls):
"""Run a SetFiles test.
Args:
set_files: the value of FLAGS.set_files
calls: a list of mock.call() objects giving the expected calls to
vm.RemoteCommand() for the test.
"""
FLAGS['set_files'].parse(set_files)
vm = CreateTestLinuxVm()
with mock.patch.object(vm, 'RemoteCommand') as remote_command:
vm.SetFiles()
self.assertCountEqual( # use assertCountEqual because order is undefined
remote_command.call_args_list,
calls)
def testNoFiles(self):
self.runTest([],
[])
def testOneFile(self):
self.runTest(['/sys/kernel/mm/transparent_hugepage/enabled=always'],
[mock.call('echo "always" | sudo tee '
'/sys/kernel/mm/transparent_hugepage/enabled')])
def testMultipleFiles(self):
self.runTest(['/sys/kernel/mm/transparent_hugepage/enabled=always',
'/sys/kernel/mm/transparent_hugepage/defrag=never'],
[mock.call('echo "always" | sudo tee '
'/sys/kernel/mm/transparent_hugepage/enabled'),
mock.call('echo "never" | sudo tee '
'/sys/kernel/mm/transparent_hugepage/defrag')])
class TestSysctl(pkb_common_test_case.PkbCommonTestCase):
def runTest(self, sysctl, calls):
FLAGS['sysctl'].parse(sysctl)
vm = CreateTestLinuxVm()
with mock.patch.object(vm, 'RemoteCommand') as remote_command:
vm.DoSysctls()
self.assertEqual(sorted(remote_command.call_args_list), sorted(calls))
def testSysctl(self):
self.runTest(
['vm.dirty_background_ratio=10', 'vm.dirty_ratio=25'],
[mock.call('sudo bash -c \'echo "vm.dirty_background_ratio=10" >> '
'/etc/sysctl.conf\''),
mock.call('sudo bash -c \'echo "vm.dirty_ratio=25" >> '
'/etc/sysctl.conf\'')])
def testNoSysctl(self):
self.runTest([],
[])
class TestDiskOperations(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(TestDiskOperations, self).setUp()
FLAGS['default_timeout'].parse(0) # due to @retry
patcher = mock.patch.object(pkb_common_test_case.TestLinuxVirtualMachine,
'RemoteHostCommand')
self.remote_command = patcher.start()
self.addCleanup(patcher.stop)
self.remote_command.side_effect = [('', None, 0), ('', None, 0)]
self.vm = CreateTestLinuxVm()
def assertRemoteHostCalled(self, *calls):
self.assertEqual([mock.call(call) for call in calls],
self.remote_command.call_args_list)
def testMountDisk(self):
mkdir_cmd = ('sudo mkdir -p mp;'
'sudo mount -o discard dp mp && '
'sudo chown $USER:$USER mp;')
fstab_cmd = 'echo "dp mp ext4 defaults" | sudo tee -a /etc/fstab'
self.vm.MountDisk('dp', 'mp')
self.assertRemoteHostCalled(mkdir_cmd, fstab_cmd)
def testFormatDisk(self):
expected_command = ('[[ -d /mnt ]] && sudo umount /mnt; '
'sudo mke2fs -F -E lazy_itable_init=0,discard '
'-O ^has_journal -t ext4 -b 4096 dp')
self.vm.FormatDisk('dp')
self.assertRemoteHostCalled(expected_command)
self.assertEqual('ext4', self.vm.os_metadata['disk_filesystem_type'])
self.assertEqual(4096, self.vm.os_metadata['disk_filesystem_blocksize'])
def testNfsMountDisk(self):
mkdir_cmd = ('sudo mkdir -p mp;'
'sudo mount -t nfs -o hard,ro dp mp && '
'sudo chown $USER:$USER mp;')
fstab_cmd = 'echo "dp mp nfs ro" | sudo tee -a /etc/fstab'
self.vm.MountDisk('dp', 'mp',
disk_type='nfs', mount_options='hard,ro',
fstab_options='ro')
self.assertRemoteHostCalled(mkdir_cmd, fstab_cmd)
def testNfsFormatDisk(self):
self.vm.FormatDisk('dp', disk_type='nfs')
self.assertRemoteHostCalled() # no format disk command executed
class LogDmesgTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(LogDmesgTestCase, self).setUp()
self.vm = CreateTestLinuxVm()
def testPreDeleteDoesNotCallDmesg(self):
FLAGS.log_dmesg = False
with mock.patch.object(self.vm, 'RemoteCommand') as remote_command:
self.vm._PreDelete()
remote_command.assert_not_called()
def testPreDeleteCallsDmesg(self):
FLAGS.log_dmesg = True
with mock.patch.object(self.vm, 'RemoteCommand') as remote_command:
self.vm._PreDelete()
remote_command.assert_called_once_with('hostname && dmesg', should_log=True)
class TestLsCpu(unittest.TestCase, test_util.SamplesTestMixin):
LSCPU_DATA = {
'NUMA node(s)': '1',
'Core(s) per socket': '2',
'Socket(s)': '3',
'a': 'b',
}
PROC_CPU_TEXT = """
processor: 29
cpu family: 6
core id: 13
oddkey: v29
apicid: 27
processor: 30
cpu family: 6
core id: 14
oddkey: v30
apicid:29
processor: 31
cpu family: 6
core id: 15
apicid: 31
"""
def LsCpuText(self, data):
return '\n'.join(['%s:%s' % entry for entry in data.items()])
def CreateVm(self, os_type, remote_command_text):
vm = CreateTestLinuxVm()
vm.OS_TYPE = os_type # pylint: disable=invalid-name
vm.RemoteCommand = mock.Mock() # pylint: disable=invalid-name
vm.RemoteCommand.return_value = remote_command_text, ''
vm.name = 'pkb-test'
return vm
def testRecordLscpuOutputLinux(self):
vm = self.CreateVm(os_types.UBUNTU1604, self.LsCpuText(self.LSCPU_DATA))
samples = pkb._CreateLscpuSamples([vm])
vm.RemoteCommand.assert_called_with('lscpu')
self.assertEqual(1, len(samples))
metadata = {'node_name': vm.name}
metadata.update(self.LSCPU_DATA)
expected = sample.Sample('lscpu', 0, '', metadata, samples[0].timestamp)
self.assertEqual(expected, samples[0])
def testRecordLscpuOutputNonLinux(self):
vm = self.CreateVm(os_types.WINDOWS, '')
samples = pkb._CreateLscpuSamples([vm])
self.assertEqual(0, len(samples))
vm.RemoteCommand.assert_not_called()
def testMissingRequiredLsCpuEntries(self):
with self.assertRaises(ValueError):
linux_virtual_machine.LsCpuResults('')
def testLsCpuParsing(self):
vm = self.CreateVm(os_types.UBUNTU1604,
self.LsCpuText(self.LSCPU_DATA) + '\nThis Line=Invalid')
results = vm.CheckLsCpu()
self.assertEqual(1, results.numa_node_count)
self.assertEqual(2, results.cores_per_socket)
self.assertEqual(3, results.socket_count)
self.assertEqual(
{
'NUMA node(s)': '1',
'Core(s) per socket': '2',
'Socket(s)': '3',
'a': 'b'
}, results.data)
def testProcCpuParsing(self):
vm = self.CreateVm(os_types.UBUNTU1604, self.PROC_CPU_TEXT)
results = vm.CheckProcCpu()
expected_mappings = {}
expected_mappings[29] = {'apicid': '27', 'core id': '13'}
expected_mappings[30] = {'apicid': '29', 'core id': '14'}
expected_mappings[31] = {'apicid': '31', 'core id': '15'}
expected_common = {
'cpu family': '6',
'oddkey': 'v29;v30',
'proccpu': 'cpu family,oddkey'
}
self.assertEqual(expected_mappings, results.mappings)
self.assertEqual(expected_common, results.GetValues())
def testProcCpuSamples(self):
vm = self.CreateVm(os_types.UBUNTU1604, self.PROC_CPU_TEXT)
samples = pkb._CreateProcCpuSamples([vm])
proccpu_metadata = {
'cpu family': '6',
'node_name': 'pkb-test',
'oddkey': 'v29;v30',
'proccpu': 'cpu family,oddkey',
}
proccpu_mapping_metadata = {
'node_name': 'pkb-test',
'proc_29': 'apicid=27;core id=13',
'proc_30': 'apicid=29;core id=14',
'proc_31': 'apicid=31;core id=15'
}
expected_samples = [
sample.Sample('proccpu', 0, '', proccpu_metadata),
sample.Sample('proccpu_mapping', 0, '', proccpu_mapping_metadata)
]
self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
class TestPartitionTable(unittest.TestCase):
def CreateVm(self, remote_command_text):
vm = CreateTestLinuxVm()
vm.RemoteCommand = mock.Mock() # pylint: disable=invalid-name
vm.RemoteCommand.return_value = remote_command_text, ''
vm.name = 'pkb-test'
vm._partition_table = {}
return vm
  def testFdiskNoPartitionTable(self):
vm = self.CreateVm('')
results = vm.partition_table
self.assertEqual({}, results)
def testFdiskParsingBootDiskOnly(self):
vm = self.CreateVm("""
Disk /dev/sda: 10.7 GB, 10737418240 bytes
4 heads, 32 sectors/track, 163840 cylinders, total 20971520 sectors
Units = sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 4096 bytes
I/O size (minimum/optimal): 4096 bytes / 4096 bytes
Disk identifier: 0x00067934
Device Boot Start End Blocks Id System
/dev/sda1 * 2048 20971519 10484736 83 Linux
""")
results = vm.partition_table
self.assertEqual(
{'/dev/sda': 10737418240}, results)
def testFdiskParsingWithRaidDisk(self):
vm = self.CreateVm("""
Disk /dev/sda: 10 GiB, 10737418240 bytes, 20971520 sectors
Units: sectors of 1 * 512 = 512 bytes
Sector size (logical/physical): 512 bytes / 4096 bytes
I/O size (minimum/optimal): 4096 bytes / 4096 bytes
Disklabel type: dos
Disk identifier: 0x8c87e63b
Device Boot Start End Sectors Size Id Type
/dev/sda1 * 2048 20971486 20969439 10G 83 Linux
Disk /dev/sdb: 375 GiB, 402653184000 bytes, 98304000 sectors
Units: sectors of 1 * 4096 = 4096 bytes
Sector size (logical/physical): 4096 bytes / 4096 bytes
I/O size (minimum/optimal): 4096 bytes / 4096 bytes
Disk /dev/sdc: 375 GiB, 402653184000 bytes, 98304000 sectors
Units: sectors of 1 * 4096 = 4096 bytes
Sector size (logical/physical): 4096 bytes / 4096 bytes
I/O size (minimum/optimal): 4096 bytes / 4096 bytes
Disk /dev/md0: 749.8 GiB, 805037932544 bytes, 196542464 sectors
Units: sectors of 1 * 4096 = 4096 bytes
Sector size (logical/physical): 4096 bytes / 4096 bytes
I/O size (minimum/optimal): 524288 bytes / 1048576 bytes
""")
results = vm.partition_table
self.assertEqual(
{'/dev/sda': 10737418240,
'/dev/sdb': 402653184000,
'/dev/sdc': 402653184000,
'/dev/md0': 805037932544}, results)
class LinuxVirtualMachineTestCase(pkb_common_test_case.PkbCommonTestCase):
os_info = 'Ubuntu 18.04.1 LTS'
kernel_release = '5.3.0-1026'
partition_table = 'Disk /dev/sda: 1 GiB, 1073741824 bytes, 2097152 sectors'
lscpu_output = '\n'.join([
'NUMA node(s): 1',
'Core(s) per socket: 1',
'Socket(s): 1',
])
normal_boot_responses = [
'cubic', 'Description: ' + os_info, kernel_release, partition_table
]
def CreateVm(self, array_of_stdout):
vm = CreateTestLinuxVm()
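    # Each entry in array_of_stdout is returned once, in order, as the
    # (stdout, stderr) result of a remote command.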
vm.RemoteHostCommandWithReturnCode = mock.Mock(
side_effect=[(str(text), '') for text in array_of_stdout])
vm.CheckLsCpu = mock.Mock(
return_value=linux_virtual_machine.LsCpuResults(self.lscpu_output))
return vm
@parameterized.named_parameters(
('has_smt_centos7', _CENTOS7_KERNEL_COMMAND_LINE, True),
('no_smt_centos7', _CENTOS7_KERNEL_COMMAND_LINE + ' noht nosmt nr_cpus=1',
False))
def testIsSmtEnabled(self, proc_cmdline, is_enabled):
vm = self.CreateVm([proc_cmdline])
self.assertEqual(is_enabled, vm.IsSmtEnabled())
@parameterized.named_parameters(
('hasSMT_want_real', 32, 'regular', 16),
('noSMT_want_real', 32, 'nosmt', 32),
)
def testNumCpusForBenchmarkNoSmt(self, vcpus, kernel_command_line,
expected_num_cpus):
vm = self.CreateVm([kernel_command_line, vcpus])
self.assertEqual(expected_num_cpus, vm.NumCpusForBenchmark(True))
def testNumCpusForBenchmarkDefaultCall(self):
    # Shows that IsSmtEnabled is not called unless the new optional parameter
    # is used.
vm = self.CreateVm([32])
vm.IsSmtEnabled = mock.Mock()
self.assertEqual(32, vm.NumCpusForBenchmark())
vm.IsSmtEnabled.assert_not_called()
self.assertEqual(32, vm.NumCpusForBenchmark(False))
vm.IsSmtEnabled.assert_not_called()
def testBoot(self):
vm = self.CreateVm(self.normal_boot_responses)
vm.RecordAdditionalMetadata()
expected_os_metadata = {
'/dev/sda': 1073741824,
'kernel_release': self.kernel_release,
'os_info': self.os_info,
}
self.assertEqual(expected_os_metadata, vm.os_metadata)
def testReboot(self):
os_info_new = 'Ubuntu 18.04.1b LTS'
kernel_release_new = '5.3.0-1027'
additional_commands = [
'(reboot command)',
'(myhostname)',
'(last boot time)',
'(create install dir)',
'Description: ' + os_info_new,
kernel_release_new,
'(create install dir)',
'(create tmp dir)',
]
vm = self.CreateVm(self.normal_boot_responses + additional_commands)
vm.RecordAdditionalMetadata()
vm.Reboot()
self.assertEqual(os_info_new, vm.os_metadata['os_info'])
self.assertEqual(kernel_release_new, vm.os_metadata['kernel_release'])
def testCpuVulnerabilitiesEmpty(self):
self.assertEqual({}, self.CreateVm(['']).cpu_vulnerabilities.asdict)
def testCpuVulnerabilities(self):
# lines returned from running "grep . .../cpu/vulnerabilities/*"
cpu_vuln_lines = [
'.../itlb_multihit:KVM: Vulnerable',
'.../l1tf:Mitigation: PTE Inversion',
'.../mds:Vulnerable: Clear CPU buffers attempted, no microcode',
'.../meltdown:Mitigation: PTI',
'.../spec_store_bypass:Mitigation: Speculative Store Bypass disabled',
'.../spectre_v1:Mitigation: usercopy/swapgs barriers',
'.../spectre_v2:Mitigation: Full generic retpoline, IBPB: conditional',
'.../srbds:Not affected',
'.../tsx_async_abort:Not affected',
        # Not actually seen; shows that it falls into "unknowns".
'.../made_up:Unknown Entry',
]
cpu_vuln = self.CreateVm(['\n'.join(cpu_vuln_lines)]).cpu_vulnerabilities
expected_mitigation = {
'l1tf': 'PTE Inversion',
'meltdown': 'PTI',
'spec_store_bypass': 'Speculative Store Bypass disabled',
'spectre_v1': 'usercopy/swapgs barriers',
'spectre_v2': 'Full generic retpoline, IBPB: conditional',
}
self.assertEqual(expected_mitigation, cpu_vuln.mitigations)
expected_vulnerability = {
'itlb_multihit': 'KVM',
'mds': 'Clear CPU buffers attempted, no microcode'
}
self.assertEqual(expected_vulnerability, cpu_vuln.vulnerabilities)
expected_notaffecteds = set(['srbds', 'tsx_async_abort'])
self.assertEqual(expected_notaffecteds, cpu_vuln.notaffecteds)
expected_unknowns = {'made_up': 'Unknown Entry'}
self.assertEqual(expected_unknowns, cpu_vuln.unknowns)
expected_asdict = {
'mitigations': 'l1tf,meltdown,spec_store_bypass,spectre_v1,spectre_v2',
'mitigation_l1tf': 'PTE Inversion',
'mitigation_meltdown': 'PTI',
'mitigation_spec_store_bypass': 'Speculative Store Bypass disabled',
'mitigation_spectre_v1': 'usercopy/swapgs barriers',
'mitigation_spectre_v2': 'Full generic retpoline, IBPB: conditional',
'notaffecteds': 'srbds,tsx_async_abort',
'unknown_made_up': 'Unknown Entry',
'unknowns': 'made_up',
'vulnerabilities': 'itlb_multihit,mds',
'vulnerability_itlb_multihit': 'KVM',
'vulnerability_mds': 'Clear CPU buffers attempted, no microcode',
}
self.assertEqual(expected_asdict, cpu_vuln.asdict)
if __name__ == '__main__':
unittest.main()
|
from functools import partial
from typing import Dict, List
from homeassistant.core import callback
from homeassistant.helpers.entity_registry import (
async_get_registry as async_get_entity_registry,
)
from homeassistant.helpers.typing import HomeAssistantType
from . import XboxUpdateCoordinator
from .base_sensor import XboxBaseSensorEntity
from .const import DOMAIN
SENSOR_ATTRIBUTES = ["status", "gamer_score", "account_tier", "gold_tenure"]
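# One sensor entity is created per friend for each of these presence
# attributes (see async_update_friends below).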
async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
"""Set up Xbox Live friends."""
coordinator: XboxUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][
"coordinator"
]
update_friends = partial(async_update_friends, coordinator, {}, async_add_entities)
unsub = coordinator.async_add_listener(update_friends)
hass.data[DOMAIN][config_entry.entry_id]["sensor_unsub"] = unsub
update_friends()
class XboxSensorEntity(XboxBaseSensorEntity):
"""Representation of a Xbox presence state."""
@property
def state(self):
"""Return the state of the requested attribute."""
if not self.coordinator.last_update_success:
return None
return getattr(self.data, self.attribute, None)
@callback
def async_update_friends(
coordinator: XboxUpdateCoordinator,
current: Dict[str, List[XboxSensorEntity]],
async_add_entities,
) -> None:
"""Update friends."""
new_ids = set(coordinator.data.presence)
current_ids = set(current)
# Process new favorites, add them to Home Assistant
new_entities = []
for xuid in new_ids - current_ids:
current[xuid] = [
XboxSensorEntity(coordinator, xuid, attribute)
for attribute in SENSOR_ATTRIBUTES
]
new_entities = new_entities + current[xuid]
if new_entities:
async_add_entities(new_entities)
# Process deleted favorites, remove them from Home Assistant
for xuid in current_ids - new_ids:
coordinator.hass.async_create_task(
async_remove_entities(xuid, coordinator, current)
)
async def async_remove_entities(
xuid: str,
coordinator: XboxUpdateCoordinator,
current: Dict[str, XboxSensorEntity],
) -> None:
"""Remove friend sensors from Home Assistant."""
registry = await async_get_entity_registry(coordinator.hass)
entities = current[xuid]
for entity in entities:
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[xuid]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.metrics import jacobian_conditioning
import mock
import numpy as np
from six.moves import range
import tensorflow as tf
_BATCH_SIZE = 32
def SlowJacobian(xs, fx):
"""Computes df/dx matrix.
As jacobian_conditioning.compute_jacobian, but explicitly loops over
dimensions of f.
Args:
xs: input tensor(s) of arbitrary shape.
fx: f(x) tensor of arbitrary shape.
Returns:
df/dx tensor.
"""
fxs = tf.unstack(fx, axis=-1)
grads = [tf.gradients(fx_i, xs) for fx_i in fxs]
grads = [grad[0] for grad in grads]
df_dx = tf.stack(grads, axis=1)
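  # The stacked result has shape [batch, dim(f), dim(x)], which matches the
  # indexing used on compute_jacobian's output in the tests below.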
return df_dx
class JacobianConditioningTest(tf.test.TestCase):
def test_jacobian_simple_case(self):
x = tf.random_normal([_BATCH_SIZE, 2])
W = tf.constant([[2., -1.], [1.5, 1.]]) # pylint: disable=invalid-name
f = tf.matmul(x, W)
j_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)
with tf.Session() as sess:
jacobian = sess.run(j_tensor)
    # The transpose of W appears in 'expected' because, in vector notation,
    # f = W^T x.
expected = tf.tile([[[2, 1.5], [-1, 1]]], [_BATCH_SIZE, 1, 1])
self.assertAllClose(jacobian, expected)
def test_jacobian_against_slow_version(self):
x = tf.random_normal([_BATCH_SIZE, 2])
h1 = tf.contrib.layers.fully_connected(x, 20)
h2 = tf.contrib.layers.fully_connected(h1, 20)
f = tf.contrib.layers.fully_connected(h2, 10)
j_slow_tensor = SlowJacobian(xs=x, fx=f)
j_fast_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
j_fast, j_slow = sess.run([j_fast_tensor, j_slow_tensor])
self.assertAllClose(j_fast, j_slow)
def test_jacobian_numerically(self):
x = tf.random_normal([_BATCH_SIZE, 2])
h1 = tf.contrib.layers.fully_connected(x, 20)
h2 = tf.contrib.layers.fully_connected(h1, 20)
f = tf.contrib.layers.fully_connected(h2, 10)
j_tensor = jacobian_conditioning.compute_jacobian(xs=x, fx=f)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
x_np = sess.run(x)
jacobian = sess.run(j_tensor, feed_dict={x: x_np})
# Test 10 random elements.
for _ in range(10):
# Pick a random element of Jacobian to test.
batch_idx = np.random.randint(_BATCH_SIZE)
x_idx = np.random.randint(2)
f_idx = np.random.randint(10)
# Test with finite differences.
epsilon = 1e-4
x_plus = x_np.copy()
x_plus[batch_idx, x_idx] += epsilon
f_plus = sess.run(f, feed_dict={x: x_plus})[batch_idx, f_idx]
x_minus = x_np.copy()
x_minus[batch_idx, x_idx] -= epsilon
f_minus = sess.run(f, feed_dict={x: x_minus})[batch_idx, f_idx]
self.assertAllClose(
jacobian[batch_idx, f_idx, x_idx],
(f_plus - f_minus) / (2. * epsilon),
rtol=1e-3,
atol=1e-3)
def test_analyze_metric_tensor(self):
# Assumes NumPy works, just tests that output shapes are as expected.
jacobian = np.random.normal(0, 1, (_BATCH_SIZE, 2, 10))
metric_tensor = np.matmul(np.transpose(jacobian, [0, 2, 1]), jacobian)
result_dict = jacobian_conditioning._analyze_metric_tensor(metric_tensor)
self.assertAllEqual(result_dict['eigenvalues'].shape, [_BATCH_SIZE, 10])
self.assertAllEqual(result_dict['logdet'].shape, [_BATCH_SIZE])
self.assertAllEqual(result_dict['log_condition_number'].shape,
[_BATCH_SIZE])
def test_analyze_jacobian(self):
m = mock.patch.object(
jacobian_conditioning, '_analyze_metric_tensor', new=lambda x: x)
m.start()
jacobian = np.array([[[1, 2], [3, 4]], [[2, 4], [6, 8]]])
result_dict = jacobian_conditioning.analyze_jacobian(jacobian)
self.assertAllEqual(result_dict['metric_tensor'],
[[[10, 14], [14, 20]], [[40, 56], [56, 80]]])
self.assertAllEqual(result_dict['mean_metric_tensor'],
[[[25, 35], [35, 50]]])
m.stop()
if __name__ == '__main__':
tf.test.main()
|
import unittest
from perfkitbenchmarker import custom_virtual_machine_spec
from perfkitbenchmarker import errors
import six
_COMPONENT = 'test_component'
_FLAGS = None
_STRING_TYPE_NAME = six.string_types[0].__name__
class MemoryDecoderTestCase(unittest.TestCase):
def setUp(self):
super(MemoryDecoderTestCase, self).setUp()
self.decoder = custom_virtual_machine_spec.MemoryDecoder(option='memory')
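  # Decode() normalizes memory strings to an integer number of MiB,
  # e.g. '7.5GiB' -> 7680.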
def testValidStrings(self):
self.assertEqual(self.decoder.Decode('1280MiB', _COMPONENT, _FLAGS), 1280)
self.assertEqual(self.decoder.Decode('7.5GiB', _COMPONENT, _FLAGS), 7680)
def testImproperPattern(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self.decoder.Decode('1280', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.memory value: "1280". Examples of valid '
'values: "1280MiB", "7.5GiB".'))
def testInvalidFloat(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self.decoder.Decode('1280.9.8MiB', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.memory value: "1280.9.8MiB". "1280.9.8" is not '
'a valid float.'))
def testNonIntegerMiB(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self.decoder.Decode('7.6GiB', _COMPONENT, _FLAGS)
self.assertEqual(str(cm.exception), (
'Invalid test_component.memory value: "7.6GiB". The specified size '
'must be an integer number of MiB.'))
class CustomMachineTypeSpecTestCase(unittest.TestCase):
def testValid(self):
result = custom_virtual_machine_spec.CustomMachineTypeSpec(
_COMPONENT, cpus=1, memory='7.5GiB')
self.assertEqual(result.cpus, 1)
self.assertEqual(result.memory, 7680)
def testMissingCpus(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
custom_virtual_machine_spec.CustomMachineTypeSpec(
_COMPONENT, memory='7.5GiB')
self.assertEqual(str(cm.exception), (
'Required options were missing from test_component: cpus.'))
def testMissingMemory(self):
with self.assertRaises(errors.Config.MissingOption) as cm:
custom_virtual_machine_spec.CustomMachineTypeSpec(_COMPONENT, cpus=1)
self.assertEqual(str(cm.exception), (
'Required options were missing from test_component: memory.'))
def testExtraOptions(self):
with self.assertRaises(errors.Config.UnrecognizedOption) as cm:
custom_virtual_machine_spec.CustomMachineTypeSpec(
_COMPONENT, cpus=1, memory='7.5GiB', extra1='one', extra2=2)
self.assertEqual(str(cm.exception), (
'Unrecognized options were found in test_component: extra1, extra2.'))
def testInvalidCpus(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
custom_virtual_machine_spec.CustomMachineTypeSpec(_COMPONENT, cpus=0,
memory='7.5GiB')
self.assertEqual(str(cm.exception), (
'Invalid test_component.cpus value: "0". Value must be at least 1.'))
def testInvalidMemory(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
custom_virtual_machine_spec.CustomMachineTypeSpec(
_COMPONENT, cpus=1, memory=None)
self.assertEqual(str(cm.exception), (
'Invalid test_component.memory value: "None" (of type "NoneType"). '
'Value must be one of the following types: %s.' % _STRING_TYPE_NAME))
class MachineTypeDecoderTestCase(unittest.TestCase):
def setUp(self):
super(MachineTypeDecoderTestCase, self).setUp()
self.decoder = custom_virtual_machine_spec.MachineTypeDecoder(
option='machine_type')
def testDecodeString(self):
result = self.decoder.Decode('n1-standard-8', _COMPONENT, {})
self.assertEqual(result, 'n1-standard-8')
def testDecodeCustomVm(self):
result = self.decoder.Decode({'cpus': 1, 'memory': '7.5GiB'}, _COMPONENT,
{})
self.assertIsInstance(result,
custom_virtual_machine_spec.CustomMachineTypeSpec)
self.assertEqual(result.cpus, 1)
self.assertEqual(result.memory, 7680)
def testDecodeInvalidType(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self.decoder.Decode(None, _COMPONENT, {})
self.assertEqual(str(cm.exception), (
'Invalid test_component.machine_type value: "None" (of type '
'"NoneType"). Value must be one of the following types: %s, '
'dict.' % _STRING_TYPE_NAME))
def testDecodeInvalidValue(self):
with self.assertRaises(errors.Config.InvalidValue) as cm:
self.decoder.Decode({'cpus': 0, 'memory': '7.5GiB'}, _COMPONENT, {})
self.assertEqual(str(cm.exception), (
'Invalid test_component.machine_type.cpus value: "0". Value must be at '
'least 1.'))
if __name__ == '__main__':
unittest.main()
|
import pytest
from homeassistant.helpers import config_validation as cv, template
async def test_static_vars():
"""Test static vars."""
orig = {"hello": "world"}
var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
rendered = var.async_render(None, None)
assert rendered is not orig
assert rendered == orig
async def test_static_vars_run_args():
"""Test static vars."""
orig = {"hello": "world"}
orig_copy = dict(orig)
var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
rendered = var.async_render(None, {"hello": "override", "run": "var"})
assert rendered == {"hello": "override", "run": "var"}
# Make sure we don't change original vars
assert orig == orig_copy
async def test_static_vars_no_default():
"""Test static vars."""
orig = {"hello": "world"}
var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
rendered = var.async_render(None, None, render_as_defaults=False)
assert rendered is not orig
assert rendered == orig
async def test_static_vars_run_args_no_default():
"""Test static vars."""
orig = {"hello": "world"}
orig_copy = dict(orig)
var = cv.SCRIPT_VARIABLES_SCHEMA(orig)
rendered = var.async_render(
None, {"hello": "override", "run": "var"}, render_as_defaults=False
)
assert rendered == {"hello": "world", "run": "var"}
# Make sure we don't change original vars
assert orig == orig_copy
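# Taken together with the template-var tests below, these cases show the
# render_as_defaults semantics: with the default (True) the caller-provided
# run variables take precedence, while with render_as_defaults=False the
# rendered schema values take precedence.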
async def test_template_vars(hass):
"""Test template vars."""
var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ 1 + 1 }}"})
rendered = var.async_render(hass, None)
assert rendered == {"hello": 2}
async def test_template_vars_run_args(hass):
"""Test template vars."""
var = cv.SCRIPT_VARIABLES_SCHEMA(
{
"something": "{{ run_var_ex + 1 }}",
"something_2": "{{ run_var_ex + 1 }}",
}
)
rendered = var.async_render(
hass,
{
"run_var_ex": 5,
"something_2": 1,
},
)
assert rendered == {
"run_var_ex": 5,
"something": 6,
"something_2": 1,
}
async def test_template_vars_no_default(hass):
"""Test template vars."""
var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ 1 + 1 }}"})
rendered = var.async_render(hass, None, render_as_defaults=False)
assert rendered == {"hello": 2}
async def test_template_vars_run_args_no_default(hass):
"""Test template vars."""
var = cv.SCRIPT_VARIABLES_SCHEMA(
{
"something": "{{ run_var_ex + 1 }}",
"something_2": "{{ run_var_ex + 1 }}",
}
)
rendered = var.async_render(
hass,
{
"run_var_ex": 5,
"something_2": 1,
},
render_as_defaults=False,
)
assert rendered == {
"run_var_ex": 5,
"something": 6,
"something_2": 6,
}
async def test_template_vars_error(hass):
"""Test template vars."""
var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ canont.work }}"})
with pytest.raises(template.TemplateError):
var.async_render(hass, None)
|
from .util import async_init_integration
async def test_air_con_create_sensors(hass):
"""Test creation of aircon sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.air_conditioning_power")
assert state.state == "ON"
state = hass.states.get("sensor.air_conditioning_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.air_conditioning_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.air_conditioning_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.air_conditioning_temperature")
assert state.state == "24.76"
state = hass.states.get("sensor.air_conditioning_ac")
assert state.state == "ON"
state = hass.states.get("sensor.air_conditioning_overlay")
assert state.state == "True"
state = hass.states.get("sensor.air_conditioning_humidity")
assert state.state == "60.9"
state = hass.states.get("sensor.air_conditioning_open_window")
assert state.state == "False"
async def test_heater_create_sensors(hass):
"""Test creation of heater sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.baseboard_heater_power")
assert state.state == "ON"
state = hass.states.get("sensor.baseboard_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.baseboard_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.baseboard_heater_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.baseboard_heater_temperature")
assert state.state == "20.65"
state = hass.states.get("sensor.baseboard_heater_early_start")
assert state.state == "False"
state = hass.states.get("sensor.baseboard_heater_overlay")
assert state.state == "True"
state = hass.states.get("sensor.baseboard_heater_humidity")
assert state.state == "45.2"
state = hass.states.get("sensor.baseboard_heater_open_window")
assert state.state == "False"
async def test_water_heater_create_sensors(hass):
"""Test creation of water heater sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.water_heater_tado_mode")
assert state.state == "HOME"
state = hass.states.get("sensor.water_heater_link")
assert state.state == "ONLINE"
state = hass.states.get("sensor.water_heater_overlay")
assert state.state == "False"
state = hass.states.get("sensor.water_heater_power")
assert state.state == "ON"
async def test_home_create_sensors(hass):
"""Test creation of home sensors."""
await async_init_integration(hass)
state = hass.states.get("sensor.home_name_tado_bridge_status")
assert state.state == "True"
|
import requests
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorEntity
from homeassistant.const import CONF_HOST
import homeassistant.helpers.config_validation as cv
from . import (
CONF_BOUNCETIME,
CONF_INVERT_LOGIC,
CONF_PULL_MODE,
DEFAULT_BOUNCETIME,
DEFAULT_INVERT_LOGIC,
DEFAULT_PULL_MODE,
)
from .. import remote_rpi_gpio
CONF_PORTS = "ports"
_SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORTS): _SENSORS_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
vol.Optional(CONF_BOUNCETIME, default=DEFAULT_BOUNCETIME): cv.positive_int,
vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Raspberry PI GPIO devices."""
address = config["host"]
invert_logic = config[CONF_INVERT_LOGIC]
pull_mode = config[CONF_PULL_MODE]
ports = config["ports"]
bouncetime = config[CONF_BOUNCETIME] / 1000
devices = []
for port_num, port_name in ports.items():
try:
button = remote_rpi_gpio.setup_input(
address, port_num, pull_mode, bouncetime
)
except (ValueError, IndexError, KeyError, OSError):
return
new_sensor = RemoteRPiGPIOBinarySensor(port_name, button, invert_logic)
devices.append(new_sensor)
add_entities(devices, True)
class RemoteRPiGPIOBinarySensor(BinarySensorEntity):
"""Represent a binary sensor that uses a Remote Raspberry Pi GPIO."""
def __init__(self, name, button, invert_logic):
"""Initialize the RPi binary sensor."""
self._name = name
self._invert_logic = invert_logic
self._state = False
self._button = button
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
def read_gpio():
"""Read state from GPIO."""
self._state = remote_rpi_gpio.read_input(self._button)
self.schedule_update_ha_state()
self._button.when_released = read_gpio
self._button.when_pressed = read_gpio
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return the state of the entity."""
return self._state != self._invert_logic
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return
def update(self):
"""Update the GPIO state."""
try:
self._state = remote_rpi_gpio.read_input(self._button)
except requests.exceptions.ConnectionError:
return
|
import os
import time
# mypy: allow-untyped-calls, allow-untyped-defs
def install_osx():
"""Set up to run via launchd on OS X."""
with os.popen("which hass") as inp:
hass_path = inp.read().strip()
with os.popen("whoami") as inp:
user = inp.read().strip()
template_path = os.path.join(os.path.dirname(__file__), "launchd.plist")
with open(template_path, encoding="utf-8") as tinp:
plist = tinp.read()
plist = plist.replace("$HASS_PATH$", hass_path)
plist = plist.replace("$USER$", user)
path = os.path.expanduser("~/Library/LaunchAgents/org.homeassistant.plist")
try:
with open(path, "w", encoding="utf-8") as outp:
outp.write(plist)
except OSError as err:
print(f"Unable to write to {path}", err)
return
os.popen(f"launchctl load -w -F {path}")
print(
"Home Assistant has been installed. \
Open it here: http://localhost:8123"
)
def uninstall_osx():
"""Unload from launchd on OS X."""
path = os.path.expanduser("~/Library/LaunchAgents/org.homeassistant.plist")
os.popen(f"launchctl unload {path}")
print("Home Assistant has been uninstalled.")
def run(args):
"""Handle OSX commandline script."""
commands = "install", "uninstall", "restart"
if not args or args[0] not in commands:
print("Invalid command. Available commands:", ", ".join(commands))
return 1
if args[0] == "install":
install_osx()
return 0
if args[0] == "uninstall":
uninstall_osx()
return 0
if args[0] == "restart":
uninstall_osx()
# A small delay is needed on some systems to let the unload finish.
time.sleep(0.5)
install_osx()
return 0
|
import os
import re
import tempfile
from django.conf import settings
from django.core.management.base import CommandError
from weblate.formats.models import FILE_FORMATS
from weblate.lang.models import Language
from weblate.logger import LOGGER
from weblate.trans.discovery import ComponentDiscovery
from weblate.trans.models import Component, Project
from weblate.trans.util import is_repo_link
from weblate.utils.files import remove_tree
from weblate.utils.management.base import BaseCommand
from weblate.vcs.base import RepositoryException
from weblate.vcs.models import VCS_REGISTRY
class Command(BaseCommand):
"""Command for mass importing of repositories into Weblate."""
help = "imports projects with more components"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--name-template",
default="{{ component }}",
help=(
"Template string, transforming the filemask " "match to a project name"
),
)
parser.add_argument(
"--base-file-template",
default="",
help=(
"Template string, transforming the filemask "
"match to a monolingual base filename"
),
)
parser.add_argument(
"--new-base-template",
default="",
help=(
"Template string, transforming the filemask "
"match to a base filename for new translations"
),
)
parser.add_argument(
"--file-format",
default="po",
help="File format type, defaults to Gettext PO",
)
parser.add_argument(
"--language-regex",
default="^[^.]+$",
help=(
"Language filter regular expression to be used for created"
" components"
),
)
parser.add_argument(
"--license", default="", help="License of imported components"
)
parser.add_argument(
"--license-url", default="", help="License URL of imported components"
)
parser.add_argument(
"--vcs", default=settings.DEFAULT_VCS, help="Version control system to use"
)
parser.add_argument(
"--push-url", default="", help="Set push URL for the project"
)
parser.add_argument(
"--push-url-same",
action="store_true",
default=False,
help="Set push URL for the project to same as pull",
)
parser.add_argument(
"--disable-push-on-commit",
action="store_false",
default=settings.DEFAULT_PUSH_ON_COMMIT,
dest="push_on_commit",
help="Disable push on commit for created components",
)
parser.add_argument(
"--push-on-commit",
action="store_true",
default=settings.DEFAULT_PUSH_ON_COMMIT,
dest="push_on_commit",
help="Enable push on commit for created components",
)
parser.add_argument(
"--main-component",
default=None,
help=(
"Define which component will be used as main - including full"
" VCS repository"
),
)
parser.add_argument(
"--source-language",
default=settings.DEFAULT_LANGUAGE,
help="Source language code",
)
parser.add_argument("project", help="Existing project slug")
parser.add_argument("repo", help="VCS repository URL")
parser.add_argument("branch", help="VCS repository branch")
parser.add_argument("filemask", help="File mask")
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.filemask = None
self.component_re = None
self.file_format = None
self.language_regex = None
self.license = None
self.main_component = None
self.name_template = None
self.base_file_template = None
self.new_base_template = None
self.vcs = None
self.push_url = None
self.logger = LOGGER
self.push_on_commit = True
self.discovery = None
def checkout_tmp(self, project, repo, branch):
"""Checkout project to temporary location."""
# Create temporary working dir
workdir = tempfile.mkdtemp(dir=project.full_path)
# Make the temporary directory readable by others
os.chmod(workdir, 0o755) # nosec
# Initialize git repository
self.logger.info("Cloning git repository...")
try:
gitrepo = VCS_REGISTRY[self.vcs].clone(repo, workdir, branch)
except RepositoryException as error:
raise CommandError(f"Failed clone: {error}")
self.logger.info("Updating working copy in git repository...")
with gitrepo.lock:
gitrepo.configure_branch(branch)
return workdir
def parse_options(self, repo, options):
"""Parse parameters."""
self.filemask = options["filemask"]
self.vcs = options["vcs"]
if options["push_url_same"]:
self.push_url = repo
else:
self.push_url = options["push_url"]
self.file_format = options["file_format"]
self.language_regex = options["language_regex"]
self.main_component = options["main_component"]
self.name_template = options["name_template"]
self.source_language = Language.objects.get(code=options["source_language"])
if "%s" in self.name_template:
self.name_template = self.name_template.replace("%s", "{{ component }}")
self.license = options["license"]
self.push_on_commit = options["push_on_commit"]
self.base_file_template = options["base_file_template"]
self.new_base_template = options["new_base_template"]
if "%s" in self.base_file_template:
self.base_file_template = self.base_file_template.replace(
"%s", "{{ component }}"
)
# Is file format supported?
if self.file_format not in FILE_FORMATS:
raise CommandError("Invalid file format: {}".format(options["file_format"]))
# Is vcs supported?
if self.vcs not in VCS_REGISTRY:
raise CommandError("Invalid vcs: {}".format(options["vcs"]))
# Do we have correct mask?
        # - if there is **, then it's a simple mask (** is invalid in a regexp)
# - validate regexp otherwise
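        # Illustrative example (assuming Python 3.7+ re.escape): a mask such as
        # 'locale/**/*.po' becomes roughly
        # r'locale/(?P<component>[^/]*)/(?P<language>[^/]*)\.po'.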
if "**" in self.filemask and "*" in self.filemask.replace("**", ""):
match = re.escape(self.filemask)
match = match.replace(r"\*\*", "(?P<component>[[WILDCARD]])", 1)
match = match.replace(r"\*\*", "(?P=component)")
match = match.replace(r"\*", "(?P<language>[[WILDCARD]])", 1)
match = match.replace(r"\*", "(?P=language)")
match = match.replace("[[WILDCARD]]", "[^/]*")
self.filemask = match
else:
try:
compiled = re.compile(self.filemask)
except re.error as error:
raise CommandError(
f'Failed to compile regular expression "{self.filemask}": {error}'
)
if (
"component" not in compiled.groupindex
or "language" not in compiled.groupindex
):
raise CommandError(
"Component regular expression lacks named group "
'"component" and/or "language"'
)
def handle(self, *args, **options):
"""Automatic import of project."""
# Read params
repo = options["repo"]
branch = options["branch"]
self.parse_options(repo, options)
# Try to get project
try:
project = Project.objects.get(slug=options["project"])
except Project.DoesNotExist:
raise CommandError(
'Project "{}" not found, please create it first!'.format(
options["project"]
)
)
# Get or create main component
if is_repo_link(repo):
try:
component = Component.objects.get_linked(repo)
# Avoid operating on link
if component.is_repo_link:
component = component.linked_component
except Component.DoesNotExist:
raise CommandError(
f'Component "{repo}" not found, please create it first!'
)
else:
component = self.import_initial(project, repo, branch)
discovery = self.get_discovery(component)
discovery.perform()
def get_discovery(self, component, path=None):
"""Return discovery object after doing basic sanity check."""
if self.discovery is not None:
self.discovery.component = component
else:
self.discovery = ComponentDiscovery(
component,
match=self.filemask,
name_template=self.name_template,
language_regex=self.language_regex,
base_file_template=self.base_file_template,
new_base_template=self.new_base_template,
file_format=self.file_format,
path=path,
)
self.logger.info(
"Found %d matching files", len(self.discovery.matched_files)
)
if not self.discovery.matched_files:
raise CommandError("Your mask did not match any files!")
self.logger.info(
"Found %d components", len(self.discovery.matched_components)
)
langs = set()
for match in self.discovery.matched_components.values():
langs.update(match["languages"])
self.logger.info("Found %d languages", len(langs))
# Do some basic sanity check on languages
if Language.objects.filter(code__in=langs).count() == 0:
raise CommandError(
"None of matched languages exists, maybe you have "
"mixed * and ** in the mask?"
)
return self.discovery
def import_initial(self, project, repo, branch):
"""Import the first repository of a project."""
# Checkout git to temporary dir
workdir = self.checkout_tmp(project, repo, branch)
# Create fake discovery without existing component
discovery = self.get_discovery(None, workdir)
components = project.component_set.all()
component = None
# Create first component (this one will get full git repo)
if self.main_component:
match = None
for match in discovery.matched_components.values():
if match["slug"] == self.main_component:
break
if match is None or match["slug"] != self.main_component:
raise CommandError(
"Specified --main-component was not found in matches!"
)
else:
# Try if one is already there
for match in discovery.matched_components.values():
try:
component = components.get(repo=repo, filemask=match["mask"])
except Component.DoesNotExist:
continue
# Pick random
if component is None:
match = list(discovery.matched_components.values())[0]
try:
if component is None:
component = components.get(slug=match["slug"])
self.logger.warning(
"Component %s already exists, skipping and using it "
"as a main component",
match["slug"],
)
remove_tree(workdir)
except Component.DoesNotExist:
self.logger.info("Creating component %s as main one", match["slug"])
# Rename gitrepository to new name
os.rename(workdir, os.path.join(project.full_path, match["slug"]))
# Create new component
component = discovery.create_component(
None,
match,
project=project,
source_language=self.source_language,
repo=repo,
branch=branch,
vcs=self.vcs,
push_on_commit=self.push_on_commit,
license=self.license,
)
return component
|
from datetime import datetime, timedelta
from pylgnetcast import LgNetCastClient, LgNetCastError
from requests import RequestException
import voluptuous as vol
from homeassistant import util
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_HOST,
CONF_NAME,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.script import Script
DEFAULT_NAME = "LG TV Remote"
CONF_ON_ACTION = "turn_on_action"
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
SUPPORT_LGTV = (
SUPPORT_PAUSE
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_TURN_OFF
| SUPPORT_SELECT_SOURCE
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_ON_ACTION): cv.SCRIPT_SCHEMA,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_ACCESS_TOKEN): vol.All(cv.string, vol.Length(max=6)),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the LG TV platform."""
host = config.get(CONF_HOST)
access_token = config.get(CONF_ACCESS_TOKEN)
name = config.get(CONF_NAME)
on_action = config.get(CONF_ON_ACTION)
client = LgNetCastClient(host, access_token)
domain = __name__.split(".")[-2]
on_action_script = Script(hass, on_action, name, domain) if on_action else None
add_entities([LgTVDevice(client, name, on_action_script)], True)
class LgTVDevice(MediaPlayerEntity):
"""Representation of a LG TV."""
def __init__(self, client, name, on_action_script):
"""Initialize the LG TV device."""
self._client = client
self._name = name
self._muted = False
self._on_action_script = on_action_script
# Assume that the TV is in Play mode
self._playing = True
self._volume = 0
self._channel_name = ""
self._program_name = ""
self._state = None
self._sources = {}
self._source_names = []
def send_command(self, command):
"""Send remote control commands to the TV."""
try:
with self._client as client:
client.send_command(command)
except (LgNetCastError, RequestException):
self._state = STATE_OFF
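    # Polling is throttled: update() queries the TV at most once per
    # MIN_TIME_BETWEEN_SCANS, or once per MIN_TIME_BETWEEN_FORCED_SCANS when a
    # forced refresh is requested.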
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update(self):
"""Retrieve the latest data from the LG TV."""
try:
with self._client as client:
self._state = STATE_PLAYING
volume_info = client.query_data("volume_info")
if volume_info:
volume_info = volume_info[0]
self._volume = float(volume_info.find("level").text)
self._muted = volume_info.find("mute").text == "true"
channel_info = client.query_data("cur_channel")
if channel_info:
channel_info = channel_info[0]
self._channel_name = channel_info.find("chname").text
self._program_name = channel_info.find("progName").text
if self._channel_name is None:
self._channel_name = channel_info.find("inputSourceName").text
if self._program_name is None:
self._program_name = channel_info.find("labelName").text
channel_list = client.query_data("channel_list")
if channel_list:
channel_names = []
for channel in channel_list:
channel_name = channel.find("chname")
if channel_name is not None:
channel_names.append(str(channel_name.text))
self._sources = dict(zip(channel_names, channel_list))
# sort source names by the major channel number
source_tuples = [
(k, self._sources[k].find("major").text) for k in self._sources
]
sorted_sources = sorted(
source_tuples, key=lambda channel: int(channel[1])
)
self._source_names = [n for n, k in sorted_sources]
except (LgNetCastError, RequestException):
self._state = STATE_OFF
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume / 100.0
@property
def source(self):
"""Return the current input source."""
return self._channel_name
@property
def source_list(self):
"""List of available input sources."""
return self._source_names
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_CHANNEL
@property
def media_channel(self):
"""Channel currently playing."""
return self._channel_name
@property
def media_title(self):
"""Title of current playing media."""
return self._program_name
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self._on_action_script:
return SUPPORT_LGTV | SUPPORT_TURN_ON
return SUPPORT_LGTV
@property
def media_image_url(self):
"""URL for obtaining a screen capture."""
return (
f"{self._client.url}data?target=screen_image&_={datetime.now().timestamp()}"
)
def turn_off(self):
"""Turn off media player."""
self.send_command(1)
def turn_on(self):
"""Turn on the media player."""
if self._on_action_script:
self._on_action_script.run(context=self._context)
def volume_up(self):
"""Volume up the media player."""
self.send_command(24)
def volume_down(self):
"""Volume down media player."""
self.send_command(25)
def mute_volume(self, mute):
"""Send mute command."""
self.send_command(26)
def select_source(self, source):
"""Select input source."""
self._client.change_channel(self._sources[source])
def media_play_pause(self):
"""Simulate play pause media player."""
if self._playing:
self.media_pause()
else:
self.media_play()
def media_play(self):
"""Send play command."""
self._playing = True
self._state = STATE_PLAYING
self.send_command(33)
def media_pause(self):
"""Send media pause command to media player."""
self._playing = False
self._state = STATE_PAUSED
self.send_command(34)
def media_next_track(self):
"""Send next track command."""
self.send_command(36)
def media_previous_track(self):
"""Send the previous track command."""
self.send_command(37)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import io
import os
import re
import sys
import tempfile
from absl import logging
from absl.testing import absltest
import mock
logging.get_verbosity() # Access --verbosity before flag parsing.
# Access --logtostderr before flag parsing.
logging.get_absl_handler().use_absl_log_file()
class Error(Exception):
pass
@contextlib.contextmanager
def captured_stderr_filename():
"""Captures stderr and writes them to a temporary file.
This uses os.dup/os.dup2 to redirect the stderr fd for capturing standard
  error of logging at import time. We cannot mock sys.stderr because, on the
  first log call, a default log handler writing to the mock sys.stderr is
  registered and never removed, so subsequent logs would go to the mock in
  addition to the real stderr.
Yields:
The filename of captured stderr.
"""
stderr_capture_file_fd, stderr_capture_file_name = tempfile.mkstemp()
original_stderr_fd = os.dup(sys.stderr.fileno())
os.dup2(stderr_capture_file_fd, sys.stderr.fileno())
try:
yield stderr_capture_file_name
finally:
os.close(stderr_capture_file_fd)
os.dup2(original_stderr_fd, sys.stderr.fileno())
# Pre-initialization (aka "import" / __main__ time) test.
with captured_stderr_filename() as before_set_verbosity_filename:
# Warnings and above go to stderr.
logging.debug('Debug message at parse time.')
logging.info('Info message at parse time.')
logging.error('Error message at parse time.')
logging.warning('Warning message at parse time.')
try:
raise Error('Exception reason.')
except Error:
logging.exception('Exception message at parse time.')
logging.set_verbosity(logging.ERROR)
with captured_stderr_filename() as after_set_verbosity_filename:
# Verbosity is set to ERROR, errors and above go to stderr.
logging.debug('Debug message at parse time.')
logging.info('Info message at parse time.')
logging.warning('Warning message at parse time.')
logging.error('Error message at parse time.')
class LoggingInitWarningTest(absltest.TestCase):
def test_captured_pre_init_warnings(self):
with open(before_set_verbosity_filename) as stderr_capture_file:
captured_stderr = stderr_capture_file.read()
self.assertNotIn('Debug message at parse time.', captured_stderr)
self.assertNotIn('Info message at parse time.', captured_stderr)
traceback_re = re.compile(
r'\nTraceback \(most recent call last\):.*?Error: Exception reason.',
re.MULTILINE | re.DOTALL)
if not traceback_re.search(captured_stderr):
self.fail(
'Cannot find traceback message from logging.exception '
'in stderr:\n{}'.format(captured_stderr))
# Remove the traceback so the rest of the stderr is deterministic.
captured_stderr = traceback_re.sub('', captured_stderr)
captured_stderr_lines = captured_stderr.splitlines()
self.assertLen(captured_stderr_lines, 3)
self.assertIn('Error message at parse time.', captured_stderr_lines[0])
self.assertIn('Warning message at parse time.', captured_stderr_lines[1])
self.assertIn('Exception message at parse time.', captured_stderr_lines[2])
def test_set_verbosity_pre_init(self):
with open(after_set_verbosity_filename) as stderr_capture_file:
captured_stderr = stderr_capture_file.read()
captured_stderr_lines = captured_stderr.splitlines()
self.assertNotIn('Debug message at parse time.', captured_stderr)
self.assertNotIn('Info message at parse time.', captured_stderr)
self.assertNotIn('Warning message at parse time.', captured_stderr)
self.assertLen(captured_stderr_lines, 1)
self.assertIn('Error message at parse time.', captured_stderr_lines[0])
def test_no_more_warnings(self):
fake_stderr_type = io.BytesIO if bytes is str else io.StringIO
with mock.patch('sys.stderr', new=fake_stderr_type()) as mock_stderr:
self.assertMultiLineEqual('', mock_stderr.getvalue())
logging.warning('Hello. hello. hello. Is there anybody out there?')
self.assertNotIn('Logging before flag parsing goes to stderr',
mock_stderr.getvalue())
logging.info('A major purpose of this executable is merely not to crash.')
if __name__ == '__main__':
absltest.main() # This calls the app.run() init equivalent.
|
import os
from unittest import mock
import pytest
import nikola.plugins.command.import_wordpress
def test_create_import_work_without_argument(import_command):
"""
Running import command without an argument must not fail.
It should show the proper usage of the command.
"""
import_command.execute()
@pytest.mark.parametrize(
"key, expected_value",
[
("DEFAULT_LANG", "de"),
("BLOG_TITLE", "Wordpress blog title"),
("BLOG_DESCRIPTION", "Nikola test blog ;) - with moré Ümläüts"),
("SITE_URL", "http://some.blog/"),
("BLOG_EMAIL", "[email protected]"),
("BLOG_AUTHOR", "Niko"),
],
)
def test_populate_context(import_command, import_filename, key, expected_value):
channel = import_command.get_channel_from_file(import_filename)
import_command.html2text = False
import_command.transform_to_markdown = False
import_command.transform_to_html = False
import_command.use_wordpress_compiler = False
import_command.translations_pattern = "{path}.{lang}.{ext}"
context = import_command.populate_context(channel)
for required_key in ("POSTS", "PAGES", "COMPILERS"):
assert required_key in context
assert expected_value == context[key]
def test_importing_posts_and_attachments(module, import_command, import_filename):
channel = import_command.get_channel_from_file(import_filename)
import_command.base_dir = ""
import_command.output_folder = "new_site"
import_command.squash_newlines = True
import_command.no_downloads = False
import_command.export_categories_as_categories = False
import_command.export_comments = False
import_command.html2text = False
import_command.transform_to_markdown = False
import_command.transform_to_html = False
import_command.use_wordpress_compiler = False
import_command.tag_saniziting_strategy = "first"
import_command.separate_qtranslate_content = False
import_command.translations_pattern = "{path}.{lang}.{ext}"
import_command.context = import_command.populate_context(channel)
# Ensuring clean results
# assert not import_command.url_map
assert not module.links
import_command.url_map = {}
write_metadata = mock.MagicMock()
write_content = mock.MagicMock()
write_attachments_info = mock.MagicMock()
download_mock = mock.MagicMock()
with mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.write_content",
write_content,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.write_metadata",
write_metadata,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.download_url_content_to_file",
download_mock,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.write_attachments_info",
write_attachments_info,
), mock.patch(
"nikola.plugins.command.import_wordpress.os.makedirs"
):
import_command.import_posts(channel)
assert download_mock.called
qpath = "new_site/files/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png"
download_mock.assert_any_call(
"http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png",
qpath.replace("/", os.sep),
)
assert write_metadata.called
write_metadata.assert_any_call(
"new_site/pages/kontakt.meta".replace("/", os.sep),
"Kontakt",
"kontakt",
"2009-07-16 20:20:32",
"",
[],
**{"wp-status": "publish"}
)
assert write_content.called
write_content.assert_any_call(
"new_site/posts/2007/04/hoert.md".replace("/", os.sep),
"""An image.
<img class="size-full wp-image-16" title="caption test" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="caption test" width="739" height="517" />
Some source code.
```Python
import sys
print sys.version
```
The end.
""",
True,
)
assert write_attachments_info.called
write_attachments_info.assert_any_call(
"new_site/posts/2008/07/arzt-und-pfusch-s-i-c-k.attachments.json".replace(
"/", os.sep
),
{
10: {
"wordpress_user_name": "Niko",
"files_meta": [
{"width": 300, "height": 299},
{"width": 150, "size": "thumbnail", "height": 150},
],
"excerpt": "Arzt+Pfusch - S.I.C.K.",
"date_utc": "2009-07-16 19:40:37",
"content": "Das Cover von Arzt+Pfusch - S.I.C.K.",
"files": [
"/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png",
"/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover-150x150.png",
],
"title": "Arzt+Pfusch - S.I.C.K.",
}
},
)
write_content.assert_any_call(
"new_site/posts/2008/07/arzt-und-pfusch-s-i-c-k.md".replace("/", os.sep),
"""<img class="size-full wp-image-10 alignright" title="Arzt+Pfusch - S.I.C.K." src="http://some.blog/wp-content/uploads/2008/07/arzt_und_pfusch-sick-cover.png" alt="Arzt+Pfusch - S.I.C.K." width="210" height="209" />Arzt+Pfusch - S.I.C.K.Gerade bin ich \xfcber das Album <em>S.I.C.K</em> von <a title="Arzt+Pfusch" href="http://www.arztpfusch.com/" target="_blank">Arzt+Pfusch</a> gestolpert, welches Arzt+Pfusch zum Download f\xfcr lau anbieten. Das Album steht unter einer Creative Commons <a href="http://creativecommons.org/licenses/by-nc-nd/3.0/de/">BY-NC-ND</a>-Lizenz.
Die Ladung <em>noisebmstupidevildustrial</em> gibts als MP3s mit <a href="http://www.archive.org/download/dmp005/dmp005_64kb_mp3.zip">64kbps</a> und <a href="http://www.archive.org/download/dmp005/dmp005_vbr_mp3.zip">VBR</a>, als Ogg Vorbis und als FLAC (letztere <a href="http://www.archive.org/details/dmp005">hier</a>). <a href="http://www.archive.org/download/dmp005/dmp005-artwork.zip">Artwork</a> und <a href="http://www.archive.org/download/dmp005/dmp005-lyrics.txt">Lyrics</a> gibts nochmal einzeln zum Download.""",
True,
)
write_content.assert_any_call(
"new_site/pages/kontakt.md".replace("/", os.sep),
"""<h1>Datenschutz</h1>
Ich erhebe und speichere automatisch in meine Server Log Files Informationen, die dein Browser an mich \xfcbermittelt. Dies sind:
<ul>
<li>Browsertyp und -version</li>
<li>verwendetes Betriebssystem</li>
<li>Referrer URL (die zuvor besuchte Seite)</li>
<li>IP Adresse des zugreifenden Rechners</li>
<li>Uhrzeit der Serveranfrage.</li>
</ul>
Diese Daten sind f\xfcr mich nicht bestimmten Personen zuordenbar. Eine Zusammenf\xfchrung dieser Daten mit anderen Datenquellen wird nicht vorgenommen, die Daten werden einzig zu statistischen Zwecken erhoben.""",
True,
)
assert len(import_command.url_map) > 0
assert (
"http://some.blog/posts/2007/04/hoert.html" ==
import_command.url_map["http://some.blog/2007/04/hoert/"]
)
assert (
"http://some.blog/posts/2008/07/arzt-und-pfusch-s-i-c-k.html" ==
import_command.url_map["http://some.blog/2008/07/arzt-und-pfusch-s-i-c-k/"]
)
assert (
"http://some.blog/pages/kontakt.html" ==
import_command.url_map["http://some.blog/kontakt/"]
)
image_thumbnails = [
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-64x64.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-300x175.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-36x36.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-24x24.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-48x48.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-96x96.png",
"http://some.blog/wp-content/uploads/2012/12/2012-12-19-1355925145_1024x600_scrot-150x150.png",
]
for link in image_thumbnails:
assert link in module.links
def test_transforming_content(import_command):
"""Applying markup conversions to content."""
import_command.html2text = False
import_command.transform_to_markdown = False
import_command.transform_to_html = False
import_command.use_wordpress_compiler = False
import_command.translations_pattern = "{path}.{lang}.{ext}"
transform_code = mock.MagicMock()
transform_caption = mock.MagicMock()
transform_newlines = mock.MagicMock()
with mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_code",
transform_code,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_caption",
transform_caption,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.transform_multiple_newlines",
transform_newlines,
):
import_command.transform_content("random content", "wp", None)
assert transform_code.called
assert transform_caption.called
assert transform_newlines.called
def test_transforming_source_code(import_command):
"""
Tests the handling of sourcecode tags.
"""
content = """Hello World.
[sourcecode language="Python"]
import sys
print sys.version
[/sourcecode]"""
content = import_command.transform_code(content)
assert "[/sourcecode]" not in content
assert "[sourcecode language=" not in content
replaced_content = """Hello World.
```Python
import sys
print sys.version
```"""
assert content == replaced_content
def test_transform_caption(import_command):
caption = '[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]'
transformed_content = import_command.transform_caption(caption)
expected_content = '<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />'
assert transformed_content == expected_content
def test_transform_multiple_captions_in_a_post(import_command):
content = """asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]
asdasdas
asdasdas
[caption id="attachment_16" align="alignnone" width="739" caption="beautiful picture"]<img class="size-full wp-image-16" title="pretty" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />[/caption]
asdasdas"""
expected_content = """asdasdas
<img class="size-full wp-image-16" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />
asdasdas
asdasdas
<img class="size-full wp-image-16" title="pretty" src="http://some.blog/wp-content/uploads/2009/07/caption_test.jpg" alt="beautiful picture" width="739" height="517" />
asdasdas"""
assert expected_content == import_command.transform_caption(content)
def test_transform_multiple_newlines(import_command):
content = """This
has
way to many
newlines.
"""
expected_content = """This
has
way to many
newlines.
"""
import_command.squash_newlines = False
assert content == import_command.transform_multiple_newlines(content)
import_command.squash_newlines = True
assert expected_content == import_command.transform_multiple_newlines(content)
def test_transform_caption_with_link_inside(import_command):
content = """[caption caption="Fehlermeldung"]<a href="http://some.blog/openttd-missing_sound.png"><img class="size-thumbnail wp-image-551" title="openttd-missing_sound" src="http://some.blog/openttd-missing_sound-150x150.png" alt="Fehlermeldung" /></a>[/caption]"""
transformed_content = import_command.transform_caption(content)
expected_content = """<a href="http://some.blog/openttd-missing_sound.png"><img class="size-thumbnail wp-image-551" title="openttd-missing_sound" src="http://some.blog/openttd-missing_sound-150x150.png" alt="Fehlermeldung" /></a>"""
assert expected_content == transformed_content
def test_get_configuration_output_path(import_command):
import_command.output_folder = "new_site"
default_config_path = os.path.join("new_site", "conf.py")
import_command.import_into_existing_site = False
assert default_config_path == import_command.get_configuration_output_path()
import_command.import_into_existing_site = True
config_path_with_timestamp = import_command.get_configuration_output_path()
assert default_config_path != config_path_with_timestamp
assert import_command.name in config_path_with_timestamp
def test_write_content_does_not_destroy_text(import_command):
content = b"""FOO"""
open_mock = mock.mock_open()
with mock.patch("nikola.plugins.basic_import.open", open_mock, create=True):
import_command.write_content("some_file", content)
open_mock.assert_has_calls(
[
mock.call(u"some_file", u"wb+"),
mock.call().__enter__(),
mock.call().write(b"<html><body><p>FOO</p></body></html>"),
mock.call().__exit__(None, None, None),
]
)
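# Editor's minimal sketch of the mock_open pattern used above, relying only on the
# standard library; `demo_write` is a hypothetical stand-in for write_content.
def test_mock_open_pattern_sketch():
    from unittest import mock as _mock  # local import keeps the sketch self-contained
    def demo_write(path, data):
        with open(path, "wb+") as handle:
            handle.write(data)
    open_mock = _mock.mock_open()
    with _mock.patch("builtins.open", open_mock):
        demo_write("some_file", b"FOO")
    open_mock.assert_called_once_with("some_file", "wb+")
    open_mock().write.assert_called_once_with(b"FOO")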
def test_configure_redirections(import_command):
"""
Testing the configuration of the redirections.
We need to make sure that we have valid sources and target links.
"""
url_map = {"/somewhere/else": "http://foo.bar/posts/somewhereelse.html"}
redirections = import_command.configure_redirections(url_map)
assert 1 == len(redirections)
assert ("somewhere/else/index.html", "/posts/somewhereelse.html") in redirections
@pytest.mark.parametrize(
"options, additional_args",
[
pytest.param(None, None, id="only import filename"),
({"output_folder": "some_folder"}, None),
(None, ["folder_argument"]),
],
)
def test_create_import(
patched_import_command, import_filename, mocks, options, additional_args
):
arguments = {"args": [import_filename]}
if options:
arguments["options"] = options
if additional_args:
arguments["args"].extend(additional_args)
patched_import_command.execute(**arguments)
for applied_mock in mocks:
assert applied_mock.called
assert patched_import_command.exclude_drafts is False
@pytest.mark.parametrize(
"options",
[
{"exclude_drafts": True},
{"exclude_drafts": True, "output_folder": "some_folder"},
],
)
def test_ignoring_drafts_during_import(
patched_import_command, import_filename, options
):
arguments = {"options": options, "args": [import_filename]}
patched_import_command.execute(**arguments)
assert patched_import_command.exclude_drafts is True
@pytest.fixture
def import_command(module):
command = module.CommandImportWordpress()
command.onefile = False
return command
@pytest.fixture
def module():
return nikola.plugins.command.import_wordpress
@pytest.fixture
def import_filename(test_dir):
return os.path.abspath(
os.path.join(
test_dir, "data", "wordpress_import", "wordpress_export_example.xml"
)
)
@pytest.fixture
def patched_import_command(import_command, testsite, mocks):
"""
Import command with disabled site generation and various functions mocked.
"""
data_import, site_generation, write_urlmap, write_configuration = mocks
import_command.site = testsite
with mock.patch("os.system", site_generation), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.import_posts",
data_import,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.write_urlmap_csv",
write_urlmap,
), mock.patch(
"nikola.plugins.command.import_wordpress.CommandImportWordpress.write_configuration",
write_configuration,
):
yield import_command
@pytest.fixture
def testsite():
return FakeSite()
class FakeSite:
def link(self, *args, **kwargs):
# We need a link function.
# Stubbed because there is nothing done with the results.
pass
@pytest.fixture
def mocks():
"Mocks to be used in `patched_import_command`"
return [
mock.MagicMock(name="data_import"),
mock.MagicMock(name="site_generation"),
mock.MagicMock(name="write_urlmap"),
mock.MagicMock(name="write_configuration"),
]
|
from typing import Callable, List, Optional
from python_awair.devices import AwairDevice
import voluptuous as vol
from homeassistant.components.awair import AwairDataUpdateCoordinator, AwairResult
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, CONF_ACCESS_TOKEN
from homeassistant.helpers import device_registry as dr
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
API_DUST,
API_PM25,
API_SCORE,
API_TEMP,
API_VOC,
ATTR_ICON,
ATTR_LABEL,
ATTR_UNIQUE_ID,
ATTR_UNIT,
ATTRIBUTION,
DOMAIN,
DUST_ALIASES,
LOGGER,
SENSOR_TYPES,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_ACCESS_TOKEN): cv.string},
extra=vol.ALLOW_EXTRA,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Import Awair configuration from YAML."""
LOGGER.warning(
"Loading Awair via platform setup is deprecated. Please remove it from your configuration."
)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
)
async def async_setup_entry(
hass: HomeAssistantType,
config_entry: ConfigType,
async_add_entities: Callable[[List[Entity], bool], None],
):
"""Set up Awair sensor entity based on a config entry."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
sensors = []
data: List[AwairResult] = coordinator.data.values()
for result in data:
if result.air_data:
sensors.append(AwairSensor(API_SCORE, result.device, coordinator))
device_sensors = result.air_data.sensors.keys()
for sensor in device_sensors:
if sensor in SENSOR_TYPES:
sensors.append(AwairSensor(sensor, result.device, coordinator))
# The "DUST" sensor for Awair is a combo pm2.5/pm10 sensor only
# present on first-gen devices in lieu of separate pm2.5/pm10 sensors.
# We handle that by creating fake pm2.5/pm10 sensors that will always
# report identical values, and we let users decide how they want to use
# that data - because we can't really tell what kind of particles the
# "DUST" sensor actually detected. However, it's still useful data.
if API_DUST in device_sensors:
for alias_kind in DUST_ALIASES:
sensors.append(AwairSensor(alias_kind, result.device, coordinator))
async_add_entities(sensors)
class AwairSensor(CoordinatorEntity):
"""Defines an Awair sensor entity."""
def __init__(
self,
kind: str,
device: AwairDevice,
coordinator: AwairDataUpdateCoordinator,
) -> None:
"""Set up an individual AwairSensor."""
super().__init__(coordinator)
self._kind = kind
self._device = device
@property
def name(self) -> str:
"""Return the name of the sensor."""
name = SENSOR_TYPES[self._kind][ATTR_LABEL]
if self._device.name:
name = f"{self._device.name} {name}"
return name
@property
def unique_id(self) -> str:
"""Return the uuid as the unique_id."""
unique_id_tag = SENSOR_TYPES[self._kind][ATTR_UNIQUE_ID]
# This integration used to create a sensor that was labelled as a "PM2.5"
# sensor for first-gen Awair devices, but its unique_id reflected the truth:
# under the hood, it was a "DUST" sensor. So we preserve that specific unique_id
# for users with first-gen devices that are upgrading.
if self._kind == API_PM25 and API_DUST in self._air_data.sensors:
unique_id_tag = "DUST"
return f"{self._device.uuid}_{unique_id_tag}"
@property
def available(self) -> bool:
"""Determine if the sensor is available based on API results."""
# If the last update was successful...
if self.coordinator.last_update_success and self._air_data:
# and the results included our sensor type...
if self._kind in self._air_data.sensors:
# then we are available.
return True
# or, we're a dust alias
if self._kind in DUST_ALIASES and API_DUST in self._air_data.sensors:
return True
# or we are API_SCORE
if self._kind == API_SCORE:
# then we are available.
return True
# Otherwise, we are not.
return False
@property
def state(self) -> float:
"""Return the state, rounding off to reasonable values."""
state: float
# Special-case for "SCORE", which we treat as the AQI
if self._kind == API_SCORE:
state = self._air_data.score
elif self._kind in DUST_ALIASES and API_DUST in self._air_data.sensors:
state = self._air_data.sensors.dust
else:
state = self._air_data.sensors[self._kind]
if self._kind == API_VOC or self._kind == API_SCORE:
return round(state)
if self._kind == API_TEMP:
return round(state, 1)
return round(state, 2)
@property
def icon(self) -> str:
"""Return the icon."""
return SENSOR_TYPES[self._kind][ATTR_ICON]
@property
def device_class(self) -> str:
"""Return the device_class."""
return SENSOR_TYPES[self._kind][ATTR_DEVICE_CLASS]
@property
def unit_of_measurement(self) -> str:
"""Return the unit the value is expressed in."""
return SENSOR_TYPES[self._kind][ATTR_UNIT]
@property
def device_state_attributes(self) -> dict:
"""Return the Awair Index alongside state attributes.
The Awair Index is a subjective score ranging from 0-4 (inclusive) that
is used by the Awair app when displaying the relative "safety" of a
given measurement. Each value is mapped to a color indicating the safety:
0: green
1: yellow
2: light-orange
3: orange
4: red
The API indicates that both positive and negative values may be returned,
but the negative values are mapped to identical colors as the positive values.
Knowing that, we just return the absolute value of a given index so that
users don't have to handle positive/negative values that ultimately "mean"
the same thing.
https://docs.developer.getawair.com/?version=latest#awair-score-and-index
"""
attrs = {ATTR_ATTRIBUTION: ATTRIBUTION}
if self._kind in self._air_data.indices:
attrs["awair_index"] = abs(self._air_data.indices[self._kind])
elif self._kind in DUST_ALIASES and API_DUST in self._air_data.indices:
attrs["awair_index"] = abs(self._air_data.indices.dust)
return attrs
@property
def device_info(self) -> dict:
"""Device information."""
info = {
"identifiers": {(DOMAIN, self._device.uuid)},
"manufacturer": "Awair",
"model": self._device.model,
}
if self._device.name:
info["name"] = self._device.name
if self._device.mac_address:
info["connections"] = {
(dr.CONNECTION_NETWORK_MAC, self._device.mac_address)
}
return info
@property
def _air_data(self) -> Optional[AwairResult]:
"""Return the latest data for our device, or None."""
result: Optional[AwairResult] = self.coordinator.data.get(self._device.uuid)
if result:
return result.air_data
return None
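# Editor's illustrative sketch (not part of this integration): the index-to-color
# mapping described in the device_state_attributes docstring, with abs() applied
# exactly as above. `_index_to_color` is a hypothetical helper for illustration only.
_AWAIR_INDEX_COLORS = {0: "green", 1: "yellow", 2: "light-orange", 3: "orange", 4: "red"}
def _index_to_color(index: float) -> str:
    """Map a raw Awair index (possibly negative) to its color label."""
    return _AWAIR_INDEX_COLORS[int(abs(index))]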
|
import unittest
import pandas as pd
import numpy as np
from tsfresh import extract_features
class TestTsFresh(unittest.TestCase):
def test_extract_feature(self):
ts = pd.DataFrame({
'id': np.array(['a', 'a', 'a', 'b', 'b', 'b']),
'time': np.array([0,1,2,0,1,2]),
'x': np.array([3,4,5,7,8,10])
})
extracted_features = extract_features(ts, column_id='id', column_sort='time')
self.assertEqual(2, len(extracted_features))
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from apcupsd import ApcupsdCollector
##########################################################################
class TestApcupsdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ApcupsdCollector', {
'interval': 10
})
self.collector = ApcupsdCollector(config, None)
def test_import(self):
self.assertTrue(ApcupsdCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_getdata = patch.object(ApcupsdCollector, 'getData', Mock(
return_value=(
'APC : 001,039,1056\n\x00' +
'\'DATE : 2012-07-16 12:53:58 -0700 \n\x00' +
' HOSTNAME : localhost\n\x00' +
'+VERSION : 3.14.8 (16 January 2010) redhat\n\x00' +
' UPSNAME : localhost\n\x00' +
'\x15CABLE : USB Cable\n\x00' +
'\x1dMODEL : Back-UPS BX1300G \n\x00' +
'\x17UPSMODE : Stand Alone\n\x00' +
'\'STARTTIME: 2011-12-07 10:28:24 -0800 \n\x00' +
'\x13STATUS : ONLINE \n\x00' +
'\x17LINEV : 124.0 Volts\n\x00' +
'\'LOADPCT : 5.0 Percent Load Capacity\n\x00' +
'\x19BCHARGE : 100.0 Percent\n\x00' +
'\x19TIMELEFT : 73.9 Minutes\n\x00' +
'\x15MBATTCHG : 5 Percent\n\x00' +
'\x15MINTIMEL : 3 Minutes\n\x00' +
'\x15MAXTIME : 0 Seconds\n\x00' +
'\x12SENSE : Medium\n\x00' +
'\x17LOTRANS : 088.0 Volts\n\x00' +
'\x17HITRANS : 139.0 Volts\n\x00' +
'\x12ALARMDEL : Always\n\x00' +
'\x16BATTV : 27.3 Volts\n\x00' +
'+LASTXFER : Automatic or explicit self test\n\x00' +
'\x0eNUMXFERS : 19\n\x00' +
'\'XONBATT : 2012-07-13 09:11:52 -0700 \n\x00' +
'\x15TONBATT : 0 seconds\n\x00' +
'\x17CUMONBATT: 130 seconds\n\x00' +
'\'XOFFBATT : 2012-07-13 09:12:01 -0700 \n\x00' +
'\'LASTSTEST: 2012-07-13 09:11:52 -0700 \n\x00' +
'\x0eSELFTEST : NO\n\x00' +
'"STATFLAG : 0x07000008 Status Flag\n\x00' +
'\x16MANDATE : 2009-10-08\n\x00' +
'\x1aSERIALNO : 3B0941X40219 \n\x00' +
'\x16BATTDATE : 2009-10-08\n\x00' +
'\x15NOMINV : 120 Volts\n\x00' +
'\x17NOMBATTV : 24.0 ')))
patch_getdata.start()
self.collector.collect()
patch_getdata.stop()
metrics = {
'localhost.LINEV': 124.000000,
'localhost.LOADPCT': 5.000000,
'localhost.BCHARGE': 100.000000,
'localhost.TIMELEFT': 73.900000,
'localhost.BATTV': 27.300000,
'localhost.NUMXFERS': 0.000000,
'localhost.TONBATT': 0.000000,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import os
from unittest import SkipTest
from django.db import transaction
from weblate.trans.models import Component
from weblate.trans.tests.test_views import ViewTestCase
from weblate.trans.tests.utils import REPOWEB_URL
from weblate.utils.files import remove_tree
from weblate.utils.state import STATE_TRANSLATED
from weblate.vcs.models import VCS_REGISTRY
EXTRA_PO = """
#: accounts/models.py:319 trans/views/basic.py:104 weblate/html/index.html:21
msgid "Languages"
msgstr "Jazyky"
"""
MINIMAL_PO = r"""
msgid ""
msgstr ""
"Project-Id-Version: Weblate Hello World 2012\n"
"Report-Msgid-Bugs-To: <[email protected]>\n"
"POT-Creation-Date: 2012-03-14 15:54+0100\n"
"PO-Revision-Date: 2013-08-25 15:23+0200\n"
"Last-Translator: testuser <>\n"
"Language-Team: Czech <http://example.com/projects/test/test/cs/>\n"
"Language: cs\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=3; plural=(n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2;\n"
"X-Generator: Weblate 1.7-dev\n"
#: main.c:11
#, c-format
msgid "Hello, world!\n"
msgstr "Nazdar svete!\n"
"""
class MultiRepoTest(ViewTestCase):
"""Test handling of remote changes, conflicts and so on."""
_vcs = "git"
_branch = "master"
_filemask = "po/*.po"
def setUp(self):
super().setUp()
if self._vcs not in VCS_REGISTRY:
raise SkipTest(f"VCS {self._vcs} not available!")
repo = push = self.format_local_path(getattr(self, f"{self._vcs}_repo_path"))
self.component2 = Component.objects.create(
name="Test 2",
slug="test-2",
project=self.project,
repo=repo,
push=push,
vcs=self._vcs,
filemask=self._filemask,
template="",
file_format="po",
repoweb=REPOWEB_URL,
new_base="",
branch=self._branch,
)
self.request = self.get_request()
def push_first(self, propagate=True, newtext="Nazdar svete!\n"):
"""Change and pushes first component."""
if not propagate:
# Disable changes propagating
self.component2.allow_translation_propagation = False
self.component2.save()
unit = self.get_unit()
unit.translate(self.user, [newtext], STATE_TRANSLATED)
self.assertEqual(self.get_translation().stats.translated, 1)
self.component.do_push(self.request)
def push_replace(self, content, mode):
"""Replace content of a po file and pushes it to remote repository."""
# Manually edit po file, adding new unit
translation = self.component.translation_set.get(language_code="cs")
with open(translation.get_filename(), mode) as handle:
handle.write(content)
# Do changes in first repo
with transaction.atomic():
translation.git_commit(self.request.user, "TEST <[email protected]>")
self.assertFalse(translation.needs_commit())
translation.component.do_push(self.request)
def test_propagate(self):
"""Test handling of propagating."""
# Do changes in first repo
self.push_first()
# Verify changes got to the second one
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.translated, 1)
# The text is intentionally duplicated to trigger check
new_text = "Other text text\n"
# Propagate edit
unit = self.get_unit()
self.assertEqual(len(unit.all_checks), 0)
self.assertEqual(len(unit.same_source_units), 1)
unit.translate(self.user, [new_text], STATE_TRANSLATED)
# Verify new content
unit = self.get_unit()
self.assertEqual(unit.target, new_text)
self.assertEqual(len(unit.same_source_units), 1)
other_unit = unit.same_source_units[0]
self.assertEqual(other_unit.target, new_text)
# There should be no checks on both
self.assertEqual(
list(unit.check_set.values_list("check", flat=True)), ["duplicate"]
)
self.assertEqual(
list(other_unit.check_set.values_list("check", flat=True)), ["duplicate"]
)
def test_failed_update(self):
"""Test failed remote update."""
if os.path.exists(self.git_repo_path):
remove_tree(self.git_repo_path)
if os.path.exists(self.mercurial_repo_path):
remove_tree(self.mercurial_repo_path)
if os.path.exists(self.subversion_repo_path):
remove_tree(self.subversion_repo_path)
translation = self.component.translation_set.get(language_code="cs")
self.assertFalse(translation.do_update(self.request))
def test_update(self):
"""Test handling update in case remote has changed."""
# Do changes in first repo
self.push_first(False)
# Test pull
translation = self.component2.translation_set.get(language_code="cs")
translation.invalidate_cache()
self.assertEqual(translation.stats.translated, 0)
translation.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.translated, 1)
def test_rebase(self):
"""Testing of rebase."""
self.component2.merge_style = "rebase"
self.component2.save()
self.test_update()
def test_conflict(self):
"""Test conflict handling."""
# Do changes in first repo
self.push_first(False)
# Do changes in the second repo
translation = self.component2.translation_set.get(language_code="cs")
unit = translation.unit_set.get(source="Hello, world!\n")
unit.translate(self.user, ["Ahoj svete!\n"], STATE_TRANSLATED)
self.assertFalse(translation.do_update(self.request))
self.assertFalse(translation.do_push(self.request))
def test_more_changes(self):
"""Test more string changes in remote repo."""
translation = self.component2.translation_set.get(language_code="cs")
self.push_first(False, "Hello, world!\n")
translation.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.allchecks, 1)
self.push_first(False, "Nazdar svete\n")
translation.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.allchecks, 0)
def test_new_unit(self):
"""Test adding new unit with update."""
self.push_replace(EXTRA_PO, "a")
self.component2.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.all, 5)
def test_deleted_unit(self):
"""Test removing several units from remote repo."""
self.push_replace(MINIMAL_PO, "w")
self.component2.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.all, 1)
def test_deleted_stale_unit(self):
"""Test removing several units from remote repo.
There is no other reference, so full cleanup has to happen.
"""
self.push_replace(MINIMAL_PO, "w")
self.component.delete()
self.component2.do_update(self.request)
translation = self.component2.translation_set.get(language_code="cs")
self.assertEqual(translation.stats.all, 1)
class GitBranchMultiRepoTest(MultiRepoTest):
_vcs = "git"
_branch = "translations"
_filemask = "translations/*.po"
def create_component(self):
return self.create_po_branch()
class MercurialMultiRepoTest(MultiRepoTest):
_vcs = "mercurial"
_branch = "default"
def create_component(self):
return self.create_po_mercurial()
class SubversionMultiRepoTest(MultiRepoTest):
_vcs = "subversion"
def create_component(self):
return self.create_po_svn()
|
from typing import Any
import voluptuous as vol
from homeassistant.const import CONF_ICON, CONF_TYPE, CONF_URL
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import slugify
DOMAIN = "lovelace"
EVENT_LOVELACE_UPDATED = "lovelace_updated"
DEFAULT_ICON = "hass:view-dashboard"
CONF_MODE = "mode"
MODE_YAML = "yaml"
MODE_STORAGE = "storage"
LOVELACE_CONFIG_FILE = "ui-lovelace.yaml"
CONF_RESOURCES = "resources"
CONF_URL_PATH = "url_path"
CONF_RESOURCE_TYPE_WS = "res_type"
RESOURCE_TYPES = ["js", "css", "module", "html"]
RESOURCE_FIELDS = {
CONF_TYPE: vol.In(RESOURCE_TYPES),
CONF_URL: cv.string,
}
RESOURCE_SCHEMA = vol.Schema(RESOURCE_FIELDS)
RESOURCE_CREATE_FIELDS = {
vol.Required(CONF_RESOURCE_TYPE_WS): vol.In(RESOURCE_TYPES),
vol.Required(CONF_URL): cv.string,
}
RESOURCE_UPDATE_FIELDS = {
vol.Optional(CONF_RESOURCE_TYPE_WS): vol.In(RESOURCE_TYPES),
vol.Optional(CONF_URL): cv.string,
}
SERVICE_RELOAD_RESOURCES = "reload_resources"
RESOURCE_RELOAD_SERVICE_SCHEMA = vol.Schema({})
CONF_TITLE = "title"
CONF_REQUIRE_ADMIN = "require_admin"
CONF_SHOW_IN_SIDEBAR = "show_in_sidebar"
DASHBOARD_BASE_CREATE_FIELDS = {
vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
vol.Required(CONF_TITLE): cv.string,
vol.Optional(CONF_SHOW_IN_SIDEBAR, default=True): cv.boolean,
}
DASHBOARD_BASE_UPDATE_FIELDS = {
vol.Optional(CONF_REQUIRE_ADMIN): cv.boolean,
vol.Optional(CONF_ICON): vol.Any(cv.icon, None),
vol.Optional(CONF_TITLE): cv.string,
vol.Optional(CONF_SHOW_IN_SIDEBAR): cv.boolean,
}
STORAGE_DASHBOARD_CREATE_FIELDS = {
**DASHBOARD_BASE_CREATE_FIELDS,
vol.Required(CONF_URL_PATH): cv.string,
# For now "storage" is the only mode we write.
# In the future this can be adjusted to allow other modes.
vol.Optional(CONF_MODE, default=MODE_STORAGE): MODE_STORAGE,
}
STORAGE_DASHBOARD_UPDATE_FIELDS = DASHBOARD_BASE_UPDATE_FIELDS
def url_slug(value: Any) -> str:
"""Validate value is a valid url slug."""
if value is None:
raise vol.Invalid("Slug should not be None")
if "-" not in value:
raise vol.Invalid("Url path needs to contain a hyphen (-)")
str_value = str(value)
slg = slugify(str_value, separator="-")
if str_value == slg:
return str_value
raise vol.Invalid(f"invalid slug {value} (try {slg})")
class ConfigNotFound(HomeAssistantError):
"""When no config available."""
|
from functools import wraps
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from .const import CONF_URL_PATH, DOMAIN, ConfigNotFound
def _handle_errors(func):
"""Handle error with WebSocket calls."""
@wraps(func)
async def send_with_error_handling(hass, connection, msg):
url_path = msg.get(CONF_URL_PATH)
config = hass.data[DOMAIN]["dashboards"].get(url_path)
if config is None:
connection.send_error(
msg["id"], "config_not_found", f"Unknown config specified: {url_path}"
)
return
error = None
try:
result = await func(hass, connection, msg, config)
except ConfigNotFound:
error = "config_not_found", "No config found."
except HomeAssistantError as err:
error = "error", str(err)
if error is not None:
connection.send_error(msg["id"], *error)
return
if msg is not None:
await connection.send_big_result(msg["id"], result)
else:
connection.send_result(msg["id"], result)
return send_with_error_handling
@websocket_api.async_response
@websocket_api.websocket_command({"type": "lovelace/resources"})
async def websocket_lovelace_resources(hass, connection, msg):
"""Send Lovelace UI resources over WebSocket configuration."""
resources = hass.data[DOMAIN]["resources"]
if not resources.loaded:
await resources.async_load()
resources.loaded = True
connection.send_result(msg["id"], resources.async_items())
@websocket_api.async_response
@websocket_api.websocket_command(
{
"type": "lovelace/config",
vol.Optional("force", default=False): bool,
vol.Optional(CONF_URL_PATH): vol.Any(None, cv.string),
}
)
@_handle_errors
async def websocket_lovelace_config(hass, connection, msg, config):
"""Send Lovelace UI config over WebSocket configuration."""
return await config.async_load(msg["force"])
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
"type": "lovelace/config/save",
"config": vol.Any(str, dict),
vol.Optional(CONF_URL_PATH): vol.Any(None, cv.string),
}
)
@_handle_errors
async def websocket_lovelace_save_config(hass, connection, msg, config):
"""Save Lovelace UI configuration."""
await config.async_save(msg["config"])
@websocket_api.require_admin
@websocket_api.async_response
@websocket_api.websocket_command(
{
"type": "lovelace/config/delete",
vol.Optional(CONF_URL_PATH): vol.Any(None, cv.string),
}
)
@_handle_errors
async def websocket_lovelace_delete_config(hass, connection, msg, config):
"""Delete Lovelace UI configuration."""
await config.async_delete()
@websocket_api.websocket_command({"type": "lovelace/dashboards/list"})
@callback
def websocket_lovelace_dashboards(hass, connection, msg):
"""Delete Lovelace UI configuration."""
connection.send_result(
msg["id"],
[
dashboard.config
for dashboard in hass.data[DOMAIN]["dashboards"].values()
if dashboard.config
],
)
|
from typing import Optional
from homematicip.aio.device import (
AsyncFullFlushBlind,
AsyncFullFlushShutter,
AsyncGarageDoorModuleTormatic,
AsyncHoermannDrivesModule,
)
from homematicip.aio.group import AsyncExtendedLinkedShutterGroup
from homematicip.base.enums import DoorCommand, DoorState
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
CoverEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from . import DOMAIN as HMIPC_DOMAIN, HomematicipGenericEntity
from .hap import HomematicipHAP
HMIP_COVER_OPEN = 0
HMIP_COVER_CLOSED = 1
HMIP_SLATS_OPEN = 0
HMIP_SLATS_CLOSED = 1
async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the HomematicIP cover from a config entry."""
hap = hass.data[HMIPC_DOMAIN][config_entry.unique_id]
entities = []
for device in hap.home.devices:
if isinstance(device, AsyncFullFlushBlind):
entities.append(HomematicipCoverSlats(hap, device))
elif isinstance(device, AsyncFullFlushShutter):
entities.append(HomematicipCoverShutter(hap, device))
elif isinstance(
device, (AsyncHoermannDrivesModule, AsyncGarageDoorModuleTormatic)
):
entities.append(HomematicipGarageDoorModule(hap, device))
for group in hap.home.groups:
if isinstance(group, AsyncExtendedLinkedShutterGroup):
entities.append(HomematicipCoverShutterGroup(hap, group))
if entities:
async_add_entities(entities)
class HomematicipCoverShutter(HomematicipGenericEntity, CoverEntity):
"""Representation of the HomematicIP cover shutter."""
@property
def current_cover_position(self) -> Optional[int]:
"""Return current position of cover."""
if self._device.shutterLevel is not None:
return int((1 - self._device.shutterLevel) * 100)
return None
async def async_set_cover_position(self, **kwargs) -> None:
"""Move the cover to a specific position."""
position = kwargs[ATTR_POSITION]
# HmIP cover is closed:1 -> open:0
level = 1 - position / 100.0
await self._device.set_shutter_level(level)
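# Editor's worked example: ATTR_POSITION 75 (mostly open) becomes shutterLevel
# 0.25, and current_cover_position maps 0.25 back to 75.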
@property
def is_closed(self) -> Optional[bool]:
"""Return if the cover is closed."""
if self._device.shutterLevel is not None:
return self._device.shutterLevel == HMIP_COVER_CLOSED
return None
async def async_open_cover(self, **kwargs) -> None:
"""Open the cover."""
await self._device.set_shutter_level(HMIP_COVER_OPEN)
async def async_close_cover(self, **kwargs) -> None:
"""Close the cover."""
await self._device.set_shutter_level(HMIP_COVER_CLOSED)
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the device if in motion."""
await self._device.set_shutter_stop()
class HomematicipCoverSlats(HomematicipCoverShutter, CoverEntity):
"""Representation of the HomematicIP cover slats."""
@property
def current_cover_tilt_position(self) -> Optional[int]:
"""Return current tilt position of cover."""
if self._device.slatsLevel is not None:
return int((1 - self._device.slatsLevel) * 100)
return None
async def async_set_cover_tilt_position(self, **kwargs) -> None:
"""Move the cover to a specific tilt position."""
position = kwargs[ATTR_TILT_POSITION]
# HmIP slats is closed:1 -> open:0
level = 1 - position / 100.0
await self._device.set_slats_level(level)
async def async_open_cover_tilt(self, **kwargs) -> None:
"""Open the slats."""
await self._device.set_slats_level(HMIP_SLATS_OPEN)
async def async_close_cover_tilt(self, **kwargs) -> None:
"""Close the slats."""
await self._device.set_slats_level(HMIP_SLATS_CLOSED)
async def async_stop_cover_tilt(self, **kwargs) -> None:
"""Stop the device if in motion."""
await self._device.set_shutter_stop()
class HomematicipGarageDoorModule(HomematicipGenericEntity, CoverEntity):
"""Representation of the HomematicIP Garage Door Module."""
@property
def current_cover_position(self) -> Optional[int]:
"""Return current position of cover."""
door_state_to_position = {
DoorState.CLOSED: 0,
DoorState.OPEN: 100,
DoorState.VENTILATION_POSITION: 10,
DoorState.POSITION_UNKNOWN: None,
}
return door_state_to_position.get(self._device.doorState)
@property
def is_closed(self) -> Optional[bool]:
"""Return if the cover is closed."""
return self._device.doorState == DoorState.CLOSED
async def async_open_cover(self, **kwargs) -> None:
"""Open the cover."""
await self._device.send_door_command(DoorCommand.OPEN)
async def async_close_cover(self, **kwargs) -> None:
"""Close the cover."""
await self._device.send_door_command(DoorCommand.CLOSE)
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the cover."""
await self._device.send_door_command(DoorCommand.STOP)
class HomematicipCoverShutterGroup(HomematicipCoverSlats, CoverEntity):
"""Representation of the HomematicIP cover shutter group."""
def __init__(self, hap: HomematicipHAP, device, post: str = "ShutterGroup") -> None:
"""Initialize switching group."""
device.modelType = f"HmIP-{post}"
super().__init__(hap, device, post)
|
import diamond.collector
from diamond.collector import str_to_bool
try:
import sensors
sensors # workaround for pyflakes issue #13
except ImportError:
sensors = None
class LMSensorsCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(LMSensorsCollector, self).get_default_config_help()
config_help.update({
'send_zero': 'Send sensor data even when there is no value'
})
return config_help
def get_default_config(self):
"""
Returns default collector settings.
"""
config = super(LMSensorsCollector, self).get_default_config()
config.update({
'path': 'sensors',
'send_zero': False
})
return config
def collect(self):
if sensors is None:
self.log.error('Unable to import module sensors')
return {}
sensors.init()
try:
for chip in sensors.iter_detected_chips():
for feature in chip:
label = feature.label.replace(' ', '-')
value = None
try:
value = feature.get_value()
except Exception:
if str_to_bool(self.config['send_zero']):
value = 0
if value is not None:
self.publish(".".join([str(chip), label]),
value,
precision=2)
finally:
sensors.cleanup()
|
import numpy as np
from ..core import indexing
from ..core.pycompat import integer_types
from ..core.utils import Frozen, FrozenDict, close_on_error, is_dict_like, is_remote_uri
from ..core.variable import Variable
from .common import AbstractDataStore, BackendArray, BackendEntrypoint, robust_getitem
from .store import open_backend_dataset_store
class PydapArrayWrapper(BackendArray):
def __init__(self, array):
self.array = array
@property
def shape(self):
return self.array.shape
@property
def dtype(self):
return self.array.dtype
def __getitem__(self, key):
return indexing.explicit_indexing_adapter(
key, self.shape, indexing.IndexingSupport.BASIC, self._getitem
)
def _getitem(self, key):
# pull the data from the array attribute if possible, to avoid
# downloading coordinate data twice
array = getattr(self.array, "array", self.array)
result = robust_getitem(array, key, catch=ValueError)
# in some cases, pydap doesn't squeeze axes automatically like numpy
axis = tuple(n for n, k in enumerate(key) if isinstance(k, integer_types))
if result.ndim + len(axis) != array.ndim and len(axis) > 0:
result = np.squeeze(result, axis)
return result
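# Editor's illustration (hypothetical shapes): indexing a (10, 20) remote array
# with key (0, slice(None)) should yield shape (20,), but pydap may return
# (1, 20); np.squeeze on axis 0 restores the numpy-style result.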
def _fix_attributes(attributes):
attributes = dict(attributes)
for k in list(attributes):
if k.lower() == "global" or k.lower().endswith("_global"):
# move global attributes to the top level, like the netcdf-C
# DAP client
attributes.update(attributes.pop(k))
elif is_dict_like(attributes[k]):
# Flatten hierarchical attributes to a single level with a
# dot-separated key
attributes.update(
{
f"{k}.{k_child}": v_child
for k_child, v_child in attributes.pop(k).items()
}
)
return attributes
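# Editor's worked example (made-up attribute names):
#   _fix_attributes({"NC_GLOBAL": {"title": "t"}, "foo": {"bar": 1}, "baz": 2})
#   -> {"baz": 2, "title": "t", "foo.bar": 1}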
class PydapDataStore(AbstractDataStore):
"""Store for accessing OpenDAP datasets with pydap.
This store provides an alternative way to access OpenDAP datasets that may
be useful if the netCDF4 library is not available.
"""
def __init__(self, ds):
"""
Parameters
----------
ds : pydap DatasetType
"""
self.ds = ds
@classmethod
def open(cls, url, session=None):
import pydap.client
ds = pydap.client.open_url(url, session=session)
return cls(ds)
def open_store_variable(self, var):
data = indexing.LazilyOuterIndexedArray(PydapArrayWrapper(var))
return Variable(var.dimensions, data, _fix_attributes(var.attributes))
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(self.ds[k])) for k in self.ds.keys()
)
def get_attrs(self):
return Frozen(_fix_attributes(self.ds.attributes))
def get_dimensions(self):
return Frozen(self.ds.dimensions)
def guess_can_open_pydap(store_spec):
return isinstance(store_spec, str) and is_remote_uri(store_spec)
def open_backend_dataset_pydap(
filename_or_obj,
mask_and_scale=True,
decode_times=None,
concat_characters=None,
decode_coords=None,
drop_variables=None,
use_cftime=None,
decode_timedelta=None,
session=None,
):
store = PydapDataStore.open(
filename_or_obj,
session=session,
)
with close_on_error(store):
ds = open_backend_dataset_store(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
use_cftime=use_cftime,
decode_timedelta=decode_timedelta,
)
return ds
pydap_backend = BackendEntrypoint(
open_dataset=open_backend_dataset_pydap, guess_can_open=guess_can_open_pydap
)
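# Editor's usage note: in user code this backend is normally reached through
# xarray's public API rather than directly (the URL below is a placeholder):
#
#     import xarray as xr
#     ds = xr.open_dataset("http://example.com/dodsC/dataset", engine="pydap")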
|
from datetime import timedelta
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.switch import DOMAIN
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a switch."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a switch condition."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on event - test_event1"
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off event - test_event2"
async def test_if_fires_on_for_condition(hass, calls):
"""Test for firing if condition is on with delay."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=10)
point3 = point2 + timedelta(seconds=10)
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = point1
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": {
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": ent1.entity_id,
"type": "is_off",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
("platform", "event.event_type")
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(ent1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 10 secs into the future
mock_utcnow.return_value = point2
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(ent1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 20 secs into the future
mock_utcnow.return_value = point3
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_off event - test_event1"
|
from .devolo_device import DevoloDeviceEntity
class DevoloMultiLevelSwitchDeviceEntity(DevoloDeviceEntity):
"""Representation of a multi level switch device within devolo Home Control. Something like a dimmer or a thermostat."""
def __init__(self, homecontrol, device_instance, element_uid):
"""Initialize a multi level switch within devolo Home Control."""
super().__init__(
homecontrol=homecontrol,
device_instance=device_instance,
element_uid=element_uid,
)
self._multi_level_switch_property = device_instance.multi_level_switch_property[
element_uid
]
self._value = self._multi_level_switch_property.value
|
import unittest
from meld.matchers import myers
class MatchersTests(unittest.TestCase):
def test_basic_matcher(self):
a = list('abcbdefgabcdefg')
b = list('gfabcdefcd')
r = [(0, 2, 3), (4, 5, 3), (10, 8, 2), (15, 10, 0)]
matcher = myers.MyersSequenceMatcher(None, a, b)
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
def test_postprocessing_cleanup(self):
a = list('abcfabgcd')
b = list('afabcgabgcabcd')
r = [(0, 2, 3), (4, 6, 3), (7, 12, 2), (9, 14, 0)]
matcher = myers.MyersSequenceMatcher(None, a, b)
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
def test_inline_matcher(self):
a = 'red, blue, yellow, white'
b = 'black green, hue, white'
r = [(17, 16, 7), (24, 23, 0)]
matcher = myers.InlineMyersSequenceMatcher(None, a, b)
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
def test_sync_point_matcher0(self):
a = list('012a3456c789')
b = list('0a3412b5678')
r = [(0, 0, 1), (3, 1, 3), (6, 7, 2), (9, 9, 2), (12, 11, 0)]
matcher = myers.SyncPointMyersSequenceMatcher(None, a, b)
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
def test_sync_point_matcher2(self):
a = list('012a3456c789')
b = list('0a3412b5678')
r = [(0, 0, 1), (1, 4, 2), (6, 7, 2), (9, 9, 2), (12, 11, 0)]
matcher = myers.SyncPointMyersSequenceMatcher(None, a, b, [(3, 6)])
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
def test_sync_point_matcher3(self):
a = list('012a3456c789')
b = list('02a341b5678')
r = [(0, 0, 1), (2, 1, 1), (3, 2, 3), (9, 9, 2), (12, 11, 0)]
matcher = myers.SyncPointMyersSequenceMatcher(
None, a, b, [(3, 2), (8, 6)])
blocks = matcher.get_matching_blocks()
self.assertEqual(blocks, r)
|
import argparse
import contextlib
import shutil
import sys
from cookiecutter.main import cookiecutter
from paasta_tools.cli.fsm.autosuggest import suggest_smartstack_proxy_port
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import PaastaColors
@contextlib.contextmanager
def make_copyfile_symlink_aware():
"""The reasoning behind this monkeypatch is that cookiecutter doesn't
respect symlinks at all, and at Yelp we use symlinks to reduce duplication
in the soa configs. Maybe cookie-cutter will accept a symlink-aware PR?
"""
orig_copyfile = shutil.copyfile
orig_copymode = shutil.copymode
def symlink_aware_copyfile(*args, **kwargs):
kwargs.setdefault("follow_symlinks", False)
orig_copyfile(*args, **kwargs)
def symlink_aware_copymode(*args, **kwargs):
kwargs.setdefault("follow_symlinks", False)
orig_copymode(*args, **kwargs)
shutil.copyfile = symlink_aware_copyfile
shutil.copymode = symlink_aware_copymode
try:
yield
finally:
shutil.copyfile = orig_copyfile
shutil.copymode = orig_copymode
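# Editor's usage sketch: within the context manager, shutil.copyfile() copies a
# symlink as a symlink instead of following it (the paths below are hypothetical).
#
#     with make_copyfile_symlink_aware():
#         shutil.copyfile("soa/service.yaml", "out/service.yaml")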
def parse_args():
fsm_parser = argparse.ArgumentParser(
"fsm",
description=(
"'paasta fsm' is used to generate example soa-configs, which is useful during initial "
"service creation. Currently 'fsm' generates 'yelp-specific' configuration, but can still "
"be used as an example of a fully working PaaSTA service.\n\n"
"After 'paasta fsm' is run, the operator should inspect the generated boilerplate configuration "
"and adjust it to meet the particular needs of the new service."
),
)
fsm_parser.add_argument(
"-y",
"--yelpsoa-config-root",
dest="yelpsoa_config_root",
default=".",
help=(
"Path to root of yelpsoa-configs checkout\n"
"Defaults to current working directory"
),
)
fsm_parser.set_defaults(command=paasta_fsm)
return fsm_parser.parse_args()
def get_paasta_config(yelpsoa_config_root):
variables = {"proxy_port": suggest_smartstack_proxy_port(yelpsoa_config_root)}
return variables
def write_paasta_config(variables, template, destination):
print("Using cookiecutter template from %s" % template)
with make_copyfile_symlink_aware():
cookiecutter(
template=template,
extra_context=variables,
output_dir=destination,
overwrite_if_exists=True,
no_input=not sys.stdout.isatty(),
)
def paasta_fsm(args):
variables = get_paasta_config(yelpsoa_config_root=args.yelpsoa_config_root)
destination = args.yelpsoa_config_root
paasta_config = load_system_paasta_config()
template = paasta_config.get_fsm_template()
write_paasta_config(variables=variables, template=template, destination=destination)
print(PaastaColors.yellow(" _ _(o)_(o)_ _"))
print(PaastaColors.red(r" ._\`:_ F S M _:' \_,"))
print(PaastaColors.green(r" / (`---'\ `-."))
print(PaastaColors.cyan(" ,-` _) (_,"))
print("With My Noodly Appendage I Have Written Configs!")
print()
print("Customize Them If It Makes You Happy -- http://y/paasta For Details")
print("Remember To Add, Commit, And Push When You're Done:")
print()
def main():
args = parse_args()
paasta_fsm(args)
if __name__ == "__main__":
main()
|
import sys
import os
import re
import tempfile
import inspect
import argparse
import vulture
import qutebrowser.app # pylint: disable=unused-import
from qutebrowser.extensions import loader
from qutebrowser.misc import objects
from qutebrowser.utils import utils, version
from qutebrowser.browser.webkit import rfc6266
# To run the decorators from there
# pylint: disable=unused-import
from qutebrowser.browser.webkit.network import webkitqutescheme
# pylint: enable=unused-import
from qutebrowser.browser import qutescheme
from qutebrowser.config import configtypes
def whitelist_generator(): # noqa: C901
"""Generator which yields lines to add to a vulture whitelist."""
loader.load_components(skip_hooks=True)
# qutebrowser commands
for cmd in objects.commands.values():
yield utils.qualname(cmd.handler)
# pyPEG2 classes
for name, member in inspect.getmembers(rfc6266, inspect.isclass):
for attr in ['grammar', 'regex']:
if hasattr(member, attr):
yield 'qutebrowser.browser.webkit.rfc6266.{}.{}'.format(name,
attr)
# PyQt properties
yield 'qutebrowser.mainwindow.statusbar.bar.StatusBar.color_flags'
yield 'qutebrowser.mainwindow.statusbar.url.UrlText.urltype'
# Not used yet, but soon (or when debugging)
yield 'qutebrowser.utils.debug.log_events'
yield 'qutebrowser.utils.debug.log_signals'
yield 'qutebrowser.utils.debug.qflags_key'
yield 'qutebrowser.utils.qtutils.QtOSError.qt_errno'
yield 'scripts.utils.bg_colors'
yield 'qutebrowser.misc.sql.SqliteErrorCode.CONSTRAINT'
yield 'qutebrowser.misc.throttle.Throttle.set_delay'
# Qt attributes
yield 'PyQt5.QtWebKit.QWebPage.ErrorPageExtensionReturn().baseUrl'
yield 'PyQt5.QtWebKit.QWebPage.ErrorPageExtensionReturn().content'
yield 'PyQt5.QtWebKit.QWebPage.ErrorPageExtensionReturn().encoding'
yield 'PyQt5.QtWebKit.QWebPage.ErrorPageExtensionReturn().fileNames'
yield 'PyQt5.QtWidgets.QStyleOptionViewItem.backgroundColor'
## qute://... handlers
for name in qutescheme._HANDLERS: # pylint: disable=protected-access
name = name.replace('-', '_')
yield 'qutebrowser.browser.qutescheme.qute_' + name
# Other false-positives
yield 'qutebrowser.completion.models.listcategory.ListCategory().lessThan'
yield 'qutebrowser.utils.jinja.Loader.get_source'
yield 'qutebrowser.utils.log.QtWarningFilter.filter'
yield 'qutebrowser.browser.pdfjs.is_available'
yield 'qutebrowser.misc.guiprocess.spawn_output'
yield 'qutebrowser.utils.usertypes.ExitStatus.reserved'
yield 'QEvent.posted'
yield 'log_stack' # from message.py
yield 'propagate' # logging.getLogger('...).propagate = False
# vulture doesn't notice the hasattr() and thus thinks netrc_used is unused
# in NetworkManager.on_authentication_required
yield 'PyQt5.QtNetwork.QNetworkReply.netrc_used'
yield 'qutebrowser.browser.downloads.last_used_directory'
yield 'PaintContext.clip' # from completiondelegate.py
yield 'logging.LogRecord.log_color' # from logging.py
yield 'scripts.utils.use_color' # from asciidoc2html.py
for attr in ['pyeval_output', 'log_clipboard', 'fake_clipboard']:
yield 'qutebrowser.misc.utilcmds.' + attr
for attr in ['fileno', 'truncate', 'closed', 'readable']:
yield 'qutebrowser.utils.qtutils.PyQIODevice.' + attr
for attr in ['msgs', 'priority', 'visit_attribute']:
yield 'scripts.dev.pylint_checkers.config.' + attr
for attr in ['visit_call', 'process_module']:
yield 'scripts.dev.pylint_checkers.modeline.' + attr
for name, _member in inspect.getmembers(configtypes, inspect.isclass):
yield 'qutebrowser.config.configtypes.' + name
yield 'qutebrowser.config.configexc.ConfigErrorDesc.traceback'
yield 'qutebrowser.config.configfiles.ConfigAPI.load_autoconfig'
yield 'types.ModuleType.c' # configfiles:read_config_py
for name in ['configdir', 'datadir']:
yield 'qutebrowser.config.configfiles.ConfigAPI.' + name
yield 'include_aliases'
for attr in ['_get_default_metavar_for_optional',
'_get_default_metavar_for_positional', '_metavar_formatter']:
yield 'scripts.dev.src2asciidoc.UsageFormatter.' + attr
for dist in version.Distribution:
yield 'qutebrowser.utils.version.Distribution.{}'.format(dist.name)
# attrs
yield 'qutebrowser.browser.webkit.network.networkmanager.ProxyId.hostname'
yield 'qutebrowser.command.command.ArgInfo._validate_exclusive'
yield 'scripts.get_coredumpctl_traces.Line.uid'
yield 'scripts.get_coredumpctl_traces.Line.gid'
yield 'scripts.importer.import_moz_places.places.row_factory'
# component hooks
yield 'qutebrowser.components.adblock.on_config_changed'
# used in type comments
yield 'pending_download_type'
yield 'world_id_type'
yield 'ParserDictType'
yield 'qutebrowser.config.configutils.Values._VmapKeyType'
def filter_func(item):
"""Check if a missing function should be filtered or not.
Return:
True if the missing function should be filtered/ignored, False
otherwise.
"""
return bool(re.fullmatch(r'[a-z]+[A-Z][a-zA-Z]+', item.name))
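# Editor's note: the pattern matches lowerCamelCase names, so e.g.
# re.fullmatch(r'[a-z]+[A-Z][a-zA-Z]+', 'keyPressEvent') matches (treated as a Qt
# override and ignored), while 'snake_case_name' does not and is still reported.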
def report(items):
"""Generate a report based on the given vulture.Item's.
Based on vulture.Vulture.report, but we can't use that as we can't set the
properties which get used for the items.
"""
output = []
for item in sorted(items,
key=lambda e: (e.filename.lower(), e.first_lineno)):
output.append(item.get_report())
return output
def run(files):
"""Run vulture over the given files."""
with tempfile.NamedTemporaryFile(mode='w', delete=False) as whitelist_file:
for line in whitelist_generator():
whitelist_file.write(line + '\n')
whitelist_file.close()
vult = vulture.Vulture(verbose=False)
vult.scavenge(files + [whitelist_file.name])
os.remove(whitelist_file.name)
filters = {
'unused_funcs': filter_func,
'unused_props': lambda item: False,
'unused_vars': lambda item: False,
'unused_attrs': lambda item: False,
}
items = []
for attr, func in filters.items():
sub_items = getattr(vult, attr)
for item in sub_items:
filtered = func(item)
if not filtered:
items.append(item)
return report(items)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='*', default=['qutebrowser', 'scripts',
'setup.py'])
args = parser.parse_args()
out = run(args.files)
for line in out:
print(line)
sys.exit(bool(out))
if __name__ == '__main__':
main()
|
import coverage
from coverage.version import _make_url, _make_version
from tests.coveragetest import CoverageTest
class VersionTest(CoverageTest):
"""Tests of version.py"""
run_in_temp_dir = False
def test_version_info(self):
# Make sure we didn't screw up the version_info tuple.
self.assertIsInstance(coverage.version_info, tuple)
self.assertEqual([type(d) for d in coverage.version_info], [int, int, int, str, int])
self.assertIn(coverage.version_info[3], ['alpha', 'beta', 'candidate', 'final'])
def test_make_version(self):
self.assertEqual(_make_version(4, 0, 0, 'alpha', 0), "4.0a0")
self.assertEqual(_make_version(4, 0, 0, 'alpha', 1), "4.0a1")
self.assertEqual(_make_version(4, 0, 0, 'final', 0), "4.0")
self.assertEqual(_make_version(4, 1, 2, 'beta', 3), "4.1.2b3")
self.assertEqual(_make_version(4, 1, 2, 'final', 0), "4.1.2")
self.assertEqual(_make_version(5, 10, 2, 'candidate', 7), "5.10.2rc7")
def test_make_url(self):
self.assertEqual(
_make_url(4, 0, 0, 'final', 0),
"https://coverage.readthedocs.io"
)
self.assertEqual(
_make_url(4, 1, 2, 'beta', 3),
"https://coverage.readthedocs.io/en/coverage-4.1.2b3"
)
|
__docformat__ = "restructuredtext en"
from bisect import insort_left
from six.moves import queue
LOW = 0
MEDIUM = 10
HIGH = 100
PRIORITY = {
'LOW': LOW,
'MEDIUM': MEDIUM,
'HIGH': HIGH,
}
REVERSE_PRIORITY = dict((values, key) for key, values in PRIORITY.items())
class PrioritizedTasksQueue(queue.Queue):
def _init(self, maxsize):
"""Initialize the queue representation"""
self.maxsize = maxsize
# ordered list of task, from the lowest to the highest priority
self.queue = []
def _put(self, item):
"""Put a new item in the queue"""
for i, task in enumerate(self.queue):
# equivalent task
if task == item:
# if new task has a higher priority, remove the one already
# queued so the new priority will be considered
if task < item:
item.merge(task)
del self.queue[i]
break
# else keep it so current order is kept
task.merge(item)
return
insort_left(self.queue, item)
def _get(self):
"""Get an item from the queue"""
return self.queue.pop()
def __iter__(self):
return iter(self.queue)
def remove(self, tid):
"""remove a specific task from the queue"""
# XXX acquire lock
for i, task in enumerate(self):
if task.id == tid:
self.queue.pop(i)
return
raise ValueError('no task with id %s in queue' % tid)
class Task(object):
def __init__(self, tid, priority=LOW):
# task id
self.id = tid
# task priority
self.priority = priority
def __repr__(self):
return '<Task %s @%#x>' % (self.id, id(self))
def __cmp__(self, other):
# Python 2 only; Python 3 ignores __cmp__ and relies on __lt__/__eq__ below.
return cmp(self.priority, other.priority)
def __lt__(self, other):
return self.priority < other.priority
def __eq__(self, other):
return self.id == other.id
__hash__ = object.__hash__
def merge(self, other):
pass
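# Editor's usage sketch (guarded so it only runs as a script, not on import):
# re-putting an equivalent task with a higher priority replaces the queued one.
if __name__ == "__main__":
    q = PrioritizedTasksQueue()
    q.put(Task("t1", LOW))
    q.put(Task("t2", MEDIUM))
    q.put(Task("t1", HIGH))  # same id as the first put, higher priority
    assert q.get().id == "t1"  # the HIGH-priority copy pops first
    assert q.get().id == "t2"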
|
import sys
import base64
import pickle
import dropbox
import keychain
import clipboard
from stashutils import core
DB_SERVICE = "DropBox"
def dropbox_setup(username, stdin, stdout):
"""helper-interface to setup dropbox."""
_stash = core.get_stash()
Text = _stash.text_color # alias
stdout.write(Text("=" * 40 + "\nDropbox-setup\n" + "=" * 25 + "\n", "blue"))
header = "This interface will help you setup the dropbox access"
header += " for '{n}'.".format(n=Text(username, "blue"))
abort = Text("abort", "yellow")
choices = ("I already have an authorization-code", "I dont have an authorizaion-code", abort)
choice = _menu(header, choices, stdin, stdout)
if choice == 2:
raise KeyboardInterrupt("Setup aborted.")
elif choice == 0:
pass
elif choice == 1:
stdout.write("Please read this. After reading, press enter to continue.\n")
text1 = "To allow StaSh access to your dropbox, "
text2 = "you will have to perform the following steps:\n"
stdout.write(text1 + text2)
stdout.write(" 1) Create a dropbox account (if you dont have one yet)\n")
stdout.write(" 2) Upgrade your Account to a dropbox-developer account.\n")
stdout.write(" 3) Create a dropbox-app.\n")
stdout.write(" 4) Generate an access token.\n")
stdout.write(" 5) Enter the access token.\n")
stdout.write(Text("Continue?", "yellow"))
stdin.readline()
while True:
header = "Select action"
choices = ("Register to dropbox", "Go to the developer-page", "proceed", abort)
choice = _menu(header, choices, stdin, stdout)
if choice == 0:
_open_url("https://www.dropbox.com/register")
elif choice == 1:
_open_url("https://developer.dropbox.com")
elif choice == 2:
break
elif choice == 3:
raise KeyboardInterrupt("Setup aborted.")
stdout.write("Enter the access token (leave empty to use clipboard):\n>")
access_token = stdin.readline().strip()
if len(access_token) == 0:
access_token = clipboard.get()
stdout.write("Using clipboard (length={l}).\n".format(l=len(access_token)))
stdout.write("Testing token... ")
try:
db = dropbox.Dropbox(access_token)
db.files_list_folder("")
except (dropbox.exceptions.ApiError, dropbox.exceptions.BadInputError):
sys.stdout.write(Text("Error", "red"))
sys.stdout.write(".\nAuthorization failed! Please try again.\n")
raise KeyboardInterrupt("Setup failed!")
stdout.write(Text("Done", "green"))
stdout.write(".\nSaving... ")
save_dropbox_data(username, access_token)
stdout.write(Text("Done", "green"))
stdout.write(".\n")
return True
def save_dropbox_data(username, access_token):
"""saves dropbox access information for username."""
data = {
"api_version": 2,
"access_token": access_token,
}
dumped = pickle.dumps(data)
encoded = base64.b64encode(dumped)
keychain.set_password(DB_SERVICE, username, encoded)
def load_dropbox_data(username):
"""load dropbox access information for username."""
encoded = keychain.get_password(DB_SERVICE, username)
if encoded is None:
return None
dumped = base64.b64decode(encoded)
raw = pickle.loads(dumped)
return raw
def get_dropbox_client(username, setup=True, stdin=None, stdout=None):
"""
    Check whether a dropbox.dropbox.Dropbox client is available for username.
    If it is, it is returned.
    Otherwise, if setup is True, a command-line setup is shown.
    The setup uses stdin and stdout, both defaulting to sys.stdin and sys.stdout.
If no client was found and setup is False, None will be returned.
"""
if stdout is None:
stdout = sys.stdout
if stdin is None:
stdin = sys.stdin
data = load_dropbox_data(username)
if data is None:
stdout.write("\n")
if not setup:
return None
dropbox_setup(username, stdin, stdout)
data = load_dropbox_data(username)
token = data["access_token"]
dbclient = dropbox.dropbox.Dropbox(token)
return dbclient
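# A minimal usage sketch (the username is hypothetical); this either returns a
# ready dropbox.Dropbox client or, with setup=True, walks the user through the
# interactive setup above:
#
#     client = get_dropbox_client("my-sync-profile")
#     if client is not None:
#         account = client.users_get_current_account()
#         print(account.name.display_name)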
def reset_dropbox(username):
"""resets the dropbox configuration for the user username"""
    try:
        db = get_dropbox_client(username, setup=False)
    except Exception:
        db = None
    if hasattr(db, "auth_token_revoke"):
        try:
            db.auth_token_revoke()
        except Exception:
            pass
keychain.delete_password(DB_SERVICE, username)
def _menu(header, choices, stdin=None, stdout=None):
"""a command-line menu."""
if stdin is None:
stdin = sys.stdin
if stdout is None:
stdout = sys.stdout
assert len(choices) > 0, ValueError("No choices!")
while True:
stdout.write(header + "\n")
for i, n in enumerate(choices):
stdout.write(" {i: >3}: {n}\n".format(i=i, n=n))
stdout.write("n?>")
answer = stdin.readline().strip()
        try:
            answer = int(answer)
            if 0 <= answer < len(choices):
                return answer
        except ValueError:
            pass
        stdout.write("\n" * 20)
def _open_url(url):
"""opens an url"""
_stash = core.get_stash()
_stash("webviewer {u}".format(u=url))
|
from collections import Counter
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
class FeatsFromSentencePiece(FeatsFromSpacyDoc):
def __init__(self, sp, *args, **kwargs):
'''
:param sp: sentencepiece.SentencePieceProcessor
'''
self._sp = sp
super(FeatsFromSentencePiece, self).__init__(*args, **kwargs)
def get_doc_metadata(self, doc):
'''
:param doc: spacy.Doc
        :return: collections.Counter mapping sentencepiece pieces to counts
'''
return Counter(self._sp.encode_as_pieces(str(doc)))
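# A minimal usage sketch (the model path is hypothetical; assumes the
# sentencepiece and spacy packages are installed):
#
#     import spacy
#     import sentencepiece as spm
#     sp = spm.SentencePieceProcessor()
#     sp.load('example.model')
#     feats = FeatsFromSentencePiece(sp)
#     doc = spacy.blank('en')('a short example sentence')
#     feats.get_doc_metadata(doc)  # Counter of sentencepiece pieces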
|
import io
import os
import sys
import re
import platform
import tempfile
import urllib.parse
import unittest.mock
from http.client import HTTPConnection
import pytest
import py.path
import path
import cherrypy
from cherrypy.lib import static
from cherrypy._cpcompat import HTTPSConnection, ntou, tonative
from cherrypy.test import helper
@pytest.fixture
def unicode_filesystem(tmpdir):
_check_unicode_filesystem(tmpdir)
def _check_unicode_filesystem(tmpdir):
filename = tmpdir / ntou('☃', 'utf-8')
tmpl = 'File system encoding ({encoding}) cannot support unicode filenames'
msg = tmpl.format(encoding=sys.getfilesystemencoding())
try:
io.open(str(filename), 'w').close()
except UnicodeEncodeError:
pytest.skip(msg)
def ensure_unicode_filesystem():
"""
    TODO: replace with plain pytest fixtures once webtest.TestCase
no longer implies unittest.
"""
tmpdir = py.path.local(tempfile.mkdtemp())
try:
_check_unicode_filesystem(tmpdir)
finally:
tmpdir.remove()
curdir = path.Path(__file__).dirname()
has_space_filepath = curdir / 'static' / 'has space.html'
bigfile_filepath = curdir / 'static' / 'bigfile.log'
# The file size needs to be big enough such that half the size of it
# won't be socket-buffered (or server-buffered) all in one go. See
# test_file_stream.
MB = 2 ** 20
BIGFILE_SIZE = 32 * MB
class StaticTest(helper.CPWebCase):
files_to_remove = []
@staticmethod
def setup_server():
if not os.path.exists(has_space_filepath):
with open(has_space_filepath, 'wb') as f:
f.write(b'Hello, world\r\n')
needs_bigfile = (
not os.path.exists(bigfile_filepath) or
os.path.getsize(bigfile_filepath) != BIGFILE_SIZE
)
if needs_bigfile:
with open(bigfile_filepath, 'wb') as f:
f.write(b'x' * BIGFILE_SIZE)
class Root:
@cherrypy.expose
@cherrypy.config(**{'response.stream': True})
def bigfile(self):
self.f = static.serve_file(bigfile_filepath)
return self.f
@cherrypy.expose
def tell(self):
if self.f.input.closed:
return ''
return repr(self.f.input.tell()).rstrip('L')
@cherrypy.expose
def fileobj(self):
f = open(os.path.join(curdir, 'style.css'), 'rb')
return static.serve_fileobj(f, content_type='text/css')
@cherrypy.expose
def bytesio(self):
f = io.BytesIO(b'Fee\nfie\nfo\nfum')
return static.serve_fileobj(f, content_type='text/plain')
@cherrypy.expose
def serve_file_utf8_filename(self):
return static.serve_file(
__file__,
disposition='attachment',
name='has_utf-8_character_☃.html')
@cherrypy.expose
def serve_fileobj_utf8_filename(self):
return static.serve_fileobj(
io.BytesIO('☃\nfie\nfo\nfum'.encode('utf-8')),
disposition='attachment',
name='has_utf-8_character_☃.html')
class Static:
@cherrypy.expose
def index(self):
return 'You want the Baron? You can have the Baron!'
@cherrypy.expose
def dynamic(self):
return 'This is a DYNAMIC page'
root = Root()
root.static = Static()
rootconf = {
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'static',
'tools.staticdir.root': curdir,
},
'/static-long': {
'tools.staticdir.on': True,
'tools.staticdir.dir': r'\\?\%s' % curdir,
},
'/style.css': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.join(curdir, 'style.css'),
},
'/docroot': {
'tools.staticdir.on': True,
'tools.staticdir.root': curdir,
'tools.staticdir.dir': 'static',
'tools.staticdir.index': 'index.html',
},
'/error': {
'tools.staticdir.on': True,
'request.show_tracebacks': True,
},
'/404test': {
'tools.staticdir.on': True,
'tools.staticdir.root': curdir,
'tools.staticdir.dir': 'static',
'error_page.404': error_page_404,
}
}
rootApp = cherrypy.Application(root)
rootApp.merge(rootconf)
test_app_conf = {
'/test': {
'tools.staticdir.index': 'index.html',
'tools.staticdir.on': True,
'tools.staticdir.root': curdir,
'tools.staticdir.dir': 'static',
},
}
testApp = cherrypy.Application(Static())
testApp.merge(test_app_conf)
vhost = cherrypy._cpwsgi.VirtualHost(rootApp, {'virt.net': testApp})
cherrypy.tree.graft(vhost)
@classmethod
def teardown_class(cls):
super(cls, cls).teardown_class()
files_to_remove = has_space_filepath, bigfile_filepath
files_to_remove += tuple(cls.files_to_remove)
for file in files_to_remove:
file.remove_p()
def test_static(self):
self.getPage('/static/index.html')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html')
self.assertBody('Hello, world\r\n')
# Using a staticdir.root value in a subdir...
self.getPage('/docroot/index.html')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html')
self.assertBody('Hello, world\r\n')
# Check a filename with spaces in it
self.getPage('/static/has%20space.html')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html')
self.assertBody('Hello, world\r\n')
self.getPage('/style.css')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/css')
# Note: The body should be exactly 'Dummy stylesheet\n', but
# unfortunately some tools such as WinZip sometimes turn \n
# into \r\n on Windows when extracting the CherryPy tarball so
# we just check the content
self.assertMatchesBody('^Dummy stylesheet')
# Check a filename with utf-8 characters in it
ascii_fn = 'has_utf-8_character_.html'
url_quote_fn = 'has_utf-8_character_%E2%98%83.html' # %E2%98%83 == ☃
expected_content_disposition = (
'attachment; filename="{!s}"; filename*=UTF-8\'\'{!s}'.
format(ascii_fn, url_quote_fn)
)
self.getPage('/serve_file_utf8_filename')
self.assertStatus('200 OK')
self.assertHeader('Content-Disposition', expected_content_disposition)
self.getPage('/serve_fileobj_utf8_filename')
self.assertStatus('200 OK')
self.assertHeader('Content-Disposition', expected_content_disposition)
@pytest.mark.skipif(platform.system() != 'Windows', reason='Windows only')
def test_static_longpath(self):
"""Test serving of a file in subdir of a Windows long-path
staticdir."""
self.getPage('/static-long/static/index.html')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html')
self.assertBody('Hello, world\r\n')
def test_fallthrough(self):
# Test that NotFound will then try dynamic handlers (see [878]).
self.getPage('/static/dynamic')
self.assertBody('This is a DYNAMIC page')
# Check a directory via fall-through to dynamic handler.
self.getPage('/static/')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html;charset=utf-8')
self.assertBody('You want the Baron? You can have the Baron!')
def test_index(self):
# Check a directory via "staticdir.index".
self.getPage('/docroot/')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/html')
self.assertBody('Hello, world\r\n')
# The same page should be returned even if redirected.
self.getPage('/docroot')
self.assertStatus(301)
self.assertHeader('Location', '%s/docroot/' % self.base())
self.assertMatchesBody(
"This resource .* <a href=(['\"])%s/docroot/\\1>"
'%s/docroot/</a>.'
% (self.base(), self.base())
)
def test_config_errors(self):
# Check that we get an error if no .file or .dir
self.getPage('/error/thing.html')
self.assertErrorPage(500)
if sys.version_info >= (3, 3):
errmsg = (
r'TypeError: staticdir\(\) missing 2 '
'required positional arguments'
)
else:
errmsg = (
r'TypeError: staticdir\(\) takes at least 2 '
r'(positional )?arguments \(0 given\)'
)
self.assertMatchesBody(errmsg.encode('ascii'))
def test_security(self):
# Test up-level security
self.getPage('/static/../../test/style.css')
self.assertStatus((400, 403))
def test_modif(self):
# Test modified-since on a reasonably-large file
self.getPage('/static/dirback.jpg')
self.assertStatus('200 OK')
lastmod = ''
for k, v in self.headers:
if k == 'Last-Modified':
lastmod = v
ims = ('If-Modified-Since', lastmod)
self.getPage('/static/dirback.jpg', headers=[ims])
self.assertStatus(304)
self.assertNoHeader('Content-Type')
self.assertNoHeader('Content-Length')
self.assertNoHeader('Content-Disposition')
self.assertBody('')
def test_755_vhost(self):
self.getPage('/test/', [('Host', 'virt.net')])
self.assertStatus(200)
self.getPage('/test', [('Host', 'virt.net')])
self.assertStatus(301)
self.assertHeader('Location', self.scheme + '://virt.net/test/')
def test_serve_fileobj(self):
self.getPage('/fileobj')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/css;charset=utf-8')
self.assertMatchesBody('^Dummy stylesheet')
def test_serve_bytesio(self):
self.getPage('/bytesio')
self.assertStatus('200 OK')
self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
self.assertHeader('Content-Length', 14)
self.assertMatchesBody('Fee\nfie\nfo\nfum')
@pytest.mark.xfail(reason='#1475')
def test_file_stream(self):
if cherrypy.server.protocol_version != 'HTTP/1.1':
return self.skip()
self.PROTOCOL = 'HTTP/1.1'
# Make an initial request
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest('GET', '/bigfile', skip_host=True)
conn.putheader('Host', self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method='GET')
response.begin()
self.assertEqual(response.status, 200)
body = b''
remaining = BIGFILE_SIZE
while remaining > 0:
data = response.fp.read(65536)
if not data:
break
body += data
remaining -= len(data)
if self.scheme == 'https':
newconn = HTTPSConnection
else:
newconn = HTTPConnection
s, h, b = helper.webtest.openURL(
b'/tell', headers=[], host=self.HOST, port=self.PORT,
http_conn=newconn)
if not b:
# The file was closed on the server.
tell_position = BIGFILE_SIZE
else:
tell_position = int(b)
read_so_far = len(body)
# It is difficult for us to force the server to only read
# the bytes that we ask for - there are going to be buffers
        # in between.
#
# CherryPy will attempt to write as much data as it can to
# the socket, and we don't have a way to determine what that
# size will be. So we make the following assumption - by
# the time we have read in the entire file on the server,
# we will have at least received half of it. If this is not
# the case, then this is an indicator that either:
# - machines that are running this test are using buffer
# sizes greater than half of BIGFILE_SIZE; or
# - streaming is broken.
#
# At the time of writing, we seem to have encountered
# buffer sizes bigger than 512K, so we've increased
# BIGFILE_SIZE to 4MB and in 2016 to 20MB and then 32MB.
        # This test may keep failing as hardware and OS buffer
        # sizes continue to grow.
if tell_position >= BIGFILE_SIZE:
if read_so_far < (BIGFILE_SIZE / 2):
self.fail(
'The file should have advanced to position %r, but '
'has already advanced to the end of the file. It '
'may not be streamed as intended, or at the wrong '
'chunk size (64k)' % read_so_far)
elif tell_position < read_so_far:
self.fail(
'The file should have advanced to position %r, but has '
'only advanced to position %r. It may not be streamed '
'as intended, or at the wrong chunk size (64k)' %
(read_so_far, tell_position))
if body != b'x' * BIGFILE_SIZE:
self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
(BIGFILE_SIZE, body[:50], len(body)))
conn.close()
def test_file_stream_deadlock(self):
if cherrypy.server.protocol_version != 'HTTP/1.1':
return self.skip()
self.PROTOCOL = 'HTTP/1.1'
# Make an initial request but abort early.
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest('GET', '/bigfile', skip_host=True)
conn.putheader('Host', self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method='GET')
response.begin()
self.assertEqual(response.status, 200)
body = response.fp.read(65536)
if body != b'x' * len(body):
self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
(65536, body[:50], len(body)))
response.close()
conn.close()
# Make a second request, which should fetch the whole file.
self.persistent = False
self.getPage('/bigfile')
if self.body != b'x' * BIGFILE_SIZE:
self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
                      (BIGFILE_SIZE, self.body[:50], len(self.body)))
def test_error_page_with_serve_file(self):
self.getPage('/404test/yunyeen')
self.assertStatus(404)
self.assertInBody("I couldn't find that thing")
@unittest.mock.patch(
'http.client._contains_disallowed_url_pchar_re',
re.compile(r'[\n]'),
create=True,
)
def test_null_bytes(self):
self.getPage('/static/\x00')
self.assertStatus('404 Not Found')
@classmethod
def unicode_file(cls):
filename = ntou('Слава Україні.html', 'utf-8')
filepath = curdir / 'static' / filename
        with filepath.open('w', encoding='utf-8') as strm:
strm.write(ntou('Героям Слава!', 'utf-8'))
cls.files_to_remove.append(filepath)
def test_unicode(self):
ensure_unicode_filesystem()
self.unicode_file()
url = ntou('/static/Слава Україні.html', 'utf-8')
# quote function requires str
url = tonative(url, 'utf-8')
url = urllib.parse.quote(url)
self.getPage(url)
expected = ntou('Героям Слава!', 'utf-8')
self.assertInBody(expected)
def error_page_404(status, message, traceback, version):
path = os.path.join(curdir, 'static', '404.html')
return static.serve_file(path, content_type='text/html')
|
import numpy as np
import unittest
from chainer import testing
from chainercv.links.model.ssd import resize_with_random_interpolation
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
@unittest.skipUnless(_cv2_available, 'cv2 is not installed')
class TestResizeWithRandomInterpolation(unittest.TestCase):
def test_resize_color(self):
img = np.random.uniform(size=(3, 24, 32))
out = resize_with_random_interpolation(img, size=(32, 64))
self.assertEqual(out.shape, (3, 32, 64))
def test_resize_grayscale(self):
img = np.random.uniform(size=(1, 24, 32))
out = resize_with_random_interpolation(img, size=(32, 64))
self.assertEqual(out.shape, (1, 32, 64))
testing.run_module(__name__, __file__)
|
from __future__ import division
import chainer
import chainer.functions as F
from chainercv.links import Conv2DBNActiv
from chainercv.links import SeparableConv2DBNActiv
class SeparableASPP(chainer.Chain):
"""Atrous Spatial Pyramid Pooling with Separable Convolution.
average pooling with FC layer
1x1 Convolution
in --> Separable Convolution(k=12) --> concat --> 1x1 Convolution
Separable Convolution(k=24)
Separable Convolution(k=36)
Args:
in_channels (int): Number of channels of input arrays.
out_channels (int): Number of channels of output arrays.
        dilate_list (tuple of ints): Tuple of dilation factors.
            The length of this tuple must be 3.
        bn_kwargs (dict): Keyword arguments passed to initialize the batch
normalization layers of :class:`chainercv.links.Conv2DBNActiv` and
:class:`chainercv.links.SeparableConv2DBNActiv`.
"""
def __init__(self, in_channels, out_channels,
dilate_list=(12, 24, 36), bn_kwargs={}):
super(SeparableASPP, self).__init__()
with self.init_scope():
self.image_pooling_conv = Conv2DBNActiv(
in_channels, out_channels, 1, bn_kwargs=bn_kwargs)
self.conv1x1 = Conv2DBNActiv(
in_channels, out_channels, 1, bn_kwargs=bn_kwargs)
self.atrous1 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[0], dilate_list[0], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.atrous2 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[1], dilate_list[1], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.atrous3 = SeparableConv2DBNActiv(
in_channels, out_channels, 3, 1,
dilate_list[2], dilate_list[2], nobias=True,
dw_activ=F.relu, pw_activ=F.relu, bn_kwargs=bn_kwargs)
self.proj = Conv2DBNActiv(
out_channels * 5, out_channels, 1, bn_kwargs=bn_kwargs)
def image_pooling(self, x):
_, _, H, W = x.shape
x = F.average(x, axis=(2, 3), keepdims=True)
x = self.image_pooling_conv(x)
B, C, _, _ = x.shape
x = F.broadcast_to(x, (B, C, H, W))
return x
def forward(self, x):
h = []
h.append(self.image_pooling(x))
h.append(self.conv1x1(x))
h.append(self.atrous1(x))
h.append(self.atrous2(x))
h.append(self.atrous3(x))
h = F.concat(h, axis=1)
h = self.proj(h)
h = F.dropout(h)
return h
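# A minimal usage sketch (channel counts and the input shape are illustrative
# only; assumes chainer, chainercv and numpy are installed):
if __name__ == '__main__':
    import numpy as np
    aspp = SeparableASPP(in_channels=64, out_channels=32)
    x = np.zeros((1, 64, 17, 17), dtype=np.float32)
    with chainer.using_config('train', False):
        y = aspp(x)
    print(y.shape)  # (1, 32, 17, 17)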
|
from __future__ import absolute_import, unicode_literals
import os
import pytest
from tmuxp import exc
from tmuxp.exc import BeforeLoadScriptError, BeforeLoadScriptNotExists
from tmuxp.util import run_before_script
from . import fixtures_dir
def test_raise_BeforeLoadScriptNotExists_if_not_exists():
script_file = os.path.join(fixtures_dir, 'script_noexists.sh')
with pytest.raises(BeforeLoadScriptNotExists):
run_before_script(script_file)
with pytest.raises(OSError):
run_before_script(script_file)
def test_raise_BeforeLoadScriptError_if_retcode():
script_file = os.path.join(fixtures_dir, 'script_failed.sh')
with pytest.raises(BeforeLoadScriptError):
run_before_script(script_file)
def test_return_stdout_if_ok(capsys):
script_file = os.path.join(fixtures_dir, 'script_complete.sh')
run_before_script(script_file)
out, err = capsys.readouterr()
assert 'hello' in out
def test_beforeload_returncode():
script_file = os.path.join(fixtures_dir, 'script_failed.sh')
with pytest.raises(exc.BeforeLoadScriptError) as excinfo:
run_before_script(script_file)
assert excinfo.match(r'113')
def test_beforeload_returns_stderr_messages():
script_file = os.path.join(fixtures_dir, 'script_failed.sh')
with pytest.raises(exc.BeforeLoadScriptError) as excinfo:
run_before_script(script_file)
assert excinfo.match(r'failed with returncode')
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def vgg_arg_scope(weight_decay=0.0005):
"""Defines the VGG arg scope.
Args:
weight_decay: The l2 regularization coefficient.
Returns:
An arg_scope.
"""
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc:
return arg_sc
def vgg_a(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_a',
fc_conv_padding='VALID'):
"""Oxford Net VGG 11-Layers version A Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs should
      be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output. Otherwise,
the output prediction map will be (input / 32) - 6 in case of 'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_a', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 1, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 1, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 2, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 2, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
      # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_a.default_image_size = 224
def vgg_16(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_16',
fc_conv_padding='VALID'):
"""Oxford Net VGG 16-Layers version D Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs should
      be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output. Otherwise,
the output prediction map will be (input / 32) - 6 in case of 'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_16', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 3, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 3, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
      # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_16.default_image_size = 224
def vgg_19(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='vgg_19',
fc_conv_padding='VALID'):
"""Oxford Net VGG 19-Layers version E Example.
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
    spatial_squeeze: whether or not the spatial dimensions of the outputs should
      be squeezed. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
fc_conv_padding: the type of padding to use for the fully connected layer
that is implemented as a convolutional layer. Use 'SAME' padding if you
are applying the network in a fully convolutional manner and want to
get a prediction map downsampled by a factor of 32 as an output. Otherwise,
the output prediction map will be (input / 32) - 6 in case of 'VALID' padding.
Returns:
the last op containing the log predictions and end_points dict.
"""
with tf.variable_scope(scope, 'vgg_19', [inputs]) as sc:
end_points_collection = sc.name + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with slim.arg_scope([slim.conv2d, slim.fully_connected, slim.max_pool2d],
outputs_collections=end_points_collection):
net = slim.repeat(inputs, 2, slim.conv2d, 64, [3, 3], scope='conv1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.repeat(net, 2, slim.conv2d, 128, [3, 3], scope='conv2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.repeat(net, 4, slim.conv2d, 256, [3, 3], scope='conv3')
net = slim.max_pool2d(net, [2, 2], scope='pool3')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv4')
net = slim.max_pool2d(net, [2, 2], scope='pool4')
net = slim.repeat(net, 4, slim.conv2d, 512, [3, 3], scope='conv5')
net = slim.max_pool2d(net, [2, 2], scope='pool5')
# Use conv2d instead of fully_connected layers.
net = slim.conv2d(net, 4096, [7, 7], padding=fc_conv_padding, scope='fc6')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout6')
net = slim.conv2d(net, 4096, [1, 1], scope='fc7')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout7')
net = slim.conv2d(net, num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
scope='fc8')
      # Convert end_points_collection into an end_point dict.
end_points = slim.utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = tf.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points
vgg_19.default_image_size = 224
# Alias
vgg_d = vgg_16
vgg_e = vgg_19
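# A minimal usage sketch (TensorFlow 1.x graph mode with tf.contrib.slim; the
# number of classes is illustrative only):
if __name__ == '__main__':
  size = vgg_16.default_image_size
  images = tf.placeholder(tf.float32, [None, size, size, 3])
  with slim.arg_scope(vgg_arg_scope()):
    logits, end_points = vgg_16(images, num_classes=1000, is_training=False)
  print(logits.get_shape().as_list())  # [None, 1000]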
|
from logilab.common.decorators import monkeypatch
from sphinx.ext import autodoc
class DocstringOnlyModuleDocumenter(autodoc.ModuleDocumenter):
objtype = 'docstring'
def format_signature(self):
pass
def add_directive_header(self, sig):
pass
def document_members(self, all_members=False):
pass
def resolve_name(self, modname, parents, path, base):
if modname is not None:
return modname, parents + [base]
return (path or '') + base, []
#autodoc.add_documenter(DocstringOnlyModuleDocumenter)
def setup(app):
app.add_autodocumenter(DocstringOnlyModuleDocumenter)
from sphinx.ext.autodoc import (ViewList, Options, AutodocReporter, nodes,
assemble_option_dict, nested_parse_with_titles)
@monkeypatch(autodoc.AutoDirective)
def run(self):
self.filename_set = set() # a set of dependent filenames
self.reporter = self.state.document.reporter
self.env = self.state.document.settings.env
self.warnings = []
self.result = ViewList()
# find out what documenter to call
objtype = self.name[4:]
doc_class = self._registry[objtype]
# process the options with the selected documenter's option_spec
self.genopt = Options(assemble_option_dict(
self.options.items(), doc_class.option_spec))
# generate the output
documenter = doc_class(self, self.arguments[0])
documenter.generate(more_content=self.content)
if not self.result:
return self.warnings
# record all filenames as dependencies -- this will at least
# partially make automatic invalidation possible
for fn in self.filename_set:
self.env.note_dependency(fn)
# use a custom reporter that correctly assigns lines to source
# filename/description and lineno
old_reporter = self.state.memo.reporter
self.state.memo.reporter = AutodocReporter(self.result,
self.state.memo.reporter)
if self.name in ('automodule', 'autodocstring'):
node = nodes.section()
# necessary so that the child nodes get the right source/line set
node.document = self.state.document
nested_parse_with_titles(self.state, self.result, node)
else:
node = nodes.paragraph()
node.document = self.state.document
self.state.nested_parse(self.result, 0, node)
self.state.memo.reporter = old_reporter
return self.warnings + node.children
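# A minimal wiring sketch (the extension module path below is hypothetical): add
# this module to the Sphinx ``extensions`` list in conf.py, e.g.
# ``extensions = ['mypkg.sphinxext.autodocstring']``; setup() then registers the
# documenter so ReST sources can use a ``.. autodocstring:: some.module``
# directive, which the patched run() above parses like ``automodule``.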
|
import voluptuous as vol
from homeassistant.components.homeassistant.triggers import event as event_trigger
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers import config_validation as cv
from .const import DEVICE_ID, DOMAIN, EVENT_TAG_SCANNED, TAG_ID
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): DOMAIN,
vol.Required(TAG_ID): cv.string,
vol.Optional(DEVICE_ID): cv.string,
}
)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for tag_scanned events based on configuration."""
tag_id = config.get(TAG_ID)
device_id = config.get(DEVICE_ID)
event_data = {TAG_ID: tag_id}
if device_id:
event_data[DEVICE_ID] = device_id
event_config = {
event_trigger.CONF_PLATFORM: "event",
event_trigger.CONF_EVENT_TYPE: EVENT_TAG_SCANNED,
event_trigger.CONF_EVENT_DATA: event_data,
}
event_config = event_trigger.TRIGGER_SCHEMA(event_config)
return await event_trigger.async_attach_trigger(
hass, event_config, action, automation_info, platform_type=DOMAIN
)
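# A minimal sketch of a trigger config this schema accepts (the ids are made up;
# the actual string keys come from .const):
#
#     TRIGGER_SCHEMA({CONF_PLATFORM: DOMAIN, TAG_ID: "abc123", DEVICE_ID: "kitchen-reader"})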
|
import keras
def make_keras_optimizer_picklable():
"""
Fix https://github.com/NTMC-Community/MatchZoo/issues/726.
This function changes how keras behaves, use with caution.
"""
def __getstate__(self):
return keras.optimizers.serialize(self)
def __setstate__(self, state):
optimizer = keras.optimizers.deserialize(state)
self.__dict__ = optimizer.__dict__
cls = keras.optimizers.Optimizer
cls.__getstate__ = __getstate__
cls.__setstate__ = __setstate__
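# A minimal usage sketch (the optimizer choice is illustrative; assumes the
# standard keras.optimizers module):
if __name__ == '__main__':
    import pickle
    make_keras_optimizer_picklable()
    optimizer = keras.optimizers.Adam()
    restored = pickle.loads(pickle.dumps(optimizer))
    print(type(restored).__name__)  # Adam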
|
from unittest import mock
import pkg_resources
import pytest
from xarray.backends import common, plugins
def dummy_open_dataset_args(filename_or_obj, *args):
pass
def dummy_open_dataset_kwargs(filename_or_obj, **kwargs):
pass
def dummy_open_dataset(filename_or_obj, *, decoder):
pass
dummy_cfgrib = common.BackendEntrypoint(dummy_open_dataset)
@pytest.fixture
def dummy_duplicated_entrypoints():
specs = [
"engine1 = xarray.tests.test_plugins:backend_1",
"engine1 = xarray.tests.test_plugins:backend_2",
"engine2 = xarray.tests.test_plugins:backend_1",
"engine2 = xarray.tests.test_plugins:backend_2",
]
eps = [pkg_resources.EntryPoint.parse(spec) for spec in specs]
return eps
@pytest.mark.filterwarnings("ignore:Found")
def test_remove_duplicates(dummy_duplicated_entrypoints):
with pytest.warns(RuntimeWarning):
entrypoints = plugins.remove_duplicates(dummy_duplicated_entrypoints)
assert len(entrypoints) == 2
def test_remove_duplicates_warnings(dummy_duplicated_entrypoints):
with pytest.warns(RuntimeWarning) as record:
_ = plugins.remove_duplicates(dummy_duplicated_entrypoints)
assert len(record) == 2
message0 = str(record[0].message)
message1 = str(record[1].message)
assert "entrypoints" in message0
assert "entrypoints" in message1
@mock.patch("pkg_resources.EntryPoint.load", mock.MagicMock(return_value=None))
def test_create_engines_dict():
specs = [
"engine1 = xarray.tests.test_plugins:backend_1",
"engine2 = xarray.tests.test_plugins:backend_2",
]
entrypoints = [pkg_resources.EntryPoint.parse(spec) for spec in specs]
engines = plugins.create_engines_dict(entrypoints)
assert len(engines) == 2
assert engines.keys() == set(("engine1", "engine2"))
def test_set_missing_parameters():
backend_1 = common.BackendEntrypoint(dummy_open_dataset)
backend_2 = common.BackendEntrypoint(dummy_open_dataset, ("filename_or_obj",))
engines = {"engine_1": backend_1, "engine_2": backend_2}
plugins.set_missing_parameters(engines)
assert len(engines) == 2
engine_1 = engines["engine_1"]
assert engine_1.open_dataset_parameters == ("filename_or_obj", "decoder")
engine_2 = engines["engine_2"]
assert engine_2.open_dataset_parameters == ("filename_or_obj",)
def test_set_missing_parameters_raise_error():
backend = common.BackendEntrypoint(dummy_open_dataset_args)
with pytest.raises(TypeError):
plugins.set_missing_parameters({"engine": backend})
backend = common.BackendEntrypoint(
dummy_open_dataset_args, ("filename_or_obj", "decoder")
)
plugins.set_missing_parameters({"engine": backend})
backend = common.BackendEntrypoint(dummy_open_dataset_kwargs)
with pytest.raises(TypeError):
plugins.set_missing_parameters({"engine": backend})
backend = plugins.BackendEntrypoint(
dummy_open_dataset_kwargs, ("filename_or_obj", "decoder")
)
plugins.set_missing_parameters({"engine": backend})
@mock.patch("pkg_resources.EntryPoint.load", mock.MagicMock(return_value=dummy_cfgrib))
def test_build_engines():
dummy_cfgrib_pkg_entrypoint = pkg_resources.EntryPoint.parse(
"cfgrib = xarray.tests.test_plugins:backend_1"
)
backend_entrypoints = plugins.build_engines([dummy_cfgrib_pkg_entrypoint])
assert backend_entrypoints["cfgrib"] is dummy_cfgrib
assert backend_entrypoints["cfgrib"].open_dataset_parameters == (
"filename_or_obj",
"decoder",
)
|
import json
import diamond.collector
try:
import docker
except ImportError:
docker = None
class DockerCollector(diamond.collector.Collector):
METRICS = {
# memory stats
"memory_stats.stats.total_rss": "RSS_byte",
"memory_stats.stats.total_cache": "cache_byte",
"memory_stats.stats.total_swap": "swap_byte",
"memory_stats.stats.total_pgpgin": "pagein_count",
"memory_stats.stats.total_pgpgout": "pageout_count",
# cpu stats
"cpu_stats.cpu_usage.total_usage": "cpu.total",
"cpu_stats.cpu_usage.usage_in_kernelmode": "cpu.kernelmode",
"cpu_stats.cpu_usage.usage_in_usermode": "cpu.usermode",
"cpu_stats.system_cpu_usage": "cpu.system",
}
def get_default_config_help(self):
return super(DockerCollector, self).get_default_config_help()
def get_default_config(self):
config = super(DockerCollector, self).get_default_config()
config.update({
'path': 'docker'
})
return config
def get_value(self, path, dictionary):
keys = path.split(".")
cur = dictionary
for key in keys:
if not isinstance(cur, dict):
raise Exception("metric '{}' does not exist".format(path))
cur = cur.get(key)
if cur is None:
break
return cur
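    # For example, with stat = {"memory_stats": {"stats": {"total_rss": 123}}},
    # get_value("memory_stats.stats.total_rss", stat) returns 123; a key missing
    # anywhere along the dotted path yields None, while hitting a non-dict value
    # before the path is exhausted raises an Exception.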
def collect(self):
        if docker is None:
            self.log.error('Unable to import docker')
            return
# Collect info
results = {}
client = docker.Client(version='auto')
# Top level stats
running_containers = client.containers()
results['containers_running_count'] = (
len(running_containers), 'GAUGE')
all_containers = client.containers(all=True)
results['containers_stopped_count'] = (
len(all_containers) - len(running_containers), 'GAUGE')
images_count = len(set(client.images(quiet=True)))
results['images_count'] = (images_count, 'GAUGE')
dangling_images_count = len(set(client.images(
quiet=True, all=True, filters={'dangling': True})))
results['images_dangling_count'] = (dangling_images_count, 'GAUGE')
# Collect memory and cpu stats
for container in running_containers:
name = "containers." + "".join(container['Names'][0][1:])
s = client.stats(container["Id"])
            stat = json.loads(next(s))
for path in self.METRICS:
val = self.get_value(path, stat)
if val is not None:
metric_key = ".".join([name, self.METRICS.get(path)])
results[metric_key] = (val, 'GAUGE')
s.close()
for name in sorted(results.keys()):
(value, metric_type) = results[name]
self.publish(name, value, metric_type=metric_type)
|
import pytest
from PyQt5.QtWidgets import QWidget
from qutebrowser.browser import inspector
from qutebrowser.misc import miscwidgets
class FakeInspector(inspector.AbstractWebInspector):
def __init__(self,
inspector_widget: QWidget,
splitter: miscwidgets.InspectorSplitter,
win_id: int,
parent: QWidget = None) -> None:
super().__init__(splitter, win_id, parent)
self._set_widget(inspector_widget)
self._inspected_page = None
self.needs_recreate = False
def inspect(self, page):
self._inspected_page = page
def _needs_recreate(self):
return self.needs_recreate
@pytest.fixture
def webview_widget(blue_widget):
return blue_widget
@pytest.fixture
def inspector_widget(red_widget):
return red_widget
@pytest.fixture
def splitter(qtbot, webview_widget):
splitter = miscwidgets.InspectorSplitter(
win_id=0, main_webview=webview_widget)
qtbot.add_widget(splitter)
return splitter
@pytest.fixture
def fake_inspector(qtbot, splitter, inspector_widget,
state_config, mode_manager):
insp = FakeInspector(inspector_widget=inspector_widget,
splitter=splitter,
win_id=0)
qtbot.add_widget(insp)
return insp
@pytest.mark.parametrize('position, splitter_count, window_visible', [
(inspector.Position.window, 1, True),
(inspector.Position.left, 2, False),
(inspector.Position.top, 2, False),
])
def test_set_position(position, splitter_count, window_visible,
fake_inspector, splitter):
fake_inspector.set_position(position)
assert splitter.count() == splitter_count
assert (fake_inspector.isWindow() and
fake_inspector.isVisible()) == window_visible
def test_toggle_window(fake_inspector):
fake_inspector.set_position(inspector.Position.window)
for visible in [True, False, True]:
assert (fake_inspector.isWindow() and
fake_inspector.isVisible()) == visible
fake_inspector.toggle()
def test_toggle_docked(fake_inspector, splitter, inspector_widget):
fake_inspector.set_position(inspector.Position.right)
splitter.show()
for visible in [True, False, True]:
assert inspector_widget.isVisible() == visible
fake_inspector.toggle()
def test_implicit_toggling(fake_inspector, splitter, inspector_widget):
fake_inspector.set_position(inspector.Position.right)
splitter.show()
assert inspector_widget.isVisible()
fake_inspector.set_position(None)
assert not inspector_widget.isVisible()
def test_position_saving(fake_inspector, state_config):
assert 'position' not in state_config['inspector']
fake_inspector.set_position(inspector.Position.left)
assert state_config['inspector']['position'] == 'left'
@pytest.mark.parametrize('config_value, expected', [
(None, inspector.Position.right),
('top', inspector.Position.top),
])
def test_position_loading(config_value, expected,
fake_inspector, state_config):
if config_value is None:
assert 'position' not in state_config['inspector']
else:
state_config['inspector']['position'] = config_value
fake_inspector.set_position(None)
assert fake_inspector._position == expected
@pytest.mark.parametrize('hidden_again', [True, False])
@pytest.mark.parametrize('needs_recreate', [True, False])
def test_detach_after_toggling(hidden_again, needs_recreate,
fake_inspector, inspector_widget, splitter,
qtbot):
"""Make sure we can still detach into a window after showing inline."""
fake_inspector.set_position(inspector.Position.right)
splitter.show()
assert inspector_widget.isVisible()
if hidden_again:
fake_inspector.toggle()
assert not inspector_widget.isVisible()
if needs_recreate:
fake_inspector.needs_recreate = True
with qtbot.waitSignal(fake_inspector.recreate):
fake_inspector.set_position(inspector.Position.window)
else:
with qtbot.assertNotEmitted(fake_inspector.recreate):
fake_inspector.set_position(inspector.Position.window)
assert fake_inspector.isVisible() and fake_inspector.isWindow()
|
import asyncio
import functools
import weakref
from collections import defaultdict
import mock
import pytest
from paasta_tools.async_utils import async_ttl_cache
@pytest.mark.asyncio
async def test_async_ttl_cache_hit():
return_values = iter(range(10))
@async_ttl_cache(ttl=None)
async def range_coroutine():
return next(return_values)
assert await range_coroutine() == await range_coroutine()
@pytest.mark.asyncio
async def test_async_ttl_cache_miss():
return_values = iter(range(10))
@async_ttl_cache(ttl=0)
async def range_coroutine():
return next(return_values)
assert await range_coroutine() != await range_coroutine()
@pytest.mark.asyncio
async def test_async_ttl_cache_doesnt_cache_failures():
flaky_error_raiser = mock.Mock(side_effect=[Exception, None])
@async_ttl_cache(ttl=None)
async def flaky_coroutine():
return flaky_error_raiser()
with pytest.raises(Exception):
await flaky_coroutine()
# if we were caching failures, this would fail
assert await flaky_coroutine() is None
class DataHolder:
def __init__(self, value):
self.value = value
@pytest.mark.asyncio
async def test_async_ttl_cache_returns_in_flight_future():
return_values = iter(range(10))
event = asyncio.Event()
condition = asyncio.Condition()
num_waiting_coroutines = DataHolder(value=0)
# Wait until we have enough coroutines waiting to return a result. This
# ensures that dependent coroutines have a chance to get a future out of
# the cache
@async_ttl_cache(ttl=0)
async def range_coroutine():
await event.wait()
return next(return_values)
# Wait until we have enough coroutines waiting on range_coroutine, then
# wake range_coroutine
async def event_setter():
async with condition:
while num_waiting_coroutines.value != 2:
await condition.wait()
event.set()
# Keep track of how many waiting range_coroutines we have to ensure both
# have had a chance to get the in-flight future out of the cache. This has
# to be separate from range_coroutine since we only end up with one
# invocation of that method due to caching. It also has to be separate
# from event_setter to ensure that the event is not set until both
# coroutines are waiting.
async def cache_waiter():
async with condition:
num_waiting_coroutines.value += 1
condition.notify_all()
return await range_coroutine()
event_setter_future = asyncio.ensure_future(event_setter())
future1 = asyncio.ensure_future(cache_waiter())
future2 = asyncio.ensure_future(cache_waiter())
await asyncio.wait([event_setter_future, future1, future2])
assert future1.result() == future2.result() == 0
@pytest.mark.asyncio
async def test_async_ttl_cache_dont_overwrite_new_cache_entry():
"""Make sure that we don't overwrite a new cache entry that was placed
while we were waiting to handle the result of a previously cached future
"""
range_continue_event = asyncio.Event()
update_cache_event = asyncio.Event()
return_values = iter(range(10))
# Wait until awaiter has had a chance to get the in-flight future out of
# the cache, then signal to the cache_updater to replace the cached future
# before returning. Because cache_updater is signalled first, it will
# replace the previously cached future before async_ttl_cache decides
    # whether to save the result of that future in the cache
async def range_coroutine():
await range_continue_event.wait()
update_cache_event.set()
return next(return_values)
range_coroutine_future = asyncio.ensure_future(range_coroutine())
cache_key = functools._make_key((), {}, typed=False)
cache = {cache_key: (range_coroutine_future, float("Inf"))}
cached_range_coroutine = async_ttl_cache(cache=cache, ttl=0)(range_coroutine)
new_range_coroutine_future = asyncio.ensure_future(range_coroutine())
async def awaiter():
range_continue_event.set()
await cached_range_coroutine()
async def cache_updater():
await update_cache_event.wait()
cache[cache_key] = (new_range_coroutine_future, float("Inf"))
await asyncio.gather(awaiter(), cache_updater())
assert cache[cache_key] == (new_range_coroutine_future, float("Inf"))
@pytest.mark.asyncio
async def test_async_ttl_cache_recover_if_cache_entry_removed():
"""Ensure we handle the case where we encounter an exception in the cached
    future but another coroutine awaiting the same future ran first and already
deleted the cache entry"""
range_continue_event = asyncio.Event()
num_awaiters_awaiting = DataHolder(value=0)
class TestException(Exception):
pass
async def range_coroutine():
await range_continue_event.wait()
raise TestException
range_coroutine_future = asyncio.ensure_future(range_coroutine())
cache_key = functools._make_key((), {}, typed=False)
cache = {cache_key: (range_coroutine_future, float("Inf"))}
cached_range_coroutine = async_ttl_cache(cache=cache, ttl=0)(range_coroutine)
async def awaiter():
num_awaiters_awaiting.value += 1
if num_awaiters_awaiting.value == 2:
range_continue_event.set()
try:
await cached_range_coroutine()
except TestException:
pass
# should not raise a KeyError!
await asyncio.gather(awaiter(), awaiter())
@pytest.mark.asyncio
async def test_async_ttl_cache_for_class_members_doesnt_leak_mem():
"""Ensure that we aren't leaking memory"""
x = 42
instance_caches = defaultdict(dict)
class TestClass:
@async_ttl_cache(ttl=None, cleanup_self=True, cache=instance_caches)
async def f(self):
return x
o1 = TestClass()
w1 = weakref.ref(o1)
assert w1() is not None
assert await o1.f() == x
assert len(instance_caches) == 1
assert list(instance_caches.keys())[0]() == o1
del o1
assert len(instance_caches) == 0
assert w1() is None
o2, o3, o4 = TestClass(), TestClass(), TestClass()
assert await o2.f() == x
assert await o3.f() == x
assert await o4.f() == x
assert len(instance_caches) == 3
del o2, o4
assert len(instance_caches) == 1
del o3
assert len(instance_caches) == 0
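# A minimal usage sketch of the decorator under test (the ttl value and the
# coroutine body are illustrative only):
#
#     @async_ttl_cache(ttl=60)
#     async def fetch(key):
#         ...  # expensive I/O; repeated awaits with the same args within 60s
#              # reuse the cached result instead of re-running the coroutine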
|