import ipaddress
import logging
import re
from vilfo import Client as VilfoClient
from vilfo.exceptions import (
AuthenticationException as VilfoAuthenticationException,
VilfoException,
)
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_ID, CONF_MAC
from .const import DOMAIN # pylint:disable=unused-import
from .const import ROUTER_DEFAULT_HOST
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=ROUTER_DEFAULT_HOST): str,
vol.Required(CONF_ACCESS_TOKEN, default=""): str,
}
)
RESULT_SUCCESS = "success"
RESULT_CANNOT_CONNECT = "cannot_connect"
RESULT_INVALID_AUTH = "invalid_auth"
def host_valid(host):
"""Return True if hostname or IP address is valid."""
try:
if ipaddress.ip_address(host).version in (4, 6):
return True
except ValueError:
disallowed = re.compile(r"[^a-zA-Z\d\-]")
return all(x and not disallowed.search(x) for x in host.split("."))
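# Illustrative behaviour of host_valid (a sketch added for clarity, not part
# of the original module):
#   host_valid("192.168.0.1")  -> True  (valid IPv4 address)
#   host_valid("admin.router") -> True  (dot-separated labels of letters,
#                                        digits and hyphens)
#   host_valid("bad_host!")    -> False (underscore and "!" are disallowed)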
def _try_connect_and_fetch_basic_info(host, token):
"""Attempt to connect and call the ping endpoint and, if successful, fetch basic information."""
# Perform the ping. This doesn't validate authentication.
controller = VilfoClient(host=host, token=token)
result = {"type": None, "data": {}}
try:
controller.ping()
except VilfoException:
result["type"] = RESULT_CANNOT_CONNECT
result["data"] = CannotConnect
return result
# Perform a call that requires authentication.
try:
controller.get_board_information()
except VilfoAuthenticationException:
result["type"] = RESULT_INVALID_AUTH
result["data"] = InvalidAuth
return result
if controller.mac:
result["data"][CONF_ID] = controller.mac
result["data"][CONF_MAC] = controller.mac
else:
result["data"][CONF_ID] = host
result["data"][CONF_MAC] = None
result["type"] = RESULT_SUCCESS
return result
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
# Validate the host before doing anything else.
if not host_valid(data[CONF_HOST]):
raise InvalidHost
config = {}
result = await hass.async_add_executor_job(
_try_connect_and_fetch_basic_info, data[CONF_HOST], data[CONF_ACCESS_TOKEN]
)
if result["type"] != RESULT_SUCCESS:
raise result["data"]
# Return some info we want to store in the config entry.
result_data = result["data"]
config["title"] = f"{data[CONF_HOST]}"
config[CONF_MAC] = result_data[CONF_MAC]
config[CONF_HOST] = data[CONF_HOST]
config[CONF_ID] = result_data[CONF_ID]
return config
class DomainConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Vilfo Router."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except InvalidHost:
errors[CONF_HOST] = "wrong_host"
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Unexpected exception: %s", err)
errors["base"] = "unknown"
else:
await self.async_set_unique_id(info[CONF_ID])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
class InvalidHost(exceptions.HomeAssistantError):
"""Error to indicate that hostname/IP address is invalid."""
|
class DistanceMeasureBase(object):
@staticmethod
def distances(fixed_x, fixed_y, x_vec, y_vec):
'''
:param fixed_x: float
:param fixed_y: float
:param x_vec: np.array[float]
:param y_vec: np.array[float]
:return: np.array[float]
'''
raise NotImplementedError()
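# A minimal concrete implementation of the interface above, added as an
# illustration; the class name and the plain-NumPy approach are assumptions,
# not part of the original module.
import numpy as np


class EuclideanDistance(DistanceMeasureBase):
    @staticmethod
    def distances(fixed_x, fixed_y, x_vec, y_vec):
        '''Return the Euclidean distance from (fixed_x, fixed_y) to every
        point given by (x_vec, y_vec).'''
        return np.sqrt((x_vec - fixed_x) ** 2 + (y_vec - fixed_y) ** 2)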
|
import argparse
import logging
import re
import subprocess
def pip_show(package_name, packages=None):
packages = [] if packages is None else packages
if package_name in packages:
return # avoid checking the same package twice if multiple packages depend on it.
packages.append(package_name)
result = subprocess.run(['pip', 'show', package_name], stdout=subprocess.PIPE)
if result.returncode != 0:
logging.error("pip show %s failed", package_name)
show_stdout = result.stdout.decode("utf-8")
print(package_name + "==" + get_version(show_stdout))
for dependency in get_dependencies(show_stdout):
pip_show(dependency, packages=packages)
def get_version(show_stdout):
for line in show_stdout.split("\n"):
m = re.match(r"^Version:\s(?P<version>.+)$", line)
if m:
return m.group('version')
return "not found"
def get_dependencies(show_stdout):
for line in show_stdout.split("\n"):
m = re.match(r"^Requires:\s(?P<requires>.+)$", line)
if m:
return m.group('requires').split(', ')
return []
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('package', type=str, help='package name')
args = parser.parse_args()
pip_show(args.package)
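# Example invocation (a sketch; assumes this script is saved as pip_show.py
# and that the requested package is installed in the active environment):
#
#   $ python pip_show.py <package>
#   <package>==<installed version>
#   <dependency>==<installed version>
#   ...
#
# The script walks the dependency tree through repeated "pip show" calls and
# prints each package at most once in "name==version" form.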
|
from collections import deque
from datetime import datetime, timedelta
import logging
import voluptuous as vol
from homeassistant.components.recorder.models import States
from homeassistant.components.recorder.util import execute, session_scope
from homeassistant.const import (
ATTR_TEMPERATURE,
ATTR_UNIT_OF_MEASUREMENT,
CONDUCTIVITY,
CONF_SENSORS,
LIGHT_LUX,
PERCENTAGE,
STATE_OK,
STATE_PROBLEM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_change_event
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "plant"
READING_BATTERY = "battery"
READING_TEMPERATURE = ATTR_TEMPERATURE
READING_MOISTURE = "moisture"
READING_CONDUCTIVITY = "conductivity"
READING_BRIGHTNESS = "brightness"
ATTR_PROBLEM = "problem"
ATTR_SENSORS = "sensors"
PROBLEM_NONE = "none"
ATTR_MAX_BRIGHTNESS_HISTORY = "max_brightness"
# This attribute holds a dict of units (one per reading) rather than a single
# value, so it gets its own literal to avoid confusion.
ATTR_DICT_OF_UNITS_OF_MEASUREMENT = "unit_of_measurement_dict"
CONF_MIN_BATTERY_LEVEL = f"min_{READING_BATTERY}"
CONF_MIN_TEMPERATURE = f"min_{READING_TEMPERATURE}"
CONF_MAX_TEMPERATURE = f"max_{READING_TEMPERATURE}"
CONF_MIN_MOISTURE = f"min_{READING_MOISTURE}"
CONF_MAX_MOISTURE = f"max_{READING_MOISTURE}"
CONF_MIN_CONDUCTIVITY = f"min_{READING_CONDUCTIVITY}"
CONF_MAX_CONDUCTIVITY = f"max_{READING_CONDUCTIVITY}"
CONF_MIN_BRIGHTNESS = f"min_{READING_BRIGHTNESS}"
CONF_MAX_BRIGHTNESS = f"max_{READING_BRIGHTNESS}"
CONF_CHECK_DAYS = "check_days"
CONF_SENSOR_BATTERY_LEVEL = READING_BATTERY
CONF_SENSOR_MOISTURE = READING_MOISTURE
CONF_SENSOR_CONDUCTIVITY = READING_CONDUCTIVITY
CONF_SENSOR_TEMPERATURE = READING_TEMPERATURE
CONF_SENSOR_BRIGHTNESS = READING_BRIGHTNESS
DEFAULT_MIN_BATTERY_LEVEL = 20
DEFAULT_MIN_MOISTURE = 20
DEFAULT_MAX_MOISTURE = 60
DEFAULT_MIN_CONDUCTIVITY = 500
DEFAULT_MAX_CONDUCTIVITY = 3000
DEFAULT_CHECK_DAYS = 3
SCHEMA_SENSORS = vol.Schema(
{
vol.Optional(CONF_SENSOR_BATTERY_LEVEL): cv.entity_id,
vol.Optional(CONF_SENSOR_MOISTURE): cv.entity_id,
vol.Optional(CONF_SENSOR_CONDUCTIVITY): cv.entity_id,
vol.Optional(CONF_SENSOR_TEMPERATURE): cv.entity_id,
vol.Optional(CONF_SENSOR_BRIGHTNESS): cv.entity_id,
}
)
PLANT_SCHEMA = vol.Schema(
{
vol.Required(CONF_SENSORS): vol.Schema(SCHEMA_SENSORS),
vol.Optional(
CONF_MIN_BATTERY_LEVEL, default=DEFAULT_MIN_BATTERY_LEVEL
): cv.positive_int,
vol.Optional(CONF_MIN_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_MIN_MOISTURE, default=DEFAULT_MIN_MOISTURE): cv.positive_int,
vol.Optional(CONF_MAX_MOISTURE, default=DEFAULT_MAX_MOISTURE): cv.positive_int,
vol.Optional(
CONF_MIN_CONDUCTIVITY, default=DEFAULT_MIN_CONDUCTIVITY
): cv.positive_int,
vol.Optional(
CONF_MAX_CONDUCTIVITY, default=DEFAULT_MAX_CONDUCTIVITY
): cv.positive_int,
vol.Optional(CONF_MIN_BRIGHTNESS): cv.positive_int,
vol.Optional(CONF_MAX_BRIGHTNESS): cv.positive_int,
vol.Optional(CONF_CHECK_DAYS, default=DEFAULT_CHECK_DAYS): cv.positive_int,
}
)
DOMAIN = "plant"
CONFIG_SCHEMA = vol.Schema({DOMAIN: {cv.string: PLANT_SCHEMA}}, extra=vol.ALLOW_EXTRA)
# Flag for enabling/disabling the loading of the history from the database.
# This feature is turned off right now as its tests are not 100% stable.
ENABLE_LOAD_HISTORY = False
async def async_setup(hass, config):
"""Set up the Plant component."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
entities = []
for plant_name, plant_config in config[DOMAIN].items():
_LOGGER.info("Added plant %s", plant_name)
entity = Plant(plant_name, plant_config)
entities.append(entity)
await component.async_add_entities(entities)
return True
class Plant(Entity):
"""Plant monitors the well-being of a plant.
It also checks the measurements against
configurable min and max values.
"""
READINGS = {
READING_BATTERY: {
ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE,
"min": CONF_MIN_BATTERY_LEVEL,
},
READING_TEMPERATURE: {
ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS,
"min": CONF_MIN_TEMPERATURE,
"max": CONF_MAX_TEMPERATURE,
},
READING_MOISTURE: {
ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE,
"min": CONF_MIN_MOISTURE,
"max": CONF_MAX_MOISTURE,
},
READING_CONDUCTIVITY: {
ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY,
"min": CONF_MIN_CONDUCTIVITY,
"max": CONF_MAX_CONDUCTIVITY,
},
READING_BRIGHTNESS: {
ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX,
"min": CONF_MIN_BRIGHTNESS,
"max": CONF_MAX_BRIGHTNESS,
},
}
def __init__(self, name, config):
"""Initialize the Plant component."""
self._config = config
self._sensormap = {}
self._readingmap = {}
self._unit_of_measurement = {}
for reading, entity_id in config["sensors"].items():
self._sensormap[entity_id] = reading
self._readingmap[reading] = entity_id
self._state = None
self._name = name
self._battery = None
self._moisture = None
self._conductivity = None
self._temperature = None
self._brightness = None
self._problems = PROBLEM_NONE
self._conf_check_days = DEFAULT_CHECK_DAYS # default check interval in days
if CONF_CHECK_DAYS in self._config:
self._conf_check_days = self._config[CONF_CHECK_DAYS]
self._brightness_history = DailyHistory(self._conf_check_days)
@callback
def _state_changed_event(self, event):
"""Sensor state change event."""
self.state_changed(event.data.get("entity_id"), event.data.get("new_state"))
@callback
def state_changed(self, entity_id, new_state):
"""Update the sensor status."""
if new_state is None:
return
value = new_state.state
_LOGGER.debug("Received callback from %s with value %s", entity_id, value)
if value == STATE_UNKNOWN:
return
reading = self._sensormap[entity_id]
if reading == READING_MOISTURE:
if value != STATE_UNAVAILABLE:
value = int(float(value))
self._moisture = value
elif reading == READING_BATTERY:
if value != STATE_UNAVAILABLE:
value = int(float(value))
self._battery = value
elif reading == READING_TEMPERATURE:
if value != STATE_UNAVAILABLE:
value = float(value)
self._temperature = value
elif reading == READING_CONDUCTIVITY:
if value != STATE_UNAVAILABLE:
value = int(float(value))
self._conductivity = value
elif reading == READING_BRIGHTNESS:
if value != STATE_UNAVAILABLE:
value = int(float(value))
self._brightness = value
self._brightness_history.add_measurement(
self._brightness, new_state.last_updated
)
else:
raise HomeAssistantError(
f"Unknown reading from sensor {entity_id}: {value}"
)
if ATTR_UNIT_OF_MEASUREMENT in new_state.attributes:
self._unit_of_measurement[reading] = new_state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT
)
self._update_state()
def _update_state(self):
"""Update the state of the class based sensor data."""
result = []
for sensor_name in self._sensormap.values():
params = self.READINGS[sensor_name]
value = getattr(self, f"_{sensor_name}")
if value is not None:
if value == STATE_UNAVAILABLE:
result.append(f"{sensor_name} unavailable")
else:
if sensor_name == READING_BRIGHTNESS:
result.append(
self._check_min(
sensor_name, self._brightness_history.max, params
)
)
else:
result.append(self._check_min(sensor_name, value, params))
result.append(self._check_max(sensor_name, value, params))
result = [r for r in result if r is not None]
if result:
self._state = STATE_PROBLEM
self._problems = ", ".join(result)
else:
self._state = STATE_OK
self._problems = PROBLEM_NONE
_LOGGER.debug("New data processed")
self.async_write_ha_state()
def _check_min(self, sensor_name, value, params):
"""If configured, check the value against the defined minimum value."""
if "min" in params and params["min"] in self._config:
min_value = self._config[params["min"]]
if value < min_value:
return f"{sensor_name} low"
def _check_max(self, sensor_name, value, params):
"""If configured, check the value against the defined maximum value."""
if "max" in params and params["max"] in self._config:
max_value = self._config[params["max"]]
if value > max_value:
return f"{sensor_name} high"
return None
async def async_added_to_hass(self):
"""After being added to hass, load from history."""
if ENABLE_LOAD_HISTORY and "recorder" in self.hass.config.components:
# only use the database if it's configured
await self.hass.async_add_executor_job(self._load_history_from_db)
self.async_write_ha_state()
async_track_state_change_event(
self.hass, list(self._sensormap), self._state_changed_event
)
for entity_id in self._sensormap:
state = self.hass.states.get(entity_id)
if state is not None:
self.state_changed(entity_id, state)
def _load_history_from_db(self):
"""Load the history of the brightness values from the database.
This only needs to be done once during startup.
"""
start_date = datetime.now() - timedelta(days=self._conf_check_days)
entity_id = self._readingmap.get(READING_BRIGHTNESS)
if entity_id is None:
_LOGGER.debug(
"Not reading the history from the database as "
"there is no brightness sensor configured"
)
return
_LOGGER.debug("Initializing values for %s from the database", self._name)
with session_scope(hass=self.hass) as session:
query = (
session.query(States)
.filter(
(States.entity_id == entity_id.lower())
& (States.last_updated > start_date)
)
.order_by(States.last_updated.asc())
)
states = execute(query, to_native=True, validate_entity_ids=False)
for state in states:
# filter out all None, NaN and "unknown" states
# only keep real values
try:
self._brightness_history.add_measurement(
int(state.state), state.last_updated
)
except ValueError:
pass
_LOGGER.debug("Initializing from database completed")
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def state_attributes(self):
"""Return the attributes of the entity.
Provide the individual measurements from the
sensor in the attributes of the device.
"""
attrib = {
ATTR_PROBLEM: self._problems,
ATTR_SENSORS: self._readingmap,
ATTR_DICT_OF_UNITS_OF_MEASUREMENT: self._unit_of_measurement,
}
for reading in self._sensormap.values():
attrib[reading] = getattr(self, f"_{reading}")
if self._brightness_history.max is not None:
attrib[ATTR_MAX_BRIGHTNESS_HISTORY] = self._brightness_history.max
return attrib
class DailyHistory:
"""Stores one measurement per day for a maximum number of days.
At the moment only the maximum value per day is kept.
"""
def __init__(self, max_length):
"""Create new DailyHistory with a maximum length of the history."""
self.max_length = max_length
self._days = None
self._max_dict = {}
self.max = None
def add_measurement(self, value, timestamp=None):
"""Add a new measurement for a certain day."""
day = (timestamp or datetime.now()).date()
if not isinstance(value, (int, float)):
return
if self._days is None:
self._days = deque()
self._add_day(day, value)
else:
current_day = self._days[-1]
if day == current_day:
self._max_dict[day] = max(value, self._max_dict[day])
elif day > current_day:
self._add_day(day, value)
else:
_LOGGER.warning("Received old measurement, not storing it")
self.max = max(self._max_dict.values())
def _add_day(self, day, value):
"""Add a new day to the history.
Deletes the oldest day, if the queue becomes too long.
"""
if len(self._days) == self.max_length:
oldest = self._days.popleft()
del self._max_dict[oldest]
self._days.append(day)
if not isinstance(value, (int, float)):
return
self._max_dict[day] = value
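# A minimal illustrative configuration for this component (a sketch; the
# plant name and sensor entity IDs below are placeholders, not defaults):
#
#   plant:
#     my_plant:
#       sensors:
#         moisture: sensor.my_plant_moisture
#         battery: sensor.my_plant_battery
#       min_moisture: 20
#       max_moisture: 60
#       check_days: 3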
|
from flask import Flask, jsonify
from flasgger import Swagger
from flasgger.utils import swag_from
app = Flask(__name__)
app.config['SWAGGER'] = {
'title': 'Colors API',
'uiversion': 2
}
swag = Swagger(app)
colors_spec = {
"tags": [
"colors"
],
"parameters": [
{
"name": "palette",
"in": "path",
"type": "string",
"enum": [
"all",
"rgb",
"cmyk"
],
"required": True,
"default": "all",
"description": "Which palette to filter?"
}
],
"operationId": "get_colors",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"security": {
"colors_auth": [
"write:colors",
"read:colors"
]
},
"schemes": [
"http",
"https"
],
"deprecated": False,
"externalDocs": {
"description": "Project repository",
"url": "http://github.com/rochacbruno/flasgger"
},
"definitions": {
"Palette": {
"type": "object",
"properties": {
"palette_name": {
"type": "array",
"items": {
"$ref": "#/definitions/Color"
}
}
}
},
"Color": {
"type": "string"
}
},
"responses": {
"200": {
"description": "A list of colors (may be filtered by palette)",
"schema": {
"$ref": "#/definitions/Palette"
},
"examples": {
"rgb": [
"red",
"green",
"blue"
]
}
}
}
}
@app.route('/colors/<palette>/')
@swag_from(colors_spec)
def colors(palette):
"""
Example using a dictionary as specification
This is the description
You can also set 'summary' and 'description' in
specs_dict
---
# values here override the specs dict
deprecated: true
"""
all_colors = {
'cmyk': ['cyan', 'magenta', 'yellow', 'black'],
'rgb': ['red', 'green', 'blue']
}
if palette == 'all':
result = all_colors
else:
result = {palette: all_colors.get(palette)}
return jsonify(result)
if __name__ == "__main__":
app.run(debug=True)
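# Example request against the running development server (a sketch; host and
# port assume Flask's defaults):
#
#   $ curl http://127.0.0.1:5000/colors/rgb/
#   {"rgb": ["red", "green", "blue"]}
#
# The interactive Swagger UI generated from colors_spec is served by flasgger
# (at /apidocs/ by default in recent versions).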
|
import unittest
import torch
from transformers import AdamW
class TestTransformers(unittest.TestCase):
def assertListAlmostEqual(self, list1, list2, tol):
self.assertEqual(len(list1), len(list2))
for a, b in zip(list1, list2):
self.assertAlmostEqual(a, b, delta=tol)
def test_adam_w(self):
w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True)
target = torch.tensor([0.4, 0.2, -0.5])
criterion = torch.nn.MSELoss()
# No warmup, constant schedule, no gradient clipping
optimizer = AdamW(params=[w], lr=2e-1, weight_decay=0.0)
for _ in range(100):
loss = criterion(w, target)
loss.backward()
optimizer.step()
w.grad.detach_() # No zero_grad() function on simple tensors. We do it ourselves.
w.grad.zero_()
self.assertListAlmostEqual(w.tolist(), [0.4, 0.2, -0.5], tol=1e-2)
|
import bisect
from typing import Dict, Optional
from gi.repository import Gdk, GdkPixbuf, GObject, Gtk
from meld.conf import _
from meld.const import ActionMode, ChunkAction
from meld.settings import get_meld_settings
from meld.style import get_common_theme
from meld.ui.gtkcompat import get_style
class ActionIcons:
#: Fixed size of the renderer. Ideally this would be font-dependent and
#: would adjust to other textview attributes, but that's both quite
#: difficult and not necessarily desirable.
pixbuf_height = 16
icon_cache: Dict[str, GdkPixbuf.Pixbuf] = {}
icon_name_prefix = 'meld-change'
@classmethod
def load(cls, icon_name: str):
icon = cls.icon_cache.get(icon_name)
if not icon:
icon_theme = Gtk.IconTheme.get_default()
icon = icon_theme.load_icon(
f'{cls.icon_name_prefix}-{icon_name}', cls.pixbuf_height, 0)
cls.icon_cache[icon_name] = icon
return icon
class ActionGutter(Gtk.DrawingArea):
__gtype_name__ = 'ActionGutter'
action_mode = GObject.Property(
type=int,
nick='Action mode for chunk change actions',
default=ActionMode.Replace,
)
@GObject.Property(
type=object,
nick='List of diff chunks for display',
)
def chunks(self):
return self._chunks
@chunks.setter
def chunks_set(self, chunks):
self._chunks = chunks
self.chunk_starts = [c.start_a for c in chunks]
@GObject.Property(
type=Gtk.IconLookupFlags,
nick='Which direction should directional changes appear to go',
flags=(
GObject.ParamFlags.READABLE |
GObject.ParamFlags.WRITABLE |
GObject.ParamFlags.CONSTRUCT_ONLY
),
default=Gtk.IconLookupFlags.DIR_LTR,
)
def icon_direction(self):
return self._icon_direction
@icon_direction.setter
def icon_direction_set(self, direction: Gtk.IconLookupFlags):
if direction not in (
Gtk.IconLookupFlags.DIR_LTR, Gtk.IconLookupFlags.DIR_RTL):
raise ValueError('Invalid icon direction {}'.format(direction))
replace_icons = {
Gtk.IconLookupFlags.DIR_LTR: 'apply-right',
Gtk.IconLookupFlags.DIR_RTL: 'apply-left',
}
self.action_map = {
ActionMode.Replace: ActionIcons.load(replace_icons[direction]),
ActionMode.Delete: ActionIcons.load('delete'),
ActionMode.Insert: ActionIcons.load('copy'),
}
self._icon_direction = direction
_source_view: Gtk.TextView
_source_editable_connect_id: int = 0
@GObject.Property(
type=Gtk.TextView,
nick='Text view for which actions are displayed',
default=None,
)
def source_view(self):
return self._source_view
@source_view.setter
def source_view_setter(self, view: Gtk.TextView):
if self._source_editable_connect_id:
self._source_view.disconnect(self._source_editable_connect_id)
self._source_editable_connect_id = view.connect(
'notify::editable', lambda *args: self.queue_draw())
self._source_view = view
self.queue_draw()
_target_view: Gtk.TextView
_target_editable_connect_id: int = 0
@GObject.Property(
type=Gtk.TextView,
nick='Text view to which actions are directed',
default=None,
)
def target_view(self):
return self._target_view
@target_view.setter
def target_view_setter(self, view: Gtk.TextView):
if self._target_editable_connect_id:
self._target_view.disconnect(self._target_editable_connect_id)
self._target_editable_connect_id = view.connect(
'notify::editable', lambda *args: self.queue_draw())
self._target_view = view
self.queue_draw()
@GObject.Signal
def chunk_action_activated(
self,
action: str, # String-ified ChunkAction
from_view: Gtk.TextView,
to_view: Gtk.TextView,
chunk: object,
) -> None:
...
def __init__(self):
super().__init__()
# Object-type defaults
self.chunks = []
self.action_map = {}
# State for "button" implementation
self.buttons = []
self.pointer_chunk = None
self.pressed_chunk = None
def on_setting_changed(self, settings, key):
if key == 'style-scheme':
self.fill_colors, self.line_colors = get_common_theme()
alpha = self.fill_colors['current-chunk-highlight'].alpha
self.chunk_highlights = {
state: Gdk.RGBA(*[alpha + c * (1.0 - alpha) for c in colour])
for state, colour in self.fill_colors.items()
}
def do_realize(self):
self.set_events(
Gdk.EventMask.LEAVE_NOTIFY_MASK |
Gdk.EventMask.POINTER_MOTION_MASK |
Gdk.EventMask.BUTTON_PRESS_MASK |
Gdk.EventMask.BUTTON_RELEASE_MASK
)
self.connect('notify::action-mode', lambda *args: self.queue_draw())
meld_settings = get_meld_settings()
meld_settings.connect('changed', self.on_setting_changed)
self.on_setting_changed(meld_settings, 'style-scheme')
return Gtk.DrawingArea.do_realize(self)
def do_motion_notify_event(self, event):
# This is the simplest button/intersection implementation in
# the world, but it basically works for our purposes.
for button in self.buttons:
x1, y1, x2, y2, chunk = button
# Check y first; it's more likely to be out of range
if y1 <= event.y <= y2 and x1 <= event.x <= x2:
new_pointer_chunk = chunk
break
else:
new_pointer_chunk = None
if new_pointer_chunk != self.pointer_chunk:
self.pointer_chunk = new_pointer_chunk
self.queue_draw()
def do_leave_notify_event(self, event):
if self.pointer_chunk:
self.pointer_chunk = None
self.queue_draw()
def do_button_press_event(self, event):
if self.pointer_chunk:
self.pressed_chunk = self.pointer_chunk
return Gtk.DrawingArea.do_button_press_event(self, event)
def do_button_release_event(self, event):
if self.pointer_chunk and self.pointer_chunk == self.pressed_chunk:
self.activate(self.pressed_chunk)
self.pressed_chunk = None
return Gtk.DrawingArea.do_button_release_event(self, event)
def _action_on_chunk(self, action: ChunkAction, chunk):
self.chunk_action_activated.emit(
action.value, self.source_view, self.target_view, chunk)
def activate(self, chunk):
action = self._classify_change_actions(chunk)
# FIXME: When fully transitioned to GAction, we should see
# whether we can do this by getting the container's action
# group and activating the actions directly instead.
if action == ActionMode.Replace:
self._action_on_chunk(ChunkAction.replace, chunk)
elif action == ActionMode.Delete:
self._action_on_chunk(ChunkAction.delete, chunk)
elif action == ActionMode.Insert:
copy_menu = self._make_copy_menu(chunk)
copy_menu.popup_at_pointer(None)
def _make_copy_menu(self, chunk):
copy_menu = Gtk.Menu()
copy_up = Gtk.MenuItem.new_with_mnemonic(_('Copy _up'))
copy_down = Gtk.MenuItem.new_with_mnemonic(_('Copy _down'))
copy_menu.append(copy_up)
copy_menu.append(copy_down)
copy_menu.show_all()
def copy_chunk(widget, action):
self._action_on_chunk(action, chunk)
copy_up.connect('activate', copy_chunk, ChunkAction.copy_up)
copy_down.connect('activate', copy_chunk, ChunkAction.copy_down)
return copy_menu
def get_chunk_range(self, start_y, end_y):
start_line = self.source_view.get_line_num_for_y(start_y)
end_line = self.source_view.get_line_num_for_y(end_y)
start_idx = bisect.bisect(self.chunk_starts, start_line)
end_idx = bisect.bisect(self.chunk_starts, end_line)
if start_idx > 0 and start_line <= self.chunks[start_idx - 1].end_a:
start_idx -= 1
return self.chunks[start_idx:end_idx]
def do_draw(self, context):
view = self.source_view
if not view or not view.get_realized():
return
self.buttons = []
width = self.get_allocated_width()
height = self.get_allocated_height()
style_context = self.get_style_context()
Gtk.render_background(style_context, context, 0, 0, width, height)
buf = view.get_buffer()
context.save()
context.set_line_width(1.0)
# Get our linked view's visible offset, get our vertical offset
# against our view (e.g., for info bars at the top of the view)
# and translate our context to match.
view_y_start = view.get_visible_rect().y
view_y_offset = view.translate_coordinates(self, 0, 0)[1]
gutter_y_translate = view_y_offset - view_y_start
context.translate(0, gutter_y_translate)
button_x = 1
button_width = width - 2
for chunk in self.get_chunk_range(view_y_start, view_y_start + height):
change_type, start_line, end_line, *_unused = chunk
rect_y = view.get_y_for_line_num(start_line)
rect_height = max(
0, view.get_y_for_line_num(end_line) - rect_y - 1)
# Draw our rectangle outside x bounds, so we don't get
# vertical lines. Fill first, over-fill with a highlight
# if in the focused chunk, and then stroke the border.
context.rectangle(-0.5, rect_y + 0.5, width + 1, rect_height)
if start_line != end_line:
context.set_source_rgba(*self.fill_colors[change_type])
context.fill_preserve()
if view.current_chunk_check(chunk):
highlight = self.fill_colors['current-chunk-highlight']
context.set_source_rgba(*highlight)
context.fill_preserve()
context.set_source_rgba(*self.line_colors[change_type])
context.stroke()
# Button rendering and tracking
action = self._classify_change_actions(chunk)
if action is None:
continue
it = buf.get_iter_at_line(start_line)
button_y, button_height = view.get_line_yrange(it)
button_y += 1
button_height -= 2
button_style_context = get_style(None, 'button.flat.image-button')
if chunk == self.pointer_chunk:
button_style_context.set_state(Gtk.StateFlags.PRELIGHT)
Gtk.render_background(
button_style_context, context, button_x, button_y,
button_width, button_height)
Gtk.render_frame(
button_style_context, context, button_x, button_y,
button_width, button_height)
# TODO: Ideally we'd do this in a pre-render step of some
# kind, but I'm having trouble figuring out what that would
# look like.
self.buttons.append(
(
button_x,
button_y + gutter_y_translate,
button_x + button_width,
button_y + gutter_y_translate + button_height,
chunk,
)
)
pixbuf = self.action_map.get(action)
icon_x = button_x + (button_width - pixbuf.props.width) // 2
icon_y = button_y + (button_height - pixbuf.props.height) // 2
Gtk.render_icon(
button_style_context, context, pixbuf, icon_x, icon_y)
context.restore()
def _classify_change_actions(self, change) -> Optional[ActionMode]:
"""Classify possible actions for the given change
Returns the action that can be performed given the content and
context of the change.
"""
source_editable = self.source_view.get_editable()
target_editable = self.target_view.get_editable()
if not source_editable and not target_editable:
return None
# Reclassify conflict changes, since we treat them the same as a
# normal two-way change as far as actions are concerned
change_type = change[0]
if change_type == 'conflict':
if change[1] == change[2]:
change_type = 'insert'
elif change[3] == change[4]:
change_type = 'delete'
else:
change_type = 'replace'
if change_type == 'insert':
return None
action = self.action_mode
if action == ActionMode.Delete and not source_editable:
action = None
elif action == ActionMode.Insert and change_type == 'delete':
action = ActionMode.Replace
if not target_editable:
action = ActionMode.Delete
return action
ActionGutter.set_css_name('action-gutter')
|
from Handler import Handler
import logging
import urllib2
import json
from collections import deque
class LogentriesDiamondHandler(Handler):
"""
Implements the abstract Handler class
"""
def __init__(self, config=None):
"""
New instance of LogentriesDiamondHandler class
"""
Handler.__init__(self, config)
self.log_token = self.config.get('log_token', None)
self.queue_size = int(self.config['queue_size'])
self.queue = deque([])
if self.log_token is None:
raise Exception('Logentries log token is required (log_token)')
def get_default_config_help(self):
"""
Help text
"""
config = super(LogentriesDiamondHandler,
self).get_default_config_help()
config.update({
'log_token':
'[Your log token](https://logentries.com/doc/input-token/)',
'queue_size': ''
})
return config
def get_default_config(self):
"""
Return default config for the handler
"""
config = super(LogentriesDiamondHandler, self).get_default_config()
config.update({
'log_token': '',
'queue_size': 100
})
return config
def process(self, metric):
"""
Queue the metric and send the batch to Logentries when the queue is full
"""
self.queue.append(metric)
if len(self.queue) >= self.queue_size:
logging.debug("Queue is full, sending logs to Logentries")
self._send()
def _send(self):
"""
Convert each queued metric to a JSON object and send it to Logentries
"""
while len(self.queue) > 0:
metric = self.queue.popleft()
topic, value, timestamp = str(metric).split()
msg = json.dumps({"event": {topic: value}})
req = urllib2.Request("https://js.logentries.com/v1/logs/" +
self.log_token, msg)
try:
urllib2.urlopen(req)
except urllib2.URLError as e:
logging.error("Can't send log message to Logentries %s", e)
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import JsonResponse
from django.shortcuts import redirect
from django.utils.translation import gettext as _
from django.views.decorators.http import require_POST
from weblate.auth.forms import InviteUserForm, send_invitation
from weblate.auth.models import Group, User
from weblate.trans.forms import UserManageForm
from weblate.trans.models import Change
from weblate.trans.util import render
from weblate.utils import messages
from weblate.utils.views import get_project, show_form_errors
def check_user_form(request, project, verbose=False, form_class=UserManageForm):
"""Check project permission and UserManageForm.
This is a simple helper that performs the validation needed by all user management views.
"""
obj = get_project(request, project)
if (
not request.user.has_perm("project.permissions", obj)
or obj.access_control == obj.ACCESS_CUSTOM
):
raise PermissionDenied()
form = form_class(request.POST)
if form.is_valid():
return obj, form
if verbose:
show_form_errors(request, form)
return obj, None
@require_POST
@login_required
def set_groups(request, project):
"""Change group assignment for a user."""
obj, form = check_user_form(request, project)
try:
group = obj.group_set.get(
name__contains="@", internal=True, pk=int(request.POST.get("group", ""))
)
except (Group.DoesNotExist, ValueError):
group = None
action = request.POST.get("action")
user = form.cleaned_data["user"] if form else None
if group is None or form is None:
code = 400
message = _("Invalid parameters!")
status = None
elif action == "remove":
owners = User.objects.all_admins(obj)
if group.name.endswith("@Administration") and owners.count() <= 1:
code = 400
message = _("You can not remove last owner!")
else:
code = 200
message = ""
user.groups.remove(group)
Change.objects.create(
project=obj,
action=Change.ACTION_REMOVE_USER,
user=request.user,
details={"username": user.username, "group": group.name},
)
status = user.groups.filter(pk=group.pk).exists()
else:
user.groups.add(group)
Change.objects.create(
project=obj,
action=Change.ACTION_ADD_USER,
user=request.user,
details={"username": user.username, "group": group.name},
)
code = 200
message = ""
status = user.groups.filter(pk=group.pk).exists()
return JsonResponse(
data={"responseCode": code, "message": message, "state": status}
)
@require_POST
@login_required
def add_user(request, project):
"""Add user to a project."""
obj, form = check_user_form(request, project, True)
if form is not None:
try:
user = form.cleaned_data["user"]
obj.add_user(user)
Change.objects.create(
project=obj,
action=Change.ACTION_ADD_USER,
user=request.user,
details={"username": user.username},
)
messages.success(request, _("User has been added to this project."))
except Group.DoesNotExist:
messages.error(request, _("Failed to find group to add a user!"))
return redirect("manage-access", project=obj.slug)
@require_POST
@login_required
def invite_user(request, project):
"""Invite user to a project."""
obj, form = check_user_form(request, project, True, form_class=InviteUserForm)
if form is not None:
try:
form.save(request, obj)
messages.success(request, _("User has been invited to this project."))
except Group.DoesNotExist:
messages.error(request, _("Failed to find group to add a user!"))
return redirect("manage-access", project=obj.slug)
@require_POST
@login_required
def resend_invitation(request, project):
"""Remove user from a project."""
obj, form = check_user_form(request, project, True)
if form is not None:
send_invitation(request, obj.name, form.cleaned_data["user"])
messages.success(request, _("User has been invited to this project."))
return redirect("manage-access", project=obj.slug)
@require_POST
@login_required
def delete_user(request, project):
"""Remove user from a project."""
obj, form = check_user_form(request, project, True)
if form is not None:
owners = User.objects.all_admins(obj)
user = form.cleaned_data["user"]
is_owner = owners.filter(pk=user.pk).exists()
if is_owner and owners.count() <= 1:
messages.error(request, _("You can not remove last owner!"))
else:
obj.remove_user(user)
Change.objects.create(
project=obj,
action=Change.ACTION_REMOVE_USER,
user=request.user,
details={"username": user.username},
)
messages.success(request, _("User has been removed from this project."))
return redirect("manage-access", project=obj.slug)
@login_required
def manage_access(request, project):
"""User management view."""
obj = get_project(request, project)
if not request.user.has_perm("project.permissions", obj):
raise PermissionDenied()
return render(
request,
"manage-access.html",
{
"object": obj,
"project": obj,
"groups": Group.objects.for_project(obj),
"all_users": User.objects.for_project(obj),
"add_user_form": UserManageForm(),
"invite_user_form": InviteUserForm(),
},
)
|
import copy
import functools
import glob
import os
import re
import shutil
import tempfile
import pytest
from molecule import util
from molecule import config
for d in glob.glob(os.path.join(tempfile.gettempdir(), 'molecule', '*')):
if re.search('[A-Z]{5}$', d):
shutil.rmtree(d)
@pytest.helpers.register
def write_molecule_file(filename, data):
util.write_file(filename, util.safe_dump(data))
@pytest.helpers.register
def os_split(s):
rest, tail = os.path.split(s)
if rest in ('', os.path.sep):
return tail,
return os_split(rest) + (tail, )
@pytest.fixture
def _molecule_dependency_galaxy_section_data():
return {
'dependency': {
'name': 'galaxy'
},
}
@pytest.fixture
def _molecule_driver_section_data():
return {
'driver': {
'name': 'docker',
'options': {
'managed': True,
},
},
}
@pytest.fixture
def _molecule_lint_section_data():
return {
'lint': {
'name': 'yamllint'
},
}
@pytest.fixture
def _molecule_platforms_section_data():
return {
'platforms': [
{
'name': 'instance-1',
'groups': ['foo', 'bar'],
'children': ['child1'],
},
{
'name': 'instance-2',
'groups': ['baz', 'foo'],
'children': ['child2'],
},
],
}
@pytest.fixture
def _molecule_provisioner_section_data():
return {
'provisioner': {
'name': 'ansible',
'options': {
'become': True,
},
'lint': {
'name': 'ansible-lint',
},
'config_options': {},
},
}
@pytest.fixture
def _molecule_scenario_section_data():
return {
'scenario': {
'name': 'default'
},
}
@pytest.fixture
def _molecule_verifier_section_data():
return {
'verifier': {
'name': 'testinfra',
'lint': {
'name': 'flake8',
},
},
}
@pytest.fixture
def molecule_data(
_molecule_dependency_galaxy_section_data,
_molecule_driver_section_data, _molecule_lint_section_data,
_molecule_platforms_section_data, _molecule_provisioner_section_data,
_molecule_scenario_section_data, _molecule_verifier_section_data):
fixtures = [
_molecule_dependency_galaxy_section_data,
_molecule_driver_section_data, _molecule_lint_section_data,
_molecule_platforms_section_data, _molecule_provisioner_section_data,
_molecule_scenario_section_data, _molecule_verifier_section_data
]
return functools.reduce(lambda x, y: util.merge_dicts(x, y), fixtures)
@pytest.fixture
def molecule_directory_fixture(temp_dir):
return pytest.helpers.molecule_directory()
@pytest.fixture
def molecule_scenario_directory_fixture(molecule_directory_fixture):
path = pytest.helpers.molecule_scenario_directory()
if not os.path.isdir(path):
os.makedirs(path)
return path
@pytest.fixture
def molecule_ephemeral_directory_fixture(molecule_scenario_directory_fixture):
path = pytest.helpers.molecule_ephemeral_directory()
if not os.path.isdir(path):
os.makedirs(path)
@pytest.fixture
def molecule_file_fixture(molecule_scenario_directory_fixture,
molecule_ephemeral_directory_fixture):
return pytest.helpers.molecule_file()
@pytest.fixture
def config_instance(molecule_file_fixture, molecule_data, request):
mdc = copy.deepcopy(molecule_data)
if hasattr(request, 'param'):
util.merge_dicts(mdc, request.getfixturevalue(request.param))
pytest.helpers.write_molecule_file(molecule_file_fixture, mdc)
c = config.Config(molecule_file_fixture)
c.command_args = {'subcommand': 'test'}
return c
# Mocks
@pytest.fixture
def patched_print_debug(mocker):
return mocker.patch('molecule.util.print_debug')
@pytest.fixture
def patched_logger_info(mocker):
return mocker.patch('logging.Logger.info')
@pytest.fixture
def patched_logger_out(mocker):
return mocker.patch('molecule.logger.CustomLogger.out')
@pytest.fixture
def patched_logger_warn(mocker):
return mocker.patch('logging.Logger.warn')
@pytest.fixture
def patched_logger_error(mocker):
return mocker.patch('logging.Logger.error')
@pytest.fixture
def patched_logger_critical(mocker):
return mocker.patch('logging.Logger.critical')
@pytest.fixture
def patched_logger_success(mocker):
return mocker.patch('molecule.logger.CustomLogger.success')
@pytest.fixture
def patched_run_command(mocker):
m = mocker.patch('molecule.util.run_command')
m.return_value = mocker.Mock(stdout=b'patched-run-command-stdout')
return m
@pytest.fixture
def patched_ansible_converge(mocker):
m = mocker.patch('molecule.provisioner.ansible.Ansible.converge')
m.return_value = 'patched-ansible-converge-stdout'
return m
@pytest.fixture
def patched_add_or_update_vars(mocker):
return mocker.patch(
'molecule.provisioner.ansible.Ansible._add_or_update_vars')
@pytest.fixture
def patched_yamllint(mocker):
return mocker.patch('molecule.lint.yamllint.Yamllint.execute')
@pytest.fixture
def patched_flake8(mocker):
return mocker.patch('molecule.verifier.lint.flake8.Flake8.execute')
@pytest.fixture
def patched_ansible_galaxy(mocker):
return mocker.patch(
'molecule.dependency.ansible_galaxy.AnsibleGalaxy.execute')
@pytest.fixture
def patched_testinfra(mocker):
return mocker.patch('molecule.verifier.testinfra.Testinfra.execute')
@pytest.fixture
def patched_scenario_setup(mocker):
mocker.patch('molecule.config.Config.env')
return mocker.patch('molecule.scenario.Scenario._setup')
@pytest.fixture
def patched_config_validate(mocker):
return mocker.patch('molecule.config.Config._validate')
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nets import inception
slim = tf.contrib.slim
class InceptionV3Test(tf.test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
height, width = 299, 299
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, end_points = inception.inception_v3(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV3/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
height, width = 299, 299
inputs = tf.random_uniform((batch_size, height, width, 3))
final_endpoint, end_points = inception.inception_v3_base(inputs)
self.assertTrue(final_endpoint.op.name.startswith(
'InceptionV3/Mixed_7c'))
self.assertListEqual(final_endpoint.get_shape().as_list(),
[batch_size, 8, 8, 2048])
expected_endpoints = ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3',
'MaxPool_5a_3x3', 'Mixed_5b', 'Mixed_5c', 'Mixed_5d',
'Mixed_6a', 'Mixed_6b', 'Mixed_6c', 'Mixed_6d',
'Mixed_6e', 'Mixed_7a', 'Mixed_7b', 'Mixed_7c']
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
height, width = 299, 299
endpoints = ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3',
'MaxPool_5a_3x3', 'Mixed_5b', 'Mixed_5c', 'Mixed_5d',
'Mixed_6a', 'Mixed_6b', 'Mixed_6c', 'Mixed_6d',
'Mixed_6e', 'Mixed_7a', 'Mixed_7b', 'Mixed_7c']
for index, endpoint in enumerate(endpoints):
with tf.Graph().as_default():
inputs = tf.random_uniform((batch_size, height, width, 3))
out_tensor, end_points = inception.inception_v3_base(
inputs, final_endpoint=endpoint)
self.assertTrue(out_tensor.op.name.startswith(
'InceptionV3/' + endpoint))
self.assertItemsEqual(endpoints[:index+1], end_points)
def testBuildAndCheckAllEndPointsUptoMixed7c(self):
batch_size = 5
height, width = 299, 299
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = inception.inception_v3_base(
inputs, final_endpoint='Mixed_7c')
endpoints_shapes = {'Conv2d_1a_3x3': [batch_size, 149, 149, 32],
'Conv2d_2a_3x3': [batch_size, 147, 147, 32],
'Conv2d_2b_3x3': [batch_size, 147, 147, 64],
'MaxPool_3a_3x3': [batch_size, 73, 73, 64],
'Conv2d_3b_1x1': [batch_size, 73, 73, 80],
'Conv2d_4a_3x3': [batch_size, 71, 71, 192],
'MaxPool_5a_3x3': [batch_size, 35, 35, 192],
'Mixed_5b': [batch_size, 35, 35, 256],
'Mixed_5c': [batch_size, 35, 35, 288],
'Mixed_5d': [batch_size, 35, 35, 288],
'Mixed_6a': [batch_size, 17, 17, 768],
'Mixed_6b': [batch_size, 17, 17, 768],
'Mixed_6c': [batch_size, 17, 17, 768],
'Mixed_6d': [batch_size, 17, 17, 768],
'Mixed_6e': [batch_size, 17, 17, 768],
'Mixed_7a': [batch_size, 8, 8, 1280],
'Mixed_7b': [batch_size, 8, 8, 2048],
'Mixed_7c': [batch_size, 8, 8, 2048]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name in endpoints_shapes:
expected_shape = endpoints_shapes[endpoint_name]
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testModelHasExpectedNumberOfParameters(self):
batch_size = 5
height, width = 299, 299
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope(inception.inception_v3_arg_scope()):
inception.inception_v3_base(inputs)
total_params, _ = slim.model_analyzer.analyze_vars(
slim.get_model_variables())
self.assertAlmostEqual(21802784, total_params)
def testBuildEndPoints(self):
batch_size = 5
height, width = 299, 299
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = inception.inception_v3(inputs, num_classes)
self.assertTrue('Logits' in end_points)
logits = end_points['Logits']
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('AuxLogits' in end_points)
aux_logits = end_points['AuxLogits']
self.assertListEqual(aux_logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Mixed_7c' in end_points)
pre_pool = end_points['Mixed_7c']
self.assertListEqual(pre_pool.get_shape().as_list(),
[batch_size, 8, 8, 2048])
self.assertTrue('PreLogits' in end_points)
pre_logits = end_points['PreLogits']
self.assertListEqual(pre_logits.get_shape().as_list(),
[batch_size, 1, 1, 2048])
def testBuildEndPointsWithDepthMultiplierLessThanOne(self):
batch_size = 5
height, width = 299, 299
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = inception.inception_v3(inputs, num_classes)
endpoint_keys = [key for key in end_points.keys()
if key.startswith('Mixed') or key.startswith('Conv')]
_, end_points_with_multiplier = inception.inception_v3(
inputs, num_classes, scope='depth_multiplied_net',
depth_multiplier=0.5)
for key in endpoint_keys:
original_depth = end_points[key].get_shape().as_list()[3]
new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
self.assertEqual(0.5 * original_depth, new_depth)
def testBuildEndPointsWithDepthMultiplierGreaterThanOne(self):
batch_size = 5
height, width = 299, 299
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = inception.inception_v3(inputs, num_classes)
endpoint_keys = [key for key in end_points.keys()
if key.startswith('Mixed') or key.startswith('Conv')]
_, end_points_with_multiplier = inception.inception_v3(
inputs, num_classes, scope='depth_multiplied_net',
depth_multiplier=2.0)
for key in endpoint_keys:
original_depth = end_points[key].get_shape().as_list()[3]
new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
self.assertEqual(2.0 * original_depth, new_depth)
def testRaiseValueErrorWithInvalidDepthMultiplier(self):
batch_size = 5
height, width = 299, 299
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
with self.assertRaises(ValueError):
_ = inception.inception_v3(inputs, num_classes, depth_multiplier=-0.1)
with self.assertRaises(ValueError):
_ = inception.inception_v3(inputs, num_classes, depth_multiplier=0.0)
def testHalfSizeImages(self):
batch_size = 5
height, width = 150, 150
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, end_points = inception.inception_v3(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV3/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Mixed_7c']
self.assertListEqual(pre_pool.get_shape().as_list(),
[batch_size, 3, 3, 2048])
def testUnknownImageShape(self):
tf.reset_default_graph()
batch_size = 2
height, width = 299, 299
num_classes = 1000
input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
with self.test_session() as sess:
inputs = tf.placeholder(tf.float32, shape=(batch_size, None, None, 3))
logits, end_points = inception.inception_v3(inputs, num_classes)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Mixed_7c']
feed_dict = {inputs: input_np}
tf.global_variables_initializer().run()
pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
self.assertListEqual(list(pre_pool_out.shape), [batch_size, 8, 8, 2048])
def testUnknownBatchSize(self):
batch_size = 1
height, width = 299, 299
num_classes = 1000
inputs = tf.placeholder(tf.float32, (None, height, width, 3))
logits, _ = inception.inception_v3(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('InceptionV3/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, num_classes])
images = tf.random_uniform((batch_size, height, width, 3))
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEqual(output.shape, (batch_size, num_classes))
def testEvaluation(self):
batch_size = 2
height, width = 299, 299
num_classes = 1000
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = inception.inception_v3(eval_inputs, num_classes,
is_training=False)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (batch_size,))
def testTrainEvalWithReuse(self):
train_batch_size = 5
eval_batch_size = 2
height, width = 150, 150
num_classes = 1000
train_inputs = tf.random_uniform((train_batch_size, height, width, 3))
inception.inception_v3(train_inputs, num_classes)
eval_inputs = tf.random_uniform((eval_batch_size, height, width, 3))
logits, _ = inception.inception_v3(eval_inputs, num_classes,
is_training=False, reuse=True)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEqual(output.shape, (eval_batch_size,))
def testLogitsNotSqueezed(self):
num_classes = 25
images = tf.random_uniform([1, 299, 299, 3])
logits, _ = inception.inception_v3(images,
num_classes=num_classes,
spatial_squeeze=False)
with self.test_session() as sess:
tf.global_variables_initializer().run()
logits_out = sess.run(logits)
self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
if __name__ == '__main__':
tf.test.main()
|
import inspect
from functools import wraps
from django.db.models import F
from django.db.models.signals import post_delete
from django.db.models.signals import post_save
from django.dispatch import Signal
import django_comments as comments
from django_comments.signals import comment_was_flagged
from django_comments.signals import comment_was_posted
from zinnia import settings
from zinnia.comparison import EntryPublishedVectorBuilder
from zinnia.models.entry import Entry
from zinnia.ping import DirectoryPinger
from zinnia.ping import ExternalUrlsPinger
comment_model = comments.get_model()
ENTRY_PS_PING_DIRECTORIES = 'zinnia.entry.post_save.ping_directories'
ENTRY_PS_PING_EXTERNAL_URLS = 'zinnia.entry.post_save.ping_external_urls'
ENTRY_PS_FLUSH_SIMILAR_CACHE = 'zinnia.entry.post_save.flush_similar_cache'
ENTRY_PD_FLUSH_SIMILAR_CACHE = 'zinnia.entry.post_delete.flush_similar_cache'
COMMENT_PS_COUNT_DISCUSSIONS = 'zinnia.comment.post_save.count_discussions'
COMMENT_PD_COUNT_DISCUSSIONS = 'zinnia.comment.post_delete.count_discussions'
COMMENT_WF_COUNT_DISCUSSIONS = 'zinnia.comment.was_flagged.count_discussions'
COMMENT_WP_COUNT_COMMENTS = 'zinnia.comment.was_posted.count_comments'
PINGBACK_WF_COUNT_PINGBACKS = 'zinnia.pingback.was_flagged.count_pingbacks'
TRACKBACK_WF_COUNT_TRACKBACKS = 'zinnia.trackback.was_flagged.count_trackbacks'
pingback_was_posted = Signal(providing_args=['pingback', 'entry'])
trackback_was_posted = Signal(providing_args=['trackback', 'entry'])
def disable_for_loaddata(signal_handler):
"""
Decorator for disabling signals sent by 'post_save'
on loaddata command.
http://code.djangoproject.com/ticket/8399
"""
@wraps(signal_handler)
def wrapper(*args, **kwargs):
for fr in inspect.stack():
if inspect.getmodulename(fr[1]) == 'loaddata':
return # pragma: no cover
signal_handler(*args, **kwargs)
return wrapper
@disable_for_loaddata
def ping_directories_handler(sender, **kwargs):
"""
Ping directories when an entry is saved.
"""
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_DIRECTORIES:
for directory in settings.PING_DIRECTORIES:
DirectoryPinger(directory, [entry])
@disable_for_loaddata
def ping_external_urls_handler(sender, **kwargs):
"""
Ping external URLs when an entry is saved.
"""
entry = kwargs['instance']
if entry.is_visible and settings.SAVE_PING_EXTERNAL_URLS:
ExternalUrlsPinger(entry)
@disable_for_loaddata
def flush_similar_cache_handler(sender, **kwargs):
"""
Flush the cache of similar entries when an entry is saved.
"""
entry = kwargs['instance']
if entry.is_visible:
EntryPublishedVectorBuilder().cache_flush()
def count_discussions_handler(sender, **kwargs):
"""
Update the count of each type of discussion on an entry.
"""
if kwargs.get('instance') and kwargs.get('created'):
# The signal is emitted by the comment creation,
# so we do nothing, comment_was_posted is used instead.
return
comment = kwargs.get('comment') or kwargs['instance']
entry = comment.content_object
if isinstance(entry, Entry):
entry.comment_count = entry.comments.count()
entry.pingback_count = entry.pingbacks.count()
entry.trackback_count = entry.trackbacks.count()
entry.save(update_fields=[
'comment_count', 'pingback_count', 'trackback_count'])
def count_comments_handler(sender, **kwargs):
"""
Update Entry.comment_count when a public comment was posted.
"""
comment = kwargs['comment']
if comment.is_public:
entry = comment.content_object
if isinstance(entry, Entry):
entry.comment_count = F('comment_count') + 1
entry.save(update_fields=['comment_count'])
def count_pingbacks_handler(sender, **kwargs):
"""
Update Entry.pingback_count when a pingback was posted.
"""
entry = kwargs['entry']
entry.pingback_count = F('pingback_count') + 1
entry.save(update_fields=['pingback_count'])
def count_trackbacks_handler(sender, **kwargs):
"""
Update Entry.trackback_count when a trackback was posted.
"""
entry = kwargs['entry']
entry.trackback_count = F('trackback_count') + 1
entry.save(update_fields=['trackback_count'])
def connect_entry_signals():
"""
Connect all the signals on Entry model.
"""
post_save.connect(
ping_directories_handler, sender=Entry,
dispatch_uid=ENTRY_PS_PING_DIRECTORIES)
post_save.connect(
ping_external_urls_handler, sender=Entry,
dispatch_uid=ENTRY_PS_PING_EXTERNAL_URLS)
post_save.connect(
flush_similar_cache_handler, sender=Entry,
dispatch_uid=ENTRY_PS_FLUSH_SIMILAR_CACHE)
post_delete.connect(
flush_similar_cache_handler, sender=Entry,
dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
def disconnect_entry_signals():
"""
Disconnect all the signals on Entry model.
"""
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_PING_DIRECTORIES)
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_PING_EXTERNAL_URLS)
post_save.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PS_FLUSH_SIMILAR_CACHE)
post_delete.disconnect(
sender=Entry,
dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
def connect_discussion_signals():
"""
Connect all the signals on the Comment model to
maintain a valid discussion count on each entry
when an action is performed on the comments.
"""
post_save.connect(
count_discussions_handler, sender=comment_model,
dispatch_uid=COMMENT_PS_COUNT_DISCUSSIONS)
post_delete.connect(
count_discussions_handler, sender=comment_model,
dispatch_uid=COMMENT_PD_COUNT_DISCUSSIONS)
comment_was_flagged.connect(
count_discussions_handler, sender=comment_model,
dispatch_uid=COMMENT_WF_COUNT_DISCUSSIONS)
comment_was_posted.connect(
count_comments_handler, sender=comment_model,
dispatch_uid=COMMENT_WP_COUNT_COMMENTS)
pingback_was_posted.connect(
count_pingbacks_handler, sender=comment_model,
dispatch_uid=PINGBACK_WF_COUNT_PINGBACKS)
trackback_was_posted.connect(
count_trackbacks_handler, sender=comment_model,
dispatch_uid=TRACKBACK_WF_COUNT_TRACKBACKS)
def disconnect_discussion_signals():
"""
Disconnect all the signals on Comment model
provided by Zinnia.
"""
post_save.disconnect(
sender=comment_model,
dispatch_uid=COMMENT_PS_COUNT_DISCUSSIONS)
post_delete.disconnect(
sender=comment_model,
dispatch_uid=COMMENT_PD_COUNT_DISCUSSIONS)
comment_was_flagged.disconnect(
sender=comment_model,
dispatch_uid=COMMENT_WF_COUNT_DISCUSSIONS)
comment_was_posted.disconnect(
sender=comment_model,
dispatch_uid=COMMENT_WP_COUNT_COMMENTS)
pingback_was_posted.disconnect(
sender=comment_model,
dispatch_uid=PINGBACK_WF_COUNT_PINGBACKS)
trackback_was_posted.disconnect(
sender=comment_model,
dispatch_uid=TRACKBACK_WF_COUNT_TRACKBACKS)
|
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import zone
from homeassistant.components.geofency import CONF_MOBILE_BEACONS, DOMAIN
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.setup import async_setup_component
from homeassistant.util import slugify
# pylint: disable=redefined-outer-name
from tests.async_mock import patch
HOME_LATITUDE = 37.239622
HOME_LONGITUDE = -115.815811
NOT_HOME_LATITUDE = 37.239394
NOT_HOME_LONGITUDE = -115.763283
GPS_ENTER_HOME = {
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Home",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "1",
}
GPS_EXIT_HOME = {
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Home",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "0",
}
BEACON_ENTER_HOME = {
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"beaconUUID": "FFEF0E83-09B2-47C8-9837-E7B563F5F556",
"minor": "36138",
"major": "8629",
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Home",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "1",
}
BEACON_EXIT_HOME = {
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"beaconUUID": "FFEF0E83-09B2-47C8-9837-E7B563F5F556",
"minor": "36138",
"major": "8629",
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Home",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "0",
}
BEACON_ENTER_CAR = {
"latitude": NOT_HOME_LATITUDE,
"longitude": NOT_HOME_LONGITUDE,
"beaconUUID": "FFEF0E83-09B2-47C8-9837-E7B563F5F556",
"minor": "36138",
"major": "8629",
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Car 1",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "1",
}
BEACON_EXIT_CAR = {
"latitude": NOT_HOME_LATITUDE,
"longitude": NOT_HOME_LONGITUDE,
"beaconUUID": "FFEF0E83-09B2-47C8-9837-E7B563F5F556",
"minor": "36138",
"major": "8629",
"device": "4A7FE356-2E9D-4264-A43F-BF80ECAEE416",
"name": "Car 1",
"radius": 100,
"id": "BAAD384B-A4AE-4983-F5F5-4C2F28E68205",
"date": "2017-08-19T10:53:53Z",
"address": "Testing Trail 1",
"entry": "0",
}
@pytest.fixture(autouse=True)
def mock_dev_track(mock_device_tracker_conf):
"""Mock device tracker config loading."""
pass
@pytest.fixture
async def geofency_client(loop, hass, aiohttp_client):
"""Geofency mock client (unauthenticated)."""
assert await async_setup_component(hass, "persistent_notification", {})
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {CONF_MOBILE_BEACONS: ["Car 1"]}}
)
await hass.async_block_till_done()
with patch("homeassistant.components.device_tracker.legacy.update_config"):
return await aiohttp_client(hass.http.app)
@pytest.fixture(autouse=True)
async def setup_zones(loop, hass):
"""Set up Zone config in HA."""
assert await async_setup_component(
hass,
zone.DOMAIN,
{
"zone": {
"name": "Home",
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"radius": 100,
}
},
)
await hass.async_block_till_done()
@pytest.fixture
async def webhook_id(hass, geofency_client):
"""Initialize the Geofency component and get the webhook_id."""
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM, result
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
return result["result"].data["webhook_id"]
async def test_data_validation(geofency_client, webhook_id):
"""Test data validation."""
url = f"/api/webhook/{webhook_id}"
# No data
req = await geofency_client.post(url)
assert req.status == HTTP_UNPROCESSABLE_ENTITY
missing_attributes = ["address", "device", "entry", "latitude", "longitude", "name"]
# missing attributes
for attribute in missing_attributes:
copy = GPS_ENTER_HOME.copy()
del copy[attribute]
req = await geofency_client.post(url, data=copy)
assert req.status == HTTP_UNPROCESSABLE_ENTITY
async def test_gps_enter_and_exit_home(hass, geofency_client, webhook_id):
"""Test GPS based zone enter and exit."""
url = f"/api/webhook/{webhook_id}"
# Enter the Home zone
req = await geofency_client.post(url, data=GPS_ENTER_HOME)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(GPS_ENTER_HOME["device"])
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_HOME == state_name
# Exit the Home zone
req = await geofency_client.post(url, data=GPS_EXIT_HOME)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(GPS_EXIT_HOME["device"])
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_NOT_HOME == state_name
# Exit the Home zone with "Send Current Position" enabled
data = GPS_EXIT_HOME.copy()
data["currentLatitude"] = NOT_HOME_LATITUDE
data["currentLongitude"] = NOT_HOME_LONGITUDE
req = await geofency_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(GPS_EXIT_HOME["device"])
current_latitude = hass.states.get(f"device_tracker.{device_name}").attributes[
"latitude"
]
assert NOT_HOME_LATITUDE == current_latitude
current_longitude = hass.states.get(f"device_tracker.{device_name}").attributes[
"longitude"
]
assert NOT_HOME_LONGITUDE == current_longitude
dev_reg = await hass.helpers.device_registry.async_get_registry()
assert len(dev_reg.devices) == 1
ent_reg = await hass.helpers.entity_registry.async_get_registry()
assert len(ent_reg.entities) == 1
async def test_beacon_enter_and_exit_home(hass, geofency_client, webhook_id):
"""Test iBeacon based zone enter and exit - a.k.a stationary iBeacon."""
url = f"/api/webhook/{webhook_id}"
# Enter the Home zone
req = await geofency_client.post(url, data=BEACON_ENTER_HOME)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{BEACON_ENTER_HOME['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_HOME == state_name
# Exit the Home zone
req = await geofency_client.post(url, data=BEACON_EXIT_HOME)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{BEACON_ENTER_HOME['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_NOT_HOME == state_name
async def test_beacon_enter_and_exit_car(hass, geofency_client, webhook_id):
"""Test use of mobile iBeacon."""
url = f"/api/webhook/{webhook_id}"
# Enter the Car away from Home zone
req = await geofency_client.post(url, data=BEACON_ENTER_CAR)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{BEACON_ENTER_CAR['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_NOT_HOME == state_name
# Exit the Car away from Home zone
req = await geofency_client.post(url, data=BEACON_EXIT_CAR)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{BEACON_ENTER_CAR['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_NOT_HOME == state_name
# Enter the Car in the Home zone
data = BEACON_ENTER_CAR.copy()
data["latitude"] = HOME_LATITUDE
data["longitude"] = HOME_LONGITUDE
req = await geofency_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{data['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_HOME == state_name
# Exit the Car in the Home zone
req = await geofency_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(f"beacon_{data['name']}")
state_name = hass.states.get(f"device_tracker.{device_name}").state
assert STATE_HOME == state_name
async def test_load_unload_entry(hass, geofency_client, webhook_id):
"""Test that the appropriate dispatch signals are added and removed."""
url = f"/api/webhook/{webhook_id}"
# Enter the Home zone
req = await geofency_client.post(url, data=GPS_ENTER_HOME)
await hass.async_block_till_done()
assert req.status == HTTP_OK
device_name = slugify(GPS_ENTER_HOME["device"])
state_1 = hass.states.get(f"device_tracker.{device_name}")
assert STATE_HOME == state_1.state
assert len(hass.data[DOMAIN]["devices"]) == 1
entry = hass.config_entries.async_entries(DOMAIN)[0]
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.data[DOMAIN]["devices"]) == 0
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
state_2 = hass.states.get(f"device_tracker.{device_name}")
assert state_2 is not None
assert state_1 is not state_2
assert STATE_HOME == state_2.state
assert state_2.attributes[ATTR_LATITUDE] == HOME_LATITUDE
assert state_2.attributes[ATTR_LONGITUDE] == HOME_LONGITUDE
|
import json
import os
import re
import threading
import unittest
from absl import flags
import mock
import numpy
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker.linux_benchmarks import bidirectional_network_benchmark
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
MBPS = 'Mbits/sec'
TOLERANCE = 0.000001
class BidirectionalNetworkBenchmarkTestCase(
pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(BidirectionalNetworkBenchmarkTestCase, self).setUp()
# Load netperf stdout data
path = os.path.join(os.path.dirname(__file__),
'..', 'data',
'bidirectional_network_results.json')
with open(path) as fp:
remote_stdouts = json.load(fp)
self.expected_stdout = [json.dumps(stdout)
for stdout in remote_stdouts]
def _AssertSamples(self, results, delta, values):
""""Asserts that the samples matching delta correspond to values."""
delta_metric = [r for r in results if r[1] == delta]
self.assertEqual(1, len(delta_metric), delta)
# The bidirectional_network_tests run in parallel and are grouped by
# <test number>_<test name>. This can vary due to the parallel nature of
# the tests, so we grab all samples with the header that matches the header
# of the delta sample.
delta_header = delta_metric[0][0][:len('0_TCP_STREAM_')]
netperf_run = [r for r in results
if r[0][:len(delta_header)] == delta_header]
header = delta_header + 'Throughput_'
# assert we have 8 values because percentiles are manually picked
self.assertEqual(8, len(values))
values.sort()
self._AssertSample(netperf_run, header + 'max', values[7], MBPS)
self._AssertSample(netperf_run, header + 'min', values[0], MBPS)
self._AssertSample(netperf_run, header + 'stddev',
numpy.std(values, ddof=1), MBPS)
self._AssertSample(netperf_run, header + 'p90', values[7], MBPS)
self._AssertSample(netperf_run, header + 'average', numpy.mean(values),
MBPS)
self._AssertSample(netperf_run, header + 'p50', values[4], MBPS)
self._AssertSample(netperf_run, header + 'total', sum(values), MBPS)
self._AssertSample(netperf_run, header + 'p99', values[7], MBPS)
def _AssertSample(self, results, metric, value, unit):
"""Asserts results contains a sample matching metric/value/unit."""
match = [r for r in results if (r.metric == metric and r.unit == unit and
abs(r.value - value) < TOLERANCE)]
self.assertEqual(1, len(match))
def testRun(self):
FLAGS.bidirectional_network_tests = [
'TCP_STREAM', 'TCP_MAERTS', 'TCP_MAERTS', 'TCP_MAERTS'
]
FLAGS.bidirectional_network_test_length = 60
FLAGS.bidirectional_stream_num_streams = 8
    # Helper for GetNetperfStdOut which holds the last returned index for the
    # given test. Used to ensure the mocked stdout is returned matching the
    # requested netperf test and each is returned exactly once.
last_returned = {
'TCP_STREAM': -1,
'TCP_MAERTS': -1
}
stdout_lock = threading.Lock()
def GetNetperfStdOut(remote_cmd, timeout):
"""Mock returning Netperf stdout."""
del timeout # unused by mock
with stdout_lock:
match = re.search('-t (.*?) ', remote_cmd)
netperf_test = match.group(1)
i = last_returned[netperf_test]
while True:
i += 1
if FLAGS.bidirectional_network_tests[i] == netperf_test:
last_returned[netperf_test] = i
return (self.expected_stdout[i], '')
vm_spec = mock.MagicMock(spec=benchmark_spec.BenchmarkSpec)
vm_spec.vms = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock(),
mock.MagicMock(), mock.MagicMock()]
vm_spec.vms[0].RemoteCommand.side_effect = GetNetperfStdOut
results = bidirectional_network_benchmark.Run(vm_spec)
# deltas calculated by
# cat bidirectional_network_results.json | jq '.[] | .[4] - .[3]'
# samples extracted with
# egrep -o "[0-9]*\.[0-9]*,10\^6bits/s" bidirectional_network_results.json
# TCP STREAM from bidirectional_network_results.json
delta0 = 0.009217977523803711
samples0 = [
1114.84,
2866.35,
4488.67,
1626.20,
675.19,
1223.60,
944.58,
2987.61,
]
    # TCP MAERTS 0 from bidirectional_network_results.json
delta1 = 0.010135173797607422
samples1 = [
436.87,
433.89,
487.15,
1030.73,
1501.02,
415.35,
524.82,
587.19,
]
    # TCP MAERTS 1 from bidirectional_network_results.json
delta2 = 0.009433984756469727
samples2 = [
89.63,
540.79,
1124.56,
672.74,
578.30,
561.74,
658.57,
525.62,
]
    # TCP MAERTS 2 from bidirectional_network_results.json
delta3 = 0.010863065719604492
samples3 = [
608.63,
521.83,
382.78,
513.72,
607.00,
235.67,
653.73,
550.04,
]
# per test metrics
self._AssertSamples(results, delta0, samples0)
self._AssertSamples(results, delta1, samples1)
self._AssertSamples(results, delta2, samples2)
self._AssertSamples(results, delta3, samples3)
# summary metrics
self._AssertSample(results, 'outbound_network_total', sum(samples0), MBPS)
self._AssertSample(results, 'inbound_network_total',
sum(samples1 + samples2 + samples3), MBPS)
self._AssertSample(results, 'all_streams_start_delta',
1508187617.100678 - 1508187614.93243, 'seconds')
metrics_per_test = 9 # 8 throughput samples, 1 delta
num_tests = 4
summary_metrics = 3
total_metrics = num_tests * metrics_per_test + summary_metrics
self.assertEqual(total_metrics, len(results))
if __name__ == '__main__':
unittest.main()
|
import json
from django.urls import reverse
from weblate.trans.tests.test_views import FixtureTestCase
class ExportsViewTest(FixtureTestCase):
def test_view_rss(self):
response = self.client.get(reverse("rss"))
self.assertContains(response, "Test/Test")
def test_view_rss_project(self):
response = self.client.get(reverse("rss-project", kwargs=self.kw_project))
self.assertContains(response, "Test/Test")
def test_view_rss_component(self):
response = self.client.get(reverse("rss-component", kwargs=self.kw_component))
self.assertContains(response, "Test/Test")
def test_view_rss_translation(self):
response = self.client.get(
reverse("rss-translation", kwargs=self.kw_translation)
)
self.assertContains(response, "Test/Test")
def test_export_stats(self):
response = self.client.get(reverse("export_stats", kwargs=self.kw_component))
parsed = json.loads(response.content.decode())
self.assertEqual(parsed[0]["name"], "Czech")
def test_export_stats_csv(self):
response = self.client.get(
reverse("export_stats", kwargs=self.kw_component), {"format": "csv"}
)
self.assertContains(response, "name,code")
def test_export_project_stats(self):
response = self.client.get(reverse("export_stats", kwargs=self.kw_project))
parsed = json.loads(response.content.decode())
self.assertIn("Czech", [i["language"] for i in parsed])
def test_export_project_stats_csv(self):
response = self.client.get(
reverse("export_stats", kwargs=self.kw_project), {"format": "csv"}
)
self.assertContains(response, "language,code")
def test_data(self):
response = self.client.get(reverse("data_project", kwargs=self.kw_project))
self.assertContains(response, "Test")
|
import logging
from pyrail import iRail
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
TIME_MINUTES,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round(delta.total_seconds() / 60)
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round(int(delay) / 60)
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round(duration.total_seconds() / 60))
return duration_time + get_delay_in_minutes(delay)
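# Illustrative sketch (comment only, hypothetical epoch timestamps) of how the
# helpers above combine:
#   get_delay_in_minutes(120)                            -> 2
#   get_time_until(departure_time=None)                  -> 0
#   get_ride_duration(1500000000, 1500001800, delay=120) -> 32  (30 min ride + 2 min delay)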
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(
NMBSLiveBoard(api_client, station_live, station_from, station_to)
)
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station, station_from, station_to):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return f"NMBS Live ({self._station})"
@property
def unique_id(self):
"""Return a unique ID."""
unique_id = f"{self._station}_{self._station_from}_{self._station_to}"
return f"nmbs_live_{unique_id}"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"departure_minutes": departure,
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
attrs["delay_minutes"] = delay
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
if liveboard is None or not liveboard["departures"]:
return
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = (
f"Track {next_departure['platform']} - {next_departure['station']}"
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return TIME_MINUTES
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"departure_minutes": departure,
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
attrs["delay_minutes"] = delay
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
if connections is None or not connections["connection"]:
return
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
            _LOGGER.debug(
                "Skipping update of NMBSSensor because this connection is a via"
            )
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
|
import logging
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.helpers import config_validation as cv
from .const import DOMAIN # pylint: disable=unused-import
from .data import MetOfficeData
_LOGGER = logging.getLogger(__name__)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate that the user input allows us to connect to DataPoint.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
latitude = data[CONF_LATITUDE]
longitude = data[CONF_LONGITUDE]
api_key = data[CONF_API_KEY]
metoffice_data = MetOfficeData(hass, api_key, latitude, longitude)
await metoffice_data.async_update_site()
if metoffice_data.site_name is None:
raise CannotConnect()
return {"site_name": metoffice_data.site_name}
class MetOfficeConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Met Office weather integration."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
await self.async_set_unique_id(
f"{user_input[CONF_LATITUDE]}_{user_input[CONF_LONGITUDE]}"
)
self._abort_if_unique_id_configured()
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_NAME] = info["site_name"]
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
data_schema = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Required(
CONF_LATITUDE, default=self.hass.config.latitude
): cv.latitude,
vol.Required(
CONF_LONGITUDE, default=self.hass.config.longitude
): cv.longitude,
},
)
return self.async_show_form(
step_id="user", data_schema=data_schema, errors=errors
)
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
|
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.macro', 'getMacros', out_sig='s')
def get_macros(self):
"""
Get macros
:return: JSON of macros
:rtype: str
"""
self.logger.debug("DBus call get_macros")
return self.key_manager.dbus_get_macros()
@endpoint('razer.device.macro', 'deleteMacro', in_sig='s')
def delete_macro(self, macro_key):
"""
Delete macro from key
:param macro_key: Macro key to delete bound macro from
:type macro_key: str
"""
self.logger.debug("DBus call delete_macro")
self.key_manager.dbus_delete_macro(macro_key)
@endpoint('razer.device.macro', 'addMacro', in_sig='ss')
def add_macro(self, macro_bind_key, macro_json):
"""
Add macro to key
The macro_json should be JSON form of a list of dictionaries
    :param macro_bind_key: Macro key to bind the macro to
:type macro_bind_key: str
:param macro_json: JSON list
:type macro_json: str
"""
self.logger.debug("DBus call add_macro")
self.key_manager.dbus_add_macro(macro_bind_key, macro_json)
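# Illustrative call (comment only; the payload shape is hypothetical - the exact
# event keys accepted are defined by the daemon's key manager):
#   add_macro('M1', json.dumps([{"type": "KeyPress", "key": "A"}]))
# after which get_macros() returns the stored bindings as a JSON string.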
|
import os
import os.path
import sys
import time
import xml.dom.minidom
from coverage import env
from coverage import __url__, __version__, files
from coverage.backward import iitems
from coverage.misc import isolate_module
from coverage.report import get_analysis_to_report
os = isolate_module(os)
DTD_URL = 'https://raw.githubusercontent.com/cobertura/web/master/htdocs/xml/coverage-04.dtd'
def rate(hit, num):
"""Return the fraction of `hit`/`num`, as a string."""
if num == 0:
return "1"
else:
return "%.4g" % (float(hit) / num)
class XmlReporter(object):
"""A reporter for writing Cobertura-style XML coverage results."""
def __init__(self, coverage):
self.coverage = coverage
self.config = self.coverage.config
self.source_paths = set()
if self.config.source:
for src in self.config.source:
if os.path.exists(src):
if not self.config.relative_files:
src = files.canonical_filename(src)
self.source_paths.add(src)
self.packages = {}
self.xml_out = None
def report(self, morfs, outfile=None):
"""Generate a Cobertura-compatible XML report for `morfs`.
`morfs` is a list of modules or file names.
`outfile` is a file object to write the XML to.
"""
# Initial setup.
outfile = outfile or sys.stdout
has_arcs = self.coverage.get_data().has_arcs()
# Create the DOM that will store the data.
impl = xml.dom.minidom.getDOMImplementation()
self.xml_out = impl.createDocument(None, "coverage", None)
# Write header stuff.
xcoverage = self.xml_out.documentElement
xcoverage.setAttribute("version", __version__)
xcoverage.setAttribute("timestamp", str(int(time.time()*1000)))
xcoverage.appendChild(self.xml_out.createComment(
" Generated by coverage.py: %s " % __url__
))
xcoverage.appendChild(self.xml_out.createComment(" Based on %s " % DTD_URL))
# Call xml_file for each file in the data.
for fr, analysis in get_analysis_to_report(self.coverage, morfs):
self.xml_file(fr, analysis, has_arcs)
xsources = self.xml_out.createElement("sources")
xcoverage.appendChild(xsources)
# Populate the XML DOM with the source info.
for path in sorted(self.source_paths):
xsource = self.xml_out.createElement("source")
xsources.appendChild(xsource)
txt = self.xml_out.createTextNode(path)
xsource.appendChild(txt)
lnum_tot, lhits_tot = 0, 0
bnum_tot, bhits_tot = 0, 0
xpackages = self.xml_out.createElement("packages")
xcoverage.appendChild(xpackages)
# Populate the XML DOM with the package info.
for pkg_name, pkg_data in sorted(iitems(self.packages)):
class_elts, lhits, lnum, bhits, bnum = pkg_data
xpackage = self.xml_out.createElement("package")
xpackages.appendChild(xpackage)
xclasses = self.xml_out.createElement("classes")
xpackage.appendChild(xclasses)
for _, class_elt in sorted(iitems(class_elts)):
xclasses.appendChild(class_elt)
xpackage.setAttribute("name", pkg_name.replace(os.sep, '.'))
xpackage.setAttribute("line-rate", rate(lhits, lnum))
if has_arcs:
branch_rate = rate(bhits, bnum)
else:
branch_rate = "0"
xpackage.setAttribute("branch-rate", branch_rate)
xpackage.setAttribute("complexity", "0")
lnum_tot += lnum
lhits_tot += lhits
bnum_tot += bnum
bhits_tot += bhits
xcoverage.setAttribute("lines-valid", str(lnum_tot))
xcoverage.setAttribute("lines-covered", str(lhits_tot))
xcoverage.setAttribute("line-rate", rate(lhits_tot, lnum_tot))
if has_arcs:
xcoverage.setAttribute("branches-valid", str(bnum_tot))
xcoverage.setAttribute("branches-covered", str(bhits_tot))
xcoverage.setAttribute("branch-rate", rate(bhits_tot, bnum_tot))
else:
xcoverage.setAttribute("branches-covered", "0")
xcoverage.setAttribute("branches-valid", "0")
xcoverage.setAttribute("branch-rate", "0")
xcoverage.setAttribute("complexity", "0")
# Write the output file.
outfile.write(serialize_xml(self.xml_out))
# Return the total percentage.
denom = lnum_tot + bnum_tot
if denom == 0:
pct = 0.0
else:
pct = 100.0 * (lhits_tot + bhits_tot) / denom
return pct
def xml_file(self, fr, analysis, has_arcs):
"""Add to the XML report for a single file."""
if self.config.skip_empty:
if analysis.numbers.n_statements == 0:
return
# Create the 'lines' and 'package' XML elements, which
# are populated later. Note that a package == a directory.
filename = fr.filename.replace("\\", "/")
for source_path in self.source_paths:
source_path = files.canonical_filename(source_path)
if filename.startswith(source_path.replace("\\", "/") + "/"):
rel_name = filename[len(source_path)+1:]
break
else:
rel_name = fr.relative_filename()
self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/"))
dirname = os.path.dirname(rel_name) or u"."
dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth])
package_name = dirname.replace("/", ".")
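        # Illustrative example (comment only): with rel_name "pkg/sub/mod.py" and
        # xml_package_depth=1, dirname becomes "pkg" and package_name "pkg";
        # with a depth of 2 or more it stays "pkg.sub".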
package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0])
xclass = self.xml_out.createElement("class")
xclass.appendChild(self.xml_out.createElement("methods"))
xlines = self.xml_out.createElement("lines")
xclass.appendChild(xlines)
xclass.setAttribute("name", os.path.relpath(rel_name, dirname))
xclass.setAttribute("filename", rel_name.replace("\\", "/"))
xclass.setAttribute("complexity", "0")
branch_stats = analysis.branch_stats()
missing_branch_arcs = analysis.missing_branch_arcs()
# For each statement, create an XML 'line' element.
for line in sorted(analysis.statements):
xline = self.xml_out.createElement("line")
xline.setAttribute("number", str(line))
# Q: can we get info about the number of times a statement is
# executed? If so, that should be recorded here.
xline.setAttribute("hits", str(int(line not in analysis.missing)))
if has_arcs:
if line in branch_stats:
total, taken = branch_stats[line]
xline.setAttribute("branch", "true")
xline.setAttribute(
"condition-coverage",
"%d%% (%d/%d)" % (100*taken//total, taken, total)
)
if line in missing_branch_arcs:
annlines = ["exit" if b < 0 else str(b) for b in missing_branch_arcs[line]]
xline.setAttribute("missing-branches", ",".join(annlines))
xlines.appendChild(xline)
class_lines = len(analysis.statements)
class_hits = class_lines - len(analysis.missing)
if has_arcs:
class_branches = sum(t for t, k in branch_stats.values())
missing_branches = sum(t - k for t, k in branch_stats.values())
class_br_hits = class_branches - missing_branches
else:
class_branches = 0.0
class_br_hits = 0.0
# Finalize the statistics that are collected in the XML DOM.
xclass.setAttribute("line-rate", rate(class_hits, class_lines))
if has_arcs:
branch_rate = rate(class_br_hits, class_branches)
else:
branch_rate = "0"
xclass.setAttribute("branch-rate", branch_rate)
package[0][rel_name] = xclass
package[1] += class_hits
package[2] += class_lines
package[3] += class_br_hits
package[4] += class_branches
def serialize_xml(dom):
"""Serialize a minidom node to XML."""
out = dom.toprettyxml()
if env.PY2:
out = out.encode("utf8")
return out
|
import logging
from gitterpy.client import GitterClient
from gitterpy.errors import GitterRoomError, GitterTokenError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_ROOM
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_MENTION = "mention"
ATTR_ROOM = "room"
ATTR_USERNAME = "username"
DEFAULT_NAME = "Gitter messages"
DEFAULT_ROOM = "home-assistant/home-assistant"
ICON = "mdi:message-cog"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ROOM, default=DEFAULT_ROOM): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Gitter sensor."""
name = config.get(CONF_NAME)
api_key = config.get(CONF_API_KEY)
room = config.get(CONF_ROOM)
gitter = GitterClient(api_key)
try:
username = gitter.auth.get_my_id["name"]
except GitterTokenError:
_LOGGER.error("Token is not valid")
return
add_entities([GitterSensor(gitter, room, name, username)], True)
class GitterSensor(Entity):
"""Representation of a Gitter sensor."""
def __init__(self, data, room, name, username):
"""Initialize the sensor."""
self._name = name
self._data = data
self._room = room
self._username = username
self._state = None
self._mention = 0
self._unit_of_measurement = "Msg"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_USERNAME: self._username,
ATTR_ROOM: self._room,
ATTR_MENTION: self._mention,
}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the state."""
try:
data = self._data.user.unread_items(self._room)
except GitterRoomError as error:
_LOGGER.error(error)
return
if "error" not in data:
self._mention = len(data["mention"])
self._state = len(data["chat"])
else:
_LOGGER.error("Not joined: %s", self._room)
|
import unittest
from mock import MagicMock, patch
from uiautomator import JsonRPCMethod, JsonRPCClient
import os
class TestJsonRPCMethod_id(unittest.TestCase):
def test_id(self):
method = JsonRPCMethod("", "method", 30)
self.assertTrue(isinstance(method.id(), str))
self.assertTrue(len(method.id()) > 0)
for i in range(100):
self.assertNotEqual(method.id(), method.id())
class TestJsonRPCMethod_call(unittest.TestCase):
def setUp(self):
self.url = "http://localhost/jsonrpc"
self.timeout = 20
self.method_name = "ping"
self.id = "fGasV62G"
self.method = JsonRPCMethod(self.url, self.method_name, self.timeout)
self.method.id = MagicMock()
self.method.id.return_value = self.id
try:
import urllib2
self.urlopen_patch = patch('urllib2.urlopen')
        except ImportError:
self.urlopen_patch = patch('urllib.request.urlopen')
finally:
self.urlopen = self.urlopen_patch.start()
def tearDown(self):
self.urlopen_patch.stop()
def test_normal_call(self):
return_mock = self.urlopen.return_value
return_mock.getcode.return_value = 200
return_mock.read.return_value = b'{"result": "pong", "error": null, "id": "DKNCJDLDJJ"}'
self.assertEqual("pong", self.method())
self.method.id.assert_called_once_with()
return_mock.read.return_value = b'{"result": "pong", "id": "JDLSFJLILJEMNC"}'
self.assertEqual("pong", self.method())
self.assertEqual("pong", self.method(1, 2, "str", {"a": 1}, ["1"]))
self.assertEqual("pong", self.method(a=1, b=2))
def test_normal_call_error(self):
return_mock = self.urlopen.return_value
return_mock.getcode.return_value = 500
with self.assertRaises(Exception):
self.method()
return_mock.getcode.return_value = 200
return_mock.read.return_value = b'{"result": "pong", "error": {"code": -513937, "message": "error message."}, "id": "fGasV62G"}'
with self.assertRaises(Exception):
self.method()
return_mock.read.assert_called_with()
return_mock.getcode.return_value = 200
return_mock.read.return_value = b'{"result": null, "error": null, "id": "fGasV62G"}'
with self.assertRaises(SyntaxError):
self.method(1, 2, kwarg1="")
class TestJsonRPCClient(unittest.TestCase):
def setUp(self):
self.url = "http://localhost/jsonrpc"
self.timeout = 20
def test_jsonrpc(self):
with patch('uiautomator.JsonRPCMethod') as JsonRPCMethod:
client = JsonRPCClient(self.url, self.timeout, JsonRPCMethod)
JsonRPCMethod.return_value = "Ok"
self.assertEqual(client.ping, "Ok")
JsonRPCMethod.assert_called_once_with(self.url, "ping", timeout=self.timeout)
JsonRPCMethod.return_value = {"width": 10, "height": 20}
self.assertEqual(client.info, {"width": 10, "height": 20})
JsonRPCMethod.assert_called_with(self.url, "info", timeout=self.timeout)
class TestJsonRPCMethod_call_on_windows(unittest.TestCase):
def setUp(self):
self.os_name = os.name
os.name = "nt"
self.url = "http://localhost/jsonrpc"
self.timeout = 20
self.method_name = "ping"
self.id = "fGasV62G"
self.method = JsonRPCMethod(self.url, self.method_name, self.timeout)
self.method.pool = MagicMock()
self.method.id = MagicMock()
self.method.id.return_value = self.id
def tearDown(self):
os.name = self.os_name
def test_normal_call(self):
urlopen = self.method.pool.urlopen
urlopen.return_value.status = 200
urlopen.return_value.data = b'{"result": "pong", "error": null, "id": "DKNCJDLDJJ"}'
self.assertEqual("pong", self.method())
self.method.id.assert_called_once_with()
urlopen.return_value.data = b'{"result": "pong", "id": "JDLSFJLILJEMNC"}'
self.assertEqual("pong", self.method())
self.assertEqual("pong", self.method(1, 2, "str", {"a": 1}, ["1"]))
self.assertEqual("pong", self.method(a=1, b=2))
def test_normal_call_error(self):
urlopen = self.method.pool.urlopen
urlopen.return_value.status = 500
with self.assertRaises(Exception):
self.method()
|
from pathlib import Path
from functools import partial
from . import (read_raw_edf, read_raw_bdf, read_raw_gdf, read_raw_brainvision,
read_raw_fif, read_raw_eeglab, read_raw_cnt, read_raw_egi,
read_raw_eximia, read_raw_nirx, read_raw_fieldtrip,
read_raw_artemis123, read_raw_nicolet, read_raw_kit,
read_raw_ctf)
from ..utils import fill_doc
def _read_unsupported(fname, **kwargs):
ext = "".join(Path(fname).suffixes)
msg = f"Unsupported file type ({ext})."
suggest = kwargs.get("suggest")
if suggest is not None:
msg += f" Try reading a {suggest} file instead."
msg += " Consider using a dedicated reader function for more options."
raise ValueError(msg)
# supported read file formats
supported = {".edf": read_raw_edf,
".bdf": read_raw_bdf,
".gdf": read_raw_gdf,
".vhdr": read_raw_brainvision,
".fif": read_raw_fif,
".fif.gz": read_raw_fif,
".set": read_raw_eeglab,
".cnt": read_raw_cnt,
".mff": read_raw_egi,
".nxe": read_raw_eximia,
".hdr": read_raw_nirx,
".mat": read_raw_fieldtrip,
".bin": read_raw_artemis123,
".data": read_raw_nicolet,
".sqd": read_raw_kit,
".ds": read_raw_ctf}
# known but unsupported file formats
suggested = {".vmrk": partial(_read_unsupported, suggest=".vhdr"),
".eeg": partial(_read_unsupported, suggest=".vhdr")}
# all known file formats
readers = {**supported, **suggested}
@fill_doc
def read_raw(fname, *, preload=False, verbose=None, **kwargs):
"""Read raw file.
Parameters
----------
fname : str
File name to load.
%(preload)s
%(verbose)s
**kwargs
Keyword arguments to pass to the underlying reader. For details, see
the arguments of the reader for the underlying file format.
Returns
-------
raw : mne.io.Raw
Raw object.
Notes
-----
This function is a wrapper for specific read_raw_xxx readers defined in the
readers dict. If it does not work with a specific file, try using a
dedicated reader function (read_raw_xxx) instead.
"""
ext = "".join(Path(fname).suffixes)
if ext in readers:
return readers[ext](fname, preload=preload, verbose=verbose, **kwargs)
else:
_read_unsupported(fname)
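# Illustrative usage (comment only, hypothetical file name):
#   raw = read_raw("subject01_raw.fif", preload=True)
# dispatches to read_raw_fif based on the ".fif" suffix, while an unknown
# extension raises ValueError via _read_unsupported.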
|
import voluptuous as vol
from homeassistant.const import PERCENTAGE
from homeassistant.helpers import config_validation as cv, entity_platform
from .const import ADVANTAGE_AIR_STATE_OPEN, DOMAIN as ADVANTAGE_AIR_DOMAIN
from .entity import AdvantageAirEntity
ADVANTAGE_AIR_SET_COUNTDOWN_VALUE = "minutes"
ADVANTAGE_AIR_SET_COUNTDOWN_UNIT = "min"
ADVANTAGE_AIR_SERVICE_SET_TIME_TO = "set_time_to"
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up AdvantageAir sensor platform."""
instance = hass.data[ADVANTAGE_AIR_DOMAIN][config_entry.entry_id]
entities = []
for ac_key, ac_device in instance["coordinator"].data["aircons"].items():
entities.append(AdvantageAirTimeTo(instance, ac_key, "On"))
entities.append(AdvantageAirTimeTo(instance, ac_key, "Off"))
for zone_key, zone in ac_device["zones"].items():
# Only show damper sensors when zone is in temperature control
if zone["type"] != 0:
entities.append(AdvantageAirZoneVent(instance, ac_key, zone_key))
# Only show wireless signal strength sensors when using wireless sensors
if zone["rssi"] > 0:
entities.append(AdvantageAirZoneSignal(instance, ac_key, zone_key))
async_add_entities(entities)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
{vol.Required("minutes"): cv.positive_int},
"set_time_to",
)
class AdvantageAirTimeTo(AdvantageAirEntity):
"""Representation of Advantage Air timer control."""
def __init__(self, instance, ac_key, action):
"""Initialize the Advantage Air timer control."""
super().__init__(instance, ac_key)
self.action = action
self._time_key = f"countDownTo{self.action}"
@property
def name(self):
"""Return the name."""
return f'{self._ac["name"]} Time To {self.action}'
@property
def unique_id(self):
"""Return a unique id."""
return f'{self.coordinator.data["system"]["rid"]}-{self.ac_key}-timeto{self.action}'
@property
def state(self):
"""Return the current value."""
return self._ac[self._time_key]
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return ADVANTAGE_AIR_SET_COUNTDOWN_UNIT
@property
def icon(self):
"""Return a representative icon of the timer."""
if self._ac[self._time_key] > 0:
return "mdi:timer-outline"
return "mdi:timer-off-outline"
async def set_time_to(self, **kwargs):
"""Set the timer value."""
value = min(720, max(0, int(kwargs[ADVANTAGE_AIR_SET_COUNTDOWN_VALUE])))
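        # Illustrative clamping (comment only): minutes=900 is stored as 720,
        # minutes=-5 as 0.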
await self.async_change({self.ac_key: {"info": {self._time_key: value}}})
class AdvantageAirZoneVent(AdvantageAirEntity):
"""Representation of Advantage Air Zone Vent Sensor."""
@property
def name(self):
"""Return the name."""
return f'{self._zone["name"]} Vent'
@property
def unique_id(self):
"""Return a unique id."""
return f'{self.coordinator.data["system"]["rid"]}-{self.ac_key}-{self.zone_key}-vent'
@property
def state(self):
"""Return the current value of the air vent."""
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
return self._zone["value"]
return 0
@property
def unit_of_measurement(self):
"""Return the percent sign."""
return PERCENTAGE
@property
def icon(self):
"""Return a representative icon."""
if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN:
return "mdi:fan"
return "mdi:fan-off"
class AdvantageAirZoneSignal(AdvantageAirEntity):
"""Representation of Advantage Air Zone wireless signal sensor."""
@property
def name(self):
"""Return the name."""
return f'{self._zone["name"]} Signal'
@property
def unique_id(self):
"""Return a unique id."""
return f'{self.coordinator.data["system"]["rid"]}-{self.ac_key}-{self.zone_key}-signal'
@property
def state(self):
"""Return the current value of the wireless signal."""
return self._zone["rssi"]
@property
def unit_of_measurement(self):
"""Return the percent sign."""
return PERCENTAGE
@property
def icon(self):
"""Return a representative icon."""
if self._zone["rssi"] >= 80:
return "mdi:wifi-strength-4"
if self._zone["rssi"] >= 60:
return "mdi:wifi-strength-3"
if self._zone["rssi"] >= 40:
return "mdi:wifi-strength-2"
if self._zone["rssi"] >= 20:
return "mdi:wifi-strength-1"
return "mdi:wifi-strength-outline"
|
from copy import deepcopy
import pytest
from cerberus import Validator
@pytest.fixture
def document():
return deepcopy(sample_document)
@pytest.fixture
def schema():
return deepcopy(sample_schema)
@pytest.fixture
def validator():
return Validator(sample_schema)
sample_schema = {
'a_string': {'type': 'string', 'minlength': 2, 'maxlength': 10},
'a_bytestring': {'type': 'bytes', 'minlength': 2, 'maxlength': 10},
'a_nullable_integer': {'type': 'integer', 'nullable': True},
'an_integer': {'type': 'integer', 'min': 1, 'max': 100},
'a_boolean': {'type': 'boolean', 'meta': 'can haz two distinct states'},
'a_datetime': {'type': 'datetime', 'meta': {'format': '%a, %d. %b %Y'}},
'a_float': {'type': 'float', 'min': 1, 'max': 100},
'a_number': {'type': 'number', 'min': 1, 'max': 100},
'a_set': {'type': 'set'},
'one_or_more_strings': {
'type': ['string', 'list'],
'itemsrules': {'type': 'string'},
},
'a_regex_email': {
'type': 'string',
'regex': r'^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$',
},
'a_readonly_string': {'type': 'string', 'readonly': True},
'a_restricted_integer': {'type': 'integer', 'allowed': [-1, 0, 1]},
'a_restricted_string': {'type': 'string', 'allowed': ['agent', 'client', 'vendor']},
'an_array': {'type': 'list', 'allowed': ['agent', 'client', 'vendor']},
'an_array_from_set': {'type': 'list', 'allowed': {'agent', 'client', 'vendor'}},
'a_list_of_dicts': {
'type': 'list',
'itemsrules': {
'type': 'dict',
'schema': {
'sku': {'type': 'string'},
'price': {'type': 'integer', 'required': True},
},
},
},
'a_list_of_values': {
'type': 'list',
'items': [{'type': 'string'}, {'type': 'integer'}],
},
'a_list_of_integers': {'type': 'list', 'itemsrules': {'type': 'integer'}},
'a_dict': {
'type': 'dict',
'schema': {
'address': {'type': 'string'},
'city': {'type': 'string', 'required': True},
},
},
'a_dict_with_valuesrules': {'type': 'dict', 'valuesrules': {'type': 'integer'}},
'a_list_length': {
'type': 'list',
'itemsrules': {'type': 'integer'},
'minlength': 2,
'maxlength': 5,
},
'a_nullable_field_without_type': {'nullable': True},
'a_not_nullable_field_without_type': {},
}
sample_document = {'name': 'john doe'}
|
from __future__ import absolute_import
from __future__ import unicode_literals
import typing
try:
import simplejson as json
except ImportError:
import json # type: ignore
try:
import PIL.Image
import piexif
except ImportError:
PIL = None
piexif = None
try:
from operator import length_hint
except ImportError:
def length_hint(obj, default=0): # type: ignore
# type: (typing.Any, int) -> int
"""Return an estimate of the number of items in obj.
This is useful for presizing containers when building from an
iterable.
If the object supports len(), the result will be
exact. Otherwise, it may over- or under-estimate by an
arbitrary amount. The result will be an integer >= 0.
See Also:
`PEP 424 <https://www.python.org/dev/peps/pep-0424/>`_
"""
try:
return len(obj)
except TypeError:
try:
get_hint = type(obj).__length_hint__
except AttributeError:
return default
try:
hint = get_hint(obj)
except TypeError:
return default
if hint is NotImplemented:
return default
if not isinstance(hint, int):
raise TypeError("Length hint must be an integer, not %r" %
type(hint))
if hint < 0:
raise ValueError("__length_hint__() should return >= 0")
return hint
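# Illustrative behaviour (comment only): length_hint([1, 2, 3]) -> 3;
# length_hint((i for i in range(5)), default=0) -> 0, since plain generators
# expose neither __len__ nor __length_hint__.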
__all__ = ["PIL", "piexif", "json", "length_hint"]
|
import argparse
import glob
import os
import struct
import sys
def clamp_to_min_max(value, min, max):
if value > max:
value = max
elif value < min:
value = min
return value
def clamp_to_u8(value):
return clamp_to_min_max(value, 0, 255)
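# Illustrative values (comment only): clamp_to_min_max(300, 0, 255) -> 255,
# clamp_to_u8(-5) -> 0, clamp_to_u8(128) -> 128.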
def parse_args():
parser = argparse.ArgumentParser(description="Set the wave effect")
parser.add_argument('-d', '--device', type=str, help="Device string like \"0003:1532:0045.000C\"")
parser.add_argument('--effect', required=True, type=int, help="Set whether or not to use static colour or last mouse effect for charging. 0 use mouse effect, 1 use colour")
args = parser.parse_args()
return args
def run():
args = parse_args()
if args.device is None:
mouse_dirs = glob.glob(os.path.join('/sys/bus/hid/drivers/razermouse/', "*:*:*.*"))
if len(mouse_dirs) > 1:
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
if len(mouse_dirs) < 1:
print("No mouse directories found. Make sure the driver is binded", file=sys.stderr)
sys.exit(1)
mouse_dir = mouse_dirs[0]
else:
mouse_dir = os.path.join('/sys/bus/hid/drivers/razermouse/', args.device)
if not os.path.isdir(mouse_dir):
print("Multiple mouse directories found. Rerun with -d", file=sys.stderr)
sys.exit(1)
number = clamp_to_min_max(args.effect, 0, 1)
if number:
byte_string = struct.pack(">B", 0x01)
else:
byte_string = struct.pack(">B", 0x00)
set_charging_effect_filepath = os.path.join(mouse_dir, "set_charging_effect")
with open(set_charging_effect_filepath, 'wb') as set_charging_effect_file:
set_charging_effect_file.write(byte_string)
print("Done")
if __name__ == '__main__':
run()
|
from objc_util import *
NSUserDefaults = ObjCClass('NSUserDefaults')
def add_action(scriptName, iconName='python', iconColor='', title=''):
    '''Add an editor action. scriptName should start with /
    (e.g. /stash/stash.py).
    iconName should be an icon without leading prefix or trailing size,
    i.e. alert instead of iob:alert_256.
    iconColor should be a web style hex string, e.g. aa00ff.
    title is the alternative title.
    Call save_defaults() to store the defaults.
    '''
defaults = NSUserDefaults.standardUserDefaults()
kwargs = locals()
entry = {
key: kwargs[key]
for key in ('scriptName',
'iconName',
'iconColor',
'title',
'arguments')
if key in kwargs and kwargs[key]
}
editoractions = get_actions()
editoractions.append(ns(entry))
defaults.setObject_forKey_(editoractions, 'EditorActionInfos')
def remove_action(scriptName):
''' remove all instances of a given scriptname.
Call save_defaults() to store for next session
'''
defaults = NSUserDefaults.standardUserDefaults()
editoractions = get_actions()
[editoractions.remove(x) for x in editoractions if str(x['scriptName']) == scriptName]
defaults.setObject_forKey_(editoractions, 'EditorActionInfos')
def remove_action_at_index(index):
''' remove action at index. Call save_defaults() to save result.
'''
defaults = NSUserDefaults.standardUserDefaults()
editoractions = get_actions()
del editoractions[index]
defaults.setObject_forKey_(editoractions, 'EditorActionInfos')
def get_defaults_dict():
    '''Return the NSDictionary of defaults.'''
defaults = NSUserDefaults.standardUserDefaults()
return defaults.dictionaryRepresentation()
def get_actions():
'''return action list'''
defaults = NSUserDefaults.standardUserDefaults()
return list(defaults.arrayForKey_('EditorActionInfos') or ())
def save_defaults():
'''save current set of defaults'''
defaults = NSUserDefaults.standardUserDefaults()
NSUserDefaults.setStandardUserDefaults_(defaults)
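# Illustrative usage (comment only; the values mirror the docstring examples):
#   add_action('/stash/stash.py', iconName='alert', iconColor='aa00ff', title='StaSh')
#   save_defaults()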
|
import asyncio
from functools import partial
from devolo_home_control_api.homecontrol import HomeControl
from devolo_home_control_api.mydevolo import Mydevolo
from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.typing import HomeAssistantType
from .const import CONF_HOMECONTROL, CONF_MYDEVOLO, DOMAIN, PLATFORMS
async def async_setup(hass, config):
"""Get all devices and add them to hass."""
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Set up the devolo account from a config entry."""
conf = entry.data
hass.data.setdefault(DOMAIN, {})
try:
mydevolo = Mydevolo.get_instance()
except SyntaxError:
mydevolo = Mydevolo()
mydevolo.user = conf[CONF_USERNAME]
mydevolo.password = conf[CONF_PASSWORD]
mydevolo.url = conf[CONF_MYDEVOLO]
credentials_valid = await hass.async_add_executor_job(mydevolo.credentials_valid)
if not credentials_valid:
return False
if await hass.async_add_executor_job(mydevolo.maintenance):
raise ConfigEntryNotReady
gateway_ids = await hass.async_add_executor_job(mydevolo.get_gateway_ids)
try:
zeroconf_instance = await zeroconf.async_get_instance(hass)
hass.data[DOMAIN][entry.entry_id] = {"gateways": [], "listener": None}
for gateway_id in gateway_ids:
hass.data[DOMAIN][entry.entry_id]["gateways"].append(
await hass.async_add_executor_job(
partial(
HomeControl,
gateway_id=gateway_id,
zeroconf_instance=zeroconf_instance,
url=conf[CONF_HOMECONTROL],
)
)
)
except ConnectionError as err:
raise ConfigEntryNotReady from err
for platform in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
def shutdown(event):
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
gateway.websocket_disconnect(
f"websocket disconnect requested by {EVENT_HOMEASSISTANT_STOP}"
)
# Listen when EVENT_HOMEASSISTANT_STOP is fired
hass.data[DOMAIN][entry.entry_id]["listener"] = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, shutdown
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
]
)
)
await asyncio.gather(
*[
hass.async_add_executor_job(gateway.websocket_disconnect)
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]
]
)
hass.data[DOMAIN][entry.entry_id]["listener"]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload
|
import unicodedata
from .tree import Tree
from .visitors import Transformer_InPlace
from .lexer import Token, PatternStr
from .grammar import Terminal, NonTerminal
from .tree_matcher import TreeMatcher, is_discarded_terminal
from .utils import is_id_continue
def is_iter_empty(i):
try:
_ = next(i)
return False
except StopIteration:
return True
class WriteTokensTransformer(Transformer_InPlace):
"Inserts discarded tokens into their correct place, according to the rules of grammar"
def __init__(self, tokens, term_subs):
self.tokens = tokens
self.term_subs = term_subs
def __default__(self, data, children, meta):
if not getattr(meta, 'match_tree', False):
return Tree(data, children)
iter_args = iter(children)
to_write = []
for sym in meta.orig_expansion:
if is_discarded_terminal(sym):
try:
v = self.term_subs[sym.name](sym)
except KeyError:
t = self.tokens[sym.name]
if not isinstance(t.pattern, PatternStr):
raise NotImplementedError("Reconstructing regexps not supported yet: %s" % t)
v = t.pattern.value
to_write.append(v)
else:
x = next(iter_args)
if isinstance(x, list):
to_write += x
else:
if isinstance(x, Token):
assert Terminal(x.type) == sym, x
else:
assert NonTerminal(x.data) == sym, (sym, x)
to_write.append(x)
assert is_iter_empty(iter_args)
return to_write
class Reconstructor(TreeMatcher):
"""
A Reconstructor that will, given a full parse Tree, generate source code.
Note:
The reconstructor cannot generate values from regexps. If you need to produce discarded
regexes, such as newlines, use `term_subs` and provide default values for them.
Parameters:
parser: a Lark instance
term_subs: a dictionary of [Terminal name as str] to [output text as str]
"""
def __init__(self, parser, term_subs=None):
TreeMatcher.__init__(self, parser)
self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {})
def _reconstruct(self, tree):
unreduced_tree = self.match_tree(tree, tree.data)
res = self.write_tokens.transform(unreduced_tree)
for item in res:
if isinstance(item, Tree):
# TODO use orig_expansion.rulename to support templates
for x in self._reconstruct(item):
yield x
else:
yield item
def reconstruct(self, tree, postproc=None):
x = self._reconstruct(tree)
if postproc:
x = postproc(x)
y = []
prev_item = ''
for item in x:
if prev_item and item and is_id_continue(prev_item[-1]) and is_id_continue(item[0]):
y.append(' ')
y.append(item)
prev_item = item
return ''.join(y)
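# --- Hedged usage sketch (not part of the original module) -------------------
# Illustrates how Reconstructor is typically driven; in practice this would live
# in a separate script that imports the published `lark` package. The grammar
# below is an assumption chosen purely for illustration.
if __name__ == '__main__':
    from lark import Lark
    _demo_grammar = r"""
        start: WORD ("," WORD)*
        %import common.WORD
        %ignore " "
    """
    _parser = Lark(_demo_grammar, maybe_placeholders=False)
    _tree = _parser.parse("hello, world")
    # The output parses back to the same tree; whitespace dropped by %ignore is
    # not restored verbatim (e.g. this prints "hello,world").
    print(Reconstructor(_parser).reconstruct(_tree))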
|
from homeassistant.components.air_quality import (
ATTR_CO,
ATTR_NO2,
ATTR_OZONE,
ATTR_PM_2_5,
ATTR_PM_10,
ATTR_SO2,
AirQualityEntity,
)
from homeassistant.const import CONF_NAME
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ATTR_STATION, DEFAULT_NAME, DOMAIN, ICONS_MAP, MANUFACTURER
ATTRIBUTION = "Data provided by GIOŚ"
SENSOR_MAP = {
"CO": ATTR_CO,
"NO2": ATTR_NO2,
"O3": ATTR_OZONE,
"PM10": ATTR_PM_10,
"PM2.5": ATTR_PM_2_5,
"SO2": ATTR_SO2,
}
PARALLEL_UPDATES = 1
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add a GIOS entities from a config_entry."""
name = config_entry.data[CONF_NAME]
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([GiosAirQuality(coordinator, name)], False)
def round_state(func):
"""Round state."""
def _decorator(self):
res = func(self)
if isinstance(res, float):
return round(res)
return res
return _decorator
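# Illustration (hedged): a property that would return e.g. 10.3 is exposed as 10
# once wrapped with @round_state; non-float values pass through unchanged.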
class GiosAirQuality(CoordinatorEntity, AirQualityEntity):
"""Define an GIOS sensor."""
def __init__(self, coordinator, name):
"""Initialize."""
super().__init__(coordinator)
self._name = name
self._attrs = {}
@property
def name(self):
"""Return the name."""
return self._name
@property
def icon(self):
"""Return the icon."""
if self.air_quality_index in ICONS_MAP:
return ICONS_MAP[self.air_quality_index]
return "mdi:blur"
@property
def air_quality_index(self):
"""Return the air quality index."""
return self._get_sensor_value("AQI")
@property
@round_state
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self._get_sensor_value("PM2.5")
@property
@round_state
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self._get_sensor_value("PM10")
@property
@round_state
def ozone(self):
"""Return the O3 (ozone) level."""
return self._get_sensor_value("O3")
@property
@round_state
def carbon_monoxide(self):
"""Return the CO (carbon monoxide) level."""
return self._get_sensor_value("CO")
@property
@round_state
def sulphur_dioxide(self):
"""Return the SO2 (sulphur dioxide) level."""
return self._get_sensor_value("SO2")
@property
@round_state
def nitrogen_dioxide(self):
"""Return the NO2 (nitrogen dioxide) level."""
return self._get_sensor_value("NO2")
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return self.coordinator.gios.station_id
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self.coordinator.gios.station_id)},
"name": DEFAULT_NAME,
"manufacturer": MANUFACTURER,
"entry_type": "service",
}
@property
def device_state_attributes(self):
"""Return the state attributes."""
# Different measuring stations have different sets of sensors. We don't know
# what data we will get.
for sensor in SENSOR_MAP:
if sensor in self.coordinator.data:
self._attrs[f"{SENSOR_MAP[sensor]}_index"] = self.coordinator.data[
sensor
]["index"]
self._attrs[ATTR_STATION] = self.coordinator.gios.station_name
return self._attrs
def _get_sensor_value(self, sensor):
"""Return value of specified sensor."""
if sensor in self.coordinator.data:
return self.coordinator.data[sensor]["value"]
return None
|
import pytest
from homeassistant import core
from homeassistant.helpers.json import JSONEncoder
from homeassistant.util import dt as dt_util
def test_json_encoder(hass):
"""Test the JSON Encoder."""
ha_json_enc = JSONEncoder()
state = core.State("test.test", "hello")
assert ha_json_enc.default(state) == state.as_dict()
# Default method raises TypeError if non HA object
with pytest.raises(TypeError):
ha_json_enc.default(1)
now = dt_util.utcnow()
assert ha_json_enc.default(now) == now.isoformat()
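# Usage note (assumption based on common practice): the encoder is normally passed
# to json.dumps, e.g. json.dumps(state, cls=JSONEncoder), so Home Assistant objects
# fall back to .as_dict() and datetimes to .isoformat() during serialization.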
|
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import ATTR_NAME
from homeassistant.core import callback
from . import (
SENSOR_TYPES,
TYPE_BATT1,
TYPE_BATT2,
TYPE_BATT3,
TYPE_BATT4,
TYPE_BATT5,
TYPE_BATT6,
TYPE_BATT7,
TYPE_BATT8,
TYPE_BATT9,
TYPE_BATT10,
TYPE_BATTOUT,
AmbientWeatherEntity,
)
from .const import (
ATTR_LAST_DATA,
ATTR_MONITORED_CONDITIONS,
DATA_CLIENT,
DOMAIN,
TYPE_BINARY_SENSOR,
)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Ambient PWS binary sensors based on a config entry."""
ambient = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
binary_sensor_list = []
for mac_address, station in ambient.stations.items():
for condition in station[ATTR_MONITORED_CONDITIONS]:
name, _, kind, device_class = SENSOR_TYPES[condition]
if kind == TYPE_BINARY_SENSOR:
binary_sensor_list.append(
AmbientWeatherBinarySensor(
ambient,
mac_address,
station[ATTR_NAME],
condition,
name,
device_class,
)
)
async_add_entities(binary_sensor_list, True)
class AmbientWeatherBinarySensor(AmbientWeatherEntity, BinarySensorEntity):
"""Define an Ambient binary sensor."""
@property
def is_on(self):
"""Return the status of the sensor."""
if self._sensor_type in (
TYPE_BATT1,
TYPE_BATT10,
TYPE_BATT2,
TYPE_BATT3,
TYPE_BATT4,
TYPE_BATT5,
TYPE_BATT6,
TYPE_BATT7,
TYPE_BATT8,
TYPE_BATT9,
TYPE_BATTOUT,
):
return self._state == 0
return self._state == 1
@callback
def update_from_latest_data(self):
"""Fetch new state data for the entity."""
self._state = self._ambient.stations[self._mac_address][ATTR_LAST_DATA].get(
self._sensor_type
)
|
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtWebKitWidgets import QWebInspector, QWebPage
from PyQt5.QtWidgets import QWidget
from qutebrowser.browser import inspector
from qutebrowser.misc import miscwidgets
class WebKitInspector(inspector.AbstractWebInspector):
"""A web inspector for QtWebKit."""
def __init__(self, splitter: miscwidgets.InspectorSplitter,
win_id: int,
parent: QWidget = None) -> None:
super().__init__(splitter, win_id, parent)
qwebinspector = QWebInspector()
self._set_widget(qwebinspector)
def inspect(self, page: QWebPage) -> None: # type: ignore[override]
settings = QWebSettings.globalSettings()
settings.setAttribute(QWebSettings.DeveloperExtrasEnabled, True)
self._widget.setPage(page)
|
import os
import sys
import tempfile
try:
import numpy as np
except ImportError:
print("You really need numpy to proceed with this test")
sys.exit(1)
import smart_open
def tofile():
dt = np.dtype([('time', [('min', int), ('sec', int)]), ('temp', float)])
x = np.zeros((1,), dtype=dt)
with tempfile.NamedTemporaryFile(prefix='test_207', suffix='.dat', delete=False) as fout:
x.tofile(fout.name)
return fout.name
def test():
try:
path = tofile()
with smart_open.smart_open(path, 'rb') as fin:
loaded = np.fromfile(fin)
del loaded
return 0
finally:
os.unlink(path)
return 1
if __name__ == '__main__':
sys.exit(test())
|
from functools import partial
from collections import namedtuple
from flask import current_app
from flask_principal import Permission, RoleNeed
# Permissions
operator_permission = Permission(RoleNeed("operator"))
admin_permission = Permission(RoleNeed("admin"))
CertificateOwner = namedtuple("certificate", ["method", "value"])
CertificateOwnerNeed = partial(CertificateOwner, "role")
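# For illustration: CertificateOwnerNeed("secops") produces
# certificate(method='role', value='secops'), the Need granted to identities
# that carry the matching role.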
class SensitiveDomainPermission(Permission):
def __init__(self):
needs = [RoleNeed("admin")]
sensitive_domain_roles = current_app.config.get("SENSITIVE_DOMAIN_ROLES", [])
if sensitive_domain_roles:
for role in sensitive_domain_roles:
needs.append(RoleNeed(role))
super(SensitiveDomainPermission, self).__init__(*needs)
class CertificatePermission(Permission):
def __init__(self, owner, roles):
needs = [RoleNeed("admin"), RoleNeed(owner), RoleNeed("creator")]
for r in roles:
needs.append(CertificateOwnerNeed(str(r)))
# Backwards compatibility with mixed-case role names
if str(r) != str(r).lower():
needs.append(CertificateOwnerNeed(str(r).lower()))
super(CertificatePermission, self).__init__(*needs)
class ApiKeyCreatorPermission(Permission):
def __init__(self):
super(ApiKeyCreatorPermission, self).__init__(RoleNeed("admin"))
RoleMember = namedtuple("role", ["method", "value"])
RoleMemberNeed = partial(RoleMember, "member")
class RoleMemberPermission(Permission):
def __init__(self, role_id):
needs = [RoleNeed("admin"), RoleMemberNeed(role_id)]
super(RoleMemberPermission, self).__init__(*needs)
AuthorityCreator = namedtuple("authority", ["method", "value"])
AuthorityCreatorNeed = partial(AuthorityCreator, "authorityUse")
AuthorityOwner = namedtuple("authority", ["method", "value"])
AuthorityOwnerNeed = partial(AuthorityOwner, "role")
class AuthorityPermission(Permission):
def __init__(self, authority_id, roles):
needs = [RoleNeed("admin"), AuthorityCreatorNeed(str(authority_id))]
for r in roles:
needs.append(AuthorityOwnerNeed(str(r)))
super(AuthorityPermission, self).__init__(*needs)
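# Hedged usage sketch (not part of the original module): inside a Flask request
# where flask_principal has loaded the current identity, a view can gate access
# with Permission.can(), e.g.
#
#     if not AuthorityPermission(authority.id, [r.name for r in authority.roles]).can():
#         abort(403)
#
# The attribute names on `authority` are illustrative assumptions.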
|
import contextlib
import datetime
import os
import subprocess
import sys
import molecule
import molecule.config
import molecule.util
try:
import vagrant
except ImportError:
sys.exit('ERROR: Driver missing, install python-vagrant.')
DOCUMENTATION = '''
---
module: vagrant
short_description: Manage Vagrant instances
description:
- Manage the life cycle of Vagrant instances.
- Supports check mode. Run with --check and --diff to view the config difference
and the list of actions to be taken.
version_added: 2.0
author:
- Cisco Systems, Inc.
options:
instance_name:
description:
- Assign a name to a new instance or match an existing instance.
required: True
default: None
instance_interfaces:
description:
- Assign interfaces to a new instance.
required: False
default: []
instance_raw_config_args:
description:
- Additional Vagrant options not explicitly exposed by this module.
required: False
default: None
config_options:
description:
- Additional config options not explicitly exposed by this module.
required: False
default: {}
platform_box:
description:
- Name of Vagrant box.
required: True
default: None
platform_box_version:
description:
- Explicit version of Vagrant box to use.
required: False
default: None
platform_box_url:
description:
- The URL to a Vagrant box.
required: False
default: None
provider_name:
description:
- Name of the Vagrant provider to use.
required: False
default: virtualbox
provider_memory:
description:
- Amount of memory to allocate to the instance.
required: False
default: 512
provider_cpus:
description:
- Number of CPUs to allocate to the instance.
required: False
default: 2
provider_options:
description:
- Additional provider options not explicitly exposed by this module.
required: False
default: {}
provider_override_args:
description:
- Additional override options not explicitly exposed by this module.
required: False
default: None
provider_raw_config_args:
description:
- Additional Vagrant options not explicitly exposed by this module.
required: False
default: None
force_stop:
description:
- Force halt the instance, then destroy the instance.
required: False
default: False
state:
description:
- The desired state of the instance.
required: True
choices: ['up', 'halt', 'destroy']
default: None
requirements:
- python >= 2.6
- python-vagrant
- vagrant
'''
EXAMPLES = '''
See doc/source/configuration.rst
'''
VAGRANTFILE_TEMPLATE = '''
require 'yaml'
Vagrant.configure('2') do |config|
vagrant_config_hash = YAML::load_file('{{ vagrantfile_config }}')
if Vagrant.has_plugin?('vagrant-cachier')
config.cache.scope = 'machine'
end
##
# Configs
##
c = vagrant_config_hash['config']
if !c['options']['synced_folder']
config.vm.synced_folder ".", "/vagrant", disabled: true
end
c['options'].delete('synced_folder')
c['options'].each { |key, value|
eval("config.#{key} = #{value}")
}
##
# Platforms
##
platform = vagrant_config_hash['platform']
config.vm.box = platform['box']
if platform['box_version']
config.vm.box_version = platform['box_version']
end
if platform['box_url']
config.vm.box_url = platform['box_url']
end
##
# Provider
##
provider = vagrant_config_hash['provider']
provider_memory = provider['options']['memory']
provider_cpus = provider['options']['cpus']
provider['options'].delete('memory')
provider['options'].delete('cpus')
##
# Virtualbox
##
if provider['name'] == 'virtualbox'
config.vm.provider provider['name'] do |virtualbox, override|
virtualbox.memory = provider_memory
virtualbox.cpus = provider_cpus
if provider['options']['linked_clone']
if Gem::Version.new(Vagrant::VERSION) >= Gem::Version.new('1.8.0')
virtualbox.linked_clone = provider['options']['linked_clone']
end
else
if Gem::Version.new(Vagrant::VERSION) >= Gem::Version.new('1.8.0')
virtualbox.linked_clone = true
end
end
# Custom
provider['options'].each { |key, value|
if key != 'linked_clone'
eval("virtualbox.#{key} = #{value}")
end
}
# Raw Configuration
if provider['raw_config_args']
provider['raw_config_args'].each { |raw_config_arg|
eval("virtualbox.#{raw_config_arg}")
}
end
if provider['override_args']
provider['override_args'].each { |override_arg|
eval("override.#{override_arg}")
}
end
end
# The vagrant-vbguest plugin attempts to update packages
# before a RHEL based VM is registered.
# TODO: Port from the old .j2, should be done in raw config
if (vagrant_config_hash['platform']['box'] =~ /rhel/i) != nil
if Vagrant.has_plugin?('vagrant-vbguest')
config.vbguest.auto_update = false
end
end
end
##
# VMware (vmware_fusion, vmware_workstation and vmware_desktop)
##
if provider['name'].start_with?('vmware_')
config.vm.provider provider['name'] do |vmware, override|
vmware.vmx['memsize'] = provider_memory
vmware.vmx['numvcpus'] = provider_cpus
# Custom
provider['options'].each { |key, value|
eval("vmware.#{key} = #{value}")
}
# Raw Configuration
if provider['raw_config_args']
provider['raw_config_args'].each { |raw_config_arg|
eval("vmware.#{raw_config_arg}")
}
end
if provider['override_args']
provider['override_args'].each { |override_arg|
eval("override.#{override_arg}")
}
end
end
end
##
# Parallels
##
if provider['name'] == 'parallels'
config.vm.provider provider['name'] do |parallels, override|
parallels.memory = provider_memory
parallels.cpus = provider_cpus
# Custom
provider['options'].each { |key, value|
eval("parallels.#{key} = #{value}")
}
# Raw Configuration
if provider['raw_config_args']
provider['raw_config_args'].each { |raw_config_arg|
eval("parallels.#{raw_config_arg}")
}
end
if provider['override_args']
provider['override_args'].each { |override_arg|
eval("override.#{override_arg}")
}
end
end
end
##
# Libvirt
##
if provider['name'] == 'libvirt'
config.vm.provider provider['name'] do |libvirt, override|
libvirt.memory = provider_memory
libvirt.cpus = provider_cpus
# Custom
provider['options'].each { |key, value|
eval("libvirt.#{key} = #{value}")
}
# Raw Configuration
if provider['raw_config_args']
provider['raw_config_args'].each { |raw_config_arg|
eval("libvirt.#{raw_config_arg}")
}
end
if provider['override_args']
provider['override_args'].each { |override_arg|
eval("override.#{override_arg}")
}
end
end
end
##
# Instances
##
if vagrant_config_hash['instance']
instance = vagrant_config_hash['instance']
config.vm.define instance['name'] do |c|
c.vm.hostname = instance['name']
if instance['interfaces']
instance['interfaces'].each { |interface|
c.vm.network "#{interface['network_name']}",
Hash[interface.select{|k| k != 'network_name'}.map{|k,v| [k.to_sym, v]}]
}
end
if instance['raw_config_args']
instance['raw_config_args'].each { |raw_config_arg|
eval("c.#{raw_config_arg}")
}
end
end
end
end
'''.strip() # noqa
class VagrantClient(object):
def __init__(self, module):
self._module = module
self._config = self._get_config()
self._vagrantfile = self._config.driver.vagrantfile
self._vagrant = self._get_vagrant()
self._write_configs()
self._has_error = None
self._datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@contextlib.contextmanager
def stdout_cm(self):
""" Redirect the stdout to a log file. """
with open(self._get_stdout_log(), 'a+') as fh:
msg = '### {} ###\n'.format(self._datetime)
fh.write(msg)
fh.flush()
yield fh
@contextlib.contextmanager
def stderr_cm(self):
""" Redirect the stderr to a log file. """
with open(self._get_stderr_log(), 'a+') as fh:
msg = '### {} ###\n'.format(self._datetime)
fh.write(msg)
fh.flush()
try:
yield fh
except Exception as e:
self._has_error = True
fh.write(str(e))  # str() keeps this working on Python 3, where Exception.message no longer exists
fh.flush()
raise
def up(self):
changed = False
if not self._created():
changed = True
provision = self._module.params['provision']
try:
self._vagrant.up(provision=provision)
except Exception:
# NOTE(retr0h): Ignore the exception since python-vagrant
# passes the actual error as a no-argument ContextManager.
pass
# NOTE(retr0h): Ansible wants only one module return `fail_json`
# or `exit_json`.
if not self._has_error:
self._module.exit_json(
changed=changed,
log=self._get_stdout_log(),
**self._conf())
else:
msg = "ERROR: See log file '{}'".format(self._get_stderr_log())
self._module.fail_json(msg=msg)
def destroy(self):
changed = False
if self._created():
changed = True
if self._module.params['force_stop']:
self._vagrant.halt(force=True)
self._vagrant.destroy()
self._module.exit_json(changed=changed)
def halt(self):
changed = False
if self._created():
changed = True
self._vagrant.halt(force=self._module.params['force_stop'])
self._module.exit_json(changed=changed)
def _conf(self):
instance_name = self._module.params['instance_name']
return self._vagrant.conf(vm_name=instance_name)
def _status(self):
instance_name = self._module.params['instance_name']
try:
s = self._vagrant.status(vm_name=instance_name)[0]
return {'name': s.name, 'state': s.state, 'provider': s.provider}
except AttributeError:
pass
except subprocess.CalledProcessError:
pass
def _created(self):
status = self._status()
if status and status['state'] == 'running':
return status
return {}
def _get_config(self):
molecule_file = os.environ['MOLECULE_FILE']
return molecule.config.Config(molecule_file)
def _write_vagrantfile(self):
template = molecule.util.render_template(
VAGRANTFILE_TEMPLATE,
vagrantfile_config=self._config.driver.vagrantfile_config)
molecule.util.write_file(self._vagrantfile, template)
def _write_vagrantfile_config(self, data):
molecule.util.write_file(self._config.driver.vagrantfile_config,
molecule.util.safe_dump(data))
def _write_configs(self):
self._write_vagrantfile_config(self._get_vagrant_config_dict())
self._write_vagrantfile()
def _get_vagrant(self):
env = os.environ.copy()
env['VAGRANT_CWD'] = os.environ['MOLECULE_EPHEMERAL_DIRECTORY']
v = vagrant.Vagrant(
out_cm=self.stdout_cm, err_cm=self.stderr_cm, env=env)
return v
def _get_vagrant_config_dict(self):
d = {
'config': {
# NOTE(retr0h): Options provided here will be passed to
# Vagrant as "config.#{key} = #{value}".
'options': {
# NOTE(retr0h): `synced_folder` does not represent the
# actual key used by Vagrant; it is only a flag used to
# enable/disable the shared folder.
'synced_folder': False,
'ssh.insert_key': True,
}
},
'platform': {
'box': self._module.params['platform_box'],
'box_version': self._module.params['platform_box_version'],
'box_url': self._module.params['platform_box_url'],
},
'instance': {
'name': self._module.params['instance_name'],
'interfaces': self._module.params['instance_interfaces'],
'raw_config_args':
self._module.params['instance_raw_config_args'],
},
'provider': {
'name': self._module.params['provider_name'],
# NOTE(retr0h): Options provided here will be passed to
# Vagrant as "$provider_name.#{key} = #{value}".
'options': {
'memory': self._module.params['provider_memory'],
'cpus': self._module.params['provider_cpus'],
},
'raw_config_args':
self._module.params['provider_raw_config_args'],
'override_args': self._module.params['provider_override_args'],
}
}
molecule.util.merge_dicts(d['config']['options'],
self._module.params['config_options'])
molecule.util.merge_dicts(d['provider']['options'],
self._module.params['provider_options'])
return d
def _get_stdout_log(self):
return self._get_vagrant_log('out')
def _get_stderr_log(self):
return self._get_vagrant_log('err')
def _get_vagrant_log(self, __type):
instance_name = self._module.params['instance_name']
return os.path.join(self._config.scenario.ephemeral_directory,
'vagrant-{}.{}'.format(instance_name, __type))
def main():
module = AnsibleModule( # noqa
argument_spec=dict(
instance_name=dict(type='str', required=True),
instance_interfaces=dict(type='list', default=[]),
instance_raw_config_args=dict(type='list', default=None),
config_options=dict(type='dict', default={}),
platform_box=dict(type='str', required=False),
platform_box_version=dict(type='str'),
platform_box_url=dict(type='str'),
provider_name=dict(type='str', default='virtualbox'),
provider_memory=dict(type='int', default=512),
provider_cpus=dict(type='int', default=2),
provider_options=dict(type='dict', default={}),
provider_override_args=dict(type='list', default=None),
provider_raw_config_args=dict(type='list', default=None),
provision=dict(type='bool', default=False),
force_stop=dict(type='bool', default=False),
state=dict(
type='str', default='up', choices=['up', 'destroy', 'halt'])),
supports_check_mode=False)
v = VagrantClient(module)
if module.params['state'] == 'up':
v.up()
if module.params['state'] == 'destroy':
v.destroy()
if module.params['state'] == 'halt':
v.halt()
from ansible.module_utils.basic import * # noqa
main()
|
from aiohomekit.model.characteristics import CharacteristicsTypes
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntity
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import callback
from . import KNOWN_DEVICES, HomeKitEntity
ICON = "mdi:security"
CURRENT_STATE_MAP = {
0: STATE_ALARM_ARMED_HOME,
1: STATE_ALARM_ARMED_AWAY,
2: STATE_ALARM_ARMED_NIGHT,
3: STATE_ALARM_DISARMED,
4: STATE_ALARM_TRIGGERED,
}
TARGET_STATE_MAP = {
STATE_ALARM_ARMED_HOME: 0,
STATE_ALARM_ARMED_AWAY: 1,
STATE_ALARM_ARMED_NIGHT: 2,
STATE_ALARM_DISARMED: 3,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Homekit alarm control panel."""
hkid = config_entry.data["AccessoryPairingID"]
conn = hass.data[KNOWN_DEVICES][hkid]
@callback
def async_add_service(aid, service):
if service["stype"] != "security-system":
return False
info = {"aid": aid, "iid": service["iid"]}
async_add_entities([HomeKitAlarmControlPanelEntity(conn, info)], True)
return True
conn.add_listener(async_add_service)
class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity):
"""Representation of a Homekit Alarm Control Panel."""
def get_characteristic_types(self):
"""Define the homekit characteristics the entity cares about."""
return [
CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT,
CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET,
CharacteristicsTypes.BATTERY_LEVEL,
]
@property
def icon(self):
"""Return icon."""
return ICON
@property
def state(self):
"""Return the state of the device."""
return CURRENT_STATE_MAP[
self.service.value(CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT)
]
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
await self.set_alarm_state(STATE_ALARM_DISARMED, code)
async def async_alarm_arm_away(self, code=None):
"""Send arm command."""
await self.set_alarm_state(STATE_ALARM_ARMED_AWAY, code)
async def async_alarm_arm_home(self, code=None):
"""Send stay command."""
await self.set_alarm_state(STATE_ALARM_ARMED_HOME, code)
async def async_alarm_arm_night(self, code=None):
"""Send night command."""
await self.set_alarm_state(STATE_ALARM_ARMED_NIGHT, code)
async def set_alarm_state(self, state, code=None):
"""Send state command."""
await self.async_put_characteristics(
{CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET: TARGET_STATE_MAP[state]}
)
@property
def device_state_attributes(self):
"""Return the optional state attributes."""
battery_level = self.service.value(CharacteristicsTypes.BATTERY_LEVEL)
if not battery_level:
return {}
return {ATTR_BATTERY_LEVEL: battery_level}
|
from __future__ import unicode_literals
import os
from lib.fun.osjudger import *
class Colored(object):
if is_Windows():
os.system("color")
RED = '\033[31m'
GREEN = '\033[32m'
YELLOW = '\033[33m'
ORANGE = '\033[0;33;1m'
BLUE = '\033[34m'
FUCHSIA = '\033[35m'
WHITE = '\033[37m'
#: no color
RESET = '\033[0m'
def color_str(self, color, s):
if is_higher_win10_v1511() or is_Linux() or is_Mac():
return '{}{}{}'.format(getattr(self, color), s, self.RESET)
else:
return '{}'.format(s)
def red(self, s):
return self.color_str('RED', s)
def green(self, s):
return self.color_str('GREEN', s)
def yellow(self, s):
return self.color_str('YELLOW', s)
def orange(self, s):
return self.color_str('ORANGE', s)
def blue(self, s):
return self.color_str('BLUE', s)
def fuchsia(self, s):
return self.color_str('FUCHSIA', s)
def white(self, s):
return self.color_str('WHITE', s)
|
from flexx import flx
class DeepEventConnections(flx.Widget):
def init(self):
# Put a label and some sliders deep in the hierarchy
with flx.VBox():
self.label = flx.Label()
with flx.HFix(flex=1):
for j in range(2):
with flx.VBox(flex=1):
for i in range(5):
flx.Slider(value=i/5)
@flx.reaction('!children**.value')
def on_slider_change(self, *events):
for ev in events:
self.label.set_text('Slider %s changed to %f' %
(ev.source.id, ev.new_value))
if __name__ == '__main__':
m = flx.launch(DeepEventConnections)
flx.run()
|
import argparse
import requests
from service_configuration_lib import read_extra_service_information
from paasta_tools.cli.utils import validate_service_name
from paasta_tools.utils import DEFAULT_SOA_DIR
from paasta_tools.utils import timeout
def add_subparser(subparsers):
list_parser = subparsers.add_parser(
"performance-check",
description="Performs a performance check",
help="Performs a performance check",
)
list_parser.add_argument(
"-s",
"--service",
help='Name of service for which you wish to check. Leading "services-", as included in a '
"Jenkins job name, will be stripped.",
)
list_parser.add_argument("-k", "--commit", help=argparse.SUPPRESS)
list_parser.add_argument(
"-d",
"--soa-dir",
dest="soa_dir",
metavar="SOA_DIR",
default=DEFAULT_SOA_DIR,
help="Define a different soa config directory",
)
list_parser.set_defaults(command=perform_performance_check)
def load_performance_check_config(service, soa_dir):
return read_extra_service_information(
service_name=service, extra_info="performance-check", soa_dir=soa_dir
)
def submit_performance_check_job(service, soa_dir):
performance_check_config = load_performance_check_config(service, soa_dir)
if not performance_check_config:
print("No performance-check.yaml. Skipping performance-check.")
return
endpoint = performance_check_config.pop("endpoint")
r = requests.post(url=endpoint, params=performance_check_config)
r.raise_for_status()
print("Posted a submission to the PaaSTA performance-check service.")
print(f"Endpoint: {endpoint}")
print(f"Parameters: {performance_check_config}")
@timeout()
def perform_performance_check(args):
service = args.service
if service.startswith("services-"):
service = service.split("services-", 1)[1]
validate_service_name(service, args.soa_dir)
try:
submit_performance_check_job(service=service, soa_dir=args.soa_dir)
except Exception as e:
print(
"Something went wrong with the performance check. Safely bailing. No need to panic."
)
print("Here was the error:")
print(str(e))
|
import os
import time
import contextlib
from typing import cast, Mapping, MutableSequence
from PyQt5.QtCore import pyqtSlot, QUrl, pyqtSignal
from PyQt5.QtWidgets import QProgressDialog, QApplication
from qutebrowser.config import config
from qutebrowser.api import cmdutils
from qutebrowser.utils import utils, log, usertypes, message, qtutils
from qutebrowser.misc import objects, sql
# increment to indicate that HistoryCompletion must be regenerated
_USER_VERSION = 2
web_history = cast('WebHistory', None)
class HistoryProgress:
"""Progress dialog for history imports/conversions.
This makes WebHistory simpler as it can call methods of this class even
when we don't want to show a progress dialog (for very small imports). This
means tick() and finish() can be called even when start() wasn't.
"""
def __init__(self):
self._progress = None
self._value = 0
def start(self, text, maximum):
"""Start showing a progress dialog."""
self._progress = QProgressDialog()
self._progress.setMinimumDuration(500)
self._progress.setLabelText(text)
self._progress.setMaximum(maximum)
self._progress.setCancelButton(None)
self._progress.show()
QApplication.processEvents()
def tick(self):
"""Increase the displayed progress value."""
self._value += 1
if self._progress is not None:
self._progress.setValue(self._value)
QApplication.processEvents()
def finish(self):
"""Finish showing the progress dialog."""
if self._progress is not None:
self._progress.hide()
class CompletionMetaInfo(sql.SqlTable):
"""Table containing meta-information for the completion."""
KEYS = {
'force_rebuild': False,
}
def __init__(self, parent=None):
super().__init__("CompletionMetaInfo", ['key', 'value'],
constraints={'key': 'PRIMARY KEY'})
for key, default in self.KEYS.items():
if key not in self:
self[key] = default
def _check_key(self, key):
if key not in self.KEYS:
raise KeyError(key)
def __contains__(self, key):
self._check_key(key)
query = self.contains_query('key')
return query.run(val=key).value()
def __getitem__(self, key):
self._check_key(key)
query = sql.Query('SELECT value FROM CompletionMetaInfo '
'WHERE key = :key')
return query.run(key=key).value()
def __setitem__(self, key, value):
self._check_key(key)
self.insert({'key': key, 'value': value}, replace=True)
class CompletionHistory(sql.SqlTable):
"""History which only has the newest entry for each URL."""
def __init__(self, parent=None):
super().__init__("CompletionHistory", ['url', 'title', 'last_atime'],
constraints={'url': 'PRIMARY KEY',
'title': 'NOT NULL',
'last_atime': 'NOT NULL'},
parent=parent)
self.create_index('CompletionHistoryAtimeIndex', 'last_atime')
class WebHistory(sql.SqlTable):
"""The global history of visited pages.
Attributes:
completion: A CompletionHistory instance.
metainfo: A CompletionMetaInfo instance.
_progress: A HistoryProgress instance.
Class attributes:
_PROGRESS_THRESHOLD: When to start showing progress dialogs.
"""
# All web history cleared
history_cleared = pyqtSignal()
# one url cleared
url_cleared = pyqtSignal(QUrl)
_PROGRESS_THRESHOLD = 1000
def __init__(self, progress, parent=None):
super().__init__("History", ['url', 'title', 'atime', 'redirect'],
constraints={'url': 'NOT NULL',
'title': 'NOT NULL',
'atime': 'NOT NULL',
'redirect': 'NOT NULL'},
parent=parent)
self._progress = progress
# Store the last saved url to avoid duplicate immediate saves.
self._last_url = None
self.completion = CompletionHistory(parent=self)
self.metainfo = CompletionMetaInfo(parent=self)
if sql.Query('pragma user_version').run().value() < _USER_VERSION:
self.completion.delete_all()
if self.metainfo['force_rebuild']:
self.completion.delete_all()
self.metainfo['force_rebuild'] = False
if not self.completion:
# either the table is out-of-date or the user wiped it manually
self._rebuild_completion()
self.create_index('HistoryIndex', 'url')
self.create_index('HistoryAtimeIndex', 'atime')
self._contains_query = self.contains_query('url')
self._between_query = sql.Query('SELECT * FROM History '
'where not redirect '
'and not url like "qute://%" '
'and atime > :earliest '
'and atime <= :latest '
'ORDER BY atime desc')
self._before_query = sql.Query('SELECT * FROM History '
'where not redirect '
'and not url like "qute://%" '
'and atime <= :latest '
'ORDER BY atime desc '
'limit :limit offset :offset')
config.instance.changed.connect(self._on_config_changed)
def __repr__(self):
return utils.get_repr(self, length=len(self))
def __contains__(self, url):
return self._contains_query.run(val=url).value()
@config.change_filter('completion.web_history.exclude')
def _on_config_changed(self):
self.metainfo['force_rebuild'] = True
@contextlib.contextmanager
def _handle_sql_errors(self):
try:
yield
except sql.KnownError as e:
message.error("Failed to write history: {}".format(e.text()))
def _is_excluded(self, url):
"""Check if the given URL is excluded from the completion."""
patterns = config.cache['completion.web_history.exclude']
return any(pattern.matches(url) for pattern in patterns)
def _rebuild_completion(self):
data: Mapping[str, MutableSequence[str]] = {
'url': [],
'title': [],
'last_atime': []
}
# select the latest entry for each url
q = sql.Query('SELECT url, title, max(atime) AS atime FROM History '
'WHERE NOT redirect and url NOT LIKE "qute://back%" '
'GROUP BY url ORDER BY atime asc')
entries = list(q.run())
if len(entries) > self._PROGRESS_THRESHOLD:
self._progress.start("Rebuilding completion...", len(entries))
for entry in entries:
self._progress.tick()
url = QUrl(entry.url)
if self._is_excluded(url):
continue
data['url'].append(self._format_completion_url(url))
data['title'].append(entry.title)
data['last_atime'].append(entry.atime)
self._progress.finish()
self.completion.insert_batch(data, replace=True)
sql.Query('pragma user_version = {}'.format(_USER_VERSION)).run()
def get_recent(self):
"""Get the most recent history entries."""
return self.select(sort_by='atime', sort_order='desc', limit=100)
def entries_between(self, earliest, latest):
"""Iterate non-redirect, non-qute entries between two timestamps.
Args:
earliest: Omit timestamps earlier than this.
latest: Omit timestamps later than this.
"""
self._between_query.run(earliest=earliest, latest=latest)
return iter(self._between_query)
def entries_before(self, latest, limit, offset):
"""Iterate non-redirect, non-qute entries occurring before a timestamp.
Args:
latest: Omit timestamps more recent than this.
limit: Max number of entries to include.
offset: Number of entries to skip.
"""
self._before_query.run(latest=latest, limit=limit, offset=offset)
return iter(self._before_query)
def clear(self):
"""Clear all browsing history."""
with self._handle_sql_errors():
self.delete_all()
self.completion.delete_all()
self.history_cleared.emit()
self._last_url = None
def delete_url(self, url):
"""Remove all history entries with the given url.
Args:
url: URL string to delete.
"""
qurl = QUrl(url)
qtutils.ensure_valid(qurl)
self.delete('url', self._format_url(qurl))
self.completion.delete('url', self._format_completion_url(qurl))
if self._last_url == url:
self._last_url = None
self.url_cleared.emit(qurl)
@pyqtSlot(QUrl, QUrl, str)
def add_from_tab(self, url, requested_url, title):
"""Add a new history entry as slot, called from a BrowserTab."""
if any(url.scheme() in ('data', 'view-source') or
(url.scheme(), url.host()) == ('qute', 'back')
for url in (url, requested_url)):
return
if url.isEmpty():
# things set via setHtml
return
no_formatting = QUrl.UrlFormattingOption(0)
if (requested_url.isValid() and
not requested_url.matches(url, no_formatting)):
# If the url of the page is different than the url of the link
# originally clicked, save them both.
self.add_url(requested_url, title, redirect=True)
if url != self._last_url:
self.add_url(url, title)
self._last_url = url
def add_url(self, url, title="", *, redirect=False, atime=None):
"""Called via add_from_tab when a URL should be added to the history.
Args:
url: A url (as QUrl) to add to the history.
redirect: Whether the entry was redirected to another URL
(hidden in completion)
atime: Override the atime used to add the entry
"""
if not url.isValid():
log.misc.warning("Ignoring invalid URL being added to history")
return
if 'no-sql-history' in objects.debug_flags:
return
atime = int(atime) if (atime is not None) else int(time.time())
with self._handle_sql_errors():
self.insert({'url': self._format_url(url),
'title': title,
'atime': atime,
'redirect': redirect})
if redirect or self._is_excluded(url):
return
self.completion.insert({
'url': self._format_completion_url(url),
'title': title,
'last_atime': atime
}, replace=True)
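# Note on the two formatters below: rows stored in the History table keep the
# fully encoded URL (passwords stripped), while the completion table keeps the
# human-readable form so suggestions display cleanly (passwords stripped only).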
def _format_url(self, url):
return url.toString(QUrl.RemovePassword | QUrl.FullyEncoded)
def _format_completion_url(self, url):
return url.toString(QUrl.RemovePassword)
@cmdutils.register()
def history_clear(force=False):
"""Clear all browsing history.
Note this only clears the global history
(e.g. `~/.local/share/qutebrowser/history` on Linux) but not cookies,
the back/forward history of a tab, cache or other persistent data.
Args:
force: Don't ask for confirmation.
"""
if force:
web_history.clear()
else:
message.confirm_async(yes_action=web_history.clear,
title="Clear all browsing history?")
@cmdutils.register(debug=True)
def debug_dump_history(dest):
"""Dump the history to a file in the old pre-SQL format.
Args:
dest: Where to write the file to.
"""
dest = os.path.expanduser(dest)
lines = ('{}{} {} {}'.format(int(x.atime),
'-r' * x.redirect,
x.url,
x.title)
for x in web_history.select(sort_by='atime',
sort_order='asc'))
try:
with open(dest, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
message.info("Dumped history to {}".format(dest))
except OSError as e:
raise cmdutils.CommandError('Could not write history: {}'
.format(e))
def init(parent=None):
"""Initialize the web history.
Args:
parent: The parent to use for WebHistory.
"""
global web_history
progress = HistoryProgress()
web_history = WebHistory(progress=progress, parent=parent)
if objects.backend == usertypes.Backend.QtWebKit: # pragma: no cover
from qutebrowser.browser.webkit import webkithistory
webkithistory.init(web_history)
return
assert objects.backend == usertypes.Backend.QtWebEngine, objects.backend
|
import pandas as pd
import pytest
import pytz
from qstrader.system.rebalance.buy_and_hold import BuyAndHoldRebalance
@pytest.mark.parametrize(
"start_dt", [('2020-01-01'), ('2020-02-02')]
)
def test_buy_and_hold_rebalance(start_dt):
"""
Checks that the buy and hold rebalance sets the
appropriate internal attributes.
"""
sd = pd.Timestamp(start_dt, tz=pytz.UTC)
reb = BuyAndHoldRebalance(start_dt=sd)
assert reb.start_dt == sd
assert reb.rebalances == [sd]
|
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import gpslogger, zone
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.gpslogger import DOMAIN, TRACKER_UPDATE
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import (
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.helpers.dispatcher import DATA_DISPATCHER
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
HOME_LATITUDE = 37.239622
HOME_LONGITUDE = -115.815811
# pylint: disable=redefined-outer-name
@pytest.fixture(autouse=True)
def mock_dev_track(mock_device_tracker_conf):
"""Mock device tracker config loading."""
pass
@pytest.fixture
async def gpslogger_client(loop, hass, aiohttp_client):
"""Mock client for GPSLogger (unauthenticated)."""
assert await async_setup_component(hass, "persistent_notification", {})
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
with patch("homeassistant.components.device_tracker.legacy.update_config"):
return await aiohttp_client(hass.http.app)
@pytest.fixture(autouse=True)
async def setup_zones(loop, hass):
"""Set up Zone config in HA."""
assert await async_setup_component(
hass,
zone.DOMAIN,
{
"zone": {
"name": "Home",
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"radius": 100,
}
},
)
await hass.async_block_till_done()
@pytest.fixture
async def webhook_id(hass, gpslogger_client):
"""Initialize the GPSLogger component and get the webhook_id."""
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM, result
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
await hass.async_block_till_done()
return result["result"].data["webhook_id"]
async def test_missing_data(hass, gpslogger_client, webhook_id):
"""Test missing data."""
url = f"/api/webhook/{webhook_id}"
data = {"latitude": 1.0, "longitude": 1.1, "device": "123"}
# No data
req = await gpslogger_client.post(url)
await hass.async_block_till_done()
assert req.status == HTTP_UNPROCESSABLE_ENTITY
# No latitude
copy = data.copy()
del copy["latitude"]
req = await gpslogger_client.post(url, data=copy)
await hass.async_block_till_done()
assert req.status == HTTP_UNPROCESSABLE_ENTITY
# No device
copy = data.copy()
del copy["device"]
req = await gpslogger_client.post(url, data=copy)
await hass.async_block_till_done()
assert req.status == HTTP_UNPROCESSABLE_ENTITY
async def test_enter_and_exit(hass, gpslogger_client, webhook_id):
"""Test when there is a known zone."""
url = f"/api/webhook/{webhook_id}"
data = {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE, "device": "123"}
# Enter the Home
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state
assert STATE_HOME == state_name
# Enter Home again
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state
assert STATE_HOME == state_name
data["longitude"] = 0
data["latitude"] = 0
# Enter Somewhere else
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state
assert STATE_NOT_HOME == state_name
dev_reg = await hass.helpers.device_registry.async_get_registry()
assert len(dev_reg.devices) == 1
ent_reg = await hass.helpers.entity_registry.async_get_registry()
assert len(ent_reg.entities) == 1
async def test_enter_with_attrs(hass, gpslogger_client, webhook_id):
"""Test when additional attributes are present."""
url = f"/api/webhook/{webhook_id}"
data = {
"latitude": 1.0,
"longitude": 1.1,
"device": "123",
"accuracy": 10.5,
"battery": 10,
"speed": 100,
"direction": 105.32,
"altitude": 102,
"provider": "gps",
"activity": "running",
}
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}")
assert state.state == STATE_NOT_HOME
assert state.attributes["gps_accuracy"] == 10.5
assert state.attributes["battery_level"] == 10.0
assert state.attributes["speed"] == 100.0
assert state.attributes["direction"] == 105.32
assert state.attributes["altitude"] == 102.0
assert state.attributes["provider"] == "gps"
assert state.attributes["activity"] == "running"
data = {
"latitude": HOME_LATITUDE,
"longitude": HOME_LONGITUDE,
"device": "123",
"accuracy": 123,
"battery": 23,
"speed": 23,
"direction": 123,
"altitude": 123,
"provider": "gps",
"activity": "idle",
}
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}")
assert state.state == STATE_HOME
assert state.attributes["gps_accuracy"] == 123
assert state.attributes["battery_level"] == 23
assert state.attributes["speed"] == 23
assert state.attributes["direction"] == 123
assert state.attributes["altitude"] == 123
assert state.attributes["provider"] == "gps"
assert state.attributes["activity"] == "idle"
@pytest.mark.xfail(
reason="The device_tracker component does not support unloading yet."
)
async def test_load_unload_entry(hass, gpslogger_client, webhook_id):
"""Test that the appropriate dispatch signals are added and removed."""
url = f"/api/webhook/{webhook_id}"
data = {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE, "device": "123"}
# Enter the Home
req = await gpslogger_client.post(url, data=data)
await hass.async_block_till_done()
assert req.status == HTTP_OK
state_name = hass.states.get(f"{DEVICE_TRACKER_DOMAIN}.{data['device']}").state
assert STATE_HOME == state_name
assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1
entry = hass.config_entries.async_entries(DOMAIN)[0]
assert await gpslogger.async_unload_entry(hass, entry)
await hass.async_block_till_done()
assert not hass.data[DATA_DISPATCHER][TRACKER_UPDATE]
|
import requests_mock
from homeassistant.setup import async_setup_component
DTE_ENERGY_BRIDGE_CONFIG = {"platform": "dte_energy_bridge", "ip": "192.168.1.1"}
async def test_setup_with_config(hass):
"""Test the platform setup with configuration."""
assert await async_setup_component(
hass, "sensor", {"dte_energy_bridge": DTE_ENERGY_BRIDGE_CONFIG}
)
await hass.async_block_till_done()
async def test_setup_correct_reading(hass):
"""Test DTE Energy bridge returns a correct value."""
with requests_mock.Mocker() as mock_req:
mock_req.get(
"http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]),
text=".411 kW",
)
assert await async_setup_component(
hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.current_energy_usage").state == "0.411"
async def test_setup_incorrect_units_reading(hass):
"""Test DTE Energy bridge handles a value with incorrect units."""
with requests_mock.Mocker() as mock_req:
mock_req.get(
"http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]),
text="411 kW",
)
assert await async_setup_component(
hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.current_energy_usage").state == "0.411"
async def test_setup_bad_format_reading(hass):
"""Test DTE Energy bridge handles an invalid value."""
with requests_mock.Mocker() as mock_req:
mock_req.get(
"http://{}/instantaneousdemand".format(DTE_ENERGY_BRIDGE_CONFIG["ip"]),
text="411",
)
assert await async_setup_component(
hass, "sensor", {"sensor": DTE_ENERGY_BRIDGE_CONFIG}
)
await hass.async_block_till_done()
assert hass.states.get("sensor.current_energy_usage").state == "unknown"
|
import pyvera as pv
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, new_simple_controller_config
from tests.async_mock import MagicMock
async def test_binary_sensor(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device = MagicMock(spec=pv.VeraBinarySensor) # type: pv.VeraBinarySensor
vera_device.device_id = 1
vera_device.vera_device_id = vera_device.device_id
vera_device.name = "dev1"
vera_device.is_tripped = False
entity_id = "binary_sensor.dev1_1"
component_data = await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(devices=(vera_device,)),
)
update_callback = component_data.controller_data[0].update_callback
vera_device.is_tripped = False
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "off"
vera_device.is_tripped = True
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "on"
|
import numpy as np
import sys
from scattertext import TermCategoryFrequencies, CorpusDF
from scattertext.Corpus import Corpus
from scattertext.ParsedCorpus import ParsedCorpus
class CorpusShouldBeParsedCorpusException(Exception):
pass
class DocsAndLabelsFromCorpus:
def __init__(self, corpus, alternative_text_field=None):
'''
Parameters
----------
corpus, Corpus: Corpus to extract documents and labels from
alternative_text_field, str or None: if str, corpus must be parsed corpus
'''
#assert (isinstance(corpus, (Corpus, ParsedCorpus, CorpusDF, TermCategoryFrequencies))
# or (issubclass(type(corpus), (Corpus, ParsedCorpus, CorpusDF, TermCategoryFrequencies))))
self._texts_to_display = None
if alternative_text_field is not None:
if not isinstance(corpus, ParsedCorpus):
raise CorpusShouldBeParsedCorpusException(
'Corpus type needs to be ParsedCorpus to use the alternative text field.')
self._texts_to_display = corpus.get_field(alternative_text_field)
self._use_non_text_features = False
self._corpus = corpus
def use_non_text_features(self):
self._use_non_text_features = True
return self
def get_labels_and_texts(self):
# type: () -> dict
texts = self._get_texts_to_display()
to_ret = {'categories': self._corpus.get_categories(),
'labels': self._corpus.get_doc_indices(),
'texts': self._get_list_from_texts(texts)}
if self._use_non_text_features:
to_ret['extra'] = self._corpus.list_extra_features()
return to_ret
def _get_list_from_texts(self, texts):
if sys.version_info[0] == 2:
return texts.astype(unicode).tolist()
else:
return texts.astype(str).tolist()
def _get_texts_to_display(self):
if self._there_are_no_alternative_texts_to_display():
return self._corpus.get_texts()
else:
return self._texts_to_display
def _there_are_no_alternative_texts_to_display(self):
return self._texts_to_display is None
def get_labels_and_texts_and_meta(self, metadata):
# type: (np.array) -> dict
data = self.get_labels_and_texts()
assert len(metadata) == len(data['labels'])
data['meta'] = list(metadata)
return data
class DocsAndLabelsFromCorpusSample(DocsAndLabelsFromCorpus):
def __init__(self, corpus, max_per_category, alternative_text_field=None, seed=None):
DocsAndLabelsFromCorpus.__init__(self, corpus, alternative_text_field)
self.max_per_category = max_per_category
if seed is not None:
np.random.seed(seed)
def get_labels_and_texts(self, metadata=None):
'''
Parameters
----------
metadata : (array like or None)
Returns
-------
{'labels':[], 'texts': []} or {'labels':[], 'texts': [], 'meta': []}
'''
to_ret = {'categories': self._corpus.get_categories(), 'labels': [], 'texts': []}
labels = self._corpus._y.astype(int)
texts = self._get_texts_to_display()
if self._use_non_text_features:
to_ret['extra'] = []
extrafeats = self._corpus.list_extra_features()
if metadata is not None:
to_ret['meta'] = []
for label_i in range(len(self._corpus._category_idx_store)):
label_indices = np.arange(0, len(labels))[labels == label_i]
if self.max_per_category < len(label_indices):
label_indices = np.random.choice(label_indices, self.max_per_category, replace=False)
to_ret['labels'] += list([int(e) for e in labels[label_indices]])
to_ret['texts'] += list(texts[label_indices])
if metadata is not None:
to_ret['meta'] += [metadata[i] for i in label_indices]
if self._use_non_text_features:
to_ret['extra'] += [extrafeats[i] for i in label_indices]
return to_ret
def get_labels_and_texts_and_meta(self, metadata):
return self.get_labels_and_texts(metadata)
|
import voluptuous as vol
from homeassistant.components import bbb_gpio
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
CONF_PINS = "pins"
CONF_INITIAL = "initial"
CONF_INVERT_LOGIC = "invert_logic"
PIN_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_INITIAL, default=False): cv.boolean,
vol.Optional(CONF_INVERT_LOGIC, default=False): cv.boolean,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_PINS, default={}): vol.Schema({cv.string: PIN_SCHEMA})}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the BeagleBone Black GPIO devices."""
pins = config[CONF_PINS]
switches = []
for pin, params in pins.items():
switches.append(BBBGPIOSwitch(pin, params))
add_entities(switches)
class BBBGPIOSwitch(ToggleEntity):
"""Representation of a BeagleBone Black GPIO."""
def __init__(self, pin, params):
"""Initialize the pin."""
self._pin = pin
self._name = params[CONF_NAME] or DEVICE_DEFAULT_NAME
self._state = params[CONF_INITIAL]
self._invert_logic = params[CONF_INVERT_LOGIC]
bbb_gpio.setup_output(self._pin)
if self._state is False:
bbb_gpio.write_output(self._pin, 1 if self._invert_logic else 0)
else:
bbb_gpio.write_output(self._pin, 0 if self._invert_logic else 1)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
bbb_gpio.write_output(self._pin, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
bbb_gpio.write_output(self._pin, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
import datetime
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import ATTR_LAST_TRIP_TIME
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import dt as dt_util
from . import (
CONF_ZONENAME,
CONF_ZONETYPE,
DATA_EVL,
SIGNAL_ZONE_UPDATE,
ZONE_SCHEMA,
EnvisalinkDevice,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Envisalink binary sensor devices."""
configured_zones = discovery_info["zones"]
devices = []
for zone_num in configured_zones:
device_config_data = ZONE_SCHEMA(configured_zones[zone_num])
device = EnvisalinkBinarySensor(
hass,
zone_num,
device_config_data[CONF_ZONENAME],
device_config_data[CONF_ZONETYPE],
hass.data[DATA_EVL].alarm_state["zone"][zone_num],
hass.data[DATA_EVL],
)
devices.append(device)
async_add_entities(devices)
class EnvisalinkBinarySensor(EnvisalinkDevice, BinarySensorEntity):
"""Representation of an Envisalink binary sensor."""
def __init__(self, hass, zone_number, zone_name, zone_type, info, controller):
"""Initialize the binary_sensor."""
self._zone_type = zone_type
self._zone_number = zone_number
_LOGGER.debug("Setting up zone: %s", zone_name)
super().__init__(zone_name, info, controller)
async def async_added_to_hass(self):
"""Register callbacks."""
async_dispatcher_connect(self.hass, SIGNAL_ZONE_UPDATE, self._update_callback)
@property
def device_state_attributes(self):
"""Return the state attributes."""
attr = {}
# The Envisalink library returns a "last_fault" value that's the
# number of seconds since the last fault, up to a maximum of 327680
# seconds (65536 5-second ticks).
#
# We don't want the HA event log to fill up with a bunch of no-op
# "state changes" that are just that number ticking up once per poll
# interval, so we subtract it from the current second-accurate time
# unless it is already at the maximum value, in which case we set it
# to None since we can't determine the actual value.
seconds_ago = self._info["last_fault"]
if seconds_ago < 65536 * 5:
now = dt_util.now().replace(microsecond=0)
delta = datetime.timedelta(seconds=seconds_ago)
last_trip_time = (now - delta).isoformat()
else:
last_trip_time = None
attr[ATTR_LAST_TRIP_TIME] = last_trip_time
return attr
@property
def is_on(self):
"""Return true if sensor is on."""
return self._info["status"]["open"]
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@callback
def _update_callback(self, zone):
"""Update the zone's state, if needed."""
if zone is None or int(zone) == self._zone_number:
self.async_write_ha_state()
|
import io
import logging
# pylint: disable=import-error
import face_recognition
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_CONFIDENCE,
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
ImageProcessingFaceEntity,
)
from homeassistant.core import split_entity_id
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_NAME = "name"
CONF_FACES = "faces"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FACES): {cv.string: cv.isfile},
vol.Optional(CONF_CONFIDENCE, default=0.6): vol.Coerce(float),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dlib Face detection platform."""
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
DlibFaceIdentifyEntity(
camera[CONF_ENTITY_ID],
config[CONF_FACES],
camera.get(CONF_NAME),
config[CONF_CONFIDENCE],
)
)
add_entities(entities)
class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
"""Dlib Face API entity for identify."""
def __init__(self, camera_entity, faces, name, tolerance):
"""Initialize Dlib face identify entry."""
super().__init__()
self._camera = camera_entity
if name:
self._name = name
else:
self._name = f"Dlib Face {split_entity_id(camera_entity)[1]}"
self._faces = {}
for face_name, face_file in faces.items():
try:
image = face_recognition.load_image_file(face_file)
self._faces[face_name] = face_recognition.face_encodings(image)[0]
except IndexError as err:
_LOGGER.error("Failed to parse %s. Error: %s", face_file, err)
self._tolerance = tolerance
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
def process_image(self, image):
"""Process image."""
fak_file = io.BytesIO(image)
fak_file.name = "snapshot.jpg"
fak_file.seek(0)
image = face_recognition.load_image_file(fak_file)
unknowns = face_recognition.face_encodings(image)
found = []
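        # Compare every detected face against each known encoding; a match
        # within the configured tolerance records that person's name.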
for unknown_face in unknowns:
for name, face in self._faces.items():
result = face_recognition.compare_faces(
[face], unknown_face, tolerance=self._tolerance
)
if result[0]:
found.append({ATTR_NAME: name})
self.process_faces(found, len(unknowns))
|
import boto3
from moto import mock_sts, mock_ec2, mock_elb, mock_elbv2, mock_iam
@mock_sts()
@mock_elb()
def test_get_all_elbs(app, aws_credentials):
from lemur.plugins.lemur_aws.elb import get_all_elbs
client = boto3.client("elb", region_name="us-east-1")
elbs = get_all_elbs(account_number="123456789012", region="us-east-1")
assert not elbs
client.create_load_balancer(
LoadBalancerName="example-lb",
Listeners=[
{
"Protocol": "string",
"LoadBalancerPort": 443,
"InstanceProtocol": "tcp",
"InstancePort": 5443,
"SSLCertificateId": "tcp",
}
],
)
elbs = get_all_elbs(account_number="123456789012", region="us-east-1")
assert elbs
@mock_sts()
@mock_ec2
@mock_elbv2()
@mock_iam
def test_create_elb_with_https_listener_miscellaneous(app, aws_credentials):
from lemur.plugins.lemur_aws import iam, elb
endpoint_name = "example-lbv2"
account_number = "123456789012"
region_ue1 = "us-east-1"
client = boto3.client("elbv2", region_name="us-east-1")
ec2 = boto3.resource("ec2", region_name="us-east-1")
# Create VPC
vpc = ec2.create_vpc(CidrBlock="172.28.7.0/24")
# Create LB (elbv2) in above VPC
assert create_load_balancer(client, ec2, vpc.id, endpoint_name)
# Create target group
target_group_arn = create_target_group(client, vpc.id)
assert target_group_arn
# Test get_load_balancer_arn_from_endpoint
lb_arn = elb.get_load_balancer_arn_from_endpoint(endpoint_name,
account_number=account_number,
region=region_ue1)
assert lb_arn
# Test describe_listeners_v2
listeners = elb.describe_listeners_v2(account_number=account_number,
region=region_ue1,
LoadBalancerArn=lb_arn)
assert listeners
assert not listeners["Listeners"]
# Upload cert
response = iam.upload_cert("LemurTestCert", "testCert", "cert1", "cert2",
account_number=account_number)
assert response
cert_arn = response["ServerCertificateMetadata"]["Arn"]
assert cert_arn
# Create https listener using above cert
listeners = client.create_listener(
LoadBalancerArn=lb_arn,
Protocol="HTTPS",
Port=443,
Certificates=[{"CertificateArn": cert_arn}],
DefaultActions=[{"Type": "forward", "TargetGroupArn": target_group_arn}],
)
assert listeners
listener_arn = listeners["Listeners"][0]["ListenerArn"]
assert listener_arn
assert listeners["Listeners"]
for listener in listeners["Listeners"]:
if listener["Port"] == 443:
assert listener["Certificates"]
assert cert_arn == listener["Certificates"][0]["CertificateArn"]
# Test get_listener_arn_from_endpoint
assert listener_arn == elb.get_listener_arn_from_endpoint(
endpoint_name,
443,
account_number=account_number,
region=region_ue1,
)
@mock_sts()
@mock_elb()
def test_get_all_elbs_v2():
from lemur.plugins.lemur_aws.elb import get_all_elbs_v2
elbs = get_all_elbs_v2(account_number="123456789012",
region="us-east-1")
assert elbs
def create_load_balancer(client, ec2, vpc_id, endpoint_name):
subnet1 = ec2.create_subnet(
VpcId=vpc_id,
CidrBlock="172.28.7.192/26",
AvailabilityZone="us-east-1a"
)
return client.create_load_balancer(
Name=endpoint_name,
Subnets=[
subnet1.id,
],
)
def create_target_group(client, vpc_id):
response = client.create_target_group(
Name="a-target",
Protocol="HTTPS",
Port=443,
VpcId=vpc_id,
)
return response.get("TargetGroups")[0]["TargetGroupArn"]
|
import json
from homeassistant.components.media_player import BrowseError, BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_ALBUM,
MEDIA_CLASS_ARTIST,
MEDIA_CLASS_CHANNEL,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_GENRE,
MEDIA_CLASS_PLAYLIST,
MEDIA_CLASS_TRACK,
MEDIA_TYPE_MUSIC,
)
PLAYABLE_ITEM_TYPES = [
"folder",
"song",
"mywebradio",
"webradio",
"playlist",
"cuesong",
"remdisk",
"cuefile",
"folder-with-favourites",
"internal-folder",
]
NON_EXPANDABLE_ITEM_TYPES = [
"song",
"webradio",
"mywebradio",
"cuesong",
"album",
"artist",
"cd",
"play-playlist",
]
PLAYLISTS_URI_PREFIX = "playlists"
ARTISTS_URI_PREFIX = "artists://"
ALBUMS_URI_PREFIX = "albums://"
GENRES_URI_PREFIX = "genres://"
RADIO_URI_PREFIX = "radio"
LAST_100_URI_PREFIX = "Last_100"
FAVOURITES_URI = "favourites"
def _item_to_children_media_class(item, info=None):
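    # A bare prefix (e.g. "artists://") denotes a top-level listing, so its
    # children are artists/albums/genres; a longer URI is a drill-down into a
    # single entry, so its children sit one level deeper (albums or tracks).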
if info and "album" in info and "artist" in info:
return MEDIA_CLASS_TRACK
if item["uri"].startswith(PLAYLISTS_URI_PREFIX):
return MEDIA_CLASS_PLAYLIST
if item["uri"].startswith(ARTISTS_URI_PREFIX):
if len(item["uri"]) > len(ARTISTS_URI_PREFIX):
return MEDIA_CLASS_ALBUM
return MEDIA_CLASS_ARTIST
if item["uri"].startswith(ALBUMS_URI_PREFIX):
if len(item["uri"]) > len(ALBUMS_URI_PREFIX):
return MEDIA_CLASS_TRACK
return MEDIA_CLASS_ALBUM
if item["uri"].startswith(GENRES_URI_PREFIX):
if len(item["uri"]) > len(GENRES_URI_PREFIX):
return MEDIA_CLASS_ALBUM
return MEDIA_CLASS_GENRE
if item["uri"].startswith(LAST_100_URI_PREFIX) or item["uri"] == FAVOURITES_URI:
return MEDIA_CLASS_TRACK
if item["uri"].startswith(RADIO_URI_PREFIX):
return MEDIA_CLASS_CHANNEL
return MEDIA_CLASS_DIRECTORY
def _item_to_media_class(item, parent_item=None):
if "type" not in item:
return MEDIA_CLASS_DIRECTORY
if item["type"] in ["webradio", "mywebradio"]:
return MEDIA_CLASS_CHANNEL
if item["type"] in ["song", "cuesong"]:
return MEDIA_CLASS_TRACK
if item.get("artist"):
return MEDIA_CLASS_ALBUM
if item["uri"].startswith(ARTISTS_URI_PREFIX) and len(item["uri"]) > len(
ARTISTS_URI_PREFIX
):
return MEDIA_CLASS_ARTIST
if parent_item:
return _item_to_children_media_class(parent_item)
return MEDIA_CLASS_DIRECTORY
def _list_payload(media_library, item, children=None):
return BrowseMedia(
title=item["name"],
media_class=MEDIA_CLASS_DIRECTORY,
children_media_class=_item_to_children_media_class(item),
media_content_type=MEDIA_TYPE_MUSIC,
media_content_id=json.dumps(item),
can_play=False,
can_expand=True,
)
def _raw_item_payload(media_library, item, parent_item=None, title=None, info=None):
if "type" in item:
thumbnail = item.get("albumart")
if thumbnail:
thumbnail = media_library.canonic_url(thumbnail)
else:
# don't use the built-in volumio white-on-white icons
thumbnail = None
return {
"title": title or item.get("title"),
"media_class": _item_to_media_class(item, parent_item),
"children_media_class": _item_to_children_media_class(item, info),
"media_content_type": MEDIA_TYPE_MUSIC,
"media_content_id": json.dumps(item),
"can_play": item.get("type") in PLAYABLE_ITEM_TYPES,
"can_expand": item.get("type") not in NON_EXPANDABLE_ITEM_TYPES,
"thumbnail": thumbnail,
}
def _item_payload(media_library, item, parent_item):
return BrowseMedia(
**_raw_item_payload(media_library, item, parent_item=parent_item)
)
async def browse_top_level(media_library):
"""Browse the top-level of a Volumio media hierarchy."""
navigation = await media_library.browse()
children = [_list_payload(media_library, item) for item in navigation["lists"]]
return BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id="library",
media_content_type="library",
title="Media Library",
can_play=False,
can_expand=True,
children=children,
)
async def browse_node(media_library, media_content_type, media_content_id):
"""Browse a node of a Volumio media hierarchy."""
json_item = json.loads(media_content_id)
navigation = await media_library.browse(json_item["uri"])
if "lists" not in navigation:
raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}")
# we only use the first list since the second one could include all tracks
first_list = navigation["lists"][0]
children = [
_item_payload(media_library, item, parent_item=json_item)
for item in first_list["items"]
]
info = navigation.get("info")
title = first_list.get("title")
if not title:
if info:
title = f"{info.get('album')} ({info.get('artist')})"
else:
title = "Media Library"
payload = _raw_item_payload(media_library, json_item, title=title, info=info)
return BrowseMedia(**payload, children=children)
|
import asyncio
from homeassistant import data_entry_flow
from homeassistant.auth import auth_manager_from_config, models as auth_models
from homeassistant.auth.mfa_modules import auth_mfa_module_from_config
from tests.async_mock import patch
from tests.common import MockUser
MOCK_CODE = "123456"
async def test_validating_mfa(hass):
"""Test validating mfa code."""
totp_auth_module = await auth_mfa_module_from_config(hass, {"type": "totp"})
await totp_auth_module.async_setup_user("test-user", {})
with patch("pyotp.TOTP.verify", return_value=True):
assert await totp_auth_module.async_validate("test-user", {"code": MOCK_CODE})
async def test_validating_mfa_invalid_code(hass):
"""Test validating an invalid mfa code."""
totp_auth_module = await auth_mfa_module_from_config(hass, {"type": "totp"})
await totp_auth_module.async_setup_user("test-user", {})
with patch("pyotp.TOTP.verify", return_value=False):
assert (
await totp_auth_module.async_validate("test-user", {"code": MOCK_CODE})
is False
)
async def test_validating_mfa_invalid_user(hass):
"""Test validating an mfa code with invalid user."""
totp_auth_module = await auth_mfa_module_from_config(hass, {"type": "totp"})
await totp_auth_module.async_setup_user("test-user", {})
assert (
await totp_auth_module.async_validate("invalid-user", {"code": MOCK_CODE})
is False
)
async def test_setup_depose_user(hass):
"""Test despose user."""
totp_auth_module = await auth_mfa_module_from_config(hass, {"type": "totp"})
result = await totp_auth_module.async_setup_user("test-user", {})
assert len(totp_auth_module._users) == 1
result2 = await totp_auth_module.async_setup_user("test-user", {})
assert len(totp_auth_module._users) == 1
assert result != result2
await totp_auth_module.async_depose_user("test-user")
assert len(totp_auth_module._users) == 0
result = await totp_auth_module.async_setup_user(
"test-user2", {"secret": "secret-code"}
)
assert result == "secret-code"
assert len(totp_auth_module._users) == 1
async def test_login_flow_validates_mfa(hass):
"""Test login flow with mfa enabled."""
hass.auth = await auth_manager_from_config(
hass,
[
{
"type": "insecure_example",
"users": [{"username": "test-user", "password": "test-pass"}],
}
],
[{"type": "totp"}],
)
user = MockUser(
id="mock-user", is_owner=False, is_active=False, name="Paulus"
).add_to_auth_manager(hass.auth)
await hass.auth.async_link_user(
user,
auth_models.Credentials(
id="mock-id",
auth_provider_type="insecure_example",
auth_provider_id=None,
data={"username": "test-user"},
is_new=False,
),
)
await hass.auth.async_enable_user_mfa(user, "totp", {})
provider = hass.auth.auth_providers[0]
result = await hass.auth.login_flow.async_init((provider.type, provider.id))
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
result = await hass.auth.login_flow.async_configure(
result["flow_id"], {"username": "incorrect-user", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await hass.auth.login_flow.async_configure(
result["flow_id"], {"username": "test-user", "password": "incorrect-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "invalid_auth"
result = await hass.auth.login_flow.async_configure(
result["flow_id"], {"username": "test-user", "password": "test-pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mfa"
assert result["data_schema"].schema.get("code") == str
with patch("pyotp.TOTP.verify", return_value=False):
result = await hass.auth.login_flow.async_configure(
result["flow_id"], {"code": "invalid-code"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mfa"
assert result["errors"]["base"] == "invalid_code"
with patch("pyotp.TOTP.verify", return_value=True):
result = await hass.auth.login_flow.async_configure(
result["flow_id"], {"code": MOCK_CODE}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"].id == "mock-user"
async def test_race_condition_in_data_loading(hass):
"""Test race condition in the data loading."""
counter = 0
async def mock_load(_):
"""Mock of homeassistant.helpers.storage.Store.async_load."""
nonlocal counter
counter += 1
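        # Yield to the event loop so both validate calls can be in flight
        # before the (mocked) storage load finishes.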
await asyncio.sleep(0)
totp_auth_module = await auth_mfa_module_from_config(hass, {"type": "totp"})
with patch("homeassistant.helpers.storage.Store.async_load", new=mock_load):
task1 = totp_auth_module.async_validate("user", {"code": "value"})
task2 = totp_auth_module.async_validate("user", {"code": "value"})
results = await asyncio.gather(task1, task2, return_exceptions=True)
assert counter == 1
assert results[0] is False
assert results[1] is False
|
import os
from django.core.cache import cache
from django.core.files.storage import default_storage
from django.test import TestCase
from import_export.tmp_storages import CacheStorage, MediaStorage, TempFolderStorage
class TempStoragesTest(TestCase):
def setUp(self):
self.test_string = b"""
id,name,author,author_email,imported,published,price,categories
2,Bar,1,,0,,,
1,Foo,,,0,,,
"""
def test_temp_folder_storage(self):
tmp_storage = TempFolderStorage()
tmp_storage.save(self.test_string)
name = tmp_storage.name
tmp_storage = TempFolderStorage(name=name)
self.assertEqual(self.test_string.decode(), tmp_storage.read())
self.assertTrue(os.path.isfile(tmp_storage.get_full_path()))
tmp_storage.remove()
self.assertFalse(os.path.isfile(tmp_storage.get_full_path()))
def test_cache_storage(self):
tmp_storage = CacheStorage()
tmp_storage.save(self.test_string)
name = tmp_storage.name
tmp_storage = CacheStorage(name=name)
self.assertEqual(self.test_string, tmp_storage.read())
self.assertNotEqual(cache.get(tmp_storage.CACHE_PREFIX,
tmp_storage.name), None)
tmp_storage.remove()
self.assertEqual(cache.get(tmp_storage.name), None)
def test_media_storage(self):
tmp_storage = MediaStorage()
tmp_storage.save(self.test_string)
name = tmp_storage.name
tmp_storage = MediaStorage(name=name)
self.assertEqual(self.test_string, tmp_storage.read())
self.assertTrue(default_storage.exists(tmp_storage.get_full_path()))
tmp_storage.remove()
self.assertFalse(default_storage.exists(tmp_storage.get_full_path()))
def test_media_storage_read_mode(self):
# issue 416 - MediaStorage does not respect the read_mode parameter.
test_string = self.test_string.replace(b'\n', b'\r')
tmp_storage = MediaStorage()
tmp_storage.save(test_string)
name = tmp_storage.name
tmp_storage = MediaStorage(name=name)
self.assertEqual(self.test_string.decode(),
tmp_storage.read(read_mode='r'))
|
import os
import unittest
import mock
from Tests.utils.utils import get_test_path
from kalliope.core import LifoManager
from kalliope.core.ConfigurationManager import BrainLoader
from kalliope.core.Lifo.LIFOBuffer import Serialize, SynapseListAddedToLIFO
from kalliope.core.Models import Singleton
from kalliope.core.Models.MatchedSynapse import MatchedSynapse
class TestLIFOBuffer(unittest.TestCase):
def setUp(self):
        # be sure the brain hasn't been instantiated before
Singleton._instances = dict()
self.brain_to_test = get_test_path("brains/lifo_buffer_test_brain.yml")
BrainLoader(file_path=self.brain_to_test)
# create a new lifo buffer
self.lifo_buffer = LifoManager.get_singleton_lifo()
self.lifo_buffer.clean()
def test_execute(self):
"""
In this test the brain contains a neurotransmitter
"""
# --------------------------------------
# Test 1. The user answers correctly to all neurotransmitter
# --------------------------------------
        # we suppose that the user order has matched the first synapse
synapse = BrainLoader().brain.get_synapse_by_name("synapse1")
order = "enter in synapse 1"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
list_matched_synapse = list()
list_matched_synapse.append(matched_synapse)
self.lifo_buffer.add_synapse_list_to_lifo(list_matched_synapse)
self.lifo_buffer.api_response.user_order = order
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
response = self.lifo_buffer.execute(is_api_call=True)
expected_result = {
'status': 'waiting_for_answer',
'matched_synapses': [
{
'matched_order': 'enter in synapse 1',
'neuron_module_list':
[
{
'neuron_name': 'Say',
'generated_message': 'question in synapse 1'
}
],
'synapse_name': 'synapse1'
}
],
'user_order': 'enter in synapse 1'
}
self.assertEqual(response, expected_result)
# give an answer
answer = "answer synapse1"
response = self.lifo_buffer.execute(answer=answer,
is_api_call=True)
expected_result = {
'status': 'waiting_for_answer',
'matched_synapses': [
{
'matched_order': 'enter in synapse 1',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'question in synapse 1'
},
{
'neuron_name': 'Neurotransmitter',
'generated_message': None
}
],
'synapse_name': 'synapse1'
},
{
'matched_order': 'answer synapse1',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'enter synapse 2'
}
],
'synapse_name': 'synapse2'
}
],
'user_order': None
}
self.assertEqual(response, expected_result)
# give the last answer
answer = "synapse5"
response = self.lifo_buffer.execute(answer=answer,
is_api_call=True)
expected_result = {
'status': 'complete',
'matched_synapses': [
{
'matched_order': 'answer synapse1',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'enter synapse 2'
},
{
'neuron_name': 'Neurotransmitter',
'generated_message': None
}
],
'synapse_name': 'synapse2'
},
{
'matched_order': 'synapse5',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'execution of synapse 5'
}
],
'synapse_name': 'synapse5'
},
{
'matched_order': 'enter in synapse 1',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'question in synapse 1'
},
{
'neuron_name': 'Neurotransmitter',
'generated_message': None
},
{
'neuron_name': 'Say',
'generated_message': 'last neuron in synapse 1'
}
],
'synapse_name': 'synapse1'
}
],
'user_order': None
}
self.assertEqual(response, expected_result)
# --------------------------------------
        # Test 2. The user doesn't answer correctly to the first neurotransmitter
# --------------------------------------
        # we suppose that the user order has matched the first synapse
synapse = BrainLoader().brain.get_synapse_by_name("synapse1")
order = "enter in synapse 1"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
list_matched_synapse = list()
list_matched_synapse.append(matched_synapse)
self.lifo_buffer.add_synapse_list_to_lifo(list_matched_synapse)
self.lifo_buffer.api_response.user_order = order
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
            # first call to enter the neurotransmitter
self.lifo_buffer.execute(is_api_call=True)
wrong_answer = "wrong answer"
response = self.lifo_buffer.execute(answer=wrong_answer, is_api_call=True)
expected_result = {
'status': 'complete',
'matched_synapses': [
{
'matched_order': 'enter in synapse 1',
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'question in synapse 1'
},
{
'neuron_name': 'Neurotransmitter',
'generated_message': None
},
{
'neuron_name': 'Say',
'generated_message': 'last neuron in synapse 1'
}
],
'synapse_name': 'synapse1'
},
{
'matched_order': None,
'neuron_module_list': [
{
'neuron_name': 'Say',
'generated_message': 'not understood'
}
],
'synapse_name': 'synapse4'
}
],
'user_order': None
}
self.assertEqual(response, expected_result)
# --------------------------------------
# Test 3. No synapse matched, we still execute the list
# --------------------------------------
list_matched_synapse = list()
self.lifo_buffer.add_synapse_list_to_lifo(list_matched_synapse)
self.lifo_buffer.api_response.user_order = "this is an order"
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
            # first call to enter the neurotransmitter
response = self.lifo_buffer.execute(is_api_call=True)
expected_result = {
'status': None,
'matched_synapses': [],
'user_order': 'this is an order'
}
self.assertEqual(response, expected_result)
def test_add_synapse_list_to_lifo(self):
"""
        Test adding a synapse list to the lifo
"""
synapse = BrainLoader().brain.get_synapse_by_name("synapse1")
order = "enter in synapse 1"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
list_matched_synapse = list()
list_matched_synapse.append(matched_synapse)
self.lifo_buffer.add_synapse_list_to_lifo(list_matched_synapse)
self.assertEqual(self.lifo_buffer.lifo_list, [list_matched_synapse])
def test_clean(self):
"""
        Test cleaning the matched synapse list
"""
synapse = BrainLoader().brain.get_synapse_by_name("synapse1")
order = "enter in synapse 1"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
list_matched_synapse = list()
list_matched_synapse.append(matched_synapse)
self.lifo_buffer.add_synapse_list_to_lifo(list_matched_synapse)
self.lifo_buffer.clean()
self.assertEqual(0, len(self.lifo_buffer.lifo_list))
def test_return_serialized_api_response(self):
"""
Test the serialization
"""
self.lifo_buffer.clean()
self.lifo_buffer.execute(is_api_call=True)
expected_result = {'status': None, 'matched_synapses': [], 'user_order': None}
response = self.lifo_buffer._return_serialized_api_response()
self.assertEqual(expected_result, response)
def test_process_synapse_list(self):
"""
        Test processing the neuron list from a synapse
"""
synapse = BrainLoader().brain.get_synapse_by_name("synapse1")
order = "enter in synapse 1"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
list_matched_synapse = list()
list_matched_synapse.append(matched_synapse)
with mock.patch("kalliope.core.Lifo.LIFOBuffer._process_neuron_list"):
self.lifo_buffer._process_synapse_list(list_matched_synapse)
expected_response = {
'status': None,
'matched_synapses': [
{
'matched_order': 'enter in synapse 1',
'neuron_module_list': [],
'synapse_name': 'synapse1'
}
],
'user_order': None
}
self.assertEqual(expected_response, self.lifo_buffer.api_response.serialize())
self.assertEqual(0, len(self.lifo_buffer.lifo_list))
def test_process_neuron_list(self):
# Test with a neuron that doesn't wait for an answer
synapse = BrainLoader().brain.get_synapse_by_name("synapse5")
order = "synapse5"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
self.lifo_buffer.set_api_call(True)
self.lifo_buffer._process_neuron_list(matched_synapse=matched_synapse)
self.assertEqual("complete", self.lifo_buffer.api_response.status)
# test with neuron that wait for an answer
LifoManager.clean_saved_lifo()
synapse = BrainLoader().brain.get_synapse_by_name("synapse6")
order = "synapse6"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
self.lifo_buffer.set_api_call(True)
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
with self.assertRaises(Serialize):
self.lifo_buffer._process_neuron_list(matched_synapse=matched_synapse)
# test with a neuron that want to add a synapse list to the LIFO
LifoManager.clean_saved_lifo()
synapse = BrainLoader().brain.get_synapse_by_name("synapse6")
order = "synapse6"
matched_synapse = MatchedSynapse(matched_synapse=synapse,
user_order=order,
matched_order=order)
self.lifo_buffer.set_api_call(True)
self.lifo_buffer.set_answer("synapse 6 answer")
with mock.patch("kalliope.core.TTS.TTSModule.generate_and_play"):
            with self.assertRaises(SynapseListAddedToLIFO):
                self.lifo_buffer._process_neuron_list(matched_synapse=matched_synapse)
if __name__ == '__main__':
unittest.main()
# suite = unittest.TestSuite()
# suite.addTest(TestLIFOBuffer("test_execute"))
# runner = unittest.TextTestRunner()
# runner.run(suite)
|
import difflib
import os.path
from typing import MutableSequence
import pygments
import pygments.lexers
import pygments.formatters
from qutebrowser.utils import standarddir
OLD_CONF = """
[general]
ignore-case = smart
startpage = https://start.duckduckgo.com
yank-ignored-url-parameters = ref,utm_source,utm_medium,utm_campaign,utm_term,utm_content
default-open-dispatcher =
default-page = ${startpage}
auto-search = naive
auto-save-config = true
auto-save-interval = 15000
editor = gvim -f "{}"
editor-encoding = utf-8
private-browsing = false
developer-extras = false
print-element-backgrounds = true
xss-auditing = false
default-encoding = iso-8859-1
new-instance-open-target = tab
new-instance-open-target.window = last-focused
log-javascript-console = debug
save-session = false
session-default-name =
url-incdec-segments = path,query
[ui]
history-session-interval = 30
zoom-levels = 25%,33%,50%,67%,75%,90%,100%,110%,125%,150%,175%,200%,250%,300%,400%,500%
default-zoom = 100%
downloads-position = top
status-position = bottom
message-timeout = 2000
message-unfocused = false
confirm-quit = never
zoom-text-only = false
frame-flattening = false
user-stylesheet =
hide-scrollbar = true
smooth-scrolling = false
remove-finished-downloads = -1
hide-statusbar = false
statusbar-padding = 1,1,0,0
window-title-format = {perc}{title}{title_sep}qutebrowser
modal-js-dialog = false
hide-wayland-decoration = false
keyhint-blacklist =
keyhint-delay = 500
prompt-radius = 8
prompt-filebrowser = true
[network]
do-not-track = true
accept-language = en-US,en
referer-header = same-domain
user-agent =
proxy = system
proxy-dns-requests = true
ssl-strict = ask
dns-prefetch = true
custom-headers =
netrc-file =
[completion]
show = always
download-path-suggestion = path
timestamp-format = %Y-%m-%d
height = 50%
cmd-history-max-items = 100
web-history-max-items = -1
quick-complete = true
shrink = false
scrollbar-width = 12
scrollbar-padding = 2
[input]
timeout = 500
partial-timeout = 5000
insert-mode-on-plugins = false
auto-leave-insert-mode = true
auto-insert-mode = false
forward-unbound-keys = auto
spatial-navigation = false
links-included-in-focus-chain = true
rocker-gestures = false
mouse-zoom-divider = 512
[tabs]
background-tabs = false
select-on-remove = next
new-tab-position = next
new-tab-position-explicit = last
last-close = ignore
show = always
show-switching-delay = 800
wrap = true
movable = true
close-mouse-button = middle
position = top
show-favicons = true
favicon-scale = 1.0
width = 20%
pinned-width = 43
indicator-width = 3
tabs-are-windows = false
title-format = {index}: {title}
title-format-pinned = {index}
title-alignment = left
mousewheel-tab-switching = true
padding = 0,0,5,5
indicator-padding = 2,2,0,4
[storage]
download-directory =
prompt-download-directory = true
remember-download-directory = true
maximum-pages-in-cache = 0
offline-web-application-cache = true
local-storage = true
cache-size =
[content]
allow-images = true
allow-javascript = true
allow-plugins = false
webgl = true
hyperlink-auditing = false
geolocation = ask
notifications = ask
media-capture = ask
javascript-can-open-windows-automatically = false
javascript-can-close-windows = false
javascript-can-access-clipboard = false
ignore-javascript-prompt = false
ignore-javascript-alert = false
local-content-can-access-remote-urls = false
local-content-can-access-file-urls = true
cookies-accept = no-3rdparty
cookies-store = true
host-block-lists = https://www.malwaredomainlist.com/hostslist/hosts.txt,http://someonewhocares.org/hosts/hosts,http://winhelp2002.mvps.org/hosts.zip,http://malwaredomains.lehigh.edu/files/justdomains.zip,https://pgl.yoyo.org/adservers/serverlist.php?hostformat=hosts&mimetype=plaintext
host-blocking-enabled = true
host-blocking-whitelist = piwik.org
enable-pdfjs = false
[hints]
border = 1px solid #E3BE23
mode = letter
chars = asdfghjkl
min-chars = 1
scatter = true
uppercase = false
dictionary = /usr/share/dict/words
auto-follow = unique-match
auto-follow-timeout = 0
next-regexes = \\bnext\\b,\\bmore\\b,\\bnewer\\b,\\b[>\u2192\u226b]\\b,\\b(>>|\xbb)\\b,\\bcontinue\\b
prev-regexes = \\bprev(ious)?\\b,\\bback\\b,\\bolder\\b,\\b[<\u2190\u226a]\\b,\\b(<<|\xab)\\b
find-implementation = python
hide-unmatched-rapid-hints = true
[searchengines]
DEFAULT = https://duckduckgo.com/?q={}
[aliases]
[colors]
completion.fg = white
completion.bg = #333333
completion.alternate-bg = #444444
completion.category.fg = white
completion.category.bg = qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 #888888, stop:1 #505050)
completion.category.border.top = black
completion.category.border.bottom = ${completion.category.border.top}
completion.item.selected.fg = black
completion.item.selected.bg = #e8c000
completion.item.selected.border.top = #bbbb00
completion.item.selected.border.bottom = ${completion.item.selected.border.top}
completion.match.fg = #ff4444
completion.scrollbar.fg = ${completion.fg}
completion.scrollbar.bg = ${completion.bg}
statusbar.fg = white
statusbar.bg = black
statusbar.fg.private = ${statusbar.fg}
statusbar.bg.private = #666666
statusbar.fg.insert = ${statusbar.fg}
statusbar.bg.insert = darkgreen
statusbar.fg.command = ${statusbar.fg}
statusbar.bg.command = ${statusbar.bg}
statusbar.fg.command.private = ${statusbar.fg.private}
statusbar.bg.command.private = ${statusbar.bg.private}
statusbar.fg.caret = ${statusbar.fg}
statusbar.bg.caret = purple
statusbar.fg.caret-selection = ${statusbar.fg}
statusbar.bg.caret-selection = #a12dff
statusbar.progress.bg = white
statusbar.url.fg = ${statusbar.fg}
statusbar.url.fg.success = white
statusbar.url.fg.success.https = lime
statusbar.url.fg.error = orange
statusbar.url.fg.warn = yellow
statusbar.url.fg.hover = aqua
tabs.fg.odd = white
tabs.bg.odd = grey
tabs.fg.even = white
tabs.bg.even = darkgrey
tabs.fg.selected.odd = white
tabs.bg.selected.odd = black
tabs.fg.selected.even = ${tabs.fg.selected.odd}
tabs.bg.selected.even = ${tabs.bg.selected.odd}
tabs.bg.bar = #555555
tabs.indicator.start = #0000aa
tabs.indicator.stop = #00aa00
tabs.indicator.error = #ff0000
tabs.indicator.system = rgb
hints.fg = black
hints.bg = qlineargradient(x1:0, y1:0, x2:0, y2:1, stop:0 rgba(255, 247, 133, 0.8), stop:1 rgba(255, 197, 66, 0.8))
hints.fg.match = green
downloads.bg.bar = black
downloads.fg.start = white
downloads.bg.start = #0000aa
downloads.fg.stop = ${downloads.fg.start}
downloads.bg.stop = #00aa00
downloads.fg.system = rgb
downloads.bg.system = rgb
downloads.fg.error = white
downloads.bg.error = red
webpage.bg = white
keyhint.fg = #FFFFFF
keyhint.fg.suffix = #FFFF00
keyhint.bg = rgba(0, 0, 0, 80%)
messages.fg.error = white
messages.bg.error = red
messages.border.error = #bb0000
messages.fg.warning = white
messages.bg.warning = darkorange
messages.border.warning = #d47300
messages.fg.info = white
messages.bg.info = black
messages.border.info = #333333
prompts.fg = white
prompts.bg = darkblue
prompts.selected.bg = #308cc6
[fonts]
_monospace = xos4 Terminus, Terminus, Monospace, "DejaVu Sans Mono", Monaco, "Bitstream Vera Sans Mono", "Andale Mono", "Courier New", Courier, "Liberation Mono", monospace, Fixed, Consolas, Terminal
completion = 8pt ${_monospace}
completion.category = bold ${completion}
tabbar = 8pt ${_monospace}
statusbar = 8pt ${_monospace}
downloads = 8pt ${_monospace}
hints = bold 13px ${_monospace}
debug-console = 8pt ${_monospace}
web-family-standard =
web-family-fixed =
web-family-serif =
web-family-sans-serif =
web-family-cursive =
web-family-fantasy =
web-size-minimum = 0
web-size-minimum-logical = 6
web-size-default = 16
web-size-default-fixed = 13
keyhint = 8pt ${_monospace}
messages.error = 8pt ${_monospace}
messages.warning = 8pt ${_monospace}
messages.info = 8pt ${_monospace}
prompts = 8pt sans-serif
"""
OLD_KEYS_CONF = """
[!normal]
leave-mode
<escape>
<ctrl-[>
[normal]
clear-keychain ;; search ;; fullscreen --leave
<escape>
<ctrl-[>
set-cmd-text -s :open
o
set-cmd-text :open {url:pretty}
go
set-cmd-text -s :open -t
O
set-cmd-text :open -t -i {url:pretty}
gO
set-cmd-text -s :open -b
xo
set-cmd-text :open -b -i {url:pretty}
xO
set-cmd-text -s :open -w
wo
set-cmd-text :open -w {url:pretty}
wO
set-cmd-text /
/
set-cmd-text ?
?
set-cmd-text :
:
open -t
ga
<ctrl-t>
open -w
<ctrl-n>
tab-close
d
<ctrl-w>
tab-close -o
D
tab-only
co
tab-focus
T
tab-move
gm
tab-move -
gl
tab-move +
gr
tab-next
J
<ctrl-pgdown>
tab-prev
K
<ctrl-pgup>
tab-clone
gC
reload
r
<f5>
reload -f
R
<ctrl-f5>
back
H
<back>
back -t
th
back -w
wh
forward
L
<forward>
forward -t
tl
forward -w
wl
fullscreen
<f11>
hint
f
hint all tab
F
hint all window
wf
hint all tab-bg
;b
hint all tab-fg
;f
hint all hover
;h
hint images
;i
hint images tab
;I
hint links fill :open {hint-url}
;o
hint links fill :open -t -i {hint-url}
;O
hint links yank
;y
hint links yank-primary
;Y
hint --rapid links tab-bg
;r
hint --rapid links window
;R
hint links download
;d
hint inputs
;t
scroll left
h
scroll down
j
scroll up
k
scroll right
l
undo
u
<ctrl-shift-t>
scroll-perc 0
gg
scroll-perc
G
search-next
n
search-prev
N
enter-mode insert
i
enter-mode caret
v
enter-mode set_mark
`
enter-mode jump_mark
'
yank
yy
yank -s
yY
yank title
yt
yank title -s
yT
yank domain
yd
yank domain -s
yD
yank pretty-url
yp
yank pretty-url -s
yP
open -- {clipboard}
pp
open -- {primary}
pP
open -t -- {clipboard}
Pp
open -t -- {primary}
PP
open -w -- {clipboard}
wp
open -w -- {primary}
wP
quickmark-save
m
set-cmd-text -s :quickmark-load
b
set-cmd-text -s :quickmark-load -t
B
set-cmd-text -s :quickmark-load -w
wb
bookmark-add
M
set-cmd-text -s :bookmark-load
gb
set-cmd-text -s :bookmark-load -t
gB
set-cmd-text -s :bookmark-load -w
wB
save
sf
set-cmd-text -s :set
ss
set-cmd-text -s :set -t
sl
set-cmd-text -s :bind
sk
zoom-out
-
zoom-in
+
zoom
=
navigate prev
[[
navigate next
]]
navigate prev -t
{{
navigate next -t
}}
navigate up
gu
navigate up -t
gU
navigate increment
<ctrl-a>
navigate decrement
<ctrl-x>
inspector
wi
download
gd
download-cancel
ad
download-clear
cd
view-source
gf
set-cmd-text -s :buffer
gt
tab-focus last
<ctrl-tab>
<ctrl-6>
<ctrl-^>
enter-mode passthrough
<ctrl-v>
quit
<ctrl-q>
ZQ
wq
ZZ
scroll-page 0 1
<ctrl-f>
scroll-page 0 -1
<ctrl-b>
scroll-page 0 0.5
<ctrl-d>
scroll-page 0 -0.5
<ctrl-u>
tab-focus 1
<alt-1>
g0
g^
tab-focus 2
<alt-2>
tab-focus 3
<alt-3>
tab-focus 4
<alt-4>
tab-focus 5
<alt-5>
tab-focus 6
<alt-6>
tab-focus 7
<alt-7>
tab-focus 8
<alt-8>
tab-focus -1
<alt-9>
g$
home
<ctrl-h>
stop
<ctrl-s>
print
<ctrl-alt-p>
open qute://settings
Ss
follow-selected
<return>
<ctrl-m>
<ctrl-j>
<shift-return>
<enter>
<shift-enter>
follow-selected -t
<ctrl-return>
<ctrl-enter>
repeat-command
.
tab-pin
<ctrl-p>
record-macro
q
run-macro
@
[insert]
open-editor
<ctrl-e>
insert-text {primary}
<shift-ins>
[hint]
follow-hint
<return>
<ctrl-m>
<ctrl-j>
<shift-return>
<enter>
<shift-enter>
hint --rapid links tab-bg
<ctrl-r>
hint links
<ctrl-f>
hint all tab-bg
<ctrl-b>
[passthrough]
[command]
command-history-prev
<ctrl-p>
command-history-next
<ctrl-n>
completion-item-focus prev
<shift-tab>
<up>
completion-item-focus next
<tab>
<down>
completion-item-focus next-category
<ctrl-tab>
completion-item-focus prev-category
<ctrl-shift-tab>
completion-item-del
<ctrl-d>
command-accept
<return>
<ctrl-m>
<ctrl-j>
<shift-return>
<enter>
<shift-enter>
[prompt]
prompt-accept
<return>
<ctrl-m>
<ctrl-j>
<shift-return>
<enter>
<shift-enter>
prompt-accept yes
y
prompt-accept no
n
prompt-open-download
<ctrl-x>
prompt-item-focus prev
<shift-tab>
<up>
prompt-item-focus next
<tab>
<down>
[command,prompt]
rl-backward-char
<ctrl-b>
rl-forward-char
<ctrl-f>
rl-backward-word
<alt-b>
rl-forward-word
<alt-f>
rl-beginning-of-line
<ctrl-a>
rl-end-of-line
<ctrl-e>
rl-unix-line-discard
<ctrl-u>
rl-kill-line
<ctrl-k>
rl-kill-word
<alt-d>
rl-unix-word-rubout
<ctrl-w>
rl-backward-kill-word
<alt-backspace>
rl-yank
<ctrl-y>
rl-delete-char
<ctrl-?>
rl-backward-delete-char
<ctrl-h>
[caret]
toggle-selection
v
<space>
drop-selection
<ctrl-space>
enter-mode normal
c
move-to-next-line
j
move-to-prev-line
k
move-to-next-char
l
move-to-prev-char
h
move-to-end-of-word
e
move-to-next-word
w
move-to-prev-word
b
move-to-start-of-next-block
]
move-to-start-of-prev-block
[
move-to-end-of-next-block
}
move-to-end-of-prev-block
{
move-to-start-of-line
0
move-to-end-of-line
$
move-to-start-of-document
gg
move-to-end-of-document
G
yank selection -s
Y
yank selection
y
<return>
<ctrl-m>
<ctrl-j>
<shift-return>
<enter>
<shift-enter>
scroll left
H
scroll down
J
scroll up
K
scroll right
L
"""
def get_diff() -> str:
"""Get a HTML diff for the old config files."""
old_conf_lines: MutableSequence[str] = []
old_key_lines: MutableSequence[str] = []
for filename, dest in [('qutebrowser.conf', old_conf_lines),
('keys.conf', old_key_lines)]:
path = os.path.join(standarddir.config(), filename)
with open(path, 'r', encoding='utf-8') as f:
for line in f:
if not line.strip() or line.startswith('#'):
continue
dest.append(line.rstrip())
conf_delta = difflib.unified_diff(OLD_CONF.lstrip().splitlines(),
old_conf_lines)
key_delta = difflib.unified_diff(OLD_KEYS_CONF.lstrip().splitlines(),
old_key_lines)
conf_diff = '\n'.join(conf_delta)
key_diff = '\n'.join(key_delta)
# pylint: disable=no-member
# WORKAROUND for https://bitbucket.org/logilab/pylint/issue/491/
lexer = pygments.lexers.DiffLexer()
formatter = pygments.formatters.HtmlFormatter(
full=True, linenos='table',
title='Diffing pre-1.0 default config with pre-1.0 modified config')
# pylint: enable=no-member
return pygments.highlight(conf_diff + key_diff, lexer, formatter)
|
from django.test import TestCase
from django.urls import reverse
from .models import Setting
from .views import CustomCSSView
class SettingsTestCase(TestCase):
def setUp(self):
super().setUp()
CustomCSSView.drop_cache()
def test_blank(self):
response = self.client.get(reverse("css-custom"))
self.assertEqual(response.content.decode().strip(), "")
def test_cache(self):
Setting.objects.create(
category=Setting.CATEGORY_UI, name="hide_footer", value=True
)
response = self.client.get(reverse("css-custom"))
self.assertNotEqual(response.content.decode().strip(), "")
# Delete all settings
Setting.objects.all().delete()
# The response should be cached
response = self.client.get(reverse("css-custom"))
self.assertNotEqual(response.content.decode().strip(), "")
# Invalidate cache
CustomCSSView.drop_cache()
response = self.client.get(reverse("css-custom"))
self.assertEqual(response.content.decode().strip(), "")
|
import pytest
from tests.async_mock import AsyncMock, patch
@pytest.fixture
def mock_weather():
"""Mock weather data."""
with patch("metno.MetWeatherData") as mock_data:
mock_data = mock_data.return_value
mock_data.fetching_data = AsyncMock(return_value=True)
mock_data.get_current_weather.return_value = {
"condition": "cloudy",
"temperature": 15,
"pressure": 100,
"humidity": 50,
"wind_speed": 10,
"wind_bearing": "NE",
}
mock_data.get_forecast.return_value = {}
yield mock_data
|
import asyncio
from datetime import timedelta
from math import ceil
from pyairvisual import CloudAPI, NodeSamba
from pyairvisual.errors import (
AirVisualError,
InvalidKeyError,
KeyExpiredError,
NodeProError,
)
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_IP_ADDRESS,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_SHOW_ON_MAP,
CONF_STATE,
)
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
CONF_CITY,
CONF_COUNTRY,
CONF_GEOGRAPHIES,
CONF_INTEGRATION_TYPE,
DATA_COORDINATOR,
DOMAIN,
INTEGRATION_TYPE_GEOGRAPHY,
INTEGRATION_TYPE_NODE_PRO,
LOGGER,
)
PLATFORMS = ["air_quality", "sensor"]
DATA_LISTENER = "listener"
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
DEFAULT_NODE_PRO_UPDATE_INTERVAL = timedelta(minutes=1)
DEFAULT_OPTIONS = {CONF_SHOW_ON_MAP: True}
GEOGRAPHY_COORDINATES_SCHEMA = vol.Schema(
{
vol.Required(CONF_LATITUDE): cv.latitude,
vol.Required(CONF_LONGITUDE): cv.longitude,
}
)
GEOGRAPHY_PLACE_SCHEMA = vol.Schema(
{
vol.Required(CONF_CITY): cv.string,
vol.Required(CONF_STATE): cv.string,
vol.Required(CONF_COUNTRY): cv.string,
}
)
CLOUD_API_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_GEOGRAPHIES, default=[]): vol.All(
cv.ensure_list,
[vol.Any(GEOGRAPHY_COORDINATES_SCHEMA, GEOGRAPHY_PLACE_SCHEMA)],
),
}
)
CONFIG_SCHEMA = vol.Schema({DOMAIN: CLOUD_API_SCHEMA}, extra=vol.ALLOW_EXTRA)
@callback
def async_get_geography_id(geography_dict):
"""Generate a unique ID from a geography dict."""
if not geography_dict:
return
if CONF_CITY in geography_dict:
return ", ".join(
(
geography_dict[CONF_CITY],
geography_dict[CONF_STATE],
geography_dict[CONF_COUNTRY],
)
)
return ", ".join(
(str(geography_dict[CONF_LATITUDE]), str(geography_dict[CONF_LONGITUDE]))
)
@callback
def async_get_cloud_api_update_interval(hass, api_key, num_consumers):
"""Get a leveled scan interval for a particular cloud API key.
This will shift based on the number of active consumers, thus keeping the user
under the monthly API limit.
"""
# Assuming 10,000 calls per month and a "smallest possible month" of 28 days; note
# that we give a buffer of 1500 API calls for any drift, restarts, etc.:
minutes_between_api_calls = ceil(1 / (8500 / 28 / 24 / 60 / num_consumers))
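    # With one consumer this works out to ceil(1 / (8500 / 28 / 24 / 60)) == 5
    # minutes between calls; two consumers get 10 minutes, and so on.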
LOGGER.debug(
"Leveling API key usage (%s): %s consumers, %s minutes between updates",
api_key,
num_consumers,
minutes_between_api_calls,
)
return timedelta(minutes=minutes_between_api_calls)
@callback
def async_get_cloud_coordinators_by_api_key(hass, api_key):
"""Get all DataUpdateCoordinator objects related to a particular API key."""
coordinators = []
for entry_id, coordinator in hass.data[DOMAIN][DATA_COORDINATOR].items():
config_entry = hass.config_entries.async_get_entry(entry_id)
if config_entry.data.get(CONF_API_KEY) == api_key:
coordinators.append(coordinator)
return coordinators
@callback
def async_sync_geo_coordinator_update_intervals(hass, api_key):
"""Sync the update interval for geography-based data coordinators (by API key)."""
coordinators = async_get_cloud_coordinators_by_api_key(hass, api_key)
if not coordinators:
return
update_interval = async_get_cloud_api_update_interval(
hass, api_key, len(coordinators)
)
for coordinator in coordinators:
LOGGER.debug(
"Updating interval for coordinator: %s, %s",
coordinator.name,
update_interval,
)
coordinator.update_interval = update_interval
async def async_setup(hass, config):
"""Set up the AirVisual component."""
hass.data[DOMAIN] = {DATA_COORDINATOR: {}, DATA_LISTENER: {}}
if DOMAIN not in config:
return True
conf = config[DOMAIN]
for geography in conf.get(
CONF_GEOGRAPHIES,
[{CONF_LATITUDE: hass.config.latitude, CONF_LONGITUDE: hass.config.longitude}],
):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_API_KEY: conf[CONF_API_KEY], **geography},
)
)
return True
@callback
def _standardize_geography_config_entry(hass, config_entry):
"""Ensure that geography config entries have appropriate properties."""
entry_updates = {}
if not config_entry.unique_id:
# If the config entry doesn't already have a unique ID, set one:
entry_updates["unique_id"] = config_entry.data[CONF_API_KEY]
if not config_entry.options:
# If the config entry doesn't already have any options set, set defaults:
entry_updates["options"] = {CONF_SHOW_ON_MAP: True}
if CONF_INTEGRATION_TYPE not in config_entry.data:
# If the config entry data doesn't contain the integration type, add it:
entry_updates["data"] = {
**config_entry.data,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
}
if not entry_updates:
return
hass.config_entries.async_update_entry(config_entry, **entry_updates)
@callback
def _standardize_node_pro_config_entry(hass, config_entry):
"""Ensure that Node/Pro config entries have appropriate properties."""
entry_updates = {}
if CONF_INTEGRATION_TYPE not in config_entry.data:
# If the config entry data doesn't contain the integration type, add it:
entry_updates["data"] = {
**config_entry.data,
CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
}
if not entry_updates:
return
hass.config_entries.async_update_entry(config_entry, **entry_updates)
async def async_setup_entry(hass, config_entry):
"""Set up AirVisual as config entry."""
if CONF_API_KEY in config_entry.data:
_standardize_geography_config_entry(hass, config_entry)
websession = aiohttp_client.async_get_clientsession(hass)
cloud_api = CloudAPI(config_entry.data[CONF_API_KEY], session=websession)
async def async_update_data():
"""Get new data from the API."""
if CONF_CITY in config_entry.data:
api_coro = cloud_api.air_quality.city(
config_entry.data[CONF_CITY],
config_entry.data[CONF_STATE],
config_entry.data[CONF_COUNTRY],
)
else:
api_coro = cloud_api.air_quality.nearest_city(
config_entry.data[CONF_LATITUDE],
config_entry.data[CONF_LONGITUDE],
)
try:
return await api_coro
except (InvalidKeyError, KeyExpiredError):
matching_flows = [
flow
for flow in hass.config_entries.flow.async_progress()
if flow["context"]["source"] == SOURCE_REAUTH
and flow["context"]["unique_id"] == config_entry.unique_id
]
if not matching_flows:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": SOURCE_REAUTH,
"unique_id": config_entry.unique_id,
},
data=config_entry.data,
)
)
return {}
except AirVisualError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name=async_get_geography_id(config_entry.data),
# We give a placeholder update interval in order to create the coordinator;
# then, below, we use the coordinator's presence (along with any other
# coordinators using the same API key) to calculate an actual, leveled
# update interval:
update_interval=timedelta(minutes=5),
update_method=async_update_data,
)
hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
async_sync_geo_coordinator_update_intervals(
hass, config_entry.data[CONF_API_KEY]
)
# Only geography-based entries have options:
hass.data[DOMAIN][DATA_LISTENER][
config_entry.entry_id
] = config_entry.add_update_listener(async_reload_entry)
else:
_standardize_node_pro_config_entry(hass, config_entry)
async def async_update_data():
"""Get new data from the API."""
try:
async with NodeSamba(
config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PASSWORD]
) as node:
return await node.async_get_latest_measurements()
except NodeProError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name="Node/Pro data",
update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL,
update_method=async_update_data,
)
hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
await coordinator.async_refresh()
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_migrate_entry(hass, config_entry):
"""Migrate an old config entry."""
version = config_entry.version
LOGGER.debug("Migrating from version %s", version)
# 1 -> 2: One geography per config entry
if version == 1:
version = config_entry.version = 2
# Update the config entry to only include the first geography (there is always
# guaranteed to be at least one):
geographies = list(config_entry.data[CONF_GEOGRAPHIES])
first_geography = geographies.pop(0)
first_id = async_get_geography_id(first_geography)
hass.config_entries.async_update_entry(
config_entry,
unique_id=first_id,
title=f"Cloud API ({first_id})",
data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **first_geography},
)
# For any geographies that remain, create a new config entry for each one:
for geography in geographies:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **geography},
)
)
LOGGER.info("Migration to version %s successful", version)
return True
async def async_unload_entry(hass, config_entry):
"""Unload an AirVisual config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
remove_listener()
if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
# Re-calculate the update interval period for any remaining consumers of
# this API key:
async_sync_geo_coordinator_update_intervals(
hass, config_entry.data[CONF_API_KEY]
)
return unload_ok
async def async_reload_entry(hass, config_entry):
"""Handle an options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
class AirVisualEntity(CoordinatorEntity):
"""Define a generic AirVisual entity."""
def __init__(self, coordinator):
"""Initialize."""
super().__init__(coordinator)
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._icon = None
self._unit = None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()
self.async_on_remove(self.coordinator.async_add_listener(update))
self.update_from_latest_data()
@callback
def update_from_latest_data(self):
"""Update the entity from the latest data."""
raise NotImplementedError
|
from openrazer_daemon.dbus_services import endpoint
def set_led_effect_color_common(self, zone: str, effect: str, red: int, green: int, blue: int) -> None:
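    """
    Write the colour bytes and the effect value to the zone's LED driver files.
    """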
rgb_driver_path = self.get_driver_path(zone + '_led_rgb')
effect_driver_path = self.get_driver_path(zone + '_led_effect')
payload = bytes([red, green, blue])
with open(rgb_driver_path, 'wb') as rgb_driver_file, open(effect_driver_path, 'w') as effect_driver_file:
rgb_driver_file.write(payload)
effect_driver_file.write(effect)
def set_led_effect_common(self, zone: str, effect: str) -> None:
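    """
    Write the effect value to the zone's LED effect driver file.
    """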
driver_path = self.get_driver_path(zone + '_led_effect')
with open(driver_path, 'w') as driver_file:
driver_file.write(effect)
@endpoint('razer.device.lighting.backlight', 'getBacklightActive', out_sig='b')
def get_backlight_active(self):
"""
Get if the backlight is lit up
:return: Active
:rtype: bool
"""
self.logger.debug("DBus call get_backlight_active")
return self.zone["backlight"]["active"]
@endpoint('razer.device.lighting.backlight', 'setBacklightActive', in_sig='b')
def set_backlight_active(self, active):
"""
    Set if the backlight is lit up
:param active: Is active
:type active: bool
"""
self.logger.debug("DBus call set_backlight_active")
# remember status
self.set_persistence("backlight", "active", bool(active))
driver_path = self.get_driver_path('backlight_led_state')
with open(driver_path, 'w') as driver_file:
if active:
driver_file.write('1')
else:
driver_file.write('0')
@endpoint('razer.device.lighting.logo', 'getLogoActive', out_sig='b')
def get_logo_active(self):
"""
Get if the logo is lit up
:return: Active
:rtype: bool
"""
self.logger.debug("DBus call get_logo_active")
return self.zone["logo"]["active"]
@endpoint('razer.device.lighting.logo', 'setLogoActive', in_sig='b')
def set_logo_active(self, active):
"""
Set if the logo is lit up
:param active: Is active
:type active: bool
"""
self.logger.debug("DBus call set_logo_active")
# remember status
self.set_persistence("logo", "active", bool(active))
driver_path = self.get_driver_path('logo_led_state')
with open(driver_path, 'w') as driver_file:
driver_file.write('1' if active else '0')
@endpoint('razer.device.lighting.logo', 'getLogoBrightness', out_sig='d')
def get_logo_brightness(self):
"""
    Get the logo LED's brightness
:return: Brightness
:rtype: float
"""
self.logger.debug("DBus call get_logo_brightness")
return self.zone["logo"]["brightness"]
@endpoint('razer.device.lighting.logo', 'setLogoBrightness', in_sig='d')
def set_logo_brightness(self, brightness):
"""
    Set the logo LED's brightness
    :param brightness: Brightness
    :type brightness: float
"""
self.logger.debug("DBus call set_logo_brightness")
driver_path = self.get_driver_path('logo_led_brightness')
self.method_args['brightness'] = brightness
if brightness > 255:
brightness = 255
elif brightness < 0:
brightness = 0
self.set_persistence("logo", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.logo', 'setLogoStatic', in_sig='yyy')
def set_logo_static(self, red, green, blue):
"""
Set the device to static colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_static_effect")
# Notify others
self.send_effect_event('setStatic', red, green, blue)
# remember effect
self.set_persistence("logo", "effect", 'static')
self.zone["logo"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'logo', '0', red, green, blue)
@endpoint('razer.device.lighting.logo', 'setLogoStaticMono')
def set_logo_static_mono(self):
"""
Set the device to static colour
"""
self.logger.debug("DBus call set_logo_static_mono")
# Notify others
self.send_effect_event('setStatic')
set_led_effect_common(self, 'logo', '0')
@endpoint('razer.device.lighting.logo', 'setLogoBlinking', in_sig='yyy')
def set_logo_blinking(self, red, green, blue):
"""
Set the device to blinking
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_logo_blinking")
# Notify others
self.send_effect_event('setLogoBlinking', red, green, blue)
# remember effect
self.set_persistence("logo", "effect", 'blinking')
self.zone["logo"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'logo', '1', red, green, blue)
@endpoint('razer.device.lighting.logo', 'setLogoPulsate', in_sig='yyy')
def set_logo_pulsate(self, red, green, blue):
"""
Set the device to pulsate
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_logo_pulsing")
# Notify others
self.send_effect_event('setPulsate', red, green, blue)
# remember effect
self.set_persistence("logo", "effect", 'pulsate')
self.zone["logo"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'logo', '2', red, green, blue)
@endpoint('razer.device.lighting.logo', 'setLogoPulsateMono')
def set_logo_pulsate_mono(self):
"""
Set the device to pulsate
"""
self.logger.debug("DBus call set_logo_pulsate_mono")
# Notify others
self.send_effect_event('setPulsate')
set_led_effect_common(self, 'logo', '2')
@endpoint('razer.device.lighting.logo', 'setLogoSpectrum')
def set_logo_spectrum(self):
"""
Set the device to spectrum
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_logo_spectrum")
# Notify others
self.send_effect_event('setSpectrum')
# remember effect
self.set_persistence("logo", "effect", 'spectrum')
set_led_effect_common(self, 'logo', '4')
@endpoint('razer.device.lighting.scroll', 'getScrollActive', out_sig='b')
def get_scroll_active(self):
"""
    Get if the scroll wheel is lit up
:return: Active
:rtype: bool
"""
self.logger.debug("DBus call get_scroll_active")
return self.zone["scroll"]["active"]
@endpoint('razer.device.lighting.scroll', 'setScrollActive', in_sig='b')
def set_scroll_active(self, active):
"""
    Set if the scroll wheel is lit up
:param active: Is active
:type active: bool
"""
self.logger.debug("DBus call set_scroll_active")
# remember status
self.set_persistence("scroll", "active", bool(active))
driver_path = self.get_driver_path('scroll_led_state')
with open(driver_path, 'w') as driver_file:
driver_file.write('1' if active else '0')
@endpoint('razer.device.lighting.scroll', 'getScrollBrightness', out_sig='d')
def get_scroll_brightness(self):
"""
Get the device's brightness
:return: Brightness
:rtype: float
"""
self.logger.debug("DBus call get_scroll_brightness")
return self.zone["scroll"]["brightness"]
@endpoint('razer.device.lighting.scroll', 'setScrollBrightness', in_sig='d')
def set_scroll_brightness(self, brightness):
"""
Set the device's brightness
:param brightness: Brightness
:type brightness: int
"""
self.logger.debug("DBus call set_scroll_brightness")
driver_path = self.get_driver_path('scroll_led_brightness')
self.method_args['brightness'] = brightness
    if brightness > 100:
        brightness = 100
    elif brightness < 0:
        brightness = 0
self.set_persistence("scroll", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.scroll', 'setScrollStatic', in_sig='yyy')
def set_scroll_static(self, red, green, blue):
"""
Set the device to static colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_scroll_static")
# Notify others
self.send_effect_event('setStatic', red, green, blue)
# remember effect
self.set_persistence("scroll", "effect", 'static')
self.zone["scroll"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'scroll', '0', red, green, blue)
@endpoint('razer.device.lighting.scroll', 'setScrollStaticMono')
def set_scroll_static_mono(self):
"""
Set the device to static colour
"""
self.logger.debug("DBus call set_scroll_static_mono")
# Notify others
self.send_effect_event('setStatic')
set_led_effect_common(self, 'scroll', '0')
@endpoint('razer.device.lighting.scroll', 'setScrollBlinking', in_sig='yyy')
def set_scroll_blinking(self, red, green, blue):
"""
Set the device to blinking
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_scroll_pulsate")
# Notify others
self.send_effect_event('setPulsate', red, green, blue)
# remember effect
self.set_persistence("scroll", "effect", 'blinking')
self.zone["scroll"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'scroll', '1', red, green, blue)
@endpoint('razer.device.lighting.scroll', 'setScrollPulsate', in_sig='yyy')
def set_scroll_pulsate(self, red, green, blue):
"""
Set the device to pulsate
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_scroll_breathing")
# Notify others
self.send_effect_event('setPulsate', red, green, blue)
# remember effect
self.set_persistence("scroll", "effect", 'pulsate')
self.zone["scroll"]["colors"][0:3] = int(red), int(green), int(blue)
set_led_effect_color_common(self, 'scroll', '2', red, green, blue)
@endpoint('razer.device.lighting.scroll', 'setScrollPulsateMono')
def set_scroll_pulsate_mono(self):
"""
Set the device to pulsate
"""
self.logger.debug("DBus call set_scroll_pulsate_mono")
# Notify others
self.send_effect_event('setPulsate')
set_led_effect_common(self, 'scroll', '2')
@endpoint('razer.device.lighting.scroll', 'setScrollSpectrum')
def set_scroll_spectrum(self):
"""
Set the device to spectrum
"""
self.logger.debug("DBus call set_scroll_spectrum")
# Notify others
self.send_effect_event('setSpectrum')
# remember effect
self.set_persistence("scroll", "effect", 'spectrum')
set_led_effect_common(self, 'scroll', '4')
|
import os
import cherrypy
from cherrypy import tools
from cherrypy.test import helper
localDir = os.path.dirname(__file__)
logfile = os.path.join(localDir, 'test_misc_tools.log')
def setup_server():
class Root:
@cherrypy.expose
def index(self):
yield 'Hello, world'
h = [('Content-Language', 'en-GB'), ('Content-Type', 'text/plain')]
tools.response_headers(headers=h)(index)
@cherrypy.expose
@cherrypy.config(**{
'tools.response_headers.on': True,
'tools.response_headers.headers': [
('Content-Language', 'fr'),
('Content-Type', 'text/plain'),
],
'tools.log_hooks.on': True,
})
def other(self):
return 'salut'
@cherrypy.config(**{'tools.accept.on': True})
class Accept:
@cherrypy.expose
def index(self):
return '<a href="feed">Atom feed</a>'
@cherrypy.expose
@tools.accept(media='application/atom+xml')
def feed(self):
return """<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<title>Unknown Blog</title>
</feed>"""
@cherrypy.expose
def select(self):
# We could also write this: mtype = cherrypy.lib.accept.accept(...)
mtype = tools.accept.callable(['text/html', 'text/plain'])
if mtype == 'text/html':
return '<h2>Page Title</h2>'
else:
return 'PAGE TITLE'
class Referer:
@cherrypy.expose
def accept(self):
return 'Accepted!'
reject = accept
class AutoVary:
@cherrypy.expose
def index(self):
# Read a header directly with 'get'
cherrypy.request.headers.get('Accept-Encoding')
# Read a header directly with '__getitem__'
cherrypy.request.headers['Host']
# Read a header directly with '__contains__'
'If-Modified-Since' in cherrypy.request.headers
# Read a header directly
'Range' in cherrypy.request.headers
# Call a lib function
tools.accept.callable(['text/html', 'text/plain'])
return 'Hello, world!'
conf = {'/referer': {'tools.referer.on': True,
'tools.referer.pattern': r'http://[^/]*example\.com',
},
'/referer/reject': {'tools.referer.accept': False,
'tools.referer.accept_missing': True,
},
'/autovary': {'tools.autovary.on': True},
}
root = Root()
root.referer = Referer()
root.accept = Accept()
root.autovary = AutoVary()
cherrypy.tree.mount(root, config=conf)
cherrypy.config.update({'log.error_file': logfile})
class ResponseHeadersTest(helper.CPWebCase):
setup_server = staticmethod(setup_server)
def testResponseHeadersDecorator(self):
self.getPage('/')
self.assertHeader('Content-Language', 'en-GB')
self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
def testResponseHeaders(self):
self.getPage('/other')
self.assertHeader('Content-Language', 'fr')
self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
class RefererTest(helper.CPWebCase):
setup_server = staticmethod(setup_server)
def testReferer(self):
self.getPage('/referer/accept')
self.assertErrorPage(403, 'Forbidden Referer header.')
self.getPage('/referer/accept',
headers=[('Referer', 'http://www.example.com/')])
self.assertStatus(200)
self.assertBody('Accepted!')
# Reject
self.getPage('/referer/reject')
self.assertStatus(200)
self.assertBody('Accepted!')
self.getPage('/referer/reject',
headers=[('Referer', 'http://www.example.com/')])
self.assertErrorPage(403, 'Forbidden Referer header.')
class AcceptTest(helper.CPWebCase):
setup_server = staticmethod(setup_server)
def test_Accept_Tool(self):
# Test with no header provided
self.getPage('/accept/feed')
self.assertStatus(200)
self.assertInBody('<title>Unknown Blog</title>')
# Specify exact media type
self.getPage('/accept/feed',
headers=[('Accept', 'application/atom+xml')])
self.assertStatus(200)
self.assertInBody('<title>Unknown Blog</title>')
# Specify matching media range
self.getPage('/accept/feed', headers=[('Accept', 'application/*')])
self.assertStatus(200)
self.assertInBody('<title>Unknown Blog</title>')
# Specify all media ranges
self.getPage('/accept/feed', headers=[('Accept', '*/*')])
self.assertStatus(200)
self.assertInBody('<title>Unknown Blog</title>')
# Specify unacceptable media types
self.getPage('/accept/feed', headers=[('Accept', 'text/html')])
self.assertErrorPage(406,
'Your client sent this Accept header: text/html. '
'But this resource only emits these media types: '
'application/atom+xml.')
# Test resource where tool is 'on' but media is None (not set).
self.getPage('/accept/')
self.assertStatus(200)
self.assertBody('<a href="feed">Atom feed</a>')
def test_accept_selection(self):
# Try both our expected media types
self.getPage('/accept/select', [('Accept', 'text/html')])
self.assertStatus(200)
self.assertBody('<h2>Page Title</h2>')
self.getPage('/accept/select', [('Accept', 'text/plain')])
self.assertStatus(200)
self.assertBody('PAGE TITLE')
self.getPage('/accept/select',
[('Accept', 'text/plain, text/*;q=0.5')])
self.assertStatus(200)
self.assertBody('PAGE TITLE')
# text/* and */* should prefer text/html since it comes first
# in our 'media' argument to tools.accept
self.getPage('/accept/select', [('Accept', 'text/*')])
self.assertStatus(200)
self.assertBody('<h2>Page Title</h2>')
self.getPage('/accept/select', [('Accept', '*/*')])
self.assertStatus(200)
self.assertBody('<h2>Page Title</h2>')
# Try unacceptable media types
self.getPage('/accept/select', [('Accept', 'application/xml')])
self.assertErrorPage(
406,
'Your client sent this Accept header: application/xml. '
'But this resource only emits these media types: '
'text/html, text/plain.')
class AutoVaryTest(helper.CPWebCase):
setup_server = staticmethod(setup_server)
def testAutoVary(self):
self.getPage('/autovary/')
self.assertHeader(
'Vary',
'Accept, Accept-Charset, Accept-Encoding, '
'Host, If-Modified-Since, Range'
)
|
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api.decorators import (
async_response,
require_admin,
)
from homeassistant.core import callback
from homeassistant.helpers.device_registry import async_get_registry
WS_TYPE_LIST = "config/device_registry/list"
SCHEMA_WS_LIST = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{vol.Required("type"): WS_TYPE_LIST}
)
WS_TYPE_UPDATE = "config/device_registry/update"
SCHEMA_WS_UPDATE = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend(
{
vol.Required("type"): WS_TYPE_UPDATE,
vol.Required("device_id"): str,
vol.Optional("area_id"): vol.Any(str, None),
vol.Optional("name_by_user"): vol.Any(str, None),
}
)
async def async_setup(hass):
"""Enable the Device Registry views."""
hass.components.websocket_api.async_register_command(
WS_TYPE_LIST, websocket_list_devices, SCHEMA_WS_LIST
)
hass.components.websocket_api.async_register_command(
WS_TYPE_UPDATE, websocket_update_device, SCHEMA_WS_UPDATE
)
return True
@async_response
async def websocket_list_devices(hass, connection, msg):
"""Handle list devices command."""
registry = await async_get_registry(hass)
connection.send_message(
websocket_api.result_message(
msg["id"], [_entry_dict(entry) for entry in registry.devices.values()]
)
)
@require_admin
@async_response
async def websocket_update_device(hass, connection, msg):
"""Handle update area websocket command."""
registry = await async_get_registry(hass)
msg.pop("type")
msg_id = msg.pop("id")
entry = registry.async_update_device(**msg)
connection.send_message(websocket_api.result_message(msg_id, _entry_dict(entry)))
@callback
def _entry_dict(entry):
"""Convert entry to API format."""
return {
"config_entries": list(entry.config_entries),
"connections": list(entry.connections),
"manufacturer": entry.manufacturer,
"model": entry.model,
"name": entry.name,
"sw_version": entry.sw_version,
"entry_type": entry.entry_type,
"id": entry.id,
"identifiers": list(entry.identifiers),
"via_device_id": entry.via_device_id,
"area_id": entry.area_id,
"name_by_user": entry.name_by_user,
}
|
import logging
from rpi_bad_power import new_under_voltage
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_PROBLEM,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
_LOGGER = logging.getLogger(__name__)
DESCRIPTION_NORMALIZED = "Voltage normalized. Everything is working as intended."
DESCRIPTION_UNDER_VOLTAGE = "Under-voltage was detected. Consider getting a uninterruptible power supply for your Raspberry Pi."
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
):
"""Set up rpi_power binary sensor."""
under_voltage = await hass.async_add_executor_job(new_under_voltage)
async_add_entities([RaspberryChargerBinarySensor(under_voltage)], True)
class RaspberryChargerBinarySensor(BinarySensorEntity):
"""Binary sensor representing the rpi power status."""
def __init__(self, under_voltage):
"""Initialize the binary sensor."""
self._under_voltage = under_voltage
self._is_on = None
self._last_is_on = False
def update(self):
"""Update the state."""
self._is_on = self._under_voltage.get()
if self._is_on != self._last_is_on:
if self._is_on:
_LOGGER.warning(DESCRIPTION_UNDER_VOLTAGE)
else:
_LOGGER.info(DESCRIPTION_NORMALIZED)
self._last_is_on = self._is_on
@property
def unique_id(self):
"""Return the unique id of the sensor."""
return "rpi_power" # only one sensor possible
@property
def name(self):
"""Return the name of the sensor."""
return "RPi Power status"
@property
def is_on(self):
"""Return if there is a problem detected."""
return self._is_on
@property
def icon(self):
"""Return the icon of the sensor."""
return "mdi:raspberry-pi"
@property
def device_class(self):
"""Return the class of this device."""
return DEVICE_CLASS_PROBLEM
|
from .java_classes import *
import numpy as np
import ctypes
# Java instance initializations
native_ops = NativeOpsHolder.getInstance().getDeviceNativeOps()
# DATA TYPE MANAGEMENT
def set_context_dtype(dtype):
"""
Sets the dtype for nd4j
# Arguments
dtype: 'float' or 'double'
"""
dtype = DataTypeUtil.getDtypeFromContext(dtype)
DataTypeUtil.setDTypeForContext(dtype)
def get_context_dtype():
"""
Returns the nd4j dtype
"""
dtype = DataTypeUtil.getDtypeFromContext()
return DataTypeUtil.getDTypeForName(dtype)
def get_nd4j_dtype(np_dtype):
"""
Gets the equivalent nd4j data type
for a given numpy data type.
# Arguments
np_dtype: Numpy data type. One of
['float64', 'float32', 'float16']
"""
if type(np_dtype) == type:
np_dtype = np_dtype.__name__
elif type(np_dtype) == np.dtype:
np_dtype = np_dtype.name
mapping = {
'float64': 'double',
'float32': 'float',
'float16': 'half'
}
nd4j_dtype = mapping.get(np_dtype)
if not nd4j_dtype:
raise Exception('Invalid numpy data type : ' + np_dtype)
return nd4j_dtype
def get_np_dtype(nd4j_dtype):
"""
Gets the equivalent numpy data type
for a given nd4j data type.
# Arguments:
nd4j_dtype : Nd4j data type. One of
['double', 'float', 'half']
"""
mapping = {
'double': np.float64,
'float': np.float32,
'half': np.float16
}
np_dtype = mapping.get(nd4j_dtype)
if not np_dtype:
raise Exception('Invalid nd4j data type : ' + nd4j_dtype)
return np_dtype
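# Illustrative round trip (not executed): get_nd4j_dtype(np.float32) returns 'float'
# and get_np_dtype('float') returns numpy.float32, so the two helpers are inverses.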
set_context_dtype('double')
_refs = []
def _from_numpy(np_array):
"""
Convert numpy array to nd4j array
"""
# Convert the numpy array to nd4j context dtype
required_dtype = get_np_dtype(get_context_dtype())
if np_array.dtype != required_dtype:
raise Exception("{} is required, got {}".format(
repr(required_dtype), repr(np_array.dtype)))
# Nd4j does not have 1-d vectors.
# So we add a dummy dimension.
if np_array.ndim == 1:
np_array = np.expand_dims(np_array, 0)
# We have to maintain references to all incoming
# numpy arrays. Else they will get GCed
# creates a Nd4j array from a numpy array
# To create an Nd4j array, we need 3 things:
# buffer, strides, and shape
# Get the buffer
# A buffer is basically an array. To get the buffer object
# we need a pointer to the first element and the size.
pointer_address, _ = np_array.__array_interface__['data']
_refs.append(np_array)
pointer = native_ops.pointerForAddress(pointer_address)
size = np_array.size
mapping = {
np.float64: DoublePointer,
np.float32: FloatPointer,
}
pointer = mapping[required_dtype](pointer)
buff = Nd4j.createBuffer(pointer, size)
assert buff.address() == pointer_address
_refs.append(buff)
# Get the strides
# strides = tuple of bytes to step in each
# dimension when traversing an array.
elem_size = buff.getElementSize()
# Make sure word size is same in both python
# and java worlds
assert elem_size == np_array.dtype.itemsize
strides = np_array.strides
# numpy uses byte wise strides. We have to
# convert it to word wise strides.
    strides = [dim // elem_size for dim in strides]
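    # For example, a C-contiguous float64 array of shape (3, 4) has byte strides
    # (32, 8); dividing by the 8-byte element size gives word strides (4, 1).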
# Finally, shape:
shape = np_array.shape
nd4j_array = Nd4j.create(buff, shape, strides, 0)
assert buff.address() == nd4j_array.data().address()
return nd4j_array
def _to_numpy(nd4j_array):
"""
Convert nd4j array to numpy array
"""
buff = nd4j_array.data()
address = buff.pointer().address()
dtype = get_context_dtype()
mapping = {
'double': ctypes.c_double,
'float': ctypes.c_float
}
Pointer = ctypes.POINTER(mapping[dtype])
pointer = ctypes.cast(address, Pointer)
np_array = np.ctypeslib.as_array(pointer, tuple(nd4j_array.shape()))
return np_array
def _indarray(x):
if type(x) is INDArray:
return x
elif type(x) is ndarray:
return x.array
elif 'numpy' in str(type(x)):
return _from_numpy(x)
elif type(x) in (list, tuple):
return _from_numpy(np.array(x))
elif type(x) in (int, float):
return Nd4j.scalar(x)
else:
raise Exception('Data type not understood :' + str(type(x)))
def broadcast_like(y, x):
xs = x.shape()
ys = y.shape()
if xs == ys:
return y
_xs = tuple(xs)
_ys = tuple(ys)
nx = len(xs)
ny = len(ys)
if nx > ny:
diff = nx - ny
ys += [1] * diff
y = y.reshape(ys)
ny = nx
elif ny > nx:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
yt = []
rep_y = False
for xd, yd in zip(xs, ys):
if xd == yd:
yt.append(1)
elif xd == 1:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
elif yd == 1:
yt.append(xd)
rep_y = True
else:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
if rep_y:
y = y.repmat(*yt)
return y
def broadcast(x, y):
xs = x.shape()
ys = y.shape()
if xs == ys:
return x, y
_xs = tuple(xs)
_ys = tuple(ys)
nx = len(xs)
ny = len(ys)
if nx > ny:
diff = nx - ny
ys += [1] * diff
y = y.reshape(*ys)
ny = nx
elif ny > nx:
diff = ny - nx
xs += [1] * diff
x = x.reshape(*xs)
nx = ny
xt = []
yt = []
rep_x = False
rep_y = False
for xd, yd in zip(xs, ys):
if xd == yd:
xt.append(1)
yt.append(1)
elif xd == 1:
xt.append(yd)
yt.append(1)
rep_x = True
elif yd == 1:
xt.append(1)
yt.append(xd)
rep_y = True
else:
raise Exception('Unable to broadcast shapes ' + str(_xs) + ''
' and ' + str(_ys))
if rep_x:
x = Nd4j.tile(x, *xt)
if rep_y:
y = Nd4j.tile(y, *yt)
return x, y
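# Illustrative behaviour (inferred from the logic above): broadcasting shapes
# (3, 1) and (1, 4) tiles both operands to (3, 4); mismatched non-1 dimensions raise.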
class ndarray(object):
def __init__(self, data, dtype=None):
# we ignore dtype for now
typ = type(data)
if typ is INDArray:
# Note that we don't make a copy here
self.array = data
elif typ is ndarray:
self.array = data.array.dup()
else:
if typ is not np.ndarray:
data = np.array(data)
self.array = _from_numpy(data)
def numpy(self):
# TODO: Too expensive. Make it cheaper.
np_array = _to_numpy(self.array)
return np_array
@property
def size(self):
return self.array.length()
@property
def shape(self):
return tuple(self.array.shape())
@shape.setter
def shape(self, value):
arr = self.reshape(value)
self.array = arr.array
@property
def ndim(self):
return len(self.array.shape())
def __getitem__(self, key):
if type(key) is int:
return ndarray(self.array.get(NDArrayIndex.point(key)))
if type(key) is slice:
start = key.start
stop = key.stop
step = key.step
if start is None:
start = 0
if stop is None:
shape = self.array.shape()
if shape[0] == 1:
stop = shape[1]
else:
stop = shape[0]
if stop - start <= 0:
return None
if step is None or step == 1:
return ndarray(self.array.get(NDArrayIndex.interval(start, stop)))
else:
return ndarray(self.array.get(NDArrayIndex.interval(start, step, stop)))
if type(key) is list:
            raise NotImplementedError(
                'Sorry, this type of indexing is not supported yet.')
if type(key) is tuple:
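            # Illustrative mapping: arr[1, :3] becomes NDArrayIndex.point(1) plus
            # NDArrayIndex.interval(0, 3); unspecified trailing dimensions are padded
            # with NDArrayIndex.all() via the slice(None) fill below.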
key = list(key)
shape = self.array.shape()
ndim = len(shape)
nk = len(key)
key += [slice(None)] * (ndim - nk)
args = []
for i, dim in enumerate(key):
if type(dim) is int:
args.append(NDArrayIndex.point(dim))
elif type(dim) is slice:
if dim == slice(None):
args.append(NDArrayIndex.all())
else:
start = dim.start
stop = dim.stop
step = dim.step
if start is None:
start = 0
if stop is None:
stop = shape[i]
if stop - start <= 0:
return None
if step is None or step == 1:
args.append(NDArrayIndex.interval(start, stop))
else:
args.append(NDArrayIndex.interval(
start, step, stop))
elif type(dim) in (list, tuple):
                    raise NotImplementedError(
                        'Sorry, this type of indexing is not supported yet.')
return ndarray(self.array.get(*args))
def __setitem__(self, key, other):
other = _indarray(other)
view = self[key]
if view is None:
return
view = view.array
other = broadcast_like(other, view)
view.assign(other)
def __add__(self, other):
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.add(y))
def __sub__(self, other):
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.sub(y))
def __mul__(self, other):
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.mul(y))
def __div__(self, other):
other = _indarray(other)
x, y = broadcast(self.array, other)
return ndarray(x.div(y))
def __iadd__(self, other):
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.addi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.add(y)
return self
def __isub__(self, other):
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.subi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.sub(y)
return self
def __imul__(self, other):
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.muli(other)
else:
x, y = broadcast(self.array, other)
self.array = x.mul(y)
return self
def __idiv__(self, other):
other = _indarray(other)
if self.array.shape() == other.shape():
self.array = self.array.divi(other)
else:
x, y = broadcast(self.array, other)
self.array = x.div(y)
return self
# def __getattr__(self, attr):
# import ops
# f = getattr(ops, attr)
# setattr(ndarray, attr, f)
# return getattr(self, attr)
def __int__(self):
if self.array.length() == 1:
return self.array.getInt(0)
raise Exception('Applicable only for scalars')
def __float__(self):
if self.array.length() == 1:
return self.array.getDouble(0)
raise Exception('Applicable only for scalars')
@property
def T(self):
return self.transpose()
def array(*args, **kwargs):
return ndarray(*args, **kwargs)
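# Minimal usage sketch (commented out; assumes the nd4j Java backend is available):
#   a = array([[1.0, 2.0], [3.0, 4.0]])
#   b = a + 1            # the scalar is broadcast and a new ndarray is returned
#   print(b.numpy())     # copies the result back into a numpy array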
|
import asyncio
from aiohttp import ClientResponseError
from august.authenticator_common import AuthenticationState
from august.exceptions import AugustApiAIOHTTPError
from homeassistant import setup
from homeassistant.components.august.const import (
CONF_ACCESS_TOKEN_CACHE_FILE,
CONF_INSTALL_ID,
CONF_LOGIN_METHOD,
DEFAULT_AUGUST_CONFIG_FILE,
DOMAIN,
)
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_SETUP_ERROR,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_PASSWORD,
CONF_TIMEOUT,
CONF_USERNAME,
SERVICE_LOCK,
SERVICE_UNLOCK,
STATE_LOCKED,
STATE_ON,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.august.mocks import (
_create_august_with_devices,
_mock_august_authentication,
_mock_doorsense_enabled_august_lock_detail,
_mock_doorsense_missing_august_lock_detail,
_mock_get_config,
_mock_inoperative_august_lock_detail,
_mock_operative_august_lock_detail,
)
async def test_august_is_offline(hass):
"""Config entry state is ENTRY_STATE_SETUP_RETRY when august is offline."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
side_effect=asyncio.TimeoutError,
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unlock_throws_august_api_http_error(hass):
"""Test unlock throws correct error on http error."""
mocked_lock_detail = await _mock_operative_august_lock_detail(hass)
def _unlock_return_activities_side_effect(access_token, device_id):
raise AugustApiAIOHTTPError("This should bubble up as its user consumable")
await _create_august_with_devices(
hass,
[mocked_lock_detail],
api_call_side_effects={
"unlock_return_activities": _unlock_return_activities_side_effect
},
)
last_err = None
data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"}
try:
await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True)
except HomeAssistantError as err:
last_err = err
assert (
str(last_err)
== "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user consumable"
)
async def test_lock_throws_august_api_http_error(hass):
"""Test lock throws correct error on http error."""
mocked_lock_detail = await _mock_operative_august_lock_detail(hass)
def _lock_return_activities_side_effect(access_token, device_id):
raise AugustApiAIOHTTPError("This should bubble up as its user consumable")
await _create_august_with_devices(
hass,
[mocked_lock_detail],
api_call_side_effects={
"lock_return_activities": _lock_return_activities_side_effect
},
)
last_err = None
data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"}
try:
await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True)
except HomeAssistantError as err:
last_err = err
assert (
str(last_err)
== "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user consumable"
)
async def test_inoperative_locks_are_filtered_out(hass):
"""Ensure inoperative locks do not get setup."""
august_operative_lock = await _mock_operative_august_lock_detail(hass)
august_inoperative_lock = await _mock_inoperative_august_lock_detail(hass)
await _create_august_with_devices(
hass, [august_operative_lock, august_inoperative_lock]
)
lock_abc_name = hass.states.get("lock.abc_name")
assert lock_abc_name is None
lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get(
"lock.a6697750d607098bae8d6baa11ef8063_name"
)
assert lock_a6697750d607098bae8d6baa11ef8063_name.state == STATE_LOCKED
async def test_lock_has_doorsense(hass):
"""Check to see if a lock has doorsense."""
doorsenselock = await _mock_doorsense_enabled_august_lock_detail(hass)
nodoorsenselock = await _mock_doorsense_missing_august_lock_detail(hass)
await _create_august_with_devices(hass, [doorsenselock, nodoorsenselock])
binary_sensor_online_with_doorsense_name_open = hass.states.get(
"binary_sensor.online_with_doorsense_name_open"
)
assert binary_sensor_online_with_doorsense_name_open.state == STATE_ON
binary_sensor_missing_doorsense_id_name_open = hass.states.get(
"binary_sensor.missing_doorsense_id_name_open"
)
assert binary_sensor_missing_doorsense_id_name_open is None
async def test_set_up_from_yaml(hass):
"""Test to make sure config is imported from yaml."""
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"homeassistant.components.august.async_setup_august",
return_value=True,
) as mock_setup_august, patch(
"homeassistant.components.august.config_flow.AugustGateway.async_authenticate",
return_value=True,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
assert len(mock_setup_august.mock_calls) == 1
call = mock_setup_august.call_args
args, _ = call
imported_config_entry = args[1]
    # The import must use DEFAULT_AUGUST_CONFIG_FILE so users
    # do not lose their token when the config is migrated
assert imported_config_entry.data == {
CONF_ACCESS_TOKEN_CACHE_FILE: DEFAULT_AUGUST_CONFIG_FILE,
CONF_INSTALL_ID: None,
CONF_LOGIN_METHOD: "email",
CONF_PASSWORD: "mocked_password",
CONF_TIMEOUT: None,
CONF_USERNAME: "mocked_username",
}
async def test_auth_fails(hass):
"""Config entry state is ENTRY_STATE_SETUP_ERROR when auth fails."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
assert hass.config_entries.flow.async_progress() == []
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
side_effect=ClientResponseError(None, None, status=401),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_ERROR
flows = hass.config_entries.flow.async_progress()
assert flows[0]["step_id"] == "user"
async def test_bad_password(hass):
"""Config entry state is ENTRY_STATE_SETUP_ERROR when the password has been changed."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
assert hass.config_entries.flow.async_progress() == []
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
return_value=_mock_august_authentication(
"original_token", 1234, AuthenticationState.BAD_PASSWORD
),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_ERROR
flows = hass.config_entries.flow.async_progress()
assert flows[0]["step_id"] == "user"
async def test_http_failure(hass):
"""Config entry state is ENTRY_STATE_SETUP_RETRY when august is offline."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
assert hass.config_entries.flow.async_progress() == []
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
side_effect=ClientResponseError(None, None, status=500),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
assert hass.config_entries.flow.async_progress() == []
async def test_unknown_auth_state(hass):
"""Config entry state is ENTRY_STATE_SETUP_ERROR when august is in an unknown auth state."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
assert hass.config_entries.flow.async_progress() == []
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
return_value=_mock_august_authentication("original_token", 1234, None),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_ERROR
flows = hass.config_entries.flow.async_progress()
assert flows[0]["step_id"] == "user"
async def test_requires_validation_state(hass):
"""Config entry state is ENTRY_STATE_SETUP_ERROR when august requires validation."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data=_mock_get_config()[DOMAIN],
title="August august",
)
config_entry.add_to_hass(hass)
assert hass.config_entries.flow.async_progress() == []
await setup.async_setup_component(hass, "persistent_notification", {})
with patch(
"august.authenticator_async.AuthenticatorAsync.async_authenticate",
return_value=_mock_august_authentication(
"original_token", 1234, AuthenticationState.REQUIRES_VALIDATION
),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_SETUP_ERROR
assert hass.config_entries.flow.async_progress() == []
|
from django.conf import settings
from requests.auth import _basic_auth_str
from weblate.machinery.base import MachineTranslation, MissingConfiguration
class SAPTranslationHub(MachineTranslation):
# https://api.sap.com/shell/discover/contentpackage/SAPTranslationHub/api/translationhub
name = "SAP Translation Hub"
def __init__(self):
"""Check configuration."""
super().__init__()
if settings.MT_SAP_BASE_URL is None:
raise MissingConfiguration("missing SAP Translation Hub configuration")
def get_authentication(self):
"""Hook for backends to allow add authentication headers to request."""
# to access the sandbox
result = {}
if settings.MT_SAP_SANDBOX_APIKEY:
result["APIKey"] = settings.MT_SAP_SANDBOX_APIKEY
# to access the productive API
if settings.MT_SAP_USERNAME and settings.MT_SAP_PASSWORD:
result["Authorization"] = _basic_auth_str(
settings.MT_SAP_USERNAME, settings.MT_SAP_PASSWORD
)
return result
def download_languages(self):
"""Get all available languages from SAP Translation Hub."""
# get all available languages
response = self.request("get", settings.MT_SAP_BASE_URL + "languages")
payload = response.json()
return [d["id"] for d in payload["languages"]]
def download_translations(
self,
source,
language,
text: str,
unit,
user,
search: bool,
threshold: int = 75,
):
"""Download list of possible translations from a service."""
# should the machine translation service be used?
# (rather than only the term database)
enable_mt = bool(settings.MT_SAP_USE_MT)
# build the json body
data = {
"targetLanguages": [language],
"sourceLanguage": source,
"enableMT": enable_mt,
"enableTranslationQualityEstimation": enable_mt,
"units": [{"value": text}],
}
# perform the request
response = self.request(
"post", settings.MT_SAP_BASE_URL + "translate", json=data
)
payload = response.json()
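        # The loop below assumes a payload shaped roughly like:
        # {"units": [{"translations": [{"value": "...", "qualityIndex": 80}, ...]}, ...]}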
# prepare the translations for weblate
for item in payload["units"]:
for translation in item["translations"]:
yield {
"text": translation["value"],
"quality": translation.get("qualityIndex", 100),
"service": self.name,
"source": text,
}
|
import pytest
from homeassistant import setup
from homeassistant.components.vacuum import (
ATTR_BATTERY_LEVEL,
STATE_CLEANING,
STATE_DOCKED,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
)
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from tests.common import assert_setup_component, async_mock_service
from tests.components.vacuum import common
_TEST_VACUUM = "vacuum.test_vacuum"
_STATE_INPUT_SELECT = "input_select.state"
_SPOT_CLEANING_INPUT_BOOLEAN = "input_boolean.spot_cleaning"
_LOCATING_INPUT_BOOLEAN = "input_boolean.locating"
_FAN_SPEED_INPUT_SELECT = "input_select.fan_speed"
_BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level"
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
# Configuration tests #
async def test_missing_optional_config(hass, calls):
"""Test: missing optional template is ok."""
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {"start": {"service": "script.vacuum_start"}}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
_verify(hass, STATE_UNKNOWN, None)
async def test_missing_start_config(hass, calls):
"""Test: missing 'start' will fail."""
with assert_setup_component(0, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {"test_vacuum": {"value_template": "{{ 'on' }}"}},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
async def test_invalid_config(hass, calls):
"""Test: invalid config structure will fail."""
with assert_setup_component(0, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"platform": "template",
"vacuums": {
"test_vacuum": {"start": {"service": "script.vacuum_start"}}
},
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.async_all() == []
# End of configuration tests #
# Template tests #
async def test_templates_with_entities(hass, calls):
"""Test templates with values from other entities."""
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {
"value_template": "{{ states('input_select.state') }}",
"battery_level_template": "{{ states('input_number.battery_level') }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
_verify(hass, STATE_UNKNOWN, None)
hass.states.async_set(_STATE_INPUT_SELECT, STATE_CLEANING)
hass.states.async_set(_BATTERY_LEVEL_INPUT_NUMBER, 100)
await hass.async_block_till_done()
_verify(hass, STATE_CLEANING, 100)
async def test_templates_with_valid_values(hass, calls):
"""Test templates with valid values."""
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {
"value_template": "{{ 'cleaning' }}",
"battery_level_template": "{{ 100 }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
_verify(hass, STATE_CLEANING, 100)
async def test_templates_invalid_values(hass, calls):
"""Test templates with invalid values."""
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {
"value_template": "{{ 'abc' }}",
"battery_level_template": "{{ 101 }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
_verify(hass, STATE_UNKNOWN, None)
async def test_invalid_templates(hass, calls):
"""Test invalid templates."""
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {
"value_template": "{{ this_function_does_not_exist() }}",
"battery_level_template": "{{ this_function_does_not_exist() }}",
"fan_speed_template": "{{ this_function_does_not_exist() }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
_verify(hass, STATE_UNKNOWN, None)
async def test_available_template_with_entities(hass, calls):
"""Test availability templates with values from other entities."""
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_template_vacuum": {
"availability_template": "{{ is_state('availability_state.state', 'on') }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# When template returns true..
hass.states.async_set("availability_state.state", STATE_ON)
await hass.async_block_till_done()
# Device State should not be unavailable
assert hass.states.get("vacuum.test_template_vacuum").state != STATE_UNAVAILABLE
# When Availability template returns false
hass.states.async_set("availability_state.state", STATE_OFF)
await hass.async_block_till_done()
# device state should be unavailable
assert hass.states.get("vacuum.test_template_vacuum").state == STATE_UNAVAILABLE
async def test_invalid_availability_template_keeps_component_available(hass, caplog):
"""Test that an invalid availability keeps the device available."""
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_template_vacuum": {
"availability_template": "{{ x - 12 }}",
"start": {"service": "script.vacuum_start"},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert hass.states.get("vacuum.test_template_vacuum") != STATE_UNAVAILABLE
assert ("UndefinedError: 'x' is undefined") in caplog.text
async def test_attribute_templates(hass, calls):
"""Test attribute_templates template."""
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_template_vacuum": {
"value_template": "{{ 'cleaning' }}",
"start": {"service": "script.vacuum_start"},
"attribute_templates": {
"test_attribute": "It {{ states.sensor.test_state.state }}."
},
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("vacuum.test_template_vacuum")
assert state.attributes["test_attribute"] == "It ."
hass.states.async_set("sensor.test_state", "Works")
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(
"vacuum.test_template_vacuum"
)
state = hass.states.get("vacuum.test_template_vacuum")
assert state.attributes["test_attribute"] == "It Works."
async def test_invalid_attribute_template(hass, caplog):
"""Test that errors are logged if rendering template fails."""
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"invalid_template": {
"value_template": "{{ states('input_select.state') }}",
"start": {"service": "script.vacuum_start"},
"attribute_templates": {
"test_attribute": "{{ this_function_does_not_exist() }}"
},
}
},
}
},
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
await hass.async_start()
await hass.async_block_till_done()
assert "test_attribute" in caplog.text
assert "TemplateError" in caplog.text
# End of template tests #
# Function tests #
async def test_state_services(hass, calls):
"""Test state services."""
await _register_components(hass)
# Start vacuum
await common.async_start(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_CLEANING
_verify(hass, STATE_CLEANING, None)
# Pause vacuum
await common.async_pause(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_PAUSED
_verify(hass, STATE_PAUSED, None)
# Stop vacuum
await common.async_stop(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_IDLE
_verify(hass, STATE_IDLE, None)
# Return vacuum to base
await common.async_return_to_base(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_STATE_INPUT_SELECT).state == STATE_RETURNING
_verify(hass, STATE_RETURNING, None)
async def test_unused_services(hass, calls):
"""Test calling unused services should not crash."""
await _register_basic_vacuum(hass)
# Pause vacuum
await common.async_pause(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# Stop vacuum
await common.async_stop(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# Return vacuum to base
await common.async_return_to_base(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# Spot cleaning
await common.async_clean_spot(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# Locate vacuum
await common.async_locate(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# Set fan's speed
await common.async_set_fan_speed(hass, "medium", _TEST_VACUUM)
await hass.async_block_till_done()
_verify(hass, STATE_UNKNOWN, None)
async def test_clean_spot_service(hass, calls):
"""Test clean spot service."""
await _register_components(hass)
# Clean spot
await common.async_clean_spot(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_SPOT_CLEANING_INPUT_BOOLEAN).state == STATE_ON
async def test_locate_service(hass, calls):
"""Test locate service."""
await _register_components(hass)
# Locate vacuum
await common.async_locate(hass, _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_LOCATING_INPUT_BOOLEAN).state == STATE_ON
async def test_set_fan_speed(hass, calls):
"""Test set valid fan speed."""
await _register_components(hass)
# Set vacuum's fan speed to high
await common.async_set_fan_speed(hass, "high", _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high"
# Set fan's speed to medium
await common.async_set_fan_speed(hass, "medium", _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "medium"
async def test_set_invalid_fan_speed(hass, calls):
"""Test set invalid fan speed when fan has valid speed."""
await _register_components(hass)
# Set vacuum's fan speed to high
await common.async_set_fan_speed(hass, "high", _TEST_VACUUM)
await hass.async_block_till_done()
# verify
assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high"
# Set vacuum's fan speed to 'invalid'
await common.async_set_fan_speed(hass, "invalid", _TEST_VACUUM)
await hass.async_block_till_done()
# verify fan speed is unchanged
assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high"
def _verify(hass, expected_state, expected_battery_level):
"""Verify vacuum's state and speed."""
state = hass.states.get(_TEST_VACUUM)
attributes = state.attributes
assert state.state == expected_state
assert attributes.get(ATTR_BATTERY_LEVEL) == expected_battery_level
async def _register_basic_vacuum(hass):
"""Register basic vacuum with only required options for testing."""
with assert_setup_component(1, "input_select"):
assert await setup.async_setup_component(
hass,
"input_select",
{"input_select": {"state": {"name": "State", "options": [STATE_CLEANING]}}},
)
with assert_setup_component(1, "vacuum"):
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_vacuum": {
"start": {
"service": "input_select.select_option",
"data": {
"entity_id": _STATE_INPUT_SELECT,
"option": STATE_CLEANING,
},
}
}
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
async def _register_components(hass):
"""Register basic components for testing."""
with assert_setup_component(2, "input_boolean"):
assert await setup.async_setup_component(
hass,
"input_boolean",
{"input_boolean": {"spot_cleaning": None, "locating": None}},
)
with assert_setup_component(2, "input_select"):
assert await setup.async_setup_component(
hass,
"input_select",
{
"input_select": {
"state": {
"name": "State",
"options": [
STATE_CLEANING,
STATE_DOCKED,
STATE_IDLE,
STATE_PAUSED,
STATE_RETURNING,
],
},
"fan_speed": {
"name": "Fan speed",
"options": ["", "low", "medium", "high"],
},
}
},
)
with assert_setup_component(1, "vacuum"):
test_vacuum_config = {
"value_template": "{{ states('input_select.state') }}",
"fan_speed_template": "{{ states('input_select.fan_speed') }}",
"start": {
"service": "input_select.select_option",
"data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_CLEANING},
},
"pause": {
"service": "input_select.select_option",
"data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_PAUSED},
},
"stop": {
"service": "input_select.select_option",
"data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_IDLE},
},
"return_to_base": {
"service": "input_select.select_option",
"data": {"entity_id": _STATE_INPUT_SELECT, "option": STATE_RETURNING},
},
"clean_spot": {
"service": "input_boolean.turn_on",
"entity_id": _SPOT_CLEANING_INPUT_BOOLEAN,
},
"locate": {
"service": "input_boolean.turn_on",
"entity_id": _LOCATING_INPUT_BOOLEAN,
},
"set_fan_speed": {
"service": "input_select.select_option",
"data_template": {
"entity_id": _FAN_SPEED_INPUT_SELECT,
"option": "{{ fan_speed }}",
},
},
"fan_speeds": ["low", "medium", "high"],
"attribute_templates": {
"test_attribute": "It {{ states.sensor.test_state.state }}."
},
}
assert await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {"test_vacuum": test_vacuum_config},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
async def test_unique_id(hass):
"""Test unique_id option only creates one vacuum per id."""
await setup.async_setup_component(
hass,
"vacuum",
{
"vacuum": {
"platform": "template",
"vacuums": {
"test_template_vacuum_01": {
"unique_id": "not-so-unique-anymore",
"value_template": "{{ true }}",
"start": {"service": "script.vacuum_start"},
},
"test_template_vacuum_02": {
"unique_id": "not-so-unique-anymore",
"value_template": "{{ false }}",
"start": {"service": "script.vacuum_start"},
},
},
}
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
|
import argparse
import json
from weblate.auth.models import User
from weblate.lang.models import Language
from weblate.trans.models import Project
from weblate.utils.management.base import BaseCommand
class Command(BaseCommand):
help = "imports userdata from JSON dump of database"
def add_arguments(self, parser):
parser.add_argument(
"json-file",
type=argparse.FileType("r"),
help="JSON file containing user data to import",
)
@staticmethod
def import_watched(profile, userprofile):
"""Import user subscriptions."""
# Add subscriptions
for subscription in userprofile["watched"]:
try:
profile.watched.add(Project.objects.get(slug=subscription))
except Project.DoesNotExist:
continue
@staticmethod
def update_languages(profile, userprofile):
"""Update user language preferences."""
profile.language = userprofile["language"]
for lang in userprofile["secondary_languages"]:
profile.secondary_languages.add(Language.objects.auto_get_or_create(lang))
for lang in userprofile["languages"]:
profile.languages.add(Language.objects.auto_get_or_create(lang))
def handle_compat(self, data):
"""Compatibility with pre 3.6 dumps."""
if "basic" in data:
return
data["basic"] = {"username": data["username"]}
data["profile"] = {
"translated": data["translated"],
"suggested": data["suggested"],
"language": data["language"],
"uploaded": data.get("uploaded", 0),
"secondary_languages": data["secondary_languages"],
"languages": data["languages"],
"watched": data["subscriptions"],
}
def handle(self, **options):
"""Create default set of groups.
Also ptionally updates them and moves users around to default group.
"""
userdata = json.load(options["json-file"])
options["json-file"].close()
for userprofile in userdata:
self.handle_compat(userprofile)
username = userprofile["basic"]["username"]
try:
user = User.objects.get(username=username)
update = False
profile = user.profile
if not profile.language:
update = True
# Merge stats
profile.translated += userprofile["profile"]["translated"]
profile.suggested += userprofile["profile"]["suggested"]
profile.uploaded += userprofile["profile"]["uploaded"]
# Update fields if we should
if update:
self.update_languages(profile, userprofile["profile"])
# Add subscriptions
self.import_watched(profile, userprofile["profile"])
profile.save()
except User.DoesNotExist:
self.stderr.write(f"User not found: {username}\n")
|
import os
import unittest
from collections import namedtuple
import mock
from ansible.module_utils.common.collections import ImmutableDict
from kalliope.neurons.ansible_playbook import Ansible_playbook
from kalliope.core.NeuronModule import MissingParameterException
class TestAnsible_Playbook(unittest.TestCase):
def setUp(self):
self.task_file = "task_file"
self.random = "random"
self.test_file = "/tmp/kalliope_text_ansible_playbook.txt"
if "/tests" in os.getcwd():
self.test_tasks_file = os.getcwd() + os.sep + "test_ansible_playbook_neuron.yml"
else:
self.test_tasks_file = os.getcwd() + \
os.sep + "kalliope/neurons/ansible_playbook/tests/test_ansible_playbook_neuron.yml"
def testParameters(self):
def run_test(parameters_to_test):
with self.assertRaises(MissingParameterException):
Ansible_playbook(**parameters_to_test)
# empty
parameters = dict()
run_test(parameters)
# missing task_file
parameters = {
"random": self.random
}
run_test(parameters)
# missing sudo user
parameters = {
"sudo": True,
"random": self.random
}
run_test(parameters)
# missing sudo password
parameters = {
"sudo": True,
"sudo_user": "user"
}
run_test(parameters)
# parameters ok
parameters = {
"task_file": self.test_tasks_file,
"sudo": True,
"sudo_user": "user",
"sudo_password": "password"
}
with mock.patch("ansible.executor.task_queue_manager.TaskQueueManager.run"):
instantiated_neuron = Ansible_playbook(**parameters)
self.assertTrue(instantiated_neuron._is_parameters_ok)
def test_create_file_via_ansible_playbook(self):
"""
        This test uses an ansible playbook to create a file and checks that the file has been created.
"""
# without sudo
param = {
"task_file": self.test_tasks_file
}
Ansible_playbook(**param)
self.assertTrue(os.path.isfile(self.test_file))
if os.path.exists(self.test_file):
os.remove(self.test_file)
# with sudo
param = {
"task_file": self.test_tasks_file,
"sudo": True,
"sudo_user": "user",
"sudo_password": "password"
}
expected_option = ImmutableDict(connection='local', forks=100, become=True, become_method="sudo",
become_user="user", check=False, listhosts=False, listtasks=False, listtags=False,
syntax=False, module_path="", diff=False)
with mock.patch("ansible.executor.task_queue_manager.TaskQueueManager.run") as playbookExecutor:
instance_neuron = Ansible_playbook(**param)
playbookExecutor.assert_called_once()
self.assertEqual(instance_neuron._get_options(), expected_option)
if __name__ == '__main__':
unittest.main()
|
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.lighting.logo', 'setLogoWave', in_sig='i')
def set_logo_wave(self, direction):
"""
Set the wave effect on the device
    :param direction: wave direction: 1 (down to up) or 2 (up to down)
:type direction: int
"""
self.logger.debug("DBus call set_logo_wave")
# Notify others
self.send_effect_event('setWave', direction)
# remember effect
self.set_persistence("logo", "effect", 'wave')
self.set_persistence("logo", "wave_dir", int(direction))
driver_path = self.get_driver_path('logo_matrix_effect_wave')
if direction not in self.WAVE_DIRS:
direction = self.WAVE_DIRS[0]
with open(driver_path, 'w') as driver_file:
driver_file.write(str(direction))
@endpoint('razer.device.lighting.scroll', 'setScrollWave', in_sig='i')
def set_scroll_wave(self, direction):
"""
Set the wave effect on the device
    :param direction: wave direction: 1 (down to up) or 2 (up to down)
:type direction: int
"""
self.logger.debug("DBus call set_scroll_wave")
# Notify others
self.send_effect_event('setWave', direction)
# remember effect
self.set_persistence("scroll", "effect", 'wave')
self.set_persistence("scroll", "wave_dir", int(direction))
driver_path = self.get_driver_path('scroll_matrix_effect_wave')
if direction not in self.WAVE_DIRS:
direction = self.WAVE_DIRS[0]
with open(driver_path, 'w') as driver_file:
driver_file.write(str(direction))
@endpoint('razer.device.lighting.left', 'getLeftBrightness', out_sig='d')
def get_left_brightness(self):
"""
Get the device's brightness
:return: Brightness
:rtype: float
"""
self.logger.debug("DBus call get_left_brightness")
return self.zone["left"]["brightness"]
@endpoint('razer.device.lighting.left', 'setLeftBrightness', in_sig='d')
def set_left_brightness(self, brightness):
"""
Set the device's brightness
:param brightness: Brightness
:type brightness: int
"""
self.logger.debug("DBus call set_left_brightness")
driver_path = self.get_driver_path('left_led_brightness')
self.method_args['brightness'] = brightness
    if brightness > 100:
        brightness = 100
    elif brightness < 0:
        brightness = 0
self.set_persistence("left", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.left', 'setLeftWave', in_sig='i')
def set_left_wave(self, direction):
"""
Set the wave effect on the device
:param direction: wave direction - 1 (down to up) or 2 (up to down)
:type direction: int
"""
self.logger.debug("DBus call set_left_wave")
# Notify others
self.send_effect_event('setWave', direction)
# remember effect
self.set_persistence("left", "effect", 'wave')
self.set_persistence("left", "wave_dir", int(direction))
driver_path = self.get_driver_path('left_matrix_effect_wave')
if direction not in self.WAVE_DIRS:
direction = self.WAVE_DIRS[0]
with open(driver_path, 'w') as driver_file:
driver_file.write(str(direction))
@endpoint('razer.device.lighting.left', 'setLeftStatic', in_sig='yyy')
def set_left_static(self, red, green, blue):
"""
Set the device to static colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_left_static")
# Notify others
self.send_effect_event('setStatic', red, green, blue)
# remember effect
self.set_persistence("left", "effect", 'static')
self.zone["left"]["colors"][0:3] = int(red), int(green), int(blue)
rgb_driver_path = self.get_driver_path('left_matrix_effect_static')
payload = bytes([red, green, blue])
with open(rgb_driver_path, 'wb') as rgb_driver_file:
rgb_driver_file.write(payload)
@endpoint('razer.device.lighting.left', 'setLeftSpectrum')
def set_left_spectrum(self):
"""
Set the device to spectrum mode
"""
self.logger.debug("DBus call set_left_spectrum")
# Notify others
self.send_effect_event('setSpectrum')
# remember effect
self.set_persistence("left", "effect", 'spectrum')
effect_driver_path = self.get_driver_path('left_matrix_effect_spectrum')
with open(effect_driver_path, 'w') as effect_driver_file:
effect_driver_file.write('1')
@endpoint('razer.device.lighting.left', 'setLeftNone')
def set_left_none(self):
"""
Set the device to effect none
"""
self.logger.debug("DBus call set_left_none")
# Notify others
self.send_effect_event('setNone')
# remember effect
self.set_persistence("left", "effect", 'none')
driver_path = self.get_driver_path('left_matrix_effect_none')
with open(driver_path, 'w') as driver_file:
driver_file.write('1')
@endpoint('razer.device.lighting.left', 'setLeftReactive', in_sig='yyyy')
def set_left_reactive(self, red, green, blue, speed):
"""
Set the device to reactive effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
:param speed: Speed
:type speed: int
"""
self.logger.debug("DBus call set_left_reactive")
driver_path = self.get_driver_path('left_matrix_effect_reactive')
# Notify others
self.send_effect_event('setReactive', red, green, blue, speed)
# remember effect
self.set_persistence("left", "effect", 'reactive')
self.zone["left"]["colors"][0:3] = int(red), int(green), int(blue)
if speed not in (1, 2, 3, 4):
speed = 4
self.set_persistence("left", "speed", int(speed))
payload = bytes([speed, red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.left', 'setLeftBreathRandom')
def set_left_breath_random(self):
"""
Set the device to random colour breathing effect
"""
self.logger.debug("DBus call set_left_breath_random")
# Notify others
self.send_effect_event('setBreathRandom')
# remember effect
self.set_persistence("left", "effect", 'breathRandom')
driver_path = self.get_driver_path('left_matrix_effect_breath')
payload = b'1'
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.left', 'setLeftBreathSingle', in_sig='yyy')
def set_left_breath_single(self, red, green, blue):
"""
Set the device to single colour breathing effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_left_breath_single")
# Notify others
self.send_effect_event('setBreathSingle', red, green, blue)
# remember effect
self.set_persistence("left", "effect", 'breathSingle')
self.zone["left"]["colors"][0:3] = int(red), int(green), int(blue)
driver_path = self.get_driver_path('left_matrix_effect_breath')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.left', 'setLeftBreathDual', in_sig='yyyyyy')
def set_left_breath_dual(self, red1, green1, blue1, red2, green2, blue2):
"""
Set the device to dual colour breathing effect
:param red1: Red component
:type red1: int
:param green1: Green component
:type green1: int
:param blue1: Blue component
:type blue1: int
:param red2: Red component
:type red2: int
:param green2: Green component
:type green2: int
:param blue2: Blue component
:type blue2: int
"""
self.logger.debug("DBus call set_left_breath_dual")
# Notify others
self.send_effect_event('setBreathDual', red1, green1, blue1, red2, green2, blue2)
# remember effect
self.set_persistence("left", "effect", 'breathDual')
self.zone["left"]["colors"][0:6] = int(red1), int(green1), int(blue1), int(red2), int(green2), int(blue2)
driver_path = self.get_driver_path('left_matrix_effect_breath')
payload = bytes([red1, green1, blue1, red2, green2, blue2])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.right', 'getRightBrightness', out_sig='d')
def get_right_brightness(self):
"""
Get the device's brightness
:return: Brightness
:rtype: float
"""
self.logger.debug("DBus call get_right_brightness")
return self.zone["right"]["brightness"]
@endpoint('razer.device.lighting.right', 'setRightBrightness', in_sig='d')
def set_right_brightness(self, brightness):
"""
Set the device's brightness
:param brightness: Brightness
:type brightness: int
"""
self.logger.debug("DBus call set_right_brightness")
driver_path = self.get_driver_path('right_led_brightness')
self.method_args['brightness'] = brightness
# brightness is a percentage (0-100); it is scaled to the 0-255 driver range below
if brightness > 100:
brightness = 100
elif brightness < 0:
brightness = 0
self.set_persistence("right", "brightness", int(brightness))
brightness = int(round(brightness * (255.0 / 100.0)))
with open(driver_path, 'w') as driver_file:
driver_file.write(str(brightness))
# Notify others
self.send_effect_event('setBrightness', brightness)
@endpoint('razer.device.lighting.right', 'setRightWave', in_sig='i')
def set_right_wave(self, direction):
"""
Set the wave effect on the device
:param direction: wave direction - 1 (down to up) or 2 (up to down)
:type direction: int
"""
self.logger.debug("DBus call set_right_wave")
# Notify others
self.send_effect_event('setWave', direction)
# remember effect
self.set_persistence("right", "effect", 'wave')
self.set_persistence("right", "wave_dir", int(direction))
driver_path = self.get_driver_path('right_matrix_effect_wave')
if direction not in self.WAVE_DIRS:
direction = self.WAVE_DIRS[0]
with open(driver_path, 'w') as driver_file:
driver_file.write(str(direction))
@endpoint('razer.device.lighting.right', 'setRightStatic', in_sig='yyy')
def set_right_static(self, red, green, blue):
"""
Set the device to static colour
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_right_static")
# Notify others
self.send_effect_event('setStatic', red, green, blue)
# remember effect
self.set_persistence("right", "effect", 'static')
self.zone["right"]["colors"][0:3] = int(red), int(green), int(blue)
rgb_driver_path = self.get_driver_path('right_matrix_effect_static')
payload = bytes([red, green, blue])
with open(rgb_driver_path, 'wb') as rgb_driver_file:
rgb_driver_file.write(payload)
@endpoint('razer.device.lighting.right', 'setRightSpectrum')
def set_right_spectrum(self):
"""
Set the device to spectrum mode
"""
self.logger.debug("DBus call set_right_spectrum")
# Notify others
self.send_effect_event('setSpectrum')
# remember effect
self.set_persistence("right", "effect", 'spectrum')
effect_driver_path = self.get_driver_path('right_matrix_effect_spectrum')
with open(effect_driver_path, 'w') as effect_driver_file:
effect_driver_file.write('1')
@endpoint('razer.device.lighting.right', 'setRightNone')
def set_right_none(self):
"""
Set the device to effect none
"""
self.logger.debug("DBus call set_right_none")
# Notify others
self.send_effect_event('setNone')
# remember effect
self.set_persistence("right", "effect", 'none')
driver_path = self.get_driver_path('right_matrix_effect_none')
with open(driver_path, 'w') as driver_file:
driver_file.write('1')
@endpoint('razer.device.lighting.right', 'setRightReactive', in_sig='yyyy')
def set_right_reactive(self, red, green, blue, speed):
"""
Set the device to reactive effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
:param speed: Speed
:type speed: int
"""
self.logger.debug("DBus call set_right_reactive")
driver_path = self.get_driver_path('right_matrix_effect_reactive')
# Notify others
self.send_effect_event('setReactive', red, green, blue, speed)
# remember effect
self.set_persistence("right", "effect", 'reactive')
self.zone["right"]["colors"][0:3] = int(red), int(green), int(blue)
if speed not in (1, 2, 3, 4):
speed = 4
self.set_persistence("right", "speed", int(speed))
payload = bytes([speed, red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.right', 'setRightBreathRandom')
def set_right_breath_random(self):
"""
Set the device to random colour breathing effect
"""
self.logger.debug("DBus call set_right_breath_random")
# Notify others
self.send_effect_event('setBreathRandom')
# remember effect
self.set_persistence("right", "effect", 'breathRandom')
driver_path = self.get_driver_path('right_matrix_effect_breath')
payload = b'1'
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.right', 'setRightBreathSingle', in_sig='yyy')
def set_right_breath_single(self, red, green, blue):
"""
Set the device to single colour breathing effect
:param red: Red component
:type red: int
:param green: Green component
:type green: int
:param blue: Blue component
:type blue: int
"""
self.logger.debug("DBus call set_right_breath_single")
# Notify others
self.send_effect_event('setBreathSingle', red, green, blue)
# remember effect
self.set_persistence("right", "effect", 'breathSingle')
self.zone["right"]["colors"][0:3] = int(red), int(green), int(blue)
driver_path = self.get_driver_path('right_matrix_effect_breath')
payload = bytes([red, green, blue])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
@endpoint('razer.device.lighting.right', 'setRightBreathDual', in_sig='yyyyyy')
def set_right_breath_dual(self, red1, green1, blue1, red2, green2, blue2):
"""
Set the device to dual colour breathing effect
:param red1: Red component
:type red1: int
:param green1: Green component
:type green1: int
:param blue1: Blue component
:type blue1: int
:param red2: Red component
:type red2: int
:param green2: Green component
:type green2: int
:param blue2: Blue component
:type blue2: int
"""
self.logger.debug("DBus call set_right_breath_dual")
# Notify others
self.send_effect_event('setBreathDual', red1, green1, blue1, red2, green2, blue2)
# remember effect
self.set_persistence("right", "effect", 'breathDual')
self.zone["right"]["colors"][0:6] = int(red1), int(green1), int(blue1), int(red2), int(green2), int(blue2)
driver_path = self.get_driver_path('right_matrix_effect_breath')
payload = bytes([red1, green1, blue1, red2, green2, blue2])
with open(driver_path, 'wb') as driver_file:
driver_file.write(payload)
|
from homeassistant.components.zwave import const, workaround
from tests.mock.zwave import MockNode, MockValue
def test_get_device_no_component_mapping():
"""Test that None is returned."""
node = MockNode(manufacturer_id=" ")
value = MockValue(data=0, node=node)
assert workaround.get_device_component_mapping(value) is None
def test_get_device_component_mapping():
"""Test that component is returned."""
node = MockNode(manufacturer_id="010f", product_type="0b00")
value = MockValue(data=0, node=node, command_class=const.COMMAND_CLASS_SENSOR_ALARM)
assert workaround.get_device_component_mapping(value) == "binary_sensor"
def test_get_device_component_mapping_mti():
"""Test that component is returned."""
# GE Fan controller
node = MockNode(manufacturer_id="0063", product_type="4944", product_id="3034")
value = MockValue(
data=0, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
assert workaround.get_device_component_mapping(value) == "fan"
# GE Dimmer
node = MockNode(manufacturer_id="0063", product_type="4944", product_id="3031")
value = MockValue(
data=0, node=node, command_class=const.COMMAND_CLASS_SWITCH_MULTILEVEL
)
assert workaround.get_device_component_mapping(value) is None
def test_get_device_no_mapping():
"""Test that no device mapping is returned."""
node = MockNode(manufacturer_id=" ")
value = MockValue(data=0, node=node)
assert workaround.get_device_mapping(value) is None
def test_get_device_mapping_mt():
"""Test that device mapping mt is returned."""
node = MockNode(manufacturer_id="0047", product_type="5a52")
value = MockValue(data=0, node=node)
assert workaround.get_device_mapping(value) == "workaround_no_position"
def test_get_device_mapping_mtii():
"""Test that device mapping mtii is returned."""
node = MockNode(manufacturer_id="013c", product_type="0002", product_id="0002")
value = MockValue(data=0, node=node, index=0)
assert workaround.get_device_mapping(value) == "trigger_no_off_event"
def test_get_device_mapping_mti_instance():
"""Test that device mapping mti_instance is returned."""
node = MockNode(manufacturer_id="013c", product_type="0001", product_id="0005")
value = MockValue(data=0, node=node, instance=1)
assert workaround.get_device_mapping(value) == "refresh_node_on_update"
value = MockValue(data=0, node=node, instance=2)
assert workaround.get_device_mapping(value) is None
|
import sys
from queue import Queue, Empty
from kombu.exceptions import reraise
from kombu.utils.objects import cached_property
from kombu.log import get_logger
from . import virtual
try:
import Pyro4 as pyro
from Pyro4.errors import NamingError
from Pyro4.util import SerializerBase
except ImportError: # pragma: no cover
pyro = NamingError = SerializerBase = None # noqa
DEFAULT_PORT = 9090
E_NAMESERVER = """\
Unable to locate pyro nameserver on host {0.hostname}\
"""
E_LOOKUP = """\
Unable to lookup '{0.virtual_host}' in pyro nameserver on host {0.hostname}\
"""
logger = get_logger(__name__)
class Channel(virtual.Channel):
"""Pyro Channel."""
def close(self):
super().close()
if self.shared_queues:
self.shared_queues._pyroRelease()
def queues(self):
return self.shared_queues.get_queue_names()
def _new_queue(self, queue, **kwargs):
if queue not in self.queues():
self.shared_queues.new_queue(queue)
def _has_queue(self, queue, **kwargs):
return self.shared_queues.has_queue(queue)
def _get(self, queue, timeout=None):
queue = self._queue_for(queue)
return self.shared_queues.get(queue)
def _queue_for(self, queue):
if queue not in self.queues():
self.shared_queues.new_queue(queue)
return queue
def _put(self, queue, message, **kwargs):
queue = self._queue_for(queue)
self.shared_queues.put(queue, message)
def _size(self, queue):
return self.shared_queues.size(queue)
def _delete(self, queue, *args, **kwargs):
self.shared_queues.delete(queue)
def _purge(self, queue):
return self.shared_queues.purge(queue)
def after_reply_message_received(self, queue):
pass
@cached_property
def shared_queues(self):
return self.connection.shared_queues
class Transport(virtual.Transport):
"""Pyro Transport."""
Channel = Channel
#: memory backend state is global.
state = virtual.BrokerState()
default_port = DEFAULT_PORT
driver_type = driver_name = 'pyro'
def _open(self):
logger.debug("trying Pyro nameserver to find the broker daemon")
conninfo = self.client
try:
nameserver = pyro.locateNS(host=conninfo.hostname,
port=self.default_port)
except NamingError:
reraise(NamingError, NamingError(E_NAMESERVER.format(conninfo)),
sys.exc_info()[2])
try:
# name of registered pyro object
uri = nameserver.lookup(conninfo.virtual_host)
return pyro.Proxy(uri)
except NamingError:
reraise(NamingError, NamingError(E_LOOKUP.format(conninfo)),
sys.exc_info()[2])
def driver_version(self):
return pyro.__version__
@cached_property
def shared_queues(self):
return self._open()
if pyro is not None:
SerializerBase.register_dict_to_class("queue.Empty",
lambda cls, data: Empty())
@pyro.expose
@pyro.behavior(instance_mode="single")
class KombuBroker:
"""Kombu Broker used by the Pyro transport.
You have to run this as a separate (Pyro) service.
"""
def __init__(self):
self.queues = {}
def get_queue_names(self):
return list(self.queues)
def new_queue(self, queue):
if queue in self.queues:
return # silently ignore the fact that queue already exists
self.queues[queue] = Queue()
def has_queue(self, queue):
return queue in self.queues
def get(self, queue):
return self.queues[queue].get(block=False)
def put(self, queue, message):
self.queues[queue].put(message)
def size(self, queue):
return self.queues[queue].qsize()
def delete(self, queue):
del self.queues[queue]
def purge(self, queue):
while True:
try:
self.queues[queue].get(block=False)
except Empty:
break
# launch a Kombu Broker daemon with the command:
# ``python -m kombu.transport.pyro``
if __name__ == "__main__":
print("Launching Broker for Kombu's Pyro transport.")
with pyro.Daemon() as daemon:
print("(Expecting a Pyro name server at {}:{})"
.format(pyro.config.NS_HOST, pyro.config.NS_PORT))
with pyro.locateNS() as ns:
print("You can connect with Kombu using the url "
"'pyro://{}/kombu.broker'".format(pyro.config.NS_HOST))
uri = daemon.register(KombuBroker)
ns.register("kombu.broker", uri)
daemon.requestLoop()
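# Hedged usage sketch (not part of this module): once the broker daemon above is
# running and registered with a Pyro name server, a Kombu client could reach it
# through this transport with something like the following; the hostname and the
# 'example' queue name are placeholders.
#
#   from kombu import Connection
#   with Connection('pyro://localhost/kombu.broker') as conn:
#       with conn.SimpleQueue('example') as queue:
#           queue.put({'hello': 'world'})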
|
from datetime import timedelta
import logging
from nextcloudmonitor import NextcloudMonitor, NextcloudMonitorError
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_URL,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
DOMAIN = "nextcloud"
NEXTCLOUD_COMPONENTS = ("sensor", "binary_sensor")
SCAN_INTERVAL = timedelta(seconds=60)
# Validate user configuration
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_URL): cv.url,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
BINARY_SENSORS = (
"nextcloud_system_enable_avatars",
"nextcloud_system_enable_previews",
"nextcloud_system_filelocking.enabled",
"nextcloud_system_debug",
)
SENSORS = (
"nextcloud_system_version",
"nextcloud_system_theme",
"nextcloud_system_memcache.local",
"nextcloud_system_memcache.distributed",
"nextcloud_system_memcache.locking",
"nextcloud_system_freespace",
"nextcloud_system_cpuload",
"nextcloud_system_mem_total",
"nextcloud_system_mem_free",
"nextcloud_system_swap_total",
"nextcloud_system_swap_free",
"nextcloud_system_apps_num_installed",
"nextcloud_system_apps_num_updates_available",
"nextcloud_system_apps_app_updates_calendar",
"nextcloud_system_apps_app_updates_contacts",
"nextcloud_system_apps_app_updates_tasks",
"nextcloud_system_apps_app_updates_twofactor_totp",
"nextcloud_storage_num_users",
"nextcloud_storage_num_files",
"nextcloud_storage_num_storages",
"nextcloud_storage_num_storages_local",
"nextcloud_storage_num_storages_home",
"nextcloud_storage_num_storages_other",
"nextcloud_shares_num_shares",
"nextcloud_shares_num_shares_user",
"nextcloud_shares_num_shares_groups",
"nextcloud_shares_num_shares_link",
"nextcloud_shares_num_shares_mail",
"nextcloud_shares_num_shares_room",
"nextcloud_shares_num_shares_link_no_password",
"nextcloud_shares_num_fed_shares_sent",
"nextcloud_shares_num_fed_shares_received",
"nextcloud_shares_permissions_3_1",
"nextcloud_server_webserver",
"nextcloud_server_php_version",
"nextcloud_server_php_memory_limit",
"nextcloud_server_php_max_execution_time",
"nextcloud_server_php_upload_max_filesize",
"nextcloud_database_type",
"nextcloud_database_version",
"nextcloud_database_version",
"nextcloud_activeUsers_last5minutes",
"nextcloud_activeUsers_last1hour",
"nextcloud_activeUsers_last24hours",
)
def setup(hass, config):
"""Set up the Nextcloud integration."""
# Fetch Nextcloud Monitor api data
conf = config[DOMAIN]
try:
ncm = NextcloudMonitor(conf[CONF_URL], conf[CONF_USERNAME], conf[CONF_PASSWORD])
except NextcloudMonitorError:
_LOGGER.error("Nextcloud setup failed - Check configuration")
return False
hass.data[DOMAIN] = get_data_points(ncm.data)
hass.data[DOMAIN]["instance"] = conf[CONF_URL]
def nextcloud_update(event_time):
"""Update data from nextcloud api."""
try:
ncm.update()
except NextcloudMonitorError:
_LOGGER.error("Nextcloud update failed")
return False
hass.data[DOMAIN] = get_data_points(ncm.data)
hass.data[DOMAIN]["instance"] = conf[CONF_URL]
# Update sensors on time interval
track_time_interval(hass, nextcloud_update, conf[CONF_SCAN_INTERVAL])
for component in NEXTCLOUD_COMPONENTS:
discovery.load_platform(hass, component, DOMAIN, {}, config)
return True
# Use recursion to create list of sensors & values based on nextcloud api data
def get_data_points(api_data, key_path="", leaf=False):
"""Use Recursion to discover data-points and values.
Get dictionary of data-points by recursing through dict returned by api until
the dictionary value does not contain another dictionary and use the
resulting path of dictionary keys and resulting value as the name/value
for the data-point.
returns: dictionary of data-point/values
"""
result = {}
for key, value in api_data.items():
if isinstance(value, dict):
if leaf:
key_path = f"{key}_"
if not leaf:
key_path += f"{key}_"
leaf = True
result.update(get_data_points(value, key_path, leaf))
else:
result[f"{DOMAIN}_{key_path}{key}"] = value
leaf = False
return result
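# Hedged illustration (hypothetical payload, not part of the integration): given a
# nested API dictionary, get_data_points() flattens it into DOMAIN-prefixed keys.
#
#   get_data_points({"system": {"version": "20.0.0", "cpuload": [0.3, 0.2, 0.1]}})
#   -> {"nextcloud_system_version": "20.0.0",
#       "nextcloud_system_cpuload": [0.3, 0.2, 0.1]}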
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from filterpy.common import Saver
from filterpy.gh import (GHFilter, GHKFilter, least_squares_parameters,
optimal_noise_smoothing, GHFilterOrder)
from numpy import array
from numpy.random import randn
import matplotlib.pyplot as plt
def test_least_squares():
""" there is an alternative form for computing h for the least squares.
It works for all but the very first term (n=0). Use it to partially test
the output of least_squares_parameters(). This test does not test that
g is correct"""
for n in range (1, 100):
g,h = least_squares_parameters(n)
h2 = 4 - 2*g - (4*(g-2)**2 - 3*g**2)**.5
assert abs(h2-h) < 1.e-12
def test_1d_array():
f1 = GHFilter (0, 0, 1, .8, .2)
f2 = GHFilter (array([0]), array([0]), 1, .8, .2)
str(f1)
str(f2)
# test both give same answers, and that we can
# use a scalar for the measurement
for i in range(1,10):
f1.update(i)
f2.update(i)
assert f1.x == f2.x[0]
assert f1.dx == f2.dx[0]
assert f1.VRF() == f2.VRF()
# test using an array for the measurement
s1 = Saver(f1)
s2 = Saver(f2)
for i in range(1,10):
f1.update(i)
f2.update(array([i]))
s1.save()
s2.save()
assert f1.x == f2.x[0]
assert f1.dx == f2.dx[0]
assert f1.VRF() == f2.VRF()
s1.to_array()
s2.to_array()
def test_2d_array():
""" test using 2 independent variables for the
state variable.
"""
f = GHFilter(array([0,1]), array([0,0]), 1, .8, .2)
f0 = GHFilter(0, 0, 1, .8, .2)
f1 = GHFilter(1, 0, 1, .8, .2)
# test using scalar in update (not normal, but possible)
for i in range (1,10):
f.update (i)
f0.update(i)
f1.update(i)
assert f.x[0] == f0.x
assert f.x[1] == f1.x
assert f.dx[0] == f0.dx
assert f.dx[1] == f1.dx
# test using array for update (typical scenario)
f = GHFilter(array([0,1]), array([0,0]), 1, .8, .2)
f0 = GHFilter(0, 0, 1, .8, .2)
f1 = GHFilter(1, 0, 1, .8, .2)
for i in range (1,10):
f.update (array([i, i+3]))
f0.update(i)
f1.update(i+3)
assert f.x[0] == f0.x
assert f.x[1] == f1.x
assert f.dx[0] == f0.dx
assert f.dx[1] == f1.dx
assert f.VRF() == f0.VRF()
assert f.VRF() == f1.VRF()
def optimal_test():
def fx(x):
return .1*x**2 + 3*x -4
g,h,k = optimal_noise_smoothing(.2)
f = GHKFilter(-4,0,0,1,g,h,k)
ys = []
zs = []
for i in range(100):
z = fx(i) + randn()*10
f.update(z)
ys.append(f.x)
zs.append(z)
plt.plot(ys)
plt.plot(zs)
def test_GHFilterOrder():
def fx(x):
return 2*x+1
f1 = GHFilterOrder(x0=array([0,0]), dt=1, order=1, g=.6, h=.02)
f2 = GHFilter(x=0, dx=0, dt=1, g=.6, h=.02)
for i in range(100):
z = fx(i) + randn()
f1.update(z)
f2.update(z)
assert abs(f1.x[0]-f2.x) < 1.e-18
if __name__ == "__main__":
optimal_test()
test_least_squares()
test_1d_array()
test_2d_array()
test_GHFilterOrder()
print('all passed')
|
from unittest import TestCase
import numpy as np
from scattertext.termsignificance import LogOddsRatioUninformativeDirichletPrior
from scattertext.termscoring.LogOddsUniformativePriorScore import LogOddsUninformativePriorScore
class TestLogOddsUninformativePriorScore(TestCase):
def test_get_score(self):
cat_counts, not_cat_counts = self._get_counts()
scores = LogOddsUninformativePriorScore.get_score(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(
scores,
#np.array([ 0.0590679, 0.1006782, 0.0590679, -0.1475645])
np.array([ 0.4447054, 0.9433088, 0.4447054, -0.9971462])
)
'''
def test_get_delta_hats(self):
cat_counts, not_cat_counts = self._get_counts()
scores = LogOddsUninformativePriorScore.get_delta_hats(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(scores,
np.array([-0.6095321, -1.0345766, -0.6095321, 1.5201005]))
'''
def test_get_score_threshold(self):
cat_counts = np.array( [1, 5, 2, 7, 10])
not_cat_counts = np.array([10, 10, 1, 5, 10])
scores = LogOddsUninformativePriorScore\
.get_thresholded_score(cat_counts, not_cat_counts, alpha_w=0.01, threshold=0.1)
np.testing.assert_almost_equal(
scores,
np.array([-0.9593012, -0. , 0. , 0.8197493, 0. ])
)
def test__turn_pvals_into_scores(self):
p_vals = np.array([0.01, 0.99, 0.5, 0.1, 0.9])
scores = LogOddsUninformativePriorScore._turn_pvals_into_scores(p_vals)
np.testing.assert_almost_equal(scores, [0.98, -0.98, -0., 0.8, -0.8])
def test__turn_counts_into_matrix(self):
cat_counts, not_cat_counts = self._get_counts()
X = LogOddsUninformativePriorScore._turn_counts_into_matrix(cat_counts, not_cat_counts)
np.testing.assert_almost_equal(X, np.array([[1, 100],
[5, 510],
[1, 100],
[9, 199]]))
def _get_counts(self):
cat_counts = np.array([1, 5, 1, 9])
not_cat_counts = np.array([100, 510, 100, 199])
return cat_counts, not_cat_counts
|
import collections
import hashlib
import io
import ipaddress
import itertools
import json
import logging
import os.path
import re
from contextlib import contextmanager
from paasta_tools import iptables
from paasta_tools.cli.utils import get_instance_config
from paasta_tools.marathon_tools import get_all_namespaces_for_service
from paasta_tools.marathon_tools import marathon_services_running_here
from paasta_tools.utils import get_running_mesos_docker_containers
from paasta_tools.utils import load_system_paasta_config
from paasta_tools.utils import NoConfigurationForServiceError
from paasta_tools.utils import timed_flock
INBOUND_PRIVATE_IP_RANGES = (
"127.0.0.0/255.0.0.0",
"169.254.0.0/255.255.0.0",
)
OUTBOUND_PRIVATE_IP_RANGES = (
"127.0.0.0/255.0.0.0",
"10.0.0.0/255.0.0.0",
"172.16.0.0/255.240.0.0",
"192.168.0.0/255.255.0.0",
"169.254.0.0/255.255.0.0",
)
DEFAULT_SYNAPSE_SERVICE_DIR = "/var/run/synapse/services"
DEFAULT_FIREWALL_FLOCK_PATH = "/var/lib/paasta/firewall.flock"
DEFAULT_FIREWALL_FLOCK_TIMEOUT_SECS = 5
RESOLV_CONF = "/etc/resolv.conf"
# not exactly correct, but sufficient to filter out ipv6 or other weird things
IPV4_REGEX = re.compile(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$")
log = logging.getLogger(__name__)
class ServiceGroup(collections.namedtuple("ServiceGroup", ("service", "instance"))):
"""A service group.
:param service: service name
:param instance: instance name
"""
__slots__ = ()
@property
def chain_name(self):
"""Return iptables chain name.
Chain names are limited to 28 characters, so we have to trim quite a
bit. To attempt to ensure we don't have collisions due to shortening,
we append a hash to the end.
"""
chain = "PAASTA.{}".format(self.service[:10])
chain += "." + hashlib.sha256(json.dumps(self).encode("utf8")).hexdigest()[:10]
assert len(chain) <= 28, len(chain)
return chain
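# Illustrative note (hypothetical values): for ServiceGroup("example_service", "main"),
# the chain name is "PAASTA." + "example_se" (the service name trimmed to 10 characters)
# + "." + the first 10 hex characters of the SHA-256 of the JSON-serialized tuple,
# which is exactly 28 characters in total.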
def get_rules(self, soa_dir, synapse_service_dir):
try:
conf = get_instance_config(
self.service,
self.instance,
load_system_paasta_config().get_cluster(),
load_deployments=False,
soa_dir=soa_dir,
)
except NotImplementedError:
# PAASTA-11414: new instance types may not provide this configuration information;
# we don't want to break all of the firewall infrastructure when that happens
return ()
except NoConfigurationForServiceError:
# PAASTA-12050: a deleted service may still have containers running on PaaSTA hosts
# for several minutes after the directory disappears from soa-configs.
return ()
rules = list()
if conf.get_inbound_firewall():
rules.extend(_inbound_traffic_rule(conf, self.service, self.instance))
if conf.get_outbound_firewall():
rules.extend(_default_rules(conf, self.log_prefix))
rules.extend(_well_known_rules(conf))
rules.extend(_smartstack_rules(conf, soa_dir, synapse_service_dir))
rules.extend(_cidr_rules(conf))
return tuple(rules)
def update_rules(self, soa_dir, synapse_service_dir):
iptables.ensure_chain(
self.chain_name, self.get_rules(soa_dir, synapse_service_dir)
)
iptables.reorder_chain(self.chain_name)
@property
def log_prefix(self):
# log-prefix is limited to 29 characters total
# space at the end is necessary to separate it from the rest of the line
# no restrictions on any particular characters afaict
return f"paasta.{self.service}"[:28] + " "
def _default_rules(conf, log_prefix):
log_rule = iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="LOG",
target_parameters=(("log-prefix", (log_prefix,)),),
matches=(("limit", (("limit", ("1/sec",)), ("limit-burst", ("1",)))),),
)
policy = conf.get_outbound_firewall()
if policy == "block":
return (
iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="REJECT",
matches=(),
target_parameters=((("reject-with", ("icmp-port-unreachable",))),),
),
log_rule,
)
elif policy == "monitor":
return (log_rule,)
else:
raise AssertionError(policy)
def _well_known_rules(conf):
# Allow access to certain resources for all services by default.
yield iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="PAASTA-COMMON",
matches=(),
target_parameters=(),
)
for dep in conf.get_dependencies() or ():
resource = dep.get("well-known")
if resource == "internet":
yield iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="PAASTA-INTERNET",
matches=(),
target_parameters=(),
)
elif resource is not None:
# TODO: handle better
raise AssertionError(resource)
def _synapse_backends(synapse_service_dir, namespace):
# Return the contents of the synapse JSON file for a particular service namespace
# e.g. /var/run/synapse/services/example_happyhour.main.json
with open(
os.path.join(synapse_service_dir, namespace + ".json")
) as synapse_backend_file:
synapse_backend_json = json.load(synapse_backend_file)
return synapse_backend_json
def _yocalhost_rule(port, comment, protocol="tcp"):
"""Return an iptables rule allowing access to a yocalhost port."""
return iptables.Rule(
protocol=protocol,
src="0.0.0.0/0.0.0.0",
dst="169.254.255.254/255.255.255.255",
target="ACCEPT",
matches=(
("comment", (("comment", (comment,)),)),
(protocol, (("dport", (str(port),)),)),
),
target_parameters=(),
)
def _nerve_ports_for_service_instance(service_name, instance_name):
"""Return the nerve ports for a given service instance"""
for name, instance, port in marathon_services_running_here():
if name == service_name and instance_name == instance:
yield port
def _inbound_traffic_rule(conf, service_name, instance_name, protocol="tcp"):
"""Return iptables rules for inbound traffic
If this is set to "reject", this is limited only to traffic from localhost"""
policy = conf.get_inbound_firewall()
if policy == "reject":
for port in _nerve_ports_for_service_instance(service_name, instance_name):
yield iptables.Rule(
protocol=protocol,
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="REJECT",
matches=((protocol, (("dport", (str(port),)),)),),
target_parameters=((("reject-with", ("icmp-port-unreachable",))),),
)
for ip_range in INBOUND_PRIVATE_IP_RANGES:
yield iptables.Rule(
protocol=protocol,
src=ip_range,
dst="0.0.0.0/0.0.0.0",
target="ACCEPT",
matches=((protocol, (("dport", (str(port),)),)),),
target_parameters=(),
)
def _smartstack_rules(conf, soa_dir, synapse_service_dir):
for dep in conf.get_dependencies() or ():
namespace = dep.get("smartstack")
if namespace is None:
continue
# TODO: support wildcards
# synapse backends
try:
backends = _synapse_backends(synapse_service_dir, namespace)
except (OSError, IOError, ValueError):
# Don't fatal if something goes wrong loading the synapse files
log.exception(f"Unable to load backend {namespace}")
backends = ()
for backend in backends:
yield iptables.Rule(
protocol="tcp",
src="0.0.0.0/0.0.0.0",
dst="{}/255.255.255.255".format(backend["host"]),
target="ACCEPT",
matches=(
("comment", (("comment", ("backend " + namespace,)),)),
("tcp", (("dport", (str(backend["port"]),)),)),
),
target_parameters=(),
)
# synapse-haproxy proxy_port
service, _ = namespace.split(".", 1)
service_namespaces = get_all_namespaces_for_service(service, soa_dir=soa_dir)
port = dict(service_namespaces)[namespace]["proxy_port"]
yield _yocalhost_rule(port, "proxy_port " + namespace)
def _ports_valid(ports):
for port in ports:
try:
port = int(port)
except ValueError:
log.exception(f"Unable to parse port: {port}")
return False
if not 1 <= port <= 65535:
log.error(f"Bogus port number: {port}")
return False
else:
return True
def _cidr_rules(conf):
for dep in conf.get_dependencies() or ():
cidr = dep.get("cidr")
port_str = dep.get("port")
if cidr is None:
continue
try:
network = ipaddress.IPv4Network(cidr)
except ipaddress.AddressValueError:
log.exception(f"Unable to parse IP network: {cidr}")
continue
if port_str is not None:
# port can be either a single port like "443" or a range like "1024:65535"
ports = str(port_str).split(":")
if len(ports) > 2:
log.error(
f'"port" must be either a single value or a range like "1024:65535": {port_str}'
)
continue
if not _ports_valid(ports):
continue
# Set up an ip rule if no port, or a tcp/udp rule if there is a port
dst = f"{network.network_address.exploded}/{network.netmask}"
if port_str is None:
yield iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst=dst,
target="ACCEPT",
matches=(("comment", (("comment", (f"allow {network}:*",)),)),),
target_parameters=(),
)
else:
for proto in ("tcp", "udp"):
yield iptables.Rule(
protocol=proto,
src="0.0.0.0/0.0.0.0",
dst=dst,
target="ACCEPT",
matches=(
("comment", (("comment", (f"allow {network}:{port_str}",)),)),
(proto, (("dport", (str(port_str),)),)),
),
target_parameters=(),
)
def services_running_here():
"""Generator helper that yields (service, instance, mac address) of both
marathon tasks.
"""
for container in get_running_mesos_docker_containers():
if container["HostConfig"]["NetworkMode"] != "bridge":
continue
service = container["Labels"].get("paasta_service")
instance = container["Labels"].get("paasta_instance")
if service is None or instance is None:
continue
network_info = container["NetworkSettings"]["Networks"]["bridge"]
mac = network_info["MacAddress"]
ip = network_info["IPAddress"]
yield service, instance, mac, ip
def active_service_groups():
"""Return active service groups."""
service_groups = collections.defaultdict(set)
for service, instance, mac, ip in services_running_here():
# TODO: only include macs that start with MAC_ADDRESS_PREFIX?
service_groups[ServiceGroup(service, instance)].add(mac)
return service_groups
def _dns_servers():
with io.open(RESOLV_CONF) as f:
for line in f:
parts = line.split()
if (
len(parts) == 2
and parts[0] == "nameserver"
and IPV4_REGEX.match(parts[1])
):
yield parts[1]
def ensure_shared_chains():
_ensure_dns_chain()
_ensure_internet_chain()
_ensure_common_chain()
def _ensure_common_chain():
"""The common chain allows access for all services to certain resources."""
iptables.ensure_chain(
"PAASTA-COMMON",
(
# Allow return traffic for incoming connections
iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="ACCEPT",
matches=(("conntrack", (("ctstate", ("ESTABLISHED",)),)),),
target_parameters=(),
),
_yocalhost_rule(1463, "scribed"),
_yocalhost_rule(8125, "metrics-relay", protocol="udp"),
_yocalhost_rule(3030, "sensu"),
iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="PAASTA-DNS",
matches=(),
target_parameters=(),
),
),
)
def _ensure_dns_chain():
iptables.ensure_chain(
"PAASTA-DNS",
tuple(
itertools.chain.from_iterable(
(
iptables.Rule(
protocol="udp",
src="0.0.0.0/0.0.0.0",
dst=f"{dns_server}/255.255.255.255",
target="ACCEPT",
matches=(("udp", (("dport", ("53",)),)),),
target_parameters=(),
),
# DNS goes over TCP sometimes, too!
iptables.Rule(
protocol="tcp",
src="0.0.0.0/0.0.0.0",
dst=f"{dns_server}/255.255.255.255",
target="ACCEPT",
matches=(("tcp", (("dport", ("53",)),)),),
target_parameters=(),
),
)
for dns_server in _dns_servers()
)
),
)
def _ensure_internet_chain():
iptables.ensure_chain(
"PAASTA-INTERNET",
(
iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="ACCEPT",
matches=(),
target_parameters=(),
),
)
+ tuple(
iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst=ip_range,
target="RETURN",
matches=(),
target_parameters=(),
)
for ip_range in OUTBOUND_PRIVATE_IP_RANGES
),
)
def ensure_service_chains(service_groups, soa_dir, synapse_service_dir):
"""Ensure service chains exist and have the right rules.
service_groups is a dict {ServiceGroup: set([mac_address..])}
Returns dictionary {[service chain] => [list of mac addresses]}.
"""
chains = {}
for service, macs in service_groups.items():
service.update_rules(soa_dir, synapse_service_dir)
chains[service.chain_name] = macs
return chains
def dispatch_rule(chain, mac):
return iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target=chain,
matches=(("mac", (("mac-source", (mac.upper(),)),)),),
target_parameters=(),
)
def ensure_dispatch_chains(service_chains):
paasta_rules = set(
itertools.chain.from_iterable(
(dispatch_rule(chain, mac) for mac in macs)
for chain, macs in service_chains.items()
)
)
iptables.ensure_chain("PAASTA", paasta_rules)
jump_to_paasta = iptables.Rule(
protocol="ip",
src="0.0.0.0/0.0.0.0",
dst="0.0.0.0/0.0.0.0",
target="PAASTA",
matches=(),
target_parameters=(),
)
iptables.ensure_rule("INPUT", jump_to_paasta)
iptables.ensure_rule("FORWARD", jump_to_paasta)
def garbage_collect_old_service_chains(desired_chains):
current_paasta_chains = {
chain for chain in iptables.all_chains() if chain.startswith("PAASTA.")
}
for chain in current_paasta_chains - set(desired_chains):
iptables.delete_chain(chain)
def general_update(soa_dir, synapse_service_dir):
"""Update iptables to match the current PaaSTA state."""
ensure_shared_chains()
service_chains = ensure_service_chains(
active_service_groups(), soa_dir, synapse_service_dir
)
ensure_dispatch_chains(service_chains)
garbage_collect_old_service_chains(service_chains)
def prepare_new_container(soa_dir, synapse_service_dir, service, instance, mac):
"""Update iptables to include rules for a new (not yet running) MAC address
"""
ensure_shared_chains() # probably already set, but just to be safe
service_group = ServiceGroup(service, instance)
service_group.update_rules(soa_dir, synapse_service_dir)
iptables.insert_rule("PAASTA", dispatch_rule(service_group.chain_name, mac))
@contextmanager
def firewall_flock(flock_path=DEFAULT_FIREWALL_FLOCK_PATH):
""" Grab an exclusive flock to avoid concurrent iptables updates
"""
with io.FileIO(flock_path, "w") as f:
with timed_flock(f, seconds=DEFAULT_FIREWALL_FLOCK_TIMEOUT_SECS):
yield
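# Hedged usage sketch: callers are expected to take the flock before mutating
# iptables, e.g. (soa_dir and synapse_service_dir supplied by the caller):
#
#   with firewall_flock():
#       general_update(soa_dir, synapse_service_dir)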
|
import unittest
import numpy as np
from chainer.backends import cuda
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.faster_rcnn import bbox2loc
from chainercv.links.model.faster_rcnn import loc2bbox
from chainercv.utils import generate_random_bbox
class TestLocBboxConversions(unittest.TestCase):
def setUp(self):
self.src_bbox = np.array([[0, 0, 49, 29]], dtype=np.float32)
self.dst_bbox = np.array([[0, 0, 49, 29]], dtype=np.float32)
self.loc = np.array([[0, 0, 0, 0]], dtype=np.float32)
def check_bbox2loc(self, src_bbox, dst_bbox, loc):
pred_loc = bbox2loc(src_bbox, dst_bbox)
self.assertIsInstance(pred_loc, type(loc))
np.testing.assert_equal(cuda.to_cpu(pred_loc),
cuda.to_cpu(loc))
def test_bbox2loc_cpu(self):
self.check_bbox2loc(
self.src_bbox, self.dst_bbox, self.loc)
@attr.gpu
def test_bbox2loc_gpu(self):
self.check_bbox2loc(
cuda.to_gpu(self.src_bbox),
cuda.to_gpu(self.dst_bbox),
cuda.to_gpu(self.loc))
def check_loc2bbox(self, raw_bbox, bbox, expected):
pred_raw_bbox = loc2bbox(raw_bbox, bbox)
self.assertIsInstance(pred_raw_bbox, type(expected))
np.testing.assert_equal(
cuda.to_cpu(pred_raw_bbox), cuda.to_cpu(expected))
def test_loc2bbox_cpu(self):
self.check_loc2bbox(
self.src_bbox,
self.loc,
self.dst_bbox)
@attr.gpu
def test_loc2bbox_gpu(self):
self.check_loc2bbox(
cuda.to_gpu(self.src_bbox),
cuda.to_gpu(self.loc),
cuda.to_gpu(self.dst_bbox))
class TestDeltaEncodeDecodeConsistency(unittest.TestCase):
def setUp(self):
self.src_bbox = generate_random_bbox(8, (64, 32), 4, 16)
self.dst_bbox = self.src_bbox + 1
def check_bbox_loc_conversions_consistency(
self, src_bbox, dst_bbox):
bbox = bbox2loc(src_bbox, dst_bbox)
out_raw_bbox = loc2bbox(src_bbox, bbox)
np.testing.assert_almost_equal(
cuda.to_cpu(out_raw_bbox), cuda.to_cpu(dst_bbox), decimal=5)
def test_bbox_loc_conversions_consistency_cpu(self):
self.check_bbox_loc_conversions_consistency(
self.src_bbox, self.dst_bbox)
@attr.gpu
def test_bbox_loc_conversions_consistency_gpu(self):
self.check_bbox_loc_conversions_consistency(
cuda.to_gpu(self.src_bbox),
cuda.to_gpu(self.dst_bbox))
testing.run_module(__name__, __file__)
|
import pytest
from homeassistant.components.NEW_DOMAIN import DOMAIN
import homeassistant.components.automation as automation
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a NEW_DOMAIN."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_actions = [
{
"domain": DOMAIN,
"type": "turn_on",
"device_id": device_entry.id,
"entity_id": "NEW_DOMAIN.test_5678",
},
{
"domain": DOMAIN,
"type": "turn_off",
"device_id": device_entry.id,
"entity_id": "NEW_DOMAIN.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_action(hass):
"""Test for turn_on and turn_off actions."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "event",
"event_type": "test_event_turn_off",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "NEW_DOMAIN.entity",
"type": "turn_off",
},
},
{
"trigger": {
"platform": "event",
"event_type": "test_event_turn_on",
},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "NEW_DOMAIN.entity",
"type": "turn_on",
},
},
]
},
)
turn_off_calls = async_mock_service(hass, "NEW_DOMAIN", "turn_off")
turn_on_calls = async_mock_service(hass, "NEW_DOMAIN", "turn_on")
hass.bus.async_fire("test_event_turn_off")
await hass.async_block_till_done()
assert len(turn_off_calls) == 1
assert len(turn_on_calls) == 0
hass.bus.async_fire("test_event_turn_on")
await hass.async_block_till_done()
assert len(turn_off_calls) == 1
assert len(turn_on_calls) == 1
|
import subprocess
import time
from datetime import datetime
import dateutil.parser
from blinker import signal
from dateutil.tz import gettz
from nikola.plugin_categories import Command
from nikola.utils import clean_before_deployment
class CommandDeploy(Command):
"""Deploy site."""
name = "deploy"
doc_usage = "[preset [preset...]]"
doc_purpose = "deploy the site"
doc_description = "Deploy the site by executing deploy commands from the presets listed on the command line. If no presets are specified, `default` is executed."
def _execute(self, command, args):
"""Execute the deploy command."""
# Get last-deploy from persistent state
last_deploy = self.site.state.get('last_deploy')
if last_deploy is not None:
last_deploy = dateutil.parser.parse(last_deploy)
clean = False
else:
clean = True
if self.site.config['COMMENT_SYSTEM'] and self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
self.logger.warning("\nWARNING WARNING WARNING WARNING\n"
"You are deploying using the nikolademo Disqus account.\n"
"That means you will not be able to moderate the comments in your own site.\n"
"And is probably not what you want to do.\n"
"Think about it for 5 seconds, I'll wait :-)\n"
"(press Ctrl+C to abort)\n")
time.sleep(5)
# Remove drafts and future posts if requested
undeployed_posts = clean_before_deployment(self.site)
if undeployed_posts:
self.logger.warning("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
if args:
presets = args
else:
presets = ['default']
# test for preset existence
for preset in presets:
try:
self.site.config['DEPLOY_COMMANDS'][preset]
except KeyError:
self.logger.error('No such preset: {0}'.format(preset))
return 255
for preset in presets:
self.logger.info("=> preset '{0}'".format(preset))
for command in self.site.config['DEPLOY_COMMANDS'][preset]:
self.logger.info("==> {0}".format(command))
try:
subprocess.check_call(command, shell=True)
except subprocess.CalledProcessError as e:
self.logger.error('Failed deployment -- command {0} '
'returned {1}'.format(e.cmd, e.returncode))
return e.returncode
self.logger.info("Successful deployment")
new_deploy = datetime.utcnow()
if last_deploy is None:
last_deploy = new_deploy
self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)
# Store timestamp of successful deployment
self.site.state.set('last_deploy', new_deploy.isoformat())
if clean:
self.logger.info(
'Looks like this is the first time you deployed this site. '
'Let us know you are using Nikola '
'at <https://users.getnikola.com/add/> if you want!')
def _emit_deploy_event(self, last_deploy, new_deploy, clean=False, undeployed=None):
"""Emit events for all timeline entries newer than last deploy.
last_deploy: datetime
Time stamp of the last successful deployment.
new_deploy: datetime
Time stamp of the current deployment.
clean: bool
True when it appears like deploy is being run after a clean.
undeployed: list
Posts that were removed before deployment due to DEPLOY_* settings.
"""
event = {
'last_deploy': last_deploy,
'new_deploy': new_deploy,
'clean': clean,
'undeployed': undeployed
}
if last_deploy.tzinfo is None:
last_deploy = last_deploy.replace(tzinfo=gettz('UTC'))
deployed = [
entry for entry in self.site.timeline
if entry.date > last_deploy and entry not in undeployed
]
event['deployed'] = deployed
if len(deployed) > 0 or len(undeployed) > 0:
signal('deployed').send(event)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import multiprocessing
import ntpath
from absl import flags
from perfkitbenchmarker import background_tasks
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from six.moves import range
FLAGS = flags.FLAGS
flags.DEFINE_integer('max_bandwidth_mb', 500,
'The maximum bandwidth, in megabytes, to test in a '
'UDP stream.')
flags.DEFINE_integer('min_bandwidth_mb', 100,
'The minimum bandwidth, in megabytes, to test in a '
'UDP stream.')
flags.DEFINE_integer('bandwidth_step_mb', 100,
'The amount of megabytes to increase bandwidth in each '
'UDP stream test.')
flags.DEFINE_integer('udp_stream_seconds', 3,
'The amount of time to run the UDP stream test.')
flags.DEFINE_integer('udp_client_threads', 1,
'Number of parallel client threads to run.')
flags.DEFINE_integer('udp_buffer_len', 100,
'UDP packet size in bytes.')
flags.DEFINE_integer('tcp_stream_seconds', 3,
'The amount of time to run the TCP stream test.')
flags.DEFINE_integer('tcp_number_of_streams', 10,
'The number of parallel streams to run in the TCP test.')
flags.DEFINE_integer('socket_buffer_size', None,
'The socket buffer size in megabytes. If None is '
'specified then the socket buffer size will not be set.')
flags.DEFINE_bool('run_tcp', True,
'setting to false will disable the run of the TCP test')
flags.DEFINE_bool('run_udp', False,
'setting to true will enable the run of the UDP test')
IPERF3_DIR = 'iperf-3.1.3-win64'
IPERF3_ZIP = IPERF3_DIR + '.zip'
# TODO(user): Add checksum for windows packages.
IPERF3_URL = 'https://iperf.fr/download/windows/' + IPERF3_ZIP
IPERF3_OUT_FILE = 'iperf_results'
IPERF3_UDP_PORT = 5201
IPERF3_TCP_PORT = IPERF3_UDP_PORT
def Install(vm):
zip_path = ntpath.join(vm.temp_dir, IPERF3_ZIP)
vm.DownloadFile(IPERF3_URL, zip_path)
vm.UnzipFile(zip_path, vm.temp_dir)
def RunIperf3TCPMultiStream(sending_vm, receiving_vm, use_internal_ip=True):
"""Run a multi-stream TCP bandwidth between two VMs.
Args:
sending_vm: The client VM that will send the TCP packets.
receiving_vm: The server VM that will receive the UDP packets.
use_internal_ip: if true, the private network will be used for the test.
if false, the external network will be used for the test.
Returns:
List of sample objects each representing a single metric on a single run.
"""
receiver_ip = (
receiving_vm.internal_ip if use_internal_ip else receiving_vm.ip_address)
socket_buffer_string = ''
if FLAGS.socket_buffer_size:
socket_buffer_string = ' -w {socket_buffer}M '.format(
socket_buffer=FLAGS.socket_buffer_size)
sender_args = ('--client {ip} --port {port} -t {time} -P {num_streams} -f m '
' {socket_buffer_arg} > {out_file}').format(
ip=receiver_ip,
port=IPERF3_TCP_PORT,
time=FLAGS.tcp_stream_seconds,
num_streams=FLAGS.tcp_number_of_streams,
socket_buffer_arg=socket_buffer_string,
out_file=IPERF3_OUT_FILE)
output = _RunIperf3ServerClientPair(sending_vm, sender_args, receiving_vm)
return ParseTCPMultiStreamOutput(output, sending_vm, receiving_vm,
FLAGS.tcp_number_of_streams, use_internal_ip)
def _RunIperf3(vm, options):
iperf3_exec_dir = ntpath.join(vm.temp_dir, IPERF3_DIR)
command = ('cd {iperf3_exec_dir}; '
'.\\iperf3.exe {options}').format(
iperf3_exec_dir=iperf3_exec_dir, options=options)
vm.RemoteCommand(command, timeout=FLAGS.tcp_stream_seconds + 30)
@vm_util.Retry(max_retries=3)
def _RunIperf3ServerClientPair(sending_vm, sender_args, receiving_vm):
"""Create a server-client iperf3 pair.
The server exits after the client completes its request.
Args:
sending_vm: The client VM that will send the UDP/TCP packets.
sender_args: the client VM iperf3 args.
receiving_vm: The server VM that will receive the UDP/TCP packets.
Returns:
output from the client iperf3 process.
"""
iperf3_exec_dir = ntpath.join(sending_vm.temp_dir, IPERF3_DIR)
receiver_args = '--server -1'
server_process = multiprocessing.Process(
name='server',
target=_RunIperf3,
args=(receiving_vm, receiver_args))
server_process.start()
receiving_vm.WaitForProcessRunning('iperf3', 3)
client_process = multiprocessing.Process(
name='client',
target=_RunIperf3,
args=(sending_vm, sender_args))
client_process.start()
server_process.join()
client_process.join()
cat_command = 'cd {iperf3_exec_dir}; cat {out_file}'.format(
iperf3_exec_dir=iperf3_exec_dir, out_file=IPERF3_OUT_FILE)
command_out, _ = sending_vm.RemoteCommand(cat_command)
return command_out
def RunIperf3UDPStream(sending_vm, receiving_vm, use_internal_ip=True):
"""Runs the Iperf3 UDP stream test.
Args:
sending_vm: The client VM that will send the UDP packets.
receiving_vm: The server VM that will receive the UDP packets.
use_internal_ip: if true, the private network will be used for the test.
if false, the external network will be used for the test.
Returns:
List of sample objects each representing a single metric on a single run.
"""
iperf3_exec_dir = ntpath.join(sending_vm.temp_dir, IPERF3_DIR)
def _RunIperf3UDP(vm, options):
command = 'cd {iperf3_exec_dir}; .\\iperf3.exe {options}'.format(
iperf3_exec_dir=iperf3_exec_dir,
options=options)
vm.RemoteCommand(command)
receiver_ip = (receiving_vm.internal_ip if use_internal_ip
else receiving_vm.ip_address)
samples = []
for bandwidth in range(FLAGS.min_bandwidth_mb,
FLAGS.max_bandwidth_mb,
FLAGS.bandwidth_step_mb):
sender_args = ('--client {server_ip} --udp -t {duration} -P {num_threads} '
'-b {bandwidth}M -l {buffer_len} > {out_file}'.format(
server_ip=receiver_ip,
duration=FLAGS.udp_stream_seconds,
num_threads=FLAGS.udp_client_threads,
bandwidth=bandwidth,
buffer_len=FLAGS.udp_buffer_len,
out_file=IPERF3_OUT_FILE))
# the "-1" flag will cause the server to exit after performing a single
# test. This is necessary because the RemoteCommand call will not return
# until the command completes, even if it is run as a daemon.
receiver_args = '--server -1'
process_args = [(_RunIperf3UDP, (receiving_vm, receiver_args), {}),
(_RunIperf3UDP, (sending_vm, sender_args), {})]
background_tasks.RunParallelProcesses(process_args, 200, 1)
# retrieve the results and parse them
cat_command = 'cd {iperf3_exec_dir}; cat {out_file}'.format(
iperf3_exec_dir=iperf3_exec_dir,
out_file=IPERF3_OUT_FILE)
command_out, _ = sending_vm.RemoteCommand(cat_command)
samples.extend(
GetUDPStreamSamples(sending_vm, receiving_vm, command_out, bandwidth,
use_internal_ip))
return samples
# Connecting to host 127.0.0.1, port 5201
# [ 4] local 127.0.0.1 port 53966 connected to 127.0.0.1 port 5201
# [ 6] local 127.0.0.1 port 53967 connected to 127.0.0.1 port 5201
# [ 8] local 127.0.0.1 port 53968 connected to 127.0.0.1 port 5201
# [ 10] local 127.0.0.1 port 53969 connected to 127.0.0.1 port 5201
# [ 12] local 127.0.0.1 port 53970 connected to 127.0.0.1 port 5201
# [ ID] Interval Transfer Bandwidth
# [ 4] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
# [ 6] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
# [ 8] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
# [ 10] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
# [ 12] 0.00-1.01 sec 102 MBytes 854 Mbits/sec
# [SUM] 0.00-1.01 sec 512 MBytes 4.27 Gbits/sec
# - - - - - - - - - - - - - - - - - - - - - - - - -
# [ 4] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
# [ 6] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
# [ 8] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
# [ 10] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
# [ 12] 1.01-2.00 sec 106 MBytes 895 Mbits/sec
# [SUM] 1.01-2.00 sec 531 MBytes 4.48 Gbits/sec
# - - - - - - - - - - - - - - - - - - - - - - - - -
# [ 4] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
# [ 6] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
# [ 8] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
# [ 10] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
# [ 12] 2.00-3.01 sec 126 MBytes 1.05 Gbits/sec
# [SUM] 2.00-3.01 sec 631 MBytes 5.27 Gbits/sec
# - - - - - - - - - - - - - - - - - - - - - - - - -
# [ ID] Interval Transfer Bandwidth
# [ 4] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
# [ 4] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
# [ 6] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
# [ 6] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
# [ 8] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
# [ 8] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
# [ 10] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
# [ 10] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
# [ 12] 0.00-3.01 sec 335 MBytes 935 Mbits/sec sender
# [ 12] 0.00-3.01 sec 335 MBytes 935 Mbits/sec receiver
# [SUM] 0.00-3.01 sec 1.64 GBytes 4.67 Gbits/sec sender
# [SUM] 0.00-3.01 sec 1.64 GBytes 4.67 Gbits/sec receiver
#
# iperf Done.
def ParseTCPMultiStreamOutput(results, sending_vm, receiving_vm, num_streams,
internal_ip_used):
"""Turns the 'results' into a list of samples.
Args:
results: string output of iperf3 TCP multi stream output.
sending_vm: vm where the client is run.
receiving_vm: vm where the server is run.
num_streams: number of TCP streams.
internal_ip_used: for the metadata, lets the user know if it was the
internal or external IP used in the test.
Returns:
List of samples representing the results.
"""
data_lines = [line.rstrip('\r') for line in results.split('\n')]
data_lines = [line for line in data_lines if 'receiver' in line]
samples = []
for line in data_lines:
line_data = [val for val in line.split(' ') if val]
if line_data[0] == '[':
line_data = line_data[1:]
thread_id = line_data[0].rstrip(']').lstrip('[')
metadata = {
'protocol': 'TCP',
'num_threads': num_streams,
'receiving_machine_type': receiving_vm.machine_type,
'receiving_zone': receiving_vm.zone,
'sending_machine_type': sending_vm.machine_type,
'sending_zone': sending_vm.zone,
'thread_id': thread_id,
'internal_ip_used': internal_ip_used,
'tcp_window_size': FLAGS.socket_buffer_size,
}
bandwidth = line_data[5]
units = line_data[6]
samples.append(
sample.Sample('Bandwidth', float(bandwidth), units, metadata))
return samples
# Example output from iperf3
# Connecting to host 10.129.0.3, port 5201
# [ 4] local 10.129.0.4 port 49526 connected to 10.129.0.3 port 5201
# [ ID] Interval Transfer Bandwidth Total Datagrams
# [ 4] 0.00-1.00 sec 159 MBytes 1.34 Gbits/sec 20398
# [ 4] 1.00-2.00 sec 166 MBytes 1.40 Gbits/sec 21292
# [ 4] 2.00-3.00 sec 167 MBytes 1.40 Gbits/sec 21323
# - - - - - - - - - - - - - - - - - - - - - - - - -
# [ ID] Interval Transfer Bandwidth Jitter Lost/Total Dat
# [ 4] 0.00-3.00 sec 492 MBytes 1.38 Gbits/sec 0.072 ms 35148/62949 (5
# [ 4] Sent 62949 datagrams
#
# iperf Done.
def GetUDPStreamSamples(sending_vm, receiving_vm, results, bandwidth,
internal_ip_used):
"""Parses Iperf3 results and outputs samples for PKB.
Args:
sending_vm: vm where the client is run.
receiving_vm: vm where the server is run.
results: string containing iperf3 output.
bandwidth: the target bandwidth (Mbits/sec) used in the test.
internal_ip_used: for the metadata, lets the user know if it was the
internal or external IP used in the test.
Returns:
List of samples.
"""
# 2 header lines, list of test results, then the 3 summary header lines
data_line_number = 2 + FLAGS.udp_stream_seconds + 3
data_line = results.split('\n')[data_line_number].split(' ')
data_line = [val for val in data_line if val]
# The data line should look like
# [ 4] 0.00-3.00 sec 492 MBytes 1.38 Gbits/sec 0.072 ms 35148/62949
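# Index reference for that split-and-filtered line (a sketch derived from the
# parsing below; token 0 is the lone '[' because the thread id is narrow here):
#   data_line[6]  -> '1.38'          (bandwidth achieved)
#   data_line[7]  -> 'Gbits/sec'     (bandwidth unit)
#   data_line[8]  -> '0.072'         (jitter in ms)
#   data_line[10] -> '35148/62949'   (lost/total datagrams)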
jitter = float(data_line[8])
bandwidth_achieved = float(data_line[6])
bandwidth_achieved_unit = data_line[7].split('/')[0]
if bandwidth_achieved_unit == 'Gbits':
bandwidth_achieved *= 1000.0
if bandwidth_achieved_unit == 'Kbits':
bandwidth_achieved /= 1000.0
lost = int(data_line[10].split('/')[0])
total = int(data_line[10].split('/')[1])
metadata = {
'protocol': 'UDP',
'total_lost': lost,
'total_sent': total,
'bandwidth': bandwidth,
'receiving_machine_type': receiving_vm.machine_type,
'receiving_zone': receiving_vm.zone,
'sending_machine_type': sending_vm.machine_type,
'sending_zone': sending_vm.zone,
'internal_ip_used': internal_ip_used
}
# Get the percentage of packets lost.
loss_rate = round(lost * 100.0 / total, 3)
samples = [
sample.Sample('Loss Rate', loss_rate, 'Percent',
metadata),
sample.Sample('Bandwidth Achieved', bandwidth_achieved, 'Mbits/sec',
metadata),
sample.Sample('Jitter', jitter, 'ms',
metadata),
]
return samples
|
from gi.repository import Gdk, Gio, GObject
from meld.settings import load_settings_schema
WINDOW_STATE_SCHEMA = 'org.gnome.meld.WindowState'
class SavedWindowState(GObject.GObject):
'''Utility class for saving and restoring GtkWindow state'''
__gtype_name__ = 'SavedWindowState'
width = GObject.Property(
type=int, nick='Current window width', default=-1)
height = GObject.Property(
type=int, nick='Current window height', default=-1)
is_maximized = GObject.Property(
type=bool, nick='Is window maximized', default=False)
is_fullscreen = GObject.Property(
type=bool, nick='Is window fullscreen', default=False)
def bind(self, window):
window.connect('size-allocate', self.on_size_allocate)
window.connect('window-state-event', self.on_window_state_event)
# Don't re-read from gsettings after initialisation; we've seen
# what looked like issues with buggy debounce here.
bind_flags = (
Gio.SettingsBindFlags.DEFAULT |
Gio.SettingsBindFlags.GET_NO_CHANGES
)
self.settings = load_settings_schema(WINDOW_STATE_SCHEMA)
self.settings.bind('width', self, 'width', bind_flags)
self.settings.bind('height', self, 'height', bind_flags)
self.settings.bind('is-maximized', self, 'is-maximized', bind_flags)
self.settings.bind('is-fullscreen', self, 'is-fullscreen', bind_flags)
window.set_default_size(self.props.width, self.props.height)
if self.props.is_maximized:
window.maximize()
if self.props.is_fullscreen:
window.fullscreen()
def on_size_allocate(self, window, allocation):
if not (self.props.is_maximized or self.props.is_fullscreen):
width, height = window.get_size()
if width != self.props.width:
self.props.width = width
if height != self.props.height:
self.props.height = height
def on_window_state_event(self, window, event):
state = event.window.get_state()
is_maximized = state & Gdk.WindowState.MAXIMIZED
if is_maximized != self.props.is_maximized:
self.props.is_maximized = is_maximized
is_fullscreen = state & Gdk.WindowState.FULLSCREEN
if is_fullscreen != self.props.is_fullscreen:
self.props.is_fullscreen = is_fullscreen
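# Minimal usage sketch (assumes a Gtk.Window instance named `window` and that
# the 'org.gnome.meld.WindowState' GSettings schema is installed):
#
#     saved_state = SavedWindowState()
#     saved_state.bind(window)
#
# bind() restores the saved size and maximized/fullscreen flags, then keeps the
# GSettings keys updated as the window is resized or its state changes.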
|
import datetime
import logging
import subprocess
import time
import os
import shutil
from threading import Event
import yaml
from netort.resource import manager as resource_manager
from netort.resource import HttpOpener
from .reader import PandoraStatsReader
from ..Console import Plugin as ConsolePlugin
from ..Console import screen as ConsoleScreen
from ..Phantom import PhantomReader, string_to_df
from ...common.interfaces import AbstractInfoWidget, GeneratorPlugin
from ...common.util import tail_lines, FileMultiReader
logger = logging.getLogger(__name__)
class Plugin(GeneratorPlugin):
""" Pandora load generator plugin """
OPTION_CONFIG = "config"
SECTION = "pandora"
DEFAULT_REPORT_FILE = "phout.log"
DEFAULT_EXPVAR_PORT = 1234
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
self.output_finished = Event()
self.enum_ammo = False
self.pandora_cmd = None
self.pandora_config_file = None
self.config_contents = None
self.custom_config = False
self.expvar = self.get_option('expvar')
self.expvar_enabled = self.expvar
self.expvar_port = self.DEFAULT_EXPVAR_PORT
self.report_file = None
self.__address = None
self.__schedule = None
self.ammofile = None
self.process_stderr_file = None
self.resources = []
@staticmethod
def get_key():
return __file__
def get_available_options(self):
opts = [
"pandora_cmd", "buffered_seconds",
"config_content", "config_file"
]
return opts
def configure(self):
self.report_file = self.get_option("report_file")
self.buffered_seconds = self.get_option("buffered_seconds")
self.affinity = self.get_option("affinity", "")
self.resources = self.get_option("resources")
# if we use custom pandora binary, we can download it and make it executable
self.pandora_cmd = self.get_resource(self.get_option("pandora_cmd"), "./pandora", permissions=0o755)
# download all resources from self.get_options("resources")
if len(self.resources) > 0:
for resource in self.resources:
self.get_resource(resource["src"], resource["dst"])
# get config_contents and patch it: expand resources via resource manager
# the config_content option takes precedence over config_file
if self.get_option("config_content"):
logger.info('Found config_content option configuration')
self.config_contents = self.__patch_raw_config_and_dump(self.get_option("config_content"))
elif self.get_option("config_file"):
logger.info('Found config_file option configuration')
with open(self.get_option("config_file"), 'rb') as config:
external_file_config_contents = yaml.load(config.read(), Loader=yaml.FullLoader)
self.config_contents = self.__patch_raw_config_and_dump(external_file_config_contents)
else:
raise RuntimeError("Neither pandora.config_content, nor pandora.config_file specified")
logger.debug('Config after parsing for patching: %s', self.config_contents)
# find report filename and add to artifacts
self.report_file = self.__find_report_filename()
with open(self.report_file, 'w'):
pass
self.core.add_artifact_file(self.report_file)
def __patch_raw_config_and_dump(self, cfg_dict):
if not cfg_dict:
raise RuntimeError('Empty pandora config')
# patch
config_content = self.patch_config(cfg_dict)
# dump
self.pandora_config_file = self.core.mkstemp(".yaml", "pandora_config_")
self.core.add_artifact_file(self.pandora_config_file)
with open(self.pandora_config_file, 'w') as config_file:
yaml.dump(config_content, config_file)
return config_content
def patch_config(self, config):
"""
download remote resources, replace links with local filenames
add result file section
:param dict config: pandora config
"""
# get expvar parameters
if config.get("monitoring"):
if config["monitoring"].get("expvar"):
self.expvar_enabled = config["monitoring"]["expvar"].get("enabled")
if config["monitoring"]["expvar"].get("port"):
self.expvar_port = config["monitoring"]["expvar"].get("port")
# or set it if the expvar section does not exist
elif not self.expvar:
config["monitoring"] = {
"expvar": {
"enabled": True,
}
}
self.expvar_enabled = True
# FIXME this is broken for custom ammo providers due to interface incompatibility
# FIXME refactor pandora plx
for pool in config['pools']:
if pool.get('ammo', {}).get('file', ''):
self.ammofile = pool['ammo']['file']
opener = resource_manager.get_opener(self.ammofile)
if isinstance(opener, HttpOpener):
pool['ammo']['file'] = opener.download_file(True, try_ungzip=True)
else:
pool['ammo']['file'] = opener.get_filename
if not pool.get('result') or 'phout' not in pool.get('result', {}).get('type', ''):
logger.warning('Seems like pandora result file not specified... adding defaults')
pool['result'] = dict(
destination=self.DEFAULT_REPORT_FILE,
type='phout',
)
return config
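# Illustrative sketch of what the patching above does (hypothetical config, not
# taken from the pandora docs): a pool such as
#   {'ammo': {'file': 'http://example.host/ammo.gz'}, 'gun': {'target': 'host:80'}}
# ends up with 'ammo.file' replaced by a locally downloaded path, and a result
# section {'destination': 'phout.log', 'type': 'phout'} added when none is
# present, so the report file can later be tailed by the reader.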
@property
def address(self):
if not self.__address:
for pool in self.config_contents['pools']:
if pool.get('gun', {}).get('target'):
self.__address = pool.get('gun', {}).get('target')
break
else:
self.__address = 'unknown'
return self.__address
@property
def schedule(self):
if not self.__schedule:
for pool in self.config_contents['pools']:
if pool.get('rps'):
self.__schedule = pool.get('rps')
break
else:
self.__schedule = 'unknown'
return self.__schedule
def get_info(self):
return self.Info(
address=self.address,
ammo_file=self.ammofile,
duration=0,
instances=0,
loop_count=0,
port=self.address.split(':')[-1],
rps_schedule=self.schedule
)
def __find_report_filename(self):
for pool in self.config_contents['pools']:
if self.report_file:
return self.report_file
if pool.get('result', {}).get('destination', None):
report_filename = pool.get('result').get('destination')
logger.info('Found report file in pandora config: %s', report_filename)
return report_filename
return self.DEFAULT_REPORT_FILE
def get_reader(self, parser=string_to_df):
if self.reader is None:
self.reader = FileMultiReader(self.report_file, self.output_finished)
return PhantomReader(self.reader.get_file(), parser=parser)
def get_stats_reader(self):
if self.stats_reader is None:
self.stats_reader = PandoraStatsReader(self.expvar_enabled, self.expvar_port)
return self.stats_reader
def get_resource(self, resource, dst, permissions=0o644):
opener = resource_manager.get_opener(resource)
if isinstance(opener, HttpOpener):
tmp_path = opener.download_file(True, try_ungzip=True)
shutil.copy(tmp_path, dst)
logger.info('Successfully moved resource %s', dst)
else:
dst = opener.get_filename
os.chmod(dst, permissions)
logger.info('Permissions on %s have been changed to %d', dst, permissions)
return dst
def prepare_test(self):
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except KeyError as ex:
logger.debug("Console not found: %s", ex)
console = None
if console:
widget = PandoraInfoWidget(self)
console.add_info_widget(widget)
self.core.job.aggregator.add_result_listener(widget)
def start_test(self):
args = [self.pandora_cmd] +\
(['-expvar'] if self.expvar else []) +\
[self.pandora_config_file]
if self.affinity:
self.core.__setup_affinity(self.affinity, args=args)
logger.info("Starting: %s", args)
self.start_time = time.time()
self.process_stderr_file = self.core.mkstemp(".log", "pandora_")
self.core.add_artifact_file(self.process_stderr_file)
self.process_stderr = open(self.process_stderr_file, 'w')
try:
self.process = subprocess.Popen(
args,
stderr=self.process_stderr,
stdout=self.process_stderr,
close_fds=True)
except OSError:
logger.debug(
"Unable to start Pandora binary. Args: %s", args, exc_info=True)
raise RuntimeError(
"Unable to start Pandora binary and/or file does not exist: %s" % args)
def is_test_finished(self):
retcode = self.process.poll()
if retcode is not None and retcode == 0:
logger.info("Pandora subprocess done its work successfully and finished w/ retcode 0")
self.output_finished.set()
return retcode
elif retcode is not None and retcode != 0:
lines_amount = 20
logger.info("Pandora finished with non-zero retcode. Last %s logs of Pandora log:", lines_amount)
self.output_finished.set()
last_log_contents = tail_lines(self.process_stderr_file, lines_amount)
for logline in last_log_contents:
logger.info(logline.strip('\n'))
return abs(retcode)
else:
return -1
def end_test(self, retcode):
if self.process and self.process.poll() is None:
logger.warning(
"Terminating worker process with PID %s", self.process.pid)
self.process.terminate()
if self.process_stderr:
self.process_stderr.close()
else:
logger.debug("Seems subprocess finished OK")
self.output_finished.set()
return retcode
class PandoraInfoWidget(AbstractInfoWidget):
''' Right panel widget '''
def __init__(self, pandora):
AbstractInfoWidget.__init__(self)
self.krutilka = ConsoleScreen.krutilka()
self.owner = pandora
self.reqps = 0
self.active = 0
def get_index(self):
return 0
def on_aggregated_data(self, data, stats):
self.reqps = stats["metrics"]["reqps"]
self.active = stats["metrics"]["instances"]
def render(self, screen):
text = " Pandora Test %s" % next(self.krutilka)
space = screen.right_panel_width - len(text) - 1
left_spaces = space // 2
right_spaces = space // 2
dur_seconds = int(time.time()) - int(self.owner.start_time)
duration = str(datetime.timedelta(seconds=dur_seconds))
template = screen.markup.BG_BROWN + '~' * left_spaces + \
text + ' ' + '~' * right_spaces + screen.markup.RESET + "\n"
template += "Command Line: %s\n"
template += " Duration: %s\n"
template += " Requests/s: %s\n"
template += " Active reqs: %s\n"
template += " Target: %s\n"
template += " Schedule: \n%s\n"
data = (
self.owner.pandora_cmd,
duration,
self.reqps,
self.active,
self.owner.address,
yaml.dump(self.owner.schedule)
)
return template % data
|
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from .const import DEFAULT_PORT, DOMAIN # pylint: disable=unused-import
from .errors import (
ConnectionRefused,
ConnectionTimeout,
ResolveFailed,
ValidationFailure,
)
from .helper import get_cert_expiry_timestamp
_LOGGER = logging.getLogger(__name__)
class CertexpiryConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self) -> None:
"""Initialize the config flow."""
self._errors = {}
async def _test_connection(self, user_input=None):
"""Test connection to the server and try to get the certificate."""
try:
await get_cert_expiry_timestamp(
self.hass,
user_input[CONF_HOST],
user_input.get(CONF_PORT, DEFAULT_PORT),
)
return True
except ResolveFailed:
self._errors[CONF_HOST] = "resolve_failed"
except ConnectionTimeout:
self._errors[CONF_HOST] = "connection_timeout"
except ConnectionRefused:
self._errors[CONF_HOST] = "connection_refused"
except ValidationFailure:
return True
return False
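# Note (inferred from the flow above, not from upstream docs): ValidationFailure
# is deliberately treated as a successful connection, since a certificate that
# fails validation can still be set up and monitored for its expiry date.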
async def async_step_user(self, user_input=None):
"""Step when user initializes a integration."""
self._errors = {}
if user_input is not None:
host = user_input[CONF_HOST]
port = user_input.get(CONF_PORT, DEFAULT_PORT)
await self.async_set_unique_id(f"{host}:{port}")
self._abort_if_unique_id_configured()
if await self._test_connection(user_input):
title_port = f":{port}" if port != DEFAULT_PORT else ""
title = f"{host}{title_port}"
return self.async_create_entry(
title=title,
data={CONF_HOST: host, CONF_PORT: port},
)
if ( # pylint: disable=no-member
self.context["source"] == config_entries.SOURCE_IMPORT
):
_LOGGER.error("Config import failed for %s", user_input[CONF_HOST])
return self.async_abort(reason="import_failed")
else:
user_input = {}
user_input[CONF_HOST] = ""
user_input[CONF_PORT] = DEFAULT_PORT
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str,
vol.Required(
CONF_PORT, default=user_input.get(CONF_PORT, DEFAULT_PORT)
): int,
}
),
errors=self._errors,
)
async def async_step_import(self, user_input=None):
"""Import a config entry.
Only host was required in the yaml file; all other fields are optional.
"""
return await self.async_step_user(user_input)
|
import pytest
from xarray.backends.common import robust_getitem
class DummyFailure(Exception):
pass
class DummyArray:
def __init__(self, failures):
self.failures = failures
def __getitem__(self, key):
if self.failures:
self.failures -= 1
raise DummyFailure
return "success"
def test_robust_getitem():
array = DummyArray(failures=2)
with pytest.raises(DummyFailure):
array[...]
result = robust_getitem(array, ..., catch=DummyFailure, initial_delay=1)
assert result == "success"
array = DummyArray(failures=3)
with pytest.raises(DummyFailure):
robust_getitem(array, ..., catch=DummyFailure, initial_delay=1, max_retries=2)
|
from flask import Flask, render_template, Response, request
import pygal
from pygal.config import Config
from pygal.util import cut
from pygal.etree import etree
from pygal.style import styles, parametric_styles
from base64 import (
urlsafe_b64encode as b64encode, urlsafe_b64decode as b64decode
)
import string
import random
import pickle
def get(type):
from importlib import import_module
module = '.'.join(type.split('.')[:-1])
name = type.split('.')[-1]
return getattr(import_module(module), name)
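# Usage sketch: get('pygal.Line') imports the 'pygal' module and returns its
# Line class; the /svg and /table routes below receive dotted paths such as
# those built from chart.__module__ and chart.__name__ in the all() view.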
def random_label():
chars = string.ascii_letters + string.digits + u' àéèçêâäëï'
return ''.join([
random.choice(chars) for i in range(random.randrange(4, 30))
])
def random_value(min=0, max=15):
return random.randrange(min, max, 1)
def create_app():
"""Creates the pygal test web app"""
app = Flask(__name__)
@app.before_request
def before_request():
if request.args.get('etree'):
etree.to_etree()
elif request.args.get('lxml'):
etree.to_lxml()
def _random(data, order):
max = 10**order
min = 10**random.randrange(0, order)
series = []
for i in range(random.randrange(1, 10)):
values = [(
random_value((-max, min)[random.randrange(0, 2)], max),
random_value((-max, min)[random.randrange(0, 2)], max)
) for i in range(data)]
series.append((random_label(), values, {}))
return series
def _random_series(type, data, order):
max = 10**order
min = 10**random.randrange(0, order)
with_secondary = bool(random.randint(0, 1))
series = []
for i in range(random.randrange(1, 10)):
if type == 'Pie':
values = random_value(min, max)
elif type == 'XY':
values = [(
random_value((-max, min)[random.randrange(0, 2)], max),
random_value((-max, min)[random.randrange(0, 2)], max)
) for i in range(data)]
else:
values = [
random_value((-max, min)[random.randrange(1, 2)], max)
for i in range(data)
]
config = {
'secondary': with_secondary and bool(random.randint(0, 1))
}
series.append((random_label(), values, config))
return series
from .tests import get_test_routes
links = get_test_routes(app)
@app.route("/")
def index():
return render_template(
'index.jinja2',
styles=styles,
parametric_styles=parametric_styles,
parametric_colors=(
'#ff5995', '#b6e354', '#feed6c', '#8cedff', '#9e6ffe'
),
links=links,
charts_name=pygal.CHARTS_NAMES
)
@app.route("/svg/<type>/<series>/<config>")
def svg(type, series, config):
graph = get(type)(pickle.loads(b64decode(str(config))))
for title, values, serie_config in pickle.loads(b64decode(
str(series))):
graph.add(title, values, **serie_config)
return graph.render_response()
@app.route("/table/<type>/<series>/<config>")
def table(type, series, config):
graph = get(type)(pickle.loads(b64decode(str(config))))
for title, values, serie_config in pickle.loads(b64decode(
str(series))):
graph.add(title, values, **serie_config)
return graph.render_table()
@app.route("/sparkline/<style>")
@app.route("/sparkline/parameric/<style>/<color>")
def sparkline(style, color=None):
if color is None:
style = styles[style]
else:
style = parametric_styles[style](color)
line = pygal.Line(style=style, pretty_print=True)
line.add('_', [random.randrange(0, 10) for _ in range(25)])
return Response(
line.render_sparkline(height=40), mimetype='image/svg+xml'
)
@app.route("/with/table/<type>")
def with_table(type):
chart = pygal.StackedBar(
disable_xml_declaration=True, x_label_rotation=35
)
chart.title = (
'What Linux distro do you primarily use'
' on your server computers? (Desktop'
' users vs Server Users)'
)
if type == 'series':
chart.add('Debian', [1775, 82])
chart.add('Ubuntu', [1515, 80])
chart.add('CentOS', [807, 60])
chart.add('Arch Linux', [549, 12])
chart.add('Red Hat Enterprise Linux', [247, 10])
chart.add('Gentoo', [129, 7])
chart.add('Fedora', [91, 6])
chart.add('Amazon Linux', [60, 0])
chart.add('OpenSUSE', [58, 0])
chart.add('Slackware', [50, 3])
chart.add('Xubuntu', [38, 1])
chart.add('Rasbian', [33, 4])
chart.add('SUSE Linux Enterprise Server', [33, 1])
chart.add('Linux Mint', [30, 4])
chart.add('Scientific Linux', [32, 0])
chart.add('Other', [187, 5])
elif type == 'labels':
chart.x_labels = [
'Debian', 'Ubuntu', 'CentOS', 'Arch Linux',
'Red Hat Enterprise Linux', 'Gentoo', 'Fedora', 'Amazon Linux',
'OpenSUSE', 'Slackware', 'Xubuntu', 'Rasbian',
'SUSE Linux Enterprise Server', 'Linux Mint',
'Scientific Linux', 'Other'
]
chart.add(
'Desktop Users', [
1775, 1515, 807, 549, 247, 129, 91, 60, 58, 50, 38, 33, 33,
30, 32, 187
]
)
chart.add(
'Server Users',
[82, 80, 60, 12, 10, 7, 6, 0, 0, 3, 1, 4, 1, 4, 0, 5]
)
return render_template('table.jinja2', chart=chart)
@app.route("/all")
@app.route("/all/<style>")
@app.route("/all/<style>/<color>")
@app.route("/all/<style>/<color>/<base_style>")
@app.route("/all/interpolate=<interpolate>")
def all(style='default', color=None, interpolate=None, base_style=None):
width, height = 600, 400
data = random.randrange(1, 10)
order = random.randrange(1, 10)
if color is None:
style = styles[style]
else:
style = parametric_styles[style](
color, base_style=styles[base_style or 'default']
)
xy_series = _random(data, order)
other_series = []
for title, values, config in xy_series:
other_series.append((title, cut(values, 1), config))
xy_series = b64encode(pickle.dumps(xy_series))
other_series = b64encode(pickle.dumps(other_series))
config = Config()
config.width = width
config.height = height
config.fill = bool(random.randrange(0, 2))
config.interpolate = interpolate
config.style = style
svgs = []
for chart in pygal.CHARTS:
type = '.'.join((chart.__module__, chart.__name__))
if chart._dual:
config.x_labels = None
else:
config.x_labels = [random_label() for i in range(data)]
svgs.append({
'type': type,
'series': xy_series if chart._dual else other_series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/rotation")
def rotation():
width, height = 375, 245
config = Config()
config.width = width
config.height = height
config.fill = True
config.style = styles['neon']
data = random.randrange(1, 10)
order = random.randrange(1, 10)
series = b64encode(pickle.dumps(_random_series(type, data, order)))
labels = [random_label() for i in range(data)]
svgs = []
config.show_legend = bool(random.randrange(0, 2))
for angle in range(0, 370, 10):
config.title = "%d rotation" % angle
config.x_labels = labels
config.x_label_rotation = angle
config.y_label_rotation = angle
svgs.append({
'type': 'pygal.Bar',
'series': series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/interpolation")
def interpolation():
width, height = 600, 400
config = Config()
config.width = width
config.height = height
config.fill = True
config.style = styles['neon']
data = random.randrange(1, 10)
order = random.randrange(1, 10)
series = b64encode(pickle.dumps(_random_series(type, data, order)))
svgs = []
for interpolation in 'quadratic', 'cubic', 'lagrange', 'trigonometric':
config.title = "%s interpolation" % interpolation
config.interpolate = interpolation
svgs.append({
'type': 'pygal.StackedLine',
'series': series,
'config': b64encode(pickle.dumps(config))
})
for params in [{'type': 'catmull_rom'}, {'type': 'finite_difference'},
{'type': 'cardinal',
'c': .25}, {'type': 'cardinal',
'c': .5}, {'type': 'cardinal', 'c': .75},
{'type': 'cardinal',
'c': 1.5}, {'type': 'cardinal',
'c': 2}, {'type': 'cardinal', 'c': 5},
{'type': 'kochanek_bartels', 'b': 1, 'c': 1,
't': 1}, {'type': 'kochanek_bartels', 'b': -1, 'c': 1,
't': 1}, {'type': 'kochanek_bartels', 'b': 1,
'c': -1, 't': 1},
{'type': 'kochanek_bartels', 'b': 1, 'c': 1, 't': -1}, {
'type': 'kochanek_bartels', 'b': -1, 'c': 1, 't': -1
}, {'type': 'kochanek_bartels', 'b': -1, 'c': -1,
't': 1}, {'type': 'kochanek_bartels', 'b': -1,
'c': -1, 't': -1}]:
config.title = "Hermite interpolation with params %r" % params
config.interpolate = 'hermite'
config.interpolation_parameters = params
svgs.append({
'type': 'pygal.StackedLine',
'series': series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/raw_svgs/")
def raw_svgs():
svgs = []
for color in styles['neon'].colors:
chart = pygal.Pie(
style=parametric_styles['rotate'](color),
width=400,
height=300
)
chart.title = color
chart.disable_xml_declaration = True
chart.explicit_size = True
chart.js = ['http://l:2343/2.0.x/pygal-tooltips.js']
for i in range(6):
chart.add(str(i), 2**i)
svgs.append(chart.render())
return render_template('raw_svgs.jinja2', svgs=svgs)
return app
|
import pytest
from base64 import b64decode
from unittest.mock import call, patch
from case import mock
from kombu.exceptions import ContentDisallowed, EncodeError, DecodeError
from kombu.serialization import (
registry, register, SerializerNotInstalled,
raw_encode, register_yaml, register_msgpack,
dumps, loads, pickle, pickle_protocol,
unregister, register_pickle, enable_insecure_serializers,
disable_insecure_serializers,
)
from kombu.utils.encoding import str_to_bytes
import t.skip
# For content_encoding tests
unicode_string = 'abcdé\u8463'
unicode_string_as_utf8 = unicode_string.encode('utf-8')
latin_string = 'abcdé'
latin_string_as_latin1 = latin_string.encode('latin-1')
latin_string_as_utf8 = latin_string.encode('utf-8')
# For serialization tests
py_data = {
'string': 'The quick brown fox jumps over the lazy dog',
'int': 10,
'float': 3.14159265,
'unicode': 'Thé quick brown fox jumps over thé lazy dog',
'list': ['george', 'jerry', 'elaine', 'cosmo'],
}
# JSON serialization tests
json_data = """\
{"int": 10, "float": 3.1415926500000002, \
"list": ["george", "jerry", "elaine", "cosmo"], \
"string": "The quick brown fox jumps over the lazy \
dog", "unicode": "Th\\u00e9 quick brown fox jumps over \
th\\u00e9 lazy dog"}\
"""
# Pickle serialization tests
pickle_data = pickle.dumps(py_data, protocol=pickle_protocol)
# YAML serialization tests
yaml_data = """\
float: 3.1415926500000002
int: 10
list: [george, jerry, elaine, cosmo]
string: The quick brown fox jumps over the lazy dog
unicode: "Th\\xE9 quick brown fox jumps over th\\xE9 lazy dog"
"""
msgpack_py_data = dict(py_data)
msgpack_py_data['unicode'] = 'Th quick brown fox jumps over th lazy dog'
# Unicode chars are lost in transmit :(
msgpack_data = b64decode(str_to_bytes("""\
haNpbnQKpWZsb2F0y0AJIftTyNTxpGxpc3SUpmdlb3JnZaVqZXJyeaZlbGFpbmWlY29zbW+mc3Rya\
W5n2gArVGhlIHF1aWNrIGJyb3duIGZveCBqdW1wcyBvdmVyIHRoZSBsYXp5IGRvZ6d1bmljb2Rl2g\
ApVGggcXVpY2sgYnJvd24gZm94IGp1bXBzIG92ZXIgdGggbGF6eSBkb2c=\
"""))
registry.register('testS', lambda s: s, lambda s: 'decoded',
'application/testS', 'utf-8')
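# The call above uses registry.register(name, encoder, decoder, content_type,
# content_encoding): this dummy 'testS' serializer encodes by identity and
# always decodes to the literal string 'decoded', which the disable/force
# tests below rely on.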
class test_Serialization:
def test_disable(self):
disabled = registry._disabled_content_types
try:
registry.disable('testS')
assert 'application/testS' in disabled
disabled.clear()
registry.disable('application/testS')
assert 'application/testS' in disabled
finally:
disabled.clear()
def test_enable(self):
registry._disabled_content_types.add('application/json')
registry.enable('json')
assert 'application/json' not in registry._disabled_content_types
registry._disabled_content_types.add('application/json')
registry.enable('application/json')
assert 'application/json' not in registry._disabled_content_types
def test_loads_when_disabled(self):
disabled = registry._disabled_content_types
try:
registry.disable('testS')
with pytest.raises(SerializerNotInstalled):
loads('xxd', 'application/testS', 'utf-8', force=False)
ret = loads('xxd', 'application/testS', 'utf-8', force=True)
assert ret == 'decoded'
finally:
disabled.clear()
def test_loads_when_data_is_None(self):
loads(None, 'application/testS', 'utf-8')
def test_content_type_decoding(self):
assert loads(
unicode_string_as_utf8,
content_type='plain/text',
content_encoding='utf-8') == unicode_string
assert loads(
latin_string_as_latin1,
content_type='application/data',
content_encoding='latin-1') == latin_string
def test_content_type_binary(self):
assert isinstance(
loads(unicode_string_as_utf8,
content_type='application/data', content_encoding='binary'),
bytes)
assert loads(
unicode_string_as_utf8,
content_type='application/data',
content_encoding='binary') == unicode_string_as_utf8
def test_content_type_encoding(self):
# Using the 'raw' serializer
assert (dumps(unicode_string, serializer='raw')[-1] ==
unicode_string_as_utf8)
assert (dumps(latin_string, serializer='raw')[-1] ==
latin_string_as_utf8)
# And again w/o a specific serializer to check the
# code where we force unicode objects into a string.
assert dumps(unicode_string)[-1] == unicode_string_as_utf8
assert dumps(latin_string)[-1] == latin_string_as_utf8
def test_enable_insecure_serializers(self):
with patch('kombu.serialization.registry') as registry:
enable_insecure_serializers()
registry.assert_has_calls([
call.enable('pickle'), call.enable('yaml'),
call.enable('msgpack'),
])
registry.enable.side_effect = KeyError()
enable_insecure_serializers()
with patch('kombu.serialization.registry') as registry:
enable_insecure_serializers(['msgpack'])
registry.assert_has_calls([call.enable('msgpack')])
def test_disable_insecure_serializers(self):
with patch('kombu.serialization.registry') as registry:
registry._decoders = ['pickle', 'yaml', 'doomsday']
disable_insecure_serializers(allowed=['doomsday'])
registry.disable.assert_has_calls([call('pickle'), call('yaml')])
registry.enable.assert_has_calls([call('doomsday')])
disable_insecure_serializers(allowed=None)
registry.disable.assert_has_calls([
call('pickle'), call('yaml'), call('doomsday')
])
def test_reraises_EncodeError(self):
with pytest.raises(EncodeError):
dumps([object()], serializer='json')
def test_reraises_DecodeError(self):
with pytest.raises(DecodeError):
loads(object(), content_type='application/json',
content_encoding='utf-8')
def test_json_loads(self):
assert loads(json_data,
content_type='application/json',
content_encoding='utf-8') == py_data
def test_json_dumps(self):
a = loads(
dumps(py_data, serializer='json')[-1],
content_type='application/json',
content_encoding='utf-8',
)
b = loads(
json_data,
content_type='application/json',
content_encoding='utf-8',
)
assert a == b
@t.skip.if_pypy
def test_msgpack_loads(self):
register_msgpack()
pytest.importorskip('msgpack')
res = loads(msgpack_data,
content_type='application/x-msgpack',
content_encoding='binary')
assert res == msgpack_py_data
@t.skip.if_pypy
def test_msgpack_dumps(self):
pytest.importorskip('msgpack')
register_msgpack()
a = loads(
dumps(msgpack_py_data, serializer='msgpack')[-1],
content_type='application/x-msgpack',
content_encoding='binary',
)
b = loads(
msgpack_data,
content_type='application/x-msgpack',
content_encoding='binary',
)
assert a == b
def test_yaml_loads(self):
pytest.importorskip('yaml')
register_yaml()
assert loads(
yaml_data,
content_type='application/x-yaml',
content_encoding='utf-8') == py_data
def test_yaml_dumps(self):
pytest.importorskip('yaml')
register_yaml()
a = loads(
dumps(py_data, serializer='yaml')[-1],
content_type='application/x-yaml',
content_encoding='utf-8',
)
b = loads(
yaml_data,
content_type='application/x-yaml',
content_encoding='utf-8',
)
assert a == b
def test_pickle_loads(self):
assert loads(
pickle_data,
content_type='application/x-python-serialize',
content_encoding='binary') == py_data
def test_pickle_dumps(self):
a = pickle.loads(pickle_data),
b = pickle.loads(dumps(py_data, serializer='pickle')[-1]),
assert a == b
def test_register(self):
register(None, None, None, None)
def test_unregister(self):
with pytest.raises(SerializerNotInstalled):
unregister('nonexisting')
dumps('foo', serializer='pickle')
unregister('pickle')
with pytest.raises(SerializerNotInstalled):
dumps('foo', serializer='pickle')
register_pickle()
def test_set_default_serializer_missing(self):
with pytest.raises(SerializerNotInstalled):
registry._set_default_serializer('nonexisting')
def test_dumps_missing(self):
with pytest.raises(SerializerNotInstalled):
dumps('foo', serializer='nonexisting')
def test_dumps__no_serializer(self):
ctyp, cenc, data = dumps(str_to_bytes('foo'))
assert ctyp == 'application/data'
assert cenc == 'binary'
def test_loads__trusted_content(self):
loads('tainted', 'application/data', 'binary', accept=[])
loads('tainted', 'application/text', 'utf-8', accept=[])
def test_loads__not_accepted(self):
with pytest.raises(ContentDisallowed):
loads('tainted', 'application/x-evil', 'binary', accept=[])
with pytest.raises(ContentDisallowed):
loads('tainted', 'application/x-evil', 'binary',
accept=['application/x-json'])
assert loads('tainted', 'application/x-doomsday', 'binary',
accept=['application/x-doomsday'])
def test_raw_encode(self):
assert raw_encode(b'foo') == (
'application/data', 'binary', b'foo',
)
@mock.mask_modules('yaml')
def test_register_yaml__no_yaml(self):
register_yaml()
with pytest.raises(SerializerNotInstalled):
loads('foo', 'application/x-yaml', 'utf-8')
@mock.mask_modules('msgpack')
def test_register_msgpack__no_msgpack(self):
register_msgpack()
with pytest.raises(SerializerNotInstalled):
loads('foo', 'application/x-msgpack', 'utf-8')
|
from functools import partial
from ...utils import verbose
from ..utils import (has_dataset, _data_path, _data_path_doc,
_get_version, _version_doc)
has_refmeg_noise_data = partial(has_dataset, name='refmeg_noise')
@verbose
def data_path(path=None, force_update=False, update_path=True, download=True,
verbose=None): # noqa: D103
return _data_path(path=path, force_update=force_update,
update_path=update_path, name='refmeg_noise',
download=download)
data_path.__doc__ = _data_path_doc.format(name='refmeg_noise',
conf='MNE_DATASETS_REFMEG_NOISE_PATH') # noqa
def get_version(): # noqa: D103
return _get_version('refmeg_noise')
get_version.__doc__ = _version_doc.format(name='refmeg_noise')
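# Usage sketch (assumes the standard MNE dataset download machinery):
#
#     path = data_path()       # downloads the 'refmeg_noise' dataset if needed
#     version = get_version()  # version string of the locally installed dataset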
|
import json
import logging
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.providers.mesos.mesos_disk import LocalDisk
import requests
from requests.auth import HTTPBasicAuth
import six.moves.urllib.parse
FLAGS = flags.FLAGS
MARATHON_API_PREFIX = '/v2/apps/'
USERNAME = 'root'
class MesosDockerSpec(virtual_machine.BaseVmSpec):
"""Object containing the information needed to create a MesosDockerInstance.
Attributes:
docker_cpus: None or float. Number of CPUs for Docker instances.
docker_memory_mb: None or int. Memory limit (in MB) for Docker instances.
mesos_privileged_docker: None or boolean. Indicates if the Docker container
should be run in privileged mode.
"""
CLOUD = providers.MESOS
@classmethod
def _GetOptionDecoderConstructions(cls):
result = super(MesosDockerSpec, cls)._GetOptionDecoderConstructions()
result.update({
'docker_cpus': (option_decoders.FloatDecoder, {'default': 1}),
'docker_memory_mb': (option_decoders.IntDecoder, {'default': 2048}),
'mesos_privileged_docker': (option_decoders.BooleanDecoder,
{'default': False})})
return result
def _ApplyFlags(self, config_values, flag_values):
super(MesosDockerSpec, self)._ApplyFlags(config_values, flag_values)
if flag_values['docker_cpus'].present:
config_values['docker_cpus'] = flag_values.docker_cpus
if flag_values['docker_memory_mb'].present:
config_values['docker_memory_mb'] = flag_values.docker_memory_mb
if flag_values['mesos_privileged_docker'].present:
config_values['mesos_privileged_docker'] =\
flag_values.mesos_privileged_docker
class MesosDockerInstance(virtual_machine.BaseVirtualMachine):
"""
Represents a Docker instance spawned by Marathon framework on a Mesos cluster
"""
CLOUD = providers.MESOS
def __init__(self, vm_spec):
super(MesosDockerInstance, self).__init__(vm_spec)
self.user_name = USERNAME
self.cpus = vm_spec.docker_cpus
self.memory_mb = vm_spec.docker_memory_mb
self.privileged = vm_spec.mesos_privileged_docker
self.api_url = six.moves.urllib.parse.urljoin(FLAGS.marathon_address,
MARATHON_API_PREFIX)
self.app_url = six.moves.urllib.parse.urljoin(self.api_url, self.name)
auth = FLAGS.marathon_auth.split(":")
if len(auth) == 2:
self.auth = HTTPBasicAuth(auth[0], auth[1])
else:
self.auth = None
def _CreateDependencies(self):
self._CheckPrerequisites()
self._CreateVolumes()
def _Create(self):
self._CreateApp()
self._WaitForBootCompletion()
def _PostCreate(self):
self._SetupSSH()
self._ConfigureProxy()
def _Delete(self):
self._DeleteApp()
def _CheckPrerequisites(self):
"""
Raises an exception if any of the prerequisites is not met.
"""
if self.disk_specs and self.disk_specs[0].disk_type == disk.STANDARD:
raise Exception('Currently only local disks are supported. Please '
're-run the benchmark with "--scratch_disk_type=local".')
if not FLAGS.marathon_address:
raise Exception('Please provide the address and port of the Marathon '
'framework. Example: 10.20.30.40:8080')
def _CreateVolumes(self):
"""
Creates volumes for scratch disks.
"""
for disk_num, disk_spec in enumerate(self.disk_specs):
if disk_spec.disk_type == disk.LOCAL:
scratch_disk = LocalDisk(disk_num, disk_spec, self.name)
else:
raise Exception('Currently only local disks are supported. Please '
're-run the benchmark with "--scratch_disk_type=local"')
scratch_disk._Create()
self.scratch_disks.append(scratch_disk)
def _CreateApp(self):
"""
Creates Marathon's App (Docker instance).
"""
logging.info("Attempting to create App: %s" % self.name)
body = self._BuildAppBody()
headers = {'content-type': 'application/json'}
output = requests.post(self.api_url, data=body, headers=headers,
auth=self.auth)
if output.status_code != requests.codes.CREATED:
raise Exception("Unable to create App: %s" % output.text)
logging.info("App %s created successfully." % self.name)
@vm_util.Retry(poll_interval=10, max_retries=600, log_errors=False)
def _WaitForBootCompletion(self):
"""
Periodically asks Marathon if the instance is already running.
"""
logging.info("Waiting for App %s to get up and running. It may take a while"
" if a Docker image is being downloaded for the first time."
% self.name)
output = requests.get(self.app_url, auth=self.auth)
output = json.loads(output.text)
tasks_running = output['app']['tasksRunning']
if not tasks_running:
raise Exception("Container is not booted yet. Retrying.")
@vm_util.Retry(poll_interval=10, max_retries=100, log_errors=True)
def _SetupSSH(self):
"""
Setup SSH connection details for each instance:
- IP address of the instance is the address of a host which instance
is running on,
- SSH port is drawn by Marathon and is unique for each instance.
"""
output = requests.get(self.app_url, auth=self.auth)
output = json.loads(output.text)
tasks = output['app']['tasks']
if not tasks or not tasks[0]['ports']:
raise Exception("Unable to figure out where the container is running."
"Retrying to retrieve host and port.")
self.ip_address = tasks[0]['host']
self.ssh_port = tasks[0]['ports'][0]
internal_ip, _ = self.RemoteCommand("ifconfig eth0 | grep 'inet addr' | awk"
" -F: '{print $2}' | awk '{print $1}'")
self.internal_ip = internal_ip.rstrip()
@vm_util.Retry(poll_interval=10, max_retries=100, log_errors=True)
def _ConfigureProxy(self):
"""
In Docker containers, environment variables from /etc/environment
are not sourced; this results in connection problems when running
behind a proxy. Prepending proxy environment variables to bashrc
solves the problem. Note: APPENDING to bashrc will not work because
the script exits when it is NOT executed in an interactive shell.
"""
if FLAGS.http_proxy:
http_proxy = "sed -i '1i export http_proxy=%s' /etc/bash.bashrc"
self.RemoteCommand(http_proxy % FLAGS.http_proxy)
if FLAGS.https_proxy:
https_proxy = "sed -i '1i export https_proxy=%s' /etc/bash.bashrc"
self.RemoteCommand(https_proxy % FLAGS.https_proxy)
if FLAGS.ftp_proxy:
ftp_proxy = "sed -i '1i export ftp_proxy=%s' /etc/bash.bashrc"
self.RemoteCommand(ftp_proxy % FLAGS.ftp_proxy)
@vm_util.Retry(poll_interval=10, max_retries=100, log_errors=True)
def _DeleteApp(self):
"""
Deletes an App.
"""
logging.info('Attempting to delete App: %s' % self.name)
output = requests.delete(self.app_url, auth=self.auth)
if output.status_code == requests.codes.NOT_FOUND:
logging.info('App %s has been already deleted.' % self.name)
return
if output.status_code != requests.codes.OK:
raise Exception("Deleting App: %s failed. Reattempting." % self.name)
def _BuildAppBody(self):
"""
Builds JSON which will be passed as a body of POST request to Marathon
API in order to create App.
"""
cat_cmd = ['cat', vm_util.GetPublicKeyPath()]
key_file, _ = vm_util.IssueRetryableCommand(cat_cmd)
cmd = "/bin/mkdir /root/.ssh; echo '%s' >> /root/.ssh/authorized_keys; " \
"/usr/sbin/sshd -D" % key_file
body = {
'id': self.name,
'mem': self.memory_mb,
'cpus': self.cpus,
'cmd': cmd,
'container': {
'type': 'DOCKER',
'docker': {
'image': self.image,
'network': 'BRIDGE',
'portMappings': [
{
'containerPort': 22,
'hostPort': 0,
'protocol': 'tcp'
}
],
'privileged': self.privileged,
'parameters': [{'key': 'hostname', 'value': self.name}]
}
}
}
for scratch_disk in self.scratch_disks:
scratch_disk.AttachVolumeInfo(body['container'])
return json.dumps(body)
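# The dict above matches the JSON body that Marathon's POST /v2/apps endpoint
# expects for a Docker-backed app; 'hostPort': 0 asks Marathon to assign a
# random host port, which _SetupSSH() later reads back from tasks[0]['ports'][0].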
def SetupLocalDisks(self):
# Do not call parent's method
return
|
from datetime import timedelta
import ipaddress
import logging
from vallox_websocket_api import PROFILE as VALLOX_PROFILE, Vallox
from vallox_websocket_api.constants import vlxDevConstants
from vallox_websocket_api.exceptions import ValloxApiException
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
_LOGGER = logging.getLogger(__name__)
DOMAIN = "vallox"
DEFAULT_NAME = "Vallox"
SIGNAL_VALLOX_STATE_UPDATE = "vallox_state_update"
SCAN_INTERVAL = timedelta(seconds=60)
# Various metric keys that are reused between profiles.
METRIC_KEY_MODE = "A_CYC_MODE"
METRIC_KEY_PROFILE_FAN_SPEED_HOME = "A_CYC_HOME_SPEED_SETTING"
METRIC_KEY_PROFILE_FAN_SPEED_AWAY = "A_CYC_AWAY_SPEED_SETTING"
METRIC_KEY_PROFILE_FAN_SPEED_BOOST = "A_CYC_BOOST_SPEED_SETTING"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): vol.All(ipaddress.ip_address, cv.string),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
# pylint: disable=no-member
PROFILE_TO_STR_SETTABLE = {
VALLOX_PROFILE.HOME: "Home",
VALLOX_PROFILE.AWAY: "Away",
VALLOX_PROFILE.BOOST: "Boost",
VALLOX_PROFILE.FIREPLACE: "Fireplace",
}
STR_TO_PROFILE = {v: k for (k, v) in PROFILE_TO_STR_SETTABLE.items()}
# pylint: disable=no-member
PROFILE_TO_STR_REPORTABLE = {
**{VALLOX_PROFILE.NONE: "None", VALLOX_PROFILE.EXTRA: "Extra"},
**PROFILE_TO_STR_SETTABLE,
}
ATTR_PROFILE = "profile"
ATTR_PROFILE_FAN_SPEED = "fan_speed"
SERVICE_SCHEMA_SET_PROFILE = vol.Schema(
{vol.Required(ATTR_PROFILE): vol.All(cv.string, vol.In(STR_TO_PROFILE))}
)
SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED = vol.Schema(
{
vol.Required(ATTR_PROFILE_FAN_SPEED): vol.All(
vol.Coerce(int), vol.Clamp(min=0, max=100)
)
}
)
SERVICE_SET_PROFILE = "set_profile"
SERVICE_SET_PROFILE_FAN_SPEED_HOME = "set_profile_fan_speed_home"
SERVICE_SET_PROFILE_FAN_SPEED_AWAY = "set_profile_fan_speed_away"
SERVICE_SET_PROFILE_FAN_SPEED_BOOST = "set_profile_fan_speed_boost"
SERVICE_TO_METHOD = {
SERVICE_SET_PROFILE: {
"method": "async_set_profile",
"schema": SERVICE_SCHEMA_SET_PROFILE,
},
SERVICE_SET_PROFILE_FAN_SPEED_HOME: {
"method": "async_set_profile_fan_speed_home",
"schema": SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED,
},
SERVICE_SET_PROFILE_FAN_SPEED_AWAY: {
"method": "async_set_profile_fan_speed_away",
"schema": SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED,
},
SERVICE_SET_PROFILE_FAN_SPEED_BOOST: {
"method": "async_set_profile_fan_speed_boost",
"schema": SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED,
},
}
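# Example (hypothetical) service call routed through the mapping above:
#   service: vallox.set_profile_fan_speed_home, data: {"fan_speed": 60}
# Home Assistant validates the data against SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED
# (registered in async_setup), and ValloxServiceHandler.async_handle() then
# awaits async_set_profile_fan_speed_home(fan_speed=60).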
DEFAULT_FAN_SPEED_HOME = 50
DEFAULT_FAN_SPEED_AWAY = 25
DEFAULT_FAN_SPEED_BOOST = 65
async def async_setup(hass, config):
"""Set up the client and boot the platforms."""
conf = config[DOMAIN]
host = conf.get(CONF_HOST)
name = conf.get(CONF_NAME)
client = Vallox(host)
state_proxy = ValloxStateProxy(hass, client)
service_handler = ValloxServiceHandler(client, state_proxy)
hass.data[DOMAIN] = {"client": client, "state_proxy": state_proxy, "name": name}
for vallox_service in SERVICE_TO_METHOD:
schema = SERVICE_TO_METHOD[vallox_service]["schema"]
hass.services.async_register(
DOMAIN, vallox_service, service_handler.async_handle, schema=schema
)
# The Vallox hardware expects quite strict timings for websocket requests,
# timings that machines with less processing power, like Raspberry Pis, cannot
# live up to during the busy start phase of Home Assistant. Hence,
# async_add_entities() for fan and sensor in the respective platform code will
# be called with update_before_add=False to intentionally delay the first
# request, increasing the chance that it is issued only when the machine is
# less busy again.
hass.async_create_task(async_load_platform(hass, "sensor", DOMAIN, {}, config))
hass.async_create_task(async_load_platform(hass, "fan", DOMAIN, {}, config))
async_track_time_interval(hass, state_proxy.async_update, SCAN_INTERVAL)
return True
class ValloxStateProxy:
"""Helper class to reduce websocket API calls."""
def __init__(self, hass, client):
"""Initialize the proxy."""
self._hass = hass
self._client = client
self._metric_cache = {}
self._profile = None
self._valid = False
def fetch_metric(self, metric_key):
"""Return cached state value."""
_LOGGER.debug("Fetching metric key: %s", metric_key)
if not self._valid:
raise OSError("Device state out of sync.")
if metric_key not in vlxDevConstants.__dict__:
raise KeyError(f"Unknown metric key: {metric_key}")
return self._metric_cache[metric_key]
def get_profile(self):
"""Return cached profile value."""
_LOGGER.debug("Returning profile")
if not self._valid:
raise OSError("Device state out of sync.")
return PROFILE_TO_STR_REPORTABLE[self._profile]
async def async_update(self, event_time):
"""Fetch state update."""
_LOGGER.debug("Updating Vallox state cache")
try:
self._metric_cache = await self._client.fetch_metrics()
self._profile = await self._client.get_profile()
self._valid = True
except (OSError, ValloxApiException) as err:
_LOGGER.error("Error during state cache update: %s", err)
self._valid = False
async_dispatcher_send(self._hass, SIGNAL_VALLOX_STATE_UPDATE)
class ValloxServiceHandler:
"""Services implementation."""
def __init__(self, client, state_proxy):
"""Initialize the proxy."""
self._client = client
self._state_proxy = state_proxy
async def async_set_profile(self, profile: str = "Home") -> bool:
"""Set the ventilation profile."""
_LOGGER.debug("Setting ventilation profile to: %s", profile)
try:
await self._client.set_profile(STR_TO_PROFILE[profile])
return True
except (OSError, ValloxApiException) as err:
_LOGGER.error("Error setting ventilation profile: %s", err)
return False
async def async_set_profile_fan_speed_home(
self, fan_speed: int = DEFAULT_FAN_SPEED_HOME
) -> bool:
"""Set the fan speed in percent for the Home profile."""
_LOGGER.debug("Setting Home fan speed to: %d%%", fan_speed)
try:
await self._client.set_values(
{METRIC_KEY_PROFILE_FAN_SPEED_HOME: fan_speed}
)
return True
except (OSError, ValloxApiException) as err:
_LOGGER.error("Error setting fan speed for Home profile: %s", err)
return False
async def async_set_profile_fan_speed_away(
self, fan_speed: int = DEFAULT_FAN_SPEED_AWAY
) -> bool:
"""Set the fan speed in percent for the Home profile."""
_LOGGER.debug("Setting Away fan speed to: %d%%", fan_speed)
try:
await self._client.set_values(
{METRIC_KEY_PROFILE_FAN_SPEED_AWAY: fan_speed}
)
return True
except (OSError, ValloxApiException) as err:
_LOGGER.error("Error setting fan speed for Away profile: %s", err)
return False
async def async_set_profile_fan_speed_boost(
self, fan_speed: int = DEFAULT_FAN_SPEED_BOOST
) -> bool:
"""Set the fan speed in percent for the Boost profile."""
_LOGGER.debug("Setting Boost fan speed to: %d%%", fan_speed)
try:
await self._client.set_values(
{METRIC_KEY_PROFILE_FAN_SPEED_BOOST: fan_speed}
)
return True
except (OSError, ValloxApiException) as err:
_LOGGER.error("Error setting fan speed for Boost profile: %s", err)
return False
async def async_handle(self, service):
"""Dispatch a service call."""
method = SERVICE_TO_METHOD.get(service.service)
params = service.data.copy()
if not hasattr(self, method["method"]):
_LOGGER.error("Service not implemented: %s", method["method"])
return
result = await getattr(self, method["method"])(**params)
# Force state_proxy to refresh device state, so that updates are
# propagated to platforms.
if result:
await self._state_proxy.async_update(None)
|
import asyncio
import json
import logging
import aiohttp
import async_timeout
from homeassistant.const import HTTP_ACCEPTED, MATCH_ALL, STATE_ON
import homeassistant.util.dt as dt_util
from .const import API_CHANGE, Cause
from .entities import ENTITY_ADAPTERS, generate_alexa_id
from .messages import AlexaResponse
_LOGGER = logging.getLogger(__name__)
DEFAULT_TIMEOUT = 10
async def async_enable_proactive_mode(hass, smart_home_config):
"""Enable the proactive mode.
Proactive mode makes this component report state changes to Alexa.
"""
# Validate we can get access token.
await smart_home_config.async_get_access_token()
async def async_entity_state_listener(changed_entity, old_state, new_state):
if not hass.is_running:
return
if not new_state:
return
if new_state.domain not in ENTITY_ADAPTERS:
return
if not smart_home_config.should_expose(changed_entity):
_LOGGER.debug("Not exposing %s because filtered by config", changed_entity)
return
alexa_changed_entity = ENTITY_ADAPTERS[new_state.domain](
hass, smart_home_config, new_state
)
for interface in alexa_changed_entity.interfaces():
if interface.properties_proactively_reported():
await async_send_changereport_message(
hass, smart_home_config, alexa_changed_entity
)
return
if (
interface.name() == "Alexa.DoorbellEventSource"
and new_state.state == STATE_ON
):
await async_send_doorbell_event_message(
hass, smart_home_config, alexa_changed_entity
)
return
return hass.helpers.event.async_track_state_change(
MATCH_ALL, async_entity_state_listener
)
async def async_send_changereport_message(
hass, config, alexa_entity, *, invalidate_access_token=True
):
"""Send a ChangeReport message for an Alexa entity.
https://developer.amazon.com/docs/smarthome/state-reporting-for-a-smart-home-skill.html#report-state-with-changereport-events
"""
token = await config.async_get_access_token()
headers = {"Authorization": f"Bearer {token}"}
endpoint = alexa_entity.alexa_id()
# This sends all the properties of the Alexa entity, whether they have
# changed or not. This should be improved, and properties that have not
# changed should be moved to the 'context' object.
properties = list(alexa_entity.serialize_properties())
payload = {
API_CHANGE: {"cause": {"type": Cause.APP_INTERACTION}, "properties": properties}
}
message = AlexaResponse(name="ChangeReport", namespace="Alexa", payload=payload)
message.set_endpoint_full(token, endpoint)
message_serialized = message.serialize()
session = hass.helpers.aiohttp_client.async_get_clientsession()
try:
with async_timeout.timeout(DEFAULT_TIMEOUT):
response = await session.post(
config.endpoint,
headers=headers,
json=message_serialized,
allow_redirects=True,
)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout sending report to Alexa")
return
response_text = await response.text()
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if response.status == HTTP_ACCEPTED:
return
response_json = json.loads(response_text)
if (
response_json["payload"]["code"] == "INVALID_ACCESS_TOKEN_EXCEPTION"
and invalidate_access_token
):
config.async_invalidate_access_token()
return await async_send_changereport_message(
hass, config, alexa_entity, invalidate_access_token=False
)
_LOGGER.error(
"Error when sending ChangeReport to Alexa: %s: %s",
response_json["payload"]["code"],
response_json["payload"]["description"],
)
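# Note on the retry above: when Alexa rejects the report with
# INVALID_ACCESS_TOKEN_EXCEPTION, the cached token is invalidated and the
# ChangeReport is re-sent exactly once; passing invalidate_access_token=False
# on the recursive call prevents an endless retry loop.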
async def async_send_add_or_update_message(hass, config, entity_ids):
"""Send an AddOrUpdateReport message for entities.
https://developer.amazon.com/docs/device-apis/alexa-discovery.html#add-or-update-report
"""
token = await config.async_get_access_token()
headers = {"Authorization": f"Bearer {token}"}
endpoints = []
for entity_id in entity_ids:
domain = entity_id.split(".", 1)[0]
if domain not in ENTITY_ADAPTERS:
continue
alexa_entity = ENTITY_ADAPTERS[domain](hass, config, hass.states.get(entity_id))
endpoints.append(alexa_entity.serialize_discovery())
payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}}
message = AlexaResponse(
name="AddOrUpdateReport", namespace="Alexa.Discovery", payload=payload
)
message_serialized = message.serialize()
session = hass.helpers.aiohttp_client.async_get_clientsession()
return await session.post(
config.endpoint, headers=headers, json=message_serialized, allow_redirects=True
)
async def async_send_delete_message(hass, config, entity_ids):
"""Send an DeleteReport message for entities.
https://developer.amazon.com/docs/device-apis/alexa-discovery.html#deletereport-event
"""
token = await config.async_get_access_token()
headers = {"Authorization": f"Bearer {token}"}
endpoints = []
for entity_id in entity_ids:
domain = entity_id.split(".", 1)[0]
if domain not in ENTITY_ADAPTERS:
continue
endpoints.append({"endpointId": generate_alexa_id(entity_id)})
payload = {"endpoints": endpoints, "scope": {"type": "BearerToken", "token": token}}
message = AlexaResponse(
name="DeleteReport", namespace="Alexa.Discovery", payload=payload
)
message_serialized = message.serialize()
session = hass.helpers.aiohttp_client.async_get_clientsession()
return await session.post(
config.endpoint, headers=headers, json=message_serialized, allow_redirects=True
)
async def async_send_doorbell_event_message(hass, config, alexa_entity):
"""Send a DoorbellPress event message for an Alexa entity.
https://developer.amazon.com/docs/smarthome/send-events-to-the-alexa-event-gateway.html
"""
token = await config.async_get_access_token()
headers = {"Authorization": f"Bearer {token}"}
endpoint = alexa_entity.alexa_id()
message = AlexaResponse(
name="DoorbellPress",
namespace="Alexa.DoorbellEventSource",
payload={
"cause": {"type": Cause.PHYSICAL_INTERACTION},
"timestamp": f"{dt_util.utcnow().replace(tzinfo=None).isoformat()}Z",
},
)
message.set_endpoint_full(token, endpoint)
message_serialized = message.serialize()
session = hass.helpers.aiohttp_client.async_get_clientsession()
try:
with async_timeout.timeout(DEFAULT_TIMEOUT):
response = await session.post(
config.endpoint,
headers=headers,
json=message_serialized,
allow_redirects=True,
)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout sending report to Alexa")
return
response_text = await response.text()
_LOGGER.debug("Sent: %s", json.dumps(message_serialized))
_LOGGER.debug("Received (%s): %s", response.status, response_text)
if response.status == HTTP_ACCEPTED:
return
response_json = json.loads(response_text)
_LOGGER.error(
"Error when sending DoorbellPress event to Alexa: %s: %s",
response_json["payload"]["code"],
response_json["payload"]["description"],
)
|
import logging
from haffmpeg.camera import CameraMjpeg
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, Camera
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import ATTR_BATTERY_LEVEL
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DATA_ARLO, DEFAULT_BRAND, SIGNAL_UPDATE_ARLO
_LOGGER = logging.getLogger(__name__)
ARLO_MODE_ARMED = "armed"
ARLO_MODE_DISARMED = "disarmed"
ATTR_BRIGHTNESS = "brightness"
ATTR_FLIPPED = "flipped"
ATTR_MIRRORED = "mirrored"
ATTR_MOTION = "motion_detection_sensitivity"
ATTR_POWERSAVE = "power_save_mode"
ATTR_SIGNAL_STRENGTH = "signal_strength"
ATTR_UNSEEN_VIDEOS = "unseen_videos"
ATTR_LAST_REFRESH = "last_refresh"
CONF_FFMPEG_ARGUMENTS = "ffmpeg_arguments"
DEFAULT_ARGUMENTS = "-pred 1"
POWERSAVE_MODE_MAPPING = {1: "best_battery_life", 2: "optimized", 3: "best_video"}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_FFMPEG_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up an Arlo IP Camera."""
arlo = hass.data[DATA_ARLO]
cameras = []
for camera in arlo.cameras:
cameras.append(ArloCam(hass, camera, config))
add_entities(cameras)
class ArloCam(Camera):
"""An implementation of a Netgear Arlo IP camera."""
def __init__(self, hass, camera, device_info):
"""Initialize an Arlo camera."""
super().__init__()
self._camera = camera
self._name = self._camera.name
self._motion_status = False
self._ffmpeg = hass.data[DATA_FFMPEG]
self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS)
self._last_refresh = None
self.attrs = {}
def camera_image(self):
"""Return a still image response from the camera."""
return self._camera.last_image_from_cache
async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_ARLO, self.async_write_ha_state
)
)
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
video = await self.hass.async_add_executor_job(
getattr, self._camera, "last_video"
)
if not video:
error_msg = (
f"Video not found for {self.name}. "
f"Is it older than {self._camera.min_days_vdo_cache} days?"
)
_LOGGER.error(error_msg)
return
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
await stream.open_camera(video.video_url, extra_cmd=self._ffmpeg_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
self._ffmpeg.ffmpeg_stream_content_type,
)
finally:
await stream.close()
@property
def name(self):
"""Return the name of this camera."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
name: value
for name, value in (
(ATTR_BATTERY_LEVEL, self._camera.battery_level),
(ATTR_BRIGHTNESS, self._camera.brightness),
(ATTR_FLIPPED, self._camera.flip_state),
(ATTR_MIRRORED, self._camera.mirror_state),
(ATTR_MOTION, self._camera.motion_detection_sensitivity),
(
ATTR_POWERSAVE,
POWERSAVE_MODE_MAPPING.get(self._camera.powersave_mode),
),
(ATTR_SIGNAL_STRENGTH, self._camera.signal_strength),
(ATTR_UNSEEN_VIDEOS, self._camera.unseen_videos),
)
if value is not None
}
@property
def model(self):
"""Return the camera model."""
return self._camera.model_id
@property
def brand(self):
"""Return the camera brand."""
return DEFAULT_BRAND
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self._motion_status
def set_base_station_mode(self, mode):
"""Set the mode in the base station."""
# Get the list of base stations identified by library
base_stations = self.hass.data[DATA_ARLO].base_stations
        # Some Arlo cameras do not have a base station, so check whether one
        # was detected first; if so, use the primary base station and set the
        # mode on it.
if base_stations:
primary_base_station = base_stations[0]
primary_base_station.mode = mode
def enable_motion_detection(self):
"""Enable the Motion detection in base station (Arm)."""
self._motion_status = True
self.set_base_station_mode(ARLO_MODE_ARMED)
def disable_motion_detection(self):
"""Disable the motion detection in base station (Disarm)."""
self._motion_status = False
self.set_base_station_mode(ARLO_MODE_DISARMED)
|
from __future__ import print_function
import argparse
import os
import sys
def main(args):
p = argparse.ArgumentParser(description=__doc__)
p.add_argument("dir", action="store", nargs="?", default=os.environ["HOME2"], help="the new working directory")
ns = p.parse_args(args)
status = 0
try:
if os.path.exists(ns.dir):
if os.path.isdir(ns.dir):
# chdir does not raise exception until listdir is called, so check for access here
if os.access(ns.dir, os.R_OK):
os.chdir(ns.dir)
else:
                    print('cd: %s: Access denied' % ns.dir)
else:
print('cd: %s: Not a directory' % ns.dir)
else:
print('cd: %s: No such file or directory' % ns.dir)
except Exception as err:
print("cd: {}: {!s}".format(type(err).__name__, err), file=sys.stderr)
status = 1
sys.exit(status)
if __name__ == "__main__":
main(sys.argv[1:])
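# Example invocation (illustrative; assumes the script is saved as cd.py and
# that the HOME2 environment variable is set, since it is used as the default):
#   python cd.py /tmp    # change the working directory to /tmp
#   python cd.py         # fall back to $HOME2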
|
import numpy as np
def voc_colormap(labels):
"""Color map used in PASCAL VOC
Args:
labels (iterable of ints): Class ids.
Returns:
numpy.ndarray: Colors in RGB order. The shape is :math:`(N, 3)`,
where :math:`N` is the size of :obj:`labels`. The range of the values
is :math:`[0, 255]`.
"""
colors = []
for label in labels:
r, g, b = 0, 0, 0
i = label
for j in range(8):
if i & (1 << 0):
r |= 1 << (7 - j)
if i & (1 << 1):
g |= 1 << (7 - j)
if i & (1 << 2):
b |= 1 << (7 - j)
i >>= 3
colors.append((r, g, b))
return np.array(colors, dtype=np.float32)
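# --- Usage sketch (added illustration; not part of the original module) ---
# Labels 0, 1 and 2 are background, aeroplane and bicycle in the PASCAL VOC
# convention; with the bit trick above, label 1 maps to (128., 0., 0.).
example_colors = voc_colormap([0, 1, 2])
# example_colors.shape == (3, 3), dtype float32, values in [0, 255].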
|
import logging
from sense_energy import PlugInstance, SenseLink
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.components.switch import ATTR_CURRENT_POWER_W
from homeassistant.const import (
CONF_ENTITIES,
CONF_NAME,
CONF_UNIQUE_ID,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
STATE_ON,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_registry import RegistryEntry
from homeassistant.helpers.template import Template, is_template_string
from .const import CONF_POWER, CONF_POWER_ENTITY, DOMAIN
_LOGGER = logging.getLogger(__name__)
CONFIG_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_POWER): vol.Any(
vol.Coerce(float),
cv.template,
),
vol.Optional(CONF_POWER_ENTITY): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_ENTITIES): vol.Schema(
{cv.entity_id: CONFIG_ENTITY_SCHEMA}
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the emulated_kasa component."""
conf = config.get(DOMAIN)
if not conf:
return True
entity_configs = conf[CONF_ENTITIES]
def devices():
"""Devices to be emulated."""
yield from get_plug_devices(hass, entity_configs)
server = SenseLink(devices)
async def stop_emulated_kasa(event):
await server.stop()
async def start_emulated_kasa(event):
await validate_configs(hass, entity_configs)
try:
await server.start()
except OSError as error:
_LOGGER.error("Failed to create UDP server at port 9999: %s", error)
else:
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emulated_kasa)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_emulated_kasa)
return True
async def validate_configs(hass, entity_configs):
"""Validate that entities exist and ensure templates are ready to use."""
entity_registry = await hass.helpers.entity_registry.async_get_registry()
for entity_id, entity_config in entity_configs.items():
state = hass.states.get(entity_id)
if state is None:
_LOGGER.debug("Entity not found: %s", entity_id)
continue
entity = entity_registry.async_get(entity_id)
if entity:
entity_config[CONF_UNIQUE_ID] = get_system_unique_id(entity)
else:
entity_config[CONF_UNIQUE_ID] = entity_id
if CONF_POWER in entity_config:
power_val = entity_config[CONF_POWER]
if isinstance(power_val, str) and is_template_string(power_val):
entity_config[CONF_POWER] = Template(power_val, hass)
elif isinstance(power_val, Template):
entity_config[CONF_POWER].hass = hass
elif CONF_POWER_ENTITY in entity_config:
power_val = entity_config[CONF_POWER_ENTITY]
if hass.states.get(power_val) is None:
_LOGGER.debug("Sensor Entity not found: %s", power_val)
else:
entity_config[CONF_POWER] = power_val
elif state.domain == SENSOR_DOMAIN:
pass
elif ATTR_CURRENT_POWER_W in state.attributes:
pass
else:
_LOGGER.debug("No power value defined for: %s", entity_id)
def get_system_unique_id(entity: RegistryEntry):
"""Determine the system wide unique_id for an entity."""
return f"{entity.platform}.{entity.domain}.{entity.unique_id}"
def get_plug_devices(hass, entity_configs):
"""Produce list of plug devices from config entities."""
for entity_id, entity_config in entity_configs.items():
state = hass.states.get(entity_id)
if state is None:
continue
name = entity_config.get(CONF_NAME, state.name)
if state.state == STATE_ON or state.domain == SENSOR_DOMAIN:
if CONF_POWER in entity_config:
power_val = entity_config[CONF_POWER]
if isinstance(power_val, (float, int)):
power = float(power_val)
elif isinstance(power_val, str):
power = float(hass.states.get(power_val).state)
elif isinstance(power_val, Template):
power = float(power_val.async_render())
elif ATTR_CURRENT_POWER_W in state.attributes:
power = float(state.attributes[ATTR_CURRENT_POWER_W])
elif state.domain == SENSOR_DOMAIN:
power = float(state.state)
else:
power = 0.0
last_changed = state.last_changed.timestamp()
yield PlugInstance(
entity_config[CONF_UNIQUE_ID],
start_time=last_changed,
alias=name,
power=power,
)
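# --- Illustrative configuration (added sketch; entity ids and values are made up) ---
# The shape of data CONFIG_SCHEMA accepts, expressed with the constants above.
# The switch gets a fixed wattage; the bare sensor entry relies on the sensor's
# own state being used as the power value in get_plug_devices().
EXAMPLE_CONFIG = {
    DOMAIN: {
        CONF_ENTITIES: {
            "switch.desk_lamp": {CONF_NAME: "Desk Lamp", CONF_POWER: 40.0},
            "sensor.couch_power": {},
        }
    }
}
# CONFIG_SCHEMA(EXAMPLE_CONFIG) should return the validated, coerced structure.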
|
from marshmallow import fields
from lemur.users.schemas import UserNestedOutputSchema
from lemur.authorities.schemas import AuthorityNestedOutputSchema
from lemur.common.schema import LemurInputSchema, LemurOutputSchema
from lemur.schemas import AssociatedUserSchema, AssociatedAuthoritySchema
class RoleInputSchema(LemurInputSchema):
id = fields.Integer()
name = fields.String(required=True)
username = fields.String()
password = fields.String()
description = fields.String()
authorities = fields.Nested(AssociatedAuthoritySchema, many=True)
users = fields.Nested(AssociatedUserSchema, many=True)
class RoleOutputSchema(LemurOutputSchema):
id = fields.Integer()
name = fields.String()
description = fields.String()
third_party = fields.Boolean()
authorities = fields.Nested(AuthorityNestedOutputSchema, many=True)
users = fields.Nested(UserNestedOutputSchema, many=True)
class RoleNestedOutputSchema(LemurOutputSchema):
__envelope__ = False
id = fields.Integer()
name = fields.String()
description = fields.String()
role_input_schema = RoleInputSchema()
role_output_schema = RoleOutputSchema()
roles_output_schema = RoleOutputSchema(many=True)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from datasets import dataset_utils
slim = tf.contrib.slim
_FILE_PATTERN = 'flowers_%s_*.tfrecord'
SPLITS_TO_SIZES = {'train': 3320, 'validation': 350}
_NUM_CLASSES = 5
_ITEMS_TO_DESCRIPTIONS = {
'image': 'A color image of varying size.',
'label': 'A single integer between 0 and 4',
}
def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
"""Gets a dataset tuple with instructions for reading flowers.
Args:
split_name: A train/validation split name.
dataset_dir: The base directory of the dataset sources.
file_pattern: The file pattern to use when matching the dataset sources.
It is assumed that the pattern contains a '%s' string so that the split
name can be inserted.
reader: The TensorFlow reader type.
Returns:
A `Dataset` namedtuple.
Raises:
ValueError: if `split_name` is not a valid train/validation split.
"""
if split_name not in SPLITS_TO_SIZES:
raise ValueError('split name %s was not recognized.' % split_name)
if not file_pattern:
file_pattern = _FILE_PATTERN
file_pattern = os.path.join(dataset_dir, file_pattern % split_name)
# Allowing None in the signature so that dataset_factory can use the default.
if reader is None:
reader = tf.TFRecordReader
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='png'),
'image/class/label': tf.FixedLenFeature(
[], tf.int64, default_value=tf.zeros([], dtype=tf.int64)),
}
items_to_handlers = {
'image': slim.tfexample_decoder.Image(),
'label': slim.tfexample_decoder.Tensor('image/class/label'),
}
decoder = slim.tfexample_decoder.TFExampleDecoder(
keys_to_features, items_to_handlers)
labels_to_names = None
if dataset_utils.has_labels(dataset_dir):
labels_to_names = dataset_utils.read_label_file(dataset_dir)
return slim.dataset.Dataset(
data_sources=file_pattern,
reader=reader,
decoder=decoder,
num_samples=SPLITS_TO_SIZES[split_name],
items_to_descriptions=_ITEMS_TO_DESCRIPTIONS,
num_classes=_NUM_CLASSES,
labels_to_names=labels_to_names)
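# --- Usage sketch (added illustration; the dataset directory is a placeholder) ---
# Typical slim-style consumption of the split defined above.
example_dataset = get_split('validation', '/tmp/flowers')
example_provider = slim.dataset_data_provider.DatasetDataProvider(example_dataset)
example_image, example_label = example_provider.get(['image', 'label'])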
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import inspect
from absl import flags
from absl import logging
from compare_gan.tpu import tpu_random
import gin
import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds
FLAGS = flags.FLAGS
flags.DEFINE_string(
"tfds_data_dir", None,
"TFDS (TensorFlow Datasets) directory. If not set it will default to "
"'~/tensorflow_datasets'. If the directory does not contain the requested "
"dataset TFDS will download the dataset to this folder.")
flags.DEFINE_boolean(
"data_fake_dataset", False,
"If True don't load datasets from disk but create fake values.")
flags.DEFINE_integer(
"data_shuffle_buffer_size", 10000,
"Number of examples for the shuffle buffer.")
# Deprecated, only used for "replacing labels". TFDS will always use 64 threads.
flags.DEFINE_integer(
"data_reading_num_threads", 64,
"The number of threads used to read the dataset.")
class ImageDatasetV2(object):
"""Interface for Image datasets based on TFDS (TensorFlow Datasets).
This method handles both CPU/GPU and TPU data loading settings. If the flag
--data_fake_dataset is True the methods will create a small fake dataset from
in-memory NumPy arrays and not read from disk.
  The pipeline of input operations is as follows:
1) Shuffle filenames (with seed).
2) Load file content from disk. Decode images.
Dataset content after this step is a dictionary.
3) Prefetch call here.
4) Filter examples (e.g. by size or label).
5) Parse example.
Dataset content after this step is a tuple of tensors (image, label).
6) train_only: Repeat dataset.
7) Transform (random cropping with seed, resizing).
8) Preprocess (adding sampled noise/labels with seed).
Dataset content after this step is a tuple (feature dictionary, label tensor).
9) train only: Shuffle examples (with seed).
10) Batch examples.
11) Prefetch examples.
Step 1-3 are done by _load_dataset() and wrap tfds.load().
Step 4-11 are done by train_input_fn() and eval_input_fn().
"""
def __init__(self,
name,
tfds_name,
resolution,
colors,
num_classes,
eval_test_samples,
seed):
logging.info("ImageDatasetV2(name=%s, tfds_name=%s, resolution=%d, "
"colors=%d, num_classes=%s, eval_test_samples=%s, seed=%s)",
name, tfds_name, resolution, colors, num_classes,
eval_test_samples, seed)
self._name = name
self._tfds_name = tfds_name
self._resolution = resolution
self._colors = colors
self._num_classes = num_classes
self._eval_test_sample = eval_test_samples
self._seed = seed
self._train_split = tfds.Split.TRAIN
self._eval_split = tfds.Split.TEST
@property
def name(self):
"""Name of the dataset."""
return self._name
@property
def num_classes(self):
return self._num_classes
@property
def eval_test_samples(self):
"""Number of examples in the "test" split of this dataset."""
if FLAGS.data_fake_dataset:
return 100
return self._eval_test_sample
@property
def image_shape(self):
"""Returns a tuple with the image shape."""
return (self._resolution, self._resolution, self._colors)
def _make_fake_dataset(self, split):
"""Returns a fake data set with the correct shapes."""
np.random.seed(self._seed)
num_samples_per_epoch = 100
num_epochs = self.eval_test_samples // 100 if split == "test" else None
images_shape = [num_samples_per_epoch] + list(self.image_shape)
images = np.random.uniform(size=images_shape).astype(np.float32)
labels = np.ones((num_samples_per_epoch,), dtype=np.int32)
ds = tf.data.Dataset.from_tensor_slices((images, labels))
return ds.repeat(num_epochs)
def _get_per_host_random_seed(self, tpu_context=None):
"""Returns the dataset seed for according to the TPUContext.
On CPU/GPU it returns the default seed. For TPUs the input_fn is executed
on every host machine (if per-host input is set, which is set by default).
We use a different (but deterministically computed) random seed on each host
to ensure each host machine sees a different stream of input data.
Args:
tpu_context: TPU execution context.
Returns:
The current seed if CPU/GPU and a host-specific seed for TPU.
"""
if self._seed is None:
logging.warning("Dataset seed not set.")
return None
if tpu_context is None:
logging.warning("No TPUContext, using unmodified dataset seed %s.",
self._seed)
return self._seed
seed = self._seed + tpu_context.current_host
logging.info("Running with %d hosts, modifying dataset seed for "
"host %d to %s.", tpu_context.num_hosts,
tpu_context.current_host, seed)
return seed
@gin.configurable("replace_labels", whitelist=["file_pattern"])
def _replace_labels(self, split, ds, file_pattern=None):
"""Replaces the labels in the dataset with labels from separate files.
This functionality is used if one wants to either replace the labels with
soft labels (i.e. softmax over the logits) or label the instances with
a new classifier.
Args:
split: Dataset split (e.g. train/test/validation).
ds: The underlying TFDS object.
file_pattern: Path to the replacement files.
Returns:
An instance of tf.data.Dataset with the updated labels.
"""
if not file_pattern:
return ds
file_pattern = file_pattern.format(split=split)
logging.warning("Using labels from %s for split %s.", file_pattern, split)
label_ds = tf.data.Dataset.list_files(file_pattern, shuffle=False)
label_ds = label_ds.interleave(
tf.data.TFRecordDataset,
cycle_length=FLAGS.data_reading_num_threads)
ds = tf.data.Dataset.zip((ds, label_ds)).map(self._replace_label)
return ds
def _replace_label(self, feature_dict, new_unparsed_label):
"""Replaces the label from the feature_dict with the new label.
Furthermore, if the feature_dict contains a key for the file_name which
    identifies an instance, we double-check that we are replacing the label
of the correct instance.
Args:
feature_dict: A serialized TFRecord containing the old label.
new_unparsed_label: A serialized TFRecord containing the new label.
Returns:
      The feature_dict with its label replaced by the new label.
"""
label_spec = {
"file_name": tf.FixedLenFeature((), tf.string),
"label": tf.FixedLenFeature((), tf.int64),
}
parsed_label = tf.parse_single_example(new_unparsed_label, label_spec)
with tf.control_dependencies([
tf.assert_equal(parsed_label["file_name"], feature_dict["file_name"])]):
feature_dict["label"] = tf.identity(parsed_label["label"])
return feature_dict
def _parse_fn(self, features):
image = tf.cast(features["image"], tf.float32) / 255.0
return image, features["label"]
def _load_dataset(self, split):
"""Loads the underlying dataset split from disk.
Args:
split: Name of the split to load.
Returns:
Returns a `tf.data.Dataset` object with a tuple of image and label tensor.
"""
if FLAGS.data_fake_dataset:
return self._make_fake_dataset(split)
ds = tfds.load(
self._tfds_name,
split=split,
data_dir=FLAGS.tfds_data_dir,
as_dataset_kwargs={"shuffle_files": False})
ds = self._replace_labels(split, ds)
ds = ds.map(self._parse_fn)
return ds.prefetch(tf.contrib.data.AUTOTUNE)
def _train_filter_fn(self, image, label):
del image, label
return True
def _train_transform_fn(self, image, label, seed):
del seed
return image, label
def _eval_transform_fn(self, image, label, seed):
del seed
return image, label
def train_input_fn(self, params=None, preprocess_fn=None):
"""Input function for reading data.
Args:
params: Python dictionary with parameters. Must contain the key
"batch_size". TPUEstimator will set this for you!
preprocess_fn: Function to process single examples. This is allowed to
have a `seed` argument.
Returns:
`tf.data.Dataset` with preprocessed and batched examples.
"""
if params is None:
params = {}
seed = self._get_per_host_random_seed(params.get("context", None))
logging.info("train_input_fn(): params=%s seed=%s", params, seed)
ds = self._load_dataset(split=self._train_split)
ds = ds.filter(self._train_filter_fn)
ds = ds.repeat()
ds = ds.map(functools.partial(self._train_transform_fn, seed=seed))
if preprocess_fn is not None:
if "seed" in inspect.getargspec(preprocess_fn).args:
preprocess_fn = functools.partial(preprocess_fn, seed=seed)
ds = ds.map(preprocess_fn)
# Add a feature for the random offset of operations in tpu_random.py.
ds = tpu_random.add_random_offset_to_features(ds)
ds = ds.shuffle(FLAGS.data_shuffle_buffer_size, seed=seed)
if "batch_size" in params:
ds = ds.batch(params["batch_size"], drop_remainder=True)
return ds.prefetch(tf.contrib.data.AUTOTUNE)
def eval_input_fn(self, params=None, split=None):
"""Input function for reading data.
Args:
params: Python dictionary with parameters. Must contain the key
"batch_size". TPUEstimator will set this for you!
split: Name of the split to use. If None will use the default eval split
of the dataset.
Returns:
`tf.data.Dataset` with preprocessed and batched examples.
"""
if params is None:
params = {}
if split is None:
split = self._eval_split
seed = self._get_per_host_random_seed(params.get("context", None))
logging.info("eval_input_fn(): params=%s seed=%s", params, seed)
ds = self._load_dataset(split=split)
    # No filter, no repeat.
ds = ds.map(functools.partial(self._eval_transform_fn, seed=seed))
# No shuffle.
if "batch_size" in params:
ds = ds.batch(params["batch_size"], drop_remainder=True)
return ds.prefetch(tf.contrib.data.AUTOTUNE)
  # For backwards compatibility with ImageDataset.
def input_fn(self, params, mode=tf.estimator.ModeKeys.TRAIN,
preprocess_fn=None):
assert mode == tf.estimator.ModeKeys.TRAIN, mode
return self.train_input_fn(params=params, preprocess_fn=preprocess_fn)
  # For backwards compatibility with ImageDataset.
def load_dataset(self, split_name):
assert split_name == "test", split_name
return self.eval_input_fn()
class MnistDataset(ImageDatasetV2):
"""Wrapper for the MNIST dataset from TFDS."""
def __init__(self, seed):
super(MnistDataset, self).__init__(
name="mnist",
tfds_name="mnist",
resolution=28,
colors=1,
num_classes=10,
eval_test_samples=10000,
seed=seed)
class FashionMnistDataset(ImageDatasetV2):
"""Wrapper for the Fashion-MNIST dataset from TDFS."""
def __init__(self, seed):
super(FashionMnistDataset, self).__init__(
name="fashion_mnist",
tfds_name="fashion_mnist",
resolution=28,
colors=1,
num_classes=10,
eval_test_samples=10000,
seed=seed)
class Cifar10Dataset(ImageDatasetV2):
"""Wrapper for the CIFAR10 dataset from TDFS."""
def __init__(self, seed):
super(Cifar10Dataset, self).__init__(
name="cifar10",
tfds_name="cifar10",
resolution=32,
colors=3,
num_classes=10,
eval_test_samples=10000,
seed=seed)
class CelebaDataset(ImageDatasetV2):
"""Wrapper for the CelebA dataset from TFDS."""
def __init__(self, seed):
super(CelebaDataset, self).__init__(
name="celeb_a",
tfds_name="celeb_a",
resolution=64,
colors=3,
num_classes=None,
eval_test_samples=10000,
seed=seed)
def _parse_fn(self, features):
"""Returns 64x64x3 image and constant label."""
image = features["image"]
image = tf.image.resize_image_with_crop_or_pad(image, 160, 160)
# Note: possibly consider using NumPy's imresize(image, (64, 64))
image = tf.image.resize_images(image, [64, 64])
image.set_shape(self.image_shape)
image = tf.cast(image, tf.float32) / 255.0
label = tf.constant(0, dtype=tf.int32)
return image, label
class LsunBedroomDataset(ImageDatasetV2):
"""Wrapper from the LSUN Bedrooms dataset from TFDS."""
def __init__(self, seed):
super(LsunBedroomDataset, self).__init__(
name="lsun-bedroom",
tfds_name="lsun/bedroom",
resolution=128,
colors=3,
num_classes=None,
eval_test_samples=30000,
seed=seed)
# As the official LSUN validation set only contains 300 samples, which is
    # insufficient for FID computation, we're splitting off some training
# samples. The smallest percentage selectable through TFDS is 1%, so we're
# going to use that (corresponding roughly to 30000 samples).
# If you want to use fewer eval samples, just modify eval_test_samples.
self._train_split, self._eval_split = \
tfds.Split.TRAIN.subsplit([99, 1])
def _parse_fn(self, features):
"""Returns a 128x128x3 Tensor with constant label 0."""
image = features["image"]
image = tf.image.resize_image_with_crop_or_pad(
image, target_height=128, target_width=128)
image = tf.cast(image, tf.float32) / 255.0
label = tf.constant(0, dtype=tf.int32)
return image, label
def _transform_imagnet_image(image, target_image_shape, crop_method, seed):
"""Preprocesses ImageNet images to have a target image shape.
Args:
image: 3-D tensor with a single image.
target_image_shape: List/Tuple with target image shape.
crop_method: Method for cropping the image:
One of: distorted, random, middle, none
seed: Random seed, only used for `crop_method=distorted`.
Returns:
Image tensor with shape `target_image_shape`.
"""
if crop_method == "distorted":
begin, size, _ = tf.image.sample_distorted_bounding_box(
tf.shape(image),
tf.zeros([0, 0, 4], tf.float32),
aspect_ratio_range=[1.0, 1.0],
area_range=[0.5, 1.0],
use_image_if_no_bounding_boxes=True,
seed=seed)
image = tf.slice(image, begin, size)
    # Unfortunately, the above operation loses the depth dimension, so we need
    # to restore it manually.
image.set_shape([None, None, target_image_shape[-1]])
elif crop_method == "random":
tf.set_random_seed(seed)
shape = tf.shape(image)
h, w = shape[0], shape[1]
size = tf.minimum(h, w)
begin = [h - size, w - size] * tf.random.uniform([2], 0, 1)
begin = tf.cast(begin, tf.int32)
begin = tf.concat([begin, [0]], axis=0) # Add channel dimension.
image = tf.slice(image, begin, [size, size, 3])
elif crop_method == "middle":
shape = tf.shape(image)
h, w = shape[0], shape[1]
size = tf.minimum(h, w)
begin = tf.cast([h - size, w - size], tf.float32) / 2.0
begin = tf.cast(begin, tf.int32)
begin = tf.concat([begin, [0]], axis=0) # Add channel dimension.
image = tf.slice(image, begin, [size, size, 3])
elif crop_method != "none":
raise ValueError("Unsupported crop method: {}".format(crop_method))
image = tf.image.resize_images(
image, [target_image_shape[0], target_image_shape[1]])
image.set_shape(target_image_shape)
return image
@gin.configurable("train_imagenet_transform", whitelist=["crop_method"])
def _train_imagenet_transform(image, target_image_shape, seed,
crop_method="distorted"):
return _transform_imagnet_image(
image,
target_image_shape=target_image_shape,
crop_method=crop_method,
seed=seed)
@gin.configurable("eval_imagenet_transform", whitelist=["crop_method"])
def _eval_imagenet_transform(image, target_image_shape, seed,
crop_method="middle"):
return _transform_imagnet_image(
image,
target_image_shape=target_image_shape,
crop_method=crop_method,
seed=seed)
class ImagenetDataset(ImageDatasetV2):
"""ImageNet2012 as defined by TF Datasets."""
def __init__(self, resolution, seed, filter_unlabeled=False):
if resolution not in [64, 128, 256, 512]:
raise ValueError("Unsupported resolution: {}".format(resolution))
super(ImagenetDataset, self).__init__(
name="imagenet_{}".format(resolution),
tfds_name="imagenet2012",
resolution=resolution,
colors=3,
num_classes=1000,
eval_test_samples=50000,
seed=seed)
self._eval_split = tfds.Split.VALIDATION
self._filter_unlabeled = filter_unlabeled
def _train_filter_fn(self, image, label):
del image
if not self._filter_unlabeled:
return True
logging.warning("Filtering unlabeled examples.")
return tf.math.greater_equal(label, 0)
def _train_transform_fn(self, image, label, seed):
image = _train_imagenet_transform(
image=image, target_image_shape=self.image_shape, seed=seed)
return image, label
def _eval_transform_fn(self, image, label, seed):
image = _eval_imagenet_transform(
image=image, target_image_shape=self.image_shape, seed=seed)
return image, label
class SizeFilteredImagenetDataset(ImagenetDataset):
"""ImageNet from TFDS filtered by image size."""
def __init__(self, resolution, threshold, seed):
super(SizeFilteredImagenetDataset, self).__init__(
resolution=resolution,
seed=seed)
self._name = "imagenet_{}_hq{}".format(resolution, threshold)
self._threshold = threshold
def _train_filter_fn(self, image, label):
"""The minimum image dimension has to be larger than the threshold."""
del label
size = tf.math.reduce_min(tf.shape(image)[:2])
return tf.greater_equal(size, self._threshold)
class SingleClassImagenetDataset(ImagenetDataset):
"""ImageNet from TFDS with all instances having a constant label 0.
  It can be used to simulate the setting where no labels are provided.
"""
def __init__(self, resolution, seed):
super(SingleClassImagenetDataset, self).__init__(
resolution=resolution,
seed=seed)
self._name = "single_class_" + self._name
self._num_classes = 1
def _parse_fn(self, features):
image, _ = super(SingleClassImagenetDataset, self)._parse_fn(features)
label = tf.constant(0, dtype=tf.int32)
return image, label
class RandomClassImagenetDataset(ImagenetDataset):
"""ImageNet2012 dataset with random labels."""
def __init__(self, resolution, seed):
super(RandomClassImagenetDataset, self).__init__(
resolution=resolution,
seed=seed)
self._name = "random_class_" + self._name
self._num_classes = 1000
def _parse_fn(self, features):
image, _ = super(RandomClassImagenetDataset, self)._parse_fn(features)
    # tf.random.uniform requires an explicit shape; [] yields a scalar label.
    label = tf.random.uniform([], minval=0, maxval=1000, dtype=tf.int32)
return image, label
class SoftLabeledImagenetDataset(ImagenetDataset):
"""ImageNet2012 dataset with soft labels."""
def __init__(self, resolution, seed):
super(SoftLabeledImagenetDataset, self).__init__(
resolution=resolution,
seed=seed)
self._name = "soft_labeled_" + self._name
def _replace_label(self, feature_dict, new_unparsed_label):
"""Replaces the label from the feature_dict with the new (soft) label.
The function assumes that the new_unparsed_label contains a list of logits
which will be converted to a soft label using the softmax.
Args:
feature_dict: A serialized TFRecord containing the old label.
new_unparsed_label: A serialized TFRecord containing the new label.
Returns:
      The feature_dict with its label replaced by the new soft label.
"""
label_spec = {
"file_name": tf.FixedLenFeature((), tf.string),
"label": tf.FixedLenFeature([self._num_classes], tf.float32)
}
parsed_label = tf.parse_single_example(new_unparsed_label, label_spec)
with tf.control_dependencies([
tf.assert_equal(parsed_label["file_name"], feature_dict["file_name"])]):
feature_dict["label"] = tf.nn.softmax(logits=parsed_label["label"])
return feature_dict
DATASETS = {
"celeb_a": CelebaDataset,
"cifar10": Cifar10Dataset,
"fashion-mnist": FashionMnistDataset,
"lsun-bedroom": LsunBedroomDataset,
"mnist": MnistDataset,
"imagenet_64": functools.partial(ImagenetDataset, resolution=64),
"imagenet_128": functools.partial(ImagenetDataset, resolution=128),
"imagenet_256": functools.partial(ImagenetDataset, resolution=256),
"imagenet_512": functools.partial(ImagenetDataset, resolution=512),
"imagenet_512_hq400": (functools.partial(
SizeFilteredImagenetDataset, resolution=512, threshold=400)),
"soft_labeled_imagenet_128": functools.partial(
SoftLabeledImagenetDataset, resolution=128),
"single_class_imagenet_128": functools.partial(
SingleClassImagenetDataset, resolution=128),
"random_class_imagenet_128": functools.partial(
RandomClassImagenetDataset, resolution=128),
"labeled_only_imagenet_128": functools.partial(
ImagenetDataset, resolution=128, filter_unlabeled=True),
}
@gin.configurable("dataset")
def get_dataset(name, seed=547):
"""Instantiates a data set and sets the random seed."""
if name not in DATASETS:
raise ValueError("Dataset %s is not available." % name)
return DATASETS[name](seed=seed)
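# --- Usage sketch (added illustration; not part of the original module) ---
# Exercises the registry and the input pipeline described in the ImageDatasetV2
# docstring. Assumes the TFDS data is available (or --data_fake_dataset=True);
# "batch_size" mirrors what TPUEstimator passes to the input functions.
example_dataset = get_dataset("cifar10")
example_train_ds = example_dataset.train_input_fn(params={"batch_size": 64})
example_eval_ds = example_dataset.eval_input_fn(params={"batch_size": 64})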
|
import sys
from lark import Lark, tree
grammar = """
sentence: noun verb noun -> simple
| noun verb "like" noun -> comparative
noun: adj? NOUN
verb: VERB
adj: ADJ
NOUN: "flies" | "bananas" | "fruit"
VERB: "like" | "flies"
ADJ: "fruit"
%import common.WS
%ignore WS
"""
parser = Lark(grammar, start='sentence', ambiguity='explicit')
sentence = 'fruit flies like bananas'
def make_png(filename):
tree.pydot__tree_to_png( parser.parse(sentence), filename)
def make_dot(filename):
tree.pydot__tree_to_dot( parser.parse(sentence), filename)
if __name__ == '__main__':
print(parser.parse(sentence).pretty())
# make_png(sys.argv[1])
# make_dot(sys.argv[1])
# Output:
#
# _ambig
# comparative
# noun fruit
# verb flies
# noun bananas
# simple
# noun
# fruit
# flies
# verb like
# noun bananas
#
# (or view a nicer version at "./fruitflies.png")
|
import pandas as pd
from scattertext.termscoring.CorpusBasedTermScorer import CorpusBasedTermScorer
class ZScores(CorpusBasedTermScorer):
'''
Z-scores from Welch's t-test
term_scorer = (ZScores(corpus).set_categories('Positive', ['Negative'], ['Plot']))
html = st.produce_frequency_explorer(
corpus,
category='Positive',
not_categories=['Negative'],
neutral_categories=['Plot'],
term_scorer=term_scorer,
metadata=rdf['movie_name'],
grey_threshold=0,
show_neutral=True
)
file_name = 'rotten_fresh_fre.html'
open(file_name, 'wb').write(html.encode('utf-8'))
IFrame(src=file_name, width=1300, height=700)
'''
def _set_scorer_args(self, **kwargs):
pass
def get_scores(self, *args):
return pd.Series(self.get_t_statistics()[0],
index=self._get_index())
def get_name(self):
return "Z-Score from Welch's T-Test"
|
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
DOMAIN = "panel_custom"
CONF_COMPONENT_NAME = "name"
CONF_SIDEBAR_TITLE = "sidebar_title"
CONF_SIDEBAR_ICON = "sidebar_icon"
CONF_URL_PATH = "url_path"
CONF_CONFIG = "config"
CONF_JS_URL = "js_url"
CONF_MODULE_URL = "module_url"
CONF_EMBED_IFRAME = "embed_iframe"
CONF_TRUST_EXTERNAL_SCRIPT = "trust_external_script"
CONF_URL_EXCLUSIVE_GROUP = "url_exclusive_group"
CONF_REQUIRE_ADMIN = "require_admin"
DEFAULT_EMBED_IFRAME = False
DEFAULT_TRUST_EXTERNAL = False
DEFAULT_ICON = "mdi:bookmark"
LEGACY_URL = "/api/panel_custom/{}"
PANEL_DIR = "panels"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_COMPONENT_NAME): cv.string,
vol.Optional(CONF_SIDEBAR_TITLE): cv.string,
vol.Optional(CONF_SIDEBAR_ICON, default=DEFAULT_ICON): cv.icon,
vol.Optional(CONF_URL_PATH): cv.string,
vol.Optional(CONF_CONFIG): dict,
vol.Optional(
CONF_JS_URL,
): cv.string,
vol.Optional(
CONF_MODULE_URL,
): cv.string,
vol.Optional(
CONF_EMBED_IFRAME, default=DEFAULT_EMBED_IFRAME
): cv.boolean,
vol.Optional(
CONF_TRUST_EXTERNAL_SCRIPT,
default=DEFAULT_TRUST_EXTERNAL,
): cv.boolean,
vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,
}
),
],
)
},
extra=vol.ALLOW_EXTRA,
)
@bind_hass
async def async_register_panel(
hass,
# The url to serve the panel
frontend_url_path,
# The webcomponent name that loads your panel
webcomponent_name,
# Title/icon for sidebar
sidebar_title=None,
sidebar_icon=None,
# JS source of your panel
js_url=None,
# JS module of your panel
module_url=None,
# If your panel should be run inside an iframe
embed_iframe=DEFAULT_EMBED_IFRAME,
# Should user be asked for confirmation when loading external source
trust_external=DEFAULT_TRUST_EXTERNAL,
# Configuration to be passed to the panel
config=None,
# If your panel should only be shown to admin users
require_admin=False,
):
"""Register a new custom panel."""
if js_url is None and module_url is None:
raise ValueError("Either js_url, module_url or html_url is required.")
if config is not None and not isinstance(config, dict):
raise ValueError("Config needs to be a dictionary.")
custom_panel_config = {
"name": webcomponent_name,
"embed_iframe": embed_iframe,
"trust_external": trust_external,
}
if js_url is not None:
custom_panel_config["js_url"] = js_url
if module_url is not None:
custom_panel_config["module_url"] = module_url
if config is not None:
# Make copy because we're mutating it
config = dict(config)
else:
config = {}
config["_panel_custom"] = custom_panel_config
hass.components.frontend.async_register_built_in_panel(
component_name="custom",
sidebar_title=sidebar_title,
sidebar_icon=sidebar_icon,
frontend_url_path=frontend_url_path,
config=config,
require_admin=require_admin,
)
async def async_setup(hass, config):
"""Initialize custom panel."""
if DOMAIN not in config:
return True
for panel in config[DOMAIN]:
name = panel[CONF_COMPONENT_NAME]
kwargs = {
"webcomponent_name": panel[CONF_COMPONENT_NAME],
"frontend_url_path": panel.get(CONF_URL_PATH, name),
"sidebar_title": panel.get(CONF_SIDEBAR_TITLE),
"sidebar_icon": panel.get(CONF_SIDEBAR_ICON),
"config": panel.get(CONF_CONFIG),
"trust_external": panel[CONF_TRUST_EXTERNAL_SCRIPT],
"embed_iframe": panel[CONF_EMBED_IFRAME],
"require_admin": panel[CONF_REQUIRE_ADMIN],
}
if CONF_JS_URL in panel:
kwargs["js_url"] = panel[CONF_JS_URL]
if CONF_MODULE_URL in panel:
kwargs["module_url"] = panel[CONF_MODULE_URL]
try:
await async_register_panel(hass, **kwargs)
except ValueError as err:
_LOGGER.error(
"Unable to register panel %s: %s",
panel.get(CONF_SIDEBAR_TITLE, name),
err,
)
return True
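# --- Usage sketch (added illustration; URLs and names are placeholders) ---
# How another component might register a panel directly; `hass` is assumed to
# be a running HomeAssistant instance.
async def _example_register_panel(hass):
    await async_register_panel(
        hass,
        frontend_url_path="example-panel",
        webcomponent_name="example-panel",
        sidebar_title="Example",
        sidebar_icon=DEFAULT_ICON,
        module_url="/local/example-panel.js",
        config={"greeting": "hello"},
    )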
|
import os
import sys
import numpy as np
from mayavi.tools.mlab_scene_model import MlabSceneModel
from pyface.api import confirm, error, FileDialog, OK, YES
from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
cached_property, Instance, Property, Array, Bool,
Button, Enum, File, Float, List, Str, ArrayOrNone)
from traitsui.api import View, Item, HGroup, VGroup, CheckListEditor
from traitsui.menu import Action, CancelButton
from ..transforms import apply_trans, rotation, translation
from ..coreg import fit_matched_points
from ..io.kit import read_mrk
from ..io._digitization import _write_dig_points
from ._viewer import PointObject
from ._backend import _get_pyface_backend
if _get_pyface_backend() == 'wx':
mrk_wildcard = [
'Supported Files (*.sqd, *.mrk, *.txt, *.pickled)|*.sqd;*.mrk;*.txt;*.pickled', # noqa:E501
'Sqd marker file (*.sqd;*.mrk)|*.sqd;*.mrk',
'Text marker file (*.txt)|*.txt',
'Pickled markers (*.pickled)|*.pickled']
mrk_out_wildcard = ["Tab separated values file (*.txt)|*.txt"]
else:
if sys.platform in ('win32', 'linux2'):
        # on Windows and Ubuntu, multiple wildcards do not seem to work
mrk_wildcard = ["*.sqd", "*.mrk", "*.txt", "*.pickled"]
else:
mrk_wildcard = ["*.sqd;*.mrk;*.txt;*.pickled"]
mrk_out_wildcard = "*.txt"
out_ext = '.txt'
use_editor_v = CheckListEditor(cols=1, values=[(i, str(i)) for i in range(5)])
use_editor_h = CheckListEditor(cols=5, values=[(i, str(i)) for i in range(5)])
mrk_view_editable = View(
VGroup('file',
Item('name', show_label=False, style='readonly'),
HGroup(
Item('use', editor=use_editor_v, enabled_when="enabled",
style='custom'),
'points',
),
HGroup(Item('clear', enabled_when="can_save", show_label=False),
Item('save_as', enabled_when="can_save",
show_label=False)),
))
mrk_view_basic = View(
VGroup('file',
Item('name', show_label=False, style='readonly'),
Item('use', editor=use_editor_h, enabled_when="enabled",
style='custom'),
HGroup(Item('clear', enabled_when="can_save", show_label=False),
Item('edit', show_label=False),
Item('switch_left_right', label="Switch Left/Right",
show_label=False),
Item('reorder', show_label=False),
Item('save_as', enabled_when="can_save",
show_label=False)),
))
mrk_view_edit = View(VGroup('points'))
class ReorderDialog(HasPrivateTraits):
"""Dialog for reordering marker points."""
order = Str("0 1 2 3 4")
index = Property(List, depends_on='order')
is_ok = Property(Bool, depends_on='order')
view = View(
Item('order', label='New order (five space delimited numbers)'),
buttons=[CancelButton, Action(name='OK', enabled_when='is_ok')])
def _get_index(self):
try:
return [int(i) for i in self.order.split()]
except ValueError:
return []
def _get_is_ok(self):
return sorted(self.index) == [0, 1, 2, 3, 4]
class MarkerPoints(HasPrivateTraits):
"""Represent 5 marker points."""
points = Array(float, (5, 3))
can_save = Property(depends_on='points')
save_as = Button()
view = View(VGroup('points',
Item('save_as', enabled_when='can_save')))
@cached_property
def _get_can_save(self):
return np.any(self.points)
def _save_as_fired(self):
dlg = FileDialog(action="save as", wildcard=mrk_out_wildcard,
default_filename=self.name,
default_directory=self.dir)
dlg.open()
if dlg.return_code != OK:
return
path, ext = os.path.splitext(dlg.path)
if not path.endswith(out_ext) and len(ext) != 0:
ValueError("The extension '%s' is not supported." % ext)
path = path + out_ext
if os.path.exists(path):
            answer = confirm(None, "The file %r already exists. Should it "
                             "be replaced?" % path, "Overwrite File?")
if answer != YES:
return
self.save(path)
def save(self, path):
"""Save the marker points.
Parameters
----------
path : str
Path to the file to write. The kind of file to write is determined
based on the extension: '.txt' for tab separated text file,
'.pickled' for pickled file.
"""
_write_dig_points(path, self.points)
class MarkerPointSource(MarkerPoints): # noqa: D401
"""MarkerPoints subclass for source files."""
file = File(filter=mrk_wildcard, exists=True)
name = Property(Str, depends_on='file')
dir = Property(Str, depends_on='file')
use = List(list(range(5)), desc="Which points to use for the interpolated "
"marker.")
enabled = Property(Bool, depends_on=['points', 'use'])
clear = Button(desc="Clear the current marker data")
edit = Button(desc="Edit the marker coordinates manually")
switch_left_right = Button(
desc="Switch left and right marker points; this is intended to "
"correct for markers that were attached in the wrong order")
reorder = Button(desc="Change the order of the marker points")
view = mrk_view_basic
@cached_property
def _get_enabled(self):
return np.any(self.points)
@cached_property
def _get_dir(self):
if self.file:
return os.path.dirname(self.file)
@cached_property
def _get_name(self):
if self.file:
return os.path.basename(self.file)
@on_trait_change('file')
def load(self, fname):
if not fname:
self.reset_traits(['points'])
return
try:
pts = read_mrk(fname)
except Exception as err:
error(None, str(err), "Error Reading mrk")
self.reset_traits(['points'])
else:
self.points = pts
def _clear_fired(self):
self.reset_traits(['file', 'points', 'use'])
def _edit_fired(self):
self.edit_traits(view=mrk_view_edit)
def _reorder_fired(self):
dlg = ReorderDialog()
ui = dlg.edit_traits(kind='modal')
if not ui.result: # user pressed cancel
return
self.points = self.points[dlg.index]
def _switch_left_right_fired(self):
self.points = self.points[[1, 0, 2, 4, 3]]
class MarkerPointDest(MarkerPoints): # noqa: D401
"""MarkerPoints subclass that serves for derived points."""
src1 = Instance(MarkerPointSource)
src2 = Instance(MarkerPointSource)
name = Property(Str, depends_on='src1.name,src2.name')
dir = Property(Str, depends_on='src1.dir,src2.dir')
points = Property(ArrayOrNone(float, (5, 3)),
depends_on=['method', 'src1.points', 'src1.use',
'src2.points', 'src2.use'])
enabled = Property(Bool, depends_on=['points'])
method = Enum('Transform', 'Average', desc="Transform: estimate a rotation"
"/translation from mrk1 to mrk2; Average: use the average "
"of the mrk1 and mrk2 coordinates for each point.")
view = View(VGroup(Item('method', style='custom'),
Item('save_as', enabled_when='can_save',
show_label=False)))
@cached_property
def _get_dir(self):
return self.src1.dir
@cached_property
def _get_name(self):
n1 = self.src1.name
n2 = self.src2.name
if not n1:
if n2:
return n2
else:
return ''
elif not n2:
return n1
if n1 == n2:
return n1
i = 0
l1 = len(n1) - 1
        l2 = len(n2) - 1
while n1[i] == n2[i]:
if i == l1:
return n1
elif i == l2:
return n2
i += 1
return n1[:i]
@cached_property
def _get_enabled(self):
return np.any(self.points)
@cached_property
def _get_points(self):
# in case only one or no source is enabled
if not (self.src1 and self.src1.enabled):
if (self.src2 and self.src2.enabled):
return self.src2.points
else:
return np.zeros((5, 3))
elif not (self.src2 and self.src2.enabled):
return self.src1.points
# Average method
if self.method == 'Average':
if len(np.union1d(self.src1.use, self.src2.use)) < 5:
error(None, "Need at least one source for each point.",
"Marker Average Error")
return np.zeros((5, 3))
pts = (self.src1.points + self.src2.points) / 2.
for i in np.setdiff1d(self.src1.use, self.src2.use):
pts[i] = self.src1.points[i]
for i in np.setdiff1d(self.src2.use, self.src1.use):
pts[i] = self.src2.points[i]
return pts
# Transform method
idx = np.intersect1d(np.array(self.src1.use),
np.array(self.src2.use), assume_unique=True)
if len(idx) < 3:
error(None, "Need at least three shared points for trans"
"formation.", "Marker Interpolation Error")
return np.zeros((5, 3))
src_pts = self.src1.points[idx]
tgt_pts = self.src2.points[idx]
est = fit_matched_points(src_pts, tgt_pts, out='params')
rot = np.array(est[:3]) / 2.
tra = np.array(est[3:]) / 2.
if len(self.src1.use) == 5:
trans = np.dot(translation(*tra), rotation(*rot))
pts = apply_trans(trans, self.src1.points)
elif len(self.src2.use) == 5:
trans = np.dot(translation(* -tra), rotation(* -rot))
pts = apply_trans(trans, self.src2.points)
else:
trans1 = np.dot(translation(*tra), rotation(*rot))
pts = apply_trans(trans1, self.src1.points)
trans2 = np.dot(translation(* -tra), rotation(* -rot))
for i in np.setdiff1d(self.src2.use, self.src1.use):
pts[i] = apply_trans(trans2, self.src2.points[i])
return pts
class CombineMarkersModel(HasPrivateTraits):
"""Combine markers model."""
mrk1_file = Instance(File)
mrk2_file = Instance(File)
mrk1 = Instance(MarkerPointSource)
mrk2 = Instance(MarkerPointSource)
mrk3 = Instance(MarkerPointDest)
clear = Button(desc="Clear the current marker data")
# stats
distance = Property(Str, depends_on=['mrk1.points', 'mrk2.points'])
def _clear_fired(self):
self.mrk1.clear = True
self.mrk2.clear = True
self.mrk3.reset_traits(['method'])
def _mrk1_default(self):
return MarkerPointSource()
def _mrk1_file_default(self):
return self.mrk1.trait('file')
def _mrk2_default(self):
return MarkerPointSource()
def _mrk2_file_default(self):
return self.mrk2.trait('file')
def _mrk3_default(self):
return MarkerPointDest(src1=self.mrk1, src2=self.mrk2)
@cached_property
def _get_distance(self):
if (self.mrk1 is None or self.mrk2 is None or
(not np.any(self.mrk1.points)) or
(not np.any(self.mrk2.points))):
return ""
ds = np.sqrt(np.sum((self.mrk1.points - self.mrk2.points) ** 2, 1))
desc = '\t'.join('%.1f mm' % (d * 1000) for d in ds)
return desc
class CombineMarkersPanel(HasTraits): # noqa: D401
"""Has two marker points sources and interpolates to a third one."""
model = Instance(CombineMarkersModel, ())
# model references for UI
mrk1 = Instance(MarkerPointSource)
mrk2 = Instance(MarkerPointSource)
mrk3 = Instance(MarkerPointDest)
distance = Str
# Visualization
scene = Instance(MlabSceneModel)
scale = Float(5e-3)
mrk1_obj = Instance(PointObject)
mrk2_obj = Instance(PointObject)
mrk3_obj = Instance(PointObject)
trans = Array()
view = View(VGroup(VGroup(Item('mrk1', style='custom'),
Item('mrk1_obj', style='custom'),
show_labels=False,
label="Source Marker 1", show_border=True),
VGroup(Item('mrk2', style='custom'),
Item('mrk2_obj', style='custom'),
show_labels=False,
label="Source Marker 2", show_border=True),
VGroup(Item('distance', style='readonly'),
label='Stats', show_border=True),
VGroup(Item('mrk3', style='custom'),
Item('mrk3_obj', style='custom'),
show_labels=False,
label="New Marker", show_border=True),
))
def _mrk1_default(self):
return self.model.mrk1
def _mrk2_default(self):
return self.model.mrk2
def _mrk3_default(self):
return self.model.mrk3
def __init__(self, *args, **kwargs): # noqa: D102
super(CombineMarkersPanel, self).__init__(*args, **kwargs)
self.model.sync_trait('distance', self, 'distance', mutual=False)
self.mrk1_obj = PointObject(scene=self.scene,
color=(0.608, 0.216, 0.216),
point_scale=self.scale)
self.model.mrk1.sync_trait(
'enabled', self.mrk1_obj, 'visible', mutual=False)
self.mrk2_obj = PointObject(scene=self.scene,
color=(0.216, 0.608, 0.216),
point_scale=self.scale)
self.model.mrk2.sync_trait(
'enabled', self.mrk2_obj, 'visible', mutual=False)
self.mrk3_obj = PointObject(scene=self.scene,
color=(0.588, 0.784, 1.),
point_scale=self.scale)
self.model.mrk3.sync_trait(
'enabled', self.mrk3_obj, 'visible', mutual=False)
@on_trait_change('model:mrk1:points,trans')
def _update_mrk1(self):
if self.mrk1_obj is not None:
self.mrk1_obj.points = apply_trans(self.trans,
self.model.mrk1.points)
@on_trait_change('model:mrk2:points,trans')
def _update_mrk2(self):
if self.mrk2_obj is not None:
self.mrk2_obj.points = apply_trans(self.trans,
self.model.mrk2.points)
@on_trait_change('model:mrk3:points,trans')
def _update_mrk3(self):
if self.mrk3_obj is not None:
self.mrk3_obj.points = apply_trans(self.trans,
self.model.mrk3.points)
|