from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import itertools
import numpy as np
from six.moves import zip
def ParseCsvFile(fp):
"""Parse dstat results file in csv format.
Args:
fp: file. Open file object containing dstat results in csv format.
Returns:
A tuple of list of dstat labels and ndarray containing parsed data.
"""
reader = csv.reader(fp)
headers = list(itertools.islice(reader, 5))
if len(headers) != 5:
raise ValueError(
'Expected exactly 5 header lines, got {}\n{}'.format(
len(headers), headers))
if 'Dstat' not in headers[0][0]:
raise ValueError(
'Expected first header cell to contain "Dstat"\n{}'.format(
headers[0]))
if 'Host:' not in headers[2][0]:
raise ValueError(('Expected first cell in third line to be '
'"Host:"\n{}').format(headers[2]))
categories = next(reader)
# Categories are not repeated; copy category name across columns in the
# same category
for i, category in enumerate(categories):
if not categories[i]:
categories[i] = categories[i - 1]
labels = next(reader)
if len(labels) != len(categories):
raise ValueError((
'Number of categories ({}) does not match number of '
'labels ({})\nCategories: {}\nLabels:{}').format(
len(categories), len(labels), categories, labels))
# Generate new column names
labels = ['%s__%s' % x for x in zip(labels, categories)]
data = []
for i, row in enumerate(reader):
# Remove the trailing comma
if len(row) == len(labels) + 1:
if row[-1]:
raise ValueError(('Expected the last element of row {0} to be empty,'
' found {1}').format(row, row[-1]))
row = row[:-1]
if len(labels) != len(row):
raise ValueError(('Number of labels ({}) does not match number of '
'columns ({}) in row {}:\n{}').format(
len(labels), len(row), i, row))
data.append(row)
return labels, np.array(data, dtype=float)
def _Install(vm):
"""Installs the dstat package on the VM."""
vm.InstallPackages('dstat')
def YumInstall(vm):
"""Installs the dstat package on the VM."""
_Install(vm)
def AptInstall(vm):
"""Installs the dstat package on the VM."""
_Install(vm)
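# --- Illustrative usage sketch (not part of the original module) ---
# Assumes a dstat CSV results file named 'dstat-results.csv' and a
# 'usr__total cpu usage' column; both names are examples only.
#
#   with open('dstat-results.csv') as results:
#       labels, data = ParseCsvFile(results)
#       cpu_idx = labels.index('usr__total cpu usage')
#       print('mean user CPU: %.2f' % data[:, cpu_idx].mean())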
|
import asyncio
import logging
import time
import pytest
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.config import async_process_ha_core_config
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.network import NoURLAvailableError
from tests.async_mock import patch
from tests.common import MockConfigEntry, mock_platform
TEST_DOMAIN = "oauth2_test"
CLIENT_SECRET = "5678"
CLIENT_ID = "1234"
REFRESH_TOKEN = "mock-refresh-token"
ACCESS_TOKEN_1 = "mock-access-token-1"
ACCESS_TOKEN_2 = "mock-access-token-2"
AUTHORIZE_URL = "https://example.como/auth/authorize"
TOKEN_URL = "https://example.como/auth/token"
@pytest.fixture
async def local_impl(hass):
"""Local implementation."""
assert await setup.async_setup_component(hass, "http", {})
return config_entry_oauth2_flow.LocalOAuth2Implementation(
hass, TEST_DOMAIN, CLIENT_ID, CLIENT_SECRET, AUTHORIZE_URL, TOKEN_URL
)
@pytest.fixture
def flow_handler(hass):
"""Return a registered config flow."""
mock_platform(hass, f"{TEST_DOMAIN}.config_flow")
class TestFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler):
"""Test flow handler."""
DOMAIN = TEST_DOMAIN
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
@property
def extra_authorize_data(self) -> dict:
"""Extra data that needs to be appended to the authorize url."""
return {"scope": "read write"}
with patch.dict(config_entries.HANDLERS, {TEST_DOMAIN: TestFlowHandler}):
yield TestFlowHandler
class MockOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implementation):
"""Mock implementation for testing."""
@property
def name(self) -> str:
"""Name of the implementation."""
return "Mock"
@property
def domain(self) -> str:
"""Domain that is providing the implementation."""
return "test"
@property
def extra_authorize_data(self) -> dict:
"""Extra data that needs to be appended to the authorize url."""
return {"extra": "data"}
async def async_generate_authorize_url(self, flow_id: str) -> str:
"""Generate a url for the user to authorize."""
return "http://example.com/auth"
async def async_resolve_external_data(self, external_data) -> dict:
"""Resolve external data to tokens."""
return external_data
async def _async_refresh_token(self, token: dict) -> dict:
"""Refresh a token."""
raise NotImplementedError()
def test_inherit_enforces_domain_set():
"""Test we enforce setting DOMAIN."""
class TestFlowHandler(config_entry_oauth2_flow.AbstractOAuth2FlowHandler):
"""Test flow handler."""
@property
def logger(self) -> logging.Logger:
"""Return logger."""
return logging.getLogger(__name__)
with patch.dict(config_entries.HANDLERS, {TEST_DOMAIN: TestFlowHandler}):
with pytest.raises(TypeError):
TestFlowHandler()
async def test_abort_if_no_implementation(hass, flow_handler):
"""Check flow abort when no implementations."""
flow = flow_handler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "missing_configuration"
async def test_abort_if_authorization_timeout(hass, flow_handler, local_impl):
"""Check timeout generating authorization url."""
flow_handler.async_register_implementation(hass, local_impl)
flow = flow_handler()
flow.hass = hass
with patch.object(
local_impl, "async_generate_authorize_url", side_effect=asyncio.TimeoutError
):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "authorize_url_timeout"
async def test_abort_if_no_url_available(hass, flow_handler, local_impl):
"""Check no_url_available generating authorization url."""
flow_handler.async_register_implementation(hass, local_impl)
flow = flow_handler()
flow.hass = hass
with patch.object(
local_impl, "async_generate_authorize_url", side_effect=NoURLAvailableError
):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_url_available"
async def test_abort_if_oauth_error(
hass, flow_handler, local_impl, aiohttp_client, aioclient_mock, current_request
):
"""Check bad oauth token."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
flow_handler.async_register_implementation(hass, local_impl)
config_entry_oauth2_flow.async_register_implementation(
hass, TEST_DOMAIN, MockOAuth2Implementation()
)
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "pick_implementation"
# Pick implementation
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"implementation": TEST_DOMAIN}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
f"{AUTHORIZE_URL}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}&scope=read+write"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
TOKEN_URL,
json={
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"type": "bearer",
"expires_in": "badnumber",
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "oauth_error"
async def test_step_discovery(hass, flow_handler, local_impl):
"""Check flow triggers from discovery."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
flow_handler.async_register_implementation(hass, local_impl)
config_entry_oauth2_flow.async_register_implementation(
hass, TEST_DOMAIN, MockOAuth2Implementation()
)
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "pick_implementation"
async def test_abort_discovered_multiple(hass, flow_handler, local_impl):
"""Test if aborts when discovered multiple times."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
flow_handler.async_register_implementation(hass, local_impl)
config_entry_oauth2_flow.async_register_implementation(
hass, TEST_DOMAIN, MockOAuth2Implementation()
)
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_SSDP}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "pick_implementation"
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_ZEROCONF}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_in_progress"
async def test_abort_discovered_existing_entries(hass, flow_handler, local_impl):
"""Test if abort discovery when entries exists."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
flow_handler.async_register_implementation(hass, local_impl)
config_entry_oauth2_flow.async_register_implementation(
hass, TEST_DOMAIN, MockOAuth2Implementation()
)
entry = MockConfigEntry(
domain=TEST_DOMAIN,
data={},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_SSDP}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_flow(
hass, flow_handler, local_impl, aiohttp_client, aioclient_mock, current_request
):
"""Check full flow."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
flow_handler.async_register_implementation(hass, local_impl)
config_entry_oauth2_flow.async_register_implementation(
hass, TEST_DOMAIN, MockOAuth2Implementation()
)
result = await hass.config_entries.flow.async_init(
TEST_DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "pick_implementation"
# Pick implementation
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"implementation": TEST_DOMAIN}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
assert result["url"] == (
f"{AUTHORIZE_URL}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}&scope=read+write"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
TOKEN_URL,
json={
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"type": "bearer",
"expires_in": 60,
},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["data"]["auth_implementation"] == TEST_DOMAIN
result["data"]["token"].pop("expires_at")
assert result["data"]["token"] == {
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"type": "bearer",
"expires_in": 60,
}
entry = hass.config_entries.async_entries(TEST_DOMAIN)[0]
assert (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
is local_impl
)
async def test_local_refresh_token(hass, local_impl, aioclient_mock):
"""Test we can refresh token."""
aioclient_mock.post(
TOKEN_URL, json={"access_token": ACCESS_TOKEN_2, "expires_in": 100}
)
new_tokens = await local_impl.async_refresh_token(
{
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"type": "bearer",
"expires_in": 60,
}
)
new_tokens.pop("expires_at")
assert new_tokens == {
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_2,
"type": "bearer",
"expires_in": 100,
}
assert len(aioclient_mock.mock_calls) == 1
assert aioclient_mock.mock_calls[0][2] == {
"client_id": CLIENT_ID,
"client_secret": CLIENT_SECRET,
"grant_type": "refresh_token",
"refresh_token": REFRESH_TOKEN,
}
async def test_oauth_session(hass, flow_handler, local_impl, aioclient_mock):
"""Test the OAuth2 session helper."""
flow_handler.async_register_implementation(hass, local_impl)
aioclient_mock.post(
TOKEN_URL, json={"access_token": ACCESS_TOKEN_2, "expires_in": 100}
)
aioclient_mock.post("https://example.com", status=201)
config_entry = MockConfigEntry(
domain=TEST_DOMAIN,
data={
"auth_implementation": TEST_DOMAIN,
"token": {
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"expires_in": 10,
"expires_at": 0, # Forces a refresh,
"token_type": "bearer",
"random_other_data": "should_stay",
},
},
)
now = time.time()
session = config_entry_oauth2_flow.OAuth2Session(hass, config_entry, local_impl)
resp = await session.async_request("post", "https://example.com")
assert resp.status == 201
# Refresh token, make request
assert len(aioclient_mock.mock_calls) == 2
assert (
aioclient_mock.mock_calls[1][3]["authorization"] == f"Bearer {ACCESS_TOKEN_2}"
)
assert config_entry.data["token"]["refresh_token"] == REFRESH_TOKEN
assert config_entry.data["token"]["access_token"] == ACCESS_TOKEN_2
assert config_entry.data["token"]["expires_in"] == 100
assert config_entry.data["token"]["random_other_data"] == "should_stay"
assert round(config_entry.data["token"]["expires_at"] - now) == 100
async def test_oauth_session_with_clock_slightly_out_of_sync(
hass, flow_handler, local_impl, aioclient_mock
):
"""Test the OAuth2 session helper when the remote clock is slightly out of sync."""
flow_handler.async_register_implementation(hass, local_impl)
aioclient_mock.post(
TOKEN_URL, json={"access_token": ACCESS_TOKEN_2, "expires_in": 19}
)
aioclient_mock.post("https://example.com", status=201)
config_entry = MockConfigEntry(
domain=TEST_DOMAIN,
data={
"auth_implementation": TEST_DOMAIN,
"token": {
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"expires_in": 19,
"expires_at": time.time() + 19, # Forces a refresh,
"token_type": "bearer",
"random_other_data": "should_stay",
},
},
)
now = time.time()
session = config_entry_oauth2_flow.OAuth2Session(hass, config_entry, local_impl)
resp = await session.async_request("post", "https://example.com")
assert resp.status == 201
# Refresh token, make request
assert len(aioclient_mock.mock_calls) == 2
assert (
aioclient_mock.mock_calls[1][3]["authorization"] == f"Bearer {ACCESS_TOKEN_2}"
)
assert config_entry.data["token"]["refresh_token"] == REFRESH_TOKEN
assert config_entry.data["token"]["access_token"] == ACCESS_TOKEN_2
assert config_entry.data["token"]["expires_in"] == 19
assert config_entry.data["token"]["random_other_data"] == "should_stay"
assert round(config_entry.data["token"]["expires_at"] - now) == 19
async def test_oauth_session_no_token_refresh_needed(
hass, flow_handler, local_impl, aioclient_mock
):
"""Test the OAuth2 session helper when no refresh is needed."""
flow_handler.async_register_implementation(hass, local_impl)
aioclient_mock.post("https://example.com", status=201)
config_entry = MockConfigEntry(
domain=TEST_DOMAIN,
data={
"auth_implementation": TEST_DOMAIN,
"token": {
"refresh_token": REFRESH_TOKEN,
"access_token": ACCESS_TOKEN_1,
"expires_in": 500,
"expires_at": time.time() + 500, # Should NOT refresh
"token_type": "bearer",
"random_other_data": "should_stay",
},
},
)
now = time.time()
session = config_entry_oauth2_flow.OAuth2Session(hass, config_entry, local_impl)
resp = await session.async_request("post", "https://example.com")
assert resp.status == 201
# make request (no refresh)
assert len(aioclient_mock.mock_calls) == 1
assert (
aioclient_mock.mock_calls[0][3]["authorization"] == f"Bearer {ACCESS_TOKEN_1}"
)
assert config_entry.data["token"]["refresh_token"] == REFRESH_TOKEN
assert config_entry.data["token"]["access_token"] == ACCESS_TOKEN_1
assert config_entry.data["token"]["expires_in"] == 500
assert config_entry.data["token"]["random_other_data"] == "should_stay"
assert round(config_entry.data["token"]["expires_at"] - now) == 500
async def test_implementation_provider(hass, local_impl):
"""Test providing an implementation provider."""
assert (
await config_entry_oauth2_flow.async_get_implementations(hass, TEST_DOMAIN)
== {}
)
mock_domain_with_impl = "some_domain"
config_entry_oauth2_flow.async_register_implementation(
hass, mock_domain_with_impl, local_impl
)
assert await config_entry_oauth2_flow.async_get_implementations(
hass, mock_domain_with_impl
) == {TEST_DOMAIN: local_impl}
provider_source = {}
async def async_provide_implementation(hass, domain):
"""Mock implementation provider."""
return provider_source.get(domain)
config_entry_oauth2_flow.async_add_implementation_provider(
hass, "cloud", async_provide_implementation
)
assert await config_entry_oauth2_flow.async_get_implementations(
hass, mock_domain_with_impl
) == {TEST_DOMAIN: local_impl}
provider_source[
mock_domain_with_impl
] = config_entry_oauth2_flow.LocalOAuth2Implementation(
hass, "cloud", CLIENT_ID, CLIENT_SECRET, AUTHORIZE_URL, TOKEN_URL
)
assert await config_entry_oauth2_flow.async_get_implementations(
hass, mock_domain_with_impl
) == {TEST_DOMAIN: local_impl, "cloud": provider_source[mock_domain_with_impl]}
|
import asyncio
from datetime import timedelta
import logging
from haffmpeg.core import HAFFmpeg
from pyhap.camera import (
VIDEO_CODEC_PARAM_LEVEL_TYPES,
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES,
Camera as PyhapCamera,
)
from pyhap.const import CATEGORY_CAMERA
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import STATE_ON
from homeassistant.core import callback
from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_time_interval,
)
from homeassistant.util import get_local_ip
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_MOTION_DETECTED,
CHAR_MUTE,
CHAR_PROGRAMMABLE_SWITCH_EVENT,
CONF_AUDIO_CODEC,
CONF_AUDIO_MAP,
CONF_AUDIO_PACKET_SIZE,
CONF_LINKED_DOORBELL_SENSOR,
CONF_LINKED_MOTION_SENSOR,
CONF_MAX_FPS,
CONF_MAX_HEIGHT,
CONF_MAX_WIDTH,
CONF_STREAM_ADDRESS,
CONF_STREAM_COUNT,
CONF_STREAM_SOURCE,
CONF_SUPPORT_AUDIO,
CONF_VIDEO_CODEC,
CONF_VIDEO_MAP,
CONF_VIDEO_PACKET_SIZE,
DEFAULT_AUDIO_CODEC,
DEFAULT_AUDIO_MAP,
DEFAULT_AUDIO_PACKET_SIZE,
DEFAULT_MAX_FPS,
DEFAULT_MAX_HEIGHT,
DEFAULT_MAX_WIDTH,
DEFAULT_STREAM_COUNT,
DEFAULT_SUPPORT_AUDIO,
DEFAULT_VIDEO_CODEC,
DEFAULT_VIDEO_MAP,
DEFAULT_VIDEO_PACKET_SIZE,
SERV_DOORBELL,
SERV_MOTION_SENSOR,
SERV_SPEAKER,
SERV_STATELESS_PROGRAMMABLE_SWITCH,
)
from .img_util import scale_jpeg_camera_image
from .util import pid_is_alive
_LOGGER = logging.getLogger(__name__)
DOORBELL_SINGLE_PRESS = 0
DOORBELL_DOUBLE_PRESS = 1
DOORBELL_LONG_PRESS = 2
VIDEO_OUTPUT = (
"-map {v_map} -an "
"-c:v {v_codec} "
"{v_profile}"
"-tune zerolatency -pix_fmt yuv420p "
"-r {fps} "
"-b:v {v_max_bitrate}k -bufsize {v_bufsize}k -maxrate {v_max_bitrate}k "
"-payload_type 99 "
"-ssrc {v_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {v_srtp_key} "
"srtp://{address}:{v_port}?rtcpport={v_port}&"
"localrtcpport={v_port}&pkt_size={v_pkt_size}"
)
AUDIO_OUTPUT = (
"-map {a_map} -vn "
"-c:a {a_encoder} "
"{a_application}"
"-ac 1 -ar {a_sample_rate}k "
"-b:a {a_max_bitrate}k -bufsize {a_bufsize}k "
"-payload_type 110 "
"-ssrc {a_ssrc} -f rtp "
"-srtp_out_suite AES_CM_128_HMAC_SHA1_80 -srtp_out_params {a_srtp_key} "
"srtp://{address}:{a_port}?rtcpport={a_port}&"
"localrtcpport={a_port}&pkt_size={a_pkt_size}"
)
SLOW_RESOLUTIONS = [
(320, 180, 15),
(320, 240, 15),
]
RESOLUTIONS = [
(320, 180),
(320, 240),
(480, 270),
(480, 360),
(640, 360),
(640, 480),
(1024, 576),
(1024, 768),
(1280, 720),
(1280, 960),
(1920, 1080),
(1600, 1200),
]
VIDEO_PROFILE_NAMES = ["baseline", "main", "high"]
FFMPEG_WATCH_INTERVAL = timedelta(seconds=5)
FFMPEG_WATCHER = "ffmpeg_watcher"
FFMPEG_PID = "ffmpeg_pid"
SESSION_ID = "session_id"
CONFIG_DEFAULTS = {
CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO,
CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH,
CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT,
CONF_MAX_FPS: DEFAULT_MAX_FPS,
CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC,
CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP,
CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP,
CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC,
CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE,
CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE,
CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT,
}
@TYPES.register("Camera")
class Camera(HomeAccessory, PyhapCamera):
"""Generate a Camera accessory."""
def __init__(self, hass, driver, name, entity_id, aid, config):
"""Initialize a Camera accessory object."""
self._ffmpeg = hass.data[DATA_FFMPEG]
for config_key in CONFIG_DEFAULTS:
if config_key not in config:
config[config_key] = CONFIG_DEFAULTS[config_key]
max_fps = config[CONF_MAX_FPS]
max_width = config[CONF_MAX_WIDTH]
max_height = config[CONF_MAX_HEIGHT]
resolutions = [
(w, h, fps)
for w, h, fps in SLOW_RESOLUTIONS
if w <= max_width and h <= max_height and fps < max_fps
] + [
(w, h, max_fps)
for w, h in RESOLUTIONS
if w <= max_width and h <= max_height
]
video_options = {
"codec": {
"profiles": [
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["BASELINE"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["MAIN"],
VIDEO_CODEC_PARAM_PROFILE_ID_TYPES["HIGH"],
],
"levels": [
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_1"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE3_2"],
VIDEO_CODEC_PARAM_LEVEL_TYPES["TYPE4_0"],
],
},
"resolutions": resolutions,
}
audio_options = {
"codecs": [
{"type": "OPUS", "samplerate": 24},
{"type": "OPUS", "samplerate": 16},
]
}
stream_address = config.get(CONF_STREAM_ADDRESS, get_local_ip())
options = {
"video": video_options,
"audio": audio_options,
"address": stream_address,
"srtp": True,
"stream_count": config[CONF_STREAM_COUNT],
}
super().__init__(
hass,
driver,
name,
entity_id,
aid,
config,
category=CATEGORY_CAMERA,
options=options,
)
self._char_motion_detected = None
self.linked_motion_sensor = self.config.get(CONF_LINKED_MOTION_SENSOR)
if self.linked_motion_sensor:
state = self.hass.states.get(self.linked_motion_sensor)
if state:
serv_motion = self.add_preload_service(SERV_MOTION_SENSOR)
self._char_motion_detected = serv_motion.configure_char(
CHAR_MOTION_DETECTED, value=False
)
self._async_update_motion_state(state)
self._char_doorbell_detected = None
self._char_doorbell_detected_switch = None
self.linked_doorbell_sensor = self.config.get(CONF_LINKED_DOORBELL_SENSOR)
if self.linked_doorbell_sensor:
state = self.hass.states.get(self.linked_doorbell_sensor)
if state:
serv_doorbell = self.add_preload_service(SERV_DOORBELL)
self.set_primary_service(serv_doorbell)
self._char_doorbell_detected = serv_doorbell.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
)
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH
)
self._char_doorbell_detected_switch = (
serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"SinglePress": DOORBELL_SINGLE_PRESS},
)
)
serv_speaker = self.add_preload_service(SERV_SPEAKER)
serv_speaker.configure_char(CHAR_MUTE, value=0)
self._async_update_doorbell_state(state)
async def run_handler(self):
"""Handle accessory driver started event.
Run inside the Home Assistant event loop.
"""
if self._char_motion_detected:
async_track_state_change_event(
self.hass,
[self.linked_motion_sensor],
self._async_update_motion_state_event,
)
if self._char_doorbell_detected:
async_track_state_change_event(
self.hass,
[self.linked_doorbell_sensor],
self._async_update_doorbell_state_event,
)
await super().run_handler()
@callback
def _async_update_motion_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_motion_state(event.data.get("new_state"))
@callback
def _async_update_motion_state(self, new_state):
"""Handle link motion sensor state change to update HomeKit value."""
if not new_state:
return
detected = new_state.state == STATE_ON
if self._char_motion_detected.value == detected:
return
self._char_motion_detected.set_value(detected)
_LOGGER.debug(
"%s: Set linked motion %s sensor to %d",
self.entity_id,
self.linked_motion_sensor,
detected,
)
@callback
def _async_update_doorbell_state_event(self, event):
"""Handle state change event listener callback."""
self._async_update_doorbell_state(event.data.get("new_state"))
@callback
def _async_update_doorbell_state(self, new_state):
"""Handle link doorbell sensor state change to update HomeKit value."""
if not new_state:
return
if new_state.state == STATE_ON:
self._char_doorbell_detected.set_value(DOORBELL_SINGLE_PRESS)
self._char_doorbell_detected_switch.set_value(DOORBELL_SINGLE_PRESS)
_LOGGER.debug(
"%s: Set linked doorbell %s sensor to %d",
self.entity_id,
self.linked_doorbell_sensor,
DOORBELL_SINGLE_PRESS,
)
@callback
def async_update_state(self, new_state):
"""Handle state change to update HomeKit value."""
pass # pylint: disable=unnecessary-pass
async def _async_get_stream_source(self):
"""Find the camera stream source url."""
stream_source = self.config.get(CONF_STREAM_SOURCE)
if stream_source:
return stream_source
try:
stream_source = await self.hass.components.camera.async_get_stream_source(
self.entity_id
)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Failed to get stream source - this could be a transient error or your camera might not be compatible with HomeKit yet"
)
if stream_source:
self.config[CONF_STREAM_SOURCE] = stream_source
return stream_source
async def start_stream(self, session_info, stream_config):
"""Start a new stream with the given configuration."""
_LOGGER.debug(
"[%s] Starting stream with the following parameters: %s",
session_info["id"],
stream_config,
)
input_source = await self._async_get_stream_source()
if not input_source:
_LOGGER.error("Camera has no stream source")
return False
if "-i " not in input_source:
input_source = "-i " + input_source
video_profile = ""
if self.config[CONF_VIDEO_CODEC] != "copy":
video_profile = (
"-profile:v "
+ VIDEO_PROFILE_NAMES[
int.from_bytes(stream_config["v_profile_id"], byteorder="big")
]
+ " "
)
audio_application = ""
if self.config[CONF_AUDIO_CODEC] == "libopus":
audio_application = "-application lowdelay "
output_vars = stream_config.copy()
output_vars.update(
{
"v_profile": video_profile,
"v_bufsize": stream_config["v_max_bitrate"] * 4,
"v_map": self.config[CONF_VIDEO_MAP],
"v_pkt_size": self.config[CONF_VIDEO_PACKET_SIZE],
"v_codec": self.config[CONF_VIDEO_CODEC],
"a_bufsize": stream_config["a_max_bitrate"] * 4,
"a_map": self.config[CONF_AUDIO_MAP],
"a_pkt_size": self.config[CONF_AUDIO_PACKET_SIZE],
"a_encoder": self.config[CONF_AUDIO_CODEC],
"a_application": audio_application,
}
)
output = VIDEO_OUTPUT.format(**output_vars)
if self.config[CONF_SUPPORT_AUDIO]:
output = output + " " + AUDIO_OUTPUT.format(**output_vars)
_LOGGER.debug("FFmpeg output settings: %s", output)
stream = HAFFmpeg(self._ffmpeg.binary, loop=self.driver.loop)
opened = await stream.open(
cmd=[], input_source=input_source, output=output, stdout_pipe=False
)
if not opened:
_LOGGER.error("Failed to open ffmpeg stream")
return False
_LOGGER.info(
"[%s] Started stream process - PID %d",
session_info["id"],
stream.process.pid,
)
session_info["stream"] = stream
session_info[FFMPEG_PID] = stream.process.pid
async def watch_session(_):
await self._async_ffmpeg_watch(session_info["id"])
session_info[FFMPEG_WATCHER] = async_track_time_interval(
self.hass,
watch_session,
FFMPEG_WATCH_INTERVAL,
)
return await self._async_ffmpeg_watch(session_info["id"])
async def _async_ffmpeg_watch(self, session_id):
"""Check to make sure ffmpeg is still running and cleanup if not."""
ffmpeg_pid = self.sessions[session_id][FFMPEG_PID]
if pid_is_alive(ffmpeg_pid):
return True
_LOGGER.warning("Streaming process ended unexpectedly - PID %d", ffmpeg_pid)
self._async_stop_ffmpeg_watch(session_id)
self.set_streaming_available(self.sessions[session_id]["stream_idx"])
return False
@callback
def _async_stop_ffmpeg_watch(self, session_id):
"""Cleanup a streaming session after stopping."""
if FFMPEG_WATCHER not in self.sessions[session_id]:
return
self.sessions[session_id].pop(FFMPEG_WATCHER)()
async def stop_stream(self, session_info):
"""Stop the stream for the given ``session_id``."""
session_id = session_info["id"]
stream = session_info.get("stream")
if not stream:
_LOGGER.debug("No stream for session ID %s", session_id)
return
self._async_stop_ffmpeg_watch(session_id)
if not pid_is_alive(stream.process.pid):
_LOGGER.info("[%s] Stream already stopped", session_id)
return True
for shutdown_method in ["close", "kill"]:
_LOGGER.info("[%s] %s stream", session_id, shutdown_method)
try:
await getattr(stream, shutdown_method)()
return
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"[%s] Failed to %s stream", session_id, shutdown_method
)
async def reconfigure_stream(self, session_info, stream_config):
"""Reconfigure the stream so that it uses the given ``stream_config``."""
return True
def get_snapshot(self, image_size):
"""Return a jpeg of a snapshot from the camera."""
return scale_jpeg_camera_image(
asyncio.run_coroutine_threadsafe(
self.hass.components.camera.async_get_image(self.entity_id),
self.hass.loop,
).result(),
image_size["image-width"],
image_size["image-height"],
)
|
from functools import reduce
import operator
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Q
from rest_framework.filters import BaseFilterBackend
class CMSPagesFilterBackend(BaseFilterBackend):
"""
Use this backend to only show products assigned to the current page.
"""
cms_pages_fields = ['cms_pages']
def _get_filtered_queryset(self, current_page, queryset, cms_pages_fields):
filter_by_cms_page = (Q((field, current_page)) for field in cms_pages_fields)
return queryset.filter(reduce(operator.or_, filter_by_cms_page)).distinct()
def filter_queryset(self, request, queryset, view):
cms_pages_fields = getattr(view, 'cms_pages_fields', self.cms_pages_fields)
if not isinstance(cms_pages_fields, (list, tuple)):
msg = "`cms_pages_fields` must be a list or tuple of fields referring to djangoCMS pages."
raise ImproperlyConfigured(msg)
current_page = request.current_page
if current_page.publisher_is_draft:
current_page = current_page.publisher_public
return self._get_filtered_queryset(current_page, queryset, cms_pages_fields)
class RecursiveCMSPagesFilterBackend(CMSPagesFilterBackend):
"""
Use this backend to show products assigned to the current page or any of its descendants.
"""
def _get_filtered_queryset(self, current_page, queryset, cms_pages_fields):
pages = current_page.get_descendants(include_self=True)
filter_by_cms_page = (Q((field + "__in", pages)) for field in self.cms_pages_fields)
return queryset.filter(reduce(operator.or_, filter_by_cms_page)).distinct()
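# --- Illustrative usage sketch (not part of the original module) ---
# A hypothetical DRF view wiring up the recursive backend; `Product` and the
# `generics` import are assumptions for the example only.
#
#   from rest_framework import generics
#
#   class ProductListView(generics.ListAPIView):
#       queryset = Product.objects.all()
#       filter_backends = [RecursiveCMSPagesFilterBackend]
#       cms_pages_fields = ['cms_pages']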
|
from __future__ import print_function
import os
import sys
import zipfile
import argparse
def main(args):
ap = argparse.ArgumentParser()
ap.add_argument('-d', '--exdir', nargs='?', help='extract files into exdir')
ap.add_argument('-v', '--verbose', action='store_true', help='be more chatty')
ap.add_argument('-t', '--list', action='store_true', help='list the contents of an archive')
ap.add_argument('zipfile', help='zip file to be extracted')
ns = ap.parse_args(args)
if not os.path.isfile(ns.zipfile):
print("%s: No such file" % ns.zipfile)
else:
# PK magic marker check
with open(ns.zipfile, "rb") as f:
try:
pk_check = f.read(2)
except Exception:
pk_check = b''
if pk_check != b'PK':
print("%s: does not appear to be a zip file" % ns.zipfile)
sys.exit(1)
if ns.list:
location = ''
else:
if os.path.basename(ns.zipfile).lower().endswith('.zip'):
altpath = os.path.splitext(os.path.basename(ns.zipfile))[0]
else:
altpath = os.path.basename(ns.zipfile) + '_unzipped'
altpath = os.path.join(os.path.dirname(ns.zipfile), altpath)
location = ns.exdir or altpath
if (os.path.exists(location)) and not (os.path.isdir(location)):
print("%s: destination is not a directory" % location)
sys.exit(1)
elif not os.path.exists(location):
os.makedirs(location)
with open(ns.zipfile, 'rb') as zipfp:
try:
zipf = zipfile.ZipFile(zipfp)
# check for a leading directory common to all files and remove it
dirnames = [os.path.join(os.path.dirname(x), '') for x in zipf.namelist()]
common_dir = os.path.commonprefix(dirnames or ['/'])
# Check to make sure there aren't 2 or more sub directories with the same prefix
if not common_dir.endswith('/'):
common_dir = os.path.join(os.path.dirname(common_dir), '')
for name in zipf.namelist():
data = zipf.read(name)
fn = name
if common_dir:
if fn.startswith(common_dir):
fn = fn.split(common_dir, 1)[-1]
elif fn.startswith('/' + common_dir):
fn = fn.split('/' + common_dir, 1)[-1]
fn = fn.lstrip('/')
fn = os.path.join(location, fn)
dirf = os.path.dirname(fn)
if not os.path.exists(dirf) and not ns.list:
os.makedirs(dirf)
if fn.endswith('/'):
# A directory
if not os.path.exists(fn) and not ns.list:
os.makedirs(fn)
elif not ns.list:
fp = open(fn, 'wb')
try:
fp.write(data)
finally:
fp.close()
if ns.verbose or ns.list:
print(fn)
except Exception:
print("%s: zip file is corrupt" % ns.zipfile)
if __name__ == '__main__':
main(sys.argv[1:])
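# Example invocations (illustrative; the script name 'unzip.py' is an assumption):
#   python unzip.py archive.zip              # extract next to the archive
#   python unzip.py -d out -v archive.zip    # extract into ./out, verbose
#   python unzip.py -t archive.zip           # only list the contents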
|
from unittest import TestCase
from django.http import HttpRequest
from weblate.trans.debug import WeblateExceptionReporterFilter
class ReportFilterTest(TestCase):
def test_report_none(self):
reporter = WeblateExceptionReporterFilter()
result = reporter.get_post_parameters(None)
self.assertEqual(result, {})
def test_report_request(self):
reporter = WeblateExceptionReporterFilter()
request = HttpRequest()
reporter.get_post_parameters(request)
self.assertIn("WEBLATE_VERSION:Weblate", request.META)
def test_report_language(self):
reporter = WeblateExceptionReporterFilter()
request = HttpRequest()
request.session = {"django_language": "testlang"}
reporter.get_post_parameters(request)
self.assertIn("WEBLATE_LANGUAGE", request.META)
|
import os
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from docker import Client
except ImportError:
Client = None
from diamond.collector import Collector
from memory_docker import MemoryDockerCollector
dirname = os.path.dirname(__file__)
fixtures_path = os.path.join(dirname, 'fixtures/')
fixtures = []
for root, dirnames, filenames in os.walk(fixtures_path):
fixtures.append([root, dirnames, filenames])
docker_fixture = [
{u'Id': u'c3341726a9b4235a35b390c5f6f28e5a6869879a48da1d609db8f6bf4275bdc5',
u'Names': [u'/testcontainer']},
{u'Id': u'9c151939e20682b924d7299875e94a4aabbe946b30b407f89e276507432c625b',
u'Names': None}]
def run_only_if_docker_client_is_available(func):
try:
from docker import Client
except ImportError:
Client = None
pred = lambda: Client is not None
return run_only(func, pred)
class TestMemoryDockerCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MemoryDockerCollector', {
'interval': 10,
'byte_unit': 'megabyte',
'memory_path': fixtures_path,
})
self.collector = MemoryDockerCollector(config, None)
def test_import(self):
self.assertTrue(MemoryDockerCollector)
@run_only_if_docker_client_is_available
@patch('__builtin__.open')
@patch.object(Client, 'containers', Mock(return_value=[]))
@patch.object(Collector, 'publish')
def test_should_open_all_memory_stat(self, publish_mock, open_mock):
open_mock.side_effect = lambda x: StringIO('')
self.collector.collect()
open_mock.assert_any_call(
fixtures_path + 'lxc/testcontainer/memory.stat')
open_mock.assert_any_call(fixtures_path + 'lxc/memory.stat')
open_mock.assert_any_call(fixtures_path + 'memory.stat')
@run_only_if_docker_client_is_available
@patch('__builtin__.open')
@patch.object(Client, 'containers')
@patch.object(Collector, 'publish')
def test_should_get_containers(self, publish_mock, containers_mock,
open_mock):
containers_mock.return_value = []
open_mock.side_effect = lambda x: StringIO('')
self.collector.collect()
containers_mock.assert_any_call(all=True)
@run_only_if_docker_client_is_available
@patch.object(Collector, 'publish')
@patch.object(Client, 'containers',
Mock(return_value=docker_fixture))
def test_should_work_with_real_data(self, publish_mock):
self.collector.collect()
self.assertPublishedMany(publish_mock, {
'lxc.testcontainer.cache': 1,
'lxc.testcontainer.rss': 1,
'lxc.testcontainer.swap': 1,
'lxc.cache': 1,
'lxc.rss': 1,
'lxc.swap': 1,
'system.cache': 1,
'system.rss': 1,
'system.swap': 1,
'docker.testcontainer.cache': 1,
'docker.testcontainer.rss': 1,
'docker.testcontainer.swap': 1,
'docker.cache': 1,
'docker.rss': 1,
'docker.swap': 1,
})
if __name__ == "__main__":
unittest.main()
|
import json
import logging
from azure.servicebus.aio import Message, ServiceBusClient
from azure.servicebus.common.errors import (
MessageSendFailed,
ServiceBusConnectionError,
ServiceBusResourceNotFound,
)
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONTENT_TYPE_JSON
import homeassistant.helpers.config_validation as cv
CONF_CONNECTION_STRING = "connection_string"
CONF_QUEUE_NAME = "queue"
CONF_TOPIC_NAME = "topic"
ATTR_ASB_MESSAGE = "message"
ATTR_ASB_TITLE = "title"
ATTR_ASB_TARGET = "target"
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_QUEUE_NAME, CONF_TOPIC_NAME),
PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_CONNECTION_STRING): cv.string,
vol.Exclusive(
CONF_QUEUE_NAME, "output", "Can only send to a queue or a topic."
): cv.string,
vol.Exclusive(
CONF_TOPIC_NAME, "output", "Can only send to a queue or a topic."
): cv.string,
}
),
)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the notification service."""
connection_string = config[CONF_CONNECTION_STRING]
queue_name = config.get(CONF_QUEUE_NAME)
topic_name = config.get(CONF_TOPIC_NAME)
# Library can do synchronous IO when creating the clients.
# Passes in loop here, but can't run setup on the event loop.
servicebus = ServiceBusClient.from_connection_string(
connection_string, loop=hass.loop
)
try:
if queue_name:
client = servicebus.get_queue(queue_name)
else:
client = servicebus.get_topic(topic_name)
except (ServiceBusConnectionError, ServiceBusResourceNotFound) as err:
_LOGGER.error(
"Connection error while creating client for queue/topic '%s'. %s",
queue_name or topic_name,
err,
)
return None
return ServiceBusNotificationService(client)
class ServiceBusNotificationService(BaseNotificationService):
"""Implement the notification service for the service bus service."""
def __init__(self, client):
"""Initialize the service."""
self._client = client
async def async_send_message(self, message, **kwargs):
"""Send a message."""
dto = {ATTR_ASB_MESSAGE: message}
if ATTR_TITLE in kwargs:
dto[ATTR_ASB_TITLE] = kwargs[ATTR_TITLE]
if ATTR_TARGET in kwargs:
dto[ATTR_ASB_TARGET] = kwargs[ATTR_TARGET]
data = kwargs.get(ATTR_DATA)
if data:
dto.update(data)
queue_message = Message(json.dumps(dto))
queue_message.properties.content_type = CONTENT_TYPE_JSON
try:
await self._client.send(queue_message)
except MessageSendFailed as err:
_LOGGER.error(
"Could not send service bus notification to %s. %s",
self._client.name,
err,
)
|
from typing import Optional, Union
from homeassistant.core import callback
from .base import ONVIFBaseEntity
from .const import DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a ONVIF binary sensor."""
device = hass.data[DOMAIN][config_entry.unique_id]
entities = {
event.uid: ONVIFSensor(event.uid, device)
for event in device.events.get_platform("sensor")
}
async_add_entities(entities.values())
@callback
def async_check_entities():
"""Check if we have added an entity for the event."""
new_entities = []
for event in device.events.get_platform("sensor"):
if event.uid not in entities:
entities[event.uid] = ONVIFSensor(event.uid, device)
new_entities.append(entities[event.uid])
async_add_entities(new_entities)
device.events.async_add_listener(async_check_entities)
return True
class ONVIFSensor(ONVIFBaseEntity):
"""Representation of a ONVIF sensor event."""
def __init__(self, uid, device):
"""Initialize the ONVIF binary sensor."""
self.uid = uid
super().__init__(device)
@property
def state(self) -> Union[None, str, int, float]:
"""Return the state of the entity."""
return self.device.events.get_uid(self.uid).value
@property
def name(self):
"""Return the name of the event."""
return self.device.events.get_uid(self.uid).name
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device, from component DEVICE_CLASSES."""
return self.device.events.get_uid(self.uid).device_class
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return the unit of measurement of this entity, if any."""
return self.device.events.get_uid(self.uid).unit_of_measurement
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self.uid
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self.device.events.get_uid(self.uid).entity_enabled
@property
def should_poll(self) -> bool:
"""Return True if entity has to be polled for state.
False if entity pushes its state to HA.
"""
return False
async def async_added_to_hass(self):
"""Connect to dispatcher listening for entity data notifications."""
self.async_on_remove(
self.device.events.async_add_listener(self.async_write_ha_state)
)
|
import dbus as _dbus
from openrazer.client.devices import RazerDevice as __RazerDevice
from openrazer.client.macro import RazerMacro as _RazerMacro
from openrazer.client import constants as _c
class RazerMouse(__RazerDevice):
_MACRO_CLASS = _RazerMacro
def __init__(self, serial, vid_pid=None, daemon_dbus=None):
super(RazerMouse, self).__init__(serial, vid_pid=vid_pid, daemon_dbus=daemon_dbus)
# Capabilities
self._capabilities['poll_rate'] = self._has_feature('razer.device.misc', ('getPollRate', 'setPollRate'))
self._capabilities['dpi'] = self._has_feature('razer.device.dpi', ('getDPI', 'setDPI'))
self._capabilities['dpi_stages'] = self._has_feature('razer.device.dpi', ('getDPIStages', 'setDPIStages'))
self._capabilities['available_dpi'] = self._has_feature('razer.device.dpi', 'availableDPI')
self._capabilities['battery'] = self._has_feature('razer.device.power', 'getBattery')
if self.has('dpi'):
self._dbus_interfaces['dpi'] = _dbus.Interface(self._dbus, "razer.device.dpi")
if self.has('battery'):
self._dbus_interfaces['power'] = _dbus.Interface(self._dbus, "razer.device.power")
@property
def max_dpi(self) -> int:
"""
Gets max DPI
:return: Max DPI, or None if the device does not support DPI
:rtype: int or None
"""
if self.has('dpi'):
return int(self._dbus_interfaces['dpi'].maxDPI())
else:
return None
@property
def available_dpi(self) -> list:
"""
Gets the available DPI
:return: Available DPI values if the device only supports a fixed set, otherwise None
:rtype: list or None
"""
if self.has('available_dpi'):
dbuslist = self._dbus_interfaces['dpi'].availableDPI()
# Repack list from dbus ints to normal ints
return [int(d) for d in dbuslist]
else:
return None
@property
def dpi(self) -> tuple:
"""
Get mouse DPI
Will return a tuple
:return: DPI (500, 500)
:rtype: tuple
:raises NotImplementedError: If function is not supported
"""
if self.has('dpi'):
dpi_x, dpi_y = self._dbus_interfaces['dpi'].getDPI()
# Converting to integers to remove the dbus types
return int(dpi_x), int(dpi_y)
else:
raise NotImplementedError()
@dpi.setter
def dpi(self, value: tuple):
"""
Set mouse dpi
Daemon does type validation but can't be too careful
:param value: DPI X, Y tuple
:type value: tuple
:raises ValueError: If the tuple isn't of length 2 or contains invalid values
:raises NotImplementedError: If function is not supported
"""
if self.has('dpi'):
if len(value) != 2:
raise ValueError("DPI tuple is not of length 2. Length: {0}".format(len(value)))
max_dpi = self.max_dpi
dpi_x, dpi_y = value
if not isinstance(dpi_x, int) or not isinstance(dpi_y, int):
raise ValueError("DPI X or Y is not an integer, X:{0} Y:{1}".format(type(dpi_x), type(dpi_y)))
if dpi_x < 0 or dpi_x > max_dpi:
raise ValueError("DPI X either too small or too large, X:{0}".format(dpi_x))
if dpi_y < 0 or dpi_y > max_dpi:
raise ValueError("DPI Y either too small or too large, Y:{0}".format(dpi_y))
self._dbus_interfaces['dpi'].setDPI(dpi_x, dpi_y)
else:
raise NotImplementedError()
@property
def dpi_stages(self) -> (int, list):
"""
Get mouse DPI stages
Will return a tuple containing the active DPI stage number and the list
of DPI stages as tuples.
The active DPI stage number must be: >= 1 and <= nr of DPI stages.
:return: active DPI stage number and DPI stages
(1, [(500, 500), (1000, 1000), (2000, 2000), ...])
:rtype: (int, list)
:raises NotImplementedError: if function is not supported
"""
if self.has('dpi_stages'):
response = self._dbus_interfaces['dpi'].getDPIStages()
dpi_stages = []
active_stage = int(response[0])
for dpi_x, dpi_y in response[1]:
dpi_stages.append((int(dpi_x), int(dpi_y)))
return (active_stage, dpi_stages)
else:
raise NotImplementedError()
@dpi_stages.setter
def dpi_stages(self, value: (int, list)):
"""
Set mouse DPI stages
Daemon does type validation but can't be too careful
:param value: active DPI stage number and list of DPI X, Y tuples
:type value: (int, list)
:raises ValueError: when the input is invalid
:raises NotImplementedError: If function is not supported
"""
if self.has('dpi_stages'):
max_dpi = self.max_dpi
dpi_stages = []
active_stage = value[0]
if not isinstance(active_stage, int):
raise ValueError(
"Active DPI stage is not an integer: {0}".format(
type(active_stage)))
if active_stage < 1:
raise ValueError(
"Active DPI stage has invalid value: {0} < 1".format(
active_stage))
for stage in value[1]:
if len(stage) != 2:
raise ValueError(
"DPI tuple is not of length 2. Length: {0}".format(
len(stage)))
dpi_x, dpi_y = stage
if not isinstance(dpi_x, int) or not isinstance(dpi_y, int):
raise ValueError(
"DPI X or Y is not an integer, X:{0} Y:{1}".format(
type(dpi_x), type(dpi_y)))
if dpi_x < 0 or dpi_x > max_dpi:
raise ValueError(
"DPI X either too small or too large, X:{0}".format(
dpi_x))
if dpi_y < 0 or dpi_y > max_dpi:
raise ValueError(
"DPI Y either too small or too large, Y:{0}".format(
dpi_y))
dpi_stages.append((dpi_x, dpi_y))
if active_stage > len(dpi_stages):
raise ValueError(
"Active DPI stage has invalid value: {0} > {1}".format(
active_stage, len(dpi_stages)))
self._dbus_interfaces['dpi'].setDPIStages(active_stage, dpi_stages)
else:
raise NotImplementedError()
@property
def poll_rate(self) -> int:
"""
Get poll rate from device
:return: Poll rate
:rtype: int
:raises NotImplementedError: If function is not supported
"""
if self.has('poll_rate'):
return int(self._dbus_interfaces['device'].getPollRate())
else:
raise NotImplementedError()
@poll_rate.setter
def poll_rate(self, poll_rate: int):
"""
Set poll rate of device
:param poll_rate: Polling rate
:type poll_rate: int
:raises NotImplementedError: If function is not supported
"""
if self.has('poll_rate'):
if not isinstance(poll_rate, int):
raise ValueError("Poll rate is not an integer: {0}".format(poll_rate))
if poll_rate not in (_c.POLL_125HZ, _c.POLL_500HZ, _c.POLL_1000HZ):
raise ValueError('Poll rate "{0}" is not one of {1}'.format(poll_rate, (_c.POLL_125HZ, _c.POLL_500HZ, _c.POLL_1000HZ)))
self._dbus_interfaces['device'].setPollRate(poll_rate)
else:
raise NotImplementedError()
@property
def battery_level(self) -> int:
"""
Get battery level from device
:return: Battery level (0-100)
"""
if self.has('battery'):
return int(self._dbus_interfaces['power'].getBattery())
@property
def is_charging(self) -> bool:
"""
Get whether the device is charging or not
:return: Boolean
"""
if self.has('battery'):
return bool(self._dbus_interfaces['power'].isCharging())
def set_idle_time(self, idle_time) -> None:
"""
Sets the idle time on the device
:param idle_time: the time in seconds
"""
if self.has('battery'):
self._dbus_interfaces['power'].setIdleTime(idle_time)
def get_idle_time(self) -> int:
"""
Gets the idle time of the device
:return: Number of seconds before this device goes into powersave
(60-900)
"""
if self.has('battery'):
return int(self._dbus_interfaces['power'].getIdleTime())
def set_low_battery_threshold(self, threshold) -> None:
"""
Set the low battery threshold as a percentage
:param threshold: Battery threshold as a percentage
:type threshold: int
"""
if self.has('battery'):
self._dbus_interfaces['power'].setLowBatteryThreshold(threshold)
def get_low_battery_threshold(self) -> int:
"""
Get the low battery threshold as a percentage
:return: Battery threshold as a percentage
"""
if self.has('battery'):
return int(self._dbus_interfaces['power'].getLowBatteryThreshold())
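# --- Illustrative usage sketch (not part of the original module) ---
# Assumes a connected mouse; the serial number below is a placeholder and the
# device would normally be obtained from the device manager.
#
#   mouse = RazerMouse('PM0000000000000')
#   if mouse.has('dpi'):
#       mouse.dpi = (800, 800)
#   if mouse.has('poll_rate'):
#       mouse.poll_rate = _c.POLL_1000HZ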
|
import json
import sys
import webbrowser
import smart_open
def copy_to_clipboard(text):
try:
import pyperclip
except ImportError:
print('pyperclip <https://pypi.org/project/pyperclip/> is missing.', file=sys.stderr)
print('copy-paste the following text manually:', file=sys.stderr)
print('\t', text, file=sys.stderr)
else:
pyperclip.copy(text)
prid = int(sys.argv[1])
url = "https://api.github.com/repos/RaRe-Technologies/smart_open/pulls/%d" % prid
with smart_open.open(url) as fin:
prinfo = json.load(fin)
prinfo['user_login'] = prinfo['user']['login']
prinfo['user_html_url'] = prinfo['user']['html_url']
text = '- %(title)s (PR [#%(number)s](%(html_url)s), [@%(user_login)s](%(user_html_url)s))' % prinfo
copy_to_clipboard(text)
prinfo['head_repo_html_url'] = prinfo['head']['repo']['html_url']
prinfo['head_ref'] = prinfo['head']['ref']
edit_url = '%(head_repo_html_url)s/edit/%(head_ref)s/CHANGELOG.md' % prinfo
webbrowser.open(edit_url)
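# Example invocation (illustrative; the script name is an assumption and the PR
# number is a placeholder):
#   python update_changelog_pr.py 123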
|
from datetime import timedelta
from pyetherscan import get_balance
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_ADDRESS, CONF_NAME, CONF_TOKEN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTRIBUTION = "Data provided by etherscan.io"
CONF_TOKEN_ADDRESS = "token_address"
SCAN_INTERVAL = timedelta(minutes=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TOKEN): cv.string,
vol.Optional(CONF_TOKEN_ADDRESS): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Etherscan.io sensors."""
address = config.get(CONF_ADDRESS)
name = config.get(CONF_NAME)
token = config.get(CONF_TOKEN)
token_address = config.get(CONF_TOKEN_ADDRESS)
if token:
token = token.upper()
if not name:
name = "%s Balance" % token
if not name:
name = "ETH Balance"
add_entities([EtherscanSensor(name, address, token, token_address)], True)
class EtherscanSensor(Entity):
"""Representation of an Etherscan.io sensor."""
def __init__(self, name, address, token, token_address):
"""Initialize the sensor."""
self._name = name
self._address = address
self._token_address = token_address
self._token = token
self._state = None
self._unit_of_measurement = self._token or "ETH"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._unit_of_measurement
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_ATTRIBUTION: ATTRIBUTION}
def update(self):
"""Get the latest state of the sensor."""
if self._token_address:
self._state = get_balance(self._address, self._token_address)
elif self._token:
self._state = get_balance(self._address, self._token)
else:
self._state = get_balance(self._address)
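# --- Illustrative configuration sketch (not part of the original module) ---
# How this platform might be wired up in YAML; the address is a placeholder.
#
#   sensor:
#     - platform: etherscan
#       address: "0x0000000000000000000000000000000000000000"
#       name: "ETH Balance"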
|
from django.test.utils import modify_settings, override_settings
from weblate.auth.models import Group, Permission, Role, User
from weblate.trans.models import Comment, Project
from weblate.trans.tests.test_views import FixtureTestCase
from weblate.trans.tests.utils import create_test_billing
class PermissionsTest(FixtureTestCase):
def setUp(self):
super().setUp()
self.user = User.objects.create_user("user", "[email protected]")
self.admin = User.objects.create_user("admin", "[email protected]")
self.superuser = User.objects.create_user(
"super", "[email protected]", is_superuser=True
)
self.project.add_user(self.admin, "@Administration")
def test_admin_perm(self):
self.assertTrue(self.superuser.has_perm("upload.authorship", self.project))
self.assertTrue(self.admin.has_perm("upload.authorship", self.project))
self.assertFalse(self.user.has_perm("upload.authorship", self.project))
def test_user_perm(self):
self.assertTrue(self.superuser.has_perm("comment.add", self.project))
self.assertTrue(self.admin.has_perm("comment.add", self.project))
self.assertTrue(self.user.has_perm("comment.add", self.project))
def test_delete_comment(self):
comment = Comment(unit=self.get_unit())
self.assertTrue(self.superuser.has_perm("comment.delete", comment))
self.assertTrue(self.admin.has_perm("comment.delete", comment))
self.assertFalse(self.user.has_perm("comment.delete", comment))
def test_delete_owned_comment(self):
comment = Comment(unit=self.get_unit(), user=self.user)
self.assertTrue(self.superuser.has_perm("comment.delete", comment))
self.assertTrue(self.admin.has_perm("comment.delete", comment))
self.assertTrue(self.user.has_perm("comment.delete", comment))
def test_delete_not_owned_comment(self):
comment = Comment(unit=self.get_unit(), user=self.admin)
self.assertTrue(self.superuser.has_perm("comment.delete", comment))
self.assertTrue(self.admin.has_perm("comment.delete", comment))
self.assertFalse(self.user.has_perm("comment.delete", comment))
@override_settings(AUTH_RESTRICT_ADMINS={"super": ("trans.add_project",)})
def test_restrict_super(self):
self.assertFalse(self.superuser.has_perm("trans.change_project"))
self.assertFalse(self.admin.has_perm("trans.change_project"))
self.assertFalse(self.user.has_perm("trans.change_project"))
self.assertTrue(self.superuser.has_perm("trans.add_project"))
self.assertFalse(self.admin.has_perm("trans.add_project"))
self.assertFalse(self.user.has_perm("trans.add_project"))
# Should have no effect here
self.test_delete_comment()
@override_settings(AUTH_RESTRICT_ADMINS={"admin": ("trans.add_project",)})
def test_restrict_admin(self):
self.assertTrue(self.superuser.has_perm("trans.change_project"))
self.assertFalse(self.admin.has_perm("trans.change_project"))
self.assertFalse(self.user.has_perm("trans.change_project"))
self.assertTrue(self.superuser.has_perm("trans.add_project"))
self.assertFalse(self.admin.has_perm("trans.add_project"))
self.assertFalse(self.user.has_perm("trans.add_project"))
# Should have no effect here
self.test_delete_comment()
def test_global_perms(self):
self.assertTrue(self.superuser.has_perm("management.use"))
self.assertFalse(self.admin.has_perm("management.use"))
self.assertFalse(self.user.has_perm("management.use"))
def test_global_perms_granted(self):
permission = Permission.objects.get(codename="management.use")
role = Role.objects.create(name="Nearly superuser")
role.permissions.add(permission)
group = Group.objects.create(name="Nearly superuser")
group.roles.add(role)
self.user.groups.add(group)
self.assertTrue(self.user.has_perm("management.use"))
def test_restricted_component(self):
self.assertTrue(self.superuser.has_perm("unit.edit", self.component))
self.assertTrue(self.admin.has_perm("unit.edit", self.component))
self.assertTrue(self.user.has_perm("unit.edit", self.component))
self.component.restricted = True
self.component.save(update_fields=["restricted"])
self.assertTrue(self.superuser.has_perm("unit.edit", self.component))
self.assertFalse(self.admin.has_perm("unit.edit", self.component))
self.assertFalse(self.user.has_perm("unit.edit", self.component))
@modify_settings(INSTALLED_APPS={"append": "weblate.billing"})
def test_permission_billing(self):
# Permissions should apply without billing
with modify_settings(INSTALLED_APPS={"remove": "weblate.billing"}):
self.assertTrue(
self.superuser.has_perm("billing:project.permissions", self.project)
)
self.assertTrue(
self.admin.has_perm("billing:project.permissions", self.project)
)
self.assertFalse(
self.user.has_perm("billing:project.permissions", self.project)
)
# With billing enabled and no plan it should be disabled
self.assertFalse(
self.superuser.has_perm("billing:project.permissions", self.project)
)
self.assertFalse(
self.admin.has_perm("billing:project.permissions", self.project)
)
self.assertFalse(
self.user.has_perm("billing:project.permissions", self.project)
)
project = Project.objects.get(pk=self.project.pk)
billing = create_test_billing(self.admin)
billing.projects.add(project)
# The default plan allows
self.assertTrue(self.superuser.has_perm("billing:project.permissions", project))
self.assertTrue(self.admin.has_perm("billing:project.permissions", project))
self.assertFalse(self.user.has_perm("billing:project.permissions", project))
billing.plan.change_access_control = False
billing.plan.save()
project = Project.objects.get(pk=self.project.pk)
# It should be restricted now
self.assertFalse(
self.superuser.has_perm("billing:project.permissions", project)
)
self.assertFalse(self.admin.has_perm("billing:project.permissions", project))
self.assertFalse(self.user.has_perm("billing:project.permissions", project))
|
import pywink
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
from homeassistant.util import color as color_util
from homeassistant.util.color import (
color_temperature_mired_to_kelvin as mired_to_kelvin,
)
from . import DOMAIN, WinkDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Wink lights."""
for light in pywink.get_light_bulbs():
_id = light.object_id() + light.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkLight(light, hass)])
for light in pywink.get_light_groups():
_id = light.object_id() + light.name()
if _id not in hass.data[DOMAIN]["unique_ids"]:
add_entities([WinkLight(light, hass)])
class WinkLight(WinkDevice, LightEntity):
"""Representation of a Wink light."""
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self.hass.data[DOMAIN]["entities"]["light"].append(self)
@property
def is_on(self):
"""Return true if light is on."""
return self.wink.state()
@property
def brightness(self):
"""Return the brightness of the light."""
if self.wink.brightness() is not None:
return int(self.wink.brightness() * 255)
return None
@property
def hs_color(self):
"""Define current bulb color."""
if self.wink.supports_xy_color():
return color_util.color_xy_to_hs(*self.wink.color_xy())
if self.wink.supports_hue_saturation():
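            # pywink reports hue and saturation scaled to 0..1; convert them to
            # Home Assistant's hue in degrees (0-360) and saturation in percent
            # (0-100), mirroring the inverse scaling done in turn_on().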
hue = self.wink.color_hue()
saturation = self.wink.color_saturation()
if hue is not None and saturation is not None:
return hue * 360, saturation * 100
return None
@property
def color_temp(self):
"""Define current bulb color in degrees Kelvin."""
if not self.wink.supports_temperature():
return None
return color_util.color_temperature_kelvin_to_mired(
self.wink.color_temperature_kelvin()
)
@property
def supported_features(self):
"""Flag supported features."""
supports = SUPPORT_BRIGHTNESS
if self.wink.supports_temperature():
supports = supports | SUPPORT_COLOR_TEMP
if self.wink.supports_xy_color():
supports = supports | SUPPORT_COLOR
elif self.wink.supports_hue_saturation():
supports = supports | SUPPORT_COLOR
return supports
def turn_on(self, **kwargs):
"""Turn the switch on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
hs_color = kwargs.get(ATTR_HS_COLOR)
color_temp_mired = kwargs.get(ATTR_COLOR_TEMP)
state_kwargs = {}
if hs_color:
if self.wink.supports_xy_color():
xy_color = color_util.color_hs_to_xy(*hs_color)
state_kwargs["color_xy"] = xy_color
if self.wink.supports_hue_saturation():
hs_scaled = hs_color[0] / 360, hs_color[1] / 100
state_kwargs["color_hue_saturation"] = hs_scaled
if color_temp_mired:
state_kwargs["color_kelvin"] = mired_to_kelvin(color_temp_mired)
if brightness:
state_kwargs["brightness"] = brightness / 255.0
self.wink.set_state(True, **state_kwargs)
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.wink.set_state(False)
|
from __future__ import annotations
import asyncio
import enum
from types import TracebackType
from typing import Any, Dict, List, Optional, Type, Union
from .async_ import run_callback_threadsafe
ZONE_GLOBAL = "global"
class _State(str, enum.Enum):
"""States of a task."""
INIT = "INIT"
ACTIVE = "ACTIVE"
TIMEOUT = "TIMEOUT"
EXIT = "EXIT"
class _GlobalFreezeContext:
"""Context manager that freezes the global timeout."""
def __init__(self, manager: TimeoutManager) -> None:
"""Initialize internal timeout context manager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._manager: TimeoutManager = manager
async def __aenter__(self) -> _GlobalFreezeContext:
self._enter()
return self
async def __aexit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._exit()
return None
def __enter__(self) -> _GlobalFreezeContext:
self._loop.call_soon_threadsafe(self._enter)
return self
def __exit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._loop.call_soon_threadsafe(self._exit)
return True
def _enter(self) -> None:
"""Run freeze."""
if not self._manager.freezes_done:
return
        # Pause global tasks
for task in self._manager.global_tasks:
task.pause()
        # Pause zones that are not already frozen
for zone in self._manager.zones.values():
if not zone.freezes_done:
continue
zone.pause()
self._manager.global_freezes.append(self)
def _exit(self) -> None:
"""Finish freeze."""
self._manager.global_freezes.remove(self)
if not self._manager.freezes_done:
return
# Global reset
for task in self._manager.global_tasks:
task.reset()
# Zones reset
for zone in self._manager.zones.values():
if not zone.freezes_done:
continue
zone.reset()
class _ZoneFreezeContext:
"""Context manager that freezes a zone timeout."""
def __init__(self, zone: _ZoneTimeoutManager) -> None:
"""Initialize internal timeout context manager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._zone: _ZoneTimeoutManager = zone
async def __aenter__(self) -> _ZoneFreezeContext:
self._enter()
return self
async def __aexit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._exit()
return None
def __enter__(self) -> _ZoneFreezeContext:
self._loop.call_soon_threadsafe(self._enter)
return self
def __exit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._loop.call_soon_threadsafe(self._exit)
return True
def _enter(self) -> None:
"""Run freeze."""
if self._zone.freezes_done:
self._zone.pause()
self._zone.enter_freeze(self)
def _exit(self) -> None:
"""Finish freeze."""
self._zone.exit_freeze(self)
if not self._zone.freezes_done:
return
self._zone.reset()
class _GlobalTaskContext:
"""Context manager that tracks a global task."""
def __init__(
self,
manager: TimeoutManager,
task: asyncio.Task[Any],
timeout: float,
cool_down: float,
) -> None:
"""Initialize internal timeout context manager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._manager: TimeoutManager = manager
self._task: asyncio.Task[Any] = task
self._time_left: float = timeout
self._expiration_time: Optional[float] = None
self._timeout_handler: Optional[asyncio.Handle] = None
self._wait_zone: asyncio.Event = asyncio.Event()
self._state: _State = _State.INIT
self._cool_down: float = cool_down
async def __aenter__(self) -> _GlobalTaskContext:
self._manager.global_tasks.append(self)
self._start_timer()
self._state = _State.ACTIVE
return self
async def __aexit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._stop_timer()
self._manager.global_tasks.remove(self)
# Timeout on exit
if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT:
raise asyncio.TimeoutError
self._state = _State.EXIT
self._wait_zone.set()
return None
@property
def state(self) -> _State:
"""Return state of the Global task."""
return self._state
def zones_done_signal(self) -> None:
"""Signal that all zones are done."""
self._wait_zone.set()
def _start_timer(self) -> None:
"""Start timeout handler."""
if self._timeout_handler:
return
self._expiration_time = self._loop.time() + self._time_left
self._timeout_handler = self._loop.call_at(
self._expiration_time, self._on_timeout
)
def _stop_timer(self) -> None:
"""Stop zone timer."""
if self._timeout_handler is None:
return
self._timeout_handler.cancel()
self._timeout_handler = None
# Calculate new timeout
assert self._expiration_time
self._time_left = self._expiration_time - self._loop.time()
def _on_timeout(self) -> None:
"""Process timeout."""
self._state = _State.TIMEOUT
self._timeout_handler = None
        # If zones are still running, wait for them to finish before cancelling
if not self._manager.zones_done:
asyncio.create_task(self._on_wait())
else:
self._cancel_task()
def _cancel_task(self) -> None:
"""Cancel own task."""
if self._task.done():
return
self._task.cancel()
def pause(self) -> None:
"""Pause timers while it freeze."""
self._stop_timer()
def reset(self) -> None:
"""Reset timer after freeze."""
self._start_timer()
async def _on_wait(self) -> None:
"""Wait until zones are done."""
await self._wait_zone.wait()
await asyncio.sleep(self._cool_down) # Allow context switch
if not self.state == _State.TIMEOUT:
return
self._cancel_task()
class _ZoneTaskContext:
"""Context manager that tracks an active task for a zone."""
def __init__(
self,
zone: _ZoneTimeoutManager,
task: asyncio.Task[Any],
timeout: float,
) -> None:
"""Initialize internal timeout context manager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._zone: _ZoneTimeoutManager = zone
self._task: asyncio.Task[Any] = task
self._state: _State = _State.INIT
self._time_left: float = timeout
self._expiration_time: Optional[float] = None
self._timeout_handler: Optional[asyncio.Handle] = None
@property
def state(self) -> _State:
"""Return state of the Zone task."""
return self._state
async def __aenter__(self) -> _ZoneTaskContext:
self._zone.enter_task(self)
self._state = _State.ACTIVE
        # Only start the timer if the zone is not currently frozen
if self._zone.freezes_done:
self._start_timer()
return self
async def __aexit__(
self,
exc_type: Type[BaseException],
exc_val: BaseException,
exc_tb: TracebackType,
) -> Optional[bool]:
self._zone.exit_task(self)
self._stop_timer()
# Timeout on exit
if exc_type is asyncio.CancelledError and self.state == _State.TIMEOUT:
raise asyncio.TimeoutError
self._state = _State.EXIT
return None
def _start_timer(self) -> None:
"""Start timeout handler."""
if self._timeout_handler:
return
self._expiration_time = self._loop.time() + self._time_left
self._timeout_handler = self._loop.call_at(
self._expiration_time, self._on_timeout
)
def _stop_timer(self) -> None:
"""Stop zone timer."""
if self._timeout_handler is None:
return
self._timeout_handler.cancel()
self._timeout_handler = None
# Calculate new timeout
assert self._expiration_time
self._time_left = self._expiration_time - self._loop.time()
def _on_timeout(self) -> None:
"""Process timeout."""
self._state = _State.TIMEOUT
self._timeout_handler = None
# Timeout
if self._task.done():
return
self._task.cancel()
def pause(self) -> None:
"""Pause timers while it freeze."""
self._stop_timer()
def reset(self) -> None:
"""Reset timer after freeze."""
self._start_timer()
class _ZoneTimeoutManager:
"""Manage the timeouts for a zone."""
def __init__(self, manager: TimeoutManager, zone: str) -> None:
"""Initialize internal timeout context manager."""
self._manager: TimeoutManager = manager
self._zone: str = zone
self._tasks: List[_ZoneTaskContext] = []
self._freezes: List[_ZoneFreezeContext] = []
@property
def name(self) -> str:
"""Return Zone name."""
return self._zone
@property
def active(self) -> bool:
"""Return True if zone is active."""
return len(self._tasks) > 0 or len(self._freezes) > 0
@property
def freezes_done(self) -> bool:
"""Return True if all freeze are done."""
return len(self._freezes) == 0 and self._manager.freezes_done
def enter_task(self, task: _ZoneTaskContext) -> None:
"""Start into new Task."""
self._tasks.append(task)
def exit_task(self, task: _ZoneTaskContext) -> None:
"""Exit a running Task."""
self._tasks.remove(task)
        # Drop the zone once the last task or freeze has exited
if not self.active:
self._manager.drop_zone(self.name)
def enter_freeze(self, freeze: _ZoneFreezeContext) -> None:
"""Start into new freeze."""
self._freezes.append(freeze)
def exit_freeze(self, freeze: _ZoneFreezeContext) -> None:
"""Exit a running Freeze."""
self._freezes.remove(freeze)
        # Drop the zone once the last task or freeze has exited
if not self.active:
self._manager.drop_zone(self.name)
def pause(self) -> None:
"""Stop timers while it freeze."""
if not self.active:
return
# Forward pause
for task in self._tasks:
task.pause()
def reset(self) -> None:
"""Reset timer after freeze."""
if not self.active:
return
# Forward reset
for task in self._tasks:
task.reset()
class TimeoutManager:
"""Class to manage timeouts over different zones.
Manages both global and zone based timeouts.
"""
def __init__(self) -> None:
"""Initialize TimeoutManager."""
self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()
self._zones: Dict[str, _ZoneTimeoutManager] = {}
self._globals: List[_GlobalTaskContext] = []
self._freezes: List[_GlobalFreezeContext] = []
@property
def zones_done(self) -> bool:
"""Return True if all zones are finished."""
return not bool(self._zones)
@property
def freezes_done(self) -> bool:
"""Return True if all freezes are finished."""
return not self._freezes
@property
def zones(self) -> Dict[str, _ZoneTimeoutManager]:
"""Return all Zones."""
return self._zones
@property
def global_tasks(self) -> List[_GlobalTaskContext]:
"""Return all global Tasks."""
return self._globals
@property
def global_freezes(self) -> List[_GlobalFreezeContext]:
"""Return all global Freezes."""
return self._freezes
def drop_zone(self, zone_name: str) -> None:
"""Drop a zone out of scope."""
self._zones.pop(zone_name, None)
if self._zones:
return
# Signal Global task, all zones are done
for task in self._globals:
task.zones_done_signal()
def async_timeout(
self, timeout: float, zone_name: str = ZONE_GLOBAL, cool_down: float = 0
) -> Union[_ZoneTaskContext, _GlobalTaskContext]:
"""Timeout based on a zone.
        Intended for use as an async context manager.
"""
current_task: Optional[asyncio.Task[Any]] = asyncio.current_task()
assert current_task
# Global Zone
if zone_name == ZONE_GLOBAL:
task = _GlobalTaskContext(self, current_task, timeout, cool_down)
return task
# Zone Handling
if zone_name in self.zones:
zone: _ZoneTimeoutManager = self.zones[zone_name]
else:
self.zones[zone_name] = zone = _ZoneTimeoutManager(self, zone_name)
# Create Task
return _ZoneTaskContext(zone, current_task, timeout)
def async_freeze(
self, zone_name: str = ZONE_GLOBAL
) -> Union[_ZoneFreezeContext, _GlobalFreezeContext]:
"""Freeze all timer until job is done.
For using as Async Context Manager.
"""
# Global Freeze
if zone_name == ZONE_GLOBAL:
return _GlobalFreezeContext(self)
# Zone Freeze
if zone_name in self.zones:
zone: _ZoneTimeoutManager = self.zones[zone_name]
else:
self.zones[zone_name] = zone = _ZoneTimeoutManager(self, zone_name)
return _ZoneFreezeContext(zone)
def freeze(
self, zone_name: str = ZONE_GLOBAL
) -> Union[_ZoneFreezeContext, _GlobalFreezeContext]:
"""Freeze all timer until job is done.
For using as Context Manager.
"""
return run_callback_threadsafe(
self._loop, self.async_freeze, zone_name
).result()
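# --- Usage sketch (not part of the original module) ---
# A minimal, illustrative example of the API above: timeouts and freezes are
# async context managers. The zone name "demo" and the durations used here are
# placeholders only.
async def _example_usage() -> None:
    manager = TimeoutManager()
    try:
        # Global budget of 10 seconds for everything inside this block.
        async with manager.async_timeout(10):
            # Separate 5 second budget tracked for the "demo" zone.
            async with manager.async_timeout(5, "demo"):
                await asyncio.sleep(1)
            # While frozen, the "demo" zone timers are paused and reset on exit.
            async with manager.async_freeze("demo"):
                await asyncio.sleep(1)
    except asyncio.TimeoutError:
        pass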
|
from __future__ import print_function
import logging
import optparse
import pymongo
from .utils import do_db_auth, setup_logging
from ..arctic import Arctic, ArcticLibraryBinding
from ..hooks import get_mongodb_uri
logger = logging.getLogger(__name__)
def prune_versions(lib, symbols, keep_mins):
logger.info("Fixing snapshot pointers")
lib._cleanup_orphaned_versions(dry_run=False)
for symbol in symbols:
logger.info("Pruning %s" % symbol)
lib._prune_previous_versions(symbol, keep_mins=keep_mins)
def main():
usage = """usage: %prog [options]
Prunes (i.e. deletes) versions of data that are not the most recent, and are older than 10 minutes,
    and are not in use by snapshots. Must be used on an Arctic VersionStore library instance.
Example:
arctic_prune_versions --host=hostname --library=arctic_jblackburn.my_library
"""
setup_logging()
parser = optparse.OptionParser(usage=usage)
parser.add_option("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
parser.add_option("--library", help="The name of the library. e.g. 'arctic_jblackburn.library'")
parser.add_option("--symbols", help="The symbols to prune - comma separated (default all)")
parser.add_option("--keep-mins", default=10, help="Ensure there's a version at least keep-mins old. Default:10")
(opts, _) = parser.parse_args()
if not opts.library:
parser.error('Must specify the Arctic library e.g. arctic_jblackburn.library!')
db_name, _ = ArcticLibraryBinding._parse_db_lib(opts.library)
print("Pruning (old) versions in : %s on mongo %s" % (opts.library, opts.host))
print("Keeping all versions <= %s mins old" % (opts.keep_mins))
c = pymongo.MongoClient(get_mongodb_uri(opts.host))
if not do_db_auth(opts.host, c, db_name):
logger.error('Authentication Failed. Exiting.')
return
lib = Arctic(c)[opts.library]
if opts.symbols:
symbols = opts.symbols.split(',')
else:
symbols = lib.list_symbols(all_symbols=True)
logger.info("Found %s symbols" % len(symbols))
prune_versions(lib, symbols, opts.keep_mins)
logger.info("Done")
if __name__ == '__main__':
main()
|
import numpy as np
import os
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
from chainercv.utils import read_image
def directory_parsing_label_names(root, numerical_sort=False):
"""Get label names from the directories that are named by them.
    The label names are the names of the directories located one
    layer below the root directory.
The label names can be used together with
:class:`~chainercv.datasets.DirectoryParsingLabelDataset`.
The index of a label name corresponds to the label id
    that is used by the dataset to refer to the label.
Args:
root (string): The root directory.
numerical_sort (bool): Label names are sorted numerically.
This means that label :obj:`2` is before label :obj:`10`,
which is not the case when string sort is used.
The default value is :obj:`False`.
Returns:
list of strings:
Sorted names of classes.
"""
label_names = [d for d in os.listdir(root)
if os.path.isdir(os.path.join(root, d))]
if not numerical_sort:
label_names.sort()
else:
label_names = sorted(label_names, key=int)
return label_names
def _check_img_ext(path):
img_extensions = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp']
return any(os.path.splitext(path)[1].lower() == extension for
extension in img_extensions)
def _parse_label_dataset(root, label_names,
check_img_file=_check_img_ext):
img_paths = []
labels = []
for label, label_name in enumerate(label_names):
label_dir = os.path.join(root, label_name)
if not os.path.isdir(label_dir):
continue
walk_dir = sorted(os.walk(label_dir), key=lambda x: x[0])
for cur_dir, _, names in walk_dir:
names = sorted(names)
for name in names:
img_path = os.path.join(cur_dir, name)
if check_img_file(img_path):
img_paths.append(img_path)
labels.append(label)
return img_paths, np.array(labels, np.int32)
class DirectoryParsingLabelDataset(GetterDataset):
"""A label dataset whose label names are the names of the subdirectories.
    The label names are the names of the directories located one layer below
    the root directory.
    All images located under these subdirectories are categorized into classes
    named after the subdirectories.
An image is parsed only when the function :obj:`check_img_file`
returns :obj:`True` by taking the path to the image as an argument.
If :obj:`check_img_file` is :obj:`None`,
the path with any image extensions will be parsed.
Example:
A directory structure should be one like below.
.. code::
root
|-- class_0
| |-- img_0.png
| |-- img_1.png
|
--- class_1
|-- img_0.png
>>> from chainercv.datasets import DirectoryParsingLabelDataset
>>> dataset = DirectoryParsingLabelDataset('root')
>>> dataset.img_paths
['root/class_0/img_0.png', 'root/class_0/img_1.png',
    'root/class_1/img_0.png']
>>> dataset.labels
array([0, 0, 1])
Args:
root (string): The root directory.
check_img_file (callable): A function to determine
if a file should be included in the dataset.
color (bool): If :obj:`True`, this dataset read images
as color images. The default value is :obj:`True`.
numerical_sort (bool): Label names are sorted numerically.
This means that label :obj:`2` is before label :obj:`10`,
which is not the case when string sort is used.
Regardless of this option, string sort is used for the
order of files with the same label.
The default value is :obj:`False`.
This dataset returns the following data.
.. csv-table::
:header: name, shape, dtype, format
:obj:`img`, ":math:`(3, H, W)` [#directory_parsing_1]_", \
:obj:`float32`, "RGB, :math:`[0, 255]`"
:obj:`label`, scalar, :obj:`int32`, ":math:`[0, \#class - 1]`"
.. [#directory_parsing_1] :math:`(1, H, W)` if :obj:`color = False`.
"""
def __init__(self, root, check_img_file=None, color=True,
numerical_sort=False):
super(DirectoryParsingLabelDataset, self).__init__()
self.color = color
label_names = directory_parsing_label_names(
root, numerical_sort=numerical_sort)
if check_img_file is None:
check_img_file = _check_img_ext
self.img_paths, self.labels = _parse_label_dataset(
root, label_names, check_img_file)
self.add_getter('img', self._get_image)
self.add_getter('label', self._get_label)
def __len__(self):
return len(self.img_paths)
def _get_image(self, i):
return read_image(self.img_paths[i], color=self.color)
def _get_label(self, i):
return self.labels[i]
|
import abodepy.helpers.constants as CONST
from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_DISARM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
)
from .common import setup_platform
from tests.async_mock import PropertyMock, patch
DEVICE_ID = "alarm_control_panel.abode_alarm"
async def test_entity_registry(hass):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, ALARM_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get(DEVICE_ID)
# Abode alarm device unique_id is the MAC address
assert entry.unique_id == "001122334455"
async def test_attributes(hass):
"""Test the alarm control panel attributes are correct."""
await setup_platform(hass, ALARM_DOMAIN)
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_ALARM_DISARMED
assert state.attributes.get(ATTR_DEVICE_ID) == "area_1"
assert not state.attributes.get("battery_backup")
assert not state.attributes.get("cellular_backup")
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Abode Alarm"
assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == 3
async def test_set_alarm_away(hass):
"""Test the alarm control panel can be set to away."""
with patch("abodepy.AbodeEventController.add_device_callback") as mock_callback:
with patch("abodepy.ALARM.AbodeAlarm.set_away") as mock_set_away:
await setup_platform(hass, ALARM_DOMAIN)
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_ARM_AWAY,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_set_away.assert_called_once()
with patch(
"abodepy.ALARM.AbodeAlarm.mode",
new_callable=PropertyMock,
) as mock_mode:
mock_mode.return_value = CONST.MODE_AWAY
update_callback = mock_callback.call_args[0][1]
await hass.async_add_executor_job(update_callback, "area_1")
await hass.async_block_till_done()
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_ALARM_ARMED_AWAY
async def test_set_alarm_home(hass):
"""Test the alarm control panel can be set to home."""
with patch("abodepy.AbodeEventController.add_device_callback") as mock_callback:
with patch("abodepy.ALARM.AbodeAlarm.set_home") as mock_set_home:
await setup_platform(hass, ALARM_DOMAIN)
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_ARM_HOME,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_set_home.assert_called_once()
with patch(
"abodepy.ALARM.AbodeAlarm.mode", new_callable=PropertyMock
) as mock_mode:
mock_mode.return_value = CONST.MODE_HOME
update_callback = mock_callback.call_args[0][1]
await hass.async_add_executor_job(update_callback, "area_1")
await hass.async_block_till_done()
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_ALARM_ARMED_HOME
async def test_set_alarm_standby(hass):
"""Test the alarm control panel can be set to standby."""
with patch("abodepy.AbodeEventController.add_device_callback") as mock_callback:
with patch("abodepy.ALARM.AbodeAlarm.set_standby") as mock_set_standby:
await setup_platform(hass, ALARM_DOMAIN)
await hass.services.async_call(
ALARM_DOMAIN,
SERVICE_ALARM_DISARM,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_set_standby.assert_called_once()
with patch(
"abodepy.ALARM.AbodeAlarm.mode", new_callable=PropertyMock
) as mock_mode:
mock_mode.return_value = CONST.MODE_STANDBY
update_callback = mock_callback.call_args[0][1]
await hass.async_add_executor_job(update_callback, "area_1")
await hass.async_block_till_done()
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_ALARM_DISARMED
async def test_state_unknown(hass):
"""Test an unknown alarm control panel state."""
with patch("abodepy.ALARM.AbodeAlarm.mode", new_callable=PropertyMock) as mock_mode:
await setup_platform(hass, ALARM_DOMAIN)
await hass.async_block_till_done()
mock_mode.return_value = None
state = hass.states.get(DEVICE_ID)
assert state.state == "unknown"
|
import logging
import xmlrpc.client
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_URL
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_DESCRIPTION = "description"
ATTR_GROUP = "group"
DEFAULT_URL = "http://localhost:9001/RPC2"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_URL, default=DEFAULT_URL): cv.url}
)
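# Example configuration.yaml entry (illustrative; the URL shown is the default
# Supervisord XML-RPC endpoint):
#
# sensor:
#   - platform: supervisord
#     url: http://localhost:9001/RPC2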
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Supervisord platform."""
url = config.get(CONF_URL)
try:
supervisor_server = xmlrpc.client.ServerProxy(url)
processes = supervisor_server.supervisor.getAllProcessInfo()
except ConnectionRefusedError:
_LOGGER.error("Could not connect to Supervisord")
return False
add_entities(
[SupervisorProcessSensor(info, supervisor_server) for info in processes], True
)
class SupervisorProcessSensor(Entity):
"""Representation of a supervisor-monitored process."""
def __init__(self, info, server):
"""Initialize the sensor."""
self._info = info
self._server = server
self._available = True
@property
def name(self):
"""Return the name of the sensor."""
return self._info.get("name")
@property
def state(self):
"""Return the state of the sensor."""
return self._info.get("statename")
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_DESCRIPTION: self._info.get("description"),
ATTR_GROUP: self._info.get("group"),
}
def update(self):
"""Update device state."""
try:
self._info = self._server.supervisor.getProcessInfo(
self._info.get("group") + ":" + self._info.get("name")
)
self._available = True
except ConnectionRefusedError:
_LOGGER.warning("Supervisord not available")
self._available = False
|
import lakeside
import voluptuous as vol
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_ADDRESS,
CONF_DEVICES,
CONF_NAME,
CONF_PASSWORD,
CONF_TYPE,
CONF_USERNAME,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
DOMAIN = "eufy"
DEVICE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Required(CONF_TYPE): cv.string,
vol.Optional(CONF_NAME): cv.string,
}
)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_DEVICES, default=[]): vol.All(
cv.ensure_list, [DEVICE_SCHEMA]
),
vol.Inclusive(CONF_USERNAME, "authentication"): cv.string,
vol.Inclusive(CONF_PASSWORD, "authentication"): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
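# Example configuration.yaml entry (illustrative; the address, access_token,
# type and name values are placeholders):
#
# eufy:
#   username: user@example.com
#   password: !secret eufy_password
#   devices:
#     - address: 192.168.1.10
#       access_token: ABCDEF
#       type: T1012
#       name: Desk lamp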
EUFY_DISPATCH = {
"T1011": "light",
"T1012": "light",
"T1013": "light",
"T1201": "switch",
"T1202": "switch",
"T1203": "switch",
"T1211": "switch",
}
def setup(hass, config):
"""Set up Eufy devices."""
if CONF_USERNAME in config[DOMAIN] and CONF_PASSWORD in config[DOMAIN]:
data = lakeside.get_devices(
config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD]
)
for device in data:
kind = device["type"]
if kind not in EUFY_DISPATCH:
continue
discovery.load_platform(hass, EUFY_DISPATCH[kind], DOMAIN, device, config)
for device_info in config[DOMAIN][CONF_DEVICES]:
kind = device_info["type"]
if kind not in EUFY_DISPATCH:
continue
device = {}
device["address"] = device_info["address"]
device["code"] = device_info["access_token"]
device["type"] = device_info["type"]
device["name"] = device_info["name"]
discovery.load_platform(hass, EUFY_DISPATCH[kind], DOMAIN, device, config)
return True
|
import functools
import unittest
from perfkitbenchmarker import units
from perfkitbenchmarker.linux_benchmarks import multichase_benchmark
class MemorySizeParserTestCase(unittest.TestCase):
def setUp(self):
self._parser = multichase_benchmark._MEMORY_SIZE_PARSER
def testParseNoUnits(self):
with self.assertRaises(ValueError):
self._parser.parse('10')
def testParseInvalidUnits(self):
with self.assertRaises(ValueError):
self._parser.parse('20 seconds')
def testParseExplicitMemory(self):
q = self._parser.parse('30 GiB')
self.assertEqual(q.magnitude, 30)
self.assertEqual(q.units, units.Unit('gibibyte'))
def testParsePercent(self):
q = self._parser.parse('40%')
self.assertEqual(q.magnitude, 40)
self.assertEqual(q.units, units.percent)
class TranslateMemorySizeTestCase(unittest.TestCase):
def setUp(self):
self._func = multichase_benchmark._TranslateMemorySize
def testExplicitSize(self):
result = self._func(lambda: 1024, units.Quantity('1 GiB'))
self.assertEqual(result, 1073741824)
def testPercent(self):
result = self._func(lambda: 1024, units.Quantity('25%'))
self.assertEqual(result, 256)
class IterMemorySizesTestCase(unittest.TestCase):
def setUp(self):
self._func = functools.partial(multichase_benchmark._IterMemorySizes,
lambda: 1024)
def testHitsUpperBound(self):
result = list(self._func(1 * units.byte, 32 * units.byte))
self.assertEqual(result, [1, 2, 4, 8, 16, 32])
def testSurpassesUpperBound(self):
result = list(self._func(1 * units.byte, 20 * units.byte))
self.assertEqual(result, [1, 2, 4, 8, 16])
def testPercent(self):
result = list(self._func(1 * units.percent, 10 * units.percent))
self.assertEqual(result, [10, 20, 40, 80])
def testEqual(self):
result = list(self._func(32 * units.byte, 32 * units.byte))
self.assertEqual(result, [32])
def testMaxLessThanMin(self):
result = list(self._func(64 * units.byte, 32 * units.byte))
self.assertEqual(result, [])
if __name__ == '__main__':
unittest.main()
|
from homeassistant.components.binary_sensor import DEVICE_CLASSES, BinarySensorEntity
from . import DATA_KEY, VolvoEntity
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Volvo sensors."""
if discovery_info is None:
return
async_add_entities([VolvoSensor(hass.data[DATA_KEY], *discovery_info)])
class VolvoSensor(VolvoEntity, BinarySensorEntity):
"""Representation of a Volvo sensor."""
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self.instrument.is_on
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
if self.instrument.device_class in DEVICE_CLASSES:
return self.instrument.device_class
return None
|
import errno
import os
import shutil
import subprocess
import tempfile
from collections import defaultdict
from . import _vc
class Vc(_vc.Vc):
# Requires Darcs version >= 2.10.3
# TODO implement get_commits_to_push_summary using `darcs push --dry-run`
# Currently `darcs whatsnew` (as of v2.10.3) does not report conflicts
# see http://bugs.darcs.net/issue2138
CMD = "darcs"
NAME = "Darcs"
VC_DIR = "_darcs"
state_map = {
"a": _vc.STATE_NONE,
"A": _vc.STATE_NEW,
"M": _vc.STATE_MODIFIED,
"M!": _vc.STATE_CONFLICT,
"R": _vc.STATE_REMOVED,
"F": _vc.STATE_NONEXIST, # previous name of file
"T": _vc.STATE_RENAMED, # new name of file
}
@classmethod
def is_installed(cls):
try:
proc = _vc.popen([cls.CMD, '--version'])
# check that version >= 2.10.3
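            # e.g. output beginning "2.14.2 ..." parses to ("2", "14", "2")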
            (x, y, z) = proc.read().split(" ", 1)[0].split(".", 2)[:3]
            assert (int(x), int(y), int(z)) >= (2, 10, 3)
return True
except Exception:
return False
def commit(self, runner, files, message):
command = [self.CMD, 'record', '-a', '-m', message]
runner(command, [], refresh=True, working_dir=self.root)
def update(self, runner):
command = [self.CMD, 'pull', '-a']
runner(command, [], refresh=True, working_dir=self.root)
def push(self, runner):
command = [self.CMD, 'push', '-a']
runner(command, [], refresh=True, working_dir=self.root)
def add(self, runner, files):
command = [self.CMD, 'add', '-r']
runner(command, files, refresh=True, working_dir=self.root)
def remove(self, runner, files):
command = [self.CMD, 'remove', '-r']
runner(command, files, refresh=True, working_dir=self.root)
def revert(self, runner, files):
command = [self.CMD, 'revert', '-a']
runner(command, files, refresh=True, working_dir=self.root)
def get_path_for_repo_file(self, path, commit=None):
if commit is not None:
raise NotImplementedError()
if not path.startswith(self.root + os.path.sep):
raise _vc.InvalidVCPath(self, path, "Path not in repository")
# `darcs show contents` needs the path before rename
if path in self._reverse_rename_cache:
path = self._reverse_rename_cache[path]
path = path[len(self.root) + 1:]
process = subprocess.Popen(
[self.CMD, "show", "contents", path], cwd=self.root,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
with tempfile.NamedTemporaryFile(prefix='meld-tmp', delete=False) as f:
shutil.copyfileobj(process.stdout, f)
return f.name
@classmethod
def valid_repo(cls, path):
return not _vc.call([cls.CMD, "show", "repo", "--no-files"], cwd=path)
def _update_tree_state_cache(self, path):
# FIXME: currently ignoring 'path' due to darcs's bad
# behaviour (= fails) when given "" argument
""" Update the state of the file(s) at self._tree_cache['path'] """
while 1:
try:
proc = _vc.popen(
[self.CMD, "whatsnew", "-sl", "--machine-readable"],
cwd=self.location)
lines = proc.read().split("\n")[:-1]
break
except OSError as e:
if e.errno != errno.EAGAIN:
raise
        # Files can appear twice in the list if they were modified and renamed
        # at once. Darcs lists file moves first, then modifications.
if len(lines) == 0 and os.path.isfile(path):
# If we're just updating a single file there's a chance that it
            # was previously modified, and has now been edited so that
# it is un-modified. This will result in an empty 'entries' list,
# and self._tree_cache['path'] will still contain stale data.
# When this corner case occurs we force self._tree_cache['path']
# to STATE_NORMAL.
self._tree_cache[path] = _vc.STATE_NORMAL
else:
tree_cache = defaultdict(int)
tree_meta_cache = defaultdict(list)
self._rename_cache = rename_cache = {}
self._reverse_rename_cache = {}
old_name = None
for line in lines:
                # skip empty lines and lines starting with "What's new in foo"
if (not line.strip()) or line.startswith("What"):
continue
statekey, name = line.split(" ", 1)
name = os.path.normpath(name)
if statekey == "F":
old_name = name
path = os.path.join(self.location, name)
if statekey == "T" and old_name:
old_path = os.path.join(self.location, old_name)
rename_cache[old_path] = path
old_name = None
state = self.state_map.get(statekey.strip(), _vc.STATE_NONE)
tree_cache[path] = state
for old, new in rename_cache.items():
self._reverse_rename_cache[new] = old
old_name = old[len(self.root) + 1:]
new_name = new[len(self.root) + 1:]
tree_meta_cache[new] = ("%s ➡ %s" % (old_name, new_name))
self._tree_cache.update(
dict((x, y) for x, y in tree_cache.items()))
self._tree_meta_cache = dict(tree_meta_cache)
|
import pypck
import voluptuous as vol
from homeassistant.const import (
CONF_ADDRESS,
CONF_BRIGHTNESS,
CONF_STATE,
CONF_UNIT_OF_MEASUREMENT,
TIME_SECONDS,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_CONNECTIONS,
CONF_KEYS,
CONF_LED,
CONF_OUTPUT,
CONF_PCK,
CONF_RELVARREF,
CONF_ROW,
CONF_SETPOINT,
CONF_TABLE,
CONF_TEXT,
CONF_TIME,
CONF_TIME_UNIT,
CONF_TRANSITION,
CONF_VALUE,
CONF_VARIABLE,
DATA_LCN,
LED_PORTS,
LED_STATUS,
OUTPUT_PORTS,
RELVARREF,
SENDKEYCOMMANDS,
SETPOINTS,
THRESHOLDS,
TIME_UNITS,
VAR_UNITS,
VARIABLES,
)
from .helpers import (
get_connection,
is_address,
is_key_lock_states_string,
is_relays_states_string,
)
class LcnServiceCall:
"""Parent class for all LCN service calls."""
schema = vol.Schema({vol.Required(CONF_ADDRESS): is_address})
def __init__(self, hass):
"""Initialize service call."""
self.connections = hass.data[DATA_LCN][CONF_CONNECTIONS]
def get_address_connection(self, call):
"""Get address connection object."""
addr, connection_id = call.data[CONF_ADDRESS]
addr = pypck.lcn_addr.LcnAddr(*addr)
if connection_id is None:
connection = self.connections[0]
else:
connection = get_connection(self.connections, connection_id)
return connection.get_address_conn(addr)
class OutputAbs(LcnServiceCall):
"""Set absolute brightness of output port in percent."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS)),
vol.Required(CONF_BRIGHTNESS): vol.All(
vol.Coerce(int), vol.Range(min=0, max=100)
),
vol.Optional(CONF_TRANSITION, default=0): vol.All(
vol.Coerce(float), vol.Range(min=0.0, max=486.0)
),
}
)
def __call__(self, call):
"""Execute service call."""
output = pypck.lcn_defs.OutputPort[call.data[CONF_OUTPUT]]
brightness = call.data[CONF_BRIGHTNESS]
transition = pypck.lcn_defs.time_to_ramp_value(
call.data[CONF_TRANSITION] * 1000
)
address_connection = self.get_address_connection(call)
address_connection.dim_output(output.value, brightness, transition)
class OutputRel(LcnServiceCall):
"""Set relative brightness of output port in percent."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS)),
vol.Required(CONF_BRIGHTNESS): vol.All(
vol.Coerce(int), vol.Range(min=-100, max=100)
),
}
)
def __call__(self, call):
"""Execute service call."""
output = pypck.lcn_defs.OutputPort[call.data[CONF_OUTPUT]]
brightness = call.data[CONF_BRIGHTNESS]
address_connection = self.get_address_connection(call)
address_connection.rel_output(output.value, brightness)
class OutputToggle(LcnServiceCall):
"""Toggle output port."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_OUTPUT): vol.All(vol.Upper, vol.In(OUTPUT_PORTS)),
vol.Optional(CONF_TRANSITION, default=0): vol.All(
vol.Coerce(float), vol.Range(min=0.0, max=486.0)
),
}
)
def __call__(self, call):
"""Execute service call."""
output = pypck.lcn_defs.OutputPort[call.data[CONF_OUTPUT]]
transition = pypck.lcn_defs.time_to_ramp_value(
call.data[CONF_TRANSITION] * 1000
)
address_connection = self.get_address_connection(call)
address_connection.toggle_output(output.value, transition)
class Relays(LcnServiceCall):
"""Set the relays status."""
schema = LcnServiceCall.schema.extend(
{vol.Required(CONF_STATE): is_relays_states_string}
)
def __call__(self, call):
"""Execute service call."""
states = [
pypck.lcn_defs.RelayStateModifier[state] for state in call.data[CONF_STATE]
]
address_connection = self.get_address_connection(call)
address_connection.control_relays(states)
class Led(LcnServiceCall):
"""Set the led state."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_LED): vol.All(vol.Upper, vol.In(LED_PORTS)),
vol.Required(CONF_STATE): vol.All(vol.Upper, vol.In(LED_STATUS)),
}
)
def __call__(self, call):
"""Execute service call."""
led = pypck.lcn_defs.LedPort[call.data[CONF_LED]]
led_state = pypck.lcn_defs.LedStatus[call.data[CONF_STATE]]
address_connection = self.get_address_connection(call)
address_connection.control_led(led, led_state)
class VarAbs(LcnServiceCall):
"""Set absolute value of a variable or setpoint.
Variable has to be set as counter!
Regulator setpoints can also be set using R1VARSETPOINT, R2VARSETPOINT.
"""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_VARIABLE): vol.All(
vol.Upper, vol.In(VARIABLES + SETPOINTS)
),
vol.Optional(CONF_VALUE, default=0): cv.positive_int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default="native"): vol.All(
vol.Upper, vol.In(VAR_UNITS)
),
}
)
def __call__(self, call):
"""Execute service call."""
var = pypck.lcn_defs.Var[call.data[CONF_VARIABLE]]
value = call.data[CONF_VALUE]
unit = pypck.lcn_defs.VarUnit.parse(call.data[CONF_UNIT_OF_MEASUREMENT])
address_connection = self.get_address_connection(call)
address_connection.var_abs(var, value, unit)
class VarReset(LcnServiceCall):
"""Reset value of variable or setpoint."""
schema = LcnServiceCall.schema.extend(
{vol.Required(CONF_VARIABLE): vol.All(vol.Upper, vol.In(VARIABLES + SETPOINTS))}
)
def __call__(self, call):
"""Execute service call."""
var = pypck.lcn_defs.Var[call.data[CONF_VARIABLE]]
address_connection = self.get_address_connection(call)
address_connection.var_reset(var)
class VarRel(LcnServiceCall):
"""Shift value of a variable, setpoint or threshold."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_VARIABLE): vol.All(
vol.Upper, vol.In(VARIABLES + SETPOINTS + THRESHOLDS)
),
vol.Optional(CONF_VALUE, default=0): int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default="native"): vol.All(
vol.Upper, vol.In(VAR_UNITS)
),
vol.Optional(CONF_RELVARREF, default="current"): vol.All(
vol.Upper, vol.In(RELVARREF)
),
}
)
def __call__(self, call):
"""Execute service call."""
var = pypck.lcn_defs.Var[call.data[CONF_VARIABLE]]
value = call.data[CONF_VALUE]
unit = pypck.lcn_defs.VarUnit.parse(call.data[CONF_UNIT_OF_MEASUREMENT])
value_ref = pypck.lcn_defs.RelVarRef[call.data[CONF_RELVARREF]]
address_connection = self.get_address_connection(call)
address_connection.var_rel(var, value, unit, value_ref)
class LockRegulator(LcnServiceCall):
"""Locks a regulator setpoint."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_SETPOINT): vol.All(vol.Upper, vol.In(SETPOINTS)),
vol.Optional(CONF_STATE, default=False): bool,
}
)
def __call__(self, call):
"""Execute service call."""
setpoint = pypck.lcn_defs.Var[call.data[CONF_SETPOINT]]
state = call.data[CONF_STATE]
reg_id = pypck.lcn_defs.Var.to_set_point_id(setpoint)
address_connection = self.get_address_connection(call)
address_connection.lock_regulator(reg_id, state)
class SendKeys(LcnServiceCall):
"""Sends keys (which executes bound commands)."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_KEYS): vol.All(
vol.Upper, cv.matches_regex(r"^([A-D][1-8])+$")
),
vol.Optional(CONF_STATE, default="hit"): vol.All(
vol.Upper, vol.In(SENDKEYCOMMANDS)
),
vol.Optional(CONF_TIME, default=0): cv.positive_int,
vol.Optional(CONF_TIME_UNIT, default=TIME_SECONDS): vol.All(
vol.Upper, vol.In(TIME_UNITS)
),
}
)
def __call__(self, call):
"""Execute service call."""
address_connection = self.get_address_connection(call)
keys = [[False] * 8 for i in range(4)]
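        # Worked example (illustrative): CONF_KEYS == "A1C8" yields the pairs
        # ("A", "1") and ("C", "8"); ord("A") - 65 == 0 and ord("C") - 65 == 2,
        # so keys[0][0] and keys[2][7] are set to True.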
key_strings = zip(call.data[CONF_KEYS][::2], call.data[CONF_KEYS][1::2])
for table, key in key_strings:
table_id = ord(table) - 65
key_id = int(key) - 1
keys[table_id][key_id] = True
delay_time = call.data[CONF_TIME]
if delay_time != 0:
hit = pypck.lcn_defs.SendKeyCommand.HIT
if pypck.lcn_defs.SendKeyCommand[call.data[CONF_STATE]] != hit:
raise ValueError(
"Only hit command is allowed when sending deferred keys."
)
delay_unit = pypck.lcn_defs.TimeUnit.parse(call.data[CONF_TIME_UNIT])
address_connection.send_keys_hit_deferred(keys, delay_time, delay_unit)
else:
state = pypck.lcn_defs.SendKeyCommand[call.data[CONF_STATE]]
address_connection.send_keys(keys, state)
class LockKeys(LcnServiceCall):
"""Lock keys."""
schema = LcnServiceCall.schema.extend(
{
vol.Optional(CONF_TABLE, default="a"): vol.All(
vol.Upper, cv.matches_regex(r"^[A-D]$")
),
vol.Required(CONF_STATE): is_key_lock_states_string,
vol.Optional(CONF_TIME, default=0): cv.positive_int,
vol.Optional(CONF_TIME_UNIT, default=TIME_SECONDS): vol.All(
vol.Upper, vol.In(TIME_UNITS)
),
}
)
def __call__(self, call):
"""Execute service call."""
address_connection = self.get_address_connection(call)
states = [
pypck.lcn_defs.KeyLockStateModifier[state]
for state in call.data[CONF_STATE]
]
table_id = ord(call.data[CONF_TABLE]) - 65
delay_time = call.data[CONF_TIME]
if delay_time != 0:
if table_id != 0:
raise ValueError(
"Only table A is allowed when locking keys for a specific time."
)
delay_unit = pypck.lcn_defs.TimeUnit.parse(call.data[CONF_TIME_UNIT])
address_connection.lock_keys_tab_a_temporary(delay_time, delay_unit, states)
else:
address_connection.lock_keys(table_id, states)
address_connection.request_status_locked_keys_timeout()
class DynText(LcnServiceCall):
"""Send dynamic text to LCN-GTxD displays."""
schema = LcnServiceCall.schema.extend(
{
vol.Required(CONF_ROW): vol.All(int, vol.Range(min=1, max=4)),
vol.Required(CONF_TEXT): vol.All(str, vol.Length(max=60)),
}
)
def __call__(self, call):
"""Execute service call."""
row_id = call.data[CONF_ROW] - 1
text = call.data[CONF_TEXT]
address_connection = self.get_address_connection(call)
address_connection.dyn_text(row_id, text)
class Pck(LcnServiceCall):
"""Send arbitrary PCK command."""
schema = LcnServiceCall.schema.extend({vol.Required(CONF_PCK): str})
def __call__(self, call):
"""Execute service call."""
pck = call.data[CONF_PCK]
address_connection = self.get_address_connection(call)
address_connection.pck(pck)
|
from test import CollectorTestCase
from test import get_collector_config
from openstackswift import OpenstackSwiftCollector
class TestOpenstackSwiftCollector(CollectorTestCase):
def setUp(self, allowed_names=None):
if not allowed_names:
allowed_names = []
config = get_collector_config('OpenstackSwiftCollector', {
'allowed_names': allowed_names,
'interval': 1
})
self.collector = OpenstackSwiftCollector(config, None)
def test_import(self):
self.assertTrue(OpenstackSwiftCollector)
|
from test import CollectorTestCase
from test import get_collector_config
from mock import patch
from diamond.collector import Collector
from endecadgraph import EndecaDgraphCollector
class TestEndecaDgraphCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('EndecaDgraphCollector', {
})
self.collector = EndecaDgraphCollector(config, None)
def test_import(self):
self.assertTrue(EndecaDgraphCollector)
@patch('urllib2.urlopen')
@patch.object(Collector, 'publish')
def test_real_data(self, publish_mock, urlopen_mock):
urlopen_mock.return_value = self.getFixture('data1.xml')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
urlopen_mock.return_value = self.getFixture('data2.xml')
self.collector.collect()
# assert with a random selection (instead of 1000+)
metrics = {
'statistics.cache_section.main_cache.'
'aggregatedrecordcount.entry_count': 3957,
'statistics.cache_section.main_cache.'
'dval_bincount.entry_count': 4922448,
'statistics.hot_spot_analysis.'
'content_spotlighting_performance.min': 0.0209961,
'statistics.hot_spot_analysis.'
'insertion_sort_time.avg': 0.00523964,
'statistics.hot_spot_analysis.'
'ordinal_insertion_sort_time.n': 1484793,
'statistics.search_performance_analysis.'
'qconj_lookupphr.min': 0.000976562,
'statistics.updates.update_latency.'
'commit.audit_stat_calculation_time_resume_.n': 0,
}
self.assertPublishedMany(publish_mock, metrics)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
|
import json
import os
from griddypower.async_api import GriddyPriceData
from homeassistant.components.griddy import CONF_LOADZONE, DOMAIN
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import load_fixture
async def _load_json_fixture(hass, path):
fixture = await hass.async_add_executor_job(
load_fixture, os.path.join("griddy", path)
)
return json.loads(fixture)
def _mock_get_config():
"""Return a default griddy config."""
return {DOMAIN: {CONF_LOADZONE: "LZ_HOUSTON"}}
async def test_houston_loadzone(hass):
"""Test creation of the houston load zone."""
getnow_json = await _load_json_fixture(hass, "getnow.json")
griddy_price_data = GriddyPriceData(getnow_json)
with patch(
"homeassistant.components.griddy.AsyncGriddy.async_getnow",
return_value=griddy_price_data,
):
assert await async_setup_component(hass, DOMAIN, _mock_get_config())
await hass.async_block_till_done()
sensor_lz_houston_price_now = hass.states.get("sensor.lz_houston_price_now")
assert sensor_lz_houston_price_now.state == "1.269"
|
from cerberus import Validator
from cerberus.tests import assert_normalized
def test_rename():
assert_normalized(
schema={'foo': {'rename': 'bar'}, 'bar': {}},
document={'foo': 0},
expected={'bar': 0},
)
def test_rename_handler_in_allow_unknown():
assert_normalized(
schema={},
document={'0': 'foo'},
expected={0: 'foo'},
validator=Validator(allow_unknown={'rename_handler': int}),
)
|
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_DOOR,
DEVICE_CLASS_HEAT,
DEVICE_CLASS_MOISTURE,
DEVICE_CLASS_MOTION,
DEVICE_CLASS_SMOKE,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from .const import DOMAIN
from .devolo_device import DevoloDeviceEntity
DEVICE_CLASS_MAPPING = {
"Water alarm": DEVICE_CLASS_MOISTURE,
"Home Security": DEVICE_CLASS_MOTION,
"Smoke Alarm": DEVICE_CLASS_SMOKE,
"Heat Alarm": DEVICE_CLASS_HEAT,
"door": DEVICE_CLASS_DOOR,
}
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Get all binary sensor and multi level sensor devices and setup them via config entry."""
entities = []
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]:
for device in gateway.binary_sensor_devices:
for binary_sensor in device.binary_sensor_property:
entities.append(
DevoloBinaryDeviceEntity(
homecontrol=gateway,
device_instance=device,
element_uid=binary_sensor,
)
)
for device in gateway.devices.values():
if hasattr(device, "remote_control_property"):
for remote in device.remote_control_property:
for index in range(
1, device.remote_control_property[remote].key_count + 1
):
entities.append(
DevoloRemoteControl(
homecontrol=gateway,
device_instance=device,
element_uid=remote,
key=index,
)
)
async_add_entities(entities, False)
class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
"""Representation of a binary sensor within devolo Home Control."""
def __init__(self, homecontrol, device_instance, element_uid):
"""Initialize a devolo binary sensor."""
self._binary_sensor_property = device_instance.binary_sensor_property.get(
element_uid
)
super().__init__(
homecontrol=homecontrol,
device_instance=device_instance,
element_uid=element_uid,
)
self._device_class = DEVICE_CLASS_MAPPING.get(
self._binary_sensor_property.sub_type
or self._binary_sensor_property.sensor_type
)
if self._device_class is None:
if device_instance.binary_sensor_property.get(element_uid).sub_type != "":
self._name += f" {device_instance.binary_sensor_property.get(element_uid).sub_type}"
else:
self._name += f" {device_instance.binary_sensor_property.get(element_uid).sensor_type}"
self._value = self._binary_sensor_property.state
if element_uid.startswith("devolo.WarningBinaryFI:"):
self._enabled_default = False
@property
def is_on(self):
"""Return the state."""
return self._value
@property
def device_class(self):
"""Return device class."""
return self._device_class
class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
"""Representation of a remote control within devolo Home Control."""
def __init__(self, homecontrol, device_instance, element_uid, key):
"""Initialize a devolo remote control."""
self._remote_control_property = device_instance.remote_control_property.get(
element_uid
)
super().__init__(
homecontrol=homecontrol,
device_instance=device_instance,
element_uid=f"{element_uid}_{key}",
)
self._key = key
self._state = False
@property
def is_on(self):
"""Return the state."""
return self._state
def _sync(self, message):
"""Update the binary sensor state."""
if (
message[0] == self._remote_control_property.element_uid
and message[1] == self._key
):
self._state = True
elif (
message[0] == self._remote_control_property.element_uid and message[1] == 0
):
self._state = False
else:
self._generic_message(message)
self.schedule_update_ha_state()
|
import pytest
from homeassistant.components.config import area_registry
from tests.common import mock_area_registry
@pytest.fixture
def client(hass, hass_ws_client):
"""Fixture that can interact with the config manager API."""
hass.loop.run_until_complete(area_registry.async_setup(hass))
yield hass.loop.run_until_complete(hass_ws_client(hass))
@pytest.fixture
def registry(hass):
"""Return an empty, loaded, registry."""
return mock_area_registry(hass)
async def test_list_areas(hass, client, registry):
"""Test list entries."""
registry.async_create("mock 1")
registry.async_create("mock 2")
await client.send_json({"id": 1, "type": "config/area_registry/list"})
msg = await client.receive_json()
assert len(msg["result"]) == len(registry.areas)
async def test_create_area(hass, client, registry):
"""Test create entry."""
await client.send_json(
{"id": 1, "name": "mock", "type": "config/area_registry/create"}
)
msg = await client.receive_json()
assert "mock" in msg["result"]["name"]
assert len(registry.areas) == 1
async def test_create_area_with_name_already_in_use(hass, client, registry):
"""Test create entry that should fail."""
registry.async_create("mock")
await client.send_json(
{"id": 1, "name": "mock", "type": "config/area_registry/create"}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "invalid_info"
assert msg["error"]["message"] == "Name is already in use"
assert len(registry.areas) == 1
async def test_delete_area(hass, client, registry):
"""Test delete entry."""
area = registry.async_create("mock")
await client.send_json(
{"id": 1, "area_id": area.id, "type": "config/area_registry/delete"}
)
msg = await client.receive_json()
assert msg["success"]
assert not registry.areas
async def test_delete_non_existing_area(hass, client, registry):
"""Test delete entry that should fail."""
registry.async_create("mock")
await client.send_json(
{"id": 1, "area_id": "", "type": "config/area_registry/delete"}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "invalid_info"
assert msg["error"]["message"] == "Area ID doesn't exist"
assert len(registry.areas) == 1
async def test_update_area(hass, client, registry):
"""Test update entry."""
area = registry.async_create("mock 1")
await client.send_json(
{
"id": 1,
"area_id": area.id,
"name": "mock 2",
"type": "config/area_registry/update",
}
)
msg = await client.receive_json()
assert msg["result"]["area_id"] == area.id
assert msg["result"]["name"] == "mock 2"
assert len(registry.areas) == 1
async def test_update_area_with_same_name(hass, client, registry):
"""Test update entry."""
area = registry.async_create("mock 1")
await client.send_json(
{
"id": 1,
"area_id": area.id,
"name": "mock 1",
"type": "config/area_registry/update",
}
)
msg = await client.receive_json()
assert msg["result"]["area_id"] == area.id
assert msg["result"]["name"] == "mock 1"
assert len(registry.areas) == 1
async def test_update_area_with_name_already_in_use(hass, client, registry):
"""Test update entry."""
area = registry.async_create("mock 1")
registry.async_create("mock 2")
await client.send_json(
{
"id": 1,
"area_id": area.id,
"name": "mock 2",
"type": "config/area_registry/update",
}
)
msg = await client.receive_json()
assert not msg["success"]
assert msg["error"]["code"] == "invalid_info"
assert msg["error"]["message"] == "Name is already in use"
assert len(registry.areas) == 2
|
import unittest
import os
import json
from unittest.mock import patch
import threading
from test.support import EnvironmentVarGuard
from urllib.parse import urlparse
from http.server import BaseHTTPRequestHandler, HTTPServer
from google.cloud import bigquery
from google.auth.exceptions import DefaultCredentialsError
from google.cloud.bigquery._http import Connection
from kaggle_gcp import KaggleKernelCredentials, PublicBigqueryClient, init_bigquery
import kaggle_secrets
class TestBigQuery(unittest.TestCase):
def _test_proxy(self, client):
class HTTPHandler(BaseHTTPRequestHandler):
called = False
proxy_header_found = False
def do_HEAD(self):
self.send_response(200)
def do_GET(self):
HTTPHandler.called = True
HTTPHandler.proxy_header_found = any(
k for k in self.headers if k == "X-KAGGLE-PROXY-DATA" and self.headers[k] == "test-key")
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
sample_dataset = {
"id": "bigqueryproject:datasetname",
"datasetReference": {
"datasetId": "datasetname",
"projectId": "bigqueryproject"
}
}
self.wfile.write(json.dumps({"kind": "bigquery#datasetList", "datasets": [sample_dataset]}).encode("utf-8"))
server_address = urlparse(os.getenv('KAGGLE_DATA_PROXY_URL'))
with HTTPServer((server_address.hostname, server_address.port), HTTPHandler) as httpd:
threading.Thread(target=httpd.serve_forever).start()
for dataset in client.list_datasets():
self.assertEqual(dataset.dataset_id, "datasetname")
httpd.shutdown()
self.assertTrue(
HTTPHandler.called, msg="Fake server was not called from the BQ client, but should have been.")
self.assertTrue(
HTTPHandler.proxy_header_found, msg="X-KAGGLE-PROXY-DATA header was missing from the BQ proxy request.")
def test_proxy_using_library(self):
env = EnvironmentVarGuard()
env.unset('KAGGLE_USER_SECRETS_TOKEN')
with env:
client = PublicBigqueryClient()
self._test_proxy(client)
def test_proxy_no_project(self):
env = EnvironmentVarGuard()
env.unset('KAGGLE_USER_SECRETS_TOKEN')
with env:
client = bigquery.Client()
self._test_proxy(client)
def test_monkeypatching_idempotent(self):
env = EnvironmentVarGuard()
env.unset('KAGGLE_USER_SECRETS_TOKEN')
with env:
client1 = bigquery.Client
init_bigquery()
client2 = bigquery.Client
self.assertEqual(client1, client2)
def test_proxy_with_kwargs(self):
env = EnvironmentVarGuard()
env.unset('KAGGLE_USER_SECRETS_TOKEN')
with env:
client = bigquery.Client(
default_query_job_config=bigquery.QueryJobConfig(maximum_bytes_billed=int(1e9)))
self._test_proxy(client)
|
import os
from babelfish import Language, language_converters
import pytest
from vcr import VCR
from subliminal.exceptions import AuthenticationError, ConfigurationError
from subliminal.providers.addic7ed import Addic7edProvider, Addic7edSubtitle, series_year_re
vcr = VCR(path_transformer=lambda path: path + '.yaml',
record_mode=os.environ.get('VCR_RECORD_MODE', 'once'),
match_on=['method', 'scheme', 'host', 'port', 'path', 'query', 'body'],
cassette_library_dir=os.path.join('tests', 'cassettes', 'addic7ed'))
@pytest.mark.converter
def test_converter_convert_alpha3_country_script():
assert language_converters['addic7ed'].convert('srp', None, 'Cyrl') == 'Serbian (Cyrillic)'
@pytest.mark.converter
def test_converter_convert_alpha3_country():
assert language_converters['addic7ed'].convert('por', 'BR') == 'Portuguese (Brazilian)'
@pytest.mark.converter
def test_converter_convert_alpha3():
assert language_converters['addic7ed'].convert('eus') == 'Euskera'
@pytest.mark.converter
def test_converter_convert_alpha3_name_converter():
assert language_converters['addic7ed'].convert('fra') == 'French'
@pytest.mark.converter
def test_converter_reverse():
assert language_converters['addic7ed'].reverse('Chinese (Traditional)') == ('zho',)
@pytest.mark.converter
def test_converter_reverse_name_converter():
assert language_converters['addic7ed'].reverse('English') == ('eng', None, None)
def test_series_year_re():
match = series_year_re.match('That\'s: A-series.name!? (US) (2016)')
assert match
assert match.group('series') == 'That\'s: A-series.name!? (US)'
assert int(match.group('year')) == 2016
def test_get_matches_release_group(episodes):
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity',
2007, 'DIMENSION', None)
matches = subtitle.get_matches(episodes['bbt_s07e05'])
assert matches == {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group'}
def test_get_matches_equivalent_release_group(episodes):
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity',
2007, 'LOL', None)
matches = subtitle.get_matches(episodes['bbt_s07e05'])
assert matches == {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group'}
def test_get_matches_resolution_release_group(episodes):
subtitle = Addic7edSubtitle(Language('heb'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity',
2007, '720PDIMENSION', None)
matches = subtitle.get_matches(episodes['bbt_s07e05'])
assert matches == {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'resolution'}
def test_get_matches_source_release_group(episodes):
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'Game of Thrones', 3, 10, 'Mhysa', None, 'WEB-DL-NTb',
None)
matches = subtitle.get_matches(episodes['got_s03e10'])
assert matches == {'series', 'season', 'episode', 'title', 'year', 'country', 'release_group', 'source'}
def test_get_matches_streaming_service(episodes):
subtitle = Addic7edSubtitle(Language('nld'), True, None, 'The Walking Dead', 8, 7, None, None,
'AMZN.WEB-DL-CasStudio', None)
matches = subtitle.get_matches(episodes['walking_dead_s08e07'])
assert matches == {'series', 'season', 'episode', 'year', 'country', 'release_group', 'streaming_service', 'source'}
def test_get_matches_only_year_country(episodes):
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity',
None, 'DIMENSION', None)
matches = subtitle.get_matches(episodes['got_s03e10'])
assert matches == {'year', 'country'}
def test_get_matches_no_match(episodes):
subtitle = Addic7edSubtitle(Language('eng'), True, None, 'The Big Bang Theory', 7, 5, 'The Workplace Proximity',
2007, 'DIMENSION', None)
matches = subtitle.get_matches(episodes['house_of_cards_us_s06e01'])
assert matches == set()
def test_configuration_error_no_username():
with pytest.raises(ConfigurationError):
Addic7edProvider(password='subliminal')
def test_configuration_error_no_password():
with pytest.raises(ConfigurationError):
Addic7edProvider(username='subliminal')
@pytest.mark.integration
@vcr.use_cassette
def test_login():
provider = Addic7edProvider('subliminal', 'subliminal')
assert provider.logged_in is False
provider.initialize()
assert provider.logged_in is True
r = provider.session.get(provider.server_url + 'panel.php', allow_redirects=False)
assert r.status_code == 200
@pytest.mark.integration
@vcr.use_cassette
def test_login_bad_password():
provider = Addic7edProvider('subliminal', 'lanimilbus')
with pytest.raises(AuthenticationError):
provider.initialize()
@pytest.mark.integration
@vcr.use_cassette
def test_logout():
provider = Addic7edProvider('subliminal', 'subliminal')
provider.initialize()
provider.terminate()
assert provider.logged_in is False
r = provider.session.get(provider.server_url + 'panel.php', allow_redirects=False)
assert r.status_code == 302
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('The Big Bang Theory')
assert show_id == 126
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id_incomplete():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('The Big Bang')
assert show_id is None
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id_no_year():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('Dallas')
assert show_id == 802
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id_year():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('Dallas', 2012)
assert show_id == 2559
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id_error():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('The Big How I Met Your Mother')
assert show_id is None
@pytest.mark.integration
@vcr.use_cassette
def test_search_show_id_quote():
with Addic7edProvider() as provider:
show_id = provider._search_show_id('Grey\'s Anatomy')
assert show_id == 30
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'the big bang theory' in show_ids
assert show_ids['the big bang theory'] == 126
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids_no_year():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'dallas' in show_ids
assert show_ids['dallas'] == 802
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids_year():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'dallas 2012' in show_ids
assert show_ids['dallas 2012'] == 2559
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids_dot():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'mr robot' in show_ids
assert show_ids['mr robot'] == 5151
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids_country():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'being human us' in show_ids
assert show_ids['being human us'] == 1317
@pytest.mark.integration
@vcr.use_cassette('test_get_show_ids')
def test_get_show_ids_quote():
with Addic7edProvider() as provider:
show_ids = provider._get_show_ids()
assert 'marvels agents of s h i e l d' in show_ids
assert show_ids['marvels agents of s h i e l d'] == 4010
@pytest.mark.integration
@vcr.use_cassette
def test_get_show_id_quote_dots_mixed_case(episodes):
video = episodes['marvels_agents_of_shield_s02e06']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series)
assert show_id == 4010
@pytest.mark.integration
@vcr.use_cassette
def test_get_show_id_with_comma(episodes):
video = episodes['alex_inc_s01e04']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series)
assert show_id == 6388
@pytest.mark.integration
@vcr.use_cassette
def test_get_show_id_country():
with Addic7edProvider() as provider:
show_id = provider.get_show_id('Being Human', country_code='US')
assert show_id == 1317
@pytest.mark.integration
@vcr.use_cassette
def test_get_show_id_year():
with Addic7edProvider() as provider:
show_id = provider.get_show_id('Dallas', year=2012)
assert show_id == 2559
@pytest.mark.integration
@vcr.use_cassette
def test_get_show_id():
with Addic7edProvider() as provider:
show_id = provider.get_show_id('Dallas')
assert show_id == 802
@pytest.mark.integration
@vcr.use_cassette
def test_query(episodes):
video = episodes['bbt_s07e05']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season, video.year)
assert len(subtitles) == 474
for subtitle in subtitles:
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.year is None
@pytest.mark.integration
@vcr.use_cassette
def test_query_wrong_series(episodes):
video = episodes['bbt_s07e05']
with Addic7edProvider() as provider:
subtitles = provider.query(0, video.series[:12], video.season, video.year)
assert len(subtitles) == 0
@pytest.mark.integration
@vcr.use_cassette
def test_query_parsing(episodes):
video = episodes['got_s03e10']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season)
subtitle = [s for s in subtitles if s.download_link == 'updated/1/76311/1'][0]
assert subtitle.language == Language('eng')
assert subtitle.hearing_impaired is True
assert subtitle.page_link == 'http://www.addic7ed.com/serie/Game_of_Thrones/3/10/Mhysa'
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.episode == video.episode
assert subtitle.title == video.title
assert subtitle.year == video.year
assert subtitle.version == 'EVOLVE'
@pytest.mark.integration
@vcr.use_cassette
def test_query_parsing_quote_dots_mixed_case(episodes):
video = episodes['marvels_agents_of_shield_s02e06']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season)
subtitle = [s for s in subtitles if s.download_link == 'updated/10/93279/9'][0]
assert subtitle.language == Language('por', country='BR')
assert subtitle.hearing_impaired is False
assert subtitle.page_link == 'http://www.addic7ed.com/serie/Marvel%27s_Agents_of_S.H.I.E.L.D./2/6/A_Fractured_House'
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.episode == video.episode
assert subtitle.version == 'KILLERS'
@pytest.mark.integration
@vcr.use_cassette
def test_query_parsing_colon(episodes):
video = episodes['csi_cyber_s02e03']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season)
subtitle = [s for s in subtitles if s.download_link == 'updated/1/105111/2'][0]
assert subtitle.language == Language('eng')
assert subtitle.hearing_impaired is False
assert subtitle.page_link == 'http://www.addic7ed.com/serie/CSI%3A_Cyber/2/3/Brown_Eyes%2C_Blue_Eyes'
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.episode == video.episode
assert subtitle.version == 'DIMENSION'
@pytest.mark.integration
@vcr.use_cassette
def test_query_parsing_dash(episodes):
video = episodes['the_x_files_s10e02']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season)
subtitle = [s for s in subtitles if s.download_link == 'updated/8/108202/21'][0]
assert subtitle.language == Language('fra')
assert subtitle.hearing_impaired is False
assert subtitle.page_link == 'http://www.addic7ed.com/serie/The_X-Files/10/2/Founder%27s_Mutation'
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.episode == video.episode
assert subtitle.version == 'KILLERS'
@pytest.mark.integration
@vcr.use_cassette
def test_query_year(episodes):
video = episodes['dallas_2012_s01e03']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season, video.year)
assert len(subtitles) == 123
for subtitle in subtitles:
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.year == video.year
@pytest.mark.integration
@vcr.use_cassette
def test_query_no_year(episodes):
video = episodes['dallas_s01e03']
with Addic7edProvider() as provider:
show_id = provider.get_show_id(video.series, video.year)
subtitles = provider.query(show_id, video.series, video.season)
assert len(subtitles) == 7
for subtitle in subtitles:
assert subtitle.series == video.series
assert subtitle.season == video.season
assert subtitle.year is None
@pytest.mark.integration
@vcr.use_cassette
def test_list_subtitles(episodes):
video = episodes['bbt_s07e05']
languages = {Language('deu'), Language('fra')}
expected_subtitles = {'updated/8/80254/1', 'updated/11/80254/5'}
with Addic7edProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
assert {subtitle.download_link for subtitle in subtitles} == expected_subtitles
assert {subtitle.language for subtitle in subtitles} == languages
@pytest.mark.integration
@vcr.use_cassette
def test_download_subtitle(episodes):
video = episodes['bbt_s07e05']
languages = {Language('fra')}
with Addic7edProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
provider.download_subtitle(subtitles[0])
assert subtitles[0].content is not None
assert subtitles[0].is_valid() is True
@pytest.mark.integration
@vcr.use_cassette
def test_list_subtitles_episode_alternative_series(episodes):
video = episodes['turn_s04e03']
languages = {Language('eng')}
expected_subtitles = {'updated/1/125243/0', 'updated/1/125243/1'}
with Addic7edProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
matches = subtitles[0].get_matches(episodes['turn_s04e03'])
assert {subtitle.download_link for subtitle in subtitles} == expected_subtitles
assert {subtitle.language for subtitle in subtitles} == languages
assert matches == {'episode', 'title', 'series', 'season', 'year', 'country', 'release_group'}
@pytest.mark.integration
@vcr.use_cassette
def test_show_with_asterisk(episodes):
video = episodes['the_end_of_the_fucking_world']
languages = {Language('eng')}
expected_subtitles = {u'updated/1/129156/0', u'updated/1/129156/2', u'updated/1/129156/3'}
with Addic7edProvider() as provider:
subtitles = provider.list_subtitles(video, languages)
matches = subtitles[0].get_matches(episodes['the_end_of_the_fucking_world'])
assert {subtitle.download_link for subtitle in subtitles} == expected_subtitles
assert {subtitle.language for subtitle in subtitles} == languages
assert matches == {'year', 'country', 'series', 'episode', 'season'}
|
import datetime
import logging
from typing import Sequence
from typing import Tuple
import pysensu_yelp
from paasta_tools import flink_tools
from paasta_tools.check_services_replication_tools import main
from paasta_tools.flink_tools import FlinkDeploymentConfig
from paasta_tools.kubernetes_tools import filter_pods_by_service_instance
from paasta_tools.kubernetes_tools import is_pod_ready
from paasta_tools.kubernetes_tools import V1Pod
from paasta_tools.monitoring_tools import check_under_replication
from paasta_tools.monitoring_tools import send_replication_event
from paasta_tools.smartstack_tools import KubeSmartstackEnvoyReplicationChecker
from paasta_tools.utils import is_under_replicated
log = logging.getLogger(__name__)
def container_lifetime(pod: V1Pod,) -> datetime.timedelta:
"""Return a time duration for how long the pod is alive
"""
st = pod.status.start_time
return datetime.datetime.now(st.tzinfo) - st
def healthy_flink_containers_cnt(si_pods: Sequence[V1Pod], container_type: str) -> int:
"""Return count of healthy Flink containers with given type
"""
return len(
[
pod
for pod in si_pods
if pod.metadata.labels["flink.yelp.com/container-type"] == container_type
and is_pod_ready(pod)
and container_lifetime(pod).total_seconds() > 60
]
)
def check_under_registered_taskmanagers(
instance_config: FlinkDeploymentConfig, expected_count: int, cr_name: str,
) -> Tuple[bool, str]:
"""Check if not enough taskmanagers have been registered to the jobmanager and
returns both the result of the check in the form of a boolean and a human-readable
text to be used in logging or monitoring events.
"""
unhealthy = True
if cr_name != "":
try:
overview = flink_tools.get_flink_jobmanager_overview(
cr_name, instance_config.cluster
)
num_reported = overview.get("taskmanagers", 0)
crit_threshold = instance_config.get_replication_crit_percentage()
output = (
f"Service {instance_config.job_id} has "
f"{num_reported} out of {expected_count} expected instances "
f"of taskmanager reported by dashboard!\n"
f"(threshold: {crit_threshold}%)"
)
unhealthy, _ = is_under_replicated(
num_reported, expected_count, crit_threshold
)
except ValueError as e:
output = f"Dashboard of service {instance_config.job_id} is not available!\n({e})"
else:
output = f"Dashboard of service {instance_config.job_id} is not available!\n"
if unhealthy:
output += f"""
What this alert means:
This alert means that the Flink dashboard is not reporting the expected
number of taskmanagers.
Reasons this might be happening:
The service may simply be unhealthy. There also may not be enough resources
in the cluster to support the requested instance count.
Things you can do:
* Fix the cause of the unhealthy service. Try running:
paasta status -s {instance_config.service} -i {instance_config.instance} -c {instance_config.cluster} -vv
"""
return (unhealthy, output)
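# Illustrative example (hypothetical numbers): with 10 expected taskmanagers,
# a dashboard reporting only 8 and a replication critical threshold of 90%,
# the check is flagged unhealthy, assuming is_under_replicated compares the
# available/expected ratio (80%) against the percentage threshold:
#
#   unhealthy, _ = is_under_replicated(8, 10, 90)  # -> (True, 80.0)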
def get_cr_name(si_pods: Sequence[V1Pod]) -> str:
"""Returns the flink custom resource name based on the pod name. We are randomly choosing jobmanager pod here.
This change is related to FLINK-3129
"""
jobmanager_pod = [
pod
for pod in si_pods
if pod.metadata.labels["flink.yelp.com/container-type"] == "jobmanager"
and is_pod_ready(pod)
and container_lifetime(pod).total_seconds() > 60
]
if len(jobmanager_pod) == 1:
return jobmanager_pod[0].metadata.name.split("-jobmanager-")[0]
else:
return ""
def check_flink_service_health(
instance_config: FlinkDeploymentConfig,
all_tasks_or_pods: Sequence[V1Pod],
replication_checker: KubeSmartstackEnvoyReplicationChecker,
) -> None:
si_pods = filter_pods_by_service_instance(
pod_list=all_tasks_or_pods,
service=instance_config.service,
instance=instance_config.instance,
)
taskmanagers_expected_cnt = instance_config.config_dict.get(
"taskmanager", {"instances": 10}
).get("instances", 10)
num_healthy_supervisors = healthy_flink_containers_cnt(si_pods, "supervisor")
num_healthy_jobmanagers = healthy_flink_containers_cnt(si_pods, "jobmanager")
num_healthy_taskmanagers = healthy_flink_containers_cnt(si_pods, "taskmanager")
service_cr_name = get_cr_name(si_pods)
results = [
check_under_replication(
instance_config=instance_config,
expected_count=1,
num_available=num_healthy_supervisors,
sub_component="supervisor",
),
check_under_replication(
instance_config=instance_config,
expected_count=1,
num_available=num_healthy_jobmanagers,
sub_component="jobmanager",
),
check_under_replication(
instance_config=instance_config,
expected_count=taskmanagers_expected_cnt,
num_available=num_healthy_taskmanagers,
sub_component="taskmanager",
),
check_under_registered_taskmanagers(
instance_config=instance_config,
expected_count=taskmanagers_expected_cnt,
cr_name=service_cr_name,
),
]
output = "\n########\n".join([r[1] for r in results])
if any(r[0] for r in results):
log.error(output)
status = pysensu_yelp.Status.CRITICAL
else:
log.info(output)
status = pysensu_yelp.Status.OK
send_replication_event(
instance_config=instance_config, status=status, output=output
)
if __name__ == "__main__":
main(
flink_tools.FlinkDeploymentConfig,
check_flink_service_health,
namespace="paasta-flinks",
)
|
import pytest
from requests.exceptions import ConnectTimeout, HTTPError
from homeassistant import data_entry_flow
from homeassistant.components.solaredge import config_flow
from homeassistant.components.solaredge.const import CONF_SITE_ID, DEFAULT_NAME
from homeassistant.const import CONF_API_KEY, CONF_NAME
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
NAME = "solaredge site 1 2 3"
SITE_ID = "1a2b3c4d5e6f7g8h"
API_KEY = "a1b2c3d4e5f6g7h8"
@pytest.fixture(name="test_api")
def mock_controller():
"""Mock a successful Solaredge API."""
api = Mock()
api.get_details.return_value = {"details": {"status": "active"}}
with patch("solaredge.Solaredge", return_value=api):
yield api
def init_config_flow(hass):
"""Init a configuration flow."""
flow = config_flow.SolarEdgeConfigFlow()
flow.hass = hass
return flow
async def test_user(hass, test_api):
"""Test user config."""
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# test with all fields provided
result = await flow.async_step_user(
{CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solaredge_site_1_2_3"
assert result["data"][CONF_SITE_ID] == SITE_ID
assert result["data"][CONF_API_KEY] == API_KEY
async def test_import(hass, test_api):
"""Test import step."""
flow = init_config_flow(hass)
# import with site_id and api_key
result = await flow.async_step_import(
{CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solaredge"
assert result["data"][CONF_SITE_ID] == SITE_ID
assert result["data"][CONF_API_KEY] == API_KEY
# import with all
result = await flow.async_step_import(
{CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID, CONF_NAME: NAME}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solaredge_site_1_2_3"
assert result["data"][CONF_SITE_ID] == SITE_ID
assert result["data"][CONF_API_KEY] == API_KEY
async def test_abort_if_already_setup(hass, test_api):
"""Test we abort if the site_id is already setup."""
flow = init_config_flow(hass)
MockConfigEntry(
domain="solaredge",
data={CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY},
).add_to_hass(hass)
# import: Should fail, same SITE_ID
result = await flow.async_step_import(
{CONF_NAME: DEFAULT_NAME, CONF_SITE_ID: SITE_ID, CONF_API_KEY: API_KEY}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "site_exists"
# user: Should fail, same SITE_ID
result = await flow.async_step_user(
{CONF_NAME: "test", CONF_SITE_ID: SITE_ID, CONF_API_KEY: "test"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_SITE_ID: "site_exists"}
async def test_asserts(hass, test_api):
"""Test the _site_in_configuration_exists method."""
flow = init_config_flow(hass)
# test with inactive site
test_api.get_details.return_value = {"details": {"status": "NOK"}}
result = await flow.async_step_user(
{CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_SITE_ID: "site_not_active"}
# test with api_failure
test_api.get_details.return_value = {}
result = await flow.async_step_user(
{CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_SITE_ID: "api_failure"}
# test with ConnectTimeout
test_api.get_details.side_effect = ConnectTimeout()
result = await flow.async_step_user(
{CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
# test with HTTPError
test_api.get_details.side_effect = HTTPError()
result = await flow.async_step_user(
{CONF_NAME: NAME, CONF_API_KEY: API_KEY, CONF_SITE_ID: SITE_ID}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_SITE_ID: "could_not_connect"}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import glibc
import six
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'glibc'
BENCHMARK_CONFIG = """
glibc:
description: Runs Glibc Microbenchmark.
vm_groups:
default:
vm_spec: *default_dual_core
vm_count: null
os_type: ubuntu1804
"""
glibc_default_benchset = ['bench-math',
'bench-pthread',
'bench-string',
'string-benchset',
'wcsmbs-benchset',
'stdlib-benchset',
'stdio-common-benchset',
'math-benchset',
'malloc-thread']
flags.DEFINE_multi_enum(
'glibc_benchset', glibc_default_benchset, glibc_default_benchset,
'By default, the whole set of benchmarks is run. To run only a subset of '
'benchmarks, pass the flag multiple times on the command line, e.g. '
'"--glibc_benchset=bench-math --glibc_benchset=bench-pthread".')
GLIBC_BENCH = ['bench-math', 'bench-pthread', 'bench-string']
GLIBC_BENCH_MALLOC = ['malloc-thread']
# TODO(user): Parse other *-benchset benchmarks.
GLIBC_MATH_BENCHSET = ['math-benchset']
RESULTS_DIR = '%s/glibc/glibc-build/benchtests' % linux_packages.INSTALL_DIR
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def PrepareGlibc(vm):
"""Building glibc on a single vm."""
logging.info('Installing Glibc on %s', vm)
vm.Install('glibc')
def Prepare(benchmark_spec):
"""Install glibc on the target vms.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
"""
vms = benchmark_spec.vms
vm = vms[0]
PrepareGlibc(vm)
def GetCommonMetadata(benchmark_spec):
"""Update metadata with glibc-related flag values."""
metadata = dict()
metadata['gcc'] = glibc.GetGccVersion(benchmark_spec.vms[0])
metadata['glibc_benchset'] = FLAGS.glibc_benchset
metadata['glibc_version'] = glibc.GLIBC_VERSION
metadata['num_machines'] = len(benchmark_spec.vms)
return metadata
# This helper exists because the glibc benchmark JSON output
# (glibc_benchset_output.json) can contain duplicate keys.
def HelperParseOutput(lst):
"""Self defined parameter function for 'object_pairs_hook' of json.loads().
Purpose:
This function helps to parse json text with duplicate keys, for instance:
json text: {"key":1, "key":2, "key2":3, "key2":4}
With this function, we will have the following dict:
{"key":[1, 2],
"key2":[3, 4]
}
Args:
lst: A list of tuples contains the output of benchmark tests.
Returns:
A dict contains the output of benchmark tests.
"""
result = {}
for key, val in lst:
if key in result:
if isinstance(result[key], list):
result[key].append(val)
else:
result[key] = [result[key], val]
else:
result[key] = val
return result
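# Illustrative usage: passing this helper as the object_pairs_hook merges the
# values of duplicate keys into lists, e.g.
#
#   json.loads('{"key": 1, "key": 2, "key2": 3, "key2": 4}',
#              object_pairs_hook=HelperParseOutput)
#   # -> {"key": [1, 2], "key2": [3, 4]}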
def ParseOutput(glibc_output, upper_key, results, metadata):
"""Parses the output from glibc.
Args:
glibc_output: A json format string containing the output of benchmark tests.
upper_key: The first dimension key of the glibc_output dict.
results: A list to which this function will append new samples based on the
glibc output. (in the same format as Run() returns).
metadata: Common metadata to attach to samples.
"""
jsondata = json.loads(glibc_output, object_pairs_hook=HelperParseOutput)
for function, items in six.iteritems(jsondata[upper_key]):
# handle the jsondata with duplicate keys
if isinstance(items, list):
for item in items:
current_metadata = metadata.copy()
for key, val in six.iteritems(item):
metric = '{0}:{1}'.format(function, key)
for subitem, value in six.iteritems(val):
current_metadata[subitem] = value
results.append(sample.Sample(metric, -1, '', current_metadata))
# handle the jsondata with unique keys
else:
for item in items:
current_metadata = metadata.copy()
metric = '{0}:{1}'.format(function, item)
for subitem, value in six.iteritems(items[item]):
current_metadata[subitem] = value
results.append(sample.Sample(metric, -1, '', current_metadata))
def Run(benchmark_spec):
"""Run Glibc on the cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
"""
vms = benchmark_spec.vms
vm = vms[0]
results = []
# Ensure that we can get common metadata before running.
metadata = GetCommonMetadata(benchmark_spec)
glibc_user_benchset = ' '.join(FLAGS.glibc_benchset)
vm.RobustRemoteCommand('cd %s/glibc/glibc-build && '
'make bench BENCHSET="%s"' %
(linux_packages.INSTALL_DIR, glibc_user_benchset))
logging.info('Glibc Benchmark Tests Results:')
# Parse the output for "bench-math", "bench-string" and "bench-pthread".
if any(i in GLIBC_BENCH for i in FLAGS.glibc_benchset):
stdout, _ = vm.RemoteCommand(
'cat {0}/bench.out'.format(RESULTS_DIR), should_log=True)
ParseOutput(stdout, 'functions', results, metadata)
# Parse the output for "malloc-thread".
if any(i in GLIBC_BENCH_MALLOC for i in FLAGS.glibc_benchset):
thread_num = ['1', '8', '16', '32']
for num in thread_num:
stdout, _ = vm.RemoteCommand(
'cat {0}/bench-malloc-thread-{1}.out'.format(RESULTS_DIR, num),
should_log=True)
ParseOutput(stdout, 'functions', results, metadata)
# Parse the output for "math-benchset".
if any(i in GLIBC_MATH_BENCHSET for i in FLAGS.glibc_benchset):
stdout, _ = vm.RemoteCommand(
'cat {0}/bench-math-inlines.out'.format(RESULTS_DIR), should_log=True)
ParseOutput('{%s}' % stdout, 'math-inlines', results, metadata)
return results
def Cleanup(unused_benchmark_spec):
"""Cleanup Glibc on the cluster."""
pass
|
import asyncio
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import (
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_STOP,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
ATTR_SOUND_MODE,
DOMAIN,
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOUND_MODE,
SERVICE_SELECT_SOURCE,
)
# mypy: allow-untyped-defs
async def _async_reproduce_states(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
async def call_service(service: str, keys: Iterable) -> None:
"""Call service with set of attributes given."""
data = {"entity_id": state.entity_id}
for key in keys:
if key in state.attributes:
data[key] = state.attributes[key]
await hass.services.async_call(
DOMAIN, service, data, blocking=True, context=context
)
if state.state == STATE_OFF:
await call_service(SERVICE_TURN_OFF, [])
# entities that are off have no other attributes to restore
return
if state.state in [
STATE_ON,
STATE_PLAYING,
STATE_IDLE,
STATE_PAUSED,
]:
await call_service(SERVICE_TURN_ON, [])
if ATTR_MEDIA_VOLUME_LEVEL in state.attributes:
await call_service(SERVICE_VOLUME_SET, [ATTR_MEDIA_VOLUME_LEVEL])
if ATTR_MEDIA_VOLUME_MUTED in state.attributes:
await call_service(SERVICE_VOLUME_MUTE, [ATTR_MEDIA_VOLUME_MUTED])
if ATTR_INPUT_SOURCE in state.attributes:
await call_service(SERVICE_SELECT_SOURCE, [ATTR_INPUT_SOURCE])
if ATTR_SOUND_MODE in state.attributes:
await call_service(SERVICE_SELECT_SOUND_MODE, [ATTR_SOUND_MODE])
already_playing = False
if (ATTR_MEDIA_CONTENT_TYPE in state.attributes) and (
ATTR_MEDIA_CONTENT_ID in state.attributes
):
await call_service(
SERVICE_PLAY_MEDIA,
[ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE],
)
already_playing = True
if state.state == STATE_PLAYING and not already_playing:
await call_service(SERVICE_MEDIA_PLAY, [])
elif state.state == STATE_IDLE:
await call_service(SERVICE_MEDIA_STOP, [])
elif state.state == STATE_PAUSED:
await call_service(SERVICE_MEDIA_PAUSE, [])
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
await asyncio.gather(
*(
_async_reproduce_states(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
|
from pydeconz.utils import normalize_bridge_id
import voluptuous as vol
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.entity_registry import (
async_entries_for_config_entry,
async_entries_for_device,
)
from .config_flow import get_master_gateway
from .const import (
CONF_BRIDGE_ID,
DOMAIN,
LOGGER,
NEW_GROUP,
NEW_LIGHT,
NEW_SCENE,
NEW_SENSOR,
)
DECONZ_SERVICES = "deconz_services"
SERVICE_FIELD = "field"
SERVICE_ENTITY = "entity"
SERVICE_DATA = "data"
SERVICE_CONFIGURE_DEVICE = "configure"
SERVICE_CONFIGURE_DEVICE_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(SERVICE_ENTITY): cv.entity_id,
vol.Optional(SERVICE_FIELD): cv.matches_regex("/.*"),
vol.Required(SERVICE_DATA): dict,
vol.Optional(CONF_BRIDGE_ID): str,
}
),
cv.has_at_least_one_key(SERVICE_ENTITY, SERVICE_FIELD),
)
SERVICE_DEVICE_REFRESH = "device_refresh"
SERVICE_REMOVE_ORPHANED_ENTRIES = "remove_orphaned_entries"
SELECT_GATEWAY_SCHEMA = vol.All(vol.Schema({vol.Optional(CONF_BRIDGE_ID): str}))
async def async_setup_services(hass):
"""Set up services for deCONZ integration."""
if hass.data.get(DECONZ_SERVICES, False):
return
hass.data[DECONZ_SERVICES] = True
async def async_call_deconz_service(service_call):
"""Call correct deCONZ service."""
service = service_call.service
service_data = service_call.data
if service == SERVICE_CONFIGURE_DEVICE:
await async_configure_service(hass, service_data)
elif service == SERVICE_DEVICE_REFRESH:
await async_refresh_devices_service(hass, service_data)
elif service == SERVICE_REMOVE_ORPHANED_ENTRIES:
await async_remove_orphaned_entries_service(hass, service_data)
hass.services.async_register(
DOMAIN,
SERVICE_CONFIGURE_DEVICE,
async_call_deconz_service,
schema=SERVICE_CONFIGURE_DEVICE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_DEVICE_REFRESH,
async_call_deconz_service,
schema=SELECT_GATEWAY_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_REMOVE_ORPHANED_ENTRIES,
async_call_deconz_service,
schema=SELECT_GATEWAY_SCHEMA,
)
async def async_unload_services(hass):
"""Unload deCONZ services."""
if not hass.data.get(DECONZ_SERVICES):
return
hass.data[DECONZ_SERVICES] = False
hass.services.async_remove(DOMAIN, SERVICE_CONFIGURE_DEVICE)
hass.services.async_remove(DOMAIN, SERVICE_DEVICE_REFRESH)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_ORPHANED_ENTRIES)
async def async_configure_service(hass, data):
"""Set attribute of device in deCONZ.
Entity is used to resolve to a device path (e.g. '/lights/1').
Field is a string representing either a full path
(e.g. '/lights/1/state') when entity is not specified, or a
subpath (e.g. '/state') when used together with entity.
Data is a JSON object containing the data you want to alter,
e.g. data={'on': true}.
{
"field": "/lights/1/state",
"data": {"on": true}
}
See Dresden Elektroniks REST API documentation for details:
http://dresden-elektronik.github.io/deconz-rest-doc/rest/
"""
gateway = get_master_gateway(hass)
if CONF_BRIDGE_ID in data:
gateway = hass.data[DOMAIN][normalize_bridge_id(data[CONF_BRIDGE_ID])]
field = data.get(SERVICE_FIELD, "")
entity_id = data.get(SERVICE_ENTITY)
data = data[SERVICE_DATA]
if entity_id:
try:
field = gateway.deconz_ids[entity_id] + field
except KeyError:
LOGGER.error("Could not find the entity %s", entity_id)
return
await gateway.api.request("put", field, json=data)
async def async_refresh_devices_service(hass, data):
"""Refresh available devices from deCONZ."""
gateway = get_master_gateway(hass)
if CONF_BRIDGE_ID in data:
gateway = hass.data[DOMAIN][normalize_bridge_id(data[CONF_BRIDGE_ID])]
gateway.ignore_state_updates = True
await gateway.api.refresh_state()
gateway.ignore_state_updates = False
gateway.async_add_device_callback(NEW_GROUP, list(gateway.api.groups.values()))
gateway.async_add_device_callback(NEW_LIGHT, list(gateway.api.lights.values()))
gateway.async_add_device_callback(NEW_SCENE, list(gateway.api.scenes.values()))
gateway.async_add_device_callback(NEW_SENSOR, list(gateway.api.sensors.values()))
async def async_remove_orphaned_entries_service(hass, data):
"""Remove orphaned deCONZ entries from device and entity registries."""
gateway = get_master_gateway(hass)
if CONF_BRIDGE_ID in data:
gateway = hass.data[DOMAIN][normalize_bridge_id(data[CONF_BRIDGE_ID])]
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_entries = async_entries_for_config_entry(
entity_registry, gateway.config_entry.entry_id
)
entities_to_be_removed = []
devices_to_be_removed = [
entry.id
for entry in device_registry.devices.values()
if gateway.config_entry.entry_id in entry.config_entries
]
# Don't remove the Gateway host entry
gateway_host = device_registry.async_get_device(
connections={(CONNECTION_NETWORK_MAC, gateway.api.config.mac)},
identifiers=set(),
)
if gateway_host.id in devices_to_be_removed:
devices_to_be_removed.remove(gateway_host.id)
# Don't remove the Gateway service entry
gateway_service = device_registry.async_get_device(
identifiers={(DOMAIN, gateway.api.config.bridgeid)}, connections=set()
)
if gateway_service.id in devices_to_be_removed:
devices_to_be_removed.remove(gateway_service.id)
# Don't remove devices belonging to available events
for event in gateway.events:
if event.device_id in devices_to_be_removed:
devices_to_be_removed.remove(event.device_id)
for entry in entity_entries:
# Don't remove available entities
if entry.unique_id in gateway.entities[entry.domain]:
# Don't remove devices with available entities
if entry.device_id in devices_to_be_removed:
devices_to_be_removed.remove(entry.device_id)
continue
# Remove entities that are not available
entities_to_be_removed.append(entry.entity_id)
# Remove unavailable entities
for entity_id in entities_to_be_removed:
entity_registry.async_remove(entity_id)
# Remove devices that don't belong to any entity
for device_id in devices_to_be_removed:
if len(async_entries_for_device(entity_registry, device_id)) == 0:
device_registry.async_remove_device(device_id)
|
import asyncio
from typing import Optional
import aiopulse
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import ACMEDA_ENTITY_REMOVE, ACMEDA_HUB_UPDATE, LOGGER
from .helpers import update_devices
class PulseHub:
"""Manages a single Pulse Hub."""
def __init__(self, hass, config_entry):
"""Initialize the system."""
self.config_entry = config_entry
self.hass = hass
self.api: Optional[aiopulse.Hub] = None
self.tasks = []
self.current_rollers = {}
self.cleanup_callbacks = []
@property
def title(self):
"""Return the title of the hub shown in the integrations list."""
return f"{self.api.id} ({self.api.host})"
@property
def host(self):
"""Return the host of this hub."""
return self.config_entry.data["host"]
async def async_setup(self, tries=0):
"""Set up a hub based on host parameter."""
host = self.host
hub = aiopulse.Hub(host)
self.api = hub
hub.callback_subscribe(self.async_notify_update)
self.tasks.append(asyncio.create_task(hub.run()))
LOGGER.debug("Hub setup complete")
return True
async def async_reset(self):
"""Reset this hub to default state."""
for cleanup_callback in self.cleanup_callbacks:
cleanup_callback()
# If not setup
if self.api is None:
return False
self.api.callback_unsubscribe(self.async_notify_update)
await self.api.stop()
del self.api
self.api = None
# Wait for any running tasks to complete
await asyncio.wait(self.tasks)
return True
async def async_notify_update(self, update_type):
"""Evaluate entities when hub reports that update has occurred."""
LOGGER.debug("Hub {update_type.name} updated")
if update_type == aiopulse.UpdateType.rollers:
await update_devices(self.hass, self.config_entry, self.api.rollers)
self.hass.config_entries.async_update_entry(
self.config_entry, title=self.title
)
async_dispatcher_send(
self.hass, ACMEDA_HUB_UPDATE.format(self.config_entry.entry_id)
)
for unique_id in list(self.current_rollers):
if unique_id not in self.api.rollers:
LOGGER.debug("Notifying remove of %s", unique_id)
self.current_rollers.pop(unique_id)
async_dispatcher_send(
self.hass, ACMEDA_ENTITY_REMOVE.format(unique_id)
)
|
from django.core.exceptions import ImproperlyConfigured
from django.forms import fields, widgets
from django.template import engines, TemplateDoesNotExist
from django.template.loader import select_template
from django.utils.html import mark_safe
from django.utils.translation import gettext_lazy as _
from entangled.forms import EntangledModelFormMixin
from cms.apphook_pool import apphook_pool
from cms.plugin_pool import plugin_pool
from shop.conf import app_settings
from shop.cascade.plugin_base import ShopPluginBase
class BreadcrumbPluginForm(EntangledModelFormMixin):
CHOICES = [
('default', _("Default")),
('soft-root', _("With “Soft-Root”")),
('catalog', _("With Catalog Count")),
]
render_type = fields.ChoiceField(
choices=CHOICES,
widget=widgets.RadioSelect,
label=_("Render as"),
initial='default',
help_text=_("Render an alternative Breadcrumb"),
)
class Meta:
entangled_fields = {'glossary': ['render_type']}
class BreadcrumbPlugin(ShopPluginBase):
name = _("Breadcrumb")
parent_classes = []
allow_children = None
form = BreadcrumbPluginForm
@classmethod
def get_identifier(cls, instance):
render_type = instance.glossary.get('render_type')
return mark_safe(dict(cls.form.CHOICES).get(render_type, ''))
def get_render_template(self, context, instance, placeholder):
render_type = instance.glossary.get('render_type')
try:
return select_template([
'{}/breadcrumb/{}.html'.format(app_settings.APP_LABEL, render_type),
'shop/breadcrumb/{}.html'.format(render_type),
])
except TemplateDoesNotExist:
return engines['django'].from_string('<!-- empty breadcrumb -->')
def get_use_cache(self, context, instance, placeholder):
try:
app = apphook_pool.get_apphook(instance.page.application_urls)
return app.cache_placeholders
except (AttributeError, ImproperlyConfigured):
return super().get_use_cache(context, instance, placeholder)
plugin_pool.register_plugin(BreadcrumbPlugin)
|
from pyedimax.smartplug import SmartPlug
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
DOMAIN = "edimax"
DEFAULT_NAME = "Edimax Smart Plug"
DEFAULT_PASSWORD = "1234"
DEFAULT_USERNAME = "admin"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Find and return Edimax Smart Plugs."""
host = config.get(CONF_HOST)
auth = (config.get(CONF_USERNAME), config.get(CONF_PASSWORD))
name = config.get(CONF_NAME)
add_entities([SmartPlugSwitch(SmartPlug(host, auth), name)], True)
class SmartPlugSwitch(SwitchEntity):
"""Representation an Edimax Smart Plug switch."""
def __init__(self, smartplug, name):
"""Initialize the switch."""
self.smartplug = smartplug
self._name = name
self._now_power = None
self._now_energy_day = None
self._state = False
self._supports_power_monitoring = False
self._info = None
self._mac = None
@property
def unique_id(self):
"""Return the device's MAC address."""
return self._mac
@property
def name(self):
"""Return the name of the Smart Plug, if any."""
return self._name
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self._now_power
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return self._now_energy_day
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.smartplug.state = "ON"
def turn_off(self, **kwargs):
"""Turn the switch off."""
self.smartplug.state = "OFF"
def update(self):
"""Update edimax switch."""
if not self._info:
self._info = self.smartplug.info
self._mac = self._info["mac"]
self._supports_power_monitoring = self._info["model"] != "SP1101W"
if self._supports_power_monitoring:
try:
self._now_power = float(self.smartplug.now_power)
except (TypeError, ValueError):
self._now_power = None
try:
self._now_energy_day = float(self.smartplug.now_energy_day)
except (TypeError, ValueError):
self._now_energy_day = None
self._state = self.smartplug.state == "ON"
|
from openrazer_daemon.dbus_services import endpoint
@endpoint('razer.device.misc.mug', 'isMugPresent', out_sig='b')
def is_mug_present(self):
"""
Get if the mug is present
:return: True if there's a mug
:rtype: bool
"""
self.logger.debug("DBus call is_mug_present")
driver_path = self.get_driver_path('is_mug_present')
with open(driver_path, 'r') as driver_file:
return int(driver_file.read().strip()) == 1
|
import sys
import argparse
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWebEngineWidgets import QWebEngineView
def parse_args():
"""Parse commandline arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('url', help='The URL to open',
nargs='?', default='https://qutebrowser.org/')
return parser.parse_known_args()[0]
if __name__ == '__main__':
args = parse_args()
app = QApplication(sys.argv)
wv = QWebEngineView()
wv.loadStarted.connect(lambda: print("Loading started"))
wv.loadProgress.connect(lambda p: print("Loading progress: {}%".format(p)))
wv.loadFinished.connect(lambda: print("Loading finished"))
wv.load(QUrl.fromUserInput(args.url))
wv.show()
app.exec_()
|
from tests.common import assert_lists_same, async_get_device_automations
from tests.components.homekit_controller.common import (
setup_accessories_from_file,
setup_test_accessories,
)
async def test_aqara_switch_setup(hass):
"""Test that a Aqara Switch can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "aqara_switch.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
battery_id = "sensor.programmable_switch_battery"
battery = entity_registry.async_get(battery_id)
assert battery.unique_id == "homekit-111a1111a1a111-5"
# The fixture file has 1 button and a battery
expected = [
{
"device_id": battery.device_id,
"domain": "sensor",
"entity_id": "sensor.programmable_switch_battery",
"platform": "device",
"type": "battery_level",
}
]
for subtype in ("single_press", "double_press", "long_press"):
expected.append(
{
"device_id": battery.device_id,
"domain": "homekit_controller",
"platform": "device",
"type": "button1",
"subtype": subtype,
}
)
triggers = await async_get_device_automations(hass, "trigger", battery.device_id)
assert_lists_same(triggers, expected)
|
from __future__ import print_function
"""
clean_junit_xml.py is a simple script that takes all the xml-formatted
Ant JUnit XML test output in test_results and aggregates them into
test_results/_hudson. In this process, it strips any characters that
tend to cause Hudson trouble.
"""
import os
import sys
import rospkg
import rosunit.junitxml as junitxml
PKG = 'rosunit'
def prepare_dirs(output_dir_name):
test_results_dir = rospkg.get_test_results_dir()
print('will read test results from', test_results_dir)
output_dir = os.path.join(test_results_dir, output_dir_name)
if not os.path.exists(output_dir):
print('creating directory', output_dir)
os.makedirs(output_dir)
return test_results_dir, output_dir
def clean_results(test_results_dir, output_dir, filter):
"""
Read results from test_results_dir and write them into output_dir.
"""
for d in os.listdir(test_results_dir):
if filter and d in filter:
continue
print('looking at', d)
test_dir = os.path.join(test_results_dir, d)
if not os.path.isdir(test_dir):
continue
base_test_name = os.path.basename(test_dir)
# for each test result that a package generated, read it, then
# rewrite it to our output directory. This will invoke our
# cleaning rules on the XML that protect the result from Hudson
# issues.
for file in os.listdir(test_dir):
if file.endswith('.xml'):
test_name = base_test_name + '.' + file[:-4]
file = os.path.join(test_dir, file)
try:
result = junitxml.read(file, test_name)
output_path = os.path.join(output_dir, '%s.xml' % test_name)
with open(output_path, 'w') as f:
print('re-writing', output_path)
f.write(result.xml().encode('utf-8'))
except Exception as e:
sys.stderr.write('ignoring [%s]: %s\n' % (file, e))
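# Illustrative example (hypothetical file names): a result file
# "test_results/mypkg/results.xml" is read as test "mypkg.results" and
# re-written to "<output_dir>/mypkg.results.xml" with the cleaning rules applied.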
def main():
print('[clean_junit_xml]: STARTING')
output_dir_name = '_hudson'
test_results_dir, output_dir = prepare_dirs(output_dir_name)
print('[clean_junit_xml]: writing aggregated test results to %s' % output_dir)
clean_results(test_results_dir, output_dir, [output_dir_name, '.svn'])
print('[clean_junit_xml]: FINISHED')
if __name__ == '__main__':
main()
|
from typing import Any, Callable, List, Optional
from bond_api import Action, DeviceType
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
from .entity import BondEntity
from .utils import BondDevice, BondHub
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up Bond generic devices."""
hub: BondHub = hass.data[DOMAIN][entry.entry_id]
switches = [
BondSwitch(hub, device)
for device in hub.devices
if DeviceType.is_generic(device.type)
]
async_add_entities(switches, True)
class BondSwitch(BondEntity, SwitchEntity):
"""Representation of a Bond generic device."""
def __init__(self, hub: BondHub, device: BondDevice):
"""Create HA entity representing Bond generic device (switch)."""
super().__init__(hub, device)
self._power: Optional[bool] = None
def _apply_state(self, state: dict):
self._power = state.get("power")
@property
def is_on(self) -> bool:
"""Return True if power is on."""
return self._power == 1
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the device on."""
await self._hub.bond.action(self._device.device_id, Action.turn_on())
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the device off."""
await self._hub.bond.action(self._device.device_id, Action.turn_off())
|
from datetime import timedelta
import logging
from pyatome.client import AtomeClient, PyAtomeError
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_USERNAME,
DEVICE_CLASS_POWER,
ENERGY_KILO_WATT_HOUR,
POWER_WATT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "atome"
LIVE_SCAN_INTERVAL = timedelta(seconds=30)
DAILY_SCAN_INTERVAL = timedelta(seconds=150)
WEEKLY_SCAN_INTERVAL = timedelta(hours=1)
MONTHLY_SCAN_INTERVAL = timedelta(hours=1)
YEARLY_SCAN_INTERVAL = timedelta(days=1)
LIVE_NAME = "Atome Live Power"
DAILY_NAME = "Atome Daily"
WEEKLY_NAME = "Atome Weekly"
MONTHLY_NAME = "Atome Monthly"
YEARLY_NAME = "Atome Yearly"
LIVE_TYPE = "live"
DAILY_TYPE = "day"
WEEKLY_TYPE = "week"
MONTHLY_TYPE = "month"
YEARLY_TYPE = "year"
ICON = "mdi:flash"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Atome sensor."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
try:
atome_client = AtomeClient(username, password)
atome_client.login()
except PyAtomeError as exp:
_LOGGER.error(exp)
return
data = AtomeData(atome_client)
sensors = []
sensors.append(AtomeSensor(data, LIVE_NAME, LIVE_TYPE))
sensors.append(AtomeSensor(data, DAILY_NAME, DAILY_TYPE))
sensors.append(AtomeSensor(data, WEEKLY_NAME, WEEKLY_TYPE))
sensors.append(AtomeSensor(data, MONTHLY_NAME, MONTHLY_TYPE))
sensors.append(AtomeSensor(data, YEARLY_NAME, YEARLY_TYPE))
add_entities(sensors, True)
class AtomeData:
"""Stores data retrieved from Neurio sensor."""
def __init__(self, client: AtomeClient):
"""Initialize the data."""
self.atome_client = client
self._live_power = None
self._subscribed_power = None
self._is_connected = None
self._day_usage = None
self._day_price = None
self._week_usage = None
self._week_price = None
self._month_usage = None
self._month_price = None
self._year_usage = None
self._year_price = None
@property
def live_power(self):
"""Return latest active power value."""
return self._live_power
@property
def subscribed_power(self):
"""Return latest active power value."""
return self._subscribed_power
@property
def is_connected(self):
"""Return latest active power value."""
return self._is_connected
@Throttle(LIVE_SCAN_INTERVAL)
def update_live_usage(self):
"""Return current power value."""
try:
values = self.atome_client.get_live()
self._live_power = values["last"]
self._subscribed_power = values["subscribed"]
self._is_connected = values["isConnected"]
_LOGGER.debug(
"Updating Atome live data. Got: %d, isConnected: %s, subscribed: %d",
self._live_power,
self._is_connected,
self._subscribed_power,
)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@property
def day_usage(self):
"""Return latest daily usage value."""
return self._day_usage
@property
def day_price(self):
"""Return latest daily usage value."""
return self._day_price
@Throttle(DAILY_SCAN_INTERVAL)
def update_day_usage(self):
"""Return current daily power usage."""
try:
values = self.atome_client.get_consumption(DAILY_TYPE)
self._day_usage = values["total"] / 1000
self._day_price = values["price"]
_LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@property
def week_usage(self):
"""Return latest weekly usage value."""
return self._week_usage
@property
def week_price(self):
"""Return latest weekly usage value."""
return self._week_price
@Throttle(WEEKLY_SCAN_INTERVAL)
def update_week_usage(self):
"""Return current weekly power usage."""
try:
values = self.atome_client.get_consumption(WEEKLY_TYPE)
self._week_usage = values["total"] / 1000
self._week_price = values["price"]
_LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@property
def month_usage(self):
"""Return latest monthly usage value."""
return self._month_usage
@property
def month_price(self):
"""Return latest monthly usage value."""
return self._month_price
@Throttle(MONTHLY_SCAN_INTERVAL)
def update_month_usage(self):
"""Return current monthly power usage."""
try:
values = self.atome_client.get_consumption(MONTHLY_TYPE)
self._month_usage = values["total"] / 1000
self._month_price = values["price"]
_LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
@property
def year_usage(self):
"""Return latest yearly usage value."""
return self._year_usage
@property
def year_price(self):
"""Return latest yearly usage value."""
return self._year_price
@Throttle(YEARLY_SCAN_INTERVAL)
def update_year_usage(self):
"""Return current yearly power usage."""
try:
values = self.atome_client.get_consumption(YEARLY_TYPE)
self._year_usage = values["total"] / 1000
self._year_price = values["price"]
_LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage)
except KeyError as error:
_LOGGER.error("Missing last value in values: %s: %s", values, error)
class AtomeSensor(Entity):
"""Representation of a sensor entity for Atome."""
def __init__(self, data, name, sensor_type):
"""Initialize the sensor."""
self._name = name
self._data = data
self._state = None
self._attributes = {}
self._sensor_type = sensor_type
if sensor_type == LIVE_TYPE:
self._unit_of_measurement = POWER_WATT
else:
self._unit_of_measurement = ENERGY_KILO_WATT_HOUR
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_POWER
def update(self):
"""Update device state."""
update_function = getattr(self._data, f"update_{self._sensor_type}_usage")
update_function()
if self._sensor_type == LIVE_TYPE:
self._state = self._data.live_power
self._attributes["subscribed_power"] = self._data.subscribed_power
self._attributes["is_connected"] = self._data.is_connected
else:
self._state = getattr(self._data, f"{self._sensor_type}_usage")
self._attributes["price"] = getattr(
self._data, f"{self._sensor_type}_price"
)
|
import subprocess
import sys
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from typing import Dict, Iterator, Optional, Tuple
import yaml
IGNORE_DEPS = {
"black",
"coveralls",
"flake8",
"hypothesis",
"isort",
"mypy",
"pip",
"pytest",
"pytest-cov",
"pytest-env",
"pytest-xdist",
}
POLICY_MONTHS = {"python": 42, "numpy": 24, "setuptools": 42}
POLICY_MONTHS_DEFAULT = 12
POLICY_OVERRIDE = {
# dask < 2.9 has trouble with nan-reductions
# TODO remove this special case and the matching note in installing.rst
# after January 2021.
"dask": (2, 9),
"distributed": (2, 9),
# setuptools-scm doesn't work with setuptools < 36.7 (Nov 2017).
# The conda metadata is malformed for setuptools < 38.4 (Jan 2018)
# (it's missing a timestamp which prevents this tool from working).
# TODO remove this special case and the matching note in installing.rst
# after July 2021.
"setuptools": (38, 4),
}
has_errors = False
def error(msg: str) -> None:
global has_errors
has_errors = True
print("ERROR:", msg)
def warning(msg: str) -> None:
print("WARNING:", msg)
def parse_requirements(fname) -> Iterator[Tuple[str, int, int, Optional[int]]]:
"""Load requirements/py36-min-all-deps.yml
Yield (package name, major version, minor version, [patch version])
"""
global has_errors
with open(fname) as fh:
contents = yaml.safe_load(fh)
for row in contents["dependencies"]:
if isinstance(row, dict) and list(row) == ["pip"]:
continue
pkg, eq, version = row.partition("=")
if pkg.rstrip("<>") in IGNORE_DEPS:
continue
if pkg.endswith("<") or pkg.endswith(">") or eq != "=":
error("package should be pinned with exact version: " + row)
continue
try:
version_tup = tuple(int(x) for x in version.split("."))
except ValueError:
raise ValueError("non-numerical version: " + row)
if len(version_tup) == 2:
yield (pkg, *version_tup, None) # type: ignore
elif len(version_tup) == 3:
yield (pkg, *version_tup) # type: ignore
else:
raise ValueError("expected major.minor or major.minor.patch: " + row)
def query_conda(pkg: str) -> Dict[Tuple[int, int], datetime]:
"""Query the conda repository for a specific package
Return map of {(major version, minor version): publication date}
"""
stdout = subprocess.check_output(
["conda", "search", pkg, "--info", "-c", "defaults", "-c", "conda-forge"]
)
out = {} # type: Dict[Tuple[int, int], datetime]
major = None
minor = None
for row in stdout.decode("utf-8").splitlines():
label, _, value = row.partition(":")
label = label.strip()
if label == "file name":
value = value.strip()[len(pkg) :]
smajor, sminor = value.split("-")[1].split(".")[:2]
major = int(smajor)
minor = int(sminor)
if label == "timestamp":
assert major is not None
assert minor is not None
ts = datetime.strptime(value.split()[0].strip(), "%Y-%m-%d")
if (major, minor) in out:
out[major, minor] = min(out[major, minor], ts)
else:
out[major, minor] = ts
# Hardcoded fix to work around incorrect dates in conda
if pkg == "python":
out.update(
{
(2, 7): datetime(2010, 6, 3),
(3, 5): datetime(2015, 9, 13),
(3, 6): datetime(2016, 12, 23),
(3, 7): datetime(2018, 6, 27),
(3, 8): datetime(2019, 10, 14),
}
)
return out
def process_pkg(
pkg: str, req_major: int, req_minor: int, req_patch: Optional[int]
) -> Tuple[str, str, str, str, str, str]:
"""Compare package version from requirements file to available versions in conda.
Return row to build pandas dataframe:
- package name
- major.minor.[patch] version in requirements file
- publication date of version in requirements file (YYYY-MM-DD)
- major.minor version suggested by policy
- publication date of version suggested by policy (YYYY-MM-DD)
- status ("<", "=", "> (!)")
"""
print("Analyzing %s..." % pkg)
versions = query_conda(pkg)
try:
req_published = versions[req_major, req_minor]
except KeyError:
error("not found in conda: " + pkg)
return pkg, fmt_version(req_major, req_minor, req_patch), "-", "-", "-", "(!)"
policy_months = POLICY_MONTHS.get(pkg, POLICY_MONTHS_DEFAULT)
policy_published = datetime.now() - timedelta(days=policy_months * 30)
policy_major = req_major
policy_minor = req_minor
policy_published_actual = req_published
for (major, minor), published in reversed(sorted(versions.items())):
if published < policy_published:
break
policy_major = major
policy_minor = minor
policy_published_actual = published
try:
policy_major, policy_minor = POLICY_OVERRIDE[pkg]
except KeyError:
pass
if (req_major, req_minor) < (policy_major, policy_minor):
status = "<"
elif (req_major, req_minor) > (policy_major, policy_minor):
status = "> (!)"
error("Package is too new: " + pkg)
else:
status = "="
if req_patch is not None:
warning("patch version should not appear in requirements file: " + pkg)
status += " (w)"
return (
pkg,
fmt_version(req_major, req_minor, req_patch),
req_published.strftime("%Y-%m-%d"),
fmt_version(policy_major, policy_minor),
policy_published_actual.strftime("%Y-%m-%d"),
status,
)
def fmt_version(major: int, minor: int, patch: Optional[int] = None) -> str:
if patch is None:
return f"{major}.{minor}"
else:
return f"{major}.{minor}.{patch}"
def main() -> None:
fname = sys.argv[1]
with ThreadPoolExecutor(8) as ex:
futures = [
ex.submit(process_pkg, pkg, major, minor, patch)
for pkg, major, minor, patch in parse_requirements(fname)
]
rows = [f.result() for f in futures]
print("Package Required Policy Status")
print("------------- -------------------- -------------------- ------")
fmt = "{:13} {:7} ({:10}) {:7} ({:10}) {}"
for row in rows:
print(fmt.format(*row))
assert not has_errors
if __name__ == "__main__":
main()
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from puppetdashboard import PuppetDashboardCollector
##########################################################################
class TestPuppetDashboardCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PuppetDashboardCollector', {
'interval': 10
})
self.collector = PuppetDashboardCollector(config, None)
def test_import(self):
self.assertTrue(PuppetDashboardCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('index.html')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
metrics = {
'unresponsive': 3,
'pending': 0,
'changed': 10,
'unchanged': 4,
'unreported': 0,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_fail_gracefully(self, publish_mock):
patch_urlopen = patch('urllib2.urlopen', Mock(
return_value=self.getFixture('index.blank')))
patch_urlopen.start()
self.collector.collect()
patch_urlopen.stop()
self.assertPublishedMany(publish_mock, {})
##########################################################################
if __name__ == "__main__":
unittest.main()
|
import logging
import random
import string
from sucks import EcoVacsAPI, VacBot
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = "ecovacs"
CONF_COUNTRY = "country"
CONF_CONTINENT = "continent"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_COUNTRY): vol.All(vol.Lower, cv.string),
vol.Required(CONF_CONTINENT): vol.All(vol.Lower, cv.string),
}
)
},
extra=vol.ALLOW_EXTRA,
)
ECOVACS_DEVICES = "ecovacs_devices"
# Generate a random device ID on each bootup
ECOVACS_API_DEVICEID = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(8)
)
def setup(hass, config):
"""Set up the Ecovacs component."""
_LOGGER.debug("Creating new Ecovacs component")
hass.data[ECOVACS_DEVICES] = []
ecovacs_api = EcoVacsAPI(
ECOVACS_API_DEVICEID,
config[DOMAIN].get(CONF_USERNAME),
EcoVacsAPI.md5(config[DOMAIN].get(CONF_PASSWORD)),
config[DOMAIN].get(CONF_COUNTRY),
config[DOMAIN].get(CONF_CONTINENT),
)
devices = ecovacs_api.devices()
_LOGGER.debug("Ecobot devices: %s", devices)
for device in devices:
_LOGGER.info(
"Discovered Ecovacs device on account: %s with nickname %s",
device["did"],
device["nick"],
)
vacbot = VacBot(
ecovacs_api.uid,
ecovacs_api.REALM,
ecovacs_api.resource,
ecovacs_api.user_access_token,
device,
config[DOMAIN].get(CONF_CONTINENT).lower(),
monitor=True,
)
hass.data[ECOVACS_DEVICES].append(vacbot)
def stop(event: object) -> None:
"""Shut down open connections to Ecovacs XMPP server."""
for device in hass.data[ECOVACS_DEVICES]:
_LOGGER.info(
"Shutting down connection to Ecovacs device %s", device.vacuum["did"]
)
device.disconnect()
# Listen for HA stop to disconnect.
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop)
if hass.data[ECOVACS_DEVICES]:
_LOGGER.debug("Starting vacuum components")
discovery.load_platform(hass, "vacuum", DOMAIN, {}, config)
return True
|
from flask import Flask
from flasgger import Swagger
app = Flask(__name__)
swagger_config = {
"headers": [],
"openapi": "3.0.2",
"components": {
"securitySchemes": {
"oAuthSample": {
"type": "oauth2",
"flows": {
"clientCredentials": {
"tokenUrl": "https://api.pgsmartshopassistant.com/o/token/",
}
}
}
},
},
"servers": [
{
"url": "https://api.example.com/v1",
"description": "Production server (uses live data)"
},
{
"url": "https://sandbox-api.example.com:8443/v1",
"description": "Sandbox server (uses test data)"
}
],
"specs": [
{
"endpoint": "swagger",
"route": "/characteristics/swagger.json",
"rule_filter": lambda rule: True, # all in
"model_filter": lambda tag: True, # all in
}
],
"title": "Product Characteristics API",
"version": '',
"termsOfService": "",
"static_url_path": "/characteristics/static",
"swagger_ui": True,
"specs_route": "/characteristics/swagger/",
"description": "",
}
colors_spec = {
"tags": [
"colors"
],
"parameters": [
{
"name": "palette",
"in": "path",
"type": "string",
"enum": [
"all",
"rgb",
"cmyk"
],
"required": True,
"default": "all",
"description": "Which palette to filter?"
}
],
"operationId": "get_colors",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"security": {
"colors_auth": [
"write:colors",
"read:colors"
]
},
"schemes": [
"http",
"https"
],
"deprecated": False,
"externalDocs": {
"description": "Project repository",
"url": "http://github.com/rochacbruno/flasgger"
},
"definitions": {
"Palette": {
"type": "object",
"properties": {
"palette_name": {
"type": "array",
"items": {
"$ref": "#/definitions/Color"
}
}
}
},
"Color": {
"type": "string"
}
},
"responses": {
"200": {
"description": "A list of colors (may be filtered by palette)",
"schema": {
"$ref": "#/definitions/Palette"
},
"examples": {
"rgb": [
"red",
"green",
"blue"
]
}
}
}
}
swag = Swagger(app, config=swagger_config)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
for spec in specs_data.values():
assert 'openapi' in spec
assert '3.0.2' == spec['openapi']
assert 'swagger' not in spec
assert 'components' in spec
assert 'securitySchemes' in spec['components']
assert 'oAuthSample' in spec['components']['securitySchemes']
assert 'servers' in spec # See issue #366
if __name__ == '__main__':
app.run(debug=True)
|
import contextlib
import math
import os
import string
from collections import OrderedDict
from configparser import RawConfigParser
from radicale import auth, rights, storage, web
DEFAULT_CONFIG_PATH = os.pathsep.join([
"?/etc/radicale/config",
"?~/.config/radicale/config"])
def positive_int(value):
value = int(value)
if value < 0:
raise ValueError("value is negative: %d" % value)
return value
def positive_float(value):
value = float(value)
    if math.isnan(value):
        raise ValueError("value is not a number")
    if not math.isfinite(value):
        raise ValueError("value is infinite")
if value < 0:
raise ValueError("value is negative: %f" % value)
return value
def logging_level(value):
if value not in ("debug", "info", "warning", "error", "critical"):
raise ValueError("unsupported level: %r" % value)
return value
def filepath(value):
if not value:
return ""
value = os.path.expanduser(value)
if os.name == "nt":
value = os.path.expandvars(value)
return os.path.abspath(value)
def list_of_ip_address(value):
def ip_address(value):
try:
address, port = value.rsplit(":", 1)
return address.strip(string.whitespace + "[]"), int(port)
except ValueError:
raise ValueError("malformed IP address: %r" % value)
return [ip_address(s) for s in value.split(",")]
def str_or_callable(value):
if callable(value):
return value
return str(value)
def unspecified_type(value):
return value
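# RawConfigParser.BOOLEAN_STATES maps "1"/"yes"/"true"/"on" to True and
# "0"/"no"/"false"/"off" to False; _convert_to_bool below lower-cases the raw
# value before the lookup.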
def _convert_to_bool(value):
if value.lower() not in RawConfigParser.BOOLEAN_STATES:
raise ValueError("not a boolean: %r" % value)
return RawConfigParser.BOOLEAN_STATES[value.lower()]
INTERNAL_OPTIONS = ("_allow_extra",)
# Default configuration
DEFAULT_CONFIG_SCHEMA = OrderedDict([
("server", OrderedDict([
("hosts", {
"value": "localhost:5232",
"help": "set server hostnames including ports",
"aliases": ("-H", "--hosts",),
"type": list_of_ip_address}),
("max_connections", {
"value": "8",
"help": "maximum number of parallel connections",
"type": positive_int}),
("max_content_length", {
"value": "100000000",
"help": "maximum size of request body in bytes",
"type": positive_int}),
("timeout", {
"value": "30",
"help": "socket timeout",
"type": positive_float}),
("ssl", {
"value": "False",
"help": "use SSL connection",
"aliases": ("-s", "--ssl",),
"opposite": ["-S", "--no-ssl"],
"type": bool}),
("certificate", {
"value": "/etc/ssl/radicale.cert.pem",
"help": "set certificate file",
"aliases": ("-c", "--certificate",),
"type": filepath}),
("key", {
"value": "/etc/ssl/radicale.key.pem",
"help": "set private key file",
"aliases": ("-k", "--key",),
"type": filepath}),
("certificate_authority", {
"value": "",
"help": "set CA certificate for validating clients",
"aliases": ("--certificate-authority",),
"type": filepath}),
("_internal_server", {
"value": "False",
"help": "the internal server is used",
"type": bool})])),
("encoding", OrderedDict([
("request", {
"value": "utf-8",
"help": "encoding for responding requests",
"type": str}),
("stock", {
"value": "utf-8",
"help": "encoding for storing local collections",
"type": str})])),
("auth", OrderedDict([
("type", {
"value": "none",
"help": "authentication method",
"type": str_or_callable,
"internal": auth.INTERNAL_TYPES}),
("htpasswd_filename", {
"value": "/etc/radicale/users",
"help": "htpasswd filename",
"type": filepath}),
("htpasswd_encryption", {
"value": "md5",
"help": "htpasswd encryption method",
"type": str}),
("realm", {
"value": "Radicale - Password Required",
"help": "message displayed when a password is needed",
"type": str}),
("delay", {
"value": "1",
"help": "incorrect authentication delay",
"type": positive_float})])),
("rights", OrderedDict([
("type", {
"value": "owner_only",
"help": "rights backend",
"type": str_or_callable,
"internal": rights.INTERNAL_TYPES}),
("file", {
"value": "/etc/radicale/rights",
"help": "file for rights management from_file",
"type": filepath})])),
("storage", OrderedDict([
("type", {
"value": "multifilesystem",
"help": "storage backend",
"type": str_or_callable,
"internal": storage.INTERNAL_TYPES}),
("filesystem_folder", {
"value": "/var/lib/radicale/collections",
"help": "path where collections are stored",
"type": filepath}),
("max_sync_token_age", {
"value": "2592000", # 30 days
"help": "delete sync token that are older",
"type": positive_int}),
("hook", {
"value": "",
"help": "command that is run after changes to storage",
"type": str}),
("_filesystem_fsync", {
"value": "True",
"help": "sync all changes to filesystem during requests",
"type": bool})])),
("web", OrderedDict([
("type", {
"value": "internal",
"help": "web interface backend",
"type": str_or_callable,
"internal": web.INTERNAL_TYPES})])),
("logging", OrderedDict([
("level", {
"value": "warning",
"help": "threshold for the logger",
"type": logging_level}),
("mask_passwords", {
"value": "True",
"help": "mask passwords in logs",
"type": bool})])),
("headers", OrderedDict([
("_allow_extra", str)]))])
def parse_compound_paths(*compound_paths):
"""Parse a compound path and return the individual paths.
Paths in a compound path are joined by ``os.pathsep``. If a path starts
with ``?`` the return value ``IGNORE_IF_MISSING`` is set.
When multiple ``compound_paths`` are passed, the last argument that is
not ``None`` is used.
    Returns a list of tuples of the format ``[(PATH, IGNORE_IF_MISSING), ...]``
"""
compound_path = ""
for p in compound_paths:
if p is not None:
compound_path = p
paths = []
for path in compound_path.split(os.pathsep):
ignore_if_missing = path.startswith("?")
if ignore_if_missing:
path = path[1:]
path = filepath(path)
if path:
paths.append((path, ignore_if_missing))
return paths
def load(paths=()):
"""
Create instance of ``Configuration`` for use with
``radicale.app.Application``.
``paths`` a list of configuration files with the format
``[(PATH, IGNORE_IF_MISSING), ...]``.
If a configuration file is missing and IGNORE_IF_MISSING is set, the
config is set to ``Configuration.SOURCE_MISSING``.
The configuration can later be changed with ``Configuration.update()``.
"""
configuration = Configuration(DEFAULT_CONFIG_SCHEMA)
for path, ignore_if_missing in paths:
parser = RawConfigParser()
config_source = "config file %r" % path
try:
if not parser.read(path):
config = Configuration.SOURCE_MISSING
if not ignore_if_missing:
raise RuntimeError("No such file: %r" % path)
else:
config = {s: {o: parser[s][o] for o in parser.options(s)}
for s in parser.sections()}
except Exception as e:
raise RuntimeError(
"Failed to load %s: %s" % (config_source, e)) from e
configuration.update(config, config_source)
return configuration
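# Minimal usage sketch (an illustration, not part of the original module): the
# default compound path is split into (path, ignore_if_missing) tuples and fed
# to load(). Because every entry in DEFAULT_CONFIG_PATH starts with "?", files
# that do not exist are recorded as SOURCE_MISSING instead of raising.
def _example_load_default_config():
    paths = parse_compound_paths(DEFAULT_CONFIG_PATH)
    return load(paths)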
class Configuration:
SOURCE_MISSING = {}
def __init__(self, schema):
"""Initialize configuration.
``schema`` a dict that describes the configuration format.
See ``DEFAULT_CONFIG_SCHEMA``.
The content of ``schema`` must not change afterwards, it is kept
as an internal reference.
Use ``load()`` to create an instance for use with
``radicale.app.Application``.
"""
self._schema = schema
self._values = {}
self._configs = []
default = {section: {option: self._schema[section][option]["value"]
for option in self._schema[section]
if option not in INTERNAL_OPTIONS}
for section in self._schema}
self.update(default, "default config", privileged=True)
def update(self, config, source=None, privileged=False):
"""Update the configuration.
``config`` a dict of the format {SECTION: {OPTION: VALUE, ...}, ...}.
The configuration is checked for errors according to the config schema.
The content of ``config`` must not change afterwards, it is kept
as an internal reference.
``source`` a description of the configuration source (used in error
messages).
``privileged`` allows updating sections and options starting with "_".
"""
source = source or "unspecified config"
new_values = {}
for section in config:
if (section not in self._schema or
section.startswith("_") and not privileged):
raise ValueError(
"Invalid section %r in %s" % (section, source))
new_values[section] = {}
            extra_type = self._schema[section].get("_allow_extra")
if "type" in self._schema[section]:
if "type" in config[section]:
plugin = config[section]["type"]
else:
plugin = self.get(section, "type")
if plugin not in self._schema[section]["type"]["internal"]:
extra_type = unspecified_type
for option in config[section]:
type_ = extra_type
if option in self._schema[section]:
type_ = self._schema[section][option]["type"]
if (not type_ or option in INTERNAL_OPTIONS or
option.startswith("_") and not privileged):
raise RuntimeError("Invalid option %r in section %r in "
"%s" % (option, section, source))
raw_value = config[section][option]
try:
if type_ == bool and not isinstance(raw_value, bool):
raw_value = _convert_to_bool(raw_value)
new_values[section][option] = type_(raw_value)
except Exception as e:
raise RuntimeError(
"Invalid %s value for option %r in section %r in %s: "
"%r" % (type_.__name__, option, section, source,
raw_value)) from e
self._configs.append((config, source, bool(privileged)))
for section in new_values:
self._values[section] = self._values.get(section, {})
self._values[section].update(new_values[section])
def get(self, section, option):
"""Get the value of ``option`` in ``section``."""
with contextlib.suppress(KeyError):
return self._values[section][option]
raise KeyError(section, option)
def get_raw(self, section, option):
"""Get the raw value of ``option`` in ``section``."""
for config, _, _ in reversed(self._configs):
if option in config.get(section, {}):
return config[section][option]
raise KeyError(section, option)
def get_source(self, section, option):
"""Get the source that provides ``option`` in ``section``."""
for config, source, _ in reversed(self._configs):
if option in config.get(section, {}):
return source
raise KeyError(section, option)
def sections(self):
"""List all sections."""
return self._values.keys()
def options(self, section):
"""List all options in ``section``"""
return self._values[section].keys()
def sources(self):
"""List all config sources."""
return [(source, config is self.SOURCE_MISSING) for
config, source, _ in self._configs]
def copy(self, plugin_schema=None):
"""Create a copy of the configuration
        ``plugin_schema`` is an optional dict that contains additional options
for usage with a plugin. See ``DEFAULT_CONFIG_SCHEMA``.
"""
if plugin_schema is None:
schema = self._schema
else:
schema = self._schema.copy()
for section, options in plugin_schema.items():
if (section not in schema or "type" not in schema[section] or
"internal" not in schema[section]["type"]):
raise ValueError("not a plugin section: %r" % section)
schema[section] = schema[section].copy()
schema[section]["type"] = schema[section]["type"].copy()
schema[section]["type"]["internal"] = [
self.get(section, "type")]
for option, value in options.items():
if option in schema[section]:
raise ValueError("option already exists in %r: %r" % (
section, option))
schema[section][option] = value
copy = type(self)(schema)
for config, source, privileged in self._configs:
copy.update(config, source, privileged)
return copy
|
import requests
from django.conf import settings
import weblate
from weblate.utils.management.base import BaseCommand
TAGS_API = "https://api.github.com/repos/WeblateOrg/weblate/git/ref/tags/{}"
RELEASES_API = "https://sentry.io/api/0/organizations/{}/releases/"
class Command(BaseCommand):
help = "records a release on Sentry"
def handle(self, *args, **options):
if weblate.GIT_REVISION:
# Get release from Git
version = ref = weblate.GIT_REVISION
else:
# Get commit hash from GitHub
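            # The tags endpoint returns a git ref whose "object" points at the
            # (assumed annotated) tag object; its URL is fetched once more to
            # resolve the underlying commit SHA.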
version = weblate.TAG_NAME
response = requests.get(TAGS_API.format(version))
response.raise_for_status()
response = requests.get(response.json()["object"]["url"])
response.raise_for_status()
ref = response.json()["object"]["sha"]
sentry_auth = {"Authorization": f"Bearer {settings.SENTRY_TOKEN}"}
sentry_base = RELEASES_API.format(settings.SENTRY_ORGANIZATION)
release_url = sentry_base + version + "/"
# Ensure the release is tracked on Sentry
response = requests.get(release_url, headers=sentry_auth)
if response.status_code == 404:
data = {
"version": version,
"projects": settings.SENTRY_PROJECTS,
"ref": ref,
"refs": [{"repository": "WeblateOrg/weblate", "commit": ref}],
}
response = requests.post(sentry_base, json=data, headers=sentry_auth)
self.stdout.write(f"Created new release {version}")
response.raise_for_status()
# Track the deploy
response = requests.post(
release_url + "deploys/",
data={"environment": settings.SENTRY_ENVIRONMENT},
headers=sentry_auth,
)
response.raise_for_status()
self.stdout.write("Created new Sentry deploy {}".format(response.json()["id"]))
|
import numpy as np
import matplotlib.pyplot as plt
import mne
from mne import io
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
event_id, tmin, tmax = 1, -0.2, 0.4
# Setup for reading the raw data
raw = io.read_raw_fif(raw_fname)
events = mne.read_events(event_fname)
# Set up pick list: EEG + MEG - bad channels (modify to your needs)
raw.info['bads'] = ['MEG 2443', 'EEG 053']
# Create epochs, here for gradiometers + EOG only for simplicity
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=('grad', 'eog'), baseline=(None, 0), preload=True,
reject=dict(grad=4000e-13, eog=150e-6))
###############################################################################
# Show event-related fields images
# and order with spectral reordering
# If you don't have scikit-learn installed set order_func to None
from sklearn.manifold import spectral_embedding # noqa
from sklearn.metrics.pairwise import rbf_kernel # noqa
def order_func(times, data):
this_data = data[:, (times > 0.0) & (times < 0.350)]
this_data /= np.sqrt(np.sum(this_data ** 2, axis=1))[:, np.newaxis]
return np.argsort(spectral_embedding(rbf_kernel(this_data, gamma=1.),
n_components=1, random_state=0).ravel())
good_pick = 97 # channel with a clear evoked response
bad_pick = 98 # channel with no evoked response
# We'll also plot a sample time onset for each trial
plt_times = np.linspace(0, .2, len(epochs))
plt.close('all')
mne.viz.plot_epochs_image(epochs, [good_pick, bad_pick], sigma=.5,
order=order_func, vmin=-250, vmax=250,
overlay_times=plt_times, show=True)
###############################################################################
# References
# ----------
# .. [1] Graph-based variability estimation in single-trial event-related
# neural responses. A. Gramfort, R. Keriven, M. Clerc, 2010,
# Biomedical Engineering, IEEE Trans. on, vol. 57 (5), 1051-1061
# https://ieeexplore.ieee.org/document/5406156
|
import posixpath
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_packages
from perfkitbenchmarker import regex_util
from perfkitbenchmarker import sample
from perfkitbenchmarker.linux_packages import coremark
BENCHMARK_NAME = 'coremark'
BENCHMARK_CONFIG = """
coremark:
  description: Run Coremark, a simple processor benchmark
vm_groups:
default:
vm_spec: *default_single_core
"""
COREMARK_DIR = posixpath.join(linux_packages.INSTALL_DIR, 'coremark-1.01')
COREMARK_BUILDFILE = 'linux64/core_portme.mak'
# The number of iterations per CPU was chosen such that the runtime will always
# be greater than 10 seconds as specified in the run rules at
# https://www.eembc.org/coremark/CoreMarkRunRules.pdf.
ITERATIONS_PER_CPU = 1000000
# Methods of parallelism supported by Coremark.
PARALLELISM_PTHREAD = 'PTHREAD'
PARALLELISM_FORK = 'FORK'
PARALLELISM_SOCKET = 'SOCKET'
flags.DEFINE_enum('coremark_parallelism_method', PARALLELISM_PTHREAD,
[PARALLELISM_PTHREAD, PARALLELISM_FORK, PARALLELISM_SOCKET],
'Method to use for parallelism in the Coremark benchmark.')
FLAGS = flags.FLAGS
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def CheckPrerequisites(benchmark_config):
"""Verifies that the required resources are present."""
del benchmark_config
def PrepareCoremark(remote_command):
"""Prepares coremark on a VM.
Args:
remote_command: Function to run a remote command on the VM.
"""
if FLAGS.coremark_parallelism_method == PARALLELISM_PTHREAD:
remote_command('sed -i -e "s/LFLAGS_END += -lrt/LFLAGS_END += -lrt '
'-lpthread/g" %s/%s' % (COREMARK_DIR, COREMARK_BUILDFILE))
def Prepare(benchmark_spec):
"""Install Coremark on the target vm.
Args:
benchmark_spec: The benchmark specification.
"""
vm = benchmark_spec.vms[0]
vm.Install('coremark')
PrepareCoremark(vm.RemoteCommand)
def RunCoremark(remote_command, num_threads):
"""Runs coremark on the VM.
Args:
remote_command: Function to run a remote command on the VM.
num_threads: Number of threads to use.
Returns:
A list of sample.Sample objects with the performance results.
"""
remote_command('cd %s;make PORT_DIR=linux64 ITERATIONS=%s XCFLAGS="-g -O2 '
'-DMULTITHREAD=%d -DUSE_%s -DPERFORMANCE_RUN=1"' %
(COREMARK_DIR, ITERATIONS_PER_CPU, num_threads,
FLAGS.coremark_parallelism_method))
output, _ = remote_command('cat %s/run1.log' % COREMARK_DIR, should_log=True)
return _ParseOutputForSamples(output)
def _ParseOutputForSamples(output):
"""Parses the output from running Coremark to get performance samples.
Args:
output: The output from running Coremark.
Returns:
A list of sample.Sample objects.
Raises:
Benchmarks.RunError: If correct operation is not validated.
"""
if 'Correct operation validated' not in output:
raise errors.Benchmarks.RunError('Correct operation not validated.')
value = regex_util.ExtractFloat(r'CoreMark 1.0 : ([0-9]*\.[0-9]*)', output)
metadata = {
'summary':
output.splitlines()[-1], # Last line of output is a summary.
'size':
regex_util.ExtractInt(r'CoreMark Size\s*:\s*([0-9]*)', output),
'total_ticks':
regex_util.ExtractInt(r'Total ticks\s*:\s*([0-9]*)', output),
'total_time_sec':
regex_util.ExtractFloat(r'Total time \(secs\)\s*:\s*([0-9]*\.[0-9]*)',
output),
'iterations':
regex_util.ExtractInt(r'Iterations\s*:\s*([0-9]*)', output),
'iterations_per_cpu': ITERATIONS_PER_CPU,
'parallelism_method': FLAGS.coremark_parallelism_method,
}
return [sample.Sample('Coremark Score', value, '', metadata)]
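# Hedged illustration (not part of the original benchmark): how the score regex
# used above extracts the metric from a typical-looking result line. The sample
# line below is an assumption, not output captured from a real run.
def _ExampleExtractScore():
  sample_line = 'CoreMark 1.0 : 12345.678901 / GCC10.2.0 -O2 / Heap'
  return regex_util.ExtractFloat(r'CoreMark 1.0 : ([0-9]*\.[0-9]*)', sample_line)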
def Run(benchmark_spec):
"""Runs Coremark on the target vm.
Args:
benchmark_spec: The benchmark specification.
Returns:
A list of sample.Sample objects with the performance results.
Raises:
Benchmarks.RunError: If correct operation is not validated.
"""
vm = benchmark_spec.vms[0]
return RunCoremark(vm.RemoteCommand, vm.NumCpusForBenchmark())
def CleanupCoremark(remote_command):
"""Cleans up the coremark installation.
Args:
remote_command: Function to run a remote command on the VM.
"""
remote_command('rm -rf %s' % COREMARK_DIR)
remote_command('rm -f %s' % coremark.COREMARK_TAR)
def Cleanup(benchmark_spec):
"""Cleanup Coremark on the target vm.
Args:
benchmark_spec: The benchmark specification.
"""
vm = benchmark_spec.vms[0]
CleanupCoremark(vm.RemoteCommand)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.flags import _helpers
from absl.testing import absltest
FLAGS = flags.FLAGS
class FlagsUnitTest(absltest.TestCase):
"""Flags formatting Unit Test."""
def test_get_help_width(self):
"""Verify that get_help_width() reflects _help_width."""
default_help_width = _helpers._DEFAULT_HELP_WIDTH # Save.
self.assertEqual(80, _helpers._DEFAULT_HELP_WIDTH)
self.assertEqual(_helpers._DEFAULT_HELP_WIDTH, flags.get_help_width())
_helpers._DEFAULT_HELP_WIDTH = 10
self.assertEqual(_helpers._DEFAULT_HELP_WIDTH, flags.get_help_width())
_helpers._DEFAULT_HELP_WIDTH = default_help_width # restore
def test_text_wrap(self):
"""Test that wrapping works as expected.
Also tests that it is using global flags._help_width by default.
"""
default_help_width = _helpers._DEFAULT_HELP_WIDTH
_helpers._DEFAULT_HELP_WIDTH = 10
# Generate a string with length 40, no spaces
text = ''
expect = []
for n in range(4):
line = str(n)
line += '123456789'
text += line
expect.append(line)
# Verify we still break
wrapped = flags.text_wrap(text).split('\n')
self.assertEqual(4, len(wrapped))
self.assertEqual(expect, wrapped)
wrapped = flags.text_wrap(text, 80).split('\n')
self.assertEqual(1, len(wrapped))
self.assertEqual([text], wrapped)
# Normal case, breaking at word boundaries and rewriting new lines
input_value = 'a b c d e f g h'
expect = {1: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'],
2: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'],
3: ['a b', 'c d', 'e f', 'g h'],
4: ['a b', 'c d', 'e f', 'g h'],
5: ['a b c', 'd e f', 'g h'],
6: ['a b c', 'd e f', 'g h'],
7: ['a b c d', 'e f g h'],
8: ['a b c d', 'e f g h'],
9: ['a b c d e', 'f g h'],
10: ['a b c d e', 'f g h'],
11: ['a b c d e f', 'g h'],
12: ['a b c d e f', 'g h'],
13: ['a b c d e f g', 'h'],
14: ['a b c d e f g', 'h'],
15: ['a b c d e f g h']}
for width, exp in expect.items():
self.assertEqual(exp, flags.text_wrap(input_value, width).split('\n'))
# We turn lines with only whitespace into empty lines
# We strip from the right up to the first new line
self.assertEqual('', flags.text_wrap(' '))
self.assertEqual('\n', flags.text_wrap(' \n '))
self.assertEqual('\n', flags.text_wrap('\n\n'))
self.assertEqual('\n\n', flags.text_wrap('\n\n\n'))
self.assertEqual('\n', flags.text_wrap('\n '))
self.assertEqual('a\n\nb', flags.text_wrap('a\n \nb'))
self.assertEqual('a\n\n\nb', flags.text_wrap('a\n \n \nb'))
self.assertEqual('a\nb', flags.text_wrap(' a\nb '))
self.assertEqual('\na\nb', flags.text_wrap('\na\nb\n'))
self.assertEqual('\na\nb\n', flags.text_wrap(' \na\nb\n '))
self.assertEqual('\na\nb\n', flags.text_wrap(' \na\nb\n\n'))
# Double newline.
self.assertEqual('a\n\nb', flags.text_wrap(' a\n\n b'))
# We respect prefix
self.assertEqual(' a\n b\n c', flags.text_wrap('a\nb\nc', 80, ' '))
self.assertEqual('a\n b\n c', flags.text_wrap('a\nb\nc', 80, ' ', ''))
# tabs
self.assertEqual('a\n b c',
flags.text_wrap('a\nb\tc', 80, ' ', ''))
self.assertEqual('a\n bb c',
flags.text_wrap('a\nbb\tc', 80, ' ', ''))
self.assertEqual('a\n bbb c',
flags.text_wrap('a\nbbb\tc', 80, ' ', ''))
self.assertEqual('a\n bbbb c',
flags.text_wrap('a\nbbbb\tc', 80, ' ', ''))
self.assertEqual('a\n b\n c\n d',
flags.text_wrap('a\nb\tc\td', 3, ' ', ''))
self.assertEqual('a\n b\n c\n d',
flags.text_wrap('a\nb\tc\td', 4, ' ', ''))
self.assertEqual('a\n b\n c\n d',
flags.text_wrap('a\nb\tc\td', 5, ' ', ''))
self.assertEqual('a\n b c\n d',
flags.text_wrap('a\nb\tc\td', 6, ' ', ''))
self.assertEqual('a\n b c\n d',
flags.text_wrap('a\nb\tc\td', 7, ' ', ''))
self.assertEqual('a\n b c\n d',
flags.text_wrap('a\nb\tc\td', 8, ' ', ''))
self.assertEqual('a\n b c\n d',
flags.text_wrap('a\nb\tc\td', 9, ' ', ''))
self.assertEqual('a\n b c d',
flags.text_wrap('a\nb\tc\td', 10, ' ', ''))
# multiple tabs
self.assertEqual('a c',
flags.text_wrap('a\t\tc', 80, ' ', ''))
_helpers._DEFAULT_HELP_WIDTH = default_help_width # restore
def test_doc_to_help(self):
self.assertEqual('', flags.doc_to_help(' '))
self.assertEqual('', flags.doc_to_help(' \n '))
self.assertEqual('a\n\nb', flags.doc_to_help('a\n \nb'))
self.assertEqual('a\n\n\nb', flags.doc_to_help('a\n \n \nb'))
self.assertEqual('a b', flags.doc_to_help(' a\nb '))
self.assertEqual('a b', flags.doc_to_help('\na\nb\n'))
self.assertEqual('a\n\nb', flags.doc_to_help('\na\n\nb\n'))
self.assertEqual('a b', flags.doc_to_help(' \na\nb\n '))
# Different first line, one line empty - erm double new line.
self.assertEqual('a b c\n\nd', flags.doc_to_help('a\n b\n c\n\n d'))
self.assertEqual('a b\n c d', flags.doc_to_help('a\n b\n \tc\n d'))
self.assertEqual('a b\n c\n d',
flags.doc_to_help('a\n b\n \tc\n \td'))
def test_doc_to_help_flag_values(self):
# !!!!!!!!!!!!!!!!!!!!
# The following doc string is taken as is directly from flags.py:FlagValues
# The intention of this test is to verify 'live' performance
# !!!!!!!!!!!!!!!!!!!!
"""Used as a registry for 'Flag' objects.
A 'FlagValues' can then scan command line arguments, passing flag
arguments through to the 'Flag' objects that it owns. It also
provides easy access to the flag values. Typically only one
'FlagValues' object is needed by an application: flags.FLAGS
This class is heavily overloaded:
'Flag' objects are registered via __setitem__:
FLAGS['longname'] = x # register a new flag
The .value member of the registered 'Flag' objects can be accessed as
members of this 'FlagValues' object, through __getattr__. Both the
long and short name of the original 'Flag' objects can be used to
access its value:
FLAGS.longname # parsed flag value
FLAGS.x # parsed flag value (short name)
Command line arguments are scanned and passed to the registered 'Flag'
objects through the __call__ method. Unparsed arguments, including
argv[0] (e.g. the program name) are returned.
argv = FLAGS(sys.argv) # scan command line arguments
The original registered Flag objects can be retrieved through the use
"""
doc = flags.doc_to_help(self.test_doc_to_help_flag_values.__doc__)
# Test the general outline of the converted docs
lines = doc.splitlines()
self.assertEqual(17, len(lines))
empty_lines = [index for index in range(len(lines)) if not lines[index]]
self.assertEqual([1, 3, 5, 8, 12, 15], empty_lines)
# test that some starting prefix is kept
flags_lines = [index for index in range(len(lines))
if lines[index].startswith(' FLAGS')]
self.assertEqual([7, 10, 11], flags_lines)
# but other, especially common space has been removed
space_lines = [index for index in range(len(lines))
if lines[index] and lines[index][0].isspace()]
self.assertEqual([7, 10, 11, 14], space_lines)
# No right space was kept
rspace_lines = [index for index in range(len(lines))
if lines[index] != lines[index].rstrip()]
self.assertEqual([], rspace_lines)
# test double spaces are kept
self.assertEqual(True, lines[2].endswith('application: flags.FLAGS'))
def test_text_wrap_raises_on_excessive_indent(self):
"""Ensure an indent longer than line length raises."""
self.assertRaises(ValueError,
flags.text_wrap, 'dummy', length=10, indent=' ' * 10)
def test_text_wrap_raises_on_excessive_first_line(self):
"""Ensure a first line indent longer than line length raises."""
self.assertRaises(
ValueError,
flags.text_wrap, 'dummy', length=80, firstline_indent=' ' * 80)
if __name__ == '__main__':
absltest.main()
|
import os
import sys
import argparse
import struct
from openrazer_daemon.keyboard import EVENT_MAPPING, TARTARUS_EVENT_MAPPING
EVENT_FORMAT = '@llHHI'
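# '@llHHI' mirrors the kernel's input_event struct: two native longs for the
# timestamp (seconds, microseconds), two unsigned shorts for the event type and
# code, and a 32-bit value (decoded as unsigned here).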
EVENT_SIZE = struct.calcsize(EVENT_FORMAT)
def loop_on_event(event_file, mapping):
with open(event_file, 'rb') as open_file:
while True:
payload = open_file.read(EVENT_SIZE)
ev_type, code, value = struct.unpack(EVENT_FORMAT, payload)[2:]
if (ev_type == code == 0) or ev_type == 4:
continue
if ev_type == 1:
if value == 0:
value = 'UP'
elif value == 1:
value = 'DOWN'
else:
value = 'REPEAT'
code = mapping.get(code, code)
print("Type: EV_KEY, Code: {0}, Value: {1}".format(code, value))
else:
print("Type: {0}, Code: {1}, Value: {2}".format(ev_type, code, value))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('event_file', metavar='EVENT_FILE', type=str, help="Device event file like \"/dev/input/by-id/usb-Razer_Razer_BlackWidow_Chroma-event-kbd\"")
parser.add_argument('--tartarus', action='store_true', help='Use the tartarus event mapping instead')
return parser.parse_args()
def run():
args = parse_args()
if not os.path.exists(args.event_file):
print('Event file does not exist', file=sys.stderr)
sys.exit(1)
if args.tartarus:
mapping = TARTARUS_EVENT_MAPPING
else:
#mapping = EVENT_MAPPING
mapping = {}
print('Starting. Press keys', file=sys.stderr)
try:
loop_on_event(args.event_file, mapping)
except KeyboardInterrupt:
print("Exiting", file=sys.stderr)
if __name__ == '__main__':
run()
|
from typing import List
from aioflo.location import SLEEP_MINUTE_OPTIONS, SYSTEM_MODE_HOME, SYSTEM_REVERT_MODES
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers import entity_platform
from .const import DOMAIN as FLO_DOMAIN
from .device import FloDeviceDataUpdateCoordinator
from .entity import FloEntity
ATTR_REVERT_TO_MODE = "revert_to_mode"
ATTR_SLEEP_MINUTES = "sleep_minutes"
SERVICE_SET_SLEEP_MODE = "set_sleep_mode"
SERVICE_SET_AWAY_MODE = "set_away_mode"
SERVICE_SET_HOME_MODE = "set_home_mode"
SERVICE_RUN_HEALTH_TEST = "run_health_test"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Flo switches from config entry."""
devices: List[FloDeviceDataUpdateCoordinator] = hass.data[FLO_DOMAIN][
config_entry.entry_id
]["devices"]
async_add_entities([FloSwitch(device) for device in devices])
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_SET_AWAY_MODE, {}, "async_set_mode_away"
)
platform.async_register_entity_service(
SERVICE_SET_HOME_MODE, {}, "async_set_mode_home"
)
platform.async_register_entity_service(
SERVICE_RUN_HEALTH_TEST, {}, "async_run_health_test"
)
platform.async_register_entity_service(
SERVICE_SET_SLEEP_MODE,
{
vol.Required(ATTR_SLEEP_MINUTES, default=120): vol.In(SLEEP_MINUTE_OPTIONS),
vol.Required(ATTR_REVERT_TO_MODE, default=SYSTEM_MODE_HOME): vol.In(
SYSTEM_REVERT_MODES
),
},
"async_set_mode_sleep",
)
class FloSwitch(FloEntity, SwitchEntity):
"""Switch class for the Flo by Moen valve."""
def __init__(self, device: FloDeviceDataUpdateCoordinator):
"""Initialize the Flo switch."""
super().__init__("shutoff_valve", "Shutoff Valve", device)
self._state = self._device.last_known_valve_state == "open"
@property
def is_on(self) -> bool:
"""Return True if the valve is open."""
return self._state
@property
def icon(self):
"""Return the icon to use for the valve."""
if self.is_on:
return "mdi:valve-open"
return "mdi:valve-closed"
async def async_turn_on(self, **kwargs) -> None:
"""Open the valve."""
await self._device.api_client.device.open_valve(self._device.id)
self._state = True
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Close the valve."""
await self._device.api_client.device.close_valve(self._device.id)
self._state = False
self.async_write_ha_state()
@callback
def async_update_state(self) -> None:
"""Retrieve the latest valve state and update the state machine."""
self._state = self._device.last_known_valve_state == "open"
self.async_write_ha_state()
async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(self._device.async_add_listener(self.async_update_state))
async def async_set_mode_home(self):
"""Set the Flo location to home mode."""
await self._device.async_set_mode_home()
async def async_set_mode_away(self):
"""Set the Flo location to away mode."""
await self._device.async_set_mode_away()
async def async_set_mode_sleep(self, sleep_minutes, revert_to_mode):
"""Set the Flo location to sleep mode."""
await self._device.async_set_mode_sleep(sleep_minutes, revert_to_mode)
async def async_run_health_test(self):
"""Run a Flo device health test."""
await self._device.async_run_health_test()
|
from copy import deepcopy
import os.path as op
import numpy as np
from numpy.testing import assert_allclose
import mne
from mne.datasets import testing
from mne.beamformer import make_lcmv, make_lcmv_resolution_matrix
data_path = testing.data_path(download=False)
subjects_dir = op.join(data_path, 'subjects')
fname_inv = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_evoked = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-ave.fif')
fname_raw = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-cov.fif')
@testing.requires_testing_data
def test_resolution_matrix_lcmv():
"""Test computation of resolution matrix for LCMV beamformers."""
# read forward solution
forward = mne.read_forward_solution(fname_fwd)
# remove bad channels
forward = mne.pick_channels_forward(forward, exclude='bads')
# forward operator with fixed source orientations
forward_fxd = mne.convert_forward_solution(forward, surf_ori=True,
force_fixed=True)
# evoked info
info = mne.io.read_info(fname_evoked)
mne.pick_info(info, mne.pick_types(info, meg=True), copy=False) # good MEG
# noise covariance matrix
# ad-hoc to avoid discrepancies due to regularisation of real noise
# covariance matrix
noise_cov = mne.make_ad_hoc_cov(info)
# Resolution matrix for Beamformer
data_cov = noise_cov.copy() # to test a property of LCMV
# compute beamformer filters
# reg=0. to make sure noise_cov and data_cov are as similar as possible
filters = make_lcmv(info, forward_fxd, data_cov, reg=0.,
noise_cov=noise_cov,
pick_ori=None, rank=None,
weight_norm=None,
reduce_rank=False,
verbose=False)
# Compute resolution matrix for beamformer
resmat_lcmv = make_lcmv_resolution_matrix(filters, forward_fxd, info)
# for noise_cov==data_cov and whitening, the filter weights should be the
# transpose of leadfield
# create filters with transposed whitened leadfield as weights
forward_fxd = mne.pick_channels_forward(forward_fxd, info['ch_names'])
filters_lfd = deepcopy(filters)
filters_lfd['weights'][:] = forward_fxd['sol']['data'].T
# compute resolution matrix for filters with transposed leadfield
resmat_fwd = make_lcmv_resolution_matrix(filters_lfd, forward_fxd, info)
# pairwise correlation for rows (CTFs) of resolution matrices for whitened
# LCMV beamformer and transposed leadfield should be 1
# Some rows are off by about 0.1 - not yet clear why
corr = []
for (f, l) in zip(resmat_fwd, resmat_lcmv):
corr.append(np.corrcoef(f, l)[0, 1])
# all row correlations should at least be above ~0.8
assert_allclose(corr, 1., atol=0.2)
# Maximum row correlation should at least be close to 1
assert_allclose(np.max(corr), 1., atol=0.01)
|
from ReText import settings
from enum import IntFlag, auto
import re
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QFont, QSyntaxHighlighter, QTextCharFormat
reHtmlTags = re.compile('<[^<>@]*>')
reHtmlSymbols = re.compile(r'&#?\w+;')
reHtmlStrings = re.compile('"[^"<]*"(?=[^<]*>)')
reHtmlComments = re.compile('<!--[^<>]*-->')
reAsterisks = re.compile(r'(?<!\*)\*[^ \*][^\*]*\*')
reUnderline = re.compile(r'(?<!_|\w)_[^_]+_(?!\w)')
reDblAsterisks = re.compile(r'(?<!\*)\*\*((?!\*\*).)*\*\*')
reDblUnderline = re.compile(r'(?<!_|\w)__[^_]+__(?!\w)')
reTrpAsterisks = re.compile(r'\*{3,3}[^\*]+\*{3,3}')
reTrpUnderline = re.compile('___[^_]+___')
reMkdHeaders = re.compile('^#.+')
reMkdLinksImgs = re.compile(r'(?<=\[)[^\[\]]*(?=\])')
reMkdLinkRefs = re.compile(r'(?<=\]\()[^\(\)]*(?=\))')
reBlockQuotes = re.compile('^ *>.+')
reReSTDirects = re.compile(r'\.\. [a-z]+::')
reReSTRoles = re.compile('(:[a-z-]+:)(`.+?`)')
reReSTLinks = re.compile('(`.+?<)(.+?)(>`__?)')
reReSTLinkRefs = re.compile(r'\.\. _`?(.*?)`?: (.*)')
reReSTFldLists = re.compile('^ *:(.*?):')
reTextileHdrs = re.compile(r'^h[1-6][()<>=]*\.\s.+')
reTextileQuot = re.compile(r'^bq\.\s.+')
reMkdCodeSpans = re.compile('`[^`]*`')
reMkdMathSpans = re.compile(r'\\[\(\[].*?\\[\)\]]')
reReSTCodeSpan = re.compile('``.+?``')
reWords = re.compile('[^_\\W]+')
reSpacesOnEnd = re.compile(r'\s+$')
defaultColorScheme = {
'htmlTags': Qt.darkMagenta,
'htmlSymbols': Qt.darkCyan,
'htmlStrings': Qt.darkYellow,
'htmlComments': Qt.gray,
'codeSpans': QColor(0x50, 0x50, 0x50),
'markdownLinks': QColor(0, 0, 0x90),
'blockquotes': Qt.darkGray,
'restDirectives': Qt.darkMagenta,
'restRoles': Qt.darkRed,
'whitespaceOnEnd': QColor(0xe1, 0xe1, 0xa5, 0x80)
}
colorScheme = {}
def updateColorScheme(settings=settings):
settings.beginGroup('ColorScheme')
for key in defaultColorScheme:
if settings.contains(key):
colorScheme[key] = settings.value(key, type=QColor)
else:
colorScheme[key] = defaultColorScheme[key]
settings.endGroup()
updateColorScheme()
class Formatter:
def __init__(self, funcs=None):
self._funcs = funcs or []
def __or__(self, other):
result = Formatter(self._funcs.copy())
if isinstance(other, Formatter):
result._funcs.extend(other._funcs)
elif isinstance(other, QFont.Weight):
result._funcs.append(lambda f: f.setFontWeight(other))
return result
def format(self, charFormat):
for func in self._funcs:
func(charFormat)
NF = Formatter()
ITAL = Formatter([lambda f: f.setFontItalic(True)])
UNDL = Formatter([lambda f: f.setFontUnderline(True)])
def FG(colorName):
color = colorScheme[colorName]
func = lambda f: f.setForeground(color)
return Formatter([func])
def QString_length(text):
# In QString, surrogate pairs are represented using multiple QChars,
# so the length of QString is not always equal to the number of graphemes
# in it (which is the case with Python strings).
return sum(2 if ord(char) > 65535 else 1 for char in text)
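# Illustrative check (an addition for clarity, not part of the original module):
# characters outside the Basic Multilingual Plane are stored as UTF-16
# surrogate pairs, so QString_length() counts them as two QChars.
def _QString_length_example():
    assert QString_length('a') == 1
    assert QString_length('\U0001F600') == 2  # emoji occupies a surrogate pair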
class Markup(IntFlag):
Mkd = auto()
ReST = auto()
Textile = auto()
HTML = auto()
# Special value which means that no other markup is allowed inside this pattern
CodeSpan = auto()
docTypesMapping = {
'Markdown': Markup.Mkd,
'reStructuredText': Markup.ReST,
'Textile': Markup.Textile,
'html': Markup.HTML,
}
class ReTextHighlighter(QSyntaxHighlighter):
dictionary = None
docType = None
patterns = (
# regex, color, markups
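        # Entries that list several formatters apply them to successive regex
        # groups: formatter 0 to the whole match, formatter 1 to group 1, etc.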
(reMkdCodeSpans, FG('codeSpans'), Markup.Mkd | Markup.CodeSpan),
(reMkdMathSpans, FG('codeSpans'), Markup.Mkd | Markup.CodeSpan),
(reReSTCodeSpan, FG('codeSpans'), Markup.ReST | Markup.CodeSpan),
(reHtmlTags, FG('htmlTags') | QFont.Bold, Markup.Mkd | Markup.Textile | Markup.HTML),
(reHtmlSymbols, FG('htmlSymbols') | QFont.Bold, Markup.Mkd | Markup.HTML),
(reHtmlStrings, FG('htmlStrings') | QFont.Bold, Markup.Mkd | Markup.HTML),
(reHtmlComments, FG('htmlComments'), Markup.Mkd | Markup.HTML),
(reAsterisks, ITAL, Markup.Mkd | Markup.ReST),
(reUnderline, ITAL, Markup.Mkd | Markup.Textile),
(reDblAsterisks, NF | QFont.Bold, Markup.Mkd | Markup.ReST | Markup.Textile),
(reDblUnderline, NF | QFont.Bold, Markup.Mkd),
(reTrpAsterisks, ITAL | QFont.Bold, Markup.Mkd),
(reTrpUnderline, ITAL | QFont.Bold, Markup.Mkd),
(reMkdHeaders, NF | QFont.Black, Markup.Mkd),
(reMkdLinksImgs, FG('markdownLinks'), Markup.Mkd),
(reMkdLinkRefs, ITAL | UNDL, Markup.Mkd),
(reBlockQuotes, FG('blockquotes'), Markup.Mkd),
(reReSTDirects, FG('restDirectives') | QFont.Bold, Markup.ReST),
(reReSTRoles, NF, FG('restRoles') | QFont.Bold, FG('htmlStrings'), Markup.ReST),
(reTextileHdrs, NF | QFont.Black, Markup.Textile),
(reTextileQuot, FG('blockquotes'), Markup.Textile),
(reAsterisks, NF | QFont.Bold, Markup.Textile),
(reDblUnderline, ITAL, Markup.Textile),
(reReSTLinks, NF, NF, ITAL | UNDL, NF, Markup.ReST),
(reReSTLinkRefs, NF, FG('markdownLinks'), ITAL | UNDL, Markup.ReST),
(reReSTFldLists, NF, FG('restDirectives'), Markup.ReST),
)
def highlightBlock(self, text):
# Syntax highlighter
codeSpans = set()
if self.docType in docTypesMapping:
markup = docTypesMapping[self.docType]
for pattern, *formatters, markups in self.patterns:
if not (markup & markups):
continue
for match in pattern.finditer(text):
start, end = match.start(), match.end()
if markups & Markup.CodeSpan:
codeSpans.add((start, end))
elif any(start < codeEnd and end > codeStart
for codeStart, codeEnd in codeSpans):
# Ignore any syntax if its match intersects with code spans.
# See https://github.com/retext-project/retext/issues/529
continue
for i, formatter in enumerate(formatters):
charFormat = QTextCharFormat()
formatter.format(charFormat)
self.setFormat(QString_length(text[:match.start(i)]),
QString_length(match.group(i)),
charFormat)
for match in reSpacesOnEnd.finditer(text):
charFormat = QTextCharFormat()
charFormat.setBackground(colorScheme['whitespaceOnEnd'])
self.setFormat(QString_length(text[:match.start()]),
QString_length(match.group(0)),
charFormat)
# Spell checker
if self.dictionary:
charFormat = QTextCharFormat()
charFormat.setUnderlineColor(Qt.red)
charFormat.setUnderlineStyle(QTextCharFormat.SpellCheckUnderline)
for match in reWords.finditer(text):
finalFormat = QTextCharFormat()
finalFormat.merge(charFormat)
finalFormat.merge(self.format(match.start()))
if not self.dictionary.check(match.group(0)):
self.setFormat(QString_length(text[:match.start()]),
QString_length(match.group(0)),
finalFormat)
|
from cpuinfo import cpuinfo
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, FREQUENCY_GIGAHERTZ
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTR_BRAND = "brand"
ATTR_HZ = "ghz_advertised"
ATTR_ARCH = "arch"
HZ_ACTUAL = "hz_actual"
HZ_ADVERTISED = "hz_advertised"
DEFAULT_NAME = "CPU speed"
ICON = "mdi:pulse"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the CPU speed sensor."""
name = config[CONF_NAME]
add_entities([CpuSpeedSensor(name)], True)
class CpuSpeedSensor(Entity):
"""Representation of a CPU sensor."""
def __init__(self, name):
"""Initialize the CPU sensor."""
self._name = name
self._state = None
self.info = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return FREQUENCY_GIGAHERTZ
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.info is not None:
attrs = {
ATTR_ARCH: self.info["arch_string_raw"],
ATTR_BRAND: self.info["brand_raw"],
}
if HZ_ADVERTISED in self.info:
attrs[ATTR_HZ] = round(self.info[HZ_ADVERTISED][0] / 10 ** 9, 2)
return attrs
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the state."""
self.info = cpuinfo.get_cpu_info()
if HZ_ACTUAL in self.info:
self._state = round(float(self.info[HZ_ACTUAL][0]) / 10 ** 9, 2)
else:
self._state = None
|
import os
import pytest
from molecule import config
from molecule.driver import delegated
@pytest.fixture
def _driver_managed_section_data():
return {
'driver': {
'name': 'delegated',
}
}
@pytest.fixture
def _driver_unmanaged_section_data():
return {
'driver': {
'name': 'delegated',
'options': {
'login_cmd_template': 'docker exec -ti {instance} bash',
'ansible_connection_options': {
'ansible_connection': 'docker'
},
'managed': False,
}
}
}
@pytest.fixture
def _instance(config_instance):
return delegated.Delegated(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'delegated' == _instance.name
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_options_property(_instance):
x = {
'ansible_connection_options': {
'ansible_connection': 'docker'
},
'login_cmd_template': 'docker exec -ti {instance} bash',
'managed': False,
}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_driver_managed_section_data'], indirect=True)
def test_options_property_when_managed(_instance):
x = {
'managed': True,
}
assert x == _instance.options
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_login_cmd_template_property(_instance):
x = 'docker exec -ti {instance} bash'
assert x == _instance.login_cmd_template
@pytest.mark.parametrize(
'config_instance', ['_driver_managed_section_data'], indirect=True)
def test_login_cmd_template_property_when_managed(_instance):
x = ('ssh {address} -l {user} -p {port} -i {identity_file} '
'-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no')
assert x == _instance.login_cmd_template
def test_safe_files_property(_instance):
assert [] == _instance.safe_files
def test_default_safe_files_property(_instance):
assert [] == _instance.default_safe_files
def test_delegated_property(_instance):
assert _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_default_ssh_connection_options_property(_instance):
assert [] == _instance.default_ssh_connection_options
@pytest.mark.parametrize(
'config_instance', ['_driver_managed_section_data'], indirect=True)
def test_default_ssh_connection_options_property_when_managed(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.default_ssh_connection_options
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_login_options(_instance):
assert {'instance': 'foo'} == _instance.login_options('foo')
@pytest.mark.parametrize(
'config_instance', ['_driver_managed_section_data'], indirect=True)
def test_login_options_when_managed(mocker, _instance):
m = mocker.patch(
'molecule.driver.delegated.Delegated._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
assert x == _instance.login_options('foo')
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_ansible_connection_options(_instance):
x = {'ansible_connection': 'docker'}
assert x == _instance.ansible_connection_options('foo')
@pytest.mark.parametrize(
'config_instance', ['_driver_managed_section_data'], indirect=True)
def test_ansible_connection_options_when_managed(mocker, _instance):
ssh_case_data = mocker.patch(
'molecule.driver.delegated.Delegated._get_instance_config')
ssh_case_data.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
ssh_expected_data = {
'ansible_host':
'172.16.0.2',
'ansible_port':
22,
'ansible_user':
'cloud-user',
'ansible_private_key_file':
'/foo/bar',
'ansible_connection':
'smart',
'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no'),
}
assert ssh_expected_data == _instance.ansible_connection_options('foo')
winrm_case_data = mocker.patch(
'molecule.driver.delegated.Delegated._get_instance_config')
winrm_case_data.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 5896,
'connection': 'winrm'
}
winrm_expected_data = {
'ansible_host': '172.16.0.2',
'ansible_port': 5896,
'ansible_user': 'cloud-user',
'ansible_connection': 'winrm'
}
assert winrm_expected_data == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config_managed(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key_when_managed(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
@pytest.mark.parametrize(
'config_instance', ['_driver_unmanaged_section_data'], indirect=True)
def test_ssh_connection_options_property(_instance):
assert [] == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'delegated'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'delegated'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_created(_instance):
assert 'false' == _instance._created()
@pytest.fixture
def _driver_options_managed_section_data():
return {
'driver': {
'options': {
'managed': False,
},
}
}
@pytest.mark.parametrize(
'config_instance', ['_driver_options_managed_section_data'], indirect=True)
def test_created_unknown_when_managed_false(
_driver_options_managed_section_data, _instance):
assert 'unknown' == _instance._created()
def test_property(_instance):
assert 'false' == _instance._converged()
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
x = {
'instance': 'foo',
}
assert x == _instance._get_instance_config('foo')
|
from tensornetwork import FiniteMPS
from tensornetwork.matrixproductstates.dmrg import FiniteDMRG, BaseDMRG
from tensornetwork.backends import backend_factory
from tensornetwork.matrixproductstates.mpo import FiniteXXZ
import pytest
import numpy as np
@pytest.fixture(
name="backend_dtype_values",
params=[('numpy', np.float64), ('numpy', np.complex128),
('jax', np.float64), ('jax', np.complex128),
('pytorch', np.float64)])
def backend_dtype(request):
return request.param
def get_XXZ_Hamiltonian(N, Jx, Jy, Jz):
Sx = {}
Sy = {}
Sz = {}
sx = np.array([[0, 0.5], [0.5, 0]])
sy = np.array([[0, 0.5], [-0.5, 0]])
sz = np.diag([-0.5, 0.5])
for n in range(N):
Sx[n] = np.kron(np.kron(np.eye(2**n), sx), np.eye(2**(N - 1 - n)))
Sy[n] = np.kron(np.kron(np.eye(2**n), sy), np.eye(2**(N - 1 - n)))
Sz[n] = np.kron(np.kron(np.eye(2**n), sz), np.eye(2**(N - 1 - n)))
H = np.zeros((2**N, 2**N))
for n in range(N - 1):
H += (Jx * Sx[n] @ Sx[n + 1] - Jy * Sy[n] @ Sy[n + 1]
      + Jz * Sz[n] @ Sz[n + 1])
return H
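# The dense (2**N x 2**N) matrix built above serves as an exact reference:
# the DMRG tests below diagonalize it with np.linalg.eigh and compare the
# DMRG ground-state energy against its lowest eigenvalue.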
def test_BaseDMRG_init(backend_dtype_values):
backend = backend_factory.get_backend(backend_dtype_values[0])
dtype = backend_dtype_values[1]
N = 10
D = 10
Jz = np.ones(N - 1)
Jxy = np.ones(N - 1)
Bz = np.zeros(N)
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=dtype, backend=backend)
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = BaseDMRG(mps, mpo, np.ones((1, 1, 1), dtype=dtype),
np.ones((1, 1, 1), dtype=dtype), 'name')
assert dmrg.name == 'name'
assert dmrg.backend is backend
def test_BaseDMRG_raises():
numpy_backend = backend_factory.get_backend('numpy')
pytorch_backend = backend_factory.get_backend('pytorch')
dtype = np.float64
N = 10
D = 10
Jz = np.ones(N - 1)
Jxy = np.ones(N - 1)
Bz = np.zeros(N)
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=dtype, backend=numpy_backend)
mps = FiniteMPS.random(
[2] * (N - 1), [D] * (N - 2), dtype=dtype, backend=numpy_backend)
with pytest.raises(ValueError):
BaseDMRG(mps, mpo, numpy_backend.ones((1, 1, 1), dtype=dtype),
numpy_backend.ones((1, 1, 1), dtype=dtype), 'name')
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=np.float64, backend=numpy_backend)
mps = FiniteMPS.random(
[2] * N, [D] * (N - 1), dtype=np.float32, backend=numpy_backend)
with pytest.raises(
TypeError,
match="mps.dtype = {} is different from "
"mpo.dtype = {}".format(mps.dtype, mpo.dtype)):
BaseDMRG(mps, mpo, numpy_backend.ones((1, 1, 1), dtype=dtype),
numpy_backend.ones((1, 1, 1), dtype=dtype), 'name')
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=np.float64, backend=numpy_backend)
mps = FiniteMPS.random(
[2] * N, [D] * (N - 1), dtype=np.float64, backend=pytorch_backend)
with pytest.raises(TypeError, match="mps and mpo use different backends."):
BaseDMRG(mps, mpo, numpy_backend.ones((1, 1, 1), dtype=dtype),
numpy_backend.ones((1, 1, 1), dtype=dtype), 'name')
def test_BaseDMRG_raises_2():
backend = 'numpy'
backend_obj = backend_factory.get_backend(backend)
dtype = np.float64
N = 10
D = 10
Jz = np.ones(N - 1)
Jxy = np.ones(N - 1)
Bz = np.zeros(N)
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=dtype, backend=backend)
tensors = [np.random.randn(1, 2, D)] + [
np.random.randn(D, 2, D) for _ in range(N - 2)
] + [np.random.randn(D, 2, 1)]
mps = FiniteMPS(
tensors, center_position=None, canonicalize=False, backend=backend)
with pytest.raises(
ValueError,
match="Found mps in non-canonical form. Please canonicalize mps."):
BaseDMRG(mps, mpo, backend_obj.ones((1, 1, 1), dtype=dtype),
backend_obj.ones((1, 1, 1), dtype=dtype), 'name')
def test_BaseDMRG_position(backend_dtype_values):
backend = backend_factory.get_backend(backend_dtype_values[0])
dtype = backend_dtype_values[1]
N = 10
D = 10
Jz = np.ones(N - 1)
Jxy = np.ones(N - 1)
Bz = np.zeros(N)
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=dtype, backend=backend)
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = BaseDMRG(mps, mpo, np.ones((1, 1, 1), dtype=dtype),
np.ones((1, 1, 1), dtype=dtype), 'name')
dmrg.position(N - 1)
np.testing.assert_allclose(np.arange(N), sorted(list(dmrg.left_envs.keys())))
np.testing.assert_allclose([N - 1], list(dmrg.right_envs.keys()))
assert dmrg.mps.center_position == N - 1
dmrg.position(0)
np.testing.assert_allclose([0], list(dmrg.left_envs.keys()))
np.testing.assert_allclose(np.arange(N), sorted(list(dmrg.right_envs.keys())))
assert dmrg.mps.center_position == 0
with pytest.raises(IndexError, match="site > length of mps"):
dmrg.position(N)
with pytest.raises(IndexError, match="site < 0"):
dmrg.position(-1)
def test_compute_envs(backend_dtype_values):
backend = backend_factory.get_backend(backend_dtype_values[0])
dtype = backend_dtype_values[1]
N = 10
D = 10
Jz = np.ones(N - 1)
Jxy = np.ones(N - 1)
Bz = np.zeros(N)
mpo = FiniteXXZ(Jz, Jxy, Bz, dtype=dtype, backend=backend)
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = BaseDMRG(mps, mpo, np.ones((1, 1, 1), dtype=dtype),
np.ones((1, 1, 1), dtype=dtype), 'name')
dmrg.position(5)
dmrg.compute_left_envs()
dmrg.compute_right_envs()
np.testing.assert_allclose([0, 1, 2, 3, 4, 5],
sorted(list(dmrg.left_envs.keys())))
np.testing.assert_allclose([5, 6, 7, 8, 9],
sorted(list(dmrg.right_envs.keys())))
@pytest.mark.parametrize("N", [4, 6, 7])
def test_finite_DMRG_init(backend_dtype_values, N):
np.random.seed(16)
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
H = get_XXZ_Hamiltonian(N, 1, 1, 1)
eta, _ = np.linalg.eigh(H)
mpo = FiniteXXZ(
Jz=np.ones(N - 1),
Jxy=np.ones(N - 1),
Bz=np.zeros(N),
dtype=dtype,
backend=backend)
D = 32
# test one-site DMRG
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = FiniteDMRG(mps, mpo)
one_site_energy = dmrg.run_one_site(num_sweeps=4, num_krylov_vecs=10)
np.testing.assert_allclose(one_site_energy, eta[0])
one_site_energy_no_sweeps = dmrg.run_one_site(num_sweeps=0,
num_krylov_vecs=10)
np.testing.assert_allclose(one_site_energy_no_sweeps, one_site_energy)
# test two-site DMRG
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = FiniteDMRG(mps, mpo)
two_site_energy = dmrg.run_two_site(max_bond_dim=D, num_sweeps=4,
num_krylov_vecs=10)
np.testing.assert_allclose(two_site_energy, eta[0])
two_site_energy_no_sweeps = dmrg.run_two_site(max_bond_dim=D, num_sweeps=0,
num_krylov_vecs=10)
np.testing.assert_allclose(two_site_energy_no_sweeps, two_site_energy)
def test_finite_DMRG_one_site_outstream(backend_dtype_values, capsys):
np.random.seed(16)
N = 6
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
mpo = FiniteXXZ(
Jz=np.ones(N - 1),
Jxy=np.ones(N - 1),
Bz=np.zeros(N),
dtype=dtype,
backend=backend)
D = 32
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = FiniteDMRG(mps, mpo)
num_sweeps = 2
dmrg.run_one_site(
num_sweeps=num_sweeps, num_krylov_vecs=10, verbose=2, precision=1E-100)
out, _ = capsys.readouterr()
out = out.split('\n')
act = [o[:28] + '\n' for o in out]
act = ''.join(act[0:num_sweeps * (2 * N - 2)])
exp = ''.join([
f"SS-DMRG sweep={n}/{num_sweeps}, site={m}/{N}:\n"
for n in range(1, num_sweeps + 1)
for m in [0, 1, 2, 3, 4, 5, 4, 3, 2, 1]
])
assert act == exp
def test_finite_DMRG_two_site_outstream(backend_dtype_values, capsys):
np.random.seed(16)
N = 6
backend = backend_dtype_values[0]
dtype = backend_dtype_values[1]
mpo = FiniteXXZ(
Jz=np.ones(N - 1),
Jxy=np.ones(N - 1),
Bz=np.zeros(N),
dtype=dtype,
backend=backend)
D = 32
mps = FiniteMPS.random([2] * N, [D] * (N - 1), dtype=dtype, backend=backend)
dmrg = FiniteDMRG(mps, mpo)
num_sweeps = 2
dmrg.run_two_site(max_bond_dim=D, num_sweeps=num_sweeps, num_krylov_vecs=10,
verbose=2, precision=1E-100)
out, _ = capsys.readouterr()
out = out.split('\n')
act = [o[:33] + '\n' for o in out]
act = ''.join(act[0:num_sweeps * (2 * N - 2)])
exp = ''.join([
f"TS-DMRG sweep={n}/{num_sweeps}, sites=({left_site},{left_site + 1})"
f"/{N}:\n"
for n in range(1, num_sweeps + 1)
for left_site in [0, 1, 2, 3, 4, 4, 3, 2, 1, 0]
])
assert act == exp
|
import typing
import inspect
from pathlib import Path
import functools
import dill
from tqdm import tqdm
import numpy as np
import pandas as pd
import matchzoo
tqdm.pandas()
def _convert_to_list_index(
index: typing.Union[int, slice, np.array],
length: int
):
if isinstance(index, int):
index = [index]
elif isinstance(index, slice):
index = list(range(*index.indices(length)))
return index
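# Illustrative behaviour of the helper above (values derived from the code):
#   _convert_to_list_index(2, length=5)           -> [2]
#   _convert_to_list_index(slice(1, 4), length=5) -> [1, 2, 3]
#   a numpy index array is returned unchanged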
class DataPack(object):
"""
Matchzoo :class:`DataPack` data structure, store dataframe and context.
`DataPack` is a MatchZoo native data structure that most MatchZoo data
handling processes build upon. A `DataPack` consists of three parts:
`left`, `right` and `relation`, each one of is a `pandas.DataFrame`.
:param relation: Store the relation between left document
and right document use ids.
:param left: Store the content or features for id_left.
:param right: Store the content or features for
id_right.
Example:
>>> left = [
... ['qid1', 'query 1'],
... ['qid2', 'query 2']
... ]
>>> right = [
... ['did1', 'document 1'],
... ['did2', 'document 2']
... ]
>>> relation = [['qid1', 'did1', 1], ['qid2', 'did2', 1]]
>>> relation_df = pd.DataFrame(relation)
>>> left = pd.DataFrame(left)
>>> right = pd.DataFrame(right)
>>> dp = DataPack(
... relation=relation_df,
... left=left,
... right=right,
... )
>>> len(dp)
2
"""
DATA_FILENAME = 'data.dill'
def __init__(
self,
relation: pd.DataFrame,
left: pd.DataFrame,
right: pd.DataFrame
):
""":class:`DataPack` initializer."""
self._relation = relation
self._left = left
self._right = right
@property
def has_label(self) -> bool:
""":return: `True` if `label` column exists, `False` other wise."""
return 'label' in self._relation.columns
def __len__(self) -> int:
"""Get numer of rows in the class:`DataPack` object."""
return self._relation.shape[0]
@property
def frame(self) -> 'DataPack.FrameView':
"""
View the data pack as a :class:`pandas.DataFrame`.
Returned data frame is created by merging the left data frame,
the right dataframe and the relation data frame. Use `[]` to access
an item or a slice of items.
:return: A :class:`matchzoo.DataPack.FrameView` instance.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> type(data_pack.frame)
<class 'matchzoo.data_pack.data_pack.DataPack.FrameView'>
>>> frame_slice = data_pack.frame[0:5]
>>> type(frame_slice)
<class 'pandas.core.frame.DataFrame'>
>>> list(frame_slice.columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> full_frame = data_pack.frame()
>>> len(full_frame) == len(data_pack)
True
"""
return DataPack.FrameView(self)
def unpack(self) -> typing.Tuple[typing.Dict[str, np.array],
typing.Optional[np.array]]:
"""
Unpack the data for training.
The return value can be directly feed to `model.fit` or
`model.fit_generator`.
:return: A tuple of (X, y). `y` is `None` if `self` has no label.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> X, y = data_pack.unpack()
>>> type(X)
<class 'dict'>
>>> sorted(X.keys())
['id_left', 'id_right', 'text_left', 'text_right']
>>> type(y)
<class 'numpy.ndarray'>
>>> X, y = data_pack.drop_label().unpack()
>>> type(y)
<class 'NoneType'>
"""
frame = self.frame()
columns = list(frame.columns)
if self.has_label:
columns.remove('label')
y = np.vstack(np.asarray(frame['label']))
else:
y = None
x = frame[columns].to_dict(orient='list')
for key, val in x.items():
x[key] = np.array(val)
return x, y
def __getitem__(self, index: typing.Union[int, slice, np.array]
) -> 'DataPack':
"""
Get specific item(s) as a new :class:`DataPack`.
The returned :class:`DataPack` will be a copy of the subset of the
original :class:`DataPack`.
:param index: Index of the item(s) to get.
:return: An instance of :class:`DataPack`.
"""
index = _convert_to_list_index(index, len(self))
relation = self._relation.loc[index].reset_index(drop=True)
left = self._left.loc[relation['id_left'].unique()]
right = self._right.loc[relation['id_right'].unique()]
return DataPack(left=left.copy(),
right=right.copy(),
relation=relation.copy())
@property
def relation(self):
"""`relation` getter."""
return self._relation
@relation.setter
def relation(self, value):
"""`relation` setter."""
self._relation = value
@property
def left(self) -> pd.DataFrame:
"""Get :meth:`left` of :class:`DataPack`."""
return self._left
@property
def right(self) -> pd.DataFrame:
"""Get :meth:`right` of :class:`DataPack`."""
return self._right
def copy(self) -> 'DataPack':
""":return: A deep copy."""
return DataPack(left=self._left.copy(),
right=self._right.copy(),
relation=self._relation.copy())
def save(self, dirpath: typing.Union[str, Path]):
"""
Save the :class:`DataPack` object.
A saved :class:`DataPack` is represented as a directory containing a
serialized :class:`DataPack` object (the transformed user input as
features and context); it is saved with `dill`.
:param dirpath: directory path of the saved :class:`DataPack`.
"""
dirpath = Path(dirpath)
data_file_path = dirpath.joinpath(self.DATA_FILENAME)
if data_file_path.exists():
raise FileExistsError(
f'{data_file_path} already exists, failed to save')
elif not dirpath.exists():
dirpath.mkdir()
with open(data_file_path, mode='wb') as data_file:
    dill.dump(self, data_file)
def _optional_inplace(func):
"""
Decorator that adds an `inplace` keyword argument to a method.
Decorate any method that modifies inplace to make that inplace change
optional.
"""
doc = ":param inplace: `True` to modify inplace, `False` to return " \
"a modified copy. (default: `False`)"
def _clean(s):
return s.replace(' ', '').replace('\n', '')
if _clean(doc) not in _clean(inspect.getdoc(func)):
raise NotImplementedError(
f"`inplace` parameter of {func} not documented.\n"
f"Please add the following line to its documentation:\n{doc}")
@functools.wraps(func)
def wrapper(
self, *args, inplace: bool = False, **kwargs
) -> typing.Optional['DataPack']:
if inplace:
target = self
else:
target = self.copy()
func(target, *args, **kwargs)
if not inplace:
return target
return wrapper
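# Usage sketch for the decorator above (hypothetical method name): decorating
# `def normalize(self): ...` with `@_optional_inplace` lets callers either
# mutate in place with `dp.normalize(inplace=True)` or obtain a modified copy
# with `new_dp = dp.normalize()`.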
@_optional_inplace
def shuffle(self):
"""
Shuffle the data pack by shuffling the relation column.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> import numpy.random
>>> numpy.random.seed(0)
>>> data_pack = mz.datasets.toy.load_data()
>>> orig_ids = data_pack.relation['id_left']
>>> shuffled = data_pack.shuffle()
>>> (shuffled.relation['id_left'] != orig_ids).any()
True
"""
self._relation = self._relation.sample(frac=1)
self._relation.reset_index(drop=True, inplace=True)
@_optional_inplace
def drop_label(self):
"""
Remove `label` column from the data pack.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> data_pack.has_label
True
>>> data_pack.drop_label(inplace=True)
>>> data_pack.has_label
False
"""
self._relation = self._relation.drop(columns='label')
@_optional_inplace
def drop_invalid(self):
"""
Remove rows from the data pack where the length is zero.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> data_pack.append_text_length(inplace=True, verbose=0)
>>> data_pack.drop_invalid(inplace=True)
"""
if not ('length_left' in self._left and 'length_right' in self._right):
raise ValueError(f"`lenght_left` or `length_right` is missing. "
f"Please call `append_text_length` in advance.")
valid_left = self._left.loc[self._left.length_left != 0]
valid_right = self._right.loc[self._right.length_right != 0]
self._left = self._left[self._left.index.isin(valid_left.index)]
self._right = self._right[self._right.index.isin(valid_right.index)]
self._relation = self._relation[self._relation.id_left.isin(
valid_left.index) & self._relation.id_right.isin(
valid_right.index)]
self._relation.reset_index(drop=True, inplace=True)
@_optional_inplace
def append_text_length(self, verbose=1):
"""
Append `length_left` and `length_right` columns.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Example:
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> 'length_left' in data_pack.frame[0].columns
False
>>> new_data_pack = data_pack.append_text_length(verbose=0)
>>> 'length_left' in new_data_pack.frame[0].columns
True
>>> 'length_left' in data_pack.frame[0].columns
False
>>> data_pack.append_text_length(inplace=True, verbose=0)
>>> 'length_left' in data_pack.frame[0].columns
True
"""
self.apply_on_text(len, rename=('length_left', 'length_right'),
inplace=True, verbose=verbose)
@_optional_inplace
def apply_on_text(
self, func: typing.Callable,
mode: str = 'both',
rename: typing.Optional[str] = None,
verbose: int = 1
):
"""
Apply `func` to text columns based on `mode`.
:param func: The function to apply.
:param mode: One of "both", "left" and "right".
:param rename: If set, use new names for results instead of replacing
the original columns. To set `rename` in "both" mode, use a tuple
of `str`, e.g. ("text_left_new_name", "text_right_new_name").
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:param verbose: Verbosity.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
To apply `len` on the left text and add the result as 'length_left':
>>> data_pack.apply_on_text(len, mode='left',
... rename='length_left',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'label']
To do the same to the right text:
>>> data_pack.apply_on_text(len, mode='right',
... rename='length_right',
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'id_right', 'text_right', 'length_right', 'label']
To do the same to the both texts at the same time:
>>> data_pack.apply_on_text(len, mode='both',
... rename=('extra_left', 'extra_right'),
... inplace=True,
... verbose=0)
>>> list(frame[0].columns) # noqa: E501
['id_left', 'text_left', 'length_left', 'extra_left', 'id_right', 'text_right', 'length_right', 'extra_right', 'label']
To suppress outputs:
>>> data_pack.apply_on_text(len, mode='both', verbose=0,
... inplace=True)
"""
if mode == 'both':
self._apply_on_text_both(func, rename, verbose=verbose)
elif mode == 'left':
self._apply_on_text_left(func, rename, verbose=verbose)
elif mode == 'right':
self._apply_on_text_right(func, rename, verbose=verbose)
else:
raise ValueError(f"{mode} is not a valid mode type."
f"Must be one of `left` `right` `both`.")
def _apply_on_text_right(self, func, rename, verbose=1):
name = rename or 'text_right'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._right[name] = self._right['text_right'].progress_apply(func)
else:
self._right[name] = self._right['text_right'].apply(func)
def _apply_on_text_left(self, func, rename, verbose=1):
name = rename or 'text_left'
if verbose:
tqdm.pandas(desc="Processing " + name + " with " + func.__name__)
self._left[name] = self._left['text_left'].progress_apply(func)
else:
self._left[name] = self._left['text_left'].apply(func)
def _apply_on_text_both(self, func, rename, verbose=1):
left_name, right_name = rename or ('text_left', 'text_right')
self._apply_on_text_left(func, rename=left_name, verbose=verbose)
self._apply_on_text_right(func, rename=right_name, verbose=verbose)
@_optional_inplace
def one_hot_encode_label(self, num_classes=2):
"""
One-hot encode `label` column of `relation`.
:param num_classes: Number of classes.
:param inplace: `True` to modify inplace, `False` to return a modified
copy. (default: `False`)
:return:
"""
self._relation['label'] = self._relation['label'].apply(
lambda idx: matchzoo.one_hot(idx, num_classes))
class FrameView(object):
"""FrameView."""
def __init__(self, data_pack: 'DataPack'):
"""
View a data pack as a frame.
A slice of the view is generated by merging three parts of the
data pack being viewed into a big table.
:param data_pack: :class:`DataPack` to view.
Examples::
>>> import matchzoo as mz
>>> data_pack = mz.datasets.toy.load_data()
>>> frame = data_pack.frame
Use `()` to get a full copy of the frame:
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right', 'label']
>>> len(frame()) == len(data_pack)
True
Notice that a view is bound to the original data pack, so changing
contents of the data pack will affect a view previously created:
>>> data_pack.drop_label(inplace=True)
>>> list(frame().columns)
['id_left', 'text_left', 'id_right', 'text_right']
To slice the view:
>>> frame_slice = frame[3:5]
>>> len(frame_slice)
2
"""
self._data_pack = data_pack
def __getitem__(self, index: typing.Union[int, slice, np.array]
) -> pd.DataFrame:
"""Slicer."""
dp = self._data_pack
index = _convert_to_list_index(index, len(dp))
left_df = dp.left.loc[dp.relation['id_left'][index]].reset_index()
right_df = dp.right.loc[
dp.relation['id_right'][index]].reset_index()
joined_table = left_df.join(right_df)
for column in dp.relation.columns:
if column not in ['id_left', 'id_right']:
labels = dp.relation[column][index].to_frame()
labels = labels.reset_index(drop=True)
joined_table = joined_table.join(labels)
return joined_table
def __call__(self):
""":return: A full copy. Equivalant to `frame[:]`."""
return self[:]
def load_data_pack(dirpath: typing.Union[str, Path]) -> DataPack:
"""
Load a :class:`DataPack`. The reverse function of :meth:`save`.
:param dirpath: directory path of the saved :class:`DataPack`.
:return: a :class:`DataPack` instance.
"""
dirpath = Path(dirpath)
data_file_path = dirpath.joinpath(DataPack.DATA_FILENAME)
with open(data_file_path, 'rb') as data_file:
    dp = dill.load(data_file)
return dp
|
import logging
# pylint: disable=import-error
from decora_wifi import DecoraWiFiSession
from decora_wifi.models.person import Person
from decora_wifi.models.residence import Residence
from decora_wifi.models.residential_account import ResidentialAccount
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_TRANSITION,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Validation of the user's configuration
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
NOTIFICATION_ID = "leviton_notification"
NOTIFICATION_TITLE = "myLeviton Decora Setup"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Decora WiFi platform."""
email = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
session = DecoraWiFiSession()
try:
success = session.login(email, password)
# If login failed, notify user.
if success is None:
msg = "Failed to log into myLeviton Services. Check credentials."
_LOGGER.error(msg)
hass.components.persistent_notification.create(
msg, title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID
)
return False
# Gather all the available devices...
perms = session.user.get_residential_permissions()
all_switches = []
for permission in perms:
if permission.residentialAccountId is not None:
acct = ResidentialAccount(session, permission.residentialAccountId)
for residence in acct.get_residences():
for switch in residence.get_iot_switches():
all_switches.append(switch)
elif permission.residenceId is not None:
residence = Residence(session, permission.residenceId)
for switch in residence.get_iot_switches():
all_switches.append(switch)
add_entities(DecoraWifiLight(sw) for sw in all_switches)
except ValueError:
_LOGGER.error("Failed to communicate with myLeviton Service")
# Listen for the stop event and log out.
def logout(event):
"""Log out..."""
try:
if session is not None:
Person.logout(session)
except ValueError:
_LOGGER.error("Failed to log out of myLeviton Service")
hass.bus.listen(EVENT_HOMEASSISTANT_STOP, logout)
class DecoraWifiLight(LightEntity):
"""Representation of a Decora WiFi switch."""
def __init__(self, switch):
"""Initialize the switch."""
self._switch = switch
@property
def supported_features(self):
"""Return supported features."""
if self._switch.canSetLevel:
return SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION
return 0
@property
def name(self):
"""Return the display name of this switch."""
return self._switch.name
@property
def brightness(self):
"""Return the brightness of the dimmer switch."""
return int(self._switch.brightness * 255 / 100)
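# The device reports brightness as a 0-100 percentage; the property above
# rescales it to Home Assistant's 0-255 range (e.g. 50 % -> 127).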
@property
def is_on(self):
"""Return true if switch is on."""
return self._switch.power == "ON"
def turn_on(self, **kwargs):
"""Instruct the switch to turn on & adjust brightness."""
attribs = {"power": "ON"}
if ATTR_BRIGHTNESS in kwargs:
min_level = self._switch.data.get("minLevel", 0)
max_level = self._switch.data.get("maxLevel", 100)
brightness = int(kwargs[ATTR_BRIGHTNESS] * max_level / 255)
brightness = max(brightness, min_level)
attribs["brightness"] = brightness
if ATTR_TRANSITION in kwargs:
transition = int(kwargs[ATTR_TRANSITION])
attribs["fadeOnTime"] = attribs["fadeOffTime"] = transition
try:
self._switch.update_attributes(attribs)
except ValueError:
_LOGGER.error("Failed to turn on myLeviton switch")
def turn_off(self, **kwargs):
"""Instruct the switch to turn off."""
attribs = {"power": "OFF"}
try:
self._switch.update_attributes(attribs)
except ValueError:
_LOGGER.error("Failed to turn off myLeviton switch")
def update(self):
"""Fetch new state data for this switch."""
try:
self._switch.refresh()
except ValueError:
_LOGGER.error("Failed to update myLeviton switch data")
|
from datetime import timedelta
import logging
from pyruckus import Ruckus
from pyruckus.exceptions import AuthenticationError
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
API_CLIENTS,
API_CURRENT_ACTIVE_CLIENTS,
API_MAC,
DOMAIN,
SCAN_INTERVAL,
)
_LOGGER = logging.getLogger(__package__)
class RuckusUnleashedDataUpdateCoordinator(DataUpdateCoordinator):
"""Coordinator to manage data from Ruckus Unleashed client."""
def __init__(self, hass: HomeAssistant, *, ruckus: Ruckus):
"""Initialize global Ruckus Unleashed data updater."""
self.ruckus = ruckus
update_interval = timedelta(seconds=SCAN_INTERVAL)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
async def _fetch_clients(self) -> dict:
"""Fetch clients from the API and format them."""
clients = await self.hass.async_add_executor_job(
self.ruckus.current_active_clients
)
return {e[API_MAC]: e for e in clients[API_CURRENT_ACTIVE_CLIENTS][API_CLIENTS]}
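# The returned mapping is keyed by client MAC address, so the coordinator
# data ends up shaped roughly like (purely illustrative):
#   {API_CLIENTS: {"aa:bb:cc:dd:ee:ff": {...client info...}, ...}}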
async def _async_update_data(self) -> dict:
"""Fetch Ruckus Unleashed data."""
try:
return {API_CLIENTS: await self._fetch_clients()}
except (AuthenticationError, ConnectionError) as error:
raise UpdateFailed(error) from error
|
from kombu.utils.eventio import READ, WRITE, ERR
from kombu.utils.functional import reprcall
def repr_flag(flag):
"""Return description of event loop flag."""
return '{}{}{}'.format('R' if flag & READ else '',
'W' if flag & WRITE else '',
'!' if flag & ERR else '')
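# Examples of the flag descriptions produced above:
#   repr_flag(READ)        -> 'R'
#   repr_flag(WRITE | ERR) -> 'W!'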
def _rcb(obj):
if obj is None:
return '<missing>'
if isinstance(obj, str):
return obj
if isinstance(obj, tuple):
cb, args = obj
return reprcall(cb.__name__, args=args)
return obj.__name__
def repr_active(h):
"""Return description of active readers and writers."""
return ', '.join(repr_readers(h) + repr_writers(h))
def repr_events(h, events):
"""Return description of events returned by poll."""
return ', '.join(
'{}({})->{}'.format(
_rcb(callback_for(h, fd, fl, '(GONE)')), fd,
repr_flag(fl),
)
for fd, fl in events
)
def repr_readers(h):
"""Return description of pending readers."""
return ['({}){}->{}'.format(fd, _rcb(cb), repr_flag(READ | ERR))
for fd, cb in h.readers.items()]
def repr_writers(h):
"""Return description of pending writers."""
return ['({}){}->{}'.format(fd, _rcb(cb), repr_flag(WRITE))
for fd, cb in h.writers.items()]
def callback_for(h, fd, flag, *default):
"""Return the callback used for hub+fd+flag."""
try:
if flag & READ:
return h.readers[fd]
if flag & WRITE:
if fd in h.consolidate:
return h.consolidate_callback
return h.writers[fd]
except KeyError:
if default:
return default[0]
raise
|
import functools
import os
import traceback
from typing import Optional
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QApplication
from qutebrowser.browser import qutescheme
from qutebrowser.utils import log, objreg, usertypes, message, debug, utils
from qutebrowser.keyinput import modeman
from qutebrowser.commands import runners
from qutebrowser.api import cmdutils
from qutebrowser.misc import ( # pylint: disable=unused-import
consolewidget, debugcachestats, objects, miscwidgets)
from qutebrowser.utils.version import pastebin_version
from qutebrowser.qt import sip
@cmdutils.register(maxsplit=1, no_cmd_split=True, no_replace_variables=True)
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
def later(ms: int, command: str, win_id: int) -> None:
"""Execute a command after some time.
Args:
ms: How many milliseconds to wait.
command: The command to run, with optional args.
"""
if ms < 0:
raise cmdutils.CommandError("I can't run something in the past!")
commandrunner = runners.CommandRunner(win_id)
timer = usertypes.Timer(name='later', parent=QApplication.instance())
try:
timer.setSingleShot(True)
try:
timer.setInterval(ms)
except OverflowError:
raise cmdutils.CommandError("Numeric argument is too large for "
"internal int representation.")
timer.timeout.connect(
functools.partial(commandrunner.run_safely, command))
timer.timeout.connect(timer.deleteLater)
timer.start()
except:
timer.deleteLater()
raise
@cmdutils.register(maxsplit=1, no_cmd_split=True, no_replace_variables=True)
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
@cmdutils.argument('count', value=cmdutils.Value.count)
def repeat(times: int, command: str, win_id: int, count: int = None) -> None:
"""Repeat a given command.
Args:
times: How many times to repeat.
command: The command to run, with optional args.
count: Multiplies with 'times' when given.
"""
if count is not None:
times *= count
if times < 0:
raise cmdutils.CommandError("A negative count doesn't make sense.")
commandrunner = runners.CommandRunner(win_id)
for _ in range(times):
commandrunner.run_safely(command)
@cmdutils.register(maxsplit=1, no_cmd_split=True, no_replace_variables=True)
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
@cmdutils.argument('count', value=cmdutils.Value.count)
def run_with_count(count_arg: int, command: str, win_id: int,
count: int = 1) -> None:
"""Run a command with the given count.
If run_with_count itself is run with a count, it multiplies count_arg.
Args:
count_arg: The count to pass to the command.
command: The command to run, with optional args.
count: The count that run_with_count itself received.
"""
runners.CommandRunner(win_id).run(command, count_arg * count)
@cmdutils.register()
def clear_messages() -> None:
"""Clear all message notifications."""
message.global_bridge.clear_messages.emit()
@cmdutils.register(debug=True)
def debug_all_objects() -> None:
"""Print a list of all objects to the debug log."""
s = debug.get_all_objects()
log.misc.debug(s)
@cmdutils.register(debug=True)
def debug_cache_stats() -> None:
"""Print LRU cache stats."""
debugcachestats.debug_cache_stats()
@cmdutils.register(debug=True)
def debug_console() -> None:
"""Show the debugging console."""
if consolewidget.console_widget is None:
log.misc.debug('initializing debug console')
consolewidget.init()
assert consolewidget.console_widget is not None
if consolewidget.console_widget.isVisible():
log.misc.debug('hiding debug console')
consolewidget.console_widget.hide()
else:
log.misc.debug('showing debug console')
consolewidget.console_widget.show()
@cmdutils.register(maxsplit=0, debug=True, no_cmd_split=True)
def debug_pyeval(s: str, file: bool = False, quiet: bool = False) -> None:
"""Evaluate a python string and display the results as a web page.
Args:
s: The string to evaluate.
file: Interpret s as a path to a file; also implies --quiet.
quiet: Don't show the output in a new tab.
"""
if file:
quiet = True
path = os.path.expanduser(s)
try:
with open(path, 'r', encoding='utf-8') as f:
s = f.read()
except OSError as e:
raise cmdutils.CommandError(str(e))
try:
exec(s)
out = "No error"
except Exception:
out = traceback.format_exc()
else:
try:
r = eval(s)
out = repr(r)
except Exception:
out = traceback.format_exc()
qutescheme.pyeval_output = out
if quiet:
log.misc.debug("pyeval output: {}".format(out))
else:
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window='last-focused')
tabbed_browser.load_url(QUrl('qute://pyeval'), newtab=True)
@cmdutils.register(debug=True)
def debug_set_fake_clipboard(s: str = None) -> None:
"""Put data into the fake clipboard and enable logging, used for tests.
Args:
s: The text to put into the fake clipboard, or unset to enable logging.
"""
if s is None:
utils.log_clipboard = True
else:
utils.fake_clipboard = s
@cmdutils.register()
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
@cmdutils.argument('count', value=cmdutils.Value.count)
def repeat_command(win_id: int, count: int = None) -> None:
"""Repeat the last executed command.
Args:
count: Which count to pass the command.
"""
mode_manager = modeman.instance(win_id)
if mode_manager.mode not in runners.last_command:
raise cmdutils.CommandError("You didn't do anything yet.")
cmd = runners.last_command[mode_manager.mode]
commandrunner = runners.CommandRunner(win_id)
commandrunner.run(cmd[0], count if count is not None else cmd[1])
@cmdutils.register(debug=True, name='debug-log-capacity')
def log_capacity(capacity: int) -> None:
"""Change the number of log lines to be stored in RAM.
Args:
capacity: Number of lines for the log.
"""
if capacity < 0:
raise cmdutils.CommandError("Can't set a negative log capacity!")
assert log.ram_handler is not None
log.ram_handler.change_log_capacity(capacity)
@cmdutils.register(debug=True)
def debug_log_filter(filters: str) -> None:
"""Change the log filter for console logging.
Args:
filters: A comma separated list of logger names. Can also be "none" to
clear any existing filters.
"""
if log.console_filter is None:
raise cmdutils.CommandError("No log.console_filter. Not attached "
"to a console?")
try:
new_filter = log.LogFilter.parse(filters)
except log.InvalidLogFilterError as e:
raise cmdutils.CommandError(e)
log.console_filter.update_from(new_filter)
@cmdutils.register()
@cmdutils.argument('current_win_id', value=cmdutils.Value.win_id)
def window_only(current_win_id: int) -> None:
"""Close all windows except for the current one."""
for win_id, window in objreg.window_registry.items():
# We could be in the middle of destroying a window here
if sip.isdeleted(window):
continue
if win_id != current_win_id:
window.close()
@cmdutils.register()
@cmdutils.argument('win_id', value=cmdutils.Value.win_id)
def version(win_id: int, paste: bool = False) -> None:
"""Show version information.
Args:
paste: Paste to pastebin.
"""
tabbed_browser = objreg.get('tabbed-browser', scope='window',
window=win_id)
tabbed_browser.load_url(QUrl('qute://version/'), newtab=True)
if paste:
pastebin_version()
_keytester_widget: Optional[miscwidgets.KeyTesterWidget] = None
@cmdutils.register(debug=True)
def debug_keytester() -> None:
"""Show a keytester widget."""
global _keytester_widget
if (_keytester_widget and
not sip.isdeleted(_keytester_widget) and
_keytester_widget.isVisible()):
_keytester_widget.close()
else:
_keytester_widget = miscwidgets.KeyTesterWidget()
_keytester_widget.show()
|
import logging
from pyhap.const import CATEGORY_ALARM_SYSTEM
from pyhap.loader import get_loader
from homeassistant.components.alarm_control_panel import DOMAIN
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
SUPPORT_ALARM_TRIGGER,
)
from homeassistant.const import (
ATTR_CODE,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import callback
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_CURRENT_SECURITY_STATE,
CHAR_TARGET_SECURITY_STATE,
SERV_SECURITY_SYSTEM,
)
_LOGGER = logging.getLogger(__name__)
HASS_TO_HOMEKIT = {
STATE_ALARM_ARMED_HOME: 0,
STATE_ALARM_ARMED_AWAY: 1,
STATE_ALARM_ARMED_NIGHT: 2,
STATE_ALARM_DISARMED: 3,
STATE_ALARM_TRIGGERED: 4,
}
HASS_TO_HOMEKIT_SERVICES = {
SERVICE_ALARM_ARM_HOME: 0,
SERVICE_ALARM_ARM_AWAY: 1,
SERVICE_ALARM_ARM_NIGHT: 2,
SERVICE_ALARM_DISARM: 3,
}
HOMEKIT_TO_HASS = {c: s for s, c in HASS_TO_HOMEKIT.items()}
STATE_TO_SERVICE = {
STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY,
STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME,
STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT,
STATE_ALARM_DISARMED: SERVICE_ALARM_DISARM,
}
@TYPES.register("SecuritySystem")
class SecuritySystem(HomeAccessory):
"""Generate an SecuritySystem accessory for an alarm control panel."""
def __init__(self, *args):
"""Initialize a SecuritySystem accessory object."""
super().__init__(*args, category=CATEGORY_ALARM_SYSTEM)
state = self.hass.states.get(self.entity_id)
self._alarm_code = self.config.get(ATTR_CODE)
supported_states = state.attributes.get(
ATTR_SUPPORTED_FEATURES,
(
SUPPORT_ALARM_ARM_HOME
| SUPPORT_ALARM_ARM_AWAY
| SUPPORT_ALARM_ARM_NIGHT
| SUPPORT_ALARM_TRIGGER
),
)
loader = get_loader()
default_current_states = loader.get_char(
"SecuritySystemCurrentState"
).properties.get("ValidValues")
default_target_services = loader.get_char(
"SecuritySystemTargetState"
).properties.get("ValidValues")
current_supported_states = [
HASS_TO_HOMEKIT[STATE_ALARM_DISARMED],
HASS_TO_HOMEKIT[STATE_ALARM_TRIGGERED],
]
target_supported_services = [HASS_TO_HOMEKIT_SERVICES[SERVICE_ALARM_DISARM]]
if supported_states & SUPPORT_ALARM_ARM_HOME:
current_supported_states.append(HASS_TO_HOMEKIT[STATE_ALARM_ARMED_HOME])
target_supported_services.append(
HASS_TO_HOMEKIT_SERVICES[SERVICE_ALARM_ARM_HOME]
)
if supported_states & SUPPORT_ALARM_ARM_AWAY:
current_supported_states.append(HASS_TO_HOMEKIT[STATE_ALARM_ARMED_AWAY])
target_supported_services.append(
HASS_TO_HOMEKIT_SERVICES[SERVICE_ALARM_ARM_AWAY]
)
if supported_states & SUPPORT_ALARM_ARM_NIGHT:
current_supported_states.append(HASS_TO_HOMEKIT[STATE_ALARM_ARMED_NIGHT])
target_supported_services.append(
HASS_TO_HOMEKIT_SERVICES[SERVICE_ALARM_ARM_NIGHT]
)
new_current_states = {
key: val
for key, val in default_current_states.items()
if val in current_supported_states
}
new_target_services = {
key: val
for key, val in default_target_services.items()
if val in target_supported_services
}
serv_alarm = self.add_preload_service(SERV_SECURITY_SYSTEM)
self.char_current_state = serv_alarm.configure_char(
CHAR_CURRENT_SECURITY_STATE,
value=HASS_TO_HOMEKIT[STATE_ALARM_DISARMED],
valid_values=new_current_states,
)
self.char_target_state = serv_alarm.configure_char(
CHAR_TARGET_SECURITY_STATE,
value=HASS_TO_HOMEKIT_SERVICES[SERVICE_ALARM_DISARM],
valid_values=new_target_services,
setter_callback=self.set_security_state,
)
# Set the state so it is in sync on initial
# GET to avoid an event storm after homekit startup
self.async_update_state(state)
def set_security_state(self, value):
"""Move security state to value if call came from HomeKit."""
_LOGGER.debug("%s: Set security state to %d", self.entity_id, value)
hass_value = HOMEKIT_TO_HASS[value]
service = STATE_TO_SERVICE[hass_value]
params = {ATTR_ENTITY_ID: self.entity_id}
if self._alarm_code:
params[ATTR_CODE] = self._alarm_code
self.call_service(DOMAIN, service, params)
@callback
def async_update_state(self, new_state):
"""Update security state after state changed."""
hass_state = new_state.state
if hass_state in HASS_TO_HOMEKIT:
current_security_state = HASS_TO_HOMEKIT[hass_state]
if self.char_current_state.value != current_security_state:
self.char_current_state.set_value(current_security_state)
_LOGGER.debug(
"%s: Updated current state to %s (%d)",
self.entity_id,
hass_state,
current_security_state,
)
# SecuritySystemTargetState does not support triggered
if (
hass_state != STATE_ALARM_TRIGGERED
and self.char_target_state.value != current_security_state
):
self.char_target_state.set_value(current_security_state)
|
import re
from packaging import version
from kalliope._version import version_str
import requests
from kalliope.core import NeuronModule
KALLIOPE_PROJECT_MASTER_VERSION_URL = "https://raw.githubusercontent.com/kalliope-project/kalliope" \
"/master/kalliope/_version.py"
class Kalliope_version(NeuronModule):
def __init__(self, **kwargs):
super(Kalliope_version, self).__init__(**kwargs)
new_version_available = False
last_master_version = None
# get the last version online
response = requests.get(KALLIOPE_PROJECT_MASTER_VERSION_URL)
regex_version = r"(\d\.\d(\.\d)?(\.\d)?)"
version_search = re.search(regex_version, response.text)
if version_search:
last_master_version = version_search.group(1)
current_version = version_str
if last_master_version:
if version.parse(current_version) < version.parse(last_master_version):
new_version_available = True
message = {
"current_version": current_version,
"new_version_available": new_version_available,
"last_master_version": last_master_version
}
self.say(message)
|
from pyps4_2ndscreen.errors import CredentialTimeout
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import ps4
from homeassistant.components.ps4.const import (
DEFAULT_ALIAS,
DEFAULT_NAME,
DEFAULT_REGION,
DOMAIN,
)
from homeassistant.const import (
CONF_CODE,
CONF_HOST,
CONF_IP_ADDRESS,
CONF_NAME,
CONF_REGION,
CONF_TOKEN,
)
from homeassistant.util import location
from tests.async_mock import patch
from tests.common import MockConfigEntry
MOCK_TITLE = "PlayStation 4"
MOCK_CODE = 12345678
MOCK_CODE_LEAD_0 = 1234567
MOCK_CODE_LEAD_0_STR = "01234567"
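# MOCK_CODE_LEAD_0 loses its leading zero when stored as an int; the flow is
# expected to zero-pad it back to the 8-digit string form (see test_0_pin).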
MOCK_CREDS = "000aa000"
MOCK_HOST = "192.0.0.0"
MOCK_HOST_ADDITIONAL = "192.0.0.1"
MOCK_DEVICE = {
CONF_HOST: MOCK_HOST,
CONF_NAME: DEFAULT_NAME,
CONF_REGION: DEFAULT_REGION,
}
MOCK_DEVICE_ADDITIONAL = {
CONF_HOST: MOCK_HOST_ADDITIONAL,
CONF_NAME: DEFAULT_NAME,
CONF_REGION: DEFAULT_REGION,
}
MOCK_CONFIG = {
CONF_IP_ADDRESS: MOCK_HOST,
CONF_NAME: DEFAULT_NAME,
CONF_REGION: DEFAULT_REGION,
CONF_CODE: MOCK_CODE,
}
MOCK_CONFIG_ADDITIONAL = {
CONF_IP_ADDRESS: MOCK_HOST_ADDITIONAL,
CONF_NAME: DEFAULT_NAME,
CONF_REGION: DEFAULT_REGION,
CONF_CODE: MOCK_CODE,
}
MOCK_DATA = {CONF_TOKEN: MOCK_CREDS, "devices": [MOCK_DEVICE]}
MOCK_UDP_PORT = 987
MOCK_TCP_PORT = 997
MOCK_AUTO = {"Config Mode": "Auto Discover"}
MOCK_MANUAL = {"Config Mode": "Manual Entry", CONF_IP_ADDRESS: MOCK_HOST}
MOCK_LOCATION = location.LocationInfo(
"0.0.0.0",
"US",
"United States",
"CA",
"California",
"San Diego",
"92122",
"America/Los_Angeles",
32.8594,
-117.2073,
True,
)
@pytest.fixture(name="location_info", autouse=True)
def location_info_fixture():
"""Mock location info."""
with patch(
"homeassistant.components.ps4.config_flow.location.async_detect_location_info",
return_value=MOCK_LOCATION,
):
yield
@pytest.fixture(name="ps4_setup", autouse=True)
def ps4_setup_fixture():
"""Patch ps4 setup entry."""
with patch(
"homeassistant.components.ps4.async_setup_entry",
return_value=True,
):
yield
async def test_full_flow_implementation(hass):
"""Test registering an implementation and flow works."""
# User Step Started, results in Step Creds
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
# Step Creds results with form in Step Mode.
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
# Step Mode with User Input which is not manual, results in Step Link.
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
# User Input results in created entry.
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_TOKEN] == MOCK_CREDS
assert result["data"]["devices"] == [MOCK_DEVICE]
assert result["title"] == MOCK_TITLE
async def test_multiple_flow_implementation(hass):
"""Test multiple device flows."""
# User Step Started, results in Step Creds
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
# Step Creds results with form in Step Mode.
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
# Step Mode with User Input which is not manual, results in Step Link.
with patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
# User Input results in created entry.
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)), patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_TOKEN] == MOCK_CREDS
assert result["data"]["devices"] == [MOCK_DEVICE]
assert result["title"] == MOCK_TITLE
# Check if entry exists.
entries = hass.config_entries.async_entries()
assert len(entries) == 1
# Check if there is a device config in entry.
entry_1 = entries[0]
assert len(entry_1.data["devices"]) == 1
# Test additional flow.
# User Step Started, results in Step Mode:
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None), patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
# Step Creds results with form in Step Mode.
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
# Step Mode with User Input which is not manual, results in Step Link.
with patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
# Step Link
with patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
), patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_TOKEN] == MOCK_CREDS
assert len(result["data"]["devices"]) == 1
assert result["title"] == MOCK_TITLE
# Check if there are 2 entries.
entries = hass.config_entries.async_entries()
assert len(entries) == 2
# Check if there is device config in the last entry.
entry_2 = entries[-1]
assert len(entry_2.data["devices"]) == 1
# Check that entry 1 is different from entry 2.
assert entry_1 is not entry_2
async def test_port_bind_abort(hass):
"""Test that flow aborted when cannot bind to ports 987, 997."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_UDP_PORT):
reason = "port_987_bind_error"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == reason
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=MOCK_TCP_PORT):
reason = "port_997_bind_error"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == reason
async def test_duplicate_abort(hass):
"""Test that Flow aborts when found devices already configured."""
MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA).add_to_hass(hass)
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_additional_device(hass):
"""Test that Flow can configure another device."""
# Mock existing entry.
entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA)
entry.add_to_hass(hass)
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch(
"pyps4_2ndscreen.Helper.has_devices",
return_value=[{"host-ip": MOCK_HOST}, {"host-ip": MOCK_HOST_ADDITIONAL}],
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, True)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG_ADDITIONAL
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_TOKEN] == MOCK_CREDS
assert len(result["data"]["devices"]) == 1
assert result["title"] == MOCK_TITLE
async def test_0_pin(hass):
"""Test Pin with leading '0' is passed correctly."""
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "creds"},
data={},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
), patch(
"homeassistant.components.ps4.config_flow.location.async_detect_location_info",
return_value=MOCK_LOCATION,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
    # Copy to avoid mutating the shared MOCK_CONFIG constant between tests.
    mock_config = dict(MOCK_CONFIG)
    mock_config[CONF_CODE] = MOCK_CODE_LEAD_0
with patch(
"pyps4_2ndscreen.Helper.link", return_value=(True, True)
) as mock_call, patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], mock_config
)
mock_call.assert_called_once_with(
MOCK_HOST, MOCK_CREDS, MOCK_CODE_LEAD_0_STR, DEFAULT_ALIAS
)
async def test_no_devices_found_abort(hass):
"""Test that failure to find devices aborts flow."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch("pyps4_2ndscreen.Helper.has_devices", return_value=[]):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "no_devices_found"
async def test_manual_mode(hass):
"""Test host specified in manual mode is passed to Step Link."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
# Step Mode with User Input: manual, results in Step Link.
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_MANUAL
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
async def test_credential_abort(hass):
"""Test that failure to get credentials aborts flow."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=None):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "credential_error"
async def test_credential_timeout(hass):
"""Test that Credential Timeout shows error."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", side_effect=CredentialTimeout):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
assert result["errors"] == {"base": "credential_timeout"}
async def test_wrong_pin_error(hass):
"""Test that incorrect pin throws an error."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
with patch("pyps4_2ndscreen.Helper.link", return_value=(True, False)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
assert result["errors"] == {"base": "login_failed"}
async def test_device_connection_error(hass):
"""Test that device not connected or on throws an error."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
with patch(
"pyps4_2ndscreen.Helper.has_devices", return_value=[{"host-ip": MOCK_HOST}]
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_AUTO
)
with patch("pyps4_2ndscreen.Helper.link", return_value=(False, True)):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_CONFIG
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "link"
assert result["errors"] == {"base": "cannot_connect"}
async def test_manual_mode_no_ip_error(hass):
"""Test no IP specified in manual mode throws an error."""
with patch("pyps4_2ndscreen.Helper.port_bind", return_value=None):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "creds"
with patch("pyps4_2ndscreen.Helper.get_creds", return_value=MOCK_CREDS):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"Config Mode": "Manual Entry"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "mode"
assert result["errors"] == {CONF_IP_ADDRESS: "no_ipaddress"}
|
import logging
from deluge_client import DelugeRPCClient, FailedToReconnectException
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Deluge Switch"
DEFAULT_PORT = 58846
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Deluge switch."""
name = config[CONF_NAME]
host = config[CONF_HOST]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
port = config[CONF_PORT]
deluge_api = DelugeRPCClient(host, port, username, password)
try:
deluge_api.connect()
except ConnectionRefusedError as err:
_LOGGER.error("Connection to Deluge Daemon failed")
raise PlatformNotReady from err
add_entities([DelugeSwitch(deluge_api, name)])
class DelugeSwitch(ToggleEntity):
"""Representation of a Deluge switch."""
def __init__(self, deluge_client, name):
"""Initialize the Deluge switch."""
self._name = name
self.deluge_client = deluge_client
self._state = STATE_OFF
self._available = False
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def is_on(self):
"""Return true if device is on."""
return self._state == STATE_ON
@property
def available(self):
"""Return true if device is available."""
return self._available
def turn_on(self, **kwargs):
"""Turn the device on."""
torrent_ids = self.deluge_client.call("core.get_session_state")
self.deluge_client.call("core.resume_torrent", torrent_ids)
def turn_off(self, **kwargs):
"""Turn the device off."""
torrent_ids = self.deluge_client.call("core.get_session_state")
self.deluge_client.call("core.pause_torrent", torrent_ids)
def update(self):
"""Get the latest data from deluge and updates the state."""
try:
torrent_list = self.deluge_client.call(
"core.get_torrents_status", {}, ["paused"]
)
self._available = True
except FailedToReconnectException:
_LOGGER.error("Connection to Deluge Daemon Lost")
self._available = False
return
for torrent in torrent_list.values():
item = torrent.popitem()
if not item[1]:
self._state = STATE_ON
return
self._state = STATE_OFF
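# A minimal standalone sketch (not part of this platform) of the RPC calls the
# switch wraps above, assuming a reachable Deluge daemon; the host and
# credentials below are placeholders.
def _example_pause_all_torrents():
    """Pause every torrent in the session, mirroring DelugeSwitch.turn_off()."""
    client = DelugeRPCClient("192.168.1.10", DEFAULT_PORT, "deluge-user", "deluge-pass")
    client.connect()
    torrent_ids = client.call("core.get_session_state")
    client.call("core.pause_torrent", torrent_ids)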
|
import numpy as np
from functools import reduce
from operator import mul
from tensornetwork.block_sparse.index import Index
from tensornetwork.block_sparse.charge import (fuse_charges, BaseCharge,
fuse_ndarray_charges,
charge_equal)
from tensornetwork.block_sparse.utils import (fuse_stride_arrays, unique,
fuse_degeneracies, intersect,
_find_best_partition,
fuse_ndarrays)
from tensornetwork.block_sparse.caching import get_cacher
from typing import List, Union, Any, Tuple, Optional, Sequence, Callable
from tensornetwork.block_sparse.sizetypes import SIZE_T
Tensor = Any
def _data_initializer(
numpy_initializer: Callable, comp_num_elements: Callable,
indices: Sequence[Index], *args, **kwargs
) -> Tuple[np.ndarray, List[BaseCharge], List[bool], List[List[int]]]:
"""
Initialize a 1d np.ndarray using `numpy_initializer` function.
Args:
numpy_initializer: Callable, should return a 1d np.ndarray.
Function call signature: `numpy_initializer(*args, **kwargs)`.
comp_num_elements: Callable, computes the number of elements of
the returned 1d np.ndarray, using `numel = comp_num_elements(indices)`.
    indices: List of `Index` objects.
*args, **kwargs: Arguments to `numpy_initializer`.
Returns:
np.ndarray: An initialized numpy array.
List[BaseCharge]: A list containing the flattened charges in `indices`
List[bool]: The flattened flows of `indices`.
    List[List]: A list of lists of int, the order information needed to
      initialize a BlockSparseTensor.
"""
charges, flows = get_flat_meta_data(indices)
num_elements = comp_num_elements(charges, flows)
tmp = np.append(0, np.cumsum([len(i.flat_charges) for i in indices]))
order = [list(np.arange(tmp[n], tmp[n + 1])) for n in range(len(tmp) - 1)]
data = numpy_initializer(num_elements, *args, **kwargs)
return data, charges, flows, order
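# A hedged usage sketch (not part of the original module): build two U1 legs,
# then let `_data_initializer` size a zero-filled data vector to the number of
# symmetry-allowed elements. Assumes `U1Charge` is importable from
# tensornetwork.block_sparse.charge.
def _example_data_initializer():
  from tensornetwork.block_sparse.charge import U1Charge
  indices = [
      Index(U1Charge(np.array([0, 1, -1])), False),
      Index(U1Charge(np.array([0, 1, -1])), True)
  ]
  # np.zeros is the `numpy_initializer`; compute_num_nonzero (defined later in
  # this module) is the `comp_num_elements` callable.
  data, charges, flows, order = _data_initializer(np.zeros, compute_num_nonzero,
                                                  indices)
  return data, charges, flows, order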
def get_flat_meta_data(indices: Sequence[Index]) -> Tuple[List, List]:
"""
Return charges and flows of flattened `indices`.
Args:
indices: A list of `Index` objects.
Returns:
List[BaseCharge]: The flattened charges.
List[bool]: The flattened flows.
"""
charges = []
flows = []
for i in indices:
flows.extend(i.flat_flows)
charges.extend(i.flat_charges)
return charges, flows
def compute_sparse_lookup(
charges: List[BaseCharge], flows: Union[np.ndarray, List[bool]],
target_charges: BaseCharge) -> Tuple[np.ndarray, BaseCharge, np.ndarray]:
"""
Compute lookup table for how dense index positions map
to sparse index positions, treating only those elements as non-zero
whose charges fuse to `target_charges`.
Args:
charges: List of `BaseCharge` objects.
flows: A list of `bool`; the flow directions.
target_charges: A `BaseCharge`; the target charges for which
the fusion of `charges` is non-zero.
Returns:
    lookup: An np.ndarray of non-negative integers between `0` and
      `len(unique_charges)`. Positions in `lookup` holding the value `n`
      correspond to elements with charge value `unique_charges[n]`.
unique_charges: The unique charges of fusion of `charges`
label_to_unique: The integer labels of the unique charges.
"""
fused_charges = fuse_charges(charges, flows)
unique_charges, inverse = unique(fused_charges.charges, return_inverse=True)
_, label_to_unique, _ = intersect(
unique_charges, target_charges.charges, return_indices=True)
# _, label_to_unique, _ = unique_charges.intersect(
# target_charges, return_indices=True)
tmp = np.full(
unique_charges.shape[0], fill_value=-1, dtype=charges[0].label_dtype)
obj = charges[0].__new__(type(charges[0]))
obj.__init__(
charges=unique_charges,
charge_labels=None,
charge_types=charges[0].charge_types)
tmp[label_to_unique] = label_to_unique
lookup = tmp[inverse]
lookup = lookup[lookup >= 0]
return lookup, obj, np.sort(label_to_unique)
def compute_fused_charge_degeneracies(
charges: List[BaseCharge],
flows: Union[np.ndarray, List[bool]]) -> Tuple[BaseCharge, np.ndarray]:
"""
For a list of charges, computes all possible fused charges resulting
from fusing `charges` and their respective degeneracies
Args:
charges: List of `BaseCharge`, one for each leg of a
tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
Returns:
BaseCharge: The unique fused charges.
    np.ndarray: The degeneracies of each unique fused charge.
"""
if len(charges) == 1:
return (charges[0] * flows[0]).unique(return_counts=True)
dims = [c.dim for c in charges]
  # for small dims it is faster to fuse all charges
  # and call unique directly
if reduce(mul, dims, 1) < 20000:
fused = fuse_charges(charges, flows)
return fused.unique(return_counts=True)
partition = _find_best_partition(dims)
fused_left = fuse_charges(charges[:partition], flows[:partition])
fused_right = fuse_charges(charges[partition:], flows[partition:])
left_unique, left_degens = fused_left.unique(return_counts=True)
right_unique, right_degens = fused_right.unique(return_counts=True)
fused = left_unique + right_unique
unique_charges, charge_labels = fused.unique(return_inverse=True)
fused_degeneracies = fuse_degeneracies(left_degens, right_degens)
new_ord = np.argsort(charge_labels)
all_degens = np.cumsum(fused_degeneracies[new_ord])
cum_degens = all_degens[np.flatnonzero(np.diff(charge_labels[new_ord]))]
final_degeneracies = np.append(cum_degens, all_degens[-1]) - np.append(
0, cum_degens)
return unique_charges, final_degeneracies
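# A small hedged sketch: fusing two U1 legs with charges [0, 1] each and
# trivial flows should yield fused charges [0, 1, 2] with degeneracies
# [1, 2, 1]. Assumes `U1Charge` is importable from
# tensornetwork.block_sparse.charge.
def _example_fused_degeneracies():
  from tensornetwork.block_sparse.charge import U1Charge
  charges = [U1Charge(np.array([0, 1])), U1Charge(np.array([0, 1]))]
  return compute_fused_charge_degeneracies(charges, [False, False])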
def compute_unique_fused_charges(
charges: List[BaseCharge], flows: Union[np.ndarray,
List[bool]]) -> BaseCharge:
"""
For a list of charges, compute all possible fused charges resulting
from fusing `charges`.
Args:
charges: List of `BaseCharge`, one for each leg of a
tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
Returns:
BaseCharge: The unique fused charges.
"""
if len(charges) == 1:
return (charges[0] * flows[0]).unique()
accumulated_charges = (charges[0] * flows[0]).unique()
for n in range(1, len(charges)):
leg_charges = charges[n].unique()
fused_charges = accumulated_charges + leg_charges * flows[n]
accumulated_charges = fused_charges.unique()
return accumulated_charges
def compute_num_nonzero(charges: List[BaseCharge],
flows: Union[np.ndarray, List[bool]]) -> int:
"""
Compute the number of non-zero elements, given the meta-data of
a symmetric tensor.
Args:
charges: List of `BaseCharge`, one for each leg of a
tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
Returns:
int: The number of non-zero elements.
"""
if np.any([len(c) == 0 for c in charges]):
return 0
#pylint: disable=line-too-long
accumulated_charges, accumulated_degeneracies = compute_fused_charge_degeneracies(
charges, flows)
res = accumulated_charges == accumulated_charges.identity_charges(dim=1)
nz_inds = np.nonzero(res)[0]
if len(nz_inds) > 0:
return np.squeeze(accumulated_degeneracies[nz_inds][0])
return 0
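# A hedged usage sketch: count the symmetry-allowed elements of a two-leg
# tensor whose legs carry equal U1 charges with opposite flows. Assumes
# `U1Charge` is importable from tensornetwork.block_sparse.charge.
def _example_num_nonzero():
  from tensornetwork.block_sparse.charge import U1Charge
  indices = [
      Index(U1Charge(np.array([0, 0, 1])), False),
      Index(U1Charge(np.array([0, 0, 1])), True)
  ]
  charges, flows = get_flat_meta_data(indices)
  # Only elements whose fused charge equals the identity charge are non-zero.
  return compute_num_nonzero(charges, flows)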
def reduce_charges(charges: List[BaseCharge],
flows: Union[np.ndarray, List[bool]],
target_charges: np.ndarray,
return_locations: Optional[bool] = False,
strides: Optional[np.ndarray] = None) -> Any:
"""
Add quantum numbers arising from combining two or more charges into a
single index, keeping only the quantum numbers that appear in
  `target_charges`. Equivalent to using "combine_charges" followed
by "reduce", but is generally much more efficient.
Args:
charges: List of `BaseCharge`, one for each leg of a
tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
target_charges: n-by-D array of charges which should be kept,
with `n` the number of symmetries.
return_locations: If `True` return the location of the kept
values of the fused charges
strides: Index strides with which to compute the
      returned locations of the kept elements. Defaults to trivial strides
(based on row major order).
Returns:
BaseCharge: the fused index after reduction.
np.ndarray: Locations of the fused BaseCharge charges that were kept.
"""
tensor_dims = [len(c) for c in charges]
if len(charges) == 1:
# reduce single index
if strides is None:
strides = np.array([1], dtype=SIZE_T)
return charges[0].dual(flows[0]).reduce(
target_charges, return_locations=return_locations, strides=strides[0])
# find size-balanced partition of charges
partition = _find_best_partition(tensor_dims)
# compute quantum numbers for each partition
left_ind = fuse_charges(charges[:partition], flows[:partition])
right_ind = fuse_charges(charges[partition:], flows[partition:])
# compute combined qnums
comb_qnums = fuse_ndarray_charges(left_ind.unique_charges,
right_ind.unique_charges,
charges[0].charge_types)
#special case of empty charges
#pylint: disable=unsubscriptable-object
if (comb_qnums.shape[0] == 0) or (len(left_ind.charge_labels) == 0) or (len(
right_ind.charge_labels) == 0):
obj = charges[0].__new__(type(charges[0]))
obj.__init__(
np.empty((0, charges[0].num_symmetries), dtype=charges[0].dtype),
np.empty(0, dtype=charges[0].label_dtype), charges[0].charge_types)
if return_locations:
return obj, np.empty(0, dtype=SIZE_T)
return obj
unique_comb_qnums, comb_labels = unique(comb_qnums, return_inverse=True)
num_unique = unique_comb_qnums.shape[0]
# intersect combined qnums and target_charges
reduced_qnums, label_to_unique, _ = intersect(
unique_comb_qnums, target_charges, axis=0, return_indices=True)
map_to_kept = -np.ones(num_unique, dtype=charges[0].label_dtype)
map_to_kept[label_to_unique] = np.arange(len(label_to_unique))
# new_comb_labels is a matrix of shape
# (left_ind.num_unique, right_ind.num_unique)
  # each row new_comb_labels[n,:] contains integer values.
  # Positions where values >= 0
  # denote labels of right-charges that are kept.
new_comb_labels = map_to_kept[comb_labels].reshape(
[left_ind.num_unique, right_ind.num_unique])
reduced_rows = [0] * left_ind.num_unique
for n in range(left_ind.num_unique):
temp_label = new_comb_labels[n, right_ind.charge_labels]
reduced_rows[n] = temp_label[temp_label >= 0]
reduced_labels = np.concatenate(
[reduced_rows[n] for n in left_ind.charge_labels])
obj = charges[0].__new__(type(charges[0]))
obj.__init__(reduced_qnums, reduced_labels, charges[0].charge_types)
if return_locations:
row_locs = [0] * left_ind.num_unique
if strides is not None:
# computed locations based on non-trivial strides
row_pos = fuse_stride_arrays(tensor_dims[:partition], strides[:partition])
col_pos = fuse_stride_arrays(tensor_dims[partition:], strides[partition:])
for n in range(left_ind.num_unique):
temp_label = new_comb_labels[n, right_ind.charge_labels]
temp_keep = temp_label >= 0
if strides is not None:
row_locs[n] = col_pos[temp_keep]
else:
row_locs[n] = np.where(temp_keep)[0]
if strides is not None:
reduced_locs = np.concatenate([
row_pos[n] + row_locs[left_ind.charge_labels[n]]
for n in range(left_ind.dim)
])
else:
reduced_locs = np.concatenate([
n * right_ind.dim + row_locs[left_ind.charge_labels[n]]
for n in range(left_ind.dim)
])
return obj, reduced_locs
return obj
def _find_diagonal_sparse_blocks(
charges: List[BaseCharge], flows: Union[np.ndarray, List[bool]],
partition: int) -> Tuple[List, BaseCharge, np.ndarray]:
"""
Find the location of all non-trivial symmetry blocks from the data vector of
of BlockSparseTensor (when viewed as a matrix across some prescribed index
bi-partition).
Args:
charges: List of `BaseCharge`, one for each leg of a tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
partition: location of tensor partition (i.e. such that the
tensor is viewed as a matrix between `charges[:partition]` and
the remaining charges).
Returns:
    block_maps (List[np.ndarray]): list of integer arrays, each containing
      the locations of a symmetry block in the data vector.
    block_qnums (BaseCharge): The charges of the corresponding blocks,
      with 'm' the number of blocks.
block_dims (np.ndarray): 2-by-m array of matrix dimensions of each block.
"""
cacher = get_cacher()
if cacher.do_caching:
hash_val = _to_string(charges, flows, partition, list(range(len(charges))))
if hash_val in cacher.cache:
return cacher.cache[hash_val]
num_inds = len(charges)
if partition in (0, num_inds):
# special cases (matrix of trivial height or width)
num_nonzero = compute_num_nonzero(charges, flows)
block_maps = [np.arange(0, num_nonzero, dtype=SIZE_T).ravel()]
block_qnums = charges[0].identity_charges(dim=1).charges
block_dims = np.array([[1], [num_nonzero]])
if partition == len(flows):
block_dims = np.flipud(block_dims)
obj = charges[0].__new__(type(charges[0]))
obj.__init__(block_qnums, np.arange(1, dtype=charges[0].label_dtype),
charges[0].charge_types)
return block_maps, obj, block_dims
unique_row_qnums, row_degen = compute_fused_charge_degeneracies(
charges[:partition], flows[:partition])
unique_col_qnums, col_degen = compute_fused_charge_degeneracies(
charges[partition:], np.logical_not(flows[partition:]))
block_qnums, row_to_block, col_to_block = intersect(
unique_row_qnums.unique_charges,
unique_col_qnums.unique_charges,
axis=0,
return_indices=True)
num_blocks = block_qnums.shape[0]
if num_blocks == 0:
obj = charges[0].__new__(type(charges[0]))
obj.__init__(
np.zeros((0, charges[0].num_symmetries), dtype=charges[0].dtype),
np.arange(0, dtype=charges[0].label_dtype), charges[0].charge_types)
return [], obj, np.empty((2, 0), dtype=SIZE_T)
# calculate number of non-zero elements in each row of the matrix
row_ind = reduce_charges(charges[:partition], flows[:partition], block_qnums)
row_num_nz = col_degen[col_to_block[row_ind.charge_labels]]
cumulate_num_nz = np.insert(np.cumsum(row_num_nz[0:-1]), 0, 0).astype(SIZE_T)
# calculate mappings for the position in datavector of each block
if num_blocks < 15:
# faster method for small number of blocks
row_locs = np.concatenate([
(row_ind.charge_labels == n) for n in range(num_blocks)
]).reshape(num_blocks, row_ind.dim)
else:
# faster method for large number of blocks
row_locs = np.zeros([num_blocks, row_ind.dim], dtype=bool)
row_locs[row_ind.charge_labels,
np.arange(row_ind.dim)] = np.ones(
row_ind.dim, dtype=bool)
block_dims = np.array(
[[row_degen[row_to_block[n]], col_degen[col_to_block[n]]]
for n in range(num_blocks)],
dtype=SIZE_T).T
#pylint: disable=unsubscriptable-object
block_maps = [
np.ravel(cumulate_num_nz[row_locs[n, :]][:, None] +
np.arange(block_dims[1, n])[None, :]) for n in range(num_blocks)
]
obj = charges[0].__new__(type(charges[0]))
obj.__init__(block_qnums,
np.arange(block_qnums.shape[0], dtype=charges[0].label_dtype),
charges[0].charge_types)
if cacher.do_caching:
cacher.cache[hash_val] = (block_maps, obj, block_dims)
return cacher.cache[hash_val]
return block_maps, obj, block_dims
def _find_transposed_diagonal_sparse_blocks(
charges: List[BaseCharge],
flows: Union[np.ndarray, List[bool]],
tr_partition: int,
order: Optional[Union[List, np.ndarray]] = None
) -> Tuple[List, BaseCharge, np.ndarray]:
"""
Find the diagonal blocks of a transposed tensor with
meta-data `charges` and `flows`. `charges` and `flows`
are the charges and flows of the untransposed tensor,
`order` is the final transposition, and `tr_partition`
is the partition of the transposed tensor according to
which the diagonal blocks should be found.
Args:
charges: List of `BaseCharge`, one for each leg of a tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
tr_partition: Location of the transposed tensor partition
(i.e. such that the tensor is viewed as a matrix between
`charges[order[:partition]]` and `charges[order[partition:]]`).
order: Order with which to permute the tensor axes.
Returns:
    block_maps (List[np.ndarray]): list of integer arrays, each containing
      the locations of a symmetry block in the data vector.
block_qnums (BaseCharge): The charges of the corresponding blocks.
block_dims (np.ndarray): 2-by-m array of matrix dimensions of each block.
"""
flows = np.asarray(flows)
cacher = get_cacher()
if cacher.do_caching:
hash_val = _to_string(charges, flows, tr_partition, order)
if hash_val in cacher.cache:
return cacher.cache[hash_val]
if np.array_equal(order, None) or (np.array_equal(
np.array(order), np.arange(len(charges)))):
# no transpose order
return _find_diagonal_sparse_blocks(charges, flows, tr_partition)
# general case: non-trivial transposition is required
num_inds = len(charges)
tensor_dims = np.array([charges[n].dim for n in range(num_inds)], dtype=int)
strides = np.append(np.flip(np.cumprod(np.flip(tensor_dims[1:]))), 1)
# compute qnums of row/cols in original tensor
orig_partition = _find_best_partition(tensor_dims)
orig_width = np.prod(tensor_dims[orig_partition:])
orig_unique_row_qnums = compute_unique_fused_charges(charges[:orig_partition],
flows[:orig_partition])
orig_unique_col_qnums, orig_col_degen = compute_fused_charge_degeneracies(
charges[orig_partition:], np.logical_not(flows[orig_partition:]))
orig_block_qnums, row_map, col_map = intersect(
orig_unique_row_qnums.unique_charges,
orig_unique_col_qnums.unique_charges,
axis=0,
return_indices=True)
orig_num_blocks = orig_block_qnums.shape[0]
if orig_num_blocks == 0:
# special case: trivial number of non-zero elements
obj = charges[0].__new__(type(charges[0]))
obj.__init__(
np.empty((0, charges[0].num_symmetries), dtype=charges[0].dtype),
np.arange(0, dtype=charges[0].label_dtype), charges[0].charge_types)
return [], obj, np.empty((2, 0), dtype=SIZE_T)
orig_row_ind = fuse_charges(charges[:orig_partition], flows[:orig_partition])
orig_col_ind = fuse_charges(charges[orig_partition:],
np.logical_not(flows[orig_partition:]))
inv_row_map = -np.ones(
orig_unique_row_qnums.unique_charges.shape[0],
dtype=charges[0].label_dtype)
inv_row_map[row_map] = np.arange(len(row_map), dtype=charges[0].label_dtype)
all_degens = np.append(orig_col_degen[col_map],
0)[inv_row_map[orig_row_ind.charge_labels]]
all_cumul_degens = np.cumsum(np.insert(all_degens[:-1], 0, 0)).astype(SIZE_T)
dense_to_sparse = np.empty(orig_width, dtype=SIZE_T)
for n in range(orig_num_blocks):
dense_to_sparse[orig_col_ind.charge_labels == col_map[n]] = np.arange(
orig_col_degen[col_map[n]], dtype=SIZE_T)
# define properties of new tensor resulting from transposition
new_strides = strides[order]
new_row_charges = [charges[n] for n in order[:tr_partition]]
new_col_charges = [charges[n] for n in order[tr_partition:]]
new_row_flows = flows[order[:tr_partition]]
new_col_flows = flows[order[tr_partition:]]
if tr_partition == 0:
# special case: reshape into row vector
# compute qnums of row/cols in transposed tensor
unique_col_qnums, new_col_degen = compute_fused_charge_degeneracies(
new_col_charges, np.logical_not(new_col_flows))
identity_charges = charges[0].identity_charges(dim=1)
block_qnums, new_row_map, new_col_map = intersect(
identity_charges.unique_charges,
unique_col_qnums.unique_charges,
axis=0,
return_indices=True)
block_dims = np.array([[1], new_col_degen[new_col_map]], dtype=SIZE_T)
num_blocks = 1
col_ind, col_locs = reduce_charges(
new_col_charges,
np.logical_not(new_col_flows),
block_qnums,
return_locations=True,
strides=new_strides[tr_partition:])
# find location of blocks in transposed tensor (w.r.t positions in original)
#pylint: disable=no-member
orig_row_posR, orig_col_posR = np.divmod(
col_locs[col_ind.charge_labels == 0], orig_width)
block_maps = [(all_cumul_degens[orig_row_posR] +
dense_to_sparse[orig_col_posR]).ravel()]
obj = charges[0].__new__(type(charges[0]))
obj.__init__(block_qnums,
np.arange(block_qnums.shape[0], dtype=charges[0].label_dtype),
charges[0].charge_types)
elif tr_partition == len(charges):
# special case: reshape into col vector
# compute qnums of row/cols in transposed tensor
unique_row_qnums, new_row_degen = compute_fused_charge_degeneracies(
new_row_charges, new_row_flows)
identity_charges = charges[0].identity_charges(dim=1)
block_qnums, new_row_map, new_col_map = intersect(
unique_row_qnums.unique_charges,
identity_charges.unique_charges,
axis=0,
return_indices=True)
block_dims = np.array([new_row_degen[new_row_map], [1]], dtype=SIZE_T)
num_blocks = 1
row_ind, row_locs = reduce_charges(
new_row_charges,
new_row_flows,
block_qnums,
return_locations=True,
strides=new_strides[:tr_partition])
# find location of blocks in transposed tensor (w.r.t positions in original)
#pylint: disable=no-member
orig_row_posL, orig_col_posL = np.divmod(
row_locs[row_ind.charge_labels == 0], orig_width)
block_maps = [(all_cumul_degens[orig_row_posL] +
dense_to_sparse[orig_col_posL]).ravel()]
obj = charges[0].__new__(type(charges[0]))
obj.__init__(block_qnums,
np.arange(block_qnums.shape[0], dtype=charges[0].label_dtype),
charges[0].charge_types)
else:
unique_row_qnums, new_row_degen = compute_fused_charge_degeneracies(
new_row_charges, new_row_flows)
unique_col_qnums, new_col_degen = compute_fused_charge_degeneracies(
new_col_charges, np.logical_not(new_col_flows))
block_qnums, new_row_map, new_col_map = intersect(
unique_row_qnums.unique_charges,
unique_col_qnums.unique_charges,
axis=0,
return_indices=True)
block_dims = np.array(
[new_row_degen[new_row_map], new_col_degen[new_col_map]], dtype=SIZE_T)
num_blocks = len(new_row_map)
row_ind, row_locs = reduce_charges(
new_row_charges,
new_row_flows,
block_qnums,
return_locations=True,
strides=new_strides[:tr_partition])
col_ind, col_locs = reduce_charges(
new_col_charges,
np.logical_not(new_col_flows),
block_qnums,
return_locations=True,
strides=new_strides[tr_partition:])
block_maps = [0] * num_blocks
for n in range(num_blocks):
#pylint: disable=no-member
orig_row_posL, orig_col_posL = np.divmod(
row_locs[row_ind.charge_labels == n], orig_width)
#pylint: disable=no-member
orig_row_posR, orig_col_posR = np.divmod(
col_locs[col_ind.charge_labels == n], orig_width)
block_maps[n] = (
all_cumul_degens[np.add.outer(orig_row_posL, orig_row_posR)] +
dense_to_sparse[np.add.outer(orig_col_posL, orig_col_posR)]).ravel()
obj = charges[0].__new__(type(charges[0]))
obj.__init__(block_qnums,
np.arange(block_qnums.shape[0], dtype=charges[0].label_dtype),
charges[0].charge_types)
if cacher.do_caching:
cacher.cache[hash_val] = (block_maps, obj, block_dims)
return cacher.cache[hash_val]
return block_maps, obj, block_dims
def _to_string(charges: List[BaseCharge], flows: Union[np.ndarray, List],
tr_partition: int, order: List[int]) -> str:
"""
map the input arguments of _find_transposed_diagonal_sparse_blocks
to a string.
Args:
charges: List of `BaseCharge`, one for each leg of a tensor.
flows: A list of bool, one for each leg of a tensor.
with values `False` or `True` denoting inflowing and
outflowing charge direction, respectively.
tr_partition: Location of the transposed tensor partition
(i.e. such that the tensor is viewed as a matrix between
`charges[order[:partition]]` and `charges[order[partition:]]`).
order: Order with which to permute the tensor axes.
Returns:
str: The string representation of the input
"""
  return ''.join([str(c.charges.tobytes()) for c in charges] + [
      str(np.array(flows).tobytes()),
      str(tr_partition),
      str(np.array(order, dtype=np.int16).tobytes())
])
|
from flask import current_app
from lemur.common.managers import InstanceManager
# inspired by https://github.com/getsentry/sentry
class PluginManager(InstanceManager):
def __iter__(self):
return iter(self.all())
def __len__(self):
return sum(1 for i in self.all())
def all(self, version=1, plugin_type=None):
for plugin in sorted(
super(PluginManager, self).all(), key=lambda x: x.get_title()
):
            if plugin_type and plugin.type != plugin_type:
continue
if not plugin.is_enabled():
continue
if version is not None and plugin.__version__ != version:
continue
yield plugin
def get(self, slug):
for plugin in self.all(version=1):
if plugin.slug == slug:
return plugin
for plugin in self.all(version=2):
if plugin.slug == slug:
return plugin
current_app.logger.error(
"Unable to find slug: {} in self.all version 1: {} or version 2: {}".format(
slug, self.all(version=1), self.all(version=2)
)
)
raise KeyError(slug)
def first(self, func_name, *args, **kwargs):
version = kwargs.pop("version", 1)
for plugin in self.all(version=version):
try:
result = getattr(plugin, func_name)(*args, **kwargs)
except Exception as e:
current_app.logger.error(
"Error processing %s() on %r: %s",
func_name,
plugin.__class__,
e,
extra={"func_arg": args, "func_kwargs": kwargs},
exc_info=True,
)
continue
if result is not None:
return result
def register(self, cls):
self.add("%s.%s" % (cls.__module__, cls.__name__))
return cls
def unregister(self, cls):
self.remove("%s.%s" % (cls.__module__, cls.__name__))
return cls
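# A hedged usage sketch (not part of this module): `register` stores the
# plugin's dotted path in the underlying InstanceManager and `unregister`
# removes it again. `ExamplePlugin` is a stand-in, not a real Lemur plugin.
def _example_register_plugin():
    manager = PluginManager()
    class ExamplePlugin:
        pass
    manager.register(ExamplePlugin)  # adds "<module>.ExamplePlugin"
    manager.unregister(ExamplePlugin)
    return manager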
|
import operator
import os
import uuid
import natsort
from nikola import utils
from nikola.packages.datecond import date_in_range
from nikola.plugin_categories import ShortcodePlugin
class PostListShortcode(ShortcodePlugin):
"""Provide a shortcode to create a list of posts.
Post List
=========
:Directive Arguments: None.
:Directive Options: lang, start, stop, reverse, sort, date, tags, categories, sections, slugs, post_type, template, id
:Directive Content: None.
The posts appearing in the list can be filtered by options.
*List slicing* is provided with the *start*, *stop* and *reverse* options.
    The following options, none of which are required, are recognized:
``start`` : integer
The index of the first post to show.
A negative value like ``-3`` will show the *last* three posts in the
post-list.
Defaults to None.
``stop`` : integer
The index of the last post to show.
        A negative value like ``-1`` will show every post, but not the
*last* in the post-list.
Defaults to None.
``reverse`` : flag
Reverse the order of the post-list.
        Default is to not reverse the order of posts.
``sort`` : string
Sort post list by one of each post's attributes, usually ``title`` or a
custom ``priority``. Defaults to None (chronological sorting).
``date`` : string
Show posts that match date range specified by this option. Format:
* comma-separated clauses (AND)
* clause: attribute comparison_operator value (spaces optional)
        * attribute: year, month, day, hour, minute, second, weekday, isoweekday; or empty for full datetime
* comparison_operator: == != <= >= < >
* value: integer, 'now', 'today', or dateutil-compatible date input
``tags`` : string [, string...]
Filter posts to show only posts having at least one of the ``tags``.
Defaults to None.
``require_all_tags`` : flag
Change tag filter behaviour to show only posts that have all specified ``tags``.
Defaults to False.
``categories`` : string [, string...]
Filter posts to show only posts having one of the ``categories``.
Defaults to None.
``sections`` : string [, string...]
Filter posts to show only posts having one of the ``sections``.
Defaults to None.
``slugs`` : string [, string...]
Filter posts to show only posts having at least one of the ``slugs``.
Defaults to None.
``post_type`` (or ``type``) : string
Show only ``posts``, ``pages`` or ``all``.
Replaces ``all``. Defaults to ``posts``.
``lang`` : string
The language of post *titles* and *links*.
Defaults to default language.
``template`` : string
The name of an alternative template to render the post-list.
Defaults to ``post_list_directive.tmpl``
``id`` : string
A manual id for the post list.
        Defaults to a random name composed of 'post_list_' + uuid.uuid4().hex.
"""
name = "post_list"
def set_site(self, site):
"""Set the site."""
super().set_site(site)
site.register_shortcode('post-list', self.handler)
def handler(self, start=None, stop=None, reverse=False, tags=None, require_all_tags=False, categories=None,
sections=None, slugs=None, post_type='post', type=False,
lang=None, template='post_list_directive.tmpl', sort=None,
id=None, data=None, state=None, site=None, date=None, filename=None, post=None):
"""Generate HTML for post-list."""
if lang is None:
lang = utils.LocaleBorg().current_lang
if site.invariant: # for testing purposes
post_list_id = id or 'post_list_' + 'fixedvaluethatisnotauuid'
else:
post_list_id = id or 'post_list_' + uuid.uuid4().hex
# Get post from filename if available
if filename:
self_post = site.post_per_input_file.get(filename)
else:
self_post = None
if self_post:
self_post.register_depfile("####MAGIC####TIMELINE", lang=lang)
self_post.register_depfile("####MAGIC####CONFIG:GLOBAL_CONTEXT", lang=lang)
# If we get strings for start/stop, make them integers
if start is not None:
start = int(start)
if stop is not None:
stop = int(stop)
# Parse tags/categories/sections/slugs (input is strings)
categories = [c.strip().lower() for c in categories.split(',')] if categories else []
sections = [s.strip().lower() for s in sections.split(',')] if sections else []
slugs = [s.strip() for s in slugs.split(',')] if slugs else []
filtered_timeline = []
posts = []
step = None if reverse is False else -1
if type is not False:
post_type = type
if post_type == 'page' or post_type == 'pages':
timeline = [p for p in site.timeline if not p.use_in_feeds]
elif post_type == 'all':
timeline = [p for p in site.timeline]
else: # post
timeline = [p for p in site.timeline if p.use_in_feeds]
        # Remove self_post from the timeline; listing the current post would be redundant.
timeline = [p for p in timeline if p.source_path != filename]
if categories:
timeline = [p for p in timeline if p.meta('category', lang=lang).lower() in categories]
if sections:
timeline = [p for p in timeline if p.section_name(lang).lower() in sections]
if tags:
tags = {t.strip().lower() for t in tags.split(',')}
if require_all_tags:
compare = set.issubset
else:
compare = operator.and_
for post in timeline:
post_tags = {t.lower() for t in post.tags}
if compare(tags, post_tags):
filtered_timeline.append(post)
else:
filtered_timeline = timeline
if sort:
filtered_timeline = natsort.natsorted(filtered_timeline, key=lambda post: post.meta[lang][sort], alg=natsort.ns.F | natsort.ns.IC)
if date:
_now = utils.current_time()
filtered_timeline = [p for p in filtered_timeline if date_in_range(utils.html_unescape(date), p.date, now=_now)]
for post in filtered_timeline[start:stop:step]:
if slugs:
cont = True
for slug in slugs:
if slug == post.meta('slug'):
cont = False
if cont:
continue
bp = post.translated_base_path(lang)
if os.path.exists(bp) and state:
state.document.settings.record_dependencies.add(bp)
elif os.path.exists(bp) and self_post:
self_post.register_depfile(bp, lang=lang)
posts += [post]
template_deps = site.template_system.template_deps(template)
if state:
# Register template as a dependency (Issue #2391)
for d in template_deps:
state.document.settings.record_dependencies.add(d)
elif self_post:
for d in template_deps:
self_post.register_depfile(d, lang=lang)
template_data = site.GLOBAL_CONTEXT.copy()
template_data.update({
'lang': lang,
'posts': posts,
# Need to provide str, not TranslatableSetting (Issue #2104)
'date_format': site.GLOBAL_CONTEXT.get('date_format')[lang],
'post_list_id': post_list_id,
'messages': site.MESSAGES,
'_link': site.link,
})
output = site.template_system.render_template(
template, None, template_data)
return output, template_deps
# Request file name from shortcode (Issue #2412)
PostListShortcode.handler.nikola_shortcode_pass_filename = True
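# A hedged usage sketch: once registered under the name ``post-list``, the
# shortcode can be used in a post roughly like this (option names as
# documented in the class docstring above):
#
#   {{% post-list stop=5 tags="python,nikola" sort="title" %}}{{% /post-list %}}
#
# which renders the matching posts with ``post_list_directive.tmpl``.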
|
from .std import tqdm, TqdmTypeError, TqdmKeyError
from ._version import __version__ # NOQA
import sys
import re
import logging
__all__ = ["main"]
def cast(val, typ):
log = logging.getLogger(__name__)
log.debug((val, typ))
if " or " in typ:
for t in typ.split(" or "):
try:
return cast(val, t)
except TqdmTypeError:
pass
raise TqdmTypeError(val + ' : ' + typ)
# sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n')
if typ == 'bool':
if (val == 'True') or (val == ''):
return True
elif val == 'False':
return False
else:
raise TqdmTypeError(val + ' : ' + typ)
try:
return eval(typ + '("' + val + '")')
except:
if typ == 'chr':
return chr(ord(eval('"' + val + '"')))
else:
raise TqdmTypeError(val + ' : ' + typ)
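# A few hedged examples of `cast` behaviour (mirroring the logic above, not
# extending it); runnable as a quick sanity check.
def _example_cast():
    assert cast('10', 'int') == 10
    assert cast('True', 'bool') is True
    assert cast('0.1', 'float or int') == 0.1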
def posix_pipe(fin, fout, delim='\n', buf_size=256,
callback=lambda int: None # pragma: no cover
):
"""
Params
------
fin : file with `read(buf_size : int)` method
fout : file with `write` (and optionally `flush`) methods.
callback : function(int), e.g.: `tqdm.update`
"""
fp_write = fout.write
# tmp = ''
if not delim:
while True:
tmp = fin.read(buf_size)
# flush at EOF
if not tmp:
getattr(fout, 'flush', lambda: None)() # pragma: no cover
return
fp_write(tmp)
callback(len(tmp))
# return
buf = ''
# n = 0
while True:
tmp = fin.read(buf_size)
# flush at EOF
if not tmp:
if buf:
fp_write(buf)
callback(1 + buf.count(delim)) # n += 1 + buf.count(delim)
getattr(fout, 'flush', lambda: None)() # pragma: no cover
return # n
while True:
try:
i = tmp.index(delim)
except ValueError:
buf += tmp
break
else:
fp_write(buf + tmp[:i + len(delim)])
callback(1) # n += 1
buf = ''
tmp = tmp[i + len(delim):]
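# A hedged sketch of `posix_pipe` on in-memory streams: copy three lines from
# one StringIO to another, invoking the callback once per delimiter found.
def _example_posix_pipe():
    from io import StringIO
    counted = []
    fin, fout = StringIO("a\nb\nc\n"), StringIO()
    posix_pipe(fin, fout, delim='\n', buf_size=4, callback=counted.append)
    return fout.getvalue(), sum(counted)  # ("a\nb\nc\n", 3)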
# ((opt, type), ... )
RE_OPTS = re.compile(r'\n {8}(\S+)\s{2,}:\s*([^,]+)')
# better split method assuming no positional args
RE_SHLEX = re.compile(r'\s*(?<!\S)--?([^\s=]+)(\s+|=|$)')
# TODO: add custom support for some of the following?
UNSUPPORTED_OPTS = ('iterable', 'gui', 'out', 'file')
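# A hedged sketch of how RE_SHLEX tokenises a CLI string: re.split with the two
# capture groups yields (name, separator, value) triples, which `main` below
# re-assembles into an options dict via argv[1::3] / argv[3::3].
def _example_re_shlex():
    argv = RE_SHLEX.split('tqdm --desc reading --total=100')
    # argv == ['tqdm', 'desc', ' ', 'reading', 'total', '=', '100']
    return dict(zip(argv[1::3], argv[3::3]))  # {'desc': 'reading', 'total': '100'}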
# The 8 leading spaces are required for consistency
CLI_EXTRA_DOC = r"""
Extra CLI Options
-----------------
name : type, optional
TODO: find out why this is needed.
delim : chr, optional
Delimiting character [default: '\n']. Use '\0' for null.
N.B.: on Windows systems, Python converts '\n' to '\r\n'.
buf_size : int, optional
String buffer size in bytes [default: 256]
used when `delim` is specified.
bytes : bool, optional
If true, will count bytes, ignore `delim`, and default
`unit_scale` to True, `unit_divisor` to 1024, and `unit` to 'B'.
manpath : str, optional
Directory in which to install tqdm man pages.
log : str, optional
CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET.
"""
def main(fp=sys.stderr, argv=None):
"""
Parameters (internal use only)
    ------------------------------
fp : file-like object for tqdm
argv : list (default: sys.argv[1:])
"""
if argv is None:
argv = sys.argv[1:]
try:
log = argv.index('--log')
except ValueError:
for i in argv:
if i.startswith('--log='):
logLevel = i[len('--log='):]
break
else:
logLevel = 'INFO'
else:
# argv.pop(log)
# logLevel = argv.pop(log)
logLevel = argv[log + 1]
logging.basicConfig(
level=getattr(logging, logLevel),
format="%(levelname)s:%(module)s:%(lineno)d:%(message)s")
log = logging.getLogger(__name__)
d = tqdm.__init__.__doc__ + CLI_EXTRA_DOC
opt_types = dict(RE_OPTS.findall(d))
# opt_types['delim'] = 'chr'
for o in UNSUPPORTED_OPTS:
opt_types.pop(o)
log.debug(sorted(opt_types.items()))
# d = RE_OPTS.sub(r' --\1=<\1> : \2', d)
split = RE_OPTS.split(d)
opt_types_desc = zip(split[1::3], split[2::3], split[3::3])
d = ''.join('\n --{0}=<{0}> : {1}{2}'.format(*otd)
for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS)
d = """Usage:
tqdm [--help | options]
Options:
-h, --help Print this help and exit
-v, --version Print version and exit
""" + d.strip('\n') + '\n'
# opts = docopt(d, version=__version__)
if any(v in argv for v in ('-v', '--version')):
sys.stdout.write(__version__ + '\n')
sys.exit(0)
elif any(v in argv for v in ('-h', '--help')):
sys.stdout.write(d + '\n')
sys.exit(0)
argv = RE_SHLEX.split(' '.join(["tqdm"] + argv))
opts = dict(zip(argv[1::3], argv[3::3]))
log.debug(opts)
opts.pop('log', True)
tqdm_args = {'file': fp}
try:
for (o, v) in opts.items():
try:
tqdm_args[o] = cast(v, opt_types[o])
except KeyError as e:
raise TqdmKeyError(str(e))
log.debug('args:' + str(tqdm_args))
except:
fp.write('\nError:\nUsage:\n tqdm [--help | options]\n')
for i in sys.stdin:
sys.stdout.write(i)
raise
else:
buf_size = tqdm_args.pop('buf_size', 256)
delim = tqdm_args.pop('delim', '\n')
delim_per_char = tqdm_args.pop('bytes', False)
manpath = tqdm_args.pop('manpath', None)
stdin = getattr(sys.stdin, 'buffer', sys.stdin)
stdout = getattr(sys.stdout, 'buffer', sys.stdout)
if manpath is not None:
from os import path
from shutil import copyfile
from pkg_resources import resource_filename, Requirement
fi = resource_filename(Requirement.parse('tqdm'), 'tqdm/tqdm.1')
fo = path.join(manpath, 'tqdm.1')
copyfile(fi, fo)
log.info("written:" + fo)
sys.exit(0)
if delim_per_char:
tqdm_args.setdefault('unit', 'B')
tqdm_args.setdefault('unit_scale', True)
tqdm_args.setdefault('unit_divisor', 1024)
log.debug(tqdm_args)
with tqdm(**tqdm_args) as t:
posix_pipe(stdin, stdout, '', buf_size, t.update)
elif delim == '\n':
log.debug(tqdm_args)
for i in tqdm(stdin, **tqdm_args):
stdout.write(i)
else:
log.debug(tqdm_args)
with tqdm(**tqdm_args) as t:
posix_pipe(stdin, stdout, delim, buf_size, t.update)
|
import io
import os
import subprocess
from nikola.plugin_categories import PageCompiler
from nikola.utils import req_missing, makedirs, write_metadata
class CompilePandoc(PageCompiler):
"""Compile markups into HTML using pandoc."""
name = "pandoc"
friendly_name = "pandoc"
def set_site(self, site):
"""Set Nikola site."""
self.config_dependencies = [str(site.config['PANDOC_OPTIONS'])]
super().set_site(site)
def compile(self, source, dest, is_two_file=True, post=None, lang=None):
"""Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
try:
subprocess.check_call(['pandoc', '-o', dest, source] + self.site.config['PANDOC_OPTIONS'])
with open(dest, 'r', encoding='utf-8-sig') as inf:
output, shortcode_deps = self.site.apply_shortcodes(inf.read())
with open(dest, 'w', encoding='utf-8') as outf:
outf.write(output)
if post is None:
if shortcode_deps:
self.logger.error(
"Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += shortcode_deps
except OSError as e:
            if e.strerror == 'No such file or directory':
req_missing(['pandoc'], 'build this site (compile with pandoc)', python=False)
def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
"""Compile into HTML strings."""
raise ValueError("Pandoc compiler does not support compile_string due to multiple output formats")
def create_post(self, path, **kw):
"""Create a new post."""
content = kw.pop('content', None)
onefile = kw.pop('onefile', False)
# is_page is not used by create_post as of now.
kw.pop('is_page', False)
metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
makedirs(os.path.dirname(path))
if not content.endswith('\n'):
content += '\n'
with io.open(path, "w+", encoding="utf8") as fd:
if onefile:
fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
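# A hedged conf.py sketch (standard Nikola settings; adjust to your site):
# route Markdown and reST sources through this compiler and pass extra flags
# straight to the pandoc executable via PANDOC_OPTIONS, which compile() above
# appends to the command line.
#
#     COMPILERS["pandoc"] = ['.md', '.rst']
#     PANDOC_OPTIONS = ['--mathjax', '--toc']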
|
import pathlib
from unittest import mock
import pytest
from qutebrowser.misc import lineparser as lineparsermod
class TestBaseLineParser:
CONFDIR = "this really doesn't matter"
FILENAME = "and neither does this"
@pytest.fixture
def lineparser(self):
"""Fixture providing a BaseLineParser."""
return lineparsermod.BaseLineParser(self.CONFDIR, self.FILENAME)
def test_prepare_save_missing(self, mocker, lineparser):
"""Test if _prepare_save does what it's supposed to do."""
os_mock = mocker.patch('qutebrowser.misc.lineparser.os')
lineparser._prepare_save()
os_mock.makedirs.assert_called_with(self.CONFDIR, 0o755, exist_ok=True)
def test_double_open(self, mocker, lineparser):
"""Test if _open refuses reentry."""
mocker.patch('builtins.open', mock.mock_open())
with lineparser._open('r'):
with pytest.raises(IOError,
match="Refusing to double-open LineParser."):
with lineparser._open('r'):
pass
def test_binary(self, mocker):
"""Test if _open and _write correctly handle binary files."""
open_mock = mock.mock_open()
mocker.patch('builtins.open', open_mock)
testdata = b'\xf0\xff'
lineparser = lineparsermod.BaseLineParser(
self.CONFDIR, self.FILENAME, binary=True)
with lineparser._open('r') as f:
lineparser._write(f, [testdata])
open_mock.assert_called_once_with(
str(pathlib.Path(self.CONFDIR) / self.FILENAME), 'rb')
open_mock().write.assert_has_calls([
mock.call(testdata),
mock.call(b'\n')
])
class TestLineParser:
@pytest.fixture
def lineparser(self, tmp_path):
"""Fixture to get a LineParser for tests."""
lp = lineparsermod.LineParser(str(tmp_path), 'file')
lp.save()
return lp
def test_init(self, tmp_path):
"""Test if creating a line parser correctly reads its file."""
(tmp_path / 'file').write_text('one\ntwo\n')
lineparser = lineparsermod.LineParser(str(tmp_path), 'file')
assert lineparser.data == ['one', 'two']
(tmp_path / 'file').write_bytes(b'\xfe\n\xff\n')
lineparser = lineparsermod.LineParser(str(tmp_path), 'file',
binary=True)
assert lineparser.data == [b'\xfe', b'\xff']
def test_clear(self, tmp_path, lineparser):
"""Test if clear() empties its file."""
lineparser.data = ['one', 'two']
lineparser.save()
assert (tmp_path / 'file').read_text() == 'one\ntwo\n'
lineparser.clear()
assert not lineparser.data
assert (tmp_path / 'file').read_text() == ''
def test_double_open(self, lineparser):
"""Test if save() bails on an already open file."""
with lineparser._open('r'):
with pytest.raises(IOError,
match="Refusing to double-open LineParser."):
lineparser.save()
def test_prepare_save(self, tmp_path, lineparser):
"""Test if save() bails when _prepare_save() returns False."""
(tmp_path / 'file').write_text('pristine\n')
lineparser.data = ['changed']
lineparser._prepare_save = lambda: False
lineparser.save()
assert (tmp_path / 'file').read_text() == 'pristine\n'
|
from homeassistant.core import State
from homeassistant.setup import async_setup_component
async def test_reproducing_states(hass):
"""Test reproducing input_boolean states."""
assert await async_setup_component(
hass,
"input_boolean",
{
"input_boolean": {
"initial_on": {"initial": True},
"initial_off": {"initial": False},
}
},
)
await hass.helpers.state.async_reproduce_state(
[
State("input_boolean.initial_on", "off"),
State("input_boolean.initial_off", "on"),
# Should not raise
State("input_boolean.non_existing", "on"),
],
)
assert hass.states.get("input_boolean.initial_off").state == "on"
assert hass.states.get("input_boolean.initial_on").state == "off"
await hass.helpers.state.async_reproduce_state(
[
# Test invalid state
State("input_boolean.initial_on", "invalid_state"),
# Set to state it already is.
State("input_boolean.initial_off", "on"),
],
)
assert hass.states.get("input_boolean.initial_on").state == "off"
assert hass.states.get("input_boolean.initial_off").state == "on"
|
from acme.errors import PollError, WildcardUnsupportedError
from acme.messages import Error as AcmeError
from botocore.exceptions import ClientError
from flask import current_app
from lemur.authorizations import service as authorization_service
from lemur.constants import CRLReason
from lemur.dns_providers import service as dns_provider_service
from lemur.exceptions import InvalidConfiguration
from lemur.extensions import metrics, sentry
from lemur.plugins import lemur_acme as acme
from lemur.plugins.bases import IssuerPlugin
from lemur.plugins.lemur_acme.acme_handlers import AcmeHandler, AcmeDnsHandler
from lemur.plugins.lemur_acme.challenge_types import AcmeHttpChallenge, AcmeDnsChallenge
class ACMEIssuerPlugin(IssuerPlugin):
title = "Acme"
slug = "acme-issuer"
description = (
"Enables the creation of certificates via ACME CAs (including Let's Encrypt), using the DNS-01 challenge"
)
version = acme.VERSION
author = "Netflix"
author_url = "https://github.com/netflix/lemur.git"
options = [
{
"name": "acme_url",
"type": "str",
"required": True,
"validation": r"/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/",
"helpMessage": "Must be a valid web url starting with http[s]://",
},
{
"name": "telephone",
"type": "str",
"default": "",
"helpMessage": "Telephone to use",
},
{
"name": "email",
"type": "str",
"default": "",
"validation": r"/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/",
"helpMessage": "Email to use",
},
{
"name": "certificate",
"type": "textarea",
"default": "",
"validation": "/^-----BEGIN CERTIFICATE-----/",
"helpMessage": "Certificate to use",
},
{
"name": "store_account",
"type": "bool",
"required": False,
"helpMessage": "Disable to create a new account for each ACME request",
"default": False,
}
]
def __init__(self, *args, **kwargs):
super(ACMEIssuerPlugin, self).__init__(*args, **kwargs)
def get_ordered_certificate(self, pending_cert):
self.acme = AcmeDnsHandler()
acme_client, registration = self.acme.setup_acme_client(pending_cert.authority)
order_info = authorization_service.get(pending_cert.external_id)
if pending_cert.dns_provider_id:
dns_provider = dns_provider_service.get(pending_cert.dns_provider_id)
for domain in order_info.domains:
# Currently, we only support specifying one DNS provider per certificate, even if that
# certificate has multiple SANs that may belong to different providers.
self.acme.dns_providers_for_domain[domain] = [dns_provider]
else:
for domain in order_info.domains:
self.acme.autodetect_dns_providers(domain)
try:
order = acme_client.new_order(pending_cert.csr)
except WildcardUnsupportedError:
metrics.send("get_ordered_certificate_wildcard_unsupported", "counter", 1)
raise Exception(
"The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates."
)
try:
authorizations = self.acme.get_authorizations(
acme_client, order, order_info
)
except ClientError:
sentry.captureException()
metrics.send("get_ordered_certificate_error", "counter", 1)
current_app.logger.error(
f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True
)
return False
authorizations = self.acme.finalize_authorizations(acme_client, authorizations)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(
acme_client, authorizations, order
)
cert = {
"body": "\n".join(str(pem_certificate).splitlines()),
"chain": "\n".join(str(pem_certificate_chain).splitlines()),
"external_id": str(pending_cert.external_id),
}
return cert
def get_ordered_certificates(self, pending_certs):
self.acme = AcmeDnsHandler()
self.acme_dns_challenge = AcmeDnsChallenge()
pending = []
certs = []
for pending_cert in pending_certs:
try:
acme_client, registration = self.acme.setup_acme_client(
pending_cert.authority
)
order_info = authorization_service.get(pending_cert.external_id)
if pending_cert.dns_provider_id:
dns_provider = dns_provider_service.get(
pending_cert.dns_provider_id
)
for domain in order_info.domains:
# Currently, we only support specifying one DNS provider per certificate, even if that
# certificate has multiple SANs that may belong to different providers.
self.acme.dns_providers_for_domain[domain] = [dns_provider]
else:
for domain in order_info.domains:
self.acme.autodetect_dns_providers(domain)
try:
order = acme_client.new_order(pending_cert.csr)
except WildcardUnsupportedError:
sentry.captureException()
metrics.send(
"get_ordered_certificates_wildcard_unsupported_error",
"counter",
1,
)
raise Exception(
"The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates."
)
authorizations = self.acme.get_authorizations(
acme_client, order, order_info
)
pending.append(
{
"acme_client": acme_client,
"authorizations": authorizations,
"pending_cert": pending_cert,
"order": order,
}
)
except (ClientError, ValueError, Exception) as e:
sentry.captureException()
metrics.send(
"get_ordered_certificates_pending_creation_error", "counter", 1
)
current_app.logger.error(
f"Unable to resolve pending cert: {pending_cert}", exc_info=True
)
error = str(e)
if globals().get("order") and order:
error += f" Order uri: {order.uri}"
certs.append(
{"cert": False, "pending_cert": pending_cert, "last_error": error}
)
for entry in pending:
try:
entry["authorizations"] = self.acme.finalize_authorizations(
entry["acme_client"], entry["authorizations"]
)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(
entry["acme_client"], entry["authorizations"], entry["order"]
)
cert = {
"body": "\n".join(str(pem_certificate).splitlines()),
"chain": "\n".join(str(pem_certificate_chain).splitlines()),
"external_id": str(entry["pending_cert"].external_id),
}
certs.append({"cert": cert, "pending_cert": entry["pending_cert"]})
except (PollError, AcmeError, Exception) as e:
sentry.captureException()
metrics.send("get_ordered_certificates_resolution_error", "counter", 1)
order_url = entry["order"].uri
error = f"{e}. Order URI: {order_url}"
current_app.logger.error(
f"Unable to resolve pending cert: {entry['pending_cert']}. "
f"Check out {order_url} for more information.",
exc_info=True,
)
certs.append(
{
"cert": False,
"pending_cert": entry["pending_cert"],
"last_error": error,
}
)
# Ensure DNS records get deleted
self.acme_dns_challenge.cleanup(
entry["authorizations"], entry["acme_client"]
)
return certs
def create_certificate(self, csr, issuer_options):
"""
Creates an ACME certificate using the DNS-01 challenge.
:param csr:
:param issuer_options:
:return: :raise Exception:
"""
acme_dns_challenge = AcmeDnsChallenge()
return acme_dns_challenge.create_certificate(csr, issuer_options)
@staticmethod
def create_authority(options):
"""
Creates an authority; this authority is then used by Lemur to allow a user
to specify which Certificate Authority they want to sign their certificate.
:param options:
:return:
"""
role = {"username": "", "password": "", "name": "acme"}
plugin_options = options.get("plugin", {}).get("plugin_options")
if not plugin_options:
error = "Invalid options for lemur_acme plugin: {}".format(options)
current_app.logger.error(error)
raise InvalidConfiguration(error)
# Define static acme_root based on the configuration variable by default. However, if the user has passed a
# certificate, use this certificate as the root.
acme_root = current_app.config.get("ACME_ROOT")
for option in plugin_options:
if option.get("name") == "certificate":
acme_root = option.get("value")
return acme_root, "", [role]
def cancel_ordered_certificate(self, pending_cert, **kwargs):
# Needed to override issuer function.
pass
def revoke_certificate(self, certificate, reason):
self.acme = AcmeDnsHandler()
crl_reason = CRLReason.unspecified
if "crl_reason" in reason:
crl_reason = CRLReason[reason["crl_reason"]]
return self.acme.revoke_certificate(certificate, crl_reason.value)
class ACMEHttpIssuerPlugin(IssuerPlugin):
title = "Acme HTTP-01"
slug = "acme-http-issuer"
description = (
"Enables the creation of certificates via ACME CAs (including Let's Encrypt), using the HTTP-01 challenge"
)
version = acme.VERSION
author = "Netflix"
author_url = "https://github.com/netflix/lemur.git"
options = [
{
"name": "acme_url",
"type": "str",
"required": True,
"validation": r"/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/",
"helpMessage": "Must be a valid web url starting with http[s]://",
},
{
"name": "telephone",
"type": "str",
"default": "",
"helpMessage": "Telephone to use",
},
{
"name": "email",
"type": "str",
"default": "",
"validation": r"/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/",
"helpMessage": "Email to use",
},
{
"name": "certificate",
"type": "textarea",
"default": "",
"validation": "/^-----BEGIN CERTIFICATE-----/",
"helpMessage": "Certificate to use",
},
{
"name": "store_account",
"type": "bool",
"required": False,
"helpMessage": "Disable to create a new account for each ACME request",
"default": False,
},
{
"name": "tokenDestination",
"type": "destinationSelect",
"required": True,
"helpMessage": "The destination to use to deploy the token.",
},
]
def __init__(self, *args, **kwargs):
super(ACMEHttpIssuerPlugin, self).__init__(*args, **kwargs)
def create_certificate(self, csr, issuer_options):
"""
Creates an ACME certificate using the HTTP-01 challenge.
:param csr:
:param issuer_options:
:return: :raise Exception:
"""
acme_http_challenge = AcmeHttpChallenge()
return acme_http_challenge.create_certificate(csr, issuer_options)
@staticmethod
def create_authority(options):
"""
Creates an authority; this authority is then used by Lemur to allow a user
to specify which Certificate Authority they want to sign their certificate.
:param options:
:return:
"""
role = {"username": "", "password": "", "name": "acme"}
plugin_options = options.get("plugin", {}).get("plugin_options")
if not plugin_options:
error = "Invalid options for lemur_acme plugin: {}".format(options)
current_app.logger.error(error)
raise InvalidConfiguration(error)
# Define static acme_root based on the configuration variable by default. However, if the user has passed a
# certificate, use this certificate as the root.
acme_root = current_app.config.get("ACME_ROOT")
for option in plugin_options:
if option.get("name") == "certificate":
acme_root = option.get("value")
return acme_root, "", [role]
def cancel_ordered_certificate(self, pending_cert, **kwargs):
# Needed to override issuer function.
pass
def revoke_certificate(self, certificate, reason):
self.acme = AcmeHandler()
crl_reason = CRLReason.unspecified
if "crl_reason" in reason:
crl_reason = CRLReason[reason["crl_reason"]]
return self.acme.revoke_certificate(certificate, crl_reason.value)
|
import unittest
from unittest.mock import patch, Mock
from flask import Flask
from lemur.plugins.lemur_acme import plugin, ultradns
from requests.models import Response
class TestUltradns(unittest.TestCase):
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
def setUp(self, mock_dns_provider_service):
self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin()
self.acme = plugin.AcmeHandler()
mock_dns_provider = Mock()
mock_dns_provider.name = "cloudflare"
mock_dns_provider.credentials = "{}"
mock_dns_provider.provider_type = "cloudflare"
self.acme.dns_providers_for_domain = {
"www.test.com": [mock_dns_provider],
"test.fakedomain.net": [mock_dns_provider],
}
# Creates a new Flask application for the duration of a test. In Python 3.8, a manual push of the application
# context is needed to run tests in a dev environment without getting the error 'Working outside of application context'.
_app = Flask('lemur_test_acme')
self.ctx = _app.app_context()
assert self.ctx
self.ctx.push()
def tearDown(self):
self.ctx.pop()
@patch("lemur.plugins.lemur_acme.ultradns.requests")
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
def test_ultradns_get_token(self, mock_current_app, mock_requests):
# ret_val = json.dumps({"access_token": "access"})
the_response = Response()
the_response._content = b'{"access_token": "access"}'
mock_requests.post = Mock(return_value=the_response)
mock_current_app.config.get = Mock(return_value="Test")
result = ultradns.get_ultradns_token()
self.assertTrue(len(result) > 0)
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
def test_ultradns_create_txt_record(self, mock_current_app):
domain = "_acme_challenge.test.example.com"
zone = "test.example.com"
token = "ABCDEFGHIJ"
account_number = "1234567890"
change_id = (domain, token)
ultradns.get_zone_name = Mock(return_value=zone)
mock_current_app.logger.debug = Mock()
ultradns._post = Mock()
log_data = {
"function": "create_txt_record",
"fqdn": domain,
"token": token,
"message": "TXT record created"
}
result = ultradns.create_txt_record(domain, token, account_number)
mock_current_app.logger.debug.assert_called_with(log_data)
self.assertEqual(result, change_id)
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
@patch("lemur.extensions.metrics")
def test_ultradns_delete_txt_record(self, mock_metrics, mock_current_app):
domain = "_acme_challenge.test.example.com"
zone = "test.example.com"
token = "ABCDEFGHIJ"
account_number = "1234567890"
change_id = (domain, token)
mock_current_app.logger.debug = Mock()
ultradns.get_zone_name = Mock(return_value=zone)
ultradns._post = Mock()
ultradns._get = Mock()
ultradns._get.return_value = {'zoneName': 'test.example.com.com',
'rrSets': [{'ownerName': '_acme-challenge.test.example.com.',
'rrtype': 'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}],
'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100},
'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}}
ultradns._delete = Mock()
mock_metrics.send = Mock()
ultradns.delete_txt_record(change_id, account_number, domain, token)
mock_current_app.logger.debug.assert_not_called()
mock_metrics.send.assert_not_called()
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
@patch("lemur.extensions.metrics")
def test_ultradns_wait_for_dns_change(self, mock_metrics, mock_current_app):
ultradns._has_dns_propagated = Mock(return_value=True)
nameserver = "1.1.1.1"
ultradns.get_authoritative_nameserver = Mock(return_value=nameserver)
mock_metrics.send = Mock()
domain = "_acme-challenge.test.example.com"
token = "ABCDEFGHIJ"
change_id = (domain, token)
mock_current_app.logger.debug = Mock()
ultradns.wait_for_dns_change(change_id)
# mock_metrics.send.assert_not_called()
log_data = {
"function": "wait_for_dns_change",
"fqdn": domain,
"status": True,
"message": "Record status on Public DNS"
}
mock_current_app.logger.debug.assert_called_with(log_data)
def test_ultradns_get_zone_name(self):
zones = ['example.com', 'test.example.com']
zone = "test.example.com"
domain = "_acme-challenge.test.example.com"
account_number = "1234567890"
ultradns.get_zones = Mock(return_value=zones)
result = ultradns.get_zone_name(domain, account_number)
self.assertEqual(result, zone)
def test_ultradns_get_zones(self):
account_number = "1234567890"
path = "a/b/c"
zones = ['example.com', 'test.example.com']
paginate_response = [{
'properties': {
'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY',
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
'lastModifiedDateTime': '2017-06-14T06:45Z'},
'registrarInfo': {
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
'example.ultradns.biz.', 'example.ultradns.org.']}},
'inherit': 'ALL'}, {
'properties': {
'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY',
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
'lastModifiedDateTime': '2017-06-14T06:45Z'},
'registrarInfo': {
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
'example.ultradns.biz.', 'example.ultradns.org.']}},
'inherit': 'ALL'}, {
'properties': {
'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY',
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
'lastModifiedDateTime': '2017-06-14T06:45Z'},
'registrarInfo': {
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
'example.ultradns.biz.', 'example.ultradns.org.']}},
'inherit': 'ALL'}]
ultradns._paginate = Mock(path, "zones")
ultradns._paginate.side_effect = [[paginate_response]]
result = ultradns.get_zones(account_number)
self.assertEqual(result, zones)
|
from datetime import timedelta
from django.test.utils import override_settings
from django.utils import timezone
from weblate.trans.models import Comment, Suggestion
from weblate.trans.tasks import (
cleanup_old_comments,
cleanup_old_suggestions,
cleanup_suggestions,
daily_update_checks,
)
from weblate.trans.tests.test_views import ViewTestCase
from weblate.utils.state import STATE_TRANSLATED
class CleanupTest(ViewTestCase):
def test_cleanup_suggestions_case_sensitive(self):
request = self.get_request()
unit = self.get_unit()
# Add two suggestions
Suggestion.objects.add(unit, "Zkouška\n", request)
Suggestion.objects.add(unit, "zkouška\n", request)
# This should be ignored
Suggestion.objects.add(unit, "zkouška\n", request)
self.assertEqual(len(self.get_unit().suggestions), 2)
# Perform cleanup, no suggestions should be deleted
cleanup_suggestions()
self.assertEqual(len(self.get_unit().suggestions), 2)
# Translate string to one of suggestions
unit.translate(self.user, "zkouška\n", STATE_TRANSLATED)
# The cleanup should remove one
cleanup_suggestions()
self.assertEqual(len(self.get_unit().suggestions), 1)
def test_cleanup_suggestions_duplicate(self):
request = self.get_request()
unit = self.get_unit()
# Add two suggestions
Suggestion.objects.add(unit, "Zkouška", request)
Suggestion.objects.add(unit, "zkouška", request)
self.assertEqual(len(self.get_unit().suggestions), 2)
# Perform cleanup, no suggestions should be deleted
cleanup_suggestions()
self.assertEqual(len(self.get_unit().suggestions), 2)
# Create two suggestions with same target
unit.suggestions.update(target="zkouška")
# The cleanup should remove one
cleanup_suggestions()
self.assertEqual(len(self.get_unit().suggestions), 1)
def test_cleanup_old_suggestions(self, expected=2):
request = self.get_request()
unit = self.get_unit()
Suggestion.objects.add(unit, "Zkouška", request)
Suggestion.objects.all().update(timestamp=timezone.now() - timedelta(days=30))
Suggestion.objects.add(unit, "Zkouška 2", request)
cleanup_old_suggestions()
self.assertEqual(Suggestion.objects.count(), expected)
@override_settings(SUGGESTION_CLEANUP_DAYS=15)
def test_cleanup_old_suggestions_enabled(self):
self.test_cleanup_old_suggestions(1)
def test_cleanup_old_comments(self, expected=2):
request = self.get_request()
unit = self.get_unit()
Comment.objects.add(unit.source_unit, request, "Zkouška")
Comment.objects.all().update(timestamp=timezone.now() - timedelta(days=30))
Comment.objects.add(unit.source_unit, request, "Zkouška 2")
cleanup_old_comments()
self.assertEqual(Comment.objects.count(), expected)
@override_settings(COMMENT_CLEANUP_DAYS=15)
def test_cleanup_old_comments_enabled(self):
self.test_cleanup_old_comments(1)
class TasksTest(ViewTestCase):
def test_daily_update_checks(self):
daily_update_checks()
|
import logging
import subprocess
import voluptuous as vol
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
from homeassistant.const import CONF_COMMAND, CONF_NAME
import homeassistant.helpers.config_validation as cv
from .const import CONF_COMMAND_TIMEOUT, DEFAULT_TIMEOUT
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_COMMAND): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the Command Line notification service."""
command = config[CONF_COMMAND]
timeout = config[CONF_COMMAND_TIMEOUT]
return CommandLineNotificationService(command, timeout)
class CommandLineNotificationService(BaseNotificationService):
"""Implement the notification service for the Command Line service."""
def __init__(self, command, timeout):
"""Initialize the service."""
self.command = command
self._timeout = timeout
def send_message(self, message="", **kwargs):
"""Send a message to a command line."""
try:
proc = subprocess.Popen(
self.command,
universal_newlines=True,
stdin=subprocess.PIPE,
shell=True, # nosec # shell by design
)
proc.communicate(input=message, timeout=self._timeout)
if proc.returncode != 0:
_LOGGER.error("Command failed: %s", self.command)
except subprocess.TimeoutExpired:
_LOGGER.error("Timeout for command: %s", self.command)
except subprocess.SubprocessError:
_LOGGER.error("Error trying to exec command: %s", self.command)
|
import functools as ft
import logging
from homeassistant.components import remote
from homeassistant.const import CONF_DEVICE, CONF_NAME
from homeassistant.helpers.entity import Entity
DOMAIN = "kira"
_LOGGER = logging.getLogger(__name__)
CONF_REMOTE = "remote"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Kira platform."""
if discovery_info:
name = discovery_info.get(CONF_NAME)
device = discovery_info.get(CONF_DEVICE)
kira = hass.data[DOMAIN][CONF_REMOTE][name]
add_entities([KiraRemote(device, kira)])
return True
class KiraRemote(Entity):
"""Remote representation used to send commands to a Kira device."""
def __init__(self, name, kira):
"""Initialize KiraRemote class."""
_LOGGER.debug("KiraRemote device init started for: %s", name)
self._name = name
self._kira = kira
@property
def name(self):
"""Return the Kira device's name."""
return self._name
def update(self):
"""No-op."""
def send_command(self, command, **kwargs):
"""Send a command to one device."""
for single_command in command:
code_tuple = (single_command, kwargs.get(remote.ATTR_DEVICE))
_LOGGER.info("Sending Command: %s to %s", *code_tuple)
self._kira.sendCode(code_tuple)
async def async_send_command(self, command, **kwargs):
"""Send a command to a device."""
return await self.hass.async_add_executor_job(
ft.partial(self.send_command, command, **kwargs)
)
|
from __future__ import unicode_literals
import os
import sys
import inspect
import codecs
import re
import copy
import argparse
import warnings
import tempfile
from multiprocessing import Pool, cpu_count
from collections import defaultdict, Counter
# hack for python2/3 compatibility
from io import open
argparse.open = open
def create_parser(subparsers=None):
if subparsers:
parser = subparsers.add_parser('learn-bpe',
formatter_class=argparse.RawDescriptionHelpFormatter,
description="learn BPE-based word segmentation")
else:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="learn BPE-based word segmentation")
parser.add_argument(
'--input', '-i', type=argparse.FileType('r'), default=sys.stdin,
metavar='PATH',
help="Input text (default: standard input).")
parser.add_argument(
'--output', '-o', type=argparse.FileType('w'), default=sys.stdout,
metavar='PATH',
help="Output file for BPE codes (default: standard output)")
parser.add_argument(
'--symbols', '-s', type=int, default=10000,
help="Create this many new symbols (each representing a character n-gram) (default: %(default)s)")
parser.add_argument(
'--min-frequency', type=int, default=2, metavar='FREQ',
help='Stop if no symbol pair has frequency >= FREQ (default: %(default)s)')
parser.add_argument('--dict-input', action="store_true",
help="If set, input file is interpreted as a dictionary where each line contains a word-count pair")
parser.add_argument(
'--total-symbols', '-t', action="store_true",
help="subtract number of characters from the symbols to be generated (so that '--symbols' becomes an estimate for the total number of symbols needed to encode text).")
parser.add_argument(
'--num-workers', type=int, default=1,
help="Number of processors to process texts, only supported in Python3. If -1, set `multiprocessing.cpu_count()`. (default: %(default)s)")
parser.add_argument(
'--verbose', '-v', action="store_true",
help="verbose mode.")
return parser
def get_vocabulary(fobj, is_dict=False, num_workers=1):
"""Read text and return dictionary that encodes vocabulary
"""
vocab = Counter()
if is_dict:
for i, line in enumerate(fobj):
try:
word, count = line.strip('\r\n ').split(' ')
except:
print('Failed reading vocabulary file at line {0}: {1}'.format(i, line))
sys.exit(1)
vocab[word] += int(count)
elif num_workers == 1 or fobj.name == '<stdin>':
if num_workers > 1:
warnings.warn("In parallel mode, the input cannot be STDIN. Using 1 processor instead.")
for i, line in enumerate(fobj):
for word in line.strip('\r\n ').split(' '):
if word:
vocab[word] += 1
elif num_workers > 1:
if sys.version_info < (3, 0):
print("Parallel mode is only supported in Python3.")
sys.exit(1)
with open(fobj.name, encoding="utf8") as f:
size = os.fstat(f.fileno()).st_size
chunk_size = int(size / num_workers)
offsets = [0 for _ in range(num_workers + 1)]
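# split the file into roughly equal byte ranges, then move each boundary forward to the start of the next full line
# (backing up one byte at a time if the seek lands inside a multi-byte UTF-8 character)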
for i in range(1, num_workers):
f.seek(chunk_size * i)
pos = f.tell()
while True:
try:
line = f.readline()
break
except UnicodeDecodeError:
pos -= 1
f.seek(pos)
offsets[i] = f.tell()
assert 0 <= offsets[i] < 1e20, "Bad new line separator, e.g. '\\r'"
vocab_files = []
pool = Pool(processes=num_workers)
for i in range(num_workers):
tmp = tempfile.NamedTemporaryFile(delete=False)
tmp.close()
vocab_files.append(tmp)
pool.apply_async(_get_vocabulary, (fobj.name, tmp.name, offsets[i], offsets[i + 1]))
pool.close()
pool.join()
import pickle
for i in range(num_workers):
with open(vocab_files[i].name, 'rb') as f:
vocab += pickle.load(f)
os.remove(vocab_files[i].name)
else:
raise ValueError('`num_workers` is expected to be a positive number, but got {}.'.format(num_workers))
return vocab
def _get_vocabulary(infile, outfile, begin, end):
import pickle
vocab = Counter()
with open(infile, encoding="utf8") as f:
f.seek(begin)
line = f.readline()
while line:
pos = f.tell()
assert 0 <= pos < 1e20, "Bad new line separator, e.g. '\\r'"
if end > 0 and pos > end:
break
for word in line.strip('\r\n ').split(' '):
if word:
vocab[word] += 1
line = f.readline()
with open(outfile, 'wb') as f:
pickle.dump(vocab, f)
def update_pair_statistics(pair, changed, stats, indices):
"""Minimally update the indices and frequency of symbol pairs
if we merge a pair of symbols, only pairs that overlap with occurrences
of this pair are affected, and need to be updated.
"""
stats[pair] = 0
indices[pair] = defaultdict(int)
first, second = pair
new_pair = first+second
for j, word, old_word, freq in changed:
# find all instances of pair, and update frequency/indices around it
i = 0
while True:
# find first symbol
try:
i = old_word.index(first, i)
except ValueError:
break
# if first symbol is followed by second symbol, we've found an occurrence of pair (old_word[i:i+2])
if i < len(old_word)-1 and old_word[i+1] == second:
# assuming a symbol sequence "A B C", if "B C" is merged, reduce the frequency of "A B"
if i:
prev = old_word[i-1:i+1]
stats[prev] -= freq
indices[prev][j] -= 1
if i < len(old_word)-2:
# assuming a symbol sequence "A B C B", if "B C" is merged, reduce the frequency of "C B".
# however, skip this if the sequence is A B C B C, because the frequency of "C B" will be reduced by the previous code block
if old_word[i+2] != first or i >= len(old_word)-3 or old_word[i+3] != second:
nex = old_word[i+1:i+3]
stats[nex] -= freq
indices[nex][j] -= 1
i += 2
else:
i += 1
i = 0
while True:
try:
# find new pair
i = word.index(new_pair, i)
except ValueError:
break
# assuming a symbol sequence "A BC D", if "B C" is merged, increase the frequency of "A BC"
if i:
prev = word[i-1:i+1]
stats[prev] += freq
indices[prev][j] += 1
# assuming a symbol sequence "A BC B", if "B C" is merged, increase the frequency of "BC B"
# however, if the sequence is A BC BC, skip this step because the count of "BC BC" will be incremented by the previous code block
if i < len(word)-1 and word[i+1] != new_pair:
nex = word[i:i+2]
stats[nex] += freq
indices[nex][j] += 1
i += 1
def get_pair_statistics(vocab):
"""Count frequency of all symbol pairs, and create index"""
# data structure of pair frequencies
stats = defaultdict(int)
#index from pairs to words
indices = defaultdict(lambda: defaultdict(int))
for i, (word, freq) in enumerate(vocab):
prev_char = word[0]
for char in word[1:]:
stats[prev_char, char] += freq
indices[prev_char, char][i] += 1
prev_char = char
return stats, indices
def replace_pair(pair, vocab, indices):
"""Replace all occurrences of a symbol pair ('A', 'B') with a new symbol 'AB'"""
first, second = pair
pair_str = ''.join(pair)
pair_str = pair_str.replace('\\','\\\\')
changes = []
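# (?<!\S) / (?!\S) act as whitespace boundaries, so the pair is only replaced where it appears as two whole symbols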
pattern = re.compile(r'(?<!\S)' + re.escape(first + ' ' + second) + r'(?!\S)')
if sys.version_info < (3, 0):
iterator = indices[pair].iteritems()
else:
iterator = indices[pair].items()
for j, freq in iterator:
if freq < 1:
continue
word, freq = vocab[j]
new_word = ' '.join(word)
new_word = pattern.sub(pair_str, new_word)
new_word = tuple(new_word.split(' '))
vocab[j] = (new_word, freq)
changes.append((j, new_word, word, freq))
return changes
def prune_stats(stats, big_stats, threshold):
"""Prune statistics dict for efficiency of max()
The frequency of a symbol pair never increases, so pruning is generally safe
(until the most frequent pair is less frequent than a pair we previously pruned)
big_stats keeps full statistics for when we need to access pruned items
"""
for item,freq in list(stats.items()):
if freq < threshold:
del stats[item]
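# a negative count is a delta accumulated after the pair was pruned from stats; fold it into the full statistics,
# otherwise the count is current and simply overwrites the stored value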
if freq < 0:
big_stats[item] += freq
else:
big_stats[item] = freq
def learn_bpe(infile, outfile, num_symbols, min_frequency=2, verbose=False, is_dict=False, total_symbols=False, num_workers=1):
"""Learn num_symbols BPE operations from vocabulary, and write to outfile.
"""
# version 0.2 changes the handling of the end-of-word token ('</w>');
# version numbering allows backward compatibility
outfile.write('#version: 0.2\n')
vocab = get_vocabulary(infile, is_dict, num_workers)
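# split each word into a tuple of characters and mark the final character with the end-of-word token '</w>'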
vocab = dict([(tuple(x[:-1])+(x[-1]+'</w>',) ,y) for (x,y) in vocab.items()])
sorted_vocab = sorted(vocab.items(), key=lambda x: x[1], reverse=True)
stats, indices = get_pair_statistics(sorted_vocab)
big_stats = copy.deepcopy(stats)
if total_symbols:
uniq_char_internal = set()
uniq_char_final = set()
for word in vocab:
for char in word[:-1]:
uniq_char_internal.add(char)
uniq_char_final.add(word[-1])
sys.stderr.write('Number of word-internal characters: {0}\n'.format(len(uniq_char_internal)))
sys.stderr.write('Number of word-final characters: {0}\n'.format(len(uniq_char_final)))
sys.stderr.write('Reducing number of merge operations by {0}\n'.format(len(uniq_char_internal) + len(uniq_char_final)))
num_symbols -= len(uniq_char_internal) + len(uniq_char_final)
# threshold is inspired by Zipfian assumption, but should only affect speed
threshold = max(stats.values()) / 10
for i in range(num_symbols):
if stats:
most_frequent = max(stats, key=lambda x: (stats[x], x))
# we probably missed the best pair because of pruning; go back to full statistics
if not stats or (i and stats[most_frequent] < threshold):
prune_stats(stats, big_stats, threshold)
stats = copy.deepcopy(big_stats)
most_frequent = max(stats, key=lambda x: (stats[x], x))
# threshold is inspired by Zipfian assumption, but should only affect speed
threshold = stats[most_frequent] * i/(i+10000.0)
prune_stats(stats, big_stats, threshold)
if stats[most_frequent] < min_frequency:
sys.stderr.write('no pair has frequency >= {0}. Stopping\n'.format(min_frequency))
break
if verbose:
sys.stderr.write('pair {0}: {1} {2} -> {1}{2} (frequency {3})\n'.format(i, most_frequent[0], most_frequent[1], stats[most_frequent]))
outfile.write('{0} {1}\n'.format(*most_frequent))
changes = replace_pair(most_frequent, sorted_vocab, indices)
update_pair_statistics(most_frequent, changes, stats, indices)
stats[most_frequent] = 0
if not i % 100:
prune_stats(stats, big_stats, threshold)
if __name__ == '__main__':
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
newdir = os.path.join(currentdir, 'subword_nmt')
if os.path.isdir(newdir):
warnings.simplefilter('default')
warnings.warn(
"this script's location has moved to {0}. This symbolic link will be removed in a future version. Please point to the new location, or install the package and use the command 'subword-nmt'".format(newdir),
DeprecationWarning
)
# python 2/3 compatibility
if sys.version_info < (3, 0):
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin)
else:
sys.stderr = codecs.getwriter('UTF-8')(sys.stderr.buffer)
sys.stdout = codecs.getwriter('UTF-8')(sys.stdout.buffer)
sys.stdin = codecs.getreader('UTF-8')(sys.stdin.buffer)
parser = create_parser()
args = parser.parse_args()
if args.num_workers <= 0:
args.num_workers = cpu_count()
if sys.version_info < (3, 0) and args.num_workers > 1:
args.num_workers = 1
warnings.warn("Parallel mode is only supported in Python3. Using 1 processor instead.")
# read/write files as UTF-8
if args.input.name != '<stdin>':
args.input = codecs.open(args.input.name, encoding='utf-8')
if args.output.name != '<stdout>':
args.output = codecs.open(args.output.name, 'w', encoding='utf-8')
learn_bpe(args.input, args.output, args.symbols, args.min_frequency, args.verbose, is_dict=args.dict_input, total_symbols=args.total_symbols, num_workers=args.num_workers)
|
import logging
from august.authenticator import ValidationResult
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME
from .const import (
CONF_LOGIN_METHOD,
DEFAULT_TIMEOUT,
LOGIN_METHODS,
VERIFICATION_CODE_KEY,
)
from .const import DOMAIN # pylint:disable=unused-import
from .exceptions import CannotConnect, InvalidAuth, RequireValidation
from .gateway import AugustGateway
_LOGGER = logging.getLogger(__name__)
async def async_validate_input(
data,
august_gateway,
):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
Request configuration steps from the user.
"""
code = data.get(VERIFICATION_CODE_KEY)
if code is not None:
result = await august_gateway.authenticator.async_validate_verification_code(
code
)
_LOGGER.debug("Verification code validation: %s", result)
if result != ValidationResult.VALIDATED:
raise RequireValidation
try:
await august_gateway.async_authenticate()
except RequireValidation:
_LOGGER.debug(
"Requesting new verification code for %s via %s",
data.get(CONF_USERNAME),
data.get(CONF_LOGIN_METHOD),
)
if code is None:
await august_gateway.authenticator.async_send_verification_code()
raise
return {
"title": data.get(CONF_USERNAME),
"data": august_gateway.config_entry(),
}
class AugustConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for August."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Store an AugustGateway()."""
self._august_gateway = None
self.user_auth_details = {}
self._needs_reset = False
super().__init__()
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
if self._august_gateway is None:
self._august_gateway = AugustGateway(self.hass)
errors = {}
if user_input is not None:
combined_inputs = {**self.user_auth_details, **user_input}
await self._august_gateway.async_setup(combined_inputs)
if self._needs_reset:
self._needs_reset = False
await self._august_gateway.async_reset_authentication()
try:
info = await async_validate_input(
combined_inputs,
self._august_gateway,
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except RequireValidation:
self.user_auth_details.update(user_input)
return await self.async_step_validation()
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
self.user_auth_details.update(user_input)
existing_entry = await self.async_set_unique_id(
combined_inputs[CONF_USERNAME]
)
if existing_entry:
self.hass.config_entries.async_update_entry(
existing_entry, data=info["data"]
)
await self.hass.config_entries.async_reload(existing_entry.entry_id)
return self.async_abort(reason="reauth_successful")
return self.async_create_entry(title=info["title"], data=info["data"])
return self.async_show_form(
step_id="user", data_schema=self._async_build_schema(), errors=errors
)
async def async_step_validation(self, user_input=None):
"""Handle validation (2fa) step."""
if user_input:
return await self.async_step_user({**self.user_auth_details, **user_input})
return self.async_show_form(
step_id="validation",
data_schema=vol.Schema(
{vol.Required(VERIFICATION_CODE_KEY): vol.All(str, vol.Strip)}
),
description_placeholders={
CONF_USERNAME: self.user_auth_details.get(CONF_USERNAME),
CONF_LOGIN_METHOD: self.user_auth_details.get(CONF_LOGIN_METHOD),
},
)
async def async_step_import(self, user_input):
"""Handle import."""
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
return await self.async_step_user(user_input)
async def async_step_reauth(self, data):
"""Handle configuration by re-auth."""
self.user_auth_details = dict(data)
self._needs_reset = True
return await self.async_step_user()
def _async_build_schema(self):
"""Generate the config flow schema."""
base_schema = {
vol.Required(CONF_LOGIN_METHOD, default="phone"): vol.In(LOGIN_METHODS),
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(int),
}
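# drop fields that were already collected so the form only asks for what is missing; the password is always re-prompted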
for key in self.user_auth_details:
if key == CONF_PASSWORD or key not in base_schema:
continue
del base_schema[key]
return vol.Schema(base_schema)
|
import os
import sh
from molecule import logger
from molecule import util
from molecule.dependency import base
LOG = logger.get_logger(__name__)
class Gilt(base.Base):
"""
`Gilt`_ is an alternate dependency manager.
Additional options can be passed to ``gilt overlay`` through the options
dict. Any option set in this section will override the defaults.
.. code-block:: yaml
dependency:
name: gilt
options:
debug: True
The dependency manager can be disabled by setting ``enabled`` to False.
.. code-block:: yaml
dependency:
name: gilt
enabled: False
Environment variables can be passed to the dependency.
.. code-block:: yaml
dependency:
name: gilt
env:
FOO: bar
.. _`Gilt`: https://gilt.readthedocs.io
"""
def __init__(self, config):
super(Gilt, self).__init__(config)
self._sh_command = None
self.command = 'gilt'
@property
def default_options(self):
config = os.path.join(self._config.scenario.directory, 'gilt.yml')
d = {'config': config}
if self._config.debug:
d['debug'] = True
return d
@property
def default_env(self):
return util.merge_dicts(os.environ.copy(), self._config.env)
def bake(self):
"""
Bake a ``gilt`` command so it's ready to execute and return None.
:return: None
"""
self._sh_command = getattr(sh, self.command)
self._sh_command = self._sh_command.bake(
self.options,
'overlay',
_env=self.env,
_out=LOG.out,
_err=LOG.error)
def execute(self):
if not self.enabled:
msg = 'Skipping, dependency is disabled.'
LOG.warn(msg)
return
if not self._has_requirements_file():
msg = 'Skipping, missing the requirements file.'
LOG.warn(msg)
return
if self._sh_command is None:
self.bake()
try:
util.run_command(self._sh_command, debug=self._config.debug)
msg = 'Dependency completed successfully.'
LOG.success(msg)
except sh.ErrorReturnCode as e:
util.sysexit(e.exit_code)
def _config_file(self):
return self.options.get('config')
def _has_requirements_file(self):
return os.path.isfile(self._config_file())
|
import pyvera as pv
from homeassistant.components.light import ATTR_BRIGHTNESS, ATTR_HS_COLOR
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, new_simple_controller_config
from tests.async_mock import MagicMock
async def test_light(
hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
"""Test function."""
vera_device = MagicMock(spec=pv.VeraDimmer) # type: pv.VeraDimmer
vera_device.device_id = 1
vera_device.vera_device_id = vera_device.device_id
vera_device.name = "dev1"
vera_device.category = pv.CATEGORY_DIMMER
vera_device.is_switched_on = MagicMock(return_value=False)
vera_device.get_brightness = MagicMock(return_value=0)
vera_device.get_color = MagicMock(return_value=[0, 0, 0])
vera_device.is_dimmable = True
entity_id = "light.dev1_1"
component_data = await vera_component_factory.configure_component(
hass=hass,
controller_config=new_simple_controller_config(devices=(vera_device,)),
)
update_callback = component_data.controller_data[0].update_callback
assert hass.states.get(entity_id).state == "off"
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.switch_on.assert_called()
vera_device.is_switched_on.return_value = True
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "on"
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": entity_id, ATTR_HS_COLOR: [300, 70]},
)
await hass.async_block_till_done()
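# the requested hs_color (300, 70) corresponds to RGB (255, 76, 255)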
vera_device.set_color.assert_called_with((255, 76, 255))
vera_device.is_switched_on.return_value = True
vera_device.get_color.return_value = (255, 76, 255)
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "on"
assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 70.196)
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": entity_id, ATTR_BRIGHTNESS: 55},
)
await hass.async_block_till_done()
vera_device.set_brightness.assert_called_with(55)
vera_device.is_switched_on.return_value = True
vera_device.get_brightness.return_value = 55
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "on"
assert hass.states.get(entity_id).attributes["brightness"] == 55
await hass.services.async_call(
"light",
"turn_off",
{"entity_id": entity_id},
)
await hass.async_block_till_done()
vera_device.switch_off.assert_called()
vera_device.is_switched_on.return_value = False
update_callback(vera_device)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "off"
|
import asyncio
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Dict, Optional
import voluptuous as vol
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.catalog.const import SYSTEM_PFN_ID_MAP
from xbox.webapi.api.provider.catalog.models import AlternateIdType, Product
from xbox.webapi.api.provider.people.models import (
PeopleResponse,
Person,
PresenceDetail,
)
from xbox.webapi.api.provider.smartglass.models import (
SmartglassConsoleList,
SmartglassConsoleStatus,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import HomeAssistant
from homeassistant.helpers import (
aiohttp_client,
config_entry_oauth2_flow,
config_validation as cv,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import api, config_flow
from .const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_CLIENT_ID): cv.string,
vol.Required(CONF_CLIENT_SECRET): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
PLATFORMS = ["media_player", "remote", "binary_sensor", "sensor"]
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the xbox component."""
hass.data[DOMAIN] = {}
if DOMAIN not in config:
return True
config_flow.OAuth2FlowHandler.async_register_implementation(
hass,
config_entry_oauth2_flow.LocalOAuth2Implementation(
hass,
DOMAIN,
config[DOMAIN][CONF_CLIENT_ID],
config[DOMAIN][CONF_CLIENT_SECRET],
OAUTH2_AUTHORIZE,
OAUTH2_TOKEN,
),
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up xbox from a config entry."""
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
auth = api.AsyncConfigEntryAuth(
aiohttp_client.async_get_clientsession(hass), session
)
client = XboxLiveClient(auth)
consoles: SmartglassConsoleList = await client.smartglass.get_console_list()
_LOGGER.debug(
"Found %d consoles: %s",
len(consoles.result),
consoles.dict(),
)
coordinator = XboxUpdateCoordinator(hass, client, consoles)
await coordinator.async_refresh()
hass.data[DOMAIN][entry.entry_id] = {
"client": XboxLiveClient(auth),
"consoles": consoles,
"coordinator": coordinator,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
# Unsub from coordinator updates
hass.data[DOMAIN][entry.entry_id]["sensor_unsub"]()
hass.data[DOMAIN][entry.entry_id]["binary_sensor_unsub"]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
@dataclass
class ConsoleData:
"""Xbox console status data."""
status: SmartglassConsoleStatus
app_details: Optional[Product]
@dataclass
class PresenceData:
"""Xbox user presence data."""
xuid: str
gamertag: str
display_pic: str
online: bool
status: str
in_party: bool
in_game: bool
in_multiplayer: bool
gamer_score: str
gold_tenure: Optional[str]
account_tier: str
@dataclass
class XboxData:
"""Xbox dataclass for update coordinator."""
consoles: Dict[str, ConsoleData]
presence: Dict[str, PresenceData]
class XboxUpdateCoordinator(DataUpdateCoordinator):
"""Store Xbox Console Status."""
def __init__(
self,
hass: HomeAssistantType,
client: XboxLiveClient,
consoles: SmartglassConsoleList,
) -> None:
"""Initialize."""
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=timedelta(seconds=10),
)
self.data: XboxData = XboxData({}, [])
self.client: XboxLiveClient = client
self.consoles: SmartglassConsoleList = consoles
async def _async_update_data(self) -> XboxData:
"""Fetch the latest console status."""
# Update Console Status
new_console_data: Dict[str, ConsoleData] = {}
for console in self.consoles.result:
current_state: Optional[ConsoleData] = self.data.consoles.get(console.id)
status: SmartglassConsoleStatus = (
await self.client.smartglass.get_console_status(console.id)
)
_LOGGER.debug(
"%s status: %s",
console.name,
status.dict(),
)
# Setup focus app
app_details: Optional[Product] = None
if current_state is not None:
app_details = current_state.app_details
if status.focus_app_aumid:
if (
not current_state
or status.focus_app_aumid != current_state.status.focus_app_aumid
):
app_id = status.focus_app_aumid.split("!")[0]
id_type = AlternateIdType.PACKAGE_FAMILY_NAME
if app_id in SYSTEM_PFN_ID_MAP:
id_type = AlternateIdType.LEGACY_XBOX_PRODUCT_ID
app_id = SYSTEM_PFN_ID_MAP[app_id][id_type]
catalog_result = (
await self.client.catalog.get_product_from_alternate_id(
app_id, id_type
)
)
if catalog_result and catalog_result.products:
app_details = catalog_result.products[0]
else:
app_details = None
new_console_data[console.id] = ConsoleData(
status=status, app_details=app_details
)
# Update user presence
presence_data = {}
batch: PeopleResponse = await self.client.people.get_friends_own_batch(
[self.client.xuid]
)
own_presence: Person = batch.people[0]
presence_data[own_presence.xuid] = _build_presence_data(own_presence)
friends: PeopleResponse = await self.client.people.get_friends_own()
for friend in friends.people:
if not friend.is_favorite:
continue
presence_data[friend.xuid] = _build_presence_data(friend)
return XboxData(new_console_data, presence_data)
def _build_presence_data(person: Person) -> PresenceData:
"""Build presence data from a person."""
active_app: Optional[PresenceDetail] = None
try:
active_app = next(
presence for presence in person.presence_details if presence.is_primary
)
except StopIteration:
pass
return PresenceData(
xuid=person.xuid,
gamertag=person.gamertag,
display_pic=person.display_pic_raw,
online=person.presence_state == "Online",
status=person.presence_text,
in_party=person.multiplayer_summary.in_party > 0,
in_game=active_app and active_app.is_game,
in_multiplayer=person.multiplayer_summary.in_multiplayer_session,
gamer_score=person.gamer_score,
gold_tenure=person.detail.tenure,
account_tier=person.detail.account_tier,
)
|
import logging
from typing import Any, Callable, List, Optional
from bond_api import Action, DeviceType
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from . import BondHub
from .const import DOMAIN
from .entity import BondEntity
from .utils import BondDevice
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up Bond light devices."""
hub: BondHub = hass.data[DOMAIN][entry.entry_id]
fan_lights: List[Entity] = [
BondLight(hub, device)
for device in hub.devices
if DeviceType.is_fan(device.type) and device.supports_light()
]
fireplaces: List[Entity] = [
BondFireplace(hub, device)
for device in hub.devices
if DeviceType.is_fireplace(device.type)
]
fp_lights: List[Entity] = [
BondLight(hub, device, "light")
for device in hub.devices
if DeviceType.is_fireplace(device.type) and device.supports_light()
]
async_add_entities(fan_lights + fireplaces + fp_lights, True)
class BondLight(BondEntity, LightEntity):
"""Representation of a Bond light."""
def __init__(
self, hub: BondHub, device: BondDevice, sub_device: Optional[str] = None
):
"""Create HA entity representing Bond fan."""
super().__init__(hub, device, sub_device)
self._brightness: Optional[int] = None
self._light: Optional[int] = None
def _apply_state(self, state: dict):
self._light = state.get("light")
self._brightness = state.get("brightness")
@property
def supported_features(self) -> Optional[int]:
"""Flag supported features."""
features = 0
if self._device.supports_set_brightness():
features |= SUPPORT_BRIGHTNESS
return features
@property
def is_on(self) -> bool:
"""Return if light is currently on."""
return self._light == 1
@property
def brightness(self) -> int:
"""Return the brightness of this light between 1..255."""
brightness_value = (
round(self._brightness * 255 / 100) if self._brightness else None
)
return brightness_value
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
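# HA brightness is 0-255 while Bond expects a 0-100 percentage, hence the scaling below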
if brightness:
await self._hub.bond.action(
self._device.device_id,
Action.set_brightness(round((brightness * 100) / 255)),
)
else:
await self._hub.bond.action(self._device.device_id, Action.turn_light_on())
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
await self._hub.bond.action(self._device.device_id, Action.turn_light_off())
class BondFireplace(BondEntity, LightEntity):
"""Representation of a Bond-controlled fireplace."""
def __init__(self, hub: BondHub, device: BondDevice):
"""Create HA entity representing Bond fireplace."""
super().__init__(hub, device)
self._power: Optional[bool] = None
# Bond flame level, 0-100
self._flame: Optional[int] = None
def _apply_state(self, state: dict):
self._power = state.get("power")
self._flame = state.get("flame")
@property
def supported_features(self) -> Optional[int]:
"""Flag brightness as supported feature to represent flame level."""
return SUPPORT_BRIGHTNESS
@property
def is_on(self) -> bool:
"""Return True if power is on."""
return self._power == 1
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the fireplace on."""
_LOGGER.debug("Fireplace async_turn_on called with: %s", kwargs)
brightness = kwargs.get(ATTR_BRIGHTNESS)
if brightness:
flame = round((brightness * 100) / 255)
await self._hub.bond.action(self._device.device_id, Action.set_flame(flame))
else:
await self._hub.bond.action(self._device.device_id, Action.turn_on())
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the fireplace off."""
_LOGGER.debug("Fireplace async_turn_off called with: %s", kwargs)
await self._hub.bond.action(self._device.device_id, Action.turn_off())
@property
def brightness(self):
"""Return the flame of this fireplace converted to HA brightness between 0..255."""
return round(self._flame * 255 / 100) if self._flame else None
@property
def icon(self) -> Optional[str]:
"""Show fireplace icon for the entity."""
return "mdi:fireplace" if self._power == 1 else "mdi:fireplace-off"
|
class EditableListWidget:
"""Helper class with behaviour for simple editable lists
The entire point of this is to handle simple list item addition,
removal, and rearrangement, and the associated sensitivity handling.
This requires template children to be bound as:
* `treeview`
* `remove`
* `move_up`
* `move_down`
"""
def setup_sensitivity_handling(self):
model = self.treeview.get_model()
model.connect("row-inserted", self._update_sensitivity)
model.connect("rows-reordered", self._update_sensitivity)
self.treeview.get_selection().connect(
"changed", self._update_sensitivity)
self._update_sensitivity()
def _update_sensitivity(self, *args):
model, it, path = self._get_selected()
if not it:
self.remove.set_sensitive(False)
self.move_up.set_sensitive(False)
self.move_down.set_sensitive(False)
else:
self.remove.set_sensitive(True)
self.move_up.set_sensitive(path > 0)
self.move_down.set_sensitive(path < len(model) - 1)
def _get_selected(self):
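# path is the integer row index of the selected item, or None when nothing is selected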
model, it = self.treeview.get_selection().get_selected()
path = model.get_path(it)[0] if it else None
return (model, it, path)
def add_entry(self):
self.treeview.get_model().append(self.default_entry)
def remove_selected_entry(self):
model, it, path = self._get_selected()
model.remove(it)
def move_up_selected_entry(self):
model, it, path = self._get_selected()
model.swap(it, model.get_iter(path - 1))
def move_down_selected_entry(self):
model, it, path = self._get_selected()
model.swap(it, model.get_iter(path + 1))
|
import unittest
import numpy as np
from chainer import testing
from chainercv.transforms import resize_point
class TestResizePoint(unittest.TestCase):
def test_resize_point_ndarray(self):
point = np.random.uniform(
low=0., high=32., size=(3, 12, 2))
out = resize_point(point, in_size=(16, 32), out_size=(8, 64))
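# in_size and out_size are (height, width); points are (y, x), so y scales by 8 / 16 = 0.5 and x by 64 / 32 = 2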
point[:, :, 0] *= 0.5
point[:, :, 1] *= 2
np.testing.assert_equal(out, point)
def test_resize_point_list(self):
point = [
np.random.uniform(low=0., high=32., size=(12, 2)),
np.random.uniform(low=0., high=32., size=(10, 2))
]
out = resize_point(point, in_size=(16, 32), out_size=(8, 64))
for i, pnt in enumerate(point):
pnt[:, 0] *= 0.5
pnt[:, 1] *= 2
np.testing.assert_equal(out[i], pnt)
testing.run_module(__name__, __file__)
|
import numpy as np
from scattertext.CSRMatrixTools import CSRMatrixFactory
from scattertext.ParsedCorpus import ParsedCorpus
from scattertext.features.FeatsFromSpacyDoc import FeatsFromSpacyDoc
from scattertext.indexstore.IndexStore import IndexStore
class CorpusFromParsedDocuments(object):
def __init__(self,
df,
category_col,
parsed_col,
feats_from_spacy_doc=FeatsFromSpacyDoc()):
'''
Parameters
----------
df : pd.DataFrame
contains category_col and parsed_col, where parsed_col consists entirely of spacy docs
category_col : str
name of category column in convention_df
parsed_col : str
name of spacy parsed column in convention_df
feats_from_spacy_doc : FeatsFromSpacyDoc
'''
self._df = df.reset_index()
self._category_col = category_col
self._parsed_col = parsed_col
self._category_idx_store = IndexStore()
self._X_factory = CSRMatrixFactory()
self._mX_factory = CSRMatrixFactory()
self._term_idx_store = IndexStore()
self._metadata_idx_store = IndexStore()
self._feats_from_spacy_doc = feats_from_spacy_doc
def build(self):
'''Constructs the term doc matrix.
Returns
-------
scattertext.ParsedCorpus.ParsedCorpus
'''
self._y = self._get_y_and_populate_category_idx_store()
self._df.apply(self._add_to_x_factory, axis=1)
self._X = self._X_factory.set_last_row_idx(len(self._y)-1).get_csr_matrix()
self._mX = self._mX_factory.set_last_row_idx(len(self._y)-1).get_csr_matrix()
return ParsedCorpus(self._df,
self._X,
self._mX,
self._y,
self._term_idx_store,
self._category_idx_store,
self._metadata_idx_store,
self._parsed_col,
self._category_col)
def _get_y_and_populate_category_idx_store(self):
return np.array(self._df[self._category_col].apply(self._category_idx_store.getidx))
def _add_to_x_factory(self, row):
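# fill one document's row of the sparse term-document matrix (X) and the metadata matrix (mX)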
parsed_text = row[self._parsed_col]
for term, count in self._feats_from_spacy_doc.get_feats(parsed_text).items():
term_idx = self._term_idx_store.getidx(term)
self._X_factory[row.name, term_idx] = count
for meta, val in self._feats_from_spacy_doc.get_doc_metadata(parsed_text).items():
meta_idx = self._metadata_idx_store.getidx(meta)
self._mX_factory[row.name, meta_idx] = val
def _make_new_term_doc_matrix(self,
new_X=None,
new_mX=None,
new_y=None,
new_term_idx_store=None,
new_category_idx_store=None,
new_metadata_idx_store=None,
new_y_mask=None):
return ParsedCorpus(self._df[new_y_mask] if new_y_mask else self._df,
self._X if new_X is None else new_X,
self._mX if new_mX is None else new_mX,
self._y if new_y is None else new_y,
self._term_idx_store if new_term_idx_store is None else new_term_idx_store,
self._category_idx_store if new_category_idx_store is None else new_category_idx_store,
self._metadata_idx_store if new_metadata_idx_store is None else new_metadata_idx_store,
self._parsed_col,
self._category_col)
|
import re
_RE_FIND_FIRST_CAP = re.compile('(.)([A-Z][a-z]+)')
_RE_SPAN_OF_CAPS = re.compile('([a-z0-9])([A-Z])')
def camelcase_to_underscore(name):
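# e.g. camelcase_to_underscore('MaxRetries') -> 'max_retries', camelcase_to_underscore('HTTPServer') -> 'http_server' (illustrative)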
return _RE_SPAN_OF_CAPS.sub(r'\1_\2',
_RE_FIND_FIRST_CAP.sub(r'\1_\2', name)
).lower()
class binary:
"""
Store the value in bits so we can convert between things easily
"""
value = None
def __init__(self, value=None, unit=None):
self.do(value=value, unit=unit)
@staticmethod
def convert(value=None, oldUnit=None, newUnit=None):
convertor = binary(value=value, unit=oldUnit)
return convertor.get(unit=newUnit)
def set(self, value, unit=None):
return self.do(value=value, unit=unit)
def get(self, unit=None):
return self.do(unit=unit)
def do(self, value=None, unit=None):
if not unit:
return self.bit(value=value)
if unit in ['bit', 'b']:
return self.bit(value=value)
if unit in ['kilobit', 'kbit', 'Kibit']:
return self.kilobit(value=value)
if unit in ['megabit', 'Mbit', 'Mibit']:
return self.megabit(value=value)
if unit in ['gigabit', 'Gbit', 'Gibit']:
return self.gigabit(value=value)
if unit in ['terabit', 'Tbit', 'Tibit']:
return self.terabit(value=value)
if unit in ['petabit', 'Pbit', 'Pibit']:
return self.petabit(value=value)
if unit in ['exabit', 'Ebit', 'Eibit']:
return self.exabit(value=value)
if unit in ['zettabit', 'Zbit', 'Zibit']:
return self.zettabit(value=value)
if unit in ['yottabit', 'Ybit', 'Yibit']:
return self.yottabit(value=value)
if unit in ['byte', 'B']:
return self.byte(value=value)
if unit in ['kilobyte', 'kB', 'KiB']:
return self.kilobyte(value=value)
if unit in ['megabyte', 'MB', 'MiB', 'Mbyte']:
return self.megabyte(value=value)
if unit in ['gigabyte', 'GB', 'GiB']:
return self.gigabyte(value=value)
if unit in ['terabyte', 'TB', 'TiB']:
return self.terabyte(value=value)
if unit in ['petabyte', 'PB', 'PiB']:
return self.petabyte(value=value)
if unit in ['exabyte', 'EB', 'EiB']:
return self.exabyte(value=value)
if unit in ['zettabyte', 'ZB', 'ZiB']:
return self.zettabyte(value=value)
if unit in ['yottabyte', 'YB', 'YiB']:
return self.yottabyte(value=value)
raise NotImplementedError("unit %s" % unit)
def bit(self, value=None):
if value is None:
return self.value
else:
self.value = float(value)
def convertb(self, value, source, offset=1):
if value is None:
return source() / pow(1024, offset)
else:
source(value * pow(1024, offset))
def kilobit(self, value=None):
return self.convertb(value, self.bit)
def megabit(self, value=None):
return self.convertb(value, self.bit, 2)
def gigabit(self, value=None):
return self.convertb(value, self.bit, 3)
def terabit(self, value=None):
return self.convertb(value, self.bit, 4)
def petabit(self, value=None):
return self.convertb(value, self.bit, 5)
def exabit(self, value=None):
return self.convertb(value, self.bit, 6)
def zettabit(self, value=None):
return self.convertb(value, self.bit, 7)
def yottabit(self, value=None):
return self.convertb(value, self.bit, 8)
def byte(self, value=None):
if value is None:
return self.value / 8
else:
self.value = float(value) * 8
def kilobyte(self, value=None):
return self.convertb(value, self.byte)
def megabyte(self, value=None):
return self.convertb(value, self.byte, 2)
def gigabyte(self, value=None):
return self.convertb(value, self.byte, 3)
def terabyte(self, value=None):
return self.convertb(value, self.byte, 4)
def petabyte(self, value=None):
return self.convertb(value, self.byte, 5)
def exabyte(self, value=None):
return self.convertb(value, self.byte, 6)
def zettabyte(self, value=None):
return self.convertb(value, self.byte, 7)
def yottabyte(self, value=None):
return self.convertb(value, self.byte, 8)
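# Example usage (illustrative; assumes the 1024-based multipliers above):
# one GiB is 1024 MiB, and eight bits make one byte.
assert binary.convert(value=1, oldUnit='GiB', newUnit='MiB') == 1024.0
assert binary.convert(value=8, oldUnit='bit', newUnit='B') == 1.0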
class time:
"""
Store the value in milliseconds so we can convert between things easily
"""
value = None
def __init__(self, value=None, unit=None):
self.do(value=value, unit=unit)
@staticmethod
def convert(value=None, oldUnit=None, newUnit=None):
convertor = time(value=value, unit=oldUnit)
return convertor.get(unit=newUnit)
def set(self, value, unit=None):
return self.do(value=value, unit=unit)
def get(self, unit=None):
return self.do(unit=unit)
def do(self, value=None, unit=None):
if not unit:
v = self.millisecond(value=value)
elif unit.lower() in ['millisecond', 'milliseconds', 'ms']:
v = self.millisecond(value=value)
elif unit.lower() in ['second', 'seconds', 's']:
v = self.second(value=value)
elif unit.lower() in ['minute', 'minutes', 'm']:
v = self.minute(value=value)
elif unit.lower() in ['hour', 'hours', 'h']:
v = self.hour(value=value)
elif unit.lower() in ['day', 'days', 'd']:
v = self.day(value=value)
elif unit.lower() in ['year', 'years', 'y']:
v = self.year(value=value)
elif unit.lower() in ['microsecond', 'microseconds', 'us']:
v = self.microsecond(value=value)
elif unit.lower() in ['nanosecond', 'nanoseconds', 'ns']:
v = self.nanosecond(value=value)
else:
raise NotImplementedError("unit %s" % unit)
return v
def millisecond(self, value=None):
if value is None:
return self.value
else:
self.value = float(value)
def second(self, value=None):
if value is None:
return self.millisecond() / 1000
else:
self.millisecond(value * 1000)
def minute(self, value=None):
if value is None:
return self.second() / 60
else:
self.millisecond(self.second(value * 60))
def hour(self, value=None):
if value is None:
return self.minute() / 60
else:
self.millisecond(self.minute(value * 60))
def day(self, value=None):
if value is None:
return self.hour() / 24
else:
self.millisecond(self.hour(value * 24))
def year(self, value=None):
"""
We do *NOT* know which year we are converting for, so let's assume the
year has 365 days.
"""
if value is None:
return self.day() / 365
else:
self.millisecond(self.day(value * 365))
def microsecond(self, value=None):
if value is None:
return self.millisecond() * 1000
else:
self.millisecond(value / 1000)
def nanosecond(self, value=None):
if value is None:
return self.microsecond() * 1000
else:
self.millisecond(self.microsecond(value / 1000))
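# Example usage (illustrative): the converter round-trips through
# milliseconds, so 90 seconds is 1.5 minutes and 1.5 s is 1500 ms.
assert time.convert(value=90, oldUnit='s', newUnit='m') == 1.5
assert time.convert(value=1.5, oldUnit='seconds', newUnit='ms') == 1500.0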
|
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONCENTRATION_PARTS_PER_MILLION,
DEGREE,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
LENGTH_MILLIMETERS,
PERCENTAGE,
PRESSURE_MBAR,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
SPEED_KILOMETERS_PER_HOUR,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import async_entries_for_config_entry
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from .const import CONF_WEATHER_AREAS, DATA_HANDLER, DOMAIN, MANUFACTURER, SIGNAL_NAME
from .data_handler import (
HOMECOACH_DATA_CLASS_NAME,
PUBLICDATA_DATA_CLASS_NAME,
WEATHERSTATION_DATA_CLASS_NAME,
)
from .helper import NetatmoArea
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
SUPPORTED_PUBLIC_SENSOR_TYPES = [
"temperature",
"pressure",
"humidity",
"rain",
"windstrength",
"guststrength",
"sum_rain_1",
"sum_rain_24",
]
SENSOR_TYPES = {
"temperature": ["Temperature", TEMP_CELSIUS, None, DEVICE_CLASS_TEMPERATURE, True],
"temp_trend": ["Temperature trend", None, "mdi:trending-up", None, False],
"co2": ["CO2", CONCENTRATION_PARTS_PER_MILLION, "mdi:molecule-co2", None, True],
"pressure": ["Pressure", PRESSURE_MBAR, None, DEVICE_CLASS_PRESSURE, True],
"pressure_trend": ["Pressure trend", None, "mdi:trending-up", None, False],
"noise": ["Noise", "dB", "mdi:volume-high", None, True],
"humidity": ["Humidity", PERCENTAGE, None, DEVICE_CLASS_HUMIDITY, True],
"rain": ["Rain", LENGTH_MILLIMETERS, "mdi:weather-rainy", None, True],
"sum_rain_1": [
"Rain last hour",
LENGTH_MILLIMETERS,
"mdi:weather-rainy",
None,
False,
],
"sum_rain_24": ["Rain today", LENGTH_MILLIMETERS, "mdi:weather-rainy", None, True],
"battery_percent": [
"Battery Percent",
PERCENTAGE,
None,
DEVICE_CLASS_BATTERY,
True,
],
"windangle": ["Direction", None, "mdi:compass-outline", None, True],
"windangle_value": ["Angle", DEGREE, "mdi:compass-outline", None, False],
"windstrength": [
"Wind Strength",
SPEED_KILOMETERS_PER_HOUR,
"mdi:weather-windy",
None,
True,
],
"gustangle": ["Gust Direction", None, "mdi:compass-outline", None, False],
"gustangle_value": ["Gust Angle", DEGREE, "mdi:compass-outline", None, False],
"guststrength": [
"Gust Strength",
SPEED_KILOMETERS_PER_HOUR,
"mdi:weather-windy",
None,
False,
],
"reachable": ["Reachability", None, "mdi:signal", None, False],
"rf_status": ["Radio", None, "mdi:signal", None, False],
"rf_status_lvl": [
"Radio Level",
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
None,
DEVICE_CLASS_SIGNAL_STRENGTH,
False,
],
"wifi_status": ["Wifi", None, "mdi:wifi", None, False],
"wifi_status_lvl": [
"Wifi Level",
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
None,
DEVICE_CLASS_SIGNAL_STRENGTH,
False,
],
"health_idx": ["Health", None, "mdi:cloud", None, True],
}
MODULE_TYPE_OUTDOOR = "NAModule1"
MODULE_TYPE_WIND = "NAModule2"
MODULE_TYPE_RAIN = "NAModule3"
MODULE_TYPE_INDOOR = "NAModule4"
BATTERY_VALUES = {
MODULE_TYPE_WIND: {"Full": 5590, "High": 5180, "Medium": 4770, "Low": 4360},
MODULE_TYPE_RAIN: {"Full": 5500, "High": 5000, "Medium": 4500, "Low": 4000},
MODULE_TYPE_INDOOR: {"Full": 5500, "High": 5280, "Medium": 4920, "Low": 4560},
MODULE_TYPE_OUTDOOR: {"Full": 5500, "High": 5000, "Medium": 4500, "Low": 4000},
}
PUBLIC = "public"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Netatmo weather and homecoach platform."""
data_handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]
async def find_entities(data_class_name):
"""Find all entities."""
await data_handler.register_data_class(data_class_name, data_class_name, None)
all_module_infos = {}
data = data_handler.data
if not data.get(data_class_name):
return []
data_class = data[data_class_name]
for station_id in data_class.stations:
for module_id in data_class.get_modules(station_id):
all_module_infos[module_id] = data_class.get_module(module_id)
all_module_infos[station_id] = data_class.get_station(station_id)
entities = []
for module in all_module_infos.values():
if "_id" not in module:
_LOGGER.debug("Skipping module %s", module.get("module_name"))
continue
_LOGGER.debug(
"Adding module %s %s",
module.get("module_name"),
module.get("_id"),
)
conditions = [
c.lower()
for c in data_class.get_monitored_conditions(module_id=module["_id"])
if c.lower() in SENSOR_TYPES
]
for condition in conditions:
if f"{condition}_value" in SENSOR_TYPES:
conditions.append(f"{condition}_value")
elif f"{condition}_lvl" in SENSOR_TYPES:
conditions.append(f"{condition}_lvl")
for condition in conditions:
entities.append(
NetatmoSensor(data_handler, data_class_name, module, condition)
)
return entities
for data_class_name in [
WEATHERSTATION_DATA_CLASS_NAME,
HOMECOACH_DATA_CLASS_NAME,
]:
async_add_entities(await find_entities(data_class_name), True)
device_registry = await hass.helpers.device_registry.async_get_registry()
async def add_public_entities(update=True):
"""Retrieve Netatmo public weather entities."""
entities = {
device.name: device.id
for device in async_entries_for_config_entry(
device_registry, entry.entry_id
)
if device.model == "Public Weather stations"
}
new_entities = []
for area in [
NetatmoArea(**i) for i in entry.options.get(CONF_WEATHER_AREAS, {}).values()
]:
signal_name = f"{PUBLICDATA_DATA_CLASS_NAME}-{area.uuid}"
if area.area_name in entities:
entities.pop(area.area_name)
if update:
async_dispatcher_send(
hass,
f"netatmo-config-{area.area_name}",
area,
)
continue
await data_handler.register_data_class(
PUBLICDATA_DATA_CLASS_NAME,
signal_name,
None,
LAT_NE=area.lat_ne,
LON_NE=area.lon_ne,
LAT_SW=area.lat_sw,
LON_SW=area.lon_sw,
)
for sensor_type in SUPPORTED_PUBLIC_SENSOR_TYPES:
new_entities.append(
NetatmoPublicSensor(data_handler, area, sensor_type)
)
for device_id in entities.values():
device_registry.async_remove_device(device_id)
if new_entities:
async_add_entities(new_entities)
async_dispatcher_connect(
hass, f"signal-{DOMAIN}-public-update-{entry.entry_id}", add_public_entities
)
entry.add_update_listener(async_config_entry_updated)
await add_public_entities(False)
async def async_config_entry_updated(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle signals of config entry being updated."""
async_dispatcher_send(hass, f"signal-{DOMAIN}-public-update-{entry.entry_id}")
class NetatmoSensor(NetatmoBase):
"""Implementation of a Netatmo sensor."""
def __init__(self, data_handler, data_class_name, module_info, sensor_type):
"""Initialize the sensor."""
super().__init__(data_handler)
self._data_classes.append(
{"name": data_class_name, SIGNAL_NAME: data_class_name}
)
self._id = module_info["_id"]
self._station_id = module_info.get("main_device", self._id)
station = self._data.get_station(self._station_id)
device = self._data.get_module(self._id)
if not device:
# Assume it's a station if module can't be found
device = station
if device["type"] in ("NHC", "NAMain"):
self._device_name = module_info["station_name"]
else:
self._device_name = (
f"{station['station_name']} "
f"{module_info.get('module_name', device['type'])}"
)
self._name = (
f"{MANUFACTURER} {self._device_name} {SENSOR_TYPES[sensor_type][0]}"
)
self.type = sensor_type
self._state = None
self._device_class = SENSOR_TYPES[self.type][3]
self._icon = SENSOR_TYPES[self.type][2]
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
self._model = device["type"]
self._unique_id = f"{self._id}-{self.type}"
self._enabled_default = SENSOR_TYPES[self.type][4]
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def available(self):
"""Return entity availability."""
return self._state is not None
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self._enabled_default
@callback
def async_update_callback(self):
"""Update the entity's state."""
if self._data is None:
if self._state is None:
return
_LOGGER.warning("No data from update")
self._state = None
return
data = self._data.get_last_data(station_id=self._station_id, exclude=3600).get(
self._id
)
if data is None:
if self._state:
_LOGGER.debug(
"No data found for %s - %s (%s)",
self.name,
self._device_name,
self._id,
)
self._state = None
return
try:
if self.type == "temperature":
self._state = round(data["Temperature"], 1)
elif self.type == "temp_trend":
self._state = data["temp_trend"]
elif self.type == "humidity":
self._state = data["Humidity"]
elif self.type == "rain":
self._state = data["Rain"]
elif self.type == "sum_rain_1":
self._state = round(data["sum_rain_1"], 1)
elif self.type == "sum_rain_24":
self._state = data["sum_rain_24"]
elif self.type == "noise":
self._state = data["Noise"]
elif self.type == "co2":
self._state = data["CO2"]
elif self.type == "pressure":
self._state = round(data["Pressure"], 1)
elif self.type == "pressure_trend":
self._state = data["pressure_trend"]
elif self.type == "battery_percent":
self._state = data["battery_percent"]
elif self.type == "windangle_value":
self._state = fix_angle(data["WindAngle"])
elif self.type == "windangle":
self._state = process_angle(fix_angle(data["WindAngle"]))
elif self.type == "windstrength":
self._state = data["WindStrength"]
elif self.type == "gustangle_value":
self._state = fix_angle(data["GustAngle"])
elif self.type == "gustangle":
self._state = process_angle(fix_angle(data["GustAngle"]))
elif self.type == "guststrength":
self._state = data["GustStrength"]
elif self.type == "reachable":
self._state = data["reachable"]
elif self.type == "rf_status_lvl":
self._state = data["rf_status"]
elif self.type == "rf_status":
self._state = process_rf(data["rf_status"])
elif self.type == "wifi_status_lvl":
self._state = data["wifi_status"]
elif self.type == "wifi_status":
self._state = process_wifi(data["wifi_status"])
elif self.type == "health_idx":
self._state = process_health(data["health_idx"])
except KeyError:
if self._state:
_LOGGER.debug("No %s data found for %s", self.type, self._device_name)
self._state = None
return
def fix_angle(angle: int) -> int:
"""Fix angle when value is negative."""
if angle < 0:
return 360 + angle
return angle
def process_angle(angle: int) -> str:
"""Process angle and return string for display."""
if angle >= 330:
return "N"
if angle >= 300:
return "NW"
if angle >= 240:
return "W"
if angle >= 210:
return "SW"
if angle >= 150:
return "S"
if angle >= 120:
return "SE"
if angle >= 60:
return "E"
if angle >= 30:
return "NE"
return "N"
def process_battery(data: int, model: str) -> str:
"""Process battery data and return string for display."""
values = BATTERY_VALUES[model]
if data >= values["Full"]:
return "Full"
if data >= values["High"]:
return "High"
if data >= values["Medium"]:
return "Medium"
if data >= values["Low"]:
return "Low"
return "Very Low"
def process_health(health):
"""Process health index and return string for display."""
if health == 0:
return "Healthy"
if health == 1:
return "Fine"
if health == 2:
return "Fair"
if health == 3:
return "Poor"
if health == 4:
return "Unhealthy"
def process_rf(strength):
"""Process wifi signal strength and return string for display."""
if strength >= 90:
return "Low"
if strength >= 76:
return "Medium"
if strength >= 60:
return "High"
return "Full"
def process_wifi(strength):
"""Process wifi signal strength and return string for display."""
if strength >= 86:
return "Low"
if strength >= 71:
return "Medium"
if strength >= 56:
return "High"
return "Full"
class NetatmoPublicSensor(NetatmoBase):
"""Represent a single sensor in a Netatmo."""
def __init__(self, data_handler, area, sensor_type):
"""Initialize the sensor."""
super().__init__(data_handler)
self._signal_name = f"{PUBLICDATA_DATA_CLASS_NAME}-{area.uuid}"
self._data_classes.append(
{
"name": PUBLICDATA_DATA_CLASS_NAME,
"LAT_NE": area.lat_ne,
"LON_NE": area.lon_ne,
"LAT_SW": area.lat_sw,
"LON_SW": area.lon_sw,
"area_name": area.area_name,
SIGNAL_NAME: self._signal_name,
}
)
self.type = sensor_type
self.area = area
self._mode = area.mode
self._area_name = area.area_name
self._id = self._area_name
self._device_name = f"{self._area_name}"
self._name = f"{MANUFACTURER} {self._device_name} {SENSOR_TYPES[self.type][0]}"
self._state = None
self._device_class = SENSOR_TYPES[self.type][3]
self._icon = SENSOR_TYPES[self.type][2]
self._unit_of_measurement = SENSOR_TYPES[self.type][1]
self._show_on_map = area.show_on_map
self._unique_id = f"{self._device_name.replace(' ', '-')}-{self.type}"
self._model = PUBLIC
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def device_state_attributes(self):
"""Return the attributes of the device."""
attrs = {}
if self._show_on_map:
attrs[ATTR_LATITUDE] = (self.area.lat_ne + self.area.lat_sw) / 2
attrs[ATTR_LONGITUDE] = (self.area.lon_ne + self.area.lon_sw) / 2
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._unit_of_measurement
@property
def available(self):
"""Return True if entity is available."""
return self._state is not None
@property
def _data(self):
return self.data_handler.data[self._signal_name]
async def async_added_to_hass(self) -> None:
"""Entity created."""
await super().async_added_to_hass()
self.data_handler.listeners.append(
async_dispatcher_connect(
self.hass,
f"netatmo-config-{self.device_info['name']}",
self.async_config_update_callback,
)
)
async def async_config_update_callback(self, area):
"""Update the entity's config."""
if self.area == area:
return
await self.data_handler.unregister_data_class(
self._signal_name, self.async_update_callback
)
self.area = area
self._signal_name = f"{PUBLICDATA_DATA_CLASS_NAME}-{area.uuid}"
self._data_classes = [
{
"name": PUBLICDATA_DATA_CLASS_NAME,
"LAT_NE": area.lat_ne,
"LON_NE": area.lon_ne,
"LAT_SW": area.lat_sw,
"LON_SW": area.lon_sw,
"area_name": area.area_name,
SIGNAL_NAME: self._signal_name,
}
]
self._mode = area.mode
self._show_on_map = area.show_on_map
await self.data_handler.register_data_class(
PUBLICDATA_DATA_CLASS_NAME,
self._signal_name,
self.async_update_callback,
LAT_NE=area.lat_ne,
LON_NE=area.lon_ne,
LAT_SW=area.lat_sw,
LON_SW=area.lon_sw,
)
@callback
def async_update_callback(self):
"""Update the entity's state."""
if self._data is None:
if self._state is None:
return
_LOGGER.warning("No data from update")
self._state = None
return
data = None
if self.type == "temperature":
data = self._data.get_latest_temperatures()
elif self.type == "pressure":
data = self._data.get_latest_pressures()
elif self.type == "humidity":
data = self._data.get_latest_humidities()
elif self.type == "rain":
data = self._data.get_latest_rain()
elif self.type == "sum_rain_1":
data = self._data.get_60_min_rain()
elif self.type == "sum_rain_24":
data = self._data.get_24_h_rain()
elif self.type == "windstrength":
data = self._data.get_latest_wind_strengths()
elif self.type == "guststrength":
data = self._data.get_latest_gust_strengths()
if not data:
if self._state is None:
return
_LOGGER.debug(
"No station provides %s data in the area %s", self.type, self._area_name
)
self._state = None
return
values = [x for x in data.values() if x is not None]
if self._mode == "avg":
self._state = round(sum(values) / len(values), 1)
elif self._mode == "max":
self._state = max(values)
|
from datetime import timedelta
from homeassistant.bootstrap import async_setup_component
import homeassistant.util.dt as dt_util
async def test_events_http_api(hass, hass_client):
"""Test the calendar demo view."""
await async_setup_component(hass, "calendar", {"calendar": {"platform": "demo"}})
await hass.async_block_till_done()
client = await hass_client()
response = await client.get("/api/calendars/calendar.calendar_2")
assert response.status == 400
start = dt_util.now()
end = start + timedelta(days=1)
response = await client.get(
"/api/calendars/calendar.calendar_1?start={}&end={}".format(
start.isoformat(), end.isoformat()
)
)
assert response.status == 200
events = await response.json()
assert events[0]["summary"] == "Future Event"
assert events[0]["title"] == "Future Event"
async def test_calendars_http_api(hass, hass_client):
"""Test the calendar demo view."""
await async_setup_component(hass, "calendar", {"calendar": {"platform": "demo"}})
await hass.async_block_till_done()
client = await hass_client()
response = await client.get("/api/calendars")
assert response.status == 200
data = await response.json()
assert data == [
{"entity_id": "calendar.calendar_1", "name": "Calendar 1"},
{"entity_id": "calendar.calendar_2", "name": "Calendar 2"},
]
|
import os
import re
import a_sync
from . import exceptions
from . import framework
from . import mesos_file
from paasta_tools.async_utils import async_ttl_cache
class Task:
cmd_re = re.compile(r"\(Command: (.+)\)")
def __init__(self, master, items):
self.master = master
self.__items = items
def __str__(self):
return "{}:{}".format(a_sync.block(self.slave), self["id"])
def __getitem__(self, name):
return self.__items[name]
async def executor(self):
return await (await self.slave()).task_executor(self["id"])
async def framework(self):
return framework.Framework(await self.master.framework(self["framework_id"]))
@async_ttl_cache(cleanup_self=True)
async def directory(self):
try:
return (await self.executor())["directory"]
except exceptions.MissingExecutor:
return ""
@async_ttl_cache(cleanup_self=True)
async def slave(self):
return await self.master.slave(self["slave_id"])
async def file(self, path):
return mesos_file.File(await self.slave(), self, path)
async def file_list(self, path):
return await (await self.slave()).file_list(os.path.join(await self.directory(), path))
async def stats(self):
try:
return await (await self.slave()).task_stats(self["id"])
except exceptions.MissingExecutor:
return {}
async def cpu_time(self):
st = await self.stats()
secs = st.get("cpus_user_time_secs", 0) + st.get("cpus_system_time_secs", 0)
return secs
async def cpu_limit(self):
return (await self.stats()).get("cpus_limit", 0)
async def mem_limit(self):
return (await self.stats()).get("mem_limit_bytes", 0)
async def rss(self):
return (await self.stats()).get("mem_rss_bytes", 0)
async def command(self):
try:
result = self.cmd_re.search((await self.executor())["name"])
except exceptions.MissingExecutor:
result = None
if not result:
return "none"
return result.group(1)
async def user(self):
return (await self.framework()).user
|
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.deeplab.xception import Xception65
class TestXception(unittest.TestCase):
def setUp(self):
self.link = Xception65()
def check_call(self):
xp = self.link.xp
x = chainer.Variable(xp.random.uniform(
low=-1, high=1, size=(2, 3, 64, 64)
).astype(xp.float32))
y1, y2 = self.link(x)
self.assertIsInstance(y1, chainer.Variable)
self.assertIsInstance(y1.data, xp.ndarray)
self.assertEqual(y1.shape, (2, 256, 16, 16))
self.assertIsInstance(y2, chainer.Variable)
self.assertIsInstance(y2.data, xp.ndarray)
self.assertEqual(y2.shape, (2, 2048, 8, 8))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
|
import re
from redbot.core.commands import Converter, BadArgument
from redbot.core.i18n import Translator
_ = Translator("Mod", __file__)
_id_regex = re.compile(r"([0-9]{15,21})$")
_mention_regex = re.compile(r"<@!?([0-9]{15,21})>$")
class RawUserIds(Converter):
async def convert(self, ctx, argument):
# This is for the hackban and unban commands, where we receive IDs that
# are most likely not in the guild.
# Mentions are supported, but most likely won't ever be in cache.
if match := _id_regex.match(argument) or _mention_regex.match(argument):
return int(match.group(1))
raise BadArgument(_("{} doesn't look like a valid user ID.").format(argument))
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.utils import np_utils
from keras import optimizers
from elephas.ml_model import ElephasEstimator, load_ml_estimator, ElephasTransformer, load_ml_transformer
from elephas.ml.adapter import to_data_frame
from pyspark.mllib.evaluation import MulticlassMetrics
from pyspark.ml import Pipeline
import pytest
pytestmark = pytest.mark.usefixtures("spark_context")
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 1
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)[:1000]
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
def test_serialization_transformer():
transformer = ElephasTransformer()
transformer.set_keras_model_config(model.to_yaml())
transformer.save("test.h5")
load_ml_transformer("test.h5")
def test_serialization_estimator():
estimator = ElephasEstimator()
estimator.set_keras_model_config(model.to_yaml())
estimator.set_loss("categorical_crossentropy")
estimator.save("test.h5")
load_ml_estimator("test.h5")
def test_spark_ml_model(spark_context):
df = to_data_frame(spark_context, x_train, y_train, categorical=True)
test_df = to_data_frame(spark_context, x_test, y_test, categorical=True)
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
sgd_conf = optimizers.serialize(sgd)
# Initialize Spark ML Estimator
estimator = ElephasEstimator()
estimator.set_keras_model_config(model.to_yaml())
estimator.set_optimizer_config(sgd_conf)
estimator.set_mode("synchronous")
estimator.set_loss("categorical_crossentropy")
estimator.set_metrics(['acc'])
estimator.set_epochs(epochs)
estimator.set_batch_size(batch_size)
estimator.set_validation_split(0.1)
estimator.set_categorical_labels(True)
estimator.set_nb_classes(nb_classes)
# Fitting a model returns a Transformer
pipeline = Pipeline(stages=[estimator])
fitted_pipeline = pipeline.fit(df)
# Evaluate Spark model by evaluating the underlying model
prediction = fitted_pipeline.transform(test_df)
pnl = prediction.select("label", "prediction")
pnl.show(100)
prediction_and_label = pnl.rdd.map(lambda row: (row.label, row.prediction))
metrics = MulticlassMetrics(prediction_and_label)
print(metrics.precision())
print(metrics.recall())
|
import subprocess
import re
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'ceph'))
from ceph import CephCollector
patternchk = re.compile(r'\bclient io .*')
numberchk = re.compile(r'\d+')
unitchk = re.compile(r'[a-zA-Z]{1,2}')
# This is external to the CephCollector so it can be tested
# separately.
def to_bytes(value, unit):
fval = float(value)
unit = str(unit.lower()).strip()
if unit == "b":
return fval
unit_list = {'kb': 1, 'mb': 2, 'gb': 3, 'tb': 4, 'pb': 5, 'eb': 6}
for i in range(unit_list[unit]):
fval = fval * 1000
return fval
def process_ceph_status(output):
res = patternchk.search(output)
if not res:
return {}
ceph_stats = res.group()
if not ceph_stats:
return {}
ret = {}
rd = wr = iops = runit = wunit = None
rd = numberchk.search(ceph_stats)
if rd is not None:
runit = unitchk.search(ceph_stats, rd.end())
if runit is None:
    # This parser is a module-level helper (no collector instance), so there
    # is no self.log available here; bail out quietly.
    return {}
ret['rd'] = repr(to_bytes(rd.group(), runit.group()))
wr = numberchk.search(ceph_stats, rd.end())
if wr is not None:
wunit = unitchk.search(ceph_stats, wr.end())
if wunit is None:
    return {}
ret['wr'] = repr(to_bytes(wr.group(), wunit.group()))
iops = numberchk.search(ceph_stats, wr.end())
if iops is not None:
ret['iops'] = iops.group()
return ret
class CephStatsCollector(CephCollector):
def _get_stats(self):
"""
Get ceph stats
"""
try:
output = subprocess.check_output(['ceph', '-s'])
except subprocess.CalledProcessError as err:
self.log.info(
'Could not get stats: %s' % err)
self.log.exception('Could not get stats')
return {}
return process_ceph_status(output)
def collect(self):
"""
Collect ceph stats
"""
stats = self._get_stats()
self._publish_stats('cephstats', stats)
return
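# Example usage of the module-level helpers (illustrative; the sample line
# mimics the "client io ..." summary printed by older `ceph -s` output and is
# an assumption, not captured output):
if __name__ == '__main__':
    sample = 'client io 5338 kB/s rd, 1120 kB/s wr, 61 op/s'
    assert to_bytes('5338', 'kB') == 5338000.0
    stats = process_ceph_status(sample)
    assert stats == {'rd': '5338000.0', 'wr': '1120000.0', 'iops': '61'}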
|