import unittest
from unittest import mock
from homeassistant.components.ecobee import climate as ecobee
import homeassistant.const as const
from homeassistant.const import STATE_OFF
class TestEcobee(unittest.TestCase):
"""Tests for Ecobee climate."""
def setUp(self):
"""Set up test variables."""
vals = {
"name": "Ecobee",
"program": {
"climates": [
{"name": "Climate1", "climateRef": "c1"},
{"name": "Climate2", "climateRef": "c2"},
],
"currentClimateRef": "c1",
},
"runtime": {
"actualTemperature": 300,
"actualHumidity": 15,
"desiredHeat": 400,
"desiredCool": 200,
"desiredFanMode": "on",
},
"settings": {
"hvacMode": "auto",
"heatStages": 1,
"coolStages": 1,
"fanMinOnTime": 10,
"heatCoolMinDelta": 50,
"holdAction": "nextTransition",
},
"equipmentStatus": "fan",
"events": [
{
"name": "Event1",
"running": True,
"type": "hold",
"holdClimateRef": "away",
"endDate": "2017-01-01 10:00:00",
"startDate": "2017-02-02 11:00:00",
}
],
}
self.ecobee = mock.Mock()
self.ecobee.__getitem__ = mock.Mock(side_effect=vals.__getitem__)
self.ecobee.__setitem__ = mock.Mock(side_effect=vals.__setitem__)
self.data = mock.Mock()
self.data.ecobee.get_thermostat.return_value = self.ecobee
self.thermostat = ecobee.Thermostat(self.data, 1)
def test_name(self):
"""Test name property."""
assert "Ecobee" == self.thermostat.name
def test_current_temperature(self):
"""Test current temperature."""
assert 30 == self.thermostat.current_temperature
self.ecobee["runtime"]["actualTemperature"] = const.HTTP_NOT_FOUND
assert 40.4 == self.thermostat.current_temperature
def test_target_temperature_low(self):
"""Test target low temperature."""
assert 40 == self.thermostat.target_temperature_low
self.ecobee["runtime"]["desiredHeat"] = 502
assert 50.2 == self.thermostat.target_temperature_low
def test_target_temperature_high(self):
"""Test target high temperature."""
assert 20 == self.thermostat.target_temperature_high
self.ecobee["runtime"]["desiredCool"] = 103
assert 10.3 == self.thermostat.target_temperature_high
def test_target_temperature(self):
"""Test target temperature."""
assert self.thermostat.target_temperature is None
self.ecobee["settings"]["hvacMode"] = "heat"
assert 40 == self.thermostat.target_temperature
self.ecobee["settings"]["hvacMode"] = "cool"
assert 20 == self.thermostat.target_temperature
self.ecobee["settings"]["hvacMode"] = "auxHeatOnly"
assert 40 == self.thermostat.target_temperature
self.ecobee["settings"]["hvacMode"] = "off"
assert self.thermostat.target_temperature is None
def test_desired_fan_mode(self):
"""Test desired fan mode property."""
assert "on" == self.thermostat.fan_mode
self.ecobee["runtime"]["desiredFanMode"] = "auto"
assert "auto" == self.thermostat.fan_mode
def test_fan(self):
"""Test fan property."""
assert const.STATE_ON == self.thermostat.fan
self.ecobee["equipmentStatus"] = ""
assert STATE_OFF == self.thermostat.fan
self.ecobee["equipmentStatus"] = "heatPump, heatPump2"
assert STATE_OFF == self.thermostat.fan
def test_hvac_mode(self):
"""Test current operation property."""
assert self.thermostat.hvac_mode == "heat_cool"
self.ecobee["settings"]["hvacMode"] = "heat"
assert self.thermostat.hvac_mode == "heat"
self.ecobee["settings"]["hvacMode"] = "cool"
assert self.thermostat.hvac_mode == "cool"
self.ecobee["settings"]["hvacMode"] = "auxHeatOnly"
assert self.thermostat.hvac_mode == "heat"
self.ecobee["settings"]["hvacMode"] = "off"
assert self.thermostat.hvac_mode == "off"
def test_hvac_modes(self):
"""Test operation list property."""
assert ["heat_cool", "heat", "cool", "off"] == self.thermostat.hvac_modes
def test_hvac_mode2(self):
"""Test operation mode property."""
assert self.thermostat.hvac_mode == "heat_cool"
self.ecobee["settings"]["hvacMode"] = "heat"
assert self.thermostat.hvac_mode == "heat"
def test_device_state_attributes(self):
"""Test device state attributes property."""
self.ecobee["equipmentStatus"] = "heatPump2"
assert {
"fan": "off",
"climate_mode": "Climate1",
"fan_min_on_time": 10,
"equipment_running": "heatPump2",
} == self.thermostat.device_state_attributes
self.ecobee["equipmentStatus"] = "auxHeat2"
assert {
"fan": "off",
"climate_mode": "Climate1",
"fan_min_on_time": 10,
"equipment_running": "auxHeat2",
} == self.thermostat.device_state_attributes
self.ecobee["equipmentStatus"] = "compCool1"
assert {
"fan": "off",
"climate_mode": "Climate1",
"fan_min_on_time": 10,
"equipment_running": "compCool1",
} == self.thermostat.device_state_attributes
self.ecobee["equipmentStatus"] = ""
assert {
"fan": "off",
"climate_mode": "Climate1",
"fan_min_on_time": 10,
"equipment_running": "",
} == self.thermostat.device_state_attributes
self.ecobee["equipmentStatus"] = "Unknown"
assert {
"fan": "off",
"climate_mode": "Climate1",
"fan_min_on_time": 10,
"equipment_running": "Unknown",
} == self.thermostat.device_state_attributes
self.ecobee["program"]["currentClimateRef"] = "c2"
assert {
"fan": "off",
"climate_mode": "Climate2",
"fan_min_on_time": 10,
"equipment_running": "Unknown",
} == self.thermostat.device_state_attributes
def test_is_aux_heat_on(self):
"""Test aux heat property."""
assert not self.thermostat.is_aux_heat
self.ecobee["equipmentStatus"] = "fan, auxHeat"
assert self.thermostat.is_aux_heat
def test_set_temperature(self):
"""Test set temperature."""
# Auto -> Auto
self.data.reset_mock()
self.thermostat.set_temperature(target_temp_low=20, target_temp_high=30)
self.data.ecobee.set_hold_temp.assert_has_calls(
[mock.call(1, 30, 20, "nextTransition")]
)
# Auto -> Hold
self.data.reset_mock()
self.thermostat.set_temperature(temperature=20)
self.data.ecobee.set_hold_temp.assert_has_calls(
[mock.call(1, 25, 15, "nextTransition")]
)
# Cool -> Hold
self.data.reset_mock()
self.ecobee["settings"]["hvacMode"] = "cool"
self.thermostat.set_temperature(temperature=20.5)
self.data.ecobee.set_hold_temp.assert_has_calls(
[mock.call(1, 20.5, 20.5, "nextTransition")]
)
# Heat -> Hold
self.data.reset_mock()
self.ecobee["settings"]["hvacMode"] = "heat"
self.thermostat.set_temperature(temperature=20)
self.data.ecobee.set_hold_temp.assert_has_calls(
[mock.call(1, 20, 20, "nextTransition")]
)
# Heat -> Auto
self.data.reset_mock()
self.ecobee["settings"]["hvacMode"] = "heat"
self.thermostat.set_temperature(target_temp_low=20, target_temp_high=30)
assert not self.data.ecobee.set_hold_temp.called
def test_set_hvac_mode(self):
"""Test operation mode setter."""
self.data.reset_mock()
self.thermostat.set_hvac_mode("heat_cool")
self.data.ecobee.set_hvac_mode.assert_has_calls([mock.call(1, "auto")])
self.data.reset_mock()
self.thermostat.set_hvac_mode("heat")
self.data.ecobee.set_hvac_mode.assert_has_calls([mock.call(1, "heat")])
def test_set_fan_min_on_time(self):
"""Test fan min on time setter."""
self.data.reset_mock()
self.thermostat.set_fan_min_on_time(15)
self.data.ecobee.set_fan_min_on_time.assert_has_calls([mock.call(1, 15)])
self.data.reset_mock()
self.thermostat.set_fan_min_on_time(20)
self.data.ecobee.set_fan_min_on_time.assert_has_calls([mock.call(1, 20)])
def test_resume_program(self):
"""Test resume program."""
# False
self.data.reset_mock()
self.thermostat.resume_program(False)
self.data.ecobee.resume_program.assert_has_calls([mock.call(1, "false")])
self.data.reset_mock()
self.thermostat.resume_program(None)
self.data.ecobee.resume_program.assert_has_calls([mock.call(1, "false")])
self.data.reset_mock()
self.thermostat.resume_program(0)
self.data.ecobee.resume_program.assert_has_calls([mock.call(1, "false")])
# True
self.data.reset_mock()
self.thermostat.resume_program(True)
self.data.ecobee.resume_program.assert_has_calls([mock.call(1, "true")])
self.data.reset_mock()
self.thermostat.resume_program(1)
self.data.ecobee.resume_program.assert_has_calls([mock.call(1, "true")])
def test_hold_preference(self):
"""Test hold preference."""
assert "nextTransition" == self.thermostat.hold_preference()
for action in [
"useEndTime4hour",
"useEndTime2hour",
"nextPeriod",
"indefinite",
"askMe",
]:
self.ecobee["settings"]["holdAction"] = action
assert "nextTransition" == self.thermostat.hold_preference()
def test_set_fan_mode_on(self):
"""Test set fan mode to on."""
self.data.reset_mock()
self.thermostat.set_fan_mode("on")
self.data.ecobee.set_fan_mode.assert_has_calls(
[mock.call(1, "on", 20, 40, "nextTransition")]
)
def test_set_fan_mode_auto(self):
"""Test set fan mode to auto."""
self.data.reset_mock()
self.thermostat.set_fan_mode("auto")
self.data.ecobee.set_fan_mode.assert_has_calls(
[mock.call(1, "auto", 20, 40, "nextTransition")]
)
|
from unittest import TestCase
from httpobs.scanner.utils import sanitize_headers
class TestValidHostname(TestCase):
def test_valid_size_headers(self):
# TODO: Try to find a site with www.site.foo but not site.foo
headers = {
'Content-Type': 'text/html',
'Location': '/whatever'
}
self.assertEqual(headers, sanitize_headers(headers))
def test_huge_headers(self):
headers = {
'Content-Type': 'text/html',
'Location': '/whatever' * 10000
}
self.assertIsNone(sanitize_headers(headers))
|
from django.apps import AppConfig
from django.core.checks import Warning, register
from weblate.utils.checks import weblate_check
def check_formats(app_configs, **kwargs):
from weblate.formats.models import FILE_FORMATS
message = "Failure in loading handler for {} file format: {}"
return [
weblate_check(
f"weblate.W025.{key}", message.format(key, value.strip()), Warning
)
for key, value in FILE_FORMATS.errors.items()
]
class FormatsConfig(AppConfig):
name = "weblate.formats"
label = "formats"
verbose_name = "Formats"
def ready(self):
super().ready()
register(check_formats)
|
import unicodedata
from django.conf import settings
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
# Names of hardcoded characters
CHAR_NAMES = {
"\t": gettext_lazy("Insert tab character"),
"\n": gettext_lazy("Insert new line"),
"β¦": gettext_lazy("Insert horizontal ellipsis"),
}
DISPLAY_CHARS = {"\t": "⇥", "\n": "↵"}
HYPHEN_LANGS = {
"af",
"am",
"ar",
"ast",
"az",
"bg",
"bs",
"ca",
"cs",
"cy",
"da",
"de",
"dsb",
"dz",
"ee",
"el",
"en",
"eo",
"es",
"fa",
"fi",
"fr",
"fy",
"gd",
"gl",
"gu",
"he",
"hr",
"hsb",
"id",
"is",
"ja",
"ka",
"kk",
"kn",
"ko",
"ksh",
"ky",
"lb",
"lkt",
"lt",
"lv",
"mk",
"mn",
"mr",
"nl",
"os",
"pa",
"pl",
"pt",
"ro",
"ru",
"sk",
"sr",
"sv",
"ta",
"th",
"to",
"tr",
"uz",
"vi",
"vo",
"yi",
"zh",
}
EN_DASH_LANGS = {
"af",
"am",
"ar",
"ast",
"az",
"bg",
"bs",
"ca",
"cs",
"cy",
"da",
"de",
"dsb",
"dz",
"ee",
"el",
"en",
"eo",
"es",
"fi",
"fr",
"fy",
"gd",
"gl",
"gu",
"he",
"hr",
"hsb",
"hu",
"id",
"is",
"ka",
"kk",
"kn",
"ksh",
"ky",
"lb",
"lkt",
"lt",
"lv",
"mk",
"mn",
"mr",
"nb",
"nl",
"os",
"pa",
"pl",
"pt",
"ro",
"ru",
"sk",
"sr",
"sv",
"ta",
"th",
"to",
"tr",
"uk",
"uz",
"vi",
"vo",
"yi",
"zh",
}
EM_DASH_LANGS = {
"af",
"ar",
"ast",
"az",
"bg",
"bs",
"ca",
"cy",
"de",
"dsb",
"dz",
"ee",
"el",
"en",
"eo",
"es",
"fr",
"fy",
"gd",
"gl",
"gu",
"he",
"hr",
"hsb",
"id",
"is",
"it",
"ja",
"ka",
"kk",
"kn",
"ko",
"ksh",
"ky",
"lb",
"lkt",
"lt",
"lv",
"mk",
"mn",
"mr",
"nl",
"os",
"pa",
"pl",
"pt",
"ro",
"ru",
"sv",
"ta",
"th",
"to",
"tr",
"uz",
"vi",
"vo",
"yi",
"zh",
}
EXTRA_CHARS = {
"brx": ("ΰ₯€", "ΰ₯₯"),
"he": ("ΦΎ", "Χ΄", "Χ³"),
}
# Additional characters for RTL languages
RTL_CHARS = (8204, 8205, 8206, 8207, 8234, 8235, 8236, 8237, 8238)
def get_quote(code, data, name):
"""Return special char for quote."""
if code in data:
return name, data[code], data[code]
return name, data["ALL"], data["ALL"]
def get_display_char(char):
name = short = char
if unicodedata.category(char)[0] in ("C", "Z"):
# Various control and space characters
try:
name = unicodedata.name(char)
short = "".join(
x[0] for x in name.replace("-TO-", " ").replace("-", " ").split()
)
except ValueError:
# Char not known to unicodedata
# This mostly happens for control characters < 0x20
name = short = char.encode("unicode_escape").decode("ascii")
if char in DISPLAY_CHARS:
short = DISPLAY_CHARS[char]
return name, short
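# Behaviour sketch for get_display_char (inferred from the code above, not part of
# the original module):
#   get_display_char("\u00a0")   # NO-BREAK SPACE, category Zs
#   -> ("NO-BREAK SPACE", "NBS")             # short form built from word initials
#   get_display_char("\n")       # control character, unicodedata.name() raises
#   -> ("\\n", DISPLAY_CHARS["\n"])          # escape sequence, then display override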
def format_char(char):
"""Return verbose description of a character."""
name, short = get_display_char(char)
if char in CHAR_NAMES:
return CHAR_NAMES[char], short, char
return _("Insert character {0}").format(name), short, char
def get_special_chars(language, additional="", source=""): # noqa: C901
"""Return list of special characters."""
for char in settings.SPECIAL_CHARS:
yield format_char(char)
code = language.code.replace("_", "-").split("-")[0]
if code in EXTRA_CHARS:
for char in EXTRA_CHARS[code]:
yield format_char(char)
yield get_quote(code, DOUBLE_OPEN, _("Opening double quote"))
yield get_quote(code, DOUBLE_CLOSE, _("Closing double quote"))
yield get_quote(code, SINGLE_OPEN, _("Opening single quote"))
yield get_quote(code, SINGLE_CLOSE, _("Closing single quote"))
if code in HYPHEN_LANGS:
yield _("Hyphen"), "-", "-"
if code in EN_DASH_LANGS:
yield _("En dash"), "β", "β"
if code in EM_DASH_LANGS:
yield _("Em dash"), "β", "β"
for char in additional:
name, short = get_display_char(char)
yield _("User configured character: {}").format(name), short, char
rtl = language.direction == "rtl"
for char in set(source):
try:
name = unicodedata.name(char)
except ValueError:
continue
if "ARROW" in name:
if rtl and "LEFT" in name:
try:
char = unicodedata.lookup(name.replace("LEFT", "RIGHT"))
except KeyError:
continue
yield format_char(char)
elif rtl and "RIGHT" in name:
try:
char = unicodedata.lookup(name.replace("RIGHT", "LEFT"))
except KeyError:
continue
yield format_char(char)
else:
yield format_char(char)
RTL_CHARS_DATA = [format_char(chr(c)) for c in RTL_CHARS]
# Quotes data, generated using scripts/generate-specialchars
SINGLE_OPEN = {
"ALL": "β",
"af": "β",
"agq": "β",
"ak": "β",
"am": "βΉ",
"ar": "β",
"as": "β",
"asa": "β",
"ast": "β",
"az": "βΉ",
"bas": "β",
"be": "βΉ",
"bem": "β",
"bez": "β",
"bg": "β",
"bm": "β",
"bn": "β",
"bo": "β",
"br": "βΉ",
"brx": "β",
"bs": "β",
"ca": "β",
"cgg": "β",
"chr": "β",
"cs": "β",
"cy": "β",
"da": "βΊ",
"de": "β",
"dsb": "β",
"dyo": "β",
"ee": "β",
"el": "β",
"en": "β",
"eo": "β",
"es": "β",
"et": "β",
"eu": "βΉ",
"ewo": "β",
"fa": "βΉ",
"ff": "β",
"fi": "β",
"fil": "β",
"fo": "β",
"fr": "βΉ",
"fur": "β",
"ga": "β",
"gl": "β",
"gsw": "βΉ",
"gu": "β",
"gv": "β",
"ha": "β",
"haw": "β",
"he": "β",
"hi": "β",
"hr": "β",
"hsb": "β",
"hu": "Β»",
"hy": "β",
"ia": "β",
"id": "β",
"ig": "β",
"ii": "β",
"is": "β",
"it": "β",
"ja": "γ",
"jgo": "βΉ",
"jmc": "β",
"ka": "β",
"kab": "β",
"kde": "β",
"kea": "β",
"ki": "β",
"kk": "β",
"kkj": "βΉ",
"kl": "β",
"km": "β",
"kn": "β",
"ko": "β",
"kok": "β",
"ksb": "β",
"ksh": "β",
"kw": "β",
"ky": "β",
"lag": "β",
"lb": "β",
"lg": "β",
"ln": "β",
"lt": "β",
"lu": "β",
"luy": "β",
"lv": "β",
"mas": "β",
"mfe": "β",
"mg": "β",
"mk": "β",
"ml": "β",
"mr": "β",
"ms": "β",
"mt": "β",
"mua": "β",
"my": "β",
"naq": "β",
"nb_NO": "β",
"nd": "β",
"ne": "β",
"nl": "β",
"nmg": "Β«",
"nn": "β",
"nnh": "β",
"nr": "β",
"nso": "β",
"nyn": "β",
"om": "β",
"or": "β",
"os": "β",
"pa": "β",
"pl": "Β«",
"ps": "β",
"pt": "β",
"rm": "βΉ",
"rn": "β",
"ro": "Β«",
"rof": "β",
"ru": "β",
"rw": "β",
"rwk": "β",
"saq": "β",
"se": "β",
"seh": "β",
"ses": "β",
"sg": "β",
"shi": "β",
"si": "β",
"sk": "β",
"sl": "β",
"sn": "β",
"so": "β",
"sq": "β",
"sr": "β",
"sr_Cyrl": "β",
"sr_Latn": "β",
"ss": "β",
"sv": "β",
"sw": "β",
"ta": "β",
"te": "β",
"teo": "β",
"th": "β",
"ti": "β",
"tn": "β",
"to": "β",
"tr": "β",
"ts": "β",
"tzm": "β",
"ug": "βΊ",
"uk": "β",
"ur": "β",
"uz": "β",
"uz_Latn": "β",
"ve": "β",
"vi": "β",
"vo": "β",
"vun": "β",
"wae": "βΉ",
"xog": "β",
"yav": "Β«",
"yi": "'",
"yo": "β",
"zgh": "β",
"zh": "γ",
"zh_Hans": "β",
"zh_Hant": "γ",
"zu": "β",
}
SINGLE_CLOSE = {
"ALL": "β",
"af": "β",
"ak": "β",
"am": "βΊ",
"ar": "β",
"as": "β",
"asa": "β",
"ast": "β",
"az": "βΊ",
"bas": "β",
"be": "βΊ",
"bem": "β",
"bez": "β",
"bg": "β",
"bm": "β",
"bn": "β",
"bo": "β",
"br": "βΊ",
"brx": "β",
"bs": "β",
"ca": "β",
"cgg": "β",
"chr": "β",
"cs": "β",
"cy": "β",
"da": "βΉ",
"de": "β",
"dsb": "β",
"dyo": "β",
"ee": "β",
"el": "β",
"en": "β",
"eo": "β",
"es": "β",
"et": "β",
"eu": "βΊ",
"ewo": "β",
"fa": "βΊ",
"ff": "β",
"fi": "β",
"fil": "β",
"fo": "β",
"fr": "βΊ",
"fur": "β",
"ga": "β",
"gl": "β",
"gsw": "βΊ",
"gu": "β",
"gv": "β",
"ha": "β",
"haw": "β",
"he": "β",
"hi": "β",
"hr": "β",
"hsb": "β",
"hu": "Β«",
"hy": "β",
"ia": "β",
"id": "β",
"ig": "β",
"ii": "β",
"is": "β",
"it": "β",
"ja": "γ",
"jgo": "βΊ",
"jmc": "β",
"ka": "β",
"kab": "β",
"kde": "β",
"kea": "β",
"ki": "β",
"kk": "β",
"kkj": "βΊ",
"kl": "β",
"km": "β",
"kn": "β",
"ko": "β",
"kok": "β",
"ksb": "β",
"ksh": "β",
"kw": "β",
"ky": "β",
"lag": "β",
"lb": "β",
"lg": "β",
"ln": "β",
"lt": "β",
"lu": "β",
"luy": "β",
"lv": "β",
"mas": "β",
"mfe": "β",
"mg": "β",
"mk": "β",
"ml": "β",
"mr": "β",
"ms": "β",
"mt": "β",
"mua": "β",
"my": "β",
"naq": "β",
"nb_NO": "β",
"nd": "β",
"ne": "β",
"nl": "β",
"nmg": "Β»",
"nn": "β",
"nnh": "β",
"nr": "β",
"nso": "β",
"nyn": "β",
"om": "β",
"or": "β",
"os": "β",
"pa": "β",
"pl": "Β»",
"ps": "β",
"pt": "β",
"rm": "βΊ",
"rn": "β",
"ro": "Β»",
"rof": "β",
"ru": "β",
"rw": "β",
"rwk": "β",
"saq": "β",
"seh": "β",
"ses": "β",
"sg": "β",
"shi": "β",
"si": "β",
"sk": "β",
"sl": "β",
"sn": "β",
"so": "β",
"sq": "β",
"sr": "β",
"sr_Cyrl": "β",
"sr_Latn": "β",
"ss": "β",
"sv": "β",
"sw": "β",
"ta": "β",
"te": "β",
"teo": "β",
"th": "β",
"ti": "β",
"tn": "β",
"to": "β",
"tr": "β",
"ts": "β",
"tzm": "β",
"ug": "βΉ",
"uk": "β",
"ur": "β",
"uz": "β",
"uz_Latn": "β",
"ve": "β",
"vi": "β",
"vo": "β",
"vun": "β",
"wae": "βΊ",
"xog": "β",
"yav": "Β»",
"yi": "'",
"yo": "β",
"zgh": "β",
"zh": "γ",
"zh_Hans": "β",
"zh_Hant": "γ",
"zu": "β",
}
DOUBLE_OPEN = {
"ALL": "β",
"af": "β",
"agq": "β",
"ak": "β",
"am": "Β«",
"ar": "β",
"as": "β",
"asa": "β",
"ast": "Β«",
"az": "Β«",
"bas": "Β«",
"be": "Β«",
"bem": "β",
"bez": "β",
"bg": "β",
"bm": "Β«",
"bn": "β",
"bo": "β",
"br": "Β«",
"brx": "β",
"bs": "β",
"ca": "Β«",
"cgg": "β",
"chr": "β",
"cs": "β",
"cy": "β",
"da": "Β»",
"de": "β",
"dsb": "β",
"dua": "Β«",
"dyo": "Β«",
"ee": "β",
"el": "Β«",
"en": "β",
"eo": "β",
"es": "Β«",
"et": "β",
"eu": "Β«",
"ewo": "Β«",
"fa": "Β«",
"ff": "β",
"fi": "β",
"fil": "β",
"fo": "β",
"fr": "Β«",
"fur": "β",
"ga": "β",
"gl": "β",
"gsw": "Β«",
"gu": "β",
"gv": "β",
"ha": "β",
"haw": "β",
"he": "β",
"hi": "β",
"hr": "β",
"hsb": "β",
"hu": "β",
"hy": "β",
"ia": "β",
"id": "β",
"ig": "β",
"ii": "β",
"is": "β",
"it": "Β«",
"ja": "γ",
"jgo": "Β«",
"jmc": "β",
"ka": "β",
"kab": "Β«",
"kde": "β",
"kea": "β",
"ki": "β",
"kk": "β",
"kkj": "Β«",
"kl": "β",
"km": "β",
"kn": "β",
"ko": "β",
"kok": "β",
"ksb": "β",
"ksf": "Β«",
"ksh": "β",
"kw": "β",
"ky": "Β«",
"lag": "β",
"lb": "β",
"lg": "β",
"ln": "β",
"lt": "β",
"lu": "β",
"luy": "β",
"lv": "Β«",
"mas": "β",
"mfe": "β",
"mg": "Β«",
"mk": "β",
"ml": "β",
"mr": "β",
"ms": "β",
"mt": "β",
"mua": "Β«",
"my": "β",
"naq": "β",
"nb": "Β«",
"nb_NO": "Β«",
"nd": "β",
"ne": "β",
"nl": "β",
"nmg": "β",
"nn": "Β«",
"nnh": "Β«",
"nr": "β",
"nso": "β",
"nyn": "β",
"om": "β",
"or": "β",
"os": "Β«",
"pa": "β",
"pl": "β",
"ps": "β",
"pt": "β",
"rm": "Β«",
"rn": "β",
"ro": "β",
"rof": "β",
"ru": "Β«",
"rw": "Β«",
"rwk": "β",
"saq": "β",
"se": "β",
"seh": "β",
"ses": "β",
"sg": "Β«",
"shi": "Β«",
"si": "β",
"sk": "β",
"sl": "β",
"sn": "β",
"so": "β",
"sq": "β",
"sr": "β",
"sr_Cyrl": "β",
"sr_Latn": "β",
"ss": "β",
"sv": "β",
"sw": "β",
"ta": "β",
"te": "β",
"teo": "β",
"th": "β",
"ti": "β",
"tn": "β",
"to": "β",
"tr": "β",
"ts": "β",
"tzm": "β",
"ug": "Β»",
"uk": "Β«",
"ur": "β",
"uz": "β",
"uz_Latn": "β",
"ve": "β",
"vi": "β",
"vo": "β",
"vun": "β",
"wae": "Β«",
"xog": "β",
"yav": "Β«",
"yi": '"',
"yo": "β",
"zgh": "Β«",
"zh": "γ",
"zh_Hans": "β",
"zh_Hant": "γ",
"zu": "β",
}
DOUBLE_CLOSE = {
"ALL": "β",
"af": "β",
"ak": "β",
"am": "Β»",
"ar": "β",
"as": "β",
"asa": "β",
"ast": "Β»",
"az": "Β»",
"bas": "Β»",
"be": "Β»",
"bem": "β",
"bez": "β",
"bg": "β",
"bm": "Β»",
"bn": "β",
"bo": "β",
"br": "Β»",
"brx": "β",
"bs": "β",
"ca": "Β»",
"cgg": "β",
"chr": "β",
"cs": "β",
"cy": "β",
"da": "Β«",
"de": "β",
"dsb": "β",
"dua": "Β»",
"dyo": "Β»",
"ee": "β",
"el": "Β»",
"en": "β",
"eo": "β",
"es": "Β»",
"et": "β",
"eu": "Β»",
"ewo": "Β»",
"fa": "Β»",
"ff": "β",
"fi": "β",
"fil": "β",
"fo": "β",
"fr": "Β»",
"fur": "β",
"ga": "β",
"gl": "β",
"gsw": "Β»",
"gu": "β",
"gv": "β",
"ha": "β",
"haw": "β",
"he": "β",
"hi": "β",
"hr": "β",
"hsb": "β",
"hu": "β",
"hy": "β",
"ia": "β",
"id": "β",
"ig": "β",
"ii": "β",
"is": "β",
"it": "Β»",
"ja": "γ",
"jgo": "Β»",
"jmc": "β",
"ka": "β",
"kab": "Β»",
"kde": "β",
"kea": "β",
"ki": "β",
"kk": "β",
"kkj": "Β»",
"kl": "β",
"km": "β",
"kn": "β",
"ko": "β",
"kok": "β",
"ksb": "β",
"ksf": "Β»",
"ksh": "β",
"kw": "β",
"ky": "Β»",
"lag": "β",
"lb": "β",
"lg": "β",
"ln": "β",
"lt": "β",
"lu": "β",
"luy": "β",
"lv": "Β»",
"mas": "β",
"mfe": "β",
"mg": "Β»",
"mk": "β",
"ml": "β",
"mr": "β",
"ms": "β",
"mt": "β",
"mua": "Β»",
"my": "β",
"naq": "β",
"nb": "Β»",
"nb_NO": "Β»",
"nd": "β",
"ne": "β",
"nl": "β",
"nn": "Β»",
"nnh": "Β»",
"nr": "β",
"nso": "β",
"nyn": "β",
"om": "β",
"or": "β",
"os": "Β»",
"pa": "β",
"pl": "β",
"ps": "β",
"pt": "β",
"rm": "Β»",
"rn": "β",
"ro": "β",
"rof": "β",
"ru": "Β»",
"rw": "Β»",
"rwk": "β",
"saq": "β",
"seh": "β",
"ses": "β",
"sg": "Β»",
"shi": "Β»",
"si": "β",
"sk": "β",
"sl": "β",
"sn": "β",
"so": "β",
"sq": "β",
"sr": "β",
"sr_Cyrl": "β",
"sr_Latn": "β",
"ss": "β",
"sv": "β",
"sw": "β",
"ta": "β",
"te": "β",
"teo": "β",
"th": "β",
"ti": "β",
"tn": "β",
"to": "β",
"tr": "β",
"ts": "β",
"tzm": "β",
"ug": "Β«",
"uk": "Β»",
"ur": "β",
"uz": "β",
"uz_Latn": "β",
"ve": "β",
"vi": "β",
"vo": "β",
"vun": "β",
"wae": "Β»",
"xog": "β",
"yav": "Β»",
"yi": '"',
"yo": "β",
"zgh": "Β»",
"zh": "γ",
"zh_Hans": "β",
"zh_Hant": "γ",
"zu": "β",
}
|
import asyncio
from base64 import b64encode
import logging
import aiohttp
import async_timeout
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_CONFIDENCE,
CONF_ENTITY_ID,
CONF_NAME,
CONF_SOURCE,
PLATFORM_SCHEMA,
)
from homeassistant.components.openalpr_local.image_processing import (
ImageProcessingAlprEntity,
)
from homeassistant.const import CONF_API_KEY, HTTP_OK
from homeassistant.core import split_entity_id
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
OPENALPR_API_URL = "https://api.openalpr.com/v1/recognize"
OPENALPR_REGIONS = [
"au",
"auwide",
"br",
"eu",
"fr",
"gb",
"kr",
"kr2",
"mx",
"sg",
"us",
"vn2",
]
CONF_REGION = "region"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_REGION): vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the OpenALPR cloud API platform."""
confidence = config[CONF_CONFIDENCE]
params = {
"secret_key": config[CONF_API_KEY],
"tasks": "plate",
"return_image": 0,
"country": config[CONF_REGION],
}
entities = []
for camera in config[CONF_SOURCE]:
entities.append(
OpenAlprCloudEntity(
camera[CONF_ENTITY_ID], params, confidence, camera.get(CONF_NAME)
)
)
async_add_entities(entities)
class OpenAlprCloudEntity(ImageProcessingAlprEntity):
"""Representation of an OpenALPR cloud entity."""
def __init__(self, camera_entity, params, confidence, name=None):
"""Initialize OpenALPR cloud API."""
super().__init__()
self._params = params
self._camera = camera_entity
self._confidence = confidence
if name:
self._name = name
else:
self._name = f"OpenAlpr {split_entity_id(camera_entity)[1]}"
@property
def confidence(self):
"""Return minimum confidence for send events."""
return self._confidence
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
async def async_process_image(self, image):
"""Process image.
This method is a coroutine.
"""
websession = async_get_clientsession(self.hass)
params = self._params.copy()
body = {"image_bytes": str(b64encode(image), "utf-8")}
try:
with async_timeout.timeout(self.timeout):
request = await websession.post(
OPENALPR_API_URL, params=params, data=body
)
data = await request.json()
if request.status != HTTP_OK:
_LOGGER.error("Error %d -> %s", request.status, data.get("error"))
return
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.error("Timeout for OpenALPR API")
return
# Processing API data
vehicles = 0
result = {}
for row in data["plate"]["results"]:
vehicles += 1
for p_data in row["candidates"]:
try:
result.update({p_data["plate"]: float(p_data["confidence"])})
except ValueError:
continue
self.async_process_plates(result, vehicles)
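# The parsing loop above assumes a response body shaped roughly like the following
# (a sketch inferred from this code, not an authoritative OpenALPR API reference;
# the plate and confidence values are made up):
# {
#     "plate": {
#         "results": [
#             {"candidates": [{"plate": "ABC123", "confidence": "94.2"}]},
#         ]
#     }
# }
# Each entry in "results" counts as one vehicle; every candidate is stored with its
# confidence cast to float, and candidates with non-numeric confidence are skipped.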
|
from homeassistant.components.water_heater import (
ATTR_TEMPERATURE,
STATE_ECO,
STATE_PERFORMANCE,
WaterHeaterEntity,
)
from homeassistant.const import STATE_OFF, TEMP_CELSIUS
from . import DOMAIN, WATER_HEATER, AtagEntity
SUPPORT_FLAGS_HEATER = 0
OPERATION_LIST = [STATE_OFF, STATE_ECO, STATE_PERFORMANCE]
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Initialize DHW device from config entry."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([AtagWaterHeater(coordinator, WATER_HEATER)])
class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
"""Representation of an ATAG water heater."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS_HEATER
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self.coordinator.atag.dhw.temperature
@property
def current_operation(self):
"""Return current operation."""
operation = self.coordinator.atag.dhw.current_operation
return operation if operation in self.operation_list else STATE_OFF
@property
def operation_list(self):
"""List of available operation modes."""
return OPERATION_LIST
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
if await self.coordinator.atag.dhw.set_temp(kwargs.get(ATTR_TEMPERATURE)):
self.async_write_ha_state()
@property
def target_temperature(self):
"""Return the setpoint if water demand, otherwise return base temp (comfort level)."""
return self.coordinator.atag.dhw.target_temperature
@property
def max_temp(self):
"""Return the maximum temperature."""
return self.coordinator.atag.dhw.max_temp
@property
def min_temp(self):
"""Return the minimum temperature."""
return self.coordinator.atag.dhw.min_temp
|
from itertools import combinations, groupby
import csv
import os
import time
import optparse
import logging
import dedupe
import exampleIO
def canonicalImport(filename):
preProcess = exampleIO.preProcess
data_d = {}
with open(filename) as f:
reader = csv.DictReader(f)
for (i, row) in enumerate(reader):
clean_row = {k: preProcess(v) for (k, v) in row.items()}
data_d[i] = clean_row
return data_d, reader.fieldnames
def evaluateDuplicates(found_dupes, true_dupes):
true_positives = found_dupes.intersection(true_dupes)
false_positives = found_dupes.difference(true_dupes)
print('found duplicates')
print(len(found_dupes))
print('precision')
print(1 - len(false_positives) / float(len(found_dupes)))
print('recall')
print(len(true_positives) / float(len(true_dupes)))
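# Worked example for the metrics above (hypothetical numbers): with 4 found pairs,
# 5 true pairs and 3 pairs in common, precision = 1 - 1/4 = 0.75 and recall = 3/5 = 0.6.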
if __name__ == '__main__':
optp = optparse.OptionParser()
optp.add_option('-v', '--verbose', dest='verbose', action='count',
help='Increase verbosity (specify multiple times for more)'
)
(opts, args) = optp.parse_args()
log_level = logging.WARNING
if opts.verbose is not None:
if opts.verbose == 1:
log_level = logging.INFO
elif opts.verbose >= 2:
log_level = logging.DEBUG
logging.getLogger().setLevel(log_level)
settings_file = 'canonical_learned_settings'
raw_data = 'tests/datasets/restaurant-nophone-training.csv'
data_d, header = canonicalImport(raw_data)
training_pairs = dedupe.training_data_dedupe(data_d,
'unique_id',
5000)
duplicates = set()
for _, pair in groupby(sorted(data_d.items(),
key=lambda x: x[1]['unique_id']),
key=lambda x: x[1]['unique_id']):
pair = list(pair)
if len(pair) == 2:
a, b = pair
duplicates.add(frozenset((a[0], b[0])))
t0 = time.time()
print('number of known duplicate pairs', len(duplicates))
if os.path.exists(settings_file):
with open(settings_file, 'rb') as f:
deduper = dedupe.StaticDedupe(f)
else:
fields = [{'field': 'name', 'type': 'String'},
{'field': 'name', 'type': 'Exact'},
{'field': 'address', 'type': 'String'},
{'field': 'cuisine', 'type': 'ShortString',
'has missing': True},
{'field': 'city', 'type': 'ShortString'}
]
deduper = dedupe.Dedupe(fields, num_cores=5)
deduper.prepare_training(data_d, sample_size=10000)
deduper.mark_pairs(training_pairs)
deduper.train(index_predicates=True)
with open(settings_file, 'wb') as f:
deduper.write_settings(f)
# print candidates
print('clustering...')
clustered_dupes = deduper.partition(data_d, threshold=0.5)
print('Evaluate Clustering')
confirm_dupes = set([])
for dupes, score in clustered_dupes:
for pair in combinations(dupes, 2):
confirm_dupes.add(frozenset(pair))
evaluateDuplicates(confirm_dupes, duplicates)
print('ran in ', time.time() - t0, 'seconds')
|
import argparse
import logging
import sys
from typing import List
from typing import Sequence
from typing import Tuple
import boto3
from boto3_type_annotations.ec2 import Client
from botocore.exceptions import ClientError
from kubernetes.client import V1DeleteOptions
from kubernetes.client import V1Node
from kubernetes.client.rest import ApiException
from paasta_tools.kubernetes_tools import get_all_nodes
from paasta_tools.kubernetes_tools import KubeClient
log = logging.getLogger(__name__)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Remove terminated Kubernetes nodes")
parser.add_argument(
"-v", "--verbose", action="store_true", dest="verbose", default=False
)
parser.add_argument(
"-n", "--dry-run", action="store_true", dest="dry_run", default=False
)
args = parser.parse_args()
return args
def nodes_for_cleanup(ec2_client: Client, nodes: Sequence[V1Node]) -> List[V1Node]:
not_ready = [
node
for node in nodes
if not is_node_ready(node)
and "node-role.kubernetes.io/master" not in node.metadata.labels
]
terminated = terminated_nodes(ec2_client, not_ready)
return terminated
def terminated_nodes(ec2_client: Client, nodes: Sequence[V1Node]) -> List[V1Node]:
instance_ids = [node.spec.provider_id.split("/")[-1] for node in nodes]
# if there are any instances that don't exist in the query to describe_instance_status
# then amazon won't return the results for any, so we have to query the instances
# one by one
statuses = [
does_instance_exist(ec2_client, instance_id) for instance_id in instance_ids
]
for node, status in zip(nodes, statuses):
log.debug(f"{node.metadata.name} exists: {status}")
return [node for node, status in zip(nodes, statuses) if not status]
def does_instance_exist(ec2_client: Client, instance_id: str):
try:
instance = ec2_client.describe_instance_status(InstanceIds=[instance_id])
if instance["InstanceStatuses"]:
status = instance["InstanceStatuses"][0]["InstanceState"]["Name"]
# see possible values at https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Client.describe_instance_status
# 'pending'|'running'|'shutting-down'|'terminated'|'stopping'|'stopped'
# it's unlikely that we'll ever be in this situation with a pending node - the common case is that
# the node is either running and been marked as not ready, or it's being shutdown
if status not in ("running", "pending"):
return False
return True
log.debug(
f"no instance status in response for {instance_id}; assuming to have been terminated"
)
return False
except ClientError as e:
if e.response["Error"]["Code"] == "InvalidInstanceID.NotFound":
log.debug(
f"instance {instance_id} not found; assuming to have been terminated"
)
return False
else:
log.error(f"error fetching instance status for {instance_id}")
raise e
return True
def terminate_nodes(
client: KubeClient, nodes: List[str]
) -> Tuple[List[str], List[Tuple[str, Exception]]]:
success = []
errors = []
for node in nodes:
try:
body = V1DeleteOptions()
client.core.delete_node(node, body=body, propagation_policy="foreground")
except ApiException as e:
errors.append((node, e))
continue
success.append(node)
return (success, errors)
def is_node_ready(node: V1Node) -> bool:
for condition in node.status.conditions:
if condition.type == "Ready":
return condition.status == "True"
log.error(
f"no KubeletReady condition found for node {node.metadata.name}. Conditions {node.status.conditions}"
)
return True
def main() -> None:
args = parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
dry_run = args.dry_run
kube_client = KubeClient()
all_nodes = get_all_nodes(kube_client)
log.debug(f"found nodes in cluster {[node.metadata.name for node in all_nodes]}")
# we depend on iam credentials existing on the host for this to run.
# anywhere else, and you'll need to set credentials using environment variables
# we also make the assumption that all nodes are in the same region here
region = all_nodes[0].metadata.labels["failure-domain.beta.kubernetes.io/region"]
ec2_client = boto3.client("ec2", region)
filtered_nodes = nodes_for_cleanup(ec2_client, all_nodes)
if logging.DEBUG >= logging.root.level:
log.debug(
f"nodes to be deleted: {[node.metadata.name for node in filtered_nodes]}"
)
if not dry_run:
success, errors = terminate_nodes(
kube_client, [node.metadata.name for node in filtered_nodes]
)
else:
success, errors = [], []
log.info("dry run mode detected: not deleting nodes")
for node_name in success:
log.info(f"successfully deleted node {node_name}")
for node_name, exception in errors:
log.error(f"error deleting node: {node_name}: {exception}")
if errors:
sys.exit(1)
if __name__ == "__main__":
main()
|
import time
def uts(dt):
return int(time.mktime(dt.timetuple()))
class Decoder(object):
"""
Decode metrics incoming from tank into points for InfluxDB client
Parameters
----------
parent_tags : dict
common per-test tags
tank_tag : str
tank identifier tag
uuid : str
test id tag
labeled : bool
detailed stats for each label
histograms : bool
response time histograms measurements
"""
def __init__(self, tank_tag, uuid, parent_tags, labeled, histograms):
self.labeled = labeled
initial_tags = {
"tank": tank_tag,
"uuid": uuid
}
initial_tags.update(parent_tags)
self.tags = initial_tags
self.histograms = histograms
def set_uuid(self, id_):
self.tags['uuid'] = id_
def decode_monitoring(self, data):
"""
Monitoring and aggregate data arrive in significantly different formats,
which is why they are decoded by two separate methods.
"""
points = list()
for second_data in data:
for host, host_data in second_data["data"].items():
points.append(
self.__make_points(
"monitoring",
{"host": host, "comment": host_data.get("comment")},
second_data["timestamp"],
{
# cast int to float. avoid https://github.com/yandex/yandex-tank/issues/776
metric: float(value) if isinstance(value, int) else value
for metric, value in host_data["metrics"].items()
}
)
)
return points
def decode_aggregates(self, aggregated_data, gun_stats, prefix):
ts = aggregated_data["ts"]
points = list()
# stats overall w/ __OVERALL__ label
points += self.__make_points_for_label(
ts,
aggregated_data["overall"],
"__OVERALL__",
prefix,
gun_stats
)
# detailed stats per tag
if self.labeled:
for label, aggregated_data_by_tag in aggregated_data["tagged"].items():
points += self.__make_points_for_label(
ts,
aggregated_data_by_tag,
label,
prefix,
gun_stats
)
return points
def __make_points_for_label(self, ts, data, label, prefix, gun_stats):
"""x
Make a set of points for `this` label
overall_quantiles, overall_meta, net_codes, proto_codes, histograms
"""
label_points = list()
label_points.extend(
(
# overall quantiles for label
self.__make_points(
prefix + "overall_quantiles",
{"label": label},
ts,
self.__make_quantile_fields(data)
),
# overall meta (gun status) for label
self.__make_points(
prefix + "overall_meta",
{"label": label},
ts,
self.__make_overall_meta_fields(data, gun_stats)
),
# net codes for label
self.__make_points(
prefix + "net_codes",
{"label": label},
ts,
self.__make_netcodes_fields(data)
),
# proto codes for label
self.__make_points(
prefix + "proto_codes",
{"label": label},
ts,
self.__make_protocodes_fields(data)
)
)
)
# histograms, one row for each bin
if self.histograms:
for bin_, count in zip(data["interval_real"]["hist"]["bins"],
data["interval_real"]["hist"]["data"]):
label_points.append(
self.__make_points(
prefix + "histograms",
{"label": label},
ts,
{"bin": bin_, "count": count}
)
)
return label_points
@staticmethod
def __make_quantile_fields(data):
return {
'q' + str(q): value / 1000.0
for q, value in zip(data["interval_real"]["q"]["q"],
data["interval_real"]["q"]["value"])
}
@staticmethod
def __make_overall_meta_fields(data, stats):
return {
"active_threads": stats["metrics"]["instances"],
"RPS": data["interval_real"]["len"],
"planned_requests": float(stats["metrics"]["reqps"]),
}
@staticmethod
def __make_netcodes_fields(data):
return {
str(code): int(cnt)
for code, cnt in data["net_code"]["count"].items()
}
@staticmethod
def __make_protocodes_fields(data):
return {
str(code): int(cnt)
for code, cnt in data["proto_code"]["count"].items()
}
def __make_points(self, measurement, additional_tags, ts, fields):
"""
Parameters
----------
measurement : string
measurement type (e.g. monitoring, overall_meta, net_codes, proto_codes, overall_quantiles)
additional_tags : dict
custom additional tags for these points
ts : integer
timestamp
fields : dict
influxdb columns
Returns
-------
dict
points for InfluxDB client
"""
tags = self.tags.copy()
tags.update(additional_tags)
return {
"measurement": measurement,
"tags": tags,
"time": int(ts),
"fields": fields,
}
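# Minimal usage sketch (hypothetical values, not part of the original module):
# decoder = Decoder(tank_tag="tank-1", uuid="test-42", parent_tags={},
#                   labeled=True, histograms=False)
# points = decoder.decode_monitoring([
#     {"timestamp": 1577836800,
#      "data": {"host-1": {"comment": "", "metrics": {"cpu": 42}}}},
# ])
# Each resulting point carries the common tags plus host/comment tags, the given
# timestamp, and the metrics as fields with integer values cast to float.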
|
import unittest
import numpy as np
from pykalman import KalmanFilter
from pykalman import UnscentedKalmanFilter
from pykalman.sqrt import CholeskyKalmanFilter, AdditiveUnscentedKalmanFilter
class TestPyKalman(unittest.TestCase):
def test_kalman_filter(self):
kf = KalmanFilter(transition_matrices = [[1, 1], [0, 1]], observation_matrices = [[0.1, 0.5], [-0.3, 0.0]])
measurements = np.asarray([[1,0], [0,0], [0,1]]) # 3 observations
kf = kf.em(measurements, n_iter=5)
(filtered_state_means, filtered_state_covariances) = kf.filter(measurements)
(smoothed_state_means, smoothed_state_covariances) = kf.smooth(measurements)
return filtered_state_means
def test_kalman_missing(self):
kf = KalmanFilter(transition_matrices = [[1, 1], [0, 1]], observation_matrices = [[0.1, 0.5], [-0.3, 0.0]])
measurements = np.asarray([[1,0], [0,0], [0,1]]) # 3 observations
measurements = np.ma.asarray(measurements)
measurements[1] = np.ma.masked
kf = kf.em(measurements, n_iter=5)
(filtered_state_means, filtered_state_covariances) = kf.filter(measurements)
(smoothed_state_means, smoothed_state_covariances) = kf.smooth(measurements)
return filtered_state_means
def test_unscented_kalman(self):
ukf = UnscentedKalmanFilter(lambda x, w: x + np.sin(w), lambda x, v: x + v, transition_covariance=0.1)
(filtered_state_means, filtered_state_covariances) = ukf.filter([0, 1, 2])
(smoothed_state_means, smoothed_state_covariances) = ukf.smooth([0, 1, 2])
return filtered_state_means
def test_online_update(self):
kf = KalmanFilter(transition_matrices = [[1, 1], [0, 1]], observation_matrices = [[0.1, 0.5], [-0.3, 0.0]])
measurements = np.asarray([[1,0], [0,0], [0,1]]) # 3 observations
measurements = np.ma.asarray(measurements)
measurements[1] = np.ma.masked # measurement at timestep 1 is unobserved
kf = kf.em(measurements, n_iter=5)
(filtered_state_means, filtered_state_covariances) = kf.filter(measurements)
for t in range(1, 3):
filtered_state_means[t], filtered_state_covariances[t] = \
kf.filter_update(filtered_state_means[t-1], filtered_state_covariances[t-1], measurements[t])
return filtered_state_means
def test_robust_sqrt(self):
kf = CholeskyKalmanFilter(transition_matrices = [[1, 1], [0, 1]], observation_matrices = [[0.1, 0.5], [-0.3, 0.0]])
ukf = AdditiveUnscentedKalmanFilter(lambda x, w: x + np.sin(w), lambda x, v: x + v, observation_covariance=0.1)
|
import rumps
from rumps import *
@clicked('Testing')
def tester(sender):
sender.state = not sender.state
class SomeApp(rumps.App):
def __init__(self):
super(SomeApp, self).__init__(type(self).__name__, menu=['On', 'Testing'])
rumps.debug_mode(True)
@clicked('On')
def button(self, sender):
sender.title = 'Off' if sender.title == 'On' else 'On'
Window("I can't think of a good example app...").run()
if __name__ == "__main__":
SomeApp().run()
|
import unittest
import chainer
from chainer import testing
from chainer.testing import attr
from chainercv.links.model.deeplab import SeparableASPP
class TestSeparableASPP(unittest.TestCase):
def setUp(self):
self.in_channels = 128
self.out_channels = 32
self.link = SeparableASPP(
self.in_channels, self.out_channels)
def check_call(self):
xp = self.link.xp
x = chainer.Variable(xp.random.uniform(
low=-1, high=1, size=(2, self.in_channels, 64, 64)
).astype(xp.float32))
y = self.link(x)
self.assertIsInstance(y, chainer.Variable)
self.assertIsInstance(y.data, xp.ndarray)
self.assertEqual(y.shape, (2, self.out_channels, 64, 64))
@attr.slow
def test_call_cpu(self):
self.check_call()
@attr.gpu
@attr.slow
def test_call_gpu(self):
self.link.to_gpu()
self.check_call()
testing.run_module(__name__, __file__)
|
import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hostname(host):
assert 'instance' == host.check_output('hostname -s')
def test_etc_molecule_directory(host):
f = host.file('/etc/molecule')
assert f.is_directory
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o755
def test_etc_molecule_ansible_hostname_file(host):
f = host.file('/etc/molecule/instance')
assert f.is_file
assert f.user == 'root'
assert f.group == 'root'
assert f.mode == 0o644
|
import json
import unittest
from Tests.test_api.base import RestAPITestBase
from kalliope._version import version_str
class TestMainView(RestAPITestBase):
def test_server_is_up_and_running(self):
# response = urllib2.urlopen(self.get_server_url())
response = self.client.get(self.get_server_url())
self.assertEqual(response.status_code, 200)
def test_get_main_page(self):
url = self.get_server_url() + "/"
response = self.client.get(url)
expected_content = {
"Kalliope version": "%s" % version_str
}
self.assertEqual(json.dumps(expected_content, sort_keys=True),
json.dumps(json.loads(response.get_data().decode('utf-8')), sort_keys=True))
if __name__ == '__main__':
unittest.main()
|
import logging
from pprint import pformat
from homeassistant.components.cover import (
ATTR_POSITION,
DEVICE_CLASS_GARAGE,
CoverEntity,
)
from homeassistant.components.supla import (
DOMAIN,
SUPLA_COORDINATORS,
SUPLA_SERVERS,
SuplaChannel,
)
_LOGGER = logging.getLogger(__name__)
SUPLA_SHUTTER = "CONTROLLINGTHEROLLERSHUTTER"
SUPLA_GATE = "CONTROLLINGTHEGATE"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Supla covers."""
if discovery_info is None:
return
_LOGGER.debug("Discovery: %s", pformat(discovery_info))
entities = []
for device in discovery_info:
device_name = device["function_name"]
server_name = device["server_name"]
if device_name == SUPLA_SHUTTER:
entities.append(
SuplaCover(
device,
hass.data[DOMAIN][SUPLA_SERVERS][server_name],
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name],
)
)
elif device_name == SUPLA_GATE:
entities.append(
SuplaGateDoor(
device,
hass.data[DOMAIN][SUPLA_SERVERS][server_name],
hass.data[DOMAIN][SUPLA_COORDINATORS][server_name],
)
)
async_add_entities(entities)
class SuplaCover(SuplaChannel, CoverEntity):
"""Representation of a Supla Cover."""
@property
def current_cover_position(self):
"""Return current position of cover. 0 is closed, 100 is open."""
state = self.channel_data.get("state")
if state:
return 100 - state["shut"]
return None
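# For example, a channel state of {"shut": 30} is reported as position 70
# (mostly open) and {"shut": 100} as position 0 (fully closed).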
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION))
@property
def is_closed(self):
"""Return if the cover is closed."""
if self.current_cover_position is None:
return None
return self.current_cover_position == 0
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self.async_action("REVEAL")
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self.async_action("SHUT")
async def async_stop_cover(self, **kwargs):
"""Stop the cover."""
await self.async_action("STOP")
class SuplaGateDoor(SuplaChannel, CoverEntity):
"""Representation of a Supla gate door."""
@property
def is_closed(self):
"""Return if the gate is closed or not."""
state = self.channel_data.get("state")
if state and "hi" in state:
return state.get("hi")
return None
async def async_open_cover(self, **kwargs) -> None:
"""Open the gate."""
if self.is_closed:
await self.async_action("OPEN_CLOSE")
async def async_close_cover(self, **kwargs) -> None:
"""Close the gate."""
if not self.is_closed:
await self.async_action("OPEN_CLOSE")
async def async_stop_cover(self, **kwargs) -> None:
"""Stop the gate."""
await self.async_action("OPEN_CLOSE")
async def async_toggle(self, **kwargs) -> None:
"""Toggle the gate."""
await self.async_action("OPEN_CLOSE")
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS_GARAGE
|
import sys
from json_parser import Lark_StandAlone, Transformer, inline_args
class TreeToJson(Transformer):
@inline_args
def string(self, s):
return s[1:-1].replace('\\"', '"')
array = list
pair = tuple
object = dict
number = inline_args(float)
null = lambda self, _: None
true = lambda self, _: True
false = lambda self, _: False
parser = Lark_StandAlone(transformer=TreeToJson())
if __name__ == '__main__':
with open(sys.argv[1]) as f:
print(parser.parse(f.read()))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import posixpath
PACKAGE_NAME = 'openfoam'
OPENFOAM_ROOT = '/opt/openfoam7'
"""Needed for downloading OpenFOAM."""
_OPENFOAM_REPOSITORY_URL = 'http://dl.openfoam.org/ubuntu'
_OPENFOAM_REPOSITORY_KEY = 'openfoam.key'
def YumInstall(vm):
del vm
raise NotImplementedError()
def AptInstall(vm):
"""Install OpenFOAM https://openfoam.org/download/7-ubuntu/."""
remote_key_file = '/tmp/openfoam.key'
vm.PushDataFile(_OPENFOAM_REPOSITORY_KEY, remote_key_file)
vm.RemoteCommand('sudo apt-key add {0}; rm {0}'.format(remote_key_file))
vm.RemoteCommand('sudo add-apt-repository {}'
.format(_OPENFOAM_REPOSITORY_URL))
vm.RemoteCommand('sudo apt-get -y update')
vm.Install('build_tools')
vm.InstallPackages('openfoam7')
openfoam_bash_path = posixpath.join(OPENFOAM_ROOT, 'etc/bashrc')
# Separate commands since $WM_PROJECT_DIR comes from the OpenFOAM bashrc.
vm.RemoteCommand('cat {} | tee $HOME/.bashrc'.format(openfoam_bash_path))
vm.RemoteCommand('cat {} | tee -a $HOME/.bashrc'.format(
'$WM_PROJECT_DIR/bin/tools/RunFunctions'))
|
import time
import dns
import dns.exception
import dns.name
import dns.query
import dns.resolver
from dyn.tm.errors import (
DynectCreateError,
DynectDeleteError,
DynectGetError,
DynectUpdateError,
)
from dyn.tm.session import DynectSession
from dyn.tm.zones import Node, Zone, get_all_zones
from flask import current_app
from lemur.extensions import metrics, sentry
def get_dynect_session():
try:
dynect_session = DynectSession(
current_app.config.get("ACME_DYN_CUSTOMER_NAME", ""),
current_app.config.get("ACME_DYN_USERNAME", ""),
current_app.config.get("ACME_DYN_PASSWORD", ""),
)
except Exception as e:
sentry.captureException()
metrics.send("get_dynect_session_fail", "counter", 1)
current_app.logger.debug("Unable to establish connection to Dyn", exc_info=True)
raise
return dynect_session
def _has_dns_propagated(fqdn, token):
txt_records = []
try:
dns_resolver = dns.resolver.Resolver()
dns_resolver.nameservers = [get_authoritative_nameserver(fqdn)]
dns_response = dns_resolver.query(fqdn, "TXT")
for rdata in dns_response:
for txt_record in rdata.strings:
txt_records.append(txt_record.decode("utf-8"))
except dns.exception.DNSException:
metrics.send("has_dns_propagated_fail", "counter", 1, metric_tags={"dns": fqdn})
return False
for txt_record in txt_records:
if txt_record == token:
metrics.send("has_dns_propagated_success", "counter", 1, metric_tags={"dns": fqdn})
return True
return False
def wait_for_dns_change(change_id, account_number=None):
fqdn, token = change_id
number_of_attempts = 20
for attempts in range(0, number_of_attempts):
status = _has_dns_propagated(fqdn, token)
current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status))
if status:
metrics.send("wait_for_dns_change_success", "counter", 1, metric_tags={"dns": fqdn})
break
time.sleep(10)
if not status:
# TODO: Delete associated DNS text record here
metrics.send("wait_for_dns_change_fail", "counter", 1, metric_tags={"dns": fqdn})
sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)})
metrics.send(
"wait_for_dns_change_error",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": token},
)
return
def get_zone_name(domain):
zones = get_all_zones()
zone_name = ""
for z in zones:
if domain.endswith(z.name):
# Find the most specific zone possible for the domain
# Ex: If fqdn is a.b.c.com, there is a zone for c.com,
# and a zone for b.c.com, we want to use b.c.com.
if z.name.count(".") > zone_name.count("."):
zone_name = z.name
if not zone_name:
metrics.send("dyn_no_zone_name", "counter", 1)
raise Exception("No Dyn zone found for domain: {}".format(domain))
return zone_name
def get_zones(account_number):
get_dynect_session()
zones = get_all_zones()
zone_list = []
for zone in zones:
zone_list.append(zone.name)
return zone_list
def create_txt_record(domain, token, account_number):
get_dynect_session()
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
try:
zone.add_record(
node_name, record_type="TXT", txtdata='"{}"'.format(token), ttl=5
)
zone.publish()
current_app.logger.debug(
"TXT record created: {0}, token: {1}".format(fqdn, token)
)
except (DynectCreateError, DynectUpdateError) as e:
if "Cannot duplicate existing record data" in e.message:
current_app.logger.debug(
"Unable to add record. Domain: {}. Token: {}. "
"Record already exists: {}".format(domain, token, e),
exc_info=True,
)
else:
metrics.send("create_txt_record_error", "counter", 1)
sentry.captureException()
raise
change_id = (fqdn, token)
return change_id
def delete_txt_record(change_id, account_number, domain, token):
get_dynect_session()
if not domain:
current_app.logger.debug("delete_txt_record: No domain passed")
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
try:
all_txt_records = node.get_all_records_by_type("TXT")
except DynectGetError:
metrics.send("delete_txt_record_geterror", "counter", 1)
# No Text Records remain or host is not in the zone anymore because all records have been deleted.
return
for txt_record in all_txt_records:
if txt_record.txtdata == ("{}".format(token)):
current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn))
try:
txt_record.delete()
except DynectDeleteError:
sentry.captureException(
extra={
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_deleteerror",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata},
)
try:
zone.publish()
except DynectUpdateError:
sentry.captureException(
extra={
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_publish_error",
"counter",
1,
metric_tags={"fqdn": str(fqdn), "txt_record": str(txt_record.txtdata)},
)
def delete_acme_txt_records(domain):
get_dynect_session()
if not domain:
current_app.logger.debug("delete_acme_txt_records: No domain passed")
return
acme_challenge_string = "_acme-challenge"
if not domain.startswith(acme_challenge_string):
current_app.logger.debug(
"delete_acme_txt_records: Domain {} doesn't start with string {}. "
"Cowardly refusing to delete TXT records".format(
domain, acme_challenge_string
)
)
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
all_txt_records = node.get_all_records_by_type("TXT")
for txt_record in all_txt_records:
current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn))
try:
txt_record.delete()
except DynectDeleteError:
sentry.captureException(
extra={
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_deleteerror",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata},
)
zone.publish()
def get_authoritative_nameserver(domain):
if current_app.config.get("ACME_DYN_GET_AUTHORATATIVE_NAMESERVER"):
n = dns.name.from_text(domain)
depth = 2
default = dns.resolver.get_default_resolver()
nameserver = default.nameservers[0]
last = False
while not last:
s = n.split(depth)
last = s[0].to_unicode() == u"@"
sub = s[1]
query = dns.message.make_query(sub, dns.rdatatype.NS)
response = dns.query.udp(query, nameserver)
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
metrics.send("get_authoritative_nameserver_error", "counter", 1)
if rcode == dns.rcode.NXDOMAIN:
raise Exception("%s does not exist." % sub)
else:
raise Exception("Error %s" % dns.rcode.to_text(rcode))
if len(response.authority) > 0:
rrset = response.authority[0]
else:
rrset = response.answer[0]
rr = rrset[0]
if rr.rdtype != dns.rdatatype.SOA:
authority = rr.target
nameserver = default.query(authority).rrset[0].to_text()
depth += 1
return nameserver
else:
return "8.8.8.8"
|
import os
import threading
import sys
import atexit
from contextlib import contextmanager
from plumbum.machines.local import local
from plumbum.lib import six
if not hasattr(threading, "get_ident"):
try:
import thread
except ImportError:
import _thread as thread
threading.get_ident = thread.get_ident
del thread
try:
import fcntl
except ImportError:
import msvcrt
try:
from pywintypes import error as WinError
from win32file import LockFileEx, UnlockFile, OVERLAPPED
from win32con import LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY
except ImportError:
raise ImportError(
"On Windows, we require Python for Windows Extensions (pywin32)")
@contextmanager
def locked_file(fileno, blocking=True):
hndl = msvcrt.get_osfhandle(fileno)
try:
LockFileEx(
hndl, LOCKFILE_EXCLUSIVE_LOCK |
(0 if blocking else LOCKFILE_FAIL_IMMEDIATELY), 0xffffffff,
0xffffffff, OVERLAPPED())
except WinError:
_, ex, _ = sys.exc_info()
raise WindowsError(*ex.args)
try:
yield
finally:
UnlockFile(hndl, 0, 0, 0xffffffff, 0xffffffff)
else:
if hasattr(fcntl, "lockf"):
@contextmanager
def locked_file(fileno, blocking=True):
fcntl.lockf(fileno,
fcntl.LOCK_EX | (0 if blocking else fcntl.LOCK_NB))
try:
yield
finally:
fcntl.lockf(fileno, fcntl.LOCK_UN)
else:
@contextmanager
def locked_file(fileno, blocking=True):
fcntl.flock(fileno,
fcntl.LOCK_EX | (0 if blocking else fcntl.LOCK_NB))
try:
yield
finally:
fcntl.flock(fileno, fcntl.LOCK_UN)
class AtomicFile(object):
"""
Atomic file operations implemented using file-system advisory locks (``flock`` on POSIX,
``LockFile`` on Windows).
.. note::
On Linux, the manpage says ``flock`` might have issues with NFS mounts. You should
take this into account.
.. versionadded:: 1.3
"""
CHUNK_SIZE = 32 * 1024
def __init__(self, filename, ignore_deletion=False):
self.path = local.path(filename)
self._ignore_deletion = ignore_deletion
self._thdlock = threading.Lock()
self._owned_by = None
self._fileobj = None
self.reopen()
def __repr__(self):
return "<AtomicFile: %s>" % (
self.path, ) if self._fileobj else "<AtomicFile: closed>"
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, t, v, tb):
self.close()
def close(self):
if self._fileobj is not None:
self._fileobj.close()
self._fileobj = None
def reopen(self):
"""
Close and reopen the file; useful when the file was deleted from the file system
by a different process
"""
self.close()
self._fileobj = os.fdopen(
os.open(str(self.path), os.O_CREAT | os.O_RDWR, 384), "r+b", 0)  # mode 384 == 0o600 (owner read/write)
@contextmanager
def locked(self, blocking=True):
"""
        A context manager that locks the file; it is reentrant for the thread that
        currently holds the lock.
:param blocking: if ``True``, the call will block until we can grab the file system lock.
if ``False``, the call may fail immediately with the underlying exception
(``IOError`` or ``WindowsError``)
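        A minimal non-blocking sketch (illustrative; ``af`` is assumed to be an
        ``AtomicFile`` instance)::
            try:
                with af.locked(blocking=False):
                    data = af.read_shared()
            except (IOError, OSError):
                pass  # another process currently holds the lock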
"""
if self._owned_by == threading.get_ident():
yield
return
with self._thdlock:
with locked_file(self._fileobj.fileno(), blocking):
if not self.path.exists() and not self._ignore_deletion:
raise ValueError("Atomic file removed from filesystem")
self._owned_by = threading.get_ident()
try:
yield
finally:
self._owned_by = None
def delete(self):
"""
Atomically delete the file (holds the lock while doing it)
"""
with self.locked():
self.path.delete()
def _read_all(self):
self._fileobj.seek(0)
data = []
while True:
buf = self._fileobj.read(self.CHUNK_SIZE)
data.append(buf)
if len(buf) < self.CHUNK_SIZE:
break
return six.b("").join(data)
def read_atomic(self):
"""Atomically read the entire file"""
with self.locked():
return self._read_all()
def read_shared(self):
"""Read the file **without** holding the lock"""
return self._read_all()
def write_atomic(self, data):
"""Writes the given data atomically to the file. Note that it overwrites the entire file;
``write_atomic("foo")`` followed by ``write_atomic("bar")`` will result in only ``"bar"``.
"""
with self.locked():
self._fileobj.seek(0)
while data:
chunk = data[:self.CHUNK_SIZE]
self._fileobj.write(chunk)
data = data[len(chunk):]
self._fileobj.flush()
self._fileobj.truncate()
class AtomicCounterFile(object):
"""
An atomic counter based on AtomicFile. Each time you call ``next()``, it will
atomically read and increment the counter's value, returning its previous value
Example::
acf = AtomicCounterFile.open("/some/file")
        print(acf.next())  # e.g., 7
        print(acf.next())  # 8
        print(acf.next())  # 9
.. versionadded:: 1.3
"""
def __init__(self, atomicfile, initial=0):
"""
:param atomicfile: an :class:`AtomicFile <plumbum.atomic.AtomicFile>` instance
:param initial: the initial value (used when the first time the file is created)
"""
self.atomicfile = atomicfile
self.initial = initial
def __enter__(self):
return self
def __exit__(self, t, v, tb):
self.close()
def close(self):
self.atomicfile.close()
@classmethod
def open(cls, filename):
"""
Shortcut for ``AtomicCounterFile(AtomicFile(filename))``
"""
return cls(AtomicFile(filename))
def reset(self, value=None):
"""
Reset the counter's value to the one given. If ``None``, it will default to the
initial value provided to the constructor
"""
if value is None:
value = self.initial
if not isinstance(value, six.integer_types):
raise TypeError(
"value must be an integer, not %r" % (type(value), ))
self.atomicfile.write_atomic(str(value).encode("utf8"))
def next(self):
"""
Read and increment the counter, returning its previous value
"""
with self.atomicfile.locked():
curr = self.atomicfile.read_atomic().decode("utf8")
if not curr:
curr = self.initial
else:
curr = int(curr)
self.atomicfile.write_atomic(str(curr + 1).encode("utf8"))
return curr
class PidFileTaken(SystemExit):
"""
This exception is raised when PidFile.acquire fails to lock the pid file. Note that it
derives from ``SystemExit``, so unless explicitly handled, it will terminate the process
cleanly
"""
def __init__(self, msg, pid):
SystemExit.__init__(self, msg)
self.pid = pid
class PidFile(object):
"""
A PID file is a file that's locked by some process from the moment it starts until it dies
(the OS will clear the lock when the process exits). It is used to prevent two instances
of the same process (normally a daemon) from running concurrently. The PID file holds its
process' PID, so you know who's holding it.
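    A minimal usage sketch (illustrative; the path is made up and ``run_daemon()``
    is a hypothetical function)::
        pidfile = PidFile("/var/run/mydaemon.pid")
        pidfile.acquire()   # raises PidFileTaken if another instance holds it
        try:
            run_daemon()
        finally:
            pidfile.release()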
.. versionadded:: 1.3
"""
def __init__(self, filename):
self.atomicfile = AtomicFile(filename)
self._ctx = None
def __enter__(self):
self.acquire()
def __exit__(self, t, v, tb):
self.release()
def __del__(self):
try:
self.release()
except Exception:
pass
def close(self):
self.atomicfile.close()
def acquire(self):
"""
Attempt to acquire the PID file. If it's already locked, raises
:class:`PidFileTaken <plumbum.atomic.PidFileTaken>`. You should normally acquire
the file as early as possible when the program starts
"""
if self._ctx is not None:
return
self._ctx = self.atomicfile.locked(blocking=False)
try:
self._ctx.__enter__()
except (IOError, OSError):
self._ctx = None
try:
pid = self.atomicfile.read_shared().strip().decode("utf8")
except (IOError, OSError):
pid = "Unknown"
raise PidFileTaken(
"PID file %r taken by process %s" % (self.atomicfile.path,
pid), pid)
else:
self.atomicfile.write_atomic(str(os.getpid()).encode("utf8"))
atexit.register(self.release)
def release(self):
"""
Release the PID file (should only happen when the program terminates)
"""
if self._ctx is None:
return
self.atomicfile.delete()
try:
self._ctx.__exit__(None, None, None)
finally:
self._ctx = None
|
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
from nikola.utils import req_missing
try:
import micawber
except ImportError:
micawber = None
class Plugin(RestExtension):
"""Plugin for reST media directive."""
name = "rest_media"
def set_site(self, site):
"""Set Nikola site."""
self.site = site
directives.register_directive('media', Media)
self.site.register_shortcode('media', _gen_media_embed)
return super().set_site(site)
class Media(Directive):
"""reST extension for inserting any sort of media using micawber."""
has_content = False
required_arguments = 1
optional_arguments = 999
def run(self):
"""Run media directive."""
html = _gen_media_embed(" ".join(self.arguments))
return [nodes.raw('', html, format='html')]
def _gen_media_embed(url, *q, **kw):
if micawber is None:
msg = req_missing(['micawber'], 'use the media directive', optional=True)
return '<div class="text-error">{0}</div>'.format(msg)
providers = micawber.bootstrap_basic()
return micawber.parse_text(url, providers)
|
from datetime import timedelta
import logging
from fixerio import Fixerio
from fixerio.exceptions import FixerioException
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_EXCHANGE_RATE = "Exchange rate"
ATTR_TARGET = "Target currency"
ATTRIBUTION = "Data provided by the European Central Bank (ECB)"
CONF_TARGET = "target"
DEFAULT_BASE = "USD"
DEFAULT_NAME = "Exchange rate"
ICON = "mdi:currency-usd"
SCAN_INTERVAL = timedelta(days=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_TARGET): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
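# A hypothetical configuration.yaml entry matching the schema above
# (illustrative sketch only; values are placeholders):
#
#   sensor:
#     - platform: fixer
#       api_key: YOUR_FIXER_API_KEY
#       target: EUR
#       name: EUR exchange rate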
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Fixer.io sensor."""
api_key = config.get(CONF_API_KEY)
name = config.get(CONF_NAME)
target = config.get(CONF_TARGET)
try:
Fixerio(symbols=[target], access_key=api_key).latest()
except FixerioException:
_LOGGER.error("One of the given currencies is not supported")
return
data = ExchangeData(target, api_key)
add_entities([ExchangeRateSensor(data, name, target)], True)
class ExchangeRateSensor(Entity):
"""Representation of a Exchange sensor."""
def __init__(self, data, name, target):
"""Initialize the sensor."""
self.data = data
self._target = target
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._target
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self.data.rate is not None:
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_EXCHANGE_RATE: self.data.rate["rates"][self._target],
ATTR_TARGET: self._target,
}
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON
def update(self):
"""Get the latest data and updates the states."""
self.data.update()
self._state = round(self.data.rate["rates"][self._target], 3)
class ExchangeData:
"""Get the latest data and update the states."""
def __init__(self, target_currency, api_key):
"""Initialize the data object."""
self.api_key = api_key
self.rate = None
self.target_currency = target_currency
self.exchange = Fixerio(symbols=[self.target_currency], access_key=self.api_key)
def update(self):
"""Get the latest data from Fixer.io."""
self.rate = self.exchange.latest()
|
from flask_sqlalchemy import SQLAlchemy as SA
class SQLAlchemy(SA):
def apply_pool_defaults(self, app, options):
SA.apply_pool_defaults(self, app, options)
options["pool_pre_ping"] = True
db = SQLAlchemy()
from flask_migrate import Migrate
migrate = Migrate()
from flask_bcrypt import Bcrypt
bcrypt = Bcrypt()
from flask_principal import Principal
principal = Principal(use_sessions=False)
from flask_mail import Mail
smtp_mail = Mail()
from lemur.metrics import Metrics
metrics = Metrics()
from raven.contrib.flask import Sentry
sentry = Sentry()
from blinker import Namespace
signals = Namespace()
from flask_cors import CORS
cors = CORS()
|
from datetime import datetime
import logging
from typing import Any, Dict, Optional
from surepy import SureLocationID, SureProductID
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_CONNECTIVITY,
DEVICE_CLASS_PRESENCE,
BinarySensorEntity,
)
from homeassistant.const import CONF_ID, CONF_TYPE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import SurePetcareAPI
from .const import DATA_SURE_PETCARE, SPC, TOPIC_UPDATE
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None
) -> None:
"""Set up Sure PetCare Flaps sensors based on a config entry."""
if discovery_info is None:
return
entities = []
spc = hass.data[DATA_SURE_PETCARE][SPC]
for thing in spc.ids:
sure_id = thing[CONF_ID]
sure_type = thing[CONF_TYPE]
# connectivity
if sure_type in [
SureProductID.CAT_FLAP,
SureProductID.PET_FLAP,
SureProductID.FEEDER,
]:
entities.append(DeviceConnectivity(sure_id, sure_type, spc))
if sure_type == SureProductID.PET:
entity = Pet(sure_id, spc)
elif sure_type == SureProductID.HUB:
entity = Hub(sure_id, spc)
else:
continue
entities.append(entity)
async_add_entities(entities, True)
class SurePetcareBinarySensor(BinarySensorEntity):
"""A binary sensor implementation for Sure Petcare Entities."""
def __init__(
self,
_id: int,
spc: SurePetcareAPI,
device_class: str,
sure_type: SureProductID,
):
"""Initialize a Sure Petcare binary sensor."""
self._id = _id
self._sure_type = sure_type
self._device_class = device_class
self._spc: SurePetcareAPI = spc
self._spc_data: Dict[str, Any] = self._spc.states[self._sure_type].get(self._id)
self._state: Dict[str, Any] = {}
# cover special case where a device has no name set
if "name" in self._spc_data:
name = self._spc_data["name"]
else:
name = f"Unnamed {self._sure_type.name.capitalize()}"
self._name = f"{self._sure_type.name.capitalize()} {name.capitalize()}"
self._async_unsub_dispatcher_connect = None
@property
def is_on(self) -> Optional[bool]:
"""Return true if entity is on/unlocked."""
return bool(self._state)
@property
def should_poll(self) -> bool:
"""Return true."""
return False
@property
def name(self) -> str:
"""Return the name of the device if any."""
return self._name
@property
def device_class(self) -> str:
"""Return the device class."""
return None if not self._device_class else self._device_class
@property
def unique_id(self) -> str:
"""Return an unique ID."""
return f"{self._spc_data['household_id']}-{self._id}"
async def async_update(self) -> None:
"""Get the latest data and update the state."""
self._spc_data = self._spc.states[self._sure_type].get(self._id)
self._state = self._spc_data.get("status")
_LOGGER.debug("%s -> self._state: %s", self._name, self._state)
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@callback
def update() -> None:
"""Update the state."""
self.async_schedule_update_ha_state(True)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, TOPIC_UPDATE, update
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
class Hub(SurePetcareBinarySensor):
"""Sure Petcare Pet."""
def __init__(self, _id: int, spc: SurePetcareAPI) -> None:
"""Initialize a Sure Petcare Hub."""
super().__init__(_id, spc, DEVICE_CLASS_CONNECTIVITY, SureProductID.HUB)
@property
def available(self) -> bool:
"""Return true if entity is available."""
return bool(self._state["online"])
@property
def is_on(self) -> bool:
"""Return true if entity is online."""
return self.available
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the device."""
attributes = None
if self._state:
attributes = {
"led_mode": int(self._state["led_mode"]),
"pairing_mode": bool(self._state["pairing_mode"]),
}
return attributes
class Pet(SurePetcareBinarySensor):
"""Sure Petcare Pet."""
def __init__(self, _id: int, spc: SurePetcareAPI) -> None:
"""Initialize a Sure Petcare Pet."""
super().__init__(_id, spc, DEVICE_CLASS_PRESENCE, SureProductID.PET)
@property
def is_on(self) -> bool:
"""Return true if entity is at home."""
try:
return bool(SureLocationID(self._state["where"]) == SureLocationID.INSIDE)
except (KeyError, TypeError):
return False
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the device."""
attributes = None
if self._state:
attributes = {
"since": str(
datetime.fromisoformat(self._state["since"]).replace(tzinfo=None)
),
"where": SureLocationID(self._state["where"]).name.capitalize(),
}
return attributes
async def async_update(self) -> None:
"""Get the latest data and update the state."""
self._spc_data = self._spc.states[self._sure_type].get(self._id)
self._state = self._spc_data.get("position")
_LOGGER.debug("%s -> self._state: %s", self._name, self._state)
class DeviceConnectivity(SurePetcareBinarySensor):
"""Sure Petcare Pet."""
def __init__(
self,
_id: int,
sure_type: SureProductID,
spc: SurePetcareAPI,
) -> None:
"""Initialize a Sure Petcare Device."""
super().__init__(_id, spc, DEVICE_CLASS_CONNECTIVITY, sure_type)
@property
def name(self) -> str:
"""Return the name of the device if any."""
return f"{self._name}_connectivity"
@property
def unique_id(self) -> str:
"""Return an unique ID."""
return f"{self._spc_data['household_id']}-{self._id}-connectivity"
@property
def available(self) -> bool:
"""Return true if entity is available."""
return bool(self._state)
@property
def is_on(self) -> bool:
"""Return true if entity is online."""
return self.available
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the device."""
attributes = None
if self._state:
attributes = {
"device_rssi": f'{self._state["signal"]["device_rssi"]:.2f}',
"hub_rssi": f'{self._state["signal"]["hub_rssi"]:.2f}',
}
return attributes
|
import datetime
import re
from typing import Any
from urllib.parse import urlparse
from aiohttp.test_utils import TestClient
import pytest
import requests_mock
from withings_api.common import NotifyAppli, NotifyListProfile, NotifyListResponse
from homeassistant.components.withings.common import (
ConfigEntryWithingsApi,
DataManager,
WebhookConfig,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2Implementation
from tests.async_mock import MagicMock
from tests.common import MockConfigEntry
from tests.components.withings.common import (
ComponentFactory,
get_data_manager_by_user_id,
new_profile_config,
)
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_config_entry_withings_api(hass: HomeAssistant) -> None:
"""Test ConfigEntryWithingsApi."""
config_entry = MockConfigEntry(
data={"token": {"access_token": "mock_access_token", "expires_at": 1111111}}
)
config_entry.add_to_hass(hass)
implementation_mock = MagicMock(spec=AbstractOAuth2Implementation)
implementation_mock.async_refresh_token.return_value = {
"expires_at": 1111111,
"access_token": "mock_access_token",
}
with requests_mock.mock() as rqmck:
rqmck.get(
re.compile(".*"),
status_code=200,
json={"status": 0, "body": {"message": "success"}},
)
api = ConfigEntryWithingsApi(hass, config_entry, implementation_mock)
response = await hass.async_add_executor_job(
api.request, "test", {"arg1": "val1", "arg2": "val2"}
)
assert response == {"message": "success"}
@pytest.mark.parametrize(
["user_id", "arg_user_id", "arg_appli", "expected_code"],
[
[0, 0, NotifyAppli.WEIGHT.value, 0], # Success
[0, None, 1, 0], # Success, we ignore the user_id.
[0, None, None, 12], # No request body.
[0, "GG", None, 20], # appli not provided.
[0, 0, None, 20], # appli not provided.
[0, 0, 99, 21], # Invalid appli.
[0, 11, NotifyAppli.WEIGHT.value, 0], # Success, we ignore the user_id
],
)
async def test_webhook_post(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
user_id: int,
arg_user_id: Any,
arg_appli: Any,
expected_code: int,
) -> None:
"""Test webhook callback."""
person0 = new_profile_config("person0", user_id)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, user_id)
client: TestClient = await aiohttp_client(hass.http.app)
post_data = {}
if arg_user_id is not None:
post_data["userid"] = arg_user_id
if arg_appli is not None:
post_data["appli"] = arg_appli
resp = await client.post(
urlparse(data_manager.webhook_config.url).path, data=post_data
)
# Wait for remaining tasks to complete.
await hass.async_block_till_done()
data = await resp.json()
resp.close()
assert data["code"] == expected_code
async def test_webhook_head(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) -> None:
"""Test head method on webhook view."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, person0.user_id)
client: TestClient = await aiohttp_client(hass.http.app)
resp = await client.head(urlparse(data_manager.webhook_config.url).path)
assert resp.status == 200
async def test_webhook_put(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) -> None:
"""Test webhook callback."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, person0.user_id)
client: TestClient = await aiohttp_client(hass.http.app)
resp = await client.put(urlparse(data_manager.webhook_config.url).path)
# Wait for remaining tasks to complete.
await hass.async_block_till_done()
assert resp.status == 200
data = await resp.json()
assert data
assert data["code"] == 2
async def test_data_manager_webhook_subscription(
hass: HomeAssistant,
component_factory: ComponentFactory,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test data manager webhook subscriptions."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
api: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi)
data_manager = DataManager(
hass,
"person0",
api,
0,
WebhookConfig(id="1234", url="http://localhost/api/webhook/1234", enabled=True),
)
# pylint: disable=protected-access
data_manager._notify_subscribe_delay = datetime.timedelta(seconds=0)
data_manager._notify_unsubscribe_delay = datetime.timedelta(seconds=0)
api.notify_list.return_value = NotifyListResponse(
profiles=(
NotifyListProfile(
appli=NotifyAppli.BED_IN,
callbackurl="https://not.my.callback/url",
expires=None,
comment=None,
),
NotifyListProfile(
appli=NotifyAppli.BED_IN,
callbackurl=data_manager.webhook_config.url,
expires=None,
comment=None,
),
NotifyListProfile(
appli=NotifyAppli.BED_OUT,
callbackurl=data_manager.webhook_config.url,
expires=None,
comment=None,
),
)
)
aioclient_mock.clear_requests()
aioclient_mock.request(
"HEAD",
data_manager.webhook_config.url,
status=200,
)
# Test subscribing
await data_manager.async_subscribe_webhook()
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.WEIGHT
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.CIRCULATORY
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.ACTIVITY
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.SLEEP
)
    # These appli types must not be subscribed to. assert_any_call raises
    # AssertionError when no matching call was made, so pytest.raises makes the
    # negative check explicit (the previous try/assert False/except pattern
    # swallowed its own AssertionError and could never fail).
    with pytest.raises(AssertionError):
        api.notify_subscribe.assert_any_call(
            data_manager.webhook_config.url, NotifyAppli.USER
        )
    with pytest.raises(AssertionError):
        api.notify_subscribe.assert_any_call(
            data_manager.webhook_config.url, NotifyAppli.BED_IN
        )
    with pytest.raises(AssertionError):
        api.notify_subscribe.assert_any_call(
            data_manager.webhook_config.url, NotifyAppli.BED_OUT
        )
# Test unsubscribing.
await data_manager.async_unsubscribe_webhook()
api.notify_revoke.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_IN
)
api.notify_revoke.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_OUT
)
|
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ALIASES,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DEVICE_DEFAULTS_SCHEMA,
SwitchableRflinkDevice,
)
PARALLEL_UPDATES = 0
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
): DEVICE_DEFAULTS_SCHEMA,
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_FIRE_EVENT): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
vol.Optional(CONF_GROUP, default=True): cv.boolean,
}
)
},
},
extra=vol.ALLOW_EXTRA,
)
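# A hypothetical configuration.yaml entry matching the schema above
# (illustrative sketch; the device id and alias below are made up):
#
#   switch:
#     - platform: rflink
#       devices:
#         newkaku_0000c6c2_1:
#           name: living_room_lamp
#           aliases:
#             - newkaku_00000001_2
#           signal_repetitions: 2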
def devices_from_config(domain_config):
"""Parse configuration and add Rflink switch devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config)
device = RflinkSwitch(device_id, **device_config)
devices.append(device)
return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Rflink platform."""
async_add_entities(devices_from_config(config))
class RflinkSwitch(SwitchableRflinkDevice, SwitchEntity):
"""Representation of a Rflink switch."""
|
from kalliope.core.NeuronModule import NeuronModule, MissingParameterException
class Say(NeuronModule):
def __init__(self, **kwargs):
super(Say, self).__init__(**kwargs)
self.message = kwargs.get('message', None)
self.file_template = kwargs.get('file_template', None)
self.parameters = kwargs.get('parameters', None)
if self.file_template is not None:
self.message = self.parameters
# check if parameters have been provided
if self._is_parameters_ok():
self.say(self.message)
def _is_parameters_ok(self):
"""
Check if received parameters are ok to perform operations in the neuron
:return: true if parameters are ok, raise an exception otherwise
.. raises:: MissingParameterException
"""
if self.message is None and self.file_template is None:
raise MissingParameterException("You must specify a message string, a list of messages or a file template"
" as parameter")
return True
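# A hypothetical brain.yml snippet using this neuron (illustrative sketch only;
# the synapse name and order are made up):
#
#   - name: "say-hello"
#     signals:
#       - order: "hello"
#     neurons:
#       - say:
#           message: "Hello, sir"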
|
import asyncio
import logging
import secrets
from rachiopy import Rachio
from requests.exceptions import ConnectTimeout
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from .const import (
CONF_CLOUDHOOK_URL,
CONF_MANUAL_RUN_MINS,
CONF_WEBHOOK_ID,
DEFAULT_MANUAL_RUN_MINS,
DOMAIN,
)
from .device import RachioPerson
from .webhooks import (
async_get_or_create_registered_webhook_id_and_url,
async_register_webhook,
)
_LOGGER = logging.getLogger(__name__)
SUPPORTED_DOMAINS = ["switch", "binary_sensor"]
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(
CONF_MANUAL_RUN_MINS, default=DEFAULT_MANUAL_RUN_MINS
): cv.positive_int,
}
)
},
extra=vol.ALLOW_EXTRA,
)
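# A hypothetical configuration.yaml entry for the YAML import path above
# (illustrative sketch; key names are assumed from the schema constants):
#
#   rachio:
#     api_key: YOUR_RACHIO_API_KEY
#     manual_run_mins: 10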
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the rachio component from YAML."""
conf = config.get(DOMAIN)
hass.data.setdefault(DOMAIN, {})
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in SUPPORTED_DOMAINS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def async_remove_entry(hass, entry):
"""Remove a rachio config entry."""
if CONF_CLOUDHOOK_URL in entry.data:
await hass.components.cloud.async_delete_cloudhook(entry.data[CONF_WEBHOOK_ID])
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up the Rachio config entry."""
config = entry.data
options = entry.options
# CONF_MANUAL_RUN_MINS can only come from a yaml import
if not options.get(CONF_MANUAL_RUN_MINS) and config.get(CONF_MANUAL_RUN_MINS):
options_copy = options.copy()
options_copy[CONF_MANUAL_RUN_MINS] = config[CONF_MANUAL_RUN_MINS]
hass.config_entries.async_update_entry(entry, options=options_copy)
# Configure API
api_key = config[CONF_API_KEY]
rachio = Rachio(api_key)
# Get the URL of this server
rachio.webhook_auth = secrets.token_hex()
webhook_id, webhook_url = await async_get_or_create_registered_webhook_id_and_url(
hass, entry
)
rachio.webhook_url = webhook_url
person = RachioPerson(rachio, entry)
# Get the API user
try:
await hass.async_add_executor_job(person.setup, hass)
except ConnectTimeout as error:
_LOGGER.error("Could not reach the Rachio API: %s", error)
raise ConfigEntryNotReady from error
# Check for Rachio controller devices
if not person.controllers:
_LOGGER.error("No Rachio devices found in account %s", person.username)
return False
_LOGGER.info(
"%d Rachio device(s) found; The url %s must be accessible from the internet in order to receive updates",
len(person.controllers),
webhook_url,
)
# Enable component
hass.data[DOMAIN][entry.entry_id] = person
async_register_webhook(hass, webhook_id, entry.entry_id)
for component in SUPPORTED_DOMAINS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
|
import os
import pytest
from molecule import config
from molecule import util
from molecule.command import base
class ExtendedBase(base.Base):
    def execute(self):
pass
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _base_class(patched_config_validate, config_instance):
return ExtendedBase
@pytest.fixture
def _instance(_base_class, config_instance):
return _base_class(config_instance)
@pytest.fixture
def _patched_verify_configs(mocker):
return mocker.patch('molecule.command.base._verify_configs')
@pytest.fixture
def _patched_base_setup(mocker):
return mocker.patch('test.unit.command.test_base.ExtendedBase._setup')
@pytest.fixture
def _patched_write_config(mocker):
return mocker.patch('molecule.provisioner.ansible.Ansible.write_config')
@pytest.fixture
def _patched_manage_inventory(mocker):
return mocker.patch(
'molecule.provisioner.ansible.Ansible.manage_inventory')
@pytest.fixture
def _patched_execute_subcommand(mocker):
return mocker.patch('molecule.command.base.execute_subcommand')
@pytest.fixture
def _patched_execute_scenario(mocker):
return mocker.patch('molecule.command.base.execute_scenario')
@pytest.fixture
def _patched_print_matrix(mocker):
return mocker.patch('molecule.scenarios.Scenarios.print_matrix')
@pytest.fixture
def _patched_prune(mocker):
return mocker.patch('molecule.scenario.Scenario.prune')
@pytest.fixture
def _patched_sysexit(mocker):
return mocker.patch('molecule.util.sysexit')
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_init_calls_setup(_patched_base_setup, _instance):
_patched_base_setup.assert_called_once_with()
def test_print_info(mocker, patched_logger_info, _instance):
_instance.print_info()
x = [
mocker.call("Scenario: 'default'"),
mocker.call("Action: 'extended_base'"),
]
assert x == patched_logger_info.mock_calls
def test_setup(mocker, patched_add_or_update_vars, _patched_write_config,
_patched_manage_inventory, _instance):
assert os.path.isdir(
os.path.dirname(_instance._config.provisioner.inventory_file))
_patched_manage_inventory.assert_called_once_with()
_patched_write_config.assert_called_once_with()
def test_execute_cmdline_scenarios(config_instance, _patched_print_matrix,
_patched_execute_scenario):
# Ensure execute_cmdline_scenarios runs normally:
# - scenarios.print_matrix is called, which also indicates Scenarios
# was instantiated correctly
# - execute_scenario is called once, indicating the function correctly
# loops over Scenarios.
scenario_name = None
args = {}
command_args = {
'destroy': 'always',
'subcommand': 'test',
}
base.execute_cmdline_scenarios(scenario_name, args, command_args)
    _patched_print_matrix.assert_called_once_with()
assert _patched_execute_scenario.call_count == 1
def test_execute_cmdline_scenarios_destroy(
config_instance, _patched_execute_scenario, _patched_prune,
_patched_execute_subcommand, _patched_sysexit):
# Ensure execute_cmdline_scenarios handles errors correctly when 'destroy'
# is 'always':
# - cleanup and destroy subcommands are run when execute_scenario
# raises SystemExit
# - scenario is pruned
scenario_name = 'default'
args = {}
command_args = {
'destroy': 'always',
'subcommand': 'test',
}
_patched_execute_scenario.side_effect = SystemExit()
base.execute_cmdline_scenarios(scenario_name, args, command_args)
assert _patched_execute_subcommand.call_count == 2
# pull out the second positional call argument for each call,
# which is the called subcommand. 'cleanup' and 'destroy' should be called.
assert _patched_execute_subcommand.call_args_list[0][0][1] == 'cleanup'
assert _patched_execute_subcommand.call_args_list[1][0][1] == 'destroy'
assert _patched_prune.called
assert _patched_sysexit.called
def test_execute_cmdline_scenarios_nodestroy(config_instance,
_patched_execute_scenario,
_patched_prune, _patched_sysexit):
# Ensure execute_cmdline_scenarios handles errors correctly when 'destroy'
# is 'always':
# - destroy subcommand is not run when execute_scenario raises SystemExit
# - scenario is not pruned
# - caught SystemExit is reraised
scenario_name = 'default'
args = {}
command_args = {
'destroy': 'never',
'subcommand': 'test',
}
_patched_execute_scenario.side_effect = SystemExit()
# Catch the expected SystemExit reraise
with pytest.raises(SystemExit):
base.execute_cmdline_scenarios(scenario_name, args, command_args)
assert _patched_execute_scenario.called
assert not _patched_prune.called
assert not _patched_sysexit.called
def test_execute_subcommand(config_instance):
# scenario's config.action is mutated in-place for every sequence action,
# so make sure that is currently set to the executed action
assert config_instance.action != 'list'
assert base.execute_subcommand(config_instance, 'list')
assert config_instance.action == 'list'
def test_execute_scenario(mocker, _patched_execute_subcommand):
# call a spoofed scenario with a sequence that does not include destroy:
# - execute_subcommand should be called once for each sequence item
# - prune should not be called, since the sequence has no destroy step
scenario = mocker.Mock()
scenario.sequence = ('a', 'b', 'c')
base.execute_scenario(scenario)
assert _patched_execute_subcommand.call_count == len(scenario.sequence)
assert not scenario.prune.called
def test_execute_scenario_destroy(mocker, _patched_execute_subcommand):
# call a spoofed scenario with a sequence that includes destroy:
# - execute_subcommand should be called once for each sequence item
# - prune should be called, since the sequence has a destroy step
scenario = mocker.Mock()
scenario.sequence = ('a', 'b', 'destroy', 'c')
base.execute_scenario(scenario)
assert _patched_execute_subcommand.call_count == len(scenario.sequence)
assert scenario.prune.called
def test_get_configs(config_instance):
molecule_file = config_instance.molecule_file
data = config_instance.config
util.write_file(molecule_file, util.safe_dump(data))
result = base.get_configs({}, {})
assert 1 == len(result)
assert isinstance(result, list)
assert isinstance(result[0], config.Config)
def test_get_configs_calls_verify_configs(_patched_verify_configs):
base.get_configs({}, {})
_patched_verify_configs.assert_called_once_with([])
def test_verify_configs(config_instance):
configs = [config_instance]
assert base._verify_configs(configs) is None
def test_verify_configs_raises_with_no_configs(patched_logger_critical):
with pytest.raises(SystemExit) as e:
base._verify_configs([])
assert 1 == e.value.code
msg = "'molecule/*/molecule.yml' glob failed. Exiting."
patched_logger_critical.assert_called_once_with(msg)
def test_verify_configs_raises_with_duplicate_configs(patched_logger_critical,
config_instance):
with pytest.raises(SystemExit) as e:
configs = [config_instance, config_instance]
base._verify_configs(configs)
assert 1 == e.value.code
msg = "Duplicate scenario name 'default' found. Exiting."
patched_logger_critical.assert_called_once_with(msg)
def test_get_subcommand():
assert 'test_base' == base._get_subcommand(__name__)
|
import unittest
from unittest.mock import patch, MagicMock
from credstash import get_session, reset_sessions
class TestGetSession(unittest.TestCase):
def setUp(self):
reset_sessions()
@patch('boto3.Session')
def test_get_session_initial_session(self, mock_session):
mock_session.return_value = 'session1'
get_session(
aws_access_key_id='session1'
)
mock_session.assert_called_once_with(
aws_access_key_id='session1',
aws_secret_access_key=None,
aws_session_token=None,
profile_name=None
)
@patch('boto3.Session')
def test_get_session_single_last_session(self, mock_session):
mock_session.return_value = 'session1'
get_session(
aws_access_key_id='session1'
)
mock_session.assert_called_once_with(
aws_access_key_id='session1',
aws_secret_access_key=None,
aws_session_token=None,
profile_name=None
)
self.assertEqual(get_session(), 'session1')
@patch('boto3.Session')
def test_get_session_two_sessions(self, mock_session):
mock_session.side_effect = ['session1', 'session2']
get_session(
aws_access_key_id='session1'
)
mock_session.assert_called_with(
aws_access_key_id='session1',
aws_secret_access_key=None,
aws_session_token=None,
profile_name=None
)
get_session(
aws_access_key_id='session2'
)
mock_session.assert_called_with(
aws_access_key_id='session2',
aws_secret_access_key=None,
aws_session_token=None,
profile_name=None
)
self.assertEqual(get_session(), 'session2')
self.assertEqual(get_session(aws_access_key_id='session1'), 'session1')
self.assertEqual(get_session(), 'session1')
self.assertEqual(get_session(aws_access_key_id='session2'), 'session2')
self.assertEqual(get_session(), 'session2')
@patch('boto3.Session')
def test_get_session_no_params(self, mock_session):
mock_session.return_value = 'defaultsession'
self.assertEqual(get_session(), 'defaultsession')
self.assertEqual(get_session(), 'defaultsession')
mock_session.assert_called_once_with(profile_name=None)
@patch('boto3.Session')
def test_get_session_specify_profile(self, mock_session):
mock_session.return_value = 'session1'
get_session(
profile_name='profile1'
)
mock_session.assert_called_once_with(
profile_name='profile1'
)
|
from ipaddress import ip_address, ip_network
import pytest
import voluptuous as vol
from homeassistant import auth
from homeassistant.auth import auth_store
from homeassistant.auth.providers import trusted_networks as tn_auth
@pytest.fixture
def store(hass):
"""Mock store."""
return auth_store.AuthStore(hass)
@pytest.fixture
def provider(hass, store):
"""Mock provider."""
return tn_auth.TrustedNetworksAuthProvider(
hass,
store,
tn_auth.CONFIG_SCHEMA(
{
"type": "trusted_networks",
"trusted_networks": [
"192.168.0.1",
"192.168.128.0/24",
"::1",
"fd00::/8",
],
}
),
)
@pytest.fixture
def provider_with_user(hass, store):
"""Mock provider with trusted users config."""
return tn_auth.TrustedNetworksAuthProvider(
hass,
store,
tn_auth.CONFIG_SCHEMA(
{
"type": "trusted_networks",
"trusted_networks": [
"192.168.0.1",
"192.168.128.0/24",
"::1",
"fd00::/8",
],
# user_id will be injected in test
"trusted_users": {
"192.168.0.1": [],
"192.168.128.0/24": [],
"fd00::/8": [],
},
}
),
)
@pytest.fixture
def provider_bypass_login(hass, store):
"""Mock provider with allow_bypass_login config."""
return tn_auth.TrustedNetworksAuthProvider(
hass,
store,
tn_auth.CONFIG_SCHEMA(
{
"type": "trusted_networks",
"trusted_networks": [
"192.168.0.1",
"192.168.128.0/24",
"::1",
"fd00::/8",
],
"allow_bypass_login": True,
}
),
)
@pytest.fixture
def manager(hass, store, provider):
"""Mock manager."""
return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {})
@pytest.fixture
def manager_with_user(hass, store, provider_with_user):
"""Mock manager with trusted user."""
return auth.AuthManager(
hass,
store,
{(provider_with_user.type, provider_with_user.id): provider_with_user},
{},
)
@pytest.fixture
def manager_bypass_login(hass, store, provider_bypass_login):
"""Mock manager with allow bypass login."""
return auth.AuthManager(
hass,
store,
{(provider_bypass_login.type, provider_bypass_login.id): provider_bypass_login},
{},
)
async def test_trusted_networks_credentials(manager, provider):
"""Test trusted_networks credentials related functions."""
owner = await manager.async_create_user("test-owner")
tn_owner_cred = await provider.async_get_or_create_credentials({"user": owner.id})
assert tn_owner_cred.is_new is False
assert any(cred.id == tn_owner_cred.id for cred in owner.credentials)
user = await manager.async_create_user("test-user")
tn_user_cred = await provider.async_get_or_create_credentials({"user": user.id})
assert tn_user_cred.id != tn_owner_cred.id
assert tn_user_cred.is_new is False
assert any(cred.id == tn_user_cred.id for cred in user.credentials)
with pytest.raises(tn_auth.InvalidUserError):
await provider.async_get_or_create_credentials({"user": "invalid-user"})
async def test_validate_access(provider):
"""Test validate access from trusted networks."""
provider.async_validate_access(ip_address("192.168.0.1"))
provider.async_validate_access(ip_address("192.168.128.10"))
provider.async_validate_access(ip_address("::1"))
provider.async_validate_access(ip_address("fd01:db8::ff00:42:8329"))
with pytest.raises(tn_auth.InvalidAuthError):
provider.async_validate_access(ip_address("192.168.0.2"))
with pytest.raises(tn_auth.InvalidAuthError):
provider.async_validate_access(ip_address("127.0.0.1"))
with pytest.raises(tn_auth.InvalidAuthError):
provider.async_validate_access(ip_address("2001:db8::ff00:42:8329"))
async def test_login_flow(manager, provider):
"""Test login flow."""
owner = await manager.async_create_user("test-owner")
user = await manager.async_create_user("test-user")
# not from trusted network
flow = await provider.async_login_flow({"ip_address": ip_address("127.0.0.1")})
step = await flow.async_step_init()
assert step["type"] == "abort"
assert step["reason"] == "not_allowed"
# from trusted network, list users
flow = await provider.async_login_flow({"ip_address": ip_address("192.168.0.1")})
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
assert schema({"user": owner.id})
with pytest.raises(vol.Invalid):
assert schema({"user": "invalid-user"})
# login with valid user
step = await flow.async_step_init({"user": user.id})
assert step["type"] == "create_entry"
assert step["data"]["user"] == user.id
async def test_trusted_users_login(manager_with_user, provider_with_user):
"""Test available user list changed per different IP."""
owner = await manager_with_user.async_create_user("test-owner")
sys_user = await manager_with_user.async_create_system_user(
"test-sys-user"
) # system user will not be available to select
user = await manager_with_user.async_create_user("test-user")
# change the trusted users config
config = provider_with_user.config["trusted_users"]
assert ip_network("192.168.0.1") in config
config[ip_network("192.168.0.1")] = [owner.id]
assert ip_network("192.168.128.0/24") in config
config[ip_network("192.168.128.0/24")] = [sys_user.id, user.id]
# not from trusted network
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("127.0.0.1")}
)
step = await flow.async_step_init()
assert step["type"] == "abort"
assert step["reason"] == "not_allowed"
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("192.168.0.1")}
)
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# only owner listed
assert schema({"user": owner.id})
with pytest.raises(vol.Invalid):
assert schema({"user": user.id})
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("192.168.128.1")}
)
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# only user listed
assert schema({"user": user.id})
with pytest.raises(vol.Invalid):
assert schema({"user": owner.id})
with pytest.raises(vol.Invalid):
assert schema({"user": sys_user.id})
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow({"ip_address": ip_address("::1")})
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# both owner and user listed
assert schema({"user": owner.id})
assert schema({"user": user.id})
with pytest.raises(vol.Invalid):
assert schema({"user": sys_user.id})
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("fd00::1")}
)
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# no user listed
with pytest.raises(vol.Invalid):
assert schema({"user": owner.id})
with pytest.raises(vol.Invalid):
assert schema({"user": user.id})
with pytest.raises(vol.Invalid):
assert schema({"user": sys_user.id})
async def test_trusted_group_login(manager_with_user, provider_with_user):
"""Test config trusted_user with group_id."""
owner = await manager_with_user.async_create_user("test-owner")
# create a user in user group
user = await manager_with_user.async_create_user("test-user")
await manager_with_user.async_update_user(
user, group_ids=[auth.const.GROUP_ID_USER]
)
# change the trusted users config
config = provider_with_user.config["trusted_users"]
assert ip_network("192.168.0.1") in config
config[ip_network("192.168.0.1")] = [{"group": [auth.const.GROUP_ID_USER]}]
assert ip_network("192.168.128.0/24") in config
config[ip_network("192.168.128.0/24")] = [
owner.id,
{"group": [auth.const.GROUP_ID_USER]},
]
# not from trusted network
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("127.0.0.1")}
)
step = await flow.async_step_init()
assert step["type"] == "abort"
assert step["reason"] == "not_allowed"
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("192.168.0.1")}
)
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# only user listed
print(user.id)
assert schema({"user": user.id})
with pytest.raises(vol.Invalid):
assert schema({"user": owner.id})
# from trusted network, list users intersect trusted_users
flow = await provider_with_user.async_login_flow(
{"ip_address": ip_address("192.168.128.1")}
)
step = await flow.async_step_init()
assert step["step_id"] == "init"
schema = step["data_schema"]
# both owner and user listed
assert schema({"user": owner.id})
assert schema({"user": user.id})
async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login):
"""Test login flow can be bypass if only one user available."""
owner = await manager_bypass_login.async_create_user("test-owner")
# not from trusted network
flow = await provider_bypass_login.async_login_flow(
{"ip_address": ip_address("127.0.0.1")}
)
step = await flow.async_step_init()
assert step["type"] == "abort"
assert step["reason"] == "not_allowed"
# from trusted network, only one available user, bypass the login flow
flow = await provider_bypass_login.async_login_flow(
{"ip_address": ip_address("192.168.0.1")}
)
step = await flow.async_step_init()
assert step["type"] == "create_entry"
assert step["data"]["user"] == owner.id
user = await manager_bypass_login.async_create_user("test-user")
# from trusted network, two available user, show up login form
flow = await provider_bypass_login.async_login_flow(
{"ip_address": ip_address("192.168.0.1")}
)
step = await flow.async_step_init()
schema = step["data_schema"]
# both owner and user listed
assert schema({"user": owner.id})
assert schema({"user": user.id})
|
from datetime import timedelta
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
DATA_MEGABYTES,
DATA_RATE_MEGABYTES_PER_SECOND,
DEVICE_CLASS_TIMESTAMP,
)
from homeassistant.util import dt as dt_util
from . import init_integration
from tests.async_mock import patch
async def test_sensors(hass, nzbget_api) -> None:
"""Test the creation and values of the sensors."""
now = dt_util.utcnow().replace(microsecond=0)
with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now):
entry = await init_integration(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
uptime = now - timedelta(seconds=600)
sensors = {
"article_cache": ("ArticleCacheMB", "64", DATA_MEGABYTES, None),
"average_speed": (
"AverageDownloadRate",
"1.19",
DATA_RATE_MEGABYTES_PER_SECOND,
None,
),
"download_paused": ("DownloadPaused", "False", None, None),
"speed": ("DownloadRate", "2.38", DATA_RATE_MEGABYTES_PER_SECOND, None),
"size": ("DownloadedSizeMB", "256", DATA_MEGABYTES, None),
"disk_free": ("FreeDiskSpaceMB", "1024", DATA_MEGABYTES, None),
"post_processing_jobs": ("PostJobCount", "2", "Jobs", None),
"post_processing_paused": ("PostPaused", "False", None, None),
"queue_size": ("RemainingSizeMB", "512", DATA_MEGABYTES, None),
"uptime": ("UpTimeSec", uptime.isoformat(), None, DEVICE_CLASS_TIMESTAMP),
}
for (sensor_id, data) in sensors.items():
entity_entry = registry.async_get(f"sensor.nzbgettest_{sensor_id}")
assert entity_entry
assert entity_entry.device_class == data[3]
assert entity_entry.unique_id == f"{entry.entry_id}_{data[0]}"
state = hass.states.get(f"sensor.nzbgettest_{sensor_id}")
assert state
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == data[2]
assert state.state == data[1]
|
from nikola.plugin_categories import Taxonomy
class PageIndex(Taxonomy):
"""Classify for the page index."""
name = "classify_page_index"
classification_name = "page_index_folder"
overview_page_variable_name = "page_folder"
more_than_one_classifications_per_post = False
has_hierarchy = True
include_posts_from_subhierarchies = False
show_list_as_index = False
template_for_single_list = "list.tmpl"
template_for_classification_overview = None
always_disable_rss = True
always_disable_atom = True
apply_to_posts = False
apply_to_pages = True
omit_empty_classifications = True
path_handler_docstrings = {
'page_index_folder_index': None,
'page_index_folder': None,
'page_index_folder_atom': None,
'page_index_folder_rss': None,
}
def is_enabled(self, lang=None):
"""Return True if this taxonomy is enabled, or False otherwise."""
return self.site.config["PAGE_INDEX"]
def classify(self, post, lang):
"""Classify the given post for the given language."""
destpath = post.destination_path(lang, sep='/')
if post.has_pretty_url(lang):
idx = '/index.html'
if destpath.endswith(idx):
destpath = destpath[:-len(idx)]
i = destpath.rfind('/')
return [destpath[:i] if i >= 0 else '']
def get_classification_friendly_name(self, dirname, lang, only_last_component=False):
"""Extract a friendly name from the classification."""
return dirname
def get_path(self, hierarchy, lang, dest_type='page'):
"""Return a path for the given classification."""
return hierarchy, 'always'
def extract_hierarchy(self, dirname):
"""Given a classification, return a list of parts in the hierarchy."""
return dirname.split('/') if dirname else []
def recombine_classification_from_hierarchy(self, hierarchy):
"""Given a list of parts in the hierarchy, return the classification string."""
return '/'.join(hierarchy)
def provide_context_and_uptodate(self, dirname, lang, node=None):
"""Provide data for the context and the uptodate list for the list of the given classifiation."""
kw = {
"translations": self.site.config['TRANSLATIONS'],
"filters": self.site.config['FILTERS'],
}
context = {
"title": self.site.config['BLOG_TITLE'](lang),
"pagekind": ["list", "front_page", "page_index"] if dirname == '' else ["list", "page_index"],
"kind": "page_index_folder",
"classification": dirname,
"has_no_feeds": True,
}
kw.update(context)
return context, kw
def should_generate_classification_page(self, dirname, post_list, lang):
"""Only generates list of posts for classification if this function returns True."""
short_destination = dirname + '/' + self.site.config['INDEX_FILE']
for post in post_list:
# If there is an index.html pending to be created from a page, do not generate the page index.
if post.destination_path(lang, sep='/') == short_destination:
return False
return True
|
from django.test import TestCase
from django.utils.translation import activate
from django.utils.translation import deactivate
from zinnia.urls import i18n_url
class TranslatedURLsTestCase(TestCase):
"""Test cases for translated URLs"""
def test_translated_urls(self):
deactivate()
self.assertEqual(
i18n_url(r'^authors/'), r'^authors/')
activate('fr')
self.assertEqual(
i18n_url(r'^authors/', True), r'^auteurs/')
self.assertEqual(
i18n_url(r'^authors/', False), r'^authors/')
|
from unittest.mock import call
import pytest
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.core import State
from tests.common import MockConfigEntry, mock_restore_cache
from tests.components.rfxtrx.conftest import create_rfx_test_cfg
async def test_one_light(hass, rfxtrx):
"""Test with 1 light."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("light.ac_213c7f2_16")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:16"
await hass.services.async_call(
"light", "turn_on", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 255
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "off"
assert state.attributes.get("brightness") is None
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 100},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 100
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 10},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 10
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.ac_213c7f2_16", "brightness": 255},
blocking=True,
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "on"
assert state.attributes.get("brightness") == 255
await hass.services.async_call(
"light", "turn_off", {"entity_id": "light.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("light.ac_213c7f2_16")
assert state.state == "off"
assert state.attributes.get("brightness") is None
assert rfxtrx.transport.send.mock_calls == [
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x01\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x06\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x0f\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
]
@pytest.mark.parametrize("state,brightness", [["on", 100], ["on", 50], ["off", None]])
async def test_state_restore(hass, rfxtrx, state, brightness):
"""State restoration."""
entity_id = "light.ac_213c7f2_16"
mock_restore_cache(
hass, [State(entity_id, state, attributes={ATTR_BRIGHTNESS: brightness})]
)
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == state
assert hass.states.get(entity_id).attributes.get(ATTR_BRIGHTNESS) == brightness
async def test_several_lights(hass, rfxtrx):
"""Test with 3 lights."""
entry_data = create_rfx_test_cfg(
devices={
"0b1100cd0213c7f230020f71": {"signal_repetitions": 1},
"0b1100100118cdea02020f70": {"signal_repetitions": 1},
"0b1100101118cdea02050f70": {"signal_repetitions": 1},
}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.async_start()
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:48"
state = hass.states.get("light.ac_118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 118cdea:2"
state = hass.states.get("light.ac_1118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 1118cdea:2"
await rfxtrx.signal("0b1100cd0213c7f230010f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "on"
await rfxtrx.signal("0b1100cd0213c7f230000f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "off"
await rfxtrx.signal("0b1100cd0213c7f230020f71")
state = hass.states.get("light.ac_213c7f2_48")
assert state
assert state.state == "on"
assert state.attributes.get("brightness") == 255
@pytest.mark.parametrize("repetitions", [1, 3])
async def test_repetitions(hass, rfxtrx, repetitions):
"""Test signal repetitions."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f230020f71": {"signal_repetitions": repetitions}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
"light", "turn_on", {"entity_id": "light.ac_213c7f2_48"}, blocking=True
)
await hass.async_block_till_done()
assert rfxtrx.transport.send.call_count == repetitions
async def test_discover_light(hass, rfxtrx_automatic):
"""Test with discovery of lights."""
rfxtrx = rfxtrx_automatic
await rfxtrx.signal("0b11009e00e6116202020070")
state = hass.states.get("light.ac_0e61162_2")
assert state
assert state.state == "on"
assert state.attributes.get("friendly_name") == "AC 0e61162:2"
await rfxtrx.signal("0b1100120118cdea02020070")
state = hass.states.get("light.ac_118cdea_2")
assert state
assert state.state == "on"
assert state.attributes.get("friendly_name") == "AC 118cdea:2"
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from vmsdoms import VMSDomsCollector
###############################################################################
class TestVMSDomsCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('VMSDomsCollector', {
})
self.collector = VMSDomsCollector(config, None)
def test_import(self):
self.assertTrue(VMSDomsCollector)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
import os
import sh
from molecule import logger
from molecule import util
from molecule.verifier.lint import base
LOG = logger.get_logger(__name__)
class Flake8(base.Base):
"""
`Flake8`_ is the default verifier linter.
`Flake8`_ is a linter for python files.
Additional options can be passed to `flake8` through the options
dict. Any option set in this section will override the defaults.
.. code-block:: yaml
verifier:
name: testinfra
lint:
name: flake8
options:
benchmark: True
Test file linting can be disabled by setting `enabled` to False.
.. code-block:: yaml
verifier:
name: testinfra
lint:
name: flake8
enabled: False
Environment variables can be passed to lint.
.. code-block:: yaml
verifier:
name: testinfra
lint:
name: flake8
env:
FOO: bar
.. _`Flake8`: http://flake8.pycqa.org/en/latest/
"""
def __init__(self, config):
"""
Sets up the requirements to execute `flake8` and returns None.
:param config: An instance of a Molecule config.
:return: None
"""
super(Flake8, self).__init__(config)
self._flake8_command = None
if config:
self._tests = self._get_tests()
@property
def default_options(self):
return {}
@property
def default_env(self):
return util.merge_dicts(os.environ.copy(), self._config.env)
def bake(self):
"""
Bake a `flake8` command so it's ready to execute and returns None.
:return: None
"""
self._flake8_command = sh.flake8.bake(
self.options,
self._tests,
_env=self.env,
_out=LOG.out,
_err=LOG.error)
    def execute(self):
        """
        Execute `flake8` on the discovered test files and returns None.
        :return: None
        """
if not self.enabled:
msg = 'Skipping, verifier_lint is disabled.'
LOG.warn(msg)
return
        if not self._tests:
msg = 'Skipping, no tests found.'
LOG.warn(msg)
return
if self._flake8_command is None:
self.bake()
msg = 'Executing Flake8 on files found in {}/...'.format(
self._config.verifier.directory)
LOG.info(msg)
try:
util.run_command(self._flake8_command, debug=self._config.debug)
msg = 'Lint completed successfully.'
LOG.success(msg)
except sh.ErrorReturnCode as e:
util.sysexit(e.exit_code)
def _get_tests(self):
"""
Walk the verifier's directory for tests and returns a list.
:return: list
"""
return [
filename for filename in util.os_walk(
self._config.verifier.directory, 'test_*.py')
]
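# Illustrative note (not part of molecule): with a verifier directory that
# contains test_default.py and test_extra.py (hypothetical names), _get_tests()
# returns the paths of those two files and bake() pre-builds a flake8
# invocation over them with the configured options and environment applied,
# ready for execute() to run via util.run_command().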
|
import os
import unittest
from absl import flags
from perfkitbenchmarker.linux_benchmarks import horovod_benchmark
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
class HorovodBenchmarkTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(HorovodBenchmarkTestCase, self).setUp()
filenames = [
'horovod_output_resnet.txt',
'horovod_output_bert.txt',
]
self.test_output = dict()
for fn in filenames:
path = os.path.join(os.path.dirname(__file__), '../data', fn)
self.test_output[fn] = open(path).read()
def testExtractResNetThroughput(self):
throughput, _ = horovod_benchmark._ExtractResNetThroughput(
self.test_output['horovod_output_resnet.txt'])
self.assertEqual(6638.0, throughput)
def testExtractBertThroughput(self):
throughput, _ = horovod_benchmark._ExtractBertThroughput(
self.test_output['horovod_output_bert.txt'])
self.assertEqual(52.3, throughput)
if __name__ == '__main__':
unittest.main()
|
from datetime import timedelta
from typing import Mapping, Optional
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import callback
from .const import ( # pylint: disable=unused-import
CONFIG_ENTRY_SCAN_INTERVAL,
CONFIG_ENTRY_ST,
CONFIG_ENTRY_UDN,
DEFAULT_SCAN_INTERVAL,
DISCOVERY_LOCATION,
DISCOVERY_NAME,
DISCOVERY_ST,
DISCOVERY_UDN,
DISCOVERY_USN,
DOMAIN,
DOMAIN_COORDINATORS,
LOGGER as _LOGGER,
)
from .device import Device
class UpnpFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a UPnP/IGD config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
# Paths:
# - ssdp(discovery_info) --> ssdp_confirm(None) --> ssdp_confirm({}) --> create_entry()
# - user(None): scan --> user({...}) --> create_entry()
# - import(None) --> create_entry()
def __init__(self):
"""Initialize the UPnP/IGD config flow."""
self._discoveries: Mapping = None
async def async_step_user(self, user_input: Optional[Mapping] = None):
"""Handle a flow start."""
_LOGGER.debug("async_step_user: user_input: %s", user_input)
# This uses DISCOVERY_USN as the identifier for the device.
if user_input is not None:
# Ensure wanted device was discovered.
matching_discoveries = [
discovery
for discovery in self._discoveries
if discovery[DISCOVERY_USN] == user_input["usn"]
]
if not matching_discoveries:
return self.async_abort(reason="no_devices_found")
discovery = matching_discoveries[0]
await self.async_set_unique_id(
discovery[DISCOVERY_USN], raise_on_progress=False
)
return await self._async_create_entry_from_discovery(discovery)
# Discover devices.
discoveries = await Device.async_discover(self.hass)
# Store discoveries which have not been configured, add name for each discovery.
current_usns = {entry.unique_id for entry in self._async_current_entries()}
self._discoveries = [
{
**discovery,
DISCOVERY_NAME: await self._async_get_name_for_discovery(discovery),
}
for discovery in discoveries
if discovery[DISCOVERY_USN] not in current_usns
]
        # Ensure there is something to add.
if not self._discoveries:
return self.async_abort(reason="no_devices_found")
data_schema = vol.Schema(
{
vol.Required("usn"): vol.In(
{
discovery[DISCOVERY_USN]: discovery[DISCOVERY_NAME]
for discovery in self._discoveries
}
),
}
)
return self.async_show_form(
step_id="user",
data_schema=data_schema,
)
async def async_step_import(self, import_info: Optional[Mapping]):
"""Import a new UPnP/IGD device as a config entry.
This flow is triggered by `async_setup`. If no device has been
configured before, find any device and create a config_entry for it.
Otherwise, do nothing.
"""
_LOGGER.debug("async_step_import: import_info: %s", import_info)
# Landed here via configuration.yaml entry.
# Any device already added, then abort.
if self._async_current_entries():
_LOGGER.debug("Already configured, aborting")
return self.async_abort(reason="already_configured")
# Discover devices.
self._discoveries = await Device.async_discover(self.hass)
        # Ensure there is something to add. If not, silently abort.
if not self._discoveries:
_LOGGER.info("No UPnP devices discovered, aborting")
return self.async_abort(reason="no_devices_found")
# Ensure complete discovery.
discovery_info = self._discoveries[0]
if DISCOVERY_USN not in discovery_info:
_LOGGER.debug("Incomplete discovery, ignoring")
return self.async_abort(reason="incomplete_discovery")
# Ensure not already configuring/configured.
usn = discovery_info[DISCOVERY_USN]
await self.async_set_unique_id(usn)
return await self._async_create_entry_from_discovery(discovery_info)
async def async_step_ssdp(self, discovery_info: Mapping):
"""Handle a discovered UPnP/IGD device.
This flow is triggered by the SSDP component. It will check if the
host is already configured and delegate to the import step if not.
"""
_LOGGER.debug("async_step_ssdp: discovery_info: %s", discovery_info)
# Ensure complete discovery.
if (
ssdp.ATTR_UPNP_UDN not in discovery_info
or ssdp.ATTR_SSDP_ST not in discovery_info
):
_LOGGER.debug("Incomplete discovery, ignoring")
return self.async_abort(reason="incomplete_discovery")
# Ensure not already configuring/configured.
udn = discovery_info[ssdp.ATTR_UPNP_UDN]
st = discovery_info[ssdp.ATTR_SSDP_ST] # pylint: disable=invalid-name
usn = f"{udn}::{st}"
await self.async_set_unique_id(usn)
self._abort_if_unique_id_configured()
# Store discovery.
name = discovery_info.get("friendlyName", "")
discovery = {
DISCOVERY_UDN: udn,
DISCOVERY_ST: st,
DISCOVERY_NAME: name,
}
self._discoveries = [discovery]
        # Ensure the discovery is recognizable by the user.
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = {
"name": name,
}
return await self.async_step_ssdp_confirm()
async def async_step_ssdp_confirm(self, user_input: Optional[Mapping] = None):
"""Confirm integration via SSDP."""
_LOGGER.debug("async_step_ssdp_confirm: user_input: %s", user_input)
if user_input is None:
return self.async_show_form(step_id="ssdp_confirm")
discovery = self._discoveries[0]
return await self._async_create_entry_from_discovery(discovery)
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Define the config flow to handle options."""
return UpnpOptionsFlowHandler(config_entry)
async def _async_create_entry_from_discovery(
self,
discovery: Mapping,
):
"""Create an entry from discovery."""
_LOGGER.debug(
"_async_create_entry_from_discovery: discovery: %s",
discovery,
)
# Get name from device, if not found already.
if DISCOVERY_NAME not in discovery and DISCOVERY_LOCATION in discovery:
discovery[DISCOVERY_NAME] = await self._async_get_name_for_discovery(
discovery
)
title = discovery.get(DISCOVERY_NAME, "")
data = {
CONFIG_ENTRY_UDN: discovery[DISCOVERY_UDN],
CONFIG_ENTRY_ST: discovery[DISCOVERY_ST],
}
return self.async_create_entry(title=title, data=data)
async def _async_get_name_for_discovery(self, discovery: Mapping):
"""Get the name of the device from a discovery."""
_LOGGER.debug("_async_get_name_for_discovery: discovery: %s", discovery)
device = await Device.async_create_device(
self.hass, discovery[DISCOVERY_LOCATION]
)
return device.name
class UpnpOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a UPnP options flow."""
def __init__(self, config_entry):
"""Initialize."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
udn = self.config_entry.data.get(CONFIG_ENTRY_UDN)
coordinator = self.hass.data[DOMAIN][DOMAIN_COORDINATORS][udn]
update_interval_sec = user_input.get(
CONFIG_ENTRY_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
)
update_interval = timedelta(seconds=update_interval_sec)
_LOGGER.debug("Updating coordinator, update_interval: %s", update_interval)
coordinator.update_interval = update_interval
return self.async_create_entry(title="", data=user_input)
scan_interval = self.config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_SCAN_INTERVAL,
default=scan_interval,
): vol.All(vol.Coerce(int), vol.Range(min=30)),
}
),
)
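# Illustrative note (not part of this module): submitting the options form
# with, say, a scan interval of 60 stores that value on the config entry
# options and immediately applies timedelta(seconds=60) as update_interval on
# the coordinator registered for the entry's UDN.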
|
from django.db import migrations
def update_name(apps, schema_editor):
Group = apps.get_model("weblate_auth", "Group")
db_alias = schema_editor.connection.alias
for group in Group.objects.using(db_alias).filter(
name__endswith="@Template", internal=True
):
group.name = "{}@Sources".format(group.name.rsplit("@", 1)[0])
group.save(update_fields=["name"])
class Migration(migrations.Migration):
dependencies = [
("weblate_auth", "0012_auto_20200729_1200"),
]
operations = [
migrations.RunPython(update_name, migrations.RunPython.noop, elidable=True),
]
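# Illustrative note (not part of the migration): an internal group named, for
# example, "Project@Template" would be renamed to "Project@Sources" by
# update_name; the reverse migration is intentionally a no-op.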
|
from pygti.gti import GTI, Auth
class GTIHub:
"""GTI Hub."""
def __init__(self, host, username, password, session):
"""Initialize."""
self.host = host
self.username = username
self.password = password
self.gti = GTI(Auth(session, self.username, self.password, self.host))
async def authenticate(self):
"""Test if we can authenticate with the host."""
return await self.gti.init()
|
import diamond.collector
import os
class NfsCollector(diamond.collector.Collector):
PROC = '/proc/net/rpc/nfs'
def get_default_config_help(self):
config_help = super(NfsCollector, self).get_default_config_help()
config_help.update({
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(NfsCollector, self).get_default_config()
config.update({
'path': 'nfs'
})
return config
def collect(self):
"""
Collect stats
"""
if os.access(self.PROC, os.R_OK):
results = {}
# Open file
file = open(self.PROC)
for line in file:
line = line.split()
if line[0] == 'net':
results['net.packets'] = line[1]
results['net.udpcnt'] = line[2]
results['net.tcpcnt'] = line[3]
results['net.tcpconn'] = line[4]
elif line[0] == 'rpc':
results['rpc.calls'] = line[1]
results['rpc.retrans'] = line[2]
results['rpc.authrefrsh'] = line[3]
elif line[0] == 'proc2':
line.pop(1) # remove column-cnt field
results['v2.null'] = line[1]
results['v2.getattr'] = line[2]
results['v2.setattr'] = line[3]
results['v2.root'] = line[4]
results['v2.lookup'] = line[5]
results['v2.readlink'] = line[6]
results['v2.read'] = line[7]
results['v2.wrcache'] = line[8]
results['v2.write'] = line[9]
results['v2.create'] = line[10]
results['v2.remove'] = line[11]
results['v2.rename'] = line[12]
results['v2.link'] = line[13]
results['v2.symlink'] = line[14]
results['v2.mkdir'] = line[15]
results['v2.rmdir'] = line[16]
results['v2.readdir'] = line[17]
results['v2.fsstat'] = line[18]
elif line[0] == 'proc3':
line.pop(1) # remove column-cnt field
results['v3.null'] = line[1]
results['v3.getattr'] = line[2]
results['v3.setattr'] = line[3]
results['v3.lookup'] = line[4]
results['v3.access'] = line[5]
results['v3.readlink'] = line[6]
results['v3.read'] = line[7]
results['v3.write'] = line[8]
results['v3.create'] = line[9]
results['v3.mkdir'] = line[10]
results['v3.symlink'] = line[11]
results['v3.mknod'] = line[12]
results['v3.remove'] = line[13]
results['v3.rmdir'] = line[14]
results['v3.rename'] = line[15]
results['v3.link'] = line[16]
results['v3.readdir'] = line[17]
results['v3.readdirplus'] = line[18]
results['v3.fsstat'] = line[19]
results['v3.fsinfo'] = line[20]
results['v3.pathconf'] = line[21]
results['v3.commit'] = line[22]
elif line[0] == 'proc4':
line.pop(1) # remove column-cnt field
results['v4.null'] = line[1]
results['v4.read'] = line[2]
results['v4.write'] = line[3]
results['v4.commit'] = line[4]
results['v4.open'] = line[5]
results['v4.open_conf'] = line[6]
results['v4.open_noat'] = line[7]
results['v4.open_dgrd'] = line[8]
results['v4.close'] = line[9]
results['v4.setattr'] = line[10]
results['v4.fsinfo'] = line[11]
results['v4.renew'] = line[12]
results['v4.setclntid'] = line[13]
results['v4.confirm'] = line[14]
results['v4.lock'] = line[15]
results['v4.lockt'] = line[16]
results['v4.locku'] = line[17]
results['v4.access'] = line[18]
results['v4.getattr'] = line[19]
results['v4.lookup'] = line[20]
results['v4.lookup_root'] = line[21]
results['v4.remove'] = line[22]
results['v4.rename'] = line[23]
results['v4.link'] = line[24]
results['v4.symlink'] = line[25]
results['v4.create'] = line[26]
results['v4.pathconf'] = line[27]
results['v4.statfs'] = line[28]
results['v4.readlink'] = line[29]
results['v4.readdir'] = line[30]
try:
results['v4.server_caps'] = line[31]
except IndexError:
pass
try:
results['v4.delegreturn'] = line[32]
except IndexError:
pass
try:
results['v4.getacl'] = line[33]
except IndexError:
pass
try:
results['v4.setacl'] = line[34]
except IndexError:
pass
try:
results['v4.fs_locations'] = line[35]
except IndexError:
pass
try:
results['v4.rel_lkowner'] = line[36]
except IndexError:
pass
try:
results['v4.exchange_id'] = line[37]
except IndexError:
pass
try:
results['v4.create_ses'] = line[38]
except IndexError:
pass
try:
results['v4.destroy_ses'] = line[39]
except IndexError:
pass
try:
results['v4.sequence'] = line[40]
except IndexError:
pass
try:
results['v4.get_lease_t'] = line[41]
except IndexError:
pass
try:
results['v4.reclaim_comp'] = line[42]
except IndexError:
pass
try:
results['v4.layoutget'] = line[43]
except IndexError:
pass
try:
results['v4.layoutcommit'] = line[44]
except IndexError:
pass
try:
results['v4.layoutreturn'] = line[45]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[46]
except IndexError:
pass
try:
results['v4.getdevinfo'] = line[47]
except IndexError:
pass
try:
results['v4.ds_write'] = line[48]
except IndexError:
pass
try:
results['v4.ds_commit'] = line[49]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[50]
except IndexError:
pass
# Close File
file.close()
for stat in results.keys():
metric_name = stat
metric_value = long(float(results[stat]))
metric_value = self.derivative(metric_name, metric_value)
self.publish(metric_name, metric_value, precision=3)
return True
return False
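# Illustrative note (not part of the collector): /proc/net/rpc/nfs holds one
# space-separated counter line per statistics group, roughly like (values
# hypothetical):
#     net 1234 1000 234 2
#     rpc 5678 3 9
#     proc3 22 0 15 8 ...
# The 'rpc' line above would publish rpc.calls, rpc.retrans and rpc.authrefrsh;
# every value is passed through self.derivative() so the published metric is a
# per-interval delta rather than the raw counter.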
|
from django.conf import settings
from django.contrib.auth import logout
from django.core.cache import cache
from django.middleware.csrf import rotate_token
from django.shortcuts import redirect
from django.template.loader import render_to_string
from weblate.utils import messages
from weblate.utils.hash import calculate_checksum
from weblate.utils.request import get_ip_address
def get_cache_key(scope, request=None, address=None, user=None):
"""Generate cache key for request."""
if (request and request.user.is_authenticated) or user:
if user:
key = user.id
else:
key = request.user.id
origin = "user"
else:
if address is None:
address = get_ip_address(request)
origin = "ip"
key = calculate_checksum(address)
return f"ratelimit-{origin}-{scope}-{key}"
def reset_rate_limit(scope, request=None, address=None, user=None):
"""Resets rate limit."""
cache.delete(get_cache_key(scope, request, address, user))
def get_rate_setting(scope, suffix):
key = f"RATELIMIT_{scope.upper()}_{suffix}"
if hasattr(settings, key):
return getattr(settings, key)
return getattr(settings, f"RATELIMIT_{suffix}")
def revert_rate_limit(scope, request):
"""Revert rate limit to previous state.
This can be used when rate limiting POST, but ignoring some events.
"""
key = get_cache_key(scope, request)
try:
# Try to decrease cache key
cache.decr(key)
except ValueError:
pass
def check_rate_limit(scope, request):
"""Check authentication rate limit."""
key = get_cache_key(scope, request)
try:
# Try to increase cache key
attempts = cache.incr(key)
except ValueError:
# No such key, so set it
cache.set(key, 1, get_rate_setting(scope, "WINDOW"))
attempts = 1
if attempts > get_rate_setting(scope, "ATTEMPTS"):
# Set key to longer expiry for lockout period
cache.set(key, attempts, get_rate_setting(scope, "LOCKOUT"))
return False
return True
def session_ratelimit_post(scope):
def session_ratelimit_post_inner(function):
"""Session based rate limiting for POST requests."""
def rate_wrap(request, *args, **kwargs):
if request.method == "POST" and not check_rate_limit(scope, request):
# Rotate session token
rotate_token(request)
# Logout user
do_logout = request.user.is_authenticated
if do_logout:
logout(request)
messages.error(
request,
render_to_string(
"ratelimit.html", {"do_logout": do_logout, "user": request.user}
),
)
return redirect("login")
return function(request, *args, **kwargs)
return rate_wrap
return session_ratelimit_post_inner
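# Illustrative usage sketch (not part of this module): session_ratelimit_post
# is meant to decorate a Django view; the scope name and view below are
# hypothetical.
#
#     @session_ratelimit_post("search")
#     def search_view(request):
#         ...
#
# Once check_rate_limit() counts more POSTs than RATELIMIT_SEARCH_ATTEMPTS
# (falling back to RATELIMIT_ATTEMPTS) within the configured window, the CSRF
# token is rotated, an authenticated user is logged out, and the request is
# redirected to the login page with the ratelimit message shown.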
|
from typing import TYPE_CHECKING, Callable, Dict
from aioswitcher.api import SwitcherV2Api
from aioswitcher.consts import (
COMMAND_OFF,
COMMAND_ON,
STATE_OFF as SWITCHER_STATE_OFF,
STATE_ON as SWITCHER_STATE_ON,
WAITING_TEXT,
)
from homeassistant.components.switch import ATTR_CURRENT_POWER_W, SwitchEntity
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_AUTO_OFF_SET,
ATTR_ELECTRIC_CURRENT,
ATTR_REMAINING_TIME,
DATA_DEVICE,
DOMAIN,
SIGNAL_SWITCHER_DEVICE_UPDATE,
)
# pylint: disable=ungrouped-imports
if TYPE_CHECKING:
from aioswitcher.api.messages import SwitcherV2ControlResponseMSG
from aioswitcher.devices import SwitcherV2Device
DEVICE_PROPERTIES_TO_HA_ATTRIBUTES = {
"power_consumption": ATTR_CURRENT_POWER_W,
"electric_current": ATTR_ELECTRIC_CURRENT,
"remaining_time": ATTR_REMAINING_TIME,
"auto_off_set": ATTR_AUTO_OFF_SET,
}
async def async_setup_platform(
hass: HomeAssistantType,
config: Dict,
async_add_entities: Callable,
discovery_info: Dict,
) -> None:
"""Set up the switcher platform for the switch component."""
if discovery_info is None:
return
async_add_entities([SwitcherControl(hass.data[DOMAIN][DATA_DEVICE])])
class SwitcherControl(SwitchEntity):
"""Home Assistant switch entity."""
def __init__(self, device_data: "SwitcherV2Device") -> None:
"""Initialize the entity."""
self._self_initiated = False
self._device_data = device_data
self._state = device_data.state
@property
def name(self) -> str:
"""Return the device's name."""
return self._device_data.name
@property
def should_poll(self) -> bool:
"""Return False, entity pushes its state to HA."""
return False
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return f"{self._device_data.device_id}-{self._device_data.mac_addr}"
@property
def is_on(self) -> bool:
"""Return True if entity is on."""
return self._state == SWITCHER_STATE_ON
@property
def current_power_w(self) -> int:
"""Return the current power usage in W."""
return self._device_data.power_consumption
@property
def device_state_attributes(self) -> Dict:
"""Return the optional state attributes."""
attribs = {}
for prop, attr in DEVICE_PROPERTIES_TO_HA_ATTRIBUTES.items():
value = getattr(self._device_data, prop)
if value and value is not WAITING_TEXT:
attribs[attr] = value
return attribs
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._state in [SWITCHER_STATE_ON, SWITCHER_STATE_OFF]
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
self.async_on_remove(
async_dispatcher_connect(
self.hass, SIGNAL_SWITCHER_DEVICE_UPDATE, self.async_update_data
)
)
async def async_update_data(self, device_data: "SwitcherV2Device") -> None:
"""Update the entity data."""
if device_data:
if self._self_initiated:
self._self_initiated = False
else:
self._device_data = device_data
self._state = self._device_data.state
self.async_write_ha_state()
async def async_turn_on(self, **kwargs: Dict) -> None:
"""Turn the entity on."""
await self._control_device(True)
async def async_turn_off(self, **kwargs: Dict) -> None:
"""Turn the entity off."""
await self._control_device(False)
async def _control_device(self, send_on: bool) -> None:
"""Turn the entity on or off."""
response: "SwitcherV2ControlResponseMSG" = None
async with SwitcherV2Api(
self.hass.loop,
self._device_data.ip_addr,
self._device_data.phone_id,
self._device_data.device_id,
self._device_data.device_password,
) as swapi:
response = await swapi.control_device(
COMMAND_ON if send_on else COMMAND_OFF
)
if response and response.successful:
self._self_initiated = True
self._state = SWITCHER_STATE_ON if send_on else SWITCHER_STATE_OFF
self.async_write_ha_state()
|
from flask import Flask
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
from flasgger import Swagger
swagger_config = {
'headers': [
],
'specs': [
{
'endpoint': 'apispec',
'route': '/apispec.json',
'rule_filter': lambda rule: True,
'model_filter': lambda tag: True,
}
],
'swagger_ui': False
}
app = Flask(__name__)
swag = Swagger(app, config=swagger_config)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
assert not specs_data
assert client.get('/apidocs/').status_code == HTTPStatus.NOT_FOUND
assert client.get('/apispec.json').status_code == HTTPStatus.OK
if __name__ == '__main__':
app.run(debug=True)
|
from elgato import Elgato, ElgatoConnectionError
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import DATA_ELGATO_CLIENT, DOMAIN
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Elgato Key Light components."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Elgato Key Light from a config entry."""
session = async_get_clientsession(hass)
elgato = Elgato(
entry.data[CONF_HOST],
port=entry.data[CONF_PORT],
session=session,
)
# Ensure we can connect to it
try:
await elgato.info()
except ElgatoConnectionError as exception:
raise ConfigEntryNotReady from exception
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {DATA_ELGATO_CLIENT: elgato}
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, LIGHT_DOMAIN)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Elgato Key Light config entry."""
# Unload entities for this entry/device.
await hass.config_entries.async_forward_entry_unload(entry, LIGHT_DOMAIN)
# Cleanup
del hass.data[DOMAIN][entry.entry_id]
if not hass.data[DOMAIN]:
del hass.data[DOMAIN]
return True
|
import importlib
import logging
import smart_open.local_file
logger = logging.getLogger(__name__)
NO_SCHEME = ''
_REGISTRY = {NO_SCHEME: smart_open.local_file}
_ERRORS = {}
_MISSING_DEPS_ERROR = """You are trying to use the %(module)s functionality of smart_open
but you do not have the correct %(module)s dependencies installed. Try:
pip install smart_open[%(module)s]
"""
def register_transport(submodule):
"""Register a submodule as a transport mechanism for ``smart_open``.
This module **must** have:
- `SCHEME` attribute (or `SCHEMES`, if the submodule supports multiple schemes)
- `open` function
- `open_uri` function
    - `parse_uri` function
Once registered, you can get the submodule by calling :func:`get_transport`.
"""
global _REGISTRY, _ERRORS
module_name = submodule
if isinstance(submodule, str):
try:
submodule = importlib.import_module(submodule)
except ImportError:
return
else:
module_name = submodule.__name__
# Save only the last module name piece
module_name = module_name.rsplit(".")[-1]
if hasattr(submodule, 'SCHEME'):
schemes = [submodule.SCHEME]
elif hasattr(submodule, 'SCHEMES'):
schemes = submodule.SCHEMES
else:
raise ValueError('%r does not have a .SCHEME or .SCHEMES attribute' % submodule)
for f in ('open', 'open_uri', 'parse_uri'):
assert hasattr(submodule, f), '%r is missing %r' % (submodule, f)
for scheme in schemes:
assert scheme not in _REGISTRY
if getattr(submodule, "MISSING_DEPS", False):
_ERRORS[scheme] = module_name
else:
_REGISTRY[scheme] = submodule
def get_transport(scheme):
"""Get the submodule that handles transport for the specified scheme.
This submodule must have been previously registered via :func:`register_transport`.
"""
global _ERRORS, _MISSING_DEPS_ERROR, _REGISTRY, SUPPORTED_SCHEMES
expected = SUPPORTED_SCHEMES
readme_url = 'https://github.com/RaRe-Technologies/smart_open/blob/master/README.rst'
message = (
"Unable to handle scheme %(scheme)r, expected one of %(expected)r. "
"Extra dependencies required by %(scheme)r may be missing. "
"See <%(readme_url)s> for details." % locals()
)
if scheme in _ERRORS:
raise ImportError(_MISSING_DEPS_ERROR % dict(module=_ERRORS[scheme]))
if scheme in _REGISTRY:
return _REGISTRY[scheme]
raise NotImplementedError(message)
register_transport(smart_open.local_file)
register_transport('smart_open.azure')
register_transport('smart_open.gcs')
register_transport('smart_open.hdfs')
register_transport('smart_open.http')
register_transport('smart_open.s3')
register_transport('smart_open.ssh')
register_transport('smart_open.webhdfs')
SUPPORTED_SCHEMES = tuple(sorted(_REGISTRY.keys()))
"""The transport schemes that the local installation of ``smart_open`` supports."""
|
from abc import ABC, abstractmethod
class Index(ABC):
@abstractmethod
def __init__(self): # pragma: no cover
pass
@abstractmethod
def index(self, doc): # pragma: no cover
pass
@abstractmethod
def unindex(self, doc): # pragma: no cover
pass
    @abstractmethod
    def search(self, doc, threshold=0):  # pragma: no cover
pass
@abstractmethod
def initSearch(self): # pragma: no cover
pass
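# A minimal in-memory sketch (not part of the original module) showing how the
# abstract interface is meant to be satisfied; real implementations score
# candidates against the threshold instead of doing exact matching.
class InMemoryIndex(Index):
    """Naive reference implementation backed by a plain set."""
    def __init__(self):
        self._docs = set()
    def index(self, doc):
        self._docs.add(doc)
    def unindex(self, doc):
        self._docs.discard(doc)
    def search(self, doc, threshold=0):
        # Exact-match lookup only; the threshold argument is ignored here.
        return [doc] if doc in self._docs else []
    def initSearch(self):
        pass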
|
from homeassistant.components.homekit.const import (
ATTR_KEY_NAME,
ATTR_VALUE,
CHAR_REMOTE_KEY,
CONF_FEATURE_LIST,
EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED,
FEATURE_ON_OFF,
FEATURE_PLAY_PAUSE,
FEATURE_PLAY_STOP,
FEATURE_TOGGLE_MUTE,
KEY_ARROW_RIGHT,
)
from homeassistant.components.homekit.type_media_players import (
MediaPlayer,
TelevisionMediaPlayer,
)
from homeassistant.components.media_player import DEVICE_CLASS_TV
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
DOMAIN,
)
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
EVENT_HOMEASSISTANT_START,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
)
from homeassistant.core import CoreState
from homeassistant.helpers import entity_registry
from tests.common import async_mock_service
async def test_media_player_set_state(hass, hk_driver, events):
"""Test if accessory and HA are updated accordingly."""
config = {
CONF_FEATURE_LIST: {
FEATURE_ON_OFF: None,
FEATURE_PLAY_PAUSE: None,
FEATURE_PLAY_STOP: None,
FEATURE_TOGGLE_MUTE: None,
}
}
entity_id = "media_player.test"
hass.states.async_set(
entity_id,
None,
{ATTR_SUPPORTED_FEATURES: 20873, ATTR_MEDIA_VOLUME_MUTED: False},
)
await hass.async_block_till_done()
acc = MediaPlayer(hass, hk_driver, "MediaPlayer", entity_id, 2, config)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 8 # Switch
assert acc.chars[FEATURE_ON_OFF].value is False
assert acc.chars[FEATURE_PLAY_PAUSE].value is False
assert acc.chars[FEATURE_PLAY_STOP].value is False
assert acc.chars[FEATURE_TOGGLE_MUTE].value is False
hass.states.async_set(entity_id, STATE_ON, {ATTR_MEDIA_VOLUME_MUTED: True})
await hass.async_block_till_done()
assert acc.chars[FEATURE_ON_OFF].value is True
assert acc.chars[FEATURE_TOGGLE_MUTE].value is True
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.chars[FEATURE_ON_OFF].value is False
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.chars[FEATURE_ON_OFF].value is True
hass.states.async_set(entity_id, STATE_STANDBY)
await hass.async_block_till_done()
assert acc.chars[FEATURE_ON_OFF].value is False
hass.states.async_set(entity_id, STATE_PLAYING)
await hass.async_block_till_done()
assert acc.chars[FEATURE_PLAY_PAUSE].value is True
assert acc.chars[FEATURE_PLAY_STOP].value is True
hass.states.async_set(entity_id, STATE_PAUSED)
await hass.async_block_till_done()
assert acc.chars[FEATURE_PLAY_PAUSE].value is False
hass.states.async_set(entity_id, STATE_IDLE)
await hass.async_block_till_done()
assert acc.chars[FEATURE_PLAY_STOP].value is False
# Set from HomeKit
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
call_media_play = async_mock_service(hass, DOMAIN, "media_play")
call_media_pause = async_mock_service(hass, DOMAIN, "media_pause")
call_media_stop = async_mock_service(hass, DOMAIN, "media_stop")
call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute")
await hass.async_add_executor_job(
acc.chars[FEATURE_ON_OFF].client_update_value, True
)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_ON_OFF].client_update_value, False
)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_PLAY_PAUSE].client_update_value, True
)
await hass.async_block_till_done()
assert call_media_play
assert call_media_play[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_PLAY_PAUSE].client_update_value, False
)
await hass.async_block_till_done()
assert call_media_pause
assert call_media_pause[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 4
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_PLAY_STOP].client_update_value, True
)
await hass.async_block_till_done()
assert call_media_play
assert call_media_play[1].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 5
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_PLAY_STOP].client_update_value, False
)
await hass.async_block_till_done()
assert call_media_stop
assert call_media_stop[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 6
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_TOGGLE_MUTE].client_update_value, True
)
await hass.async_block_till_done()
assert call_toggle_mute
assert call_toggle_mute[0].data[ATTR_ENTITY_ID] == entity_id
assert call_toggle_mute[0].data[ATTR_MEDIA_VOLUME_MUTED] is True
assert len(events) == 7
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(
acc.chars[FEATURE_TOGGLE_MUTE].client_update_value, False
)
await hass.async_block_till_done()
assert call_toggle_mute
assert call_toggle_mute[1].data[ATTR_ENTITY_ID] == entity_id
assert call_toggle_mute[1].data[ATTR_MEDIA_VOLUME_MUTED] is False
assert len(events) == 8
assert events[-1].data[ATTR_VALUE] is None
async def test_media_player_television(hass, hk_driver, events, caplog):
"""Test if television accessory and HA are updated accordingly."""
entity_id = "media_player.television"
# Supports 'select_source', 'volume_step', 'turn_on', 'turn_off',
# 'volume_mute', 'volume_set', 'pause'
hass.states.async_set(
entity_id,
None,
{
ATTR_DEVICE_CLASS: DEVICE_CLASS_TV,
ATTR_SUPPORTED_FEATURES: 3469,
ATTR_MEDIA_VOLUME_MUTED: False,
ATTR_INPUT_SOURCE_LIST: ["HDMI 1", "HDMI 2", "HDMI 3", "HDMI 4"],
},
)
await hass.async_block_till_done()
acc = TelevisionMediaPlayer(hass, hk_driver, "MediaPlayer", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.aid == 2
assert acc.category == 31 # Television
assert acc.char_active.value == 0
assert acc.char_remote_key.value == 0
assert acc.char_input_source.value == 0
assert acc.char_mute.value is False
hass.states.async_set(entity_id, STATE_ON, {ATTR_MEDIA_VOLUME_MUTED: True})
await hass.async_block_till_done()
assert acc.char_active.value == 1
assert acc.char_mute.value is True
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(entity_id, STATE_ON)
await hass.async_block_till_done()
assert acc.char_active.value == 1
hass.states.async_set(entity_id, STATE_STANDBY)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(entity_id, STATE_ON, {ATTR_INPUT_SOURCE: "HDMI 2"})
await hass.async_block_till_done()
assert acc.char_input_source.value == 1
hass.states.async_set(entity_id, STATE_ON, {ATTR_INPUT_SOURCE: "HDMI 3"})
await hass.async_block_till_done()
assert acc.char_input_source.value == 2
hass.states.async_set(entity_id, STATE_ON, {ATTR_INPUT_SOURCE: "HDMI 5"})
await hass.async_block_till_done()
assert acc.char_input_source.value == 0
assert caplog.records[-2].levelname == "WARNING"
# Set from HomeKit
call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
call_media_play = async_mock_service(hass, DOMAIN, "media_play")
call_media_pause = async_mock_service(hass, DOMAIN, "media_pause")
call_media_play_pause = async_mock_service(hass, DOMAIN, "media_play_pause")
call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute")
call_select_source = async_mock_service(hass, DOMAIN, "select_source")
call_volume_up = async_mock_service(hass, DOMAIN, "volume_up")
call_volume_down = async_mock_service(hass, DOMAIN, "volume_down")
call_volume_set = async_mock_service(hass, DOMAIN, "volume_set")
await hass.async_add_executor_job(acc.char_active.client_update_value, 1)
await hass.async_block_till_done()
assert call_turn_on
assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 1
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_active.client_update_value, 0)
await hass.async_block_till_done()
assert call_turn_off
assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 2
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 11)
await hass.async_block_till_done()
assert call_media_play_pause
assert call_media_play_pause[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 3
assert events[-1].data[ATTR_VALUE] is None
hass.states.async_set(entity_id, STATE_PLAYING)
await hass.async_block_till_done()
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 11)
await hass.async_block_till_done()
assert call_media_pause
assert call_media_pause[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 4
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 10)
await hass.async_block_till_done()
assert len(events) == 4
assert events[-1].data[ATTR_VALUE] is None
hass.states.async_set(entity_id, STATE_PAUSED)
await hass.async_block_till_done()
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 11)
await hass.async_block_till_done()
assert call_media_play
assert call_media_play[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 5
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_mute.client_update_value, True)
await hass.async_block_till_done()
assert call_toggle_mute
assert call_toggle_mute[0].data[ATTR_ENTITY_ID] == entity_id
assert call_toggle_mute[0].data[ATTR_MEDIA_VOLUME_MUTED] is True
assert len(events) == 6
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_mute.client_update_value, False)
await hass.async_block_till_done()
assert call_toggle_mute
assert call_toggle_mute[1].data[ATTR_ENTITY_ID] == entity_id
assert call_toggle_mute[1].data[ATTR_MEDIA_VOLUME_MUTED] is False
assert len(events) == 7
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_input_source.client_update_value, 1)
await hass.async_block_till_done()
assert call_select_source
assert call_select_source[0].data[ATTR_ENTITY_ID] == entity_id
assert call_select_source[0].data[ATTR_INPUT_SOURCE] == "HDMI 2"
assert len(events) == 8
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_volume_selector.client_update_value, 0)
await hass.async_block_till_done()
assert call_volume_up
assert call_volume_up[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 9
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_volume_selector.client_update_value, 1)
await hass.async_block_till_done()
assert call_volume_down
assert call_volume_down[0].data[ATTR_ENTITY_ID] == entity_id
assert len(events) == 10
assert events[-1].data[ATTR_VALUE] is None
await hass.async_add_executor_job(acc.char_volume.client_update_value, 20)
await hass.async_block_till_done()
assert call_volume_set[0]
assert call_volume_set[0].data[ATTR_ENTITY_ID] == entity_id
assert call_volume_set[0].data[ATTR_MEDIA_VOLUME_LEVEL] == 20
assert len(events) == 11
assert events[-1].data[ATTR_VALUE] is None
events = []
def listener(event):
events.append(event)
hass.bus.async_listen(EVENT_HOMEKIT_TV_REMOTE_KEY_PRESSED, listener)
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 20)
await hass.async_block_till_done()
await hass.async_add_executor_job(acc.char_remote_key.client_update_value, 7)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data[ATTR_KEY_NAME] == KEY_ARROW_RIGHT
async def test_media_player_television_basic(hass, hk_driver, events, caplog):
"""Test if basic television accessory and HA are updated accordingly."""
entity_id = "media_player.television"
    # Supports 'turn_on', 'turn_off'
hass.states.async_set(
entity_id,
None,
{ATTR_DEVICE_CLASS: DEVICE_CLASS_TV, ATTR_SUPPORTED_FEATURES: 384},
)
await hass.async_block_till_done()
acc = TelevisionMediaPlayer(hass, hk_driver, "MediaPlayer", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.chars_tv == [CHAR_REMOTE_KEY]
assert acc.chars_speaker == []
assert acc.support_select_source is False
hass.states.async_set(entity_id, STATE_ON, {ATTR_MEDIA_VOLUME_MUTED: True})
await hass.async_block_till_done()
assert acc.char_active.value == 1
hass.states.async_set(entity_id, STATE_OFF)
await hass.async_block_till_done()
assert acc.char_active.value == 0
hass.states.async_set(entity_id, STATE_ON, {ATTR_INPUT_SOURCE: "HDMI 3"})
await hass.async_block_till_done()
assert acc.char_active.value == 1
assert not caplog.messages or "Error" not in caplog.messages[-1]
async def test_media_player_television_supports_source_select_no_sources(
hass, hk_driver, events, caplog
):
"""Test if basic tv that supports source select but is missing a source list."""
entity_id = "media_player.television"
    # Supports 'turn_on', 'turn_off'
hass.states.async_set(
entity_id,
None,
{ATTR_DEVICE_CLASS: DEVICE_CLASS_TV, ATTR_SUPPORTED_FEATURES: 3469},
)
await hass.async_block_till_done()
acc = TelevisionMediaPlayer(hass, hk_driver, "MediaPlayer", entity_id, 2, None)
await acc.run_handler()
await hass.async_block_till_done()
assert acc.support_select_source is False
async def test_tv_restore(hass, hk_driver, events):
"""Test setting up an entity from state in the event registry."""
hass.state = CoreState.not_running
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
"media_player",
"generic",
"1234",
suggested_object_id="simple",
device_class=DEVICE_CLASS_TV,
)
registry.async_get_or_create(
"media_player",
"generic",
"9012",
suggested_object_id="all_info_set",
capabilities={
ATTR_INPUT_SOURCE_LIST: ["HDMI 1", "HDMI 2", "HDMI 3", "HDMI 4"],
},
supported_features=3469,
device_class=DEVICE_CLASS_TV,
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {})
await hass.async_block_till_done()
acc = TelevisionMediaPlayer(
hass, hk_driver, "MediaPlayer", "media_player.simple", 2, None
)
assert acc.category == 31
assert acc.chars_tv == [CHAR_REMOTE_KEY]
assert acc.chars_speaker == []
assert acc.support_select_source is False
assert not hasattr(acc, "char_input_source")
acc = TelevisionMediaPlayer(
hass, hk_driver, "MediaPlayer", "media_player.all_info_set", 2, None
)
assert acc.category == 31
assert acc.chars_tv == [CHAR_REMOTE_KEY]
assert acc.chars_speaker == [
"Name",
"Active",
"VolumeControlType",
"VolumeSelector",
"Volume",
]
assert acc.support_select_source is True
assert acc.char_input_source is not None
|
import requests
from homeassistant.const import CONF_DEVICES, TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
from .const import (
ATTR_STATE_DEVICE_LOCKED,
ATTR_STATE_LOCKED,
CONF_CONNECTIONS,
DOMAIN as FRITZBOX_DOMAIN,
LOGGER,
)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Fritzbox smarthome sensor from config_entry."""
entities = []
devices = hass.data[FRITZBOX_DOMAIN][CONF_DEVICES]
fritz = hass.data[FRITZBOX_DOMAIN][CONF_CONNECTIONS][config_entry.entry_id]
for device in await hass.async_add_executor_job(fritz.get_devices):
if (
device.has_temperature_sensor
and not device.has_switch
and not device.has_thermostat
and device.ain not in devices
):
entities.append(FritzBoxTempSensor(device, fritz))
devices.add(device.ain)
async_add_entities(entities)
class FritzBoxTempSensor(Entity):
"""The entity class for Fritzbox temperature sensors."""
def __init__(self, device, fritz):
"""Initialize the switch."""
self._device = device
self._fritz = fritz
@property
def device_info(self):
"""Return device specific attributes."""
return {
"name": self.name,
"identifiers": {(FRITZBOX_DOMAIN, self._device.ain)},
"manufacturer": self._device.manufacturer,
"model": self._device.productname,
"sw_version": self._device.fw_version,
}
@property
def unique_id(self):
"""Return the unique ID of the device."""
return self._device.ain
@property
def name(self):
"""Return the name of the device."""
return self._device.name
@property
def state(self):
"""Return the state of the sensor."""
return self._device.temperature
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
def update(self):
"""Get latest data and states from the device."""
try:
self._device.update()
except requests.exceptions.HTTPError as ex:
LOGGER.warning("Fritzhome connection error: %s", ex)
self._fritz.login()
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attrs = {
ATTR_STATE_DEVICE_LOCKED: self._device.device_lock,
ATTR_STATE_LOCKED: self._device.lock,
}
return attrs
|
import asyncio
import logging
import ambiclimate
import voluptuous as vol
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_NAME,
ATTR_TEMPERATURE,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
TEMP_CELSIUS,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import (
ATTR_VALUE,
DOMAIN,
SERVICE_COMFORT_FEEDBACK,
SERVICE_COMFORT_MODE,
SERVICE_TEMPERATURE_MODE,
STORAGE_KEY,
STORAGE_VERSION,
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE
SEND_COMFORT_FEEDBACK_SCHEMA = vol.Schema(
{vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_VALUE): cv.string}
)
SET_COMFORT_MODE_SCHEMA = vol.Schema({vol.Required(ATTR_NAME): cv.string})
SET_TEMPERATURE_MODE_SCHEMA = vol.Schema(
{vol.Required(ATTR_NAME): cv.string, vol.Required(ATTR_VALUE): cv.string}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Ambicliamte device."""
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Ambicliamte device from config entry."""
config = entry.data
websession = async_get_clientsession(hass)
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
token_info = await store.async_load()
oauth = ambiclimate.AmbiclimateOAuth(
config[CONF_CLIENT_ID],
config[CONF_CLIENT_SECRET],
config["callback_url"],
websession,
)
try:
token_info = await oauth.refresh_access_token(token_info)
except ambiclimate.AmbiclimateOauthError:
token_info = None
if not token_info:
_LOGGER.error("Failed to refresh access token")
return
await store.async_save(token_info)
data_connection = ambiclimate.AmbiclimateConnection(
oauth, token_info=token_info, websession=websession
)
if not await data_connection.find_devices():
_LOGGER.error("No devices found")
return
tasks = []
for heater in data_connection.get_devices():
tasks.append(heater.update_device_info())
await asyncio.wait(tasks)
devs = []
for heater in data_connection.get_devices():
devs.append(AmbiclimateEntity(heater, store))
async_add_entities(devs, True)
async def send_comfort_feedback(service):
"""Send comfort feedback."""
device_name = service.data[ATTR_NAME]
device = data_connection.find_device_by_room_name(device_name)
if device:
await device.set_comfort_feedback(service.data[ATTR_VALUE])
hass.services.async_register(
DOMAIN,
SERVICE_COMFORT_FEEDBACK,
send_comfort_feedback,
schema=SEND_COMFORT_FEEDBACK_SCHEMA,
)
async def set_comfort_mode(service):
"""Set comfort mode."""
device_name = service.data[ATTR_NAME]
device = data_connection.find_device_by_room_name(device_name)
if device:
await device.set_comfort_mode()
hass.services.async_register(
DOMAIN, SERVICE_COMFORT_MODE, set_comfort_mode, schema=SET_COMFORT_MODE_SCHEMA
)
async def set_temperature_mode(service):
"""Set temperature mode."""
device_name = service.data[ATTR_NAME]
device = data_connection.find_device_by_room_name(device_name)
if device:
await device.set_temperature_mode(service.data[ATTR_VALUE])
hass.services.async_register(
DOMAIN,
SERVICE_TEMPERATURE_MODE,
set_temperature_mode,
schema=SET_TEMPERATURE_MODE_SCHEMA,
)
class AmbiclimateEntity(ClimateEntity):
"""Representation of a Ambiclimate Thermostat device."""
def __init__(self, heater, store):
"""Initialize the thermostat."""
self._heater = heater
self._store = store
self._data = {}
@property
def unique_id(self):
"""Return a unique ID."""
return self._heater.device_id
@property
def name(self):
"""Return the name of the entity."""
return self._heater.name
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Ambiclimate",
}
@property
def temperature_unit(self):
"""Return the unit of measurement which this thermostat uses."""
return TEMP_CELSIUS
@property
def target_temperature(self):
"""Return the target temperature."""
return self._data.get("target_temperature")
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 1
@property
def current_temperature(self):
"""Return the current temperature."""
return self._data.get("temperature")
@property
def current_humidity(self):
"""Return the current humidity."""
return self._data.get("humidity")
@property
def min_temp(self):
"""Return the minimum temperature."""
return self._heater.get_min_temp()
@property
def max_temp(self):
"""Return the maximum temperature."""
return self._heater.get_max_temp()
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
@property
def hvac_mode(self):
"""Return current operation."""
if self._data.get("power", "").lower() == "on":
return HVAC_MODE_HEAT
return HVAC_MODE_OFF
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
await self._heater.set_target_temperature(temperature)
async def async_set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_HEAT:
await self._heater.turn_on()
return
if hvac_mode == HVAC_MODE_OFF:
await self._heater.turn_off()
async def async_update(self):
"""Retrieve latest state."""
try:
token_info = await self._heater.control.refresh_access_token()
except ambiclimate.AmbiclimateOauthError:
_LOGGER.error("Failed to refresh access token")
return
if token_info:
await self._store.async_save(token_info)
self._data = await self._heater.update_device()
|
import os
import aiohttp
from pyipp import IPPConnectionUpgradeRequired, IPPError
from homeassistant.components.ipp.const import CONF_BASE_PATH, CONF_UUID, DOMAIN
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_SSL,
CONF_TYPE,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
ATTR_HOSTNAME = "hostname"
ATTR_PROPERTIES = "properties"
HOST = "192.168.1.31"
PORT = 631
BASE_PATH = "/ipp/print"
IPP_ZEROCONF_SERVICE_TYPE = "_ipp._tcp.local."
IPPS_ZEROCONF_SERVICE_TYPE = "_ipps._tcp.local."
ZEROCONF_NAME = "EPSON XP-6000 Series"
ZEROCONF_HOST = HOST
ZEROCONF_HOSTNAME = "EPSON123456.local."
ZEROCONF_PORT = PORT
ZEROCONF_RP = "ipp/print"
MOCK_USER_INPUT = {
CONF_HOST: HOST,
CONF_PORT: PORT,
CONF_SSL: False,
CONF_VERIFY_SSL: False,
CONF_BASE_PATH: BASE_PATH,
}
MOCK_ZEROCONF_IPP_SERVICE_INFO = {
CONF_TYPE: IPP_ZEROCONF_SERVICE_TYPE,
CONF_NAME: f"{ZEROCONF_NAME}.{IPP_ZEROCONF_SERVICE_TYPE}",
CONF_HOST: ZEROCONF_HOST,
ATTR_HOSTNAME: ZEROCONF_HOSTNAME,
CONF_PORT: ZEROCONF_PORT,
ATTR_PROPERTIES: {"rp": ZEROCONF_RP},
}
MOCK_ZEROCONF_IPPS_SERVICE_INFO = {
CONF_TYPE: IPPS_ZEROCONF_SERVICE_TYPE,
CONF_NAME: f"{ZEROCONF_NAME}.{IPPS_ZEROCONF_SERVICE_TYPE}",
CONF_HOST: ZEROCONF_HOST,
ATTR_HOSTNAME: ZEROCONF_HOSTNAME,
CONF_PORT: ZEROCONF_PORT,
ATTR_PROPERTIES: {"rp": ZEROCONF_RP},
}
def load_fixture_binary(filename):
"""Load a binary fixture."""
path = os.path.join(os.path.dirname(__file__), "..", "..", "fixtures", filename)
with open(path, "rb") as fptr:
return fptr.read()
def mock_connection(
aioclient_mock: AiohttpClientMocker,
host: str = HOST,
port: int = PORT,
ssl: bool = False,
base_path: str = BASE_PATH,
conn_error: bool = False,
conn_upgrade_error: bool = False,
ipp_error: bool = False,
no_unique_id: bool = False,
parse_error: bool = False,
version_not_supported: bool = False,
):
"""Mock the IPP connection."""
scheme = "https" if ssl else "http"
ipp_url = f"{scheme}://{host}:{port}"
if ipp_error:
aioclient_mock.post(f"{ipp_url}{base_path}", exc=IPPError)
return
if conn_error:
aioclient_mock.post(f"{ipp_url}{base_path}", exc=aiohttp.ClientError)
return
if conn_upgrade_error:
aioclient_mock.post(f"{ipp_url}{base_path}", exc=IPPConnectionUpgradeRequired)
return
fixture = "ipp/get-printer-attributes.bin"
if no_unique_id:
fixture = "ipp/get-printer-attributes-success-nodata.bin"
elif version_not_supported:
fixture = "ipp/get-printer-attributes-error-0x0503.bin"
if parse_error:
content = "BAD"
else:
content = load_fixture_binary(fixture)
aioclient_mock.post(
f"{ipp_url}{base_path}",
content=content,
headers={"Content-Type": "application/ipp"},
)
async def init_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
skip_setup: bool = False,
host: str = HOST,
port: int = PORT,
ssl: bool = False,
base_path: str = BASE_PATH,
uuid: str = "cfe92100-67c4-11d4-a45f-f8d027761251",
unique_id: str = "cfe92100-67c4-11d4-a45f-f8d027761251",
conn_error: bool = False,
) -> MockConfigEntry:
"""Set up the IPP integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=unique_id,
data={
CONF_HOST: host,
CONF_PORT: port,
CONF_SSL: ssl,
CONF_VERIFY_SSL: True,
CONF_BASE_PATH: base_path,
CONF_UUID: uuid,
},
)
entry.add_to_hass(hass)
mock_connection(
aioclient_mock,
host=host,
port=port,
ssl=ssl,
base_path=base_path,
conn_error=conn_error,
)
if not skip_setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
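# Illustrative usage sketch (not part of these helpers): a test would normally
# pull in the aioclient_mock fixture and call init_integration, e.g.
#
#     async def test_example(hass, aioclient_mock):  # hypothetical test
#         entry = await init_integration(hass, aioclient_mock)
#         assert entry.data[CONF_HOST] == HOST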
|
from datetime import datetime, timedelta
import logging
import re
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MODE, HTTP_OK, TIME_MINUTES
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
ATTR_ATCOCODE = "atcocode"
ATTR_LOCALITY = "locality"
ATTR_STOP_NAME = "stop_name"
ATTR_REQUEST_TIME = "request_time"
ATTR_NEXT_BUSES = "next_buses"
ATTR_STATION_CODE = "station_code"
ATTR_CALLING_AT = "calling_at"
ATTR_NEXT_TRAINS = "next_trains"
CONF_API_APP_KEY = "app_key"
CONF_API_APP_ID = "app_id"
CONF_QUERIES = "queries"
CONF_ORIGIN = "origin"
CONF_DESTINATION = "destination"
_QUERY_SCHEME = vol.Schema(
{
vol.Required(CONF_MODE): vol.All(
cv.ensure_list, [vol.In(list(["bus", "train"]))]
),
vol.Required(CONF_ORIGIN): cv.string,
vol.Required(CONF_DESTINATION): cv.string,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_APP_ID): cv.string,
vol.Required(CONF_API_APP_KEY): cv.string,
vol.Required(CONF_QUERIES): [_QUERY_SCHEME],
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Get the uk_transport sensor."""
sensors = []
number_sensors = len(config.get(CONF_QUERIES))
interval = timedelta(seconds=87 * number_sensors)
for query in config.get(CONF_QUERIES):
if "bus" in query.get(CONF_MODE):
stop_atcocode = query.get(CONF_ORIGIN)
bus_direction = query.get(CONF_DESTINATION)
sensors.append(
UkTransportLiveBusTimeSensor(
config.get(CONF_API_APP_ID),
config.get(CONF_API_APP_KEY),
stop_atcocode,
bus_direction,
interval,
)
)
elif "train" in query.get(CONF_MODE):
station_code = query.get(CONF_ORIGIN)
calling_at = query.get(CONF_DESTINATION)
sensors.append(
UkTransportLiveTrainTimeSensor(
config.get(CONF_API_APP_ID),
config.get(CONF_API_APP_KEY),
station_code,
calling_at,
interval,
)
)
add_entities(sensors, True)
class UkTransportSensor(Entity):
"""
Sensor that reads the UK transport web API.
    transportapi.com provides comprehensive transport data for UK train, tube
    and bus travel via a simple JSON API. Subclasses of this base class can be
    used to access specific types of information.
"""
TRANSPORT_API_URL_BASE = "https://transportapi.com/v3/uk/"
ICON = "mdi:train"
def __init__(self, name, api_app_id, api_app_key, url):
"""Initialize the sensor."""
self._data = {}
self._api_app_id = api_app_id
self._api_app_key = api_app_key
self._url = self.TRANSPORT_API_URL_BASE + url
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_MINUTES
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self.ICON
def _do_api_request(self, params):
"""Perform an API request."""
request_params = dict(
{"app_id": self._api_app_id, "app_key": self._api_app_key}, **params
)
response = requests.get(self._url, params=request_params)
if response.status_code != HTTP_OK:
_LOGGER.warning("Invalid response from API")
elif "error" in response.json():
if "exceeded" in response.json()["error"]:
self._state = "Usage limits exceeded"
if "invalid" in response.json()["error"]:
self._state = "Credentials invalid"
else:
self._data = response.json()
class UkTransportLiveBusTimeSensor(UkTransportSensor):
"""Live bus time sensor from UK transportapi.com."""
ICON = "mdi:bus"
def __init__(self, api_app_id, api_app_key, stop_atcocode, bus_direction, interval):
"""Construct a live bus time sensor."""
self._stop_atcocode = stop_atcocode
self._bus_direction = bus_direction
self._next_buses = []
self._destination_re = re.compile(f"{bus_direction}", re.IGNORECASE)
sensor_name = f"Next bus to {bus_direction}"
stop_url = f"bus/stop/{stop_atcocode}/live.json"
UkTransportSensor.__init__(self, sensor_name, api_app_id, api_app_key, stop_url)
self.update = Throttle(interval)(self._update)
def _update(self):
"""Get the latest live departure data for the specified stop."""
params = {"group": "route", "nextbuses": "no"}
self._do_api_request(params)
if self._data != {}:
self._next_buses = []
for (route, departures) in self._data["departures"].items():
for departure in departures:
if self._destination_re.search(departure["direction"]):
self._next_buses.append(
{
"route": route,
"direction": departure["direction"],
"scheduled": departure["aimed_departure_time"],
"estimated": departure["best_departure_estimate"],
}
)
if self._next_buses:
self._state = min(
_delta_mins(bus["scheduled"]) for bus in self._next_buses
)
else:
self._state = None
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
attrs = {}
if self._data is not None:
for key in [
ATTR_ATCOCODE,
ATTR_LOCALITY,
ATTR_STOP_NAME,
ATTR_REQUEST_TIME,
]:
attrs[key] = self._data.get(key)
attrs[ATTR_NEXT_BUSES] = self._next_buses
return attrs
class UkTransportLiveTrainTimeSensor(UkTransportSensor):
"""Live train time sensor from UK transportapi.com."""
ICON = "mdi:train"
def __init__(self, api_app_id, api_app_key, station_code, calling_at, interval):
"""Construct a live bus time sensor."""
self._station_code = station_code
self._calling_at = calling_at
self._next_trains = []
sensor_name = f"Next train to {calling_at}"
query_url = f"train/station/{station_code}/live.json"
UkTransportSensor.__init__(
self, sensor_name, api_app_id, api_app_key, query_url
)
self.update = Throttle(interval)(self._update)
def _update(self):
"""Get the latest live departure data for the specified stop."""
params = {
"darwin": "false",
"calling_at": self._calling_at,
"train_status": "passenger",
}
self._do_api_request(params)
self._next_trains = []
if self._data != {}:
if self._data["departures"]["all"] == []:
self._state = "No departures"
else:
for departure in self._data["departures"]["all"]:
self._next_trains.append(
{
"origin_name": departure["origin_name"],
"destination_name": departure["destination_name"],
"status": departure["status"],
"scheduled": departure["aimed_departure_time"],
"estimated": departure["expected_departure_time"],
"platform": departure["platform"],
"operator_name": departure["operator_name"],
}
)
if self._next_trains:
self._state = min(
_delta_mins(train["scheduled"]) for train in self._next_trains
)
else:
self._state = None
@property
def device_state_attributes(self):
"""Return other details about the sensor state."""
attrs = {}
if self._data is not None:
attrs[ATTR_STATION_CODE] = self._station_code
attrs[ATTR_CALLING_AT] = self._calling_at
if self._next_trains:
attrs[ATTR_NEXT_TRAINS] = self._next_trains
return attrs
def _delta_mins(hhmm_time_str):
"""Calculate time delta in minutes to a time in hh:mm format."""
now = dt_util.now()
hhmm_time = datetime.strptime(hhmm_time_str, "%H:%M")
hhmm_datetime = now.replace(hour=hhmm_time.hour, minute=hhmm_time.minute)
if hhmm_datetime < now:
hhmm_datetime += timedelta(days=1)
delta_mins = (hhmm_datetime - now).seconds // 60
return delta_mins
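# Hedged illustration (not part of the original module): _delta_mins() measures
# forward from "now", so at 14:30 a "14:40" departure yields 10, while a time
# already in the past rolls over to the next day (at 23:50, "00:05" yields 15).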
|
from contextlib import contextmanager
from homeassistant.components.lock import (
DOMAIN as LOCK_DOMAIN,
SERVICE_LOCK,
SERVICE_UNLOCK,
)
from homeassistant.components.verisure import DOMAIN as VERISURE_DOMAIN
from homeassistant.const import STATE_UNLOCKED
from homeassistant.setup import async_setup_component
from tests.async_mock import call, patch
NO_DEFAULT_LOCK_CODE_CONFIG = {
"verisure": {
"username": "test",
"password": "test",
"locks": True,
"alarm": False,
"door_window": False,
"hygrometers": False,
"mouse": False,
"smartplugs": False,
"thermometers": False,
"smartcam": False,
}
}
DEFAULT_LOCK_CODE_CONFIG = {
"verisure": {
"username": "test",
"password": "test",
"locks": True,
"default_lock_code": "9999",
"alarm": False,
"door_window": False,
"hygrometers": False,
"mouse": False,
"smartplugs": False,
"thermometers": False,
"smartcam": False,
}
}
LOCKS = ["door_lock"]
@contextmanager
def mock_hub(config, get_response=LOCKS[0]):
"""Extensively mock out a verisure hub."""
hub_prefix = "homeassistant.components.verisure.lock.hub"
    # Since there is no config option to disable the ethernet status,
    # mock the hub for the binary sensor too
hub_binary_sensor = "homeassistant.components.verisure.binary_sensor.hub"
verisure_prefix = "verisure.Session"
with patch(verisure_prefix) as session, patch(hub_prefix) as hub:
session.login.return_value = True
hub.config = config["verisure"]
hub.get.return_value = LOCKS
hub.get_first.return_value = get_response.upper()
hub.session.set_lock_state.return_value = {
"doorLockStateChangeTransactionId": "test"
}
hub.session.get_lock_state_transaction.return_value = {"result": "OK"}
with patch(hub_binary_sensor, hub):
yield hub
async def setup_verisure_locks(hass, config):
"""Set up mock verisure locks."""
with mock_hub(config):
await async_setup_component(hass, VERISURE_DOMAIN, config)
await hass.async_block_till_done()
# lock.door_lock, ethernet_status
assert len(hass.states.async_all()) == 2
async def test_verisure_no_default_code(hass):
"""Test configs without a default lock code."""
await setup_verisure_locks(hass, NO_DEFAULT_LOCK_CODE_CONFIG)
with mock_hub(NO_DEFAULT_LOCK_CODE_CONFIG, STATE_UNLOCKED) as hub:
mock = hub.session.set_lock_state
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {"entity_id": "lock.door_lock"}
)
await hass.async_block_till_done()
assert mock.call_count == 0
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {"entity_id": "lock.door_lock", "code": "12345"}
)
await hass.async_block_till_done()
assert mock.call_args == call("12345", LOCKS[0], "lock")
mock.reset_mock()
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {"entity_id": "lock.door_lock"}
)
await hass.async_block_till_done()
assert mock.call_count == 0
await hass.services.async_call(
LOCK_DOMAIN,
SERVICE_UNLOCK,
{"entity_id": "lock.door_lock", "code": "12345"},
)
await hass.async_block_till_done()
assert mock.call_args == call("12345", LOCKS[0], "unlock")
async def test_verisure_default_code(hass):
"""Test configs with a default lock code."""
await setup_verisure_locks(hass, DEFAULT_LOCK_CODE_CONFIG)
with mock_hub(DEFAULT_LOCK_CODE_CONFIG, STATE_UNLOCKED) as hub:
mock = hub.session.set_lock_state
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {"entity_id": "lock.door_lock"}
)
await hass.async_block_till_done()
assert mock.call_args == call("9999", LOCKS[0], "lock")
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_UNLOCK, {"entity_id": "lock.door_lock"}
)
await hass.async_block_till_done()
assert mock.call_args == call("9999", LOCKS[0], "unlock")
await hass.services.async_call(
LOCK_DOMAIN, SERVICE_LOCK, {"entity_id": "lock.door_lock", "code": "12345"}
)
await hass.async_block_till_done()
assert mock.call_args == call("12345", LOCKS[0], "lock")
await hass.services.async_call(
LOCK_DOMAIN,
SERVICE_UNLOCK,
{"entity_id": "lock.door_lock", "code": "12345"},
)
await hass.async_block_till_done()
assert mock.call_args == call("12345", LOCKS[0], "unlock")
|
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.entry_cache import EntryCacheMixin
from zinnia.views.mixins.entry_preview import EntryPreviewMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
    Mixin combining:
    - ArchiveMixin configuration centralizing conf for archive views
    - EntryArchiveTemplateResponseMixin to provide
      custom templates depending on the date
    - BaseDateDetailView to retrieve the entry with date and slug
    - CallableQuerysetMixin to defer the execution of the *queryset*
      property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryCacheMixin,
EntryPreviewMixin,
EntryProtectionMixin,
EntryDateDetail):
"""
    Detailed archive view for an Entry with password
and login protections and restricted preview.
"""
|
from homeassistant.const import CURRENCY_CENT, ENERGY_KILO_WATT_HOUR
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_LOADZONE, DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the August sensors."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
settlement_point = config_entry.data[CONF_LOADZONE]
async_add_entities([GriddyPriceSensor(settlement_point, coordinator)], True)
class GriddyPriceSensor(CoordinatorEntity):
"""Representation of an August sensor."""
def __init__(self, settlement_point, coordinator):
"""Initialize the sensor."""
super().__init__(coordinator)
self._settlement_point = settlement_point
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return f"{CURRENCY_CENT}/{ENERGY_KILO_WATT_HOUR}"
@property
def name(self):
"""Device Name."""
return f"{self._settlement_point} Price Now"
@property
def icon(self):
"""Device Ice."""
return "mdi:currency-usd"
@property
def unique_id(self):
"""Device Uniqueid."""
return f"{self._settlement_point}_price_now"
@property
def state(self):
"""Get the current price."""
return round(float(self.coordinator.data.now.price_cents_kwh), 4)
|
import unittest
import pandas as pd
import numpy as np
from pgmpy.models import BayesianModel
from pgmpy.estimators import MaximumLikelihoodEstimator
from pgmpy.factors.discrete import TabularCPD
class TestMLE(unittest.TestCase):
def setUp(self):
self.m1 = BayesianModel([("A", "C"), ("B", "C")])
self.d1 = pd.DataFrame(data={"A": [0, 0, 1], "B": [0, 1, 0], "C": [1, 1, 0]})
self.d2 = pd.DataFrame(
data={
"A": [0, np.NaN, 1],
"B": [0, 1, 0],
"C": [1, 1, np.NaN],
"D": [np.NaN, "Y", np.NaN],
}
)
self.cpds = [
TabularCPD("A", 2, [[2.0 / 3], [1.0 / 3]]),
TabularCPD("B", 2, [[2.0 / 3], [1.0 / 3]]),
TabularCPD(
"C",
2,
[[0.0, 0.0, 1.0, 0.5], [1.0, 1.0, 0.0, 0.5]],
evidence=["A", "B"],
evidence_card=[2, 2],
),
]
self.mle1 = MaximumLikelihoodEstimator(self.m1, self.d1)
def test_get_parameters_incomplete_data(self):
self.assertSetEqual(set(self.mle1.get_parameters()), set(self.cpds))
def test_estimate_cpd(self):
self.assertEqual(self.mle1.estimate_cpd("A"), self.cpds[0])
self.assertEqual(self.mle1.estimate_cpd("B"), self.cpds[1])
self.assertEqual(self.mle1.estimate_cpd("C"), self.cpds[2])
def test_state_names1(self):
m = BayesianModel([("A", "B")])
d = pd.DataFrame(data={"A": [2, 3, 8, 8, 8], "B": ["X", "O", "X", "O", "X"]})
cpd_b = TabularCPD(
"B",
2,
[[0, 1, 1.0 / 3], [1, 0, 2.0 / 3]],
evidence=["A"],
evidence_card=[3],
state_names={"A": [2, 3, 8], "B": ["O", "X"]},
)
mle2 = MaximumLikelihoodEstimator(m, d)
self.assertEqual(mle2.estimate_cpd("B"), cpd_b)
def test_state_names2(self):
m = BayesianModel([("Light?", "Color"), ("Fruit", "Color")])
d = pd.DataFrame(
data={
"Fruit": ["Apple", "Apple", "Apple", "Banana", "Banana"],
"Light?": [True, True, False, False, True],
"Color": ["red", "green", "black", "black", "yellow"],
}
)
color_cpd = TabularCPD(
"Color",
4,
[[1, 0, 1, 0], [0, 0.5, 0, 0], [0, 0.5, 0, 0], [0, 0, 0, 1]],
evidence=["Fruit", "Light?"],
evidence_card=[2, 2],
state_names={
"Color": ["black", "green", "red", "yellow"],
"Light?": [False, True],
"Fruit": ["Apple", "Banana"],
},
)
mle2 = MaximumLikelihoodEstimator(m, d)
self.assertEqual(mle2.estimate_cpd("Color"), color_cpd)
def test_class_init(self):
mle = MaximumLikelihoodEstimator(
self.m1, self.d1, state_names={"A": [0, 1], "B": [0, 1], "C": [0, 1]}
)
self.assertSetEqual(set(mle.get_parameters()), set(self.cpds))
def test_nonoccurring_values(self):
mle = MaximumLikelihoodEstimator(
self.m1,
self.d1,
state_names={"A": [0, 1, 23], "B": [0, 1], "C": [0, 42, 1], 1: [2]},
)
cpds = [
TabularCPD(
"A", 3, [[2.0 / 3], [1.0 / 3], [0]], state_names={"A": [0, 1, 23]}
),
TabularCPD("B", 2, [[2.0 / 3], [1.0 / 3]], state_names={"B": [0, 1]}),
TabularCPD(
"C",
3,
[
[0.0, 0.0, 1.0, 1.0 / 3, 1.0 / 3, 1.0 / 3],
[0.0, 0.0, 0.0, 1.0 / 3, 1.0 / 3, 1.0 / 3],
[1.0, 1.0, 0.0, 1.0 / 3, 1.0 / 3, 1.0 / 3],
],
evidence=["A", "B"],
evidence_card=[3, 2],
state_names={"A": [0, 1, 23], "B": [0, 1], "C": [0, 42, 1]},
),
]
self.assertSetEqual(set(mle.get_parameters()), set(cpds))
def test_missing_data(self):
e1 = MaximumLikelihoodEstimator(
self.m1, self.d2, state_names={"C": [0, 1]}, complete_samples_only=False
)
cpds1 = set(
[
TabularCPD("A", 2, [[0.5], [0.5]]),
TabularCPD("B", 2, [[2.0 / 3], [1.0 / 3]]),
TabularCPD(
"C",
2,
[[0, 0.5, 0.5, 0.5], [1, 0.5, 0.5, 0.5]],
evidence=["A", "B"],
evidence_card=[2, 2],
),
]
)
self.assertSetEqual(cpds1, set(e1.get_parameters()))
e2 = MaximumLikelihoodEstimator(
self.m1, self.d2, state_names={"C": [0, 1]}, complete_samples_only=True
)
cpds2 = set(
[
TabularCPD("A", 2, [[0.5], [0.5]]),
TabularCPD("B", 2, [[0.5], [0.5]]),
TabularCPD(
"C",
2,
[[0.5, 0.5, 0.5, 0.5], [0.5, 0.5, 0.5, 0.5]],
evidence=["A", "B"],
evidence_card=[2, 2],
),
]
)
self.assertSetEqual(cpds2, set(e2.get_parameters()))
def tearDown(self):
del self.m1
del self.d1
del self.d2
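# Hedged usage sketch (not part of the original test module): the estimator
# exercised above can also fit every CPD of a model in one call; the names
# below are illustrative assumptions and the function is never invoked here.
def _example_mle_usage():
    model = BayesianModel([("A", "C"), ("B", "C")])
    data = pd.DataFrame({"A": [0, 0, 1], "B": [0, 1, 0], "C": [1, 1, 0]})
    mle = MaximumLikelihoodEstimator(model, data)
    cpd_c = mle.estimate_cpd("C")  # estimate a single CPD
    model.fit(data, estimator=MaximumLikelihoodEstimator)  # or fit all CPDs
    return cpd_c, model.get_cpds()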
|
import mock
import requests
from docker_registry.lib import config
from docker_registry.lib import mirroring
import base
from docker_registry.core import compat
json = compat.json
def mock_lookup_source(path, stream=False, source=None):
resp = requests.Response()
resp.status_code = 200
resp._content_consumed = True
# resp.headers['X-Fake-Source-Header'] = 'foobar'
if path.endswith('01451234/layer'):
resp._content = "abcdef0123456789xxxxxx=-//"
elif path.endswith('01451234/json'):
resp._content = ('{"id": "cafebabe01451234",'
'"created":"2014-02-03T16:47:06.615279788Z"}')
elif path.endswith('01451234/ancestry'):
resp._content = '["cafebabe01451234"]'
elif path.endswith('test/tags'):
resp._content = ('{"latest": "cafebabe01451234", "0.1.2": '
'"cafebabe01451234"}')
else:
resp.status_code = 404
return resp
class TestMirrorDecorator(base.TestCase):
def setUp(self):
self.cfg = config.load()
self.cfg._config['mirroring'] = {
'source': 'https://registry.mock'
}
def tearDown(self):
del self.cfg._config['mirroring']
def test_config_tampering(self):
self.assertEqual(self.cfg.mirroring.source,
'https://registry.mock')
def test_is_mirror(self):
self.assertEqual(mirroring.is_mirror(), True)
@mock.patch('docker_registry.lib.mirroring.lookup_source',
mock_lookup_source)
def test_source_lookup(self):
resp = self.http_client.get('/v1/images/cafebabe01451234/layer')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.data, "abcdef0123456789xxxxxx=-//")
resp_2 = self.http_client.get('/v1/images/cafebabe01451234/json')
self.assertEqual(resp_2.status_code, 200)
# Note(dmp): unicode patch XXX not applied assume requests does the job
json_data = json.loads(resp_2.data)
assert 'id' in json_data
assert 'created' in json_data
self.assertEqual(json_data['id'], 'cafebabe01451234')
resp_3 = self.http_client.get('/v1/images/cafebabe01451234/ancestry')
self.assertEqual(resp_3.status_code, 200)
# Note(dmp): unicode patch XXX not applied assume requests does the job
json_data_2 = json.loads(resp_3.data)
self.assertEqual(len(json_data_2), 1)
self.assertEqual(json_data_2[0], 'cafebabe01451234')
resp_4 = self.http_client.get('/v1/images/doe587e8157/json')
self.assertEqual(resp_4.status_code, 404)
@mock.patch('docker_registry.lib.mirroring.lookup_source',
mock_lookup_source)
def test_source_lookup_tag(self):
resp = self.http_client.get('/v1/repositories/testing/test/tags')
self.assertEqual(resp.status_code, 200)
self.assertEqual(
resp.data,
'{"latest": "cafebabe01451234", "0.1.2": "cafebabe01451234"}'
)
resp_2 = self.http_client.get('/v1/repositories/testing/bogus/tags')
self.assertEqual(resp_2.status_code, 404)
|
import time
from kombu import Connection, Exchange, Queue, Consumer
media_exchange = Exchange('media', 'direct')
video_queue = Queue('video', exchange=media_exchange, routing_key='video')
task_queues = [video_queue]
def handle_message(body, message):
print(f"{time.time()} RECEIVED MESSAGE: {body!r}")
message.ack()
# Use kombu's in-memory transport so the example runs without a broker.
connection = Connection("memory:///")
consumer = Consumer(connection, task_queues, callbacks=[handle_message])
producer = connection.Producer(serializer='json')
# declare=task_queues ensures the queue exists before the message is routed.
producer.publish({"foo": "bar"}, exchange=media_exchange, routing_key='video', declare=task_queues)
consumer.consume()
# Wait for broker events and dispatch the published message to the callback.
connection.drain_events()
|
import os
import sys
import docutils
import pytest
from nikola.utils import LocaleBorg
from ..helper import FakeSite
@pytest.fixture(scope="module")
def test_dir():
"""
Absolute path to the directory with the tests.
"""
return os.path.abspath(os.path.dirname(__file__))
@pytest.fixture(scope="session")
def other_locale() -> str:
return os.environ.get("NIKOLA_LOCALE_OTHER", "pl")
@pytest.fixture(scope="module")
def output_dir(target_dir):
return os.path.join(target_dir, "output")
@pytest.fixture(scope="module")
def target_dir(tmpdir_factory):
tdir = tmpdir_factory.mktemp("integration").join("target")
yield str(tdir)
@pytest.fixture(scope="module", autouse=True)
def remove_conf_module():
"""
Remove the module `conf` from `sys.modules` after loading the config.
Fixes issue #438
"""
try:
yield
finally:
try:
del sys.modules["conf"]
except KeyError:
pass
@pytest.fixture(scope="module", autouse=True)
def localeborg_setup(default_locale):
"""
Reset the LocaleBorg before and after every test.
"""
LocaleBorg.reset()
LocaleBorg.initialize({}, default_locale)
try:
yield
finally:
LocaleBorg.reset()
@pytest.fixture(autouse=True, scope="module")
def fix_leaked_state():
"""Fix leaked state from integration tests"""
try:
yield
finally:
try:
func = docutils.parsers.rst.roles.role("doc", None, None, None)[0]
func.site = FakeSite()
except AttributeError:
pass
|
import json
import logging
import re
from absl import flags
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import flag_util
from perfkitbenchmarker import kubernetes_helper
from perfkitbenchmarker import providers
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.vm_util import OUTPUT_EXIT_CODE as EXIT_CODE
from perfkitbenchmarker.vm_util import OUTPUT_STDERR as STDERR
from perfkitbenchmarker.vm_util import OUTPUT_STDOUT as STDOUT
FLAGS = flags.FLAGS
def CreateDisks(disk_specs, vm_name):
"""Creates instances of KubernetesDisk child classes."""
  # The disk class to instantiate depends on the scratch disk type.
scratch_disks = []
for disk_num, disk_spec in enumerate(disk_specs):
disk_class = GetKubernetesDiskClass(disk_spec.disk_type)
scratch_disk = disk_class(disk_num, disk_spec, vm_name)
scratch_disk.Create()
scratch_disks.append(scratch_disk)
return scratch_disks
class KubernetesDiskSpec(disk.BaseDiskSpec):
"""Kubernetes disk Spec class."""
CLOUD = providers.KUBERNETES
@classmethod
def _GetOptionDecoderConstructions(cls):
result = super(KubernetesDiskSpec, cls)._GetOptionDecoderConstructions()
result.update({
'provisioner': (option_decoders.StringDecoder,
{'default': None, 'none_ok': True}),
'parameters': (option_decoders.TypeVerifier,
{'default': {}, 'valid_types': (dict,)})
})
return result
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
"""Overrides config values with flag values.
Can be overridden by derived classes to add support for specific flags.
Args:
config_values: dict mapping config option names to provided values. Is
modified by this function.
flag_values: flags.FlagValues. Runtime flags that may override the
provided config values.
Returns:
dict mapping config option names to values derived from the config
values or flag values.
"""
super(KubernetesDiskSpec, cls)._ApplyFlags(config_values, flag_values)
if flag_values['k8s_volume_provisioner'].present:
config_values['provisioner'] = flag_values.k8s_volume_provisioner
if flag_values['k8s_volume_parameters'].present:
config_values['parameters'] = config_values.get('parameters', {})
config_values['parameters'].update(
flag_util.ParseKeyValuePairs(flag_values.k8s_volume_parameters))
def GetKubernetesDiskClass(volume_type):
return resource.GetResourceClass(KubernetesDisk, K8S_VOLUME_TYPE=volume_type)
class KubernetesDisk(disk.BaseDisk):
"""
Base class for Kubernetes Disks.
"""
RESOURCE_TYPE = 'KubernetesDisk'
REQUIRED_ATTRS = ['K8S_VOLUME_TYPE']
def __init__(self, disk_num, disk_spec, name):
super(KubernetesDisk, self).__init__(disk_spec)
self.name = '%s-%s' % (name, disk_num)
def _Create(self):
return
def _Delete(self):
return
def Attach(self, vm):
return
def Detach(self):
return
def SetDevicePath(self, vm):
return
def AttachVolumeMountInfo(self, volume_mounts):
volume_mount = {
'mountPath': self.mount_point,
'name': self.name
}
volume_mounts.append(volume_mount)
class EmptyDirDisk(KubernetesDisk):
"""
Implementation of Kubernetes 'emptyDir' type of volume.
"""
K8S_VOLUME_TYPE = 'emptyDir'
def GetDevicePath(self):
"""Get device path."""
    # In the case of a local disk, the host's disk is mounted (an empty
    # directory from the host is mounted into the container) and we
    # intentionally prevent formatting of the device.
raise errors.Error('GetDevicePath not supported for Kubernetes local disk')
def AttachVolumeInfo(self, volumes):
local_volume = {
'name': self.name,
'emptyDir': {}
}
volumes.append(local_volume)
class CephDisk(KubernetesDisk):
"""
Implementation of Kubernetes 'rbd' type of volume.
"""
K8S_VOLUME_TYPE = 'rbd'
def __init__(self, disk_num, disk_spec, name):
super(CephDisk, self).__init__(disk_num, disk_spec, name)
self.ceph_secret = FLAGS.ceph_secret
def _Create(self):
"""Creates Rados Block Device volumes and installs filesystem on them."""
cmd = ['rbd', '-p', FLAGS.rbd_pool, 'create', self.name, '--size',
str(1024 * self.disk_size)]
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
if output[EXIT_CODE] != 0:
raise Exception('Creating RBD image failed: %s' % output[STDERR])
cmd = ['rbd', 'map', FLAGS.rbd_pool + '/' + self.name]
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
if output[EXIT_CODE] != 0:
raise Exception('Mapping RBD image failed: %s' % output[STDERR])
rbd_device = output[STDOUT].rstrip()
if '/dev/rbd' not in rbd_device:
      # Sometimes the 'rbd map' command doesn't return any output.
      # Try to find the device location another way.
cmd = ['rbd', 'showmapped']
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
for image_device in output[STDOUT].split('\n'):
if self.name in image_device:
pattern = re.compile('/dev/rbd.*')
output = pattern.findall(image_device)
rbd_device = output[STDOUT].rstrip()
break
cmd = ['/sbin/mkfs.ext4', rbd_device]
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
if output[EXIT_CODE] != 0:
raise Exception('Formatting partition failed: %s' % output[STDERR])
cmd = ['rbd', 'unmap', rbd_device]
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
if output[EXIT_CODE] != 0:
raise Exception('Unmapping block device failed: %s' % output[STDERR])
def _Delete(self):
"""Deletes RBD image."""
cmd = ['rbd', 'rm', FLAGS.rbd_pool + '/' + self.name]
output = vm_util.IssueCommand(cmd, raise_on_failure=False)
if output[EXIT_CODE] != 0:
msg = 'Removing RBD image failed. Reattempting.'
logging.warning(msg)
raise Exception(msg)
def AttachVolumeInfo(self, volumes):
ceph_volume = {
'name': self.name,
'rbd': {
'monitors': FLAGS.ceph_monitors,
'pool': FLAGS.rbd_pool,
'image': self.name,
'keyring': FLAGS.ceph_keyring,
'user': FLAGS.rbd_user,
'fsType': 'ext4',
'readOnly': False
}
}
if FLAGS.ceph_secret:
ceph_volume['rbd']['secretRef'] = {'name': FLAGS.ceph_secret}
volumes.append(ceph_volume)
def SetDevicePath(self, vm):
"""Retrieves the path to scratch disk device."""
cmd = "mount | grep %s | tr -s ' ' | cut -f 1 -d ' '" % self.mount_point
device, _ = vm.RemoteCommand(cmd)
self.device_path = device.rstrip()
def GetDevicePath(self):
return self.device_path
class PersistentVolumeClaim(resource.BaseResource):
"""Object representing a K8s PVC."""
@vm_util.Retry(poll_interval=10, max_retries=100, log_errors=False)
def _WaitForPVCBoundCompletion(self):
"""Need to wait for the PVC to get up."""
# PVC may take some time to be ready(Bound).
exists_cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig, 'get',
'pvc', '-o=json', self.name]
logging.info('Waiting for PVC %s', self.name)
pvc_info, _, _ = vm_util.IssueCommand(exists_cmd, suppress_warning=True,
raise_on_failure=False)
if pvc_info:
pvc_info = json.loads(pvc_info)
pvc = pvc_info['status']['phase']
if pvc == 'Bound':
logging.info('PVC is ready.')
return
raise Exception('PVC %s is not ready. Retrying to check status.' %
self.name)
def __init__(self, name, storage_class, size):
super(PersistentVolumeClaim, self).__init__()
self.name = name
self.storage_class = storage_class
self.size = size
def _Create(self):
"""Creates the PVC."""
body = self._BuildBody()
kubernetes_helper.CreateResource(body)
self._WaitForPVCBoundCompletion()
def _Delete(self):
"""Deletes the PVC."""
body = self._BuildBody()
kubernetes_helper.DeleteResource(body)
def _BuildBody(self):
"""Builds JSON representing the PVC."""
body = {
'kind': 'PersistentVolumeClaim',
'apiVersion': 'v1',
'metadata': {
'name': self.name
},
'spec': {
'accessModes': ['ReadWriteOnce'],
'resources': {
'requests': {
'storage': '%sGi' % self.size
}
},
'storageClassName': self.storage_class,
}
}
return json.dumps(body)
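# Hedged illustration (not part of the class): for name='pkb-pvc-0',
# storage_class='standard' and size=10, _BuildBody() above serialises roughly:
#   {"kind": "PersistentVolumeClaim", "apiVersion": "v1",
#    "metadata": {"name": "pkb-pvc-0"},
#    "spec": {"accessModes": ["ReadWriteOnce"],
#             "resources": {"requests": {"storage": "10Gi"}},
#             "storageClassName": "standard"}}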
class StorageClass(resource.BaseResource):
"""Object representing a K8s StorageClass (with dynamic provisioning)."""
def __init__(self, name, provisioner, parameters):
super(StorageClass, self).__init__()
self.name = name
self.provisioner = provisioner
self.parameters = parameters
def _CheckStorageClassExists(self):
"""Prevent duplicated StorageClass creation."""
# If the StorageClass with the same name and parameters exists
# :return: True or False
exists_cmd = [FLAGS.kubectl, '--kubeconfig=%s' % FLAGS.kubeconfig, 'get',
'sc', '-o=json', self.name]
sc_info, _, _ = vm_util.IssueCommand(exists_cmd, suppress_warning=True,
raise_on_failure=False)
if sc_info:
sc_info = json.loads(sc_info)
sc_name = sc_info['metadata']['name']
if sc_name == self.name:
logging.info('StorageClass already exists.')
return True
else:
logging.info('About to create new StorageClass: %s', self.name)
return False
def _Create(self):
"""Creates the StorageClass."""
body = self._BuildBody()
if not self._CheckStorageClassExists():
kubernetes_helper.CreateResource(body)
def _Delete(self):
"""Deletes the StorageClass."""
body = self._BuildBody()
kubernetes_helper.DeleteResource(body)
def _BuildBody(self):
"""Builds JSON representing the StorageClass."""
body = {
'kind': 'StorageClass',
'apiVersion': 'storage.k8s.io/v1',
'metadata': {
'name': self.name
},
'provisioner': self.provisioner,
'parameters': self.parameters
}
return json.dumps(body)
class PvcVolume(KubernetesDisk):
"""Volume representing a persistent volume claim."""
K8S_VOLUME_TYPE = 'persistentVolumeClaim'
PROVISIONER = None
def __init__(self, disk_num, spec, name):
super(PvcVolume, self).__init__(disk_num, spec, name)
self.storage_class = StorageClass(
name, self.PROVISIONER or spec.provisioner, spec.parameters)
self.pvc = PersistentVolumeClaim(
self.name, self.storage_class.name, spec.disk_size)
def _Create(self):
self.storage_class.Create()
self.pvc.Create()
def _Delete(self):
self.pvc.Delete()
self.storage_class.Delete()
def AttachVolumeInfo(self, volumes):
pvc_volume = {
'name': self.name,
'persistentVolumeClaim': {
'claimName': self.name
}
}
volumes.append(pvc_volume)
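# Hedged usage sketch (not part of this module): a pod builder would typically
# call CreateDisks() and splice each disk's volume/mount entries into the pod
# spec; the dict shape below is an illustrative assumption.
#
#   scratch_disks = CreateDisks(disk_specs, vm_name='pkb-vm-0')
#   volumes, mounts = [], []
#   for scratch_disk in scratch_disks:
#     scratch_disk.AttachVolumeInfo(volumes)
#     scratch_disk.AttachVolumeMountInfo(mounts)
#   pod_spec = {'containers': [{'volumeMounts': mounts}], 'volumes': volumes}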
|
import lakeside
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
import homeassistant.util.color as color_util
from homeassistant.util.color import (
color_temperature_kelvin_to_mired as kelvin_to_mired,
color_temperature_mired_to_kelvin as mired_to_kelvin,
)
EUFY_MAX_KELVIN = 6500
EUFY_MIN_KELVIN = 2700
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Eufy bulbs."""
if discovery_info is None:
return
add_entities([EufyLight(discovery_info)], True)
class EufyLight(LightEntity):
"""Representation of a Eufy light."""
def __init__(self, device):
"""Initialize the light."""
self._temp = None
self._brightness = None
self._hs = None
self._state = None
self._name = device["name"]
self._address = device["address"]
self._code = device["code"]
self._type = device["type"]
self._bulb = lakeside.bulb(self._address, self._code, self._type)
self._colormode = False
if self._type == "T1011":
self._features = SUPPORT_BRIGHTNESS
elif self._type == "T1012":
self._features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP
elif self._type == "T1013":
self._features = SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_COLOR
self._bulb.connect()
def update(self):
"""Synchronise state from the bulb."""
self._bulb.update()
if self._bulb.power:
self._brightness = self._bulb.brightness
self._temp = self._bulb.temperature
if self._bulb.colors:
self._colormode = True
self._hs = color_util.color_RGB_to_hs(*self._bulb.colors)
else:
self._colormode = False
self._state = self._bulb.power
@property
def unique_id(self):
"""Return the ID of this light."""
return self._address
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return int(self._brightness * 255 / 100)
@property
def min_mireds(self):
"""Return minimum supported color temperature."""
return kelvin_to_mired(EUFY_MAX_KELVIN)
@property
def max_mireds(self):
"""Return maximu supported color temperature."""
return kelvin_to_mired(EUFY_MIN_KELVIN)
@property
def color_temp(self):
"""Return the color temperature of this light."""
temp_in_k = int(
EUFY_MIN_KELVIN + (self._temp * (EUFY_MAX_KELVIN - EUFY_MIN_KELVIN) / 100)
)
return kelvin_to_mired(temp_in_k)
@property
def hs_color(self):
"""Return the color of this light."""
if not self._colormode:
return None
return self._hs
@property
def supported_features(self):
"""Flag supported features."""
return self._features
def turn_on(self, **kwargs):
"""Turn the specified light on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
colortemp = kwargs.get(ATTR_COLOR_TEMP)
# pylint: disable=invalid-name
hs = kwargs.get(ATTR_HS_COLOR)
if brightness is not None:
brightness = int(brightness * 100 / 255)
else:
if self._brightness is None:
self._brightness = 100
brightness = self._brightness
if colortemp is not None:
self._colormode = False
temp_in_k = mired_to_kelvin(colortemp)
relative_temp = temp_in_k - EUFY_MIN_KELVIN
temp = int(relative_temp * 100 / (EUFY_MAX_KELVIN - EUFY_MIN_KELVIN))
else:
temp = None
if hs is not None:
rgb = color_util.color_hsv_to_RGB(hs[0], hs[1], brightness / 255 * 100)
self._colormode = True
elif self._colormode:
rgb = color_util.color_hsv_to_RGB(
self._hs[0], self._hs[1], brightness / 255 * 100
)
else:
rgb = None
try:
self._bulb.set_state(
power=True, brightness=brightness, temperature=temp, colors=rgb
)
except BrokenPipeError:
self._bulb.connect()
self._bulb.set_state(
power=True, brightness=brightness, temperature=temp, colors=rgb
)
def turn_off(self, **kwargs):
"""Turn the specified light off."""
try:
self._bulb.set_state(power=False)
except BrokenPipeError:
self._bulb.connect()
self._bulb.set_state(power=False)
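# Hedged illustration (not part of the component): the bulb reports brightness
# on a 0-100 scale while Home Assistant uses 0-255, so a bulb value of 50 is
# exposed as int(50 * 255 / 100) == 127 and an HA value of 128 is written back
# as int(128 * 100 / 255) == 50; colour temperature is likewise rescaled
# between the 2700-6500 K range and mireds via kelvin_to_mired()/mired_to_kelvin().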
|
import os
import os.path
import sys
import argparse
import subprocess
import tempfile
import attr
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
@attr.s
class Line:
"""A line in "coredumpctl list"."""
time = attr.ib()
pid = attr.ib()
uid = attr.ib()
gid = attr.ib()
sig = attr.ib()
present = attr.ib()
exe = attr.ib()
def _convert_present(data):
"""Convert " "/"*" to True/False for parse_coredumpctl_line."""
if data == '*':
return True
elif data == ' ':
return False
else:
raise ValueError(data)
def parse_coredumpctl_line(line):
"""Parse a given string coming from coredumpctl and return a Line object.
Example input:
Mon 2015-09-28 23:22:24 CEST 10606 1000 1000 11 /usr/bin/python3.4
"""
fields = {
'time': (0, 28, str),
'pid': (29, 35, int),
'uid': (36, 41, int),
'gid': (42, 47, int),
'sig': (48, 51, int),
'present': (52, 53, _convert_present),
'exe': (54, None, str),
}
data = {}
for name, (start, end, converter) in fields.items():
data[name] = converter(line[start:end])
return Line(**data)
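# Hedged note (not part of the script): the parser relies on coredumpctl's
# fixed-width columns, e.g. characters 0-27 hold the timestamp, 29-34 the PID
# and column 52 the "present" marker (' ' or '*'); parse_coredumpctl_line()
# simply slices those ranges and converts each with the mapped callable above.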
def get_info(pid):
"""Get and parse "coredumpctl info" output for the given PID."""
data = {}
output = subprocess.run(['coredumpctl', 'info', str(pid)], check=True,
stdout=subprocess.PIPE).stdout
output = output.decode('utf-8')
for line in output.split('\n'):
if not line.strip():
continue
try:
key, value = line.split(':', maxsplit=1)
except ValueError:
# systemd stack output
continue
data[key.strip()] = value.strip()
return data
def is_qutebrowser_dump(parsed):
"""Check if the given Line is a qutebrowser dump."""
basename = os.path.basename(parsed.exe)
if basename == 'python' or basename.startswith('python3'):
info = get_info(parsed.pid)
try:
cmdline = info['Command Line']
except KeyError:
return True
else:
return '-m qutebrowser' in cmdline
else:
return basename == 'qutebrowser'
def dump_infos_gdb(parsed):
"""Dump all needed infos for the given crash using gdb."""
with tempfile.TemporaryDirectory() as tempdir:
coredump = os.path.join(tempdir, 'dump')
subprocess.run(['coredumpctl', 'dump', '-o', coredump,
str(parsed.pid)], check=True)
subprocess.run(['gdb', parsed.exe, coredump,
'-ex', 'info threads',
'-ex', 'thread apply all bt full',
'-ex', 'quit'], check=True)
def dump_infos(parsed):
"""Dump all possible infos for the given crash."""
if not parsed.present:
info = get_info(parsed.pid)
print("{}: Signal {} with no coredump: {}".format(
parsed.time, info.get('Signal', None),
info.get('Command Line', None)))
else:
print('\n\n\n')
utils.print_title('{} - {}'.format(parsed.time, parsed.pid))
sys.stdout.flush()
dump_infos_gdb(parsed)
def check_prerequisites():
"""Check if coredumpctl/gdb are installed."""
for binary in ['coredumpctl', 'gdb']:
try:
subprocess.run([binary, '--version'], check=True)
except FileNotFoundError:
print("{} is needed to run this script!".format(binary),
file=sys.stderr)
sys.exit(1)
def main():
check_prerequisites()
parser = argparse.ArgumentParser()
parser.add_argument('--all', help="Also list crashes without coredumps.",
action='store_true')
args = parser.parse_args()
coredumps = subprocess.run(['coredumpctl', 'list'], check=True,
stdout=subprocess.PIPE).stdout
lines = coredumps.decode('utf-8').split('\n')
for line in lines[1:]:
if not line.strip():
continue
parsed = parse_coredumpctl_line(line)
if not parsed.present and not args.all:
continue
if is_qutebrowser_dump(parsed):
dump_infos(parsed)
if __name__ == '__main__':
main()
|
import os
from gi.module import get_introspection_module
from gi.repository import Gdk, GLib, GObject, Pango
from meld.style import colour_lookup_with_fallback
from meld.treehelpers import SearchableTreeStore
from meld.vc._vc import ( # noqa: F401
CONFLICT_BASE,
CONFLICT_LOCAL,
CONFLICT_MERGED,
CONFLICT_OTHER,
CONFLICT_REMOTE,
CONFLICT_THIS,
STATE_CONFLICT,
STATE_EMPTY,
STATE_ERROR,
STATE_IGNORED,
STATE_MAX,
STATE_MISSING,
STATE_MODIFIED,
STATE_NEW,
STATE_NOCHANGE,
STATE_NONE,
STATE_NONEXIST,
STATE_NORMAL,
STATE_REMOVED,
)
_GIGtk = None
try:
_GIGtk = get_introspection_module('Gtk')
except Exception:
pass
COL_PATH, COL_STATE, COL_TEXT, COL_ICON, COL_TINT, COL_FG, COL_STYLE, \
COL_WEIGHT, COL_STRIKE, COL_END = list(range(10))
COL_TYPES = (str, str, str, str, Gdk.RGBA, Gdk.RGBA, Pango.Style,
Pango.Weight, bool)
class DiffTreeStore(SearchableTreeStore):
def __init__(self, ntree, types):
full_types = []
for col_type in (COL_TYPES + tuple(types)):
full_types.extend([col_type] * ntree)
super().__init__(*full_types)
self._none_of_cols = {
col_num: GObject.Value(col_type, None)
for col_num, col_type in enumerate(full_types)
}
self.ntree = ntree
self._setup_default_styles()
def on_style_updated(self, widget):
style = widget.get_style_context()
self._setup_default_styles(style)
def _setup_default_styles(self, style=None):
roman, italic = Pango.Style.NORMAL, Pango.Style.ITALIC
normal, bold = Pango.Weight.NORMAL, Pango.Weight.BOLD
lookup = colour_lookup_with_fallback
unk_fg = lookup("meld:unknown-text", "foreground")
new_fg = lookup("meld:insert", "foreground")
mod_fg = lookup("meld:replace", "foreground")
del_fg = lookup("meld:delete", "foreground")
err_fg = lookup("meld:error", "foreground")
con_fg = lookup("meld:conflict", "foreground")
self.text_attributes = [
# foreground, style, weight, strikethrough
(unk_fg, roman, normal, None), # STATE_IGNORED
(unk_fg, roman, normal, None), # STATE_NONE
(None, roman, normal, None), # STATE_NORMAL
(None, italic, normal, None), # STATE_NOCHANGE
(err_fg, roman, bold, None), # STATE_ERROR
(unk_fg, italic, normal, None), # STATE_EMPTY
(new_fg, roman, bold, None), # STATE_NEW
(mod_fg, roman, bold, None), # STATE_MODIFIED
(mod_fg, roman, normal, None), # STATE_RENAMED
(con_fg, roman, bold, None), # STATE_CONFLICT
(del_fg, roman, bold, True), # STATE_REMOVED
(del_fg, roman, bold, True), # STATE_MISSING
(unk_fg, roman, normal, True), # STATE_NONEXIST
]
self.icon_details = [
# file-icon, folder-icon, file-tint, folder-tint
("text-x-generic", "folder", None, None), # IGNORED
("text-x-generic", "folder", None, None), # NONE
("text-x-generic", "folder", None, None), # NORMAL
("text-x-generic", "folder", None, None), # NOCHANGE
("dialog-warning", None, None, None), # ERROR
(None, None, None, None), # EMPTY
("text-x-generic", "folder", new_fg, None), # NEW
("text-x-generic", "folder", mod_fg, None), # MODIFIED
("text-x-generic", "folder", mod_fg, None), # RENAMED
("text-x-generic", "folder", con_fg, None), # CONFLICT
("text-x-generic", "folder", del_fg, None), # REMOVED
("text-x-generic", "folder", unk_fg, unk_fg), # MISSING
("text-x-generic", "folder", unk_fg, unk_fg), # NONEXIST
]
assert len(self.icon_details) == len(self.text_attributes) == STATE_MAX
def value_paths(self, it):
return [self.value_path(it, i) for i in range(self.ntree)]
def value_path(self, it, pane):
return self.get_value(it, self.column_index(COL_PATH, pane))
def is_folder(self, it, pane, path):
# A folder may no longer exist, and is only tracked by VC.
# Therefore, check the icon instead, as the pane already knows.
icon = self.get_value(it, self.column_index(COL_ICON, pane))
return icon == "folder" or (bool(path) and os.path.isdir(path))
def column_index(self, col, pane):
return self.ntree * col + pane
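    # Hedged illustration (not part of the original class): with ntree=2 the
    # flattened layout is [PATH(0), PATH(1), STATE(0), STATE(1), TEXT(0), ...],
    # so column_index(COL_STATE, pane=1) == 2 * 1 + 1 == 3.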
def add_entries(self, parent, names):
it = self.append(parent)
for pane, path in enumerate(names):
self.unsafe_set(it, pane, {COL_PATH: path})
return it
def add_empty(self, parent, text="empty folder"):
it = self.append(parent)
for pane in range(self.ntree):
self.set_state(it, pane, STATE_EMPTY, text)
return it
def add_error(self, parent, msg, pane, defaults={}):
it = self.append(parent)
key_values = {COL_STATE: str(STATE_ERROR)}
key_values.update(defaults)
for i in range(self.ntree):
self.unsafe_set(it, i, key_values)
self.set_state(it, pane, STATE_ERROR, msg)
def set_path_state(self, it, pane, state, isdir=0, display_text=None):
if not display_text:
fullname = self.get_value(it, self.column_index(COL_PATH, pane))
display_text = GLib.markup_escape_text(os.path.basename(fullname))
self.set_state(it, pane, state, display_text, isdir)
def set_state(self, it, pane, state, label, isdir=0):
icon = self.icon_details[state][1 if isdir else 0]
tint = self.icon_details[state][3 if isdir else 2]
fg, style, weight, strike = self.text_attributes[state]
self.unsafe_set(it, pane, {
COL_STATE: str(state),
COL_TEXT: label,
COL_ICON: icon,
COL_TINT: tint,
COL_FG: fg,
COL_STYLE: style,
COL_WEIGHT: weight,
COL_STRIKE: strike
})
def get_state(self, it, pane):
state_idx = self.column_index(COL_STATE, pane)
try:
return int(self.get_value(it, state_idx))
except TypeError:
return None
def _find_next_prev_diff(self, start_path):
def match_func(it):
# TODO: It works, but matching on the first pane only is very poor
return self.get_state(it, 0) not in (
STATE_NORMAL, STATE_NOCHANGE, STATE_EMPTY)
return self.get_previous_next_paths(start_path, match_func)
def state_rows(self, states):
"""Generator of rows in one of the given states
Tree iterators are returned in depth-first tree order.
"""
root = self.get_iter_first()
for it in self.inorder_search_down(root):
state = self.get_state(it, 0)
if state in states:
yield it
def unsafe_set(self, treeiter, pane, keys_values):
""" This must be fastest than super.set,
at the cost that may crash the application if you don't
know what your're passing here.
ie: pass treeiter or column as None crash meld
treeiter: Gtk.TreeIter
keys_values: dict<column, value>
column: Int col index
value: Str (UTF-8), Int, Float, Double, Boolean, None or GObject
return None
"""
safe_keys_values = {
self.column_index(col, pane):
val if val is not None
else self._none_of_cols.get(self.column_index(col, pane))
for col, val in keys_values.items()
}
if _GIGtk and treeiter:
columns = [col for col in safe_keys_values.keys()]
values = [val for val in safe_keys_values.values()]
_GIGtk.TreeStore.set(self, treeiter, columns, values)
else:
self.set(treeiter, safe_keys_values)
class TreeviewCommon:
def on_treeview_popup_menu(self, treeview):
cursor_path, cursor_col = treeview.get_cursor()
if not cursor_path:
self.popup_menu.popup_at_pointer(None)
return True
# We always want to pop up to the right of the first column,
# ignoring the actual cursor column location.
rect = treeview.get_background_area(
cursor_path, treeview.get_column(0))
self.popup_menu.popup_at_rect(
treeview.get_bin_window(),
rect,
Gdk.Gravity.SOUTH_EAST,
Gdk.Gravity.NORTH_WEST,
None,
)
return True
def on_treeview_button_press_event(self, treeview, event):
        # If we have multiple treeviews, clear the other trees' selections
num_panes = getattr(self, 'num_panes', 1)
if num_panes > 1:
for t in self.treeview[:self.num_panes]:
if t != treeview:
t.get_selection().unselect_all()
if (event.triggers_context_menu() and
event.type == Gdk.EventType.BUTTON_PRESS):
treeview.grab_focus()
path = treeview.get_path_at_pos(int(event.x), int(event.y))
if path is None:
return False
selection = treeview.get_selection()
model, rows = selection.get_selected_rows()
if path[0] not in rows:
selection.unselect_all()
selection.select_path(path[0])
treeview.set_cursor(path[0])
self.popup_menu.popup_at_pointer(event)
return True
return False
def treeview_search_cb(model, column, key, it, data):
# If the key contains a path separator, search the whole path,
# otherwise just use the filename. If the key is all lower-case, do a
# case-insensitive match.
abs_search = '/' in key
lower_key = key.islower()
for path in model.value_paths(it):
if not path:
continue
text = path if abs_search else os.path.basename(path)
text = text.lower() if lower_key else text
if key in text:
return False
return True
|
from ..core import driver
from ..core import exceptions
from nose import tools
class Query(object):
def __init__(self, scheme=None):
self.scheme = scheme
def testDriverIsAvailable(self):
drvs = driver.available()
assert self.scheme in drvs
def testFetchingDriver(self):
resultdriver = driver.fetch(self.scheme)
# XXX hacking is sick
storage = __import__('docker_registry.drivers.%s' % self.scheme,
globals(), locals(), ['Storage'], 0) # noqa
assert resultdriver == storage.Storage
assert issubclass(resultdriver, driver.Base)
assert resultdriver.scheme == self.scheme
@tools.raises(exceptions.NotImplementedError)
def testFetchingNonExistentDriver(self):
driver.fetch("nonexistentstupidlynameddriver")
|
import asyncio
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ATTR_AUX_HEAT,
ATTR_HUMIDITY,
ATTR_HVAC_MODE,
ATTR_PRESET_MODE,
ATTR_SWING_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
DOMAIN,
HVAC_MODES,
SERVICE_SET_AUX_HEAT,
SERVICE_SET_HUMIDITY,
SERVICE_SET_HVAC_MODE,
SERVICE_SET_PRESET_MODE,
SERVICE_SET_SWING_MODE,
SERVICE_SET_TEMPERATURE,
)
async def _async_reproduce_states(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
async def call_service(service: str, keys: Iterable, data=None):
"""Call service with set of attributes given."""
data = data or {}
data["entity_id"] = state.entity_id
for key in keys:
if key in state.attributes:
data[key] = state.attributes[key]
await hass.services.async_call(
DOMAIN, service, data, blocking=True, context=context
)
if state.state in HVAC_MODES:
await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})
if ATTR_AUX_HEAT in state.attributes:
await call_service(SERVICE_SET_AUX_HEAT, [ATTR_AUX_HEAT])
if (
(ATTR_TEMPERATURE in state.attributes)
or (ATTR_TARGET_TEMP_HIGH in state.attributes)
or (ATTR_TARGET_TEMP_LOW in state.attributes)
):
await call_service(
SERVICE_SET_TEMPERATURE,
[ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW],
)
if ATTR_PRESET_MODE in state.attributes:
await call_service(SERVICE_SET_PRESET_MODE, [ATTR_PRESET_MODE])
if ATTR_SWING_MODE in state.attributes:
await call_service(SERVICE_SET_SWING_MODE, [ATTR_SWING_MODE])
if ATTR_HUMIDITY in state.attributes:
await call_service(SERVICE_SET_HUMIDITY, [ATTR_HUMIDITY])
async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
await asyncio.gather(
*(
_async_reproduce_states(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
)
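# Hedged usage sketch (not part of this module): callers such as scenes pass
# previously captured State objects; the entity id and temperature below are
# illustrative assumptions.
#
#     await async_reproduce_states(
#         hass,
#         [State("climate.living_room", "heat", {ATTR_TEMPERATURE: 21.5})],
#     )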
|
import socket
from threading import Thread, Event
import pytest
from queue import Queue
from yandextank.common.util import FileScanner, FileMultiReader
from yandextank.common.util import AddressWizard
from netort.data_processing import Drain, Chopper
class TestDrain(object):
def test_run(self):
"""
        Test drain's run function (in the same thread)
"""
source = range(5)
destination = Queue()
drain = Drain(source, destination)
drain.run()
assert destination.qsize() == 5
def test_interrupt(self):
"""
Test we can interrupt the drain
"""
source = range(1000000)
destination = Queue()
drain = Drain(source, destination)
drain.start()
drain.close()
assert destination.qsize() < 1000000
def test_interrupt_and_wait(self):
"""
        Test that the drain processes the whole source when we join it
"""
source = range(1000000)
destination = Queue()
drain = Drain(source, destination)
drain.start()
drain.join()
assert destination.qsize() == 1000000
class TestChopper(object):
def test_output(self):
source = (range(i) for i in range(5))
expected = [0, 0, 1, 0, 1, 2, 0, 1, 2, 3]
assert list(Chopper(source)) == expected
class TestFileScanner(object):
@staticmethod
def __process_chunks(chunks, sep="\n"):
reader = FileScanner("somefile.txt", sep=sep)
result = []
for chunk in chunks:
result.extend(reader._read_lines(chunk))
return result
def test_empty(self):
assert self.__process_chunks([""]) == []
def test_simple(self):
assert self.__process_chunks(["aaa\n", "bbb\n", "ccc\n"]) == ["aaa", "bbb", "ccc"]
def test_split(self):
assert self.__process_chunks(["aaa\nbbb\n", "ccc\n"]) == ["aaa", "bbb", "ccc"]
def test_join(self):
assert self.__process_chunks(["aaa", "bbb\n", "ccc\n"]) == ["aaabbb", "ccc"]
def test_no_first_separator(self):
assert self.__process_chunks(["aaa"]) == []
def test_no_last_separator(self):
assert self.__process_chunks(["aaa\n", "bbb\n", "ccc"]) == ["aaa", "bbb"]
def test_use_custom_separator(self):
assert self.__process_chunks(["aaa:bbb:ccc:"], ":") == ["aaa", "bbb", "ccc"]
class TestAddressResolver(object):
@staticmethod
def __resolve(chunk):
aw = AddressWizard()
# return format: is_v6, parsed_ip, int(port), address_str
return aw.resolve(chunk)
def __resolve_hostname_and_test(self, address_str, test_hostname, test_port):
passed = False
try:
resolved = socket.getaddrinfo(test_hostname, test_port)
except Exception:
# skip this check if resolver not available
return True
try:
for i in resolved:
if i[4][1] == self.__resolve(address_str)[2] and i[4][0] == self.__resolve(address_str)[1]:
passed = True
except IndexError:
pass
assert passed
# ipv6
def test_ipv6(self):
assert self.__resolve('2a02:6b8::2:242') == (True, '2a02:6b8::2:242', 80, '2a02:6b8::2:242')
def test_ipv6_braces_port(self):
assert self.__resolve('[2a02:6b8::2:242]:666') == (True, '2a02:6b8::2:242', 666, '2a02:6b8::2:242')
def test_ipv6_braces_port_spaces(self):
assert self.__resolve('[ 2a02:6b8::2:242 ]: 666') == (True, '2a02:6b8::2:242', 666, '2a02:6b8::2:242')
def test_ipv4(self):
assert self.__resolve('87.250.250.242') == (False, '87.250.250.242', 80, '87.250.250.242')
def test_ipv4_port(self):
assert self.__resolve('87.250.250.242:666') == (False, '87.250.250.242', 666, '87.250.250.242')
def test_ipv4_braces_port(self):
assert self.__resolve('[87.250.250.242]:666') == (False, '87.250.250.242', 666, '87.250.250.242')
# hostname
def test_hostname_port(self):
self.__resolve_hostname_and_test('ya.ru:666', 'ya.ru', '666')
def test_hostname_braces(self):
self.__resolve_hostname_and_test('[ya.ru]', 'ya.ru', '80')
def test_hostname_braces_port(self):
self.__resolve_hostname_and_test('[ya.ru]:666', 'ya.ru', '666')
class TestFileMultiReader(object):
filename = 'yandextank/common/tests/ph.out'
@staticmethod
def mock_consumer(f, expected, step, errors):
for line in [expected[i: i + step] for i in range(0, len(expected), step)]:
res = f.read(step)
if line not in res:
errors.append("Expected: {}\nGot: {}".format(expected, res))
@staticmethod
def mock_complex_consumer(f, expected, n_steps, errors):
for n in range(n_steps):
f.read()
res = f.readline() + f.read(10)
if res != expected:
errors.append("Expected: {}\nGot: {}".format(expected, res))
def phout_multi_read(self):
with open(self.filename) as f:
exp = f.read()
errors = []
stop = Event()
mr = FileMultiReader(self.filename, stop)
threads = [Thread(target=self.mock_consumer,
args=(mr.get_file(i), exp, i, errors),
name='Thread-%d' % i) for i in [1000, 4000, 8000]]
[th.start() for th in threads]
stop.set()
[th.join() for th in threads]
mr.close()
return errors
def phout_multi_readline(self):
errors = []
stop = Event()
mr = FileMultiReader(self.filename, stop)
threads = [Thread(target=self.mock_complex_consumer,
args=(mr.get_file(i), exp, 10, errors),
name='Thread-%d' % i) for i, exp in
[(1000, '\n1543699431'),
(4000, '815\t0\t200\n1543699487'),
(8000, '10968\t3633\t16\t7283\t36\t7387\t1066\t328\t0\t405\n1543699534')]]
[th.start() for th in threads]
stop.set()
[th.join() for th in threads]
mr.close()
return errors
@pytest.mark.skip('no module in arcadia')
@pytest.mark.benchmark(min_rounds=10)
def test_read(self, benchmark):
errors = benchmark(self.phout_multi_read)
assert len(errors) == 0
@pytest.mark.skip('no module in arcadia')
@pytest.mark.benchmark(min_rounds=5)
def test_readline(self, benchmark):
errors = benchmark(self.phout_multi_readline)
assert len(errors) == 0
|
import asyncio
import functools as ft
import importlib
import json
import logging
import pathlib
import sys
from types import ModuleType
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Set,
TypeVar,
Union,
cast,
)
from homeassistant.generated.mqtt import MQTT
from homeassistant.generated.ssdp import SSDP
from homeassistant.generated.zeroconf import HOMEKIT, ZEROCONF
# Typing imports that create a circular dependency
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
_LOGGER = logging.getLogger(__name__)
DATA_COMPONENTS = "components"
DATA_INTEGRATIONS = "integrations"
DATA_CUSTOM_COMPONENTS = "custom_components"
PACKAGE_CUSTOM_COMPONENTS = "custom_components"
PACKAGE_BUILTIN = "homeassistant.components"
CUSTOM_WARNING = (
"You are using a custom integration for %s which has not "
"been tested by Home Assistant. This component might "
"cause stability problems, be sure to disable it if you "
"experience issues with Home Assistant."
)
_UNDEF = object()
def manifest_from_legacy_module(domain: str, module: ModuleType) -> Dict:
"""Generate a manifest from a legacy module."""
return {
"domain": domain,
"name": domain,
"documentation": None,
"requirements": getattr(module, "REQUIREMENTS", []),
"dependencies": getattr(module, "DEPENDENCIES", []),
"codeowners": [],
}
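# Hedged illustration (not part of the loader): for a legacy module that
# declares REQUIREMENTS = ["example==1.0"] and no DEPENDENCIES, the stub
# manifest generated above is roughly:
#   {"domain": "example", "name": "example", "documentation": None,
#    "requirements": ["example==1.0"], "dependencies": [], "codeowners": []}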
async def _async_get_custom_components(
hass: "HomeAssistant",
) -> Dict[str, "Integration"]:
"""Return list of custom integrations."""
if hass.config.safe_mode:
return {}
try:
import custom_components # pylint: disable=import-outside-toplevel
except ImportError:
return {}
def get_sub_directories(paths: List[str]) -> List[pathlib.Path]:
"""Return all sub directories in a set of paths."""
return [
entry
for path in paths
for entry in pathlib.Path(path).iterdir()
if entry.is_dir()
]
dirs = await hass.async_add_executor_job(
get_sub_directories, custom_components.__path__
)
integrations = await asyncio.gather(
*(
hass.async_add_executor_job(
Integration.resolve_from_root, hass, custom_components, comp.name
)
for comp in dirs
)
)
return {
integration.domain: integration
for integration in integrations
if integration is not None
}
async def async_get_custom_components(
hass: "HomeAssistant",
) -> Dict[str, "Integration"]:
"""Return cached list of custom integrations."""
reg_or_evt = hass.data.get(DATA_CUSTOM_COMPONENTS)
if reg_or_evt is None:
evt = hass.data[DATA_CUSTOM_COMPONENTS] = asyncio.Event()
reg = await _async_get_custom_components(hass)
hass.data[DATA_CUSTOM_COMPONENTS] = reg
evt.set()
return reg
if isinstance(reg_or_evt, asyncio.Event):
await reg_or_evt.wait()
return cast(Dict[str, "Integration"], hass.data.get(DATA_CUSTOM_COMPONENTS))
return cast(Dict[str, "Integration"], reg_or_evt)
async def async_get_config_flows(hass: "HomeAssistant") -> Set[str]:
"""Return cached list of config flows."""
# pylint: disable=import-outside-toplevel
from homeassistant.generated.config_flows import FLOWS
flows: Set[str] = set()
flows.update(FLOWS)
integrations = await async_get_custom_components(hass)
flows.update(
[
integration.domain
for integration in integrations.values()
if integration.config_flow
]
)
return flows
async def async_get_zeroconf(hass: "HomeAssistant") -> Dict[str, List[Dict[str, str]]]:
"""Return cached list of zeroconf types."""
zeroconf: Dict[str, List[Dict[str, str]]] = ZEROCONF.copy()
integrations = await async_get_custom_components(hass)
for integration in integrations.values():
if not integration.zeroconf:
continue
for entry in integration.zeroconf:
data = {"domain": integration.domain}
if isinstance(entry, dict):
typ = entry["type"]
entry_without_type = entry.copy()
del entry_without_type["type"]
data.update(entry_without_type)
else:
typ = entry
zeroconf.setdefault(typ, []).append(data)
return zeroconf
async def async_get_homekit(hass: "HomeAssistant") -> Dict[str, str]:
"""Return cached list of homekit models."""
homekit: Dict[str, str] = HOMEKIT.copy()
integrations = await async_get_custom_components(hass)
for integration in integrations.values():
if (
not integration.homekit
or "models" not in integration.homekit
or not integration.homekit["models"]
):
continue
for model in integration.homekit["models"]:
homekit[model] = integration.domain
return homekit
async def async_get_ssdp(hass: "HomeAssistant") -> Dict[str, List]:
"""Return cached list of ssdp mappings."""
ssdp: Dict[str, List] = SSDP.copy()
integrations = await async_get_custom_components(hass)
for integration in integrations.values():
if not integration.ssdp:
continue
ssdp[integration.domain] = integration.ssdp
return ssdp
async def async_get_mqtt(hass: "HomeAssistant") -> Dict[str, List]:
"""Return cached list of MQTT mappings."""
mqtt: Dict[str, List] = MQTT.copy()
integrations = await async_get_custom_components(hass)
for integration in integrations.values():
if not integration.mqtt:
continue
mqtt[integration.domain] = integration.mqtt
return mqtt
class Integration:
"""An integration in Home Assistant."""
@classmethod
def resolve_from_root(
cls, hass: "HomeAssistant", root_module: ModuleType, domain: str
) -> "Optional[Integration]":
"""Resolve an integration from a root module."""
for base in root_module.__path__: # type: ignore
manifest_path = pathlib.Path(base) / domain / "manifest.json"
if not manifest_path.is_file():
continue
try:
manifest = json.loads(manifest_path.read_text())
except ValueError as err:
_LOGGER.error(
"Error parsing manifest.json file at %s: %s", manifest_path, err
)
continue
return cls(
hass, f"{root_module.__name__}.{domain}", manifest_path.parent, manifest
)
return None
@classmethod
def resolve_legacy(
cls, hass: "HomeAssistant", domain: str
) -> "Optional[Integration]":
"""Resolve legacy component.
Will create a stub manifest.
"""
comp = _load_file(hass, domain, _lookup_path(hass))
if comp is None:
return None
return cls(
hass,
comp.__name__,
pathlib.Path(comp.__file__).parent,
manifest_from_legacy_module(domain, comp),
)
def __init__(
self,
hass: "HomeAssistant",
pkg_path: str,
file_path: pathlib.Path,
manifest: Dict[str, Any],
):
"""Initialize an integration."""
self.hass = hass
self.pkg_path = pkg_path
self.file_path = file_path
self.manifest = manifest
manifest["is_built_in"] = self.is_built_in
if self.dependencies:
self._all_dependencies_resolved: Optional[bool] = None
self._all_dependencies: Optional[Set[str]] = None
else:
self._all_dependencies_resolved = True
self._all_dependencies = set()
_LOGGER.info("Loaded %s from %s", self.domain, pkg_path)
@property
def name(self) -> str:
"""Return name."""
return cast(str, self.manifest["name"])
@property
def disabled(self) -> Optional[str]:
"""Return reason integration is disabled."""
return cast(Optional[str], self.manifest.get("disabled"))
@property
def domain(self) -> str:
"""Return domain."""
return cast(str, self.manifest["domain"])
@property
def dependencies(self) -> List[str]:
"""Return dependencies."""
return cast(List[str], self.manifest.get("dependencies", []))
@property
def after_dependencies(self) -> List[str]:
"""Return after_dependencies."""
return cast(List[str], self.manifest.get("after_dependencies", []))
@property
def requirements(self) -> List[str]:
"""Return requirements."""
return cast(List[str], self.manifest.get("requirements", []))
@property
def config_flow(self) -> bool:
"""Return config_flow."""
return cast(bool, self.manifest.get("config_flow", False))
@property
def documentation(self) -> Optional[str]:
"""Return documentation."""
return cast(str, self.manifest.get("documentation"))
@property
def issue_tracker(self) -> Optional[str]:
"""Return issue tracker link."""
return cast(str, self.manifest.get("issue_tracker"))
@property
def quality_scale(self) -> Optional[str]:
"""Return Integration Quality Scale."""
return cast(str, self.manifest.get("quality_scale"))
@property
def mqtt(self) -> Optional[list]:
"""Return Integration MQTT entries."""
return cast(List[dict], self.manifest.get("mqtt"))
@property
def ssdp(self) -> Optional[list]:
"""Return Integration SSDP entries."""
return cast(List[dict], self.manifest.get("ssdp"))
@property
def zeroconf(self) -> Optional[list]:
"""Return Integration zeroconf entries."""
return cast(List[str], self.manifest.get("zeroconf"))
@property
def homekit(self) -> Optional[dict]:
"""Return Integration homekit entries."""
return cast(Dict[str, List], self.manifest.get("homekit"))
@property
def is_built_in(self) -> bool:
"""Test if package is a built-in integration."""
return self.pkg_path.startswith(PACKAGE_BUILTIN)
@property
def all_dependencies(self) -> Set[str]:
"""Return all dependencies including sub-dependencies."""
if self._all_dependencies is None:
raise RuntimeError("Dependencies not resolved!")
return self._all_dependencies
@property
def all_dependencies_resolved(self) -> bool:
"""Return if all dependencies have been resolved."""
return self._all_dependencies_resolved is not None
async def resolve_dependencies(self) -> bool:
"""Resolve all dependencies."""
if self._all_dependencies_resolved is not None:
return self._all_dependencies_resolved
try:
dependencies = await _async_component_dependencies(
self.hass, self.domain, self, set(), set()
)
dependencies.discard(self.domain)
self._all_dependencies = dependencies
self._all_dependencies_resolved = True
except IntegrationNotFound as err:
_LOGGER.error(
"Unable to resolve dependencies for %s: we are unable to resolve (sub)dependency %s",
self.domain,
err.domain,
)
self._all_dependencies_resolved = False
except CircularDependency as err:
_LOGGER.error(
"Unable to resolve dependencies for %s: it contains a circular dependency: %s -> %s",
self.domain,
err.from_domain,
err.to_domain,
)
self._all_dependencies_resolved = False
return self._all_dependencies_resolved
def get_component(self) -> ModuleType:
"""Return the component."""
cache = self.hass.data.setdefault(DATA_COMPONENTS, {})
if self.domain not in cache:
cache[self.domain] = importlib.import_module(self.pkg_path)
return cache[self.domain] # type: ignore
def get_platform(self, platform_name: str) -> ModuleType:
"""Return a platform for an integration."""
cache = self.hass.data.setdefault(DATA_COMPONENTS, {})
full_name = f"{self.domain}.{platform_name}"
if full_name not in cache:
cache[full_name] = self._import_platform(platform_name)
return cache[full_name] # type: ignore
def _import_platform(self, platform_name: str) -> ModuleType:
"""Import the platform."""
return importlib.import_module(f"{self.pkg_path}.{platform_name}")
def __repr__(self) -> str:
"""Text representation of class."""
return f"<Integration {self.domain}: {self.pkg_path}>"
async def async_get_integration(hass: "HomeAssistant", domain: str) -> Integration:
"""Get an integration."""
cache = hass.data.get(DATA_INTEGRATIONS)
if cache is None:
if not _async_mount_config_dir(hass):
raise IntegrationNotFound(domain)
cache = hass.data[DATA_INTEGRATIONS] = {}
int_or_evt: Union[Integration, asyncio.Event, None] = cache.get(domain, _UNDEF)
if isinstance(int_or_evt, asyncio.Event):
await int_or_evt.wait()
int_or_evt = cache.get(domain, _UNDEF)
# When we have waited and it's _UNDEF, it doesn't exist
# We don't cache that it doesn't exist, or else people can't fix it
# and then restart, because their config will never be valid.
if int_or_evt is _UNDEF:
raise IntegrationNotFound(domain)
if int_or_evt is not _UNDEF:
return cast(Integration, int_or_evt)
event = cache[domain] = asyncio.Event()
# Instead of using resolve_from_root we use the cache of custom
# components to find the integration.
integration = (await async_get_custom_components(hass)).get(domain)
if integration is not None:
_LOGGER.warning(CUSTOM_WARNING, domain)
cache[domain] = integration
event.set()
return integration
from homeassistant import components # pylint: disable=import-outside-toplevel
integration = await hass.async_add_executor_job(
Integration.resolve_from_root, hass, components, domain
)
if integration is not None:
cache[domain] = integration
event.set()
return integration
integration = Integration.resolve_legacy(hass, domain)
if integration is not None:
cache[domain] = integration
else:
# Remove event from cache.
cache.pop(domain)
event.set()
if not integration:
raise IntegrationNotFound(domain)
return integration
class LoaderError(Exception):
"""Loader base error."""
class IntegrationNotFound(LoaderError):
"""Raised when a component is not found."""
def __init__(self, domain: str) -> None:
"""Initialize a component not found error."""
super().__init__(f"Integration '{domain}' not found.")
self.domain = domain
class CircularDependency(LoaderError):
"""Raised when a circular dependency is found when resolving components."""
def __init__(self, from_domain: str, to_domain: str) -> None:
"""Initialize circular dependency error."""
super().__init__(f"Circular dependency detected: {from_domain} -> {to_domain}.")
self.from_domain = from_domain
self.to_domain = to_domain
def _load_file(
hass: "HomeAssistant", comp_or_platform: str, base_paths: List[str]
) -> Optional[ModuleType]:
"""Try to load specified file.
Looks in config dir first, then built-in components.
Only returns it if also found to be valid.
Async friendly.
"""
try:
return hass.data[DATA_COMPONENTS][comp_or_platform] # type: ignore
except KeyError:
pass
cache = hass.data.get(DATA_COMPONENTS)
if cache is None:
if not _async_mount_config_dir(hass):
return None
cache = hass.data[DATA_COMPONENTS] = {}
for path in (f"{base}.{comp_or_platform}" for base in base_paths):
try:
module = importlib.import_module(path)
# In Python 3 you can import files from directories that do not
# contain the file __init__.py. A directory is a valid module if
# it contains a file with the .py extension. In this case Python
# will succeed in importing the directory as a module and call it
# a namespace. We do not care about namespaces.
            # This guards against the case where only
            # custom_components/switch/some_platform.py exists; without this
            # check, importing custom_components.switch would still succeed
            # as a namespace package.
# __file__ was unset for namespaces before Python 3.7
if getattr(module, "__file__", None) is None:
continue
cache[comp_or_platform] = module
if module.__name__.startswith(PACKAGE_CUSTOM_COMPONENTS):
_LOGGER.warning(CUSTOM_WARNING, comp_or_platform)
return module
except ImportError as err:
# This error happens if for example custom_components/switch
# exists and we try to load switch.demo.
# Ignore errors for custom_components, custom_components.switch
# and custom_components.switch.demo.
white_listed_errors = []
parts = []
for part in path.split("."):
parts.append(part)
white_listed_errors.append(f"No module named '{'.'.join(parts)}'")
if str(err) not in white_listed_errors:
_LOGGER.exception(
("Error loading %s. Make sure all dependencies are installed"), path
)
return None
class ModuleWrapper:
"""Class to wrap a Python module and auto fill in hass argument."""
def __init__(self, hass: "HomeAssistant", module: ModuleType) -> None:
"""Initialize the module wrapper."""
self._hass = hass
self._module = module
def __getattr__(self, attr: str) -> Any:
"""Fetch an attribute."""
value = getattr(self._module, attr)
if hasattr(value, "__bind_hass"):
value = ft.partial(value, self._hass)
setattr(self, attr, value)
return value
class Components:
"""Helper to load components."""
def __init__(self, hass: "HomeAssistant") -> None:
"""Initialize the Components class."""
self._hass = hass
def __getattr__(self, comp_name: str) -> ModuleWrapper:
"""Fetch a component."""
# Test integration cache
integration = self._hass.data.get(DATA_INTEGRATIONS, {}).get(comp_name)
if isinstance(integration, Integration):
component: Optional[ModuleType] = integration.get_component()
else:
# Fallback to importing old-school
component = _load_file(self._hass, comp_name, _lookup_path(self._hass))
if component is None:
raise ImportError(f"Unable to load {comp_name}")
wrapped = ModuleWrapper(self._hass, component)
setattr(self, comp_name, wrapped)
return wrapped
class Helpers:
"""Helper to load helpers."""
def __init__(self, hass: "HomeAssistant") -> None:
"""Initialize the Helpers class."""
self._hass = hass
def __getattr__(self, helper_name: str) -> ModuleWrapper:
"""Fetch a helper."""
helper = importlib.import_module(f"homeassistant.helpers.{helper_name}")
wrapped = ModuleWrapper(self._hass, helper)
setattr(self, helper_name, wrapped)
return wrapped
def bind_hass(func: CALLABLE_T) -> CALLABLE_T:
"""Decorate function to indicate that first argument is hass."""
setattr(func, "__bind_hass", True)
return func
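# Illustrative sketch: a helper decorated with ``bind_hass`` gets its first
# argument filled in automatically when accessed through the ModuleWrapper-based
# ``Components``/``Helpers`` accessors above.
#     @bind_hass
#     def async_do_something(hass, entity_id):
#         ...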
async def _async_component_dependencies(
hass: "HomeAssistant",
start_domain: str,
integration: Integration,
loaded: Set[str],
loading: Set[str],
) -> Set[str]:
"""Recursive function to get component dependencies.
Async friendly.
"""
domain = integration.domain
loading.add(domain)
for dependency_domain in integration.dependencies:
# Check not already loaded
if dependency_domain in loaded:
continue
# If we are already loading it, we have a circular dependency.
if dependency_domain in loading:
raise CircularDependency(domain, dependency_domain)
loaded.add(dependency_domain)
dep_integration = await async_get_integration(hass, dependency_domain)
if start_domain in dep_integration.after_dependencies:
raise CircularDependency(start_domain, dependency_domain)
if dep_integration.dependencies:
dep_loaded = await _async_component_dependencies(
hass, start_domain, dep_integration, loaded, loading
)
loaded.update(dep_loaded)
loaded.add(domain)
loading.remove(domain)
return loaded
def _async_mount_config_dir(hass: "HomeAssistant") -> bool:
"""Mount config dir in order to load custom_component.
Async friendly but not a coroutine.
"""
if hass.config.config_dir is None:
_LOGGER.error("Can't load integrations - configuration directory is not set")
return False
if hass.config.config_dir not in sys.path:
sys.path.insert(0, hass.config.config_dir)
return True
def _lookup_path(hass: "HomeAssistant") -> List[str]:
"""Return the lookup paths for legacy lookups."""
if hass.config.safe_mode:
return [PACKAGE_BUILTIN]
return [PACKAGE_CUSTOM_COMPONENTS, PACKAGE_BUILTIN]
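# Illustrative usage from inside Home Assistant (a sketch; ``hass`` is a running
# HomeAssistant instance and "hue" is only an example domain):
#     integration = await async_get_integration(hass, "hue")
#     component = integration.get_component()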
|
import unittest
import os
import json
from unittest.mock import patch
import threading
from test.support import EnvironmentVarGuard
from urllib.parse import urlparse
from http.server import BaseHTTPRequestHandler, HTTPServer
from google.cloud import bigquery
from google.auth.exceptions import DefaultCredentialsError
from google.cloud.bigquery._http import Connection
from kaggle_gcp import KaggleKernelCredentials, PublicBigqueryClient, _DataProxyConnection, init_bigquery
import kaggle_secrets
class TestBigQuery(unittest.TestCase):
API_BASE_URL = "http://127.0.0.1:2121"
def _test_integration(self, client):
class HTTPHandler(BaseHTTPRequestHandler):
called = False
bearer_header_found = False
def do_HEAD(self):
self.send_response(200)
def do_GET(self):
HTTPHandler.called = True
HTTPHandler.bearer_header_found = any(
k for k in self.headers if k == "authorization" and self.headers[k] == "Bearer secret")
self.send_response(200)
self.send_header("Content-type", "application/json")
self.end_headers()
sample_dataset = {
"id": "bigqueryproject:datasetname",
"datasetReference": {
"datasetId": "datasetname",
"projectId": "bigqueryproject"
}
}
self.wfile.write(json.dumps({"kind": "bigquery#datasetList", "datasets": [sample_dataset]}).encode("utf-8"))
server_address = urlparse(self.API_BASE_URL)
with HTTPServer((server_address.hostname, server_address.port), HTTPHandler) as httpd:
threading.Thread(target=httpd.serve_forever).start()
for dataset in client.list_datasets():
self.assertEqual(dataset.dataset_id, "datasetname")
httpd.shutdown()
self.assertTrue(
HTTPHandler.called, msg="Fake server was not called from the BQ client, but should have been.")
self.assertTrue(
HTTPHandler.bearer_header_found, msg="authorization header was missing from the BQ request.")
def _setup_mocks(self, api_url_mock):
api_url_mock.__str__.return_value = self.API_BASE_URL
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_project_with_connected_account(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
with env:
client = bigquery.Client(
project='ANOTHER_PROJECT', credentials=KaggleKernelCredentials())
self._test_integration(client)
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_project_with_empty_integrations(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', '')
with env:
client = bigquery.Client(
project='ANOTHER_PROJECT', credentials=KaggleKernelCredentials())
self._test_integration(client)
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_project_with_connected_account_unrelated_integrations(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'GCS:ANOTHER_ONE')
with env:
client = bigquery.Client(
project='ANOTHER_PROJECT', credentials=KaggleKernelCredentials())
self._test_integration(client)
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_project_with_connected_account_default_credentials(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'BIGQUERY')
with env:
client = bigquery.Client(project='ANOTHER_PROJECT')
self.assertTrue(client._connection.user_agent.startswith("kaggle-gcp-client/1.0"))
self._test_integration(client)
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_project_with_env_var_project_default_credentials(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'BIGQUERY')
env.set('GOOGLE_CLOUD_PROJECT', 'ANOTHER_PROJECT')
with env:
client = bigquery.Client()
self._test_integration(client)
@patch.object(Connection, 'API_BASE_URL')
@patch.object(kaggle_secrets.UserSecretsClient, 'get_bigquery_access_token', return_value=('secret',1000))
def test_simultaneous_clients(self, mock_access_token, ApiUrlMock):
self._setup_mocks(ApiUrlMock)
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
with env:
proxy_client = bigquery.Client()
bq_client = bigquery.Client(
project='ANOTHER_PROJECT', credentials=KaggleKernelCredentials())
self._test_integration(bq_client)
# Verify that proxy client is still going to proxy to ensure global Connection
# isn't being modified.
self.assertNotEqual(type(proxy_client._connection), KaggleKernelCredentials)
self.assertEqual(type(proxy_client._connection), _DataProxyConnection)
def test_no_project_with_connected_account(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'BIGQUERY')
with env:
with self.assertRaises(DefaultCredentialsError):
# TODO(vimota): Handle this case, either default to Kaggle Proxy or use some default project
# by the user or throw a custom exception.
client = bigquery.Client()
self._test_integration(client)
def test_magics_with_connected_account_default_credentials(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
env.set('KAGGLE_KERNEL_INTEGRATIONS', 'BIGQUERY')
with env:
init_bigquery()
from google.cloud.bigquery import magics
self.assertEqual(type(magics.context._credentials), KaggleKernelCredentials)
magics.context.credentials = None
def test_magics_without_connected_account(self):
env = EnvironmentVarGuard()
env.set('KAGGLE_USER_SECRETS_TOKEN', 'foobar')
with env:
init_bigquery()
from google.cloud.bigquery import magics
self.assertIsNone(magics.context._credentials)
|
import numpy as np
from ...utils import verbose
from ._utils import _fetch_one, _data_path, TEMAZEPAM_SLEEP_RECORDS
from ._utils import _check_subjects
data_path = _data_path # expose _data_path(..) as data_path(..)
BASE_URL = 'https://physionet.org/physiobank/database/sleep-edfx/sleep-telemetry/' # noqa: E501
@verbose
def fetch_data(subjects, recording=[b'Placebo', 'temazepam'],
path=None, force_update=False,
update_path=None, base_url=BASE_URL, verbose=None):
"""Get paths to local copies of PhysioNet Polysomnography dataset files.
This will fetch data from the publicly available subjects from PhysioNet's
study of Temazepam effects on sleep [1]_. This corresponds to
a set of 22 subjects. Subjects had mild difficulty falling asleep
but were otherwise healthy.
See more details in the `physionet website
<https://physionet.org/physiobank/database/sleep-edfx/>`_.
Parameters
----------
subjects : list of int
The subjects to use. Can be in the range of 0-21 (inclusive).
path : None | str
Location of where to look for the PhysioNet data storing location.
If None, the environment variable or config parameter
``MNE_DATASETS_PHYSIONET_SLEEP_PATH`` is used. If it doesn't exist,
the "~/mne_data" directory is used. If the Polysomnography dataset
is not found under the given path, the data
will be automatically downloaded to the specified folder.
force_update : bool
Force update of the dataset even if a local copy exists.
update_path : bool | None
If True, set the MNE_DATASETS_EEGBCI_PATH in mne-python
config to the given path. If None, the user is prompted.
%(verbose)s
Returns
-------
paths : list
List of local data paths of the given type.
Notes
-----
For example, one could do:
>>> from mne.datasets import sleep_physionet
>>> sleep_physionet.temazepam.fetch_data(subjects=[1]) # doctest: +SKIP
    This would download data for subject 1 if it isn't there already.
References
----------
    .. [1] B Kemp, AH Zwinderman, B Tuk, HAC Kamphuisen, JJL Oberyé. Analysis
of a sleep-dependent neuronal feedback loop: the slow-wave
microcontinuity of the EEG. IEEE-BME 47(9):1185-1194 (2000).
.. [2] Goldberger AL, Amaral LAN, Glass L, Hausdorff JM, Ivanov PCh,
Mark RG, Mietus JE, Moody GB, Peng C-K, Stanley HE. (2000)
PhysioBank, PhysioToolkit, and PhysioNet: Components of a New
Research Resource for Complex Physiologic Signals.
Circulation 101(23):e215-e220
See Also
--------
:func:`mne.datasets.sleep_physionet.age.fetch_data`
"""
records = np.loadtxt(TEMAZEPAM_SLEEP_RECORDS,
skiprows=1,
delimiter=',',
usecols=(0, 3, 6, 7, 8, 9),
dtype={'names': ('subject', 'record', 'hyp sha',
'psg sha', 'hyp fname', 'psg fname'),
'formats': ('<i2', '<S15', 'S40', 'S40',
'<S22', '<S16')}
)
_check_subjects(subjects, 22)
path = data_path(path=path, update_path=update_path)
params = [path, force_update, base_url]
fnames = []
for subject in subjects: # all the subjects are present at this point
for idx in np.where(records['subject'] == subject)[0]:
if records['record'][idx] == b'Placebo':
psg_fname = _fetch_one(records['psg fname'][idx].decode(),
records['psg sha'][idx].decode(),
*params)
hyp_fname = _fetch_one(records['hyp fname'][idx].decode(),
records['hyp sha'][idx].decode(),
*params)
fnames.append([psg_fname, hyp_fname])
return fnames
|
__all__ = ['default_environment', 'compile', 'interpret']
import ast
import os
import platform
import sys
import weakref
_builtin_compile = compile
try:
from platform import python_implementation
except ImportError:
if os.name == "java":
# Jython 2.5 has ast module, but not platform.python_implementation() function.
def python_implementation():
return "Jython"
else:
raise
# restricted set of variables
_VARS = {'sys.platform': sys.platform,
'python_version': '%s.%s' % sys.version_info[:2],
         # FIXME parsing sys.version is not reliable, but there is no other
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
'python_full_version': sys.version.split(' ', 1)[0],
'os.name': os.name,
'platform.version': platform.version(),
'platform.machine': platform.machine(),
'platform.python_implementation': python_implementation(),
'extra': None # wheel extension
}
for var in list(_VARS.keys()):
if '.' in var:
_VARS[var.replace('.', '_')] = _VARS[var]
def default_environment():
"""Return copy of default PEP 385 globals dictionary."""
return dict(_VARS)
class ASTWhitelist(ast.NodeTransformer):
def __init__(self, statement):
self.statement = statement # for error messages
ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
# Bool operations
ALLOWED += (ast.And, ast.Or)
# Comparison operations
ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
def visit(self, node):
"""Ensure statement only contains allowed nodes."""
if not isinstance(node, self.ALLOWED):
raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
(self.statement,
(' ' * node.col_offset) + '^'))
return ast.NodeTransformer.visit(self, node)
def visit_Attribute(self, node):
"""Flatten one level of attribute access."""
new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
return ast.copy_location(new_node, node)
def parse_marker(marker):
tree = ast.parse(marker, mode='eval')
new_tree = ASTWhitelist(marker).generic_visit(tree)
return new_tree
def compile_marker(parsed_marker):
return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
dont_inherit=True)
_cache = weakref.WeakValueDictionary()
def compile(marker):
"""Return compiled marker as a function accepting an environment dict."""
try:
return _cache[marker]
except KeyError:
pass
if not marker.strip():
def marker_fn(environment=None, override=None):
""""""
return True
else:
compiled_marker = compile_marker(parse_marker(marker))
def marker_fn(environment=None, override=None):
"""override updates environment"""
if override is None:
override = {}
if environment is None:
environment = default_environment()
environment.update(override)
return eval(compiled_marker, environment)
marker_fn.__doc__ = marker
_cache[marker] = marker_fn
return _cache[marker]
def interpret(marker, environment=None):
return compile(marker)(environment)
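# Illustrative usage (the marker strings are arbitrary examples; evaluation uses
# default_environment() unless an environment dict is supplied):
#     interpret("python_version >= '2.6'")   # True under Python 3
#     interpret("sys_platform == 'win32'")   # False on Linux/macOS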
|
import json
from collections import Counter
from pathlib import Path
from random import choice, randrange
from typing import Callable, Dict, List, Mapping, Optional, Union
from typing import Counter as CounterType
from pytest import mark
from cerberus import rules_set_registry, schema_registry, TypeDefinition, Validator
from cerberus.benchmarks import DOCUMENTS_PATH
rules_set_registry.add("path_rules", {"coerce": Path, "type": "path"})
schema_registry.add(
"field_3_schema",
{
# an outer rule requires all fields' values to be a list
"field_31": {"contains": 0, "empty": False},
"field_32": {
"default": [None, None, None],
"items": [
{"type": "integer"},
{"type": "string"},
{"type": ["integer", "string"]},
],
"itemsrules": {"nullable": True},
},
},
)
def schema_1_field_3_allow_unknown_check_with(field, value, error):
if len(value) > 9:
error(field, "Requires a smaller list.")
schema_1 = {
"field_1": {
"type": "dict",
"required": True,
"allow_unknown": True,
"keysrules": {"regex": r"field_1[12345]"},
"minlength": 3,
"maxlength": 5,
"schema": {
"field_11": {
"type": "integer",
"allowed": list(range(100)),
"dependencies": {"field_12": 0, "^field_1.field_13": 0},
},
"field_12": {
"type": "integer",
"default_setter": lambda _: 1,
"forbidden": (1,),
},
"field_13": {"type": "integer"},
"field_14": {"rename": "field_13"},
},
},
"field_2": {
"type": Mapping,
"allow_unknown": False,
"schema": {
"field_21": {
"type": "integer",
"coerce": [str.strip, int],
"min": 9,
"max": 89,
"anyof": [{"dependencies": "field_22"}, {"dependencies": "field_23"}],
},
"field_22": {"excludes": "field_23", "nullable": True},
"field_23": {"nullable": True},
},
},
"field_3": {
"allow_unknown": {"check_with": schema_1_field_3_allow_unknown_check_with},
"valuesrules": {"type": "list"},
"require_all": True,
"schema": "field_3_schema",
},
"field_4": "path_rules",
}
def init_validator():
class TestValidator(Validator):
types_mapping = {
**Validator.types_mapping,
"path": TypeDefinition("path", (Path,), ()),
}
return TestValidator(schema_1, purge_unknown=True)
def load_documents():
with (DOCUMENTS_PATH / "overall_documents_1.json").open() as f:
documents = json.load(f)
return documents
def validate_documents(init_validator: Callable, documents: List[dict]) -> None:
doc_count = failed_count = 0
error_paths: CounterType[tuple] = Counter()
validator = init_validator()
def count_errors(errors):
if errors is None:
return
for error in errors:
if error.is_group_error:
count_errors(error.child_errors)
else:
error_paths[error.schema_path] += 1
for document in documents:
if validator.validated(document) is None:
failed_count += 1
count_errors(validator._errors)
doc_count += 1
print(
f"{failed_count} out of {doc_count} documents failed with "
f"{len(error_paths)} different error leafs."
)
print("Top 3 errors, excluding container errors:")
for path, count in error_paths.most_common(3):
print(f"{count}: {path}")
@mark.benchmark(group="overall-1")
def test_overall_performance_1(benchmark):
benchmark.pedantic(validate_documents, (init_validator, load_documents()), rounds=5)
#
def generate_sample_document_1() -> dict:
result = {}
for i in (1, 2, 3, 4, 5):
if randrange(100):
result[f"field_{i}"] = globals()[f"generate_document_1_field_{i}"]()
return result
def generate_document_1_field_1() -> dict:
result: Dict[str, Optional[int]] = {"field_11": randrange(100), "field_13": 0}
if randrange(100):
result["field_12"] = 0
if not randrange(100):
result["field_14"] = None
if randrange(100):
result["field_15"] = None
return result
def generate_document_1_field_2() -> dict:
x = "*" if not randrange(50) else " "
result: Dict[str, Union[int, str, None]] = {"field_21": x + str(randrange(100)) + x}
if randrange(100):
result["field_22"] = None
if "field_22" in result and not randrange(100):
result["field_23"] = None
return result
def generate_document_1_field_3() -> dict:
result: Dict[str, Optional[list]] = {}
if randrange(100):
result["field_31"] = [randrange(2) for _ in range(randrange(20))]
else:
result["field_31"] = None
if randrange(100):
result["field_32"] = [
choice((0, 0, 0, 0, 0, 0, 0, 0, "", None)),
choice(("", "", "", "", "", "", "", "", 0, None)),
choice((0, 0, 0, 0, "", "", "", "", None)),
]
if not randrange(10):
result["3_unknown"] = [0] * (randrange(10) + 1)
return result
def generate_document_1_field_4():
return "/foo/bar" if randrange(100) else 0
def generate_document_1_field_5():
return None
def write_sample_documents():
with (DOCUMENTS_PATH / "overall_documents_1.json").open("wt") as f:
json.dump([generate_sample_document_1() for _ in range(10_000)], f)
if __name__ == "__main__":
write_sample_documents()
|
import math
import numpy as np
from ..utils import logger, verbose, check_random_state, random_permutation
@verbose
def infomax(data, weights=None, l_rate=None, block=None, w_change=1e-12,
anneal_deg=60., anneal_step=0.9, extended=True, n_subgauss=1,
kurt_size=6000, ext_blocks=1, max_iter=200, random_state=None,
blowup=1e4, blowup_fac=0.5, n_small_angle=20, use_bias=True,
verbose=None, return_n_iter=False):
"""Run (extended) Infomax ICA decomposition on raw data.
Parameters
----------
data : np.ndarray, shape (n_samples, n_features)
The whitened data to unmix.
weights : np.ndarray, shape (n_features, n_features)
The initialized unmixing matrix.
Defaults to None, which means the identity matrix is used.
l_rate : float
This quantity indicates the relative size of the change in weights.
Defaults to ``0.01 / log(n_features ** 2)``.
.. note:: Smaller learning rates will slow down the ICA procedure.
block : int
The block size of randomly chosen data segments.
Defaults to floor(sqrt(n_times / 3.)).
w_change : float
The change at which to stop iteration. Defaults to 1e-12.
anneal_deg : float
The angle (in degrees) at which the learning rate will be reduced.
Defaults to 60.0.
anneal_step : float
The factor by which the learning rate will be reduced once
``anneal_deg`` is exceeded: ``l_rate *= anneal_step.``
Defaults to 0.9.
extended : bool
Whether to use the extended Infomax algorithm or not.
Defaults to True.
n_subgauss : int
The number of subgaussian components. Only considered for extended
Infomax. Defaults to 1.
kurt_size : int
The window size for kurtosis estimation. Only considered for extended
Infomax. Defaults to 6000.
ext_blocks : int
Only considered for extended Infomax. If positive, denotes the number
of blocks after which to recompute the kurtosis, which is used to
estimate the signs of the sources. In this case, the number of
sub-gaussian sources is automatically determined.
If negative, the number of sub-gaussian sources to be used is fixed
and equal to n_subgauss. In this case, the kurtosis is not estimated.
Defaults to 1.
max_iter : int
The maximum number of iterations. Defaults to 200.
%(random_state)s
blowup : float
The maximum difference allowed between two successive estimations of
the unmixing matrix. Defaults to 10000.
blowup_fac : float
The factor by which the learning rate will be reduced if the difference
        between two successive estimations of the unmixing matrix exceeds
``blowup``: ``l_rate *= blowup_fac``. Defaults to 0.5.
n_small_angle : int | None
The maximum number of allowed steps in which the angle between two
successive estimations of the unmixing matrix is less than
``anneal_deg``. If None, this parameter is not taken into account to
stop the iterations. Defaults to 20.
use_bias : bool
This quantity indicates if the bias should be computed.
Defaults to True.
%(verbose)s
return_n_iter : bool
Whether to return the number of iterations performed. Defaults to
False.
Returns
-------
unmixing_matrix : np.ndarray, shape (n_features, n_features)
The linear unmixing operator.
n_iter : int
        The number of iterations. Only returned if ``return_n_iter=True``.
References
----------
.. [1] A. J. Bell, T. J. Sejnowski. An information-maximization approach to
blind separation and blind deconvolution. Neural Computation, 7(6),
1129-1159, 1995.
.. [2] T. W. Lee, M. Girolami, T. J. Sejnowski. Independent component
analysis using an extended infomax algorithm for mixed subgaussian
and supergaussian sources. Neural Computation, 11(2), 417-441, 1999.
"""
from scipy.stats import kurtosis
rng = check_random_state(random_state)
# define some default parameters
max_weight = 1e8
restart_fac = 0.9
min_l_rate = 1e-10
degconst = 180.0 / np.pi
# for extended Infomax
extmomentum = 0.5
signsbias = 0.02
signcount_threshold = 25
signcount_step = 2
# check data shape
n_samples, n_features = data.shape
n_features_square = n_features ** 2
# check input parameters
# heuristic default - may need adjustment for large or tiny data sets
if l_rate is None:
l_rate = 0.01 / math.log(n_features ** 2.0)
if block is None:
block = int(math.floor(math.sqrt(n_samples / 3.0)))
    logger.info('Computing%sInfomax ICA' % (' Extended ' if extended else ' '))
# collect parameters
nblock = n_samples // block
lastt = (nblock - 1) * block + 1
# initialize training
if weights is None:
weights = np.identity(n_features, dtype=np.float64)
else:
weights = weights.T
BI = block * np.identity(n_features, dtype=np.float64)
bias = np.zeros((n_features, 1), dtype=np.float64)
onesrow = np.ones((1, block), dtype=np.float64)
startweights = weights.copy()
oldweights = startweights.copy()
step = 0
count_small_angle = 0
wts_blowup = False
blockno = 0
signcount = 0
initial_ext_blocks = ext_blocks # save the initial value in case of reset
# for extended Infomax
if extended:
signs = np.ones(n_features)
for k in range(n_subgauss):
signs[k] = -1
kurt_size = min(kurt_size, n_samples)
old_kurt = np.zeros(n_features, dtype=np.float64)
oldsigns = np.zeros(n_features)
# trainings loop
olddelta, oldchange = 1., 0.
while step < max_iter:
# shuffle data at each step
permute = random_permutation(n_samples, rng)
# ICA training block
# loop across block samples
for t in range(0, lastt, block):
u = np.dot(data[permute[t:t + block], :], weights)
u += np.dot(bias, onesrow).T
if extended:
# extended ICA update
y = np.tanh(u)
weights += l_rate * np.dot(weights,
BI -
signs[None, :] * np.dot(u.T, y) -
np.dot(u.T, u))
if use_bias:
bias += l_rate * np.reshape(np.sum(y, axis=0,
dtype=np.float64) * -2.0,
(n_features, 1))
else:
# logistic ICA weights update
y = 1.0 / (1.0 + np.exp(-u))
weights += l_rate * np.dot(weights,
BI + np.dot(u.T, (1.0 - 2.0 * y)))
if use_bias:
bias += l_rate * np.reshape(np.sum((1.0 - 2.0 * y), axis=0,
dtype=np.float64),
(n_features, 1))
# check change limit
max_weight_val = np.max(np.abs(weights))
if max_weight_val > max_weight:
wts_blowup = True
blockno += 1
if wts_blowup:
break
# ICA kurtosis estimation
if extended:
if ext_blocks > 0 and blockno % ext_blocks == 0:
if kurt_size < n_samples:
rp = np.floor(rng.uniform(0, 1, kurt_size) *
(n_samples - 1))
tpartact = np.dot(data[rp.astype(int), :], weights).T
else:
tpartact = np.dot(data, weights).T
# estimate kurtosis
kurt = kurtosis(tpartact, axis=1, fisher=True)
if extmomentum != 0:
kurt = (extmomentum * old_kurt +
(1.0 - extmomentum) * kurt)
old_kurt = kurt
# estimate weighted signs
signs = np.sign(kurt + signsbias)
ndiff = (signs - oldsigns != 0).sum()
if ndiff == 0:
signcount += 1
else:
signcount = 0
oldsigns = signs
if signcount >= signcount_threshold:
ext_blocks = np.fix(ext_blocks * signcount_step)
signcount = 0
# here we continue after the for loop over the ICA training blocks
# if weights in bounds:
if not wts_blowup:
oldwtchange = weights - oldweights
step += 1
angledelta = 0.0
delta = oldwtchange.reshape(1, n_features_square)
change = np.sum(delta * delta, dtype=np.float64)
if step > 2:
angledelta = math.acos(np.sum(delta * olddelta) /
math.sqrt(change * oldchange))
angledelta *= degconst
if verbose:
logger.info(
'step %d - lrate %5f, wchange %8.8f, angledelta %4.1f deg'
% (step, l_rate, change, angledelta))
# anneal learning rate
oldweights = weights.copy()
if angledelta > anneal_deg:
l_rate *= anneal_step # anneal learning rate
# accumulate angledelta until anneal_deg reaches l_rate
olddelta = delta
oldchange = change
count_small_angle = 0 # reset count when angledelta is large
else:
if step == 1: # on first step only
olddelta = delta # initialize
oldchange = change
if n_small_angle is not None:
count_small_angle += 1
if count_small_angle > n_small_angle:
max_iter = step
# apply stopping rule
if step > 2 and change < w_change:
step = max_iter
elif change > blowup:
l_rate *= blowup_fac
# restart if weights blow up (for lowering l_rate)
else:
step = 0 # start again
wts_blowup = 0 # re-initialize variables
blockno = 1
l_rate *= restart_fac # with lower learning rate
weights = startweights.copy()
oldweights = startweights.copy()
olddelta = np.zeros((1, n_features_square), dtype=np.float64)
bias = np.zeros((n_features, 1), dtype=np.float64)
ext_blocks = initial_ext_blocks
# for extended Infomax
if extended:
signs = np.ones(n_features)
for k in range(n_subgauss):
signs[k] = -1
oldsigns = np.zeros(n_features)
if l_rate > min_l_rate:
if verbose:
logger.info('... lowering learning rate to %g'
'\n... re-starting...' % l_rate)
else:
raise ValueError('Error in Infomax ICA: unmixing_matrix matrix'
'might not be invertible!')
# prepare return values
if return_n_iter:
return weights.T, step
else:
return weights.T
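# Illustrative call (a sketch; ``data`` is assumed to be an already whitened
# array of shape (n_samples, n_features)):
#     unmixing = infomax(data, extended=True, random_state=0)
# The estimated sources can then be recovered by applying ``unmixing`` to the
# (transposed) data.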
|
import os.path as op
import numpy as np
import pytest
from numpy.testing import assert_allclose
from scipy.io import loadmat
from scipy import linalg
from mne.channels import make_dig_montage
from mne import create_info, EvokedArray, pick_types, Epochs
from mne.io import read_raw_fif, RawArray
from mne.io.constants import FIFF
from mne.utils import object_diff, run_tests_if_main
from mne.datasets import testing
from mne.preprocessing import compute_current_source_density
data_path = op.join(testing.data_path(download=False), 'preprocessing')
eeg_fname = op.join(data_path, 'test_eeg.mat')
coords_fname = op.join(data_path, 'test_eeg_pos.mat')
csd_fname = op.join(data_path, 'test_eeg_csd.mat')
io_path = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(io_path, 'test_raw.fif')
@pytest.fixture(scope='function', params=[testing._pytest_param()])
def evoked_csd_sphere():
"""Get the MATLAB EEG data."""
data = loadmat(eeg_fname)['data']
coords = loadmat(coords_fname)['coords'] * 1e-3
csd = loadmat(csd_fname)['csd']
sphere = np.array((0, 0, 0, 0.08500060886258405)) # meters
sfreq = 256 # sampling rate
# swap coordinates' shape
pos = np.rollaxis(coords, 1)
# swap coordinates' positions
pos[:, [0]], pos[:, [1]] = pos[:, [1]], pos[:, [0]]
# invert first coordinate
pos[:, [0]] *= -1
dists = np.linalg.norm(pos, axis=-1)
assert_allclose(dists, sphere[-1], rtol=1e-2) # close to spherical, meters
# assign channel names to coordinates
ch_names = [str(ii) for ii in range(len(pos))]
dig_ch_pos = dict(zip(ch_names, pos))
montage = make_dig_montage(ch_pos=dig_ch_pos, coord_frame='head')
# create info
info = create_info(ch_names=ch_names, sfreq=sfreq, ch_types='eeg')
# make Evoked object
evoked = EvokedArray(data=data, info=info, tmin=-1)
evoked.set_montage(montage)
return evoked, csd, sphere
def test_csd_matlab(evoked_csd_sphere):
"""Test replication of the CSD MATLAB toolbox."""
evoked, csd, sphere = evoked_csd_sphere
evoked_csd = compute_current_source_density(evoked, sphere=sphere)
assert_allclose(linalg.norm(csd), 0.00177, atol=1e-5)
# If we don't project onto the sphere, we get 1e-12 accuracy here,
# but it's a bad assumption for real data!
    # Also, we divide by (radius ** 2) to get to units of V/m², unclear
# why this isn't done in the upstream implementation
evoked_csd_data = evoked_csd.data * sphere[-1] ** 2
assert_allclose(evoked_csd_data, csd, atol=2e-7)
with pytest.raises(ValueError, match=('CSD already applied, '
'should not be reapplied')):
compute_current_source_density(evoked_csd, sphere=sphere)
# 1e-5 here if we don't project...
assert_allclose(evoked_csd_data.sum(), 0.02455, atol=2e-3)
def test_csd_degenerate(evoked_csd_sphere):
"""Test degenerate conditions."""
evoked, csd, sphere = evoked_csd_sphere
warn_evoked = evoked.copy()
warn_evoked.info['bads'].append(warn_evoked.ch_names[3])
with pytest.raises(ValueError, match='Either drop.*or interpolate'):
compute_current_source_density(warn_evoked)
with pytest.raises(TypeError, match='must be an instance of'):
compute_current_source_density(None)
fail_evoked = evoked.copy()
with pytest.raises(ValueError, match='Zero or infinite position'):
for ch in fail_evoked.info['chs']:
ch['loc'][:3] = np.array([0, 0, 0])
compute_current_source_density(fail_evoked, sphere=sphere)
with pytest.raises(ValueError, match='Zero or infinite position'):
fail_evoked.info['chs'][3]['loc'][:3] = np.inf
compute_current_source_density(fail_evoked, sphere=sphere)
with pytest.raises(ValueError, match='No EEG channels found.'):
fail_evoked = evoked.copy()
fail_evoked.set_channel_types({ch_name: 'ecog' for ch_name in
fail_evoked.ch_names})
compute_current_source_density(fail_evoked, sphere=sphere)
with pytest.raises(TypeError, match='lambda2'):
compute_current_source_density(evoked, lambda2='0', sphere=sphere)
with pytest.raises(ValueError, match='lambda2 must be between 0 and 1'):
compute_current_source_density(evoked, lambda2=2, sphere=sphere)
with pytest.raises(TypeError, match='stiffness must be'):
compute_current_source_density(evoked, stiffness='0', sphere=sphere)
with pytest.raises(ValueError, match='stiffness must be non-negative'):
compute_current_source_density(evoked, stiffness=-2, sphere=sphere)
with pytest.raises(TypeError, match='n_legendre_terms must be'):
compute_current_source_density(evoked, n_legendre_terms=0.1,
sphere=sphere)
with pytest.raises(ValueError, match=('n_legendre_terms must be '
'greater than 0')):
compute_current_source_density(evoked, n_legendre_terms=0,
sphere=sphere)
with pytest.raises(ValueError, match='sphere must be'):
compute_current_source_density(evoked, sphere=-0.1)
with pytest.raises(ValueError, match=('sphere radius must be '
'greater than 0')):
compute_current_source_density(evoked, sphere=(-0.1, 0., 0., -1.))
with pytest.raises(TypeError):
compute_current_source_density(evoked, copy=2, sphere=sphere)
# gh-7859
raw = RawArray(evoked.data, evoked.info)
epochs = Epochs(
raw, [[0, 0, 1]], tmin=0, tmax=evoked.times[-1] - evoked.times[0],
baseline=None, preload=False, proj=False)
epochs.drop_bad()
assert len(epochs) == 1
assert_allclose(epochs.get_data()[0], evoked.data)
with pytest.raises(RuntimeError, match='Computing CSD requires.*preload'):
compute_current_source_density(epochs)
epochs.load_data()
raw = compute_current_source_density(raw)
assert not np.allclose(raw.get_data(), evoked.data)
evoked = compute_current_source_density(evoked)
assert_allclose(raw.get_data(), evoked.data)
epochs = compute_current_source_density(epochs)
assert_allclose(epochs.get_data()[0], evoked.data)
def test_csd_fif():
"""Test applying CSD to FIF data."""
raw = read_raw_fif(raw_fname).load_data()
raw.info['bads'] = []
picks = pick_types(raw.info, meg=False, eeg=True)
assert 'csd' not in raw
orig_eeg = raw.get_data('eeg')
assert len(orig_eeg) == 60
raw_csd = compute_current_source_density(raw)
assert 'eeg' not in raw_csd
new_eeg = raw_csd.get_data('csd')
assert not (orig_eeg == new_eeg).any()
# reset the only things that should change, and assert objects are the same
assert raw_csd.info['custom_ref_applied'] == FIFF.FIFFV_MNE_CUSTOM_REF_CSD
raw_csd.info['custom_ref_applied'] = 0
for pick in picks:
ch = raw_csd.info['chs'][pick]
assert ch['coil_type'] == FIFF.FIFFV_COIL_EEG_CSD
assert ch['unit'] == FIFF.FIFF_UNIT_V_M2
ch.update(coil_type=FIFF.FIFFV_COIL_EEG, unit=FIFF.FIFF_UNIT_V)
raw_csd._data[pick] = raw._data[pick]
assert object_diff(raw.info, raw_csd.info) == ''
run_tests_if_main()
|
from dataclasses import dataclass
from typing import Any, List, Optional
@dataclass
class DeviceInfo:
"""Represent device information."""
    manufacturer: Optional[str] = None
    model: Optional[str] = None
    fw_version: Optional[str] = None
    serial_number: Optional[str] = None
    mac: Optional[str] = None
@dataclass
class Resolution:
"""Represent video resolution."""
width: int
height: int
@dataclass
class Video:
"""Represent video encoding settings."""
encoding: str
resolution: Resolution
@dataclass
class PTZ:
"""Represents PTZ configuration on a profile."""
continuous: bool
relative: bool
absolute: bool
    presets: Optional[List[str]] = None
@dataclass
class Profile:
"""Represent a ONVIF Profile."""
index: int
token: str
name: str
video: Video
    ptz: Optional[PTZ] = None
@dataclass
class Capabilities:
"""Represents Service capabilities."""
snapshot: bool = False
events: bool = False
ptz: bool = False
@dataclass
class Event:
"""Represents a ONVIF event."""
uid: str
name: str
platform: str
    device_class: Optional[str] = None
    unit_of_measurement: Optional[str] = None
value: Any = None
entity_enabled: bool = True
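# Illustrative construction (all values are made up for demonstration):
#     profile = Profile(
#         index=0,
#         token="profile_0",
#         name="Main stream",
#         video=Video(encoding="H264", resolution=Resolution(width=1920, height=1080)),
#         ptz=PTZ(continuous=True, relative=False, absolute=False),
#     )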
|
from unittest.mock import call
import pytest
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.core import State
from tests.common import MockConfigEntry, mock_restore_cache
from tests.components.rfxtrx.conftest import create_rfx_test_cfg
EVENT_RFY_ENABLE_SUN_AUTO = "081a00000301010113"
EVENT_RFY_DISABLE_SUN_AUTO = "081a00000301010114"
async def test_one_switch(hass, rfxtrx):
"""Test with 1 switch."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210010f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("switch.ac_213c7f2_16")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:16"
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("switch.ac_213c7f2_16")
assert state.state == "on"
await hass.services.async_call(
"switch", "turn_off", {"entity_id": "switch.ac_213c7f2_16"}, blocking=True
)
state = hass.states.get("switch.ac_213c7f2_16")
assert state.state == "off"
assert rfxtrx.transport.send.mock_calls == [
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x01\x00\x00")),
call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
]
@pytest.mark.parametrize("state", ["on", "off"])
async def test_state_restore(hass, rfxtrx, state):
"""State restoration."""
entity_id = "switch.ac_213c7f2_16"
mock_restore_cache(hass, [State(entity_id, state)])
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f210010f51": {"signal_repetitions": 1}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == state
async def test_several_switches(hass, rfxtrx):
"""Test with 3 switches."""
entry_data = create_rfx_test_cfg(
devices={
"0b1100cd0213c7f230010f71": {"signal_repetitions": 1},
"0b1100100118cdea02010f70": {"signal_repetitions": 1},
"0b1100101118cdea02010f70": {"signal_repetitions": 1},
}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
state = hass.states.get("switch.ac_213c7f2_48")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 213c7f2:48"
state = hass.states.get("switch.ac_118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 118cdea:2"
state = hass.states.get("switch.ac_1118cdea_2")
assert state
assert state.state == "off"
assert state.attributes.get("friendly_name") == "AC 1118cdea:2"
@pytest.mark.parametrize("repetitions", [1, 3])
async def test_repetitions(hass, rfxtrx, repetitions):
"""Test signal repetitions."""
entry_data = create_rfx_test_cfg(
devices={"0b1100cd0213c7f230010f71": {"signal_repetitions": repetitions}}
)
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.ac_213c7f2_48"}, blocking=True
)
await hass.async_block_till_done()
assert rfxtrx.transport.send.call_count == repetitions
async def test_discover_switch(hass, rfxtrx_automatic):
"""Test with discovery of switches."""
rfxtrx = rfxtrx_automatic
await rfxtrx.signal("0b1100100118cdea02010f70")
state = hass.states.get("switch.ac_118cdea_2")
assert state
assert state.state == "on"
await rfxtrx.signal("0b1100100118cdeb02010f70")
state = hass.states.get("switch.ac_118cdeb_2")
assert state
assert state.state == "on"
async def test_discover_rfy_sun_switch(hass, rfxtrx_automatic):
"""Test with discovery of switches."""
rfxtrx = rfxtrx_automatic
await rfxtrx.signal(EVENT_RFY_DISABLE_SUN_AUTO)
state = hass.states.get("switch.rfy_030101_1")
assert state
assert state.state == "off"
await rfxtrx.signal(EVENT_RFY_ENABLE_SUN_AUTO)
state = hass.states.get("switch.rfy_030101_1")
assert state
assert state.state == "on"
async def test_unknown_event_code(hass, rfxtrx):
"""Test with 3 switches."""
entry_data = create_rfx_test_cfg(devices={"1234567890": {"signal_repetitions": 1}})
mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
mock_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_entry.entry_id)
await hass.async_block_till_done()
conf_entries = hass.config_entries.async_entries(DOMAIN)
assert len(conf_entries) == 1
entry = conf_entries[0]
assert entry.state == "loaded"
|
from homeassistant.components.cover import (
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
intent as cover_intent,
)
from homeassistant.helpers import intent
from tests.common import async_mock_service
async def test_open_cover_intent(hass):
"""Test HassOpenCover intent."""
await cover_intent.async_setup_intents(hass)
hass.states.async_set("cover.garage_door", "closed")
calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER)
response = await intent.async_handle(
hass, "test", "HassOpenCover", {"name": {"value": "garage door"}}
)
await hass.async_block_till_done()
assert response.speech["plain"]["speech"] == "Opened garage door"
assert len(calls) == 1
call = calls[0]
assert call.domain == "cover"
assert call.service == "open_cover"
assert call.data == {"entity_id": "cover.garage_door"}
async def test_close_cover_intent(hass):
"""Test HassCloseCover intent."""
await cover_intent.async_setup_intents(hass)
hass.states.async_set("cover.garage_door", "open")
calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER)
response = await intent.async_handle(
hass, "test", "HassCloseCover", {"name": {"value": "garage door"}}
)
await hass.async_block_till_done()
assert response.speech["plain"]["speech"] == "Closed garage door"
assert len(calls) == 1
call = calls[0]
assert call.domain == "cover"
assert call.service == "close_cover"
assert call.data == {"entity_id": "cover.garage_door"}
|
from math import sqrt
from django.contrib.sites.models import Site
from django.core.cache import InvalidCacheBackendError
from django.core.cache import caches
from django.utils.functional import cached_property
from django.utils.html import strip_tags
import regex as re
from zinnia.models.entry import Entry
from zinnia.settings import COMPARISON_FIELDS
from zinnia.settings import STOP_WORDS
PUNCTUATION = re.compile(r'\p{P}+')
def pearson_score(list1, list2):
"""
    Compute the Pearson correlation score between two lists of values.
"""
size = len(list1)
sum1 = sum(list1)
sum2 = sum(list2)
sum_sq1 = sum([pow(l, 2) for l in list1])
sum_sq2 = sum([pow(l, 2) for l in list2])
prod_sum = sum([list1[i] * list2[i] for i in range(size)])
num = prod_sum - (sum1 * sum2 / float(size))
den = sqrt((sum_sq1 - pow(sum1, 2.0) / size) *
(sum_sq2 - pow(sum2, 2.0) / size))
return num / den
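# Quick illustrative checks (values follow directly from the formula above):
#     pearson_score([1, 2, 3], [1, 2, 3])   # -> 1.0  (perfect positive correlation)
#     pearson_score([1, 2, 3], [3, 2, 1])   # -> -1.0 (perfect negative correlation)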
class ModelVectorBuilder(object):
"""
Build a list of vectors based on a Queryset.
"""
limit = None
fields = None
queryset = None
def __init__(self, **kwargs):
self.limit = kwargs.pop('limit', self.limit)
self.fields = kwargs.pop('fields', self.fields)
self.queryset = kwargs.pop('queryset', self.queryset)
def get_related(self, instance, number):
"""
Return a list of the most related objects to instance.
"""
related_pks = self.compute_related(instance.pk)[:number]
related_pks = [pk for pk, score in related_pks]
related_objects = sorted(
self.queryset.model.objects.filter(pk__in=related_pks),
key=lambda x: related_pks.index(x.pk))
return related_objects
def compute_related(self, object_id, score=pearson_score):
"""
Compute the most related pks to an object's pk.
"""
dataset = self.dataset
object_vector = dataset.get(object_id)
if not object_vector:
return []
object_related = {}
for o_id, o_vector in dataset.items():
if o_id != object_id:
try:
object_related[o_id] = score(object_vector, o_vector)
except ZeroDivisionError:
pass
related = sorted(object_related.items(),
key=lambda k_v: (k_v[1], k_v[0]), reverse=True)
return related
@cached_property
def raw_dataset(self):
"""
Generate a raw dataset based on the queryset
and the specified fields.
"""
dataset = {}
queryset = self.queryset.values_list(*(['pk'] + self.fields))
if self.limit:
queryset = queryset[:self.limit]
for item in queryset:
item = list(item)
item_pk = item.pop(0)
datas = ' '.join(map(str, item))
dataset[item_pk] = self.raw_clean(datas)
return dataset
def raw_clean(self, datas):
"""
Apply a cleaning on raw datas.
"""
datas = strip_tags(datas) # Remove HTML
datas = STOP_WORDS.rebase(datas, '') # Remove STOP WORDS
datas = PUNCTUATION.sub('', datas) # Remove punctuation
datas = datas.lower()
return [d for d in datas.split() if len(d) > 1]
@cached_property
def columns_dataset(self):
"""
Generate the columns and the whole dataset.
"""
data = {}
words_total = {}
for instance, words in self.raw_dataset.items():
words_item_total = {}
for word in words:
words_total.setdefault(word, 0)
words_item_total.setdefault(word, 0)
words_total[word] += 1
words_item_total[word] += 1
data[instance] = words_item_total
columns = sorted(words_total.keys(),
key=lambda w: words_total[w],
reverse=True)[:250]
columns = sorted(columns)
dataset = {}
for instance in data.keys():
dataset[instance] = [data[instance].get(word, 0)
for word in columns]
return columns, dataset
@property
def columns(self):
"""
Access to columns.
"""
return self.columns_dataset[0]
@property
def dataset(self):
"""
Access to dataset.
"""
return self.columns_dataset[1]
class CachedModelVectorBuilder(ModelVectorBuilder):
"""
Cached version of VectorBuilder.
"""
@property
def cache_backend(self):
"""
        Try to use the ``comparison`` cache backend;
        fall back to the ``default`` cache backend if it is not configured.
"""
try:
comparison_cache = caches['comparison']
except InvalidCacheBackendError:
comparison_cache = caches['default']
return comparison_cache
@property
def cache_key(self):
"""
Key for the cache.
"""
return self.__class__.__name__
def get_cache(self):
"""
        Get the cached data from the cache backend.
"""
return self.cache_backend.get(self.cache_key, {})
def set_cache(self, value):
"""
        Merge the existing cached data into ``value`` and store it in the cache backend.
"""
value.update(self.cache)
return self.cache_backend.set(self.cache_key, value)
cache = property(get_cache, set_cache)
def cache_flush(self):
"""
Flush the cache for this instance.
"""
return self.cache_backend.delete(self.cache_key)
def get_related(self, instance, number):
"""
Implement high level cache system for get_related.
"""
cache = self.cache
cache_key = '%s:%s' % (instance.pk, number)
if cache_key not in cache:
related_objects = super(CachedModelVectorBuilder,
self).get_related(instance, number)
cache[cache_key] = related_objects
self.cache = cache
return cache[cache_key]
@property
def columns_dataset(self):
"""
Implement high level cache system for columns and dataset.
"""
cache = self.cache
cache_key = 'columns_dataset'
if cache_key not in cache:
columns_dataset = super(CachedModelVectorBuilder, self
).columns_dataset
cache[cache_key] = columns_dataset
self.cache = cache
return cache[cache_key]
class EntryPublishedVectorBuilder(CachedModelVectorBuilder):
"""
Vector builder for published entries.
"""
limit = 100
queryset = Entry.published
fields = COMPARISON_FIELDS
@property
def cache_key(self):
"""
Key for the cache handling current site.
"""
return '%s:%s' % (super(EntryPublishedVectorBuilder, self).cache_key,
Site.objects.get_current().pk)
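# Illustrative sketch (not part of the original module): how the published-entry
# vector builder might be used to fetch entries related to a given ``entry``.
# The helper below is hypothetical and is never called at import time.
def _related_entries_example(entry, number=5):
    """Return up to ``number`` published entries most similar to ``entry``."""
    builder = EntryPublishedVectorBuilder()
    return builder.get_related(entry, number)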
|
import functools
import logging
import sys
import time
debug = logging.debug
def fatal(msg, code=1):
print(msg + "\n")
logging.error(msg)
sys.exit(code)
def fn(f, *args, **kwargs):
logging.debug("{}: {} {}".format(repr(f), args, kwargs))
return f(*args, **kwargs)
def duration(fn):
@functools.wraps(fn)
def timer(*args, **kwargs):
start = time.time()
try:
return fn(*args, **kwargs)
finally:
debug(
"duration: {}.{}: {:2.2f}s".format(
fn.__module__, fn.__name__, time.time() - start
)
)
return timer
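# Illustrative sketch (not part of the original module): applying the ``duration``
# decorator so each call logs its wall-clock time at DEBUG level. The decorated
# function below is hypothetical and only runs when this module is executed directly.
@duration
def _busy_wait(seconds=0.1):
    """Sleep briefly so the duration log has something to measure."""
    time.sleep(seconds)
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    _busy_wait()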
|
from collections import defaultdict
from datetime import timedelta
import logging
import uuid
import brottsplatskartan
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
CONF_AREA = "area"
DEFAULT_NAME = "Brottsplatskartan"
SCAN_INTERVAL = timedelta(minutes=30)
AREAS = [
"Blekinge lΓ€n",
"Dalarnas lΓ€n",
"Gotlands lΓ€n",
"GΓ€vleborgs lΓ€n",
"Hallands lΓ€n",
"JΓ€mtlands lΓ€n",
"JΓΆnkΓΆpings lΓ€n",
"Kalmar lΓ€n",
"Kronobergs lΓ€n",
"Norrbottens lΓ€n",
"SkΓ₯ne lΓ€n",
"Stockholms lΓ€n",
"SΓΆdermanlands lΓ€n",
"Uppsala lΓ€n",
"VΓ€rmlands lΓ€n",
"VΓ€sterbottens lΓ€n",
"VΓ€sternorrlands lΓ€n",
"VΓ€stmanlands lΓ€n",
"VΓ€stra GΓΆtalands lΓ€n",
"Γrebro lΓ€n",
"ΓstergΓΆtlands lΓ€n",
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_AREA, default=[]): vol.All(cv.ensure_list, [vol.In(AREAS)]),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Brottsplatskartan platform."""
area = config.get(CONF_AREA)
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
name = config[CONF_NAME]
    # Every Home Assistant instance should have its own unique
# app parameter: https://brottsplatskartan.se/sida/api
app = f"ha-{uuid.getnode()}"
bpk = brottsplatskartan.BrottsplatsKartan(
app=app, area=area, latitude=latitude, longitude=longitude
)
add_entities([BrottsplatskartanSensor(bpk, name)], True)
class BrottsplatskartanSensor(Entity):
"""Representation of a Brottsplatskartan Sensor."""
def __init__(self, bpk, name):
"""Initialize the Brottsplatskartan sensor."""
self._attributes = {}
self._brottsplatskartan = bpk
self._name = name
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def update(self):
"""Update device state."""
incident_counts = defaultdict(int)
incidents = self._brottsplatskartan.get_incidents()
if incidents is False:
_LOGGER.debug("Problems fetching incidents")
return
for incident in incidents:
incident_type = incident.get("title_type")
incident_counts[incident_type] += 1
self._attributes = {ATTR_ATTRIBUTION: brottsplatskartan.ATTRIBUTION}
self._attributes.update(incident_counts)
self._state = len(incidents)
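# Illustrative sketch (not part of the original module): a platform config dict
# that would pass PLATFORM_SCHEMA above. The coordinates and area are
# hypothetical example values.
_EXAMPLE_CONFIG = {
    CONF_NAME: "Brottsplatskartan",
    CONF_LATITUDE: 59.33,
    CONF_LONGITUDE: 18.07,
    CONF_AREA: ["Stockholms län"],
}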
|
from datetime import timedelta
import json
from gios import ApiError
from homeassistant.components.air_quality import (
ATTR_AQI,
ATTR_CO,
ATTR_NO2,
ATTR_OZONE,
ATTR_PM_2_5,
ATTR_PM_10,
ATTR_SO2,
)
from homeassistant.components.gios.air_quality import ATTRIBUTION
from homeassistant.components.gios.const import AQI_GOOD
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
STATE_UNAVAILABLE,
)
from homeassistant.util.dt import utcnow
from tests.async_mock import patch
from tests.common import async_fire_time_changed, load_fixture
from tests.components.gios import init_integration
async def test_air_quality(hass):
"""Test states of the air_quality."""
await init_integration(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("air_quality.home")
assert state
assert state.state == "4"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_AQI) == AQI_GOOD
assert state.attributes.get(ATTR_PM_10) == 17
assert state.attributes.get(ATTR_PM_2_5) == 4
assert state.attributes.get(ATTR_CO) == 252
assert state.attributes.get(ATTR_SO2) == 4
assert state.attributes.get(ATTR_NO2) == 7
assert state.attributes.get(ATTR_OZONE) == 96
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
)
assert state.attributes.get(ATTR_ICON) == "mdi:emoticon-happy"
assert state.attributes.get("station") == "Test Name 1"
entry = registry.async_get("air_quality.home")
assert entry
assert entry.unique_id == 123
async def test_air_quality_with_incomplete_data(hass):
"""Test states of the air_quality with incomplete data from measuring station."""
await init_integration(hass, incomplete_data=True)
registry = await hass.helpers.entity_registry.async_get_registry()
state = hass.states.get("air_quality.home")
assert state
assert state.state == "4"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_AQI) == "foo"
assert state.attributes.get(ATTR_PM_10) is None
assert state.attributes.get(ATTR_PM_2_5) == 4
assert state.attributes.get(ATTR_CO) == 252
assert state.attributes.get(ATTR_SO2) == 4
assert state.attributes.get(ATTR_NO2) == 7
assert state.attributes.get(ATTR_OZONE) == 96
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
)
assert state.attributes.get(ATTR_ICON) == "mdi:blur"
assert state.attributes.get("station") == "Test Name 1"
entry = registry.async_get("air_quality.home")
assert entry
assert entry.unique_id == 123
async def test_availability(hass):
"""Ensure that we mark the entities unavailable correctly when service causes an error."""
await init_integration(hass)
state = hass.states.get("air_quality.home")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "4"
future = utcnow() + timedelta(minutes=60)
with patch(
"homeassistant.components.gios.Gios._get_all_sensors",
side_effect=ApiError("Unexpected error"),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("air_quality.home")
assert state
assert state.state == STATE_UNAVAILABLE
future = utcnow() + timedelta(minutes=120)
with patch(
"homeassistant.components.gios.Gios._get_all_sensors",
return_value=json.loads(load_fixture("gios/sensors.json")),
), patch(
"homeassistant.components.gios.Gios._get_indexes",
return_value=json.loads(load_fixture("gios/indexes.json")),
):
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get("air_quality.home")
assert state
assert state.state != STATE_UNAVAILABLE
assert state.state == "4"
|
from unittest import TestCase
from django.http.request import HttpRequest
from django.test.utils import override_settings
from weblate.middleware import ProxyMiddleware
class ProxyTest(TestCase):
def get_response(self, request):
self.assertEqual(request.META["REMOTE_ADDR"], "1.2.3.4")
return "response"
@override_settings(
IP_BEHIND_REVERSE_PROXY=False,
IP_PROXY_HEADER="HTTP_X_FORWARDED_FOR",
IP_PROXY_OFFSET=0,
)
def test_direct(self):
request = HttpRequest()
request.META["REMOTE_ADDR"] = "1.2.3.4"
middleware = ProxyMiddleware(self.get_response)
self.assertEqual(middleware(request), "response")
@override_settings(
IP_BEHIND_REVERSE_PROXY=True,
IP_PROXY_HEADER="HTTP_X_FORWARDED_FOR",
IP_PROXY_OFFSET=0,
)
def test_proxy(self):
request = HttpRequest()
request.META["REMOTE_ADDR"] = "7.8.9.0"
request.META["HTTP_X_FORWARDED_FOR"] = "1.2.3.4"
middleware = ProxyMiddleware(self.get_response)
self.assertEqual(middleware(request), "response")
@override_settings(
IP_BEHIND_REVERSE_PROXY=True,
IP_PROXY_HEADER="HTTP_X_FORWARDED_FOR",
IP_PROXY_OFFSET=1,
)
def test_proxy_second(self):
request = HttpRequest()
request.META["REMOTE_ADDR"] = "7.8.9.0"
request.META["HTTP_X_FORWARDED_FOR"] = "2.3.4.5, 1.2.3.4"
middleware = ProxyMiddleware(self.get_response)
self.assertEqual(middleware(request), "response")
@override_settings(
IP_BEHIND_REVERSE_PROXY=True,
IP_PROXY_HEADER="HTTP_X_FORWARDED_FOR",
IP_PROXY_OFFSET=0,
)
def test_proxy_invalid(self):
request = HttpRequest()
request.META["REMOTE_ADDR"] = "1.2.3.4"
request.META["HTTP_X_FORWARDED_FOR"] = "2.3.4"
middleware = ProxyMiddleware(self.get_response)
self.assertEqual(middleware(request), "response")
|
import colorsys
import random
from openrazer.client import DeviceManager
from openrazer.client import constants as razer_constants
# Create a DeviceManager. This is used to get specific devices
device_manager = DeviceManager()
print("Found {} Razer devices".format(len(device_manager.devices)))
devices = device_manager.devices
# Iterate over a copy so unsupported devices can be removed from the list safely.
for device in list(devices):
    if not device.fx.advanced:
        print("Skipping device " + device.name + " (" + device.serial + ")")
        devices.remove(device)
print()
# Disable daemon effect syncing.
# Without this, the daemon will try to set the lighting effect to every device.
device_manager.sync_effects = False
# Helper function to generate interesting colors
def random_color():
    rgb = colorsys.hsv_to_rgb(random.uniform(0, 1), random.uniform(0.5, 1), 1)
    # Scale to 0-255 (using 256 could overflow to 256 when a channel is exactly 1.0).
    return tuple(int(255 * x) for x in rgb)
# Set random colors for each zone of each device
for device in devices:
rows, cols = device.fx.advanced.rows, device.fx.advanced.cols
for row in range(rows):
for col in range(cols):
device.fx.advanced.matrix[row, col] = random_color()
device.fx.advanced.draw()
|
import logging
import random
import warnings
import tempfile
import os
from subprocess import PIPE
import numpy as np
from gensim import utils, corpora, matutils
from gensim.utils import check_output
logger = logging.getLogger(__name__)
class DtmModel(utils.SaveLoad):
"""Python wrapper using `DTM implementation <https://github.com/magsilva/dtm/tree/master/bin>`_.
Communication between DTM and Python takes place by passing around data files on disk and executing
the DTM binary as a subprocess.
Warnings
--------
    This is **only** a Python wrapper for the `DTM implementation <https://github.com/magsilva/dtm/tree/master/bin>`_;
    you need to install the original implementation first and pass the path to its binary via ``dtm_path``.
"""
def __init__(self, dtm_path, corpus=None, time_slices=None, mode='fit', model='dtm', num_topics=100,
id2word=None, prefix=None, lda_sequence_min_iter=6, lda_sequence_max_iter=20, lda_max_em_iter=10,
alpha=0.01, top_chain_var=0.005, rng_seed=0, initialize_lda=True):
"""
Parameters
----------
dtm_path : str
Path to the dtm binary, e.g. `/home/username/dtm/dtm/main`.
corpus : iterable of iterable of (int, int)
Collection of texts in BoW format.
time_slices : list of int
Sequence of timestamps.
mode : {'fit', 'time'}, optional
            Controls the mode of the model: 'fit' is for training, 'time' is for analyzing documents through time
            according to a DTM, essentially a held-out set.
        model : {'fixed', 'dtm'}, optional
            Controls which model will be run: 'fixed' is for DIM and 'dtm' for DTM.
num_topics : int, optional
Number of topics.
id2word : :class:`~gensim.corpora.dictionary.Dictionary`, optional
            Mapping between token ids and words from the corpus; if not specified, it will be inferred from `corpus`.
prefix : str, optional
Prefix for produced temporary files.
lda_sequence_min_iter : int, optional
Min iteration of LDA.
lda_sequence_max_iter : int, optional
Max iteration of LDA.
lda_max_em_iter : int, optional
Max em optimization iterations in LDA.
        alpha : float, optional
            Hyperparameter that affects the sparsity of the document-topic distributions for the LDA models in each time slice.
        top_chain_var : float, optional
            This hyperparameter controls one of the key aspects of topic evolution: the speed at which
            topics evolve. A smaller top_chain_var leads to similar word distributions over multiple time slices.
rng_seed : int, optional
Random seed.
initialize_lda : bool, optional
            If True, initialize DTM with LDA.
"""
if not os.path.isfile(dtm_path):
raise ValueError("dtm_path must point to the binary file, not to a folder")
self.dtm_path = dtm_path
self.id2word = id2word
if self.id2word is None:
logger.warning("no word id mapping provided; initializing from corpus, assuming identity")
self.id2word = utils.dict_from_corpus(corpus)
self.num_terms = len(self.id2word)
else:
self.num_terms = 0 if not self.id2word else 1 + max(self.id2word.keys())
if self.num_terms == 0:
raise ValueError("cannot compute DTM over an empty collection (no terms)")
self.num_topics = num_topics
try:
lencorpus = len(corpus)
except TypeError:
logger.warning("input corpus stream has no len(); counting documents")
lencorpus = sum(1 for _ in corpus)
if lencorpus == 0:
raise ValueError("cannot compute DTM over an empty corpus")
if model == "fixed" and any(not text for text in corpus):
raise ValueError("""There is a text without words in the input corpus.
This breaks method='fixed' (The DIM model).""")
if lencorpus != sum(time_slices):
raise ValueError(
"mismatched timeslices %{slices} for corpus of len {clen}"
.format(slices=sum(time_slices), clen=lencorpus)
)
self.lencorpus = lencorpus
if prefix is None:
rand_prefix = hex(random.randint(0, 0xffffff))[2:] + '_'
prefix = os.path.join(tempfile.gettempdir(), rand_prefix)
self.prefix = prefix
self.time_slices = time_slices
self.lda_sequence_min_iter = int(lda_sequence_min_iter)
self.lda_sequence_max_iter = int(lda_sequence_max_iter)
self.lda_max_em_iter = int(lda_max_em_iter)
self.alpha = alpha
self.top_chain_var = top_chain_var
self.rng_seed = rng_seed
self.initialize_lda = str(initialize_lda).lower()
self.lambda_ = None
self.obs_ = None
self.lhood_ = None
self.gamma_ = None
self.init_alpha = None
self.init_beta = None
self.init_ss = None
self.em_steps = []
self.influences_time = []
if corpus is not None:
self.train(corpus, time_slices, mode, model)
def fout_liklihoods(self):
"""Get path to temporary lhood data file.
Returns
-------
str
Path to lhood data file.
"""
return self.prefix + 'train_out/lda-seq/' + 'lhoods.dat'
def fout_gamma(self):
"""Get path to temporary gamma data file.
Returns
-------
str
Path to gamma data file.
"""
return self.prefix + 'train_out/lda-seq/' + 'gam.dat'
def fout_prob(self):
"""Get template of path to temporary file.
Returns
-------
str
Path to file.
"""
return self.prefix + 'train_out/lda-seq/' + 'topic-{i}-var-e-log-prob.dat'
def fout_observations(self):
"""Get template of path to temporary file.
Returns
-------
str
Path to file.
"""
return self.prefix + 'train_out/lda-seq/' + 'topic-{i}-var-obs.dat'
def fout_influence(self):
"""Get template of path to temporary file.
Returns
-------
str
Path to file.
"""
return self.prefix + 'train_out/lda-seq/' + 'influence_time-{i}'
def foutname(self):
"""Get path to temporary file.
Returns
-------
str
Path to file.
"""
return self.prefix + 'train_out'
def fem_steps(self):
"""Get path to temporary em_step data file.
Returns
-------
str
Path to em_step data file.
"""
return self.prefix + 'train_out/' + 'em_log.dat'
def finit_alpha(self):
"""Get path to initially trained lda alpha file.
Returns
-------
str
Path to initially trained lda alpha file.
"""
return self.prefix + 'train_out/' + 'initial-lda.alpha'
def finit_beta(self):
"""Get path to initially trained lda beta file.
Returns
-------
str
Path to initially trained lda beta file.
"""
return self.prefix + 'train_out/' + 'initial-lda.beta'
def flda_ss(self):
"""Get path to initial lda binary file.
Returns
-------
str
Path to initial lda binary file.
"""
return self.prefix + 'train_out/' + 'initial-lda-ss.dat'
def fcorpustxt(self):
"""Get path to temporary file.
Returns
-------
str
Path to multiple train binary file.
"""
return self.prefix + 'train-mult.dat'
def fcorpus(self):
"""Get path to corpus file.
Returns
-------
str
Path to corpus file.
"""
return self.prefix + 'train'
def ftimeslices(self):
"""Get path to time slices binary file.
Returns
-------
str
Path to time slices binary file.
"""
return self.prefix + 'train-seq.dat'
def convert_input(self, corpus, time_slices):
"""Convert corpus into LDA-C format by :class:`~gensim.corpora.bleicorpus.BleiCorpus` and save to temp file.
Path to temporary file produced by :meth:`~gensim.models.wrappers.dtmmodel.DtmModel.ftimeslices`.
Parameters
----------
corpus : iterable of iterable of (int, float)
Corpus in BoW format.
time_slices : list of int
Sequence of timestamps.
"""
logger.info("serializing temporary corpus to %s", self.fcorpustxt())
# write out the corpus in a file format that DTM understands:
corpora.BleiCorpus.save_corpus(self.fcorpustxt(), corpus)
with utils.open(self.ftimeslices(), 'wb') as fout:
fout.write(utils.to_utf8(str(len(self.time_slices)) + "\n"))
for sl in time_slices:
fout.write(utils.to_utf8(str(sl) + "\n"))
def train(self, corpus, time_slices, mode, model):
"""Train DTM model.
Parameters
----------
corpus : iterable of iterable of (int, int)
Collection of texts in BoW format.
time_slices : list of int
Sequence of timestamps.
mode : {'fit', 'time'}, optional
            Controls the mode of the model: 'fit' is for training, 'time' is for analyzing documents through time
            according to a DTM, essentially a held-out set.
        model : {'fixed', 'dtm'}, optional
            Controls which model will be run: 'fixed' is for DIM and 'dtm' for DTM.
"""
self.convert_input(corpus, time_slices)
arguments = \
"--ntopics={p0} --model={mofrl} --mode={p1} --initialize_lda={p2} --corpus_prefix={p3} " \
"--outname={p4} --alpha={p5}".format(
p0=self.num_topics, mofrl=model, p1=mode, p2=self.initialize_lda,
p3=self.fcorpus(), p4=self.foutname(), p5=self.alpha
)
params = \
"--lda_max_em_iter={p0} --lda_sequence_min_iter={p1} --lda_sequence_max_iter={p2} " \
"--top_chain_var={p3} --rng_seed={p4} ".format(
p0=self.lda_max_em_iter, p1=self.lda_sequence_min_iter, p2=self.lda_sequence_max_iter,
p3=self.top_chain_var, p4=self.rng_seed
)
arguments = arguments + " " + params
logger.info("training DTM with args %s", arguments)
cmd = [self.dtm_path] + arguments.split()
logger.info("Running command %s", cmd)
check_output(args=cmd, stderr=PIPE)
self.em_steps = np.loadtxt(self.fem_steps())
self.init_ss = np.loadtxt(self.flda_ss())
        # self.initialize_lda is the string "true"/"false", so compare explicitly.
        if self.initialize_lda == "true":
self.init_alpha = np.loadtxt(self.finit_alpha())
self.init_beta = np.loadtxt(self.finit_beta())
self.lhood_ = np.loadtxt(self.fout_liklihoods())
# document-topic proportions
self.gamma_ = np.loadtxt(self.fout_gamma())
        # cast to correct shape, gamma[5,10] is the proportion of the 10th topic
# in doc 5
self.gamma_.shape = (self.lencorpus, self.num_topics)
# normalize proportions
self.gamma_ /= self.gamma_.sum(axis=1)[:, np.newaxis]
self.lambda_ = np.zeros((self.num_topics, self.num_terms * len(self.time_slices)))
self.obs_ = np.zeros((self.num_topics, self.num_terms * len(self.time_slices)))
for t in range(self.num_topics):
topic = "%03d" % t
self.lambda_[t, :] = np.loadtxt(self.fout_prob().format(i=topic))
self.obs_[t, :] = np.loadtxt(self.fout_observations().format(i=topic))
# cast to correct shape, lambda[5,10,0] is the proportion of the 10th
# topic in doc 5 at time 0
self.lambda_.shape = (self.num_topics, self.num_terms, len(self.time_slices))
self.obs_.shape = (self.num_topics, self.num_terms, len(self.time_slices))
# extract document influence on topics for each time slice
# influences_time[0] , influences at time 0
if model == 'fixed':
for k, t in enumerate(self.time_slices):
stamp = "%03d" % k
influence = np.loadtxt(self.fout_influence().format(i=stamp))
influence.shape = (t, self.num_topics)
# influence[2,5] influence of document 2 on topic 5
self.influences_time.append(influence)
def print_topics(self, num_topics=10, times=5, num_words=10):
"""Alias for :meth:`~gensim.models.wrappers.dtmmodel.DtmModel.show_topics`.
Parameters
----------
num_topics : int, optional
Number of topics to return, set `-1` to get all topics.
times : int, optional
Number of times.
num_words : int, optional
Number of words.
Returns
-------
list of str
Topics as a list of strings
"""
return self.show_topics(num_topics, times, num_words, log=True)
def show_topics(self, num_topics=10, times=5, num_words=10, log=False, formatted=True):
"""Get the `num_words` most probable words for `num_topics` number of topics at 'times' time slices.
Parameters
----------
num_topics : int, optional
Number of topics to return, set `-1` to get all topics.
times : int, optional
Number of times.
num_words : int, optional
Number of words.
log : bool, optional
THIS PARAMETER WILL BE IGNORED.
formatted : bool, optional
If `True` - return the topics as a list of strings, otherwise as lists of (weight, word) pairs.
Returns
-------
list of str
Topics as a list of strings (if formatted=True) **OR**
list of (float, str)
Topics as list of (weight, word) pairs (if formatted=False)
"""
if num_topics < 0 or num_topics >= self.num_topics:
num_topics = self.num_topics
chosen_topics = range(num_topics)
else:
num_topics = min(num_topics, self.num_topics)
chosen_topics = range(num_topics)
if times < 0 or times >= len(self.time_slices):
times = len(self.time_slices)
chosen_times = range(times)
else:
times = min(times, len(self.time_slices))
chosen_times = range(times)
shown = []
for time in chosen_times:
for i in chosen_topics:
if formatted:
topic = self.print_topic(i, time, topn=num_words)
else:
topic = self.show_topic(i, time, topn=num_words)
shown.append(topic)
return shown
def show_topic(self, topicid, time, topn=50, num_words=None):
"""Get `num_words` most probable words for the given `topicid`.
Parameters
----------
topicid : int
Id of topic.
time : int
Timestamp.
topn : int, optional
            Number of most probable words to return.
num_words : int, optional
DEPRECATED PARAMETER, use `topn` instead.
Returns
-------
list of (float, str)
Sequence of probable words, as a list of `(word_probability, word)`.
"""
if num_words is not None: # deprecated num_words is used
warnings.warn("The parameter `num_words` is deprecated, will be removed in 4.0.0, use `topn` instead.")
topn = num_words
topics = self.lambda_[:, :, time]
topic = topics[topicid]
# likelihood to probability
topic = np.exp(topic)
# normalize to probability dist
topic = topic / topic.sum()
# sort according to prob
bestn = matutils.argsort(topic, topn, reverse=True)
beststr = [(topic[idx], self.id2word[idx]) for idx in bestn]
return beststr
def print_topic(self, topicid, time, topn=10, num_words=None):
"""Get the given topic, formatted as a string.
Parameters
----------
topicid : int
Id of topic.
time : int
Timestamp.
topn : int, optional
            Number of most probable words to include.
num_words : int, optional
DEPRECATED PARAMETER, use `topn` instead.
Returns
-------
str
The given topic in string format, like '0.132*someword + 0.412*otherword + ...'.
"""
if num_words is not None: # deprecated num_words is used
warnings.warn("The parameter `num_words` is deprecated, will be removed in 4.0.0, use `topn` instead.")
topn = num_words
return ' + '.join('%.3f*%s' % v for v in self.show_topic(topicid, time, topn=topn))
def dtm_vis(self, corpus, time):
"""Get data specified by pyLDAvis format.
Parameters
----------
corpus : iterable of iterable of (int, float)
Collection of texts in BoW format.
time : int
            Index of the time slice to visualise.
Notes
-----
All of these are needed to visualise topics for DTM for a particular time-slice via pyLDAvis.
Returns
-------
doc_topic : numpy.ndarray
Document-topic proportions.
topic_term : numpy.ndarray
Calculated term of topic suitable for pyLDAvis format.
doc_lengths : list of int
            Length of each document in the corpus.
term_frequency : numpy.ndarray
Frequency of each word from vocab.
vocab : list of str
            List of words from the corpus.
"""
topic_term = np.exp(self.lambda_[:, :, time]) / np.exp(self.lambda_[:, :, time]).sum()
topic_term *= self.num_topics
doc_topic = self.gamma_
doc_lengths = [len(doc) for doc_no, doc in enumerate(corpus)]
term_frequency = np.zeros(len(self.id2word))
for doc_no, doc in enumerate(corpus):
for pair in doc:
term_frequency[pair[0]] += pair[1]
vocab = [self.id2word[i] for i in range(0, len(self.id2word))]
# returns numpy arrays for doc_topic proportions, topic_term proportions, and document_lengths, term_frequency.
# these should be passed to the `pyLDAvis.prepare` method to visualise one time-slice of DTM topics.
return doc_topic, topic_term, doc_lengths, term_frequency, vocab
def dtm_coherence(self, time, num_words=20):
"""Get all topics of a particular time-slice without probability values for it to be used.
For either "u_mass" or "c_v" coherence.
Parameters
----------
num_words : int
Number of words.
time : int
Timestamp
Returns
-------
coherence_topics : list of list of str
All topics of a particular time-slice without probability values for it to be used.
Warnings
--------
TODO: because of print format right now can only return for 1st time-slice, should we fix the coherence
printing or make changes to the print statements to mirror DTM python?
"""
coherence_topics = []
for topic_no in range(0, self.num_topics):
topic = self.show_topic(topicid=topic_no, time=time, topn=num_words)
coherence_topic = []
for prob, word in topic:
coherence_topic.append(word)
coherence_topics.append(coherence_topic)
return coherence_topics
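# Illustrative sketch (not part of the original module): training a DtmModel on a
# tiny hypothetical corpus. The binary path, corpus and time slices below are
# placeholders; a real run needs the compiled DTM binary on disk.
def _dtm_example():
    """Train a toy DTM with two time slices and print its first topic."""
    corpus = [[(0, 2), (1, 1)], [(1, 3)], [(0, 1), (2, 1)], [(2, 2)]]
    id2word = {0: 'cat', 1: 'dog', 2: 'fish'}
    model = DtmModel(
        '/path/to/dtm/main',   # placeholder path to the DTM binary
        corpus=corpus,
        time_slices=[2, 2],    # two documents per time slice
        num_topics=2,
        id2word=id2word,
        initialize_lda=True,
    )
    print(model.show_topic(topicid=0, time=0, topn=3))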
|
import copy
import hmac
import logging
import uuid
from homeassistant.components import http
from homeassistant.const import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED
from homeassistant.core import callback
from homeassistant.helpers import template
import homeassistant.util.dt as dt_util
from .const import (
API_PASSWORD,
ATTR_MAIN_TEXT,
ATTR_REDIRECTION_URL,
ATTR_STREAM_URL,
ATTR_TITLE_TEXT,
ATTR_UID,
ATTR_UPDATE_DATE,
CONF_AUDIO,
CONF_DISPLAY_URL,
CONF_PASSWORD,
CONF_TEXT,
CONF_TITLE,
CONF_UID,
DATE_FORMAT,
)
_LOGGER = logging.getLogger(__name__)
FLASH_BRIEFINGS_API_ENDPOINT = "/api/alexa/flash_briefings/{briefing_id}"
@callback
def async_setup(hass, flash_briefing_config):
"""Activate Alexa component."""
hass.http.register_view(AlexaFlashBriefingView(hass, flash_briefing_config))
class AlexaFlashBriefingView(http.HomeAssistantView):
"""Handle Alexa Flash Briefing skill requests."""
url = FLASH_BRIEFINGS_API_ENDPOINT
requires_auth = False
name = "api:alexa:flash_briefings"
def __init__(self, hass, flash_briefings):
"""Initialize Alexa view."""
super().__init__()
self.flash_briefings = copy.deepcopy(flash_briefings)
template.attach(hass, self.flash_briefings)
@callback
def get(self, request, briefing_id):
"""Handle Alexa Flash Briefing request."""
_LOGGER.debug("Received Alexa flash briefing request for: %s", briefing_id)
if request.query.get(API_PASSWORD) is None:
err = "No password provided for Alexa flash briefing: %s"
_LOGGER.error(err, briefing_id)
return b"", HTTP_UNAUTHORIZED
if not hmac.compare_digest(
request.query[API_PASSWORD].encode("utf-8"),
self.flash_briefings[CONF_PASSWORD].encode("utf-8"),
):
err = "Wrong password for Alexa flash briefing: %s"
_LOGGER.error(err, briefing_id)
return b"", HTTP_UNAUTHORIZED
if not isinstance(self.flash_briefings.get(briefing_id), list):
err = "No configured Alexa flash briefing was found for: %s"
_LOGGER.error(err, briefing_id)
return b"", HTTP_NOT_FOUND
briefing = []
for item in self.flash_briefings.get(briefing_id, []):
output = {}
if item.get(CONF_TITLE) is not None:
if isinstance(item.get(CONF_TITLE), template.Template):
output[ATTR_TITLE_TEXT] = item[CONF_TITLE].async_render(
parse_result=False
)
else:
output[ATTR_TITLE_TEXT] = item.get(CONF_TITLE)
if item.get(CONF_TEXT) is not None:
if isinstance(item.get(CONF_TEXT), template.Template):
output[ATTR_MAIN_TEXT] = item[CONF_TEXT].async_render(
parse_result=False
)
else:
output[ATTR_MAIN_TEXT] = item.get(CONF_TEXT)
uid = item.get(CONF_UID)
if uid is None:
uid = str(uuid.uuid4())
output[ATTR_UID] = uid
if item.get(CONF_AUDIO) is not None:
if isinstance(item.get(CONF_AUDIO), template.Template):
output[ATTR_STREAM_URL] = item[CONF_AUDIO].async_render(
parse_result=False
)
else:
output[ATTR_STREAM_URL] = item.get(CONF_AUDIO)
if item.get(CONF_DISPLAY_URL) is not None:
if isinstance(item.get(CONF_DISPLAY_URL), template.Template):
output[ATTR_REDIRECTION_URL] = item[CONF_DISPLAY_URL].async_render(
parse_result=False
)
else:
output[ATTR_REDIRECTION_URL] = item.get(CONF_DISPLAY_URL)
output[ATTR_UPDATE_DATE] = dt_util.utcnow().strftime(DATE_FORMAT)
briefing.append(output)
return self.json(briefing)
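# Illustrative sketch (not part of the original module): the shape of a single
# flash briefing item returned by AlexaFlashBriefingView.get(). The field values
# are hypothetical; the attribute names come from the constants imported above.
_EXAMPLE_BRIEFING_ITEM = {
    ATTR_UID: "some-uuid",
    ATTR_TITLE_TEXT: "Morning update",
    ATTR_MAIN_TEXT: "It is 21 degrees and sunny.",
    ATTR_STREAM_URL: "https://example.com/briefing.mp3",
    ATTR_REDIRECTION_URL: "https://example.com",
    ATTR_UPDATE_DATE: "2020-01-01T07:00:00.0Z",
}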
|
from hatasmota.const import AUTOMATION_TYPE_TRIGGER
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import device_trigger
from .const import DATA_REMOVE_DISCOVER_COMPONENT, DATA_UNSUB
from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW
async def async_remove_automations(hass, device_id):
"""Remove automations for a Tasmota device."""
await device_trigger.async_remove_triggers(hass, device_id)
async def async_setup_entry(hass, config_entry):
"""Set up Tasmota device automation dynamically through discovery."""
async def async_device_removed(event):
"""Handle the removal of a device."""
if event.data["action"] != "remove":
return
await async_remove_automations(hass, event.data["device_id"])
async def async_discover(tasmota_automation, discovery_hash):
"""Discover and add a Tasmota device automation."""
if tasmota_automation.automation_type == AUTOMATION_TYPE_TRIGGER:
await device_trigger.async_setup_trigger(
hass, tasmota_automation, config_entry, discovery_hash
)
hass.data[
DATA_REMOVE_DISCOVER_COMPONENT.format("device_automation")
] = async_dispatcher_connect(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format("device_automation", "tasmota"),
async_discover,
)
hass.data[DATA_UNSUB].append(
hass.bus.async_listen(EVENT_DEVICE_REGISTRY_UPDATED, async_device_removed)
)
|
import asyncio
import logging
from pymonoprice import get_monoprice
from serial import SerialException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
CONF_NOT_FIRST_RUN,
DOMAIN,
FIRST_RUN,
MONOPRICE_OBJECT,
UNDO_UPDATE_LISTENER,
)
PLATFORMS = ["media_player"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Monoprice 6-Zone Amplifier component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Monoprice 6-Zone Amplifier from a config entry."""
port = entry.data[CONF_PORT]
try:
monoprice = await hass.async_add_executor_job(get_monoprice, port)
except SerialException as err:
_LOGGER.error("Error connecting to Monoprice controller at %s", port)
raise ConfigEntryNotReady from err
# double negative to handle absence of value
first_run = not bool(entry.data.get(CONF_NOT_FIRST_RUN))
if first_run:
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_NOT_FIRST_RUN: True}
)
undo_listener = entry.add_update_listener(_update_listener)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
MONOPRICE_OBJECT: monoprice,
UNDO_UPDATE_LISTENER: undo_listener,
FIRST_RUN: first_run,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
|
import zipfile
import os
import tempfile
import time
import shutil
import datetime
import stat
from io import BytesIO
from stashutils.fsi import base
from stashutils.fsi import errors
# TODO: check filename bug when writing
class ZipfileFSI(base.BaseFSI):
"""FSI for zipfiles"""
def __init__(self, logger):
base.BaseFSI.__init__(self, logger)
self.logger = logger
self.path = "/"
self.zf = None
self.is_new = True
self.dirs = ["/"] # list of dirs with no files in them
self.log("Warning: The ZipfileFSI has some unfixed bugs!\n")
# ^^^ These bugs are beyond my abilities (and they seem to be case
# dependent)
def abspath(self, path):
"""returns the absolute path for path."""
p = os.path.join(self.path, path)
while p.startswith("/"):
p = p[1:]
return p
def _getdirs(self):
"""returns a list of all dirs"""
dirs = ["/"] + self.dirs
for name in self.zf.namelist():
dirpath = os.path.dirname(name)
if dirpath not in dirs:
dirs.append(dirpath)
return dirs
    def _update(self, remove=()):
"""create a new zipfile with some changes"""
nzfp = os.path.join(tempfile.gettempdir(), "tempzip_{t}.zip".format(t=time.time()))
op = self.zf.fp.name
pswd = self.zf.pwd
comment = self.zf.comment
nzf = zipfile.ZipFile(nzfp, "w", self.zf.compression, True)
infos = self.zf.infolist()
for zipinfo in infos:
add = True
for rm in remove:
if zipinfo.filename.startswith(rm):
add = False
break
if not add:
continue
ofo = self.zf.open(zipinfo)
nzf.writestr(zipinfo, ofo.read())
self.zf.close()
os.remove(op)
nzf.close()
shutil.copy(nzfp, op)
self.zf = zipfile.ZipFile(op, "a", zipfile.ZIP_DEFLATED, True)
self.zf.setpassword(pswd)
self.zf.comment = comment
def connect(self, *args):
"""open the zipfile"""
        if len(args) not in (1, 2):
            return "expected one or two arguments!"
ap = os.path.abspath(args[0])
if os.path.exists(ap):
if not zipfile.is_zipfile(ap):
return "not a zipfile"
try:
self.zf = zipfile.ZipFile(ap, "a", zipfile.ZIP_DEFLATED, True)
self.is_new = False
            except Exception as e:
                return str(e)
if len(args) == 2:
self.zf.setpassword(args[1])
return True
else:
try:
self.zf = zipfile.ZipFile(ap, "w", zipfile.ZIP_DEFLATED, True)
self.is_new = True
            except Exception as e:
                return str(e)
return True
def repr(self):
"""returns a string representing this fsi"""
template = "{inz} Zipfile at '{p}'"
inz = "New" if self.is_new else "Open"
return template.format(inz=inz, p=self.zf.fp.name)
def listdir(self, path="."):
ap = self.abspath(path)
dirlist = self._getdirs()
namelist = self.zf.namelist()
names = dirlist + namelist
content = []
for name in names:
dirname = os.path.dirname(name)
if dirname == ap:
content.append(name.replace(dirname, ""))
return content
def cd(self, path):
np = self.abspath(path)
dirs = self._getdirs()
if np not in dirs:
raise errors.OperationFailure("Dir does not exists!")
self.path = np
def get_path(self):
return self.path
def remove(self, path):
ap = self.abspath(path)
self._update(remove=[ap])
def mkdir(self, name):
ap = self.abspath(name)
self.dirs.append(ap)
def close(self):
self.zf.close()
def isdir(self, name):
ap = self.abspath(name)
return ((ap in self._getdirs()) and not self.isfile(name))
def isfile(self, name):
ap = self.abspath(name)
return (ap in self.zf.namelist())
def stat(self, name):
ap = self.abspath(name)
self.log("stat: {ap}\n".format(ap=ap))
isdir = self.isdir(name)
isfile = self.isfile(name)
if not (isdir or isfile):
self.log("stat-target not found.\n")
raise errors.OperationFailure("Not found!")
if isdir:
size = 1
mtime = None
else:
zipinfo = self.zf.getinfo(ap)
size = zipinfo.file_size
timestamp = zipinfo.date_time
dt = datetime.datetime(*timestamp)
mtime = (dt - datetime.datetime(1970, 1, 1)).total_seconds()
type_ = (stat.S_IFREG if isfile else stat.S_IFDIR)
mode = base.calc_mode(type=type_)
self.log("stat return\n")
return base.make_stat(size=size, mtime=mtime, ctime=mtime, mode=mode)
def open(self, name, mode="r", buffering=0):
ap = self.abspath(name)
self.log("open {ap} with mode {m}\n".format(ap=ap, m=mode))
if "r" in mode:
try:
reader = ZipReader(self, ap, mode, buffering)
            except Exception:
raise errors.OperationFailure("Not found!")
else:
return reader
elif "w" in mode:
if ap in self.zf.namelist():
self._update(remove=[ap])
return ZipWriter(self, ap, mode, buffering)
else:
raise errors.OperationFailure("Unsupported mode!")
class ZipWriter(object):
"""utility class used for writing to a ZipFile."""
def __init__(self, root, fp, mode, buffering):
self.root = root
self.fp = fp
self.name = fp
self.buffering = buffering
self.mode = mode
self.sio = BytesIO()
self.closed = False
def close(self):
"""called on file close"""
if self.closed:
return
self.closed = True
content = self.sio.getvalue()
self.sio.close()
self.root.zf.writestr(self.fp, content)
def __getattr__(self, name):
return getattr(self.sio, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
self.close()
def __del__(self):
self.close()
class ZipReader(ZipWriter):
"""utility class for reading a file from a zip."""
def __init__(self, root, fp, mode, buffering):
self.root = root
self.fp = fp
self.name = fp
self.buffering = buffering
self.mode = mode
self.sio = BytesIO(self.root.zf.read(fp))
self.closed = False
def close(self):
if self.closed:
return
self.closed = True
self.sio.close()
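# Illustrative sketch (not part of the original module): how the FSI might be
# driven directly. The archive path and logger below are placeholders; connect()
# returns True on success or an error string otherwise.
def _zipfsi_example(archive_path="example.zip"):
    """Open (or create) an archive, write one file and list the root."""
    fsi = ZipfileFSI(logger=print)
    result = fsi.connect(archive_path)
    if result is not True:
        raise RuntimeError(result)
    with fsi.open("hello.txt", "w") as f:
        f.write(b"hello world")
    listing = fsi.listdir("/")
    fsi.close()
    return listing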
|
import logging
from urllib.parse import urljoin
import re
import pandas
import requests
from netort.data_manager import DataSession, thread_safe_property
import threading as th
from requests import ConnectionError
from yandextank.plugins.Phantom.reader import string_to_df_microsec
from yandextank.common.interfaces import AbstractPlugin,\
MonitoringDataListener
logger = logging.getLogger(__name__) # pylint: disable=C0103
class Plugin(AbstractPlugin, MonitoringDataListener):
SECTION = 'neuploader'
importance_high = {
'interval_real',
'proto_code',
'net_code'
}
OVERALL = '__overall__'
LUNA_LINK = 'https://luna.yandex-team.ru/tests/'
PLANNED_RPS_METRICS_NAME = 'planned_rps'
ACTUAL_RPS_METRICS_NAME = 'actual_rps'
def __init__(self, core, cfg, name):
super(Plugin, self).__init__(core, cfg, name)
self.clients_cfg = [{'type': 'luna',
'api_address': self.cfg.get('api_address'),
'db_name': self.cfg.get('db_name'),
'max_df_len': self.cfg.get('max_df_len')}]
self.metrics_objs = {} # map of case names and metric objects
self.monitoring_metrics = {}
self.rps_metrics = {
'actual_rps_metrics_obj': None,
'planned_rps_metrics_obj': None,
'actual_rps_latest': pandas.Series([])
}
self.rps_uploader = th.Thread(target=self.upload_planned_rps)
self._col_map = None
self._data_session = None
self._meta = None
self._test_name = None
@property
def meta(self):
if self._meta is None:
self._meta = dict(self.get_lp_meta(), **self.cfg.get('meta', {}))
return self._meta
@property
def test_name(self):
if self._test_name is None:
self._test_name = self.cfg.get('test_name') or self.core.info.get_value(['uploader', 'job_name'])
return self._test_name
def configure(self):
pass
def start_test(self):
try:
self.reader = self.core.job.generator_plugin.get_reader(parser=string_to_df_microsec)
except TypeError:
logger.error('Generator plugin does not support NeUploader')
self.is_test_finished = lambda: -1
self.reader = []
@thread_safe_property
def col_map(self):
return {
'interval_real': self.data_session.new_true_metric,
'connect_time': self.data_session.new_true_metric,
'send_time': self.data_session.new_true_metric,
'latency': self.data_session.new_true_metric,
'receive_time': self.data_session.new_true_metric,
'interval_event': self.data_session.new_true_metric,
'net_code': self.data_session.new_event_metric,
'proto_code': self.data_session.new_event_metric
}
@thread_safe_property
def data_session(self):
"""
:rtype: DataSession
"""
if self._data_session is None:
config_filenames = {'validated_conf.yaml', 'configinitial.yaml'}
self._data_session = DataSession({'clients': self.clients_cfg},
tankapi_info=self.tankapi_info(),
config_filenames=config_filenames,
artifacts_dir=self.core.artifacts_dir,
test_start=self.core.info.get_value(['generator', 'test_start'], 0) * 10**6)
self.add_cleanup(self._cleanup)
self._data_session.update_job(dict({'name': self.test_name,
'__type': 'tank'},
**self.meta))
job_no = self._data_session.clients[0].job_number
if job_no:
self.publish('job_no', int(job_no))
self.publish('web_link', urljoin(self.LUNA_LINK, job_no))
return self._data_session
def tankapi_info(self):
meta = self.cfg.get('meta', {})
return {
'host': meta.get('tankapi_host'),
'port': meta.get('tankapi_port'),
'local_id': self.core.test_id
}
def _cleanup(self):
self.upload_actual_rps(data=pandas.DataFrame([]), last_piece=True)
uploader_metainfo = self.get_lp_meta()
autostop_info = self.get_autostop_info()
regressions = self.get_regressions_names(uploader_metainfo)
lp_link = self.core.info.get_value(['uploader', 'web_link'])
meta = self.meta
meta.update(autostop_info)
meta['regression'] = regressions
meta['lunapark_link'] = lp_link
self.data_session.update_job(meta)
self.data_session.close(test_end=self.core.info.get_value(['generator', 'test_end'], 0) * 10**6)
def is_test_finished(self):
df = next(self.reader)
if df is not None:
self.upload(df)
return -1
def monitoring_data(self, data_list):
self.upload_monitoring(data_list)
def post_process(self, retcode):
try:
self.rps_uploader.start()
for chunk in self.reader:
if chunk is not None:
self.upload(chunk)
self.upload_actual_rps(data=pandas.DataFrame([]), last_piece=True)
if self.rps_uploader.is_alive():
self.rps_uploader.join()
except KeyboardInterrupt:
logger.warning('Caught KeyboardInterrupt on Neuploader')
self._cleanup()
return retcode
@property
def is_telegraf(self):
return True
def get_metric_obj(self, col, case):
"""
        Factory for metric objects:
        checks existing metrics and creates a new metric if it does not exist.
:param col: str with column name
:param case: str with case name
:return: metric object
"""
case_metrics = self.metrics_objs.get(case)
if case_metrics is None:
for col, constructor in self.col_map.items():
self.metrics_objs.setdefault(case, {})[col] = constructor(
dict(self.meta,
name=col,
source='tank',
importance='high' if col in self.importance_high else ''),
raw=False, aggregate=True,
parent=self.get_metric_obj(col, self.OVERALL) if case != self.OVERALL else None,
case=case if case != self.OVERALL else None
)
return self.metrics_objs[case][col]
def upload(self, df):
self.upload_actual_rps(df)
df_cases_set = set()
for row in df.itertuples():
if row.tag and isinstance(row.tag, str):
df_cases_set.add(row.tag)
if '|' in row.tag:
for tag in row.tag.split('|'):
df_cases_set.add(tag)
for column in self.col_map:
overall_metric_obj = self.get_metric_obj(column, self.OVERALL)
df['value'] = df[column]
result_df = self.filter_df_by_case(df, self.OVERALL)
overall_metric_obj.put(result_df)
for case_name in df_cases_set:
case_metric_obj = self.get_metric_obj(column, case_name)
df['value'] = df[column]
result_df = self.filter_df_by_case(df, case_name)
case_metric_obj.put(result_df)
def upload_monitoring(self, data):
for metric_name, df in self.monitoring_data_to_dfs(data).items():
if metric_name not in self.monitoring_metrics:
panel, metric = metric_name.split(':', 1)
try:
group, name = metric.split('_', 1)
except ValueError:
name = metric
group = '_OTHER_'
self.monitoring_metrics[metric_name] =\
self.data_session.new_true_metric(
meta=dict(self.meta,
name=name,
group=group,
host=panel,
type='monitoring'))
self.monitoring_metrics[metric_name].put(df)
def upload_planned_rps(self):
""" Uploads planned rps as a raw metric """
df = self.parse_stpd()
if not df.empty:
self.rps_metrics['planned_rps_metrics_obj'] = self.data_session.new_true_metric(
meta=dict(self.meta, name=self.PLANNED_RPS_METRICS_NAME, source='tank'),
raw=True, aggregate=False, parent=None, case=None)
self.rps_metrics['planned_rps_metrics_obj'].put(df)
def upload_actual_rps(self, data, last_piece=False):
""" Upload actual rps metric """
if self.rps_metrics['actual_rps_metrics_obj'] is None:
self.rps_metrics['actual_rps_metrics_obj'] = self.data_session.new_true_metric(
meta=dict(self.meta, name=self.ACTUAL_RPS_METRICS_NAME),
raw=True, aggregate=False, parent=None, case=None
)
df = self.count_actual_rps(data, last_piece)
if not df.empty:
self.rps_metrics['actual_rps_metrics_obj'].put(df)
def parse_stpd(self):
""" Reads rps plan from stpd file """
stpd_file = self.core.info.get_value(['stepper', 'stpd_file'])
if not stpd_file:
logger.info('No stpd found, no planned_rps metrics')
return pandas.DataFrame()
rows_list = []
test_start = int(self.core.info.get_value(['generator', 'test_start'], 0) * 10 ** 3)
pattern = r'^\d+ (\d+)\s*.*$'
regex = re.compile(pattern)
try:
with open(stpd_file) as stpd:
for line in stpd:
if regex.match(line):
timestamp = int((int(line.split(' ')[1]) + test_start) / 1e3) # seconds
rows_list.append(timestamp)
except Exception:
logger.warning('Failed to parse stpd file')
logger.debug('', exc_info=True)
return pandas.DataFrame()
return self.rps_series_to_df(pandas.Series(rows_list))
def count_actual_rps(self, data, last_piece):
""" Counts actual rps on base of input chunk. Uses buffer for latest timestamp in df. """
if not last_piece and not data.empty:
concat_ts = pandas.concat([(data.ts / 1e6).astype(int), self.rps_metrics['actual_rps_latest']])
self.rps_metrics['actual_rps_latest'] = concat_ts.loc[lambda s: s == concat_ts.max()]
series_to_send = concat_ts.loc[lambda s: s < concat_ts.max()]
            df = self.rps_series_to_df(series_to_send) if series_to_send.any() else pandas.DataFrame([])
else:
df = self.rps_series_to_df(self.rps_metrics['actual_rps_latest'])
self.rps_metrics['actual_rps_latest'] = pandas.Series()
return df
@staticmethod
def monitoring_data_to_dfs(data):
panels = {}
for chunk in data:
for panel_name, content in chunk['data'].items():
if panel_name in panels:
for metric_name, value in content['metrics'].items():
if metric_name in panels[panel_name]:
panels[panel_name][metric_name]['value'].append(value)
panels[panel_name][metric_name]['ts'].append(chunk['timestamp'])
else:
panels[panel_name][metric_name] = {'value': [value], 'ts': [chunk['timestamp']]}
else:
panels[panel_name] = {name: {'value': [value], 'ts': [chunk['timestamp']]} for name, value in content['metrics'].items()}
return {'{}:{}'.format(panelk, name): pandas.DataFrame({'ts': [ts * 1000000 for ts in values['ts']], 'value': values['value']})
for panelk, panelv in panels.items() for name, values in panelv.items()}
@staticmethod
def rps_series_to_df(series):
df = series.value_counts().to_frame(name='value')
df_to_send = df.rename_axis('ts')
df_to_send.reset_index(inplace=True)
df_to_send.loc[:, 'ts'] = (df_to_send['ts'] * 1e6).astype(int)
return df_to_send
@staticmethod
def filter_df_by_case(df, case):
"""
Filter dataframe by case name. If case is '__overall__', return all rows.
:param df: DataFrame
:param case: str with case name
:return: DataFrame with columns 'ts' and 'value'
"""
case = case.strip()
return df[['ts', 'value']] if case == Plugin.OVERALL else df[df.tag.str.strip() == case][['ts', 'value']]
def get_lp_meta(self):
uploader_meta = self.core.info.get_value(['uploader'])
if not uploader_meta:
logger.info('No uploader metainfo found')
return {}
else:
meta_tags_names = ['component', 'description', 'name', 'person', 'task', 'version', 'lunapark_jobno']
meta_tags = {key: uploader_meta.get(key) for key in meta_tags_names if key in uploader_meta}
meta_tags.update({k: v if v is not None else '' for k, v in uploader_meta.get('meta', {}).items()})
return meta_tags
@staticmethod
def get_regressions_names(uploader_metainfo):
task, component_name = uploader_metainfo.get('task'), uploader_metainfo.get('component')
if not task or not component_name:
return []
project_name = task.split('-')[0]
lp_api_url = 'https://lunapark.yandex-team.ru/api/regress/{}/componentlist.json'.format(project_name)
try:
componentlist =\
requests.get(lp_api_url).json()
except (ValueError, ConnectionError):
logger.info("Failed to fetch data from {}".format(lp_api_url), exc_info=True)
return []
for component in componentlist:
try:
if component['name'] == component_name:
services = component['services']
if len(services) == 0:
services = ['__OTHER__']
return ['{}_{}'.format(project_name, s).replace(' ', '_') for s in services]
except KeyError:
pass
else:
return []
def get_autostop_info(self):
autostop_info = self.core.info.get_value(['autostop'])
if autostop_info:
autostop_rps = autostop_info.get('rps', 0)
autostop_reason = autostop_info.get('reason', '')
self.log.warning('Autostop: %s %s', autostop_rps, autostop_reason)
return {'autostop_rps': autostop_rps, 'autostop_reason': autostop_reason}
else:
return {}
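# Illustrative sketch (not part of the original module): the shape of the
# monitoring chunks consumed by Plugin.monitoring_data_to_dfs. The host name,
# metric names and values below are made up for demonstration only.
def _monitoring_data_example():
    """Convert one hypothetical monitoring chunk into per-metric DataFrames."""
    chunk = {
        'timestamp': 1600000000,
        'data': {
            'localhost': {'metrics': {'cpu_user': 12.5, 'mem_used': 2048}},
        },
    }
    # Keys look like 'localhost:cpu_user'; each value is a DataFrame
    # with 'ts' (microseconds) and 'value' columns.
    return Plugin.monitoring_data_to_dfs([chunk])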
|
from homeassistant.components.abode import ATTR_DEVICE_ID
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
SERVICE_CLOSE_COVER,
SERVICE_OPEN_COVER,
STATE_CLOSED,
)
from .common import setup_platform
from tests.async_mock import patch
DEVICE_ID = "cover.garage_door"
async def test_entity_registry(hass):
"""Tests that the devices are registered in the entity registry."""
await setup_platform(hass, COVER_DOMAIN)
entity_registry = await hass.helpers.entity_registry.async_get_registry()
entry = entity_registry.async_get(DEVICE_ID)
assert entry.unique_id == "61cbz3b542d2o33ed2fz02721bda3324"
async def test_attributes(hass):
"""Test the cover attributes are correct."""
await setup_platform(hass, COVER_DOMAIN)
state = hass.states.get(DEVICE_ID)
assert state.state == STATE_CLOSED
assert state.attributes.get(ATTR_DEVICE_ID) == "ZW:00000007"
assert not state.attributes.get("battery_low")
assert not state.attributes.get("no_response")
assert state.attributes.get("device_type") == "Secure Barrier"
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Garage Door"
async def test_open(hass):
"""Test the cover can be opened."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.open_cover") as mock_open:
await hass.services.async_call(
COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True
)
await hass.async_block_till_done()
mock_open.assert_called_once()
async def test_close(hass):
"""Test the cover can be closed."""
await setup_platform(hass, COVER_DOMAIN)
with patch("abodepy.AbodeCover.close_cover") as mock_close:
await hass.services.async_call(
COVER_DOMAIN,
SERVICE_CLOSE_COVER,
{ATTR_ENTITY_ID: DEVICE_ID},
blocking=True,
)
await hass.async_block_till_done()
mock_close.assert_called_once()
|
from kombu import Connection, Exchange, Queue, Producer, Consumer
from kombu.asynchronous import Hub
hub = Hub()
exchange = Exchange('asynt')
queue = Queue('asynt', exchange, 'asynt')
def send_message(conn):
producer = Producer(conn)
producer.publish('hello world', exchange=exchange, routing_key='asynt')
print('message sent')
def on_message(message):
print(f'received: {message.body!r}')
message.ack()
hub.stop() # <-- exit after one message
if __name__ == '__main__':
conn = Connection('amqp://')
conn.register_with_event_loop(hub)
with Consumer(conn, [queue], on_message=on_message):
send_message(conn)
hub.run_forever()
|
import sys
import mne
def run():
"""Run command."""
parser = mne.commands.utils.get_optparser(
__file__, usage='mne show_fiff <file>')
parser.add_option("-t", "--tag", dest="tag",
help="provide information about this tag", metavar="TAG")
options, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
msg = mne.io.show_fiff(args[0], tag=options.tag).strip()
print(msg)
mne.utils.run_command_if_main()
|
try:
# Importing hunter to register its atexit handler early so it gets called
# late.
import hunter # pylint: disable=unused-import
except ImportError:
hunter = None
import sys
import faulthandler
import traceback
import signal
import importlib
import datetime
try:
import tkinter
except ImportError:
tkinter = None # type: ignore[assignment]
# NOTE: No qutebrowser or PyQt import should be done here, as some early
# initialization needs to take place before that!
START_TIME = datetime.datetime.now()
def _missing_str(name, *, webengine=False):
"""Get an error string for missing packages.
Args:
name: The name of the package.
webengine: Whether this is checking the QtWebEngine package
"""
blocks = ["Fatal error: <b>{}</b> is required to run qutebrowser but "
"could not be imported! Maybe it's not installed?".format(name),
"<b>The error encountered was:</b><br />%ERROR%"]
    lines = ['Please search for the python3 version of {} in your '
             "distribution's packages, or see "
'https://github.com/qutebrowser/qutebrowser/blob/master/doc/install.asciidoc'
.format(name)]
blocks.append('<br />'.join(lines))
if not webengine:
lines = ['<b>If you installed a qutebrowser package for your '
'distribution, please report this as a bug.</b>']
blocks.append('<br />'.join(lines))
return '<br /><br />'.join(blocks)
def _die(message, exception=None):
"""Display an error message using Qt and quit.
We import the imports here as we want to do other stuff before the imports.
Args:
message: The message to display.
exception: The exception object if we're handling an exception.
"""
from PyQt5.QtWidgets import QApplication, QMessageBox
from PyQt5.QtCore import Qt
if (('--debug' in sys.argv or '--no-err-windows' in sys.argv) and
exception is not None):
print(file=sys.stderr)
traceback.print_exc()
app = QApplication(sys.argv)
if '--no-err-windows' in sys.argv:
print(message, file=sys.stderr)
print("Exiting because of --no-err-windows.", file=sys.stderr)
else:
if exception is not None:
message = message.replace('%ERROR%', str(exception))
msgbox = QMessageBox(QMessageBox.Critical, "qutebrowser: Fatal error!",
message)
msgbox.setTextFormat(Qt.RichText)
msgbox.resize(msgbox.sizeHint())
msgbox.exec_()
app.quit()
sys.exit(1)
def init_faulthandler(fileobj=sys.__stderr__):
"""Enable faulthandler module if available.
    This prints a nice traceback on segfaults.
    We use sys.__stderr__ instead of sys.stderr here so this will still work
    when sys.stderr gets replaced, e.g. by "Python Tools for Visual Studio".
    Args:
        fileobj: An opened file object to write the traceback to.
"""
if fileobj is None:
# When run with pythonw.exe, sys.__stderr__ can be None:
# https://docs.python.org/3/library/sys.html#sys.__stderr__
# If we'd enable faulthandler in that case, we just get a weird
# exception, so we don't enable faulthandler if we have no stdout.
#
# Later when we have our data dir available we re-enable faulthandler
# to write to a file so we can display a crash to the user at the next
# start.
return
faulthandler.enable(fileobj)
if (hasattr(faulthandler, 'register') and hasattr(signal, 'SIGUSR1') and
sys.stderr is not None):
# If available, we also want a traceback on SIGUSR1.
# pylint: disable=no-member,useless-suppression
faulthandler.register(signal.SIGUSR1)
# pylint: enable=no-member,useless-suppression
def check_pyqt():
"""Check if PyQt core modules (QtCore/QtWidgets) are installed."""
for name in ['PyQt5.QtCore', 'PyQt5.QtWidgets']:
try:
importlib.import_module(name)
except ImportError as e:
text = _missing_str(name)
text = text.replace('<b>', '')
text = text.replace('</b>', '')
text = text.replace('<br />', '\n')
text = text.replace('%ERROR%', str(e))
if tkinter and '--no-err-windows' not in sys.argv:
root = tkinter.Tk()
root.withdraw()
tkinter.messagebox.showerror("qutebrowser: Fatal error!", text)
else:
print(text, file=sys.stderr)
if '--debug' in sys.argv or '--no-err-windows' in sys.argv:
print(file=sys.stderr)
traceback.print_exc()
sys.exit(1)
def qt_version(qversion=None, qt_version_str=None):
"""Get a Qt version string based on the runtime/compiled versions."""
if qversion is None:
from PyQt5.QtCore import qVersion
qversion = qVersion()
if qt_version_str is None:
from PyQt5.QtCore import QT_VERSION_STR
qt_version_str = QT_VERSION_STR
if qversion != qt_version_str:
return '{} (compiled {})'.format(qversion, qt_version_str)
else:
return qversion
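# Illustration (added; not part of the original module): qt_version() only
# appends the compile-time version when it differs from the runtime one, e.g.
#   qt_version(qversion='5.12.1', qt_version_str='5.12.0')  # '5.12.1 (compiled 5.12.0)'
#   qt_version(qversion='5.12.0', qt_version_str='5.12.0')  # '5.12.0'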
def check_qt_version():
"""Check if the Qt version is recent enough."""
from PyQt5.QtCore import (qVersion, QT_VERSION, PYQT_VERSION,
PYQT_VERSION_STR)
from pkg_resources import parse_version
parsed_qversion = parse_version(qVersion())
if (QT_VERSION < 0x050C00 or PYQT_VERSION < 0x050C00 or
parsed_qversion < parse_version('5.12.0')):
text = ("Fatal error: Qt >= 5.12.0 and PyQt >= 5.12.0 are required, "
"but Qt {} / PyQt {} is installed.".format(qt_version(),
PYQT_VERSION_STR))
_die(text)
def check_ssl_support():
"""Check if SSL support is available."""
try:
from PyQt5.QtNetwork import QSslSocket # pylint: disable=unused-import
except ImportError:
_die("Fatal error: Your Qt is built without SSL support.")
def _check_modules(modules):
"""Make sure the given modules are available."""
from qutebrowser.utils import log
for name, text in modules.items():
try:
# https://bitbucket.org/fdik/pypeg/commits/dd15ca462b532019c0a3be1d39b8ee2f3fa32f4e
# pylint: disable=bad-continuation
with log.py_warning_filter(
category=DeprecationWarning,
message=r'invalid escape sequence'
), log.py_warning_filter(
category=ImportWarning,
message=r'Not importing directory .*: missing __init__'
), log.py_warning_filter(
category=DeprecationWarning,
message=r'the imp module is deprecated',
):
# pylint: enable=bad-continuation
importlib.import_module(name)
except ImportError as e:
_die(text, e)
def check_libraries():
"""Check if all needed Python libraries are installed."""
modules = {
'pkg_resources': _missing_str("pkg_resources/setuptools"),
'pypeg2': _missing_str("pypeg2"),
'jinja2': _missing_str("jinja2"),
'pygments': _missing_str("pygments"),
'yaml': _missing_str("PyYAML"),
'attr': _missing_str("attrs"),
'PyQt5.QtQml': _missing_str("PyQt5.QtQml"),
'PyQt5.QtSql': _missing_str("PyQt5.QtSql"),
'PyQt5.QtOpenGL': _missing_str("PyQt5.QtOpenGL"),
}
_check_modules(modules)
def configure_pyqt():
"""Remove the PyQt input hook and enable overflow checking.
Doing this means we can't use the interactive shell anymore (which we don't
    anyway), but we can use pdb instead.
"""
from PyQt5 import QtCore
QtCore.pyqtRemoveInputHook()
try:
QtCore.pyqt5_enable_new_onexit_scheme(True) # type: ignore[attr-defined]
except AttributeError:
# Added in PyQt 5.13 somewhere, going to be the default in 5.14
pass
from qutebrowser.qt import sip
sip.enableoverflowchecking(True)
def init_log(args):
"""Initialize logging.
Args:
args: The argparse namespace.
"""
from qutebrowser.utils import log
log.init_log(args)
log.init.debug("Log initialized.")
def check_optimize_flag():
"""Check whether qutebrowser is running with -OO."""
from qutebrowser.utils import log
if sys.flags.optimize >= 2:
log.init.warning("Running on optimize level higher than 1, "
"unexpected behavior may occur.")
def early_init(args):
"""Do all needed early initialization.
Note that it's vital the other earlyinit functions get called in the right
order!
Args:
args: The argparse namespace.
"""
# First we initialize the faulthandler as early as possible, so we
# theoretically could catch segfaults occurring later during earlyinit.
init_faulthandler()
# Here we check if QtCore is available, and if not, print a message to the
# console or via Tk.
check_pyqt()
# Init logging as early as possible
init_log(args)
# Now we can be sure QtCore is available, so we can print dialogs on
# errors, so people only using the GUI notice them as well.
check_libraries()
check_qt_version()
configure_pyqt()
check_ssl_support()
check_optimize_flag()
|
import os
import click
from molecule import config
from molecule import logger
from molecule import util
from molecule.command import base as command_base
from molecule.command.init import base
LOG = logger.get_logger(__name__)
class Scenario(base.Base):
"""
.. program:: molecule init scenario --scenario-name bar --role-name foo
.. option:: molecule init scenario --scenario-name bar --role-name foo
Initialize a new scenario using a local _cookiecutter_ template. In
order to customise the role, please refer to the `init role` command.
.. program:: cd foo; molecule init scenario --scenario-name bar --role-name foo
.. option:: cd foo; molecule init scenario --scenario-name bar --role-name foo
Initialize an existing role with Molecule:
""" # noqa
def __init__(self, command_args):
self._command_args = command_args
def execute(self):
"""
        Execute the actions necessary to perform a `molecule init scenario`.
:return: None
"""
scenario_name = self._command_args['scenario_name']
role_name = os.getcwd().split(os.sep)[-1]
role_directory = util.abs_path(os.path.join(os.getcwd(), os.pardir))
msg = 'Initializing new scenario {}...'.format(scenario_name)
LOG.info(msg)
molecule_directory = config.molecule_directory(
os.path.join(role_directory, role_name))
scenario_directory = os.path.join(molecule_directory, scenario_name)
scenario_base_directory = os.path.dirname(scenario_directory)
if os.path.isdir(scenario_directory):
msg = ('The directory molecule/{} exists. '
'Cannot create new scenario.').format(scenario_name)
util.sysexit_with_message(msg)
scenario_base_directory = os.path.join(role_directory, role_name)
templates = [
'scenario/driver/{driver_name}'.format(**self._command_args),
'scenario/verifier/{verifier_name}'.format(**self._command_args),
]
for template in templates:
self._process_templates(template, self._command_args,
scenario_base_directory)
self._process_templates('molecule', self._command_args, role_directory)
role_directory = os.path.join(role_directory, role_name)
msg = 'Initialized scenario in {} successfully.'.format(
scenario_directory)
LOG.success(msg)
def _role_exists(ctx, param, value): # pragma: no cover
    # If the role name was not provided, assume the current directory hosts
    # the role and derive the role name from it.
if not value:
value = os.path.basename(os.getcwd())
role_directory = os.path.join(os.pardir, value)
if not os.path.exists(role_directory):
msg = ("The role '{}' not found. "
'Please choose the proper role name.').format(value)
util.sysexit_with_message(msg)
return value
def _default_scenario_exists(ctx, param, value): # pragma: no cover
if value == command_base.MOLECULE_DEFAULT_SCENARIO_NAME:
return value
default_scenario_directory = os.path.join(
'molecule', command_base.MOLECULE_DEFAULT_SCENARIO_NAME)
if not os.path.exists(default_scenario_directory):
        msg = ('The default scenario was not found. Please create a scenario '
"named '{}' first.").format(
command_base.MOLECULE_DEFAULT_SCENARIO_NAME)
util.sysexit_with_message(msg)
return value
@click.command()
@click.pass_context
@click.option(
'--dependency-name',
type=click.Choice(['galaxy']),
default='galaxy',
help='Name of dependency to initialize. (galaxy)')
@click.option(
'--driver-name',
'-d',
type=click.Choice(config.molecule_drivers()),
default='docker',
help='Name of driver to initialize. (docker)')
@click.option(
'--lint-name',
type=click.Choice(['yamllint']),
default='yamllint',
    help='Name of lint to initialize. (yamllint)')
@click.option(
'--provisioner-name',
type=click.Choice(['ansible']),
default='ansible',
help='Name of provisioner to initialize. (ansible)')
@click.option(
'--role-name',
'-r',
required=False,
callback=_role_exists,
help='Name of the role to create.')
@click.option(
'--scenario-name',
'-s',
default=command_base.MOLECULE_DEFAULT_SCENARIO_NAME,
required=True,
callback=_default_scenario_exists,
help='Name of the scenario to create. ({})'.format(
command_base.MOLECULE_DEFAULT_SCENARIO_NAME))
@click.option(
'--verifier-name',
type=click.Choice(config.molecule_verifiers()),
default='testinfra',
help='Name of verifier to initialize. (testinfra)')
def scenario(ctx, dependency_name, driver_name, lint_name, provisioner_name,
role_name, scenario_name, verifier_name): # pragma: no cover
""" Initialize a new scenario for use with Molecule. """
command_args = {
'dependency_name': dependency_name,
'driver_name': driver_name,
'lint_name': lint_name,
'provisioner_name': provisioner_name,
'role_name': role_name,
'scenario_name': scenario_name,
'subcommand': __name__,
'verifier_name': verifier_name,
}
if verifier_name == 'inspec':
command_args['verifier_lint_name'] = 'rubocop'
if verifier_name == 'goss':
command_args['verifier_lint_name'] = 'yamllint'
s = Scenario(command_args)
s.execute()
|
import unittest
import mock
from kalliope.core.NeuronModule import MissingParameterException
from kalliope.neurons.brain.brain import Brain
class TestBrain(unittest.TestCase):
def test_is_parameters_ok(self):
# valid neuron with boolean
synapse_name = "synapse_name"
enabled = True
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
brain_neuron = Brain(synapse_name=synapse_name, enabled=enabled)
self.assertTrue(brain_neuron._is_parameters_ok())
self.assertTrue(brain_neuron.enabled)
# valid neuron with boolean as string
synapse_name = "synapse_name"
enabled = "True"
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
brain_neuron = Brain(synapse_name=synapse_name, enabled=enabled)
self.assertTrue(brain_neuron._is_parameters_ok())
self.assertTrue(brain_neuron.enabled)
# valid neuron with boolean as string
synapse_name = "synapse_name"
enabled = "true"
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
brain_neuron = Brain(synapse_name=synapse_name, enabled=enabled)
self.assertTrue(brain_neuron._is_parameters_ok())
self.assertTrue(brain_neuron.enabled)
# invalid neuron with no synapse name
synapse_name = ""
enabled = "true"
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
with self.assertRaises(MissingParameterException):
Brain(synapse_name=synapse_name, enabled=enabled)
        # invalid neuron with empty enabled parameter
synapse_name = "test"
enabled = ""
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
with self.assertRaises(MissingParameterException):
Brain(synapse_name=synapse_name, enabled=enabled)
# valid neuron but enabled bool automatically converted to False
synapse_name = "test"
enabled = "no a bool"
with mock.patch("kalliope.neurons.brain.brain.Brain._update_brain"):
brain_neuron = Brain(synapse_name=synapse_name, enabled=enabled)
self.assertTrue(brain_neuron._is_parameters_ok())
self.assertFalse(brain_neuron.enabled)
if __name__ == '__main__':
unittest.main()
|
import logging
import os
from . import get_config
from .utils import logger, verbose, warn, ProgressBar
from .utils.check import int_like
from .fixes import _get_args
if 'MNE_FORCE_SERIAL' in os.environ:
_force_serial = True
else:
_force_serial = None
@verbose
def parallel_func(func, n_jobs, max_nbytes='auto', pre_dispatch='n_jobs',
total=None, prefer=None, verbose=None):
"""Return parallel instance with delayed function.
Util function to use joblib only if available
Parameters
----------
func : callable
A function.
n_jobs : int
Number of jobs to run in parallel.
max_nbytes : int, str, or None
Threshold on the minimum size of arrays passed to the workers that
triggers automated memory mapping. Can be an int in Bytes,
or a human-readable string, e.g., '1M' for 1 megabyte.
        Use None to disable memmapping of large arrays. Use 'auto' to
use the value set using mne.set_memmap_min_size.
pre_dispatch : int, or str, optional
See :class:`joblib.Parallel`.
total : int | None
If int, use a progress bar to display the progress of dispatched
jobs. This should only be used when directly iterating, not when
using ``split_list`` or :func:`np.array_split`.
If None (default), do not add a progress bar.
prefer : str | None
If str, can be "processes" or "threads". See :class:`joblib.Parallel`.
Ignored if the joblib version is too old to support this.
.. versionadded:: 0.18
%(verbose)s INFO or DEBUG
will print parallel status, others will not.
Returns
-------
parallel: instance of joblib.Parallel or list
The parallel object.
my_func: callable
``func`` if not parallel or delayed(func).
n_jobs: int
Number of jobs >= 0.
"""
should_print = (logger.level <= logging.INFO)
# for a single job, we don't need joblib
if n_jobs != 1:
try:
from joblib import Parallel, delayed
except ImportError:
try:
from sklearn.externals.joblib import Parallel, delayed
except ImportError:
warn('joblib not installed. Cannot run in parallel.')
n_jobs = 1
if n_jobs == 1:
n_jobs = 1
my_func = func
parallel = list
else:
        # check if joblib is recent enough to support memmapping
p_args = _get_args(Parallel.__init__)
joblib_mmap = ('temp_folder' in p_args and 'max_nbytes' in p_args)
cache_dir = get_config('MNE_CACHE_DIR', None)
if isinstance(max_nbytes, str) and max_nbytes == 'auto':
max_nbytes = get_config('MNE_MEMMAP_MIN_SIZE', None)
if max_nbytes is not None:
if not joblib_mmap and cache_dir is not None:
warn('"MNE_CACHE_DIR" is set but a newer version of joblib is '
'needed to use the memmapping pool.')
if joblib_mmap and cache_dir is None:
logger.info(
                'joblib supports memmapping pool but "MNE_CACHE_DIR" '
'is not set in MNE-Python config. To enable it, use, '
'e.g., mne.set_cache_dir(\'/tmp/shm\'). This will '
'store temporary files under /dev/shm and can result '
'in large memory savings.')
# create keyword arguments for Parallel
kwargs = {'verbose': 5 if should_print and total is None else 0}
kwargs['pre_dispatch'] = pre_dispatch
if 'prefer' in p_args:
kwargs['prefer'] = prefer
if joblib_mmap:
if cache_dir is None:
                max_nbytes = None  # disable memmapping
kwargs['temp_folder'] = cache_dir
kwargs['max_nbytes'] = max_nbytes
n_jobs = check_n_jobs(n_jobs)
parallel = _check_wrapper(Parallel(n_jobs, **kwargs))
my_func = delayed(func)
if total is not None:
def parallel_progress(op_iter):
return parallel(ProgressBar(iterable=op_iter, max_value=total))
parallel_out = parallel_progress
else:
parallel_out = parallel
return parallel_out, my_func, n_jobs
def _check_wrapper(fun):
def run(*args, **kwargs):
try:
return fun(*args, **kwargs)
except RuntimeError as err:
msg = str(err.args[0]) if err.args else ''
if msg.startswith('The task could not be sent to the workers'):
raise RuntimeError(
msg + ' Consider using joblib memmap caching to get '
                    'around this problem. See mne.set_memmap_min_size, '
'mne.set_cache_dir, and buffer_size parallel function '
'arguments (if applicable).')
raise
return run
def check_n_jobs(n_jobs, allow_cuda=False):
"""Check n_jobs in particular for negative values.
Parameters
----------
n_jobs : int
The number of jobs.
allow_cuda : bool
Allow n_jobs to be 'cuda'. Default: False.
Returns
-------
n_jobs : int
The checked number of jobs. Always positive (or 'cuda' if
applicable).
"""
if not isinstance(n_jobs, int_like):
if not allow_cuda:
raise ValueError('n_jobs must be an integer')
elif not isinstance(n_jobs, str) or n_jobs != 'cuda':
raise ValueError('n_jobs must be an integer, or "cuda"')
# else, we have n_jobs='cuda' and this is okay, so do nothing
elif _force_serial:
n_jobs = 1
logger.info('... MNE_FORCE_SERIAL set. Processing in forced '
'serial mode.')
elif n_jobs <= 0:
try:
import multiprocessing
n_cores = multiprocessing.cpu_count()
n_jobs = min(n_cores + n_jobs + 1, n_cores)
if n_jobs <= 0:
raise ValueError('If n_jobs has a negative value it must not '
'be less than the number of CPUs present. '
'You\'ve got %s CPUs' % n_cores)
except ImportError:
# only warn if they tried to use something other than 1 job
if n_jobs != 1:
warn('multiprocessing not installed. Cannot run in parallel.')
n_jobs = 1
return n_jobs
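# Usage sketch (added for illustration; ``_square`` is a hypothetical helper):
# parallel_func returns the same (parallel, my_func, n_jobs) triple whether or
# not joblib is available, so callers follow a single pattern:
#
#   def _square(x):
#       return x * x
#
#   parallel, p_func, n_jobs = parallel_func(_square, n_jobs=2)
#   results = parallel(p_func(x) for x in range(10))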
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compare_gan.architectures import arch_ops as ops
from compare_gan.architectures import resnet_ops
import numpy as np
from six.moves import range
import tensorflow as tf
class Generator(resnet_ops.ResNetGenerator):
"""ResNet generator consisting of 5 blocks, outputs 128x128x3 resolution."""
def __init__(self, ch=64, channels=(8, 8, 4, 4, 2, 1), **kwargs):
super(Generator, self).__init__(**kwargs)
self._ch = ch
self._channels = channels
def apply(self, z, y, is_training):
"""Build the generator network for the given inputs.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
      is_training: boolean, are we in train or eval mode.
Returns:
A tensor of size [batch_size] + self._image_shape with values in [0, 1].
"""
# Each block upscales by a factor of 2.
seed_size = 4
image_size = self._image_shape[0]
# Map noise to the actual seed.
net = ops.linear(
z,
self._ch * self._channels[0] * seed_size * seed_size,
scope="fc_noise")
# Reshape the seed to be a rank-4 Tensor.
net = tf.reshape(
net,
[-1, seed_size, seed_size, self._ch * self._channels[0]],
name="fc_reshaped")
up_layers = np.log2(float(image_size) / seed_size)
if not up_layers.is_integer():
raise ValueError("log2({}/{}) must be an integer.".format(
image_size, seed_size))
if up_layers < 0 or up_layers > 5:
raise ValueError("Invalid image_size {}.".format(image_size))
up_layers = int(up_layers)
for block_idx in range(5):
block = self._resnet_block(
name="B{}".format(block_idx + 1),
in_channels=self._ch * self._channels[block_idx],
out_channels=self._ch * self._channels[block_idx + 1],
scale="up" if block_idx < up_layers else "none")
net = block(net, z=z, y=y, is_training=is_training)
net = self.batch_norm(
net, z=z, y=y, is_training=is_training, name="final_norm")
net = tf.nn.relu(net)
net = ops.conv2d(net, output_dim=self._image_shape[2],
k_h=3, k_w=3, d_h=1, d_w=1, name="final_conv")
net = tf.nn.sigmoid(net)
return net
class Discriminator(resnet_ops.ResNetDiscriminator):
"""ResNet5 discriminator, 5 blocks, supporting 128x128x3 and 128x128x1."""
def __init__(self, ch=64, channels=(1, 2, 4, 4, 8, 8), **kwargs):
super(Discriminator, self).__init__(**kwargs)
self._ch = ch
self._channels = channels
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
Tuple of 3 Tensors, the final prediction of the discriminator, the logits
      before the final output activation function, and the logits from the
      second-to-last layer.
"""
resnet_ops.validate_image_inputs(x)
colors = x.shape[3].value
if colors not in [1, 3]:
raise ValueError("Number of color channels not supported: {}".format(
colors))
block = self._resnet_block(
name="B0",
in_channels=colors,
out_channels=self._ch,
scale="down")
output = block(x, z=None, y=y, is_training=is_training)
for block_idx in range(5):
block = self._resnet_block(
name="B{}".format(block_idx + 1),
in_channels=self._ch * self._channels[block_idx],
out_channels=self._ch * self._channels[block_idx + 1],
scale="down")
output = block(output, z=None, y=y, is_training=is_training)
output = tf.nn.relu(output)
pre_logits = tf.reduce_mean(output, axis=[1, 2])
out_logit = ops.linear(pre_logits, 1, scope="disc_final_fc",
use_sn=self._spectral_norm)
out = tf.nn.sigmoid(out_logit)
return out, out_logit, pre_logits
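# Worked example (added for illustration): with the default 4x4 seed and a
# 128x128 output, up_layers = log2(128 / 4) = 5, so all five generator blocks
# upscale by a factor of 2: 4 -> 8 -> 16 -> 32 -> 64 -> 128. For a 64x64
# output, up_layers would be 4 and the last block would use scale="none".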
|
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
async def test_random_binary_sensor_on(hass):
"""Test the Random binary sensor."""
config = {"binary_sensor": {"platform": "random", "name": "test"}}
with patch(
"homeassistant.components.random.binary_sensor.getrandbits",
return_value=1,
):
assert await async_setup_component(
hass,
"binary_sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == "on"
async def test_random_binary_sensor_off(hass):
"""Test the Random binary sensor."""
config = {"binary_sensor": {"platform": "random", "name": "test"}}
with patch(
"homeassistant.components.random.binary_sensor.getrandbits",
return_value=False,
):
assert await async_setup_component(
hass,
"binary_sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == "off"
|
from homeassistant.const import PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT
from homeassistant.core import callback
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from . import DOMAIN
from .entity import RingEntityMixin
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a sensor for a Ring device."""
devices = hass.data[DOMAIN][config_entry.entry_id]["devices"]
sensors = []
for device_type in ("chimes", "doorbots", "authorized_doorbots", "stickup_cams"):
for sensor_type in SENSOR_TYPES:
if device_type not in SENSOR_TYPES[sensor_type][1]:
continue
for device in devices[device_type]:
if device_type == "battery" and device.battery_life is None:
continue
sensors.append(
SENSOR_TYPES[sensor_type][6](
config_entry.entry_id, device, sensor_type
)
)
async_add_entities(sensors)
class RingSensor(RingEntityMixin, Entity):
"""A sensor implementation for Ring device."""
def __init__(self, config_entry_id, device, sensor_type):
"""Initialize a sensor for Ring device."""
super().__init__(config_entry_id, device)
self._sensor_type = sensor_type
self._extra = None
self._icon = "mdi:{}".format(SENSOR_TYPES.get(sensor_type)[3])
self._kind = SENSOR_TYPES.get(sensor_type)[4]
self._name = "{} {}".format(self._device.name, SENSOR_TYPES.get(sensor_type)[0])
self._unique_id = f"{device.id}-{sensor_type}"
@property
def should_poll(self):
"""Return False, updates are controlled via the hub."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self._sensor_type == "volume":
return self._device.volume
if self._sensor_type == "battery":
return self._device.battery_life
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def device_class(self):
"""Return sensor device class."""
return SENSOR_TYPES[self._sensor_type][5]
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == "battery" and self._device.battery_life is not None:
return icon_for_battery_level(
battery_level=self._device.battery_life, charging=False
)
return self._icon
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return SENSOR_TYPES.get(self._sensor_type)[2]
class HealthDataRingSensor(RingSensor):
"""Ring sensor that relies on health data."""
async def async_added_to_hass(self):
"""Register callbacks."""
await super().async_added_to_hass()
await self.ring_objects["health_data"].async_track_device(
self._device, self._health_update_callback
)
async def async_will_remove_from_hass(self):
"""Disconnect callbacks."""
await super().async_will_remove_from_hass()
self.ring_objects["health_data"].async_untrack_device(
self._device, self._health_update_callback
)
@callback
def _health_update_callback(self, _health_data):
"""Call update method."""
self.async_write_ha_state()
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
# These sensors are data hungry and not useful. Disable by default.
return False
@property
def state(self):
"""Return the state of the sensor."""
if self._sensor_type == "wifi_signal_category":
return self._device.wifi_signal_category
if self._sensor_type == "wifi_signal_strength":
return self._device.wifi_signal_strength
class HistoryRingSensor(RingSensor):
"""Ring sensor that relies on history data."""
_latest_event = None
async def async_added_to_hass(self):
"""Register callbacks."""
await super().async_added_to_hass()
await self.ring_objects["history_data"].async_track_device(
self._device, self._history_update_callback
)
async def async_will_remove_from_hass(self):
"""Disconnect callbacks."""
await super().async_will_remove_from_hass()
self.ring_objects["history_data"].async_untrack_device(
self._device, self._history_update_callback
)
@callback
def _history_update_callback(self, history_data):
"""Call update method."""
if not history_data:
return
found = None
if self._kind is None:
found = history_data[0]
else:
for entry in history_data:
if entry["kind"] == self._kind:
found = entry
break
if not found:
return
self._latest_event = found
self.async_write_ha_state()
@property
def state(self):
"""Return the state of the sensor."""
if self._latest_event is None:
return None
return self._latest_event["created_at"].isoformat()
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = super().device_state_attributes
if self._latest_event:
attrs["created_at"] = self._latest_event["created_at"]
attrs["answered"] = self._latest_event["answered"]
attrs["recording_status"] = self._latest_event["recording"]["status"]
attrs["category"] = self._latest_event["kind"]
return attrs
# Sensor types: Name, category, units, icon, kind, device_class, class
SENSOR_TYPES = {
"battery": [
"Battery",
["doorbots", "authorized_doorbots", "stickup_cams"],
PERCENTAGE,
None,
None,
"battery",
RingSensor,
],
"last_activity": [
"Last Activity",
["doorbots", "authorized_doorbots", "stickup_cams"],
None,
"history",
None,
"timestamp",
HistoryRingSensor,
],
"last_ding": [
"Last Ding",
["doorbots", "authorized_doorbots"],
None,
"history",
"ding",
"timestamp",
HistoryRingSensor,
],
"last_motion": [
"Last Motion",
["doorbots", "authorized_doorbots", "stickup_cams"],
None,
"history",
"motion",
"timestamp",
HistoryRingSensor,
],
"volume": [
"Volume",
["chimes", "doorbots", "authorized_doorbots", "stickup_cams"],
None,
"bell-ring",
None,
None,
RingSensor,
],
"wifi_signal_category": [
"WiFi Signal Category",
["chimes", "doorbots", "authorized_doorbots", "stickup_cams"],
None,
"wifi",
None,
None,
HealthDataRingSensor,
],
"wifi_signal_strength": [
"WiFi Signal Strength",
["chimes", "doorbots", "authorized_doorbots", "stickup_cams"],
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
"wifi",
None,
"signal_strength",
HealthDataRingSensor,
],
}
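# Index guide (added for illustration; mirrors the positional comment above):
#   name, device_types, unit, icon, kind, device_class, sensor_class = SENSOR_TYPES["battery"]
#   # name == "Battery", unit == PERCENTAGE, sensor_class is RingSensor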
|
import unittest
import ast
import mock
from kalliope.core.Models.settings.Options import Options
from kalliope.core.Models.settings.Player import Player
from kalliope.core.Models.Signal import Signal
from kalliope.core.Models.settings.Tts import Tts
from kalliope.core.Models.settings.Trigger import Trigger
from kalliope.core.Models.settings.Stt import Stt
from kalliope.core.Models.settings.RestAPI import RestAPI
from kalliope.core.Models.Dna import Dna
from kalliope.core import LIFOBuffer
from kalliope.core.Models.settings.Settings import Settings
from kalliope.core.Models import Neuron, Synapse, Brain, Resources, Singleton
from kalliope.core.Models.APIResponse import APIResponse
from kalliope.core.Models.MatchedSynapse import MatchedSynapse
class TestModels(unittest.TestCase):
def setUp(self):
# Kill the singleton
Singleton._instances = dict()
# Init
neuron1 = Neuron(name='neurone1', parameters={'var1': 'val1'})
neuron2 = Neuron(name='neurone2', parameters={'var2': 'val2'})
neuron3 = Neuron(name='neurone3', parameters={'var3': 'val3'})
neuron4 = Neuron(name='neurone4', parameters={'var4': 'val4'})
signal1 = Signal(name="order", parameters="this is the sentence")
signal2 = Signal(name="order", parameters="this is the second sentence")
signal3 = Signal(name="order", parameters="that is part of the third sentence")
self.synapse1 = Synapse(name="Synapse1", neurons=[neuron1, neuron2], signals=[signal1])
self.synapse2 = Synapse(name="Synapse2", neurons=[neuron3, neuron4], signals=[signal2])
self.synapse3 = Synapse(name="Synapse3", neurons=[neuron2, neuron4], signals=[signal3])
self.all_synapse_list1 = [self.synapse1,
self.synapse2,
self.synapse3]
self.all_synapse_list2 = [self.synapse2,
self.synapse3]
self.brain_test1 = Brain(synapses=self.all_synapse_list1)
self.brain_test2 = Brain(synapses=self.all_synapse_list2)
# this brain is the same as the first one
self.brain_test3 = Brain(synapses=self.all_synapse_list1)
self.settings_test = Settings()
# clean the LiFO
LIFOBuffer.lifo_list = list()
def test_APIResponse(self):
user_order = "user order"
self.matched_synapse = MatchedSynapse(matched_synapse=self.synapse1, matched_order=user_order)
api_response = APIResponse()
api_response.user_order = user_order
api_response.list_processed_matched_synapse = [self.matched_synapse]
expected_result_serialize = {
'status': None,
'matched_synapses':
[
{
'matched_order': 'user order',
'neuron_module_list': [],
'synapse_name': 'Synapse1'
}
],
'user_order': 'user order'
}
self.assertDictEqual(expected_result_serialize, api_response.serialize())
def test_Brain(self):
# test get synapse by name
expect_result = self.synapse1
synapse_name = "Synapse1"
self.assertEqual(self.brain_test1.get_synapse_by_name(synapse_name), expect_result)
# test equals
self.assertTrue(self.brain_test1.__eq__(self.brain_test3))
# test not equals
self.assertFalse(self.brain_test1.__eq__(self.brain_test2))
def test_Dna(self):
# create DNA object
dna1 = Dna(name="dna1", module_type="neuron", author="kalliope",
kalliope_supported_version="0.4.4", tags="test")
dna2 = Dna(name="dna2", module_type="neuron", author="community",
kalliope_supported_version="0.4.2", tags="other")
# this dna is exactly the same as the first one
dna3 = Dna(name="dna1", module_type="neuron", author="kalliope",
kalliope_supported_version="0.4.4", tags="test")
expected_result_serialize = {
'kalliope_supported_version': '0.4.4',
'tags': 'test',
'type': 'neuron',
'name': 'dna1',
'author': 'kalliope'
}
self.assertDictEqual(expected_result_serialize, dna1.serialize())
self.assertTrue(dna1.__eq__(dna3))
self.assertFalse(dna1.__eq__(dna2))
def test_MatchedSynapse(self):
user_order = "user order"
matched_synapse1 = MatchedSynapse(matched_synapse=self.synapse1, matched_order=user_order)
matched_synapse2 = MatchedSynapse(matched_synapse=self.synapse2, matched_order=user_order)
matched_synapse3 = MatchedSynapse(matched_synapse=self.synapse1, matched_order=user_order)
expected_result_serialize = {
'matched_order': 'user order',
'neuron_module_list': [],
'synapse_name': 'Synapse1'
}
self.assertDictEqual(expected_result_serialize, matched_synapse1.serialize())
self.assertTrue(matched_synapse1.__eq__(matched_synapse3))
self.assertFalse(matched_synapse1.__eq__(matched_synapse2))
# test neuron parameter loader is called
with mock.patch("kalliope.core.NeuronParameterLoader.get_parameters") as mock_get_parameters:
MatchedSynapse(matched_synapse=self.synapse1, matched_order=user_order, user_order=user_order)
mock_get_parameters.assert_called_once_with(synapse_order=user_order,
user_order=user_order)
mock_get_parameters.reset_mock()
def test_Neuron(self):
neuron1 = Neuron(name="test", parameters={"key1": "val1", "key2": "val2"})
neuron2 = Neuron(name="test", parameters={"key3": "val3", "key4": "val4"})
neuron3 = Neuron(name="test", parameters={"key1": "val1", "key2": "val2"})
expected_result_serialize = {'name': 'test', 'parameters': {'key2': 'val2', 'key1': 'val1'}}
self.assertDictEqual(expected_result_serialize, neuron1.serialize())
self.assertTrue(neuron1.__eq__(neuron3))
self.assertFalse(neuron1.__eq__(neuron2))
# test password
neuron_name = "test"
neuron_parameters = {
"password": "my secret",
"parameter": "test"
}
neuron = Neuron()
neuron.name = neuron_name
neuron.parameters = neuron_parameters
expected_result_str = "{'name': 'test', 'parameters': {'password': '*****', 'parameter': 'test'}}"
self.assertDictEqual(ast.literal_eval(neuron.__str__()), ast.literal_eval(expected_result_str))
neuron_name = "test"
neuron_parameters = {
"password_parameter": "my secret",
"parameter": "test"
}
neuron = Neuron()
neuron.name = neuron_name
neuron.parameters = neuron_parameters
expected_result_str = "{'name': 'test', 'parameters': {'parameter': 'test', 'password_parameter': '*****'}}"
self.assertDictEqual(ast.literal_eval(neuron.__str__()), ast.literal_eval(expected_result_str))
def test_Resources(self):
resource1 = Resources(neuron_folder="/path/neuron", stt_folder="/path/stt",
tts_folder="/path/tts", trigger_folder="/path/trigger")
resource2 = Resources(neuron_folder="/other_path/neuron", stt_folder="/other_path/stt",
tts_folder="/other_path/tts", trigger_folder="/other_path/trigger")
resource3 = Resources(neuron_folder="/path/neuron", stt_folder="/path/stt",
tts_folder="/path/tts", trigger_folder="/path/trigger")
expected_result_serialize = {
'tts_folder': '/path/tts',
'neuron_folder': '/path/neuron',
'stt_folder': '/path/stt',
'trigger_folder': '/path/trigger',
'signal_folder': None
}
self.assertDictEqual(expected_result_serialize, resource1.serialize())
self.assertTrue(resource1.__eq__(resource3))
self.assertFalse(resource1.__eq__(resource2))
def test_RestAPI(self):
rest_api1 = RestAPI(password_protected=True, login="admin", password="password", active=True,
port=5000, allowed_cors_origin="*")
rest_api2 = RestAPI(password_protected=False, active=False,
port=5000, allowed_cors_origin=None)
rest_api3 = RestAPI(password_protected=True, login="admin", password="password", active=True,
port=5000, allowed_cors_origin="*")
expected_result_serialize = {
'password_protected': True,
'port': 5000,
'active': True,
'allowed_cors_origin': '*',
'password': 'password',
'login': 'admin'
}
self.assertDictEqual(expected_result_serialize, rest_api1.serialize())
self.assertTrue(rest_api1.__eq__(rest_api3))
self.assertFalse(rest_api1.__eq__(rest_api2))
def test_Settings(self):
with mock.patch('platform.machine', return_value='pumpkins'):
rest_api1 = RestAPI(password_protected=True,
login="admin",
password="password",
active=True,
port=5000, allowed_cors_origin="*")
tts1 = Tts(name="tts1", parameters=dict())
tts2 = Tts(name="tts2", parameters=dict())
stt1 = Stt(name="stt1", parameters=dict())
stt2 = Stt(name="stt2", parameters=dict())
trigger1 = Trigger(name="snowboy", parameters=dict())
player = Player(name="player1")
resources = Resources()
options = Options()
setting1 = Settings(default_tts_name="pico2wav",
default_stt_name="google",
default_trigger_name="swoyboy",
default_player_name="mplayer",
ttss=[tts1],
stts=[stt1],
triggers=[trigger1],
players=[player],
rest_api=rest_api1,
cache_path="/tmp/kalliope",
resources=resources,
variables={"key1": "val1"},
options=options,
send_anonymous_usage_stats=0)
setting1.kalliope_version = "0.4.5"
setting2 = Settings(default_tts_name="pico2wav",
default_stt_name="google",
default_trigger_name="swoyboy",
default_player_name="mplayer",
ttss=[tts2],
stts=[stt2],
triggers=[trigger1],
players=[player],
rest_api=rest_api1,
cache_path="/tmp/kalliope",
resources=resources,
variables={"key1": "val1"},
options=options,
send_anonymous_usage_stats=0)
setting3 = Settings(default_tts_name="pico2wav",
default_stt_name="google",
default_trigger_name="swoyboy",
default_player_name="mplayer",
ttss=[tts1],
stts=[stt1],
triggers=[trigger1],
players=[player],
rest_api=rest_api1,
cache_path="/tmp/kalliope",
resources=resources,
variables={"key1": "val1"},
options=options,
send_anonymous_usage_stats=0)
setting3.kalliope_version = "0.4.5"
expected_result_serialize = {'default_tts_name': 'pico2wav', 'default_stt_name': 'google', 'default_trigger_name': 'swoyboy', 'default_player_name': 'mplayer', 'ttss': [{'name': 'tts1', 'parameters': {}}], 'stts': [{'name': 'stt1', 'parameters': {}}], 'triggers': [{'name': 'snowboy', 'parameters': {}}], 'players': [{'name': 'player1', 'parameters': None}], 'rest_api': {'password_protected': True, 'login': 'admin', 'password': 'password', 'active': True, 'port': 5000, 'allowed_cors_origin': '*'}, 'cache_path': '/tmp/kalliope', 'resources': {'neuron_folder': None, 'stt_folder': None, 'tts_folder': None, 'trigger_folder': None, 'signal_folder': None}, 'variables': {'key1': 'val1'}, 'machine': 'pumpkins', 'kalliope_version': '0.4.5', 'options': {'name': 'Options', 'recognizer_multiplier': 1.0, 'recognizer_energy_ratio': 1.5, 'recognizer_recording_timeout': 15.0, 'recognizer_recording_timeout_with_silence': 3.0, 'deaf': None, 'mute': None}, 'hooks': None, 'send_anonymous_usage_stats': 0}
self.maxDiff = None
self.assertDictEqual(expected_result_serialize, setting1.serialize())
self.assertTrue(setting1.__eq__(setting3))
self.assertFalse(setting1.__eq__(setting2))
def test_Stt(self):
stt1 = Stt(name="stt1", parameters={"key1": "val1"})
stt2 = Stt(name="stt2", parameters={"key2": "val2"})
stt3 = Stt(name="stt1", parameters={"key1": "val1"})
expected_result_serialize = {'name': 'stt1', 'parameters': {'key1': 'val1'}}
self.assertDictEqual(expected_result_serialize, stt1.serialize())
self.assertTrue(stt1.__eq__(stt3))
self.assertFalse(stt1.__eq__(stt2))
def test_Synapse(self):
neuron1 = Neuron(name='neurone1', parameters={'var1': 'val1'})
neuron2 = Neuron(name='neurone2', parameters={'var2': 'val2'})
neuron3 = Neuron(name='neurone3', parameters={'var3': 'val3'})
neuron4 = Neuron(name='neurone4', parameters={'var4': 'val4'})
signal1 = Signal(name="order", parameters="this is the sentence")
signal2 = Signal(name="order", parameters="this is the second sentence")
synapse1 = Synapse(name="Synapse1", neurons=[neuron1, neuron2], signals=[signal1])
synapse2 = Synapse(name="Synapse2", neurons=[neuron3, neuron4], signals=[signal2])
synapse3 = Synapse(name="Synapse1", neurons=[neuron1, neuron2], signals=[signal1])
expected_result_serialize = {
'signals': [
{
'name': 'order',
'parameters': 'this is the sentence'
}
],
'neurons': [
{
'name': 'neurone1',
'parameters': {
'var1': 'val1'
}
},
{
'name': 'neurone2',
'parameters':
{
'var2': 'val2'
}
}
],
'name': 'Synapse1',
'enabled': True
}
self.assertDictEqual(expected_result_serialize, synapse1.serialize())
self.assertTrue(synapse1.__eq__(synapse3))
self.assertFalse(synapse1.__eq__(synapse2))
def test_Trigger(self):
trigger1 = Trigger(name="trigger1", parameters={"key1": "val1"})
trigger2 = Trigger(name="trigger2", parameters={"key2": "val2"})
trigger3 = Trigger(name="trigger1", parameters={"key1": "val1"})
expected_result_serialize = {'name': 'trigger1', 'parameters': {'key1': 'val1'}}
self.assertDictEqual(expected_result_serialize, trigger1.serialize())
self.assertTrue(trigger1.__eq__(trigger3))
self.assertFalse(trigger1.__eq__(trigger2))
def test_Player(self):
player1 = Player(name="player1", parameters={"key1": "val1"})
player2 = Player(name="player2", parameters={"key2": "val2"})
player3 = Player(name="player1", parameters={"key1": "val1"})
expected_result_serialize = {'name': 'player1', 'parameters': {'key1': 'val1'}}
self.assertDictEqual(expected_result_serialize, player1.serialize())
self.assertTrue(player1.__eq__(player3))
self.assertFalse(player1.__eq__(player2))
def test_Tts(self):
tts1 = Tts(name="tts1", parameters={"key1": "val1"})
tts2 = Tts(name="tts2", parameters={"key2": "val2"})
tts3 = Tts(name="tts1", parameters={"key1": "val1"})
expected_result_serialize = {'name': 'tts1', 'parameters': {'key1': 'val1'}}
self.assertDictEqual(expected_result_serialize, tts1.serialize())
self.assertTrue(tts1.__eq__(tts3))
self.assertFalse(tts1.__eq__(tts2))
if __name__ == '__main__':
unittest.main()
# suite = unittest.TestSuite()
# suite.addTest(TestLIFOBuffer("test_process_neuron_list"))
# runner = unittest.TextTestRunner()
# runner.run(suite)
|
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QLabel, QSizePolicy
from PyQt5.QtGui import QPainter
from qutebrowser.utils import qtutils, utils
class TextBase(QLabel):
"""A text in the statusbar.
Unlike QLabel, the text will get elided.
Eliding is loosely based on
http://gedgedev.blogspot.ch/2010/12/elided-labels-in-qt.html
Attributes:
_elidemode: Where to elide the text.
_elided_text: The current elided text.
"""
def __init__(self, parent=None, elidemode=Qt.ElideRight):
super().__init__(parent)
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Minimum)
self._elidemode = elidemode
self._elided_text = ''
def __repr__(self):
return utils.get_repr(self, text=self.text())
def _update_elided_text(self, width):
"""Update the elided text when necessary.
Args:
width: The maximal width the text should take.
"""
if self.text():
self._elided_text = self.fontMetrics().elidedText(
self.text(), self._elidemode, width, Qt.TextShowMnemonic)
else:
self._elided_text = ''
def setText(self, txt):
"""Extend QLabel::setText to update the elided text afterwards.
Args:
txt: The text to set (string).
"""
super().setText(txt)
if self._elidemode != Qt.ElideNone:
self._update_elided_text(self.geometry().width())
def resizeEvent(self, e):
"""Extend QLabel::resizeEvent to update the elided text afterwards."""
super().resizeEvent(e)
size = e.size()
qtutils.ensure_valid(size)
self._update_elided_text(size.width())
def paintEvent(self, e):
"""Override QLabel::paintEvent to draw elided text."""
if self._elidemode == Qt.ElideNone:
super().paintEvent(e)
else:
e.accept()
painter = QPainter(self)
geom = self.geometry()
qtutils.ensure_valid(geom)
painter.drawText(0, 0, geom.width(), geom.height(),
int(self.alignment()), self._elided_text)
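# Illustration (added; text and width are hypothetical): with Qt.ElideRight and
# a width narrower than the full string, fontMetrics().elidedText() truncates
# on the right, so "a rather long statusbar text" is drawn as something like
# "a rather long stat…".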
|