import datetime
import logging
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
from .const import DOMAIN
TIME_FRAME1_BEGIN = "time_frame1_begin"
TIME_FRAME1_END = "time_frame1_end"
TIME_FRAME2_BEGIN = "time_frame2_begin"
TIME_FRAME2_END = "time_frame2_end"
TIME_FRAME3_BEGIN = "time_frame3_begin"
TIME_FRAME3_END = "time_frame3_end"
MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(seconds=15)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ebus sensor."""
ebusd_api = hass.data[DOMAIN]
monitored_conditions = discovery_info["monitored_conditions"]
name = discovery_info["client_name"]
dev = []
for condition in monitored_conditions:
dev.append(
EbusdSensor(ebusd_api, discovery_info["sensor_types"][condition], name)
)
add_entities(dev, True)
class EbusdSensor(Entity):
"""Ebusd component sensor methods definition."""
def __init__(self, data, sensor, name):
"""Initialize the sensor."""
self._state = None
self._client_name = name
self._name, self._unit_of_measurement, self._icon, self._type = sensor
self.data = data
@property
def name(self):
"""Return the name of the sensor."""
return f"{self._client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self._type == 1 and self._state is not None:
schedule = {
TIME_FRAME1_BEGIN: None,
TIME_FRAME1_END: None,
TIME_FRAME2_BEGIN: None,
TIME_FRAME2_END: None,
TIME_FRAME3_BEGIN: None,
TIME_FRAME3_END: None,
}
time_frame = self._state.split(";")
for index, item in enumerate(sorted(schedule.items())):
if index < len(time_frame):
parsed = datetime.datetime.strptime(time_frame[index], "%H:%M")
parsed = parsed.replace(
dt_util.now().year, dt_util.now().month, dt_util.now().day
)
schedule[item[0]] = parsed.isoformat()
return schedule
return None
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Fetch new state data for the sensor."""
try:
self.data.update(self._name, self._type)
if self._name not in self.data.value:
return
self._state = self.data.value[self._name]
except RuntimeError:
_LOGGER.debug("EbusdData.update exception")
|
import os
import unittest
from absl import flags
from absl.testing import parameterized
from perfkitbenchmarker import sample
from perfkitbenchmarker import test_util
from perfkitbenchmarker.windows_packages import ntttcp
FLAGS = flags.FLAGS
FLAGS.mark_as_parsed()
NtttcpConf = ntttcp.NtttcpConf
class NtttcpBenchmarkTestCase(parameterized.TestCase, unittest.TestCase,
test_util.SamplesTestMixin):
def getDataContents(self, file_name):
path = os.path.join(os.path.dirname(__file__), '..', 'data', file_name)
with open(path) as fp:
contents = fp.read()
return contents
def setUp(self):
super(NtttcpBenchmarkTestCase, self).setUp()
self.xml_tcp_send_results = self.getDataContents('ntttcp_tcp_sender.xml')
self.xml_tcp_rec_results = self.getDataContents('ntttcp_tcp_receiver.xml')
self.xml_udp_send_results = self.getDataContents('ntttcp_udp_sender.xml')
self.xml_udp_rec_results = self.getDataContents('ntttcp_udp_receiver.xml')
def testNtttcpTcpParsing(self):
samples = ntttcp.ParseNtttcpResults(self.xml_tcp_send_results,
self.xml_tcp_rec_results, {})
expected_metadata = {
'async': 'False',
'bind_sender': 'False',
'cooldown_time': '30000',
'dash_n_timeout': '10800000',
'max_active_threads': '2',
'no_sync': 'False',
'port': '5003',
'receiver avg_bytes_per_compl': '149.998',
'receiver avg_frame_size': '1266.217',
'receiver avg_packets_per_dpc': '0.598',
'receiver avg_packets_per_interrupt': '0.379',
'receiver bufferCount': '9223372036854775807',
'receiver bufferLen': '150',
'receiver cpu': '36.872',
'receiver cycles': '89.055',
'receiver dpcs': '48156.278',
'receiver errors': '1',
'receiver interrupts': '75870.499',
'receiver io': '2',
'receiver packets_received': '1726938',
'receiver packets_retransmitted': '4',
'receiver packets_sent': '1092640',
'receiver realtime': '60.015000',
'receiver rb': -1,
'receiver sb': -1,
'receiver threads_avg_bytes_per_compl': '149.998',
'receiver throughput': '291.484',
'receiver total_buffers': '14577858.000',
'receiver total_bytes': '2085.379314',
'recv_socket_buff': '-1',
'run_time': '60000',
'sender avg_bytes_per_compl': '150.000',
'sender avg_frame_size': '751.222',
'sender avg_packets_per_dpc': '1.064',
'sender avg_packets_per_interrupt': '0.516',
'sender bufferCount': '9223372036854775807',
'sender bufferLen': '150',
'sender cpu': '36.234',
'sender cycles': '87.514',
'sender dpcs': '17108.590',
'sender errors': '0',
'sender interrupts': '35302.624',
'sender io': '2',
'sender_name': None,
'sender packets_received': '1092639',
'sender packets_retransmitted': '10',
'sender packets_sent': '2910833',
'sender realtime': '60.015000',
'sender rb': -1,
'sender sb': -1,
'sender threads_avg_bytes_per_compl': '150.000',
'sender total_buffers': '14577884.000',
'sender total_bytes': '2085.383034',
'send_socket_buff': '8192',
'sync_port': 'False',
'udp': 'False',
'use_ipv6': 'False',
'verbose': 'False',
'verify_data': 'False',
'wait_all': 'False',
'wait_timeout_milliseconds': '600000',
'warmup_time': '30000',
'wsa': 'False',
}
expected_thread_0_metadata = expected_metadata.copy()
expected_thread_0_metadata['thread_index'] = '0'
expected_thread_1_metadata = expected_metadata.copy()
expected_thread_1_metadata['thread_index'] = '1'
expected_samples = [
sample.Sample('Total Throughput', 291.485, 'Mbps', expected_metadata),
sample.Sample('Thread Throughput', 147.105, 'Mbps',
expected_thread_0_metadata),
sample.Sample('Thread Throughput', 144.379, 'Mbps',
expected_thread_1_metadata)
]
self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
def testNtttcpUdpParsing(self):
samples = ntttcp.ParseNtttcpResults(self.xml_udp_send_results,
self.xml_udp_rec_results, {})
expected_metadata = {
'async': 'False',
'bind_sender': 'False',
'cooldown_time': '30000',
'dash_n_timeout': '10800000',
'max_active_threads': '2',
'no_sync': 'False',
'port': '5003',
'receiver avg_bytes_per_compl': '128.000',
'receiver avg_frame_size': '99.200',
'receiver avg_packets_per_dpc': '6.147',
'receiver avg_packets_per_interrupt': '3.838',
'receiver bufferCount': '9223372036854775807',
'receiver bufferLen': '128',
'receiver cpu': '51.120',
'receiver cycles': '189.967',
'receiver dpcs': '38835.774',
'receiver errors': '0',
'receiver interrupts': '62200.183',
'receiver io': '2',
'receiver packets_received': '14326674',
'receiver packets_retransmitted': '0',
'receiver packets_sent': '0',
'receiver realtime': '60.015000',
'receiver rb': -1,
'receiver sb': -1,
'receiver threads_avg_bytes_per_compl': '128.000',
'receiver throughput': '189.447',
'receiver total_buffers': '11103157.000',
'receiver total_bytes': '1355.365845',
'recv_socket_buff': '-1',
'run_time': '60000',
'sender avg_bytes_per_compl': '128.000',
'sender avg_frame_size': '128.000',
'sender avg_packets_per_dpc': '0.000',
'sender avg_packets_per_interrupt': '0.000',
'sender bufferCount': '9223372036854775807',
'sender bufferLen': '128',
'sender cpu': '68.290',
'sender cycles': '196.108',
'sender dpcs': '250.737',
'sender errors': '0',
'sender interrupts': '1669.516',
'sender io': '2',
'sender_name': None,
'sender packets_received': '0',
'sender packets_retransmitted': '0',
'sender packets_sent': '14368008',
'sender realtime': '60.015000',
'sender rb': -1,
'sender sb': -1,
'sender threads_avg_bytes_per_compl': '128.000',
'sender total_buffers': '14368009.000',
'sender total_bytes': '1753.907349',
'send_socket_buff': '8192',
'sync_port': 'False',
'udp': 'True',
'use_ipv6': 'False',
'verbose': 'False',
'verify_data': 'False',
'wait_all': 'False',
'wait_timeout_milliseconds': '600000',
'warmup_time': '30000',
'wsa': 'False',
}
expected_thread_0_metadata = expected_metadata.copy()
expected_thread_0_metadata['thread_index'] = '0'
expected_thread_1_metadata = expected_metadata.copy()
expected_thread_1_metadata['thread_index'] = '1'
expected_samples = [
sample.Sample('Total Throughput', 245.153, 'Mbps', expected_metadata),
sample.Sample('Thread Throughput', 121.160, 'Mbps',
expected_thread_0_metadata),
sample.Sample('Thread Throughput', 123.993, 'Mbps',
expected_thread_1_metadata)
]
self.assertSampleListsEqualUpToTimestamp(expected_samples, samples)
def testSingleConfigParse(self):
ntttcp.FLAGS.ntttcp_config_list = ['True:7:800:INTERNAL:1']
expected_list = [
NtttcpConf(
udp=True, threads=7, time_s=800, ip_type='INTERNAL', packet_size=1)
]
conf_list = ntttcp.ParseConfigList()
self.assertListEqual(conf_list, expected_list)
def testEmptyConfig(self):
ntttcp.FLAGS.ntttcp_config_list = []
expected_list = [
NtttcpConf(
udp=FLAGS.ntttcp_udp,
threads=FLAGS.ntttcp_threads,
time_s=FLAGS.ntttcp_time,
ip_type=FLAGS.ip_addresses,
packet_size=FLAGS.ntttcp_packet_size)
]
conf_list = ntttcp.ParseConfigList()
self.assertListEqual(conf_list, expected_list)
def testMultiConfigParse(self):
ntttcp.FLAGS.ntttcp_config_list = [
'True:7:800:INTERNAL:1', 'False:1:2:EXTERNAL:2',
'True:44:1001:INTERNAL:3'
]
expected_list = [
NtttcpConf(
udp=True, threads=7, time_s=800, ip_type='INTERNAL', packet_size=1),
NtttcpConf(
udp=False, threads=1, time_s=2, ip_type='EXTERNAL', packet_size=2),
NtttcpConf(
udp=True,
threads=44,
time_s=1001,
ip_type='INTERNAL',
packet_size=3),
]
conf_list = ntttcp.ParseConfigList()
self.assertListEqual(conf_list, expected_list)
@parameterized.named_parameters(
('MissingVal', ['True:7:800:INTERNAL:1', 'False::2:EXTERNAL:2']),
('Misspell', ['rue:7:800:INTERNAL:3', 'True:44:1001:EXTERNAL:4']),
('WrongOrder', ['True:7:INTERNAL:800:1', '44:True:1001:EXTERNAL:6']))
def testMalformedConfig(self, conf):
with self.assertRaises(flags.IllegalFlagValueError):
ntttcp.FLAGS.ntttcp_config_list = conf
if __name__ == '__main__':
unittest.main()
|
from homeassistant.components.zwave import const, node_entity
from homeassistant.const import ATTR_ENTITY_ID
from tests.async_mock import MagicMock, patch
import tests.mock.zwave as mock_zwave
async def test_maybe_schedule_update(hass, mock_openzwave):
"""Test maybe schedule update."""
base_entity = node_entity.ZWaveBaseEntity()
base_entity.entity_id = "zwave.bla"
base_entity.hass = hass
with patch.object(hass.loop, "call_later") as mock_call_later:
base_entity._schedule_update()
assert mock_call_later.called
base_entity._schedule_update()
assert len(mock_call_later.mock_calls) == 1
assert base_entity._update_scheduled is True
do_update = mock_call_later.mock_calls[0][1][1]
do_update()
assert base_entity._update_scheduled is False
base_entity._schedule_update()
assert len(mock_call_later.mock_calls) == 2
async def test_node_event_activated(hass, mock_openzwave):
"""Test Node event activated event."""
mock_receivers = []
def mock_connect(receiver, signal, *args, **kwargs):
if signal == mock_zwave.MockNetwork.SIGNAL_NODE_EVENT:
mock_receivers.append(receiver)
node = mock_zwave.MockNode(node_id=11)
with patch("pydispatch.dispatcher.connect", new=mock_connect):
entity = node_entity.ZWaveNodeEntity(node, mock_openzwave)
assert len(mock_receivers) == 1
events = []
def listener(event):
events.append(event)
hass.bus.async_listen(const.EVENT_NODE_EVENT, listener)
# Test event before entity added to hass
value = 234
hass.async_add_job(mock_receivers[0], node, value)
await hass.async_block_till_done()
assert len(events) == 0
# Add entity to hass
entity.hass = hass
entity.entity_id = "zwave.mock_node"
value = 234
hass.async_add_job(mock_receivers[0], node, value)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data[ATTR_ENTITY_ID] == "zwave.mock_node"
assert events[0].data[const.ATTR_NODE_ID] == 11
assert events[0].data[const.ATTR_BASIC_LEVEL] == value
async def test_scene_activated(hass, mock_openzwave):
"""Test scene activated event."""
mock_receivers = []
def mock_connect(receiver, signal, *args, **kwargs):
if signal == mock_zwave.MockNetwork.SIGNAL_SCENE_EVENT:
mock_receivers.append(receiver)
node = mock_zwave.MockNode(node_id=11)
with patch("pydispatch.dispatcher.connect", new=mock_connect):
entity = node_entity.ZWaveNodeEntity(node, mock_openzwave)
assert len(mock_receivers) == 1
events = []
def listener(event):
events.append(event)
hass.bus.async_listen(const.EVENT_SCENE_ACTIVATED, listener)
# Test event before entity added to hass
scene_id = 123
hass.async_add_job(mock_receivers[0], node, scene_id)
await hass.async_block_till_done()
assert len(events) == 0
# Add entity to hass
entity.hass = hass
entity.entity_id = "zwave.mock_node"
scene_id = 123
hass.async_add_job(mock_receivers[0], node, scene_id)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data[ATTR_ENTITY_ID] == "zwave.mock_node"
assert events[0].data[const.ATTR_NODE_ID] == 11
assert events[0].data[const.ATTR_SCENE_ID] == scene_id
async def test_central_scene_activated(hass, mock_openzwave):
"""Test central scene activated event."""
mock_receivers = []
def mock_connect(receiver, signal, *args, **kwargs):
if signal == mock_zwave.MockNetwork.SIGNAL_VALUE_CHANGED:
mock_receivers.append(receiver)
node = mock_zwave.MockNode(node_id=11)
with patch("pydispatch.dispatcher.connect", new=mock_connect):
entity = node_entity.ZWaveNodeEntity(node, mock_openzwave)
assert len(mock_receivers) == 1
events = []
def listener(event):
events.append(event)
hass.bus.async_listen(const.EVENT_SCENE_ACTIVATED, listener)
# Test event before entity added to hass
scene_id = 1
scene_data = 3
value = mock_zwave.MockValue(
command_class=const.COMMAND_CLASS_CENTRAL_SCENE, index=scene_id, data=scene_data
)
hass.async_add_job(mock_receivers[0], node, value)
await hass.async_block_till_done()
assert len(events) == 0
# Add entity to hass
entity.hass = hass
entity.entity_id = "zwave.mock_node"
scene_id = 1
scene_data = 3
value = mock_zwave.MockValue(
command_class=const.COMMAND_CLASS_CENTRAL_SCENE, index=scene_id, data=scene_data
)
hass.async_add_job(mock_receivers[0], node, value)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data[ATTR_ENTITY_ID] == "zwave.mock_node"
assert events[0].data[const.ATTR_NODE_ID] == 11
assert events[0].data[const.ATTR_SCENE_ID] == scene_id
assert events[0].data[const.ATTR_SCENE_DATA] == scene_data
async def test_application_version(hass, mock_openzwave):
"""Test application version."""
mock_receivers = {}
signal_mocks = [
mock_zwave.MockNetwork.SIGNAL_VALUE_CHANGED,
mock_zwave.MockNetwork.SIGNAL_VALUE_ADDED,
]
def mock_connect(receiver, signal, *args, **kwargs):
if signal in signal_mocks:
mock_receivers[signal] = receiver
node = mock_zwave.MockNode(node_id=11)
with patch("pydispatch.dispatcher.connect", new=mock_connect):
entity = node_entity.ZWaveNodeEntity(node, mock_openzwave)
for signal_mock in signal_mocks:
assert signal_mock in mock_receivers.keys()
events = []
def listener(event):
events.append(event)
# Make sure application version isn't set before
assert (
node_entity.ATTR_APPLICATION_VERSION
not in entity.device_state_attributes.keys()
)
# Add entity to hass
entity.hass = hass
entity.entity_id = "zwave.mock_node"
# Fire off an added value
value = mock_zwave.MockValue(
command_class=const.COMMAND_CLASS_VERSION,
label="Application Version",
data="5.10",
)
hass.async_add_job(
mock_receivers[mock_zwave.MockNetwork.SIGNAL_VALUE_ADDED], node, value
)
await hass.async_block_till_done()
assert (
entity.device_state_attributes[node_entity.ATTR_APPLICATION_VERSION] == "5.10"
)
# Fire off a changed
value = mock_zwave.MockValue(
command_class=const.COMMAND_CLASS_VERSION,
label="Application Version",
data="4.14",
)
hass.async_add_job(
mock_receivers[mock_zwave.MockNetwork.SIGNAL_VALUE_CHANGED], node, value
)
await hass.async_block_till_done()
assert (
entity.device_state_attributes[node_entity.ATTR_APPLICATION_VERSION] == "4.14"
)
async def test_network_node_changed_from_value(hass, mock_openzwave):
"""Test for network_node_changed."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
value = mock_zwave.MockValue(node=node)
with patch.object(entity, "maybe_schedule_update") as mock:
mock_zwave.value_changed(value)
mock.assert_called_once_with()
async def test_network_node_changed_from_node(hass, mock_openzwave):
"""Test for network_node_changed."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
with patch.object(entity, "maybe_schedule_update") as mock:
mock_zwave.node_changed(node)
mock.assert_called_once_with()
async def test_network_node_changed_from_another_node(hass, mock_openzwave):
"""Test for network_node_changed."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
with patch.object(entity, "maybe_schedule_update") as mock:
another_node = mock_zwave.MockNode(node_id=1024)
mock_zwave.node_changed(another_node)
assert not mock.called
async def test_network_node_changed_from_notification(hass, mock_openzwave):
"""Test for network_node_changed."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
with patch.object(entity, "maybe_schedule_update") as mock:
mock_zwave.notification(node_id=node.node_id)
mock.assert_called_once_with()
async def test_network_node_changed_from_another_notification(hass, mock_openzwave):
"""Test for network_node_changed."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
with patch.object(entity, "maybe_schedule_update") as mock:
mock_zwave.notification(node_id=1024)
assert not mock.called
async def test_node_changed(hass, mock_openzwave):
"""Test node_changed function."""
zwave_network = MagicMock()
node = mock_zwave.MockNode(
query_stage="Dynamic",
is_awake=True,
is_ready=False,
is_failed=False,
is_info_received=True,
max_baud_rate=40000,
is_zwave_plus=False,
capabilities=[],
neighbors=[],
location=None,
)
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert {
"node_id": node.node_id,
"node_name": "Mock Node",
"manufacturer_name": "Test Manufacturer",
"product_name": "Test Product",
} == entity.device_state_attributes
node.get_values.return_value = {1: mock_zwave.MockValue(data=1800)}
zwave_network.manager.getNodeStatistics.return_value = {
"receivedCnt": 4,
"ccData": [
{"receivedCnt": 0, "commandClassId": 134, "sentCnt": 0},
{"receivedCnt": 1, "commandClassId": 133, "sentCnt": 1},
{"receivedCnt": 1, "commandClassId": 115, "sentCnt": 1},
{"receivedCnt": 0, "commandClassId": 114, "sentCnt": 0},
{"receivedCnt": 0, "commandClassId": 112, "sentCnt": 0},
{"receivedCnt": 1, "commandClassId": 32, "sentCnt": 1},
{"receivedCnt": 0, "commandClassId": 0, "sentCnt": 0},
],
"receivedUnsolicited": 0,
"sentTS": "2017-03-27 15:38:15:620 ",
"averageRequestRTT": 2462,
"lastResponseRTT": 3679,
"retries": 0,
"sentFailed": 1,
"sentCnt": 7,
"quality": 0,
"lastRequestRTT": 1591,
"lastReceivedMessage": [
0,
4,
0,
15,
3,
32,
3,
0,
221,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
"receivedDups": 1,
"averageResponseRTT": 2443,
"receivedTS": "2017-03-27 15:38:19:298 ",
}
entity.node_changed()
assert {
"node_id": node.node_id,
"node_name": "Mock Node",
"manufacturer_name": "Test Manufacturer",
"product_name": "Test Product",
"query_stage": "Dynamic",
"is_awake": True,
"is_ready": False,
"is_failed": False,
"is_info_received": True,
"max_baud_rate": 40000,
"is_zwave_plus": False,
"battery_level": 42,
"wake_up_interval": 1800,
"averageRequestRTT": 2462,
"averageResponseRTT": 2443,
"lastRequestRTT": 1591,
"lastResponseRTT": 3679,
"receivedCnt": 4,
"receivedDups": 1,
"receivedTS": "2017-03-27 15:38:19:298 ",
"receivedUnsolicited": 0,
"retries": 0,
"sentCnt": 7,
"sentFailed": 1,
"sentTS": "2017-03-27 15:38:15:620 ",
} == entity.device_state_attributes
node.can_wake_up_value = False
entity.node_changed()
assert "wake_up_interval" not in entity.device_state_attributes
async def test_name(hass, mock_openzwave):
"""Test name property."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert entity.name == "Mock Node"
async def test_state_before_update(hass, mock_openzwave):
"""Test state before update was called."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert entity.state is None
async def test_state_not_ready(hass, mock_openzwave):
"""Test state property."""
zwave_network = MagicMock()
node = mock_zwave.MockNode(
query_stage="Dynamic",
is_awake=True,
is_ready=False,
is_failed=False,
is_info_received=True,
)
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
node.is_ready = False
entity.node_changed()
assert entity.state == "initializing"
node.is_failed = True
node.query_stage = "Complete"
entity.node_changed()
assert entity.state == "dead"
node.is_failed = False
node.is_awake = False
entity.node_changed()
assert entity.state == "sleeping"
async def test_state_ready(hass, mock_openzwave):
"""Test state property."""
zwave_network = MagicMock()
node = mock_zwave.MockNode(
query_stage="Dynamic",
is_awake=True,
is_ready=False,
is_failed=False,
is_info_received=True,
)
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
node.query_stage = "Complete"
node.is_ready = True
entity.node_changed()
await hass.async_block_till_done()
assert entity.state == "ready"
node.is_failed = True
entity.node_changed()
assert entity.state == "dead"
node.is_failed = False
node.is_awake = False
entity.node_changed()
assert entity.state == "sleeping"
async def test_not_polled(hass, mock_openzwave):
"""Test should_poll property."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert not entity.should_poll
async def test_unique_id(hass, mock_openzwave):
"""Test unique_id."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert entity.unique_id == "node-567"
async def test_unique_id_missing_data(hass, mock_openzwave):
"""Test unique_id."""
zwave_network = MagicMock()
node = mock_zwave.MockNode()
node.manufacturer_name = None
node.name = None
node.is_ready = False
entity = node_entity.ZWaveNodeEntity(node, zwave_network)
assert entity.unique_id is None
|
import logging
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
def YumInstall(vm):
"""Installs build tools on the VM."""
vm.InstallPackageGroup('Development Tools')
def AptInstall(vm):
"""Installs build tools on the VM."""
vm.InstallPackages('build-essential git libtool autoconf automake')
def GetVersion(vm, pkg):
"""Get version of package."""
# TODO(user): Add gcc version to all samples similar to lscpu/proccpu.
out, _ = vm.RemoteCommand(
'{pkg} -dumpversion'.format(pkg=pkg), ignore_failure=True)
return out.rstrip()
def Reinstall(vm, version='4.7'):
"""Install specific version of gcc.
Args:
vm: VirtualMachine object.
version: string. GCC version.
Raises:
    Error: If this is run on a non-Debian-based system.
"""
# TODO(user): Make this work on yum based systems.
if vm.BASE_OS_TYPE != os_types.DEBIAN:
raise errors.Error('Updating GCC only works on Debian based systems.')
vm.RemoteCommand('sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y')
vm.RemoteCommand('sudo apt-get update')
for pkg in ('gcc', 'gfortran', 'g++'):
version_string = GetVersion(vm, pkg)
if version in version_string:
logging.info('Have expected version of %s: %s', pkg, version_string)
continue
else:
new_pkg = pkg + '-' + version
vm.InstallPackages(new_pkg)
vm.RemoteCommand('sudo rm -f /usr/bin/{pkg}'.format(pkg=pkg))
vm.RemoteCommand('sudo ln -s /usr/bin/{new_pkg} /usr/bin/{pkg}'.format(
new_pkg=new_pkg, pkg=pkg))
logging.info('Updated version of %s: Old: %s New: %s', pkg,
version_string, GetVersion(vm, pkg))
|
import pytest
from homeassistant.components.risco import CannotConnectError, UnauthorizedError
from homeassistant.components.risco.const import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity_component import async_update_entity
from .util import TEST_CONFIG, TEST_SITE_UUID, setup_risco
from tests.async_mock import MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
FIRST_ENTITY_ID = "binary_sensor.zone_0"
SECOND_ENTITY_ID = "binary_sensor.zone_1"
def _zone_mock():
return MagicMock(
triggered=False,
bypassed=False,
)
@pytest.fixture
def two_zone_alarm():
"""Fixture to mock alarm with two zones."""
zone_mocks = {0: _zone_mock(), 1: _zone_mock()}
alarm_mock = MagicMock()
with patch.object(
zone_mocks[0], "id", new_callable=PropertyMock(return_value=0)
), patch.object(
zone_mocks[0], "name", new_callable=PropertyMock(return_value="Zone 0")
), patch.object(
zone_mocks[1], "id", new_callable=PropertyMock(return_value=1)
), patch.object(
zone_mocks[1], "name", new_callable=PropertyMock(return_value="Zone 1")
), patch.object(
alarm_mock,
"zones",
new_callable=PropertyMock(return_value=zone_mocks),
), patch(
"homeassistant.components.risco.RiscoAPI.get_state",
return_value=alarm_mock,
):
yield alarm_mock
async def test_cannot_connect(hass):
"""Test connection error."""
with patch(
"homeassistant.components.risco.RiscoAPI.login",
side_effect=CannotConnectError,
):
config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
registry = await hass.helpers.entity_registry.async_get_registry()
assert not registry.async_is_registered(FIRST_ENTITY_ID)
assert not registry.async_is_registered(SECOND_ENTITY_ID)
async def test_unauthorized(hass):
"""Test unauthorized error."""
with patch(
"homeassistant.components.risco.RiscoAPI.login",
side_effect=UnauthorizedError,
):
config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_CONFIG)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
registry = await hass.helpers.entity_registry.async_get_registry()
assert not registry.async_is_registered(FIRST_ENTITY_ID)
assert not registry.async_is_registered(SECOND_ENTITY_ID)
async def test_setup(hass, two_zone_alarm):
"""Test entity setup."""
registry = await hass.helpers.entity_registry.async_get_registry()
assert not registry.async_is_registered(FIRST_ENTITY_ID)
assert not registry.async_is_registered(SECOND_ENTITY_ID)
await setup_risco(hass)
assert registry.async_is_registered(FIRST_ENTITY_ID)
assert registry.async_is_registered(SECOND_ENTITY_ID)
registry = await hass.helpers.device_registry.async_get_registry()
device = registry.async_get_device({(DOMAIN, TEST_SITE_UUID + "_zone_0")}, {})
assert device is not None
assert device.manufacturer == "Risco"
device = registry.async_get_device({(DOMAIN, TEST_SITE_UUID + "_zone_1")}, {})
assert device is not None
assert device.manufacturer == "Risco"
async def _check_state(hass, alarm, triggered, bypassed, entity_id, zone_id):
with patch.object(
alarm.zones[zone_id],
"triggered",
new_callable=PropertyMock(return_value=triggered),
), patch.object(
alarm.zones[zone_id],
"bypassed",
new_callable=PropertyMock(return_value=bypassed),
):
await async_update_entity(hass, entity_id)
await hass.async_block_till_done()
expected_triggered = STATE_ON if triggered else STATE_OFF
assert hass.states.get(entity_id).state == expected_triggered
assert hass.states.get(entity_id).attributes["bypassed"] == bypassed
async def test_states(hass, two_zone_alarm):
"""Test the various alarm states."""
await setup_risco(hass)
await _check_state(hass, two_zone_alarm, True, True, FIRST_ENTITY_ID, 0)
await _check_state(hass, two_zone_alarm, True, False, FIRST_ENTITY_ID, 0)
await _check_state(hass, two_zone_alarm, False, True, FIRST_ENTITY_ID, 0)
await _check_state(hass, two_zone_alarm, False, False, FIRST_ENTITY_ID, 0)
await _check_state(hass, two_zone_alarm, True, True, SECOND_ENTITY_ID, 1)
await _check_state(hass, two_zone_alarm, True, False, SECOND_ENTITY_ID, 1)
await _check_state(hass, two_zone_alarm, False, True, SECOND_ENTITY_ID, 1)
await _check_state(hass, two_zone_alarm, False, False, SECOND_ENTITY_ID, 1)
async def test_bypass(hass, two_zone_alarm):
"""Test bypassing a zone."""
await setup_risco(hass)
with patch("homeassistant.components.risco.RiscoAPI.bypass_zone") as mock:
data = {"entity_id": FIRST_ENTITY_ID}
await hass.services.async_call(
DOMAIN, "bypass_zone", service_data=data, blocking=True
)
mock.assert_awaited_once_with(0, True)
async def test_unbypass(hass, two_zone_alarm):
"""Test unbypassing a zone."""
await setup_risco(hass)
with patch("homeassistant.components.risco.RiscoAPI.bypass_zone") as mock:
data = {"entity_id": FIRST_ENTITY_ID}
await hass.services.async_call(
DOMAIN, "unbypass_zone", service_data=data, blocking=True
)
mock.assert_awaited_once_with(0, False)
|
from homeassistant.setup import async_setup_component
from tests.common import load_fixture
token = "9p6QGJ7dpZfO3fqPTBk1fyEmjV1cGoLT"
multi_sensor_token = "9r6QGF7dpZfO3fqPTBl1fyRmjV1cGoLT"
ONE_SENSOR_CONFIG = {
"platform": "efergy",
"app_token": token,
"utc_offset": "300",
"monitored_variables": [
{"type": "amount", "period": "day"},
{"type": "instant_readings"},
{"type": "budget"},
{"type": "cost", "period": "day", "currency": "$"},
{"type": "current_values"},
],
}
MULTI_SENSOR_CONFIG = {
"platform": "efergy",
"app_token": multi_sensor_token,
"utc_offset": "300",
"monitored_variables": [{"type": "current_values"}],
}
def mock_responses(mock):
"""Mock responses for Efergy."""
base_url = "https://engage.efergy.com/mobile_proxy/"
mock.get(
f"{base_url}getInstant?token={token}",
text=load_fixture("efergy_instant.json"),
)
mock.get(
f"{base_url}getEnergy?token={token}&offset=300&period=day",
text=load_fixture("efergy_energy.json"),
)
mock.get(
f"{base_url}getBudget?token={token}",
text=load_fixture("efergy_budget.json"),
)
mock.get(
f"{base_url}getCost?token={token}&offset=300&period=day",
text=load_fixture("efergy_cost.json"),
)
mock.get(
f"{base_url}getCurrentValuesSummary?token={token}",
text=load_fixture("efergy_current_values_single.json"),
)
mock.get(
f"{base_url}getCurrentValuesSummary?token={multi_sensor_token}",
text=load_fixture("efergy_current_values_multi.json"),
)
async def test_single_sensor_readings(hass, requests_mock):
"""Test for successfully setting up the Efergy platform."""
mock_responses(requests_mock)
assert await async_setup_component(hass, "sensor", {"sensor": ONE_SENSOR_CONFIG})
await hass.async_block_till_done()
assert "38.21" == hass.states.get("sensor.energy_consumed").state
assert "1580" == hass.states.get("sensor.energy_usage").state
assert "ok" == hass.states.get("sensor.energy_budget").state
assert "5.27" == hass.states.get("sensor.energy_cost").state
assert "1628" == hass.states.get("sensor.efergy_728386").state
async def test_multi_sensor_readings(hass, requests_mock):
"""Test for multiple sensors in one household."""
mock_responses(requests_mock)
assert await async_setup_component(hass, "sensor", {"sensor": MULTI_SENSOR_CONFIG})
await hass.async_block_till_done()
assert "218" == hass.states.get("sensor.efergy_728386").state
assert "1808" == hass.states.get("sensor.efergy_0").state
assert "312" == hass.states.get("sensor.efergy_728387").state
|
from unittest import TestCase
import pandas as pd
from scattertext import TermDocMatrixFilter
from scattertext import TermDocMatrixFromPandas
from scattertext import whitespace_nlp
from scattertext.TermDocMatrixFilter import AtLeastOneCategoryHasNoTermsException, filter_bigrams_by_pmis, \
unigrams_that_only_occur_in_one_bigram, filter_out_unigrams_that_only_occur_in_one_bigram
from scattertext.test.test_TermDocMat import get_hamlet_term_doc_matrix
class TestPMIFiltering(TestCase):
def test_main(self):
term_doc_mat = get_hamlet_term_doc_matrix()
pmi_filter = TermDocMatrixFilter(pmi_threshold_coef=4,
minimum_term_freq=3)
filtered_term_doc_mat = pmi_filter.filter(term_doc_mat)
self.assertLessEqual(len(filtered_term_doc_mat.get_term_freq_df()), len(term_doc_mat.get_term_freq_df()))
def _test_nothing_passes_filter_raise_error(self):
term_doc_mat = get_hamlet_term_doc_matrix()
pmi_filter = TermDocMatrixFilter(pmi_threshold_coef=4000,
minimum_term_freq=3000)
with self.assertRaises(AtLeastOneCategoryHasNoTermsException):
pmi_filter.filter(term_doc_mat)
def test_filter_bigrams_by_pmis(self):
term_doc_mat = get_hamlet_term_doc_matrix()
df = term_doc_mat.get_term_freq_df()
filtered_df = filter_bigrams_by_pmis(df, threshold_coef=3)
self.assertLess(len(filtered_df), len(df))
def test_unigrams_that_only_occur_in_one_bigram(self):
bigrams = set(['the cat', 'the saw', 'horses are', 'are pigs', 'pigs horses'])
expected = {'cat', 'saw'}
self.assertEqual(expected, unigrams_that_only_occur_in_one_bigram(bigrams))
def test_filter_out_unigrams_that_only_occur_in_one_bigram(self):
bigrams = ['the cat', 'the saw', 'horses are', 'are pigs', 'pigs horses']
df = TermDocMatrixFromPandas(
data_frame=pd.DataFrame({'text': bigrams,
'category': ['a', 'a', 'a', 'b', 'b']}),
category_col='category',
text_col='text',
nlp=whitespace_nlp
).build().get_term_freq_df()
new_df = filter_out_unigrams_that_only_occur_in_one_bigram(df)
self.assertFalse('cat' in new_df.index)
self.assertFalse('saw' in new_df.index)
self.assertTrue('the' in new_df.index)
self.assertTrue('horses' in new_df.index)
self.assertTrue('pigs' in new_df.index)
self.assertEqual(set(bigrams) & set(new_df.index), set(bigrams))
|
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, CONF_PIN
from homeassistant.core import HomeAssistant
from .const import CONF_NEGATE_STATE, CONF_PIN_MODE, DOMAIN
from .entity import FirmataPinEntity
from .pin import FirmataBinaryDigitalInput, FirmataPinUsedException
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Firmata binary sensors."""
new_entities = []
board = hass.data[DOMAIN][config_entry.entry_id]
for binary_sensor in board.binary_sensors:
pin = binary_sensor[CONF_PIN]
pin_mode = binary_sensor[CONF_PIN_MODE]
negate = binary_sensor[CONF_NEGATE_STATE]
api = FirmataBinaryDigitalInput(board, pin, pin_mode, negate)
try:
api.setup()
except FirmataPinUsedException:
_LOGGER.error(
"Could not setup binary sensor on pin %s since pin already in use",
binary_sensor[CONF_PIN],
)
continue
name = binary_sensor[CONF_NAME]
binary_sensor_entity = FirmataBinarySensor(api, config_entry, name, pin)
new_entities.append(binary_sensor_entity)
if new_entities:
async_add_entities(new_entities)
class FirmataBinarySensor(FirmataPinEntity, BinarySensorEntity):
"""Representation of a binary sensor on a Firmata board."""
async def async_added_to_hass(self) -> None:
"""Set up a binary sensor."""
await self._api.start_pin(self.async_write_ha_state)
async def async_will_remove_from_hass(self) -> None:
"""Stop reporting a binary sensor."""
await self._api.stop_pin()
@property
def is_on(self) -> bool:
"""Return true if binary sensor is on."""
return self._api.is_on
|
import contextlib
import posixpath
import socket
import xml.etree.ElementTree as ET
from http import client
from urllib.parse import unquote, urlparse
from radicale import app, httputils, pathutils, storage, xmlutils
from radicale.item import filter as radicale_filter
from radicale.log import logger
def xml_report(base_prefix, path, xml_request, collection, encoding,
unlock_storage_fn):
"""Read and answer REPORT requests.
Read rfc3253-3.6 for info.
"""
multistatus = ET.Element(xmlutils.make_clark("D:multistatus"))
if xml_request is None:
return client.MULTI_STATUS, multistatus
root = xml_request
if root.tag in (
xmlutils.make_clark("D:principal-search-property-set"),
xmlutils.make_clark("D:principal-property-search"),
xmlutils.make_clark("D:expand-property")):
# We don't support searching for principals or indirect retrieving of
# properties, just return an empty result.
# InfCloud asks for expand-property reports (even if we don't announce
# support for them) and stops working if an error code is returned.
logger.warning("Unsupported REPORT method %r on %r requested",
xmlutils.make_human_tag(root.tag), path)
return client.MULTI_STATUS, multistatus
if (root.tag == xmlutils.make_clark("C:calendar-multiget") and
collection.get_meta("tag") != "VCALENDAR" or
root.tag == xmlutils.make_clark("CR:addressbook-multiget") and
collection.get_meta("tag") != "VADDRESSBOOK" or
root.tag == xmlutils.make_clark("D:sync-collection") and
collection.get_meta("tag") not in ("VADDRESSBOOK", "VCALENDAR")):
logger.warning("Invalid REPORT method %r on %r requested",
xmlutils.make_human_tag(root.tag), path)
return (client.FORBIDDEN,
xmlutils.webdav_error("D:supported-report"))
prop_element = root.find(xmlutils.make_clark("D:prop"))
props = (
[prop.tag for prop in prop_element]
if prop_element is not None else [])
if root.tag in (
xmlutils.make_clark("C:calendar-multiget"),
xmlutils.make_clark("CR:addressbook-multiget")):
# Read rfc4791-7.9 for info
hreferences = set()
for href_element in root.findall(xmlutils.make_clark("D:href")):
href_path = pathutils.sanitize_path(
unquote(urlparse(href_element.text).path))
if (href_path + "/").startswith(base_prefix + "/"):
hreferences.add(href_path[len(base_prefix):])
else:
logger.warning("Skipping invalid path %r in REPORT request on "
"%r", href_path, path)
elif root.tag == xmlutils.make_clark("D:sync-collection"):
old_sync_token_element = root.find(
xmlutils.make_clark("D:sync-token"))
old_sync_token = ""
if old_sync_token_element is not None and old_sync_token_element.text:
old_sync_token = old_sync_token_element.text.strip()
logger.debug("Client provided sync token: %r", old_sync_token)
try:
sync_token, names = collection.sync(old_sync_token)
except ValueError as e:
# Invalid sync token
logger.warning("Client provided invalid sync token %r: %s",
old_sync_token, e, exc_info=True)
# client.CONFLICT doesn't work with some clients (e.g. InfCloud)
return (client.FORBIDDEN,
xmlutils.webdav_error("D:valid-sync-token"))
hreferences = (pathutils.unstrip_path(
posixpath.join(collection.path, n)) for n in names)
# Append current sync token to response
sync_token_element = ET.Element(xmlutils.make_clark("D:sync-token"))
sync_token_element.text = sync_token
multistatus.append(sync_token_element)
else:
hreferences = (path,)
filters = (
root.findall(xmlutils.make_clark("C:filter")) +
root.findall(xmlutils.make_clark("CR:filter")))
def retrieve_items(collection, hreferences, multistatus):
"""Retrieves all items that are referenced in ``hreferences`` from
``collection`` and adds 404 responses for missing and invalid items
to ``multistatus``."""
collection_requested = False
def get_names():
"""Extracts all names from references in ``hreferences`` and adds
404 responses for invalid references to ``multistatus``.
If the whole collections is referenced ``collection_requested``
gets set to ``True``."""
nonlocal collection_requested
for hreference in hreferences:
try:
name = pathutils.name_from_path(hreference, collection)
except ValueError as e:
logger.warning("Skipping invalid path %r in REPORT request"
" on %r: %s", hreference, path, e)
response = xml_item_response(base_prefix, hreference,
found_item=False)
multistatus.append(response)
continue
if name:
# Reference is an item
yield name
else:
# Reference is a collection
collection_requested = True
for name, item in collection.get_multi(get_names()):
if not item:
uri = pathutils.unstrip_path(
posixpath.join(collection.path, name))
response = xml_item_response(base_prefix, uri,
found_item=False)
multistatus.append(response)
else:
yield item, False
if collection_requested:
yield from collection.get_filtered(filters)
# Retrieve everything required for finishing the request.
retrieved_items = list(retrieve_items(collection, hreferences,
multistatus))
collection_tag = collection.get_meta("tag")
# Don't access storage after this!
unlock_storage_fn()
def match(item, filter_):
tag = collection_tag
if (tag == "VCALENDAR" and
filter_.tag != xmlutils.make_clark("C:%s" % filter_)):
if len(filter_) == 0:
return True
if len(filter_) > 1:
raise ValueError("Filter with %d children" % len(filter_))
if filter_[0].tag != xmlutils.make_clark("C:comp-filter"):
raise ValueError("Unexpected %r in filter" % filter_[0].tag)
return radicale_filter.comp_match(item, filter_[0])
if (tag == "VADDRESSBOOK" and
filter_.tag != xmlutils.make_clark("CR:%s" % filter_)):
for child in filter_:
if child.tag != xmlutils.make_clark("CR:prop-filter"):
raise ValueError("Unexpected %r in filter" % child.tag)
test = filter_.get("test", "anyof")
if test == "anyof":
return any(
radicale_filter.prop_match(item.vobject_item, f, "CR")
for f in filter_)
if test == "allof":
return all(
radicale_filter.prop_match(item.vobject_item, f, "CR")
for f in filter_)
raise ValueError("Unsupported filter test: %r" % test)
raise ValueError("Unsupported filter %r for %r" % (filter_.tag, tag))
while retrieved_items:
# ``item.vobject_item`` might be accessed during filtering.
# Don't keep reference to ``item``, because VObject requires a lot of
# memory.
item, filters_matched = retrieved_items.pop(0)
if filters and not filters_matched:
try:
if not all(match(item, filter_) for filter_ in filters):
continue
except ValueError as e:
raise ValueError("Failed to filter item %r from %r: %s" %
(item.href, collection.path, e)) from e
except Exception as e:
raise RuntimeError("Failed to filter item %r from %r: %s" %
(item.href, collection.path, e)) from e
found_props = []
not_found_props = []
for tag in props:
element = ET.Element(tag)
if tag == xmlutils.make_clark("D:getetag"):
element.text = item.etag
found_props.append(element)
elif tag == xmlutils.make_clark("D:getcontenttype"):
element.text = xmlutils.get_content_type(item, encoding)
found_props.append(element)
elif tag in (
xmlutils.make_clark("C:calendar-data"),
xmlutils.make_clark("CR:address-data")):
element.text = item.serialize()
found_props.append(element)
else:
not_found_props.append(element)
uri = pathutils.unstrip_path(
posixpath.join(collection.path, item.href))
multistatus.append(xml_item_response(
base_prefix, uri, found_props=found_props,
not_found_props=not_found_props, found_item=True))
return client.MULTI_STATUS, multistatus
def xml_item_response(base_prefix, href, found_props=(), not_found_props=(),
found_item=True):
response = ET.Element(xmlutils.make_clark("D:response"))
href_element = ET.Element(xmlutils.make_clark("D:href"))
href_element.text = xmlutils.make_href(base_prefix, href)
response.append(href_element)
if found_item:
for code, props in ((200, found_props), (404, not_found_props)):
if props:
propstat = ET.Element(xmlutils.make_clark("D:propstat"))
status = ET.Element(xmlutils.make_clark("D:status"))
status.text = xmlutils.make_response(code)
prop_element = ET.Element(xmlutils.make_clark("D:prop"))
for prop in props:
prop_element.append(prop)
propstat.append(prop_element)
propstat.append(status)
response.append(propstat)
else:
status = ET.Element(xmlutils.make_clark("D:status"))
status.text = xmlutils.make_response(404)
response.append(status)
return response
class ApplicationReportMixin:
def do_REPORT(self, environ, base_prefix, path, user):
"""Manage REPORT request."""
access = app.Access(self._rights, user, path)
if not access.check("r"):
return httputils.NOT_ALLOWED
try:
xml_content = self._read_xml_request_body(environ)
except RuntimeError as e:
logger.warning(
"Bad REPORT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
except socket.timeout:
logger.debug("Client timed out", exc_info=True)
return httputils.REQUEST_TIMEOUT
with contextlib.ExitStack() as lock_stack:
lock_stack.enter_context(self._storage.acquire_lock("r", user))
item = next(self._storage.discover(path), None)
if not item:
return httputils.NOT_FOUND
if not access.check("r", item):
return httputils.NOT_ALLOWED
if isinstance(item, storage.BaseCollection):
collection = item
else:
collection = item.collection
headers = {"Content-Type": "text/xml; charset=%s" % self._encoding}
try:
status, xml_answer = xml_report(
base_prefix, path, xml_content, collection, self._encoding,
lock_stack.close)
except ValueError as e:
logger.warning(
"Bad REPORT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
return status, headers, self._xml_response(xml_answer)
|
import contextlib
from homeassistant.components.cloud import ALEXA_SCHEMA, alexa_config
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.util.dt import utcnow
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import async_fire_time_changed
async def test_alexa_config_expose_entity_prefs(hass, cloud_prefs):
"""Test Alexa config should expose using prefs."""
entity_conf = {"should_expose": False}
await cloud_prefs.async_update(
alexa_entity_configs={"light.kitchen": entity_conf},
alexa_default_expose=["light"],
)
conf = alexa_config.AlexaConfig(hass, ALEXA_SCHEMA({}), cloud_prefs, None)
assert not conf.should_expose("light.kitchen")
entity_conf["should_expose"] = True
assert conf.should_expose("light.kitchen")
entity_conf["should_expose"] = None
assert conf.should_expose("light.kitchen")
await cloud_prefs.async_update(
alexa_default_expose=["sensor"],
)
assert not conf.should_expose("light.kitchen")
async def test_alexa_config_report_state(hass, cloud_prefs):
"""Test Alexa config should expose using prefs."""
conf = alexa_config.AlexaConfig(hass, ALEXA_SCHEMA({}), cloud_prefs, None)
assert cloud_prefs.alexa_report_state is False
assert conf.should_report_state is False
assert conf.is_reporting_states is False
with patch.object(conf, "async_get_access_token", AsyncMock(return_value="hello")):
await cloud_prefs.async_update(alexa_report_state=True)
await hass.async_block_till_done()
assert cloud_prefs.alexa_report_state is True
assert conf.should_report_state is True
assert conf.is_reporting_states is True
await cloud_prefs.async_update(alexa_report_state=False)
await hass.async_block_till_done()
assert cloud_prefs.alexa_report_state is False
assert conf.should_report_state is False
assert conf.is_reporting_states is False
async def test_alexa_config_invalidate_token(hass, cloud_prefs, aioclient_mock):
"""Test Alexa config should expose using prefs."""
aioclient_mock.post(
"http://example/alexa_token",
json={
"access_token": "mock-token",
"event_endpoint": "http://example.com/alexa_endpoint",
"expires_in": 30,
},
)
conf = alexa_config.AlexaConfig(
hass,
ALEXA_SCHEMA({}),
cloud_prefs,
Mock(
alexa_access_token_url="http://example/alexa_token",
auth=Mock(async_check_token=AsyncMock()),
websession=hass.helpers.aiohttp_client.async_get_clientsession(),
),
)
token = await conf.async_get_access_token()
assert token == "mock-token"
assert len(aioclient_mock.mock_calls) == 1
token = await conf.async_get_access_token()
assert token == "mock-token"
assert len(aioclient_mock.mock_calls) == 1
assert conf._token_valid is not None
conf.async_invalidate_access_token()
assert conf._token_valid is None
token = await conf.async_get_access_token()
assert token == "mock-token"
assert len(aioclient_mock.mock_calls) == 2
@contextlib.contextmanager
def patch_sync_helper():
"""Patch sync helper.
In Py3.7 this would have been an async context manager.
"""
to_update = []
to_remove = []
def sync_helper(to_upd, to_rem):
to_update.extend([ent_id for ent_id in to_upd if ent_id not in to_update])
to_remove.extend([ent_id for ent_id in to_rem if ent_id not in to_remove])
return True
with patch("homeassistant.components.cloud.alexa_config.SYNC_DELAY", 0), patch(
"homeassistant.components.cloud.alexa_config.AlexaConfig._sync_helper",
side_effect=sync_helper,
):
yield to_update, to_remove
async def test_alexa_update_expose_trigger_sync(hass, cloud_prefs):
"""Test Alexa config responds to updating exposed entities."""
alexa_config.AlexaConfig(hass, ALEXA_SCHEMA({}), cloud_prefs, None)
with patch_sync_helper() as (to_update, to_remove):
await cloud_prefs.async_update_alexa_entity_config(
entity_id="light.kitchen", should_expose=True
)
await hass.async_block_till_done()
async_fire_time_changed(hass, utcnow())
await hass.async_block_till_done()
assert to_update == ["light.kitchen"]
assert to_remove == []
with patch_sync_helper() as (to_update, to_remove):
await cloud_prefs.async_update_alexa_entity_config(
entity_id="light.kitchen", should_expose=False
)
await cloud_prefs.async_update_alexa_entity_config(
entity_id="binary_sensor.door", should_expose=True
)
await cloud_prefs.async_update_alexa_entity_config(
entity_id="sensor.temp", should_expose=True
)
await hass.async_block_till_done()
async_fire_time_changed(hass, utcnow())
await hass.async_block_till_done()
assert sorted(to_update) == ["binary_sensor.door", "sensor.temp"]
assert to_remove == ["light.kitchen"]
async def test_alexa_entity_registry_sync(hass, mock_cloud_login, cloud_prefs):
"""Test Alexa config responds to entity registry."""
alexa_config.AlexaConfig(hass, ALEXA_SCHEMA({}), cloud_prefs, hass.data["cloud"])
with patch_sync_helper() as (to_update, to_remove):
hass.bus.async_fire(
EVENT_ENTITY_REGISTRY_UPDATED,
{"action": "create", "entity_id": "light.kitchen"},
)
await hass.async_block_till_done()
assert to_update == ["light.kitchen"]
assert to_remove == []
with patch_sync_helper() as (to_update, to_remove):
hass.bus.async_fire(
EVENT_ENTITY_REGISTRY_UPDATED,
{"action": "remove", "entity_id": "light.kitchen"},
)
await hass.async_block_till_done()
assert to_update == []
assert to_remove == ["light.kitchen"]
with patch_sync_helper() as (to_update, to_remove):
hass.bus.async_fire(
EVENT_ENTITY_REGISTRY_UPDATED,
{
"action": "update",
"entity_id": "light.kitchen",
"changes": ["entity_id"],
"old_entity_id": "light.living_room",
},
)
await hass.async_block_till_done()
assert to_update == ["light.kitchen"]
assert to_remove == ["light.living_room"]
with patch_sync_helper() as (to_update, to_remove):
hass.bus.async_fire(
EVENT_ENTITY_REGISTRY_UPDATED,
{"action": "update", "entity_id": "light.kitchen", "changes": ["icon"]},
)
await hass.async_block_till_done()
assert to_update == []
assert to_remove == []
async def test_alexa_update_report_state(hass, cloud_prefs):
"""Test Alexa config responds to reporting state."""
alexa_config.AlexaConfig(hass, ALEXA_SCHEMA({}), cloud_prefs, None)
with patch(
"homeassistant.components.cloud.alexa_config.AlexaConfig.async_sync_entities",
) as mock_sync, patch(
"homeassistant.components.cloud.alexa_config.AlexaConfig.async_enable_proactive_mode",
):
await cloud_prefs.async_update(alexa_report_state=True)
await hass.async_block_till_done()
assert len(mock_sync.mock_calls) == 1
|
import numpy as np
import unittest
from chainer.dataset import DatasetMixin
from chainercv.utils import assert_is_semantic_segmentation_dataset
from chainercv.utils import testing
class SemanticSegmentationDataset(DatasetMixin):
def __init__(self, *options):
self.options = options
def __len__(self):
return 10
def get_example(self, i):
img = np.random.randint(0, 256, size=(3, 48, 64))
label = np.random.randint(-1, 21, size=(48, 64)).astype(np.int32)
return (img, label) + self.options
class InvalidSampleSizeDataset(SemanticSegmentationDataset):
def get_example(self, i):
img, label = super(
InvalidSampleSizeDataset, self).get_example(i)[:2]
return img
class InvalidImageDataset(SemanticSegmentationDataset):
def get_example(self, i):
img, label = super(InvalidImageDataset, self).get_example(i)[:2]
return img[0], label
class InvalidLabelDataset(SemanticSegmentationDataset):
def get_example(self, i):
img, label = super(InvalidLabelDataset, self).get_example(i)[:2]
label += 1000
return img, label
@testing.parameterize(
{'dataset': SemanticSegmentationDataset(), 'valid': True},
{'dataset': SemanticSegmentationDataset('option'), 'valid': True},
{'dataset': InvalidSampleSizeDataset(), 'valid': False},
{'dataset': InvalidImageDataset(), 'valid': False},
{'dataset': InvalidLabelDataset(), 'valid': False},
)
class TestAssertIsSemanticSegmentationDataset(unittest.TestCase):
def test_assert_is_semantic_segmentation_dataset(self):
if self.valid:
assert_is_semantic_segmentation_dataset(self.dataset, 21)
else:
with self.assertRaises(AssertionError):
assert_is_semantic_segmentation_dataset(self.dataset, 21)
testing.run_module(__name__, __file__)
|
import numpy as np
import pandas as pd
from ..core.common import _contains_datetime_like_objects
from .cftime_offsets import _MONTH_ABBREVIATIONS
from .cftimeindex import CFTimeIndex
_ONE_MICRO = 1
_ONE_MILLI = _ONE_MICRO * 1000
_ONE_SECOND = _ONE_MILLI * 1000
_ONE_MINUTE = 60 * _ONE_SECOND
_ONE_HOUR = 60 * _ONE_MINUTE
_ONE_DAY = 24 * _ONE_HOUR
def infer_freq(index):
"""
Infer the most likely frequency given the input index.
Parameters
----------
index : CFTimeIndex, DataArray, DatetimeIndex, TimedeltaIndex, Series
      If not passed a CFTimeIndex, this simply calls `pandas.infer_freq`.
      If passed a Series or a DataArray, the values of the series (NOT THE INDEX) are used.
Returns
-------
str or None
None if no discernible frequency.
Raises
------
TypeError
If the index is not datetime-like.
ValueError
If there are fewer than three values or the index is not 1D.
"""
from xarray.core.dataarray import DataArray
if isinstance(index, (DataArray, pd.Series)):
if index.ndim != 1:
raise ValueError("'index' must be 1D")
elif not _contains_datetime_like_objects(DataArray(index)):
raise ValueError("'index' must contain datetime-like objects")
dtype = np.asarray(index).dtype
if dtype == "datetime64[ns]":
index = pd.DatetimeIndex(index.values)
elif dtype == "timedelta64[ns]":
index = pd.TimedeltaIndex(index.values)
else:
index = CFTimeIndex(index.values)
if isinstance(index, CFTimeIndex):
inferer = _CFTimeFrequencyInferer(index)
return inferer.get_freq()
return pd.infer_freq(index)
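# Illustrative usage sketch (not part of this module), via xarray's public
# ``infer_freq`` wrapper and assuming ``xr.cftime_range`` is available:
#
#     import xarray as xr
#     idx = xr.cftime_range("2000-01-01", periods=4, freq="D")
#     xr.infer_freq(idx)  # -> "D", handled by _CFTimeFrequencyInferer below
#
# A plain pandas DatetimeIndex or TimedeltaIndex instead takes the
# ``pd.infer_freq`` path above.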
class _CFTimeFrequencyInferer: # (pd.tseries.frequencies._FrequencyInferer):
def __init__(self, index):
self.index = index
self.values = index.asi8
if len(index) < 3:
raise ValueError("Need at least 3 dates to infer frequency")
self.is_monotonic = (
self.index.is_monotonic_decreasing or self.index.is_monotonic_increasing
)
self._deltas = None
self._year_deltas = None
self._month_deltas = None
def get_freq(self):
"""Find the appropriate frequency string to describe the inferred frequency of self.index
        Adapted from `pandas.tseries.frequencies._FrequencyInferer.get_freq` for CFTimeIndexes.
Returns
-------
str or None
"""
if not self.is_monotonic or not self.index.is_unique:
return None
delta = self.deltas[0] # Smallest delta
if _is_multiple(delta, _ONE_DAY):
return self._infer_daily_rule()
# There is no possible intraday frequency with a non-unique delta
# Different from pandas: we don't need to manage DST and business offsets in cftime
elif not len(self.deltas) == 1:
return None
if _is_multiple(delta, _ONE_HOUR):
return _maybe_add_count("H", delta / _ONE_HOUR)
elif _is_multiple(delta, _ONE_MINUTE):
return _maybe_add_count("T", delta / _ONE_MINUTE)
elif _is_multiple(delta, _ONE_SECOND):
return _maybe_add_count("S", delta / _ONE_SECOND)
elif _is_multiple(delta, _ONE_MILLI):
return _maybe_add_count("L", delta / _ONE_MILLI)
else:
return _maybe_add_count("U", delta / _ONE_MICRO)
def _infer_daily_rule(self):
annual_rule = self._get_annual_rule()
if annual_rule:
nyears = self.year_deltas[0]
month = _MONTH_ABBREVIATIONS[self.index[0].month]
alias = f"{annual_rule}-{month}"
return _maybe_add_count(alias, nyears)
        quarterly_rule = self._get_quarterly_rule()
        if quarterly_rule:
            nquarters = self.month_deltas[0] / 3
            mod_dict = {0: 12, 2: 11, 1: 10}
            month = _MONTH_ABBREVIATIONS[mod_dict[self.index[0].month % 3]]
            alias = f"{quarterly_rule}-{month}"
return _maybe_add_count(alias, nquarters)
monthly_rule = self._get_monthly_rule()
if monthly_rule:
return _maybe_add_count(monthly_rule, self.month_deltas[0])
if len(self.deltas) == 1:
            # Daily, as there are no "weekly" offsets with CFTime
days = self.deltas[0] / _ONE_DAY
return _maybe_add_count("D", days)
# CFTime has no business freq and no "week of month" (WOM)
return None
def _get_annual_rule(self):
if len(self.year_deltas) > 1:
return None
if len(np.unique(self.index.month)) > 1:
return None
return {"cs": "AS", "ce": "A"}.get(month_anchor_check(self.index))
    def _get_quarterly_rule(self):
if len(self.month_deltas) > 1:
return None
if not self.month_deltas[0] % 3 == 0:
return None
return {"cs": "QS", "ce": "Q"}.get(month_anchor_check(self.index))
def _get_monthly_rule(self):
if len(self.month_deltas) > 1:
return None
return {"cs": "MS", "ce": "M"}.get(month_anchor_check(self.index))
@property
def deltas(self):
"""Sorted unique timedeltas as microseconds."""
if self._deltas is None:
self._deltas = _unique_deltas(self.values)
return self._deltas
@property
def year_deltas(self):
"""Sorted unique year deltas."""
if self._year_deltas is None:
self._year_deltas = _unique_deltas(self.index.year)
return self._year_deltas
@property
def month_deltas(self):
"""Sorted unique month deltas."""
if self._month_deltas is None:
self._month_deltas = _unique_deltas(self.index.year * 12 + self.index.month)
return self._month_deltas
def _unique_deltas(arr):
"""Sorted unique deltas of numpy array"""
return np.sort(np.unique(np.diff(arr)))
def _is_multiple(us, mult: int):
"""Whether us is a multiple of mult"""
return us % mult == 0
def _maybe_add_count(base: str, count: float):
"""If count is greater than 1, add it to the base offset string"""
if count != 1:
assert count == int(count)
count = int(count)
return f"{count}{base}"
else:
return base
def month_anchor_check(dates):
"""Return the monthly offset string.
Return "cs" if all dates are the first days of the month,
"ce" if all dates are the last day of the month,
None otherwise.
    Replicates pandas._libs.tslibs.resolution.month_position_check
but without business offset handling.
"""
calendar_end = True
calendar_start = True
for date in dates:
if calendar_start:
calendar_start &= date.day == 1
if calendar_end:
cal = date.day == date.daysinmonth
if calendar_end:
calendar_end &= cal
elif not calendar_start:
break
if calendar_end:
return "ce"
elif calendar_start:
return "cs"
else:
return None
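# Example (illustrative, not part of the module): ``month_anchor_check``
# classifies a sequence of dates as month-start ("cs"), month-end ("ce"),
# or neither (None). Assumes ``cftime`` is installed for ``xr.cftime_range``.
#
# >>> import xarray as xr
# >>> month_anchor_check(xr.cftime_range("2000-01-01", periods=3, freq="MS"))
# 'cs'
# >>> month_anchor_check(xr.cftime_range("2000-01-31", periods=3, freq="M"))
# 'ce'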
|
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import (
CONF_EVENT,
CONF_OFFSET,
CONF_PLATFORM,
SUN_EVENT_SUNRISE,
)
from homeassistant.core import HassJob, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_sunrise, async_track_sunset
# mypy: allow-untyped-defs, no-check-untyped-defs
TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_PLATFORM): "sun",
vol.Required(CONF_EVENT): cv.sun_event,
vol.Required(CONF_OFFSET, default=timedelta(0)): cv.time_period,
}
)
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for events based on configuration."""
event = config.get(CONF_EVENT)
offset = config.get(CONF_OFFSET)
description = event
if offset:
description = f"{description} with offset"
job = HassJob(action)
@callback
def call_action():
"""Call action with right context."""
hass.async_run_hass_job(
job,
{
"trigger": {
"platform": "sun",
"event": event,
"offset": offset,
"description": description,
}
},
)
if event == SUN_EVENT_SUNRISE:
return async_track_sunrise(hass, call_action, offset)
return async_track_sunset(hass, call_action, offset)
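# Minimal sketch (not part of the integration): TRIGGER_SCHEMA can be
# exercised offline to inspect the normalized config. The offset string
# below is an illustrative value accepted by cv.time_period.
if __name__ == "__main__":
    validated = TRIGGER_SCHEMA(
        {"platform": "sun", "event": "sunset", "offset": "-00:30:00"}
    )
    print(validated["event"], validated["offset"])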
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from absl import logging
from compare_gan.architectures import abstract_arch
from compare_gan.architectures import arch_ops as ops
from compare_gan.architectures import resnet_ops
import gin
from six.moves import range
import tensorflow as tf
@gin.configurable
class BigGanDeepResNetBlock(object):
"""ResNet block with bottleneck and identity preserving skip connections."""
def __init__(self,
name,
in_channels,
out_channels,
scale,
spectral_norm=False,
batch_norm=None):
"""Constructs a new ResNet block with bottleneck.
Args:
      name: Scope name for the ResNet block.
in_channels: Integer, the input channel size.
out_channels: Integer, the output channel size.
      scale: Scaling direction; one of "up", "down" or "none".
spectral_norm: Use spectral normalization for all weights.
batch_norm: Function for batch normalization.
"""
assert scale in ["up", "down", "none"]
self._name = name
self._in_channels = in_channels
self._out_channels = out_channels
self._scale = scale
self._spectral_norm = spectral_norm
self.batch_norm = batch_norm
def __call__(self, inputs, z, y, is_training):
return self.apply(inputs=inputs, z=z, y=y, is_training=is_training)
def _shortcut(self, inputs):
"""Constructs a skip connection from inputs."""
with tf.variable_scope("shortcut", values=[inputs]):
shortcut = inputs
num_channels = inputs.shape[-1].value
if num_channels > self._out_channels:
assert self._scale == "up"
# Drop redundant channels.
logging.info("[Shortcut] Dropping %d channels in shortcut.",
num_channels - self._out_channels)
shortcut = shortcut[:, :, :, :self._out_channels]
if self._scale == "up":
shortcut = resnet_ops.unpool(shortcut)
if self._scale == "down":
shortcut = tf.nn.pool(shortcut, [2, 2], "AVG", "SAME",
strides=[2, 2], name="pool")
if num_channels < self._out_channels:
assert self._scale == "down"
# Increase number of channels if necessary.
num_missing = self._out_channels - num_channels
logging.info("[Shortcut] Adding %d channels in shortcut.", num_missing)
added = ops.conv1x1(shortcut, num_missing, name="add_channels",
use_sn=self._spectral_norm)
shortcut = tf.concat([shortcut, added], axis=-1)
return shortcut
def apply(self, inputs, z, y, is_training):
""""ResNet block containing possible down/up sampling, shared for G / D.
Args:
inputs: a 3d input tensor of feature map.
z: the latent vector for potential self-modulation. Can be None if use_sbn
is set to False.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
      is_training: Boolean, whether or not this is called during the training.
Returns:
output: a 3d output tensor of feature map.
"""
if inputs.shape[-1].value != self._in_channels:
raise ValueError(
"Unexpected number of input channels (expected {}, got {}).".format(
self._in_channels, inputs.shape[-1].value))
bottleneck_channels = max(self._in_channels, self._out_channels) // 4
bn = functools.partial(self.batch_norm, z=z, y=y, is_training=is_training)
conv1x1 = functools.partial(ops.conv1x1, use_sn=self._spectral_norm)
conv3x3 = functools.partial(ops.conv2d, k_h=3, k_w=3, d_h=1, d_w=1,
use_sn=self._spectral_norm)
with tf.variable_scope(self._name, values=[inputs]):
outputs = inputs
with tf.variable_scope("conv1", values=[outputs]):
outputs = bn(outputs, name="bn")
outputs = tf.nn.relu(outputs)
outputs = conv1x1(outputs, bottleneck_channels, name="1x1_conv")
with tf.variable_scope("conv2", values=[outputs]):
outputs = bn(outputs, name="bn")
outputs = tf.nn.relu(outputs)
if self._scale == "up":
outputs = resnet_ops.unpool(outputs)
outputs = conv3x3(outputs, bottleneck_channels, name="3x3_conv")
with tf.variable_scope("conv3", values=[outputs]):
outputs = bn(outputs, name="bn")
outputs = tf.nn.relu(outputs)
outputs = conv3x3(outputs, bottleneck_channels, name="3x3_conv")
with tf.variable_scope("conv4", values=[outputs]):
outputs = bn(outputs, name="bn")
outputs = tf.nn.relu(outputs)
if self._scale == "down":
outputs = tf.nn.pool(outputs, [2, 2], "AVG", "SAME", strides=[2, 2],
name="avg_pool")
outputs = conv1x1(outputs, self._out_channels, name="1x1_conv")
# Add skip-connection.
outputs += self._shortcut(inputs)
logging.info("[Block] %s (z=%s, y=%s) -> %s", inputs.shape,
None if z is None else z.shape,
None if y is None else y.shape, outputs.shape)
return outputs
@gin.configurable
class Generator(abstract_arch.AbstractGenerator):
"""ResNet-based generator supporting resolutions 32, 64, 128, 256, 512."""
def __init__(self,
ch=128,
embed_y=True,
embed_y_dim=128,
experimental_fast_conv_to_rgb=False,
**kwargs):
"""Constructor for BigGAN generator.
Args:
ch: Channel multiplier.
      embed_y: If True, use a learnable embedding of y instead of y itself.
      embed_y_dim: Size of the embedding of y.
      experimental_fast_conv_to_rgb: If True, optimize the last convolution by
        trading memory for better speed.
      **kwargs: Additional arguments passed on to ResNetGenerator.
"""
super(Generator, self).__init__(**kwargs)
self._ch = ch
self._embed_y = embed_y
self._embed_y_dim = embed_y_dim
self._experimental_fast_conv_to_rgb = experimental_fast_conv_to_rgb
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["up", "none"]:
raise ValueError(
"Unknown generator ResNet block scaling: {}.".format(scale))
return BigGanDeepResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
def _get_in_out_channels(self):
# See Table 7-9.
resolution = self._image_shape[0]
if resolution == 512:
channel_multipliers = 4 * [16] + 4 * [8] + [4, 4, 2, 2, 1, 1, 1]
elif resolution == 256:
channel_multipliers = 4 * [16] + 4 * [8] + [4, 4, 2, 2, 1]
elif resolution == 128:
channel_multipliers = 4 * [16] + 2 * [8] + [4, 4, 2, 2, 1]
elif resolution == 64:
channel_multipliers = 4 * [16] + 2 * [8] + [4, 4, 2]
elif resolution == 32:
channel_multipliers = 8 * [4]
else:
raise ValueError("Unsupported resolution: {}".format(resolution))
in_channels = [self._ch * c for c in channel_multipliers[:-1]]
out_channels = [self._ch * c for c in channel_multipliers[1:]]
return in_channels, out_channels
def apply(self, z, y, is_training):
"""Build the generator network for the given inputs.
Args:
z: `Tensor` of shape [batch_size, z_dim] with latent code.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
      is_training: Boolean, whether we are in train or eval mode.
Returns:
A tensor of size [batch_size] + self._image_shape with values in [0, 1].
"""
shape_or_none = lambda t: None if t is None else t.shape
logging.info("[Generator] inputs are z=%s, y=%s", z.shape, shape_or_none(y))
seed_size = 4
if self._embed_y:
y = ops.linear(y, self._embed_y_dim, scope="embed_y", use_sn=False,
use_bias=False)
if y is not None:
y = tf.concat([z, y], axis=1)
z = y
in_channels, out_channels = self._get_in_out_channels()
num_blocks = len(in_channels)
# Map noise to the actual seed.
net = ops.linear(
z,
in_channels[0] * seed_size * seed_size,
scope="fc_noise",
use_sn=self._spectral_norm)
# Reshape the seed to be a rank-4 Tensor.
net = tf.reshape(
net,
[-1, seed_size, seed_size, in_channels[0]],
name="fc_reshaped")
for block_idx in range(num_blocks):
scale = "none" if block_idx % 2 == 0 else "up"
block = self._resnet_block(
name="B{}".format(block_idx + 1),
in_channels=in_channels[block_idx],
out_channels=out_channels[block_idx],
scale=scale)
net = block(net, z=z, y=y, is_training=is_training)
# At resolution 64x64 there is a self-attention block.
if scale == "up" and net.shape[1].value == 64:
logging.info("[Generator] Applying non-local block to %s", net.shape)
net = ops.non_local_block(net, "non_local_block",
use_sn=self._spectral_norm)
# Final processing of the net.
# Use unconditional batch norm.
logging.info("[Generator] before final processing: %s", net.shape)
net = ops.batch_norm(net, is_training=is_training, name="final_norm")
net = tf.nn.relu(net)
colors = self._image_shape[2]
if self._experimental_fast_conv_to_rgb:
net = ops.conv2d(net, output_dim=128, k_h=3, k_w=3,
d_h=1, d_w=1, name="final_conv",
use_sn=self._spectral_norm)
net = net[:, :, :, :colors]
else:
net = ops.conv2d(net, output_dim=colors, k_h=3, k_w=3,
d_h=1, d_w=1, name="final_conv",
use_sn=self._spectral_norm)
logging.info("[Generator] after final processing: %s", net.shape)
net = (tf.nn.tanh(net) + 1.0) / 2.0
return net
@gin.configurable
class Discriminator(abstract_arch.AbstractDiscriminator):
"""ResNet-based discriminator supporting resolutions 32, 64, 128, 256, 512."""
def __init__(self,
ch=128,
blocks_with_attention="B1",
project_y=True,
**kwargs):
"""Constructor for BigGAN discriminator.
Args:
ch: Channel multiplier.
blocks_with_attention: Comma-separated list of blocks that are followed by
a non-local block.
project_y: Add an embedding of y in the output layer.
      **kwargs: Additional arguments passed on to ResNetDiscriminator.
"""
super(Discriminator, self).__init__(**kwargs)
self._ch = ch
self._blocks_with_attention = set(blocks_with_attention.split(","))
self._project_y = project_y
def _resnet_block(self, name, in_channels, out_channels, scale):
"""ResNet block for the generator."""
if scale not in ["down", "none"]:
raise ValueError(
"Unknown discriminator ResNet block scaling: {}.".format(scale))
return BigGanDeepResNetBlock(
name=name,
in_channels=in_channels,
out_channels=out_channels,
scale=scale,
spectral_norm=self._spectral_norm,
batch_norm=self.batch_norm)
def _get_in_out_channels(self, colors, resolution):
# See Table 7-9.
if colors not in [1, 3]:
raise ValueError("Unsupported color channels: {}".format(colors))
if resolution == 512:
channel_multipliers = [1, 1, 1, 2, 2, 4, 4] + 4 * [8] + 4 * [16]
elif resolution == 256:
channel_multipliers = [1, 2, 2, 4, 4] + 4 * [8] + 4 * [16]
elif resolution == 128:
channel_multipliers = [1, 2, 2, 4, 4] + 2 * [8] + 4 * [16]
elif resolution == 64:
channel_multipliers = [2, 4, 4] + 2 * [8] + 4 * [16]
elif resolution == 32:
channel_multipliers = 8 * [2]
else:
raise ValueError("Unsupported resolution: {}".format(resolution))
in_channels = [self._ch * c for c in channel_multipliers[:-1]]
out_channels = [self._ch * c for c in channel_multipliers[1:]]
return in_channels, out_channels
def apply(self, x, y, is_training):
"""Apply the discriminator on a input.
Args:
x: `Tensor` of shape [batch_size, ?, ?, ?] with real or fake images.
y: `Tensor` of shape [batch_size, num_classes] with one hot encoded
labels.
is_training: Boolean, whether the architecture should be constructed for
training or inference.
Returns:
      Tuple of 3 Tensors: the final prediction of the discriminator, the logits
      before the final output activation function, and the output of the
      second-to-last layer.
"""
logging.info("[Discriminator] inputs are x=%s, y=%s", x.shape,
None if y is None else y.shape)
resnet_ops.validate_image_inputs(x)
in_channels, out_channels = self._get_in_out_channels(
colors=x.shape[-1].value, resolution=x.shape[1].value)
num_blocks = len(in_channels)
net = ops.conv2d(x, output_dim=in_channels[0], k_h=3, k_w=3,
d_h=1, d_w=1, name="initial_conv",
use_sn=self._spectral_norm)
for block_idx in range(num_blocks):
scale = "down" if block_idx % 2 == 0 else "none"
block = self._resnet_block(
name="B{}".format(block_idx + 1),
in_channels=in_channels[block_idx],
out_channels=out_channels[block_idx],
scale=scale)
net = block(net, z=None, y=y, is_training=is_training)
# At resolution 64x64 there is a self-attention block.
if scale == "none" and net.shape[1].value == 64:
logging.info("[Discriminator] Applying non-local block to %s",
net.shape)
net = ops.non_local_block(net, "non_local_block",
use_sn=self._spectral_norm)
# Final part
logging.info("[Discriminator] before final processing: %s", net.shape)
net = tf.nn.relu(net)
h = tf.math.reduce_sum(net, axis=[1, 2])
out_logit = ops.linear(h, 1, scope="final_fc", use_sn=self._spectral_norm)
logging.info("[Discriminator] after final processing: %s", net.shape)
if self._project_y:
if y is None:
raise ValueError("You must provide class information y to project.")
with tf.variable_scope("embedding_fc"):
y_embedding_dim = out_channels[-1]
# We do not use ops.linear() below since it does not have an option to
# override the initializer.
kernel = tf.get_variable(
"kernel", [y.shape[1], y_embedding_dim], tf.float32,
initializer=tf.initializers.glorot_normal())
if self._spectral_norm:
kernel = ops.spectral_norm(kernel)
embedded_y = tf.matmul(y, kernel)
logging.info("[Discriminator] embedded_y for projection: %s",
embedded_y.shape)
out_logit += tf.reduce_sum(embedded_y * h, axis=1, keepdims=True)
out = tf.nn.sigmoid(out_logit)
return out, out_logit, h
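# Illustrative sketch (not part of the original file): mirror the per-block
# channel bookkeeping of Generator._get_in_out_channels for resolution 128
# with the default ch=128. Blocks alternate scale "none"/"up", so the 5 "up"
# blocks grow the 4x4 seed to 128x128; the values printed here only
# re-derive Table 7-9, they are not new configuration.
if __name__ == "__main__":
  ch = 128
  channel_multipliers = 4 * [16] + 2 * [8] + [4, 4, 2, 2, 1]
  in_channels = [ch * c for c in channel_multipliers[:-1]]
  out_channels = [ch * c for c in channel_multipliers[1:]]
  for block_idx, (c_in, c_out) in enumerate(zip(in_channels, out_channels)):
    scale = "none" if block_idx % 2 == 0 else "up"
    print("B{}: {} -> {} ({})".format(block_idx + 1, c_in, c_out, scale))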
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl.testing import flagsaver
from absl.testing import parameterized
from compare_gan import datasets
import tensorflow as tf
FLAGS = flags.FLAGS
_TPU_SUPPORTED_TYPES = {
tf.float32, tf.int32, tf.complex64, tf.int64, tf.bool, tf.bfloat16
}
def _preprocess_fn_id(images, labels):
return {"images": images}, labels
def _preprocess_fn_add_noise(images, labels, seed=None):
del labels
tf.set_random_seed(seed)
noise = tf.random.uniform([128], maxval=1.0)
return {"images": images}, noise
class DatasetsTest(parameterized.TestCase, tf.test.TestCase):
def setUp(self):
super(DatasetsTest, self).setUp()
FLAGS.data_shuffle_buffer_size = 100
def get_element_and_verify_shape(self, dataset_name, expected_shape):
dataset = datasets.get_dataset(dataset_name)
dataset = dataset.eval_input_fn()
image, label = dataset.make_one_shot_iterator().get_next()
# Check if shape is known at compile time, required for TPUs.
self.assertAllEqual(image.shape.as_list(), expected_shape)
self.assertEqual(image.dtype, tf.float32)
self.assertIn(label.dtype, _TPU_SUPPORTED_TYPES)
with self.cached_session() as session:
image = session.run(image)
self.assertEqual(image.shape, expected_shape)
self.assertGreaterEqual(image.min(), 0.0)
self.assertLessEqual(image.max(), 1.0)
def test_mnist(self):
self.get_element_and_verify_shape("mnist", (28, 28, 1))
def test_fashion_mnist(self):
self.get_element_and_verify_shape("fashion-mnist", (28, 28, 1))
def test_celeba(self):
self.get_element_and_verify_shape("celeb_a", (64, 64, 3))
def test_lsun(self):
self.get_element_and_verify_shape("lsun-bedroom", (128, 128, 3))
def _run_train_input_fn(self, dataset_name, preprocess_fn):
dataset = datasets.get_dataset(dataset_name)
with tf.Graph().as_default():
dataset = dataset.input_fn(params={"batch_size": 1},
preprocess_fn=preprocess_fn)
iterator = dataset.make_initializable_iterator()
with self.session() as sess:
sess.run(iterator.initializer)
next_batch = iterator.get_next()
return [sess.run(next_batch) for _ in range(5)]
@parameterized.named_parameters(
("FakeCifar", _preprocess_fn_id),
("FakeCifarWithRandomNoise", _preprocess_fn_add_noise),
)
@flagsaver.flagsaver
  def test_train_input_fn_is_deterministic(self, preprocess_fn):
FLAGS.data_fake_dataset = True
batches1 = self._run_train_input_fn("cifar10", preprocess_fn)
batches2 = self._run_train_input_fn("cifar10", preprocess_fn)
for i in range(len(batches1)):
# Check that both runs got the same images/noise
self.assertAllClose(batches1[i][0], batches2[i][0])
self.assertAllClose(batches1[i][1], batches2[i][1])
@flagsaver.flagsaver
def test_train_input_fn_noise_changes(self):
FLAGS.data_fake_dataset = True
batches = self._run_train_input_fn("cifar10", _preprocess_fn_add_noise)
for i in range(1, len(batches)):
self.assertNotAllClose(batches[0][1], batches[i][1])
self.assertNotAllClose(batches[i - 1][1], batches[i][1])
if __name__ == "__main__":
tf.test.main()
|
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.components.homeassistant.triggers import state
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
# TODO specify your supported trigger types.
TRIGGER_TYPES = {"turned_on", "turned_off"}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for NEW_NAME devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
# TODO Read this comment and remove it.
# This example shows how to iterate over the entities of this device
# that match this integration. If your triggers instead rely on
# events fired by devices without entities, do something like:
# zha_device = await _async_get_zha_device(hass, device_id)
# return zha_device.device_triggers
    # Get all the integration's entities for this device.
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
# Add triggers for each entity that belongs to this integration
# TODO add your own triggers.
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turned_on",
}
)
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turned_off",
}
)
return triggers
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
# TODO Implement your own logic to attach triggers.
    # Generally we suggest re-using the existing state or event
    # triggers from the automation integration.
if config[CONF_TYPE] == "turned_on":
from_state = STATE_OFF
to_state = STATE_ON
else:
from_state = STATE_ON
to_state = STATE_OFF
state_config = {
state.CONF_PLATFORM: "state",
CONF_ENTITY_ID: config[CONF_ENTITY_ID],
state.CONF_FROM: from_state,
state.CONF_TO: to_state,
}
state_config = state.TRIGGER_SCHEMA(state_config)
return await state.async_attach_trigger(
hass, state_config, action, automation_info, platform_type="device"
)
|
import os
def volume_of(path):
func = VolumeOf(os.path.ismount, os.path.abspath)
return func(path)
class FakeFstab:
def __init__(self):
self.ismount = FakeIsMount()
self.volume_of = VolumeOf(self.ismount, os.path.normpath)
def mount_points(self):
return self.ismount.mount_points()
def volume_of(self, path):
volume_of = VolumeOf(self.ismount, os.path.abspath)
return volume_of(path)
def add_mount(self, path):
self.ismount.add_mount(path)
from trashcli.list_mount_points import os_mount_points
class OsIsMount:
def __call__(self, path):
return os.path.ismount(path)
def mount_points(self):
return os_mount_points()
class FakeIsMount:
def __init__(self):
self.fakes = set(['/'])
def add_mount(self, path):
self.fakes.add(path)
def __call__(self, path):
if path == '/':
return True
path = os.path.normpath(path)
if path in self.fakes:
return True
return False
class VolumeOf:
def __init__(self, ismount, abspath):
self.ismount = ismount
self.abspath = abspath
def __call__(self, path):
path = self.abspath(path)
while path != os.path.dirname(path):
if self.ismount(path):
break
path = os.path.dirname(path)
return path
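# Example (illustrative sketch, not part of the original file): the fake
# fstab resolves a path to the nearest registered mount point, falling
# back to '/'. The paths below are hypothetical.
if __name__ == '__main__':
    fstab = FakeFstab()
    fstab.add_mount('/media/usb')
    print(fstab.volume_of('/media/usb/photos/img.jpg'))  # /media/usb
    print(fstab.volume_of('/home/user/file.txt'))        # /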
|
import threading
import time
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import resource
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.configs import option_decoders
from perfkitbenchmarker.configs import spec
FLAGS = flags.FLAGS
flags.DEFINE_string('appservice', None,
'Type of app service. e.g. AppEngine')
flags.DEFINE_string('appservice_region', None,
'Region of deployed app service.')
flags.DEFINE_string('appservice_backend', None,
                    'Backend instance type the app service uses.')
flags.DEFINE_string('app_runtime', None,
                    'Runtime environment the app service uses, '
                    'e.g. python, java.')
flags.DEFINE_string('app_type', None,
                    'Type of app package the builder should build.')
flags.DEFINE_integer('appservice_count', 1,
                     'Number of copies of the application to launch.')
def GetAppServiceSpecClass(service):
return spec.GetSpecClass(
BaseAppServiceSpec, SERVICE=service)
class BaseAppServiceSpec(spec.BaseSpec):
"""Storing various data about app service."""
SPEC_TYPE = 'BaseAppServiceSpec'
SPEC_ATTRS = ['SERVICE']
@classmethod
def _ApplyFlags(cls, config_values, flag_values):
super(BaseAppServiceSpec, cls)._ApplyFlags(config_values, flag_values)
    if flag_values['appservice_region'].present:
config_values['appservice_region'] = flag_values.appservice_region
if flag_values['appservice_backend'].present:
config_values['appservice_backend'] = flag_values.appservice_backend
if flag_values['appservice'].present:
config_values['appservice'] = flag_values.appservice
@classmethod
def _GetOptionDecoderConstructions(cls):
result = super(BaseAppServiceSpec, cls)._GetOptionDecoderConstructions()
result.update({
'appservice_region': (option_decoders.StringDecoder, {
'default': None, 'none_ok': True}),
'appservice_backend': (option_decoders.StringDecoder, {
'default': None, 'none_ok': True}),
'appservice': (option_decoders.StringDecoder, {
'default': None, 'none_ok': True})
})
return result
def GetAppServiceClass(service):
return resource.GetResourceClass(
BaseAppService, SERVICE=service)
class BaseAppService(resource.BaseResource):
"""Base class for representing an App instance."""
RESOURCE_TYPE = 'BaseAppService'
REQUIRED_ATTRS = ['SERVICE']
POLL_INTERVAL = 1
_appservice_counter = 0
_appservice_counter_lock = threading.Lock()
def __init__(self, base_app_service_spec):
super(BaseAppService, self).__init__()
with self._appservice_counter_lock:
self.appservice_number = self._appservice_counter
self.name = 'pkb-%s-%s' % (FLAGS.run_uri, self.appservice_number)
BaseAppService._appservice_counter += 1
self.region = base_app_service_spec.appservice_region
self.backend = base_app_service_spec.appservice_backend
self.builder = None
# update metadata
self.metadata.update({'backend': self.backend,
'region': self.region,
'concurrency': 'default'})
self.samples = []
def _UpdateDependencies(self):
"""Update dependencies for AppService."""
self.builder.Mutate()
def _Update(self):
raise NotImplementedError()
def Update(self):
"""Update a deployed app instance."""
@vm_util.Retry(poll_interval=self.POLL_INTERVAL, fuzz=0,
timeout=self.READY_TIMEOUT,
retryable_exceptions=(
errors.Resource.RetryableCreationError,))
def WaitUntilReady():
if not self._IsReady():
raise errors.Resource.RetryableCreationError('Not yet ready')
if self.user_managed:
return
self._UpdateDependencies()
self.update_start_time = time.time()
self._Update()
self.update_end_time = time.time()
WaitUntilReady()
self.update_ready_time = time.time()
self.samples.append(
sample.Sample('update latency',
self.update_end_time - self.update_start_time,
'seconds', {}))
self.samples.append(
sample.Sample('update ready latency',
self.update_ready_time - self.update_start_time,
'seconds', {}))
def Invoke(self, args=None):
"""Invoke a deployed app instance.
Args:
args: dict. Arguments passed to app.
"""
raise NotImplementedError()
def _CreateDependencies(self):
"""Builds app package."""
if self.builder:
self.builder.Create()
def _DeleteDependencies(self):
"""Delete app package."""
if self.builder:
self.builder.Delete()
def SetBuilder(self, builder=None, **kwargs):
"""Set builder for AppService."""
if builder:
self.builder = builder
def GetLifeCycleMetrics(self):
"""Export internal lifecycle metrics."""
if self.builder:
self.metadata.update(self.builder.GetResourceMetadata())
for s in self.samples:
s.metadata.update(self.metadata)
return self.samples
def _PostCreate(self):
"""Method called after _CreateResource."""
if self.builder:
self.metadata.update(self.builder.GetResourceMetadata())
def Create(self):
super(BaseAppService, self).Create()
self.samples.append(
sample.Sample('create latency',
self.create_end_time - self.create_start_time,
'seconds', {}))
self.samples.append(
sample.Sample('create ready latency',
self.resource_ready_time - self.create_start_time,
'seconds', {}))
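# Illustrative sketch (not part of PKB): a concrete app service subclasses
# BaseAppService, sets SERVICE, and fills in the resource hooks; it can then
# be looked up with GetAppServiceClass. The class and service name below are
# hypothetical.
if __name__ == '__main__':

  class ExampleAppService(BaseAppService):
    SERVICE = 'ExampleService'

    def _Create(self):
      pass

    def _Delete(self):
      pass

    def _Update(self):
      pass

    def _IsReady(self):
      return True

  print(GetAppServiceClass('ExampleService'))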
|
import json
import os
import shutil
from pathlib import Path
from test.common import test_root, tmp_dir
import pytest
import ruamel.yaml as yaml
import toml
from box import Box, BoxError, BoxList
class TestBoxList:
@pytest.fixture(autouse=True)
def temp_dir_cleanup(self):
shutil.rmtree(str(tmp_dir), ignore_errors=True)
try:
os.mkdir(str(tmp_dir))
except OSError:
pass
yield
shutil.rmtree(str(tmp_dir), ignore_errors=True)
def test_box_list(self):
new_list = BoxList({"item": x} for x in range(0, 10))
new_list.extend([{"item": 22}])
assert new_list[-1].item == 22
new_list.append([{"bad_item": 33}])
assert new_list[-1][0].bad_item == 33
assert repr(new_list).startswith("<BoxList:")
for x in new_list.to_list():
assert not isinstance(x, (BoxList, Box))
new_list.insert(0, {"test": 5})
new_list.insert(1, ["a", "b"])
new_list.append("x")
assert new_list[0].test == 5
assert isinstance(str(new_list), str)
assert isinstance(new_list[1], BoxList)
assert not isinstance(new_list.to_list(), BoxList)
def test_frozen_list(self):
bl = BoxList([5, 4, 3], frozen_box=True)
with pytest.raises(BoxError):
bl.pop(1)
with pytest.raises(BoxError):
bl.remove(4)
with pytest.raises(BoxError):
bl.sort()
with pytest.raises(BoxError):
bl.reverse()
with pytest.raises(BoxError):
bl.append("test")
with pytest.raises(BoxError):
bl.extend([4])
with pytest.raises(BoxError):
del bl[0]
with pytest.raises(BoxError):
bl[0] = 5
bl2 = BoxList([5, 4, 3])
del bl2[0]
assert bl2[0] == 4
bl2[1] = 4
assert bl2[1] == 4
def test_box_list_to_json(self):
bl = BoxList([{"item": 1, "CamelBad": 2}])
assert json.loads(bl.to_json())[0]["item"] == 1
def test_box_list_from_json(self):
alist = [{"item": 1}, {"CamelBad": 2}]
json_list = json.dumps(alist)
bl = BoxList.from_json(json_list, camel_killer_box=True)
assert bl[0].item == 1
assert bl[1].camel_bad == 2
with pytest.raises(BoxError):
BoxList.from_json(json.dumps({"a": 2}))
def test_box_list_to_yaml(self):
bl = BoxList([{"item": 1, "CamelBad": 2}])
assert yaml.load(bl.to_yaml(), Loader=yaml.SafeLoader)[0]["item"] == 1
def test_box_list_from_yaml(self):
alist = [{"item": 1}, {"CamelBad": 2}]
yaml_list = yaml.dump(alist)
bl = BoxList.from_yaml(yaml_list, camel_killer_box=True)
assert bl[0].item == 1
assert bl[1].camel_bad == 2
with pytest.raises(BoxError):
BoxList.from_yaml(yaml.dump({"a": 2}))
def test_box_list_to_toml(self):
bl = BoxList([{"item": 1, "CamelBad": 2}])
assert toml.loads(bl.to_toml(key_name="test"))["test"][0]["item"] == 1
with pytest.raises(BoxError):
BoxList.from_toml("[[test]]\nitem = 1\nCamelBad = 2\n\n", key_name="does not exist")
    def test_box_list_from_toml(self):
alist = [{"item": 1}, {"CamelBad": 2}]
toml_list = toml.dumps({"key": alist})
bl = BoxList.from_toml(toml_string=toml_list, key_name="key", camel_killer_box=True)
assert bl[0].item == 1
assert bl[1].camel_bad == 2
with pytest.raises(BoxError):
BoxList.from_toml(toml.dumps({"a": 2}), "a")
with pytest.raises(BoxError):
BoxList.from_toml(toml_list, "bad_key")
def test_intact_types_list(self):
class MyList(list):
pass
bl = BoxList([[1, 2], MyList([3, 4])], box_intact_types=(MyList,))
assert isinstance(bl[0], BoxList)
def test_to_csv(self):
data = BoxList(
[
{"Number": 1, "Name": "Chris", "Country": "US"},
{"Number": 2, "Name": "Sam", "Country": "US"},
{"Number": 3, "Name": "Jess", "Country": "US"},
{"Number": 4, "Name": "Frank", "Country": "UK"},
{"Number": 5, "Name": "Demo", "Country": "CA"},
]
)
file = Path(tmp_dir, "csv_file.csv")
data.to_csv(filename=file)
assert file.read_text().startswith("Number,Name,Country\n1,Chris,US")
assert data.to_csv().endswith("2,Sam,US\r\n3,Jess,US\r\n4,Frank,UK\r\n5,Demo,CA\r\n")
def test_from_csv(self):
bl = BoxList.from_csv(filename=Path(test_root, "data", "csv_file.csv"))
assert bl[1].Name == "Sam"
b2 = BoxList.from_csv(
"Number,Name,Country\r\n1,Chris,US\r\n2,Sam" ",US\r\n3,Jess,US\r\n4,Frank,UK\r\n5,Demo,CA\r\n"
)
assert b2[2].Name == "Jess"
def test_bad_csv(self):
data = BoxList([{"test": 1}, {"bad": 2, "data": 3}])
file = Path(tmp_dir, "csv_file.csv")
with pytest.raises(BoxError):
data.to_csv(file)
def test_box_list_dots(self):
data = BoxList(
[
{"test": 1},
{"bad": 2, "data": 3},
[[[0, -1], [77, 88]], {"inner": "one", "lister": [[{"down": "rabbit"}]]}],
4,
],
box_dots=True,
)
assert data["[0].test"] == 1
assert data["[1].data"] == 3
assert data[1].data == 3
data["[1].data"] = "new_data"
assert data["[1].data"] == "new_data"
assert data["[2][0][0][1]"] == -1
assert data[2][0][0][1] == -1
data["[2][0][0][1]"] = 1_000_000
assert data["[2][0][0][1]"] == 1_000_000
assert data[2][0][0][1] == 1_000_000
assert data["[2][1].lister[0][0].down"] == "rabbit"
data["[2][1].lister[0][0].down"] = "hole"
assert data["[2][1].lister[0][0].down"] == "hole"
assert data[2][1].lister[0][0].down == "hole"
db = Box(a=data, box_dots=True)
keys = db.keys(dotted=True)
assert keys == [
"a[0].test",
"a[1].bad",
"a[1].data",
"a[2][0][0][0]",
"a[2][0][0][1]",
"a[2][0][1][0]",
"a[2][0][1][1]",
"a[2][1].inner",
"a[2][1].lister[0][0].down",
"a[3]",
]
for key in keys:
db[key]
def test_box_config_propagate(self):
structure = Box(a=[Box(default_box=False)], default_box=True, box_inherent_settings=True)
assert structure._box_config["default_box"] is True
assert structure.a[0]._box_config["default_box"] is True
base = BoxList([BoxList([Box(default_box=False)])], default_box=True)
assert base[0].box_options["default_box"] is True
base2 = BoxList((BoxList([Box()], default_box=False),), default_box=True)
assert base2[0][0]._box_config["default_box"] is True
base3 = Box(
a=[Box(default_box=False)], default_box=True, box_inherent_settings=True, box_intact_types=[Box, BoxList]
)
base3.a.append(Box(default_box=False))
base3.a.append(BoxList(default_box=False))
for item in base3.a:
if isinstance(item, Box):
assert item._box_config["default_box"] is True
elif isinstance(item, BoxList):
assert item.box_options["default_box"] is True
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import re
import time
from absl import flags
from absl import logging
from compare_gan import datasets
from compare_gan import eval_gan_lib
from compare_gan import hooks
from compare_gan.gans import utils
from compare_gan.metrics import fid_score as fid_score_lib
from compare_gan.metrics import inception_score as inception_score_lib
import gin.tf
import numpy as np
import six
import tensorflow as tf
FLAGS = flags.FLAGS
class _DummyParserDelegate(gin.config_parser.ParserDelegate):
"""Dummy class required to parse Gin configs.
Our use case (just get the config as dictionary) does not require real
  implementations of the two methods.
"""
def configurable_reference(self, scoped_name, evaluate):
return scoped_name
def macro(self, scoped_name):
return scoped_name
def _parse_gin_config(config_path):
"""Parses a Gin config into a dictionary. All values are strings."""
with tf.gfile.Open(config_path) as f:
config_str = f.read()
parser = gin.config_parser.ConfigParser(config_str, _DummyParserDelegate())
config = {}
for statement in parser:
if not isinstance(statement, gin.config_parser.ImportStatement):
name = statement.scope + "/" if statement.scope else ""
      name += statement.selector + "." + statement.arg_name
config[name] = statement.value
return config
@gin.configurable("options")
def get_options_dict(batch_size=gin.REQUIRED,
gan_class=gin.REQUIRED,
architecture=gin.REQUIRED,
training_steps=gin.REQUIRED,
discriminator_normalization=None,
lamba=1,
disc_iters=1,
z_dim=128):
"""Parse legacy options from Gin configurations into a Python dict.
Args:
batch_size: The (global) batch size to use. On TPUs each core will get a
fraction of this.
    gan_class: Reference to the GAN class to use. This must implement the
      AbstractGAN interface.
    architecture: Name of the architecture to use for G and D. This should be
      a value from consts.ARCHITECTURES and be supported by the GAN class.
training_steps: The number of training steps. These are discriminator steps.
discriminator_normalization: Deprecated. Ignored, but kept to read old
configs.
lamba: Weight for gradient penalty.
disc_iters: How often the discriminator is trained before training G for one
step. G will be trained for `training_steps // disc_iters` steps.
z_dim: Length of the latent variable z fed to the generator.
Returns:
A Python dictionary with the options.
"""
del discriminator_normalization
return {
"use_tpu": FLAGS.use_tpu, # For compatibility with AbstractGAN.
"batch_size": batch_size,
"gan_class": gan_class,
"architecture": architecture,
"training_steps": training_steps,
"lambda": lamba, # Different spelling intended.
"disc_iters": disc_iters,
"z_dim": z_dim,
}
class TaskManager(object):
"""Interface for managing a task."""
def __init__(self, model_dir):
self._model_dir = model_dir
@property
def model_dir(self):
return self._model_dir
def mark_training_done(self):
with tf.gfile.Open(os.path.join(self.model_dir, "TRAIN_DONE"), "w") as f:
f.write("")
def is_training_done(self):
return tf.gfile.Exists(os.path.join(self.model_dir, "TRAIN_DONE"))
def add_eval_result(self, checkpoint_path, result_dict, default_value):
pass
def get_checkpoints_with_results(self):
return set()
def unevaluated_checkpoints(self, timeout=0, eval_every_steps=None):
"""Generator for checkpoints without evaluation results.
Args:
      timeout: Optional timeout for waiting for new checkpoints. Set this to
        do continuous evaluation.
eval_every_steps: Only evaluate checkpoints from steps divisible by this
integer.
Yields:
Path to checkpoints that have not yet been evaluated.
"""
logging.info("Looking for checkpoints in %s", self._model_dir)
evaluated_checkpoints = self.get_checkpoints_with_results()
last_eval = time.time()
while True:
unevaluated_checkpoints = []
checkpoint_state = tf.train.get_checkpoint_state(self.model_dir)
if checkpoint_state:
checkpoints = set(checkpoint_state.all_model_checkpoint_paths)
# Remove already evaluated checkpoints and sort ascending by step
# number.
unevaluated_checkpoints = checkpoints - evaluated_checkpoints
step_and_ckpt = sorted(
[(int(x.split("-")[-1]), x) for x in unevaluated_checkpoints])
if eval_every_steps:
step_and_ckpt = [(step, ckpt) for step, ckpt in step_and_ckpt
if step > 0 and step % eval_every_steps == 0]
unevaluated_checkpoints = [ckpt for _, ckpt in step_and_ckpt]
logging.info(
"Found checkpoints: %s\nEvaluated checkpoints: %s\n"
"Unevaluated checkpoints: %s", checkpoints, evaluated_checkpoints,
unevaluated_checkpoints)
for checkpoint_path in unevaluated_checkpoints:
yield checkpoint_path
if unevaluated_checkpoints:
evaluated_checkpoints |= set(unevaluated_checkpoints)
last_eval = time.time()
continue
# No new checkpoints, timeout or stop if training finished. Otherwise
# wait 1 minute.
if time.time() - last_eval > timeout or self.is_training_done():
break
time.sleep(60)
def report_progress(self, message):
pass
class TaskManagerWithCsvResults(TaskManager):
"""Task Manager that writes results to a CSV file."""
def __init__(self, model_dir, score_file=None):
super(TaskManagerWithCsvResults, self).__init__(model_dir)
if score_file is None:
score_file = os.path.join(model_dir, "scores.csv")
self._score_file = score_file
def _get_config_for_step(self, step):
"""Returns the latest operative config for the global step as dictionary."""
saved_configs = tf.gfile.Glob(
os.path.join(self.model_dir, "operative_config-*.gin"))
get_step = lambda fn: int(re.findall(r"operative_config-(\d+).gin", fn)[0])
config_steps = [get_step(fn) for fn in saved_configs]
assert config_steps
last_config_step = sorted([s for s in config_steps if s <= step])[-1]
config_path = os.path.join(
self.model_dir, "operative_config-{}.gin".format(last_config_step))
return _parse_gin_config(config_path)
def add_eval_result(self, checkpoint_path, result_dict, default_value):
    step = int(os.path.basename(checkpoint_path).split("-")[-1])
config = self._get_config_for_step(step)
csv_header = (
["checkpoint_path", "step"] + sorted(result_dict) + sorted(config))
write_header = not tf.gfile.Exists(self._score_file)
if write_header:
with tf.gfile.Open(self._score_file, "w") as f:
writer = csv.DictWriter(f, fieldnames=csv_header, extrasaction="ignore")
writer.writeheader()
row = dict(checkpoint_path=checkpoint_path, step=step, **config)
for k, v in six.iteritems(result_dict):
if isinstance(v, float):
v = "{:.3f}".format(v)
row[k] = v
with tf.gfile.Open(self._score_file, "a") as f:
writer = csv.DictWriter(f, fieldnames=csv_header, extrasaction="ignore")
writer.writerow(row)
def get_checkpoints_with_results(self):
if not tf.gfile.Exists(self._score_file):
return set()
with tf.gfile.Open(self._score_file) as f:
reader = csv.DictReader(f)
return {r["checkpoint_path"] for r in reader}
return set()
def _run_eval(module_spec, checkpoints, task_manager, run_config,
use_tpu, num_averaging_runs):
"""Evaluates the given checkpoints and add results to a result writer.
Args:
module_spec: `ModuleSpec` of the model.
    checkpoints: Generator of checkpoint paths.
task_manager: `TaskManager`. init_eval() will be called before adding
results.
run_config: `RunConfig` to use. Values for master and tpu_config are
currently ignored.
use_tpu: Whether to use TPU for evaluation.
num_averaging_runs: Determines how many times each metric is computed.
"""
# By default, we compute FID and Inception scores. Other tasks defined in
# the metrics folder (such as the one in metrics/kid_score.py) can be added
# to this list if desired.
eval_tasks = [
inception_score_lib.InceptionScoreTask(),
fid_score_lib.FIDScoreTask()
]
logging.info("eval_tasks: %s", eval_tasks)
for checkpoint_path in checkpoints:
step = os.path.basename(checkpoint_path).split("-")[-1]
    if int(step) == 0:
continue
export_path = os.path.join(run_config.model_dir, "tfhub", str(step))
if not tf.gfile.Exists(export_path):
module_spec.export(export_path, checkpoint_path=checkpoint_path)
default_value = -1.0
try:
result_dict = eval_gan_lib.evaluate_tfhub_module(
export_path, eval_tasks, use_tpu=use_tpu,
num_averaging_runs=num_averaging_runs)
except ValueError as nan_found_error:
result_dict = {}
logging.exception(nan_found_error)
default_value = eval_gan_lib.NAN_DETECTED
logging.info("Evaluation result for checkpoint %s: %s (default value: %s)",
checkpoint_path, result_dict, default_value)
task_manager.add_eval_result(checkpoint_path, result_dict, default_value)
def run_with_schedule(schedule, run_config, task_manager, options, use_tpu,
num_eval_averaging_runs=1, eval_every_steps=-1):
"""Run the schedule with the given options.
Available schedules:
- train: Train up to options["training_steps"], continuing from existing
checkpoints if available.
- eval_after_train: First train up to options["training_steps"] then
evaluate all checkpoints.
  - continuous_eval: Wait for new checkpoints and evaluate them as they
become available. This is meant to run in parallel with a job running
the training schedule but can also run after it.
Args:
    schedule: Schedule to run. One of: train, continuous_eval, eval_after_train.
run_config: `tf.contrib.tpu.RunConfig` to use.
task_manager: `TaskManager` for this run.
    options: Python dictionary with run parameters.
use_tpu: Boolean whether to use TPU.
num_eval_averaging_runs: Determines how many times each metric is computed.
eval_every_steps: Integer determining which checkpoints to evaluate.
"""
logging.info("Running schedule '%s' with options: %s", schedule, options)
if run_config.tf_random_seed:
logging.info("Setting NumPy random seed to %s.", run_config.tf_random_seed)
np.random.seed(run_config.tf_random_seed)
result_dir = os.path.join(run_config.model_dir, "result")
utils.check_folder(result_dir)
dataset = datasets.get_dataset()
gan = options["gan_class"](dataset=dataset,
parameters=options,
model_dir=run_config.model_dir)
if schedule not in {"train", "eval_after_train", "continuous_eval"}:
raise ValueError("Schedule {} not supported.".format(schedule))
if schedule in {"train", "eval_after_train"}:
train_hooks = [
gin.tf.GinConfigSaverHook(run_config.model_dir),
hooks.ReportProgressHook(task_manager,
max_steps=options["training_steps"]),
]
if run_config.save_checkpoints_steps:
# This replaces the default checkpoint saver hook in the estimator.
logging.info("Using AsyncCheckpointSaverHook.")
train_hooks.append(
hooks.AsyncCheckpointSaverHook(
checkpoint_dir=run_config.model_dir,
save_steps=run_config.save_checkpoints_steps))
# (b/122782388): Remove hotfix.
run_config = run_config.replace(save_checkpoints_steps=1000000)
estimator = gan.as_estimator(
run_config, batch_size=options["batch_size"], use_tpu=use_tpu)
estimator.train(
input_fn=gan.input_fn,
max_steps=options["training_steps"],
hooks=train_hooks)
task_manager.mark_training_done()
if schedule == "continuous_eval":
# Continuous eval with up to 24 hours between checkpoints.
checkpoints = task_manager.unevaluated_checkpoints(
timeout=24 * 3600, eval_every_steps=eval_every_steps)
if schedule == "eval_after_train":
checkpoints = task_manager.unevaluated_checkpoints(
eval_every_steps=eval_every_steps)
if schedule in {"continuous_eval", "eval_after_train"}:
_run_eval(
gan.as_module_spec(),
checkpoints=checkpoints,
task_manager=task_manager,
run_config=run_config,
use_tpu=use_tpu,
num_averaging_runs=num_eval_averaging_runs)
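# Minimal sketch (not part of the original file): TaskManager tracks
# training completion via a TRAIN_DONE marker in the model directory.
# A temporary directory is used here purely for illustration.
if __name__ == "__main__":
  import tempfile

  demo_dir = tempfile.mkdtemp()
  manager = TaskManager(demo_dir)
  print(manager.is_training_done())   # False
  manager.mark_training_done()
  print(manager.is_training_done())   # True
  # With training done and no checkpoints, the generator yields nothing.
  print(list(manager.unevaluated_checkpoints(timeout=0)))  # []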
|
import pandas as pd
import numpy as np
from scipy.stats import rankdata
from scattertext.termranking.AbsoluteFrequencyRanker import AbsoluteFrequencyRanker
from scattertext.termscoring.ScaledFScore import ScaledFScorePresetsNeg1To1
class TermCategoryRanker(object):
def __init__(self,
scorer=ScaledFScorePresetsNeg1To1,
term_ranker=AbsoluteFrequencyRanker,
use_non_text_features=False):
self.scorer = scorer
self.term_ranker = term_ranker
self.use_non_text_features = use_non_text_features
def get_rank_df(self, term_doc_matrix):
# tdf = term_doc_matrix.get_term_freq_df('')
ranker = self.term_ranker(term_doc_matrix)
if self.use_non_text_features:
ranker = ranker.use_non_text_features()
tdf = ranker.get_ranks('')
tdf_sum = tdf.sum(axis=1)
score_data = {}
for category in term_doc_matrix.get_categories():
score_data[category] = self.scorer().get_scores(tdf[category], tdf_sum - tdf[category])
return pd.DataFrame(score_data, index=tdf.index).apply(lambda x: rankdata(x, 'dense'))
def get_max_rank(self, term_doc_matrix):
'''
:param term_doc_matrix: TermDocMatrix
:return: int
'''
rank_df = self.get_rank_df(term_doc_matrix)
return rank_df.max().max()
class BaseAssociationCompactor(object):
def __init__(self,
scorer=ScaledFScorePresetsNeg1To1,
term_ranker=AbsoluteFrequencyRanker,
use_non_text_features=False):
self.scorer = TermCategoryRanker(scorer, term_ranker, use_non_text_features)
def _prune_higher_ranked_terms(self, term_doc_matrix, rank_df, rank):
term_to_remove = rank_df.index[np.isnan(rank_df[rank_df <= rank])
.apply(lambda x: all(x), axis=1)]
return self._remove_terms(term_doc_matrix, term_to_remove)
def _remove_terms(self, term_doc_matrix, term_to_remove):
return term_doc_matrix.remove_terms(term_to_remove, non_text=self.scorer.use_non_text_features)
class JSDCompactor(BaseAssociationCompactor):
def __init__(self,
max_terms,
term_ranker=AbsoluteFrequencyRanker,
use_non_text_features=False):
self.max_terms = max_terms
BaseAssociationCompactor.__init__(self, term_ranker=term_ranker, use_non_text_features=use_non_text_features)
def compact(self, term_doc_matrix, verbose=False):
rank_df = self.scorer.get_rank_df(term_doc_matrix)
p_df = rank_df/rank_df.sum(axis=0) + 0.001
m = p_df.sum(axis=1)
def lg(x): return np.log(x) / np.log(2)
rank_df['Score'] = m * lg(1/m) - (p_df * lg(1/p_df)).sum(axis=1)
terms_to_remove = rank_df.sort_values(
by='Score', ascending=False
).iloc[self.max_terms:].index
return term_doc_matrix.remove_terms(terms_to_remove, self.scorer.use_non_text_features)
class AssociationCompactor(BaseAssociationCompactor):
def __init__(self,
max_terms,
scorer=ScaledFScorePresetsNeg1To1,
term_ranker=AbsoluteFrequencyRanker,
use_non_text_features=False):
self.max_terms = max_terms
BaseAssociationCompactor.__init__(self, scorer, term_ranker, use_non_text_features)
def compact(self, term_doc_matrix, verbose=False):
'''
Parameters
----------
term_doc_matrix : TermDocMatrix
Term document matrix object to compact
Returns
-------
New term doc matrix
'''
rank_df = self.scorer.get_rank_df(term_doc_matrix)
optimal_rank = self._find_optimal_rank(rank_df)
compacted_term_doc_matrix = self._prune_higher_ranked_terms(term_doc_matrix, rank_df, optimal_rank)
if verbose:
print('max terms', self.max_terms, 'optimal_rank', optimal_rank,
'num_terms', compacted_term_doc_matrix.get_num_terms())
return compacted_term_doc_matrix
def _get_num_terms_at_rank(self, rank_i, rank_df):
return sum(np.isnan(rank_df[rank_df <= rank_i]).apply(lambda x: not all(x), axis=1))
def _find_optimal_rank(self, ranks_df):
max_rank = ranks_df.max().max()
min_rank = 1
last_max_rank = None
last_min_rank = None
while max_rank - 1 > min_rank:
if last_max_rank is not None:
if last_min_rank == min_rank and last_max_rank == max_rank:
raise Exception("Error. Potential infinite loop detected.")
last_max_rank = max_rank
last_min_rank = min_rank
cur_rank = int((max_rank - min_rank) / 2) + min_rank
num_terms = self._get_num_terms_at_rank(cur_rank, ranks_df)
if num_terms > self.max_terms:
max_rank = cur_rank
elif num_terms < self.max_terms:
min_rank = cur_rank
else:
return cur_rank
return min_rank
class AssociationCompactorByRank(BaseAssociationCompactor):
def __init__(self,
rank,
scorer=ScaledFScorePresetsNeg1To1,
term_ranker=AbsoluteFrequencyRanker,
use_non_text_features=False):
self.rank = rank
BaseAssociationCompactor.__init__(self, scorer, term_ranker, use_non_text_features)
def compact(self, term_doc_matrix):
'''
Parameters
----------
term_doc_matrix : TermDocMatrix
Term document matrix object to compact
Returns
-------
TermDocMatrix
'''
rank_df = self.scorer.get_rank_df(term_doc_matrix)
return self._prune_higher_ranked_terms(term_doc_matrix, rank_df, self.rank)
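# Usage sketch (assumptions: scattertext's bundled 2012 convention corpus and
# whitespace tokenization are available; not part of this module).
if __name__ == '__main__':
    import scattertext as st
    convention_df = st.SampleCorpora.ConventionData2012.get_data()
    corpus = st.CorpusFromPandas(convention_df,
                                 category_col='party',
                                 text_col='text',
                                 nlp=st.whitespace_nlp_with_sentences).build()
    compacted = AssociationCompactor(max_terms=2000).compact(corpus)
    print(corpus.get_num_terms(), '->', compacted.get_num_terms())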
|
import logging
from time import sleep
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_DISARMED,
)
from . import CONF_ALARM, CONF_CODE_DIGITS, CONF_GIID, HUB as hub
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure platform."""
alarms = []
if int(hub.config.get(CONF_ALARM, 1)):
hub.update_overview()
alarms.append(VerisureAlarm())
add_entities(alarms)
def set_arm_state(state, code=None):
"""Send set arm state command."""
transaction_id = hub.session.set_arm_state(code, state)[
"armStateChangeTransactionId"
]
_LOGGER.info("verisure set arm state %s", state)
transaction = {}
while "result" not in transaction:
sleep(0.5)
transaction = hub.session.get_arm_state_transaction(transaction_id)
hub.update_overview(no_throttle=True)
class VerisureAlarm(alarm.AlarmControlPanelEntity):
"""Representation of a Verisure alarm status."""
def __init__(self):
"""Initialize the Verisure alarm panel."""
self._state = None
self._digits = hub.config.get(CONF_CODE_DIGITS)
self._changed_by = None
@property
def name(self):
"""Return the name of the device."""
giid = hub.config.get(CONF_GIID)
if giid is not None:
            aliases = {i["giid"]: i["alias"] for i in hub.session.installations}
            if giid in aliases:
                return "{} alarm".format(aliases[giid])
_LOGGER.error("Verisure installation giid not found: %s", giid)
return "{} alarm".format(hub.session.installations[0]["alias"])
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY
@property
def code_format(self):
"""Return one or more digits/characters."""
return alarm.FORMAT_NUMBER
@property
def changed_by(self):
"""Return the last change triggered by."""
return self._changed_by
def update(self):
"""Update alarm status."""
hub.update_overview()
status = hub.get_first("$.armState.statusType")
if status == "DISARMED":
self._state = STATE_ALARM_DISARMED
elif status == "ARMED_HOME":
self._state = STATE_ALARM_ARMED_HOME
elif status == "ARMED_AWAY":
self._state = STATE_ALARM_ARMED_AWAY
elif status != "PENDING":
_LOGGER.error("Unknown alarm state %s", status)
self._changed_by = hub.get_first("$.armState.name")
def alarm_disarm(self, code=None):
"""Send disarm command."""
set_arm_state("DISARMED", code)
def alarm_arm_home(self, code=None):
"""Send arm home command."""
set_arm_state("ARMED_HOME", code)
def alarm_arm_away(self, code=None):
"""Send arm away command."""
set_arm_state("ARMED_AWAY", code)
|
import pytest
import requests_mock
import homeassistant.components.facebook.notify as fb
@pytest.fixture
def facebook():
"""Fixture for facebook."""
access_token = "page-access-token"
return fb.FacebookNotificationService(access_token)
async def test_send_simple_message(hass, facebook):
"""Test sending a simple message with success."""
with requests_mock.Mocker() as mock:
mock.register_uri(requests_mock.POST, fb.BASE_URL, status_code=200)
message = "This is just a test"
target = ["+15555551234"]
facebook.send_message(message=message, target=target)
assert mock.called
assert mock.call_count == 1
expected_body = {
"recipient": {"phone_number": target[0]},
"message": {"text": message},
"messaging_type": "MESSAGE_TAG",
"tag": "ACCOUNT_UPDATE",
}
assert mock.last_request.json() == expected_body
expected_params = {"access_token": ["page-access-token"]}
assert mock.last_request.qs == expected_params
async def test_send_multiple_message(hass, facebook):
"""Test sending a message to multiple targets."""
with requests_mock.Mocker() as mock:
mock.register_uri(requests_mock.POST, fb.BASE_URL, status_code=200)
message = "This is just a test"
targets = ["+15555551234", "+15555551235"]
facebook.send_message(message=message, target=targets)
assert mock.called
assert mock.call_count == 2
for idx, target in enumerate(targets):
request = mock.request_history[idx]
expected_body = {
"recipient": {"phone_number": target},
"message": {"text": message},
"messaging_type": "MESSAGE_TAG",
"tag": "ACCOUNT_UPDATE",
}
assert request.json() == expected_body
expected_params = {"access_token": ["page-access-token"]}
assert request.qs == expected_params
async def test_send_message_attachment(hass, facebook):
"""Test sending a message with a remote attachment."""
with requests_mock.Mocker() as mock:
mock.register_uri(requests_mock.POST, fb.BASE_URL, status_code=200)
message = "This will be thrown away."
data = {
"attachment": {
"type": "image",
"payload": {"url": "http://www.example.com/image.jpg"},
}
}
target = ["+15555551234"]
facebook.send_message(message=message, data=data, target=target)
assert mock.called
assert mock.call_count == 1
expected_body = {
"recipient": {"phone_number": target[0]},
"message": data,
"messaging_type": "MESSAGE_TAG",
"tag": "ACCOUNT_UPDATE",
}
assert mock.last_request.json() == expected_body
expected_params = {"access_token": ["page-access-token"]}
assert mock.last_request.qs == expected_params
async def test_send_targetless_message(hass, facebook):
"""Test sending a message without a target."""
with requests_mock.Mocker() as mock:
mock.register_uri(requests_mock.POST, fb.BASE_URL, status_code=200)
facebook.send_message(message="going nowhere")
assert not mock.called
async def test_send_message_with_400(hass, facebook):
"""Test sending a message with a 400 from Facebook."""
with requests_mock.Mocker() as mock:
mock.register_uri(
requests_mock.POST,
fb.BASE_URL,
status_code=400,
json={
"error": {
"message": "Invalid OAuth access token.",
"type": "OAuthException",
"code": 190,
"fbtrace_id": "G4Da2pFp2Dp",
}
},
)
facebook.send_message(message="nope!", target=["+15555551234"])
assert mock.called
assert mock.call_count == 1
|
class PorterStemmer:
"""Class contains implementation of Porter stemming algorithm.
Attributes
    ----------
b : str
Buffer holding a word to be stemmed. The letters are in b[0], b[1] ... ending at b[`k`].
    k : int
        Index of the last letter of the word in `b`; readjusted downwards as the stemming progresses.
    j : int
        A general offset into the string.
"""
def __init__(self):
self.b = "" # buffer for word to be stemmed
self.k = 0
self.j = 0 # j is a general offset into the string
def _cons(self, i):
"""Check if b[i] is a consonant letter.
Parameters
----------
i : int
Index for `b`.
Returns
-------
bool
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "hi"
>>> p._cons(1)
False
>>> p.b = "meow"
>>> p._cons(3)
True
"""
ch = self.b[i]
if ch in "aeiou":
return False
if ch == 'y':
return i == 0 or not self._cons(i - 1)
return True
def _m(self):
"""Calculate the number of consonant sequences between 0 and j.
If c is a consonant sequence and v a vowel sequence, and <..>
indicates arbitrary presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
Returns
-------
int
The number of consonant sequences between 0 and j.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "<bm>aobm<ao>"
>>> p.j = 11
>>> p._m()
2
"""
i = 0
while True:
if i > self.j:
return 0
if not self._cons(i):
break
i += 1
i += 1
n = 0
while True:
while True:
if i > self.j:
return n
if self._cons(i):
break
i += 1
i += 1
n += 1
            while True:
if i > self.j:
return n
if not self._cons(i):
break
i += 1
i += 1
def _vowelinstem(self):
"""Check if b[0: j + 1] contains a vowel letter.
Returns
-------
bool
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "gnsm"
>>> p.j = 3
>>> p._vowelinstem()
False
>>> p.b = "gensim"
>>> p.j = 5
>>> p._vowelinstem()
True
"""
return not all(self._cons(i) for i in range(self.j + 1))
def _doublec(self, j):
"""Check if b[j - 1: j + 1] contain a double consonant letter.
Parameters
----------
j : int
Index for `b`
Returns
-------
bool
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "real"
>>> p.j = 3
>>> p._doublec(3)
False
>>> p.b = "really"
>>> p.j = 5
>>> p._doublec(4)
True
"""
return j > 0 and self.b[j] == self.b[j - 1] and self._cons(j)
def _cvc(self, i):
"""Check if b[j - 2: j + 1] makes the (consonant, vowel, consonant) pattern and also
if the second 'c' is not 'w', 'x' or 'y'. This is used when trying to restore an 'e' at the end of a short word,
e.g. cav(e), lov(e), hop(e), crim(e), but snow, box, tray.
Parameters
----------
i : int
Index for `b`
Returns
-------
bool
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "lib"
>>> p.j = 2
>>> p._cvc(2)
True
>>> p.b = "dll"
>>> p.j = 2
>>> p._cvc(2)
False
>>> p.b = "wow"
>>> p.j = 2
>>> p._cvc(2)
False
"""
if i < 2 or not self._cons(i) or self._cons(i - 1) or not self._cons(i - 2):
return False
return self.b[i] not in "wxy"
def _ends(self, s):
"""Check if b[: k + 1] ends with `s`.
Parameters
----------
s : str
Returns
-------
bool
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.b = "cowboy"
>>> p.j = 5
>>> p.k = 2
>>> p._ends("cow")
True
"""
if s[-1] != self.b[self.k]: # tiny speed-up
return False
length = len(s)
if length > (self.k + 1):
return False
if self.b[self.k - length + 1:self.k + 1] != s:
return False
self.j = self.k - length
return True
def _setto(self, s):
"""Append `s` to `b`, adjusting `k`.
Parameters
----------
s : str
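        Examples
        --------
        .. sourcecode:: pycon
            >>> from gensim.parsing.porter import PorterStemmer
            >>> p = PorterStemmer()
            >>> p.b = "ponies"
            >>> p.k = 5
            >>> p.j = 2  # illustrative offsets, as a prior _ends("ies") call would leave them
            >>> p._setto("i")
            >>> p.b
            'poni'
            >>> p.k
            3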
"""
self.b = self.b[:self.j + 1] + s
self.k = len(self.b) - 1
    def _r(self, s):
        """Replace the current suffix with `s` if the stem contains at least one consonant sequence (_m() > 0)."""
        if self._m() > 0:
            self._setto(s)
def _step1ab(self):
"""Get rid of plurals and -ed or -ing.
caresses -> caress
ponies -> poni
ties -> ti
caress -> caress
cats -> cat
feed -> feed
agreed -> agree
disabled -> disable
matting -> mat
mating -> mate
meeting -> meet
milling -> mill
messing -> mess
meetings -> meet
"""
if self.b[self.k] == 's':
if self._ends("sses"):
self.k -= 2
elif self._ends("ies"):
self._setto("i")
elif self.b[self.k - 1] != 's':
self.k -= 1
if self._ends("eed"):
if self._m() > 0:
self.k -= 1
elif (self._ends("ed") or self._ends("ing")) and self._vowelinstem():
self.k = self.j
if self._ends("at"):
self._setto("ate")
elif self._ends("bl"):
self._setto("ble")
elif self._ends("iz"):
self._setto("ize")
elif self._doublec(self.k):
if self.b[self.k - 1] not in "lsz":
self.k -= 1
elif self._m() == 1 and self._cvc(self.k):
self._setto("e")
def _step1c(self):
"""Turn terminal 'y' to 'i' when there is another vowel in the stem."""
if self._ends("y") and self._vowelinstem():
self.b = self.b[:self.k] + 'i'
def _step2(self):
"""Map double suffices to single ones.
So, -ization ( = -ize plus -ation) maps to -ize etc. Note that the
string before the suffix must give _m() > 0.
"""
ch = self.b[self.k - 1]
if ch == 'a':
if self._ends("ational"):
self._r("ate")
elif self._ends("tional"):
self._r("tion")
elif ch == 'c':
if self._ends("enci"):
self._r("ence")
elif self._ends("anci"):
self._r("ance")
elif ch == 'e':
if self._ends("izer"):
self._r("ize")
elif ch == 'l':
if self._ends("bli"):
self._r("ble") # --DEPARTURE--
# To match the published algorithm, replace this phrase with
# if self._ends("abli"): self._r("able")
elif self._ends("alli"):
self._r("al")
elif self._ends("entli"):
self._r("ent")
elif self._ends("eli"):
self._r("e")
elif self._ends("ousli"):
self._r("ous")
elif ch == 'o':
if self._ends("ization"):
self._r("ize")
elif self._ends("ation"):
self._r("ate")
elif self._ends("ator"):
self._r("ate")
elif ch == 's':
if self._ends("alism"):
self._r("al")
elif self._ends("iveness"):
self._r("ive")
elif self._ends("fulness"):
self._r("ful")
elif self._ends("ousness"):
self._r("ous")
elif ch == 't':
if self._ends("aliti"):
self._r("al")
elif self._ends("iviti"):
self._r("ive")
elif self._ends("biliti"):
self._r("ble")
elif ch == 'g': # --DEPARTURE--
if self._ends("logi"):
self._r("log")
# To match the published algorithm, delete this phrase
def _step3(self):
"""Deal with -ic-, -full, -ness etc. Similar strategy to _step2."""
ch = self.b[self.k]
if ch == 'e':
if self._ends("icate"):
self._r("ic")
elif self._ends("ative"):
self._r("")
elif self._ends("alize"):
self._r("al")
elif ch == 'i':
if self._ends("iciti"):
self._r("ic")
elif ch == 'l':
if self._ends("ical"):
self._r("ic")
elif self._ends("ful"):
self._r("")
elif ch == 's':
if self._ends("ness"):
self._r("")
def _step4(self):
"""Takes off -ant, -ence etc., in context <c>vcvc<v>."""
ch = self.b[self.k - 1]
if ch == 'a':
if not self._ends("al"):
return
elif ch == 'c':
if not self._ends("ance") and not self._ends("ence"):
return
elif ch == 'e':
if not self._ends("er"):
return
elif ch == 'i':
if not self._ends("ic"):
return
elif ch == 'l':
if not self._ends("able") and not self._ends("ible"):
return
elif ch == 'n':
if self._ends("ant"):
pass
elif self._ends("ement"):
pass
elif self._ends("ment"):
pass
elif self._ends("ent"):
pass
else:
return
elif ch == 'o':
if self._ends("ion") and self.b[self.j] in "st":
pass
elif self._ends("ou"):
pass
# takes care of -ous
else:
return
elif ch == 's':
if not self._ends("ism"):
return
elif ch == 't':
if not self._ends("ate") and not self._ends("iti"):
return
elif ch == 'u':
if not self._ends("ous"):
return
elif ch == 'v':
if not self._ends("ive"):
return
elif ch == 'z':
if not self._ends("ize"):
return
else:
return
if self._m() > 1:
self.k = self.j
def _step5(self):
"""Remove a final -e if _m() > 1, and change -ll to -l if m() > 1."""
k = self.j = self.k
if self.b[k] == 'e':
a = self._m()
if a > 1 or (a == 1 and not self._cvc(k - 1)):
self.k -= 1
if self.b[self.k] == 'l' and self._doublec(self.k) and self._m() > 1:
self.k -= 1
def stem(self, w):
"""Stem the word `w`.
Parameters
----------
w : str
Returns
-------
str
Stemmed version of `w`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.stem("ponies")
'poni'
"""
w = w.lower()
k = len(w) - 1
if k <= 1:
return w # --DEPARTURE--
# With this line, strings of length 1 or 2 don't go through the
# stemming process, although no mention is made of this in the
# published algorithm. Remove the line to match the published
# algorithm.
self.b = w
self.k = k
self._step1ab()
self._step1c()
self._step2()
self._step3()
self._step4()
self._step5()
return self.b[:self.k + 1]
def stem_sentence(self, txt):
"""Stem the sentence `txt`.
Parameters
----------
txt : str
Input sentence.
Returns
-------
str
Stemmed sentence.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.stem_sentence("Wow very nice woman with apple")
'wow veri nice woman with appl'
"""
return " ".join(self.stem(x) for x in txt.split())
def stem_documents(self, docs):
"""Stem documents.
Parameters
----------
docs : list of str
Input documents
Returns
-------
list of str
Stemmed documents.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.parsing.porter import PorterStemmer
>>> p = PorterStemmer()
>>> p.stem_documents(["Have a very nice weekend", "Have a very nice weekend"])
['have a veri nice weekend', 'have a veri nice weekend']
"""
return [self.stem_sentence(x) for x in docs]
if __name__ == '__main__':
import sys
p = PorterStemmer()
for f in sys.argv[1:]:
with open(f) as infile:
for line in infile:
print(p.stem_sentence(line))
|
from homeassistant import data_entry_flow
from homeassistant.components import twilio
from homeassistant.core import callback
from tests.async_mock import patch
async def test_config_flow_registers_webhook(hass, aiohttp_client):
"""Test setting up Twilio and sending webhook."""
with patch("homeassistant.util.get_local_ip", return_value="example.com"):
result = await hass.config_entries.flow.async_init(
"twilio", context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM, result
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
webhook_id = result["result"].data["webhook_id"]
twilio_events = []
@callback
def handle_event(event):
"""Handle Twilio event."""
twilio_events.append(event)
hass.bus.async_listen(twilio.RECEIVED_DATA, handle_event)
client = await aiohttp_client(hass.http.app)
await client.post(f"/api/webhook/{webhook_id}", data={"hello": "twilio"})
assert len(twilio_events) == 1
assert twilio_events[0].data["webhook_id"] == webhook_id
assert twilio_events[0].data["hello"] == "twilio"
|
from homeassistant.setup import async_setup_component
async def test_random_sensor(hass):
"""Test the Random number sensor."""
config = {
"sensor": {
"platform": "random",
"name": "test",
"minimum": 10,
"maximum": 20,
}
}
assert await async_setup_component(
hass,
"sensor",
config,
)
await hass.async_block_till_done()
state = hass.states.get("sensor.test")
assert int(state.state) <= config["sensor"]["maximum"]
assert int(state.state) >= config["sensor"]["minimum"]
|
import asyncio
from collections import OrderedDict
import async_timeout
from logi_circle import LogiCircle
from logi_circle.exception import AuthorizationFailed
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_SENSORS,
HTTP_BAD_REQUEST,
)
from homeassistant.core import callback
from .const import CONF_API_KEY, CONF_REDIRECT_URI, DEFAULT_CACHEDB, DOMAIN
_TIMEOUT = 15 # seconds
DATA_FLOW_IMPL = "logi_circle_flow_implementation"
EXTERNAL_ERRORS = "logi_errors"
AUTH_CALLBACK_PATH = "/api/logi_circle"
AUTH_CALLBACK_NAME = "api:logi_circle"
@callback
def register_flow_implementation(
hass, domain, client_id, client_secret, api_key, redirect_uri, sensors
):
"""Register a flow implementation.
domain: Domain of the component responsible for the implementation.
client_id: Client ID.
client_secret: Client secret.
api_key: API key issued by Logitech.
redirect_uri: Auth callback redirect URI.
sensors: Sensor config.
"""
if DATA_FLOW_IMPL not in hass.data:
hass.data[DATA_FLOW_IMPL] = OrderedDict()
hass.data[DATA_FLOW_IMPL][domain] = {
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
CONF_API_KEY: api_key,
CONF_REDIRECT_URI: redirect_uri,
CONF_SENSORS: sensors,
EXTERNAL_ERRORS: None,
}
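# A minimal call sketch (illustrative placeholder values; in practice the
# logi_circle component performs this registration itself when importing the
# YAML configuration):
#
#   register_flow_implementation(
#       hass,
#       DOMAIN,
#       client_id="client-id",
#       client_secret="client-secret",
#       api_key="api-key",
#       redirect_uri="https://example.com/api/logi_circle",
#       sensors=sensor_config,  # validated sensor platform config
#   )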
@config_entries.HANDLERS.register(DOMAIN)
class LogiCircleFlowHandler(config_entries.ConfigFlow):
"""Config flow for Logi Circle component."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize flow."""
self.flow_impl = None
async def async_step_import(self, user_input=None):
"""Handle external yaml configuration."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_configured")
self.flow_impl = DOMAIN
return await self.async_step_auth()
async def async_step_user(self, user_input=None):
"""Handle a flow start."""
flows = self.hass.data.get(DATA_FLOW_IMPL, {})
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_configured")
if not flows:
return self.async_abort(reason="missing_configuration")
if len(flows) == 1:
self.flow_impl = list(flows)[0]
return await self.async_step_auth()
if user_input is not None:
self.flow_impl = user_input["flow_impl"]
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}),
)
async def async_step_auth(self, user_input=None):
"""Create an entry for auth."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="external_setup")
external_error = self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS]
errors = {}
if external_error:
# Handle error from another flow
errors["base"] = external_error
self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] = None
elif user_input is not None:
errors["base"] = "follow_link"
url = self._get_authorization_url()
return self.async_show_form(
step_id="auth",
description_placeholders={"authorization_url": url},
errors=errors,
)
def _get_authorization_url(self):
"""Create temporary Circle session and generate authorization url."""
flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl]
client_id = flow[CONF_CLIENT_ID]
client_secret = flow[CONF_CLIENT_SECRET]
api_key = flow[CONF_API_KEY]
redirect_uri = flow[CONF_REDIRECT_URI]
logi_session = LogiCircle(
client_id=client_id,
client_secret=client_secret,
api_key=api_key,
redirect_uri=redirect_uri,
)
self.hass.http.register_view(LogiCircleAuthCallbackView())
return logi_session.authorize_url
async def async_step_code(self, code=None):
"""Received code for authentication."""
if self.hass.config_entries.async_entries(DOMAIN):
return self.async_abort(reason="already_configured")
return await self._async_create_session(code)
async def _async_create_session(self, code):
"""Create Logi Circle session and entries."""
flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN]
client_id = flow[CONF_CLIENT_ID]
client_secret = flow[CONF_CLIENT_SECRET]
api_key = flow[CONF_API_KEY]
redirect_uri = flow[CONF_REDIRECT_URI]
sensors = flow[CONF_SENSORS]
logi_session = LogiCircle(
client_id=client_id,
client_secret=client_secret,
api_key=api_key,
redirect_uri=redirect_uri,
cache_file=self.hass.config.path(DEFAULT_CACHEDB),
)
try:
with async_timeout.timeout(_TIMEOUT):
await logi_session.authorize(code)
except AuthorizationFailed:
(self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS]) = "invalid_auth"
return self.async_abort(reason="external_error")
except asyncio.TimeoutError:
(
self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS]
) = "authorize_url_timeout"
return self.async_abort(reason="external_error")
account_id = (await logi_session.account)["accountId"]
await logi_session.close()
return self.async_create_entry(
title=f"Logi Circle ({account_id})",
data={
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
CONF_API_KEY: api_key,
CONF_REDIRECT_URI: redirect_uri,
CONF_SENSORS: sensors,
},
)
class LogiCircleAuthCallbackView(HomeAssistantView):
"""Logi Circle Authorization Callback View."""
requires_auth = False
url = AUTH_CALLBACK_PATH
name = AUTH_CALLBACK_NAME
async def get(self, request):
"""Receive authorization code."""
hass = request.app["hass"]
if "code" in request.query:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": "code"}, data=request.query["code"]
)
)
return self.json_message("Authorisation code saved")
return self.json_message(
"Authorisation code missing from query string", status_code=HTTP_BAD_REQUEST
)
|
import dataclasses
from datetime import timedelta
import logging
from typing import Dict, List
import requests.exceptions
import upcloud_api
import voluptuous as vol
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
STATE_PROBLEM,
)
from homeassistant.core import CALLBACK_TYPE
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE, DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
ATTR_CORE_NUMBER = "core_number"
ATTR_HOSTNAME = "hostname"
ATTR_MEMORY_AMOUNT = "memory_amount"
ATTR_STATE = "state"
ATTR_TITLE = "title"
ATTR_UUID = "uuid"
ATTR_ZONE = "zone"
CONF_SERVERS = "servers"
DATA_UPCLOUD = "data_upcloud"
DEFAULT_COMPONENT_NAME = "UpCloud {}"
DEFAULT_COMPONENT_DEVICE_CLASS = "power"
CONFIG_ENTRY_DOMAINS = {BINARY_SENSOR_DOMAIN, SWITCH_DOMAIN}
SIGNAL_UPDATE_UPCLOUD = "upcloud_update"
STATE_MAP = {"error": STATE_PROBLEM, "started": STATE_ON, "stopped": STATE_OFF}
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
}
)
},
extra=vol.ALLOW_EXTRA,
)
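# An illustrative configuration.yaml snippet accepted by CONFIG_SCHEMA (YAML
# setup only triggers an import flow; config entries are the preferred path,
# see async_setup below):
#
#   upcloud:
#     username: YOUR_UPCLOUD_USERNAME
#     password: YOUR_UPCLOUD_PASSWORD
#     scan_interval: 60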
class UpCloudDataUpdateCoordinator(
DataUpdateCoordinator[Dict[str, upcloud_api.Server]]
):
"""UpCloud data update coordinator."""
def __init__(
self,
hass: HomeAssistantType,
*,
cloud_manager: upcloud_api.CloudManager,
update_interval: timedelta,
username: str,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass, _LOGGER, name=f"{username}@UpCloud", update_interval=update_interval
)
self.cloud_manager = cloud_manager
self.unsub_handlers: List[CALLBACK_TYPE] = []
async def async_update_config(self, config_entry: ConfigEntry) -> None:
"""Handle config update."""
self.update_interval = timedelta(
seconds=config_entry.options[CONF_SCAN_INTERVAL]
)
async def _async_update_data(self) -> Dict[str, upcloud_api.Server]:
return {
x.uuid: x
for x in await self.hass.async_add_executor_job(
self.cloud_manager.get_servers
)
}
@dataclasses.dataclass
class UpCloudHassData:
"""Home Assistant UpCloud runtime data."""
coordinators: Dict[str, UpCloudDataUpdateCoordinator] = dataclasses.field(
default_factory=dict
)
    scan_interval_migrations: Dict[str, timedelta] = dataclasses.field(default_factory=dict)
async def async_setup(hass: HomeAssistantType, config) -> bool:
"""Set up UpCloud component."""
domain_config = config.get(DOMAIN)
if not domain_config:
return True
_LOGGER.warning(
"Loading upcloud via top level config is deprecated and no longer "
"necessary as of 0.117. Please remove it from your YAML configuration."
)
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: domain_config[CONF_USERNAME],
CONF_PASSWORD: domain_config[CONF_PASSWORD],
},
)
)
if domain_config[CONF_SCAN_INTERVAL]:
hass.data[DATA_UPCLOUD] = UpCloudHassData()
hass.data[DATA_UPCLOUD].scan_interval_migrations[
domain_config[CONF_USERNAME]
] = domain_config[CONF_SCAN_INTERVAL]
return True
def _config_entry_update_signal_name(config_entry: ConfigEntry) -> str:
"""Get signal name for updates to a config entry."""
return CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE.format(config_entry.unique_id)
async def _async_signal_options_update(
hass: HomeAssistantType, config_entry: ConfigEntry
) -> None:
"""Signal config entry options update."""
async_dispatcher_send(
hass, _config_entry_update_signal_name(config_entry), config_entry
)
async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry) -> bool:
"""Set up the UpCloud config entry."""
manager = upcloud_api.CloudManager(
config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD]
)
try:
await hass.async_add_executor_job(manager.authenticate)
except upcloud_api.UpCloudAPIError:
_LOGGER.error("Authentication failed", exc_info=True)
return False
except requests.exceptions.RequestException as err:
_LOGGER.error("Failed to connect", exc_info=True)
raise ConfigEntryNotReady from err
upcloud_data = hass.data.setdefault(DATA_UPCLOUD, UpCloudHassData())
# Handle pre config entry (0.117) scan interval migration to options
migrated_scan_interval = upcloud_data.scan_interval_migrations.pop(
config_entry.data[CONF_USERNAME], None
)
if migrated_scan_interval and (
not config_entry.options.get(CONF_SCAN_INTERVAL)
or config_entry.options[CONF_SCAN_INTERVAL] == DEFAULT_SCAN_INTERVAL.seconds
):
update_interval = migrated_scan_interval
hass.config_entries.async_update_entry(
config_entry,
options={CONF_SCAN_INTERVAL: update_interval.seconds},
)
elif config_entry.options.get(CONF_SCAN_INTERVAL):
update_interval = timedelta(seconds=config_entry.options[CONF_SCAN_INTERVAL])
else:
update_interval = DEFAULT_SCAN_INTERVAL
coordinator = UpCloudDataUpdateCoordinator(
hass,
update_interval=update_interval,
cloud_manager=manager,
username=config_entry.data[CONF_USERNAME],
)
# Call the UpCloud API to refresh data
await coordinator.async_request_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
# Listen to config entry updates
coordinator.unsub_handlers.append(
config_entry.add_update_listener(_async_signal_options_update)
)
coordinator.unsub_handlers.append(
async_dispatcher_connect(
hass,
_config_entry_update_signal_name(config_entry),
coordinator.async_update_config,
)
)
upcloud_data.coordinators[config_entry.data[CONF_USERNAME]] = coordinator
# Forward entry setup
for domain in CONFIG_ENTRY_DOMAINS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, domain)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload the config entry."""
for domain in CONFIG_ENTRY_DOMAINS:
await hass.config_entries.async_forward_entry_unload(config_entry, domain)
coordinator: UpCloudDataUpdateCoordinator = hass.data[
DATA_UPCLOUD
].coordinators.pop(config_entry.data[CONF_USERNAME])
while coordinator.unsub_handlers:
coordinator.unsub_handlers.pop()()
return True
class UpCloudServerEntity(CoordinatorEntity):
"""Entity class for UpCloud servers."""
def __init__(self, coordinator, uuid):
"""Initialize the UpCloud server entity."""
super().__init__(coordinator)
self.uuid = uuid
@property
def _server(self) -> upcloud_api.Server:
return self.coordinator.data[self.uuid]
@property
def unique_id(self) -> str:
"""Return unique ID for the entity."""
return self.uuid
@property
def name(self):
"""Return the name of the component."""
try:
return DEFAULT_COMPONENT_NAME.format(self._server.title)
except (AttributeError, KeyError, TypeError):
return DEFAULT_COMPONENT_NAME.format(self.uuid)
@property
def icon(self):
"""Return the icon of this server."""
return "mdi:server" if self.is_on else "mdi:server-off"
@property
def state(self):
"""Return state of the server."""
try:
return STATE_MAP.get(self._server.state, self._server.state)
except AttributeError:
return None
@property
def is_on(self):
"""Return true if the server is on."""
return self.state == STATE_ON
@property
def device_class(self):
"""Return the class of this server."""
return DEFAULT_COMPONENT_DEVICE_CLASS
@property
def device_state_attributes(self):
"""Return the state attributes of the UpCloud server."""
return {
x: getattr(self._server, x, None)
for x in (
ATTR_UUID,
ATTR_TITLE,
ATTR_HOSTNAME,
ATTR_ZONE,
ATTR_CORE_NUMBER,
ATTR_MEMORY_AMOUNT,
)
}
|
from pyspcwebgw.const import AreaMode
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DATA_API, SIGNAL_UPDATE_ALARM
def _get_alarm_state(area):
"""Get the alarm state."""
if area.verified_alarm:
return STATE_ALARM_TRIGGERED
mode_to_state = {
AreaMode.UNSET: STATE_ALARM_DISARMED,
AreaMode.PART_SET_A: STATE_ALARM_ARMED_HOME,
AreaMode.PART_SET_B: STATE_ALARM_ARMED_NIGHT,
AreaMode.FULL_SET: STATE_ALARM_ARMED_AWAY,
}
return mode_to_state.get(area.mode)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the SPC alarm control panel platform."""
if discovery_info is None:
return
api = hass.data[DATA_API]
async_add_entities([SpcAlarm(area=area, api=api) for area in api.areas.values()])
class SpcAlarm(alarm.AlarmControlPanelEntity):
"""Representation of the SPC alarm panel."""
def __init__(self, area, api):
"""Initialize the SPC alarm panel."""
self._area = area
self._api = api
async def async_added_to_hass(self):
"""Call for adding new entities."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_ALARM.format(self._area.id),
self._update_callback,
)
)
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the device."""
return self._area.name
@property
def changed_by(self):
"""Return the user the last change was triggered by."""
return self._area.last_changed_by
@property
def state(self):
"""Return the state of the device."""
return _get_alarm_state(self._area)
@property
def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
await self._api.change_mode(area=self._area, new_mode=AreaMode.UNSET)
async def async_alarm_arm_home(self, code=None):
"""Send arm home command."""
await self._api.change_mode(area=self._area, new_mode=AreaMode.PART_SET_A)
async def async_alarm_arm_night(self, code=None):
"""Send arm home command."""
await self._api.change_mode(area=self._area, new_mode=AreaMode.PART_SET_B)
async def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
await self._api.change_mode(area=self._area, new_mode=AreaMode.FULL_SET)
|
import unittest
from pytext.config.field_config import FeatureConfig
from pytext.data.featurizer import InputRecord, SimpleFeaturizer
class TestPyText(unittest.TestCase):
def test_tokenize(self):
featurizer = SimpleFeaturizer.from_config(
SimpleFeaturizer.Config(), FeatureConfig()
)
tokens = featurizer.featurize(InputRecord(raw_text="At eight o'clock")).tokens
self.assertEqual(['at', 'eight', "o'clock"], tokens)
|
import os
import mock
from mock import patch
from paasta_tools.cli.cmds.validate import check_secrets_for_instance
from paasta_tools.cli.cmds.validate import check_service_path
from paasta_tools.cli.cmds.validate import get_schema
from paasta_tools.cli.cmds.validate import get_service_path
from paasta_tools.cli.cmds.validate import paasta_validate
from paasta_tools.cli.cmds.validate import paasta_validate_soa_configs
from paasta_tools.cli.cmds.validate import SCHEMA_INVALID
from paasta_tools.cli.cmds.validate import SCHEMA_VALID
from paasta_tools.cli.cmds.validate import UNKNOWN_SERVICE
from paasta_tools.cli.cmds.validate import validate_instance_names
from paasta_tools.cli.cmds.validate import validate_paasta_objects
from paasta_tools.cli.cmds.validate import validate_schema
from paasta_tools.cli.cmds.validate import validate_secrets
from paasta_tools.cli.cmds.validate import validate_tron
from paasta_tools.cli.cmds.validate import validate_unique_instance_names
@patch("paasta_tools.cli.cmds.validate.validate_unique_instance_names", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_paasta_objects", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_all_schemas", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_tron", autospec=True)
@patch("paasta_tools.cli.cmds.validate.get_service_path", autospec=True)
@patch("paasta_tools.cli.cmds.validate.check_service_path", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_secrets", autospec=True)
def test_paasta_validate_calls_everything(
mock_validate_secrets,
mock_check_service_path,
mock_get_service_path,
mock_validate_tron,
mock_validate_all_schemas,
mock_validate_paasta_objects,
mock_validate_unique_instance_names,
):
# Ensure each check in 'paasta_validate' is called
mock_validate_secrets.return_value = True
mock_check_service_path.return_value = True
mock_get_service_path.return_value = "unused_path"
mock_validate_all_schemas.return_value = True
mock_validate_tron.return_value = True
mock_validate_paasta_objects.return_value = True
mock_validate_unique_instance_names.return_value = True
args = mock.MagicMock()
args.service = "test"
args.soa_dir = None
paasta_validate(args)
assert mock_validate_all_schemas.called
assert mock_validate_tron.called
assert mock_validate_unique_instance_names.called
assert mock_validate_paasta_objects.called
assert mock_validate_secrets.called
@patch("paasta_tools.cli.cmds.validate.get_instance_config", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_clusters", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_all_instances_for_service", autospec=True)
@patch("paasta_tools.cli.cmds.validate.path_to_soa_dir_service", autospec=True)
def test_validate_paasta_objects(
mock_path_to_soa_dir_service,
mock_list_all_instances_for_service,
mock_list_clusters,
mock_get_instance_config,
capsys,
):
fake_service = "fake-service"
fake_instance = "fake-instance"
fake_cluster = "penguin"
mock_paasta_instance = mock.Mock(autospec=True)
mock_paasta_instance.validate.return_value = ["Something is wrong!"]
mock_path_to_soa_dir_service.return_value = ("fake_soa_dir", fake_service)
mock_list_clusters.return_value = [fake_cluster]
mock_list_all_instances_for_service.return_value = [fake_instance]
mock_get_instance_config.return_value = mock_paasta_instance
assert validate_paasta_objects("fake-service-path") is False, capsys
captured = capsys.readouterr()
assert "Something is wrong!" in captured.out
def test_get_service_path_unknown(capsys):
service = None
soa_dir = "unused"
assert get_service_path(service, soa_dir) is None
output, _ = capsys.readouterr()
assert UNKNOWN_SERVICE in output
def test_validate_unknown_service():
args = mock.MagicMock()
args.service = None
args.yelpsoa_config_root = "unused"
    assert paasta_validate(args) == 1
def test_validate_service_name():
args = mock.MagicMock()
args.service = "aa________________________________a"
args.yelpsoa_config_root = "unused"
    assert paasta_validate(args) == 1
def test_validate_unknown_service_service_path():
service_path = "unused/path"
service = "unused"
assert not paasta_validate_soa_configs(service, service_path)
@patch("paasta_tools.cli.cmds.validate.os.path.isdir", autospec=True)
@patch("paasta_tools.cli.cmds.validate.glob", autospec=True)
def test_get_service_path_cwd(mock_glob, mock_isdir):
mock_isdir.return_value = True
mock_glob.return_value = ["something.yaml"]
service = None
soa_dir = os.getcwd()
service_path = get_service_path(service, soa_dir)
assert service_path == os.getcwd()
@patch("paasta_tools.cli.cmds.validate.os.path.isdir", autospec=True)
@patch("paasta_tools.cli.cmds.validate.glob", autospec=True)
def test_get_service_path_soa_dir(mock_glob, mock_isdir):
mock_isdir.return_value = True
mock_glob.return_value = ["something.yaml"]
service = "some_service"
soa_dir = "some/path"
service_path = get_service_path(service, soa_dir)
assert service_path == f"{soa_dir}/{service}"
def is_schema(schema):
assert schema is not None
assert isinstance(schema, dict)
assert "$schema" in schema
def test_get_schema_marathon_found():
schema = get_schema("marathon")
is_schema(schema)
def test_get_schema_tron_found():
schema = get_schema("tron")
is_schema(schema)
def test_get_schema_missing():
assert get_schema("fake_schema") is None
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_list_hashes_good(mock_get_file_contents, capsys):
marathon_content = """
---
main_worker:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
healthcheck_mode: cmd
healthcheck_cmd: '/bin/true'
_main_http:
cpus: 0.1
instances: 2
mem: 250
disk: 512
registrations: ['foo.bar', 'bar.baz']
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_validate_instance_names(mock_get_file_contents, capsys):
fake_instances = {
"a_________________________________________a": {},
"b_________________________________________b": {},
}
assert not validate_instance_names(fake_instances, "fake_path")
output, _ = capsys.readouterr()
assert "Length of instance name" in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_understands_underscores(mock_get_file_contents, capsys):
marathon_content = """
---
_template: &template
foo: bar
main:
cpus: 0.1
instances: 2
env:
<<: *template
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_healthcheck_non_cmd(mock_get_file_contents, capsys):
marathon_content = """
---
main_worker:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
healthcheck_mode: tcp
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
marathon_content = """
---
main_worker:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_id(mock_get_file_contents, capsys):
marathon_content = """
---
valid:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
marathon_content = """
---
this_is_okay_too_1:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
marathon_content = """
---
dashes-are-okay-too:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
marathon_content = """
---
main_worker_CAPITALS_INVALID:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
marathon_content = """
---
$^&*()(&*^%&definitely_not_okay:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_healthcheck_cmd_has_cmd(
mock_get_file_contents, capsys
):
marathon_content = """
---
main_worker:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
healthcheck_mode: cmd
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
marathon_content = """
---
main_worker:
cpus: 0.1
instances: 2
mem: 250
disk: 512
cmd: virtualenv_run/bin/python adindexer/adindex_worker.py
healthcheck_mode: cmd
healthcheck_cmd: '/bin/true'
"""
mock_get_file_contents.return_value = marathon_content
for schema_type in ["marathon", "kubernetes"]:
assert validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_keys_outside_instance_blocks_bad(
mock_get_file_contents, capsys
):
mock_get_file_contents.return_value = """
{
"main": {
"instances": 5
},
"page": false
}
"""
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.json", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_security_good(mock_get_file_contents, capsys):
mock_get_file_contents.return_value = """
main:
dependencies_reference: main
security:
outbound_firewall: block
"""
assert validate_schema("unused_service_path.yaml", "marathon")
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_schema_security_bad(mock_get_file_contents, capsys):
mock_get_file_contents.return_value = """
main:
dependencies_reference: main
security:
outbound_firewall: bblock
"""
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.yaml", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_marathon_validate_invalid_key_bad(mock_get_file_contents, capsys):
mock_get_file_contents.return_value = """
{
"main": {
"fake_key": 5
}
}
"""
for schema_type in ["marathon", "kubernetes"]:
assert not validate_schema("unused_service_path.json", schema_type)
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_tron_validate_schema_understands_underscores(mock_get_file_contents, capsys):
tron_content = """
_my_template: &a_template
actions:
first:
command: echo hello world
test_job:
node: batch_box
schedule:
type: cron
value: "0 7 * * 5"
<<: *a_template
"""
mock_get_file_contents.return_value = tron_content
assert validate_schema("unused_service_path.yaml", "tron")
output, _ = capsys.readouterr()
assert SCHEMA_VALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_tron_validate_schema_job_extra_properties_bad(mock_get_file_contents, capsys):
tron_content = """
test_job:
node: batch_box
schedule: "daily 04:00:00"
unexpected: 100
actions:
first:
command: echo hello world
"""
mock_get_file_contents.return_value = tron_content
assert not validate_schema("unused_service_path.yaml", "tron")
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_tron_validate_schema_actions_extra_properties_bad(
mock_get_file_contents, capsys
):
tron_content = """
test_job:
node: batch_box
schedule: "daily 04:00:00"
actions:
first:
command: echo hello world
something_else: true
"""
mock_get_file_contents.return_value = tron_content
assert not validate_schema("unused_service_path.yaml", "tron")
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
def test_tron_validate_schema_cleanup_action_extra_properties_bad(
mock_get_file_contents, capsys
):
tron_content = """
test_job:
node: batch_box
schedule: "daily 04:00:00"
actions:
first:
command: echo hello world
cleanup_action:
command: rm output
other_key: value
"""
mock_get_file_contents.return_value = tron_content
assert not validate_schema("unused_service_path.yaml", "tron")
output, _ = capsys.readouterr()
assert SCHEMA_INVALID in output
@patch("paasta_tools.cli.cmds.validate.list_tron_clusters", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_complete_config", autospec=True)
def test_validate_tron_with_service_invalid(
mock_validate_tron_config, mock_list_clusters, capsys
):
mock_list_clusters.return_value = ["dev", "stage", "prod"]
mock_validate_tron_config.side_effect = [[], ["some error"], []]
assert not validate_tron("soa/my_service")
mock_list_clusters.assert_called_once_with("my_service", "soa")
expected_calls = [
mock.call("my_service", cluster, "soa")
for cluster in mock_list_clusters.return_value
]
assert mock_validate_tron_config.call_args_list == expected_calls
output, _ = capsys.readouterr()
assert "some error" in output
@patch("paasta_tools.cli.cmds.validate.list_tron_clusters", autospec=True)
@patch("paasta_tools.cli.cmds.validate.validate_complete_config", autospec=True)
def test_validate_tron_with_service_valid(
mock_validate_tron_config, mock_list_clusters, capsys
):
mock_list_clusters.return_value = ["dev", "prod"]
mock_validate_tron_config.side_effect = [[], []]
assert validate_tron("soa/my_service")
mock_list_clusters.assert_called_once_with("my_service", "soa")
expected_calls = [
mock.call("my_service", cluster, "soa")
for cluster in mock_list_clusters.return_value
]
assert mock_validate_tron_config.call_args_list == expected_calls
output, _ = capsys.readouterr()
assert "tron-dev.yaml is valid" in output
def test_check_service_path_none(capsys):
service_path = None
assert not check_service_path(service_path)
output, _ = capsys.readouterr()
assert "%s is not a directory" % service_path in output
@patch("paasta_tools.cli.cmds.validate.os.path.isdir", autospec=True)
def test_check_service_path_empty(mock_isdir, capsys):
mock_isdir.return_value = True
service_path = "fake/path"
assert not check_service_path(service_path)
output, _ = capsys.readouterr()
assert "%s does not contain any .yaml files" % service_path in output
@patch("paasta_tools.cli.cmds.validate.os.path.isdir", autospec=True)
@patch("paasta_tools.cli.cmds.validate.glob", autospec=True)
def test_check_service_path_good(mock_glob, mock_isdir):
mock_isdir.return_value = True
mock_glob.return_value = True
service_path = "fake/path"
assert check_service_path(service_path)
@patch("paasta_tools.cli.cmds.validate.get_service_instance_list", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_clusters", autospec=True)
def test_validate_unique_service_name_success(
mock_list_clusters, mock_get_service_instance_list
):
service_name = "service_1"
mock_list_clusters.return_value = ["cluster_1"]
mock_get_service_instance_list.return_value = [
(service_name, "instance_1"),
(service_name, "instance_2"),
(service_name, "instance_3"),
]
assert validate_unique_instance_names(f"soa/{service_name}")
@patch("paasta_tools.cli.cmds.validate.get_service_instance_list", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_clusters", autospec=True)
def test_validate_unique_service_name_failure(
mock_list_clusters, mock_get_service_instance_list, capsys
):
service_name = "service_1"
mock_list_clusters.return_value = ["cluster_1"]
mock_get_service_instance_list.return_value = [
(service_name, "instance_1"),
(service_name, "instance_2"),
(service_name, "instance_1"),
]
assert not validate_unique_instance_names(f"soa/{service_name}")
output, _ = capsys.readouterr()
assert "instance_1" in output
@patch("paasta_tools.cli.cmds.validate.get_instance_config", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_clusters", autospec=True)
@patch("paasta_tools.cli.cmds.validate.list_all_instances_for_service", autospec=True)
@patch("paasta_tools.cli.cmds.validate.path_to_soa_dir_service", autospec=True)
@patch("paasta_tools.cli.cmds.validate.load_system_paasta_config", autospec=True)
@patch("paasta_tools.cli.cmds.validate.check_secrets_for_instance", autospec=True)
def test_validate_secrets(
mock_check_secrets_for_instance,
mock_load_system_paasta_config,
mock_path_to_soa_dir_service,
mock_list_all_instances_for_service,
mock_list_clusters,
mock_get_instance_config,
capsys,
):
mock_path_to_soa_dir_service.return_value = ("fake_soa_dir", "fake_service")
mock_list_clusters.return_value = ["fake_cluster"]
mock_load_system_paasta_config.return_value = mock.Mock(
get_vault_cluster_config=mock.Mock(
return_value={"fake_cluster": "fake_vault_env"}
)
)
mock_list_all_instances_for_service.return_value = [
"fake_instance",
"fake_instance2",
]
mock_paasta_instance = mock.Mock(
config_dict={"env": {"SUPER_SECRET1": "SECRET(secret1)"}}
)
mock_paasta_instance2 = mock.Mock(
config_dict={"env": {"SUPER_SECRET1": "SHARED_SECRET(secret1)"}}
)
mock_get_instance_config.side_effect = [mock_paasta_instance, mock_paasta_instance2]
mock_check_secrets_for_instance.return_value = True
assert validate_secrets("fake-service-path"), capsys
captured = capsys.readouterr()
assert "No orphan secrets found" in captured.out
assert mock_check_secrets_for_instance.call_count == 2
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
@patch("paasta_tools.cli.cmds.validate.os.path.isfile", autospec=True)
def test_check_secrets_for_instance(mock_isfile, mock_get_file_contents):
instance_config_dict = {"env": {"SUPER_SECRET1": "SECRET(secret1)"}}
soa_dir = "fake_soa_dir"
service_path = "fake-service-path"
vault_env = "fake_vault_env"
secret_content = """
{
"environments": {
"fake_vault_env": {
"ciphertext": "bla"
}
}
}
"""
mock_get_file_contents.return_value = secret_content
mock_isfile.return_value = True
assert check_secrets_for_instance(
instance_config_dict, soa_dir, service_path, vault_env
)
mock_get_file_contents.assert_called_with("fake-service-path/secrets/secret1.json")
instance_config_dict = {"env": {"SUPER_SECRET1": "SHARED_SECRET(secret1)"}}
assert check_secrets_for_instance(
instance_config_dict, soa_dir, service_path, vault_env
)
mock_get_file_contents.assert_called_with(
"fake_soa_dir/_shared/secrets/secret1.json"
)
@patch("paasta_tools.cli.cmds.validate.get_file_contents", autospec=True)
@patch("paasta_tools.cli.cmds.validate.os.path.isfile", autospec=True)
def test_check_secrets_for_instance_missing_secret(
mock_isfile, mock_get_file_contents, capsys
):
instance_config_dict = {"env": {"SUPER_SECRET1": "SECRET(secret1)"}}
soa_dir = "fake_soa_dir"
service_path = "fake-service-path"
vault_env = "even_more_fake_vault_env"
secret_content = """
{
"environments": {
"fake_vault_env": {
"ciphertext": "bla"
}
}
}
"""
mock_get_file_contents.return_value = secret_content
mock_isfile.return_value = True
assert not check_secrets_for_instance(
instance_config_dict, soa_dir, service_path, vault_env
), capsys
captured = capsys.readouterr()
assert (
"Secret secret1 not defined for ecosystem even_more_fake_vault_env on secret file fake-service-path/secrets/secret1.json"
in captured.out
)
|
from unittest import TestCase
import pandas as pd
from scattertext.CorpusFromPandas import CorpusFromPandas
from scattertext.WhitespaceNLP import whitespace_nlp
from scattertext.semioticsquare.FourSquareAxis import FourSquareAxes
def get_docs_categories_four():
documents = [u"What art thou that usurp'st this time of night,",
u'Together with that fair and warlike form',
u'In which the majesty of buried Denmark',
u'Did sometimes march? by heaven I charge thee, speak!',
u'Halt! Who goes there?',
u'[Intro]',
u'It is I sire Tone from Brooklyn.',
u'Well, speak up man what is it?',
u'News from the East sire! THE BEST OF BOTH WORLDS HAS RETURNED!',
u'I think it therefore manifest, from what I have here advanced,',
u'that the main Point of Skill and Address, is to furnish Employment',
u'for this Redundancy of Vapour, and prudently to adjust the Season 1',
u'of it ; by which ,means it may certainly become of Cardinal',
u"Ain't it just like the night to play tricks when you're tryin' to be so quiet?",
u"We sit here stranded, though we're all doin' our best to deny it",
u"And Louise holds a handful of rain, temptin' you to defy it",
u'Lights flicker from the opposite loft',
u'In this room the heat pipes just cough',
u'The country music station plays soft']
categories = ['hamlet'] * 4 + ['jay-z/r. kelly'] * 5 + ['swift'] * 4 + ['dylan'] * 6
return categories, documents
class TestFourSquareAxes(TestCase):
def test_build(self):
corpus = self._get_test_corpus()
with self.assertRaises(AssertionError):
fs = FourSquareAxes(corpus, 'hamlet', ['jay-z/r. kelly'], ['swift'], ['dylan'])
with self.assertRaises(AssertionError):
fs = FourSquareAxes(corpus, ['hamlet'], 'jay-z/r. kelly', ['swift'], ['dylan'])
with self.assertRaises(AssertionError):
fs = FourSquareAxes(corpus, ['hamlet'], ['jay-z/r. kelly'], 'swift', ['dylan'])
with self.assertRaises(AssertionError):
fs = FourSquareAxes(corpus, ['hamlet'], ['jay-z/r. kelly'], ['swift'], 'dylan')
fs = FourSquareAxes(corpus, ['hamlet'], ['jay-z/r. kelly'], ['swift'], ['dylan'])
self.assertEqual(fs.get_labels(),
{'a_and_b_label': 'swift',
'a_and_not_b_label': 'hamlet',
'a_label': '',
'b_and_not_a_label': 'jay-z/r. kelly',
'b_label': '',
'not_a_and_not_b_label': 'dylan',
'not_a_label': '',
'not_b_label': ''})
fs = FourSquareAxes(corpus, ['hamlet'], ['jay-z/r. kelly'], ['swift'], ['dylan'],
labels={'a': 'swiftham', 'b': 'swiftj'})
self.assertEqual(fs.get_labels(),
{'a_and_b_label': 'swift',
'a_and_not_b_label': 'hamlet',
'a_label': 'swiftham',
'b_and_not_a_label': 'jay-z/r. kelly',
'b_label': 'swiftj',
'not_a_and_not_b_label': 'dylan',
'not_a_label': '',
'not_b_label': ''})
axes = fs.get_axes()
self.assertEqual(len(axes), len(corpus.get_terms()))
self.assertEqual(set(axes.columns), {'x', 'y', 'counts'})
        fs.lexicons  # accessing the lexicons property should build without raising
def _get_test_corpus(self):
cats, docs = get_docs_categories_four()
df = pd.DataFrame({'category': cats, 'text': docs})
corpus = CorpusFromPandas(df, 'category', 'text', nlp=whitespace_nlp).build()
return corpus
def _get_test_semiotic_square(self):
corpus = self._get_test_corpus()
semsq = FourSquareAxes(corpus, ['hamlet'], ['jay-z/r. kelly'], ['swift'], ['dylan'])
return semsq
|
import configparser
import logging
import os
import json
import requests
import time
from acdcli.utils.conf import get_conf
from . import oauth
from .backoff_req import BackOffRequest
from .common import *
from .account import AccountMixin
from .content import ContentMixin
from .metadata import MetadataMixin
from .trash import TrashMixin
logger = logging.getLogger(__name__)
_EXP_TIME_KEY = 'exp_time'
_AMZ_ENDPOINT_REQ_URL = 'https://drive.amazonaws.com/drive/v1/account/endpoint'
_SETTINGS_FILENAME = 'acd_client.ini'
_def_conf = configparser.ConfigParser()
_def_conf['endpoints'] = dict(filename='endpoint_data', validity_duration=259200)
_def_conf['transfer'] = dict(fs_chunk_size=128 * 1024, dl_chunk_size=500 * 1024 ** 2,
chunk_retries=1, connection_timeout=30, idle_timeout=60)
_def_conf['proxies'] = dict()
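# An illustrative acd_client.ini overriding the defaults above (any option
# omitted here falls back to _def_conf; the [proxies] section is handed to
# BackOffRequest unchanged):
#
#   [transfer]
#   fs_chunk_size = 262144
#   connection_timeout = 15
#
#   [proxies]
#   https = https://proxy.example.com:8080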
class ACDClient(AccountMixin, ContentMixin, MetadataMixin, TrashMixin):
"""Provides a client to the Amazon Cloud Drive RESTful interface."""
def __init__(self, cache_path='', settings_path=''):
"""Initializes OAuth and endpoints."""
self._conf = get_conf(settings_path, _SETTINGS_FILENAME, _def_conf)
self.cache_path = cache_path
logger.info('Initializing ACD with path "%s".' % cache_path)
self.handler = oauth.create_handler(cache_path)
self._endpoint_data = {}
self._load_endpoints()
requests_timeout = (self._conf.getint('transfer', 'connection_timeout'),
self._conf.getint('transfer', 'idle_timeout'))
proxies = dict(self._conf['proxies'])
self.BOReq = BackOffRequest(self.handler, requests_timeout, proxies)
@property
def _endpoint_data_path(self):
return os.path.join(self.cache_path, self._conf['endpoints']['filename'])
def _load_endpoints(self):
"""Tries to load endpoints from file and calls
:meth:`_get_endpoints` on failure or if they are outdated."""
if not os.path.isfile(self._endpoint_data_path):
self._endpoint_data = self._get_endpoints()
else:
with open(self._endpoint_data_path) as ep:
self._endpoint_data = json.load(ep)
if time.time() > self._endpoint_data[_EXP_TIME_KEY]:
logger.info('Endpoint data expired.')
self._endpoint_data = self._get_endpoints()
def _get_endpoints(self) -> dict:
"""Retrieves Amazon endpoints and saves them on success.
:raises: ValueError if requests returned invalid JSON
:raises: KeyError if endpoint data does not include expected keys"""
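        # Illustrative shape of the endpoint payload; only the keys read by the
        # metadata_url/content_url properties are assumed here, and an
        # 'exp_time' timestamp is added below before caching to disk:
        #   {"metadataUrl": "https://...", "contentUrl": "https://...", ...}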
r = requests.get(_AMZ_ENDPOINT_REQ_URL, auth=self.handler)
if r.status_code not in OK_CODES:
logger.critical('Error getting endpoint data. Response: %s' % r.text)
raise Exception
try:
e = r.json()
except ValueError as e:
logger.critical('Invalid JSON: "%s"' % r.text)
raise e
e[_EXP_TIME_KEY] = time.time() + self._conf.getint('endpoints', 'validity_duration')
self._endpoint_data = e
try:
self.metadata_url
self.content_url
except KeyError as e:
logger.critical('Received invalid endpoint data.')
raise e
self._save_endpoint_data()
return e
def _save_endpoint_data(self):
        with open(self._endpoint_data_path, 'w') as f:
            json.dump(self._endpoint_data, f, indent=4, sort_keys=True)
            f.flush()
            os.fsync(f.fileno())
@property
def metadata_url(self):
return self._endpoint_data['metadataUrl']
@property
def content_url(self):
return self._endpoint_data['contentUrl']
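# Minimal usage sketch (illustrative; assumes OAuth data is already present in
# `cache_path` so that oauth.create_handler() succeeds):
#
#   client = ACDClient(cache_path='/path/to/cache')
#   print(client.metadata_url, client.content_url)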
|
import copy
import logging
import os
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import data
from perfkitbenchmarker import object_storage_service
from perfkitbenchmarker import sample
from perfkitbenchmarker.dpb_service import BaseDpbService
BENCHMARK_NAME = 'dpb_spark_io_benchmark'
BENCHMARK_CONFIG = """
dpb_spark_io_benchmark:
flags:
cloud: GCP
dpb_service_zone: us-east1-b
description: >
Create a dpb cluster and Run a Spark IO application.
dpb_service:
service_type: dataproc
worker_group:
vm_spec:
GCP:
machine_type: n1-standard-1
disk_spec:
GCP:
disk_type: pd-standard
disk_size: 500
worker_count: 2
"""
FLAGS = flags.FLAGS
SPARK_SAMPLE_SCRIPT = 'query.sql'
RESOURCE_LIFECYCLE_ARTIFACTS = {
'dml_script': {
'artifact': 'dml_script.py'
},
'data': {
'artifact': 'data.snappy.parquet',
'prefix': '/data/'
},
'query_script': {
'artifact': SPARK_SAMPLE_SCRIPT
}
}
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def ManageLifecycleResources(base_folder, dpb_service_instance,
storage_service):
"""Takes the static artifacts and persists them in object store for execution.
Args:
base_folder: Base folder for the current benchmark run.
dpb_service_instance: An instance of the dpb service being benchmarked.
storage_service: The object store to use for persistence of the artifacts.
Returns:
A dictionary of lifecycle to resource uris.
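    For example (illustrative; assuming a Dataproc-style service whose
    PERSISTENT_FS_PREFIX is 'gs://' and a base folder of 'abc123'):
      {'dml_script': 'gs://abc123_dml_script/dml_script.py',
       'data': 'gs://abc123_data/data',
       'query_script': 'gs://abc123_query_script/query.sql'}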
"""
resource_uri_dictionary = {}
for lifecycle_step, artifact_details in RESOURCE_LIFECYCLE_ARTIFACTS.items():
lifecycle_bucket_name = '{}_{}'.format(base_folder, lifecycle_step)
dpb_service_instance.CreateBucket(lifecycle_bucket_name)
lifecycle_folder_uri = '{}{}'.format(
dpb_service_instance.PERSISTENT_FS_PREFIX, lifecycle_bucket_name)
if 'prefix' in artifact_details.keys():
lifecycle_folder_uri = '{}{}'.format(lifecycle_folder_uri,
artifact_details['prefix'])
static_artifact_url = data.ResourcePath(
os.path.join('spark_io', artifact_details['artifact']))
storage_service.Copy(static_artifact_url, lifecycle_folder_uri)
if 'prefix' in artifact_details.keys():
lifecycle_artifact_uri = lifecycle_folder_uri[0:len(
lifecycle_folder_uri) - 1]
else:
lifecycle_artifact_uri = '{}/{}'.format(lifecycle_folder_uri,
artifact_details['artifact'])
resource_uri_dictionary[lifecycle_step] = lifecycle_artifact_uri
return resource_uri_dictionary
def Prepare(benchmark_spec):
"""Prepare phase uses schema creation script and sample data to prepare table.
Args:
benchmark_spec: Configuration that holds the definition and instance details
of the resources used for benchmarking.
"""
storage_service = object_storage_service.GetObjectStorageClass(FLAGS.cloud)()
dpb_service_instance = benchmark_spec.dpb_service
run_uri = benchmark_spec.uuid.split('-')[0]
uri_map = ManageLifecycleResources(run_uri, dpb_service_instance,
storage_service)
dml_script_uri = uri_map['dml_script']
data_folder_uri = uri_map['data']
stats = dpb_service_instance.SubmitJob(
pyspark_file=dml_script_uri,
job_type=BaseDpbService.PYSPARK_JOB_TYPE,
job_arguments=[data_folder_uri])
logging.info(stats)
if not stats['success']:
logging.warning('Table Creation Failed')
def Run(benchmark_spec):
"""Executes the sql script on the specified Spark cluster.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects.
Raises:
JobSubmissionError if SQL script fails.
"""
results = []
dpb_service_instance = benchmark_spec.dpb_service
run_uri = benchmark_spec.uuid.split('-')[0]
metadata = copy.copy(dpb_service_instance.GetMetadata())
query_script_folder = '{}_query_script'.format(run_uri)
query_script_folder_uri = '{}{}'.format(
dpb_service_instance.PERSISTENT_FS_PREFIX, query_script_folder)
query_script_uri = '{}/{}'.format(query_script_folder_uri,
SPARK_SAMPLE_SCRIPT)
result = dpb_service_instance.SubmitJob(
query_file=query_script_uri,
job_type=BaseDpbService.SPARKSQL_JOB_TYPE)
logging.info(result)
  results.append(
      sample.Sample('run_time', result.run_time, 'seconds', metadata))
  return results
def Cleanup(benchmark_spec):
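  """Deletes the object store buckets created for the benchmark artifacts.

  Args:
    benchmark_spec: Configuration that holds the definition and instance details
      of the resources used for benchmarking.
  """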
storage_service = object_storage_service.GetObjectStorageClass(FLAGS.cloud)()
base_folder = benchmark_spec.uuid.split('-')[0]
for lifecycle_step, _ in RESOURCE_LIFECYCLE_ARTIFACTS.items():
dml_script_folder = '{}_{}'.format(base_folder, lifecycle_step)
storage_service.DeleteBucket(dml_script_folder)
|
import asyncio
import pytest
import random
import textwrap
from redbot.core.utils import (
chat_formatting,
bounded_gather,
bounded_gather_iter,
deduplicate_iterables,
common_filters,
)
def test_bordered_symmetrical():
expected = textwrap.dedent(
"""\
┌──────────────┐ ┌─────────────┐
│one │ │four │
│two │ │five │
│three │ │six │
└──────────────┘ └─────────────┘"""
)
col1, col2 = ["one", "two", "three"], ["four", "five", "six"]
assert chat_formatting.bordered(col1, col2) == expected
def test_bordered_asymmetrical():
expected = textwrap.dedent(
"""\
┌──────────────┐ ┌──────────────┐
│one │ │four │
│two │ │five │
│three │ │six │
└──────────────┘ │seven │
└──────────────┘"""
)
col1, col2 = ["one", "two", "three"], ["four", "five", "six", "seven"]
assert chat_formatting.bordered(col1, col2) == expected
def test_bordered_asymmetrical_2():
expected = textwrap.dedent(
"""\
┌──────────────┐ ┌─────────────┐
│one │ │five │
│two │ │six │
│three │ └─────────────┘
│four │
└──────────────┘ """
)
col1, col2 = ["one", "two", "three", "four"], ["five", "six"]
assert chat_formatting.bordered(col1, col2) == expected
def test_bordered_ascii():
expected = textwrap.dedent(
"""\
+--------------+ +-------------+
|one | |four |
|two | |five |
|three | |six |
+--------------+ +-------------+"""
)
col1, col2 = ["one", "two", "three"], ["four", "five", "six"]
assert chat_formatting.bordered(col1, col2, ascii_border=True) == expected
def test_deduplicate_iterables():
expected = [1, 2, 3, 4, 5]
inputs = [[1, 2, 1], [3, 1, 2, 4], [5, 1, 2]]
assert deduplicate_iterables(*inputs) == expected
@pytest.mark.asyncio
async def test_bounded_gather():
status = [0, 0] # num_running, max_running
async def wait_task(i, delay, status, fail=False):
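        """Sleep for ``delay`` while tracking concurrency in ``status``; raise if ``fail``."""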
status[0] += 1
await asyncio.sleep(delay)
status[1] = max(status)
status[0] -= 1
if fail:
raise RuntimeError
return i
num_concurrent = random.randint(2, 8)
num_tasks = random.randint(4 * num_concurrent, 5 * num_concurrent)
num_fail = random.randint(num_concurrent, num_tasks)
tasks = [wait_task(i, random.random() / 1000, status) for i in range(num_tasks)]
tasks += [wait_task(i, random.random() / 1000, status, fail=True) for i in range(num_fail)]
num_failed = 0
results = await bounded_gather(*tasks, limit=num_concurrent, return_exceptions=True)
for i, result in enumerate(results):
if isinstance(result, RuntimeError):
num_failed += 1
else:
            assert result == i  # verify original order
assert 0 <= result < num_tasks
assert 0 < status[1] <= num_concurrent
assert num_fail == num_failed
@pytest.mark.asyncio
async def test_bounded_gather_iter():
status = [0, 0] # num_running, max_running
async def wait_task(i, delay, status, fail=False):
status[0] += 1
await asyncio.sleep(delay)
status[1] = max(status)
status[0] -= 1
if fail:
raise RuntimeError
return i
num_concurrent = random.randint(2, 8)
num_tasks = random.randint(4 * num_concurrent, 16 * num_concurrent)
num_fail = random.randint(num_concurrent, num_tasks)
tasks = [wait_task(i, random.random() / 1000, status) for i in range(num_tasks)]
tasks += [wait_task(i, random.random() / 1000, status, fail=True) for i in range(num_fail)]
random.shuffle(tasks)
num_failed = 0
for result in bounded_gather_iter(*tasks, limit=num_concurrent):
try:
result = await result
except RuntimeError:
num_failed += 1
continue
assert 0 <= result < num_tasks
assert 0 < status[1] <= num_concurrent
assert num_fail == num_failed
@pytest.mark.skip(reason="spams logs with pending task warnings")
@pytest.mark.asyncio
async def test_bounded_gather_iter_cancel():
status = [0, 0, 0] # num_running, max_running, num_ran
async def wait_task(i, delay, status, fail=False):
status[0] += 1
await asyncio.sleep(delay)
status[1] = max(status[:2])
status[0] -= 1
if fail:
raise RuntimeError
status[2] += 1
return i
num_concurrent = random.randint(2, 8)
num_tasks = random.randint(4 * num_concurrent, 16 * num_concurrent)
quit_on = random.randint(0, num_tasks)
num_fail = random.randint(num_concurrent, num_tasks)
tasks = [wait_task(i, random.random() / 1000, status) for i in range(num_tasks)]
tasks += [wait_task(i, random.random() / 1000, status, fail=True) for i in range(num_fail)]
random.shuffle(tasks)
num_failed = 0
i = 0
for result in bounded_gather_iter(*tasks, limit=num_concurrent):
try:
result = await result
except RuntimeError:
num_failed += 1
continue
if i == quit_on:
break
assert 0 <= result < num_tasks
i += 1
assert 0 < status[1] <= num_concurrent
assert quit_on <= status[2] <= quit_on + num_concurrent
assert num_failed <= num_fail
def test_normalize_smartquotes():
assert common_filters.normalize_smartquotes("Should\u2018 normalize") == "Should' normalize"
assert common_filters.normalize_smartquotes("Same String") == "Same String"
|
import PIL
import random
import unittest
import numpy as np
import chainer
from chainer import testing
from chainercv.transforms import flip
from chainercv.transforms import rotate
try:
import cv2 # NOQA
_cv2_available = True
except ImportError:
_cv2_available = False
@testing.parameterize(*testing.product({
'interpolation': [PIL.Image.NEAREST, PIL.Image.BILINEAR,
PIL.Image.BICUBIC],
'fill': [-1, 0, 100],
'size': [(3, 32, 24), (1, 32, 24)],
}))
class TestRotate(unittest.TestCase):
def test_rotate_pil(self):
img = np.random.uniform(0, 256, size=self.size).astype(np.float32)
angle = random.uniform(-180, 180)
with chainer.using_config('cv_rotate_backend', 'PIL'):
out = rotate(img, angle, fill=self.fill,
interpolation=self.interpolation)
expected = flip(img, x_flip=True)
with chainer.using_config('cv_rotate_backend', 'PIL'):
expected = rotate(
expected, -1 * angle, fill=self.fill,
interpolation=self.interpolation)
expected = flip(expected, x_flip=True)
if self.interpolation == PIL.Image.NEAREST:
assert np.mean(out == expected) > 0.99
else:
np.testing.assert_almost_equal(out, expected, decimal=3)
def test_rotate_none_and_cv2(self):
backends = [None, 'cv2'] if _cv2_available else [None]
for backend in backends:
img = np.random.uniform(0, 256, size=self.size).astype(np.float32)
angle = random.uniform(-180, 180)
with chainer.using_config('cv_rotate_backend', backend):
out = rotate(img, angle, fill=self.fill,
interpolation=self.interpolation)
opposite_out = rotate(img, -angle, fill=self.fill,
interpolation=self.interpolation)
assert out.shape[1:] == opposite_out.shape[1:]
def test_rotate_no_expand(self):
backends = [None, 'cv2', 'PIL'] if _cv2_available else [None, 'PIL']
for backend in backends:
img = np.random.uniform(0, 256, size=self.size).astype(np.float32)
angle = random.uniform(-180, 180)
with chainer.using_config('cv_rotate_backend', backend):
out = rotate(img, angle, fill=self.fill,
expand=False,
interpolation=self.interpolation)
assert out.shape == img.shape
@unittest.skipIf(_cv2_available, 'cv2 is installed')
class TestRotateRaiseErrorWithCv2(unittest.TestCase):
def test_rotate_raise_error_with_cv2(self):
img = np.random.uniform(0, 256, size=(3, 32, 24)).astype(np.float32)
angle = random.uniform(-180, 180)
with chainer.using_config('cv_rotate_backend', 'cv2'):
with self.assertRaises(ValueError):
rotate(img, angle)
testing.run_module(__name__, __file__)
|
from homeassistant.const import DEVICE_CLASS_POWER, ENERGY_KILO_WATT_HOUR
from . import VelbusEntity
from .const import DOMAIN
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Velbus sensor based on config_entry."""
cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"]
modules_data = hass.data[DOMAIN][entry.entry_id]["sensor"]
entities = []
for address, channel in modules_data:
module = cntrl.get_module(address)
entities.append(VelbusSensor(module, channel))
if module.get_class(channel) == "counter":
entities.append(VelbusSensor(module, channel, True))
async_add_entities(entities)
class VelbusSensor(VelbusEntity):
"""Representation of a sensor."""
def __init__(self, module, channel, counter=False):
"""Initialize a sensor Velbus entity."""
super().__init__(module, channel)
self._is_counter = counter
@property
def unique_id(self):
"""Return unique ID for counter sensors."""
unique_id = super().unique_id
if self._is_counter:
unique_id = f"{unique_id}-counter"
return unique_id
@property
def device_class(self):
"""Return the device class of the sensor."""
if self._module.get_class(self._channel) == "counter" and not self._is_counter:
if self._module.get_counter_unit(self._channel) == ENERGY_KILO_WATT_HOUR:
return DEVICE_CLASS_POWER
return None
return self._module.get_class(self._channel)
@property
def state(self):
"""Return the state of the sensor."""
if self._is_counter:
return self._module.get_counter_state(self._channel)
return self._module.get_state(self._channel)
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
if self._is_counter:
return self._module.get_counter_unit(self._channel)
return self._module.get_unit(self._channel)
@property
def icon(self):
"""Icon to use in the frontend."""
if self._is_counter:
return "mdi:counter"
return None
|
from unittest import TestCase
import pandas as pd
import numpy as np
from scattertext import SemioticSquare
from scattertext.CorpusFromPandas import CorpusFromPandas
from scattertext.WhitespaceNLP import whitespace_nlp
from scattertext.semioticsquare.SemioticSquare import EmptyNeutralCategoriesError
def get_docs_categories_semiotic():
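    """Return parallel lists of categories and documents for the test corpus."""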
documents = [u"What art thou that usurp'st this time of night,",
u'Together with that fair and warlike form',
u'In which the majesty of buried Denmark',
u'Did sometimes march? by heaven I charge thee, speak!',
u'Halt! Who goes there?',
u'[Intro]',
u'It is I sire Tone from Brooklyn.',
u'Well, speak up man what is it?',
u'News from the East sire! THE BEST OF BOTH WORLDS HAS RETURNED!',
u'I think it therefore manifest, from what I have here advanced,',
u'that the main Point of Skill and Address, is to furnish Employment',
u'for this Redundancy of Vapour, and prudently to adjust the Season 1',
u'of it ; by which ,means it may certainly become of Cardinal'
]
categories = ['hamlet'] * 4 + ['jay-z/r. kelly'] * 5 + ['swift'] * 4
return categories, documents
def get_test_corpus():
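    """Build a CorpusFromPandas corpus from the test categories and documents."""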
df = pd.DataFrame(data=np.array(get_docs_categories_semiotic()).T,
columns=['category', 'text'])
corpus = CorpusFromPandas(df, 'category', 'text', nlp=whitespace_nlp).build()
return corpus
def get_test_semiotic_square():
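    """Build a SemioticSquare over the test corpus with 'swift' as the neutral category."""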
corpus = get_test_corpus()
semsq = SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', ['swift'])
return semsq
class TestSemioticSquare(TestCase):
def test_constructor(self):
df = pd.DataFrame(data=np.array(get_docs_categories_semiotic()).T,
columns=['category', 'text'])
corpus = CorpusFromPandas(df, 'category', 'text', nlp=whitespace_nlp).build()
SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', ['swift'])
with self.assertRaises(AssertionError):
SemioticSquare(corpus, 'XXXhamlet', 'jay-z/r. kelly', ['swift'])
with self.assertRaises(AssertionError):
SemioticSquare(corpus, 'hamlet', 'jay-z/r. kellyXXX', ['swift'])
with self.assertRaises(AssertionError):
SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', ['swift', 'asd'])
with self.assertRaises(EmptyNeutralCategoriesError):
SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', [])
def test_get_labels(self):
corpus = get_test_corpus()
semsq = SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', ['swift'])
a, b = 'hamlet', 'jay-z/r. kelly'
default_labels = {'a': a,
'not_a': 'Not ' + a,
'b': b,
'not_b': 'Not ' + b,
'a_and_b': a + ' + ' + b,
'not_a_and_not_b': 'Not ' + a + ' + Not ' + b,
'a_and_not_b': a + ' + Not ' + b,
'b_and_not_a': 'Not ' + a + ' + ' + b}
labels = semsq.get_labels()
for name,default_label in default_labels.items():
self.assertTrue(name + '_label' in labels)
self.assertEqual(labels[name + '_label'], default_label)
semsq = SemioticSquare(corpus, 'hamlet', 'jay-z/r. kelly', ['swift'], labels={'a':'AAA'})
labels = semsq.get_labels()
for name,default_label in default_labels.items():
if name == 'a':
self.assertEqual(labels[name + '_label'], 'AAA')
else:
self.assertTrue(name + '_label' in labels)
self.assertEqual(labels[name + '_label'], default_label)
def test_get_lexicons(self):
semsq = get_test_semiotic_square()
lexicons = semsq.get_lexicons()
for category in self.categories():
self.assertIn(category, lexicons)
self.assertLessEqual(len(lexicons[category]), 10)
lexicons = semsq.get_lexicons(5)
for category in self.categories():
self.assertIn(category, lexicons)
self.assertLessEqual(len(lexicons[category]), 5)
def test_get_axes(self):
semsq = get_test_semiotic_square()
ax = semsq.get_axes()
self.assertEqual(list(sorted(ax.index)),
list(sorted(semsq.term_doc_matrix_.get_terms())))
def categories(self):
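        """Return the semiotic square category keys checked in the lexicon tests."""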
return ['a',
'b',
'not_a',
'not_b',
'a_and_not_b',
'b_and_not_a',
'a_and_b',
'not_a_and_not_b']
|
from copy import deepcopy
from typing import List, Optional
from django.conf import settings
from django.core import mail
from django.test import SimpleTestCase
from django.test.utils import override_settings
from weblate.accounts.models import AuditLog, Profile, Subscription
from weblate.accounts.notifications import (
FREQ_DAILY,
FREQ_INSTANT,
FREQ_MONTHLY,
FREQ_NONE,
FREQ_WEEKLY,
SCOPE_ADMIN,
SCOPE_ALL,
SCOPE_COMPONENT,
SCOPE_PROJECT,
SCOPE_WATCHED,
MergeFailureNotification,
)
from weblate.accounts.tasks import (
notify_change,
notify_daily,
notify_monthly,
notify_weekly,
send_mails,
)
from weblate.auth.models import User
from weblate.lang.models import Language
from weblate.trans.models import Announcement, Change, Comment, Suggestion
from weblate.trans.tests.test_views import RegistrationTestMixin, ViewTestCase
TEMPLATES_RAISE = deepcopy(settings.TEMPLATES)
TEMPLATES_RAISE[0]["OPTIONS"]["string_if_invalid"] = "TEMPLATE_BUG[%s]"
@override_settings(TEMPLATES=TEMPLATES_RAISE)
class NotificationTest(ViewTestCase, RegistrationTestMixin):
def setUp(self):
super().setUp()
self.user.email = "[email protected]"
self.user.save()
czech = Language.objects.get(code="cs")
profile = Profile.objects.get(user=self.user)
profile.watched.add(self.project)
profile.languages.add(czech)
profile.save()
notifications = (
"MergeFailureNotification",
"RepositoryNotification",
"ParseErrorNotification",
"NewStringNotificaton",
"NewContributorNotificaton",
"NewSuggestionNotificaton",
"NewCommentNotificaton",
"NewComponentNotificaton",
"LockNotification",
"LicenseNotification",
"ChangedStringNotificaton",
"TranslatedStringNotificaton",
"ApprovedStringNotificaton",
"NewTranslationNotificaton",
"MentionCommentNotificaton",
"LastAuthorCommentNotificaton",
)
for notification in notifications:
Subscription.objects.create(
user=self.user,
scope=SCOPE_WATCHED,
notification=notification,
frequency=FREQ_INSTANT,
)
self.thirduser = User.objects.create_user(
"thirduser", "[email protected]", "testpassword"
)
def validate_notifications(
self, count, subject: Optional[str] = None, subjects: Optional[List[str]] = None
):
for i, message in enumerate(mail.outbox):
self.assertNotIn("TEMPLATE_BUG", message.subject)
self.assertNotIn("TEMPLATE_BUG", message.body)
self.assertNotIn("TEMPLATE_BUG", message.alternatives[0][0])
if subject:
self.assertEqual(message.subject, subject)
if subjects:
self.assertEqual(message.subject, subjects[i])
self.assertEqual(len(mail.outbox), count)
def test_notify_lock(self):
Change.objects.create(
component=self.component,
action=Change.ACTION_LOCK,
)
self.validate_notifications(1, "[Weblate] Component Test/Test was locked")
mail.outbox = []
Change.objects.create(
component=self.component,
action=Change.ACTION_UNLOCK,
)
self.validate_notifications(1, "[Weblate] Component Test/Test was unlocked")
def test_notify_onetime(self):
Subscription.objects.filter(notification="LockNotification").delete()
Subscription.objects.create(
user=self.user,
scope=SCOPE_WATCHED,
notification="LockNotification",
frequency=FREQ_INSTANT,
onetime=True,
)
Change.objects.create(
component=self.component,
action=Change.ACTION_UNLOCK,
)
self.validate_notifications(1, "[Weblate] Component Test/Test was unlocked")
mail.outbox = []
Change.objects.create(
component=self.component,
action=Change.ACTION_LOCK,
)
self.validate_notifications(0)
self.assertFalse(
Subscription.objects.filter(notification="LockNotification").exists()
)
def test_notify_license(self):
self.component.license = "WTFPL"
self.component.save()
self.validate_notifications(1, "[Weblate] Test/Test was re-licensed to WTFPL")
def test_notify_agreement(self):
self.component.agreement = "You have to agree."
self.component.save()
self.validate_notifications(
1, "[Weblate] Contributor agreement for Test/Test was changed"
)
def test_notify_merge_failure(self):
change = Change.objects.create(
component=self.component,
details={"error": "Failed merge", "status": "Error\nstatus"},
action=Change.ACTION_FAILED_MERGE,
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
# Add project owner
self.component.project.add_user(self.anotheruser, "@Administration")
notify_change(change.pk)
# Check mail
self.validate_notifications(2, "[Weblate] Repository failure in Test/Test")
def test_notify_repository(self):
change = Change.objects.create(
component=self.component, action=Change.ACTION_MERGE
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
# Add project owner
self.component.project.add_user(self.anotheruser, "@Administration")
notify_change(change.pk)
# Check mail
self.validate_notifications(2, "[Weblate] Repository operation in Test/Test")
def test_notify_parse_error(self):
change = Change.objects.create(
translation=self.get_translation(),
details={"error_message": "Failed merge", "filename": "test/file.po"},
action=Change.ACTION_PARSE_ERROR,
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
# Add project owner
self.component.project.add_user(self.anotheruser, "@Administration")
notify_change(change.pk)
# Check mail
self.validate_notifications(3, "[Weblate] Parse error in Test/Test")
def test_notify_new_string(self):
Change.objects.create(
translation=self.get_translation(), action=Change.ACTION_NEW_STRING
)
# Check mail
self.validate_notifications(
1, "[Weblate] New string to translate in Test/Test — Czech"
)
def test_notify_new_strings(self):
Change.objects.create(
translation=self.get_translation(),
action=Change.ACTION_NEW_STRING,
details={"count": 10},
)
# Check mail
self.validate_notifications(
1, "[Weblate] New strings to translate in Test/Test — Czech"
)
def test_notify_new_translation(self):
Change.objects.create(
unit=self.get_unit(),
user=self.anotheruser,
old="",
action=Change.ACTION_CHANGE,
)
# Check mail - ChangedStringNotificaton and TranslatedStringNotificaton
self.validate_notifications(2, "[Weblate] New translation in Test/Test — Czech")
def test_notify_approved_translation(self):
Change.objects.create(
unit=self.get_unit(),
user=self.anotheruser,
old="",
action=Change.ACTION_APPROVE,
)
# Check mail - ChangedStringNotificaton and ApprovedStringNotificaton
self.validate_notifications(
2,
subjects=[
"[Weblate] New translation in Test/Test — Czech",
"[Weblate] Approved translation in Test/Test — Czech",
],
)
def test_notify_new_language(self):
anotheruser = self.anotheruser
change = Change.objects.create(
user=anotheruser,
component=self.component,
details={"language": "de"},
action=Change.ACTION_REQUESTED_LANGUAGE,
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
# Add project owner
self.component.project.add_user(anotheruser, "@Administration")
notify_change(change.pk)
# Check mail
self.validate_notifications(2, "[Weblate] New language request in Test/Test")
def test_notify_new_contributor(self):
Change.objects.create(
unit=self.get_unit(),
user=self.anotheruser,
action=Change.ACTION_NEW_CONTRIBUTOR,
)
# Check mail
self.validate_notifications(1, "[Weblate] New contributor in Test/Test — Czech")
def test_notify_new_suggestion(self):
unit = self.get_unit()
Change.objects.create(
unit=unit,
suggestion=Suggestion.objects.create(unit=unit, target="Foo"),
user=self.anotheruser,
action=Change.ACTION_SUGGESTION,
)
# Check mail
self.validate_notifications(1, "[Weblate] New suggestion in Test/Test — Czech")
def add_comment(self, comment="Foo", language="en"):
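        """Create a new comment change on a unit of the given language."""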
unit = self.get_unit(language=language)
Change.objects.create(
unit=unit,
comment=Comment.objects.create(unit=unit, comment=comment),
user=self.thirduser,
action=Change.ACTION_COMMENT,
)
def test_notify_new_comment(self, expected=1, comment="Foo"):
self.add_comment(comment=comment)
# Check mail
self.validate_notifications(expected, "[Weblate] New comment in Test/Test")
def test_notify_new_comment_language(self):
# Subscribed language
self.add_comment(language="cs")
self.validate_notifications(1, "[Weblate] New comment in Test/Test")
# Empty outbox
mail.outbox = []
# Unsubscribed language
self.add_comment(language="de")
self.assertEqual(len(mail.outbox), 0)
def test_notify_new_comment_report(self):
self.component.report_source_bugs = "[email protected]"
self.component.save()
self.test_notify_new_comment(2)
def test_notify_new_comment_mention(self):
self.test_notify_new_comment(
2, f"Hello @{self.anotheruser.username} and @invalid"
)
def test_notify_new_comment_author(self):
self.edit_unit("Hello, world!\n", "Ahoj svete!\n")
# No notification for own edit
self.assertEqual(len(mail.outbox), 0)
change = self.get_unit().recent_content_changes[0]
change.user = self.anotheruser
change.save()
# Notification for other user edit
# ChangedStringNotificaton and TranslatedStringNotificaton
self.assertEqual(len(mail.outbox), 2)
mail.outbox = []
def test_notify_new_component(self):
Change.objects.create(
component=self.component, action=Change.ACTION_CREATE_COMPONENT
)
self.validate_notifications(1, "[Weblate] New translation component Test/Test")
def test_notify_new_announcement(self):
Announcement.objects.create(component=self.component, message="Hello word")
self.validate_notifications(1, "[Weblate] New announcement on Test")
mail.outbox = []
Announcement.objects.create(message="Hello global word")
self.validate_notifications(
User.objects.filter(is_active=True).count(),
"[Weblate] New announcement at Weblate",
)
def test_notify_alert(self):
self.component.project.add_user(self.user, "@Administration")
self.component.add_alert("PushFailure", error="Some error")
self.validate_notifications(
2,
subjects=[
"[Weblate] New alert on Test/Test",
"[Weblate] Component Test/Test was locked",
],
)
def test_notify_alert_ignore(self):
self.component.project.add_user(self.user, "@Administration")
# Create linked component, this triggers missing license alert
self.create_link_existing()
mail.outbox = []
self.component.add_alert("PushFailure", error="Some error")
self.validate_notifications(
3,
subjects=[
"[Weblate] New alert on Test/Test",
"[Weblate] Component Test/Test was locked",
"[Weblate] Component Test/Test2 was locked",
],
)
def test_notify_account(self):
request = self.get_request()
AuditLog.objects.create(request.user, request, "password")
self.assertEqual(len(mail.outbox), 1)
self.assert_notify_mailbox(mail.outbox[0])
# Verify site root expansion in email
content = mail.outbox[0].alternatives[0][0]
self.assertNotIn('href="/', content)
def test_notify_html_language(self):
self.user.profile.language = "cs"
self.user.profile.save()
request = self.get_request()
AuditLog.objects.create(request.user, request, "password")
self.assertEqual(len(mail.outbox), 1)
# There is just one (html) alternative
content = mail.outbox[0].alternatives[0][0]
self.assertIn('lang="cs"', content)
self.assertIn("změněno", content)
def test_digest(
self,
frequency=FREQ_DAILY,
notify=notify_daily,
change=Change.ACTION_FAILED_MERGE,
subj="Repository failure",
):
Subscription.objects.filter(
frequency=FREQ_INSTANT,
notification__in=("MergeFailureNotification", "NewTranslationNotificaton"),
).update(frequency=frequency)
Change.objects.create(
component=self.component,
details={
"error": "Failed merge",
"status": "Error\nstatus",
"language": "de",
},
action=change,
)
# Check mail
self.assertEqual(len(mail.outbox), 0)
# Trigger notification
notify()
self.validate_notifications(1, f"[Weblate] Digest: {subj}")
content = mail.outbox[0].alternatives[0][0]
self.assertNotIn('img src="/', content)
def test_digest_weekly(self):
self.test_digest(FREQ_WEEKLY, notify_weekly)
def test_digest_monthly(self):
self.test_digest(FREQ_MONTHLY, notify_monthly)
def test_digest_new_lang(self):
self.test_digest(change=Change.ACTION_REQUESTED_LANGUAGE, subj="New language")
def test_reminder(
self,
frequency=FREQ_DAILY,
notify=notify_daily,
notification="ToDoStringsNotification",
subj="4 strings needing action in Test/Test",
):
self.user.subscription_set.create(
scope=SCOPE_WATCHED, notification=notification, frequency=frequency
)
# Check mail
self.assertEqual(len(mail.outbox), 0)
# Trigger notification
notify()
self.validate_notifications(1, f"[Weblate] {subj}")
def test_reminder_weekly(self):
self.test_reminder(FREQ_WEEKLY, notify_weekly)
def test_reminder_monthly(self):
self.test_reminder(FREQ_MONTHLY, notify_monthly)
def test_reminder_suggestion(self):
unit = self.get_unit()
Suggestion.objects.create(unit=unit, target="Foo")
self.test_reminder(
notification="PendingSuggestionsNotification",
subj="1 pending suggestion in Test/Test",
)
class SubscriptionTest(ViewTestCase):
notification = MergeFailureNotification
def get_users(self, frequency):
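        """Create a merge failure change and return the users notified at ``frequency``."""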
change = Change.objects.create(
action=Change.ACTION_FAILED_MERGE, component=self.component
)
notification = self.notification(None)
return list(notification.get_users(frequency, change))
def test_scopes(self):
self.user.profile.watched.add(self.project)
        # No subscriptions
self.user.subscription_set.all().delete()
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
# Default subscription
self.user.subscription_set.create(
scope=SCOPE_WATCHED,
notification=self.notification.get_name(),
frequency=FREQ_MONTHLY,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 1)
# Admin subscription
self.user.subscription_set.create(
scope=SCOPE_ADMIN,
notification=self.notification.get_name(),
frequency=FREQ_WEEKLY,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 1)
self.component.project.add_user(self.user, "@Administration")
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 1)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
# Project subscription
self.user.subscription_set.create(
scope=SCOPE_PROJECT,
project=self.project,
notification=self.notification.get_name(),
frequency=FREQ_DAILY,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 1)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
# Component subscription
subscription = self.user.subscription_set.create(
scope=SCOPE_COMPONENT,
project=self.project,
notification=self.notification.get_name(),
frequency=FREQ_INSTANT,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 1)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
# Disabled notification for component
subscription.frequency = FREQ_NONE
subscription.save()
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
def test_all_scope(self):
self.user.subscription_set.all().delete()
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 0)
self.user.subscription_set.create(
scope=SCOPE_ALL,
notification=self.notification.get_name(),
frequency=FREQ_MONTHLY,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
self.assertEqual(len(self.get_users(FREQ_DAILY)), 0)
self.assertEqual(len(self.get_users(FREQ_WEEKLY)), 0)
self.assertEqual(len(self.get_users(FREQ_MONTHLY)), 1)
def test_skip(self):
self.user.profile.watched.add(self.project)
        # No subscriptions
self.user.subscription_set.all().delete()
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
# Default subscription
self.user.subscription_set.create(
scope=SCOPE_WATCHED,
notification=self.notification.get_name(),
frequency=FREQ_INSTANT,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 1)
# Subscribe to parent event
self.user.subscription_set.create(
scope=SCOPE_WATCHED,
notification="NewAlertNotificaton",
frequency=FREQ_INSTANT,
)
self.assertEqual(len(self.get_users(FREQ_INSTANT)), 0)
class SendMailsTest(SimpleTestCase):
@override_settings(
EMAIL_HOST="nonexisting.weblate.org",
EMAIL_BACKEND="django.core.mail.backends.smtp.EmailBackend",
)
def test_error_handling(self):
send_mails([{}])
self.assertEqual(len(mail.outbox), 0)
|
import json
from aiohttp import web
import pytest
import voluptuous as vol
from homeassistant import const
from homeassistant.bootstrap import DATA_LOGGING
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
from tests.common import async_mock_service
@pytest.fixture
def mock_api_client(hass, hass_client):
"""Start the Home Assistant HTTP component and return admin API client."""
hass.loop.run_until_complete(async_setup_component(hass, "api", {}))
return hass.loop.run_until_complete(hass_client())
async def test_api_list_state_entities(hass, mock_api_client):
"""Test if the debug interface allows us to list state entities."""
hass.states.async_set("test.entity", "hello")
resp = await mock_api_client.get(const.URL_API_STATES)
assert resp.status == 200
json = await resp.json()
remote_data = [ha.State.from_dict(item) for item in json]
assert remote_data == hass.states.async_all()
async def test_api_get_state(hass, mock_api_client):
"""Test if the debug interface allows us to get a state."""
hass.states.async_set("hello.world", "nice", {"attr": 1})
resp = await mock_api_client.get("/api/states/hello.world")
assert resp.status == 200
json = await resp.json()
data = ha.State.from_dict(json)
state = hass.states.get("hello.world")
assert data.state == state.state
assert data.last_changed == state.last_changed
assert data.attributes == state.attributes
async def test_api_get_non_existing_state(hass, mock_api_client):
"""Test if the debug interface allows us to get a state."""
resp = await mock_api_client.get("/api/states/does_not_exist")
assert resp.status == const.HTTP_NOT_FOUND
async def test_api_state_change(hass, mock_api_client):
"""Test if we can change the state of an entity that exists."""
hass.states.async_set("test.test", "not_to_be_set")
await mock_api_client.post(
"/api/states/test.test", json={"state": "debug_state_change2"}
)
assert hass.states.get("test.test").state == "debug_state_change2"
# pylint: disable=invalid-name
async def test_api_state_change_of_non_existing_entity(hass, mock_api_client):
"""Test if changing a state of a non existing entity is possible."""
new_state = "debug_state_change"
resp = await mock_api_client.post(
"/api/states/test_entity.that_does_not_exist", json={"state": new_state}
)
assert resp.status == 201
assert hass.states.get("test_entity.that_does_not_exist").state == new_state
# pylint: disable=invalid-name
async def test_api_state_change_with_bad_data(hass, mock_api_client):
"""Test if API sends appropriate error if we omit state."""
resp = await mock_api_client.post(
"/api/states/test_entity.that_does_not_exist", json={}
)
assert resp.status == 400
# pylint: disable=invalid-name
async def test_api_state_change_to_zero_value(hass, mock_api_client):
"""Test if changing a state to a zero value is possible."""
resp = await mock_api_client.post(
"/api/states/test_entity.with_zero_state", json={"state": 0}
)
assert resp.status == 201
resp = await mock_api_client.post(
"/api/states/test_entity.with_zero_state", json={"state": 0.0}
)
assert resp.status == 200
# pylint: disable=invalid-name
async def test_api_state_change_push(hass, mock_api_client):
"""Test if we can push a change the state of an entity."""
hass.states.async_set("test.test", "not_to_be_set")
events = []
@ha.callback
def event_listener(event):
"""Track events."""
events.append(event)
hass.bus.async_listen(const.EVENT_STATE_CHANGED, event_listener)
await mock_api_client.post("/api/states/test.test", json={"state": "not_to_be_set"})
await hass.async_block_till_done()
assert len(events) == 0
await mock_api_client.post(
"/api/states/test.test", json={"state": "not_to_be_set", "force_update": True}
)
await hass.async_block_till_done()
assert len(events) == 1
# pylint: disable=invalid-name
async def test_api_fire_event_with_no_data(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(1)
hass.bus.async_listen_once("test.event_no_data", listener)
await mock_api_client.post("/api/events/test.event_no_data")
await hass.async_block_till_done()
assert len(test_value) == 1
# pylint: disable=invalid-name
async def test_api_fire_event_with_data(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called.
Also test if our data came through.
"""
if "test" in event.data:
test_value.append(1)
hass.bus.async_listen_once("test_event_with_data", listener)
await mock_api_client.post("/api/events/test_event_with_data", json={"test": 1})
await hass.async_block_till_done()
assert len(test_value) == 1
# pylint: disable=invalid-name
async def test_api_fire_event_with_invalid_json(hass, mock_api_client):
"""Test if the API allows us to fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(1)
hass.bus.async_listen_once("test_event_bad_data", listener)
resp = await mock_api_client.post(
"/api/events/test_event_bad_data", data=json.dumps("not an object")
)
await hass.async_block_till_done()
assert resp.status == 400
assert len(test_value) == 0
# Try now with valid but unusable JSON
resp = await mock_api_client.post(
"/api/events/test_event_bad_data", data=json.dumps([1, 2, 3])
)
await hass.async_block_till_done()
assert resp.status == 400
assert len(test_value) == 0
async def test_api_get_config(hass, mock_api_client):
"""Test the return of the configuration."""
resp = await mock_api_client.get(const.URL_API_CONFIG)
result = await resp.json()
if "components" in result:
result["components"] = set(result["components"])
if "whitelist_external_dirs" in result:
result["whitelist_external_dirs"] = set(result["whitelist_external_dirs"])
if "allowlist_external_dirs" in result:
result["allowlist_external_dirs"] = set(result["allowlist_external_dirs"])
if "allowlist_external_urls" in result:
result["allowlist_external_urls"] = set(result["allowlist_external_urls"])
assert hass.config.as_dict() == result
async def test_api_get_components(hass, mock_api_client):
"""Test the return of the components."""
resp = await mock_api_client.get(const.URL_API_COMPONENTS)
result = await resp.json()
assert set(result) == hass.config.components
async def test_api_get_event_listeners(hass, mock_api_client):
"""Test if we can get the list of events being listened for."""
resp = await mock_api_client.get(const.URL_API_EVENTS)
data = await resp.json()
local = hass.bus.async_listeners()
for event in data:
assert local.pop(event["event"]) == event["listener_count"]
assert len(local) == 0
async def test_api_get_services(hass, mock_api_client):
"""Test if we can get a dict describing current services."""
resp = await mock_api_client.get(const.URL_API_SERVICES)
data = await resp.json()
local_services = hass.services.async_services()
for serv_domain in data:
local = local_services.pop(serv_domain["domain"])
assert serv_domain["services"] == local
async def test_api_call_service_no_data(hass, mock_api_client):
"""Test if the API allows us to call a service."""
test_value = []
@ha.callback
def listener(service_call):
"""Record that our service got called."""
test_value.append(1)
hass.services.async_register("test_domain", "test_service", listener)
await mock_api_client.post("/api/services/test_domain/test_service")
await hass.async_block_till_done()
assert len(test_value) == 1
async def test_api_call_service_with_data(hass, mock_api_client):
"""Test if the API allows us to call a service."""
test_value = []
@ha.callback
def listener(service_call):
"""Record that our service got called.
Also test if our data came through.
"""
if "test" in service_call.data:
test_value.append(1)
hass.services.async_register("test_domain", "test_service", listener)
await mock_api_client.post(
"/api/services/test_domain/test_service", json={"test": 1}
)
await hass.async_block_till_done()
assert len(test_value) == 1
async def test_api_template(hass, mock_api_client):
"""Test the template API."""
hass.states.async_set("sensor.temperature", 10)
resp = await mock_api_client.post(
const.URL_API_TEMPLATE,
json={"template": "{{ states.sensor.temperature.state }}"},
)
body = await resp.text()
assert body == "10"
async def test_api_template_error(hass, mock_api_client):
"""Test the template API."""
hass.states.async_set("sensor.temperature", 10)
resp = await mock_api_client.post(
const.URL_API_TEMPLATE, json={"template": "{{ states.sensor.temperature.state"}
)
assert resp.status == 400
async def test_stream(hass, mock_api_client):
"""Test the stream."""
listen_count = _listen_count(hass)
resp = await mock_api_client.get(const.URL_API_STREAM)
assert resp.status == 200
assert listen_count + 1 == _listen_count(hass)
hass.bus.async_fire("test_event")
data = await _stream_next_event(resp.content)
assert data["event_type"] == "test_event"
async def test_stream_with_restricted(hass, mock_api_client):
"""Test the stream with restrictions."""
listen_count = _listen_count(hass)
resp = await mock_api_client.get(
f"{const.URL_API_STREAM}?restrict=test_event1,test_event3"
)
assert resp.status == 200
assert listen_count + 1 == _listen_count(hass)
hass.bus.async_fire("test_event1")
data = await _stream_next_event(resp.content)
assert data["event_type"] == "test_event1"
hass.bus.async_fire("test_event2")
hass.bus.async_fire("test_event3")
data = await _stream_next_event(resp.content)
assert data["event_type"] == "test_event3"
async def _stream_next_event(stream):
"""Read the stream for next event while ignoring ping."""
while True:
last_new_line = False
data = b""
while True:
dat = await stream.read(1)
if dat == b"\n" and last_new_line:
break
data += dat
last_new_line = dat == b"\n"
conv = data.decode("utf-8").strip()[6:]
if conv != "ping":
break
return json.loads(conv)
def _listen_count(hass):
"""Return number of event listeners."""
return sum(hass.bus.async_listeners().values())
async def test_api_error_log(hass, aiohttp_client, hass_access_token, hass_admin_user):
"""Test if we can fetch the error log."""
hass.data[DATA_LOGGING] = "/some/path"
await async_setup_component(hass, "api", {})
client = await aiohttp_client(hass.http.app)
resp = await client.get(const.URL_API_ERROR_LOG)
# Verify auth required
assert resp.status == 401
with patch(
"aiohttp.web.FileResponse", return_value=web.Response(status=200, text="Hello")
) as mock_file:
resp = await client.get(
const.URL_API_ERROR_LOG,
headers={"Authorization": f"Bearer {hass_access_token}"},
)
assert len(mock_file.mock_calls) == 1
assert mock_file.mock_calls[0][1][0] == hass.data[DATA_LOGGING]
assert resp.status == 200
assert await resp.text() == "Hello"
# Verify we require admin user
hass_admin_user.groups = []
resp = await client.get(
const.URL_API_ERROR_LOG,
headers={"Authorization": f"Bearer {hass_access_token}"},
)
assert resp.status == 401
async def test_api_fire_event_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we fire an event."""
test_value = []
@ha.callback
def listener(event):
"""Record that our event got called."""
test_value.append(event)
hass.bus.async_listen("test.event", listener)
await mock_api_client.post(
"/api/events/test.event",
headers={"authorization": f"Bearer {hass_access_token}"},
)
await hass.async_block_till_done()
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
assert len(test_value) == 1
assert test_value[0].context.user_id == refresh_token.user.id
async def test_api_call_service_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we call a service."""
calls = async_mock_service(hass, "test_domain", "test_service")
await mock_api_client.post(
"/api/services/test_domain/test_service",
headers={"authorization": f"Bearer {hass_access_token}"},
)
await hass.async_block_till_done()
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
assert len(calls) == 1
assert calls[0].context.user_id == refresh_token.user.id
async def test_api_set_state_context(hass, mock_api_client, hass_access_token):
"""Test if the API sets right context if we set state."""
await mock_api_client.post(
"/api/states/light.kitchen",
json={"state": "on"},
headers={"authorization": f"Bearer {hass_access_token}"},
)
refresh_token = await hass.auth.async_validate_access_token(hass_access_token)
state = hass.states.get("light.kitchen")
assert state.context.user_id == refresh_token.user.id
async def test_event_stream_requires_admin(hass, mock_api_client, hass_admin_user):
"""Test user needs to be admin to access event stream."""
hass_admin_user.groups = []
resp = await mock_api_client.get("/api/stream")
assert resp.status == 401
async def test_states_view_filters(hass, mock_api_client, hass_admin_user):
"""Test filtering only visible states."""
hass_admin_user.mock_policy({"entities": {"entity_ids": {"test.entity": True}}})
hass.states.async_set("test.entity", "hello")
hass.states.async_set("test.not_visible_entity", "invisible")
resp = await mock_api_client.get(const.URL_API_STATES)
assert resp.status == 200
json = await resp.json()
assert len(json) == 1
assert json[0]["entity_id"] == "test.entity"
async def test_get_entity_state_read_perm(hass, mock_api_client, hass_admin_user):
"""Test getting a state requires read permission."""
hass_admin_user.mock_policy({})
resp = await mock_api_client.get("/api/states/light.test")
assert resp.status == 401
async def test_post_entity_state_admin(hass, mock_api_client, hass_admin_user):
"""Test updating state requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post("/api/states/light.test")
assert resp.status == 401
async def test_delete_entity_state_admin(hass, mock_api_client, hass_admin_user):
"""Test deleting entity requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.delete("/api/states/light.test")
assert resp.status == 401
async def test_post_event_admin(hass, mock_api_client, hass_admin_user):
"""Test sending event requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post("/api/events/state_changed")
assert resp.status == 401
async def test_rendering_template_admin(hass, mock_api_client, hass_admin_user):
"""Test rendering a template requires admin."""
hass_admin_user.groups = []
resp = await mock_api_client.post(const.URL_API_TEMPLATE)
assert resp.status == 401
async def test_rendering_template_legacy_user(
hass, mock_api_client, aiohttp_client, legacy_auth
):
"""Test rendering a template with legacy API password."""
hass.states.async_set("sensor.temperature", 10)
client = await aiohttp_client(hass.http.app)
resp = await client.post(
const.URL_API_TEMPLATE,
json={"template": "{{ states.sensor.temperature.state }}"},
)
assert resp.status == 401
async def test_api_call_service_not_found(hass, mock_api_client):
"""Test if the API fails 400 if unknown service."""
resp = await mock_api_client.post("/api/services/test_domain/test_service")
assert resp.status == 400
async def test_api_call_service_bad_data(hass, mock_api_client):
"""Test if the API fails 400 if unknown service."""
test_value = []
@ha.callback
def listener(service_call):
"""Record that our service got called."""
test_value.append(1)
hass.services.async_register(
"test_domain", "test_service", listener, schema=vol.Schema({"hello": str})
)
resp = await mock_api_client.post(
"/api/services/test_domain/test_service", json={"hello": 5}
)
assert resp.status == 400
|
import json
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from homeassistant.util.yaml import dump
from tests.async_mock import patch
async def test_update_scene(hass, hass_client):
"""Test updating a scene."""
with patch.object(config, "SECTIONS", ["scene"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [{"id": "light_on"}, {"id": "light_off"}]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
data = dump(data)
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.post(
"/api/config/scene/config/light_off",
data=json.dumps(
{
"id": "light_off",
"name": "Lights off",
"entities": {"light.bedroom": {"state": "off"}},
}
),
)
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
assert len(written) == 1
written_yaml = written[0]
assert (
written_yaml
== """- id: light_on
- id: light_off
name: Lights off
entities:
light.bedroom:
state: 'off'
"""
)
async def test_bad_formatted_scene(hass, hass_client):
"""Test that we handle scene without ID."""
with patch.object(config, "SECTIONS", ["scene"]):
await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [
{
# No ID
"entities": {"light.bedroom": "on"}
},
{"id": "light_off"},
]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.post(
"/api/config/scene/config/light_off",
data=json.dumps(
{
"id": "light_off",
"name": "Lights off",
"entities": {"light.bedroom": {"state": "off"}},
}
),
)
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
# Verify ID added to orig_data
assert "id" in orig_data[0]
assert orig_data[1] == {
"id": "light_off",
"name": "Lights off",
"entities": {"light.bedroom": {"state": "off"}},
}
async def test_delete_scene(hass, hass_client):
"""Test deleting a scene."""
ent_reg = await hass.helpers.entity_registry.async_get_registry()
assert await async_setup_component(
hass,
"scene",
{
"scene": [
{"id": "light_on", "name": "Light on", "entities": {}},
{"id": "light_off", "name": "Light off", "entities": {}},
]
},
)
assert len(ent_reg.entities) == 2
with patch.object(config, "SECTIONS", ["scene"]):
assert await async_setup_component(hass, "config", {})
client = await hass_client()
orig_data = [{"id": "light_on"}, {"id": "light_off"}]
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch("homeassistant.components.config._read", mock_read), patch(
"homeassistant.components.config._write", mock_write
), patch("homeassistant.config.async_hass_config_yaml", return_value={}):
resp = await client.delete("/api/config/scene/config/light_on")
await hass.async_block_till_done()
assert resp.status == 200
result = await resp.json()
assert result == {"result": "ok"}
assert len(written) == 1
assert written[0][0]["id"] == "light_off"
assert len(ent_reg.entities) == 1
|
import os
import os.path as op
import re
import warnings
import pytest
from mne import read_evokeds
from mne.utils import (warn, set_log_level, set_log_file, filter_out_warnings,
verbose, _get_call_line, use_log_level, catch_logging,
logger)
from mne.utils._logging import _frame_info
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
fname_evoked = op.join(base_dir, 'test-ave.fif')
fname_log = op.join(base_dir, 'test-ave.log')
fname_log_2 = op.join(base_dir, 'test-ave-2.log')
@verbose
def _fun(verbose=None):
logger.debug('Test')
def test_frame_info(capsys, monkeypatch):
"""Test _frame_info."""
stack = _frame_info(100)
assert 2 < len(stack) < 100
this, pytest_line = stack[:2]
assert re.match('^test_logging:[1-9][0-9]$', this) is not None, this
assert 'pytest' in pytest_line
capsys.readouterr()
with use_log_level('debug', add_frames=4):
_fun()
out, _ = capsys.readouterr()
out = out.replace('\n', ' ')
assert re.match(
'.*pytest'
'.*test_logging:[2-9][0-9] '
'.*test_logging:[1-9][0-9] :.*Test', out) is not None, this
monkeypatch.setattr('inspect.currentframe', lambda: None)
assert _frame_info(1) == ['unknown']
def test_how_to_deal_with_warnings():
"""Test filter some messages out of warning records."""
with pytest.warns(UserWarning, match='bb') as w:
warnings.warn("aa warning", UserWarning)
warnings.warn("bb warning", UserWarning)
warnings.warn("bb warning", RuntimeWarning)
warnings.warn("aa warning", UserWarning)
filter_out_warnings(w, category=UserWarning, match='aa')
filter_out_warnings(w, category=RuntimeWarning)
assert len(w) == 1
def clean_lines(lines=[]):
"""Scrub filenames for checking logging output (in test_logging)."""
return [line if 'Reading ' not in line else 'Reading test file'
for line in lines]
def test_logging_options(tmpdir):
"""Test logging (to file)."""
with use_log_level(None): # just ensure it's set back
with pytest.raises(ValueError, match="Invalid value for the 'verbose"):
set_log_level('foo')
tempdir = str(tmpdir)
test_name = op.join(tempdir, 'test.log')
with open(fname_log, 'r') as old_log_file:
# [:-1] used to strip an extra "No baseline correction applied"
old_lines = clean_lines(old_log_file.readlines())
old_lines.pop(-1)
with open(fname_log_2, 'r') as old_log_file_2:
old_lines_2 = clean_lines(old_log_file_2.readlines())
old_lines_2.pop(14)
old_lines_2.pop(-1)
if op.isfile(test_name):
os.remove(test_name)
# test it one way (printing default off)
set_log_file(test_name)
set_log_level('WARNING')
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose=False)
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose='WARNING')
with open(test_name) as fid:
assert (fid.readlines() == [])
# SHOULD print
evoked = read_evokeds(fname_evoked, condition=1, verbose=True)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
set_log_file(None) # Need to do this to close the old file
os.remove(test_name)
# now go the other way (printing default on)
set_log_file(test_name)
set_log_level('INFO')
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose='WARNING')
with open(test_name) as fid:
assert (fid.readlines() == [])
# should NOT print
evoked = read_evokeds(fname_evoked, condition=1, verbose=False)
with open(test_name) as fid:
assert (fid.readlines() == [])
# SHOULD print
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
# check to make sure appending works (and as default, raises a warning)
set_log_file(test_name, overwrite=False)
with pytest.warns(RuntimeWarning, match='appended to the file'):
set_log_file(test_name)
evoked = read_evokeds(fname_evoked, condition=1)
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines_2
# make sure overwriting works
set_log_file(test_name, overwrite=True)
# this line needs to be called to actually do some logging
evoked = read_evokeds(fname_evoked, condition=1)
del evoked
with open(test_name, 'r') as new_log_file:
new_lines = clean_lines(new_log_file.readlines())
assert new_lines == old_lines
with catch_logging() as log:
pass
assert log.getvalue() == ''
def test_warn(capsys):
"""Test the smart warn() function."""
with pytest.warns(RuntimeWarning, match='foo'):
warn('foo')
captured = capsys.readouterr()
assert captured.out == '' # gh-5592
assert captured.err == '' # this is because pytest.warns took it already
def test_get_call_line():
"""Test getting a call line."""
@verbose
def foo(verbose=None):
return _get_call_line()
for v in (None, True):
my_line = foo(verbose=v) # testing
assert my_line == 'my_line = foo(verbose=v) # testing'
def bar():
return _get_call_line()
my_line = bar() # testing more
assert my_line == 'my_line = bar() # testing more'
def test_verbose_strictness():
"""Test that the verbose decorator is strict about usability."""
@verbose
def bad_verbose():
pass
with pytest.raises(RuntimeError, match='does not accept'):
bad_verbose()
class Okay:
@verbose
def meth(self): # allowed because it should just use self.verbose
pass
o = Okay()
with pytest.raises(RuntimeError, match=r'does not have self\.verbose'):
o.meth() # should raise, no verbose attr yet
o.verbose = None
o.meth()
|
import pytest
from unittest.mock import patch
from kombu import Connection
pytest.importorskip('sqlalchemy')
class test_SqlAlchemy:
def test_url_parser(self):
with patch('kombu.transport.sqlalchemy.Channel._open'):
url = 'sqlalchemy+sqlite:///celerydb.sqlite'
Connection(url).connect()
url = 'sqla+sqlite:///celerydb.sqlite'
Connection(url).connect()
url = 'sqlb+sqlite:///celerydb.sqlite'
with pytest.raises(KeyError):
Connection(url).connect()
def test_simple_queueing(self):
conn = Connection('sqlalchemy+sqlite:///:memory:')
conn.connect()
try:
channel = conn.channel()
assert channel.queue_cls.__table__.name == 'kombu_queue'
assert channel.message_cls.__table__.name == 'kombu_message'
channel._put('celery', 'DATA_SIMPLE_QUEUEING')
assert channel._get('celery') == 'DATA_SIMPLE_QUEUEING'
finally:
conn.release()
def test_clone(self):
hostname = 'sqlite:///celerydb.sqlite'
x = Connection('+'.join(['sqla', hostname]))
try:
assert x.uri_prefix == 'sqla'
assert x.hostname == hostname
clone = x.clone()
try:
assert clone.hostname == hostname
assert clone.uri_prefix == 'sqla'
finally:
clone.release()
finally:
x.release()
|
import filelock
import os
from chainer.dataset import download
from chainercv import utils
def get_ade20k(root, url):
# To support ChainerMN, the target directory should be locked.
with filelock.FileLock(os.path.join(download.get_dataset_directory(
'pfnet/chainercv/.lock'), 'ade20k.lock')):
data_root = download.get_dataset_directory(root)
if os.path.exists(os.path.join(data_root, 'ADEChallengeData2016')):
return data_root
cache_path = utils.cached_download(url)
utils.extractall(cache_path, data_root, os.path.splitext(url)[1])
return data_root
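# Illustrative usage (the root string below is an assumption, not part of this
# module's API): get_ade20k('pfnet/chainercv/ade20k', url) downloads and
# extracts the archive once under the dataset directory and returns the
# directory containing 'ADEChallengeData2016'; concurrent ChainerMN workers
# block on the file lock instead of downloading the archive twice.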
ade20k_semantic_segmentation_label_names = (
'wall',
'edifice',
'sky',
'flooring',
'tree',
'ceiling',
'route',
'bed ',
'window ',
'grass',
'cabinet',
'pavement',
'soul',
'ground',
'door',
'table',
'mount',
'life',
'pall',
'chair',
'motorcar',
'water',
'picture',
'lounge',
'shelf',
'house',
'sea',
'mirror',
'carpeting',
'field',
'armchair',
'seat',
'fencing',
'desk',
'stone',
'press',
'lamp',
'tub',
'rail',
'cushion',
'stand',
'box',
'pillar',
'sign',
'dresser',
'counter',
'sand',
'sink',
'skyscraper',
'fireplace',
'icebox',
'stand',
'path',
'steps',
'runway',
'vitrine',
'table',
'pillow',
'screen',
'staircase',
'river',
'span',
'bookcase',
'screen',
'table',
'throne',
'flower',
'book',
'hill',
'bench',
'countertop',
'stove',
'tree',
'island',
'system',
'chair',
'boat',
'bar',
'machine',
'shanty',
'vehicle',
'towel',
'source',
'motortruck',
'tower',
'pendent',
'sunblind',
'lamp',
'kiosk',
'box',
'plane',
'track',
'clothes',
'pole',
'soil',
'handrail',
'stairway',
'hassock',
'bottle',
'sideboard',
'card',
'stage',
'van',
'ship',
'fountain',
'transporter',
'canopy',
'machine',
'toy',
'natatorium',
'stool',
'cask',
'handbasket',
'falls',
'shelter',
'bag',
'motorbike',
'cradle',
'oven',
'ball',
'food',
'stair',
'tank',
'marque',
'oven',
'flowerpot',
'fauna',
'cycle ',
'lake',
'machine',
'screen',
'cover',
'sculpture',
'hood',
'sconce',
'vase',
'stoplight',
'tray',
'bin',
'fan',
'dock',
'screen',
'plate',
'device',
'board',
'shower',
'radiator',
'glass',
'clock',
'flag'
)
ade20k_semantic_segmentation_label_colors = (
(120, 120, 120),
(180, 120, 120),
(6, 230, 230),
(80, 50, 50),
(4, 200, 3),
(120, 120, 80),
(140, 140, 140),
(204, 5, 255),
(230, 230, 230),
(4, 250, 7),
(224, 5, 255),
(235, 255, 7),
(150, 5, 61),
(120, 120, 70),
(8, 255, 51),
(255, 6, 82),
(143, 255, 140),
(204, 255, 4),
(255, 51, 7),
(204, 70, 3),
(0, 102, 200),
(61, 230, 250),
(255, 6, 51),
(11, 102, 255),
(255, 7, 71),
(255, 9, 224),
(9, 7, 230),
(220, 220, 220),
(255, 9, 92),
(112, 9, 255),
(8, 255, 214),
(7, 255, 224),
(255, 184, 6),
(10, 255, 71),
(255, 41, 10),
(7, 255, 255),
(224, 255, 8),
(102, 8, 255),
(255, 61, 6),
(255, 194, 7),
(255, 122, 8),
(0, 255, 20),
(255, 8, 41),
(255, 5, 153),
(6, 51, 255),
(235, 12, 255),
(160, 150, 20),
(0, 163, 255),
(140, 140, 140),
(250, 10, 15),
(20, 255, 0),
(31, 255, 0),
(255, 31, 0),
(255, 224, 0),
(153, 255, 0),
(0, 0, 255),
(255, 71, 0),
(0, 235, 255),
(0, 173, 255),
(31, 0, 255),
(11, 200, 200),
(255, 82, 0),
(0, 255, 245),
(0, 61, 255),
(0, 255, 112),
(0, 255, 133),
(255, 0, 0),
(255, 163, 0),
(255, 102, 0),
(194, 255, 0),
(0, 143, 255),
(51, 255, 0),
(0, 82, 255),
(0, 255, 41),
(0, 255, 173),
(10, 0, 255),
(173, 255, 0),
(0, 255, 153),
(255, 92, 0),
(255, 0, 255),
(255, 0, 245),
(255, 0, 102),
(255, 173, 0),
(255, 0, 20),
(255, 184, 184),
(0, 31, 255),
(0, 255, 61),
(0, 71, 255),
(255, 0, 204),
(0, 255, 194),
(0, 255, 82),
(0, 10, 255),
(0, 112, 255),
(51, 0, 255),
(0, 194, 255),
(0, 122, 255),
(0, 255, 163),
(255, 153, 0),
(0, 255, 10),
(255, 112, 0),
(143, 255, 0),
(82, 0, 255),
(163, 255, 0),
(255, 235, 0),
(8, 184, 170),
(133, 0, 255),
(0, 255, 92),
(184, 0, 255),
(255, 0, 31),
(0, 184, 255),
(0, 214, 255),
(255, 0, 112),
(92, 255, 0),
(0, 224, 255),
(112, 224, 255),
(70, 184, 160),
(163, 0, 255),
(153, 0, 255),
(71, 255, 0),
(255, 0, 163),
(255, 204, 0),
(255, 0, 143),
(0, 255, 235),
(133, 255, 0),
(255, 0, 235),
(245, 0, 255),
(255, 0, 122),
(255, 245, 0),
(10, 190, 212),
(214, 255, 0),
(0, 204, 255),
(20, 0, 255),
(255, 255, 0),
(0, 153, 255),
(0, 41, 255),
(0, 255, 204),
(41, 0, 255),
(41, 255, 0),
(173, 0, 255),
(0, 245, 255),
(71, 0, 255),
(122, 0, 255),
(0, 255, 184),
(0, 92, 255),
(184, 255, 0),
(0, 133, 255),
(255, 214, 0),
(25, 194, 194),
(102, 255, 0),
(92, 0, 255)
)
|
import copy
from homeassistant.components.reddit.sensor import (
ATTR_BODY,
ATTR_COMMENTS_NUMBER,
ATTR_CREATED,
ATTR_ID,
ATTR_POSTS,
ATTR_SCORE,
ATTR_SUBREDDIT,
ATTR_TITLE,
ATTR_URL,
CONF_SORT_BY,
DOMAIN,
)
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_MAXIMUM,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
VALID_CONFIG = {
"sensor": {
"platform": DOMAIN,
CONF_CLIENT_ID: "test_client_id",
CONF_CLIENT_SECRET: "test_client_secret",
CONF_USERNAME: "test_username",
CONF_PASSWORD: "test_password",
"subreddits": ["worldnews", "news"],
}
}
VALID_LIMITED_CONFIG = {
"sensor": {
"platform": DOMAIN,
CONF_CLIENT_ID: "test_client_id",
CONF_CLIENT_SECRET: "test_client_secret",
CONF_USERNAME: "test_username",
CONF_PASSWORD: "test_password",
"subreddits": ["worldnews", "news"],
CONF_MAXIMUM: 1,
}
}
INVALID_SORT_BY_CONFIG = {
"sensor": {
"platform": DOMAIN,
CONF_CLIENT_ID: "test_client_id",
CONF_CLIENT_SECRET: "test_client_secret",
CONF_USERNAME: "test_username",
CONF_PASSWORD: "test_password",
"subreddits": ["worldnews", "news"],
"sort_by": "invalid_sort_by",
}
}
class ObjectView:
"""Use dict properties as attributes."""
def __init__(self, d):
"""Set dict as internal dict."""
self.__dict__ = d
MOCK_RESULTS = {
"results": [
ObjectView(
{
"id": 0,
"url": "http://example.com/1",
"title": "example1",
"score": "1",
"num_comments": "1",
"created": "",
"selftext": "example1 selftext",
}
),
ObjectView(
{
"id": 1,
"url": "http://example.com/2",
"title": "example2",
"score": "2",
"num_comments": "2",
"created": "",
"selftext": "example2 selftext",
}
),
]
}
MOCK_RESULTS_LENGTH = len(MOCK_RESULTS["results"])
class MockPraw:
"""Mock class for Reddit library."""
def __init__(
self,
client_id: str,
client_secret: str,
username: str,
password: str,
user_agent: str,
):
"""Add mock data for API return."""
self._data = MOCK_RESULTS
def subreddit(self, subreddit: str):
"""Return an instance of a subreddit."""
return MockSubreddit(subreddit, self._data)
class MockSubreddit:
"""Mock class for a subreddit instance."""
def __init__(self, subreddit: str, data):
"""Add mock data for API return."""
self._subreddit = subreddit
self._data = data
def top(self, limit):
"""Return top posts for a subreddit."""
return self._return_data(limit)
def controversial(self, limit):
"""Return controversial posts for a subreddit."""
return self._return_data(limit)
def hot(self, limit):
"""Return hot posts for a subreddit."""
return self._return_data(limit)
def new(self, limit):
"""Return new posts for a subreddit."""
return self._return_data(limit)
def _return_data(self, limit):
"""Test method to return modified data."""
data = copy.deepcopy(self._data)
return data["results"][:limit]
@patch("praw.Reddit", new=MockPraw)
async def test_setup_with_valid_config(hass):
"""Test the platform setup with Reddit configuration."""
assert await async_setup_component(hass, "sensor", VALID_CONFIG)
await hass.async_block_till_done()
state = hass.states.get("sensor.reddit_worldnews")
assert int(state.state) == MOCK_RESULTS_LENGTH
state = hass.states.get("sensor.reddit_news")
assert int(state.state) == MOCK_RESULTS_LENGTH
assert state.attributes[ATTR_SUBREDDIT] == "news"
assert state.attributes[ATTR_POSTS][0] == {
ATTR_ID: 0,
ATTR_URL: "http://example.com/1",
ATTR_TITLE: "example1",
ATTR_SCORE: "1",
ATTR_COMMENTS_NUMBER: "1",
ATTR_CREATED: "",
ATTR_BODY: "example1 selftext",
}
assert state.attributes[CONF_SORT_BY] == "hot"
@patch("praw.Reddit", new=MockPraw)
async def test_setup_with_invalid_config(hass):
"""Test the platform setup with invalid Reddit configuration."""
assert await async_setup_component(hass, "sensor", INVALID_SORT_BY_CONFIG)
await hass.async_block_till_done()
assert not hass.states.get("sensor.reddit_worldnews")
|
import asyncio
import logging
import pysmarthab
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
DOMAIN = "smarthab"
DATA_HUB = "hub"
COMPONENTS = ["light", "cover"]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config) -> bool:
"""Set up the SmartHab platform."""
hass.data.setdefault(DOMAIN, {})
if DOMAIN not in config:
return True
if not hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config[DOMAIN],
)
)
return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Set up config entry for SmartHab integration."""
# Assign configuration variables
username = entry.data[CONF_EMAIL]
password = entry.data[CONF_PASSWORD]
# Setup connection with SmartHab API
hub = pysmarthab.SmartHab()
try:
await hub.async_login(username, password)
except pysmarthab.RequestFailedException as err:
_LOGGER.exception("Error while trying to reach SmartHab API")
raise ConfigEntryNotReady from err
# Pass hub object to child platforms
hass.data[DOMAIN][entry.entry_id] = {DATA_HUB: hub}
for component in COMPONENTS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
"""Unload config entry from SmartHab integration."""
result = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in COMPONENTS
]
)
)
if result:
hass.data[DOMAIN].pop(entry.entry_id)
return result
|
from absl import flags
from perfkitbenchmarker import configs
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.windows_packages import psping
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'psping'
BENCHMARK_CONFIG = """
psping:
description: Run psping between two VMs.
vm_groups:
vm_1:
vm_spec: *default_single_core
vm_2:
vm_spec: *default_single_core
"""
def GetConfig(user_config):
return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def Prepare(benchmark_spec):
vms = benchmark_spec.vms[:2]
for vm in vms:
vm.Install('psping')
def Run(benchmark_spec):
"""Measure TCP latency between two VMs.
Args:
benchmark_spec: The benchmark specification. Contains all data that is
required to run the benchmark.
Returns:
A list of sample.Sample objects with the benchmark results.
"""
vms = benchmark_spec.vms
results = []
def _RunTest(sending_vm, receiving_vm):
if vm_util.ShouldRunOnExternalIpAddress():
results.extend(psping.RunLatencyTest(sending_vm,
receiving_vm,
use_internal_ip=False))
if vm_util.ShouldRunOnInternalIpAddress(sending_vm, receiving_vm):
results.extend(psping.RunLatencyTest(sending_vm,
receiving_vm,
use_internal_ip=True))
_RunTest(vms[0], vms[1])
_RunTest(vms[1], vms[0])
return results
def Cleanup(unused_benchmark_spec):
pass
|
import sys
import urwid.lcd_display
CGRAM = """
...... ...... ...... ...... ..X... ...... ...... ......
XXXXXX XXXXXX XXXXXX XXXXXX X.XX.. .XXXXX ..XXX. .....X
...... XX.... XXXX.. XXXXXX X.XXX. .X...X .X...X ....XX
...... XX.... XXXX.. XXXXXX X.XXXX .X...X .X...X .X.XX.
...... XX.... XXXX.. XXXXXX X.XXX. .X...X .X...X .XXX..
XXXXXX XXXXXX XXXXXX XXXXXX X.XX.. .XXXXX ..XXX. ..X...
...... ...... ...... ...... ..X... ...... ...... ......
...... ...... ...... ...... ...... ...... ...... ......
"""
def program_cgram(screen):
"""
Load the character data
"""
# convert .'s and X's above into integer data
cbuf = [list() for x in range(8)]
for row in CGRAM.strip().split('\n'):
rowsegments = row.strip().split()
for num, r in enumerate(rowsegments):
accum = 0
for c in r:
accum = (accum << 1) + (c == 'X')
cbuf[num].append(accum)
for num, cdata in enumerate(cbuf):
screen.program_cgram(num, cdata)
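# For reference, each six-character row above packs MSB-first into one integer,
# e.g. 'XXXXXX' -> 0b111111 == 63 and '..X...' -> 0b001000 == 8; the eight
# resulting 8-row buffers are written to CGRAM slots 0-7.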
class LCDCheckBox(urwid.CheckBox):
"""
A check box+label that uses only one character for the check box,
including custom CGRAM character
"""
states = {
True: urwid.SelectableIcon('\xd0'),
False: urwid.SelectableIcon('\x05'),
}
reserve_columns = 1
class LCDRadioButton(urwid.RadioButton):
"""
A radio button+label that uses only one character for the radio button,
including custom CGRAM character
"""
states = {
True: urwid.SelectableIcon('\xbb'),
False: urwid.SelectableIcon('\x06'),
}
reserve_columns = 1
class LCDProgressBar(urwid.FlowWidget):
"""
The "progress bar" used by the horizontal slider for this device,
using custom CGRAM characters
"""
segments = '\x00\x01\x02\x03'
def __init__(self, range, value):
self.range = range
self.value = value
def rows(self, size, focus=False):
return 1
def render(self, size, focus=False):
"""
Draw the bar with self.segments where [0] is empty and [-1]
is completely full
"""
(maxcol,) = size
steps = self.get_steps(size)
filled = urwid.int_scale(self.value, self.range, steps)
full_segments = int(filled / (len(self.segments) - 1))
last_char = filled % (len(self.segments) - 1) + 1
s = (self.segments[-1] * full_segments +
self.segments[last_char] +
            self.segments[0] * (maxcol - full_segments - 1))
return urwid.Text(s).render(size)
def move_position(self, size, direction):
"""
Update and return the value one step +ve or -ve, based on
the size of the displayed bar.
direction -- 1 for +ve, 0 for -ve
"""
steps = self.get_steps(size)
filled = urwid.int_scale(self.value, self.range, steps)
filled += 2 * direction - 1
value = urwid.int_scale(filled, steps, self.range)
value = max(0, min(self.range - 1, value))
if value != self.value:
self.value = value
self._invalidate()
return value
def get_steps(self, size):
"""
Return the number of steps available given size for rendering
the bar and number of segments we can draw.
"""
(maxcol,) = size
return maxcol * (len(self.segments) - 1)
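# Worked example (illustrative): with maxcol == 10 and the four segment
# characters above, get_steps() returns 30; a value of 15 out of a range of 30
# scales to filled == 15, so the bar renders 5 full cells, one cell drawn with
# segment index 1, and 4 empty cells.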
class LCDHorizontalSlider(urwid.WidgetWrap):
"""
A slider control using custom CGRAM characters
"""
def __init__(self, range, value, callback):
self.bar = LCDProgressBar(range, value)
cols = urwid.Columns([
('fixed', 1, urwid.SelectableIcon('\x11')),
self.bar,
('fixed', 1, urwid.SelectableIcon('\x04')),
])
self.__super.__init__(cols)
self.callback = callback
def keypress(self, size, key):
# move the slider based on which arrow is focused
if key == 'enter':
# use the correct size for adjusting the bar
self.bar.move_position((self._w.column_widths(size)[1],),
self._w.get_focus_column() != 0)
self.callback(self.bar.value)
else:
return self.__super.keypress(size, key)
class MenuOption(urwid.Button):
"""
A menu option, indicated with a single arrow character
"""
def __init__(self, label, submenu):
self.__super.__init__("")
# use a Text widget for label, we want the cursor
# on the arrow not the label
self._label = urwid.Text("")
self.set_label(label)
self._w = urwid.Columns([
('fixed', 1, urwid.SelectableIcon('\xdf')),
self._label])
urwid.connect_signal(self, 'click',
lambda option: show_menu(submenu))
def keypress(self, size, key):
if key == 'right':
key = 'enter'
return self.__super.keypress(size, key)
class Menu(urwid.ListBox):
def __init__(self, widgets):
self.menu_parent = None
self.__super.__init__(urwid.SimpleListWalker(widgets))
def keypress(self, size, key):
"""
Go back to the previous menu on cancel button (mapped to esc)
"""
key = self.__super.keypress(size, key)
if key in ('left', 'esc') and self.menu_parent:
show_menu(self.menu_parent)
else:
return key
def build_menus():
cursor_option_group = []
def cursor_option(label, style):
"a radio button that sets the cursor style"
def on_change(b, state):
if state: screen.set_cursor_style(style)
b = LCDRadioButton(cursor_option_group, label,
screen.cursor_style == style)
urwid.connect_signal(b, 'change', on_change)
return b
def display_setting(label, range, fn):
        slider = LCDHorizontalSlider(range, range // 2, fn)
return urwid.Columns([
urwid.Text(label),
('fixed', 10, slider),
])
def led_custom(index):
def exp_scale_led(rg):
"""
apply an exponential transformation to values sent so
that apparent brightness increases in a natural way.
"""
return lambda value: screen.set_led_pin(index, rg,
[0, 1, 2, 3, 4, 5, 6, 8, 11, 14, 18,
23, 29, 38, 48, 61, 79, 100][value])
return urwid.Columns([
('fixed', 2, urwid.Text('%dR' % index)),
LCDHorizontalSlider(18, 0, exp_scale_led(0)),
('fixed', 2, urwid.Text(' G')),
LCDHorizontalSlider(18, 0, exp_scale_led(1)),
])
menu_structure = [
('Display Settings', [
display_setting('Brightness', 101, screen.set_backlight),
display_setting('Contrast', 76,
lambda x: screen.set_lcd_contrast(x + 75)),
]),
('Cursor Settings', [
cursor_option('Block', screen.CURSOR_BLINKING_BLOCK),
cursor_option('Underscore', screen.CURSOR_UNDERSCORE),
cursor_option('Block + Underscore',
screen.CURSOR_BLINKING_BLOCK_UNDERSCORE),
cursor_option('Inverting Block',
screen.CURSOR_INVERTING_BLINKING_BLOCK),
]),
('LEDs', [
led_custom(0),
led_custom(1),
led_custom(2),
led_custom(3),
]),
('About this Demo', [
urwid.Text("This is a demo of Urwid's CF635Display "
"module. If you need an interface for a limited "
"character display device this should serve as a "
"good example for implementing your own display "
"module and menu-driven application."),
])
]
def build_submenu(ms):
"""
Recursive menu building from structure above
"""
options = []
submenus = []
for opt in ms:
# shortform for MenuOptions
if type(opt) == tuple:
name, sub = opt
submenu = build_submenu(sub)
opt = MenuOption(name, submenu)
submenus.append(submenu)
options.append(opt)
menu = Menu(options)
for s in submenus:
s.menu_parent = menu
return menu
return build_submenu(menu_structure)
screen = urwid.lcd_display.CF635Screen(sys.argv[1])
# set up our font
program_cgram(screen)
loop = urwid.MainLoop(build_menus(), screen=screen)
# FIXME: want screen to know it is in narrow mode, or better yet,
# do the unicode conversion for us
urwid.set_encoding('narrow')
def show_menu(menu):
loop.widget = menu
loop.run()
|
import base64
import itertools
import logging
# pylint: disable=no-name-in-module,import-error
from google.protobuf.descriptor import FieldDescriptor
# pylint: enable=no-name-in-module,import-error
logger = logging.getLogger(__name__)
def _decode_field(message, field, value):
"""Decode optional or required field."""
if field.type == FieldDescriptor.TYPE_MESSAGE:
decode(getattr(message, field.name), value)
else:
try:
if field.type == FieldDescriptor.TYPE_BYTES:
value = base64.b64decode(value)
setattr(message, field.name, value)
except (ValueError, TypeError) as e:
# ValueError: invalid enum value, negative unsigned int value, or
# invalid base64
# TypeError: mismatched type
logger.warning('Message %r ignoring field %s: %s',
message.__class__.__name__, field.name, e)
def _decode_repeated_field(message, field, value_list):
"""Decode repeated field."""
if field.type == FieldDescriptor.TYPE_MESSAGE:
for value in value_list:
decode(getattr(message, field.name).add(), value)
else:
try:
for value in value_list:
if field.type == FieldDescriptor.TYPE_BYTES:
value = base64.b64decode(value)
getattr(message, field.name).append(value)
except (ValueError, TypeError) as e:
# ValueError: invalid enum value, negative unsigned int value, or
# invalid base64
# TypeError: mismatched type
logger.warning('Message %r ignoring repeated field %s: %s',
message.__class__.__name__, field.name, e)
# Ignore any values already decoded by clearing list
message.ClearField(field.name)
def decode(message, pblite, ignore_first_item=False):
"""Decode pblite to Protocol Buffer message.
This method is permissive of decoding errors and will log them as warnings
and continue decoding where possible.
The first element of the outer pblite list must often be ignored using the
ignore_first_item parameter because it contains an abbreviation of the name
    of the protobuf message (e.g. cscmrp for ClientSendChatMessageResponseP)
that's not part of the protobuf.
Args:
message: protocol buffer message instance to decode into.
pblite: list representing a pblite-serialized message.
ignore_first_item: If True, ignore the item at index 0 in the pblite
list, making the item at index 1 correspond to field 1 in the
message.
"""
if not isinstance(pblite, list):
logger.warning('Ignoring invalid message: expected list, got %r',
type(pblite))
return
if ignore_first_item:
pblite = pblite[1:]
# If the last item of the list is a dict, use it as additional field/value
# mappings. This seems to be an optimization added for dealing with really
# high field numbers.
if pblite and isinstance(pblite[-1], dict):
extra_fields = {int(field_number): value for field_number, value
in pblite[-1].items()}
pblite = pblite[:-1]
else:
extra_fields = {}
fields_values = itertools.chain(enumerate(pblite, start=1),
extra_fields.items())
for field_number, value in fields_values:
if value is None:
continue
try:
field = message.DESCRIPTOR.fields_by_number[field_number]
except KeyError:
# If the tag number is unknown and the value is non-trivial, log a
# message to aid reverse-engineering the missing field in the
# message.
if value not in [[], '', 0]:
logger.debug('Message %r contains unknown field %s with value '
'%r', message.__class__.__name__, field_number,
value)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
_decode_repeated_field(message, field, value)
else:
_decode_field(message, field, value)
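# Minimal usage sketch (the message type here is hypothetical, for illustration
# only):
#
#     msg = SomeResponseMessage()            # a generated protobuf class
#     decode(msg, ['abbr', 'hello', 42], ignore_first_item=True)
#     # 'hello' is decoded into field number 1 and 42 into field number 2;
#     # unknown or mismatched values are logged and skipped.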
|
from collections import deque
from aiohue.groups import Groups
from aiohue.lights import Lights
from aiohue.scenes import Scenes
from aiohue.sensors import Sensors
import pytest
from homeassistant import config_entries
from homeassistant.components import hue
from homeassistant.components.hue import sensor_base as hue_sensor_base
from tests.async_mock import AsyncMock, Mock, patch
from tests.components.light.conftest import mock_light_profiles # noqa
@pytest.fixture(autouse=True)
def no_request_delay():
"""Make the request refresh delay 0 for instant tests."""
with patch("homeassistant.components.hue.light.REQUEST_REFRESH_DELAY", 0):
yield
def create_mock_bridge(hass):
"""Create a mock Hue bridge."""
bridge = Mock(
hass=hass,
available=True,
authorized=True,
allow_unreachable=False,
allow_groups=False,
api=Mock(),
reset_jobs=[],
spec=hue.HueBridge,
)
bridge.sensor_manager = hue_sensor_base.SensorManager(bridge)
bridge.mock_requests = []
# We're using a deque so we can schedule multiple responses
# and also means that `popleft()` will blow up if we get more updates
# than expected.
bridge.mock_light_responses = deque()
bridge.mock_group_responses = deque()
bridge.mock_sensor_responses = deque()
async def mock_request(method, path, **kwargs):
kwargs["method"] = method
kwargs["path"] = path
bridge.mock_requests.append(kwargs)
if path == "lights":
return bridge.mock_light_responses.popleft()
if path == "groups":
return bridge.mock_group_responses.popleft()
if path == "sensors":
return bridge.mock_sensor_responses.popleft()
return None
async def async_request_call(task):
await task()
bridge.async_request_call = async_request_call
bridge.api.config.apiversion = "9.9.9"
bridge.api.lights = Lights({}, mock_request)
bridge.api.groups = Groups({}, mock_request)
bridge.api.sensors = Sensors({}, mock_request)
return bridge
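# Typical use in a test (a sketch, not an exhaustive example): queue a response
# before the platform is set up, e.g.
#     bridge = create_mock_bridge(hass)
#     bridge.mock_sensor_responses.append({})
#     await setup_bridge_for_sensors(hass, bridge)
# Each forwarded request pops one queued response; an unexpected extra request
# raises IndexError from popleft().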
@pytest.fixture
def mock_api(hass):
"""Mock the Hue api."""
api = Mock(initialize=AsyncMock())
api.mock_requests = []
api.mock_light_responses = deque()
api.mock_group_responses = deque()
api.mock_sensor_responses = deque()
api.mock_scene_responses = deque()
async def mock_request(method, path, **kwargs):
kwargs["method"] = method
kwargs["path"] = path
api.mock_requests.append(kwargs)
if path == "lights":
return api.mock_light_responses.popleft()
if path == "groups":
return api.mock_group_responses.popleft()
if path == "sensors":
return api.mock_sensor_responses.popleft()
if path == "scenes":
return api.mock_scene_responses.popleft()
return None
api.config.apiversion = "9.9.9"
api.lights = Lights({}, mock_request)
api.groups = Groups({}, mock_request)
api.sensors = Sensors({}, mock_request)
api.scenes = Scenes({}, mock_request)
return api
@pytest.fixture
def mock_bridge(hass):
"""Mock a Hue bridge."""
return create_mock_bridge(hass)
async def setup_bridge_for_sensors(hass, mock_bridge, hostname=None):
"""Load the Hue platform with the provided bridge for sensor-related platforms."""
if hostname is None:
hostname = "mock-host"
hass.config.components.add(hue.DOMAIN)
config_entry = config_entries.ConfigEntry(
1,
hue.DOMAIN,
"Mock Title",
{"host": hostname},
"test",
config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
)
mock_bridge.config_entry = config_entry
hass.data[hue.DOMAIN] = {config_entry.entry_id: mock_bridge}
await hass.config_entries.async_forward_entry_setup(config_entry, "binary_sensor")
await hass.config_entries.async_forward_entry_setup(config_entry, "sensor")
# simulate a full setup by manually adding the bridge config entry
hass.config_entries._entries.append(config_entry)
# and make sure it completes before going further
await hass.async_block_till_done()
|
import logging
from pycec.commands import ( # pylint: disable=import-error
CecCommand,
KeyPressCommand,
KeyReleaseCommand,
)
from pycec.const import ( # pylint: disable=import-error
KEY_BACKWARD,
KEY_FORWARD,
KEY_MUTE_TOGGLE,
KEY_PAUSE,
KEY_PLAY,
KEY_STOP,
KEY_VOLUME_DOWN,
KEY_VOLUME_UP,
POWER_OFF,
POWER_ON,
STATUS_PLAY,
STATUS_STILL,
STATUS_STOP,
TYPE_AUDIO,
TYPE_PLAYBACK,
TYPE_RECORDER,
TYPE_TUNER,
)
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
DOMAIN,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from . import ATTR_NEW, CecEntity
_LOGGER = logging.getLogger(__name__)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Find and return HDMI devices as +switches."""
if ATTR_NEW in discovery_info:
_LOGGER.debug("Setting up HDMI devices %s", discovery_info[ATTR_NEW])
entities = []
for device in discovery_info[ATTR_NEW]:
hdmi_device = hass.data.get(device)
entities.append(CecPlayerEntity(hdmi_device, hdmi_device.logical_address))
add_entities(entities, True)
class CecPlayerEntity(CecEntity, MediaPlayerEntity):
"""Representation of a HDMI device as a Media player."""
def __init__(self, device, logical) -> None:
"""Initialize the HDMI device."""
CecEntity.__init__(self, device, logical)
self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}"
def send_keypress(self, key):
"""Send keypress to CEC adapter."""
_LOGGER.debug(
"Sending keypress %s to device %s", hex(key), hex(self._logical_address)
)
self._device.send_command(KeyPressCommand(key, dst=self._logical_address))
self._device.send_command(KeyReleaseCommand(dst=self._logical_address))
def send_playback(self, key):
"""Send playback status to CEC adapter."""
self._device.async_send_command(CecCommand(key, dst=self._logical_address))
def mute_volume(self, mute):
"""Mute volume."""
self.send_keypress(KEY_MUTE_TOGGLE)
def media_previous_track(self):
"""Go to previous track."""
self.send_keypress(KEY_BACKWARD)
def turn_on(self):
"""Turn device on."""
self._device.turn_on()
self._state = STATE_ON
def clear_playlist(self):
"""Clear players playlist."""
raise NotImplementedError()
def turn_off(self):
"""Turn device off."""
self._device.turn_off()
self._state = STATE_OFF
def media_stop(self):
"""Stop playback."""
self.send_keypress(KEY_STOP)
self._state = STATE_IDLE
def play_media(self, media_type, media_id, **kwargs):
"""Not supported."""
raise NotImplementedError()
def media_next_track(self):
"""Skip to next track."""
self.send_keypress(KEY_FORWARD)
def media_seek(self, position):
"""Not supported."""
raise NotImplementedError()
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
raise NotImplementedError()
def media_pause(self):
"""Pause playback."""
self.send_keypress(KEY_PAUSE)
self._state = STATE_PAUSED
def select_source(self, source):
"""Not supported."""
raise NotImplementedError()
def media_play(self):
"""Start playback."""
self.send_keypress(KEY_PLAY)
self._state = STATE_PLAYING
def volume_up(self):
"""Increase volume."""
_LOGGER.debug("%s: volume up", self._logical_address)
self.send_keypress(KEY_VOLUME_UP)
def volume_down(self):
"""Decrease volume."""
_LOGGER.debug("%s: volume down", self._logical_address)
self.send_keypress(KEY_VOLUME_DOWN)
@property
def state(self) -> str:
"""Cache state of device."""
return self._state
def update(self):
"""Update device status."""
device = self._device
if device.power_status in [POWER_OFF, 3]:
self._state = STATE_OFF
elif not self.support_pause:
if device.power_status in [POWER_ON, 4]:
self._state = STATE_ON
elif device.status == STATUS_PLAY:
self._state = STATE_PLAYING
elif device.status == STATUS_STOP:
self._state = STATE_IDLE
elif device.status == STATUS_STILL:
self._state = STATE_PAUSED
else:
_LOGGER.warning("Unknown state: %s", device.status)
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self.type_id == TYPE_RECORDER or self.type == TYPE_PLAYBACK:
return (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PLAY_MEDIA
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
)
if self.type == TYPE_TUNER:
return (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PLAY_MEDIA
| SUPPORT_PAUSE
| SUPPORT_STOP
)
if self.type_id == TYPE_AUDIO:
return (
SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
)
return SUPPORT_TURN_ON | SUPPORT_TURN_OFF
|
import pytest
from molecule.model import schema_v2
@pytest.fixture
def _model_lint_section_data():
return {
'lint': {
'name': 'yamllint',
'enabled': True,
'options': {
'foo': 'bar',
},
'env': {
'FOO': 'foo',
'FOO_BAR': 'foo_bar',
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_lint_section_data'], indirect=True)
def test_lint(_config):
assert {} == schema_v2.validate(_config)
@pytest.fixture
def _model_lint_errors_section_data():
return {
'lint': {
'name': int(),
'enabled': str(),
'options': [],
'env': {
'foo': 'foo',
'foo-bar': 'foo-bar',
},
}
}
@pytest.mark.parametrize(
'_config', ['_model_lint_errors_section_data'], indirect=True)
def test_lint_has_errors(_config):
x = {
'lint': [{
'enabled': ['must be of boolean type'],
'name': ['must be of string type'],
'env': [{
'foo': ["value does not match regex '^[A-Z0-9_-]+$'"],
'foo-bar': ["value does not match regex '^[A-Z0-9_-]+$'"],
}],
'options': ['must be of dict type'],
}]
}
assert x == schema_v2.validate(_config)
@pytest.fixture
def _model_lint_allows_yamllint_section_data():
return {
'lint': {
'name': 'yamllint',
}
}
@pytest.mark.parametrize(
'_config', [
('_model_lint_allows_yamllint_section_data'),
], indirect=True)
def test_lint_allows_name(_config):
assert {} == schema_v2.validate(_config)
|
class IndexStore(object):
def __init__(self):
self._next_i = 0
self._i2val = []
self._val2i = {}
def getval(self, idx):
return self._i2val[idx]
def __len__(self):
return len(self._i2val)
def __contains__(self, val):
return self._hasval(val)
def _hasval(self, val):
return val in self._val2i
def getidxstrictbatch(self, vals):
return [self._val2i[t] for t in vals]
def getidx(self, val):
try:
return self._val2i[val]
except KeyError:
self._val2i[val] = self._next_i
self._i2val.append(val)
self._next_i += 1
return self._next_i - 1
def getidxstrict(self, val):
return self._val2i[val]
def getnumvals(self):
return self._next_i
def getvals(self):
return set(self._i2val)
def hasidx(self, idx):
return 0 <= idx < self._next_i
def items(self):
return enumerate(self._i2val)
def batch_delete_vals(self, values):
idx_delete_list = []
for val in values:
if not self._hasval(val):
raise KeyError(str(val) + ' not found')
idx_delete_list.append(self.getidx(val))
return self.batch_delete_idx(idx_delete_list)
def batch_delete_idx(self, idx_list):
new_idxstore = IndexStore()
last_idx_to_delete = -1
number_of_values = self.getnumvals()
for idx_to_delete in sorted(idx_list):
if idx_to_delete >= number_of_values:
raise ValueError('index ' + str(idx_to_delete) + ' not found')
new_idxstore._i2val += self._i2val[last_idx_to_delete + 1:idx_to_delete]
last_idx_to_delete = idx_to_delete
new_idxstore._i2val += self._i2val[last_idx_to_delete + 1:]
new_idxstore._val2i = {val: i for i, val in enumerate(new_idxstore._i2val)}
new_idxstore._next_i = len(new_idxstore._val2i)
return new_idxstore
def _regenerate_val2i_and_next_i(self):
self._val2i = {val: idx for idx, val in enumerate(self._i2val)}
self._next_i = len(self._i2val)
def values(self):
'''
Returns
-------
list
A list containing all values registered.
'''
return self._i2val
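# Usage sketch (illustrative only): indices are handed out in first-seen order
# and remain stable for the lifetime of the store.
#
#     store = IndexStore()
#     assert store.getidx('apple') == 0
#     assert store.getidx('banana') == 1
#     assert store.getidx('apple') == 0   # an existing value keeps its index
#     assert store.getval(1) == 'banana'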
|
import logging
from pushover_complete import PushoverAPI
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_API_KEY
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_ATTACHMENT = "attachment"
ATTR_URL = "url"
ATTR_URL_TITLE = "url_title"
ATTR_PRIORITY = "priority"
ATTR_RETRY = "retry"
ATTR_SOUND = "sound"
ATTR_HTML = "html"
ATTR_CALLBACK_URL = "callback_url"
ATTR_EXPIRE = "expire"
ATTR_TIMESTAMP = "timestamp"
CONF_USER_KEY = "user_key"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USER_KEY): cv.string, vol.Required(CONF_API_KEY): cv.string}
)
def get_service(hass, config, discovery_info=None):
"""Get the Pushover notification service."""
return PushoverNotificationService(
hass, config[CONF_USER_KEY], config[CONF_API_KEY]
)
class PushoverNotificationService(BaseNotificationService):
"""Implement the notification service for Pushover."""
def __init__(self, hass, user_key, api_token):
"""Initialize the service."""
self._hass = hass
self._user_key = user_key
self._api_token = api_token
self.pushover = PushoverAPI(self._api_token)
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
# Extract params from data dict
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = dict(kwargs.get(ATTR_DATA) or {})
url = data.get(ATTR_URL)
url_title = data.get(ATTR_URL_TITLE)
priority = data.get(ATTR_PRIORITY)
retry = data.get(ATTR_RETRY)
expire = data.get(ATTR_EXPIRE)
callback_url = data.get(ATTR_CALLBACK_URL)
timestamp = data.get(ATTR_TIMESTAMP)
sound = data.get(ATTR_SOUND)
html = 1 if data.get(ATTR_HTML, False) else 0
image = data.get(ATTR_ATTACHMENT)
# Check for attachment
if image is not None:
# Only allow attachments from whitelisted paths, check valid path
if self._hass.config.is_allowed_path(data[ATTR_ATTACHMENT]):
# try to open it as a normal file.
try:
file_handle = open(data[ATTR_ATTACHMENT], "rb")
# Replace the attachment identifier with file object.
image = file_handle
except OSError as ex_val:
_LOGGER.error(ex_val)
# Remove attachment key to send without attachment.
image = None
else:
_LOGGER.error("Path is not whitelisted")
# Remove attachment key to send without attachment.
image = None
targets = kwargs.get(ATTR_TARGET)
if not isinstance(targets, list):
targets = [targets]
for target in targets:
try:
self.pushover.send_message(
self._user_key,
message,
target,
title,
url,
url_title,
image,
priority,
retry,
expire,
callback_url,
timestamp,
sound,
html,
)
except ValueError as val_err:
_LOGGER.error(val_err)
|
from datetime import timedelta
from homeassistant.components.fan import (
DOMAIN as SENSOR_DOMAIN,
ENTITY_ID_FORMAT,
SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.const import CONF_PLATFORM, STATE_OFF
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import TuyaDevice
from .const import DOMAIN, TUYA_DATA, TUYA_DISCOVERY_NEW
SCAN_INTERVAL = timedelta(seconds=15)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up tuya sensors dynamically through tuya discovery."""
platform = config_entry.data[CONF_PLATFORM]
async def async_discover_sensor(dev_ids):
"""Discover and add a discovered tuya sensor."""
if not dev_ids:
return
entities = await hass.async_add_executor_job(
_setup_entities,
hass,
dev_ids,
platform,
)
async_add_entities(entities)
async_dispatcher_connect(
hass, TUYA_DISCOVERY_NEW.format(SENSOR_DOMAIN), async_discover_sensor
)
devices_ids = hass.data[DOMAIN]["pending"].pop(SENSOR_DOMAIN)
await async_discover_sensor(devices_ids)
def _setup_entities(hass, dev_ids, platform):
"""Set up Tuya Fan device."""
tuya = hass.data[DOMAIN][TUYA_DATA]
entities = []
for dev_id in dev_ids:
device = tuya.get_device_by_id(dev_id)
if device is None:
continue
entities.append(TuyaFanDevice(device, platform))
return entities
class TuyaFanDevice(TuyaDevice, FanEntity):
"""Tuya fan devices."""
def __init__(self, tuya, platform):
"""Init Tuya fan device."""
super().__init__(tuya, platform)
self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id())
self.speeds = [STATE_OFF]
async def async_added_to_hass(self):
"""Create fan list when add to hass."""
await super().async_added_to_hass()
self.speeds.extend(self._tuya.speed_list())
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
if speed == STATE_OFF:
self.turn_off()
else:
self._tuya.set_speed(speed)
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the fan."""
if speed is not None:
self.set_speed(speed)
else:
self._tuya.turn_on()
def turn_off(self, **kwargs) -> None:
"""Turn the entity off."""
self._tuya.turn_off()
def oscillate(self, oscillating) -> None:
"""Oscillate the fan."""
self._tuya.oscillate(oscillating)
@property
def oscillating(self):
"""Return current oscillating status."""
if self.supported_features & SUPPORT_OSCILLATE == 0:
return None
if self.speed == STATE_OFF:
return False
return self._tuya.oscillating()
@property
def is_on(self):
"""Return true if the entity is on."""
return self._tuya.state()
@property
def speed(self) -> str:
"""Return the current speed."""
if self.is_on:
return self._tuya.speed()
return STATE_OFF
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return self.speeds
@property
def supported_features(self) -> int:
"""Flag supported features."""
supports = SUPPORT_SET_SPEED
if self._tuya.support_oscillate():
supports = supports | SUPPORT_OSCILLATE
return supports
|
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.harmony.config_flow import CannotConnect
from homeassistant.components.harmony.const import DOMAIN, PREVIOUS_ACTIVE_ACTIVITY
from homeassistant.const import CONF_HOST, CONF_NAME
from tests.async_mock import AsyncMock, MagicMock, PropertyMock, patch
from tests.common import MockConfigEntry
def _get_mock_harmonyapi(connect=None, close=None):
harmonyapi_mock = MagicMock()
type(harmonyapi_mock).connect = AsyncMock(return_value=connect)
type(harmonyapi_mock).close = AsyncMock(return_value=close)
return harmonyapi_mock
def _get_mock_harmonyclient():
harmonyclient_mock = MagicMock()
type(harmonyclient_mock).connect = AsyncMock()
type(harmonyclient_mock).close = AsyncMock()
type(harmonyclient_mock).get_activity_name = MagicMock(return_value="Watch TV")
type(harmonyclient_mock.hub_config).activities = PropertyMock(
return_value=[{"name": "Watch TV", "id": 123}]
)
type(harmonyclient_mock.hub_config).devices = PropertyMock(
return_value=[{"name": "My TV", "id": 1234}]
)
type(harmonyclient_mock.hub_config).info = PropertyMock(return_value={})
type(harmonyclient_mock.hub_config).hub_state = PropertyMock(return_value={})
return harmonyclient_mock
async def test_user_form(hass):
"""Test we get the user form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"host": "1.2.3.4", "name": "friend"},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "friend"
assert result2["data"] == {"host": "1.2.3.4", "name": "friend"}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_import(hass):
"""Test we get the form with import source."""
await setup.async_setup_component(hass, "persistent_notification", {})
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.9,
"unique_id": "555234534543",
},
)
await hass.async_block_till_done()
assert result["result"].unique_id == "555234534543"
assert result["type"] == "create_entry"
assert result["title"] == "friend"
assert result["data"] == {
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.9,
}
# It is not possible to import options at this time
# so they end up in the config entry data and are
    # used as a fallback when they are not in options
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_ssdp(hass):
"""Test we get the form with ssdp source."""
await setup.async_setup_component(hass, "persistent_notification", {})
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
return_value=harmonyapi,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "Harmony Hub",
"ssdp_location": "http://192.168.1.12:8088/description",
},
)
assert result["type"] == "form"
assert result["step_id"] == "link"
assert result["errors"] == {}
assert result["description_placeholders"] == {
"host": "Harmony Hub",
"name": "192.168.1.12",
}
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
return_value=harmonyapi,
), patch(
"homeassistant.components.harmony.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.harmony.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Harmony Hub"
assert result2["data"] == {"host": "192.168.1.12", "name": "Harmony Hub"}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_ssdp_aborts_before_checking_remoteid_if_host_known(hass):
"""Test we abort without connecting if the host is already known."""
await setup.async_setup_component(hass, "persistent_notification", {})
config_entry = MockConfigEntry(
domain=DOMAIN,
data={"host": "2.2.2.2", "name": "any"},
)
config_entry.add_to_hass(hass)
config_entry_without_host = MockConfigEntry(
domain=DOMAIN,
data={"name": "other"},
)
config_entry_without_host.add_to_hass(hass)
harmonyapi = _get_mock_harmonyapi(connect=True)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
return_value=harmonyapi,
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "Harmony Hub",
"ssdp_location": "http://2.2.2.2:8088/description",
},
)
assert result["type"] == "abort"
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.harmony.util.HarmonyAPI",
side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.2.3.4",
"name": "friend",
"activity": "Watch TV",
"delay_secs": 0.2,
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_options_flow(hass):
"""Test config flow options."""
config_entry = MockConfigEntry(
domain=DOMAIN,
unique_id="abcde12345",
data={CONF_HOST: "1.2.3.4", CONF_NAME: "Guest Room"},
options={"activity": "Watch TV", "delay_secs": 0.5},
)
harmony_client = _get_mock_harmonyclient()
with patch(
"aioharmony.harmonyapi.HarmonyClient",
return_value=harmony_client,
), patch("homeassistant.components.harmony.remote.HarmonyRemote.write_config_file"):
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
await hass.async_block_till_done()
assert await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={"activity": PREVIOUS_ACTIVE_ACTIVITY, "delay_secs": 0.4},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {
"activity": PREVIOUS_ACTIVE_ACTIVITY,
"delay_secs": 0.4,
}
|
import errno
import socket
from amqp.exceptions import RecoverableConnectionError
from kombu.exceptions import ChannelError, ConnectionError
from kombu.message import Message
from kombu.utils.functional import dictfilter
from kombu.utils.objects import cached_property
from kombu.utils.time import maybe_s_to_ms
__all__ = ('Message', 'StdChannel', 'Management', 'Transport')
RABBITMQ_QUEUE_ARGUMENTS = { # type: Mapping[str, Tuple[str, Callable]]
'expires': ('x-expires', maybe_s_to_ms),
'message_ttl': ('x-message-ttl', maybe_s_to_ms),
'max_length': ('x-max-length', int),
'max_length_bytes': ('x-max-length-bytes', int),
'max_priority': ('x-max-priority', int),
}
def to_rabbitmq_queue_arguments(arguments, **options):
# type: (Mapping, **Any) -> Dict
"""Convert queue arguments to RabbitMQ queue arguments.
This is the implementation for Channel.prepare_queue_arguments
for AMQP-based transports. It's used by both the pyamqp and librabbitmq
transports.
Arguments:
arguments (Mapping):
User-supplied arguments (``Queue.queue_arguments``).
Keyword Arguments:
expires (float): Queue expiry time in seconds.
This will be converted to ``x-expires`` in int milliseconds.
message_ttl (float): Message TTL in seconds.
This will be converted to ``x-message-ttl`` in int milliseconds.
max_length (int): Max queue length (in number of messages).
This will be converted to ``x-max-length`` int.
max_length_bytes (int): Max queue size in bytes.
This will be converted to ``x-max-length-bytes`` int.
max_priority (int): Max priority steps for queue.
This will be converted to ``x-max-priority`` int.
Returns:
Dict: RabbitMQ compatible queue arguments.
"""
prepared = dictfilter(dict(
_to_rabbitmq_queue_argument(key, value)
for key, value in options.items()
))
return dict(arguments, **prepared) if prepared else arguments
def _to_rabbitmq_queue_argument(key, value):
# type: (str, Any) -> Tuple[str, Any]
opt, typ = RABBITMQ_QUEUE_ARGUMENTS[key]
return opt, typ(value) if value is not None else value
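# Example (illustrative): user-facing options are converted into RabbitMQ
# ``x-*`` arguments, with second-based TTLs expressed as integer milliseconds.
#
#     >>> to_rabbitmq_queue_arguments({}, expires=30.0, max_length=10)
#     {'x-expires': 30000, 'x-max-length': 10}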
def _LeftBlank(obj, method):
return NotImplementedError(
'Transport {0.__module__}.{0.__name__} does not implement {1}'.format(
obj.__class__, method))
class StdChannel:
"""Standard channel base class."""
no_ack_consumers = None
def Consumer(self, *args, **kwargs):
from kombu.messaging import Consumer
return Consumer(self, *args, **kwargs)
def Producer(self, *args, **kwargs):
from kombu.messaging import Producer
return Producer(self, *args, **kwargs)
def get_bindings(self):
raise _LeftBlank(self, 'get_bindings')
def after_reply_message_received(self, queue):
"""Callback called after RPC reply received.
Notes:
Reply queue semantics: can be used to delete the queue
after transient reply message received.
"""
def prepare_queue_arguments(self, arguments, **kwargs):
return arguments
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.close()
class Management:
"""AMQP Management API (incomplete)."""
def __init__(self, transport):
self.transport = transport
def get_bindings(self):
raise _LeftBlank(self, 'get_bindings')
class Implements(dict):
"""Helper class used to define transport features."""
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
self[key] = value
def extend(self, **kwargs):
return self.__class__(self, **kwargs)
default_transport_capabilities = Implements(
asynchronous=False,
exchange_type=frozenset(['direct', 'topic', 'fanout', 'headers']),
heartbeats=False,
)
class Transport:
"""Base class for transports."""
Management = Management
#: The :class:`~kombu.Connection` owning this instance.
client = None
#: Set to True if :class:`~kombu.Connection` should pass the URL
#: unmodified.
can_parse_url = False
#: Default port used when no port has been specified.
default_port = None
#: Tuple of errors that can happen due to connection failure.
connection_errors = (ConnectionError,)
#: Tuple of errors that can happen due to channel/method failure.
channel_errors = (ChannelError,)
#: Type of driver, can be used to separate transports
#: using the AMQP protocol (driver_type: 'amqp'),
#: Redis (driver_type: 'redis'), etc...
driver_type = 'N/A'
#: Name of driver library (e.g. 'py-amqp', 'redis').
driver_name = 'N/A'
__reader = None
implements = default_transport_capabilities.extend()
def __init__(self, client, **kwargs):
self.client = client
def establish_connection(self):
raise _LeftBlank(self, 'establish_connection')
def close_connection(self, connection):
raise _LeftBlank(self, 'close_connection')
def create_channel(self, connection):
raise _LeftBlank(self, 'create_channel')
def close_channel(self, connection):
raise _LeftBlank(self, 'close_channel')
def drain_events(self, connection, **kwargs):
raise _LeftBlank(self, 'drain_events')
def heartbeat_check(self, connection, rate=2):
pass
def driver_version(self):
return 'N/A'
def get_heartbeat_interval(self, connection):
return 0
def register_with_event_loop(self, connection, loop):
pass
def unregister_from_event_loop(self, connection, loop):
pass
def verify_connection(self, connection):
return True
def _make_reader(self, connection, timeout=socket.timeout,
error=socket.error, _unavail=(errno.EAGAIN, errno.EINTR)):
drain_events = connection.drain_events
def _read(loop):
if not connection.connected:
raise RecoverableConnectionError('Socket was disconnected')
try:
drain_events(timeout=0)
except timeout:
return
except error as exc:
if exc.errno in _unavail:
return
raise
loop.call_soon(_read, loop)
return _read
def qos_semantics_matches_spec(self, connection):
return True
def on_readable(self, connection, loop):
reader = self.__reader
if reader is None:
reader = self.__reader = self._make_reader(connection)
reader(loop)
@property
def default_connection_params(self):
return {}
def get_manager(self, *args, **kwargs):
return self.Management(self)
@cached_property
def manager(self):
return self.get_manager()
@property
def supports_heartbeats(self):
return self.implements.heartbeats
@property
def supports_ev(self):
return self.implements.asynchronous
|
import os, shutil, sys, tempfile, urllib2
from optparse import OptionParser
tmpeggs = tempfile.mkdtemp()
is_jython = sys.platform.startswith('java')
# parsing arguments
parser = OptionParser()
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="distribute", default=True,
help="Use Disribute rather than Setuptools.")
options, args = parser.parse_args()
if options.version is not None:
VERSION = '==%s' % options.version
else:
VERSION = ''
USE_DISTRIBUTE = options.distribute
args = args + ['bootstrap']
to_reload = False
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
to_reload = True
raise ImportError
except ImportError:
ez = {}
if USE_DISTRIBUTE:
exec urllib2.urlopen('http://python-distribute.org/distribute_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0, no_fake=True)
else:
exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0)
if to_reload:
reload(pkg_resources)
else:
import pkg_resources
if sys.platform == 'win32':
def quote(c):
if ' ' in c:
return '"%s"' % c # work around spawn lamosity on windows
else:
return c
else:
def quote (c):
return c
cmd = 'from setuptools.command.easy_install import main; main()'
ws = pkg_resources.working_set
if USE_DISTRIBUTE:
requirement = 'distribute'
else:
requirement = 'setuptools'
if is_jython:
import subprocess
assert subprocess.Popen([sys.executable] + ['-c', quote(cmd), '-mqNxd',
quote(tmpeggs), 'zc.buildout' + VERSION],
env=dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
).wait() == 0
else:
assert os.spawnle(
os.P_WAIT, sys.executable, quote (sys.executable),
'-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout' + VERSION,
dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
) == 0
ws.add_entry(tmpeggs)
ws.require('zc.buildout' + VERSION)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
|
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from .unifi_entity_base import UniFiBase
class UniFiClient(UniFiBase):
"""Base class for UniFi clients."""
def __init__(self, client, controller) -> None:
"""Set up client."""
super().__init__(client, controller)
self._is_wired = client.mac not in controller.wireless_clients
@property
def client(self):
"""Wrap item."""
return self._item
@property
def is_wired(self):
"""Return if the client is wired.
        The UniFi wired bug can mark wireless clients as wired; this property
        keeps track of affected clients. That tracking can be disabled via the
        ignore_wired_bug option, which is useful when the network does not only
        contain UniFi APs.
"""
if self._is_wired and self.client.mac in self.controller.wireless_clients:
self._is_wired = False
if self.controller.option_ignore_wired_bug:
return self.client.is_wired
return self._is_wired
@property
def unique_id(self):
"""Return a unique identifier for this switch."""
return f"{self.TYPE}-{self.client.mac}"
@property
def name(self) -> str:
"""Return the name of the client."""
return self.client.name or self.client.hostname
@property
def available(self) -> bool:
"""Return if controller is available."""
return self.controller.available
@property
def device_info(self) -> dict:
"""Return a client description for device registry."""
return {
"connections": {(CONNECTION_NETWORK_MAC, self.client.mac)},
"default_name": self.name,
"default_manufacturer": self.client.oui,
}
|
import unittest
from absl import flags
from perfkitbenchmarker import cloud_tpu
from perfkitbenchmarker.configs import benchmark_config_spec
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'name'
_BENCHMARK_UID = 'benchmark_uid'
_COMPONENT = 'test_component'
_FLAGS = None
_GROUP_NAME = 'train'
def MergeDicts(dict1, dict2):
result = dict1.copy()
result.update(dict2)
return result
class FakeTpu(cloud_tpu.BaseTpu):
def _Create(self):
pass
def _Delete(self):
pass
def GetName(self):
pass
def GetMasterGrpcAddress(self):
pass
def GetNumShards(self):
pass
def GetZone(self):
pass
def GetAcceleratorType(self):
pass
class TpuSpecTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(TpuSpecTestCase, self).setUp()
FLAGS['run_uri'].parse('123')
self.minimal_spec = {
'cloud': 'GCP',
}
cloud_tpu._CLOUD_TPU_REGISTRY = {
'GCP': FakeTpu(None)
}
def tearDown(self):
super(TpuSpecTestCase, self).tearDown()
cloud_tpu._CLOUD_TPU_REGISTRY = {}
def testMinimalConfig(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.cloud, 'GCP')
def testDefaultTpuName(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_name, 'pkb-tpu-train-123')
def testCustomTpuName(self):
spec = MergeDicts(self.minimal_spec, {'tpu_name': 'pkb-tpu'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_name, 'pkb-tpu')
def testDefaultTpuCidrRange(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_cidr_range, None)
def testCustomTpuCidrRange(self):
spec = MergeDicts(self.minimal_spec, {'tpu_cidr_range': '192.168.0.0/29'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_cidr_range, '192.168.0.0/29')
def testDefaultTpuAcceleratorType(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_accelerator_type, None)
def testCustomTpuAcceleratorType(self):
spec = MergeDicts(self.minimal_spec, {'tpu_accelerator_type': 'tpu-v2'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_accelerator_type, 'tpu-v2')
def testDefaultTpuDescription(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_description, None)
def testCustomTpuDescription(self):
spec = MergeDicts(self.minimal_spec, {'tpu_description': 'My TF Node'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_description, 'My TF Node')
def testDefaultTpuNetwork(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_network, None)
def testCustomTpuNetwork(self):
spec = MergeDicts(self.minimal_spec, {'tpu_network': 'default'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_network, 'default')
def testDefaultTpuZone(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_zone, None)
def testCustomTpuZone(self):
spec = MergeDicts(self.minimal_spec, {'tpu_zone': 'us-central1-a'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_zone, 'us-central1-a')
def testDefaultTpuVersion(self):
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.minimal_spec)
self.assertEqual(result.tpu_tf_version, None)
def testCustomTpuVersion(self):
spec = MergeDicts(self.minimal_spec, {'tpu_tf_version': 'nightly'})
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **spec)
self.assertEqual(result.tpu_tf_version, 'nightly')
class TpuSpecFlagsTestCase(pkb_common_test_case.PkbCommonTestCase):
def setUp(self):
super(TpuSpecFlagsTestCase, self).setUp()
FLAGS['run_uri'].parse('123')
self.full_spec = {
'cloud': 'GCP',
'tpu_name': 'pkb-tpu-123',
'tpu_cidr_range': '192.168.0.0/29',
'tpu_accelerator_type': 'tpu-v2',
'tpu_description': 'My TF Node',
'tpu_network': 'default',
'tpu_tf_version': 'nightly',
'tpu_zone': 'us-central1-a'
}
cloud_tpu._CLOUD_TPU_REGISTRY = {
'GCP': FakeTpu(None)
}
def tearDown(self):
super(TpuSpecFlagsTestCase, self).tearDown()
cloud_tpu._CLOUD_TPU_REGISTRY = {}
def testCloudFlag(self):
pass
def testTpuNameFlag(self):
FLAGS['tpu_name'].parse('pkb-tpu')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_name, 'pkb-tpu')
def testTpuCidrRangeFlag(self):
FLAGS['tpu_cidr_range'].parse('10.240.0.0/29')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_cidr_range, '10.240.0.0/29')
def testTpuAcceleratorTypeFlag(self):
FLAGS['tpu_accelerator_type'].parse('tpu-v1')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_accelerator_type, 'tpu-v1')
def testTpuDescriptionFlag(self):
FLAGS['tpu_description'].parse('MyTfNode')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_description, 'MyTfNode')
def testTpuNetworkFlag(self):
FLAGS['tpu_network'].parse('my-tf-network')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_network, 'my-tf-network')
def testTpuTfVersion(self):
FLAGS['tpu_tf_version'].parse('1.2')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_tf_version, '1.2')
def testTpuZone(self):
FLAGS['tpu_zone'].parse('us-central1-c')
result = benchmark_config_spec._TpuGroupSpec(
_COMPONENT, _GROUP_NAME, flag_values=FLAGS, **self.full_spec)
self.assertEqual(result.tpu_zone, 'us-central1-c')
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function
import argparse # noqa
import distutils.spawn
import getpass
import logging
import os
import sys
from .server import env
logger = logging.getLogger(__name__)
DESCRIPTION = """run the docker-registry with gunicorn, honoring the following
environment variables:
REGISTRY_HOST: TCP host or ip to bind to; default is 0.0.0.0
REGISTRY_PORT: TCP port to bind to; default is 5000
GUNICORN_WORKERS: number of worker processes gunicorn should start
GUNICORN_GRACEFUL_TIMEOUT: timeout in seconds for graceful worker restart
GUNICORN_SILENT_TIMEOUT: timeout in seconds for restarting silent workers
GUNICORN_USER: unix user to downgrade privileges to
GUNICORN_GROUP: unix group to downgrade privileges to
GUNICORN_ACCESS_LOG_FILE: File to log access to
GUNICORN_ERROR_LOG_FILE: File to log errors to
GUNICORN_OPTS: extra options to pass to gunicorn
"""
def run_gunicorn():
"""Exec gunicorn with our wsgi app.
Settings are taken from environment variables as listed in the help text.
This is intended to be called as a console_script entry point.
"""
# this only exists to provide help/usage text
parser = argparse.ArgumentParser(
description=DESCRIPTION,
formatter_class=argparse.RawTextHelpFormatter)
parser.parse_args()
gunicorn_path = distutils.spawn.find_executable('gunicorn')
if not gunicorn_path:
print('error: gunicorn executable not found', file=sys.stderr)
sys.exit(1)
address = '%s:%s' % (
env.source('REGISTRY_HOST'),
env.source('REGISTRY_PORT')
)
args = [
gunicorn_path, 'gunicorn',
'--access-logfile', env.source('GUNICORN_ACCESS_LOG_FILE'),
'--error-logfile', env.source('GUNICORN_ERROR_LOG_FILE'),
'--max-requests', '100',
'-k', 'gevent',
'--graceful-timeout', env.source('GUNICORN_GRACEFUL_TIMEOUT'),
'-t', env.source('GUNICORN_SILENT_TIMEOUT'),
'-w', env.source('GUNICORN_WORKERS'),
'-b', address,
]
if env.source('SETTINGS_FLAVOR') != 'prod':
args.append('--reload')
user = env.source('GUNICORN_USER')
group = env.source('GUNICORN_GROUP')
if user or group:
if getpass.getuser() == 'root':
if user:
logger.info('Downgrading privs to user %s' % user)
args.append('-u')
args.append(user)
if group:
                logger.info('Downgrading privs to group %s' % group)
args.append('-g')
args.append(group)
else:
            logger.warn('You asked us to drop privileges, but we are not root!')
args += env.source('GUNICORN_OPTS')
args.append('docker_registry.wsgi:application')
# Stringify all args and call
os.execl(*[str(v) for v in args])
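# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of docker-registry): roughly the command
# line that run_gunicorn() assembles from the environment variables listed in
# DESCRIPTION. env.source() is replaced here by a plain dict lookup, and the
# default values shown are assumptions chosen for the example.
def _example_command(environ):
    address = '%s:%s' % (
        environ.get('REGISTRY_HOST', '0.0.0.0'),
        environ.get('REGISTRY_PORT', '5000'),
    )
    return [
        'gunicorn',
        '--access-logfile', environ.get('GUNICORN_ACCESS_LOG_FILE', '-'),
        '--error-logfile', environ.get('GUNICORN_ERROR_LOG_FILE', '-'),
        '--max-requests', '100',
        '-k', 'gevent',
        '--graceful-timeout', environ.get('GUNICORN_GRACEFUL_TIMEOUT', '10'),
        '-t', environ.get('GUNICORN_SILENT_TIMEOUT', '60'),
        '-w', environ.get('GUNICORN_WORKERS', '4'),
        '-b', address,
        'docker_registry.wsgi:application',
    ]
# Example: _example_command({'REGISTRY_PORT': '5001', 'GUNICORN_WORKERS': '8'})
# ends with ['-w', '8', '-b', '0.0.0.0:5001', 'docker_registry.wsgi:application'].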
|
import unittest
from pylatex import Document
class TestInheritance(unittest.TestCase):
def test_latex_name(self):
class MyDoc(Document):
def __init__(self):
super().__init__()
doc = Document()
my_doc = MyDoc()
self.assertEqual(my_doc.latex_name, doc.latex_name)
|
import json
import os
from uuid import uuid4
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from weblate.addons.base import BaseAddon
from weblate.addons.events import EVENT_DAILY, EVENT_POST_COMMIT, EVENT_POST_UPDATE
from weblate.addons.forms import CDNJSForm
from weblate.addons.tasks import cdn_parse_html
from weblate.utils.state import STATE_TRANSLATED
class CDNJSAddon(BaseAddon):
events = (EVENT_DAILY, EVENT_POST_COMMIT, EVENT_POST_UPDATE)
name = "weblate.cdn.cdnjs"
verbose = _("JavaScript localization CDN")
description = _("Adds localization CDN for JavaScript or HTML localization.")
settings_form = CDNJSForm
icon = "cloud-upload.svg"
stay_on_create = True
@classmethod
def create_object(cls, component, **kwargs):
# Generate UUID for the CDN
if "state" not in kwargs:
kwargs["state"] = {"uuid": uuid4().hex}
return super().create_object(component, **kwargs)
@classmethod
def can_install(cls, component, user):
if (
not settings.LOCALIZE_CDN_URL
or not settings.LOCALIZE_CDN_PATH
or not component.has_template()
or not component.translation_set.exists()
):
return False
return super().can_install(component, user)
def cdn_path(self, filename):
return os.path.join(
settings.LOCALIZE_CDN_PATH, self.instance.state["uuid"], filename
)
@cached_property
def cdn_js_url(self):
return os.path.join(
settings.LOCALIZE_CDN_URL, self.instance.state["uuid"], "weblate.js"
)
def post_commit(self, component):
# Get list of applicable translations
threshold = self.instance.configuration["threshold"]
translations = [
translation
for translation in component.translation_set.all()
if (not translation.is_source or component.intermediate)
and translation.stats.translated > threshold
]
# Create output directory
dirname = self.cdn_path("")
if not os.path.exists(dirname):
os.makedirs(dirname)
        # Generate JavaScript loader
with open(self.cdn_path("weblate.js"), "w") as handle:
# The languages variable is included inside quotes to make
# sure the template is valid JavaScript code as well
handle.write(
render_to_string(
"addons/js/weblate.js",
{
"languages": '", "'.join(
sorted(
translation.language.code
for translation in translations
)
),
"url": os.path.join(
settings.LOCALIZE_CDN_URL, self.instance.state["uuid"]
),
"cookie_name": self.instance.configuration["cookie_name"],
"css_selector": self.instance.configuration["css_selector"],
},
)
)
# Generate bilingual JSON files
for translation in translations:
with open(
self.cdn_path(f"{translation.language.code}.json"), "w"
) as handle:
json.dump(
{
unit.source: unit.target
for unit in translation.unit_set.filter(
state__gte=STATE_TRANSLATED
)
},
handle,
)
def daily(self, component):
if not self.instance.configuration["files"].strip():
return
# Trigger parsing files
cdn_parse_html.delay(
self.instance.configuration["files"],
self.instance.configuration["css_selector"],
component.id,
)
def post_update(self, component, previous_head: str, skip_push: bool):
self.daily(component)
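# Illustrative sketch (not used by the addon; the path, URL and UUID below are
# assumptions): the layout produced by cdn_path()/cdn_js_url and the
# per-language JSON files written by post_commit(). Everything lives under a
# per-addon UUID directory, so several components can share one CDN root.
_EXAMPLE_CDN_LAYOUT = {
    "loader_file": os.path.join("/srv/cdn", "0123abcd", "weblate.js"),
    "loader_url": os.path.join("https://cdn.example.com", "0123abcd", "weblate.js"),
    "translation_file": os.path.join("/srv/cdn", "0123abcd", "cs.json"),
}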
|
import logging
import socket
from pyblackbird import get_blackbird
from serial import SerialException
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
import homeassistant.helpers.config_validation as cv
from .const import DOMAIN, SERVICE_SETALLZONES
_LOGGER = logging.getLogger(__name__)
SUPPORT_BLACKBIRD = SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE
MEDIA_PLAYER_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.comp_entity_ids})
ZONE_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
SOURCE_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
CONF_ZONES = "zones"
CONF_SOURCES = "sources"
DATA_BLACKBIRD = "blackbird"
ATTR_SOURCE = "source"
BLACKBIRD_SETALLZONES_SCHEMA = MEDIA_PLAYER_SCHEMA.extend(
{vol.Required(ATTR_SOURCE): cv.string}
)
# Valid zone ids: 1-8
ZONE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))
# Valid source ids: 1-8
SOURCE_IDS = vol.All(vol.Coerce(int), vol.Range(min=1, max=8))
PLATFORM_SCHEMA = vol.All(
cv.has_at_least_one_key(CONF_PORT, CONF_HOST),
PLATFORM_SCHEMA.extend(
{
vol.Exclusive(CONF_PORT, CONF_TYPE): cv.string,
vol.Exclusive(CONF_HOST, CONF_TYPE): cv.string,
vol.Required(CONF_ZONES): vol.Schema({ZONE_IDS: ZONE_SCHEMA}),
vol.Required(CONF_SOURCES): vol.Schema({SOURCE_IDS: SOURCE_SCHEMA}),
}
),
)
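# Illustrative sketch (assumption, not a tested configuration): the shape of
# data accepted by PLATFORM_SCHEMA above - exactly one of CONF_PORT/CONF_HOST
# plus numbered zones and sources, where both kinds of ids must be in 1-8.
_EXAMPLE_BLACKBIRD_CONFIG = {
    CONF_PORT: "/dev/ttyUSB0",
    CONF_ZONES: {1: {CONF_NAME: "Living room"}, 2: {CONF_NAME: "Kitchen"}},
    CONF_SOURCES: {1: {CONF_NAME: "Chromecast"}, 3: {CONF_NAME: "Blu-ray"}},
}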
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Monoprice Blackbird 4k 8x8 HDBaseT Matrix platform."""
if DATA_BLACKBIRD not in hass.data:
hass.data[DATA_BLACKBIRD] = {}
port = config.get(CONF_PORT)
host = config.get(CONF_HOST)
connection = None
if port is not None:
try:
blackbird = get_blackbird(port)
connection = port
except SerialException:
_LOGGER.error("Error connecting to the Blackbird controller")
return
if host is not None:
try:
blackbird = get_blackbird(host, False)
connection = host
except socket.timeout:
_LOGGER.error("Error connecting to the Blackbird controller")
return
sources = {
source_id: extra[CONF_NAME] for source_id, extra in config[CONF_SOURCES].items()
}
devices = []
for zone_id, extra in config[CONF_ZONES].items():
_LOGGER.info("Adding zone %d - %s", zone_id, extra[CONF_NAME])
unique_id = f"{connection}-{zone_id}"
device = BlackbirdZone(blackbird, sources, zone_id, extra[CONF_NAME])
hass.data[DATA_BLACKBIRD][unique_id] = device
devices.append(device)
add_entities(devices, True)
def service_handle(service):
"""Handle for services."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
source = service.data.get(ATTR_SOURCE)
if entity_ids:
devices = [
device
for device in hass.data[DATA_BLACKBIRD].values()
if device.entity_id in entity_ids
]
else:
devices = hass.data[DATA_BLACKBIRD].values()
for device in devices:
if service.service == SERVICE_SETALLZONES:
device.set_all_zones(source)
hass.services.register(
DOMAIN, SERVICE_SETALLZONES, service_handle, schema=BLACKBIRD_SETALLZONES_SCHEMA
)
class BlackbirdZone(MediaPlayerEntity):
"""Representation of a Blackbird matrix zone."""
def __init__(self, blackbird, sources, zone_id, zone_name):
"""Initialize new zone."""
self._blackbird = blackbird
# dict source_id -> source name
self._source_id_name = sources
# dict source name -> source_id
self._source_name_id = {v: k for k, v in sources.items()}
# ordered list of all source names
self._source_names = sorted(
self._source_name_id.keys(), key=lambda v: self._source_name_id[v]
)
self._zone_id = zone_id
self._name = zone_name
self._state = None
self._source = None
def update(self):
"""Retrieve latest state."""
state = self._blackbird.zone_status(self._zone_id)
if not state:
return
self._state = STATE_ON if state.power else STATE_OFF
idx = state.av
if idx in self._source_id_name:
self._source = self._source_id_name[idx]
else:
self._source = None
@property
def name(self):
"""Return the name of the zone."""
return self._name
@property
def state(self):
"""Return the state of the zone."""
return self._state
@property
def supported_features(self):
"""Return flag of media commands that are supported."""
return SUPPORT_BLACKBIRD
@property
def media_title(self):
"""Return the current source as media title."""
return self._source
@property
def source(self):
"""Return the current input source of the device."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._source_names
def set_all_zones(self, source):
"""Set all zones to one source."""
if source not in self._source_name_id:
return
idx = self._source_name_id[source]
_LOGGER.debug("Setting all zones source to %s", idx)
self._blackbird.set_all_zone_source(idx)
def select_source(self, source):
"""Set input source."""
if source not in self._source_name_id:
return
idx = self._source_name_id[source]
_LOGGER.debug("Setting zone %d source to %s", self._zone_id, idx)
self._blackbird.set_zone_source(self._zone_id, idx)
def turn_on(self):
"""Turn the media player on."""
_LOGGER.debug("Turning zone %d on", self._zone_id)
self._blackbird.set_zone_power(self._zone_id, True)
def turn_off(self):
"""Turn the media player off."""
_LOGGER.debug("Turning zone %d off", self._zone_id)
self._blackbird.set_zone_power(self._zone_id, False)
|
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ADS_VAR,
CONF_ADS_VAR_BRIGHTNESS,
DATA_ADS,
STATE_KEY_BRIGHTNESS,
STATE_KEY_STATE,
AdsEntity,
)
DEFAULT_NAME = "ADS Light"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_ADS_VAR_BRIGHTNESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
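# Illustrative sketch (assumption, not a tested configuration): the shape of
# data accepted by PLATFORM_SCHEMA above - a boolean enable variable plus an
# optional UINT brightness variable on the ADS device; the variable names are
# made up for the example.
_EXAMPLE_ADS_LIGHT_CONFIG = {
    CONF_ADS_VAR: "GVL.enable_light",
    CONF_ADS_VAR_BRIGHTNESS: "GVL.light_brightness",
    CONF_NAME: "Kitchen light",
}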
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the light platform for ADS."""
ads_hub = hass.data.get(DATA_ADS)
ads_var_enable = config[CONF_ADS_VAR]
ads_var_brightness = config.get(CONF_ADS_VAR_BRIGHTNESS)
name = config[CONF_NAME]
add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness, name)])
class AdsLight(AdsEntity, LightEntity):
"""Representation of ADS light."""
def __init__(self, ads_hub, ads_var_enable, ads_var_brightness, name):
"""Initialize AdsLight entity."""
super().__init__(ads_hub, name, ads_var_enable)
self._state_dict[STATE_KEY_BRIGHTNESS] = None
self._ads_var_brightness = ads_var_brightness
async def async_added_to_hass(self):
"""Register device notification."""
await self.async_initialize_device(self._ads_var, self._ads_hub.PLCTYPE_BOOL)
if self._ads_var_brightness is not None:
await self.async_initialize_device(
self._ads_var_brightness,
self._ads_hub.PLCTYPE_UINT,
STATE_KEY_BRIGHTNESS,
)
@property
def brightness(self):
"""Return the brightness of the light (0..255)."""
return self._state_dict[STATE_KEY_BRIGHTNESS]
@property
def supported_features(self):
"""Flag supported features."""
support = 0
if self._ads_var_brightness is not None:
support = SUPPORT_BRIGHTNESS
return support
@property
def is_on(self):
"""Return True if the entity is on."""
return self._state_dict[STATE_KEY_STATE]
def turn_on(self, **kwargs):
"""Turn the light on or set a specific dimmer value."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
self._ads_hub.write_by_name(self._ads_var, True, self._ads_hub.PLCTYPE_BOOL)
if self._ads_var_brightness is not None and brightness is not None:
self._ads_hub.write_by_name(
self._ads_var_brightness, brightness, self._ads_hub.PLCTYPE_UINT
)
def turn_off(self, **kwargs):
"""Turn the light off."""
self._ads_hub.write_by_name(self._ads_var, False, self._ads_hub.PLCTYPE_BOOL)
|
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "v"
cfg.parentdir_prefix = "PyLaTeX-"
cfg.versionfile_source = "pylatex/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
|
import random
import time
from datetime import datetime, timedelta
from multiprocessing import Process, Semaphore
from pandas.core.frame import DataFrame
from arctic.arctic import Arctic
from arctic.exceptions import OptimisticLockException
class Appender(object):
def __init__(self, mongo_server, library_name, sem, counter_init, runtime=30):
super(Appender, self).__init__()
self.lib = Arctic(mongo_server)[library_name]
self.sem = sem
self.begin = counter_init
self.last = counter_init
self.timeout = datetime.now() + timedelta(seconds=runtime)
def run(self):
self.sem.acquire()
while datetime.now() < self.timeout:
try:
                # Random-length dataframe to keep appending to
df = DataFrame({'v': [self.last]}, [datetime.now()])
for i in range(random.randint(1, 10)):
df = df.append(DataFrame({'v': [self.last + i]}, [datetime.now()]))
self.last + i
df.index.name = 'index'
self.lib.append('symbol', df)
assert self.last in self.lib.read('symbol').data['v'].tolist()
self.last += 2
except OptimisticLockException:
# Concurrent write, not successful
pass
# time.sleep(self.begin)
def check_written_data_exists(self):
values = self.lib.read('symbol').data['v'].tolist()
assert len(set(values)) == len(values), "Written: %s" % values
i = self.begin
while i < self.last:
assert i in values, "Missing %s in %s" % (i, values)
i += 2
def test_append_kill(library, mongo_host, library_name):
# Empty DF to start
df = DataFrame({'v': []}, [])
df.index.name = 'index'
library.write('symbol', df)
sem = Semaphore(0)
def run_append(end):
app_1 = Appender(mongo_host, library_name, sem, 0, end)
proc = Process(target=app_1.run)
proc.start()
sem.release()
return proc
def check_written():
sym = library.read('symbol')
print("Checking written %d" % len(sym.data))
# time how long it takes to do an append operation
start = datetime.now()
proc = run_append(1)
proc.join()
check_written()
time_taken = (datetime.now() - start).total_seconds()
for i in range(100):
print("Loop %d" % i)
proc = run_append(100)
# kill it randomly
time.sleep(2 * (random.random() * time_taken))
# Forcibly kill it
proc.terminate()
# Check we can read the data
check_written()
|
import pytest
from decimal import Decimal, getcontext
import math
try:
import cPickle as pickle
except ImportError:
import pickle
import json
from rest_framework import serializers
from shop.money.money_maker import AbstractMoney, MoneyMaker, _make_money
from shop.rest.money import MoneyField, JSONRenderer
EUR = MoneyMaker('EUR')
def test_is_abstract():
with pytest.raises(TypeError):
AbstractMoney(1)
def test_create_money_type_without_arguments():
Money = MoneyMaker()
amount = Money()
assert amount.is_nan() is True
def test_create_money_type_with_unknown_currency():
with pytest.raises(TypeError):
MoneyMaker(currency_code="ABC")
def test_create_money_type_without_decimal_places():
Money = MoneyMaker(currency_code='JPY')
assert Money._cents == 0
def test_create_instance_with_value_as_none():
Money = MoneyMaker()
money = Money(value=None)
assert money.is_nan() is True
def test_create_instance_with_invalid_value():
Money = MoneyMaker()
with pytest.raises(ValueError):
Money(value="invalid")
def test_wrong_currency_raises_assertion_error():
# If we try to call a money class with a value that has a
# different currency than the class, and the value is an
# instance of the money class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
with pytest.raises(AssertionError):
Money(value)
def test_create_instance_from_decimal():
value = Decimal('1.2')
assert issubclass(EUR, Decimal)
assert isinstance(EUR(value), Decimal)
def test_str_with_too_much_precision():
value = EUR(1)
prec = getcontext().prec
value._cents = Decimal("0." + ("0" * prec))
with pytest.raises(ValueError):
str(value)
def test_thousand_separator(settings):
value = EUR()
assert str(value) == "€ –"
value = EUR('999999.99')
settings.USE_THOUSAND_SEPARATOR = False
assert str(value) == "€ 999999.99"
settings.USE_THOUSAND_SEPARATOR = True
assert str(value) == "€ 999,999.99"
settings.LANGUAGE_CODE = 'de'
assert str(value) == "€ 999.999,99"
settings.USE_THOUSAND_SEPARATOR = False
assert str(value) == "€ 999999,99"
def test_check_rounding():
value = EUR('999999.995')
assert str(value) == "€ 1000000.00"
value = EUR('-111111.114')
assert str(value) == "-€ 111111.11"
def test_check_formatting_currency():
value = -EUR('111111.11')
    value.MONEY_FORMAT = '{minus}{amount} {code}'
assert str(value) == "-111111.11 EUR"
def test_reduce():
value = EUR()
func, args = value.__reduce__()
assert func is _make_money
assert args == ("EUR", "NaN")
Money = func(*args)
assert Money.is_nan() is True
def test_format():
JPY = MoneyMaker('JPY')
assert format(EUR()) == "€ –"
assert format(JPY()) == "¥ –"
assert format(EUR(1.1)) == "€ 1.10"
assert format(JPY(1)) == "¥ 1"
def test_float_format():
d = Decimal(1.99)
e = EUR(d)
assert '{}'.format(e) == "€ 1.99"
assert '{:}'.format(e) == "€ 1.99"
assert '{:f}'.format(e) == "€ 1.99"
assert '{:.5}'.format(e) == "€ 1.9900"
assert '{:.5f}'.format(e) == "€ 1.99000"
assert '{:.20}'.format(e) == "€ {:.20}".format(d)
assert '{:.20f}'.format(e) == "€ {:.20f}".format(d)
def test_add():
Money = MoneyMaker()
assert Money('1.23') + Money(2) == Money('3.23')
assert Money('1.23') + Money(0) == Money('1.23')
assert Money(1) + (Money(-1)) == Money(0)
assert Money(1).__radd__(Money(2)) == Money(3)
def test_add_zero():
Money = MoneyMaker()
assert Money('1.23') + 0 == Money('1.23')
assert Money('1.23') + 0.0 == Money('1.23')
assert Money('1.23') + Money('NaN') == Money('1.23')
assert 0 + Money('1.23') == Money('1.23')
assert 0.0 + Money('1.23') == Money('1.23')
assert Money('NaN') + Money('1.23') == Money('1.23')
with pytest.raises(ValueError):
Money(1) + 1
with pytest.raises(ValueError):
Money(1) + 1.0
with pytest.raises(ValueError):
1 + Money(1)
with pytest.raises(ValueError):
1.0 + Money(1)
def test_sub():
Money = MoneyMaker()
assert Money(1) - Money(2) == Money(-1)
with pytest.raises(ValueError):
Money(1).__rsub__(Money(2))
def test_neg():
Money = MoneyMaker()
assert -Money(1) == Money(-1)
assert -Money(-1) == Money(1)
assert -Money(0) == Money(0)
def test_mul():
Money = MoneyMaker()
assert Money(1) * 1 == Money(1)
assert Money(1) * 0 == Money(0)
assert Money(1) * -1 == Money(-1)
assert Money(1) * 1 == Money(1)
assert Money(1) * 0 == Money(0)
assert Money(1).__rmul__(-1) == Money(-1)
assert Money(1).__rmul__(1.0) == Money(1)
assert format(Money(1) * None) == "€ –"
with pytest.raises(ValueError):
Money(1) * (Money(1))
def test_div():
Money = MoneyMaker()
    assert Money(1) / 2 == Money(0.5)
    assert Money(1) / 2.0 == Money(0.5)
with pytest.raises(ValueError):
Money(1) / Money(2)
with pytest.raises(ValueError):
Money(1).__rdiv__(2)
def test_truediv():
Money = MoneyMaker()
assert Money(1).__truediv__(2) == Money(0.5)
assert Money(1).__truediv__(2.0) == Money(0.5)
with pytest.raises(ValueError):
Money(1).__truediv__(Money(2))
with pytest.raises(ValueError):
Money(1).__rtruediv__(2)
def test_pow():
Money = MoneyMaker()
with pytest.raises(ValueError):
Money(1) ** Money(2)
def test_float():
Money = MoneyMaker()
money = Money(Decimal('sNaN'))
with pytest.raises(ValueError):
float(money)
money = Money(Decimal('NaN'))
assert math.isnan(float(money)) is True
money = Money(Decimal('-NaN'))
assert math.isnan(float(money)) is True
money = Money(Decimal('1.0'))
assert float(money) == 1.0
def test_currency():
assert EUR.currency == 'EUR'
assert EUR.subunits == 100
JPY = MoneyMaker('JPY')
assert JPY.currency == 'JPY'
assert JPY.subunits == 1
def test_as_decimal():
amount = EUR('1.23')
quantized_decimal = Decimal('1.23').quantize(Decimal('.01'))
assert amount.as_decimal() == quantized_decimal
def test_as_integer():
assert EUR('1.23').as_integer() == 123
def test_as_bool():
amount = EUR('1.23')
assert bool(amount) is True
amount = EUR(0)
assert bool(amount) is False
amount = EUR()
assert bool(amount) is False
def test_pickle():
amount = EUR('1.23')
pickled = pickle.dumps(amount)
assert pickle.loads(pickled) == amount
class MoneyTestSerializer(serializers.Serializer):
amount = MoneyField(read_only=True)
def test_money_serializer():
instance = type(str('TestMoney'), (object,), {'amount': EUR('1.23')})
serializer = MoneyTestSerializer(instance)
assert {'amount': '€ 1.23'} == serializer.data
def test_json_renderer():
renderer = JSONRenderer()
data = {'amount': EUR('1.23')}
rendered_json = renderer.render(data, 'application/json')
assert {'amount': "€ 1.23"} == json.loads(rendered_json.decode('utf-8'))
|
import json
from homeassistant import config_entries, setup
from homeassistant.components.metoffice.const import DOMAIN
from .const import (
METOFFICE_CONFIG_WAVERTREE,
TEST_API_KEY,
TEST_LATITUDE_WAVERTREE,
TEST_LONGITUDE_WAVERTREE,
TEST_SITE_NAME_WAVERTREE,
)
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
async def test_form(hass, requests_mock):
"""Test we get the form."""
hass.config.latitude = TEST_LATITUDE_WAVERTREE
hass.config.longitude = TEST_LONGITUDE_WAVERTREE
    # all metoffice test data is encapsulated here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.metoffice.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.metoffice.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {"api_key": TEST_API_KEY}
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == TEST_SITE_NAME_WAVERTREE
assert result2["data"] == {
"api_key": TEST_API_KEY,
"latitude": TEST_LATITUDE_WAVERTREE,
"longitude": TEST_LONGITUDE_WAVERTREE,
"name": TEST_SITE_NAME_WAVERTREE,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_already_configured(hass, requests_mock):
"""Test we handle duplicate entries."""
hass.config.latitude = TEST_LATITUDE_WAVERTREE
hass.config.longitude = TEST_LONGITUDE_WAVERTREE
    # all metoffice test data is encapsulated here
mock_json = json.loads(load_fixture("metoffice.json"))
all_sites = json.dumps(mock_json["all_sites"])
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
requests_mock.get(
"/public/data/val/wxfcs/all/json/354107?res=3hourly",
text="",
)
MockConfigEntry(
domain=DOMAIN,
unique_id=f"{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}",
data=METOFFICE_CONFIG_WAVERTREE,
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_USER},
data=METOFFICE_CONFIG_WAVERTREE,
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_form_cannot_connect(hass, requests_mock):
"""Test we handle cannot connect error."""
hass.config.latitude = TEST_LATITUDE_WAVERTREE
hass.config.longitude = TEST_LONGITUDE_WAVERTREE
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"api_key": TEST_API_KEY},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_error(hass, mock_simple_manager_fail):
"""Test we handle unknown error."""
mock_instance = mock_simple_manager_fail.return_value
mock_instance.get_nearest_forecast_site.side_effect = ValueError
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{"api_key": TEST_API_KEY},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "unknown"}
|
from numpy.random import randn
import numpy as np
import matplotlib.pyplot as plt
from filterpy.kalman import EnsembleKalmanFilter as EnKF
from filterpy.common import Q_discrete_white_noise, Saver
from math import cos, sin
DO_PLOT = False
def test_1d_const_vel():
def hx(x):
return np.array([x[0]])
F = np.array([[1., 1.],[0., 1.]])
def fx(x, dt):
return np.dot(F, x)
x = np.array([0., 1.])
P = np.eye(2)* 100.
f = EnKF(x=x, P=P, dim_z=1, dt=1., N=8, hx=hx, fx=fx)
std_noise = 10.
f.R *= std_noise**2
f.Q = Q_discrete_white_noise(2, 1., .001)
measurements = []
results = []
ps = []
zs = []
s = Saver(f)
for t in range (0,100):
# create measurement = t plus white noise
z = t + randn()*std_noise
zs.append(z)
f.predict()
f.update(np.asarray([z]))
# save data
results.append (f.x[0])
measurements.append(z)
ps.append(3*(f.P[0,0]**.5))
s.save()
s.to_array()
results = np.asarray(results)
ps = np.asarray(ps)
if DO_PLOT:
plt.plot(results, label='EnKF')
plt.plot(measurements, c='r', label='z')
        plt.plot(results - ps, c='k', linestyle='--', label=r'3$\sigma$')
plt.plot(results+ps, c='k', linestyle='--')
plt.legend(loc='best')
#print(ps)
return f
def test_circle():
def hx(x):
return np.array([x[0], x[3]])
F = np.array([[1., 1., .5, 0., 0., 0.],
[0., 1., 1., 0., 0., 0.],
[0., 0., 1., 0., 0., 0.],
[0., 0., 0., 1., 1., .5],
[0., 0., 0., 0., 1., 1.],
[0., 0., 0., 0., 0., 1.]])
def fx(x, dt):
return np.dot(F, x)
x = np.array([50., 0., 0, 0, .0, 0.])
P = np.eye(6)* 100.
f = EnKF(x=x, P=P, dim_z=2, dt=1., N=30, hx=hx, fx=fx)
std_noise = .1
f.R *= std_noise**2
f.Q[0:3, 0:3] = Q_discrete_white_noise(3, 1., .001)
f.Q[3:6, 3:6] = Q_discrete_white_noise(3, 1., .001)
measurements = []
results = []
zs = []
for t in range (0,300):
a = t / 300000
x = cos(a) * 50.
y = sin(a) * 50.
        # create measurement: a point on the circle (no noise added here)
z = np.array([x,y])
zs.append(z)
f.predict()
f.update(z)
# save data
results.append (f.x)
measurements.append(z)
#test that __repr__ doesn't assert
str(f)
results = np.asarray(results)
measurements = np.asarray(measurements)
if DO_PLOT:
plt.plot(results[:,0], results[:,2], label='EnKF')
plt.plot(measurements[:,0], measurements[:,1], c='r', label='z')
#plt.plot (results-ps, c='k',linestyle='--', label='3$\sigma$')
#plt.plot(results+ps, c='k', linestyle='--')
plt.legend(loc='best')
plt.axis('equal')
#print(ps)
if __name__ == '__main__':
DO_PLOT = True
test_circle ()
test_1d_const_vel()
#test_noisy_1d()
|
try:
import psycopg2
import psycopg2.extras
except ImportError:
psycopg2 = None
import diamond.collector
class PgQCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(PgQCollector, self).get_default_config_help()
config_help.update({
"instances": "The databases to be monitored. Each should have a "
"`dsn` attribute, which must be a valid libpq "
"connection string."
})
return config_help
def get_default_config(self):
config = super(PgQCollector, self).get_default_config()
config.update({
'instances': {},
})
return config
def collect(self):
if psycopg2 is None:
self.log.error('Unable to import module psycopg2')
return None
for instance, configuration in self.config['instances'].iteritems():
connection = psycopg2.connect(configuration['dsn'])
connection.set_isolation_level(
psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT,
)
self._collect_for_instance(instance, connection)
def _collect_for_instance(self, instance, connection):
"""Collects metrics for a named connection."""
with connection.cursor() as cursor:
for queue, metrics in self.get_queue_info(instance, cursor):
for name, metric in metrics.items():
self.publish('.'.join((instance, queue, name)), metric)
with connection.cursor() as cursor:
consumers = self.get_consumer_info(instance, cursor)
for queue, consumer, metrics in consumers:
for name, metric in metrics.items():
key_parts = (instance, queue, 'consumers', consumer, name)
self.publish('.'.join(key_parts), metric)
QUEUE_INFO_STATEMENT = """
SELECT
queue_name,
EXTRACT(epoch from ticker_lag),
ev_per_sec
FROM pgq.get_queue_info()
"""
def get_queue_info(self, instance, cursor):
"""Collects metrics for all queues on the connected database."""
cursor.execute(self.QUEUE_INFO_STATEMENT)
for queue_name, ticker_lag, ev_per_sec in cursor:
yield queue_name, {
'ticker_lag': ticker_lag,
'ev_per_sec': ev_per_sec,
}
CONSUMER_INFO_STATEMENT = """
SELECT
queue_name,
consumer_name,
EXTRACT(epoch from lag),
pending_events,
EXTRACT(epoch from last_seen)
FROM pgq.get_consumer_info()
"""
def get_consumer_info(self, instance, cursor):
"""Collects metrics for all consumers on the connected database."""
cursor.execute(self.CONSUMER_INFO_STATEMENT)
for queue_name, consumer_name, lag, pending_events, last_seen in cursor:
yield queue_name, consumer_name, {
'lag': lag,
'pending_events': pending_events,
'last_seen': last_seen,
}
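# Illustrative sketch (assumption, not used by the collector): the shape of
# the 'instances' configuration described in get_default_config_help() and
# the metric keys that _collect_for_instance() publishes for it.
_EXAMPLE_INSTANCES_CONFIG = {
    'main': {'dsn': 'host=localhost dbname=queues user=pgq'},
}
# For instance 'main', queue 'events' and consumer 'worker_1' the published
# keys look like the following:
_EXAMPLE_QUEUE_METRIC = '.'.join(('main', 'events', 'ticker_lag'))
_EXAMPLE_CONSUMER_METRIC = '.'.join(('main', 'events', 'consumers', 'worker_1', 'lag'))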
|
from datetime import timedelta
from functools import partial
import ipaddress
import logging
from aiokef import AsyncKefSpeaker
from aiokef.aiokef import DSP_OPTION_MAPPING
from getmac import get_mac_address
import voluptuous as vol
from homeassistant.components.media_player import (
PLATFORM_SCHEMA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
MediaPlayerEntity,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.event import async_track_time_interval
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "KEF"
DEFAULT_PORT = 50001
DEFAULT_MAX_VOLUME = 0.5
DEFAULT_VOLUME_STEP = 0.05
DEFAULT_INVERSE_SPEAKER_MODE = False
DEFAULT_SUPPORTS_ON = True
DOMAIN = "kef"
SCAN_INTERVAL = timedelta(seconds=30)
SOURCES = {"LSX": ["Wifi", "Bluetooth", "Aux", "Opt"]}
SOURCES["LS50"] = SOURCES["LSX"] + ["Usb"]
CONF_MAX_VOLUME = "maximum_volume"
CONF_VOLUME_STEP = "volume_step"
CONF_INVERSE_SPEAKER_MODE = "inverse_speaker_mode"
CONF_SUPPORTS_ON = "supports_on"
CONF_STANDBY_TIME = "standby_time"
SERVICE_MODE = "set_mode"
SERVICE_DESK_DB = "set_desk_db"
SERVICE_WALL_DB = "set_wall_db"
SERVICE_TREBLE_DB = "set_treble_db"
SERVICE_HIGH_HZ = "set_high_hz"
SERVICE_LOW_HZ = "set_low_hz"
SERVICE_SUB_DB = "set_sub_db"
SERVICE_UPDATE_DSP = "update_dsp"
DSP_SCAN_INTERVAL = timedelta(seconds=3600)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TYPE): vol.In(["LS50", "LSX"]),
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MAX_VOLUME, default=DEFAULT_MAX_VOLUME): cv.small_float,
vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): cv.small_float,
vol.Optional(
CONF_INVERSE_SPEAKER_MODE, default=DEFAULT_INVERSE_SPEAKER_MODE
): cv.boolean,
vol.Optional(CONF_SUPPORTS_ON, default=DEFAULT_SUPPORTS_ON): cv.boolean,
vol.Optional(CONF_STANDBY_TIME): vol.In([20, 60]),
}
)
def get_ip_mode(host):
"""Get the 'mode' used to retrieve the MAC address."""
try:
if ipaddress.ip_address(host).version == 6:
return "ip6"
return "ip"
except ValueError:
return "hostname"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the KEF platform."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
host = config[CONF_HOST]
speaker_type = config[CONF_TYPE]
port = config[CONF_PORT]
name = config[CONF_NAME]
maximum_volume = config[CONF_MAX_VOLUME]
volume_step = config[CONF_VOLUME_STEP]
inverse_speaker_mode = config[CONF_INVERSE_SPEAKER_MODE]
supports_on = config[CONF_SUPPORTS_ON]
standby_time = config.get(CONF_STANDBY_TIME)
sources = SOURCES[speaker_type]
_LOGGER.debug(
"Setting up %s with host: %s, port: %s, name: %s, sources: %s",
DOMAIN,
host,
port,
name,
sources,
)
mode = get_ip_mode(host)
mac = await hass.async_add_executor_job(partial(get_mac_address, **{mode: host}))
unique_id = f"kef-{mac}" if mac is not None else None
media_player = KefMediaPlayer(
name,
host,
port,
maximum_volume,
volume_step,
standby_time,
inverse_speaker_mode,
supports_on,
sources,
speaker_type,
loop=hass.loop,
unique_id=unique_id,
)
if host in hass.data[DOMAIN]:
_LOGGER.debug("%s is already configured", host)
else:
hass.data[DOMAIN][host] = media_player
async_add_entities([media_player], update_before_add=True)
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_MODE,
{
vol.Optional("desk_mode"): cv.boolean,
vol.Optional("wall_mode"): cv.boolean,
vol.Optional("phase_correction"): cv.boolean,
vol.Optional("high_pass"): cv.boolean,
vol.Optional("sub_polarity"): vol.In(["-", "+"]),
vol.Optional("bass_extension"): vol.In(["Less", "Standard", "Extra"]),
},
"set_mode",
)
platform.async_register_entity_service(SERVICE_UPDATE_DSP, {}, "update_dsp")
def add_service(name, which, option):
options = DSP_OPTION_MAPPING[which]
dtype = type(options[0]) # int or float
platform.async_register_entity_service(
name,
{
vol.Required(option): vol.All(
vol.Coerce(float), vol.Coerce(dtype), vol.In(options)
)
},
f"set_{which}",
)
add_service(SERVICE_DESK_DB, "desk_db", "db_value")
add_service(SERVICE_WALL_DB, "wall_db", "db_value")
add_service(SERVICE_TREBLE_DB, "treble_db", "db_value")
add_service(SERVICE_HIGH_HZ, "high_hz", "hz_value")
add_service(SERVICE_LOW_HZ, "low_hz", "hz_value")
add_service(SERVICE_SUB_DB, "sub_db", "db_value")
class KefMediaPlayer(MediaPlayerEntity):
"""Kef Player Object."""
def __init__(
self,
name,
host,
port,
maximum_volume,
volume_step,
standby_time,
inverse_speaker_mode,
supports_on,
sources,
speaker_type,
loop,
unique_id,
):
"""Initialize the media player."""
self._name = name
self._sources = sources
self._speaker = AsyncKefSpeaker(
host,
port,
volume_step,
maximum_volume,
standby_time,
inverse_speaker_mode,
loop=loop,
)
self._unique_id = unique_id
self._supports_on = supports_on
self._speaker_type = speaker_type
self._state = None
self._muted = None
self._source = None
self._volume = None
self._is_online = None
self._dsp = None
self._update_dsp_task_remover = None
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
async def async_update(self):
"""Update latest state."""
_LOGGER.debug("Running async_update")
try:
self._is_online = await self._speaker.is_online()
if self._is_online:
(
self._volume,
self._muted,
) = await self._speaker.get_volume_and_is_muted()
state = await self._speaker.get_state()
self._source = state.source
self._state = STATE_ON if state.is_on else STATE_OFF
if self._dsp is None:
# Only do this when necessary because it is a slow operation
await self.update_dsp()
else:
self._muted = None
self._source = None
self._volume = None
self._state = STATE_OFF
except (ConnectionRefusedError, ConnectionError, TimeoutError) as err:
_LOGGER.debug("Error in `update`: %s", err)
self._state = None
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def supported_features(self):
"""Flag media player features that are supported."""
support_kef = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
| SUPPORT_VOLUME_MUTE
| SUPPORT_SELECT_SOURCE
| SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK # only in Bluetooth and Wifi
| SUPPORT_PAUSE # only in Bluetooth and Wifi
| SUPPORT_PLAY # only in Bluetooth and Wifi
| SUPPORT_PREVIOUS_TRACK # only in Bluetooth and Wifi
)
if self._supports_on:
support_kef |= SUPPORT_TURN_ON
return support_kef
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._sources
@property
def available(self):
"""Return if the speaker is reachable online."""
return self._is_online
@property
def unique_id(self):
"""Return the device unique id."""
return self._unique_id
@property
def icon(self):
"""Return the device's icon."""
return "mdi:speaker-wireless"
async def async_turn_off(self):
"""Turn the media player off."""
await self._speaker.turn_off()
async def async_turn_on(self):
"""Turn the media player on."""
if not self._supports_on:
raise NotImplementedError()
await self._speaker.turn_on()
async def async_volume_up(self):
"""Volume up the media player."""
await self._speaker.increase_volume()
async def async_volume_down(self):
"""Volume down the media player."""
await self._speaker.decrease_volume()
async def async_set_volume_level(self, volume):
"""Set volume level, range 0..1."""
await self._speaker.set_volume(volume)
async def async_mute_volume(self, mute):
"""Mute (True) or unmute (False) media player."""
if mute:
await self._speaker.mute()
else:
await self._speaker.unmute()
async def async_select_source(self, source: str):
"""Select input source."""
if source in self.source_list:
await self._speaker.set_source(source)
else:
raise ValueError(f"Unknown input source: {source}.")
async def async_media_play(self):
"""Send play command."""
await self._speaker.set_play_pause()
async def async_media_pause(self):
"""Send pause command."""
await self._speaker.set_play_pause()
async def async_media_previous_track(self):
"""Send previous track command."""
await self._speaker.prev_track()
async def async_media_next_track(self):
"""Send next track command."""
await self._speaker.next_track()
async def update_dsp(self, _=None) -> None:
"""Update the DSP settings."""
if self._speaker_type == "LS50" and self._state == STATE_OFF:
            # The LSX is able to respond when off; the LS50 has to be on.
return
mode = await self._speaker.get_mode()
self._dsp = dict(
desk_db=await self._speaker.get_desk_db(),
wall_db=await self._speaker.get_wall_db(),
treble_db=await self._speaker.get_treble_db(),
high_hz=await self._speaker.get_high_hz(),
low_hz=await self._speaker.get_low_hz(),
sub_db=await self._speaker.get_sub_db(),
**mode._asdict(),
)
async def async_added_to_hass(self):
"""Subscribe to DSP updates."""
self._update_dsp_task_remover = async_track_time_interval(
self.hass, self.update_dsp, DSP_SCAN_INTERVAL
)
async def async_will_remove_from_hass(self):
"""Unsubscribe to DSP updates."""
self._update_dsp_task_remover()
self._update_dsp_task_remover = None
@property
def device_state_attributes(self):
"""Return the DSP settings of the KEF device."""
return self._dsp or {}
async def set_mode(
self,
desk_mode=None,
wall_mode=None,
phase_correction=None,
high_pass=None,
sub_polarity=None,
bass_extension=None,
):
"""Set the speaker mode."""
await self._speaker.set_mode(
desk_mode=desk_mode,
wall_mode=wall_mode,
phase_correction=phase_correction,
high_pass=high_pass,
sub_polarity=sub_polarity,
bass_extension=bass_extension,
)
self._dsp = None
async def set_desk_db(self, db_value):
"""Set desk_db of the KEF speakers."""
await self._speaker.set_desk_db(db_value)
self._dsp = None
async def set_wall_db(self, db_value):
"""Set wall_db of the KEF speakers."""
await self._speaker.set_wall_db(db_value)
self._dsp = None
async def set_treble_db(self, db_value):
"""Set treble_db of the KEF speakers."""
await self._speaker.set_treble_db(db_value)
self._dsp = None
async def set_high_hz(self, hz_value):
"""Set high_hz of the KEF speakers."""
await self._speaker.set_high_hz(hz_value)
self._dsp = None
async def set_low_hz(self, hz_value):
"""Set low_hz of the KEF speakers."""
await self._speaker.set_low_hz(hz_value)
self._dsp = None
async def set_sub_db(self, db_value):
"""Set sub_db of the KEF speakers."""
await self._speaker.set_sub_db(db_value)
self._dsp = None
|
from functools import partial
import numpy as np
from ..core import indexing
from ..core.pycompat import is_duck_dask_array
from ..core.variable import Variable
from .variables import (
VariableCoder,
lazy_elemwise_func,
pop_to,
safe_setitem,
unpack_for_decoding,
unpack_for_encoding,
)
def create_vlen_dtype(element_type):
# based on h5py.special_dtype
return np.dtype("O", metadata={"element_type": element_type})
def check_vlen_dtype(dtype):
if dtype.kind != "O" or dtype.metadata is None:
return None
else:
return dtype.metadata.get("element_type")
def is_unicode_dtype(dtype):
return dtype.kind == "U" or check_vlen_dtype(dtype) == str
def is_bytes_dtype(dtype):
return dtype.kind == "S" or check_vlen_dtype(dtype) == bytes
class EncodedStringCoder(VariableCoder):
"""Transforms between unicode strings and fixed-width UTF-8 bytes."""
def __init__(self, allows_unicode=True):
self.allows_unicode = allows_unicode
def encode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_encoding(variable)
contains_unicode = is_unicode_dtype(data.dtype)
encode_as_char = encoding.get("dtype") == "S1"
if encode_as_char:
del encoding["dtype"] # no longer relevant
if contains_unicode and (encode_as_char or not self.allows_unicode):
if "_FillValue" in attrs:
raise NotImplementedError(
"variable {!r} has a _FillValue specified, but "
"_FillValue is not yet supported on unicode strings: "
"https://github.com/pydata/xarray/issues/1647".format(name)
)
string_encoding = encoding.pop("_Encoding", "utf-8")
safe_setitem(attrs, "_Encoding", string_encoding, name=name)
# TODO: figure out how to handle this in a lazy way with dask
data = encode_string_array(data, string_encoding)
return Variable(dims, data, attrs, encoding)
def decode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_decoding(variable)
if "_Encoding" in attrs:
string_encoding = pop_to(attrs, encoding, "_Encoding")
func = partial(decode_bytes_array, encoding=string_encoding)
data = lazy_elemwise_func(data, func, np.dtype(object))
return Variable(dims, data, attrs, encoding)
def decode_bytes_array(bytes_array, encoding="utf-8"):
# This is faster than using np.char.decode() or np.vectorize()
bytes_array = np.asarray(bytes_array)
decoded = [x.decode(encoding) for x in bytes_array.ravel()]
return np.array(decoded, dtype=object).reshape(bytes_array.shape)
def encode_string_array(string_array, encoding="utf-8"):
string_array = np.asarray(string_array)
encoded = [x.encode(encoding) for x in string_array.ravel()]
return np.array(encoded, dtype=bytes).reshape(string_array.shape)
def ensure_fixed_length_bytes(var):
"""Ensure that a variable with vlen bytes is converted to fixed width."""
dims, data, attrs, encoding = unpack_for_encoding(var)
if check_vlen_dtype(data.dtype) == bytes:
# TODO: figure out how to handle this with dask
data = np.asarray(data, dtype=np.string_)
return Variable(dims, data, attrs, encoding)
class CharacterArrayCoder(VariableCoder):
"""Transforms between arrays containing bytes and character arrays."""
def encode(self, variable, name=None):
variable = ensure_fixed_length_bytes(variable)
dims, data, attrs, encoding = unpack_for_encoding(variable)
if data.dtype.kind == "S" and encoding.get("dtype") is not str:
data = bytes_to_char(data)
if "char_dim_name" in encoding.keys():
char_dim_name = encoding.pop("char_dim_name")
else:
char_dim_name = "string%s" % data.shape[-1]
dims = dims + (char_dim_name,)
return Variable(dims, data, attrs, encoding)
def decode(self, variable, name=None):
dims, data, attrs, encoding = unpack_for_decoding(variable)
if data.dtype == "S1" and dims:
encoding["char_dim_name"] = dims[-1]
dims = dims[:-1]
data = char_to_bytes(data)
return Variable(dims, data, attrs, encoding)
def bytes_to_char(arr):
"""Convert numpy/dask arrays from fixed width bytes to characters."""
if arr.dtype.kind != "S":
raise ValueError("argument must have a fixed-width bytes dtype")
if is_duck_dask_array(arr):
import dask.array as da
return da.map_blocks(
_numpy_bytes_to_char,
arr,
dtype="S1",
            chunks=arr.chunks + ((arr.dtype.itemsize,),),
new_axis=[arr.ndim],
)
else:
return _numpy_bytes_to_char(arr)
def _numpy_bytes_to_char(arr):
"""Like netCDF4.stringtochar, but faster and more flexible."""
# ensure the array is contiguous
arr = np.array(arr, copy=False, order="C", dtype=np.string_)
return arr.reshape(arr.shape + (1,)).view("S1")
def char_to_bytes(arr):
"""Convert numpy/dask arrays from characters to fixed width bytes."""
if arr.dtype != "S1":
raise ValueError("argument must have dtype='S1'")
if not arr.ndim:
# no dimension to concatenate along
return arr
size = arr.shape[-1]
if not size:
# can't make an S0 dtype
return np.zeros(arr.shape[:-1], dtype=np.string_)
if is_duck_dask_array(arr):
import dask.array as da
if len(arr.chunks[-1]) > 1:
raise ValueError(
"cannot stacked dask character array with "
"multiple chunks in the last dimension: {}".format(arr)
)
dtype = np.dtype("S" + str(arr.shape[-1]))
return da.map_blocks(
_numpy_char_to_bytes,
arr,
dtype=dtype,
chunks=arr.chunks[:-1],
drop_axis=[arr.ndim - 1],
)
else:
return StackedBytesArray(arr)
def _numpy_char_to_bytes(arr):
"""Like netCDF4.chartostring, but faster and more flexible."""
# based on: http://stackoverflow.com/a/10984878/809705
arr = np.array(arr, copy=False, order="C")
dtype = "S" + str(arr.shape[-1])
return arr.view(dtype).reshape(arr.shape[:-1])
class StackedBytesArray(indexing.ExplicitlyIndexedNDArrayMixin):
"""Wrapper around array-like objects to create a new indexable object where
values, when accessed, are automatically stacked along the last dimension.
>>> indexer = indexing.BasicIndexer((slice(None),))
>>> StackedBytesArray(np.array(["a", "b", "c"], dtype="S1"))[indexer]
array(b'abc', dtype='|S3')
"""
def __init__(self, array):
"""
Parameters
----------
array : array-like
Original array of values to wrap.
"""
if array.dtype != "S1":
raise ValueError(
"can only use StackedBytesArray if argument has dtype='S1'"
)
self.array = indexing.as_indexable(array)
@property
def dtype(self):
return np.dtype("S" + str(self.array.shape[-1]))
@property
def shape(self):
return self.array.shape[:-1]
def __repr__(self):
return "{}({!r})".format(type(self).__name__, self.array)
def __getitem__(self, key):
# require slicing the last dimension completely
key = type(key)(indexing.expanded_indexer(key.tuple, self.array.ndim))
if key.tuple[-1] != slice(None):
raise IndexError("too many indices")
return _numpy_char_to_bytes(self.array[key])
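# Hedged example (not part of the original module): a round-trip through the private
# helpers above on a small fixed-width bytes array; names and values are illustrative only.
def _demo_char_roundtrip():
    """Show fixed-width bytes expanding to 'S1' characters and collapsing back."""
    arr = np.array([b"abc", b"def"], dtype="S3")
    chars = _numpy_bytes_to_char(arr)  # shape (2, 3), dtype 'S1'
    restored = _numpy_char_to_bytes(chars)  # shape (2,), dtype 'S3'
    assert (restored == arr).all()
    return chars, restored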
|
import argparse
import asyncio
import json
import logging
import traceback
import aiohttp
from paasta_tools import marathon_tools
from paasta_tools import mesos_tools
from paasta_tools import tron_tools
from paasta_tools import utils
log = logging.getLogger(__name__)
LOG_COMPONENT = "task_lifecycle"
class UnknownFrameworkError(Exception):
pass
def interpret_task_updated(task_updated) -> str:
message = task_updated["status"].get("message")
healthy = task_updated["status"].get("healthy")
reason = task_updated["status"].get("reason")
state = task_updated["state"]
if state == "TASK_STARTING":
return state
if state == "TASK_RUNNING":
if reason == "REASON_TASK_HEALTH_CHECK_STATUS_UPDATED":
return f"Health check update: healthy={healthy}"
elif reason is None:
return f"Task running, no healthchecks defined."
else:
return "Unknown: TASK_RUNNING but unrecognized status.reason"
if state == "TASK_KILLED":
if healthy is False:
message = task_updated["status"]["message"]
if message == "Container exited with status 143":
return "Killed by Mesos due to healthcheck failures"
else:
return f"Unknown: TASK_KILLED with healthy={healthy!r} but unrecognized status.message"
elif healthy is None:
return "Killed by Paasta (bounce? autoscaling?)"
else:
return f"Unknown: TASK_KILLED but unrecognized status.healthy={healthy!r}"
elif state == "TASK_FAILED":
if message == "Container exited with status 137":
return f"Probable OOM: {message}"
elif message is None:
return f"Unknown: TASK_FAILED but status.message is None. This seems to happen when a task exits very quickly on startup. Mesos usually follows up with a corrected message shortly."
elif message.startswith("Container exited with status "):
return f"TASK_FAILED: {message}"
else:
return f"Unknown: TASK_FAILED but unrecognized status.message"
elif state == "TASK_FINISHED":
if message is None:
return f"Unknown: TASK_FINISHED but status.message is None. This seems to happen when a task exits very quickly on startup. Mesos usually follows up with a corrected message shortly."
return f"TASK_FINISHED: {message}"
else:
return f"Unknown: unrecognized state"
class MesosEventSubscriber:
def __init__(self, cluster):
self.framework_id_to_name = {}
self.framework_id_task_id_to_git_sha = {}
self.cluster = cluster
async def subscribe(self):
# This connection should live ~forever, so disable some timeouts.
timeout = aiohttp.ClientTimeout(
total=None, sock_read=None, connect=30, sock_connect=30,
)
async with aiohttp.ClientSession(timeout=timeout) as session:
payload = '{"type":"SUBSCRIBE"}'
master_host_port = mesos_tools.find_mesos_leader(cluster=self.cluster)
async with session.post(
f"http://{master_host_port}/api/v1",
data=payload,
# allow_redirects=True,
headers={"Content-Type": "application/json"},
timeout=timeout,
) as resp:
while True:
_size = await resp.content.readline()
if not _size:
break
size = int(_size)
record = await resp.content.readexactly(size)
yield json.loads(record)
def determine_service_instance(self, framework_name, status):
executor_id = status.get("executor_id", {}).get("value")
task_id = status.get("task_id", {}).get("value")
if framework_name.startswith("marathon"):
return marathon_tools.parse_service_instance_from_executor_id(
executor_id or task_id
)
elif framework_name.startswith("tron"):
return tron_tools.decompose_executor_id(executor_id)[:2]
elif framework_name.startswith("paasta-remote "):
# sorta gross, but it's the same format.
return marathon_tools.parse_service_instance_from_executor_id(executor_id)
else:
raise UnknownFrameworkError(
f"don't know how to parse task IDs for framework {framework_name}"
)
def skip_updates_from_framework(self, framework_name) -> bool:
if framework_name.startswith("jupyterhub_"):
return True
if framework_name.startswith("jenkins"):
return True
if framework_name.startswith("paasta_spark_run"):
return True
return False
def handler_task_updated(self, event):
task_updated = event["task_updated"]
try:
del task_updated["status"]["data"]
except KeyError:
pass
framework_name = self.framework_id_to_name[
task_updated["framework_id"]["value"]
]
if self.skip_updates_from_framework(framework_name):
return
service, instance = self.determine_service_instance(
framework_name, task_updated["status"]
)
git_sha = self.framework_id_task_id_to_git_sha.get(
(
task_updated["framework_id"]["value"],
task_updated["status"]["task_id"]["value"],
)
)
self.log_task_updated(
service=service,
instance=instance,
git_sha=git_sha,
task_updated=task_updated,
)
def log_task_updated(self, service, instance, git_sha, task_updated):
message = {
"type": "mesos_task_updated",
"is_terminal": task_updated["state"] in mesos_tools.TERMINAL_STATES,
"interpretation": interpret_task_updated(task_updated),
"git_sha": git_sha,
"task_updated": task_updated,
}
utils._log(
service=service,
instance=instance,
component=LOG_COMPONENT,
cluster=self.cluster,
line=json.dumps(message),
)
def handler_subscribed(self, event):
state = event["subscribed"]["get_state"]
for framework in state["get_frameworks"]["frameworks"]:
framework_info = framework["framework_info"]
self.register_framework(framework_info)
for task in state["get_tasks"]["tasks"]:
self.register_task(task)
def register_framework(self, framework_info):
self.framework_id_to_name[framework_info["id"]["value"]] = framework_info[
"name"
]
def register_task(self, task):
framework_name = self.framework_id_to_name[task["framework_id"]["value"]]
if self.skip_updates_from_framework(framework_name):
return
git_sha = self.get_git_sha_from_task_dict(task)
self.framework_id_task_id_to_git_sha[
task["framework_id"]["value"], task["task_id"]["value"]
] = git_sha
def get_git_sha_from_task_dict(self, task):
try:
docker_image = task["container"]["docker"]["image"]
except KeyError:
try:
docker_image = task["container"]["mesos"]["image"]["docker"]["name"]
except KeyError:
log.debug("Could not figure out what docker image task uses: {task}")
return None
return utils.get_git_sha_from_dockerurl(docker_image)
def handler_framework_added(self, event):
self.register_framework(event["framework_added"]["framework"]["framework_info"])
def handler_task_added(self, event):
self.register_task(event["task_added"]["task"])
async def follow(self):
async for event in self.subscribe():
try:
func = {
"SUBSCRIBED": self.handler_subscribed,
"TASK_UPDATED": self.handler_task_updated,
"FRAMEWORK_ADDED": self.handler_framework_added,
"TASK_ADDED": self.handler_task_added,
}[event["type"]]
except KeyError:
log.debug(f"Unknown event type {event['type']}")
continue
try:
func(event)
except Exception:
log.error(traceback.format_exc())
def main():
system_paasta_config = utils.load_system_paasta_config()
try:
cluster = system_paasta_config.get_cluster()
except utils.PaastaNotConfiguredError:
cluster = None
parser = argparse.ArgumentParser()
parser.add_argument(
"--cluster", type=str, default=cluster, required=(cluster is None)
)
args = parser.parse_args()
loop = asyncio.get_event_loop()
subscriber = MesosEventSubscriber(cluster=args.cluster)
loop.run_until_complete(subscriber.follow())
if __name__ == "__main__":
main()
|
import unittest
from mock import MagicMock, patch, call
from uiautomator import AutomatorServer, JsonRPCError
class TestAutomatorServer(unittest.TestCase):
def setUp(self):
self.Adb_patch = patch('uiautomator.Adb')
self.Adb = self.Adb_patch.start()
def tearDown(self):
        self.Adb_patch.stop()
def test_local_port(self):
self.assertEqual(AutomatorServer("1234", 9010).local_port, 9010)
self.Adb.assert_called_once_with(serial="1234", adb_server_host=None, adb_server_port=None)
def test_local_port_forwarded(self):
self.Adb.return_value.forward_list.return_value = [
("1234", "tcp:1001", "tcp:9009"),
("1234", "tcp:1000", "tcp:9008")
]
self.Adb.return_value.device_serial.return_value = "1234"
self.assertEqual(AutomatorServer("1234").local_port, 1000)
def test_local_port_scanning(self):
with patch('uiautomator.next_local_port') as next_local_port:
self.Adb.return_value.forward_list.return_value = []
next_local_port.return_value = 1234
self.assertEqual(AutomatorServer("abcd", None).local_port,
next_local_port.return_value)
next_local_port.return_value = 14321
self.Adb.return_value.forward_list.return_value = Exception("error")
self.assertEqual(AutomatorServer("abcd", None).local_port,
next_local_port.return_value)
def test_device_port(self):
self.assertEqual(AutomatorServer().device_port, 9008)
def test_start_success(self):
server = AutomatorServer()
server.push = MagicMock()
server.push.return_value = ["bundle.jar", "uiautomator-stub.jar"]
server.ping = MagicMock()
server.ping.return_value = "pong"
server.adb = MagicMock()
server.start()
        server.adb.cmd.assert_called_once_with('shell', 'uiautomator', 'runtest', 'bundle.jar', 'uiautomator-stub.jar', '-c', 'com.github.uiautomatorstub.Stub')
def test_start_error(self):
server = AutomatorServer()
server.push = MagicMock()
server.push.return_value = ["bundle.jar", "uiautomator-stub.jar"]
server.ping = MagicMock()
server.ping.return_value = None
server.adb = MagicMock()
with patch("time.sleep"):
with self.assertRaises(IOError):
server.start()
def test_auto_start(self):
try:
import urllib2
except ImportError:
import urllib.request as urllib2
with patch("uiautomator.JsonRPCMethod") as JsonRPCMethod:
returns = [urllib2.URLError("error"), "ok"]
def side_effect():
result = returns.pop(0)
if isinstance(result, Exception):
raise result
return result
JsonRPCMethod.return_value.side_effect = side_effect
server = AutomatorServer()
server.start = MagicMock()
server.stop = MagicMock()
self.assertEqual("ok", server.jsonrpc.any_method())
server.start.assert_called_once_with(timeout=30)
with patch("uiautomator.JsonRPCMethod") as JsonRPCMethod:
returns = [JsonRPCError(-32000-1, "error msg"), "ok"]
def side_effect():
result = returns.pop(0)
if isinstance(result, Exception):
raise result
return result
JsonRPCMethod.return_value.side_effect = side_effect
server = AutomatorServer()
server.start = MagicMock()
server.stop = MagicMock()
self.assertEqual("ok", server.jsonrpc.any_method())
server.start.assert_called_once_with()
with patch("uiautomator.JsonRPCMethod") as JsonRPCMethod:
JsonRPCMethod.return_value.side_effect = JsonRPCError(-32000-2, "error msg")
server = AutomatorServer()
server.start = MagicMock()
server.stop = MagicMock()
with self.assertRaises(JsonRPCError):
server.jsonrpc.any_method()
def test_start_ping(self):
with patch("uiautomator.JsonRPCClient") as JsonRPCClient:
JsonRPCClient.return_value.ping.return_value = "pong"
server = AutomatorServer()
server.adb = MagicMock()
server.adb.forward.return_value = 0
self.assertEqual(server.ping(), "pong")
def test_start_ping_none(self):
with patch("uiautomator.JsonRPCClient") as JsonRPCClient:
JsonRPCClient.return_value.ping.side_effect = Exception("error")
server = AutomatorServer()
self.assertEqual(server.ping(), None)
class TestAutomatorServer_Stop(unittest.TestCase):
def setUp(self):
try:
import urllib2
self.urlopen_patch = patch('urllib2.urlopen')
        except ImportError:
self.urlopen_patch = patch('urllib.request.urlopen')
finally:
self.urlopen = self.urlopen_patch.start()
def tearDown(self):
self.urlopen_patch.stop()
def test_screenshot(self):
server = AutomatorServer()
server.sdk_version = MagicMock()
server.sdk_version.return_value = 17
self.assertEqual(server.screenshot(), None)
server.sdk_version.return_value = 18
self.urlopen.return_value.read = MagicMock()
self.urlopen.return_value.read.return_value = b"123456"
self.assertEqual(server.screenshot(), b"123456")
self.assertEqual(server.screenshot("/tmp/test.txt"), "/tmp/test.txt")
def test_push(self):
jars = ["bundle.jar", "uiautomator-stub.jar"]
server = AutomatorServer()
server.adb = MagicMock()
self.assertEqual(set(server.push()), set(jars))
for args in server.adb.cmd.call_args_list:
self.assertEqual(args[0][0], "push")
self.assertEqual(args[0][2], "/data/local/tmp/")
def test_stop_started_server(self):
server = AutomatorServer()
server.adb = MagicMock()
server.uiautomator_process = process = MagicMock()
process.poll.return_value = None
server.stop()
process.wait.assert_called_once_with()
server.uiautomator_process = process = MagicMock()
process.poll.return_value = None
self.urlopen.side_effect = IOError("error")
server.stop()
process.kill.assert_called_once_with()
def test_stop(self):
results = [
b"USER PID PPID VSIZE RSS WCHAN PC NAME\n\rsystem 372 126 635596 104808 ffffffff 00000000 S uiautomator",
b"USER PID PPID VSIZE RSS WCHAN PC NAME\r\nsystem 372 126 635596 104808 ffffffff 00000000 S uiautomator",
b"USER PID PPID VSIZE RSS WCHAN PC NAME\nsystem 372 126 635596 104808 ffffffff 00000000 S uiautomator",
b"USER PID PPID VSIZE RSS WCHAN PC NAME\rsystem 372 126 635596 104808 ffffffff 00000000 S uiautomator"
]
for r in results:
server = AutomatorServer()
server.adb = MagicMock()
server.adb.cmd.return_value.communicate.return_value = (r, "")
server.stop()
self.assertEqual(server.adb.cmd.call_args_list,
[call("shell", "ps", "-C", "uiautomator"), call("shell", "kill", "-9", "372")])
class TestJsonRPCError(unittest.TestCase):
def testJsonRPCError(self):
e = JsonRPCError(200, "error")
self.assertEqual(200, e.code)
self.assertTrue(len(str(e)) > 0)
e = JsonRPCError("200", "error")
self.assertEqual(200, e.code)
|
import json
import logging
import os
import threading
from absl import flags
from perfkitbenchmarker import container_service
from perfkitbenchmarker import context
from perfkitbenchmarker import data
from perfkitbenchmarker import disk
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import providers
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.docker import docker_container_spec
from perfkitbenchmarker.providers.docker import docker_disk
FLAGS = flags.FLAGS
UBUNTU16_IMAGE = 'ubuntu:xenial'
DEFAULT_DOCKER_IMAGE = 'pkb/ubuntu16'
DOCKERFILE_DIRECTORY = 'perfkitbenchmarker/data/docker'
class DockerContainer(virtual_machine.BaseVirtualMachine):
"""Object representing a Docker Container instance."""
CLOUD = providers.DOCKER
DEFAULT_IMAGE = None
CONTAINER_COMMAND = None
docker_build_lock = threading.Lock()
def __init__(self, vm_spec):
"""Initialize a Docker Container."""
super(DockerContainer, self).__init__(vm_spec)
self.name = self.name.replace('_', '-')
self.container_id = ''
self.user_name = FLAGS.username
self.image = self.image or self.DEFAULT_IMAGE
self.cpus = vm_spec.machine_type.cpus
self.memory_mb = vm_spec.machine_type.memory
self.privileged = vm_spec.privileged_docker
self.container_image = DEFAULT_DOCKER_IMAGE
self.docker_sysctl_flags = ''
# apply flags
if FLAGS.docker_custom_image:
self.container_image = FLAGS.docker_custom_image
if FLAGS.sysctl:
self.docker_sysctl_flags = FLAGS.sysctl
def _CreateDependencies(self):
self._CreateVolumes()
def _DeleteDependencies(self):
self._DeleteVolumes()
def _Create(self):
"""Create a Docker instance."""
# Locally build docker container
with self.docker_build_lock:
image_exists = self._LocalImageExists(self.container_image)
if image_exists is False:
self._BuildImageLocally()
create_command = self._FormatCreateCommand()
container_info, _, _ = vm_util.IssueCommand(create_command,
raise_on_failure=False)
self.container_id = container_info.encode('ascii')
def _FormatCreateCommand(self):
"""Formats the command for Docker based on vm_spec and flags."""
create_command = ['docker', 'run', '-d', '--name', self.name]
# format scratch disks
for vol in self.scratch_disks:
vol_string = vol.volume_name + ':' + vol.mount_point
create_command.append('-v')
create_command.append(vol_string)
# format cpus option
if self.cpus > 0:
create_command.append('--cpus')
create_command.append(str(self.cpus))
# format memory option
if self.memory_mb > 0:
create_command.append('-m')
create_command.append(str(self.memory_mb) + 'm')
if self.docker_sysctl_flags:
logging.info(self.docker_sysctl_flags)
sysctl_string = ''
for sysctl_flag in self.docker_sysctl_flags:
sysctl_string = sysctl_string + sysctl_flag + ' '
logging.info(sysctl_string)
create_command.append('--sysctl')
create_command.append(sysctl_string)
create_command.append(self.container_image)
create_command.append('/usr/sbin/sshd')
create_command.append('-D')
return create_command
@vm_util.Retry()
def _PostCreate(self):
"""Prepares running container.
Gets the IP address, copies public keys,
    and configures the proxy if one is specified.
"""
self._GetIpAddresses()
# Copy ssh key to container to enable ssh login
copy_ssh_command = ['docker', 'cp', self.ssh_public_key,
'%s:/root/.ssh/authorized_keys' % self.name]
vm_util.IssueCommand(copy_ssh_command, raise_on_failure=False)
# change ownership of authorized_key file to root in container
chown_command = ['docker', 'exec', self.name, 'chown',
'root:root', '/root/.ssh/authorized_keys']
vm_util.IssueCommand(chown_command, raise_on_failure=False)
self._ConfigureProxy()
def _Delete(self):
"""Kill and Remove Docker Container."""
delete_command = ['docker', 'kill', self.name]
output = vm_util.IssueCommand(delete_command, raise_on_failure=False)
logging.info(output[vm_util.OUTPUT_STDOUT].rstrip())
remove_command = ['docker', 'rm', self.name]
output = vm_util.IssueCommand(remove_command, raise_on_failure=False)
logging.info(output[vm_util.OUTPUT_STDOUT].rstrip())
return
@vm_util.Retry(poll_interval=10, max_retries=10)
def _Exists(self):
"""Returns whether the container is up and running."""
info, return_code = self._GetContainerInfo()
logging.info('Checking if Docker Container Exists')
if info and return_code == 0:
status = info[0]['State']['Running']
if status:
logging.info('Docker Container %s is up and running.', self.name)
return True
return False
def _CreateVolumes(self):
"""Creates volumes for scratch disks.
    These volumes have to be created BEFORE container creation because Docker
    does not allow attaching a volume to an already running container.
"""
self.scratch_disks = docker_disk.CreateDisks(self.disk_specs, self.name)
@vm_util.Retry(poll_interval=10, max_retries=20, log_errors=False)
def _DeleteVolumes(self):
"""Deletes volumes."""
for scratch_disk in self.scratch_disks[:]:
scratch_disk.Delete()
self.scratch_disks.remove(scratch_disk)
def DeleteScratchDisks(self):
pass
def _GetIpAddresses(self):
"""Sets the internal and external IP address for the Container."""
info, return_code = self._GetContainerInfo()
ip = False
if info and return_code == 0:
ip = info[0]['NetworkSettings']['IPAddress'].encode('ascii')
logging.info('IP: %s', ip)
self.ip_address = ip
self.internal_ip = ip
else:
logging.warning('IP address information not found')
def _RemoveIfExists(self):
if self._Exists():
self._Delete()
def _GetContainerInfo(self):
"""Returns information about a container.
    Gets container information from `docker inspect`. Returns the information,
    if there is any, and a return code (0 on success).
"""
logging.info('Checking Container Information')
inspect_cmd = ['docker', 'inspect', self.name]
info, _, return_code = vm_util.IssueCommand(inspect_cmd,
suppress_warning=True,
raise_on_failure=False)
info = json.loads(info)
return info, return_code
def _ConfigureProxy(self):
"""Configure network proxy for Docker Container.
    In Docker containers, environment variables from /etc/environment
    are not sourced; this results in connection problems when running
    behind a proxy. Prepending the proxy environment variables to bashrc
    solves the problem. Note: APPENDING to bashrc will not work because
    the script exits when it is NOT executed in an interactive shell.
"""
if FLAGS.http_proxy:
http_proxy = "sed -i '1i export http_proxy=%s' /etc/bash.bashrc"
self.RemoteCommand(http_proxy % FLAGS.http_proxy)
if FLAGS.https_proxy:
https_proxy = "sed -i '1i export https_proxy=%s' /etc/bash.bashrc"
      self.RemoteCommand(https_proxy % FLAGS.https_proxy)
if FLAGS.ftp_proxy:
ftp_proxy = "sed -i '1i export ftp_proxy=%s' /etc/bash.bashrc"
self.RemoteCommand(ftp_proxy % FLAGS.ftp_proxy)
def _BuildVolumesBody(self):
"""Construct volumes-related part of create command for Docker Container."""
volumes = []
for scratch_disk in self.scratch_disks:
vol_string = scratch_disk.volume_name + ':' + scratch_disk.mount_point
volumes.append('-v')
volumes.append(vol_string)
return volumes
def _LocalImageExists(self, docker_image_name):
"""Returns whether a Docker image exists locally."""
inspect_cmd = ['docker', 'image', 'inspect', docker_image_name]
info, _, return_code = vm_util.IssueCommand(inspect_cmd,
suppress_warning=True,
raise_on_failure=False)
info = json.loads(info)
logging.info('Checking if Docker Image Exists')
if info and return_code == 0:
logging.info('Image exists')
return True
logging.info('Image does not exist')
return False
def _BuildImageLocally(self):
"""Build Container Image Locally.
Dockerfiles located at
PerfKitBenchmarker/data/docker/pkb/<containerImage>/Dockerfile
"""
directory = os.path.dirname(
data.ResourcePath(os.path.join(DOCKERFILE_DIRECTORY,
self.container_image,
'Dockerfile')))
build_cmd = [
'docker', 'build', '--no-cache',
'-t', self.container_image, directory]
vm_util.IssueCommand(build_cmd, raise_on_failure=False)
def GetResourceMetadata(self):
"""Returns a dict containing metadata about the VM.
Returns:
dict mapping string property key to value.
"""
result = super(DockerContainer, self).GetResourceMetadata()
    logging.warning('GET RESOURCE METADATA')
return result
class DebianBasedDockerContainer(DockerContainer,
linux_virtual_machine.BaseDebianMixin):
def _GetNumCpus(self):
return self.cpus
def ApplySysctlPersistent(self, sysctl_params):
"""Override ApplySysctlPeristent function for Docker provider.
Parent function causes errors with Docker because it shutdowns container
Args:
sysctl_params: dict - the keys and values to write
"""
    logging.warning('sysctl flags are applied when container is created. '
                    'Not all sysctl flags work with Docker. It does not '
                    'support flags that modify the host system')
def _RebootIfNecessary(self):
"""Override RebootIfNecessary for Docker Provider."""
    logging.warning('Docker Containers cannot be rebooted to apply flags')
class Ubuntu1604BasedDockerContainer(DebianBasedDockerContainer,
linux_virtual_machine.Ubuntu1604Mixin):
DEFAULT_IMAGE = UBUNTU16_IMAGE
# Note: to add support for ubuntu 14 and ubuntu 18, we simply need to
# create/test Dockerfiles for those distros. This should be
# fairly simple, but may require a few changes from the
# ubuntu16 Dockerfile.
|
from django.template.loader import select_template
from django.utils import translation
from django_elasticsearch_dsl import fields, Document, Index
from elasticsearch.exceptions import NotFoundError
from shop.conf import app_settings
from shop.models.product import ProductModel
from shop.search.analyzers import body_analyzers
class _ProductDocument(Document):
product_code = fields.KeywordField(
multi=True,
boost=3,
)
product_name = fields.TextField(
boost=2,
)
product_type = fields.TextField()
class Django:
model = ProductModel
fields = ['id']
ignore_signals = True # performed by ProductModel.update_search_index()
queryset_pagination = 499 # let DRF do the pagination
def __str__(self):
return "{} {}: {}".format(self.product_type, self.id, self.product_name)
def get_queryset(self):
queryset = super().get_queryset()
return queryset.filter(active=True)
def prepare_product_code(self, instance):
"""
Create a list of textual representation for product codes.
"""
has_valid_product_code = lambda obj: isinstance(getattr(obj, 'product_code', None), str)
variants = instance.get_product_variants()
product_codes = [v.product_code for v in variants if has_valid_product_code(v)]
if has_valid_product_code(instance):
product_codes.append(instance.product_code)
return product_codes
def prepare_body(self, instance):
"""
Create a textual representation of the product's instance to be used by Elasticsearch for
creating a full text search index.
"""
app_label = instance._meta.app_label.lower()
params = [
(app_label, instance.product_model),
(app_label, 'product'),
('shop', 'product'),
]
template = select_template(['{0}/search/indexes/{1}.txt'.format(*p) for p in params])
body = template.render({'product': instance})
return body
def update(self, thing, refresh=None, action='index', parallel=False, **kwargs):
if isinstance(thing, ProductModel._materialized_model) and thing.active is False:
try:
doc = self.get(id=thing.id)
except NotFoundError:
pass
else:
doc.delete()
else:
if self._language:
with translation.override(self._language):
super().update(thing, refresh=None, action='index', parallel=False, **kwargs)
else:
super().update(thing, refresh=None, action='index', parallel=False, **kwargs)
class ProductDocument:
"""
    Factory for building an elasticsearch-dsl Document class.
    ``language_analizers`` must be a dictionary mapping each language to its analyzer, and
    ``language`` must also be set whenever ``language_analizers`` is given.
    See https://elasticsearch-dsl.readthedocs.io/en/latest/api.html#elasticsearch_dsl.Index.analyzer
"""
def __new__(cls, language=None, settings=None, language_analizers=None):
index_name_parts = [app_settings.SHOP_APP_LABEL]
if language_analizers:
copy = language_analizers.copy()
body_analyzers.update(copy) #overrides default language settings
if language:
index_name_parts.append(language.lower())
doc_name = 'ProductDocument{}'.format(language.title())
analyzer = body_analyzers.get(language, body_analyzers['default'])
else:
doc_name = 'ProductDocument'
analyzer = body_analyzers['default']
index_name_parts.append('products')
products_index = Index('.'.join(index_name_parts))
if settings:
products_index.settings(**settings)
attrs = {'_language': language, 'body': fields.TextField(analyzer=analyzer)}
doc_class = type(doc_name, (_ProductDocument,), attrs)
products_index.document(doc_class)
return doc_class
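# Hedged usage sketch (not part of the original module): building a language-specific
# document class; the language and index settings below are illustrative assumptions only.
#
#     EnglishProductDocument = ProductDocument(
#         language='en',
#         settings={'number_of_shards': 1},
#     )
#     EnglishProductDocument().update(ProductModel.objects.all())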
|
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OCCUPANCY,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import DEFAULT_ENTITY_NAMESPACE, DOMAIN as SKYBELL_DOMAIN, SkybellDevice
SCAN_INTERVAL = timedelta(seconds=5)
# Sensor types: Name, device_class, event
SENSOR_TYPES = {
"button": ["Button", DEVICE_CLASS_OCCUPANCY, "device:sensor:button"],
"motion": ["Motion", DEVICE_CLASS_MOTION, "device:sensor:motion"],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE
): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the platform for a Skybell device."""
skybell = hass.data.get(SKYBELL_DOMAIN)
sensors = []
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
for device in skybell.get_devices():
sensors.append(SkybellBinarySensor(device, sensor_type))
add_entities(sensors, True)
class SkybellBinarySensor(SkybellDevice, BinarySensorEntity):
"""A binary sensor implementation for Skybell devices."""
def __init__(self, device, sensor_type):
"""Initialize a binary sensor for a Skybell device."""
super().__init__(device)
self._sensor_type = sensor_type
self._name = "{} {}".format(
self._device.name, SENSOR_TYPES[self._sensor_type][0]
)
self._device_class = SENSOR_TYPES[self._sensor_type][1]
self._event = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of the binary sensor."""
return self._device_class
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = super().device_state_attributes
attrs["event_date"] = self._event.get("createdAt")
return attrs
def update(self):
"""Get the latest data and updates the state."""
super().update()
event = self._device.latest(SENSOR_TYPES[self._sensor_type][2])
self._state = bool(event and event.get("id") != self._event.get("id"))
self._event = event or {}
|
from __future__ import unicode_literals
import hashlib
def md516_encode(item):
"""md5 message digest algorithm output 16 char"""
try:
return hashlib.md5(item.encode("utf-8")).hexdigest()[8:-8]
    except Exception:
return ''
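# Hedged example (not part of the original module): the result is the middle 16 of the
# 32 hex-digest characters; input that cannot be encoded falls back to ''.
def _demo_md516():
    digest = md516_encode("hello")
    assert len(digest) == 16
    assert digest == hashlib.md5("hello".encode("utf-8")).hexdigest()[8:24]
    return digest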
|
import os.path as op
import numpy as np
import mne
from mne.cov import compute_covariance
from mne.datasets import somato
from mne.time_frequency import csd_morlet
from mne.beamformer import (make_dics, apply_dics_csd, make_lcmv,
apply_lcmv_cov)
from mne.minimum_norm import (make_inverse_operator, apply_inverse_cov)
print(__doc__)
###############################################################################
# Reading the raw data and creating epochs:
data_path = somato.data_path()
subject = '01'
task = 'somato'
raw_fname = op.join(data_path, 'sub-{}'.format(subject), 'meg',
'sub-{}_task-{}_meg.fif'.format(subject, task))
# crop to 5 minutes to save memory
raw = mne.io.read_raw_fif(raw_fname).crop(0, 300)
# We are interested in the beta band (12-30 Hz)
raw.load_data().filter(12, 30)
# The DICS beamformer currently only supports a single sensor type.
# We'll use the gradiometers in this example.
picks = mne.pick_types(raw.info, meg='grad', exclude='bads')
# Read epochs
events = mne.find_events(raw)
epochs = mne.Epochs(raw, events, event_id=1, tmin=-1.5, tmax=2, picks=picks,
preload=True, decim=3)
# Read forward operator and point to freesurfer subject directory
fname_fwd = op.join(data_path, 'derivatives', 'sub-{}'.format(subject),
'sub-{}_task-{}-fwd.fif'.format(subject, task))
subjects_dir = op.join(data_path, 'derivatives', 'freesurfer', 'subjects')
fwd = mne.read_forward_solution(fname_fwd)
###############################################################################
# Compute covariances
# -------------------
# ERS activity starts at 0.5 seconds after stimulus onset.
active_win = (0.5, 1.5)
baseline_win = (-1, 0)
baseline_cov = compute_covariance(epochs, tmin=baseline_win[0],
tmax=baseline_win[1], method='shrunk',
rank=None)
active_cov = compute_covariance(epochs, tmin=active_win[0], tmax=active_win[1],
method='shrunk', rank=None)
# Weighted averaging is already performed when adding covariance objects.
common_cov = baseline_cov + active_cov
###############################################################################
# Compute some source estimates
# -----------------------------
# Here we will use DICS, LCMV beamformer, and dSPM.
#
# See :ref:`ex-inverse-source-power` for more information about DICS.
def _gen_dics(active_win, baseline_win, epochs):
freqs = np.logspace(np.log10(12), np.log10(30), 9)
csd = csd_morlet(epochs, freqs, tmin=-1, tmax=1.5, decim=20)
csd_baseline = csd_morlet(epochs, freqs, tmin=baseline_win[0],
tmax=baseline_win[1], decim=20)
csd_ers = csd_morlet(epochs, freqs, tmin=active_win[0], tmax=active_win[1],
decim=20)
filters = make_dics(epochs.info, fwd, csd.mean(), pick_ori='max-power',
reduce_rank=True)
stc_base, freqs = apply_dics_csd(csd_baseline.mean(), filters)
stc_act, freqs = apply_dics_csd(csd_ers.mean(), filters)
stc_act /= stc_base
return stc_act
# generate lcmv source estimate
def _gen_lcmv(active_cov, baseline_cov, common_cov):
filters = make_lcmv(epochs.info, fwd, common_cov, reg=0.05,
noise_cov=None, pick_ori='max-power')
stc_base = apply_lcmv_cov(baseline_cov, filters)
stc_act = apply_lcmv_cov(active_cov, filters)
stc_act /= stc_base
return stc_act
# generate mne/dSPM source estimate
def _gen_mne(active_cov, baseline_cov, common_cov, fwd, info, method='dSPM'):
inverse_operator = make_inverse_operator(info, fwd, common_cov)
stc_act = apply_inverse_cov(active_cov, info, inverse_operator,
method=method, verbose=True)
stc_base = apply_inverse_cov(baseline_cov, info, inverse_operator,
method=method, verbose=True)
stc_act /= stc_base
return stc_act
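# Note (added for clarity, not in the original example): all three estimators above divide the
# active-window estimate by the baseline estimate, so the plotted values are unitless relative
# power rather than absolute source amplitudes.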
# Compute source estimates
stc_dics = _gen_dics(active_win, baseline_win, epochs)
stc_lcmv = _gen_lcmv(active_cov, baseline_cov, common_cov)
stc_dspm = _gen_mne(active_cov, baseline_cov, common_cov, fwd, epochs.info)
###############################################################################
# Plot source estimates
# ---------------------
# DICS:
brain_dics = stc_dics.plot(
hemi='rh', subjects_dir=subjects_dir, subject=subject,
time_label='DICS source power in the 12-30 Hz frequency band')
###############################################################################
# LCMV:
brain_lcmv = stc_lcmv.plot(
hemi='rh', subjects_dir=subjects_dir, subject=subject,
time_label='LCMV source power in the 12-30 Hz frequency band')
###############################################################################
# dSPM:
brain_dspm = stc_dspm.plot(
hemi='rh', subjects_dir=subjects_dir, subject=subject,
time_label='dSPM source power in the 12-30 Hz frequency band')
|
import pytest
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
ATTR_SOUND_MODE,
DOMAIN,
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOUND_MODE,
SERVICE_SELECT_SOURCE,
)
from homeassistant.components.media_player.reproduce_state import async_reproduce_states
from homeassistant.const import (
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_STOP,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import Context, State
from tests.common import async_mock_service
ENTITY_1 = "media_player.test1"
ENTITY_2 = "media_player.test2"
@pytest.mark.parametrize(
"service,state",
[
(SERVICE_TURN_ON, STATE_ON),
(SERVICE_TURN_OFF, STATE_OFF),
(SERVICE_MEDIA_PLAY, STATE_PLAYING),
(SERVICE_MEDIA_STOP, STATE_IDLE),
(SERVICE_MEDIA_PAUSE, STATE_PAUSED),
],
)
async def test_state(hass, service, state):
"""Test that we can turn a state into a service call."""
calls_1 = async_mock_service(hass, DOMAIN, service)
if service != SERVICE_TURN_ON:
async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await async_reproduce_states(hass, [State(ENTITY_1, state)])
await hass.async_block_till_done()
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": ENTITY_1}
async def test_turn_on_with_mode(hass):
"""Test that state with additional attributes call multiple services."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
calls_2 = async_mock_service(hass, DOMAIN, SERVICE_SELECT_SOUND_MODE)
await async_reproduce_states(
hass, [State(ENTITY_1, "on", {ATTR_SOUND_MODE: "dummy"})]
)
await hass.async_block_till_done()
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": ENTITY_1}
assert len(calls_2) == 1
assert calls_2[0].data == {"entity_id": ENTITY_1, ATTR_SOUND_MODE: "dummy"}
async def test_multiple_same_state(hass):
"""Test that multiple states with same state gets calls."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
await async_reproduce_states(hass, [State(ENTITY_1, "on"), State(ENTITY_2, "on")])
await hass.async_block_till_done()
assert len(calls_1) == 2
# order is not guaranteed
assert any(call.data == {"entity_id": "media_player.test1"} for call in calls_1)
assert any(call.data == {"entity_id": "media_player.test2"} for call in calls_1)
async def test_multiple_different_state(hass):
"""Test that multiple states with different state gets calls."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
calls_2 = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
await async_reproduce_states(hass, [State(ENTITY_1, "on"), State(ENTITY_2, "off")])
await hass.async_block_till_done()
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": "media_player.test1"}
assert len(calls_2) == 1
assert calls_2[0].data == {"entity_id": "media_player.test2"}
async def test_state_with_context(hass):
"""Test that context is forwarded."""
calls = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
context = Context()
await async_reproduce_states(hass, [State(ENTITY_1, "on")], context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data == {"entity_id": ENTITY_1}
assert calls[0].context == context
async def test_attribute_no_state(hass):
"""Test that no state service call is made with none state."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON)
calls_2 = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF)
calls_3 = async_mock_service(hass, DOMAIN, SERVICE_SELECT_SOUND_MODE)
value = "dummy"
await async_reproduce_states(
hass, [State(ENTITY_1, None, {ATTR_SOUND_MODE: value})]
)
await hass.async_block_till_done()
assert len(calls_1) == 0
assert len(calls_2) == 0
assert len(calls_3) == 1
assert calls_3[0].data == {"entity_id": ENTITY_1, ATTR_SOUND_MODE: value}
@pytest.mark.parametrize(
"service,attribute",
[
(SERVICE_VOLUME_SET, ATTR_MEDIA_VOLUME_LEVEL),
(SERVICE_VOLUME_MUTE, ATTR_MEDIA_VOLUME_MUTED),
(SERVICE_SELECT_SOURCE, ATTR_INPUT_SOURCE),
(SERVICE_SELECT_SOUND_MODE, ATTR_SOUND_MODE),
],
)
async def test_attribute(hass, service, attribute):
"""Test that service call is made for each attribute."""
calls_1 = async_mock_service(hass, DOMAIN, service)
value = "dummy"
await async_reproduce_states(hass, [State(ENTITY_1, None, {attribute: value})])
await hass.async_block_till_done()
assert len(calls_1) == 1
assert calls_1[0].data == {"entity_id": ENTITY_1, attribute: value}
async def test_play_media(hass):
"""Test that no state service call is made with none state."""
calls_1 = async_mock_service(hass, DOMAIN, SERVICE_PLAY_MEDIA)
value_1 = "dummy_1"
value_2 = "dummy_2"
value_3 = "dummy_3"
await async_reproduce_states(
hass,
[
State(
ENTITY_1,
None,
{ATTR_MEDIA_CONTENT_TYPE: value_1, ATTR_MEDIA_CONTENT_ID: value_2},
)
],
)
await async_reproduce_states(
hass,
[
State(
ENTITY_1,
None,
{
ATTR_MEDIA_CONTENT_TYPE: value_1,
ATTR_MEDIA_CONTENT_ID: value_2,
ATTR_MEDIA_ENQUEUE: value_3,
},
)
],
)
await hass.async_block_till_done()
assert len(calls_1) == 2
assert calls_1[0].data == {
"entity_id": ENTITY_1,
ATTR_MEDIA_CONTENT_TYPE: value_1,
ATTR_MEDIA_CONTENT_ID: value_2,
}
assert calls_1[1].data == {
"entity_id": ENTITY_1,
ATTR_MEDIA_CONTENT_TYPE: value_1,
ATTR_MEDIA_CONTENT_ID: value_2,
ATTR_MEDIA_ENQUEUE: value_3,
}
|
from django.core import checks
from django.db import models
from django.db.models.aggregates import Sum
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from shop.conf import app_settings
from shop.models.product import Availability, BaseReserveProductMixin
from shop.exceptions import ProductNotAvailable
class AvailableProductMixin:
"""
Add this mixin class to the product models declaration, wanting to keep track on the
current amount of products in stock. In comparison to
:class:`shop.models.product.ReserveProductMixin`, this mixin does not reserve items in pending
    carts, at the risk of overselling. It is thus suited for products kept in the cart
for a long period.
The product class must implement a field named ``quantity`` accepting numerical values.
"""
def get_availability(self, request, **kwargs):
"""
Returns the current available quantity for this product.
        If other customers have pending carts containing this same product, the quantity
        is not adjusted. This may result in a situation where someone adds a product to the
        cart, but then is unable to purchase it, because someone else bought it in the meantime.
"""
def create_availability(**kwargs):
quantity = inventory_set.aggregate(sum=Sum('quantity'))['sum']
inventory = inventory_set.order_by('earliest').first()
earliest = inventory.earliest
latest = inventory_set.order_by('latest').last().latest
if latest < now + app_settings.SHOP_LIMITED_OFFER_PERIOD:
kwargs['limited_offer'] = True
return Availability(quantity=quantity, earliest=earliest, latest=latest, **kwargs)
now = timezone.now()
inventory_set = self.inventory_set.filter(earliest__lt=now, latest__gt=now, quantity__gt=0)
if inventory_set.exists():
return create_availability()
# check, if we can sell short
later = now + app_settings.SHOP_SELL_SHORT_PERIOD
inventory_set = self.inventory_set.filter(earliest__lt=later, latest__gt=now, quantity__gt=0)
if inventory_set.exists():
return create_availability(sell_short=True)
return Availability(quantity=0)
def deduct_from_stock(self, quantity, **kwargs):
"""
        Deduct the requested quantity from all available inventories.
"""
later = timezone.now() + app_settings.SHOP_SELL_SHORT_PERIOD
for inventory in self.inventory_set.filter(earliest__lt=later, quantity__gt=0).order_by('earliest', 'latest'):
reduce_by = min(inventory.quantity, quantity)
inventory.quantity -= reduce_by
inventory.save(update_fields=['quantity'])
quantity -= reduce_by
if quantity == 0:
break
else:
raise ProductNotAvailable(self)
def managed_availability(self):
return True
@classmethod
def check(cls, **kwargs):
errors = super().check(**kwargs)
for rel in cls._meta.related_objects:
if rel.name == 'inventory_set':
if rel.get_internal_type() != 'ForeignKey':
msg = "Field `product` in class inheriting from `BaseInventory` is not a valid foreign key pointing onto {}."
errors.append(checks.Error(msg.format(cls.__name__)))
break
else:
msg = "A model inheriting from `BaseInventory` must implement a foreign key `product` pointing onto {}."
errors.append(checks.Error(msg.format(cls.__name__)))
return errors
class ReserveProductMixin(BaseReserveProductMixin, AvailableProductMixin):
"""
Add this mixin class to the product models declaration, wanting to keep track on the
current amount of products in stock. In comparison to
:class:`shop.models.product.AvailableProductMixin`, this mixin reserves items in pending
carts, without the risk for overselling. On the other hand, the shop may run out of sellable
items, if customers keep products in the cart for a long period, without proceeding to checkout.
Use this mixin for products kept for a short period until checking out the cart, for
instance for ticket sales. Ensure that pending carts are flushed regularly.
"""
class BaseInventory(models.Model):
"""
This is a holder for the quantity of products items in stock.
It also keeps track of the period, during which that product is available.
The class implementing this abstract base class, must add a field named 'quantity'
of type IntegerField, DecimalField or FloatField.
"""
earliest = models.DateTimeField(
_("Available after"),
default=timezone.datetime.min.replace(tzinfo=timezone.get_current_timezone()),
db_index=True,
)
latest = models.DateTimeField(
_("Available before"),
default=timezone.datetime.max.replace(tzinfo=timezone.get_current_timezone()),
db_index=True,
)
class Meta:
abstract = True
verbose_name = _("Product Inventory")
verbose_name_plural = _("Product Inventories")
@classmethod
def check(cls, **kwargs):
from shop.models.cart import CartItemModel
errors = super().check(**kwargs)
for cart_field in CartItemModel._meta.fields:
if cart_field.attname == 'quantity':
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(CartItemModel.__name__)))
for field in cls._meta.fields:
if field.attname == 'quantity':
if field.get_internal_type() != cart_field.get_internal_type():
msg = "Field `{}.quantity` must be of same type as `{}.quantity`."
errors.append(checks.Error(msg.format(cls.__name__, CartItemModel.__name__)))
break
else:
msg = "Class `{}` must implement a field named `quantity`."
errors.append(checks.Error(msg.format(cls.__name__)))
for field in cls._meta.fields:
if field.attname == 'product_id':
if field.get_internal_type() == 'ForeignKey':
if field.related_query_name() != 'inventory_set':
msg = "Class `{}.product` must have a related_name 'inventory_set'."
errors.append(checks.Error(msg.format(cls.__name__)))
break
msg = "Class `{}.product` must be a foreign key pointing onto a Product model or variation of thereof."
errors.append(checks.Error(msg.format(cls.__name__)))
else:
msg = "Class `{}` must implement a foreign key pointing onto a Product model or variation of thereof."
errors.append(checks.Error(msg.format(cls.__name__)))
return errors
|
from mock import sentinel, create_autospec, patch, call, Mock
from pymongo.collection import Collection
from arctic import Arctic
from arctic.arctic import ArcticLibraryBinding
from arctic.store.bson_store import BSONStore
def test_enable_sharding():
arctic_lib = create_autospec(ArcticLibraryBinding)
arctic_lib.arctic = create_autospec(Arctic)
with patch('arctic.store.bson_store.enable_sharding', autospec=True) as enable_sharding:
arctic_lib.get_top_level_collection.return_value.database.create_collection.__name__ = 'some_name'
arctic_lib.get_top_level_collection.return_value.database.collection_names.__name__ = 'some_name'
bsons = BSONStore(arctic_lib)
bsons.enable_sharding()
# Check we always set the sharding to be hashed.
assert enable_sharding.call_args_list == [call(arctic_lib.arctic, arctic_lib.get_name(), hashed=True, key='_id')]
def test_find():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
collection.find.return_value = (doc for doc in [sentinel.document])
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
assert list(bsons.find(sentinel.filter)) == [sentinel.document]
assert collection.find.call_count == 1
assert collection.find.call_args_list == [call(sentinel.filter)]
def test_find_one():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
collection.find_one.return_value = sentinel.document
arctic_lib.get_top_level_collection.return_value = collection
ms = BSONStore(arctic_lib)
assert ms.find_one(sentinel.filter) == sentinel.document
assert collection.find_one.call_count == 1
assert collection.find_one.call_args_list == [call(sentinel.filter)]
def test_insert_one():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.insert_one(sentinel.document)
assert arctic_lib.check_quota.call_count == 1
assert collection.insert_one.call_count == 1
assert collection.insert_one.call_args_list == [call(sentinel.document)]
def test_insert_many():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.insert_many(sentinel.documents)
assert arctic_lib.check_quota.call_count == 1
assert collection.insert_many.call_count == 1
assert collection.insert_many.call_args_list == [call(sentinel.documents)]
def test_replace_one():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.replace_one(sentinel.filter, sentinel.replacement)
assert arctic_lib.check_quota.call_count == 1
assert collection.replace_one.call_count == 1
assert collection.replace_one.call_args_list == [call(sentinel.filter, sentinel.replacement)]
def test_update_one():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.update_one(sentinel.filter, sentinel.replacement)
assert arctic_lib.check_quota.call_count == 1
assert collection.update_one.call_count == 1
assert collection.update_one.call_args_list == [call(sentinel.filter, sentinel.replacement)]
def test_update_many():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.update_many(sentinel.filter, sentinel.replacements)
assert arctic_lib.check_quota.call_count == 1
assert collection.update_many.call_count == 1
assert collection.update_many.call_args_list == [call(sentinel.filter, sentinel.replacements)]
def test_find_one_and_replace():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.find_one_and_replace(sentinel.filter, sentinel.replacement)
assert arctic_lib.check_quota.call_count == 1
assert collection.find_one_and_replace.call_count == 1
assert collection.find_one_and_replace.call_args_list == [call(sentinel.filter, sentinel.replacement)]
def test_find_one_and_update():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
ms = BSONStore(arctic_lib)
ms.find_one_and_update(sentinel.filter, sentinel.update)
assert arctic_lib.check_quota.call_count == 1
assert collection.find_one_and_update.call_count == 1
assert collection.find_one_and_update.call_args_list == [call(sentinel.filter, sentinel.update)]
def test_find_one_and_delete():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
ms = BSONStore(arctic_lib)
ms.find_one_and_delete(sentinel.filter)
assert collection.find_one_and_delete.call_count == 1
assert collection.find_one_and_delete.call_args_list == [call(sentinel.filter)]
def test_bulk_write():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.bulk_write(sentinel.requests)
assert arctic_lib.check_quota.call_count == 1
assert collection.bulk_write.call_count == 1
assert collection.bulk_write.call_args_list == [call(sentinel.requests)]
def test_delete_one():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.delete_one(sentinel.filter)
assert collection.delete_one.call_count == 1
assert collection.delete_one.call_args_list == [call(sentinel.filter)]
def test_count():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True, count=Mock(), count_documents=Mock())
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.count(sentinel.filter)
assert collection.count.call_count + collection.count_documents.call_count == 1
assert collection.count.call_args_list == [call(filter=sentinel.filter)] or collection.count_documents.call_args_list == [call(filter=sentinel.filter)]
def test_distinct():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.distinct(sentinel.key)
assert collection.distinct.call_count == 1
assert collection.distinct.call_args_list == [call(sentinel.key)]
def test_delete_many():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.delete_many(sentinel.filter)
assert collection.delete_many.call_count == 1
assert collection.delete_many.call_args_list == [call(sentinel.filter)]
def test_create_index():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.create_index([(sentinel.path1, sentinel.order1), (sentinel.path2, sentinel.path2)])
assert collection.create_index.call_count == 1
assert collection.create_index.call_args_list == [call([(sentinel.path1, sentinel.order1), (sentinel.path2, sentinel.path2)])]
def test_drop_index():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.drop_index(sentinel.name)
assert collection.drop_index.call_count == 1
assert collection.drop_index.call_args_list == [call(sentinel.name)]
def test_index_information():
arctic_lib = create_autospec(ArcticLibraryBinding, instance=True)
collection = create_autospec(Collection, instance=True)
arctic_lib.get_top_level_collection.return_value = collection
bsons = BSONStore(arctic_lib)
bsons.index_information()
assert collection.index_information.call_count == 1
|
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from .vectors import SAN_CERT, WILDCARD_CERT, INTERMEDIATE_CERT
def test_cert_get_cn(client):
from lemur.common.defaults import common_name
assert common_name(SAN_CERT) == "san.example.org"
def test_cert_sub_alt_domains(client):
from lemur.common.defaults import domains
assert domains(INTERMEDIATE_CERT) == []
assert domains(SAN_CERT) == [
"san.example.org",
"san2.example.org",
"daniel-san.example.org",
]
def test_cert_is_san(client):
from lemur.common.defaults import san
assert san(SAN_CERT)
# Wildcard cert has just one SAN record that matches the common name
assert not san(WILDCARD_CERT)
def test_cert_is_wildcard(client):
from lemur.common.defaults import is_wildcard
assert is_wildcard(WILDCARD_CERT)
assert not is_wildcard(INTERMEDIATE_CERT)
def test_cert_bitstrength(client):
from lemur.common.defaults import bitstrength
assert bitstrength(INTERMEDIATE_CERT) == 2048
def test_cert_issuer(client):
from lemur.common.defaults import issuer
assert issuer(INTERMEDIATE_CERT) == "LemurTrustUnittestsRootCA2018"
def test_text_to_slug(client):
from lemur.common.defaults import text_to_slug
assert text_to_slug("test - string") == "test-string"
assert text_to_slug("test - string", "") == "teststring"
# Accented characters are decomposed
assert text_to_slug("föö bär") == "foo-bar"
# Melt away the Unicode Snowman
assert text_to_slug("\u2603") == ""
assert text_to_slug("\u2603test\u2603") == "test"
assert text_to_slug("snow\u2603man") == "snow-man"
assert text_to_slug("snow\u2603man", "") == "snowman"
# IDNA-encoded domain names should be kept as-is
assert (
text_to_slug("xn--i1b6eqas.xn--xmpl-loa9b3671b.com")
== "xn--i1b6eqas.xn--xmpl-loa9b3671b.com"
)
def test_create_name(client):
from lemur.common.defaults import certificate_name
from datetime import datetime
assert (
certificate_name(
"example.com",
"Example Inc,",
datetime(2015, 5, 7, 0, 0, 0),
datetime(2015, 5, 12, 0, 0, 0),
False,
)
== "example.com-ExampleInc-20150507-20150512"
)
assert (
certificate_name(
"example.com",
"Example Inc,",
datetime(2015, 5, 7, 0, 0, 0),
datetime(2015, 5, 12, 0, 0, 0),
True,
)
== "SAN-example.com-ExampleInc-20150507-20150512"
)
assert (
certificate_name(
"xn--mnchen-3ya.de",
"Vertrauenswürdig Autorität",
datetime(2015, 5, 7, 0, 0, 0),
datetime(2015, 5, 12, 0, 0, 0),
False,
)
== "xn--mnchen-3ya.de-VertrauenswurdigAutoritat-20150507-20150512"
)
assert (
certificate_name(
"selfie.example.org",
"<selfsigned>",
datetime(2015, 5, 7, 0, 0, 0),
datetime(2025, 5, 12, 13, 37, 0),
False,
)
== "selfie.example.org-selfsigned-20150507-20250512"
)
def test_issuer(client, cert_builder, issuer_private_key):
from lemur.common.defaults import issuer
assert issuer(INTERMEDIATE_CERT) == "LemurTrustUnittestsRootCA2018"
# We need to override builder's issuer name
cert_builder._issuer_name = None
# Unicode issuer name
cert = cert_builder.issuer_name(
x509.Name(
[x509.NameAttribute(x509.NameOID.COMMON_NAME, "Vertrauenswürdig Autorität")]
)
).sign(issuer_private_key, hashes.SHA256(), default_backend())
assert issuer(cert) == "VertrauenswurdigAutoritat"
# Fallback to 'Organization' field when issuer CN is missing
cert = cert_builder.issuer_name(
x509.Name(
[x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, "No Such Organization")]
)
).sign(issuer_private_key, hashes.SHA256(), default_backend())
assert issuer(cert) == "NoSuchOrganization"
# Missing issuer name
cert = cert_builder.issuer_name(x509.Name([])).sign(
issuer_private_key, hashes.SHA256(), default_backend()
)
assert issuer(cert) == "<unknown>"
def test_issuer_selfsigned(selfsigned_cert):
from lemur.common.defaults import issuer
assert issuer(selfsigned_cert) == "<selfsigned>"
|
import warnings
from .dataarray import DataArray
from .dataset import Dataset
class AccessorRegistrationWarning(Warning):
"""Warning for conflicts in accessor registration."""
class _CachedAccessor:
"""Custom property-like object (descriptor) for caching accessors."""
def __init__(self, name, accessor):
self._name = name
self._accessor = accessor
def __get__(self, obj, cls):
if obj is None:
# we're accessing the attribute of the class, i.e., Dataset.geo
return self._accessor
# Use the same dict as @pandas.util.cache_readonly.
# It must be explicitly declared in obj.__slots__.
try:
cache = obj._cache
except AttributeError:
cache = obj._cache = {}
try:
return cache[self._name]
except KeyError:
pass
try:
accessor_obj = self._accessor(obj)
except AttributeError:
# __getattr__ on data object will swallow any AttributeErrors
# raised when initializing the accessor, so we need to raise as
# something else (GH933):
raise RuntimeError("error initializing %r accessor." % self._name)
cache[self._name] = accessor_obj
return accessor_obj
def _register_accessor(name, cls):
def decorator(accessor):
if hasattr(cls, name):
warnings.warn(
"registration of accessor %r under name %r for type %r is "
"overriding a preexisting attribute with the same name."
% (accessor, name, cls),
AccessorRegistrationWarning,
stacklevel=2,
)
setattr(cls, name, _CachedAccessor(name, accessor))
return accessor
return decorator
def register_dataarray_accessor(name):
"""Register a custom accessor on xarray.DataArray objects.
Parameters
----------
name : str
Name under which the accessor should be registered. A warning is issued
if this name conflicts with a preexisting attribute.
See also
--------
register_dataset_accessor
"""
return _register_accessor(name, DataArray)
def register_dataset_accessor(name):
"""Register a custom property on xarray.Dataset objects.
Parameters
----------
name : str
Name under which the accessor should be registered. A warning is issued
if this name conflicts with a preexisting attribute.
Examples
--------
In your library code:
>>> @xr.register_dataset_accessor("geo")
... class GeoAccessor:
... def __init__(self, xarray_obj):
... self._obj = xarray_obj
...
... @property
... def center(self):
... # return the geographic center point of this dataset
... lon = self._obj.latitude
... lat = self._obj.longitude
... return (float(lon.mean()), float(lat.mean()))
...
... def plot(self):
... # plot this array's data on a map, e.g., using Cartopy
... pass
Back in an interactive IPython session:
>>> ds = xr.Dataset(
... {"longitude": np.linspace(0, 10), "latitude": np.linspace(0, 20)}
... )
>>> ds.geo.center
(10.0, 5.0)
>>> ds.geo.plot() # plots data on a map
See also
--------
register_dataarray_accessor
"""
return _register_accessor(name, Dataset)
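# A small illustrative sketch (not part of xarray): because _CachedAccessor
# stores the constructed accessor in obj._cache, the accessor class below is
# instantiated at most once per DataArray object.
@register_dataarray_accessor("stats")
class StatsAccessor:
    def __init__(self, xarray_obj):
        print("constructing StatsAccessor")  # printed only on first access
        self._obj = xarray_obj

    @property
    def span(self):
        # spread between the largest and smallest value in the array
        return float(self._obj.max() - self._obj.min())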
|
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.urls import reverse
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import TargetCheckParametrized
from weblate.checks.parser import multi_value_flag
from weblate.fonts.utils import check_render_size
FONT_PARAMS = (
("font-family", "sans"),
("font-weight", None),
("font-size", 10),
("font-spacing", 0),
)
IMAGE = '<a href="{0}" class="thumbnail"><img class="img-responsive" src="{0}" /></a>'
class MaxSizeCheck(TargetCheckParametrized):
"""Check for maximum size of rendered text."""
check_id = "max-size"
name = _("Maximum size of translation")
description = _("Translation rendered text should not exceed given size")
default_disabled = True
last_font = None
always_display = True
@property
def param_type(self):
return multi_value_flag(int, 1, 2)
def get_params(self, unit):
for name, default in FONT_PARAMS:
if unit.all_flags.has_value(name):
try:
yield unit.all_flags.get_value(name)
except KeyError:
yield default
else:
yield default
def load_font(self, project, language, name):
try:
group = project.fontgroup_set.get(name=name)
except ObjectDoesNotExist:
return "sans"
try:
override = group.fontoverride_set.get(language=language)
return f"{override.font.family} {override.font.style}"
except ObjectDoesNotExist:
return f"{group.font.family} {group.font.style}"
def check_target_params(self, sources, targets, unit, value):
if len(value) == 2:
width, lines = value
else:
width = value[0]
lines = 1
font_group, weight, size, spacing = self.get_params(unit)
font = self.last_font = self.load_font(
unit.translation.component.project, unit.translation.language, font_group
)
replace = self.get_replacement_function(unit)
return any(
(
not check_render_size(
font,
weight,
size,
spacing,
replace(target),
width,
lines,
self.get_cache_key(unit, i),
)
for i, target in enumerate(targets)
)
)
def get_description(self, check_obj):
url = reverse(
"render-check",
kwargs={"check_id": self.check_id, "unit_id": check_obj.unit_id},
)
return mark_safe(
"\n".join(
IMAGE.format(f"{url}?pos={i}")
for i in range(len(check_obj.unit.get_target_plurals()))
)
)
def render(self, request, unit):
try:
pos = int(request.GET.get("pos", "0"))
except ValueError:
pos = 0
key = self.get_cache_key(unit, pos)
result = cache.get(key)
if result is None:
self.check_target_unit(
unit.get_source_plurals(), unit.get_target_plurals(), unit
)
result = cache.get(key)
if result is None:
raise Http404("Invalid check")
response = HttpResponse(content_type="image/png")
response.write(result)
return response
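# A minimal sketch (not part of Weblate) of how the one- or two-value
# "max-size" flag parsed by param_type is interpreted in check_target_params():
# the first value is the maximal width in pixels, an optional second value is
# the allowed number of lines (defaulting to 1).
def _unpack_max_size(value):
    if len(value) == 2:  # e.g. max-size:500:5 parses to [500, 5]
        return value[0], value[1]
    return value[0], 1  # e.g. max-size:500 parses to [500]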
|
from typing import Dict, List
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv, entity_registry
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DOMAIN
CONDITION_TYPES = {"is_home", "is_not_home"}
CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(CONDITION_TYPES),
}
)
async def async_get_conditions(
hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
"""List device conditions for Device tracker devices."""
registry = await entity_registry.async_get_registry(hass)
conditions = []
# Get all the integration's entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain != DOMAIN:
continue
# Add conditions for each entity that belongs to this integration
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_home",
}
)
conditions.append(
{
CONF_CONDITION: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "is_not_home",
}
)
return conditions
@callback
def async_condition_from_config(
config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
"""Create a function to test a device condition."""
if config_validation:
config = CONDITION_SCHEMA(config)
if config[CONF_TYPE] == "is_home":
state = STATE_HOME
else:
state = STATE_NOT_HOME
@callback
def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
"""Test if an entity is a certain state."""
return condition.state(hass, config[ATTR_ENTITY_ID], state)
return test_is_state
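# Illustrative sketch: a condition configuration accepted by CONDITION_SCHEMA
# above (the device_id and entity_id values are hypothetical).
EXAMPLE_CONDITION = {
    CONF_CONDITION: "device",
    CONF_DEVICE_ID: "abcdef0123456789",
    CONF_DOMAIN: DOMAIN,
    CONF_ENTITY_ID: "device_tracker.example_phone",
    CONF_TYPE: "is_home",
}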
|
from datetime import datetime
import pytest
from homeassistant.components.zodiac.const import (
ATTR_ELEMENT,
ATTR_MODALITY,
DOMAIN,
ELEMENT_EARTH,
ELEMENT_FIRE,
ELEMENT_WATER,
MODALITY_CARDINAL,
MODALITY_FIXED,
SIGN_ARIES,
SIGN_SCORPIO,
SIGN_TAURUS,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import patch
DAY1 = datetime(2020, 11, 15, tzinfo=dt_util.UTC)
DAY2 = datetime(2020, 4, 20, tzinfo=dt_util.UTC)
DAY3 = datetime(2020, 4, 21, tzinfo=dt_util.UTC)
@pytest.mark.parametrize(
"now,sign,element,modality",
[
(DAY1, SIGN_SCORPIO, ELEMENT_WATER, MODALITY_FIXED),
(DAY2, SIGN_ARIES, ELEMENT_FIRE, MODALITY_CARDINAL),
(DAY3, SIGN_TAURUS, ELEMENT_EARTH, MODALITY_FIXED),
],
)
async def test_zodiac_day(hass, now, sign, element, modality):
"""Test the zodiac sensor."""
config = {DOMAIN: {}}
with patch("homeassistant.components.zodiac.sensor.utcnow", return_value=now):
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
state = hass.states.get("sensor.zodiac")
assert state
assert state.state == sign
assert state.attributes
assert state.attributes[ATTR_ELEMENT] == element
assert state.attributes[ATTR_MODALITY] == modality
|
import os
import pytest
from molecule import config
from molecule.driver import gce
# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing. Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(patched_config_validate, config_instance):
return gce.GCE(config_instance)
def test_config_private_member(_instance):
assert isinstance(_instance._config, config.Config)
def test_testinfra_options_property(_instance):
assert {
'connection': 'ansible',
'ansible-inventory': _instance._config.provisioner.inventory_file
} == _instance.testinfra_options
def test_name_property(_instance):
assert 'gce' == _instance.name
def test_options_property(_instance):
x = {'managed': True}
assert x == _instance.options
def test_login_cmd_template_property(_instance):
x = ('ssh {address} -l {user} -p {port} -i {identity_file} '
'-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no')
assert x == _instance.login_cmd_template
def test_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.safe_files
def test_default_safe_files_property(_instance):
x = [
os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml'),
]
assert x == _instance.default_safe_files
def test_delegated_property(_instance):
assert not _instance.delegated
def test_managed_property(_instance):
assert _instance.managed
def test_default_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.default_ssh_connection_options
def test_login_options(mocker, _instance):
m = mocker.patch('molecule.driver.gce.GCE._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
assert x == _instance.login_options('foo')
def test_ansible_connection_options(mocker, _instance):
m = mocker.patch('molecule.driver.gce.GCE._get_instance_config')
m.return_value = {
'instance': 'foo',
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
'identity_file': '/foo/bar',
}
x = {
'ansible_host':
'172.16.0.2',
'ansible_port':
22,
'ansible_user':
'cloud-user',
'ansible_private_key_file':
'/foo/bar',
'connection':
'ssh',
'ansible_ssh_common_args': ('-o UserKnownHostsFile=/dev/null '
'-o ControlMaster=auto '
'-o ControlPersist=60s '
'-o IdentitiesOnly=yes '
'-o StrictHostKeyChecking=no'),
}
assert x == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_instance_config(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = IOError
assert {} == _instance.ansible_connection_options('foo')
def test_ansible_connection_options_handles_missing_results_key(
mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.side_effect = StopIteration
assert {} == _instance.ansible_connection_options('foo')
def test_instance_config_property(_instance):
x = os.path.join(_instance._config.scenario.ephemeral_directory,
'instance_config.yml')
assert x == _instance.instance_config
def test_ssh_connection_options_property(_instance):
x = [
'-o UserKnownHostsFile=/dev/null',
'-o ControlMaster=auto',
'-o ControlPersist=60s',
'-o IdentitiesOnly=yes',
'-o StrictHostKeyChecking=no',
]
assert x == _instance.ssh_connection_options
def test_status(mocker, _instance):
result = _instance.status()
assert 2 == len(result)
assert result[0].instance_name == 'instance-1'
assert result[0].driver_name == 'gce'
assert result[0].provisioner_name == 'ansible'
assert result[0].scenario_name == 'default'
assert result[0].created == 'false'
assert result[0].converged == 'false'
assert result[1].instance_name == 'instance-2'
assert result[1].driver_name == 'gce'
assert result[1].provisioner_name == 'ansible'
assert result[1].scenario_name == 'default'
assert result[1].created == 'false'
assert result[1].converged == 'false'
def test_get_instance_config(mocker, _instance):
m = mocker.patch('molecule.util.safe_load_file')
m.return_value = [{
'instance': 'foo',
}, {
'instance': 'bar',
}]
x = {
'instance': 'foo',
}
assert x == _instance._get_instance_config('foo')
def test_created(_instance):
assert 'false' == _instance._created()
def test_converged(_instance):
assert 'false' == _instance._converged()
|
def parse_config(fname):
"""Parse a config file (like .ave and .cov files).
Parameters
----------
fname : str
Config file name.
Returns
-------
conditions : list of dict
Each condition is indexed by the event type.
A condition contains as keys::
tmin, tmax, basemin, basemax, name, grad, mag,
eeg, eog, ecg
"""
reject_params = read_reject_parameters(fname)
with open(fname, 'r') as f:
lines = f.readlines()
cat_ind = [i for i, x in enumerate(lines) if "category {" in x]
event_dict = dict()
for ind in cat_ind:
for k in range(ind + 1, ind + 7):
words = lines[k].split()
if len(words) >= 2:
key = words[0]
if key == 'event':
event = int(words[1])
break
else:
raise ValueError('Could not find event id.')
event_dict[event] = dict(**reject_params)
for k in range(ind + 1, ind + 7):
words = lines[k].split()
if len(words) >= 2:
key = words[0]
if key == 'name':
name = ' '.join(words[1:])
if name[0] == '"':
name = name[1:]
if name[-1] == '"':
name = name[:-1]
event_dict[event]['name'] = name
if key in ['tmin', 'tmax', 'basemin', 'basemax']:
event_dict[event][key] = float(words[1])
return event_dict
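# Illustrative sketch (assumed layout) of a "category" block that parse_config()
# can read from an .ave/.cov style file; the values here are made up:
#
#     category {
#         name    "Auditory left"
#         event   1
#         tmin    -0.2
#         tmax    0.5
#         basemin -0.2
#         basemax 0.0
#     }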
def read_reject_parameters(fname):
"""Read rejection parameters from .cov or .ave config file.
Parameters
----------
fname : str
Filename to read.
Returns
-------
params : dict
The rejection parameters.
"""
with open(fname, 'r') as f:
lines = f.readlines()
reject_names = ['gradReject', 'magReject', 'eegReject', 'eogReject',
'ecgReject']
reject_pynames = ['grad', 'mag', 'eeg', 'eog', 'ecg']
reject = dict()
for line in lines:
words = line.split()
if words[0] in reject_names:
reject[reject_pynames[reject_names.index(words[0])]] = \
float(words[1])
return reject
def read_flat_parameters(fname):
"""Read flat channel rejection parameters from .cov or .ave config file."""
with open(fname, 'r') as f:
lines = f.readlines()
reject_names = ['gradFlat', 'magFlat', 'eegFlat', 'eogFlat', 'ecgFlat']
reject_pynames = ['grad', 'mag', 'eeg', 'eog', 'ecg']
flat = dict()
for line in lines:
words = line.split()
if words[0] in reject_names:
flat[reject_pynames[reject_names.index(words[0])]] = \
float(words[1])
return flat
|
import json
from unittest import TestCase
import numpy as np
import pandas as pd
from scattertext import CorpusFromParsedDocuments
from scattertext import whitespace_nlp
from scattertext.CorpusFromPandas import CorpusFromPandas
from scattertext.DocsAndLabelsFromCorpus import DocsAndLabelsFromCorpus, \
DocsAndLabelsFromCorpusSample, CorpusShouldBeParsedCorpusException
from scattertext.test.test_corpusFromPandas import get_docs_categories
from scattertext.test.test_termDocMatrixFactory import build_hamlet_jz_corpus_with_meta
class TestDocsAndLabelsFromCorpus(TestCase):
@classmethod
def setUp(cls):
cls.categories, cls.documents = get_docs_categories()
cls.parsed_docs = []
for doc in cls.documents:
cls.parsed_docs.append(whitespace_nlp(doc))
cls.df = pd.DataFrame({'category': cls.categories,
'parsed': cls.parsed_docs,
'orig': [d.upper() for d in cls.documents]})
cls.parsed_corpus = CorpusFromParsedDocuments(cls.df, 'category', 'parsed').build()
cls.corpus = CorpusFromPandas(cls.df, 'category', 'orig', nlp=whitespace_nlp).build()
def test_categories(self):
for obj in [DocsAndLabelsFromCorpusSample(self.parsed_corpus, 1), DocsAndLabelsFromCorpus(self.parsed_corpus)]:
output = obj.get_labels_and_texts()
self.assertEqual(output['categories'], ['hamlet', 'jay-z/r. kelly', '???'])
metadata = ['element 0 0', 'element 1 0', 'element 2 0', 'element 3 0',
'element 4 1', 'element 5 1', 'element 6 1',
'element 7 1', 'element 8 1', 'element 9 2']
output = obj.get_labels_and_texts_and_meta(metadata)
self.assertEqual(output['categories'], ['hamlet', 'jay-z/r. kelly', '???'])
def test_main(self):
d = DocsAndLabelsFromCorpus(self.parsed_corpus)
output = d.get_labels_and_texts()
self.assertTrue('texts' in output)
self.assertTrue('labels' in output)
self.assertEqual(self.parsed_corpus._y.astype(int).tolist(),
list(output['labels']))
self.assertEqual(self.parsed_corpus.get_texts().tolist(),
list(output['texts']))
def test_extra_features(self):
corpus = build_hamlet_jz_corpus_with_meta()
d = DocsAndLabelsFromCorpus(corpus).use_non_text_features()
metadata = ['meta%s' % (i) for i in range(corpus.get_num_docs())]
output = d.get_labels_and_texts_and_meta(metadata)
extra_val = [{'cat3': 1, 'cat4': 2}, {'cat4': 2}, {'cat5': 1, 'cat3': 2},
{'cat9': 1, 'cat6': 2}, {'cat3': 1, 'cat4': 2},
{'cat1': 2, 'cat2': 1},
{'cat5': 1, 'cat2': 2},
{'cat3': 2, 'cat4': 1}]
extra_val = [{'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}, {'cat1': 2}]
output['labels'] = list(output['labels'])
self.assertEqual(output, {'categories': ['hamlet', 'jay-z/r. kelly'],
'texts': ["what art thou that usurp'st this time of night,",
'together with that fair and warlike form',
'in which the majesty of buried denmark',
'did sometimes march? by heaven i charge thee, speak!', 'halt! who goes there?',
'it is i sire tone from brooklyn.', 'well, speak up man what is it?',
'news from the east sire! the best of both worlds has returned!'],
'meta': ['meta0', 'meta1', 'meta2', 'meta3', 'meta4', 'meta5', 'meta6', 'meta7'],
'labels': [0, 0, 0, 0, 1, 1, 1, 1],
'extra': extra_val}
)
def test_alternative_text_field(self):
DocsAndLabelsFromCorpus(self.corpus)
DocsAndLabelsFromCorpus(self.parsed_corpus)
with self.assertRaises(CorpusShouldBeParsedCorpusException):
DocsAndLabelsFromCorpus(self.corpus, alternative_text_field='orig')
d = DocsAndLabelsFromCorpus(self.parsed_corpus, alternative_text_field='orig')
self.assertEqual(d.get_labels_and_texts()['texts'][0],
d.get_labels_and_texts()['texts'][0].upper())
d = DocsAndLabelsFromCorpus(self.parsed_corpus)
self.assertNotEqual(d.get_labels_and_texts()['texts'][0],
d.get_labels_and_texts()['texts'][0].upper())
d = DocsAndLabelsFromCorpusSample(self.parsed_corpus, 2, alternative_text_field='orig', seed=0)
texts = d.get_labels_and_texts()['texts']
self.assertEqual(texts[0],
texts[0].upper())
d = DocsAndLabelsFromCorpusSample(self.parsed_corpus, 2)
self.assertNotEqual(d.get_labels_and_texts()['texts'][0],
d.get_labels_and_texts()['texts'][0].upper())
def test_metadata(self):
d = DocsAndLabelsFromCorpus(self.parsed_corpus)
metadata = ['element 0 0', 'element 1 0', 'element 2 0', 'element 3 0',
'element 4 1', 'element 5 1', 'element 6 1',
'element 7 1', 'element 8 1', 'element 9 2']
output = d.get_labels_and_texts_and_meta(metadata)
self.assertTrue('texts' in output)
self.assertTrue('labels' in output)
self.assertTrue('meta' in output)
self.assertEqual(output['meta'], metadata)
def test_max_per_category(self):
docs_and_labels = DocsAndLabelsFromCorpusSample(self.parsed_corpus, max_per_category=2, seed=0)
metadata = np.array(['element 0 0', 'element 1 0', 'element 2 0', 'element 3 0',
'element 4 1', 'element 5 1', 'element 6 1',
'element 7 1', 'element 8 1', 'element 9 2'])
output = docs_and_labels.get_labels_and_texts_and_meta(metadata)
self.assertTrue('texts' in output)
self.assertTrue('labels' in output)
self.assertTrue('meta' in output)
self.assertTrue('extra' not in output)
d = {}
for text, lab, meta in zip(output['texts'], output['labels'], output['meta']):
d.setdefault(lab, []).append(text)
for lab, documents in d.items():
self.assertLessEqual(len(documents), 2)
json.dumps(d)
docs_and_labels = DocsAndLabelsFromCorpusSample(self.parsed_corpus, max_per_category=2)
output = docs_and_labels.get_labels_and_texts()
self.assertTrue('texts' in output)
self.assertTrue('labels' in output)
self.assertTrue('meta' not in output)
self.assertTrue('extra' not in output)
d = {}
for text, lab in zip(output['texts'], output['labels']):
d.setdefault(lab, []).append(text)
for lab, documents in d.items():
self.assertLessEqual(len(documents), 2)
json.dumps(d)
docs_and_labels = DocsAndLabelsFromCorpusSample(self.parsed_corpus, max_per_category=2).use_non_text_features()
output = docs_and_labels.get_labels_and_texts()
self.assertTrue('texts' in output)
self.assertTrue('labels' in output)
self.assertTrue('meta' not in output)
self.assertTrue('extra' in output)
d = {}
for text, lab in zip(output['texts'], output['labels']):
d.setdefault(lab, []).append(text)
for lab, documents in d.items():
self.assertLessEqual(len(documents), 2)
json.dumps(d)
|
from lark import Lark
from lark.parsers.earley_forest import TreeForestTransformer, handles_ambiguity, Discard
class CustomTransformer(TreeForestTransformer):
@handles_ambiguity
def sentence(self, trees):
return next(tree for tree in trees if tree.data == 'simple')
def simple(self, children):
children.append('.')
return self.tree_class('simple', children)
def adj(self, children):
raise Discard()
def __default_token__(self, token):
return token.capitalize()
grammar = """
sentence: noun verb noun -> simple
| noun verb "like" noun -> comparative
noun: adj? NOUN
verb: VERB
adj: ADJ
NOUN: "flies" | "bananas" | "fruit"
VERB: "like" | "flies"
ADJ: "fruit"
%import common.WS
%ignore WS
"""
parser = Lark(grammar, start='sentence', ambiguity='forest')
sentence = 'fruit flies like bananas'
forest = parser.parse(sentence)
tree = CustomTransformer(resolve_ambiguity=False).transform(forest)
print(tree.pretty())
# Output:
#
# simple
# noun Flies
# verb Like
# noun Bananas
# .
#
|
import logging
import requests
import voluptuous as vol
from homeassistant.components.notify import PLATFORM_SCHEMA, BaseNotificationService
from homeassistant.const import CONF_API_KEY, CONF_RECIPIENT, HTTP_ACCEPTED, HTTP_OK
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "clickatell"
BASE_API_URL = "https://platform.clickatell.com/messages/http/send"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_RECIPIENT): cv.string}
)
def get_service(hass, config, discovery_info=None):
"""Get the Clickatell notification service."""
return ClickatellNotificationService(config)
class ClickatellNotificationService(BaseNotificationService):
"""Implementation of a notification service for the Clickatell service."""
def __init__(self, config):
"""Initialize the service."""
self.api_key = config[CONF_API_KEY]
self.recipient = config[CONF_RECIPIENT]
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
data = {"apiKey": self.api_key, "to": self.recipient, "content": message}
resp = requests.get(BASE_API_URL, params=data, timeout=5)
if resp.status_code not in (HTTP_OK, HTTP_ACCEPTED):
_LOGGER.error("Error %s : %s", resp.status_code, resp.text)
|
import logging
from .module_exceptions import StpdFileError
class Stpd(object):
'''
STPD ammo formatter
'''
def __init__(self, ammo_factory):
self.af = ammo_factory
def __iter__(self):
for timestamp, marker, missile in self.af:
yield b"%s %s %s\n%s\n" % (str(len(missile)).encode('utf8'),
str(timestamp).encode('utf8'),
marker, missile)
class StpdReader(object):
'''Read missiles from stpd file'''
def __init__(self, filename):
self.filename = filename
self.log = logging.getLogger(__name__)
self.log.info("Loading stepped missiles from '%s'" % filename)
def __iter__(self):
def read_chunk_header(ammo_file):
chunk_header = ''
while not chunk_header:
line = ammo_file.readline().decode('utf8')
if not line:
return line # EOF
chunk_header = line.strip('\r\n')
return chunk_header
with open(self.filename, 'rb') as ammo_file:
chunk_header = read_chunk_header(ammo_file)
while chunk_header != '':
try:
fields = chunk_header.split()
chunk_size = int(fields[0])
timestamp = int(fields[1])
marker = fields[2] if len(fields) > 2 else ''
missile = ammo_file.read(chunk_size)
if len(missile) < chunk_size:
raise StpdFileError(
"Unexpected end of file: read %s bytes instead of %s"
% (len(missile), chunk_size))
yield (timestamp, missile, marker)
except (IndexError, ValueError) as e:
raise StpdFileError(
"Error while reading ammo file. Position: %s, header: '%s', original exception: %s"
% (ammo_file.tell(), chunk_header, e))
chunk_header = read_chunk_header(ammo_file)
self.log.info("Reached the end of stpd file")
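# Illustrative sketch of the chunk layout shared by Stpd and StpdReader:
# "<size> <timestamp> [marker]\n<missile bytes>\n", where <size> is the byte
# length of the missile that follows. A made-up example:
#
#     18 1000 my_marker
#     GET / HTTP/1.0\r\n\r\n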
|
import os
import time
import osmium as o
import requests
import shapely.wkb as wkblib
from src.config import RAW_LABEL_DATA_DIR
# http://docs.osmcode.org/pyosmium/latest/intro.html
# A global factory that creates WKB from a osmium geometry
wkbfab = o.geom.WKBFactory()
class WayMap():
"""Extract ways from OpenStreetMap PBF extracts."""
def __init__(self, extract_type='highway'):
"""The extract_type can be highway, footway, cycleway, or tennis."""
self.extracter = WayExtracter(extract_type)
def extract_files(self, file_list):
"""Extract ways from each PBF in file_list."""
for path in file_list:
self.run_extraction(path)
def run_extraction(self, file_path):
"""Extract ways from a PBF file at file_path."""
t0 = time.time()
self.extracter.apply_file(file_path, locations=True)
t1 = time.time()
elapsed = "{0:.1f}".format(t1 - t0)
print("EXTRACTED WAYS with locations from pbf file {}, took {}s".format(file_path, elapsed))
class WayExtracter(o.SimpleHandler):
"""Subclass of osmium SimpleHandler to extract ways from OpenStreetMap PBF files."""
def __init__(self, extract_type='highway'):
"""Extract ways from OpenStreetMap PBF files."""
o.SimpleHandler.__init__(self)
self.ways = []
self.way_dict = {}
self.types = []
self.extract_type = extract_type
def way(self, w):
"""Fire this callback when osmium parses a way in the PBF file."""
if self.extract_type == 'tennis':
self.extract_if_tennis_court(w)
else:
self.extract_way_type(w)
def extract_if_tennis_court(self, w):
"""Extract the way if it has a 'sport' tag with value 'tennis'."""
is_tennis = False
for tag in w.tags:
if tag.k == 'sport' and 'tennis' == tag.v:
is_tennis = True
if not is_tennis:
return
way_dict = {'uid': w.uid,
'ends_have_same_id': w.ends_have_same_id(),
'id': w.id,
'tags': []}
for tag in w.tags:
way_dict['tags'].append((tag.k, tag.v))
self.add_linestring(w, way_dict)
def extract_way_type(self, w):
"""Extract the way (w) if its type matches extract_type (highway, footway, or cycleway)."""
should_extract = False
way_type = None
for tag in w.tags:
if tag.k == self.extract_type:
way_type = tag.v
should_extract = True
# for roads analysis, don't extract ways that don't allow vehicle access
if self.extract_type == 'highway' and tag.k == 'motor_vehicle' and tag.v == 'no':
return
if not should_extract:
return
if way_type not in self.types:
self.types.append(way_type)
way_dict = {'visible': w.visible,
'deleted': w.deleted,
'uid': w.uid,
'way_type': way_type,
'ends_have_same_id': w.ends_have_same_id(),
'id': w.id,
'tags': []}
for tag in w.tags:
way_dict['tags'].append((tag.k, tag.v))
self.add_linestring(w, way_dict)
def add_linestring(self, w, way_dict):
"""Append the way_dict, with coords normalized to (lat,lon) instead of (lon,lat) pairs."""
try:
wkb = wkbfab.create_linestring(w)
except Exception:
# throws on single point ways
return
line = wkblib.loads(wkb, hex=True)
reverse_points = []
for point in list(line.coords):
reverse_points.append([point[1], point[0]])
way_dict['linestring'] = reverse_points
self.ways.append(way_dict)
def download_and_extract(file_urls_to_download, extract_type='highway'):
"""Download the PBFs in file_urls_to_download, and extract ways that match extract_type."""
file_paths = download_files(file_urls_to_download)
w = WayMap(extract_type=extract_type)
w.extract_files(file_paths)
return w
def download_file(url):
"""Download a large file in chunks and return its local path."""
local_filename = url.split('/')[-1]
full_local_filename = os.path.join(RAW_LABEL_DATA_DIR, local_filename)
r = requests.get(url, stream=True)
with open(full_local_filename, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
return full_local_filename
def download_files(url_list):
"""Download the PBF files in url_list, and return a list of local paths."""
paths = []
print("DOWNLOADING {} PBFs...".format(len(url_list)))
t0 = time.time()
for url in url_list:
local_filename = url.split('/')[-1]
full_local_filename = os.path.join(RAW_LABEL_DATA_DIR, local_filename)
if not os.path.exists(full_local_filename):
paths.append(download_file(url))
else:
paths.append(full_local_filename)
print("PBF {} already downloaded".format(full_local_filename))
if time.time() - t0 > 0.01:
print("downloads took {0:.1f}s".format(time.time() - t0))
return paths
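# Illustrative usage sketch (the extract URL is hypothetical):
#
#     way_map = download_and_extract(
#         ['https://download.geofabrik.de/some-extract.osm.pbf'],
#         extract_type='highway')
#     print(len(way_map.extracter.ways))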
|
import jax
import jax.config
jax.config.update("jax_enable_x64", True)
import jax.numpy as np
import tensornetwork
import tensornetwork.linalg.node_linalg
from tensornetwork import contractors
@jax.jit
def binary_mera_energy(hamiltonian, state, isometry, disentangler):
"""Computes the energy using a layer of uniform binary MERA.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom of the
MERA layer.
state: The 3-site reduced state (rank-6 tensor) defined at the top of the
MERA layer.
isometry: The isometry tensor (rank 3) of the binary MERA.
disentangler: The disentangler tensor (rank 4) of the binary MERA.
Returns:
The energy.
"""
backend = "jax"
out = []
for dirn in ('left', 'right'):
iso_l = tensornetwork.Node(isometry, backend=backend)
iso_c = tensornetwork.Node(isometry, backend=backend)
iso_r = tensornetwork.Node(isometry, backend=backend)
iso_l_con = tensornetwork.linalg.node_linalg.conj(iso_l)
iso_c_con = tensornetwork.linalg.node_linalg.conj(iso_c)
iso_r_con = tensornetwork.linalg.node_linalg.conj(iso_r)
op = tensornetwork.Node(hamiltonian, backend=backend)
rho = tensornetwork.Node(state, backend=backend)
un_l = tensornetwork.Node(disentangler, backend=backend)
un_l_con = tensornetwork.linalg.node_linalg.conj(un_l)
un_r = tensornetwork.Node(disentangler, backend=backend)
un_r_con = tensornetwork.linalg.node_linalg.conj(un_r)
tensornetwork.connect(iso_l[2], rho[0])
tensornetwork.connect(iso_c[2], rho[1])
tensornetwork.connect(iso_r[2], rho[2])
tensornetwork.connect(iso_l[0], iso_l_con[0])
tensornetwork.connect(iso_l[1], un_l[2])
tensornetwork.connect(iso_c[0], un_l[3])
tensornetwork.connect(iso_c[1], un_r[2])
tensornetwork.connect(iso_r[0], un_r[3])
tensornetwork.connect(iso_r[1], iso_r_con[1])
if dirn == 'right':
tensornetwork.connect(un_l[0], un_l_con[0])
tensornetwork.connect(un_l[1], op[3])
tensornetwork.connect(un_r[0], op[4])
tensornetwork.connect(un_r[1], op[5])
tensornetwork.connect(op[0], un_l_con[1])
tensornetwork.connect(op[1], un_r_con[0])
tensornetwork.connect(op[2], un_r_con[1])
elif dirn == 'left':
tensornetwork.connect(un_l[0], op[3])
tensornetwork.connect(un_l[1], op[4])
tensornetwork.connect(un_r[0], op[5])
tensornetwork.connect(un_r[1], un_r_con[1])
tensornetwork.connect(op[0], un_l_con[0])
tensornetwork.connect(op[1], un_l_con[1])
tensornetwork.connect(op[2], un_r_con[0])
tensornetwork.connect(un_l_con[2], iso_l_con[1])
tensornetwork.connect(un_l_con[3], iso_c_con[0])
tensornetwork.connect(un_r_con[2], iso_c_con[1])
tensornetwork.connect(un_r_con[3], iso_r_con[0])
tensornetwork.connect(iso_l_con[2], rho[3])
tensornetwork.connect(iso_c_con[2], rho[4])
tensornetwork.connect(iso_r_con[2], rho[5])
# FIXME: Check that this is giving us a good path!
out.append(
contractors.branch(tensornetwork.reachable(rho),
nbranch=2).get_tensor())
return 0.5 * sum(out)
descend = jax.jit(jax.grad(binary_mera_energy, argnums=0, holomorphic=True))
"""Descending super-operator.
Args:
hamiltonian: A dummy rank-6 tensor not involved in the computation.
state: The 3-site reduced state to be descended (rank-6 tensor).
isometry: The isometry tensor of the binary MERA.
disentangler: The disentangler tensor of the binary MERA.
Returns:
The descended state (spatially averaged).
"""
ascend = jax.jit(jax.grad(binary_mera_energy, argnums=1, holomorphic=True))
"""Ascending super-operator.
Args:
operator: The operator to be ascended (rank-6 tensor).
state: A dummy rank-6 tensor not involved in the computation.
isometry: The isometry tensor of the binary MERA.
disentangler: The disentangler tensor of the binary MERA.
Returns:
The ascended operator (spatially averaged).
"""
# NOTE: Not a holomorphic function, but a real-valued loss function.
env_iso = jax.jit(jax.grad(binary_mera_energy, argnums=2, holomorphic=True))
"""Isometry environment tensor.
In other words: The derivative of the `binary_mera_energy()` with respect to
the isometry tensor.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom of the
MERA layer.
state: The 3-site reduced state (rank-6 tensor) defined at the top of the
MERA layer.
isometry: A dummy isometry tensor (rank 3) not used in the computation.
disentangler: The disentangler tensor (rank 4) of the binary MERA.
Returns:
The environment tensor of the isometry, including all contributions.
"""
# NOTE: Not a holomorphic function, but a real-valued loss function.
env_dis = jax.jit(jax.grad(binary_mera_energy, argnums=3, holomorphic=True))
"""Disentangler environment.
In other words: The derivative of the `binary_mera_energy()` with respect to
the disentangler tensor.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom of the
MERA layer.
state: The 3-site reduced state (rank-6 tensor) defined at the top of the
MERA layer.
isometry: The isometry tensor (rank 3) of the binary MERA.
disentangler: A dummy disentangler (rank 4) not used in the computation.
Returns:
The environment tensor of the disentangler, including all contributions.
"""
@jax.jit
def update_iso(hamiltonian, state, isometry, disentangler):
"""Updates the isometry with the aim of reducing the energy.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom of the
MERA layer.
state: The 3-site reduced state (rank-6 tensor) defined at the top of the
MERA layer.
isometry: The isometry tensor (rank 3) of the binary MERA.
disentangler: The disentangler tensor (rank 4) of the binary MERA.
Returns:
The updated isometry.
"""
env = env_iso(hamiltonian, state, isometry, disentangler)
nenv = tensornetwork.Node(env, axis_names=["l", "r", "t"], backend="jax")
output_edges = [nenv["l"], nenv["r"], nenv["t"]]
nu, _, nv, _ = tensornetwork.split_node_full_svd(
nenv, [nenv["l"], nenv["r"]], [nenv["t"]],
left_edge_name="s1",
right_edge_name="s2")
nu["s1"].disconnect()
nv["s2"].disconnect()
tensornetwork.connect(nu["s1"], nv["s2"])
nres = tensornetwork.contract_between(nu, nv, output_edge_order=output_edges)
return np.conj(nres.get_tensor())
@jax.jit
def update_dis(hamiltonian, state, isometry, disentangler):
"""Updates the disentangler with the aim of reducing the energy.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom of the
MERA layer.
state: The 3-site reduced state (rank-6 tensor) defined at the top of the
MERA layer.
isometry: The isometry tensor (rank 3) of the binary MERA.
disentangler: The disentangler tensor (rank 4) of the binary MERA.
Returns:
The updated disentangler.
"""
env = env_dis(hamiltonian, state, isometry, disentangler)
nenv = tensornetwork.Node(
env, axis_names=["bl", "br", "tl", "tr"], backend="jax")
output_edges = [nenv["bl"], nenv["br"], nenv["tl"], nenv["tr"]]
nu, _, nv, _ = tensornetwork.split_node_full_svd(
nenv, [nenv["bl"], nenv["br"]], [nenv["tl"], nenv["tr"]],
left_edge_name="s1",
right_edge_name="s2")
nu["s1"].disconnect()
nv["s2"].disconnect()
tensornetwork.connect(nu["s1"], nv["s2"])
nres = tensornetwork.contract_between(nu, nv, output_edge_order=output_edges)
return np.conj(nres.get_tensor())
def shift_ham(hamiltonian, shift=None):
"""Applies a shift to a hamiltonian.
Args:
hamiltonian: The hamiltonian tensor (rank 6).
shift: The amount by which to shift. If `None`, shifts so that the local
term is negative semi-definite.
Returns:
The shifted Hamiltonian.
"""
hmat = np.reshape(hamiltonian, (2**3, -1))
if shift is None:
shift = np.amax(np.linalg.eigh(hmat)[0])
hmat -= shift * np.eye(2**3)
return np.reshape(hmat, [2] * 6)
def optimize_linear(hamiltonian, state, isometry, disentangler, num_itr):
"""Optimize a scale-invariant MERA using linearized updates.
The MERA is assumed to be completely uniform and scale-invariant, consisting
of a single isometry and disentangler.
Args:
hamiltonian: The hamiltonian (rank-6 tensor) defined at the bottom.
state: An initial 3-site reduced state (rank-6 tensor) to initialize the
descending fixed-point computation.
isometry: The isometry tensor (rank 3) of the binary MERA.
disentangler: The disentangler tensor (rank 4) of the binary MERA.
num_itr: The number of linearized update iterations to perform.
Returns:
state: The approximate descending fixed-point reduced state (rank 6).
isometry: The optimized isometry.
disentangler: The optimized disentangler.
"""
h_shifted = shift_ham(hamiltonian)
for i in range(num_itr):
isometry = update_iso(h_shifted, state, isometry, disentangler)
disentangler = update_dis(h_shifted, state, isometry, disentangler)
for _ in range(10):
state = descend(hamiltonian, state, isometry, disentangler)
en = binary_mera_energy(hamiltonian, state, isometry, disentangler)
print("{}:\t{}".format(i, en))
return state, isometry, disentangler
def ham_ising():
"""Dimension 2 "Ising" Hamiltonian.
This version from Evenbly & White, Phys. Rev. Lett. 116, 140403
(2016).
"""
E = np.array([[1, 0], [0, 1]])
X = np.array([[0, 1], [1, 0]])
Z = np.array([[1, 0], [0, -1]])
hmat = np.kron(X, np.kron(Z, X))
hmat -= 0.5 * (np.kron(np.kron(X, X), E) + np.kron(E, np.kron(X, X)))
return np.reshape(hmat, [2] * 6)
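# Quick illustrative check (not part of the original script): after shift_ham()
# the local term is negative semi-definite, i.e. all of its eigenvalues are <= 0.
#
#     hmat_shifted = np.reshape(shift_ham(ham_ising()), (2**3, -1))
#     assert np.all(np.linalg.eigvalsh(hmat_shifted) <= 1e-12)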
if __name__ == '__main__':
# Starting from a very simple initial MERA, optimize for the critical Ising
# model.
h = ham_ising()
s = np.reshape(np.eye(2**3), [2] * 6) / 2**3
dis = np.reshape(np.eye(2**2), [2] * 4)
iso = dis[:, :, :, 0]
s, iso, dis = optimize_linear(h, s, iso, dis, 100)
|
import logging
from typing import Optional
from aiohttp import ClientResponseError
from incomfortclient import Gateway as InComfortGateway
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DOMAIN = "incomfort"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Inclusive(CONF_USERNAME, "credentials"): cv.string,
vol.Inclusive(CONF_PASSWORD, "credentials"): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, hass_config):
"""Create an Intergas InComfort/Intouch system."""
incomfort_data = hass.data[DOMAIN] = {}
credentials = dict(hass_config[DOMAIN])
hostname = credentials.pop(CONF_HOST)
client = incomfort_data["client"] = InComfortGateway(
hostname, **credentials, session=async_get_clientsession(hass)
)
try:
heaters = incomfort_data["heaters"] = list(await client.heaters)
except ClientResponseError as err:
_LOGGER.warning("Setup failed, check your configuration, message is: %s", err)
return False
for heater in heaters:
await heater.update()
for platform in ["water_heater", "binary_sensor", "sensor", "climate"]:
hass.async_create_task(
async_load_platform(hass, platform, DOMAIN, {}, hass_config)
)
return True
class IncomfortEntity(Entity):
"""Base class for all InComfort entities."""
def __init__(self) -> None:
"""Initialize the class."""
self._unique_id = self._name = None
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
return self._unique_id
@property
def name(self) -> Optional[str]:
"""Return the name of the sensor."""
return self._name
class IncomfortChild(IncomfortEntity):
"""Base class for all InComfort entities (excluding the boiler)."""
async def async_added_to_hass(self) -> None:
"""Set up a listener when this entity is added to HA."""
self.async_on_remove(async_dispatcher_connect(self.hass, DOMAIN, self._refresh))
@callback
def _refresh(self) -> None:
self.async_schedule_update_ha_state(force_refresh=True)
@property
def should_poll(self) -> bool:
"""Return False as this device should never be polled."""
return False
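# Illustrative sketch: a configuration dict accepted by CONFIG_SCHEMA above
# (the host and credentials are hypothetical placeholders).
EXAMPLE_CONFIG = {
    DOMAIN: {
        CONF_HOST: "192.168.1.10",
        CONF_USERNAME: "admin",
        CONF_PASSWORD: "secret",
    }
}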
|
import os
from datetime import timedelta
import arrow
from moto import mock_ses
from lemur.certificates.schemas import certificate_notification_output_schema
from lemur.plugins.lemur_email.plugin import render_html
from lemur.tests.factories import CertificateFactory
from lemur.tests.test_messaging import verify_sender_email
dir_path = os.path.dirname(os.path.realpath(__file__))
def get_options():
return [
{"name": "interval", "value": 10},
{"name": "unit", "value": "days"},
{"name": "recipients", "value": "[email protected],[email protected]"},
]
def test_render_expiration(certificate, endpoint):
new_cert = CertificateFactory()
new_cert.replaces.append(certificate)
assert render_html("expiration", get_options(), [certificate_notification_output_schema.dump(certificate).data])
def test_render_rotation(certificate, endpoint):
certificate.endpoints.append(endpoint)
assert render_html("rotation", get_options(), certificate_notification_output_schema.dump(certificate).data)
def test_render_rotation_failure(pending_certificate):
assert render_html("failed", get_options(), certificate_notification_output_schema.dump(pending_certificate).data)
@mock_ses
def test_send_expiration_notification():
from lemur.notifications.messaging import send_expiration_notifications
from lemur.tests.factories import CertificateFactory
from lemur.tests.factories import NotificationFactory
now = arrow.utcnow()
in_ten_days = now + timedelta(days=10, hours=1) # a bit more than 10 days since we'll check in the future
certificate = CertificateFactory()
notification = NotificationFactory(plugin_name="email-notification")
certificate.not_after = in_ten_days
certificate.notifications.append(notification)
certificate.notifications[0].options = get_options()
verify_sender_email()
assert send_expiration_notifications([]) == (4, 0) # owner (1), recipients (2), and security (1)
@mock_ses
def test_send_rotation_notification(endpoint, source_plugin):
from lemur.notifications.messaging import send_rotation_notification
from lemur.deployment.service import rotate_certificate
new_certificate = CertificateFactory()
rotate_certificate(endpoint, new_certificate)
assert endpoint.certificate == new_certificate
verify_sender_email()
assert send_rotation_notification(new_certificate)
@mock_ses
def test_send_pending_failure_notification(user, pending_certificate, async_issuer_plugin):
from lemur.notifications.messaging import send_pending_failure_notification
verify_sender_email()
assert send_pending_failure_notification(pending_certificate)
assert send_pending_failure_notification(pending_certificate, True, True)
assert send_pending_failure_notification(pending_certificate, True, False)
assert send_pending_failure_notification(pending_certificate, False, True)
assert send_pending_failure_notification(pending_certificate, False, False)
def test_get_recipients(certificate, endpoint):
from lemur.plugins.lemur_email.plugin import EmailNotificationPlugin
options = [{"name": "recipients", "value": "[email protected],[email protected]"}]
two_emails = sorted(["[email protected]", "[email protected]"])
assert sorted(EmailNotificationPlugin.get_recipients(options, [])) == two_emails
assert sorted(EmailNotificationPlugin.get_recipients(options, ["[email protected]"])) == two_emails
three_emails = sorted(["[email protected]", "[email protected]", "[email protected]"])
assert sorted(EmailNotificationPlugin.get_recipients(options, ["[email protected]"])) == three_emails
assert sorted(EmailNotificationPlugin.get_recipients(options, ["[email protected]", "[email protected]",
"[email protected]"])) == three_emails
|
import pytest
from yandextank.validator.docs_gen import RSTRenderer, format_option
@pytest.mark.parametrize('option_schema, expected', [
({'report_file': {
'description': 'path to file to store autostop report',
'type': 'string',
'default': 'autostop_report.txt'}},
r"""``report_file`` (string)
------------------------
*\- path to file to store autostop report. Default:* ``autostop_report.txt``"""),
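    # 'allowed' without 'values_description': the permitted values collapse into a single ':one of:' line.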
({'gun_type': {
'type': 'string',
'description': 'gun type',
'allowed': ['custom', 'http', 'scenario', 'ultimate'],
# 'values_description': {
# 'custom': 'custom gun', 'http': 'http gun', 'scenario': 'scenario gun', 'ultimate': 'ultimate gun'
# },
'required': 'true'}},
r"""``gun_type`` (string)
---------------------
*\- gun type.* **Required.**
:one of: [``custom``, ``http``, ``scenario``, ``ultimate``]"""),
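    # 'allowed' with 'values_description': each permitted value gets its own annotated entry.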
({'gun_type': {
'type': 'string',
'description': 'gun type',
'allowed': ['custom', 'http', 'scenario', 'ultimate'],
'values_description': {
'custom': 'custom gun', 'http': 'http gun', 'scenario': 'scenario gun', 'ultimate': 'ultimate gun'
},
'required': 'true'}},
r"""``gun_type`` (string)
---------------------
*\- gun type.* **Required.**
:one of:
:``custom``: custom gun
:``http``: http gun
:``scenario``: scenario gun
:``ultimate``: ultimate gun"""),
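    # A dict option is rendered recursively: every sub-option keeps its own description, allowed values and examples.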
({"load_profile": {
"type": "dict",
'description': 'specify parameters of your load',
'schema': {
'load_type': {
'type': 'string',
'required': 'true',
'description': 'choose your load type',
'allowed': ['rps', 'instances', 'stpd_file'],
'values_description': {
'instances': 'fix number of instances',
'rps': 'fix rps rate',
'stpd_file': 'use ready schedule file'}
},
'schedule': {
'type': 'string',
'required': True,
'description': 'load schedule or path to stpd file',
'examples': {
'line(100,200,10m)': 'linear growth from 100 to 200 instances/rps during 10 minutes',
'const(200,90s)': 'constant load of 200 instances/rps during 90s',
'test_dir/test_backend.stpd': 'path to ready schedule file'}
}
},
'required': True}},
r"""``load_profile`` (dict)
-----------------------
*\- specify parameters of your load.* **Required.**
:``load_type`` (string):
*\- choose your load type.* **Required.**
:one of:
:``instances``: fix number of instances
:``rps``: fix rps rate
:``stpd_file``: use ready schedule file
:``schedule`` (string):
*\- load schedule or path to stpd file.* **Required.**
:examples:
``const(200,90s)``
constant load of 200 instances/rps during 90s
``line(100,200,10m)``
linear growth from 100 to 200 instances/rps during 10 minutes
``test_dir/test_backend.stpd``
path to ready schedule file"""), # noqa: W293
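    # 'anyof' type alternatives are merged into "list or string"; extra keys such as 'tutorial_link' become fields.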
({'lock_targets': {
'default': 'auto',
'description': 'targets to lock',
'values_description': {
'auto': 'automatically identify target host',
'[ya.ru, ...]': 'list of targets to lock'
},
'anyof': [
{'type': 'list'},
{'type': 'string', 'allowed': ['auto']}
],
'tutorial_link': 'http://yandextank.readthedocs.io'}},
r"""``lock_targets`` (list or string)
---------------------------------
*\- targets to lock. Default:* ``auto``
:one of:
:``[ya.ru, ...]``: list of targets to lock
:``auto``: automatically identify target host
:tutorial_link:
http://yandextank.readthedocs.io"""),
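    # A list option documents its element schema as '[list_element]' together with the element's examples.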
({'autostop': {
'description': 'list of autostop constraints',
'type': 'list',
'schema': {
'type': 'string',
'description': 'autostop constraint',
'examples': {'http(4xx,50%,5)': 'stop when rate of 4xx http codes is 50% or more during 5 seconds'}
},
'default': []}},
r"""``autostop`` (list of string)
-----------------------------
*\- list of autostop constraints. Default:* ``[]``
:[list_element] (string):
*\- autostop constraint.*
:examples:
``http(4xx,50%,5)``
stop when rate of 4xx http codes is 50% or more during 5 seconds""") # noqa: W293
])
def test_format_option(option_schema, expected):
assert format_option(option_schema, RSTRenderer) == expected
|
async def async_setup(hass, config):
"""Set up the Mill platform."""
return True
async def async_setup_entry(hass, entry):
"""Set up the Mill heater."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, "climate")
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_forward_entry_unload(
config_entry, "climate"
)
return unload_ok
|